Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
144 changes: 144 additions & 0 deletions lib/test_fileDownloadAbort.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,144 @@
import os
import requests
import random

__doc__ = """ Download a file and abort before the end of the transfer.
"""

from smashbox.utilities import *
from smashbox.utilities.hash_files import *

# Test parameters (overridable from the smashbox config file).
filesize = config.get('fileDownloadAbort_filesize', 900000000)
iterations = config.get('fileDownloadAbort_iterations', 25)

# The config may supply the size as an arithmetic expression string
# (e.g. "900*1000*1000"); evaluate it to a number in that case.
# NOTE(review): eval on config input executes arbitrary code -- acceptable
# only because the smashbox config file is trusted local input.
if isinstance(filesize, str):
    filesize = eval(filesize)

testsets = [
    { 'fileDownloadAbort_filesize': 900000000,
      'fileDownloadAbort_iterations': 25
    }
]

@add_worker
def main(step):
    """Sync a large file to the server, share it, then repeatedly download it
    through each of its access URLs (public webdav, link share, plain webdav),
    with and without Range headers, aborting some transfers mid-stream.
    After each case the server log is scraped and must contain no errors.
    """

    step(1, 'Preparation')

    # cleanup server files from previous run
    reset_owncloud_account(num_test_users=1)
    check_users(1)

    # cleanup all local files for the test
    reset_rundir()

    d = make_workdir()
    run_ocsync(d, user_num=1)

    step(2, 'Add a file: filesize=%s' % filesize)

    create_hashfile(d, filemask='BLOB.DAT', size=filesize)
    list_files(d)
    run_ocsync(d, user_num=1)
    list_files(d)

    # start from a clean server log so later scraping only sees this test's entries
    reset_server_log_file(True)

    step(3, 'Create link share')
    user1 = "%s%i" % (config.oc_account_name, 1)

    oc_api = get_oc_api()
    oc_api.login(user1, config.oc_account_password)

    share = oc_api.share_file_with_link('BLOB.DAT', perms=31)

    # The three access paths to exercise for every download case.
    test_urls = [
        {
            'url': oc_public_webdav_url(),
            'auth': (share.get_token(), ''),
            'description': 'Public webdav URL'
        },
        {
            'url': share.get_link() + '/download',
            'auth': None,
            'description': 'Link share URL'
        },
        {
            'url': os.path.join(oc_webdav_url(), 'BLOB.DAT'),
            'auth': (user1, config.oc_account_password),
            'description': 'Webdav URL'
        },
    ]

    stepCount = 4

    for test_url in test_urls:
        cases = [
            {'use_range': False, 'abort': True, 'description': 'download abort'},
            {'use_range': True, 'abort': True, 'description': 'range download abort'},
            {'use_range': False, 'abort': False, 'description': 'full download'},
            {'use_range': True, 'abort': False, 'description': 'range download'},
        ]

        for case in cases:
            step(stepCount, test_url['description'] + ' ' + case['description'])
            # was range(1, iterations): off-by-one that ran only iterations-1 downloads
            for i in range(1, iterations + 1):
                test_download(i, test_url['url'], test_url['auth'], case['use_range'], case['abort'])
            check_and_reset_logs()
            stepCount += 1

def check_and_reset_logs():
    """Pull the server log into a fresh work dir, fail the test if any
    errors were reported in it, then truncate it for the next case."""
    workdir = make_workdir()
    scrape_log_file(workdir, True)
    reset_server_log_file(True)

    if reported_errors:
        raise AssertionError('Errors found in log, aborting')

def test_download(i, url, auth=None, use_range=False, abort=False):
    """Perform one download of the shared file and validate the status code.

    :param i: iteration number, used only for log messages
    :param url: download URL to fetch
    :param auth: optional (user, password) tuple for basic auth
    :param use_range: request a random byte range instead of the full file
    :param abort: stop reading (and close the connection) mid-transfer

    Expects HTTP 206 for range requests and 200 otherwise.
    """

    if use_range:
        # Leave at least 16384 bytes between start and end so that a valid
        # abort point strictly inside the range always exists.
        # (The previous bounds -- randint(8192, filesize) for the start --
        # could make the low end of the next randint exceed its high end
        # and raise ValueError on unlucky draws.)
        range_start = random.randint(8192, filesize - 24576)
        range_end = random.randint(range_start + 16384, filesize - 8192)
    else:
        range_start = 0
        range_end = filesize

    if abort:
        # abort somewhere strictly inside the requested range
        break_bytes = random.randint(range_start + 8192, range_end - 8192)

    text = 'Download iteration %i' % i

    headers = {}
    if use_range:
        headers['Range'] = 'bytes=%i-%i' % (range_start, range_end)
        text += ' with range %s' % headers['Range']

    if abort:
        text += ' aborting after %i bytes' % break_bytes

    text += ' of total size %i ' % filesize

    text += ' url %s' % url

    logger.info(text)

    res = requests.get(url, auth=auth, stream=True, headers=headers)
    try:
        if use_range:
            expected_status_code = 206
        else:
            expected_status_code = 200

        error_check(res.status_code == expected_status_code, 'Could not download, status code %i' % res.status_code)

        read_bytes = 0
        for chunk in res.iter_content(8192):
            read_bytes += len(chunk)
            if abort and read_bytes >= break_bytes:
                break
    finally:
        # release the connection even when error_check raises above
        res.close()

74 changes: 54 additions & 20 deletions python/smashbox/utilities/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import datetime
import subprocess
import time
import requests

# Utilities to be used in the test-cases.
from smashbox.utilities.version import version_compare
Expand Down Expand Up @@ -317,6 +318,28 @@ def oc_webdav_url(protocol='http',remote_folder="",user_num=None,webdav_endpoint

return protocol + '://' + username + ':' + password + '@' + config.oc_server + '/' + remote_path

def oc_public_webdav_url(protocol='http', remote_folder="", token='', password=''):
    """Build the URL of the public-share WebDAV endpoint
    (public.php/webdav), optionally embedding token/password credentials.
    """

    scheme = protocol
    if config.oc_ssl_enabled:
        scheme += 's'

    # strip-off any leading / characters to prevent 1) abspath result from the join below, 2) double // and alike...
    folder = remote_folder.lstrip('/')

    remote_path = os.path.join(config.oc_root, 'public.php/webdav', folder)

    # Credentials part of the URL; token and password are appended
    # independently, mirroring the "token[:password]@" userinfo form.
    userinfo = ''
    if token:
        userinfo = token
    if password:
        userinfo += ':' + password
    if userinfo:
        userinfo += '@'

    return scheme + '://' + userinfo + config.oc_server + '/' + remote_path

# this is a local variable for each worker that keeps track of the repeat count for the current step
ocsync_cnt = {}
Expand Down Expand Up @@ -577,46 +600,53 @@ def fatal_check(expr,message=""):

# ###### Server Log File Scraping ############

def reset_server_log_file():
def reset_server_log_file(force=False):
    """Delete the server's owncloud.log so the run starts from a clean log.

    :param force: when True, delete regardless of the oc_check_server_log
        config option (which may be absent from the config entirely)
    """

    if not force:
        try:
            log_checking_enabled = config.oc_check_server_log
        except AttributeError:  # allow this option not to be defined at all
            return
        if not log_checking_enabled:
            return

    logger.info('Removing existing server log file')
    runcmd('%s rm -rf %s/owncloud.log' % (config.oc_server_shell_cmd, config.oc_server_datadirectory))



def scrape_log_file(d):
def scrape_log_file(d, force = False):
""" Copies over the server log file and searches it for specific strings

:param d: The directory where the server log file is to be copied to

"""

try:
if not config.oc_check_server_log:
if not force:
try:
if not config.oc_check_server_log:
return
except AttributeError: # allow this option not to be defined at all
return
except AttributeError: # allow this option not to be defined at all
return

if config.oc_server == '127.0.0.1' or config.oc_server == 'localhost':
cmd = 'cp %s/owncloud.log %s/.' % (config.oc_server_datadirectory, d)
else:
try:
log_user = config.oc_server_log_user
except AttributeError: # allow this option not to be defined at all
log_user = 'root'
cmd = 'scp -P %d %s@%s:%s/owncloud.log %s/.' % (config.scp_port, log_user, config.oc_server, config.oc_server_datadirectory, d)
rtn_code,stdout,stderr = runcmd(cmd)
error_check(rtn_code > 0, 'Could not copy the log file from the server, command returned %s' % rtn_code)
# download server log
log_url = 'http'
if config.oc_ssl_enabled:
log_url += 's'
log_url += '://' + config.oc_admin_user + ':' + config.oc_admin_password + '@' + config.oc_server
log_url += '/' + os.path.join(config.oc_root, 'index.php/settings/admin/log/download')

res = requests.get(log_url)

fatal_check(res.status_code == 200, 'Could not download the log file from the server, status code %i' % res.status_code)

file_handle = open(os.path.join(d, 'owncloud.log'), 'wb', 8192)
for chunk in res.iter_content(8192):
file_handle.write(chunk)
file_handle.close()

# search logfile for string (1 == not found; 0 == found):
cmd = "grep -i \"integrity constraint violation\" %s/owncloud.log" % d
Expand All @@ -627,6 +657,10 @@ def scrape_log_file(d):
rtn_code,stdout,stderr = runcmd(cmd, ignore_exitcode=True, log_warning=False)
error_check(rtn_code > 0, "\"Exception\" message found in server log file")

cmd = "grep -i \"Error\" %s/owncloud.log" % d
rtn_code,stdout,stderr = runcmd(cmd, ignore_exitcode=True, log_warning=False)
error_check(rtn_code > 0, "\"Error\" message found in server log file")

cmd = "grep -i \"could not obtain lock\" %s/owncloud.log" % d
rtn_code,stdout,stderr = runcmd(cmd, ignore_exitcode=True, log_warning=False)
error_check(rtn_code > 0, "\"Could Not Obtain Lock\" message found in server log file")
Expand Down