Fix pep8 issues

This commit is contained in:
Joshua Hesketh 2013-08-09 10:52:34 +10:00
parent 67631b5e68
commit 9f89805baf
4 changed files with 22 additions and 13 deletions

View File

@ -172,11 +172,14 @@ def execute_to_log(cmd, logfile, timeout=-1,
logger.info('[script exit code = %d]' % p.returncode)
def push_file(job_name, file_path, publish_config):
    """ Push a log file to a server. Returns the public URL """
    # Dispatch to a module-level helper named "<type>_push_file",
    # e.g. publish_config['type'] == 'swift' -> swift_push_file.
    method_name = publish_config['type'] + '_push_file'
    # BUG FIX: the original checked `method in locals()`, but locals()
    # only contains this function's own variables, so the lookup always
    # failed and nothing was ever pushed.  It also called the helper with
    # an undefined name `dataset` instead of the `file_path` parameter.
    # Module-level helpers live in globals().
    method = globals().get(method_name)
    if method is not None:
        return method(job_name, file_path, publish_config)
    # Unknown publish type: no matching helper, nothing pushed.
    return None
def swift_push_file(job_name, file_path, swift_config):
""" Push a log file to a swift server. """
@ -188,6 +191,7 @@ def swift_push_file(job_name, file_path, swift_config):
obj = con.put_object(swift_config['container'], name, fd)
return con.get_object(swift_config['container'], name)
def local_push_file(job_name, file_path, local_config):
""" Copy the file locally somewhere sensible """
dest = os.path.join(local_config['path'], job_name)
@ -197,6 +201,7 @@ def local_push_file(job_name, file_path, local_config):
os.copyfile(file_path, dest_file)
return dest_file
def scp_push_file(job_name, file_path, local_config):
    """ Copy the file remotely over ssh """
    # Not implemented yet; acts as a no-op placeholder and yields no URL.
    return None

View File

@ -20,17 +20,20 @@ somebody """
from lib.utils import push_file
def generate_log_index(logfiles):
    """ Create an index of logfiles and links to them """
    # TODO: iterate over the logfile URLs and build a summary with links.
    # Currently a placeholder that produces nothing.
    return None
def make_index_file(logfiles, files=None):
    """ Writes an index into a file for pushing """
    # BUG FIX (backward-compatible): generate_push_results calls
    # make_index_file(datasets, files) with two arguments, which raised
    # TypeError against the original one-argument signature.  `files` is
    # accepted with a default so both call shapes work.
    generate_log_index(logfiles)
    # TODO: write the generated index out to a file and return its path;
    # currently nothing is written and None is returned.
def generate_push_results(datasets):
""" Generates and pushes results """
@ -40,11 +43,11 @@ def generate_push_results(datasets):
if 'publish_to' in dataset['config']:
for publish_config in dataset['config']['publish_to']:
files.append(push_file(dataset['name'],
dataset['log_file_path'],
publish_config))
datasets[i]['files'] = files;
dataset['log_file_path'],
publish_config))
datasets[i]['files'] = files
index_file = make_index_file(datasets, files)
#index_file_url = push_file(index_file)
return files[0]
return files[0]

View File

@ -163,15 +163,15 @@ class Runner(threading.Thread):
dataset['name'] = ent
dataset['path'] = os.path.join(datasets_path, ent)
dataset['job_working_dir'] = os.path.join(
self.config['jobs_working_dir'],
self.job.unique
)
self.config['jobs_working_dir'],
self.job.unique
)
dataset['log_file_path'] = os.path.join(
dataset['job_working_dir'],
dataset['name'] + '.log'
)
dataset['job_working_dir'],
dataset['name'] + '.log'
)
with open(os.path.join(dataset['path'], 'config.json'),
'r') as config_stream:
'r') as config_stream:
dataset['config'] = json.load(config_stream)
self.datasets.append(dataset)

View File

@ -1,4 +1,5 @@
import testtools
class WorkerServerTestCase(testtools.TestCase):
    """Scaffold for worker-server tests; no test methods defined yet."""