diff --git a/turbo_hipster/task_plugins/gate_real_db_upgrade/handle_results.py b/turbo_hipster/task_plugins/gate_real_db_upgrade/handle_results.py
index 150c780..e434697 100644
--- a/turbo_hipster/task_plugins/gate_real_db_upgrade/handle_results.py
+++ b/turbo_hipster/task_plugins/gate_real_db_upgrade/handle_results.py
@@ -20,7 +20,6 @@ somebody """
import calendar
import time
-import tempfile
import os
import re
@@ -28,48 +27,21 @@ import re
from turbo_hipster.lib.utils import push_file
-def generate_log_index(datasets):
- """ Create an index of logfiles and links to them """
- # Loop over logfile URLs
- # Create summary and links
-    output = '<html><head><title>Index of results</title></head><body>'
-    output += '<ul>'
- for dataset in datasets:
-        output += '<li>'
-        output += '<a href="%s">%s</a>' % (dataset['result_uri'],
-                                           dataset['name'])
-        output += ' <span class="%s">%s</span>' % (dataset['result'],
-                                                   dataset['result'])
-        output += '</li>'
-
-    output += '</ul>'
-    output += '</body></html>'
- return output
-
-
-def make_index_file(datasets, index_filename):
- """ Writes an index into a file for pushing """
- index_content = generate_log_index(datasets)
- tempdir = tempfile.mkdtemp()
- fd = open(os.path.join(tempdir, index_filename), 'w')
- fd.write(index_content)
- return os.path.join(tempdir, index_filename)
-
-
def generate_push_results(datasets, publish_config):
""" Generates and pushes results """
+ # NOTE(mikal): because of the way we run the tests in parallel, there is
+ # only ever one dataset per push.
+ link_uri = None
for i, dataset in enumerate(datasets):
result_uri = push_file(dataset['determined_path'],
dataset['job_log_file_path'],
publish_config)
datasets[i]['result_uri'] = result_uri
+ if not link_uri:
+ link_uri = result_uri
- index_file = make_index_file(datasets, 'index.html')
- index_file_url = push_file(dataset['determined_path'], index_file,
- publish_config)
-
- return index_file_url
+ return link_uri
def find_schemas(gitpath):