upload-logs-swift: Turn FileList into a context manager
The FileList is dynamic and needs to keep track of things to clean up. For example, index files generated in temporary locations are added to it, and those temporary files should be removed when we are finished with the list. A future change proposes a similar addition of a download script for the logs, which should also be managed. Turn the FileList into a context manager. Modify the index generation so it no longer creates a new FileList but instead replaces the internal list. Use this for the life-span of the upload by wrapping the relevant parts in a "with:" statement.

Change-Id: I7135bf5a55d133ce146e9aa84f00041fc8125cbc
commit 0fb160ec95
parent e52bb01280
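To make the intent of the change concrete before the diff, here is a minimal, self-contained sketch of the pattern being introduced: a list-like container that owns temporary artifacts (such as generated index files) and removes them when the context manager exits. The class and method names below are illustrative only, not the role's actual API.

    import os
    import tempfile


    class TempOwningList:
        # Sketch of the pattern this commit introduces: a list-like object
        # that owns temporary files and cleans them up on exit.
        def __init__(self):
            self.file_list = []
            self.tempdirs = []

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            # Clean up anything we created, even if the caller raised.
            for d in self.tempdirs:
                d.cleanup()

        def add_index(self, name, content):
            # Index files live in a temp dir that only this object knows about.
            tmp = tempfile.TemporaryDirectory()
            self.tempdirs.append(tmp)
            path = os.path.join(tmp.name, name)
            with open(path, 'w') as f:
                f.write(content)
            self.file_list.append(path)
            return path


    with TempOwningList() as fl:
        fl.add_index('index.html', '<html></html>')
        # ... upload fl.file_list here ...
    # temporary files are gone once the with-block exits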
@@ -82,272 +82,276 @@ class TestFileList(testtools.TestCase):
     def test_single_dir_trailing_slash(self):
         '''Test a single directory with a trailing slash'''
-        fl = FileList()
-        fl.add(os.path.join(FIXTURE_DIR, 'logs/'))
-        self.assert_files(fl, [
-            ('', 'application/directory', None),
-            ('controller', 'application/directory', None),
-            ('zuul-info', 'application/directory', None),
-            ('job-output.json', 'application/json', None),
-            ('controller/subdir', 'application/directory', None),
-            ('controller/compressed.gz', 'text/plain', 'gzip'),
-            ('controller/cpu-load.svg', 'image/svg+xml', None),
-            ('controller/journal.xz', 'text/plain', 'xz'),
-            ('controller/service_log.txt', 'text/plain', None),
-            ('controller/syslog', 'text/plain', None),
-            ('controller/subdir/subdir.txt', 'text/plain', None),
-            ('zuul-info/inventory.yaml', 'text/plain', None),
-            ('zuul-info/zuul-info.controller.txt', 'text/plain', None),
-        ])
+        with FileList() as fl:
+            fl.add(os.path.join(FIXTURE_DIR, 'logs/'))
+            self.assert_files(fl, [
+                ('', 'application/directory', None),
+                ('controller', 'application/directory', None),
+                ('zuul-info', 'application/directory', None),
+                ('job-output.json', 'application/json', None),
+                ('controller/subdir', 'application/directory', None),
+                ('controller/compressed.gz', 'text/plain', 'gzip'),
+                ('controller/cpu-load.svg', 'image/svg+xml', None),
+                ('controller/journal.xz', 'text/plain', 'xz'),
+                ('controller/service_log.txt', 'text/plain', None),
+                ('controller/syslog', 'text/plain', None),
+                ('controller/subdir/subdir.txt', 'text/plain', None),
+                ('zuul-info/inventory.yaml', 'text/plain', None),
+                ('zuul-info/zuul-info.controller.txt', 'text/plain', None),
+            ])

     def test_single_dir(self):
         '''Test a single directory without a trailing slash'''
-        fl = FileList()
-        fl.add(os.path.join(FIXTURE_DIR, 'logs'))
-        self.assert_files(fl, [
-            ('', 'application/directory', None),
-            ('logs', 'application/directory', None),
-            ('logs/controller', 'application/directory', None),
-            ('logs/zuul-info', 'application/directory', None),
-            ('logs/job-output.json', 'application/json', None),
-            ('logs/controller/subdir', 'application/directory', None),
-            ('logs/controller/compressed.gz', 'text/plain', 'gzip'),
-            ('logs/controller/cpu-load.svg', 'image/svg+xml', None),
-            ('logs/controller/journal.xz', 'text/plain', 'xz'),
-            ('logs/controller/service_log.txt', 'text/plain', None),
-            ('logs/controller/syslog', 'text/plain', None),
-            ('logs/controller/subdir/subdir.txt', 'text/plain', None),
-            ('logs/zuul-info/inventory.yaml', 'text/plain', None),
-            ('logs/zuul-info/zuul-info.controller.txt', 'text/plain', None),
-        ])
+        with FileList() as fl:
+            fl.add(os.path.join(FIXTURE_DIR, 'logs'))
+            self.assert_files(fl, [
+                ('', 'application/directory', None),
+                ('logs', 'application/directory', None),
+                ('logs/controller', 'application/directory', None),
+                ('logs/zuul-info', 'application/directory', None),
+                ('logs/job-output.json', 'application/json', None),
+                ('logs/controller/subdir', 'application/directory', None),
+                ('logs/controller/compressed.gz', 'text/plain', 'gzip'),
+                ('logs/controller/cpu-load.svg', 'image/svg+xml', None),
+                ('logs/controller/journal.xz', 'text/plain', 'xz'),
+                ('logs/controller/service_log.txt', 'text/plain', None),
+                ('logs/controller/syslog', 'text/plain', None),
+                ('logs/controller/subdir/subdir.txt', 'text/plain', None),
+                ('logs/zuul-info/inventory.yaml', 'text/plain', None),
+                ('logs/zuul-info/zuul-info.controller.txt',
+                 'text/plain', None),
+            ])

     def test_single_file(self):
         '''Test a single file'''
-        fl = FileList()
-        fl.add(os.path.join(FIXTURE_DIR,
-                            'logs/zuul-info/inventory.yaml'))
-        self.assert_files(fl, [
-            ('', 'application/directory', None),
-            ('inventory.yaml', 'text/plain', None),
-        ])
+        with FileList() as fl:
+            fl.add(os.path.join(FIXTURE_DIR,
+                                'logs/zuul-info/inventory.yaml'))
+            self.assert_files(fl, [
+                ('', 'application/directory', None),
+                ('inventory.yaml', 'text/plain', None),
+            ])

     def test_symlinks(self):
         '''Test symlinks'''
-        fl = FileList()
-        self.useFixture(SymlinkFixture())
-        fl.add(os.path.join(FIXTURE_DIR, 'links/'))
-        self.assert_files(fl, [
-            ('', 'application/directory', None),
-            ('controller', 'application/directory', None),
-            ('good_symlink', 'application/directory', None),
-            ('recursive_symlink', 'application/directory', None),
-            ('symlink_loop', 'application/directory', None),
-            ('symlink_loop_a', 'application/directory', None),
-            ('job-output.json', 'application/json', None),
-            ('symlink_file', 'text/plain', None),
-            ('controller/service_log.txt', 'text/plain', None),
-            ('symlink_loop/symlink_loop_b', 'application/directory', None),
-            ('symlink_loop/placeholder', 'text/plain', None),
-        ])
+        with FileList() as fl:
+            self.useFixture(SymlinkFixture())
+            fl.add(os.path.join(FIXTURE_DIR, 'links/'))
+            self.assert_files(fl, [
+                ('', 'application/directory', None),
+                ('controller', 'application/directory', None),
+                ('good_symlink', 'application/directory', None),
+                ('recursive_symlink', 'application/directory', None),
+                ('symlink_loop', 'application/directory', None),
+                ('symlink_loop_a', 'application/directory', None),
+                ('job-output.json', 'application/json', None),
+                ('symlink_file', 'text/plain', None),
+                ('controller/service_log.txt', 'text/plain', None),
+                ('symlink_loop/symlink_loop_b', 'application/directory', None),
+                ('symlink_loop/placeholder', 'text/plain', None),
+            ])

     def test_index_files(self):
         '''Test index generation'''
-        fl = FileList()
-        fl.add(os.path.join(FIXTURE_DIR, 'logs'))
-        ix = Indexer()
-        fl = ix.make_indexes(fl)
+        with FileList() as fl:
+            fl.add(os.path.join(FIXTURE_DIR, 'logs'))
+            ix = Indexer()
+            ix.make_indexes(fl)

-        self.assert_files(fl, [
-            ('', 'application/directory', None),
-            ('index.html', 'text/html', None),
-            ('logs', 'application/directory', None),
-            ('logs/controller', 'application/directory', None),
-            ('logs/zuul-info', 'application/directory', None),
-            ('logs/job-output.json', 'application/json', None),
-            ('logs/index.html', 'text/html', None),
-            ('logs/controller/subdir', 'application/directory', None),
-            ('logs/controller/compressed.gz', 'text/plain', 'gzip'),
-            ('logs/controller/cpu-load.svg', 'image/svg+xml', None),
-            ('logs/controller/journal.xz', 'text/plain', 'xz'),
-            ('logs/controller/service_log.txt', 'text/plain', None),
-            ('logs/controller/syslog', 'text/plain', None),
-            ('logs/controller/index.html', 'text/html', None),
-            ('logs/controller/subdir/subdir.txt', 'text/plain', None),
-            ('logs/controller/subdir/index.html', 'text/html', None),
-            ('logs/zuul-info/inventory.yaml', 'text/plain', None),
-            ('logs/zuul-info/zuul-info.controller.txt', 'text/plain', None),
-            ('logs/zuul-info/index.html', 'text/html', None),
-        ])
+            self.assert_files(fl, [
+                ('', 'application/directory', None),
+                ('index.html', 'text/html', None),
+                ('logs', 'application/directory', None),
+                ('logs/controller', 'application/directory', None),
+                ('logs/zuul-info', 'application/directory', None),
+                ('logs/job-output.json', 'application/json', None),
+                ('logs/index.html', 'text/html', None),
+                ('logs/controller/subdir', 'application/directory', None),
+                ('logs/controller/compressed.gz', 'text/plain', 'gzip'),
+                ('logs/controller/cpu-load.svg', 'image/svg+xml', None),
+                ('logs/controller/journal.xz', 'text/plain', 'xz'),
+                ('logs/controller/service_log.txt', 'text/plain', None),
+                ('logs/controller/syslog', 'text/plain', None),
+                ('logs/controller/index.html', 'text/html', None),
+                ('logs/controller/subdir/subdir.txt', 'text/plain', None),
+                ('logs/controller/subdir/index.html', 'text/html', None),
+                ('logs/zuul-info/inventory.yaml', 'text/plain', None),
+                ('logs/zuul-info/zuul-info.controller.txt',
+                 'text/plain', None),
+                ('logs/zuul-info/index.html', 'text/html', None),
+            ])

-        top_index = self.find_file(fl, 'index.html')
-        page = open(top_index.full_path).read()
-        page = BeautifulSoup(page, 'html.parser')
-        rows = page.find_all('tr')[1:]
+            top_index = self.find_file(fl, 'index.html')
+            page = open(top_index.full_path).read()
+            page = BeautifulSoup(page, 'html.parser')
+            rows = page.find_all('tr')[1:]

-        self.assertEqual(len(rows), 1)
+            self.assertEqual(len(rows), 1)

-        self.assertEqual(rows[0].find('a').get('href'), 'logs/')
-        self.assertEqual(rows[0].find('a').text, 'logs/')
+            self.assertEqual(rows[0].find('a').get('href'), 'logs/')
+            self.assertEqual(rows[0].find('a').text, 'logs/')

-        subdir_index = self.find_file(fl, 'logs/controller/subdir/index.html')
-        page = open(subdir_index.full_path).read()
-        page = BeautifulSoup(page, 'html.parser')
-        rows = page.find_all('tr')[1:]
-        self.assertEqual(rows[0].find('a').get('href'), '../')
-        self.assertEqual(rows[0].find('a').text, '../')
+            subdir_index = self.find_file(
+                fl, 'logs/controller/subdir/index.html')
+            page = open(subdir_index.full_path).read()
+            page = BeautifulSoup(page, 'html.parser')
+            rows = page.find_all('tr')[1:]
+            self.assertEqual(rows[0].find('a').get('href'), '../')
+            self.assertEqual(rows[0].find('a').text, '../')

-        self.assertEqual(rows[1].find('a').get('href'), 'subdir.txt')
-        self.assertEqual(rows[1].find('a').text, 'subdir.txt')
+            self.assertEqual(rows[1].find('a').get('href'), 'subdir.txt')
+            self.assertEqual(rows[1].find('a').text, 'subdir.txt')

     def test_index_files_trailing_slash(self):
         '''Test index generation with a trailing slash'''
-        fl = FileList()
-        fl.add(os.path.join(FIXTURE_DIR, 'logs/'))
-        ix = Indexer()
-        fl = ix.make_indexes(fl)
+        with FileList() as fl:
+            fl.add(os.path.join(FIXTURE_DIR, 'logs/'))
+            ix = Indexer()
+            ix.make_indexes(fl)

-        self.assert_files(fl, [
-            ('', 'application/directory', None),
-            ('controller', 'application/directory', None),
-            ('zuul-info', 'application/directory', None),
-            ('job-output.json', 'application/json', None),
-            ('index.html', 'text/html', None),
-            ('controller/subdir', 'application/directory', None),
-            ('controller/compressed.gz', 'text/plain', 'gzip'),
-            ('controller/cpu-load.svg', 'image/svg+xml', None),
-            ('controller/journal.xz', 'text/plain', 'xz'),
-            ('controller/service_log.txt', 'text/plain', None),
-            ('controller/syslog', 'text/plain', None),
-            ('controller/index.html', 'text/html', None),
-            ('controller/subdir/subdir.txt', 'text/plain', None),
-            ('controller/subdir/index.html', 'text/html', None),
-            ('zuul-info/inventory.yaml', 'text/plain', None),
-            ('zuul-info/zuul-info.controller.txt', 'text/plain', None),
-            ('zuul-info/index.html', 'text/html', None),
-        ])
+            self.assert_files(fl, [
+                ('', 'application/directory', None),
+                ('controller', 'application/directory', None),
+                ('zuul-info', 'application/directory', None),
+                ('job-output.json', 'application/json', None),
+                ('index.html', 'text/html', None),
+                ('controller/subdir', 'application/directory', None),
+                ('controller/compressed.gz', 'text/plain', 'gzip'),
+                ('controller/cpu-load.svg', 'image/svg+xml', None),
+                ('controller/journal.xz', 'text/plain', 'xz'),
+                ('controller/service_log.txt', 'text/plain', None),
+                ('controller/syslog', 'text/plain', None),
+                ('controller/index.html', 'text/html', None),
+                ('controller/subdir/subdir.txt', 'text/plain', None),
+                ('controller/subdir/index.html', 'text/html', None),
+                ('zuul-info/inventory.yaml', 'text/plain', None),
+                ('zuul-info/zuul-info.controller.txt', 'text/plain', None),
+                ('zuul-info/index.html', 'text/html', None),
+            ])

-        top_index = self.find_file(fl, 'index.html')
-        page = open(top_index.full_path).read()
-        page = BeautifulSoup(page, 'html.parser')
-        rows = page.find_all('tr')[1:]
+            top_index = self.find_file(fl, 'index.html')
+            page = open(top_index.full_path).read()
+            page = BeautifulSoup(page, 'html.parser')
+            rows = page.find_all('tr')[1:]

-        self.assertEqual(len(rows), 3)
+            self.assertEqual(len(rows), 3)

-        self.assertEqual(rows[0].find('a').get('href'), 'controller/')
-        self.assertEqual(rows[0].find('a').text, 'controller/')
+            self.assertEqual(rows[0].find('a').get('href'), 'controller/')
+            self.assertEqual(rows[0].find('a').text, 'controller/')

-        self.assertEqual(rows[1].find('a').get('href'), 'zuul-info/')
-        self.assertEqual(rows[1].find('a').text, 'zuul-info/')
+            self.assertEqual(rows[1].find('a').get('href'), 'zuul-info/')
+            self.assertEqual(rows[1].find('a').text, 'zuul-info/')

-        subdir_index = self.find_file(fl, 'controller/subdir/index.html')
-        page = open(subdir_index.full_path).read()
-        page = BeautifulSoup(page, 'html.parser')
-        rows = page.find_all('tr')[1:]
-        self.assertEqual(rows[0].find('a').get('href'), '../')
-        self.assertEqual(rows[0].find('a').text, '../')
+            subdir_index = self.find_file(fl, 'controller/subdir/index.html')
+            page = open(subdir_index.full_path).read()
+            page = BeautifulSoup(page, 'html.parser')
+            rows = page.find_all('tr')[1:]
+            self.assertEqual(rows[0].find('a').get('href'), '../')
+            self.assertEqual(rows[0].find('a').text, '../')

-        self.assertEqual(rows[1].find('a').get('href'), 'subdir.txt')
-        self.assertEqual(rows[1].find('a').text, 'subdir.txt')
+            self.assertEqual(rows[1].find('a').get('href'), 'subdir.txt')
+            self.assertEqual(rows[1].find('a').text, 'subdir.txt')

     def test_topdir_parent_link(self):
         '''Test index generation creates topdir parent link'''
-        fl = FileList()
-        fl.add(os.path.join(FIXTURE_DIR, 'logs/'))
-        ix = Indexer(create_parent_links=True,
-                     create_topdir_parent_link=True)
-        fl = ix.make_indexes(fl)
+        with FileList() as fl:
+            fl.add(os.path.join(FIXTURE_DIR, 'logs/'))
+            ix = Indexer(create_parent_links=True,
+                         create_topdir_parent_link=True)
+            ix.make_indexes(fl)

-        self.assert_files(fl, [
-            ('', 'application/directory', None),
-            ('controller', 'application/directory', None),
-            ('zuul-info', 'application/directory', None),
-            ('job-output.json', 'application/json', None),
-            ('index.html', 'text/html', None),
-            ('controller/subdir', 'application/directory', None),
-            ('controller/compressed.gz', 'text/plain', 'gzip'),
-            ('controller/cpu-load.svg', 'image/svg+xml', None),
-            ('controller/journal.xz', 'text/plain', 'xz'),
-            ('controller/service_log.txt', 'text/plain', None),
-            ('controller/syslog', 'text/plain', None),
-            ('controller/index.html', 'text/html', None),
-            ('controller/subdir/subdir.txt', 'text/plain', None),
-            ('controller/subdir/index.html', 'text/html', None),
-            ('zuul-info/inventory.yaml', 'text/plain', None),
-            ('zuul-info/zuul-info.controller.txt', 'text/plain', None),
-            ('zuul-info/index.html', 'text/html', None),
-        ])
+            self.assert_files(fl, [
+                ('', 'application/directory', None),
+                ('controller', 'application/directory', None),
+                ('zuul-info', 'application/directory', None),
+                ('job-output.json', 'application/json', None),
+                ('index.html', 'text/html', None),
+                ('controller/subdir', 'application/directory', None),
+                ('controller/compressed.gz', 'text/plain', 'gzip'),
+                ('controller/cpu-load.svg', 'image/svg+xml', None),
+                ('controller/journal.xz', 'text/plain', 'xz'),
+                ('controller/service_log.txt', 'text/plain', None),
+                ('controller/syslog', 'text/plain', None),
+                ('controller/index.html', 'text/html', None),
+                ('controller/subdir/subdir.txt', 'text/plain', None),
+                ('controller/subdir/index.html', 'text/html', None),
+                ('zuul-info/inventory.yaml', 'text/plain', None),
+                ('zuul-info/zuul-info.controller.txt', 'text/plain', None),
+                ('zuul-info/index.html', 'text/html', None),
+            ])

-        top_index = self.find_file(fl, 'index.html')
-        page = open(top_index.full_path).read()
-        page = BeautifulSoup(page, 'html.parser')
-        rows = page.find_all('tr')[1:]
+            top_index = self.find_file(fl, 'index.html')
+            page = open(top_index.full_path).read()
+            page = BeautifulSoup(page, 'html.parser')
+            rows = page.find_all('tr')[1:]

-        self.assertEqual(len(rows), 4)
+            self.assertEqual(len(rows), 4)

-        self.assertEqual(rows[0].find('a').get('href'), '../')
-        self.assertEqual(rows[0].find('a').text, '../')
+            self.assertEqual(rows[0].find('a').get('href'), '../')
+            self.assertEqual(rows[0].find('a').text, '../')

-        self.assertEqual(rows[1].find('a').get('href'), 'controller/')
-        self.assertEqual(rows[1].find('a').text, 'controller/')
+            self.assertEqual(rows[1].find('a').get('href'), 'controller/')
+            self.assertEqual(rows[1].find('a').text, 'controller/')

-        self.assertEqual(rows[2].find('a').get('href'), 'zuul-info/')
-        self.assertEqual(rows[2].find('a').text, 'zuul-info/')
+            self.assertEqual(rows[2].find('a').get('href'), 'zuul-info/')
+            self.assertEqual(rows[2].find('a').text, 'zuul-info/')

-        subdir_index = self.find_file(fl, 'controller/subdir/index.html')
-        page = open(subdir_index.full_path).read()
-        page = BeautifulSoup(page, 'html.parser')
-        rows = page.find_all('tr')[1:]
-        self.assertEqual(rows[0].find('a').get('href'), '../')
-        self.assertEqual(rows[0].find('a').text, '../')
+            subdir_index = self.find_file(fl, 'controller/subdir/index.html')
+            page = open(subdir_index.full_path).read()
+            page = BeautifulSoup(page, 'html.parser')
+            rows = page.find_all('tr')[1:]
+            self.assertEqual(rows[0].find('a').get('href'), '../')
+            self.assertEqual(rows[0].find('a').text, '../')

-        self.assertEqual(rows[1].find('a').get('href'), 'subdir.txt')
-        self.assertEqual(rows[1].find('a').text, 'subdir.txt')
+            self.assertEqual(rows[1].find('a').get('href'), 'subdir.txt')
+            self.assertEqual(rows[1].find('a').text, 'subdir.txt')

     def test_no_parent_links(self):
         '''Test index generation creates topdir parent link'''
-        fl = FileList()
-        fl.add(os.path.join(FIXTURE_DIR, 'logs/'))
-        ix = Indexer(create_parent_links=False,
-                     create_topdir_parent_link=False)
-        fl = ix.make_indexes(fl)
+        with FileList() as fl:
+            fl.add(os.path.join(FIXTURE_DIR, 'logs/'))
+            ix = Indexer(create_parent_links=False,
+                         create_topdir_parent_link=False)
+            ix.make_indexes(fl)

-        self.assert_files(fl, [
-            ('', 'application/directory', None),
-            ('controller', 'application/directory', None),
-            ('zuul-info', 'application/directory', None),
-            ('job-output.json', 'application/json', None),
-            ('index.html', 'text/html', None),
-            ('controller/subdir', 'application/directory', None),
-            ('controller/compressed.gz', 'text/plain', 'gzip'),
-            ('controller/cpu-load.svg', 'image/svg+xml', None),
-            ('controller/journal.xz', 'text/plain', 'xz'),
-            ('controller/service_log.txt', 'text/plain', None),
-            ('controller/syslog', 'text/plain', None),
-            ('controller/index.html', 'text/html', None),
-            ('controller/subdir/subdir.txt', 'text/plain', None),
-            ('controller/subdir/index.html', 'text/html', None),
-            ('zuul-info/inventory.yaml', 'text/plain', None),
-            ('zuul-info/zuul-info.controller.txt', 'text/plain', None),
-            ('zuul-info/index.html', 'text/html', None),
-        ])
+            self.assert_files(fl, [
+                ('', 'application/directory', None),
+                ('controller', 'application/directory', None),
+                ('zuul-info', 'application/directory', None),
+                ('job-output.json', 'application/json', None),
+                ('index.html', 'text/html', None),
+                ('controller/subdir', 'application/directory', None),
+                ('controller/compressed.gz', 'text/plain', 'gzip'),
+                ('controller/cpu-load.svg', 'image/svg+xml', None),
+                ('controller/journal.xz', 'text/plain', 'xz'),
+                ('controller/service_log.txt', 'text/plain', None),
+                ('controller/syslog', 'text/plain', None),
+                ('controller/index.html', 'text/html', None),
+                ('controller/subdir/subdir.txt', 'text/plain', None),
+                ('controller/subdir/index.html', 'text/html', None),
+                ('zuul-info/inventory.yaml', 'text/plain', None),
+                ('zuul-info/zuul-info.controller.txt', 'text/plain', None),
+                ('zuul-info/index.html', 'text/html', None),
+            ])

-        top_index = self.find_file(fl, 'index.html')
-        page = open(top_index.full_path).read()
-        page = BeautifulSoup(page, 'html.parser')
-        rows = page.find_all('tr')[1:]
+            top_index = self.find_file(fl, 'index.html')
+            page = open(top_index.full_path).read()
+            page = BeautifulSoup(page, 'html.parser')
+            rows = page.find_all('tr')[1:]

-        self.assertEqual(len(rows), 3)
+            self.assertEqual(len(rows), 3)

-        self.assertEqual(rows[0].find('a').get('href'), 'controller/')
-        self.assertEqual(rows[0].find('a').text, 'controller/')
+            self.assertEqual(rows[0].find('a').get('href'), 'controller/')
+            self.assertEqual(rows[0].find('a').text, 'controller/')

-        self.assertEqual(rows[1].find('a').get('href'), 'zuul-info/')
-        self.assertEqual(rows[1].find('a').text, 'zuul-info/')
+            self.assertEqual(rows[1].find('a').get('href'), 'zuul-info/')
+            self.assertEqual(rows[1].find('a').text, 'zuul-info/')

-        subdir_index = self.find_file(fl, 'controller/subdir/index.html')
-        page = open(subdir_index.full_path).read()
-        page = BeautifulSoup(page, 'html.parser')
-        rows = page.find_all('tr')[1:]
+            subdir_index = self.find_file(fl, 'controller/subdir/index.html')
+            page = open(subdir_index.full_path).read()
+            page = BeautifulSoup(page, 'html.parser')
+            rows = page.find_all('tr')[1:]

-        self.assertEqual(rows[0].find('a').get('href'), 'subdir.txt')
-        self.assertEqual(rows[0].find('a').text, 'subdir.txt')
+            self.assertEqual(rows[0].find('a').get('href'), 'subdir.txt')
+            self.assertEqual(rows[0].find('a').text, 'subdir.txt')

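For context on why every test body above moves into a "with" block: the with statement guarantees that FileList.__exit__ runs even when an assertion fails partway through, so any cleanup hung off the context manager later cannot be skipped by a failing test. A rough (simplified) sketch of the equivalence, reusing FileList and FIXTURE_DIR from the test module above:

    # Roughly what "with FileList() as fl:" expands to; the real statement
    # also forwards exception details to __exit__.
    fl = FileList()
    fl.__enter__()
    try:
        fl.add(os.path.join(FIXTURE_DIR, 'logs/'))
        # ... assertions against fl ...
    finally:
        fl.__exit__(None, None, None)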
@@ -182,10 +182,21 @@ class FileDetail():


 class FileList(Sequence):
+    '''A collection of FileDetail objects
+
+    This is a list-like group of FileDetail objects, intended to be
+    used as a context manager around the upload process.
+    '''
     def __init__(self):
         self.file_list = []
         self.file_list.append(FileDetail(None, '', ''))

+    def __enter__(self):
+        return self
+
+    def __exit__(self, type, value, traceback):
+        pass
+
     def __getitem__(self, item):
         return self.file_list.__getitem__(item)

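The __exit__ added here is deliberately a no-op; it exists so that later changes (removing temporary index files, managing the proposed download script) have a single cleanup point. A speculative sketch of what such cleanup might look like, using a hypothetical tempdirs attribute that the Indexer would populate (not part of this commit):

    import shutil
    import tempfile


    class CleanupSketch:
        # Illustrative only: a FileList-like object that records the temp
        # directories its generated files live in and removes them on exit.
        def __init__(self):
            self.tempdirs = [tempfile.mkdtemp()]

        def __enter__(self):
            return self

        def __exit__(self, type, value, traceback):
            for tempdir in self.tempdirs:
                shutil.rmtree(tempdir, ignore_errors=True)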
@@ -266,6 +277,16 @@ class Indexer():
         self.index_filename = 'index.html'

     def make_indexes(self, file_list):
+        '''Make index files
+
+        Args:
+            file_list (FileList): A FileList object to be updated
+              with index files for each directory.
+        Return:
+            No value, the file_list will be updated
+        '''
+        assert isinstance(file_list, FileList)
+
         folders = collections.OrderedDict()
         for f in file_list:
             if f.folder:
@@ -299,8 +320,7 @@ class Indexer():

         # This appends the index file at the end of the group of files
         # for each directory.
-        ret_file_list = FileList()
-        newlist = []
+        new_list = []
         last_dirname = None
         for f in reversed(list(file_list)):
             if f.folder:
@@ -313,13 +333,11 @@ class Indexer():
             if dirname != last_dirname:
                 index = indexes.pop(dirname, None)
                 if index:
-                    newlist.append(index)
+                    new_list.append(index)
                     last_dirname = dirname
-            newlist.append(f)
-        newlist.reverse()
-        ret_file_list.file_list = newlist
-
-        return ret_file_list
+            new_list.append(f)
+        new_list.reverse()
+        file_list.file_list = new_list

     def make_index_file(self, folder_links, title):
         """Writes an index into a file for pushing"""
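Because make_indexes() now updates the FileList it is given instead of returning a replacement, callers keep working with the same object for the whole with-block. A rough before/after of the calling pattern (FileList and Indexer are the classes from this module; 'path' is a placeholder):

    # Before this change: the returned FileList replaced the caller's object.
    fl = FileList()
    fl.add(path)
    fl = Indexer().make_indexes(fl)

    # After this change: the same instance is updated in place, so the object
    # owned by the surrounding with-block (and any cleanup it will eventually
    # do in __exit__) is preserved.
    with FileList() as fl:
        fl.add(path)
        Indexer().make_indexes(fl)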
@@ -547,33 +565,33 @@ def run(cloud, container, files,
        prefix = '/'.join(parts[1:])

     # Create the objects to make sure the arguments are sound.
-    file_list = FileList()
-    indexer = Indexer(create_parent_links=parent_links,
-                      create_topdir_parent_link=topdir_parent_link,
-                      append_footer=footer)
+    with FileList() as file_list:
+        indexer = Indexer(create_parent_links=parent_links,
+                          create_topdir_parent_link=topdir_parent_link,
+                          append_footer=footer)

-    # Scan the files.
-    for file_path in files:
-        file_list.add(file_path)
+        # Scan the files.
+        for file_path in files:
+            file_list.add(file_path)

-    # (Possibly) make indexes.
-    if indexes:
-        file_list = indexer.make_indexes(file_list)
+        # (Possibly) make indexes.
+        if indexes:
+            indexer.make_indexes(file_list)

-    logging.debug("List of files prepared to upload:")
-    for x in file_list:
-        logging.debug(x)
+        logging.debug("List of files prepared to upload:")
+        for x in file_list:
+            logging.debug(x)

-    # Do no connect to swift or do any uploading in a dry run
-    if dry_run:
-        # No URL is known, so return nothing
-        return
+        # Do no connect to swift or do any uploading in a dry run
+        if dry_run:
+            # No URL is known, so return nothing
+            return

-    # Upload.
-    uploader = Uploader(cloud, container, prefix, delete_after,
-                        public)
-    uploader.upload(file_list)
-    return uploader.url
+        # Upload.
+        uploader = Uploader(cloud, container, prefix, delete_after,
+                            public)
+        uploader.upload(file_list)
+        return uploader.url


 def ansible_main():
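One subtlety of the new run() layout: the early return on dry runs and the final "return uploader.url" both sit inside the with-block, and leaving a with-block via return still invokes __exit__, so cleanup added there later will run on every exit path. A small standalone illustration of that behaviour:

    class Demo:
        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            print("cleanup runs")


    def f(dry_run):
        with Demo():
            if dry_run:
                return          # __exit__ still runs here
            return "url"        # ...and here


    f(dry_run=True)   # prints "cleanup runs"
    f(dry_run=False)  # prints "cleanup runs"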