upload-logs-swift: test bulk download script change
This implements the production change I98c80f657f38c5e1ed5f28e5d36988a3429ad1f8 in the test role. Review comments should be left there; we can merge this and then parent a job to base-test to test it.

Change-Id: Id91350ff1c531fd7266f3bf76681a8415941481f
parent a6f946472d
commit ddc0da55a1
@@ -78,3 +78,17 @@ This uploads logs to an OpenStack Object Store (Swift) container.
 
 More details can be found at
 :zuul:rolevar:`set-zuul-log-path-fact.zuul_log_path_shard_build`.
 
+.. zuul:rolevar:: zuul_log_include_download_script
+   :default: False
+
+   Generate a script from ``zuul_log_download_template`` in the root
+   directory of the uploaded logs to facilitate easy bulk download.
+
+.. zuul:rolevar:: zuul_log_download_template
+   :default: templates/download-logs.sh.j2
+
+   Path to template file if ``zuul_log_include_download_script`` is
+   set. See the sample file for parameters available to the template.
+   The file will be placed in the root of the uploaded logs (with
+   ``.j2`` suffix removed).
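For reference, the library change below renders this template with exactly two variables, ``base_url`` and ``file_list`` (a list of relative file paths). A rough sketch of that rendering, using a made-up inline template and placeholder values rather than the real ``templates/download-logs.sh.j2``:

    # Sketch only: the inline template and the values are illustrative,
    # not the shipped download-logs.sh.j2.
    import jinja2

    template = jinja2.Template(
        'BASE_URL={{ base_url }}\n'
        '{% for f in file_list %}save_file "{{ f }}"\n{% endfor %}')

    print(template.render(
        base_url='http://logs.example.com/build/abc123',  # placeholder URL
        file_list=['job-output.json', 'controller/syslog']))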
@@ -2,3 +2,5 @@ zuul_log_partition: false
 zuul_log_container: logs
 zuul_log_container_public: true
 zuul_log_create_indexes: true
+zuul_log_include_download_script: true
+zuul_log_download_template: '{{ role_path }}/templates/download-logs.sh.j2'
@@ -0,0 +1,84 @@
+#!/bin/bash
+
+# Download all logs
+
+#
+# To use this file
+#
+#  curl "http://fakebaseurl.com/download-logs.sh" | bash
+#
+# Logs will be copied in a temporary directory as described in the
+# output.  Set DOWNLOAD_DIR to an empty directory if you wish to
+# override this.
+#
+
+BASE_URL=http://fakebaseurl.com
+
+function log {
+    echo "$(date -Iseconds) | $@"
+}
+
+function save_file {
+    local file="$1"
+
+    curl -s --compressed --create-dirs -o "${file}" "${BASE_URL}/${file}"
+
+    # Using --compressed we will send an Accept-Encoding: gzip header
+    # and the data will come to us across the network compressed.
+    # However, sometimes things like OpenStack's log server will send
+    # .gz files (as stored on its disk) uncompressed, so we check if
+    # this really looks like an ASCII file and rename for clarity.
+    if [[ "${file}" == *.gz ]]; then
+        local type=$(file "${file}")
+        if [[ "${type}" =~ "ASCII text" ]] || [[ "${type}" =~ "Unicode text" ]]; then
+            local new_name=${file%.gz}
+            log "Renaming to ${new_name}"
+            mv "${file}" "${new_name}"
+        fi
+    fi
+
+}
+
+if [[ -z "${DOWNLOAD_DIR}" ]]; then
+    DOWNLOAD_DIR=$(mktemp -d --tmpdir zuul-logs.XXXXXX)
+fi
+log "Saving logs to ${DOWNLOAD_DIR}"
+
+pushd "${DOWNLOAD_DIR}" > /dev/null
+
+
+
+log "Getting ${BASE_URL}/job-output.json [ 0001 / 0010 ]"
+save_file "job-output.json"
+
+log "Getting ${BASE_URL}/controller/compressed.gz [ 0002 / 0010 ]"
+save_file "controller/compressed.gz"
+
+log "Getting ${BASE_URL}/controller/cpu-load.svg [ 0003 / 0010 ]"
+save_file "controller/cpu-load.svg"
+
+log "Getting ${BASE_URL}/controller/journal.xz [ 0004 / 0010 ]"
+save_file "controller/journal.xz"
+
+log "Getting ${BASE_URL}/controller/service_log.txt [ 0005 / 0010 ]"
+save_file "controller/service_log.txt"
+
+log "Getting ${BASE_URL}/controller/syslog [ 0006 / 0010 ]"
+save_file "controller/syslog"
+
+log "Getting ${BASE_URL}/controller/subdir/foo::3.txt [ 0007 / 0010 ]"
+save_file "controller/subdir/foo::3.txt"
+
+log "Getting ${BASE_URL}/controller/subdir/subdir.txt [ 0008 / 0010 ]"
+save_file "controller/subdir/subdir.txt"
+
+log "Getting ${BASE_URL}/zuul-info/inventory.yaml [ 0009 / 0010 ]"
+save_file "zuul-info/inventory.yaml"
+
+log "Getting ${BASE_URL}/zuul-info/zuul-info.controller.txt [ 0010 / 0010 ]"
+save_file "zuul-info/zuul-info.controller.txt"
+
+
+popd >/dev/null
+
+log "Download complete!"
@@ -30,6 +30,7 @@ import io
 import logging
 import mimetypes
 import os
+import jinja2
 try:
     import queue as queuelib
 except ImportError:
@@ -54,6 +55,7 @@ import requests.exceptions
 import requestsexceptions
 import keystoneauth1.exceptions
 
+from ansible.module_utils._text import to_text
 from ansible.module_utils.basic import AnsibleModule
 
 try:
@@ -265,13 +267,15 @@ class FileDetail():
     to push to swift.
     """
 
-    def __init__(self, full_path, relative_path, filename=None):
+    def __init__(self, full_path, relative_path,
+                 filename=None, is_index=False):
         """
         Args:
             full_path (str): The absolute path to the file on disk.
             relative_path (str): The relative path from the artifacts source
                                  used for links.
             filename (str): An optional alternate filename in links.
+            is_index (bool): Is this file an index
         """
         # Make FileNotFoundError exception to be compatible with python2
         try:
@@ -285,6 +289,7 @@ class FileDetail():
         else:
             self.filename = filename
         self.relative_path = relative_path
+        self.is_index = is_index
 
         if self.full_path and os.path.isfile(self.full_path):
             mime_guess, encoding = mimetypes.guess_type(self.full_path)
@@ -305,7 +310,8 @@ class FileDetail():
 
     def __repr__(self):
         t = 'Folder' if self.folder else 'File'
-        return '<%s %s>' % (t, self.relative_path)
+        return '<%s %s%s>' % (t, self.relative_path,
+                              ' (index)' if self.is_index else '')
 
 
 class FileList(Sequence):
@@ -411,6 +417,7 @@ class Indexer():
         FileList
 
     - make_indexes() : make index.html in folders
+    - make_download_script() : make a script to download all logs
     """
     def __init__(self, file_list):
         '''
@@ -530,7 +537,8 @@ class Indexer():
             if full_path:
                 filename = os.path.basename(full_path)
                 relative_name = os.path.join(folder, filename)
-                indexes[folder] = FileDetail(full_path, relative_name)
+                indexes[folder] = FileDetail(full_path, relative_name,
+                                             is_index=True)
 
         # This appends the index file at the end of the group of files
         # for each directory.
@@ -553,6 +561,41 @@ class Indexer():
         new_list.reverse()
         self.file_list.file_list = new_list
 
+    def make_download_script(self, base_url, download_template):
+        '''Make a download script from template
+
+        Note since you need the base_url, it really only makes sense
+        to call this after the Uploader() is initalised.
+
+        Args:
+          base_url (str): The base URL to prefix
+          download_template (str): Path to a jinja2 template
+
+        Return:
+          None; a file with the same name as the template (stripped of
+          .j2 if present) is added to self.file_list for upload.
+        '''
+        # Prune the list to just be files, no indexes (this should run
+        # before indexing anyway)
+        download_files = [f for f in self.file_list
+                          if not f.folder and not f.is_index]
+
+        output_filename = os.path.basename(download_template[:-3]
+                                           if download_template.endswith('.j2')
+                                           else download_template)
+        output = os.path.join(self.file_list.get_tempdir(), output_filename)
+
+        with open(download_template) as f, open(output, 'wb') as output:
+            logging.debug("Writing template %s" % output.name)
+            template = jinja2.Template(f.read())
+            rendered = template.stream(
+                base_url=base_url.rstrip('/'),
+                # jinja wants unicode input
+                file_list=[to_text(f.relative_path) for f in download_files])
+            rendered.dump(output, encoding='utf-8')
+
+        download_script = FileDetail(output.name, output_filename)
+        self.file_list.file_list.append(download_script)
+
+
 class GzipFilter():
     chunk_size = 16384
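As the docstring says, the script needs the final log URL, so an Uploader has to exist before the template is rendered. A minimal sketch of that ordering within this module, mirroring what run() does further down (the cloud name, container and log path are placeholders, and dry_run avoids touching a real cloud):

    # Sketch: build a file list, create the uploader first so its URL is
    # known, then render the download script into the list for upload.
    file_list = FileList()                   # zero-arg construction assumed
    file_list.add('/tmp/example-logs/')      # placeholder log root

    uploader = Uploader('mycloud', 'logs', dry_run=True)  # placeholder args
    indexer = Indexer(file_list)
    indexer.make_download_script(uploader.url,
                                 'templates/download-logs.sh.j2')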
@@ -604,7 +647,13 @@ class DeflateFilter():
 
 class Uploader():
     def __init__(self, cloud, container, prefix=None, delete_after=None,
-                 public=True):
+                 public=True, dry_run=False):
+
+        if dry_run:
+            self.dry_run = True
+            self.url = 'http://dry-run-url.com/a/path/'
+            return
+
         self.cloud = cloud
         self.container = container
         self.prefix = prefix or ''
@@ -670,6 +719,10 @@ class Uploader():
 
     def upload(self, file_list):
         """Spin up thread pool to upload to swift"""
+
+        if self.dry_run:
+            return
+
         num_threads = min(len(file_list), MAX_UPLOAD_THREADS)
         threads = []
         queue = queuelib.Queue()
@@ -753,7 +806,7 @@ class Uploader():
 def run(cloud, container, files,
         indexes=True, parent_links=True, topdir_parent_link=False,
         partition=False, footer='index_footer.html', delete_after=15552000,
-        prefix=None, public=True, dry_run=False):
+        prefix=None, public=True, dry_run=False, download_template=''):
 
     if prefix:
         prefix = prefix.lstrip('/')
@@ -769,8 +822,16 @@ def run(cloud, container, files,
     for file_path in files:
         file_list.add(file_path)
 
+    # Upload.
+    uploader = Uploader(cloud, container, prefix, delete_after,
+                        public, dry_run)
+
     indexer = Indexer(file_list)
 
+    # (Possibly) make download script
+    if download_template:
+        indexer.make_download_script(uploader.url, download_template)
+
     # (Possibly) make indexes.
     if indexes:
         indexer.make_indexes(create_parent_links=parent_links,
@@ -781,14 +842,6 @@ def run(cloud, container, files,
     for x in file_list:
         logging.debug(x)
 
-    # Do no connect to swift or do any uploading in a dry run
-    if dry_run:
-        # No URL is known, so return nothing
-        return
-
-    # Upload.
-    uploader = Uploader(cloud, container, prefix, delete_after,
-                        public)
     uploader.upload(file_list)
     return uploader.url
 
@@ -807,6 +860,7 @@ def ansible_main():
             footer=dict(type='str'),
             delete_after=dict(type='int'),
             prefix=dict(type='str'),
+            download_template=dict(type='str'),
         )
     )
 
@@ -821,7 +875,8 @@ def ansible_main():
                   footer=p.get('footer'),
                   delete_after=p.get('delete_after', 15552000),
                   prefix=p.get('prefix'),
-                  public=p.get('public'))
+                  public=p.get('public'),
+                  download_template=p.get('download_template'))
     except (keystoneauth1.exceptions.http.HttpError,
             requests.exceptions.RequestException):
         s = "Error uploading to %s.%s" % (cloud.name, cloud.config.region_name)
@@ -863,6 +918,9 @@ def cli_main():
                              'upload. Default is 6 months (15552000 seconds) '
                              'and if set to 0 X-Delete-After will not be set',
                         type=int)
+    parser.add_argument('--download-template', default='',
+                        help='Path to a Jinja2 template that will be filled '
+                             'out to create an automatic download script')
     parser.add_argument('--prefix',
                         help='Prepend this path to the object names when '
                              'uploading')
@@ -900,7 +958,8 @@ def cli_main():
               delete_after=args.delete_after,
               prefix=args.prefix,
               public=not args.no_public,
-              dry_run=args.dry_run)
+              dry_run=args.dry_run,
+              download_template=args.download_template)
     print(url)
 
 
@@ -16,6 +16,12 @@
   tags:
     - skip_ansible_lint
 
+- name: Set download template
+  set_fact:
+    download_template: "{{ zuul_log_download_template }}"
+  when:
+    - zuul_log_include_download_script
+
 - name: Upload logs to swift
   delegate_to: localhost
   zuul_swift_upload:
@@ -28,6 +34,7 @@
     files:
       - "{{ zuul.executor.log_root }}/"
     delete_after: "{{ zuul_log_delete_after | default(omit) }}"
+    download_template: "{{ download_template | default(omit) }}"
   register: upload_results
 
 - name: Return log URL to Zuul
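End to end, when ``zuul_log_include_download_script`` is true the task above exposes the template path as ``download_template``, the module argspec accepts it, and ansible_main() hands it on to run(). Roughly, with placeholder values (dry_run=True so no cloud connection is attempted):

    # Sketch of the resulting run() call; every value here is illustrative.
    url = run(cloud=None,                  # a real cloud object in practice
              container='logs',
              files=['/tmp/example-log-root/'],
              prefix='builds/abc123',
              download_template='templates/download-logs.sh.j2',
              dry_run=True)
    # run() returns the base log URL; download-logs.sh now sits at the root
    # of the uploaded tree alongside the generated indexes.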