Add basic support for creating a subunit stream from db

This adds a new command, sql2subunit, to take a run_id and create a
subunit stream from all the data in the db around that run_id.

Change-Id: Id1f95da71f3d4ca8469e985ee904e91fb4acb247
This commit is contained in:
Matthew Treinish 2014-08-02 09:07:01 -04:00
parent 57722c6ef5
commit d43c12d1e1
6 changed files with 138 additions and 13 deletions

View File

@ -75,3 +75,17 @@ run(s) being added. The artifacts option should be used to pass in a url or
path that points to any logs or other external test artifacts related to the path that points to any logs or other external test artifacts related to the
run being added. The run_meta option takes in a dictionary which will be added run being added. The run_meta option takes in a dictionary which will be added
to the database as key value pairs associated with the run being added. to the database as key value pairs associated with the run being added.
Creating a v2 Subunit Stream from the DB
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The sql2subunit utility takes a run_id and creates a subunit
v2 stream from the data stored in the DB about that run. To create a new
subunit stream, run::
sql2subunit $RUN_ID
along with any options that you would normally use to either specify a config
file or the DB connection info. Running this command will print to stdout the
subunit v2 stream for the run specified by $RUN_ID, unless the --out_path
argument is specified to write it to a file instead.

View File

@ -6,6 +6,7 @@ Short Term
* Add more unit tests * Add more unit tests
* Migration tests * Migration tests
* DB API unit tests * DB API unit tests
* Write subunit module
* Flesh out query side of DB API to make it useful for building additional * Flesh out query side of DB API to make it useful for building additional
tooling. tooling.
* Investigate dropping oslo.db from requirements to enable using other * Investigate dropping oslo.db from requirements to enable using other
@ -15,6 +16,5 @@ Short Term
Longer Term Longer Term
----------- -----------
* Add a method of taking a test_run from the DB and create a subunit file
* Add tooling to pull the data and visualize it in fun ways * Add tooling to pull the data and visualize it in fun ways
* Add some statistics functions on top of the DB api to perform analysis * Add some statistics functions on top of the DB api to perform analysis

View File

@ -23,6 +23,7 @@ packages =
[entry_points] [entry_points]
console_scripts = console_scripts =
subunit2sql = subunit2sql.shell:main subunit2sql = subunit2sql.shell:main
sql2subunit = subunit2sql.write_subunit:main
[build_sphinx] [build_sphinx]
source-dir = doc/source source-dir = doc/source

View File

@ -153,6 +153,13 @@ def add_test_run_metadata(meta_dict, test_run_id, session=None):
return metadata return metadata
def get_test_run_metadata(test_run_id, session=None):
    """Return all metadata rows attached to a single test run.

    :param test_run_id: id of the test run whose metadata is wanted
    :param session: optional existing DB session; a new session is
                    created when one is not supplied
    :return: list of TestRunMetadata model objects
    """
    session = session or get_session()
    return db_utils.model_query(
        models.TestRunMetadata, session).filter_by(
            test_run_id=test_run_id).all()
def get_all_tests(): def get_all_tests():
query = db_utils.model_query(models.Test) query = db_utils.model_query(models.Test)
return query.all() return query.all()

View File

@ -23,19 +23,22 @@ from subunit2sql.db import api
from subunit2sql import exceptions from subunit2sql import exceptions
from subunit2sql import read_subunit as subunit from subunit2sql import read_subunit as subunit
shell_opts = [
cfg.StrOpt('state_path', default='$pybasedir',
help='Top level dir for maintaining subunit2sql state'),
cfg.MultiStrOpt('subunit_files', positional=True),
cfg.DictOpt('run_meta', short='r', default=None,
help='Dict of metadata about the run(s)'),
cfg.StrOpt('artifacts', short='a', default=None,
help='Location of run artifacts')
]
CONF = cfg.CONF CONF = cfg.CONF
for opt in shell_opts:
CONF.register_cli_opt(opt)
def cli_opts():
    """Register the subunit2sql command line options on the global config.

    Called from main() so registration happens at run time rather than as
    an import-time side effect.
    """
    opts = [
        cfg.StrOpt('state_path', default='$pybasedir',
                   help='Top level dir for maintaining subunit2sql state'),
        cfg.MultiStrOpt('subunit_files', positional=True),
        cfg.DictOpt('run_meta', short='r', default=None,
                    help='Dict of metadata about the run(s)'),
        cfg.StrOpt('artifacts', short='a', default=None,
                   help='Location of run artifacts'),
    ]
    for option in opts:
        CONF.register_cli_opt(option)
def state_path_def(*args): def state_path_def(*args):
@ -140,6 +143,7 @@ def process_results(results):
def main(): def main():
cli_opts()
parse_args(sys.argv) parse_args(sys.argv)
if CONF.subunit_files: if CONF.subunit_files:
streams = [subunit.ReadSubunit(open(s, 'r')) for s in streams = [subunit.ReadSubunit(open(s, 'r')) for s in

View File

@ -0,0 +1,99 @@
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import sys
from oslo.config import cfg
import subunit
from subunit import iso8601
from subunit2sql.db import api
from subunit2sql import shell
# Subunit status codes that may be passed as test_status to a v2 stream;
# any other value stored in the DB results in an event with no explicit
# status (see write_test below).
STATUS_CODES = frozenset([
    'exists',
    'fail',
    'skip',
    'success',
    'uxsuccess',
    'xfail',
])

# Global oslo.config object; CLI options are registered on it in cli_opts().
CONF = cfg.CONF
def cli_opts():
    """Register the sql2subunit-specific options on the global config."""
    opts = [
        cfg.StrOpt('run_id', required=True, positional=True,
                   help='Run id to use for creating a subunit stream'),
        cfg.StrOpt('out_path', short='o', default=None,
                   help='Path to write the subunit stream output, if none'
                        ' is specified STDOUT will be used'),
    ]
    for option in opts:
        cfg.CONF.register_cli_opt(option)
def convert_datetime(timestamp):
    """Return *timestamp* as a timezone-aware UTC datetime.

    Subunit v2 requires aware timestamps; values from the DB are naive,
    so attach the UTC tzinfo (DB times are presumably stored in UTC —
    TODO confirm against the writer side).
    """
    return timestamp.replace(tzinfo=iso8601.UTC)
def write_test(output, test_run, test, metadatas):
    """Emit a single subunit status event for one test run.

    Builds the keyword arguments for output.status() step by step: tags
    from any 'tags' metadata row, the start timestamp, the test id, and
    — when the stored status is a recognized subunit status — the status
    itself plus the stop timestamp. Note only one event is written per
    test, so the stop timestamp supersedes the start timestamp whenever
    the status is recognized (same net effect as the original chain of
    functools.partial calls, where a repeated keyword overrides the
    earlier binding).
    """
    kwargs = {}
    for meta in metadatas:
        if meta.key == 'tags':
            kwargs['test_tags'] = meta.value.split(',')
    kwargs['timestamp'] = convert_datetime(test_run.start_time)
    kwargs['test_id'] = test.test_id
    if test_run.status in STATUS_CODES:
        kwargs['test_status'] = test_run.status
        kwargs['timestamp'] = convert_datetime(test_run.stop_time)
    output.status(**kwargs)
def sql2subunit(run_id, output=sys.stdout):
    """Write a subunit v2 byte stream for every test run in *run_id*.

    :param run_id: id of the run to convert
    :param output: file-like object the stream is written to
                   (defaults to stdout)

    Fix: the DB session was leaked if stream generation raised; it is
    now closed in a finally block.
    """
    session = api.get_session()
    try:
        test_runs = api.get_test_runs_by_run_id(run_id, session)
        stream = subunit.v2.StreamResultToBytes(output)
        stream.startTestRun()
        for test_run in test_runs:
            metadatas = api.get_test_run_metadata(test_run.id, session)
            # NOTE(review): unlike the other api calls this one does not
            # reuse the open session — confirm get_test_by_id accepts a
            # session argument and pass it through if so.
            test = api.get_test_by_id(test_run.test_id)
            write_test(stream, test_run, test, metadatas)
        stream.stopTestRun()
    finally:
        session.close()
def main():
    """CLI entry point: dump the subunit stream for CONF.run_id.

    Output goes to CONF.out_path when given, otherwise stdout.

    Fixes: the output file is now opened in binary mode ('wb'), since
    subunit v2 is a byte stream, and it is closed even when stream
    generation raises (previously it leaked on error).
    """
    cli_opts()
    shell.parse_args(sys.argv)
    if CONF.out_path:
        # 'wb' because StreamResultToBytes writes bytes, not text.
        fd = open(CONF.out_path, 'wb')
    else:
        fd = sys.stdout
    try:
        sql2subunit(CONF.run_id, fd)
    finally:
        if CONF.out_path:
            fd.close()
# Allow running this module directly as a script (in addition to the
# sql2subunit console entry point defined in setup.cfg).
if __name__ == "__main__":
    sys.exit(main())