Add microsecond columns to the test_runs table
This commit adds a migration that adds two columns to the test_runs table to store the microsecond component of the start and stop timestamps for each test_run row. Previously we relied on the microseconds being carried over from the datetime object passed into the SQLAlchemy object; however, in certain configurations this data was being lost. To avoid this in the future, this change decouples the microseconds from the timestamp so we are no longer dependent on the underlying DB storing them correctly.

As part of this it adds a new config flag to skip the functionally optional part of the migration. The data-migration step is nominally optional in that, under certain DB configurations (in this case, when the microseconds were already stripped by the DB), skipping it makes no difference to the result; the schema changes are always applied.

Change-Id: Ibaafb7d8fc8a8e8aaf7b96672d5c47f46180e0ca
Story: #2000096
parent 3824a533a4
commit ee6a359650
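The idea behind the change, as a standalone sketch (plain stdlib Python, not code from this diff): the microsecond component is split into its own integer column on write and folded back into the datetime on read, so it survives databases that truncate DATETIME precision.

    import datetime

    # What the write path now records: the naive timestamp plus its
    # microsecond component in a separate integer column.
    start_time = datetime.datetime.utcnow().replace(tzinfo=None)
    start_time_microsecond = start_time.microsecond

    # What a database that truncates DATETIME precision would hand back.
    stored_start = start_time.replace(microsecond=0)

    # What the read path now does: fold the stored component back in.
    recovered = stored_start.replace(microsecond=start_time_microsecond)
    assert recovered == start_time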
@@ -60,4 +60,8 @@ Properties:
 the `testtools documentation <http://testtools.readthedocs.org/en/latest/api.html#testtools.StreamResult.status>`_
 for the details on each status.
 * **start_time**: The timestamp when test execution started
+* **start_time_microsecond**: The microsecond component of the timestamp when
+  test execution started
 * **stop_time**: The timestamp when the test finished executing
+* **stop_time_microsecond**: The microsecond component of the timestamp when
+  test execution finished
@@ -235,8 +235,14 @@ def create_test_run(test_id, run_id, status, start_time=None,
     test_run.test_id = test_id
     test_run.run_id = run_id
     test_run.status = status
-    test_run.stop_time = end_time.replace(tzinfo=None)
-    test_run.start_time = start_time.replace(tzinfo=None)
+    start_time = start_time.replace(tzinfo=None)
+    start_time_microsecond = start_time.microsecond
+    stop_time = end_time.replace(tzinfo=None)
+    stop_time_microsecond = stop_time.microsecond
+    test_run.stop_time = stop_time
+    test_run.stop_time_microsecond = stop_time_microsecond
+    test_run.start_time = start_time
+    test_run.start_time_microsecond = start_time_microsecond
     session = session or get_session()
     with session.begin():
         session.add(test_run)
@@ -477,6 +483,11 @@ def get_test_run_duration(test_run_id, session=None):
     """
     session = session or get_session()
     test_run = get_test_run_by_id(test_run_id, session)
+    start_time = test_run.start_time
+    start_time = start_time.replace(
+        microsecond=test_run.start_time_microsecond)
+    stop_time = test_run.stop_time
+    stop_time = stop_time.replace(microsecond=test_run.stop_time_microsecond)
     return read_subunit.get_duration(test_run.start_time, test_run.stop_time)
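A standalone illustration (made-up values, not code from the commit) of why the recombination matters for sub-second durations:

    import datetime

    # Timestamps as a truncating DB would return them (microsecond == 0).
    start = datetime.datetime(2015, 2, 27, 18, 39, 13)
    stop = datetime.datetime(2015, 2, 27, 18, 39, 17)
    # Values read back from the new *_microsecond columns.
    start_time_microsecond, stop_time_microsecond = 250000, 750000

    start = start.replace(microsecond=start_time_microsecond)
    stop = stop.replace(microsecond=stop_time_microsecond)
    print((stop - start).total_seconds())  # 4.5 instead of a truncated 4.0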
@@ -518,25 +529,32 @@ def get_tests_run_dicts_from_run_id(run_id, session=None):
     session = session or get_session()
     query = db_utils.model_query(models.Test, session=session).join(
         models.TestRun).filter_by(run_id=run_id).join(
-            models.TestRunMetadata).values(models.Test.test_id,
-                                           models.TestRun.status,
-                                           models.TestRun.start_time,
-                                           models.TestRun.stop_time,
-                                           models.TestRunMetadata.key,
-                                           models.TestRunMetadata.value)
+            models.TestRunMetadata).values(
+                models.Test.test_id,
+                models.TestRun.status,
+                models.TestRun.start_time,
+                models.TestRun.start_time_microsecond,
+                models.TestRun.stop_time,
+                models.TestRun.stop_time_microsecond,
+                models.TestRunMetadata.key,
+                models.TestRunMetadata.value)
     test_runs = {}
     for test_run in query:
         if test_run[0] not in test_runs:
+            start_time = test_run[2]
+            start_time = start_time.replace(microsecond=test_run[3])
+            stop_time = test_run[4]
+            stop_time = stop_time.replace(microsecond=test_run[5])
             test_runs[test_run[0]] = {
                 'status': test_run[1],
-                'start_time': test_run[2],
-                'stop_time': test_run[3],
+                'start_time': start_time,
+                'stop_time': stop_time,
             }
-            if test_run[4]:
-                test_runs[test_run[0]]['metadata'] = {test_run[4]: test_run[5]}
+            if test_run[6]:
+                test_runs[test_run[0]]['metadata'] = {test_run[6]: test_run[7]}
         else:
-            if test_run[4]:
-                test_runs[test_run[0]]['metadata'][test_run[4]] = test_run[5]
+            if test_run[6]:
+                test_runs[test_run[0]]['metadata'][test_run[6]] = test_run[7]
     return test_runs
@@ -554,10 +572,15 @@ def get_test_run_time_series(test_id, session=None):
     session = session or get_session()
     query = db_utils.model_query(models.TestRun, session=session).filter_by(
         test_id=test_id).filter_by(status='success').values(
-            models.TestRun.start_time, models.TestRun.stop_time)
+            models.TestRun.start_time, models.TestRun.start_time_microsecond,
+            models.TestRun.stop_time, models.TestRun.stop_time_microsecond)
     time_series = {}
     for test_run in query:
-        time_series[test_run[0]] = (test_run[1] - test_run[0]).total_seconds()
+        start_time = test_run[0]
+        start_time = start_time.replace(microsecond=test_run[1])
+        stop_time = test_run[2]
+        stop_time = stop_time.replace(microsecond=test_run[3])
+        time_series[test_run[0]] = (stop_time - start_time).total_seconds()
     return time_series
@@ -87,7 +87,9 @@ class TestRun(BASE, SubunitBase):
     run_id = sa.Column(sa.String(36), sa.ForeignKey('runs.id'), nullable=False)
     status = sa.Column(sa.String(256))
     start_time = sa.Column(sa.DateTime())
+    start_time_microsecond = sa.Column(sa.Integer(), default=0)
     stop_time = sa.Column(sa.DateTime())
+    stop_time_microsecond = sa.Column(sa.Integer(), default=0)


 class RunMetadata(BASE, SubunitBase):
@@ -31,9 +31,19 @@ def state_path_def(*args):
     """Return an uninterpolated path relative to $state_path."""
     return os.path.join('$state_path', *args)

+MIGRATION_OPTS = [
+    cfg.BoolOpt('disable-microsecond-data-migration', short='d', default=False,
+                help="If set to true this option will skip the data migration"
+                     " part of the microsecond migration. The schema changes "
+                     "will still be run. If the database has already stripped "
+                     "out the microseconds from the timestamps this will skip "
+                     "converting the microsecond field from the timestamps "
+                     "into a separate column")
+]

 CONF = cfg.CONF
 CONF.register_cli_opts(options.database_opts, group='database')
+CONF.register_cli_opts(MIGRATION_OPTS)


 def do_alembic_command(config, cmd, *args, **kwargs):
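A note on the option name: oslo.config maps dashes in an option name to underscores on the CONF attribute, which is why the migration below checks CONF.disable_microsecond_data_migration. A minimal standalone sketch (same import style as the files in this change, not part of the commit):

    from oslo.config import cfg

    CONF = cfg.CONF
    CONF.register_cli_opts([
        cfg.BoolOpt('disable-microsecond-data-migration', short='d',
                    default=False),
    ])
    CONF(['--disable-microsecond-data-migration'])  # parse a sample command line
    print(CONF.disable_microsecond_data_migration)  # True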
@@ -0,0 +1,60 @@
+# Copyright 2015 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Add microsecond columns to test_runs table
+
+Revision ID: 1679b5bc102c
+Revises: 5332fe255095
+Create Date: 2015-02-27 18:39:13.275801
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '1679b5bc102c'
+down_revision = '5332fe255095'
+
+from alembic import op
+from oslo.config import cfg
+from oslo.db.sqlalchemy import utils as db_utils
+import sqlalchemy as sa
+
+from subunit2sql.db import api as db_api
+from subunit2sql.db import models
+
+
+CONF = cfg.CONF
+
+
+def upgrade():
+    op.add_column('test_runs', sa.Column('start_time_microsecond',
+                                         sa.Integer(), default=0))
+    op.add_column('test_runs', sa.Column('stop_time_microsecond',
+                                         sa.Integer(), default=0))
+    if not CONF.disable_microsecond_data_migration:
+        session = db_api.get_session()
+        query = db_utils.model_query(models.TestRun, session).values(
+            models.TestRun.id, models.TestRun.start_time,
+            models.TestRun.stop_time)
+        for test_run in query:
+            start_micro = test_run[1].microsecond
+            stop_micro = test_run[2].microsecond
+            values = {'start_time_microsecond': start_micro,
+                      'stop_time_microsecond': stop_micro}
+            db_api.update_test_run(values, test_run[0], session)
+        session.close()
+
+
+def downgrade():
+    op.drop_column('test_runs', 'stop_time_microsecond')
+    op.drop_column('test_runs', 'start_time_microsecond')
@@ -492,3 +492,36 @@ class TestWalkMigrations(base.TestCase):
         # Ensure the test with 2 success each taking 4 sec lists the proper
         # run_time
         self.assertIn(('fake_null_test_id', 4.0), run_time_pairs)
+
+    def _pre_upgrade_1679b5bc102c(self, engine):
+        test_runs = get_table(engine, 'test_runs')
+        now = datetime.datetime.now()
+        future_now = now + datetime.timedelta(0, 4)
+        fake_test_runs = {'id': 'abc123',
+                          'test_id': 'fake_null_test_id',
+                          'run_id': 'fake_run.id',
+                          'status': 'success',
+                          'start_time': now,
+                          'stop_time': future_now}
+        test_runs.insert().values(fake_test_runs).execute()
+        return fake_test_runs
+
+    def _check_1679b5bc102c(self, engine, data):
+        test_runs = get_table(engine, 'test_runs')
+        start_micro = data['start_time'].microsecond
+        stop_micro = data['stop_time'].microsecond
+        result = test_runs.select().execute()
+        row = None
+        for i in result:
+            if i.id == data['id']:
+                row = i
+                break
+        else:
+            self.fail("Row not present")
+        if row.start_time_microsecond == 0 and row.stop_time_microsecond == 0:
+            # Backing db doesn't store subseconds so the migration will just
+            # populate zeros and the data is lost to the ether.
+            pass
+        else:
+            self.assertEqual(start_micro, row.start_time_microsecond)
+            self.assertEqual(row.stop_time_microsecond, stop_micro)