Add DB migration tests

Change-Id: I0f718cfe37bf42da6b8bbf2e6dd1cbfd28e0f4dc
Closes-Bug: #1670195
Feng Shengqin 2017-03-07 11:03:40 +08:00
parent d5278254c0
commit 7b21325352
8 changed files with 343 additions and 7 deletions


@@ -14,8 +14,10 @@ sphinx>=1.5.1 # BSD
oslosphinx>=4.7.0 # Apache-2.0
oslotest>=1.10.0 # Apache-2.0
os-testr>=0.8.0 # Apache-2.0
PyMySQL>=0.7.6 # MIT License
tempest>=14.0.0 # Apache-2.0
testrepository>=0.0.18 # Apache-2.0/BSD
testresources>=0.2.4 # Apache-2.0/BSD
testscenarios>=0.4 # Apache-2.0/BSD
testtools>=1.4.0 # MIT
reno>=1.8.0 # Apache-2.0

tools/test-setup.sh (new executable file)

@@ -0,0 +1,13 @@
#!/usr/bin/env bash
# Borrowed from mysql-prep in openstack-infra/project-config/macros.yaml
DB_ROOT_PW=insecure_slave
DB_USER=openstack_citest
DB_PW=openstack_citest

sudo -H mysqladmin -u root password $DB_ROOT_PW
# Note: we remove anonymous users first.
sudo -H mysql -u root -p$DB_ROOT_PW -h localhost -e "
    DELETE FROM mysql.user WHERE User='';
    FLUSH PRIVILEGES;
    GRANT ALL PRIVILEGES ON *.*
        TO '$DB_USER'@'%' identified by '$DB_PW' WITH GRANT OPTION;"
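
As a quick sanity check of that setup, the snippet below mirrors what the new _is_backend_avail() helper in the migration tests does. It is only a sketch, not part of the commit, and it assumes the 'openstack_citest' database described in the test module's docstring already exists on localhost.

import sqlalchemy

# Assumed credentials/database from tools/test-setup.sh and the test docstring.
uri = ('mysql+pymysql://openstack_citest:openstack_citest'
       '@localhost/openstack_citest')
engine = sqlalchemy.create_engine(uri)
try:
    engine.connect().close()   # succeeds only if the grant above took effect
    print('MySQL opportunistic backend is available')
except Exception:
    print('MySQL backend unavailable; opportunistic tests will be skipped')
finally:
    engine.dispose()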

tox.ini

@@ -1,6 +1,6 @@
[tox]
minversion = 2.0
envlist = py35,py27,pep8
envlist = py35,py27,pep8,migration
skipsdist = True

[testenv]
@@ -50,6 +50,14 @@ commands = oslo_debug_helper {posargs}
basepython = python3.4
commands = oslo_debug_helper {posargs}

[testenv:migration]
setenv = {[testenv]setenv}
         OS_TEST_PATH=./zun/tests/migration
deps = {[testenv]deps}
commands =
    find . -type f -name "*.py[c|o]" -delete
    bash tools/pretty_tox.sh '{posargs}'

[flake8]
show-source = True
builtins = _


@@ -48,7 +48,8 @@ def run_migrations_online():
    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata
            target_metadata=target_metadata,
            render_as_batch=True
        )

        with context.begin_transaction():
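
Setting render_as_batch=True tells Alembic's autogenerate to emit schema changes inside batch_alter_table() blocks, the copy-and-recreate workaround SQLite needs because it cannot ALTER or DROP columns in place. A rough sketch of what such a rendered operation looks like; the table and column names are made up for illustration, not part of this commit:

from alembic import op


def upgrade():
    # Batch mode recreates the table without the column and copies the data
    # across, which is the only way SQLite can "drop" a column.
    with op.batch_alter_table('example_table') as batch_op:
        batch_op.drop_column('obsolete_column')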


@@ -38,7 +38,7 @@ _FACADE = None


def _create_facade_lazily():
    global _FACADE
    if _FACADE is None:
        _FACADE = db_session.EngineFacade.from_config(CONF)
        _FACADE = db_session.enginefacade.get_legacy_facade()
    return _FACADE
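
With the switch to oslo.db's enginefacade, the lazily created object is a LegacyEngineFacade, which exposes get_engine() and get_session(). A minimal sketch of accessors built on it (names are illustrative, not necessarily zun's exact helpers):

def get_engine():
    # Engine shared by all callers; created on first use.
    return _create_facade_lazily().get_engine()


def get_session(**kwargs):
    # New session from the facade's session maker.
    return _create_facade_lazily().get_session(**kwargs)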


@@ -16,13 +16,26 @@
import os

import alembic
from alembic import config as alembic_config
import alembic.migration as alembic_migration
from oslo_db import exception as db_exc
from oslo_db.sqlalchemy import enginefacade
from oslo_db.sqlalchemy.migration_cli import manager

from zun.db.sqlalchemy import models
import zun.conf

_MANAGER = None


def _alembic_config():
    path = os.path.join(os.path.dirname(__file__), 'alembic.ini')
    config = alembic_config.Config(path)
    return config


def get_manager():
    global _MANAGER
    if not _MANAGER:
@@ -38,13 +51,17 @@ def get_manager():
    return _MANAGER


def version():
def version(config=None, engine=None):
    """Current database version.

    :returns: Database version
    :rtype: string
    """
    return get_manager().version()
    if engine is None:
        engine = enginefacade.get_legacy_facade().get_engine()
    with engine.connect() as conn:
        context = alembic_migration.MigrationContext.configure(conn)
        return context.get_current_revision()


def upgrade(version):
@@ -58,7 +75,7 @@ def upgrade(version):
    get_manager().upgrade(version)


def stamp(revision):
def stamp(revision, config=None):
    """Stamps database with provided revision.

    Don't run any migrations.
@@ -67,7 +84,26 @@ def stamp(revision):
        database with most recent revision
    :type revision: string
    """
    get_manager().stamp(revision)
    config = config or _alembic_config()
    return alembic.command.stamp(config, revision=revision)


def create_schema(config=None, engine=None):
    """Create database schema from models description.
    Can be used for initial installation instead of upgrade('head').
    """
    if engine is None:
        engine = enginefacade.get_legacy_facade().get_engine()

    # NOTE(viktors): If we use metadata.create_all() for a non-empty db
    #                schema, it will only add the new tables and leave the
    #                existing ones as is, so we should avoid that situation.
    if version(engine=engine) is not None:
        raise db_exc.DbMigrationError("DB schema is already under version"
                                      " control. Use upgrade() instead")

    models.Base.metadata.create_all(engine)
    stamp('head', config=config)


def revision(message=None, autogenerate=False):
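
Because version() now accepts an explicit engine and reads the Alembic revision straight from the database, it can be exercised without the migration manager at all. A small illustrative sketch against an in-memory SQLite engine (not part of the commit):

import sqlalchemy

from zun.db.sqlalchemy import migration

engine = sqlalchemy.create_engine('sqlite:///:memory:')
# No alembic_version table yet, so the current revision is None.
print(migration.version(engine=engine))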


@@ -0,0 +1,276 @@
# Copyright 2010-2011 OpenStack Foundation
# Copyright 2012-2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for database migrations. There are "opportunistic" tests for both mysql
and postgresql in here, which allows testing against these databases in a
properly configured unit test environment.

For the opportunistic testing you need to set up a db named 'openstack_citest'
with user 'openstack_citest' and password 'openstack_citest' on localhost.
The test will then use that db and u/p combo to run the tests.

For postgres on Ubuntu this can be done with the following commands:

::

 sudo -u postgres psql
 postgres=# create user openstack_citest with createdb login password
      'openstack_citest';
 postgres=# create database openstack_citest with owner openstack_citest;

"""

import contextlib
import os

from alembic import script
import fixtures
import mock
from oslo_db import exception as db_exc
from oslo_db.sqlalchemy import enginefacade
from oslo_db.sqlalchemy import test_base
from oslo_log import log as logging
import sqlalchemy
import sqlalchemy.exc

from zun.db.sqlalchemy import migration
from zun.tests import base

LOG = logging.getLogger(__name__)

# NOTE(vdrok): This was introduced after migration tests started taking more
# time in gate. Timeout value in seconds for tests performing migrations.
MIGRATIONS_TIMEOUT = 300
def _get_connect_string(backend, user, passwd, database):
    """Get database connection

    Try to get a connection with a very specific set of values, if we get
    these then we'll run the tests, otherwise they are skipped
    """
    if backend == "postgres":
        backend = "postgresql+psycopg2"
    elif backend == "mysql":
        backend = "mysql+pymysql"
    elif backend == "sqlite":
        path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../'))
        return 'sqlite:///' + os.path.join(path, database)
    else:
        raise Exception("Unrecognized backend: '%s'" % backend)

    return ("%(backend)s://%(user)s:%(passwd)s@localhost/%(database)s"
            % {'backend': backend, 'user': user, 'passwd': passwd,
               'database': database})


def _is_backend_avail(backend, user, passwd, database):
    try:
        connect_uri = _get_connect_string(backend, user, passwd, database)
        engine = sqlalchemy.create_engine(connect_uri)
        connection = engine.connect()
    except Exception:
        # intentionally catch all to handle exceptions even if we don't
        # have any backend code loaded.
        return False
    else:
        connection.close()
        engine.dispose()
        return True


@contextlib.contextmanager
def patch_with_engine(engine):
    with mock.patch.object(enginefacade.get_legacy_facade(),
                           'get_engine') as patch_engine:
        patch_engine.return_value = engine
        yield
class WalkVersionsMixin(object):
    def _walk_versions(self, engine=None, alembic_cfg=None):
        # Determine latest version script from the repo, then
        # upgrade from 1 through to the latest, with no data
        # in the databases. This just checks that the schema itself
        # upgrades successfully.

        # Place the database under version control
        with patch_with_engine(engine):
            script_directory = script.ScriptDirectory.from_config(alembic_cfg)
            self.assertIsNone(self.migration_api.version(alembic_cfg))
            versions = [ver for ver in script_directory.walk_revisions()]
            for version in reversed(versions):
                self._migrate_up(engine, alembic_cfg,
                                 version.revision, with_data=True)

    def _migrate_up(self, engine, config, version, with_data=False):
        """migrate up to a new version of the db.

        We allow for data insertion and post checks at every
        migration version with special _pre_upgrade_### and
        _check_### functions in the main test.
        """
        # NOTE(sdague): try block is here because it's impossible to debug
        # where a failed data migration happens otherwise
        try:
            if with_data:
                data = None
                pre_upgrade = getattr(
                    self, "_pre_upgrade_%s" % version, None)
                if pre_upgrade:
                    data = pre_upgrade(engine)

            self.migration_api.upgrade(version)
            self.assertEqual(version, self.migration_api.version(config))
            if with_data:
                check = getattr(self, "_check_%s" % version, None)
                if check:
                    check(engine, data)
        except Exception:
            LOG.error(("Failed to migrate to version %(version)s on engine "
                       "%(engine)s"),
                      {'version': version, 'engine': engine})
            raise
class TestWalkVersions(base.TestCase, WalkVersionsMixin):
    def setUp(self):
        super(TestWalkVersions, self).setUp()
        self.migration_api = mock.MagicMock()
        self.engine = mock.MagicMock()
        self.config = mock.MagicMock()
        self.versions = [mock.Mock(revision='2b2'), mock.Mock(revision='1a1')]

    def test_migrate_up(self):
        self.migration_api.version.return_value = 'dsa123'

        self._migrate_up(self.engine, self.config, 'dsa123')

        self.migration_api.upgrade.assert_called_with('dsa123')
        self.migration_api.version.assert_called_with(self.config)

    def test_migrate_up_with_data(self):
        test_value = {"a": 1, "b": 2}
        self.migration_api.version.return_value = '141'
        self._pre_upgrade_141 = mock.MagicMock()
        self._pre_upgrade_141.return_value = test_value
        self._check_141 = mock.MagicMock()

        self._migrate_up(self.engine, self.config, '141', True)

        self._pre_upgrade_141.assert_called_with(self.engine)
        self._check_141.assert_called_with(self.engine, test_value)

    @mock.patch.object(script, 'ScriptDirectory')
    @mock.patch.object(WalkVersionsMixin, '_migrate_up')
    def test_walk_versions_all_default(self, _migrate_up, script_directory):
        fc = script_directory.from_config()
        fc.walk_revisions.return_value = self.versions
        self.migration_api.version.return_value = None

        self._walk_versions(self.engine, self.config)

        self.migration_api.version.assert_called_with(self.config)

        upgraded = [mock.call(self.engine, self.config, v.revision,
                              with_data=True) for v in reversed(self.versions)]
        self.assertEqual(self._migrate_up.call_args_list, upgraded)

    @mock.patch.object(script, 'ScriptDirectory')
    @mock.patch.object(WalkVersionsMixin, '_migrate_up')
    def test_walk_versions_all_false(self, _migrate_up, script_directory):
        fc = script_directory.from_config()
        fc.walk_revisions.return_value = self.versions
        self.migration_api.version.return_value = None

        self._walk_versions(self.engine, self.config)

        upgraded = [mock.call(self.engine, self.config, v.revision,
                              with_data=True) for v in reversed(self.versions)]
        self.assertEqual(upgraded, self._migrate_up.call_args_list)
class MigrationCheckersMixin(object):
    def setUp(self):
        super(MigrationCheckersMixin, self).setUp()
        self.config = migration._alembic_config()
        self.migration_api = migration
        self.useFixture(fixtures.Timeout(MIGRATIONS_TIMEOUT, gentle=True))

    def test_walk_versions(self):
        db_name = str(self.engine.url).rsplit('/', 1)[-1]
        self.config.attributes['test_db_name'] = db_name
        self._walk_versions(self.engine, self.config)

    def test_connect_fail(self):
        """Test that we can trigger a database connection failure

        Test that we can fail gracefully to ensure we don't break people
        without specific database backend.

        Pass on Sqlite since it has no concept of connecting as a user
        """
        if _is_backend_avail(self.FIXTURE.DRIVER, "zun_migration_fail",
                             self.FIXTURE.USERNAME, self.FIXTURE.DBNAME):
            if self.FIXTURE.DRIVER != 'sqlite':
                self.fail("Shouldn't have connected")

    def test_upgrade_and_version(self):
        with patch_with_engine(self.engine):
            self.migration_api.upgrade('head')
            self.assertIsNotNone(self.migration_api.version())

    def test_create_schema_and_version(self):
        with patch_with_engine(self.engine):
            self.migration_api.create_schema()
            self.assertIsNotNone(self.migration_api.version())

    # TODO(swatson): Find a patch version to make the next two tests make
    # sense
    def test_upgrade_and_create_schema(self):
        with patch_with_engine(self.engine):
            self.migration_api.upgrade('2d1354bbf76e')
            self.assertRaises(db_exc.DbMigrationError,
                              self.migration_api.create_schema)

    def test_upgrade_twice(self):
        with patch_with_engine(self.engine):
            self.migration_api.upgrade('2d1354bbf76e')
            v1 = self.migration_api.version()
            self.migration_api.upgrade('head')
            v2 = self.migration_api.version()
            self.assertNotEqual(v1, v2)
class TestMigrationsMySQL(MigrationCheckersMixin,
                          WalkVersionsMixin,
                          test_base.MySQLOpportunisticTestCase):
    def _setUp(self):
        self.addCleanup(self.engine.dispose)


# Investigate if there is a model/migration sync issue in zun and enable
# testing. See the implementation in ironic's
# test_migrations.ModelsMigrationsSyncMysql for re-enabling sync tests.

# TODO(swatson): Enable SQLite tests once SQLite migration issues are fixed.
# SQLite doesn't support "drop column" or "alter table" calls. We have at
# least 2 migrations using drop columns, and at least 1 using an alter table
# call. Might need to modify those migrations for SQLite compatibility.
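
The _migrate_up() helper above looks for optional _pre_upgrade_<revision> and _check_<revision> hooks on the test class, letting a test seed data before a specific migration and verify it afterwards. A hedged sketch of such a pair follows; the revision id 'abc123' and the 'container'/'uuid' table and column names are placeholders, not actual zun migrations:

import sqlalchemy


class ExampleMigrationChecks(object):
    """Sketch: hooks like these would live on MigrationCheckersMixin."""

    def _pre_upgrade_abc123(self, engine):
        # Seed a row the migration is expected to preserve.
        engine.execute(sqlalchemy.text(
            "INSERT INTO container (uuid) VALUES (:uuid)"), uuid='some-uuid')
        return {'uuid': 'some-uuid'}

    def _check_abc123(self, engine, data):
        # _migrate_up() passes in the value returned by the pre-upgrade hook.
        rows = engine.execute(sqlalchemy.text(
            "SELECT uuid FROM container WHERE uuid = :uuid"),
            uuid=data['uuid']).fetchall()
        self.assertEqual(1, len(rows))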