From aae3f55304ff161b401b803769350e9cb66b0e8c Mon Sep 17 00:00:00 2001 From: Svetlana Shturm Date: Thu, 5 Sep 2013 12:50:03 +0400 Subject: [PATCH] Fix wrong migrations Migrations fail for Postgres. Moreover, for MySQL we can get the error "Specified key was too long; max key length is 1000 bytes" in the b6ae66d05e3 migration with some versions of the dialect. Fixes-Bug: #1219776 Change-Id: Id9b0e1eb0e685053291367f7eb62fef68b6b2f84 --- .../versions/2c3ccda5a3ad_fix_uniq_name.py | 22 ++++----- .../b6ae66d05e3_remove_extra_indexes.py | 49 ++++++++++++------- 2 files changed, 41 insertions(+), 30 deletions(-) diff --git a/ceilometer/storage/sqlalchemy/alembic/versions/2c3ccda5a3ad_fix_uniq_name.py b/ceilometer/storage/sqlalchemy/alembic/versions/2c3ccda5a3ad_fix_uniq_name.py index a99f0194b..d00d368b1 100644 --- a/ceilometer/storage/sqlalchemy/alembic/versions/2c3ccda5a3ad_fix_uniq_name.py +++ b/ceilometer/storage/sqlalchemy/alembic/versions/2c3ccda5a3ad_fix_uniq_name.py @@ -32,13 +32,13 @@ from alembic import op TABLE_NAME = 'sourceassoc' -OLD_NAME = 'uniq_sourceassoc0meter_id' -NEW_NAME = 'uniq_sourceassoc0meter_id0user_id' +UNIQ_NAME = 'uniq_sourceassoc0meter_id0user_id' COLUMNS = ('meter_id', 'user_id') -def change_uniq(table_name, old_name, new_name, columns): - engine = op.get_bind().engine +def change_uniq(table_name, uniq_name, columns, downgrade=False): + bind = op.get_bind() + engine = bind.engine if engine.name == 'sqlite': return if engine.name == 'mysql': @@ -50,12 +50,10 @@ def change_uniq(table_name, old_name, new_name, columns): op.drop_constraint('fk_sourceassoc_user_id', table_name, type_='foreignkey') - try: - # For some versions of dialects constraint can be skipped. 
- op.drop_constraint(old_name, table_name=table_name, type_='unique') - except Exception: - pass - op.create_unique_constraint(new_name, table_name, columns) + if downgrade: + op.drop_constraint(uniq_name, table_name=table_name, type_='unique') + else: + op.create_unique_constraint(uniq_name, table_name, columns) if engine.name == 'mysql': op.create_foreign_key('fk_sourceassoc_meter_id', table_name, 'meter', ['meter_id'], ['id']) @@ -64,8 +62,8 @@ def change_uniq(table_name, old_name, new_name, columns): def upgrade(): - change_uniq(TABLE_NAME, OLD_NAME, NEW_NAME, COLUMNS) + change_uniq(TABLE_NAME, UNIQ_NAME, COLUMNS) def downgrade(): - change_uniq(TABLE_NAME, NEW_NAME, OLD_NAME, COLUMNS) + change_uniq(TABLE_NAME, UNIQ_NAME, COLUMNS, downgrade=True) diff --git a/ceilometer/storage/sqlalchemy/alembic/versions/b6ae66d05e3_remove_extra_indexes.py b/ceilometer/storage/sqlalchemy/alembic/versions/b6ae66d05e3_remove_extra_indexes.py index ec260e53d..8f89a9ae5 100644 --- a/ceilometer/storage/sqlalchemy/alembic/versions/b6ae66d05e3_remove_extra_indexes.py +++ b/ceilometer/storage/sqlalchemy/alembic/versions/b6ae66d05e3_remove_extra_indexes.py @@ -29,53 +29,66 @@ revision = 'b6ae66d05e3' down_revision = '17738166b91' from alembic import op +import sqlalchemy as sa INDEXES = ( - # ([dialects], table_name, index_name, create/delete, uniq/not uniq) + # ([dialects], table_name, index_name, create/delete, uniq/not_uniq, + # length_limited) (['mysql', 'sqlite', 'postgresql'], 'resource', 'resource_user_id_project_id_key', - ('user_id', 'project_id'), True, False), - (['mysql'], 'source', 'id', ('id',), False, True)) + ('user_id', 'project_id'), True, False, True), + (['mysql'], 'source', 'id', ('id',), False, True, False)) -def index_cleanup(engine_names, table_name, uniq_name, columns, create=True, - unique=False): - engine = op.get_bind().engine +def index_cleanup(engine_names, table_name, uniq_name, columns, create, + unique, limited): + bind = op.get_bind() + engine = 
bind.engine if engine.name not in engine_names: return if create: - # We have unique constraint in postgres for `resource` table. - # But it should be a simple index. So, we should delete unique key - # before index creation. - if engine.name == 'postgresql': - op.drop_constraint(uniq_name, table_name, type_='unique') - op.create_index(uniq_name, table_name, columns, unique=unique) + if limited and engine.name == 'mysql': + # For some versions of mysql we can get an error + # "Specified key was too long; max key length is 1000 bytes". + # We should create an index by hand in this case with limited + # length of columns. + meta = sa.MetaData() + meta.bind = engine + table = sa.Table(table_name, meta, autoload=True) + columns_mysql = ",".join((c + "(100)" for c in columns)) + sql = ("create index %s ON %s (%s)" % (uniq_name, table, + columns_mysql)) + engine.execute(sql) + else: + op.create_index(uniq_name, table_name, columns, unique=unique) else: if unique: op.drop_constraint(uniq_name, table_name, type_='unique') else: op.drop_index(uniq_name, table_name=table_name) - if engine.name == 'postgresql': - op.create_unique_constraint(uniq_name, table_name, columns) def upgrade(): - for engine_names, table_name, uniq_name, columns, create, uniq in INDEXES: + for (engine_names, table_name, uniq_name, columns, create, uniq, + limited) in INDEXES: index_cleanup(engine_names, table_name, uniq_name, columns, create, - uniq) + uniq, + limited) def downgrade(): - for engine_names, table_name, uniq_name, columns, create, uniq in INDEXES: + for (engine_names, table_name, uniq_name, columns, create, uniq, + limited) in INDEXES: index_cleanup(engine_names, table_name, uniq_name, columns, not create, - uniq) + uniq, + limited)