From c048d3fb480cbd9033152c745802691876b7ba0d Mon Sep 17 00:00:00 2001
From: John Kasperski
Date: Thu, 25 Sep 2014 10:38:45 -0500
Subject: [PATCH] Update migration scripts to support DB2

Three of the migration scripts are causing failures with DB2:
- DB2 doesn't support nullable columns in a primary key.
- Hard-coded SQL statements that use False/True as Boolean values are
  not compatible with DB2. In DB2, Boolean columns are created as small
  integers with a constraint that allows only 0 and 1.
- The hard-coded "update rows from another table" SQL is not compatible
  with DB2.
- Foreign key constraints require additional handling.

Co-authored-by: Rahul Priyadarshi
Change-Id: I82e2d1c522b81fed90a1e5cc6f2321f80797cf7b
Closes-Bug: #1328019
---
 ...1d7f831a591_add_constraint_for_routerid.py | 107 +++++++++++++-----
 ...7f7f7c456_l3_extension_distributed_mode.py |  12 +-
 ...cbf1c_unify_nsx_router_extra_attributes.py |   7 ++
 3 files changed, 95 insertions(+), 31 deletions(-)

diff --git a/neutron/db/migration/alembic_migrations/versions/31d7f831a591_add_constraint_for_routerid.py b/neutron/db/migration/alembic_migrations/versions/31d7f831a591_add_constraint_for_routerid.py
index 7deebe0ed..0ba5f58be 100644
--- a/neutron/db/migration/alembic_migrations/versions/31d7f831a591_add_constraint_for_routerid.py
+++ b/neutron/db/migration/alembic_migrations/versions/31d7f831a591_add_constraint_for_routerid.py
@@ -27,6 +27,7 @@ down_revision = '37f322991f59'
 
 from alembic import op
 import sqlalchemy as sa
+from sqlalchemy.engine import reflection
 
 TABLE_NAME = 'routerl3agentbindings'
 PK_NAME = 'pk_routerl3agentbindings'
@@ -49,7 +50,7 @@ def upgrade():
     # and all the duplicate records which violate the PK
     # constraint need to be removed.
     context = op.get_context()
-    if context.bind.dialect.name == 'postgresql':
+    if context.bind.dialect.name in ('postgresql', 'ibm_db_sa'):
         op.execute('DELETE FROM %(table)s WHERE id in ('
                    'SELECT %(table)s.id FROM %(table)s LEFT OUTER JOIN '
                    '(SELECT MIN(id) as id, router_id, l3_agent_id '
@@ -65,6 +66,19 @@ def upgrade():
 
     op.drop_column(TABLE_NAME, 'id')
 
+    # DB2 doesn't support nullable column in primary key
+    if context.bind.dialect.name == 'ibm_db_sa':
+        op.alter_column(
+            table_name=TABLE_NAME,
+            column_name='router_id',
+            nullable=False
+        )
+        op.alter_column(
+            table_name=TABLE_NAME,
+            column_name='l3_agent_id',
+            nullable=False
+        )
+
     op.create_primary_key(
         name=PK_NAME,
         table_name=TABLE_NAME,
@@ -79,16 +93,32 @@ def downgrade():
 
     # Drop the existed foreign key constraints
     # In order to perform primary key changes
-    op.drop_constraint(
-        name=fk_names[dialect]['l3_agent_id'],
-        table_name=TABLE_NAME,
-        type_='foreignkey'
-    )
-    op.drop_constraint(
-        name=fk_names[dialect]['router_id'],
-        table_name=TABLE_NAME,
-        type_='foreignkey'
-    )
+    db2fks = {}
+    if dialect == 'ibm_db_sa':
+        # NOTE(mriedem): In DB2 the foreign key names are randomly generated
+        # if you didn't originally explicitly name them, so the name is like
+        # SQLxxxxx where the suffix is a random integer. Therefore we go
+        # through and just drop all of the foreign keys and save them so we
+        # can re-create them later after the primary key is dropped.
+        inspector = reflection.Inspector.from_engine(op.get_bind().engine)
+        db2fks = inspector.get_foreign_keys(TABLE_NAME)
+        for fk in db2fks:
+            op.drop_constraint(
+                name=fk.get('name'),
+                table_name=TABLE_NAME,
+                type_='foreignkey'
+            )
+    else:
+        op.drop_constraint(
+            name=fk_names[dialect]['l3_agent_id'],
+            table_name=TABLE_NAME,
+            type_='foreignkey'
+        )
+        op.drop_constraint(
+            name=fk_names[dialect]['router_id'],
+            table_name=TABLE_NAME,
+            type_='foreignkey'
+        )
 
     op.drop_constraint(
         name=PK_NAME,
@@ -101,27 +131,46 @@ def downgrade():
         sa.Column('id', sa.String(32))
     )
 
-    # Restore the foreign key constraints
-    op.create_foreign_key(
-        name=fk_names[dialect]['router_id'],
-        source=TABLE_NAME,
-        referent='routers',
-        local_cols=['router_id'],
-        remote_cols=['id'],
-        ondelete='CASCADE'
-    )
-
-    op.create_foreign_key(
-        name=fk_names[dialect]['l3_agent_id'],
-        source=TABLE_NAME,
-        referent='agents',
-        local_cols=['l3_agent_id'],
-        remote_cols=['id'],
-        ondelete='CASCADE'
-    )
+    if dialect == 'ibm_db_sa':
+        # DB2 doesn't support nullable column in primary key
+        op.alter_column(
+            table_name=TABLE_NAME,
+            column_name='id',
+            nullable=False
+        )
 
     op.create_primary_key(
         name=PK_NAME,
         table_name=TABLE_NAME,
         cols=['id']
     )
+
+    # Restore the foreign key constraints
+    if dialect == 'ibm_db_sa':
+        for fk in db2fks:
+            op.create_foreign_key(
+                name=fk.get('name'),
+                source=TABLE_NAME,
+                referent=fk.get('referred_table'),
+                local_cols=fk.get('constrained_columns'),
+                remote_cols=fk.get('referred_columns'),
+                ondelete='CASCADE'
+            )
+    else:
+        op.create_foreign_key(
+            name=fk_names[dialect]['router_id'],
+            source=TABLE_NAME,
+            referent='routers',
+            local_cols=['router_id'],
+            remote_cols=['id'],
+            ondelete='CASCADE'
+        )
+
+        op.create_foreign_key(
+            name=fk_names[dialect]['l3_agent_id'],
+            source=TABLE_NAME,
+            referent='agents',
+            local_cols=['l3_agent_id'],
+            remote_cols=['id'],
+            ondelete='CASCADE'
+        )
diff --git a/neutron/db/migration/alembic_migrations/versions/3927f7f7c456_l3_extension_distributed_mode.py b/neutron/db/migration/alembic_migrations/versions/3927f7f7c456_l3_extension_distributed_mode.py
index 1dc8456ae..c4c8cf9db 100644
--- a/neutron/db/migration/alembic_migrations/versions/3927f7f7c456_l3_extension_distributed_mode.py
+++ b/neutron/db/migration/alembic_migrations/versions/3927f7f7c456_l3_extension_distributed_mode.py
@@ -29,6 +29,7 @@ import sqlalchemy as sa
 
 
 def upgrade():
+    context = op.get_context()
     op.create_table(
         'router_extra_attributes',
         sa.Column('router_id', sa.String(length=36), nullable=False),
@@ -38,8 +39,15 @@ def upgrade():
             ['router_id'], ['routers.id'], ondelete='CASCADE'),
         sa.PrimaryKeyConstraint('router_id')
     )
-    op.execute("INSERT INTO router_extra_attributes SELECT id as router_id, "
-               "False as distributed from routers")
+    if context.bind.dialect.name == 'ibm_db_sa':
+        # NOTE(mriedem): DB2 stores booleans as 0 and 1.
+        op.execute("INSERT INTO router_extra_attributes "
+                   "SELECT id as router_id, "
+                   "0 as distributed from routers")
+    else:
+        op.execute("INSERT INTO router_extra_attributes "
+                   "SELECT id as router_id, "
+                   "False as distributed from routers")
 
 
 def downgrade():
diff --git a/neutron/db/migration/alembic_migrations/versions/884573acbf1c_unify_nsx_router_extra_attributes.py b/neutron/db/migration/alembic_migrations/versions/884573acbf1c_unify_nsx_router_extra_attributes.py
index 15e6e670d..5b5846299 100644
--- a/neutron/db/migration/alembic_migrations/versions/884573acbf1c_unify_nsx_router_extra_attributes.py
+++ b/neutron/db/migration/alembic_migrations/versions/884573acbf1c_unify_nsx_router_extra_attributes.py
@@ -38,6 +38,13 @@ def _migrate_data(old_table, new_table):
                     "FROM %(old_table)s old_t "
                     "WHERE new_t.router_id = old_t.router_id") %
                    {'new_table': new_table, 'old_table': old_table})
+    elif engine.name == 'ibm_db_sa':
+        op.execute(("UPDATE %(new_table)s new_t "
+                    "SET (distributed, service_router) = "
+                    "(SELECT old_t.distributed, old_t.service_router "
+                    "FROM %(old_table)s old_t "
+                    "WHERE new_t.router_id = old_t.router_id)") %
+                   {'new_table': new_table, 'old_table': old_table})
     else:
         op.execute(("UPDATE %(new_table)s new_t "
                     "INNER JOIN %(old_table)s as old_t "
-- 
2.45.2
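
Reviewer note (not part of the patch): the downgrade() change above relies on
SQLAlchemy reflection to capture DB2's auto-generated foreign key names before
the primary key is dropped, then re-creates them from the reflected data. A
minimal standalone sketch of that pattern follows; it assumes a throwaway
SQLite URL purely for illustration, whereas inside a real migration the engine
comes from op.get_bind().engine as in the patch.

    from sqlalchemy import create_engine
    from sqlalchemy.engine import reflection

    # Hypothetical connection URL for illustration only.
    engine = create_engine('sqlite:///example.db')

    def snapshot_foreign_keys(engine, table_name):
        # Each reflected entry is a dict carrying 'name', 'referred_table',
        # 'constrained_columns' and 'referred_columns', which are the same
        # fields the patched downgrade() feeds back into
        # op.drop_constraint() and op.create_foreign_key().
        inspector = reflection.Inspector.from_engine(engine)
        return inspector.get_foreign_keys(table_name)

    for fk in snapshot_foreign_keys(engine, 'routerl3agentbindings'):
        print(fk['name'], fk['constrained_columns'], '->',
              fk['referred_table'], fk['referred_columns'])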