review.fuel-infra Code Review - openstack-build/neutron-build.git/commitdiff
Drop and recreate FK if adding new PK to routerl3bindings
author     Jakub Libosvar <libosvar@redhat.com>
           Fri, 31 Oct 2014 14:49:40 +0000 (15:49 +0100)
committer  Jakub Libosvar <libosvar@redhat.com>
           Wed, 5 Nov 2014 14:10:04 +0000 (15:10 +0100)
Since MySQL 5.6, columns used in a FK constraint cannot be altered. As a
workaround we drop the FK before the ALTER COLUMN and recreate it afterwards.

Change-Id: Icd1b3075cf29a6b0c477a4ddea2e6ebee91baef8
Closes-Bug: 1384555
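
In Alembic terms, the workaround this patch generalizes boils down to the
sequence below (a minimal sketch: only the router_id FK is shown, its MySQL
name is taken from the old hard-coded fk_names map, and the patch itself
discovers the names via reflection instead of hard-coding them):

    from alembic import op

    # 1. Drop the FK that involves the column(s) we need to touch
    #    (the l3_agent_id FK is dropped and recreated the same way).
    op.drop_constraint(name='routerl3agentbindings_ibfk_2',
                       table_name='routerl3agentbindings',
                       type_='foreignkey')
    # 2. The column can now take part in the new composite primary key.
    op.create_primary_key(name='pk_routerl3agentbindings',
                          table_name='routerl3agentbindings',
                          cols=['router_id', 'l3_agent_id'])
    # 3. Recreate the FK exactly as it was before.
    op.create_foreign_key(name='routerl3agentbindings_ibfk_2',
                          source='routerl3agentbindings',
                          referent='routers',
                          local_cols=['router_id'],
                          remote_cols=['id'],
                          ondelete='CASCADE')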

neutron/db/migration/__init__.py
neutron/db/migration/alembic_migrations/versions/31d7f831a591_add_constraint_for_routerid.py

neutron/db/migration/__init__.py
index 55c1443f1fea4c88cedd208c97f909b221d0b336..86cce385af93bbdd87236b4de6f95cb97332eec6 100644 (file)
 #    License for the specific language governing permissions and limitations
 #    under the License.
 
+import contextlib
 import functools
 
 from alembic import context
 from alembic import op
 import sqlalchemy as sa
+from sqlalchemy.engine import reflection
 
 
 def skip_if_offline(func):
@@ -122,3 +124,35 @@ def create_table_if_not_exist_psql(table_name, values):
                "WHERE NOT table_exist(%(name)r);" %
                {'name': table_name,
                 'columns': values})
+
+
+def remove_foreign_keys(table, foreign_keys):
+    for fk in foreign_keys:
+        op.drop_constraint(
+            name=fk['name'],
+            table_name=table,
+            type_='foreignkey'
+        )
+
+
+def create_foreign_keys(table, foreign_keys):
+    for fk in foreign_keys:
+        op.create_foreign_key(
+            name=fk['name'],
+            source=table,
+            referent=fk['referred_table'],
+            local_cols=fk['constrained_columns'],
+            remote_cols=fk['referred_columns'],
+            ondelete='CASCADE'
+        )
+
+
+@contextlib.contextmanager
+def remove_fks_from_table(table):
+    try:
+        inspector = reflection.Inspector.from_engine(op.get_bind())
+        foreign_keys = inspector.get_foreign_keys(table)
+        remove_foreign_keys(table, foreign_keys)
+        yield
+    finally:
+        create_foreign_keys(table, foreign_keys)
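
For reference, Inspector.get_foreign_keys() returns one dict per constraint
with keys such as 'name', 'constrained_columns', 'referred_table' and
'referred_columns'; those are the fields the helpers above read. A minimal
usage sketch, with a hypothetical table and column names (not taken from
this patch):

    from alembic import op
    from neutron.db import migration

    def upgrade():
        # FKs on 'sometable' are dropped on entry and recreated on exit,
        # so the columns they involve can be altered in between.
        with migration.remove_fks_from_table('sometable'):
            op.create_primary_key(name='pk_sometable',
                                  table_name='sometable',
                                  cols=['col_a', 'col_b'])

Note that create_foreign_keys() recreates every constraint with
ondelete='CASCADE', matching the two FKs on routerl3agentbindings.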
neutron/db/migration/alembic_migrations/versions/31d7f831a591_add_constraint_for_routerid.py
index 0ba5f58be7d002f05705414a91b573d5d97db350..232156748e9ae1be34342fa0c5d2793f8dd6a3c6 100644 (file)
@@ -27,22 +27,12 @@ down_revision = '37f322991f59'
 
 from alembic import op
 import sqlalchemy as sa
-from sqlalchemy.engine import reflection
+
+from neutron.db import migration
 
 TABLE_NAME = 'routerl3agentbindings'
 PK_NAME = 'pk_routerl3agentbindings'
 
-fk_names = {'postgresql':
-            {'router_id':
-                'routerl3agentbindings_router_id_fkey',
-             'l3_agent_id':
-                'routerl3agentbindings_l3_agent_id_fkey'},
-            'mysql':
-            {'router_id':
-                'routerl3agentbindings_ibfk_2',
-             'l3_agent_id':
-                'routerl3agentbindings_ibfk_1'}}
-
 
 def upgrade():
     # In order to sanitize the data during migration,
@@ -66,60 +56,32 @@ def upgrade():
 
     op.drop_column(TABLE_NAME, 'id')
 
-    # DB2 doesn't support nullable column in primary key
-    if context.bind.dialect.name == 'ibm_db_sa':
-        op.alter_column(
-            table_name=TABLE_NAME,
-            column_name='router_id',
-            nullable=False
-        )
-        op.alter_column(
+    with migration.remove_fks_from_table(TABLE_NAME):
+        # DB2 doesn't support nullable column in primary key
+        if context.bind.dialect.name == 'ibm_db_sa':
+            op.alter_column(
+                table_name=TABLE_NAME,
+                column_name='router_id',
+                nullable=False
+            )
+            op.alter_column(
+                table_name=TABLE_NAME,
+                column_name='l3_agent_id',
+                nullable=False
+            )
+
+        op.create_primary_key(
+            name=PK_NAME,
             table_name=TABLE_NAME,
-            column_name='l3_agent_id',
-            nullable=False
+            cols=['router_id', 'l3_agent_id']
         )
 
-    op.create_primary_key(
-        name=PK_NAME,
-        table_name=TABLE_NAME,
-        cols=['router_id', 'l3_agent_id']
-    )
-
 
 def downgrade():
 
     context = op.get_context()
     dialect = context.bind.dialect.name
 
-    # Drop the existed foreign key constraints
-    # In order to perform primary key changes
-    db2fks = {}
-    if dialect == 'ibm_db_sa':
-        # NOTE(mriedem): In DB2 the foreign key names are randomly generated
-        # if you didn't originally explicitly name them, so the name is like
-        # SQLxxxxx where the suffix is a random integer.  Therefore we go
-        # through and just drop all of the foreign keys and save them so we
-        # can re-create them later after the primary key is dropped.
-        inspector = reflection.Inspector.from_engine(op.get_bind().engine)
-        db2fks = inspector.get_foreign_keys(TABLE_NAME)
-        for fk in db2fks:
-            op.drop_constraint(
-                name=fk.get('name'),
-                table_name=TABLE_NAME,
-                type_='foreignkey'
-            )
-    else:
-        op.drop_constraint(
-            name=fk_names[dialect]['l3_agent_id'],
-            table_name=TABLE_NAME,
-            type_='foreignkey'
-        )
-        op.drop_constraint(
-            name=fk_names[dialect]['router_id'],
-            table_name=TABLE_NAME,
-            type_='foreignkey'
-        )
-
     op.drop_constraint(
         name=PK_NAME,
         table_name=TABLE_NAME,
@@ -139,38 +101,9 @@ def downgrade():
             nullable=False
         )
 
-    op.create_primary_key(
-        name=PK_NAME,
-        table_name=TABLE_NAME,
-        cols=['id']
-    )
-
-    # Restore the foreign key constraints
-    if dialect == 'ibm_db_sa':
-        for fk in db2fks:
-            op.create_foreign_key(
-                name=fk.get('name'),
-                source=TABLE_NAME,
-                referent=fk.get('referred_table'),
-                local_cols=fk.get('constrained_columns'),
-                remote_cols=fk.get('referred_columns'),
-                ondelete='CASCADE'
-            )
-    else:
-        op.create_foreign_key(
-            name=fk_names[dialect]['router_id'],
-            source=TABLE_NAME,
-            referent='routers',
-            local_cols=['router_id'],
-            remote_cols=['id'],
-            ondelete='CASCADE'
-        )
-
-        op.create_foreign_key(
-            name=fk_names[dialect]['l3_agent_id'],
-            source=TABLE_NAME,
-            referent='agents',
-            local_cols=['l3_agent_id'],
-            remote_cols=['id'],
-            ondelete='CASCADE'
+    with migration.remove_fks_from_table(TABLE_NAME):
+        op.create_primary_key(
+            name=PK_NAME,
+            table_name=TABLE_NAME,
+            cols=['id']
         )