business logic.
+How we manage database migration rules
+--------------------------------------
+
+Since Liberty, Neutron maintains two parallel alembic migration branches.
+
+The first one, called 'expand', is used to store expansion-only migration
+rules. Those rules are strictly additive and can be applied while
+neutron-server is running. Examples of additive database schema changes are:
+creating a new table, adding a new table column, adding a new index, etc.
+
+The second branch, called 'contract', is used to store those migration rules
+that are not safe to apply while neutron-server is running. Those include:
+column or table removal, moving data from one part of the database into another
+(renaming a column, transforming single table into multiple, etc.), introducing
+or modifying constraints, etc.
+
+The intent of the split is to allow invoking those safe migrations from the
+'expand' branch while neutron-server is running, reducing downtime needed to
+upgrade the service.
+
+To apply just expansion rules, execute:
+
+- neutron-db-manage upgrade liberty_expand@head
+
+After the first step is done, you can stop neutron-server, apply the remaining
+non-expansive migration rules, if any:
+
+- neutron-db-manage upgrade liberty_contract@head
+
+and finally, start your neutron-server again.
+
+If you are not interested in applying safe migration rules while the service is
+running, you can still upgrade the database the old way, by stopping the
+service, and then applying all available rules:
+
+- neutron-db-manage upgrade head[s]
+
+It will apply all the rules from both the expand and the contract branches, in
+the proper order.
+
+
Tests to verify that database migrations and models are in sync
---------------------------------------------------------------
+++ /dev/null
-52c5312f6baf
--- /dev/null
+30018084ec99
+52c5312f6baf
+kilo
--- /dev/null
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+
+"""Initial no-op Liberty contract rule.
+
+Revision ID: 30018084ec99
+Revises: None
+Create Date: 2015-06-22 00:00:00.000000
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '30018084ec99'
+down_revision = None
+depends_on = ('kilo',)
+branch_labels = ('liberty_contract',)
+
+
+def upgrade():
+ pass
# revision identifiers, used by Alembic.
revision = '354db87e3225'
-down_revision = 'kilo'
+down_revision = None
+branch_labels = ('liberty_expand',)
+depends_on = ('kilo',)
+
from alembic import op
import sqlalchemy as sa
from neutron.common import repos
-HEAD_FILENAME = 'HEAD'
+
+# TODO(ihrachyshka): maintain separate HEAD files per branch
+HEADS_FILENAME = 'HEADS'
+CURRENT_RELEASE = "liberty"
+MIGRATION_BRANCHES = ('expand', 'contract')
+
mods = repos.NeutronModules()
VALID_SERVICES = map(mods.alembic_name, mods.installed_list())
def do_check_migration(config, cmd):
do_alembic_command(config, 'branches')
- validate_head_file(config)
+ validate_heads_file(config)
def add_alembic_subparser(sub, cmd):
raise SystemExit(_('Negative delta (downgrade) not supported'))
revision = '%s+%d' % (revision, delta)
+ # leave branchless 'head' revision request backward compatible by applying
+ # all heads in all available branches.
+ if revision == 'head':
+ revision = 'heads'
if not CONF.command.sql:
run_sanity_checks(config, revision)
do_alembic_command(config, cmd, revision, sql=CONF.command.sql)
sql=CONF.command.sql)
-def do_revision(config, cmd):
- do_alembic_command(config, cmd,
- message=CONF.command.message,
- autogenerate=CONF.command.autogenerate,
- sql=CONF.command.sql)
- update_head_file(config)
-
+def _get_branch_head(branch):
+ '''Get the latest @head specification for a branch.'''
+ return '%s_%s@head' % (CURRENT_RELEASE, branch)
-def validate_head_file(config):
- script = alembic_script.ScriptDirectory.from_config(config)
- if len(script.get_heads()) > 1:
- alembic_util.err(_('Timeline branches unable to generate timeline'))
- head_path = os.path.join(script.versions, HEAD_FILENAME)
- if (os.path.isfile(head_path) and
- open(head_path).read().strip() == script.get_current_head()):
- return
+def do_revision(config, cmd):
+ '''Generate new revision files, one per branch.'''
+ if _separate_migration_branches_supported(CONF):
+ for branch in MIGRATION_BRANCHES:
+ version_path = _get_version_branch_path(CONF, branch)
+ head = _get_branch_head(branch)
+ do_alembic_command(config, cmd,
+ message=CONF.command.message,
+ autogenerate=CONF.command.autogenerate,
+ sql=CONF.command.sql,
+ version_path=version_path,
+ head=head)
else:
- alembic_util.err(_('HEAD file does not match migration timeline head'))
+ do_alembic_command(config, cmd,
+ message=CONF.command.message,
+ autogenerate=CONF.command.autogenerate,
+ sql=CONF.command.sql)
+ update_heads_file(config)
-def update_head_file(config):
+def _get_sorted_heads(script):
+ '''Get the list of heads for all branches, sorted.'''
+ heads = script.get_heads()
+ # +1 stands for the core 'kilo' branch, the one that didn't have branches
+ if len(heads) > len(MIGRATION_BRANCHES) + 1:
+ alembic_util.err(_('No new branches are allowed except: %s') %
+ ' '.join(MIGRATION_BRANCHES))
+ return sorted(heads)
+
+
+def validate_heads_file(config):
+ '''Check that HEADS file contains the latest heads for each branch.'''
script = alembic_script.ScriptDirectory.from_config(config)
- if len(script.get_heads()) > 1:
- alembic_util.err(_('Timeline branches unable to generate timeline'))
+ heads = _get_sorted_heads(script)
+ heads_path = _get_heads_file_path(CONF)
+ try:
+ with open(heads_path) as file_:
+ if file_.read().split() == heads:
+ return
+ except IOError:
+ pass
+ alembic_util.err(_('HEADS file does not match migration timeline heads'))
+
- head_path = os.path.join(script.versions, HEAD_FILENAME)
- with open(head_path, 'w+') as f:
- f.write(script.get_current_head())
+def update_heads_file(config):
+ '''Update HEADS file with the latest branch heads.'''
+ script = alembic_script.ScriptDirectory.from_config(config)
+ heads = _get_sorted_heads(script)
+ heads_path = _get_heads_file_path(CONF)
+ with open(heads_path, 'w+') as f:
+ f.write('\n'.join(heads))
def add_command_parsers(subparsers):
CONF.register_cli_opt(command_opt)
+def _get_neutron_service_base(neutron_config):
+ '''Return base python namespace name for a service.'''
+ if neutron_config.service:
+ validate_service_installed(neutron_config.service)
+ return "neutron_%s" % neutron_config.service
+ return "neutron"
+
+
+def _get_root_versions_dir(neutron_config):
+ '''Return root directory that contains all migration rules.'''
+ service_base = _get_neutron_service_base(neutron_config)
+ root_module = importutils.import_module(service_base)
+ return os.path.join(
+ os.path.dirname(root_module.__file__),
+ 'db/migration/alembic_migrations/versions')
+
+
+def _get_heads_file_path(neutron_config):
+ '''Return the path of the file that contains all latest heads, sorted.'''
+ return os.path.join(
+ _get_root_versions_dir(neutron_config),
+ HEADS_FILENAME)
+
+
+def _get_version_branch_path(neutron_config, branch=None):
+ version_path = _get_root_versions_dir(neutron_config)
+ if branch:
+ return os.path.join(version_path, CURRENT_RELEASE, branch)
+ return version_path
+
+
+def _separate_migration_branches_supported(neutron_config):
+ '''Detect whether split migration branches are supported.'''
+ # Use HEADS file to indicate the new, split migration world
+ return os.path.exists(_get_heads_file_path(neutron_config))
+
+
+def _set_version_locations(config):
+ '''Make alembic see all revisions in all migration branches.'''
+ version_paths = []
+
+ version_paths.append(_get_version_branch_path(CONF))
+ if _separate_migration_branches_supported(CONF):
+ for branch in MIGRATION_BRANCHES:
+ version_paths.append(_get_version_branch_path(CONF, branch))
+
+ config.set_main_option('version_locations', ' '.join(version_paths))
+
+
def validate_service_installed(service):
if not importutils.try_import('neutron_%s' % service):
alembic_util.err(_('Package neutron-%s not installed') % service)
def get_script_location(neutron_config):
location = '%s.db.migration:alembic_migrations'
- if neutron_config.service:
- validate_service_installed(neutron_config.service)
- base = "neutron_%s" % neutron_config.service
- else:
- base = "neutron"
- return location % base
+ return location % _get_neutron_service_base(neutron_config)
def get_alembic_config():
config = alembic_config.Config(os.path.join(os.path.dirname(__file__),
'alembic.ini'))
config.set_main_option('script_location', get_script_location(CONF))
+ _set_version_locations(config)
return config
script_dir = alembic_script.ScriptDirectory.from_config(config)
def check_sanity(rev, context):
- for script in script_dir.iterate_revisions(revision, rev):
+ # TODO(ihrachyshka): here we use internal API for alembic; we may need
+ # alembic to expose implicit_base= argument into public
+ # iterate_revisions() call
+ for script in script_dir.revision_map.iterate_revisions(
+ revision, rev, implicit_base=True):
if hasattr(script.module, 'check_sanity'):
script.module.check_sanity(context.connection)
return []
def db_sync(self, engine):
cfg.CONF.set_override('connection', engine.url, group='database')
- migration.do_alembic_command(self.alembic_config, 'upgrade', 'head')
+ migration.do_alembic_command(self.alembic_config, 'upgrade', 'heads')
cfg.CONF.clear_override('connection', group='database')
def get_engine(self):
self.mock_alembic_err = mock.patch('alembic.util.err').start()
self.mock_alembic_err.side_effect = SystemExit
- def _main_test_helper(self, argv, func_name, exp_args=(), exp_kwargs={}):
+ def _main_test_helper(self, argv, func_name, exp_args=(), exp_kwargs=[{}]):
with mock.patch.object(sys, 'argv', argv), mock.patch.object(
cli, 'run_sanity_checks'):
cli.main()
self.do_alembic_cmd.assert_has_calls(
- [mock.call(mock.ANY, func_name, *exp_args, **exp_kwargs)]
+ [mock.call(mock.ANY, func_name, *exp_args, **kwargs)
+ for kwargs in exp_kwargs]
)
def test_stamp(self):
['prog', 'stamp', 'foo'],
'stamp',
('foo',),
- {'sql': False}
+ [{'sql': False}]
)
self._main_test_helper(
['prog', 'stamp', 'foo', '--sql'],
'stamp',
('foo',),
- {'sql': True}
+ [{'sql': True}]
)
def test_current(self):
self._main_test_helper(['prog', 'history'], 'history')
def test_check_migration(self):
- with mock.patch.object(cli, 'validate_head_file') as validate:
+ with mock.patch.object(cli, 'validate_heads_file') as validate:
self._main_test_helper(['prog', 'check_migration'], 'branches')
validate.assert_called_once_with(mock.ANY)
- def test_database_sync_revision(self):
- with mock.patch.object(cli, 'update_head_file') as update:
+ def _test_database_sync_revision(self, separate_branches=True):
+ with mock.patch.object(cli, 'update_heads_file') as update:
+ class FakeConfig(object):
+ service = ''
+
+ fake_config = FakeConfig()
+ if separate_branches:
+ expected_kwargs = [
+ {'message': 'message', 'sql': False, 'autogenerate': True,
+ 'version_path':
+ cli._get_version_branch_path(fake_config, branch),
+ 'head': cli._get_branch_head(branch)}
+ for branch in cli.MIGRATION_BRANCHES]
+ else:
+ expected_kwargs = [{
+ 'message': 'message', 'sql': False, 'autogenerate': True,
+ }]
self._main_test_helper(
['prog', 'revision', '--autogenerate', '-m', 'message'],
'revision',
- (),
- {'message': 'message', 'sql': False, 'autogenerate': True}
+ (), expected_kwargs
)
update.assert_called_once_with(mock.ANY)
-
update.reset_mock()
+
+ for kwarg in expected_kwargs:
+ kwarg['autogenerate'] = False
+ kwarg['sql'] = True
+
self._main_test_helper(
['prog', 'revision', '--sql', '-m', 'message'],
'revision',
- (),
- {'message': 'message', 'sql': True, 'autogenerate': False}
+ (), expected_kwargs
)
update.assert_called_once_with(mock.ANY)
+ def test_database_sync_revision(self):
+ self._test_database_sync_revision()
+
+ @mock.patch.object(cli, '_separate_migration_branches_supported',
+ return_value=False)
+ def test_database_sync_revision_no_branches(self, *args):
+ # Test that old branchless approach is still supported
+ self._test_database_sync_revision(separate_branches=False)
+
def test_upgrade(self):
self._main_test_helper(
['prog', 'upgrade', '--sql', 'head'],
'upgrade',
- ('head',),
- {'sql': True}
+ ('heads',),
+ [{'sql': True}]
)
self._main_test_helper(
['prog', 'upgrade', '--delta', '3'],
'upgrade',
('+3',),
- {'sql': False}
+ [{'sql': False}]
)
self._main_test_helper(
['prog', 'upgrade', 'kilo', '--delta', '3'],
'upgrade',
('kilo+3',),
- {'sql': False}
+ [{'sql': False}]
)
def assert_command_fails(self, command):
def test_upgrade_rejects_delta_with_relative_revision(self):
self.assert_command_fails(['prog', 'upgrade', '+2', '--delta', '3'])
- def _test_validate_head_file_helper(self, heads, file_content=None):
+ def _test_validate_heads_file_helper(self, heads, file_content=None):
with mock.patch('alembic.script.ScriptDirectory.from_config') as fc:
fc.return_value.get_heads.return_value = heads
fc.return_value.get_current_head.return_value = heads[0]
is_file.return_value = file_content is not None
if file_content in heads:
- cli.validate_head_file(mock.sentinel.config)
+ cli.validate_heads_file(mock.sentinel.config)
else:
self.assertRaises(
SystemExit,
- cli.validate_head_file,
+ cli.validate_heads_file,
mock.sentinel.config
)
self.mock_alembic_err.assert_called_once_with(mock.ANY)
fc.assert_called_once_with(mock.sentinel.config)
- def test_validate_head_file_multiple_heads(self):
- self._test_validate_head_file_helper(['a', 'b'])
+ def test_validate_heads_file_multiple_heads(self):
+ self._test_validate_heads_file_helper(['a', 'b'])
- def test_validate_head_file_missing_file(self):
- self._test_validate_head_file_helper(['a'])
+ def test_validate_heads_file_missing_file(self):
+ self._test_validate_heads_file_helper(['a'])
- def test_validate_head_file_wrong_contents(self):
- self._test_validate_head_file_helper(['a'], 'b')
+ def test_validate_heads_file_wrong_contents(self):
+ self._test_validate_heads_file_helper(['a'], 'b')
def test_validate_head_success(self):
- self._test_validate_head_file_helper(['a'], 'a')
+ self._test_validate_heads_file_helper(['a'], 'a')
+
+ def test_update_heads_file_two_heads(self):
+ with mock.patch('alembic.script.ScriptDirectory.from_config') as fc:
+ heads = ('b', 'a')
+ fc.return_value.get_heads.return_value = heads
+ with mock.patch('six.moves.builtins.open') as mock_open:
+ mock_open.return_value.__enter__ = lambda s: s
+ mock_open.return_value.__exit__ = mock.Mock()
+
+ cli.update_heads_file(mock.sentinel.config)
+ mock_open.return_value.write.assert_called_once_with(
+ '\n'.join(sorted(heads)))
- def test_update_head_file_multiple_heads(self):
+ def test_update_heads_file_excessive_heads_negative(self):
with mock.patch('alembic.script.ScriptDirectory.from_config') as fc:
- fc.return_value.get_heads.return_value = ['a', 'b']
+ heads = ('b', 'a', 'c', 'kilo')
+ fc.return_value.get_heads.return_value = heads
self.assertRaises(
SystemExit,
- cli.update_head_file,
+ cli.update_heads_file,
mock.sentinel.config
)
self.mock_alembic_err.assert_called_once_with(mock.ANY)
- fc.assert_called_once_with(mock.sentinel.config)
- def test_update_head_file_success(self):
+ def test_update_heads_file_success(self):
with mock.patch('alembic.script.ScriptDirectory.from_config') as fc:
- fc.return_value.get_heads.return_value = ['a']
- fc.return_value.get_current_head.return_value = 'a'
+ heads = ('a', 'b')
+ fc.return_value.get_heads.return_value = heads
+ fc.return_value.get_current_head.return_value = heads
with mock.patch('six.moves.builtins.open') as mock_open:
mock_open.return_value.__enter__ = lambda s: s
mock_open.return_value.__exit__ = mock.Mock()
- cli.update_head_file(mock.sentinel.config)
- mock_open.return_value.write.assert_called_once_with('a')
- fc.assert_called_once_with(mock.sentinel.config)
+ cli.update_heads_file(mock.sentinel.config)
+ mock_open.return_value.write.assert_called_once_with(
+ '\n'.join(heads))