From c9a55d852e3f56a955039e99b628ce0b1c1e95af Mon Sep 17 00:00:00 2001
From: =?utf8?q?Micha=C5=82=20Dulko?=
Date: Fri, 5 Feb 2016 15:41:01 +0100
Subject: [PATCH] Add SIGHUP handlers to reset RPC version pins

Adding SIGHUP handlers (by implementing reset() from oslo.service) to
cinder-scheduler, cinder-backup and cinder-volume that reset cached RPC
version pins. This avoids the need to restart all of the services once
an upgrade of the deployment is completed.

Some changes go a little deeper into the stack, because to reload all
the pins we need to recreate the .rpcapi.API objects that are stored in
memory.

Please note that the SIGHUP signal is handled by oslo.service only when
the service runs in daemon mode (without a tty attached). To test this
commit in DevStack you need to add "&" to the end of the command that
starts the service.

The situation is more complicated with the API service, so for now it
still requires a restart. In HA deployments cinder-api typically sits
behind a load balancer, so restarting individual nodes one by one
should be easy.

DocImpact: Add information on rolling upgrade procedures to the docs.

Implements: blueprint rpc-object-compatibility
Change-Id: I03ed74e17dc9a4b9aa2ddcfbeb36a106a0f035f8
---
 cinder/backup/manager.py                           |  5 ++++
 cinder/manager.py                                  | 16 +++++++++++++
 cinder/scheduler/driver.py                         |  4 ++++
 cinder/scheduler/manager.py                        |  4 ++++
 cinder/service.py                                  |  4 ++++
 cinder/tests/unit/scheduler/test_scheduler.py      | 15 ++++++++++++
 cinder/tests/unit/test_backup.py                   | 24 +++++++++++++++++++
 cinder/tests/unit/test_service.py                  | 12 ++++++++++
 cinder/tests/unit/test_volume.py                   | 15 ++++++++++++
 .../rpc_compatibility-375be8ac3158981d.yaml        | 16 +++++++++++++
 10 files changed, 115 insertions(+)
 create mode 100644 releasenotes/notes/rpc_compatibility-375be8ac3158981d.yaml

diff --git a/cinder/backup/manager.py b/cinder/backup/manager.py
index 3aa486d8b..339a9df11 100644
--- a/cinder/backup/manager.py
+++ b/cinder/backup/manager.py
@@ -123,6 +123,11 @@ class BackupManager(manager.SchedulerDependentManager):
             LOG.exception(_LE("Problem cleaning incomplete backup "
                               "operations."))
 
+    def reset(self):
+        super(BackupManager, self).reset()
+        self.backup_rpcapi = backup_rpcapi.BackupAPI()
+        self.volume_rpcapi = volume_rpcapi.VolumeAPI()
+
     def _cleanup_incomplete_backup_operations(self, ctxt):
         LOG.info(_LI("Cleaning up incomplete backup operations."))
 
diff --git a/cinder/manager.py b/cinder/manager.py
index 45f9e1452..d725dc6ed 100644
--- a/cinder/manager.py
+++ b/cinder/manager.py
@@ -58,6 +58,8 @@ import oslo_messaging as messaging
 from oslo_service import periodic_task
 
 from cinder.db import base
+from cinder.i18n import _LI
+from cinder import rpc
 from cinder.scheduler import rpcapi as scheduler_rpcapi
 from cinder import version
 
@@ -127,6 +129,16 @@ class Manager(base.Base, PeriodicTasks):
         """
         return True
 
+    def reset(self):
+        """Method executed when SIGHUP is caught by the process.
+
+        We're utilizing it to reset RPC API version pins to avoid restart of
+        the service when rolling upgrade is completed.
+        """
+        LOG.info(_LI('Resetting cached RPC version pins.'))
+        rpc.LAST_OBJ_VERSIONS = {}
+        rpc.LAST_RPC_VERSIONS = {}
+
 
 class SchedulerDependentManager(Manager):
     """Periodically send capability updates to the Scheduler services.
@@ -162,3 +174,7 @@ class SchedulerDependentManager(Manager):
 
     def _add_to_threadpool(self, func, *args, **kwargs):
         self._tp.spawn_n(func, *args, **kwargs)
+
+    def reset(self):
+        super(SchedulerDependentManager, self).reset()
+        self.scheduler_rpcapi = scheduler_rpcapi.SchedulerAPI()
diff --git a/cinder/scheduler/driver.py b/cinder/scheduler/driver.py
index 02b081611..94d47f147 100644
--- a/cinder/scheduler/driver.py
+++ b/cinder/scheduler/driver.py
@@ -74,6 +74,10 @@ class Scheduler(object):
             CONF.scheduler_host_manager)
         self.volume_rpcapi = volume_rpcapi.VolumeAPI()
 
+    def reset(self):
+        """Reset volume RPC API object to load new version pins."""
+        self.volume_rpcapi = volume_rpcapi.VolumeAPI()
+
     def is_ready(self):
         """Returns True if Scheduler is ready to accept requests.
 
diff --git a/cinder/scheduler/manager.py b/cinder/scheduler/manager.py
index ced49c315..d68508a7e 100644
--- a/cinder/scheduler/manager.py
+++ b/cinder/scheduler/manager.py
@@ -75,6 +75,10 @@ class SchedulerManager(manager.Manager):
         eventlet.sleep(CONF.periodic_interval)
         self._startup_delay = False
 
+    def reset(self):
+        super(SchedulerManager, self).reset()
+        self.driver.reset()
+
     def update_service_capabilities(self, context, service_name=None,
                                     host=None, capabilities=None, **kwargs):
         """Process a capability update from a service node."""
diff --git a/cinder/service.py b/cinder/service.py
index 5d03ee55b..e3acf2973 100644
--- a/cinder/service.py
+++ b/cinder/service.py
@@ -359,6 +359,10 @@ class Service(service.Service):
                 self.model_disconnected = True
                 LOG.exception(_LE('Exception encountered: '))
 
+    def reset(self):
+        self.manager.reset()
+        super(Service, self).reset()
+
 
 class WSGIService(service.ServiceBase):
     """Provides ability to launch API from a 'paste' configuration."""
diff --git a/cinder/tests/unit/scheduler/test_scheduler.py b/cinder/tests/unit/scheduler/test_scheduler.py
index 1830cfa6b..b5c6f709d 100644
--- a/cinder/tests/unit/scheduler/test_scheduler.py
+++ b/cinder/tests/unit/scheduler/test_scheduler.py
@@ -69,6 +69,21 @@ class SchedulerManagerTestCase(test.TestCase):
         sleep_mock.assert_called_once_with(CONF.periodic_interval)
         self.assertFalse(self.manager._startup_delay)
 
+    @mock.patch('cinder.rpc.LAST_RPC_VERSIONS', {'cinder-volume': '1.3'})
+    @mock.patch('cinder.rpc.LAST_OBJ_VERSIONS', {'cinder-volume': '1.5'})
+    def test_reset(self):
+        mgr = self.manager_cls()
+
+        volume_rpcapi = mgr.driver.volume_rpcapi
+        self.assertEqual('1.3', volume_rpcapi.client.version_cap)
+        self.assertEqual('1.5',
+                         volume_rpcapi.client.serializer._base.version_cap)
+        mgr.reset()
+
+        volume_rpcapi = mgr.driver.volume_rpcapi
+        self.assertIsNone(volume_rpcapi.client.version_cap)
+        self.assertIsNone(volume_rpcapi.client.serializer._base.version_cap)
+
     @mock.patch('cinder.scheduler.driver.Scheduler.'
                 'update_service_capabilities')
     def test_update_service_capabilities_empty_dict(self, _mock_update_cap):
diff --git a/cinder/tests/unit/test_backup.py b/cinder/tests/unit/test_backup.py
index 0796911ad..d6ef545d5 100644
--- a/cinder/tests/unit/test_backup.py
+++ b/cinder/tests/unit/test_backup.py
@@ -290,6 +290,30 @@ class BackupTestCase(BaseBackupTest):
         mock_add_threadpool.assert_has_calls(calls, any_order=True)
         self.assertEqual(2, mock_add_threadpool.call_count)
 
+    @mock.patch('cinder.rpc.LAST_RPC_VERSIONS', {'cinder-backup': '1.3',
+                                                 'cinder-volume': '1.7'})
+    @mock.patch('cinder.rpc.LAST_OBJ_VERSIONS', {'cinder-backup': '1.5',
+                                                 'cinder-volume': '1.4'})
+    def test_reset(self):
+        backup_mgr = manager.BackupManager()
+
+        backup_rpcapi = backup_mgr.backup_rpcapi
+        volume_rpcapi = backup_mgr.volume_rpcapi
+        self.assertEqual('1.3', backup_rpcapi.client.version_cap)
+        self.assertEqual('1.5',
+                         backup_rpcapi.client.serializer._base.version_cap)
+        self.assertEqual('1.7', volume_rpcapi.client.version_cap)
+        self.assertEqual('1.4',
+                         volume_rpcapi.client.serializer._base.version_cap)
+        backup_mgr.reset()
+
+        backup_rpcapi = backup_mgr.backup_rpcapi
+        volume_rpcapi = backup_mgr.volume_rpcapi
+        self.assertIsNone(backup_rpcapi.client.version_cap)
+        self.assertIsNone(backup_rpcapi.client.serializer._base.version_cap)
+        self.assertIsNone(volume_rpcapi.client.version_cap)
+        self.assertIsNone(volume_rpcapi.client.serializer._base.version_cap)
+
     def test_is_working(self):
         self.assertTrue(self.backup_mgr.is_working())
 
diff --git a/cinder/tests/unit/test_service.py b/cinder/tests/unit/test_service.py
index 585287f0b..96ac9830e 100644
--- a/cinder/tests/unit/test_service.py
+++ b/cinder/tests/unit/test_service.py
@@ -83,6 +83,18 @@ class ServiceManagerTestCase(test.TestCase):
         serv.start()
         self.assertEqual('service', serv.test_method())
 
+    @mock.patch('cinder.rpc.LAST_OBJ_VERSIONS', {'test': '1.5'})
+    @mock.patch('cinder.rpc.LAST_RPC_VERSIONS', {'test': '1.3'})
+    def test_reset(self):
+        serv = service.Service('test',
+                               'test',
+                               'test',
+                               'cinder.tests.unit.test_service.FakeManager')
+        serv.start()
+        serv.reset()
+        self.assertEqual({}, rpc.LAST_OBJ_VERSIONS)
+        self.assertEqual({}, rpc.LAST_RPC_VERSIONS)
+
 
 class ServiceFlagsTestCase(test.TestCase):
     def test_service_enabled_on_create_based_on_flag(self):
diff --git a/cinder/tests/unit/test_volume.py b/cinder/tests/unit/test_volume.py
index 86e394b86..2131421dc 100644
--- a/cinder/tests/unit/test_volume.py
+++ b/cinder/tests/unit/test_volume.py
@@ -374,6 +374,21 @@ class VolumeTestCase(BaseVolumeTestCase):
         manager.init_host()
         self.assertEqual(0, mock_add_p_task.call_count)
 
+    @mock.patch('cinder.rpc.LAST_RPC_VERSIONS', {'cinder-scheduler': '1.3'})
+    @mock.patch('cinder.rpc.LAST_OBJ_VERSIONS', {'cinder-scheduler': '1.5'})
+    def test_reset(self):
+        vol_mgr = vol_manager.VolumeManager()
+
+        scheduler_rpcapi = vol_mgr.scheduler_rpcapi
+        self.assertEqual('1.3', scheduler_rpcapi.client.version_cap)
+        self.assertEqual('1.5',
+                         scheduler_rpcapi.client.serializer._base.version_cap)
+        vol_mgr.reset()
+
+        scheduler_rpcapi = vol_mgr.scheduler_rpcapi
+        self.assertIsNone(scheduler_rpcapi.client.version_cap)
+        self.assertIsNone(scheduler_rpcapi.client.serializer._base.version_cap)
+
     @mock.patch.object(vol_manager.VolumeManager,
                        'update_service_capabilities')
     def test_report_filter_goodness_function(self, mock_update):
diff --git a/releasenotes/notes/rpc_compatibility-375be8ac3158981d.yaml b/releasenotes/notes/rpc_compatibility-375be8ac3158981d.yaml
new file mode 100644
index 000000000..31c560dd4
--- /dev/null
+++ b/releasenotes/notes/rpc_compatibility-375be8ac3158981d.yaml
@@ -0,0 +1,16 @@
+---
+features:
+  - Added RPC backward compatibility layer similar to the
+    one implemented in Nova. This means that Cinder
+    services can be upgraded one-by-one without breakage.
+    After all the services are upgraded, SIGHUP signals
+    should be issued to all the services to make them
+    reload the cached minimum RPC versions. Alternatively,
+    the services can simply be restarted. Please note
+    that the cinder-api service doesn't support SIGHUP
+    yet. Please also take into account that all rolling
+    upgrade capabilities are considered tech preview, as
+    we don't have CI testing them yet.
+upgrade:
+  - Starting from the Mitaka release, Cinder has tech
+    preview support for rolling upgrades.
-- 
2.45.2
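
For readers who want to see the SIGHUP-to-reset() flow outside of Cinder, below
is a minimal, self-contained sketch of the same pattern: a handler for SIGHUP
clears module-level version caches and recreates the RPC API object that read
its pins at construction time. The names (FakeRpcAPI, DemoManager and the
LAST_*_VERSIONS dicts) are hypothetical stand-ins for cinder.rpc and the
*.rpcapi.API classes, and the handler is registered by hand with the standard
library signal module; in the real services that wiring is provided by
oslo.service and, as the commit message notes, only takes effect when the
service runs as a daemon.

    import os
    import signal
    import time

    # Hypothetical stand-ins for the module-level caches in cinder/rpc.py.
    LAST_RPC_VERSIONS = {'cinder-volume': '1.3'}
    LAST_OBJ_VERSIONS = {'cinder-volume': '1.5'}


    class FakeRpcAPI(object):
        """Stand-in for a *.rpcapi.API class: pins are read at construction."""

        def __init__(self):
            self.version_cap = LAST_RPC_VERSIONS.get('cinder-volume')
            self.obj_version_cap = LAST_OBJ_VERSIONS.get('cinder-volume')


    class DemoManager(object):
        """Mirrors the Manager.reset() pattern added by this patch."""

        def __init__(self):
            self.volume_rpcapi = FakeRpcAPI()

        def reset(self):
            # Drop the cached pins and rebuild the client object so it no
            # longer carries the old version caps.
            global LAST_RPC_VERSIONS, LAST_OBJ_VERSIONS
            LAST_RPC_VERSIONS = {}
            LAST_OBJ_VERSIONS = {}
            self.volume_rpcapi = FakeRpcAPI()


    if __name__ == '__main__':
        manager = DemoManager()
        # oslo.service installs an equivalent handler for daemonized services;
        # here it is wired up by hand for the demonstration.
        signal.signal(signal.SIGHUP, lambda signum, frame: manager.reset())

        print('before SIGHUP:', manager.volume_rpcapi.version_cap)  # '1.3'
        os.kill(os.getpid(), signal.SIGHUP)  # same effect as `kill -HUP <pid>`
        time.sleep(0.1)                      # let the handler run
        print('after SIGHUP:', manager.volume_rpcapi.version_cap)   # None

In Cinder itself the same effect is obtained by sending SIGHUP to
cinder-volume, cinder-backup or cinder-scheduler once every service has been
upgraded: oslo.service invokes Service.reset(), which calls Manager.reset() as
added above, so the next RPC clients are built without the old pins.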