review.fuel-infra Code Review - openstack-build/neutron-build.git/commitdiff
DVR: only notify needed agents on new VM port creation
author    Oleg Bondarev <obondarev@mirantis.com>
Tue, 6 Oct 2015 15:14:33 +0000 (18:14 +0300)
committer Oleg Bondarev <obondarev@mirantis.com>
Fri, 23 Oct 2015 09:34:08 +0000 (12:34 +0300)
When a new VM that should be serviced by a DVR router appears on a
compute host, the router is scheduled to that host and a notification
is sent. Before this patch the notification was broadcast to all
agents, although only the agent on the target host actually needs it.
This should decrease the load on the neutron server at scale.

Closes-Bug: #1486795
Change-Id: Id48b6f6a71530c4f6092d2a07b2db1a5cd300c05
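
For clarity, the new flow in dvr_update_router_addvm, consolidated into a
minimal sketch ('plugin' is a simplified stand-in for the scheduler mixin,
not the actual Neutron class):

    # Sketch only: mirrors the diff below with a simplified 'plugin' object.
    def dvr_update_router_addvm(plugin, context, port):
        # Resolve the compute host the new VM port is bound to.
        port_dict = plugin._core_plugin.get_port(context, port['id'])
        port_host = port_dict['binding:host_id']

        # Bail out early if no L3 agent runs on that host.
        agents = plugin.get_l3_agents(context, filters={'host': [port_host]})
        if not agents:
            return
        l3_agent_on_host = agents[0]

        # Bind any not-yet-bound DVR routers on the port's subnets to the
        # agent on that host.
        router_ids = plugin.get_dvr_routers_by_portid(
            context, port['id'], port['fixed_ips'])
        for router_id in router_ids:
            if not plugin.check_l3_agent_router_binding(
                    context, router_id, l3_agent_on_host['id']):
                plugin.schedule_router(
                    context, router_id, candidates=[l3_agent_on_host])

        # One RPC cast to the agent on the target host instead of a broad
        # routers_updated fanout.
        plugin.l3_rpc_notifier.routers_updated_on_host(
            context, router_ids, port_host)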

neutron/db/l3_agentschedulers_db.py
neutron/db/l3_dvrscheduler_db.py
neutron/tests/functional/services/l3_router/test_l3_dvr_router_plugin.py
neutron/tests/unit/scheduler/test_l3_agent_scheduler.py

index 4ccde0bdaf5f35d4fe0fdaf84029055b7df151af..9498c092f83736ba782b923c408b5e7d6a28e112 100644 (file)
@@ -181,6 +181,12 @@ class L3AgentSchedulerDbMixin(l3agentscheduler.L3AgentSchedulerPluginBase,
         if not is_suitable_agent:
             raise l3agentscheduler.InvalidL3Agent(id=agent['id'])
 
+    def check_l3_agent_router_binding(self, context, router_id, agent_id):
+        query = context.session.query(RouterL3AgentBinding)
+        bindings = query.filter_by(router_id=router_id,
+                                   l3_agent_id=agent_id).all()
+        return bool(bindings)
+
     def check_agent_router_scheduling_needed(self, context, agent, router):
         """Check if the router scheduling is needed.
 
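The new check_l3_agent_router_binding helper simply reports whether a
RouterL3AgentBinding row already ties the router to the given agent. The
DVR scheduler change below uses it to skip redundant scheduling, roughly:

    # Hypothetical call site: schedule only when no binding exists yet.
    if not self.check_l3_agent_router_binding(context, router_id, agent['id']):
        self.schedule_router(context, router_id, candidates=[agent])
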
index 425f47ec5c4d92be8243419b5d30464cc943990f..8d4e203d4ab60f397eb1a446cc5bb9ab21f4ebc0 100644 (file)
@@ -99,27 +99,31 @@ class L3_DVRsch_db_mixin(l3agent_sch_db.L3AgentSchedulerDbMixin):
     """
 
     def dvr_update_router_addvm(self, context, port):
+        port_dict = self._core_plugin.get_port(context, port['id'])
+        port_host = port_dict['binding:host_id']
+        l3_agent_on_host = (self.get_l3_agents(
+            context, filters={'host': [port_host]}) or [None])[0]
+        if not l3_agent_on_host:
+            return
+
         ips = port['fixed_ips']
-        for ip in ips:
-            subnet = ip['subnet_id']
-            filter_sub = {'fixed_ips': {'subnet_id': [subnet]},
-                          'device_owner':
-                          [n_const.DEVICE_OWNER_DVR_INTERFACE]}
-            ports = self._core_plugin.get_ports(context, filters=filter_sub)
-            for port in ports:
-                router_id = port['device_id']
-                router_dict = self.get_router(context, router_id)
-                if router_dict.get('distributed', False):
-                    payload = {'subnet_id': subnet}
-                    self.l3_rpc_notifier.routers_updated(
-                        context, [router_id], None, payload)
-                    LOG.debug('DVR: dvr_update_router_addvm %s ', router_id)
+        router_ids = self.get_dvr_routers_by_portid(context, port['id'], ips)
+        for router_id in router_ids:
+            if not self.check_l3_agent_router_binding(
+                    context, router_id, l3_agent_on_host['id']):
+                self.schedule_router(
+                    context, router_id, candidates=[l3_agent_on_host])
+            LOG.debug('DVR: dvr_update_router_addvm %s ', router_id)
+
+        self.l3_rpc_notifier.routers_updated_on_host(
+            context, router_ids, port_host)
 
-    def get_dvr_routers_by_portid(self, context, port_id):
+    def get_dvr_routers_by_portid(self, context, port_id, fixed_ips=None):
         """Gets the dvr routers on vmport subnets."""
         router_ids = set()
-        port_dict = self._core_plugin.get_port(context, port_id)
-        fixed_ips = port_dict['fixed_ips']
+        if fixed_ips is None:
+            port_dict = self._core_plugin.get_port(context, port_id)
+            fixed_ips = port_dict['fixed_ips']
         for fixedip in fixed_ips:
             vm_subnet = fixedip['subnet_id']
             filter_sub = {'fixed_ips': {'subnet_id': [vm_subnet]},
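The new optional fixed_ips argument lets a caller that already fetched the
port skip a second get_port round trip, while existing call sites are
unchanged. Roughly:

    # Caller already holds the port's fixed IPs -> no extra get_port call:
    router_ids = self.get_dvr_routers_by_portid(
        context, port['id'], fixed_ips=port['fixed_ips'])

    # Older call sites keep working; the port is looked up internally:
    router_ids = self.get_dvr_routers_by_portid(context, port['id'])
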
index b054301b6474605b278b10d71e54b07e86290ec8..4a2904d694b379f206d4bd0e5beeb8b2186fc6a9 100644 (file)
@@ -391,3 +391,39 @@ class L3DvrTestCase(ml2_test_base.ML2TestFramework):
 
     def test_delete_floating_ip_agent_notification_non_dvr(self):
         self._test_delete_floating_ip_agent_notification(dvr=False)
+
+    def test_update_vm_port_host_router_update(self):
+        # register l3 agent in dvr mode in addition to existing dvr_snat agent
+        HOST = 'host1'
+        dvr_agent = helpers.register_l3_agent(
+            host=HOST, agent_mode=l3_const.L3_AGENT_MODE_DVR)
+        router = self._create_router()
+        with self.subnet() as subnet:
+            self.l3_plugin.add_router_interface(
+                self.context, router['id'],
+                {'subnet_id': subnet['subnet']['id']})
+
+            # since there are no vm ports on HOST, at this point the router
+            # should be scheduled to only dvr_snat agent
+            agents = self.l3_plugin.list_l3_agents_hosting_router(
+                self.context, router['id'])
+            self.assertEqual(1, len(agents['agents']))
+            self.assertEqual(self.l3_agent['id'], agents['agents'][0]['id'])
+            with mock.patch.object(self.l3_plugin,
+                                   '_l3_rpc_notifier') as l3_notifier,\
+                    self.port(subnet=subnet,
+                              device_owner='compute:None') as port:
+                self.core_plugin.update_port(
+                    self.context, port['port']['id'],
+                    {'port': {'binding:host_id': HOST}})
+
+                # now router should be scheduled to both agents
+                agents = self.l3_plugin.list_l3_agents_hosting_router(
+                    self.context, router['id'])
+                self.assertEqual(2, len(agents['agents']))
+                self.assertIn(dvr_agent['id'],
+                              [agent['id'] for agent in agents['agents']])
+                # and notification should only be sent to the agent on HOST
+                l3_notifier.routers_updated_on_host.assert_called_once_with(
+                    self.context, {router['id']}, HOST)
+                self.assertFalse(l3_notifier.routers_updated.called)
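The key assertions above can be reproduced standalone with unittest.mock
(names are simplified placeholders):

    from unittest import mock

    ctx = mock.sentinel.context
    l3_notifier = mock.Mock()

    # Simulate the plugin sending a single targeted notification.
    l3_notifier.routers_updated_on_host(ctx, {'router-1'}, 'host1')

    # Exactly one per-host call, and the broad path was never taken.
    l3_notifier.routers_updated_on_host.assert_called_once_with(
        ctx, {'router-1'}, 'host1')
    assert not l3_notifier.routers_updated.called
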
index ccf0012bded795807956274318bce7651f4e5aed..fed5935870740034ddee3ed88a573369e711e123 100644 (file)
@@ -1026,8 +1026,10 @@ class L3DvrSchedulerTestCase(testlib_api.SqlTestCase):
 
     def test_dvr_update_router_addvm(self):
         port = {
+                'id': 'port1',
                 'device_id': 'abcd',
                 'device_owner': 'compute:nova',
+                'binding:host_id': 'host1',
                 'fixed_ips': [
                     {
                         'subnet_id': '80947d4a-fbc8-484b-9f92-623a6bfcf3e0',
@@ -1059,39 +1061,27 @@ class L3DvrSchedulerTestCase(testlib_api.SqlTestCase):
                 ]
             }
         ]
-        r1 = {
-              'id': 'r1',
-              'distributed': True,
-        }
-        r2 = {
-              'id': 'r2',
-              'distributed': True,
-        }
+        agent_on_host = {'id': 'agent1'}
 
         with mock.patch(
             'neutron.db.db_base_plugin_v2.NeutronDbPluginV2' '.get_ports',
             return_value=dvr_ports),\
-                mock.patch(
-                    'neutron.manager.NeutronManager.get_service_plugins',
-                    return_value=mock.Mock()),\
-                mock.patch('neutron.db.l3_db.L3_NAT_db_mixin.get_router',
-                           router_id='r1', return_value=r1),\
-                mock.patch('neutron.db.l3_db.L3_NAT_db_mixin.get_router',
-                           router_id='r2', return_value=r2),\
                 mock.patch('neutron.api.rpc.agentnotifiers.l3_rpc_agent_api'
-                           '.L3AgentNotifyAPI'):
+                           '.L3AgentNotifyAPI'),\
+                mock.patch(
+                    'neutron.db.db_base_plugin_v2.NeutronDbPluginV2.get_port',
+                    return_value=port),\
+                mock.patch.object(
+                        self.dut, 'get_l3_agents',
+                        return_value=[agent_on_host]) as get_l3_agents:
             self.dut.dvr_update_router_addvm(self.adminContext, port)
-            self.assertEqual(
-                self.dut.l3_rpc_notifier.routers_updated.call_count, 2)
-            payload = {'subnet_id': port['fixed_ips'][0]['subnet_id']}
-            expected_calls = [
-                mock.call.routers_updated(
-                    self.adminContext, ['r1'], None, payload),
-                mock.call.routers_updated(
-                    self.adminContext, ['r2'], None, payload)
-            ]
-            self.dut.l3_rpc_notifier.routers_updated.assert_has_calls(
-                expected_calls, any_order=True)
+
+            get_l3_agents.assert_called_once_with(
+                self.adminContext, filters={'host': [port['binding:host_id']]})
+            (self.dut.l3_rpc_notifier.routers_updated_on_host.
+                assert_called_once_with(
+                    self.adminContext, {'r1', 'r2'}, 'host1'))
+            self.assertFalse(self.dut.l3_rpc_notifier.routers_updated.called)
 
     def test_get_dvr_routers_by_portid(self):
         dvr_port = {