review.fuel-infra Code Review - openstack-build/neutron-build.git/commitdiff
Use admin context when removing DVR router on vm port deletion
author    Oleg Bondarev <obondarev@mirantis.com>
Tue, 15 Dec 2015 14:58:51 +0000 (17:58 +0300)
committer Oleg Bondarev <obondarev@mirantis.com>
Wed, 13 Jan 2016 09:43:23 +0000 (12:43 +0300)
In case a non-admin tenant removes the last VM on a shared network (owned
by admin) connected to a DVR router (also owned by admin), we need to
remove the router from the host where there are no more DVR serviceable
ports. Commit edbade486102a219810137d1c6b916e87475d477 fixed the logic
that determines which routers should be removed from a host. However, in
order to actually remove the router we also need an admin context.

This was not caught by unit tests, and one reason for that is the
so-called 'mock everything' approach, which is evil and generally
useless. This patch replaces the unit tests with functional tests that
are able to catch the bug.

Closes-Bug: #1424096
Change-Id: Ia6cdf2294562c2a2727350c78eeab155097e0c33
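
For illustration, below is a minimal, self-contained Python sketch of the
elevated-context pattern the fix relies on. RequestContext and FakeL3Plugin
are hypothetical stand-ins, not Neutron's real classes; they only show why
the tenant's own context fails against an admin-owned router while
context.elevated() succeeds.

# Minimal sketch of the elevated-context pattern used by the fix.
# RequestContext and FakeL3Plugin are hypothetical stand-ins, not
# Neutron's real classes.

class RequestContext(object):
    def __init__(self, tenant_id, is_admin=False):
        self.tenant_id = tenant_id
        self.is_admin = is_admin

    def elevated(self):
        # Return a copy of this context with admin rights, mirroring what
        # context.elevated() provides in the real code path.
        return RequestContext(self.tenant_id, is_admin=True)

class FakeL3Plugin(object):
    def remove_router_from_l3_agent(self, context, agent_id, router_id):
        # An admin-owned router is only visible with admin rights, so a
        # plain tenant context cannot unschedule it from the agent.
        if not context.is_admin:
            raise RuntimeError("router %s not visible to tenant %s" %
                               (router_id, context.tenant_id))
        print("removed router %s from agent %s" % (router_id, agent_id))

tenant_ctx = RequestContext('tenant1')
plugin = FakeL3Plugin()
# Before the fix the handler passed the tenant context directly and failed
# here for admin-owned routers; after the fix it elevates first:
plugin.remove_router_from_l3_agent(
    tenant_ctx.elevated(), 'foo_agent', 'foo_router')

In the real handler the elevation is scoped to the single
remove_router_from_l3_agent() call, so the rest of the port-delete path
keeps running with the tenant's credentials.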

neutron/db/l3_dvrscheduler_db.py
neutron/tests/functional/services/l3_router/test_l3_dvr_router_plugin.py
neutron/tests/unit/scheduler/test_l3_agent_scheduler.py

diff --git a/neutron/db/l3_dvrscheduler_db.py b/neutron/db/l3_dvrscheduler_db.py
index 497546725a81c318ac92cef39c2073ccae49438c..ada1c799aae75b715d6f73b2c47e99f38e9a759b 100644
--- a/neutron/db/l3_dvrscheduler_db.py
+++ b/neutron/db/l3_dvrscheduler_db.py
@@ -460,8 +460,11 @@ def _notify_port_delete(event, resource, trigger, **kwargs):
         service_constants.L3_ROUTER_NAT)
     l3plugin.update_arp_entry_for_dvr_service_port(context, port, "del")
     for router in removed_routers:
+        # we need admin context in case a tenant removes the last dvr
+        # serviceable port on a shared network owned by admin, where router
+        # is also owned by admin
         l3plugin.remove_router_from_l3_agent(
-            context, router['agent_id'], router['router_id'])
+            context.elevated(), router['agent_id'], router['router_id'])
 
 
 def _notify_l3_agent_port_update(resource, event, trigger, **kwargs):
diff --git a/neutron/tests/functional/services/l3_router/test_l3_dvr_router_plugin.py b/neutron/tests/functional/services/l3_router/test_l3_dvr_router_plugin.py
index dd2c70435f17aa7dc2a564d59f3f6444ad403024..c15c42deab7c3d134f8268d555ad722fa5236091 100644
--- a/neutron/tests/functional/services/l3_router/test_l3_dvr_router_plugin.py
+++ b/neutron/tests/functional/services/l3_router/test_l3_dvr_router_plugin.py
@@ -16,6 +16,7 @@ import mock
 
 from neutron.api.v2 import attributes
 from neutron.common import constants
+from neutron import context
 from neutron.extensions import external_net
 from neutron.extensions import portbindings
 from neutron.tests.common import helpers
@@ -509,4 +510,52 @@ class L3DvrTestCase(ml2_test_base.ML2TestFramework):
                 l3_notifier.routers_updated_on_host.assert_called_once_with(
                     self.context, {router['id']}, HOST2)
                 l3_notifier.router_removed_from_agent.assert_called_once_with(
-                    self.context, router['id'], HOST1)
+                    mock.ANY, router['id'], HOST1)
+
+    def _test_router_remove_from_agent_on_vm_port_deletion(
+            self, non_admin_port=False):
+        # register l3 agent in dvr mode in addition to existing dvr_snat agent
+        HOST = 'host1'
+        non_admin_tenant = 'tenant1'
+        dvr_agent = helpers.register_l3_agent(
+            host=HOST, agent_mode=constants.L3_AGENT_MODE_DVR)
+        router = self._create_router()
+        with self.network(shared=True) as net,\
+                self.subnet(network=net) as subnet,\
+                self.port(subnet=subnet,
+                          device_owner=DEVICE_OWNER_COMPUTE,
+                          tenant_id=non_admin_tenant,
+                          set_context=non_admin_port) as port:
+            self.core_plugin.update_port(
+                    self.context, port['port']['id'],
+                    {'port': {portbindings.HOST_ID: HOST}})
+            self.l3_plugin.add_router_interface(
+                self.context, router['id'],
+                {'subnet_id': subnet['subnet']['id']})
+
+            # router should be scheduled to agent on HOST
+            agents = self.l3_plugin.list_l3_agents_hosting_router(
+                self.context, router['id'])
+            self.assertEqual(1, len(agents['agents']))
+            self.assertEqual(dvr_agent['id'], agents['agents'][0]['id'])
+
+            notifier = self.l3_plugin.agent_notifiers[
+                constants.AGENT_TYPE_L3]
+            with mock.patch.object(
+                    notifier, 'router_removed_from_agent') as remove_mock:
+                ctx = context.Context(
+                    '', non_admin_tenant) if non_admin_port else self.context
+                self._delete('ports', port['port']['id'], neutron_context=ctx)
+                # now when port is deleted the router should be unscheduled
+                agents = self.l3_plugin.list_l3_agents_hosting_router(
+                    self.context, router['id'])
+                self.assertEqual(0, len(agents['agents']))
+                remove_mock.assert_called_once_with(
+                    mock.ANY, router['id'], HOST)
+
+    def test_router_remove_from_agent_on_vm_port_deletion(self):
+        self._test_router_remove_from_agent_on_vm_port_deletion()
+
+    def test_admin_router_remove_from_agent_on_vm_port_deletion(self):
+        self._test_router_remove_from_agent_on_vm_port_deletion(
+            non_admin_port=True)
diff --git a/neutron/tests/unit/scheduler/test_l3_agent_scheduler.py b/neutron/tests/unit/scheduler/test_l3_agent_scheduler.py
index 80a5db52b256ab92c8724a7ad82e92accdc581c7..db991108ad5cac21ab40659aac5315fc27c3bedd 100644
--- a/neutron/tests/unit/scheduler/test_l3_agent_scheduler.py
+++ b/neutron/tests/unit/scheduler/test_l3_agent_scheduler.py
@@ -1029,7 +1029,7 @@ class L3DvrSchedulerTestCase(testlib_api.SqlTestCase):
             l3_dvrscheduler_db._notify_l3_agent_port_update(
                 'port', 'after_update', mock.ANY, **kwargs)
             l3plugin.remove_router_from_l3_agent.assert_called_once_with(
-                self.adminContext, 'foo_agent', 'foo_id')
+                mock.ANY, 'foo_agent', 'foo_id')
             self.assertEqual(
                 2, l3plugin.update_arp_entry_for_dvr_service_port.call_count)
             l3plugin.dvr_handle_new_service_port.assert_called_once_with(
@@ -1078,7 +1078,7 @@ class L3DvrSchedulerTestCase(testlib_api.SqlTestCase):
             self.assertFalse(
                 l3plugin.dvr_handle_new_service_port.called)
             l3plugin.remove_router_from_l3_agent.assert_called_once_with(
-                self.adminContext, 'foo_agent', 'foo_id')
+                mock.ANY, 'foo_agent', 'foo_id')
 
     def test__notify_port_delete(self):
         plugin = manager.NeutronManager.get_plugin()
@@ -1103,7 +1103,7 @@ class L3DvrSchedulerTestCase(testlib_api.SqlTestCase):
                 assert_called_once_with(
                     self.adminContext, mock.ANY, 'del')
             l3plugin.remove_router_from_l3_agent.assert_called_once_with(
-                self.adminContext, 'foo_agent', 'foo_id')
+                mock.ANY, 'foo_agent', 'foo_id')
 
     def test_dvr_handle_new_service_port(self):
         port = {
@@ -1231,197 +1231,6 @@ class L3DvrSchedulerTestCase(testlib_api.SqlTestCase):
                                                         r1['id'])
             self.assertEqual(len(sub_ids), 0)
 
-    def _create_port(self, port_name, tenant_id, host, subnet_id, ip_address,
-                     status='ACTIVE',
-                     device_owner=DEVICE_OWNER_COMPUTE_NOVA):
-        return {
-            'id': port_name + '-port-id',
-            'tenant_id': tenant_id,
-            'device_id': port_name,
-            'device_owner': device_owner,
-            'status': status,
-            portbindings.HOST_ID: host,
-            'fixed_ips': [
-                {
-                    'subnet_id': subnet_id,
-                    'ip_address': ip_address
-                }
-            ]
-        }
-
-    def test_dvr_deletens_if_no_port_no_routers(self):
-        # Delete a vm port, the port subnet has no router interface.
-        vm_tenant_id = 'tenant-1'
-        my_context = n_context.Context('user-1', vm_tenant_id, is_admin=False)
-        vm_port_host = 'compute-node-1'
-
-        vm_port = self._create_port(
-            'deleted-vm', vm_tenant_id, vm_port_host,
-            'shared-subnet', '10.10.10.3',
-            status='INACTIVE')
-
-        vm_port_id = vm_port['id']
-        fakePortDB = FakePortDB([vm_port])
-
-        with mock.patch.object(my_context,
-                               'elevated',
-                               return_value=self.adminContext),\
-                mock.patch(
-                    'neutron.plugins.ml2.db.get_port_binding_host',
-                    return_value=vm_port_host) as mock_get_port_binding_host,\
-                mock.patch('neutron.db.db_base_plugin_v2.NeutronDbPluginV2.'
-                           'get_ports', side_effect=fakePortDB.get_ports),\
-                mock.patch('neutron.db.db_base_plugin_v2.NeutronDbPluginV2.'
-                           'get_port', return_value=vm_port):
-
-            routers = self.dut.dvr_deletens_if_no_port(my_context, vm_port_id)
-            self.assertEqual([], routers)
-            mock_get_port_binding_host.assert_called_once_with(
-                self.adminContext.session, vm_port_id)
-
-    def test_dvr_deletens_if_no_ports_no_removeable_routers(self):
-        # A VM port is deleted, but the router can't be unscheduled from the
-        # compute node because there is another VM port present.
-        vm_tenant_id = 'tenant-1'
-        my_context = n_context.Context('user-1', vm_tenant_id, is_admin=False)
-        shared_subnet_id = '80947d4a-fbc8-484b-9f92-623a6bfcf3e0',
-        vm_port_host = 'compute-node-1'
-
-        dvr_port = self._create_port(
-            'dvr-router', 'admin-tenant', vm_port_host,
-            shared_subnet_id, '10.10.10.1',
-            device_owner=constants.DEVICE_OWNER_DVR_INTERFACE)
-
-        deleted_vm_port = self._create_port(
-            'deleted-vm', vm_tenant_id, vm_port_host,
-            shared_subnet_id, '10.10.10.3',
-            status='INACTIVE')
-        deleted_vm_port_id = deleted_vm_port['id']
-
-        fakePortDB = FakePortDB([deleted_vm_port, dvr_port])
-
-        vm_port_binding = {
-            'port_id': deleted_vm_port_id,
-            'host': vm_port_host
-        }
-
-        with mock.patch.object(my_context,
-                               'elevated',
-                               return_value=self.adminContext),\
-                mock.patch(
-                    'neutron.plugins.ml2.db.get_port_binding_host',
-                    return_value=vm_port_host) as mock_get_port_binding_host,\
-                mock.patch('neutron.db.db_base_plugin_v2.NeutronDbPluginV2.'
-                           'get_port', side_effect=fakePortDB.get_port),\
-                mock.patch('neutron.db.db_base_plugin_v2.NeutronDbPluginV2.'
-                           'get_ports', side_effect=fakePortDB.get_ports) as\
-                mock_get_ports,\
-                mock.patch('neutron.db.db_base_plugin_v2.NeutronDbPluginV2.'
-                           '_get_ports_query'),\
-                mock.patch('neutron.plugins.ml2.db.'
-                           'get_dvr_port_binding_by_host',
-                           return_value=vm_port_binding) as\
-                mock_get_dvr_port_binding_by_host,\
-                mock.patch.object(self.dut,
-                                  'check_dvr_serviceable_ports_on_host',
-                                  return_value=True):
-
-            routers = self.dut.dvr_deletens_if_no_port(
-                my_context, deleted_vm_port_id)
-            self.assertEqual([], routers)
-
-            mock_get_port_binding_host.assert_called_once_with(
-                self.adminContext.session, deleted_vm_port_id)
-            self.assertTrue(mock_get_ports.called)
-            self.assertFalse(mock_get_dvr_port_binding_by_host.called)
-
-    def _test_dvr_deletens_if_no_ports_delete_routers(self,
-                                                      vm_tenant,
-                                                      router_tenant):
-        class FakeAgent(object):
-            def __init__(self, id, host, agent_type):
-                self.id = id
-                self.host = host
-                self.agent_type = agent_type
-
-        my_context = n_context.Context('user-1', vm_tenant, is_admin=False)
-        shared_subnet_id = '80947d4a-fbc8-484b-9f92-623a6bfcf3e0',
-        vm_port_host = 'compute-node-1'
-
-        router_id = 'dvr-router'
-        dvr_port = self._create_port(
-            router_id, router_tenant, vm_port_host,
-            shared_subnet_id, '10.10.10.1',
-            device_owner=constants.DEVICE_OWNER_DVR_INTERFACE)
-        dvr_port_id = dvr_port['id']
-
-        deleted_vm_port = self._create_port(
-            'deleted-vm', vm_tenant, vm_port_host,
-            shared_subnet_id, '10.10.10.3',
-            status='INACTIVE')
-        deleted_vm_port_id = deleted_vm_port['id']
-
-        fakePortDB = FakePortDB([dvr_port, deleted_vm_port])
-
-        dvr_port_binding = {
-            'port_id': dvr_port_id, 'host': vm_port_host
-        }
-
-        agent_id = 'l3-agent-on-compute-node-1'
-        l3_agent_on_vm_host = FakeAgent(agent_id,
-                                        vm_port_host,
-                                        constants.AGENT_TYPE_L3)
-
-        with mock.patch.object(my_context,
-                               'elevated',
-                               return_value=self.adminContext),\
-                mock.patch(
-                    'neutron.plugins.ml2.db.get_port_binding_host',
-                    return_value=vm_port_host) as mock_get_port_binding_host,\
-                mock.patch('neutron.db.db_base_plugin_v2.NeutronDbPluginV2.'
-                           'get_port', side_effect=fakePortDB.get_port),\
-                mock.patch('neutron.db.db_base_plugin_v2.NeutronDbPluginV2.'
-                           'get_ports', side_effect=fakePortDB.get_ports) as\
-                mock_get_ports,\
-                mock.patch('neutron.plugins.ml2.db.'
-                           'get_dvr_port_binding_by_host',
-                           return_value=dvr_port_binding) as\
-                mock_get_dvr_port_binding_by_host,\
-                mock.patch('neutron.db.agents_db.AgentDbMixin.'
-                           '_get_agent_by_type_and_host',
-                           return_value=l3_agent_on_vm_host),\
-                mock.patch.object(self.dut,
-                                  'check_dvr_serviceable_ports_on_host',
-                                  return_value=False):
-
-            routers = self.dut.dvr_deletens_if_no_port(
-                my_context, deleted_vm_port_id)
-
-            expected_router = {
-                'router_id': router_id,
-                'host': vm_port_host,
-                'agent_id': agent_id
-            }
-            self.assertEqual([expected_router], routers)
-
-            mock_get_port_binding_host.assert_called_once_with(
-                self.adminContext.session, deleted_vm_port_id)
-            self.assertTrue(mock_get_ports.called)
-            mock_get_dvr_port_binding_by_host.assert_called_once_with(
-                my_context.session, dvr_port_id, vm_port_host)
-
-    def test_dvr_deletens_if_no_ports_delete_admin_routers(self):
-        # test to see whether the last VM using a router created
-        # by the admin will be unscheduled on the compute node
-        self._test_dvr_deletens_if_no_ports_delete_routers(
-            'tenant-1', 'admin-tenant')
-
-    def test_dvr_deletens_if_no_ports_delete_tenant_routers(self):
-        # test to see whether the last VM using a tenant's private
-        # router will be unscheduled on the compute node
-        self._test_dvr_deletens_if_no_ports_delete_routers(
-            'tenant-1', 'tenant-1')
-
     def _prepare_schedule_snat_tests(self):
         agent = agents_db.Agent()
         agent.admin_state_up = True