review.fuel-infra Code Review - openstack-build/neutron-build.git/commitdiff
Enable servicing lbaasV2 vip by DVR
authorHong Hui Xiao <xiaohhui@cn.ibm.com>
Thu, 10 Sep 2015 10:38:01 +0000 (06:38 -0400)
committerHong Hui Xiao <xiaohhui@cn.ibm.com>
Mon, 14 Sep 2015 02:38:29 +0000 (22:38 -0400)
Currently, the VIP of lbaasV2 will not have an l3 network with DVR.
This prevents the use case of lbaasV2 + DVR. This patch aims to
enable servicing lbaasV2 VIPs by DVR.

Change-Id: I1b51550437994fbe78d4db904641d4d9fb75d82e
Closes-Bug: #1493809

neutron/common/utils.py
neutron/tests/unit/common/test_utils.py
neutron/tests/unit/plugins/ml2/drivers/openvswitch/agent/test_ovs_neutron_agent.py
neutron/tests/unit/plugins/ml2/test_plugin.py
neutron/tests/unit/scheduler/test_l3_agent_scheduler.py

index a8b79e99fb5db26815a9eb8f01face21b8c62a0a..b96327bbb3c6b89be794ee4f69352fc80f12e0fd 100644 (file)
@@ -368,6 +368,7 @@ def is_dvr_serviced(device_owner):
     indirectly associated with DVR.
     """
     dvr_serviced_device_owners = (n_const.DEVICE_OWNER_LOADBALANCER,
+                                  n_const.DEVICE_OWNER_LOADBALANCERV2,
                                   n_const.DEVICE_OWNER_DHCP)
     return (device_owner.startswith('compute:') or
             device_owner in dvr_serviced_device_owners)
index f6aee3da935b92e054f00bc96027caf8d152c170..973a938dc7130cb45e9df8af40b30ae050350305 100644 (file)
@@ -562,6 +562,9 @@ class TestDvrServices(base.BaseTestCase):
     def test_is_dvr_serviced_with_lb_port(self):
         self._test_is_dvr_serviced(constants.DEVICE_OWNER_LOADBALANCER, True)
 
+    def test_is_dvr_serviced_with_lbv2_port(self):
+        self._test_is_dvr_serviced(constants.DEVICE_OWNER_LOADBALANCERV2, True)
+
     def test_is_dvr_serviced_with_dhcp_port(self):
         self._test_is_dvr_serviced(constants.DEVICE_OWNER_DHCP, True)
 
index 1280b10aacbce55d58fbfc6cbd9ba74847f7f86e..bf44028d71e700537f8c171633d02f6a22428bb0 100644 (file)
@@ -1935,6 +1935,16 @@ class TestOvsDvrNeutronAgent(object):
         self._test_port_bound_for_dvr_on_vxlan_network(
             device_owner=n_const.DEVICE_OWNER_LOADBALANCER, ip_version=6)
 
+    def test_port_bound_for_dvr_with_lbaasv2_vip_ports(self):
+        self._test_port_bound_for_dvr_on_vlan_network(
+            device_owner=n_const.DEVICE_OWNER_LOADBALANCERV2)
+        self._test_port_bound_for_dvr_on_vlan_network(
+            device_owner=n_const.DEVICE_OWNER_LOADBALANCERV2, ip_version=6)
+        self._test_port_bound_for_dvr_on_vxlan_network(
+            device_owner=n_const.DEVICE_OWNER_LOADBALANCERV2)
+        self._test_port_bound_for_dvr_on_vxlan_network(
+            device_owner=n_const.DEVICE_OWNER_LOADBALANCERV2, ip_version=6)
+
     def test_port_bound_for_dvr_with_dhcp_ports(self):
         self._test_port_bound_for_dvr_on_vlan_network(
             device_owner=n_const.DEVICE_OWNER_DHCP)
@@ -2211,6 +2221,12 @@ class TestOvsDvrNeutronAgent(object):
         self._test_treat_devices_removed_for_dvr(
             device_owner=n_const.DEVICE_OWNER_LOADBALANCER, ip_version=6)
 
+    def test_treat_devices_removed_for_dvr_with_lbaasv2_vip_ports(self):
+        self._test_treat_devices_removed_for_dvr(
+            device_owner=n_const.DEVICE_OWNER_LOADBALANCERV2)
+        self._test_treat_devices_removed_for_dvr(
+            device_owner=n_const.DEVICE_OWNER_LOADBALANCERV2, ip_version=6)
+
     def test_treat_devices_removed_for_dvr_with_dhcp_ports(self):
         self._test_treat_devices_removed_for_dvr(
             device_owner=n_const.DEVICE_OWNER_DHCP)
index a9d0c43b73dba95f063eab3fead9121449629605..5e6ad08015f2aaf47a9a96edcea1e667c81e9b68 100644 (file)
@@ -633,6 +633,10 @@ class TestMl2PortsV2(test_plugin.TestPortsV2, Ml2PluginV2TestCase):
         self.assertTrue(utils.is_dvr_serviced(
             constants.DEVICE_OWNER_LOADBALANCER))
 
+    def test_check_if_lbaasv2_vip_port_serviced_by_dvr(self):
+        self.assertTrue(utils.is_dvr_serviced(
+            constants.DEVICE_OWNER_LOADBALANCERV2))
+
     def test_check_if_dhcp_port_serviced_by_dvr(self):
         self.assertTrue(utils.is_dvr_serviced(constants.DEVICE_OWNER_DHCP))
 
@@ -785,6 +789,10 @@ class TestMl2DvrPortsV2(TestMl2PortsV2):
         self._test_delete_dvr_serviced_port(
             device_owner=constants.DEVICE_OWNER_LOADBALANCER)
 
+    def test_delete_lbaasv2_vip_port(self):
+        self._test_delete_dvr_serviced_port(
+            device_owner=constants.DEVICE_OWNER_LOADBALANCERV2)
+
     def test_concurrent_csnat_port_delete(self):
         plugin = manager.NeutronManager.get_service_plugins()[
             p_const.L3_ROUTER_NAT]
index f1156c456970ed5fb333a012c905394f274398fe..4f549248a743ebff3eb570d89155a1d77bdb3f05 100644 (file)
@@ -1187,13 +1187,13 @@ class L3DvrSchedulerTestCase(testlib_api.SqlTestCase):
                                                     'my-subnet-id')
             self.assertTrue(result)
 
-    def test_dvr_serviced_vip_port_exists_on_subnet(self):
+    def _test_dvr_serviced_vip_port_exists_on_subnet(self, device_owner):
         vip_port = {
                 'id': 'lbaas-vip-port1',
                 'device_id': 'vip-pool-id',
                 'status': 'ACTIVE',
                 'binding:host_id': 'thisHost',
-                'device_owner': constants.DEVICE_OWNER_LOADBALANCER,
+                'device_owner': device_owner,
                 'fixed_ips': [
                     {
                         'subnet_id': 'my-subnet-id',
@@ -1203,6 +1203,14 @@ class L3DvrSchedulerTestCase(testlib_api.SqlTestCase):
         }
         self._test_dvr_serviced_port_exists_on_subnet(port=vip_port)
 
+    def test_dvr_serviced_lbaas_vip_port_exists_on_subnet(self):
+        self._test_dvr_serviced_vip_port_exists_on_subnet(
+                        device_owner=constants.DEVICE_OWNER_LOADBALANCER)
+
+    def test_dvr_serviced_lbaasv2_vip_port_exists_on_subnet(self):
+        self._test_dvr_serviced_vip_port_exists_on_subnet(
+                        device_owner=constants.DEVICE_OWNER_LOADBALANCERV2)
+
     def _create_port(self, port_name, tenant_id, host, subnet_id, ip_address,
                      status='ACTIVE',
                      device_owner='compute:nova'):