From 5bbe2dcac438565f29dbf2386f3bfdd60f416f55 Mon Sep 17 00:00:00 2001
From: Michael Price
Date: Sun, 16 Aug 2015 10:58:41 -0400
Subject: [PATCH] NetApp E-Series over-subscription support

Add support to the E-Series driver for over-subscription
(over-provisioning). This allows the Cinder scheduler to more
accurately represent the remaining space on a pool when
thin-provisioned Cinder volumes are in use on an E-Series backend.

Co-Authored-By: Tom Barron
DocImpact
Partially-implements: blueprint netapp-block--over-subscription-thin-provisioning
Change-Id: I5fb8f480168b9195221412d70f187c5ddfc4f527
---
 .../volume/drivers/netapp/eseries/fakes.py    |   8 ++
 .../drivers/netapp/eseries/test_library.py    | 113 ++++++++++++++++++
 .../volume/drivers/netapp/eseries/library.py  |  14 ++-
 3 files changed, 134 insertions(+), 1 deletion(-)

diff --git a/cinder/tests/unit/volume/drivers/netapp/eseries/fakes.py b/cinder/tests/unit/volume/drivers/netapp/eseries/fakes.py
index 2258c7a62..d582f1f58 100644
--- a/cinder/tests/unit/volume/drivers/netapp/eseries/fakes.py
+++ b/cinder/tests/unit/volume/drivers/netapp/eseries/fakes.py
@@ -476,6 +476,14 @@ VOLUMES = [

 VOLUME = VOLUMES[0]

+STORAGE_POOL = {
+    'label': 'DDP',
+    'volumeGroupRef': 'fakevolgroupref',
+    'raidLevel': 'raidDiskPool',
+    'usedSpace': '16413217521664',
+    'totalRaidedSpace': '16637410312192',
+}
+
 INITIATOR_NAME = 'iqn.1998-01.com.vmware:localhost-28a58148'
 INITIATOR_NAME_2 = 'iqn.1998-01.com.vmware:localhost-28a58149'
 INITIATOR_NAME_3 = 'iqn.1998-01.com.vmware:localhost-28a58150'
diff --git a/cinder/tests/unit/volume/drivers/netapp/eseries/test_library.py b/cinder/tests/unit/volume/drivers/netapp/eseries/test_library.py
index 6ab343b51..3657af40e 100644
--- a/cinder/tests/unit/volume/drivers/netapp/eseries/test_library.py
+++ b/cinder/tests/unit/volume/drivers/netapp/eseries/test_library.py
@@ -21,6 +21,7 @@ import copy

 import ddt
 import mock
+from oslo_utils import units
 import six

 from cinder import exception
@@ -245,6 +246,118 @@ class NetAppEseriesLibraryTestCase(test.TestCase):
                           'test_vg2': {'netapp_disk_encryption': 'true'}},
                          ssc_stats)

+    @ddt.data(True, False)
+    def test_get_volume_stats(self, refresh):
+        fake_stats = {'key': 'val'}
+
+        def populate_stats():
+            self.library._stats = fake_stats
+
+        self.library._update_volume_stats = mock.Mock(
+            side_effect=populate_stats)
+        self.library._update_ssc_info = mock.Mock()
+        self.library._ssc_stats = {self.library.THIN_UQ_SPEC: True}
+
+        actual = self.library.get_volume_stats(refresh=refresh)
+
+        if refresh:
+            self.library._update_volume_stats.assert_called_once_with()
+            self.assertEqual(fake_stats, actual)
+        else:
+            self.assertEqual(0, self.library._update_volume_stats.call_count)
+        self.assertEqual(0, self.library._update_ssc_info.call_count)
+
+    def test_get_volume_stats_no_ssc(self):
+        """Validate that SSC data is collected if not yet populated"""
+        fake_stats = {'key': 'val'}
+
+        def populate_stats():
+            self.library._stats = fake_stats
+
+        self.library._update_volume_stats = mock.Mock(
+            side_effect=populate_stats)
+        self.library._update_ssc_info = mock.Mock()
+        self.library._ssc_stats = None
+
+        actual = self.library.get_volume_stats(refresh=True)
+
+        self.library._update_volume_stats.assert_called_once_with()
+        self.library._update_ssc_info.assert_called_once_with()
+        self.assertEqual(fake_stats, actual)
+
+    def test_update_volume_stats_provisioning(self):
+        """Validate pool capacity calculations"""
+        fake_pool = copy.deepcopy(eseries_fake.STORAGE_POOL)
+        self.library._get_storage_pools = mock.Mock(return_value=[fake_pool])
+        self.mock_object(self.library, '_ssc_stats', new_attr={fake_pool[
+            "volumeGroupRef"]: {self.library.THIN_UQ_SPEC: True}})
+        self.library.configuration = mock.Mock()
+        reserved_pct = 5
+        over_subscription_ratio = 1.0
+        self.library.configuration.max_over_subscription_ratio = (
+            over_subscription_ratio)
+        self.library.configuration.reserved_percentage = reserved_pct
+        total_gb = int(fake_pool['totalRaidedSpace']) / units.Gi
+        used_gb = int(fake_pool['usedSpace']) / units.Gi
+        free_gb = total_gb - used_gb
+
+        self.library._update_volume_stats()
+
+        self.assertEqual(1, len(self.library._stats['pools']))
+        pool_stats = self.library._stats['pools'][0]
+        self.assertEqual(fake_pool['label'], pool_stats.get('pool_name'))
+        self.assertEqual(reserved_pct, pool_stats['reserved_percentage'])
+        self.assertEqual(over_subscription_ratio,
+                         pool_stats['max_oversubscription_ratio'])
+        self.assertEqual(total_gb, pool_stats.get('total_capacity_gb'))
+        self.assertEqual(used_gb, pool_stats.get('provisioned_capacity_gb'))
+        self.assertEqual(free_gb, pool_stats.get('free_capacity_gb'))
+
+    @ddt.data(False, True)
+    def test_update_volume_stats_thin_provisioning(self, thin_provisioning):
+        """Validate that thin provisioning support is correctly reported"""
+        fake_pool = copy.deepcopy(eseries_fake.STORAGE_POOL)
+        self.library._get_storage_pools = mock.Mock(return_value=[fake_pool])
+        self.mock_object(self.library, '_ssc_stats', new_attr={fake_pool[
+            "volumeGroupRef"]: {self.library.THIN_UQ_SPEC: thin_provisioning}})
+
+        self.library._update_volume_stats()
+
+        self.assertEqual(1, len(self.library._stats['pools']))
+        pool_stats = self.library._stats['pools'][0]
+        self.assertEqual(thin_provisioning, pool_stats.get(
+            'thin_provisioning_support'))
+        # Should always be True
+        self.assertTrue(pool_stats.get('thick_provisioning_support'))
+
+    def test_update_volume_stats_ssc(self):
+        """Ensure that the SSC data is correctly reported in the pool stats"""
+        ssc = {self.library.THIN_UQ_SPEC: True, 'key': 'val'}
+        fake_pool = copy.deepcopy(eseries_fake.STORAGE_POOL)
+        self.library._get_storage_pools = mock.Mock(return_value=[fake_pool])
+        self.mock_object(self.library, '_ssc_stats', new_attr={fake_pool[
+            "volumeGroupRef"]: ssc})
+
+        self.library._update_volume_stats()
+
+        self.assertEqual(1, len(self.library._stats['pools']))
+        pool_stats = self.library._stats['pools'][0]
+        for key in ssc:
+            self.assertIn(key, pool_stats)
+            self.assertEqual(ssc[key], pool_stats[key])
+
+    def test_update_volume_stats_no_ssc(self):
+        """Ensure that pool stats are correctly reported without SSC"""
+        fake_pool = copy.deepcopy(eseries_fake.STORAGE_POOL)
+        self.library._get_storage_pools = mock.Mock(return_value=[fake_pool])
+
+        self.library._update_volume_stats()
+
+        self.assertEqual(1, len(self.library._stats['pools']))
+        pool_stats = self.library._stats['pools'][0]
+        self.assertFalse(pool_stats.get('thin_provisioning_support'))
+        # Should always be True
+        self.assertTrue(pool_stats.get('thick_provisioning_support'))
+
     def test_terminate_connection_iscsi_no_hosts(self):
         connector = {'initiator': eseries_fake.INITIATOR_NAME}
diff --git a/cinder/volume/drivers/netapp/eseries/library.py b/cinder/volume/drivers/netapp/eseries/library.py
index 27a3a653c..8ed9765f5 100644
--- a/cinder/volume/drivers/netapp/eseries/library.py
+++ b/cinder/volume/drivers/netapp/eseries/library.py
@@ -2,6 +2,7 @@
 # Copyright (c) 2015 Rushil Chugh
 # Copyright (c) 2015 Navneet Singh
 # Copyright (c) 2015 Yogesh Kshirsagar
+# Copyright (c) 2015 Tom Barron
 # Copyright (c) 2015 Michael Price
 # All Rights Reserved.
 #
@@ -903,9 +904,13 @@ class NetAppESeriesLibrary(object):
             cinder_pool = {}
             cinder_pool["pool_name"] = storage_pool.get("label")
             cinder_pool["QoS_support"] = False
-            cinder_pool["reserved_percentage"] = 0
+            cinder_pool["reserved_percentage"] = (
+                self.configuration.reserved_percentage)
+            cinder_pool["max_oversubscription_ratio"] = (
+                self.configuration.max_over_subscription_ratio)
             tot_bytes = int(storage_pool.get("totalRaidedSpace", 0))
             used_bytes = int(storage_pool.get("usedSpace", 0))
+            cinder_pool["provisioned_capacity_gb"] = used_bytes / units.Gi
             cinder_pool["free_capacity_gb"] = ((tot_bytes - used_bytes) /
                                                units.Gi)
             cinder_pool["total_capacity_gb"] = tot_bytes / units.Gi
@@ -914,7 +919,14 @@ class NetAppESeriesLibrary(object):
                 storage_pool["volumeGroupRef"])
             if pool_ssc_stats:
+                thin = pool_ssc_stats.get(self.THIN_UQ_SPEC) or False
                 cinder_pool.update(pool_ssc_stats)
+            else:
+                thin = False
+            cinder_pool["thin_provisioning_support"] = thin
+            # All E-Series pools support thick provisioning
+            cinder_pool["thick_provisioning_support"] = True
+
             data["pools"].append(cinder_pool)

         self._stats = data
-- 
2.45.2
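
How these pool stats get used: the Cinder scheduler weighs the reported
capacities and ratios when placing thin-provisioned volumes. The sketch below
is a simplified, hypothetical model of that check, shown only to make the
relationship between the new keys (free_capacity_gb, provisioned_capacity_gb,
reserved_percentage, max_oversubscription_ratio, thin_provisioning_support)
concrete; the function name fits_in_pool and the exact arithmetic are
illustrative assumptions, not the scheduler's real CapacityFilter code.

    # Illustrative sketch only; not part of this patch and not Cinder
    # scheduler code.  It models how the pool keys reported by
    # _update_volume_stats() could be combined into a placement decision.

    def fits_in_pool(pool_stats, requested_gb):
        """Roughly decide whether a new volume of requested_gb fits."""
        total = float(pool_stats['total_capacity_gb'])
        free = float(pool_stats['free_capacity_gb'])
        provisioned = float(pool_stats['provisioned_capacity_gb'])
        reserved = total * pool_stats['reserved_percentage'] / 100.0

        thin = pool_stats.get('thin_provisioning_support', False)
        ratio = float(pool_stats.get('max_oversubscription_ratio', 1.0))

        if thin and ratio >= 1.0:
            # Over-subscription: judge the request against the pool's
            # virtual capacity (total * ratio) minus what is already
            # provisioned, keeping the reserved headroom untouched.
            virtual_free = total * ratio - provisioned - reserved
            return requested_gb <= virtual_free

        # Thick provisioning: the request must fit in physical free space.
        return requested_gb <= free - reserved


    # Values derived from the STORAGE_POOL fake above (bytes -> GiB).
    gib = 1024 ** 3
    pool = {
        'total_capacity_gb': 16637410312192 / float(gib),        # ~15494 GiB
        'provisioned_capacity_gb': 16413217521664 / float(gib),  # ~15286 GiB
        'free_capacity_gb': (16637410312192 - 16413217521664) / float(gib),
        'reserved_percentage': 5,
        'max_oversubscription_ratio': 1.0,
        'thin_provisioning_support': True,
    }

    print(fits_in_pool(pool, 500))  # False: a ratio of 1.0 leaves no
                                    # virtual headroom on this nearly-full pool

In the driver, reserved_percentage and max_over_subscription_ratio come from
the backend's configuration (the standard Cinder options of those names), so
operators enable over-subscription by raising max_over_subscription_ratio
above 1.0 for the E-Series backend section; with the 1.0 used in the
provisioning test above, a thin pool gets no extra virtual headroom.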