From 998bee4a5da75a9eb39b6d3bdc6a96791fb6d8aa Mon Sep 17 00:00:00 2001
From: Michael Kerrin
Date: Wed, 14 Aug 2013 10:16:42 +0000
Subject: [PATCH] Re-enable a lot of cinder scheduler tests

This highlights and fixes a NoneType exception in the scheduler's
_get_weighted_candidates method.

The is_cinder_installed check was originally added to get around the
fact that the filter tests required cinder to be correctly installed
on the system. But the is_cinder_installed condition was always
returning False, which hid problems caused by old packages being
installed on our system.

Also, 930f5891b0815e1b49b9b2cc840e0c24b2796e84 added running setup.py
automatically to run_tests.sh, so this check can be removed.

See https://review.openstack.org/#/c/20213/ for history.

Fixes bug: 1213226

Change-Id: I86fee802c0543355471ddbd712e52ccec750cea0
---
 cinder/scheduler/filter_scheduler.py          |  2 +-
 cinder/tests/scheduler/fakes.py               | 31 +++++++------
 .../tests/scheduler/test_capacity_weigher.py  |  6 ---
 .../tests/scheduler/test_filter_scheduler.py  | 42 ------------------
 cinder/tests/scheduler/test_host_filters.py   | 14 ------
 cinder/tests/scheduler/test_host_manager.py   | 43 ++++++++++++-------
 cinder/tests/utils.py                         |  7 ---
 7 files changed, 44 insertions(+), 101 deletions(-)

diff --git a/cinder/scheduler/filter_scheduler.py b/cinder/scheduler/filter_scheduler.py
index 83a135ef7..3f2449ccb 100644
--- a/cinder/scheduler/filter_scheduler.py
+++ b/cinder/scheduler/filter_scheduler.py
@@ -222,7 +222,7 @@ class FilterScheduler(driver.Scheduler):
         hosts = self.host_manager.get_filtered_hosts(hosts,
                                                      filter_properties)
         if not hosts:
-            return None
+            return []
 
         LOG.debug(_("Filtered %s") % hosts)
         # weighted_host = WeightedHost() ... the best
diff --git a/cinder/tests/scheduler/fakes.py b/cinder/tests/scheduler/fakes.py
index 6668a1087..4059f1130 100644
--- a/cinder/tests/scheduler/fakes.py
+++ b/cinder/tests/scheduler/fakes.py
@@ -24,21 +24,6 @@ from cinder.scheduler import filter_scheduler
 from cinder.scheduler import host_manager
 
 
-VOLUME_SERVICES = [
-    dict(id=1, host='host1', topic='volume', disabled=False,
-         availability_zone='zone1', updated_at=timeutils.utcnow()),
-    dict(id=2, host='host2', topic='volume', disabled=False,
-         availability_zone='zone1', updated_at=timeutils.utcnow()),
-    dict(id=3, host='host3', topic='volume', disabled=False,
-         availability_zone='zone2', updated_at=timeutils.utcnow()),
-    dict(id=4, host='host4', topic='volume', disabled=False,
-         availability_zone='zone3', updated_at=timeutils.utcnow()),
-    # service on host5 is disabled
-    dict(id=5, host='host5', topic='volume', disabled=True,
-         availability_zone='zone4', updated_at=timeutils.utcnow()),
-]
-
-
 class FakeFilterScheduler(filter_scheduler.FilterScheduler):
     def __init__(self, *args, **kwargs):
         super(FakeFilterScheduler, self).__init__(*args, **kwargs)
@@ -79,5 +64,19 @@ class FakeHostState(host_manager.HostState):
 
 def mox_host_manager_db_calls(mock, context):
     mock.StubOutWithMock(db, 'service_get_all_by_topic')
+    services = [
+        dict(id=1, host='host1', topic='volume', disabled=False,
+             availability_zone='zone1', updated_at=timeutils.utcnow()),
+        dict(id=2, host='host2', topic='volume', disabled=False,
+             availability_zone='zone1', updated_at=timeutils.utcnow()),
+        dict(id=3, host='host3', topic='volume', disabled=False,
+             availability_zone='zone2', updated_at=timeutils.utcnow()),
+        dict(id=4, host='host4', topic='volume', disabled=False,
+             availability_zone='zone3', updated_at=timeutils.utcnow()),
+        # service on host5 is disabled
+        dict(id=5, host='host5', topic='volume', disabled=True,
+             availability_zone='zone4', updated_at=timeutils.utcnow()),
+    ]
+
     db.service_get_all_by_topic(mox.IgnoreArg(),
-                                mox.IgnoreArg()).AndReturn(VOLUME_SERVICES)
+                                mox.IgnoreArg()).AndReturn(services)
diff --git a/cinder/tests/scheduler/test_capacity_weigher.py b/cinder/tests/scheduler/test_capacity_weigher.py
index 4232bbd03..9498da836 100644
--- a/cinder/tests/scheduler/test_capacity_weigher.py
+++ b/cinder/tests/scheduler/test_capacity_weigher.py
@@ -48,8 +48,6 @@ class CapacityWeigherTestCase(test.TestCase):
         self.mox.ResetAll()
         return host_states
 
-    @testtools.skipIf(not test_utils.is_cinder_installed(),
-                      'Test requires Cinder installed')
     def test_default_of_spreading_first(self):
         hostinfo_list = self._get_all_hosts()
 
@@ -63,8 +61,6 @@ class CapacityWeigherTestCase(test.TestCase):
         self.assertEqual(weighed_host.weight, 921.0)
         self.assertEqual(weighed_host.obj.host, 'host1')
 
-    @testtools.skipIf(not test_utils.is_cinder_installed(),
-                      'Test requires Cinder installed')
     def test_capacity_weight_multiplier1(self):
         self.flags(capacity_weight_multiplier=-1.0)
         hostinfo_list = self._get_all_hosts()
@@ -79,8 +75,6 @@ class CapacityWeigherTestCase(test.TestCase):
         self.assertEqual(weighed_host.weight, -190.0)
         self.assertEqual(weighed_host.obj.host, 'host4')
 
-    @testtools.skipIf(not test_utils.is_cinder_installed(),
-                      'Test requires Cinder installed')
     def test_capacity_weight_multiplier2(self):
         self.flags(capacity_weight_multiplier=2.0)
         hostinfo_list = self._get_all_hosts()
diff --git a/cinder/tests/scheduler/test_filter_scheduler.py b/cinder/tests/scheduler/test_filter_scheduler.py
index c747c62a6..a821dc139 100644
--- a/cinder/tests/scheduler/test_filter_scheduler.py
+++ b/cinder/tests/scheduler/test_filter_scheduler.py
@@ -30,17 +30,11 @@ from cinder.tests.scheduler import test_scheduler
 from cinder.tests import utils as test_utils
 
 
-def fake_get_filtered_hosts(hosts, filter_properties):
-    return list(hosts)
-
-
 class FilterSchedulerTestCase(test_scheduler.SchedulerTestCase):
     """Test case for Filter Scheduler."""
 
     driver_cls = filter_scheduler.FilterScheduler
 
-    @testtools.skipIf(not test_utils.is_cinder_installed(),
-                      'Test requires Cinder installed (try setup.py develop')
     def test_create_volume_no_hosts(self):
         """
         Ensure empty hosts & child_zones result in NoValidHosts exception.
@@ -58,8 +52,6 @@ class FilterSchedulerTestCase(test_scheduler.SchedulerTestCase):
         self.assertRaises(exception.NoValidHost, sched.schedule_create_volume,
                           fake_context, request_spec, {})
 
-    @testtools.skipIf(not test_utils.is_cinder_installed(),
-                      'Test requires Cinder installed (try setup.py develop')
     def test_create_volume_non_admin(self):
         """Test creating an instance locally using run_instance, passing
         a non-admin context. DB actions should work.
@@ -85,29 +77,16 @@ class FilterSchedulerTestCase(test_scheduler.SchedulerTestCase):
                           fake_context, request_spec, {})
         self.assertTrue(self.was_admin)
 
-    @testtools.skipIf(not test_utils.is_cinder_installed(),
-                      'Test requires Cinder installed (try setup.py develop')
     def test_schedule_happy_day(self):
         """Make sure there's nothing glaringly wrong with _schedule()
         by doing a happy day pass through.
""" - self.next_weight = 1.0 - - def _fake_weigh_objects(_self, functions, hosts, options): - self.next_weight += 2.0 - host_state = hosts[0] - return [weights.WeighedHost(host_state, self.next_weight)] - sched = fakes.FakeFilterScheduler() sched.host_manager = fakes.FakeHostManager() fake_context = context.RequestContext('user', 'project', is_admin=True) - self.stubs.Set(sched.host_manager, 'get_filtered_hosts', - fake_get_filtered_hosts) - self.stubs.Set(weights.HostWeightHandler, - 'get_weighed_objects', _fake_weigh_objects) fakes.mox_host_manager_db_calls(self.mox, fake_context) request_spec = {'volume_type': {'name': 'LVM_iSCSI'}, @@ -129,8 +108,6 @@ class FilterSchedulerTestCase(test_scheduler.SchedulerTestCase): self.assertRaises(exception.InvalidParameterValue, fakes.FakeFilterScheduler) - @testtools.skipIf(not test_utils.is_cinder_installed(), - 'Test requires Cinder installed (try setup.py develop') def test_retry_disabled(self): # Retry info should not get populated when re-scheduling is off. self.flags(scheduler_max_attempts=1) @@ -147,8 +124,6 @@ class FilterSchedulerTestCase(test_scheduler.SchedulerTestCase): # should not have retry info in the populated filter properties: self.assertFalse("retry" in filter_properties) - @testtools.skipIf(not test_utils.is_cinder_installed(), - 'Test requires Cinder installed (try setup.py develop') def test_retry_attempt_one(self): # Test retry logic on initial scheduling attempt. self.flags(scheduler_max_attempts=2) @@ -165,8 +140,6 @@ class FilterSchedulerTestCase(test_scheduler.SchedulerTestCase): num_attempts = filter_properties['retry']['num_attempts'] self.assertEqual(1, num_attempts) - @testtools.skipIf(not test_utils.is_cinder_installed(), - 'Test requires Cinder installed (try setup.py develop') def test_retry_attempt_two(self): # Test retry logic when re-scheduling. 
self.flags(scheduler_max_attempts=2) @@ -229,29 +202,16 @@ class FilterSchedulerTestCase(test_scheduler.SchedulerTestCase): self.assertEqual(1024, host_state.total_capacity_gb) def _host_passes_filters_setup(self): - self.next_weight = 1.0 - - def _fake_weigh_objects(_self, functions, hosts, options): - self.next_weight += 2.0 - host_state = hosts[0] - return [weights.WeighedHost(host_state, self.next_weight)] - sched = fakes.FakeFilterScheduler() sched.host_manager = fakes.FakeHostManager() fake_context = context.RequestContext('user', 'project', is_admin=True) - self.stubs.Set(sched.host_manager, 'get_filtered_hosts', - fake_get_filtered_hosts) - self.stubs.Set(weights.HostWeightHandler, - 'get_weighed_objects', _fake_weigh_objects) fakes.mox_host_manager_db_calls(self.mox, fake_context) self.mox.ReplayAll() return (sched, fake_context) - @testtools.skipIf(not test_utils.is_cinder_installed(), - 'Test requires Cinder installed (try setup.py develop') def test_host_passes_filters_happy_day(self): """Do a successful pass through of with host_passes_filters().""" sched, ctx = self._host_passes_filters_setup() @@ -262,8 +222,6 @@ class FilterSchedulerTestCase(test_scheduler.SchedulerTestCase): ret_host = sched.host_passes_filters(ctx, 'host1', request_spec, {}) self.assertEqual(ret_host.host, 'host1') - @testtools.skipIf(not test_utils.is_cinder_installed(), - 'Test requires Cinder installed (try setup.py develop') def test_host_passes_filters_no_capacity(self): """Fail the host due to insufficient capacity.""" sched, ctx = self._host_passes_filters_setup() diff --git a/cinder/tests/scheduler/test_host_filters.py b/cinder/tests/scheduler/test_host_filters.py index 7034879a3..9e44f1cfc 100644 --- a/cinder/tests/scheduler/test_host_filters.py +++ b/cinder/tests/scheduler/test_host_filters.py @@ -78,8 +78,6 @@ class HostFiltersTestCase(test.TestCase): return ret_value self.stubs.Set(utils, 'service_is_up', fake_service_is_up) - @testtools.skipIf(not test_utils.is_cinder_installed(), - 'Test requires Cinder installed') def test_capacity_filter_passes(self): self._stub_service_is_up(True) filt_cls = self.class_map['CapacityFilter']() @@ -91,8 +89,6 @@ class HostFiltersTestCase(test.TestCase): 'service': service}) self.assertTrue(filt_cls.host_passes(host, filter_properties)) - @testtools.skipIf(not test_utils.is_cinder_installed(), - 'Test requires Cinder installed') def test_capacity_filter_fails(self): self._stub_service_is_up(True) filt_cls = self.class_map['CapacityFilter']() @@ -105,8 +101,6 @@ class HostFiltersTestCase(test.TestCase): 'service': service}) self.assertFalse(filt_cls.host_passes(host, filter_properties)) - @testtools.skipIf(not test_utils.is_cinder_installed(), - 'Test requires Cinder installed') def test_capacity_filter_passes_infinite(self): self._stub_service_is_up(True) filt_cls = self.class_map['CapacityFilter']() @@ -118,8 +112,6 @@ class HostFiltersTestCase(test.TestCase): 'service': service}) self.assertTrue(filt_cls.host_passes(host, filter_properties)) - @testtools.skipIf(not test_utils.is_cinder_installed(), - 'Test requires Cinder installed') def test_capacity_filter_passes_unknown(self): self._stub_service_is_up(True) filt_cls = self.class_map['CapacityFilter']() @@ -131,8 +123,6 @@ class HostFiltersTestCase(test.TestCase): 'service': service}) self.assertTrue(filt_cls.host_passes(host, filter_properties)) - @testtools.skipIf(not test_utils.is_cinder_installed(), - 'Test requires Cinder installed') def test_retry_filter_disabled(self): # Test case where 
retry/re-scheduling is disabled. filt_cls = self.class_map['RetryFilter']() @@ -140,8 +130,6 @@ class HostFiltersTestCase(test.TestCase): filter_properties = {} self.assertTrue(filt_cls.host_passes(host, filter_properties)) - @testtools.skipIf(not test_utils.is_cinder_installed(), - 'Test requires Cinder installed') def test_retry_filter_pass(self): # Node not previously tried. filt_cls = self.class_map['RetryFilter']() @@ -150,8 +138,6 @@ class HostFiltersTestCase(test.TestCase): filter_properties = dict(retry=retry) self.assertTrue(filt_cls.host_passes(host, filter_properties)) - @testtools.skipIf(not test_utils.is_cinder_installed(), - 'Test requires Cinder installed') def test_retry_filter_fail(self): # Node was already tried. filt_cls = self.class_map['RetryFilter']() diff --git a/cinder/tests/scheduler/test_host_manager.py b/cinder/tests/scheduler/test_host_manager.py index c6588785f..90f0ddd2e 100644 --- a/cinder/tests/scheduler/test_host_manager.py +++ b/cinder/tests/scheduler/test_host_manager.py @@ -143,26 +143,39 @@ class HostManagerTestCase(test.TestCase): self.mox.StubOutWithMock(host_manager.LOG, 'warn') self.mox.StubOutWithMock(host_manager.utils, 'service_is_up') - ret_services = fakes.VOLUME_SERVICES - db.service_get_all_by_topic(context, topic).AndReturn(ret_services) - host_manager.utils.service_is_up(ret_services[0]).AndReturn(True) - host_manager.utils.service_is_up(ret_services[1]).AndReturn(True) - host_manager.utils.service_is_up(ret_services[2]).AndReturn(True) - host_manager.utils.service_is_up(ret_services[3]).AndReturn(True) - host_manager.utils.service_is_up(ret_services[4]).AndReturn(True) + services = [ + dict(id=1, host='host1', topic='volume', disabled=False, + availability_zone='zone1', updated_at=timeutils.utcnow()), + dict(id=2, host='host2', topic='volume', disabled=False, + availability_zone='zone1', updated_at=timeutils.utcnow()), + dict(id=3, host='host3', topic='volume', disabled=False, + availability_zone='zone2', updated_at=timeutils.utcnow()), + dict(id=4, host='host4', topic='volume', disabled=False, + availability_zone='zone3', updated_at=timeutils.utcnow()), + # service on host5 is disabled + dict(id=5, host='host5', topic='volume', disabled=True, + availability_zone='zone4', updated_at=timeutils.utcnow()), + ] + + db.service_get_all_by_topic(context, topic).AndReturn(services) + host_manager.utils.service_is_up(services[0]).AndReturn(True) + host_manager.utils.service_is_up(services[1]).AndReturn(True) + host_manager.utils.service_is_up(services[2]).AndReturn(True) + host_manager.utils.service_is_up(services[3]).AndReturn(True) + host_manager.utils.service_is_up(services[4]).AndReturn(True) # Disabled service host_manager.LOG.warn("volume service is down or disabled. " "(host: host5)") - db.service_get_all_by_topic(context, topic).AndReturn(ret_services) - host_manager.utils.service_is_up(ret_services[0]).AndReturn(True) - host_manager.utils.service_is_up(ret_services[1]).AndReturn(True) - host_manager.utils.service_is_up(ret_services[2]).AndReturn(True) - host_manager.utils.service_is_up(ret_services[3]).AndReturn(False) + db.service_get_all_by_topic(context, topic).AndReturn(services) + host_manager.utils.service_is_up(services[0]).AndReturn(True) + host_manager.utils.service_is_up(services[1]).AndReturn(True) + host_manager.utils.service_is_up(services[2]).AndReturn(True) + host_manager.utils.service_is_up(services[3]).AndReturn(False) # Stopped service host_manager.LOG.warn("volume service is down or disabled. 
" "(host: host4)") - host_manager.utils.service_is_up(ret_services[4]).AndReturn(True) + host_manager.utils.service_is_up(services[4]).AndReturn(True) # Disabled service host_manager.LOG.warn("volume service is down or disabled. " "(host: host5)") @@ -174,7 +187,7 @@ class HostManagerTestCase(test.TestCase): self.assertEqual(len(host_state_map), 4) # Check that service is up for i in xrange(4): - volume_node = fakes.VOLUME_SERVICES[i] + volume_node = services[i] host = volume_node['host'] self.assertEqual(host_state_map[host].service, volume_node) @@ -184,7 +197,7 @@ class HostManagerTestCase(test.TestCase): self.assertEqual(len(host_state_map), 3) for i in xrange(3): - volume_node = fakes.VOLUME_SERVICES[i] + volume_node = services[i] host = volume_node['host'] self.assertEqual(host_state_map[host].service, volume_node) diff --git a/cinder/tests/utils.py b/cinder/tests/utils.py index ccf38cba5..5ee91e8df 100644 --- a/cinder/tests/utils.py +++ b/cinder/tests/utils.py @@ -26,13 +26,6 @@ def get_test_admin_context(): return context.get_admin_context() -def is_cinder_installed(): - if os.path.exists('../../cinder.cinder.egg-info'): - return True - else: - return False - - def create_volume(ctxt, host='test_host', display_name='test_volume', -- 2.45.2