Leverage dict comprehension in PEP-0274
author    ChangBo Guo (gcb) <eric.guo@easystack.cn>
          Wed, 24 Dec 2014 14:42:58 +0000 (22:42 +0800)
committer ChangBo Guo (gcb) <eric.guo@easystack.cn>
          Fri, 22 May 2015 02:06:00 +0000 (10:06 +0800)
PEP-0274 introduced dict comprehensions to replace the dict constructor
with a sequence of key/value pairs [1]. The benefits are:
  First, it makes the code look neater.
  Second, it gains a micro-optimization.

Cinder dropped Python 2.6 support in Kilo, so we can leverage this now.
Note: this commit doesn't handle the kwargs form of the dict constructor.
It also adds a hacking rule (N336) to enforce the new style.

[1] http://legacy.python.org/dev/peps/pep-0274/
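
For illustration, a minimal sketch of the rewrite this commit applies and
of the new N336 check's behaviour; the names 'pairs', 'old_style' and
'new_style' below are illustrative only, not taken from Cinder:

    import re

    # Before: dict constructor fed a sequence of key/value pairs.
    pairs = [('a', 1), ('b', 2)]
    old_style = dict((k, v * 10) for k, v in pairs)

    # After: the equivalent dict comprehension (PEP-0274), available on
    # Python >= 2.7, which Cinder can rely on after dropping 2.6.
    new_style = {k: v * 10 for k, v in pairs}

    assert old_style == new_style == {'a': 10, 'b': 20}

    # The new check flags 'dict(' immediately followed by '(' or '[',
    # using the same regex added to cinder/hacking/checks.py; the kwargs
    # form, e.g. dict(limit=v), is deliberately not flagged.
    n336 = re.compile(r".*\bdict\((\[)?(\(|\[)")
    assert n336.match("x = dict((k, v) for k, v in pairs)")
    assert not n336.match("x = dict(limit=v)")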

Change-Id: Ie65796438160b1aa1f47c8519fb9943e81bff3e9

19 files changed:
HACKING.rst
cinder/api/contrib/quotas.py
cinder/api/v1/limits.py
cinder/api/v1/volumes.py
cinder/api/v2/limits.py
cinder/api/v2/views/volumes.py
cinder/db/sqlalchemy/api.py
cinder/hacking/checks.py
cinder/quota.py
cinder/tests/unit/api/v1/test_limits.py
cinder/tests/unit/api/v2/test_limits.py
cinder/tests/unit/test_db_api.py
cinder/tests/unit/test_gpfs.py
cinder/tests/unit/test_hacking.py
cinder/tests/unit/test_quota.py
cinder/tests/unit/test_volume.py
cinder/tests/unit/test_volume_glance_metadata.py
cinder/volume/api.py
cinder/volume/drivers/ibm/flashsystem.py

index d526dbef8044165b0b26006ecf1aca3e8bdf8e73..88f40adc7af2d0052dff065e728c166e51a852a8 100644 (file)
@@ -17,6 +17,7 @@ Cinder Specific Commandments
 - [N329] LOG.exception and LOG.error messages require translations `_LE()`.
 - [N330] LOG.warning messages require translations `_LW()`.
 - [N333] Ensure that oslo namespaces are used for namespaced libraries.
+- [N336] Must use a dict comprehension instead of a dict constructor with a sequence of key-value pairs.
 - [C301] timeutils.utcnow() from oslo_utils should be used instead of datetime.now().
 - [C302] six.text_type should be used instead of unicode
 - [C303] Ensure that there are no 'print()' statements in code that is being committed.
@@ -25,7 +26,6 @@ Cinder Specific Commandments
 - [C306] timeutils.strtime() must not be used (deprecated).
 - [C307] LOG.warn is deprecated. Enforce use of LOG.warning.
 
-
 General
 -------
 - Use 'raise' instead of 'raise e' to preserve original traceback or exception being reraised::
index 0b1eae7fbe3654f524d2e5e0e5f9088c28c12250..ec0536957795daf29c25f16bea2da3e44b1a10cb 100644 (file)
@@ -76,7 +76,7 @@ class QuotaSetsController(wsgi.Controller):
         if usages:
             return values
         else:
-            return dict((k, v['limit']) for k, v in values.items())
+            return {k: v['limit'] for k, v in values.items()}
 
     @wsgi.serializers(xml=QuotaTemplate)
     def show(self, req, id):
index eca339eb8e0b42d1d8531b7353b20572ca15781d..d8c6afabf53ca9a43427510c0dc071db2e73439f 100644 (file)
@@ -84,7 +84,7 @@ class LimitsController(wsgi.Controller):
         context = req.environ['cinder.context']
         quotas = QUOTAS.get_project_quotas(context, context.project_id,
                                            usages=False)
-        abs_limits = dict((k, v['limit']) for k, v in quotas.items())
+        abs_limits = {k: v['limit'] for k, v in quotas.items()}
         rate_limits = req.environ.get("cinder.limits", [])
 
         builder = self._get_view_builder(req)
@@ -108,7 +108,7 @@ class Limit(object):
         60 * 60 * 24: "DAY",
     }
 
-    UNIT_MAP = dict([(v, k) for k, v in UNITS.items()])
+    UNIT_MAP = {v: k for k, v in UNITS.items()}
 
     def __init__(self, verb, uri, regex, value, unit):
         """Initialize a new `Limit`.
index d7f89f21d949423fbdf4e75ec0db5b0eb0e5eafc..6beba81a869b6401b4c0a086421d48eaabf3934f 100644 (file)
@@ -120,7 +120,7 @@ def _translate_volume_summary_view(context, vol, image_id=None):
 
     if vol.get('volume_metadata'):
         metadata = vol.get('volume_metadata')
-        d['metadata'] = dict((item['key'], item['value']) for item in metadata)
+        d['metadata'] = {item['key']: item['value'] for item in metadata}
     # avoid circular ref when vol is a Volume instance
     elif vol.get('metadata') and isinstance(vol.get('metadata'), dict):
         d['metadata'] = vol['metadata']
index a75f39bbc356c9423e8a9f079b35cffe127ef11e..f3df1c4b90bc2a03c4091ba2260c69c62c789c20 100644 (file)
@@ -84,7 +84,7 @@ class LimitsController(wsgi.Controller):
         context = req.environ['cinder.context']
         quotas = QUOTAS.get_project_quotas(context, context.project_id,
                                            usages=False)
-        abs_limits = dict((k, v['limit']) for k, v in quotas.items())
+        abs_limits = {k: v['limit'] for k, v in quotas.items()}
         rate_limits = req.environ.get("cinder.limits", [])
 
         builder = self._get_view_builder(req)
@@ -108,7 +108,7 @@ class Limit(object):
         60 * 60 * 24: "DAY",
     }
 
-    UNIT_MAP = dict([(v, k) for k, v in UNITS.items()])
+    UNIT_MAP = {v: k for k, v in UNITS.items()}
 
     def __init__(self, verb, uri, regex, value, unit):
         """Initialize a new `Limit`.
index 54cbcd7c448e9ee59f6a953adbac2d8388f584f4..df453fd8b354754ffd9e9691a9f30dda7aa3fa86 100644 (file)
@@ -105,7 +105,7 @@ class ViewBuilder(common.ViewBuilder):
         """Retrieve the metadata of the volume object."""
         if volume.get('volume_metadata'):
             metadata = volume.get('volume_metadata')
-            return dict((item['key'], item['value']) for item in metadata)
+            return {item['key']: item['value'] for item in metadata}
         # avoid circular ref when vol is a Volume instance
         elif volume.get('metadata') and isinstance(volume.get('metadata'),
                                                    dict):
index 17199ad99bbc036dd3d251ec4f76b8ac0674088f..d04f304b99e55d744275fd757a793b77cbb54af4 100644 (file)
@@ -470,8 +470,8 @@ def _dict_with_extra_specs(inst_type_query):
     'extra_specs' : {'k1': 'v1'}
     """
     inst_type_dict = dict(inst_type_query)
-    extra_specs = dict([(x['key'], x['value'])
-                        for x in inst_type_query['extra_specs']])
+    extra_specs = {x['key']: x['value']
+                   for x in inst_type_query['extra_specs']}
     inst_type_dict['extra_specs'] = extra_specs
     return inst_type_dict
 
@@ -742,7 +742,7 @@ def _get_quota_usages(context, session, project_id):
         filter_by(project_id=project_id).\
         with_lockmode('update').\
         all()
-    return dict((row.resource, row) for row in rows)
+    return {row.resource: row for row in rows}
 
 
 @require_context
@@ -877,8 +877,8 @@ def quota_reserve(context, resources, quotas, deltas, expire,
         LOG.warning(_LW("Change will make usage less than 0 for the following "
                         "resources: %s"), unders)
     if overs:
-        usages = dict((k, dict(in_use=v['in_use'], reserved=v['reserved']))
-                      for k, v in usages.items())
+        usages = {k: dict(in_use=v['in_use'], reserved=v['reserved'])
+                  for k, v in usages.items()}
         raise exception.OverQuota(overs=sorted(overs), quotas=quotas,
                                   usages=usages)
 
index 831843de107c58492f13347f868ba010888acebd..d0c13c8c583a213611ac6b38bc27844b0e9fe60d 100644 (file)
@@ -43,6 +43,7 @@ underscore_import_check = re.compile(r"(.)*i18n\s+import\s+_(.)*")
 custom_underscore_check = re.compile(r"(.)*_\s*=\s*(.)*")
 no_audit_log = re.compile(r"(.)*LOG\.audit(.)*")
 no_print_statements = re.compile(r"\s*print\s*\(.+\).*")
+dict_constructor_with_list_copy_re = re.compile(r".*\bdict\((\[)?(\(|\[)")
 
 # NOTE(jsbryant): When other oslo libraries switch over non-namespaced
 # imports, we will need to add them to the regex below.
@@ -303,6 +304,13 @@ def no_log_warn(logical_line):
         yield (0, msg)
 
 
+def dict_constructor_with_list_copy(logical_line):
+    msg = ("N336: Must use a dict comprehension instead of a dict constructor "
+           "with a sequence of key-value pairs.")
+    if dict_constructor_with_list_copy_re.match(logical_line):
+        yield (0, msg)
+
+
 def factory(register):
     register(no_vi_headers)
     register(no_translate_debug_logs)
@@ -319,3 +327,4 @@ def factory(register):
     register(check_no_log_audit)
     register(check_no_contextlib_nested)
     register(no_log_warn)
+    register(dict_constructor_with_list_copy)
index 717955d68c7b25cb71e4668005b4ac4137ab95da..808710f786a0aa5dd2830c2596f5cfb09268ab5d 100644 (file)
@@ -244,8 +244,8 @@ class DbQuotaDriver(object):
         else:
             sync_filt = lambda x: not hasattr(x, 'sync')
         desired = set(keys)
-        sub_resources = dict((k, v) for k, v in resources.items()
-                             if k in desired and sync_filt(v))
+        sub_resources = {k: v for k, v in resources.items()
+                         if k in desired and sync_filt(v)}
 
         # Make sure we accounted for all of them...
         if len(keys) != len(sub_resources):
@@ -257,7 +257,7 @@ class DbQuotaDriver(object):
                                          project_id,
                                          context.quota_class, usages=False)
 
-        return dict((k, v['limit']) for k, v in quotas.items())
+        return {k: v['limit'] for k, v in quotas.items()}
 
     def limit_check(self, context, resources, values, project_id=None):
         """Check simple quota limits.
index fe47ef39160c554ae567a760f9236124b4946997..beff81f69f123c6745362b29ba45bf68f19ccb06 100644 (file)
@@ -55,8 +55,7 @@ class BaseLimitTestSuite(test.TestCase):
         self.absolute_limits = {}
 
         def stub_get_project_quotas(context, project_id, usages=True):
-            return dict((k, dict(limit=v))
-                        for k, v in self.absolute_limits.items())
+            return {k: dict(limit=v) for k, v in self.absolute_limits.items()}
 
         self.stubs.Set(cinder.quota.QUOTAS, "get_project_quotas",
                        stub_get_project_quotas)
index 30b267c5689c0b8edf87a8b3aac745e2587e0cfc..2747204ff8990b2140752e71be9996af71f0d1bb 100644 (file)
@@ -55,8 +55,7 @@ class BaseLimitTestSuite(test.TestCase):
         self.absolute_limits = {}
 
         def stub_get_project_quotas(context, project_id, usages=True):
-            return dict((k, dict(limit=v))
-                        for k, v in self.absolute_limits.items())
+            return {k: dict(limit=v) for k, v in self.absolute_limits.items()}
 
         self.stubs.Set(cinder.quota.QUOTAS, "get_project_quotas",
                        stub_get_project_quotas)
index 85147ae9e8c391f3194d00b98d89a03f3792d684..65e0a83de8c889007dd75ffa5353ea826f7948e8 100644 (file)
@@ -63,8 +63,8 @@ class ModelsObjectComparatorMixin(object):
     def _dict_from_object(self, obj, ignored_keys):
         if ignored_keys is None:
             ignored_keys = []
-        return dict([(k, v) for k, v in obj.iteritems()
-                    if k not in ignored_keys])
+        return {k: v for k, v in obj.iteritems()
+                if k not in ignored_keys}
 
     def _assertEqualObjects(self, obj1, obj2, ignored_keys=None):
         obj1 = self._dict_from_object(obj1, ignored_keys)
@@ -537,8 +537,8 @@ class DBAPIVolumeTestCase(BaseTest):
                 # metadata is a dict, compare the 'key' and 'value' of each
                 if key == 'volume_metadata':
                     self.assertEqual(len(val1), len(val2))
-                    val1_dict = dict((x.key, x.value) for x in val1)
-                    val2_dict = dict((x.key, x.value) for x in val2)
+                    val1_dict = {x.key: x.value for x in val1}
+                    val2_dict = {x.key: x.value for x in val2}
                     self.assertDictMatch(val1_dict, val2_dict)
                 else:
                     self.assertEqual(val1, val2)
@@ -1104,7 +1104,7 @@ class DBAPIEncryptionTestCase(BaseTest):
                 step = str(step)
             return val + step
 
-        return [dict([(k, compose(v, i)) for k, v in values.items()])
+        return [{k: compose(v, i) for k, v in values.items()}
                 for i in range(1, 4)]
 
     def test_volume_type_encryption_create(self):
@@ -1504,7 +1504,7 @@ class DBAPIBackupTestCase(BaseTest):
                 step = str(step)
             return val + step
 
-        return [dict([(k, compose(v, i)) for k, v in base_values.items()])
+        return [{k: compose(v, i) for k, v in base_values.items()}
                 for i in range(1, 4)]
 
     def test_backup_create(self):
index 33315c3535210818e20f6c5890eeefada8272aca..bb13ec0e0bdd65a45808523877e055ef5f98f179 100644 (file)
@@ -650,21 +650,16 @@ class GPFSDriverTestCase(test.TestCase):
     @mock.patch('cinder.volume.drivers.ibm.gpfs.GPFSDriver.'
                 '_gpfs_change_attributes')
     def test_set_volume_attributes(self, mock_change_attributes, mock_mkfs):
-        metadata = [dict([('key', 'data_pool_name'), ('value', 'test')]),
-                    dict([('key', 'replicas'), ('value', 'test')]),
-                    dict([('key', 'dio'), ('value', 'test')]),
-                    dict([('key', 'write_affinity_depth'), ('value', 'test')]),
-                    dict([('key', 'block_group_factor'), ('value', 'test')]),
-                    dict([('key', 'write_affinity_failure_group'),
-                          ('value', 'test')]),
-                    dict([('key', 'test'),
-                          ('value', 'test')]),
-                    dict([('key', 'fstype'),
-                          ('value', 'test')]),
-                    dict([('key', 'fslabel'),
-                          ('value', 'test')]),
-                    dict([('key', 'test'),
-                          ('value', 'test')])]
+        metadata = [{'key': 'data_pool_name', 'value': 'test'},
+                    {'key': 'replicas', 'value': 'test'},
+                    {'key': 'dio', 'value': 'test'},
+                    {'key': 'write_affinity_depth', 'value': 'test'},
+                    {'key': 'block_group_factor', 'value': 'test'},
+                    {'key': 'write_affinity_failure_group', 'value': 'test'},
+                    {'key': 'test', 'value': 'test'},
+                    {'key': 'fstype', 'value': 'test'},
+                    {'key': 'fslabel', 'value': 'test'},
+                    {'key': 'test', 'value': 'test'}]
 
         self.driver._set_volume_attributes('', '', metadata)
 
index 86147e390646a67cbc9b513da7dbd4ef197f5288..4f0cd889453a52db76255dddd5ef72a023fe7d72 100644 (file)
@@ -325,3 +325,34 @@ class HackingTestCase(test.TestCase):
         self.assertEqual(1, len(list(checks.check_no_print_statements(
             "print ('My print with space')",
             "cinder/volume/anotherFile.py", False))))
+
+    def test_dict_constructor_with_list_copy(self):
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            "    dict([(i, connect_info[i])"))))
+
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            "    attrs = dict([(k, _from_json(v))"))))
+
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            "        type_names = dict((value, key) for key, value in"))))
+
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            "   dict((value, key) for key, value in"))))
+
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            "foo(param=dict((k, v) for k, v in bar.items()))"))))
+
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            " dict([[i,i] for i in range(3)])"))))
+
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            "  dd = dict([i,i] for i in range(3))"))))
+
+        self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy(
+            "  dict()"))))
+
+        self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy(
+            "        create_kwargs = dict(snapshot=snapshot,"))))
+
+        self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy(
+            "      self._render_dict(xml, data_el, data.__dict__)"))))
index 0d543e02edd0dca26bd7a08d5869ef755dc0f891..559430a306c6c8c06f49bfc80f23119b3ffe3a34 100644 (file)
@@ -1045,8 +1045,7 @@ class DbQuotaDriverTestCase(test.TestCase):
                                     quota_class=None, defaults=True,
                                     usages=True):
             self.calls.append('get_project_quotas')
-            return dict((k, dict(limit=v.default))
-                        for k, v in resources.items())
+            return {k: dict(limit=v.default) for k, v in resources.items()}
 
         self.stubs.Set(self.driver, 'get_project_quotas',
                        fake_get_project_quotas)
index 14080e75537a984268fc8c9dc15e65fbf5e284c1..7c53a1be9fdd61e16ed4bad120f4ea6fc34a6a24 100644 (file)
@@ -3108,8 +3108,8 @@ class VolumeTestCase(BaseVolumeTestCase):
         # ensure that volume's glance metadata is copied
         # to snapshot's glance metadata
         self.assertEqual(len(vol_glance_meta), len(snap_glance_meta))
-        vol_glance_dict = dict((x.key, x.value) for x in vol_glance_meta)
-        snap_glance_dict = dict((x.key, x.value) for x in snap_glance_meta)
+        vol_glance_dict = {x.key: x.value for x in vol_glance_meta}
+        snap_glance_dict = {x.key: x.value for x in snap_glance_meta}
         self.assertDictMatch(vol_glance_dict, snap_glance_dict)
 
         # ensure that snapshot's status is changed to 'available'
index dec95ac6d363d0d37f54c739aa9b63291298ae64..249d81ab92be8543ef628f0b4c63508097100ead 100644 (file)
@@ -148,7 +148,7 @@ class VolumeGlanceMetadataTestCase(test.TestCase):
         db.volume_glance_metadata_copy_to_volume(self.ctxt, vol2['id'],
                                                  snapshot['id'])
         metadata = db.volume_glance_metadata_get(self.ctxt, vol2['id'])
-        metadata = dict([(m['key'], m['value']) for m in metadata])
+        metadata = {m['key']: m['value'] for m in metadata}
         self.assertEqual(metadata, {'m1': 'v1'})
 
     def test_volume_snapshot_glance_metadata_get_nonexistent(self):
index 70676159d723a3341903a89594da4dcb3f963842..32ffd6b26513654551df5e4b1effc3605b03983d 100644 (file)
@@ -1106,9 +1106,7 @@ class API(base.Base):
         db_data = self.db.volume_glance_metadata_get(context, volume['id'])
         LOG.info(_LI("Get volume image-metadata completed successfully."),
                  resource=volume)
-        return dict(
-            (meta_entry.key, meta_entry.value) for meta_entry in db_data
-        )
+        return {meta_entry.key: meta_entry.value for meta_entry in db_data}
 
     def _check_volume_availability(self, volume, force):
         """Check if the volume can be used."""
@@ -1141,11 +1139,10 @@ class API(base.Base):
                 custom_property_set = (set(volume_image_metadata).difference
                                        (set(glance_core_properties)))
                 if custom_property_set:
-                    metadata.update(dict(properties=dict((custom_property,
-                                                          volume_image_metadata
-                                                          [custom_property])
-                                    for custom_property
-                                    in custom_property_set)))
+                    properties = {custom_property:
+                                  volume_image_metadata[custom_property]
+                                  for custom_property in custom_property_set}
+                    metadata.update(dict(properties=properties))
             except exception.GlanceMetadataNotFound:
                 # If volume is not created from image, No glance metadata
                 # would be available for that volume in
index e3c2bce5cb2cbb4839249143e26a71aef8ddfc47..67b22a2813d0ff49d3be72e352144a9c23e6b152 100644 (file)
@@ -164,8 +164,8 @@ class FlashSystemDriver(san.SanDriver):
 
         host_name = connector['host']
         if isinstance(host_name, six.text_type):
-            unicode_host_name_filter = dict((ord(six.text_type(char)), u'-')
-                                            for char in invalid_ch_in_host)
+            unicode_host_name_filter = {ord(six.text_type(char)): u'-'
+                                        for char in invalid_ch_in_host}
             host_name = host_name.translate(unicode_host_name_filter)
         elif isinstance(host_name, str):
             string_host_name_filter = string.maketrans(
@@ -459,7 +459,7 @@ class FlashSystemDriver(san.SanDriver):
             (_('_get_hdr_dic: attribute headers and values do not match.\n '
                'Headers: %(header)s\n Values: %(row)s.')
              % {'header': six.text_type(header), 'row': six.text_type(row)}))
-        dic = dict((a, v) for a, v in map(None, attributes, values))
+        dic = {a: v for a, v in map(None, attributes, values)}
         return dic
 
     def _get_conn_fc_wwpns(self):