review.fuel-infra Code Review - openstack-build/heat-build.git/commitdiff
heat cleanups to align with pep8 1.3.4
author: Steven Hardy <shardy@redhat.com>
Wed, 19 Dec 2012 13:13:30 +0000 (13:13 +0000)
committer: Steven Hardy <shardy@redhat.com>
Thu, 20 Dec 2012 09:26:16 +0000 (09:26 +0000)
Lots of (mostly whitespace) cleanups to align all the non-test code with
pep8 v1.3.4

ref bug 1092057

Change-Id: I444b288444dba4ec1da5854bd276d091c06d8489
Signed-off-by: Steven Hardy <shardy@redhat.com>
65 files changed:
bin/heat-cfn
bin/heat-engine
heat/api/aws/ec2token.py
heat/api/aws/exception.py
heat/api/aws/utils.py
heat/api/cfn/v1/__init__.py
heat/api/cfn/v1/stacks.py
heat/api/cfn/v1/waitcondition.py
heat/api/cloudwatch/__init__.py
heat/api/cloudwatch/watch.py
heat/api/middleware/version_negotiation.py
heat/cfn_client/boto_client.py
heat/cfn_client/boto_client_cloudwatch.py
heat/cfn_client/client.py
heat/cfn_client/utils.py
heat/common/auth.py
heat/common/auth_token.py
heat/common/client.py
heat/common/config.py
heat/common/context.py
heat/common/heat_keystoneclient.py
heat/common/identifier.py
heat/common/template_format.py
heat/common/wsgi.py
heat/db/api.py
heat/db/sqlalchemy/api.py
heat/db/sqlalchemy/migrate_repo/versions/001_norwhal.py
heat/db/sqlalchemy/migrate_repo/versions/004_guest_watch.py
heat/db/sqlalchemy/migrate_repo/versions/005_user_creds.py
heat/db/sqlalchemy/migrate_repo/versions/012_stack_id_uuid.py
heat/db/sqlalchemy/migrate_repo/versions/013_owner_id_uuid.py
heat/db/sqlalchemy/migrate_repo/versions/014_watch_stackid.py
heat/db/sqlalchemy/migration.py
heat/db/sqlalchemy/models.py
heat/db/sqlalchemy/session.py
heat/engine/api.py
heat/engine/clients.py
heat/engine/parameters.py
heat/engine/parser.py
heat/engine/properties.py
heat/engine/resource.py
heat/engine/resources/cloud_watch.py
heat/engine/resources/instance.py
heat/engine/resources/loadbalancer.py
heat/engine/resources/quantum/floatingip.py
heat/engine/resources/quantum/net.py
heat/engine/resources/quantum/port.py
heat/engine/resources/quantum/quantum.py
heat/engine/resources/quantum/router.py
heat/engine/resources/quantum/subnet.py
heat/engine/resources/s3.py
heat/engine/resources/security_group.py
heat/engine/resources/stack.py
heat/engine/resources/user.py
heat/engine/resources/volume.py
heat/engine/resources/vpc.py
heat/engine/resources/wait_condition.py
heat/engine/service.py
heat/engine/watchrule.py
heat/openstack/common/iniparser.py
heat/openstack/common/log.py
heat/openstack/common/notifier/list_notifier.py
heat/rpc/client.py
heat/testing/runner.py
tools/install_venv.py

index 7887ee4d73fff9d6fe091b8d2a4854e0185380ae..8157a6fede27e6375c8adc86f3d924dc971ea412 100755 (executable)
@@ -366,7 +366,7 @@ def stack_resources_list_details(options, arguments):
     usage = ('''Usage:
 %s resource-list-details stack_name [logical_resource_id]
 %s resource-list-details physical_resource_id [logical_resource_id]''' %
-        (scriptname, scriptname))
+             (scriptname, scriptname))
 
     try:
         name_or_pid = arguments.pop(0)
@@ -545,14 +545,14 @@ def parse_options(parser, cli_args):
 
     if options.debug:
         logging.basicConfig(format='%(levelname)s:%(message)s',
-            level=logging.DEBUG)
+                            level=logging.DEBUG)
         logging.debug("Debug level logging enabled")
     elif options.verbose:
         logging.basicConfig(format='%(levelname)s:%(message)s',
-            level=logging.INFO)
+                            level=logging.INFO)
     else:
         logging.basicConfig(format='%(levelname)s:%(message)s',
-            level=logging.WARNING)
+                            level=logging.WARNING)
 
     return (options, command, args)
 
@@ -574,20 +574,19 @@ def print_help(options, args):
 def lookup_command(parser, command_name):
     base_commands = {'help': print_help}
 
-    stack_commands = {
-                'create': stack_create,
-                'update': stack_update,
-                'delete': stack_delete,
-                'list': stack_list,
-                'events_list': stack_events_list,  # DEPRECATED
-                'event-list': stack_events_list,
-                'resource': stack_resource_show,
-                'resource-list': stack_resources_list,
-                'resource-list-details': stack_resources_list_details,
-                'validate': template_validate,
-                'gettemplate': get_template,
-                'estimate-template-cost': estimate_template_cost,
-                'describe': stack_describe}
+    stack_commands = {'create': stack_create,
+                      'update': stack_update,
+                      'delete': stack_delete,
+                      'list': stack_list,
+                      'events_list': stack_events_list,  # DEPRECATED
+                      'event-list': stack_events_list,
+                      'resource': stack_resource_show,
+                      'resource-list': stack_resources_list,
+                      'resource-list-details': stack_resources_list_details,
+                      'validate': template_validate,
+                      'gettemplate': get_template,
+                      'estimate-template-cost': estimate_template_cost,
+                      'describe': stack_describe}
 
     commands = {}
     for command_set in (base_commands, stack_commands):
index b083b4a4ea4de0d3889560701974b7e2c7612e54..21a10d947a2da0d697acca4f04abd9f8dc79524e 100755 (executable)
@@ -57,7 +57,6 @@ if __name__ == '__main__':
     from heat.engine import service as engine
 
     db_api.configure()
-    srv = engine.EngineService(cfg.CONF.host,
-                                'engine')
+    srv = engine.EngineService(cfg.CONF.host, 'engine')
     launcher = service.launch(srv)
     launcher.wait()
index 5becab2cf096f975416c405ecb9cc644ea0bd278..9088311ef864fa411c1b73b83551122928ef2c17 100644 (file)
@@ -76,7 +76,7 @@ class EC2Token(wsgi.Middleware):
                                     'verb': req.method,
                                     'path': req.path,
                                     'params': auth_params,
-                                   }}
+                                    }}
         creds_json = None
         try:
             creds_json = json.dumps(creds)
index 4450a74f7746395a9c4dfa606f2ec00f92dd1590..60712082cd79157f8fdea6377da04fdd3c0381d4 100644 (file)
@@ -57,7 +57,7 @@ class HeatAPIException(webob.exc.HTTPError):
         else:
             message = self.explanation
         return {'ErrorResponse': {'Error': {'Type': self.err_type,
-            'Code': self.title, 'Message': message}}}
+                'Code': self.title, 'Message': message}}}
 
 
 # Common Error Subclasses:
index 68c9b34d784b85f745775a3ae4514a07064c5d48..dd43e79313e2addf4cc1ce9c9c037a5c81eca8ba 100644 (file)
@@ -51,7 +51,7 @@ def extract_param_pairs(params, prefix='', keyname='', valuename=''):
     """
     plist = extract_param_list(params, prefix)
     kvs = [(p[keyname], p[valuename]) for p in plist
-                                      if keyname in p and valuename in p]
+           if keyname in p and valuename in p]
 
     return dict(kvs)
 
index fa33bfd5e038c9c4e50c2a90be842e98f7e7a0e3..2654e0742f79a66d5a5b47ba67bf0cc76d9a4ab4 100644 (file)
@@ -67,7 +67,7 @@ class API(wsgi.Router):
 
         for action in self._actions:
             mapper.connect("/", controller=stacks_resource, action=action,
-                conditions=conditions(action))
+                           conditions=conditions(action))
 
         mapper.connect("/", controller=stacks_resource, action="index")
 
index 20ccdf1bbdcb3684b54ba9bdadbf0077adf5ac47..15b630e8461b09779c6b6acdbb340386f15bba45 100644 (file)
@@ -74,9 +74,9 @@ class StackController(object):
         Parameters.member.1.ParameterValue
         """
         return api_utils.extract_param_pairs(params,
-                                            prefix='Parameters',
-                                            keyname='ParameterKey',
-                                            valuename='ParameterValue')
+                                             prefix='Parameters',
+                                             keyname='ParameterKey',
+                                             valuename='ParameterValue')
 
     def _get_identity(self, con, stack_name):
         """
@@ -128,7 +128,7 @@ class StackController(object):
             return exception.map_remote_error(ex)
 
         res = {'StackSummaries': [format_stack_summary(s)
-                                   for s in stack_list['stacks']]}
+                                  for s in stack_list['stacks']]}
 
         return api_utils.format_response('ListStacks', res)
 
@@ -145,8 +145,8 @@ class StackController(object):
             }
 
             def replacecolon(d):
-                return dict(map(lambda (k, v):
-                    (k.replace(':', '.'), v), d.items()))
+                return dict(map(lambda (k, v): (k.replace(':', '.'), v),
+                                d.items()))
 
             def transform(attrs):
                 """
@@ -193,9 +193,9 @@ class StackController(object):
             # Reformat Parameters dict-of-dict into AWS API format
             # This is a list-of-dict with nasty "ParameterKey" : key
             # "ParameterValue" : value format.
-            result['Parameters'] = [{'ParameterKey':k,
-                'ParameterValue':v}
-                for (k, v) in result['Parameters'].items()]
+            result['Parameters'] = [{'ParameterKey': k,
+                                    'ParameterValue': v}
+                                    for (k, v) in result['Parameters'].items()]
 
             return self._id_format(result)
 
@@ -237,9 +237,10 @@ class StackController(object):
         return None
 
     CREATE_OR_UPDATE_ACTION = (
-        CREATE_STACK, UPDATE_STACK
-        ) = (
-        "CreateStack", "UpdateStack")
+        CREATE_STACK, UPDATE_STACK,
+    ) = (
+        "CreateStack", "UpdateStack",
+    )
 
     def create(self, req):
         return self.create_or_update(req, self.CREATE_STACK)
@@ -349,7 +350,10 @@ class StackController(object):
         Get the estimated monthly cost of a template
         """
         return api_utils.format_response('EstimateTemplateCost',
-            {'Url': 'http://en.wikipedia.org/wiki/Gratis'})
+                                         {'Url':
+                                          'http://en.wikipedia.org/wiki/Gratis'
+                                          }
+                                         )
 
     def validate_template(self, req):
         """
@@ -421,8 +425,8 @@ class StackController(object):
             }
 
             result = api_utils.reformat_dict_keys(keymap, e)
-            result['ResourceProperties'] = json.dumps(
-                                           result['ResourceProperties'])
+            result['ResourceProperties'] = json.dumps(result[
+                                                      'ResourceProperties'])
 
             return self._id_format(result)
 
@@ -439,7 +443,7 @@ class StackController(object):
         result = [format_stack_event(e) for e in events]
 
         return api_utils.format_response('DescribeStackEvents',
-            {'StackEvents': result})
+                                         {'StackEvents': result})
 
     def describe_stack_resource(self, req):
         """
@@ -472,9 +476,10 @@ class StackController(object):
 
         try:
             identity = self._get_identity(con, req.params['StackName'])
-            resource_details = self.engine_rpcapi.describe_stack_resource(con,
-                        stack_identity=identity,
-                        resource_name=req.params.get('LogicalResourceId'))
+            resource_details = self.engine_rpcapi.describe_stack_resource(
+                con,
+                stack_identity=identity,
+                resource_name=req.params.get('LogicalResourceId'))
 
         except rpc_common.RemoteError as ex:
             return exception.map_remote_error(ex)
@@ -482,7 +487,7 @@ class StackController(object):
         result = format_resource_detail(resource_details)
 
         return api_utils.format_response('DescribeStackResource',
-            {'StackResourceDetail': result})
+                                         {'StackResourceDetail': result})
 
     def describe_stack_resources(self, req):
         """
@@ -530,7 +535,8 @@ class StackController(object):
 
         try:
             identity = self._get_identity(con, stack_name)
-            resources = self.engine_rpcapi.describe_stack_resources(con,
+            resources = self.engine_rpcapi.describe_stack_resources(
+                con,
                 stack_identity=identity,
                 physical_resource_id=physical_resource_id,
                 logical_resource_id=req.params.get('LogicalResourceId'))
@@ -541,7 +547,7 @@ class StackController(object):
         result = [format_stack_resource(r) for r in resources]
 
         return api_utils.format_response('DescribeStackResources',
-            {'StackResources': result})
+                                         {'StackResources': result})
 
     def list_stack_resources(self, req):
         """
@@ -567,15 +573,16 @@ class StackController(object):
 
         try:
             identity = self._get_identity(con, req.params['StackName'])
-            resources = self.engine_rpcapi.list_stack_resources(con,
-                    stack_identity=identity)
+            resources = self.engine_rpcapi.list_stack_resources(
+                con,
+                stack_identity=identity)
         except rpc_common.RemoteError as ex:
             return exception.map_remote_error(ex)
 
         summaries = [format_resource_summary(r) for r in resources]
 
         return api_utils.format_response('ListStackResources',
-            {'StackResourceSummaries': summaries})
+                                         {'StackResourceSummaries': summaries})
 
 
 def create_resource(options):
index c08924da94c6d64d6d8aeedfe64969afdc6a16ab..15be0a70ce8ede5bdc1f7f430d8fffb830905cd4 100644 (file)
@@ -30,10 +30,11 @@ class WaitConditionController:
         con = req.context
         identity = identifier.ResourceIdentifier.from_arn(arn)
         try:
-            md = self.engine.metadata_update(con,
-                     stack_id=dict(identity.stack()),
-                     resource_name=identity.resource_name,
-                     metadata=body)
+            md = self.engine.metadata_update(
+                con,
+                stack_id=dict(identity.stack()),
+                resource_name=identity.resource_name,
+                metadata=body)
         except rpc_common.RemoteError as ex:
             return exception.map_remote_error(ex)
 
index 73076bc26ff30d978d67089eb8e4f8e14e58463d..750c8155e1a35cae691be199539b74a58865f478 100644 (file)
@@ -70,7 +70,7 @@ class API(wsgi.Router):
 
         for action in self._actions:
             mapper.connect("/", controller=controller_resource, action=action,
-                conditions=conditions(action))
+                           conditions=conditions(action))
 
         mapper.connect("/", controller=controller_resource, action="index")
 
index 26de46b86221caa7021eb78fe5266c7310fd5b7d..1e27bb38b9e94c1e9e56512b14e40c40b4f4791f 100644 (file)
@@ -73,37 +73,39 @@ class WatchController(object):
             Reformat engine output into the AWS "MetricAlarm" format
             """
             keymap = {
-            engine_api.WATCH_ACTIONS_ENABLED: 'ActionsEnabled',
-            engine_api.WATCH_ALARM_ACTIONS: 'AlarmActions',
-            engine_api.WATCH_TOPIC: 'AlarmArn',
-            engine_api.WATCH_UPDATED_TIME:
+                engine_api.WATCH_ACTIONS_ENABLED: 'ActionsEnabled',
+                engine_api.WATCH_ALARM_ACTIONS: 'AlarmActions',
+                engine_api.WATCH_TOPIC: 'AlarmArn',
+                engine_api.WATCH_UPDATED_TIME:
                 'AlarmConfigurationUpdatedTimestamp',
-            engine_api.WATCH_DESCRIPTION: 'AlarmDescription',
-            engine_api.WATCH_NAME: 'AlarmName',
-            engine_api.WATCH_COMPARISON: 'ComparisonOperator',
-            engine_api.WATCH_DIMENSIONS: 'Dimensions',
-            engine_api.WATCH_PERIODS: 'EvaluationPeriods',
-            engine_api.WATCH_INSUFFICIENT_ACTIONS: 'InsufficientDataActions',
-            engine_api.WATCH_METRIC_NAME: 'MetricName',
-            engine_api.WATCH_NAMESPACE: 'Namespace',
-            engine_api.WATCH_OK_ACTIONS: 'OKActions',
-            engine_api.WATCH_PERIOD: 'Period',
-            engine_api.WATCH_STATE_REASON: 'StateReason',
-            engine_api.WATCH_STATE_REASON_DATA: 'StateReasonData',
-            engine_api.WATCH_STATE_UPDATED_TIME: 'StateUpdatedTimestamp',
-            engine_api.WATCH_STATE_VALUE: 'StateValue',
-            engine_api.WATCH_STATISTIC: 'Statistic',
-            engine_api.WATCH_THRESHOLD: 'Threshold',
-            engine_api.WATCH_UNIT: 'Unit'}
+                engine_api.WATCH_DESCRIPTION: 'AlarmDescription',
+                engine_api.WATCH_NAME: 'AlarmName',
+                engine_api.WATCH_COMPARISON: 'ComparisonOperator',
+                engine_api.WATCH_DIMENSIONS: 'Dimensions',
+                engine_api.WATCH_PERIODS: 'EvaluationPeriods',
+                engine_api.WATCH_INSUFFICIENT_ACTIONS:
+                'InsufficientDataActions',
+                engine_api.WATCH_METRIC_NAME: 'MetricName',
+                engine_api.WATCH_NAMESPACE: 'Namespace',
+                engine_api.WATCH_OK_ACTIONS: 'OKActions',
+                engine_api.WATCH_PERIOD: 'Period',
+                engine_api.WATCH_STATE_REASON: 'StateReason',
+                engine_api.WATCH_STATE_REASON_DATA: 'StateReasonData',
+                engine_api.WATCH_STATE_UPDATED_TIME: 'StateUpdatedTimestamp',
+                engine_api.WATCH_STATE_VALUE: 'StateValue',
+                engine_api.WATCH_STATISTIC: 'Statistic',
+                engine_api.WATCH_THRESHOLD: 'Threshold',
+                engine_api.WATCH_UNIT: 'Unit'}
 
             # AWS doesn't return StackId in the main MetricAlarm
             # structure, so we add StackId as a dimension to all responses
             a[engine_api.WATCH_DIMENSIONS].append({'StackId':
-                                           a[engine_api.WATCH_STACK_ID]})
+                                                  a[engine_api.WATCH_STACK_ID]
+                                                   })
 
             # Reformat dimensions list into AWS API format
             a[engine_api.WATCH_DIMENSIONS] = self._reformat_dimensions(
-                                             a[engine_api.WATCH_DIMENSIONS])
+                a[engine_api.WATCH_DIMENSIONS])
 
             return api_utils.reformat_dict_keys(keymap, a)
 
@@ -120,7 +122,7 @@ class WatchController(object):
             return exception.map_remote_error(ex)
 
         res = {'MetricAlarms': [format_metric_alarm(a)
-                                   for a in watch_list]}
+                                for a in watch_list]}
 
         result = api_utils.format_response("DescribeAlarms", res)
         return result
@@ -243,9 +245,9 @@ class WatchController(object):
         dimensions = []
         for p in metric_data:
             dimension = api_utils.extract_param_pairs(p,
-                                                   prefix='Dimensions',
-                                                   keyname='Name',
-                                                   valuename='Value')
+                                                      prefix='Dimensions',
+                                                      keyname='Name',
+                                                      valuename='Value')
             if 'AlarmName' in dimension:
                 watch_name = dimension['AlarmName']
             else:
@@ -283,8 +285,8 @@ class WatchController(object):
         """
         # Map from AWS state names to those used in the engine
         state_map = {'OK': engine_api.WATCH_STATE_OK,
-                      'ALARM': engine_api.WATCH_STATE_ALARM,
-                      'INSUFFICIENT_DATA': engine_api.WATCH_STATE_NODATA}
+                     'ALARM': engine_api.WATCH_STATE_ALARM,
+                     'INSUFFICIENT_DATA': engine_api.WATCH_STATE_NODATA}
 
         con = req.context
         parms = dict(req.params)
index de82a00b8e31b2d2c92f4f8aad8c3fa90f08624b..c484d9e01e33f03df4d3298be087c13fee6b1032 100644 (file)
@@ -47,7 +47,7 @@ class VersionNegotiationFilter(wsgi.Middleware):
         # API controller
         msg = _("Processing request: %(method)s %(path)s Accept: "
                 "%(accept)s") % ({'method': req.method,
-                'path': req.path, 'accept': req.accept})
+                                  'path': req.path, 'accept': req.accept})
         logger.debug(msg)
 
         # If the request is for /versions, just return the versions container
@@ -57,7 +57,7 @@ class VersionNegotiationFilter(wsgi.Middleware):
         match = self._match_version_string(req.path_info_peek(), req)
         if match:
             if (req.environ['api.major_version'] == 1 and
-                req.environ['api.minor_version'] == 0):
+                    req.environ['api.minor_version'] == 0):
                 logger.debug(_("Matched versioned URI. Version: %d.%d"),
                              req.environ['api.major_version'],
                              req.environ['api.minor_version'])
@@ -78,7 +78,7 @@ class VersionNegotiationFilter(wsgi.Middleware):
             match = self._match_version_string(accept_version, req)
             if match:
                 if (req.environ['api.major_version'] == 1 and
-                    req.environ['api.minor_version'] == 0):
+                        req.environ['api.minor_version'] == 0):
                     logger.debug(_("Matched versioned media type. "
                                  "Version: %d.%d"),
                                  req.environ['api.major_version'],
index b08d74a8bbdec0c96f65531dd87084e7f689fe76..2914a4f9aa4290b8701645f440830a7df06475fa 100644 (file)
@@ -40,25 +40,29 @@ class BotoClient(CloudFormationConnection):
 
     def create_stack(self, **kwargs):
         if 'TemplateUrl' in kwargs:
-            return super(BotoClient, self).create_stack(kwargs['StackName'],
-                                     template_url=kwargs['TemplateUrl'],
-                                     parameters=kwargs['Parameters'])
+            return super(BotoClient, self).create_stack(
+                kwargs['StackName'],
+                template_url=kwargs['TemplateUrl'],
+                parameters=kwargs['Parameters'])
         elif 'TemplateBody' in kwargs:
-            return super(BotoClient, self).create_stack(kwargs['StackName'],
-                                     template_body=kwargs['TemplateBody'],
-                                     parameters=kwargs['Parameters'])
+            return super(BotoClient, self).create_stack(
+                kwargs['StackName'],
+                template_body=kwargs['TemplateBody'],
+                parameters=kwargs['Parameters'])
         else:
             logger.error("Must specify TemplateUrl or TemplateBody!")
 
     def update_stack(self, **kwargs):
         if 'TemplateUrl' in kwargs:
-            return super(BotoClient, self).update_stack(kwargs['StackName'],
-                                     template_url=kwargs['TemplateUrl'],
-                                     parameters=kwargs['Parameters'])
+            return super(BotoClient, self).update_stack(
+                kwargs['StackName'],
+                template_url=kwargs['TemplateUrl'],
+                parameters=kwargs['Parameters'])
         elif 'TemplateBody' in kwargs:
-            return super(BotoClient, self).update_stack(kwargs['StackName'],
-                                     template_body=kwargs['TemplateBody'],
-                                     parameters=kwargs['Parameters'])
+            return super(BotoClient, self).update_stack(
+                kwargs['StackName'],
+                template_body=kwargs['TemplateBody'],
+                parameters=kwargs['Parameters'])
         else:
             logger.error("Must specify TemplateUrl or TemplateBody!")
 
@@ -67,11 +71,11 @@ class BotoClient(CloudFormationConnection):
 
     def list_stack_events(self, **kwargs):
         return super(BotoClient, self).describe_stack_events(
-                     kwargs['StackName'])
+            kwargs['StackName'])
 
     def describe_stack_resource(self, **kwargs):
         return super(BotoClient, self).describe_stack_resource(
-                     kwargs['StackName'], kwargs['LogicalResourceId'])
+            kwargs['StackName'], kwargs['LogicalResourceId'])
 
     def describe_stack_resources(self, **kwargs):
         # Check if this is a StackName, if not assume it's a physical res ID
@@ -83,29 +87,29 @@ class BotoClient(CloudFormationConnection):
         stack_names = [s.stack_name for s in list_stacks]
         if kwargs['NameOrPid'] in stack_names:
             logger.debug("Looking up resources for StackName:%s" %
-                          kwargs['NameOrPid'])
+                         kwargs['NameOrPid'])
             return super(BotoClient, self).describe_stack_resources(
-                         stack_name_or_id=kwargs['NameOrPid'],
-                         logical_resource_id=kwargs['LogicalResourceId'])
+                stack_name_or_id=kwargs['NameOrPid'],
+                logical_resource_id=kwargs['LogicalResourceId'])
         else:
             logger.debug("Looking up resources for PhysicalResourceId:%s" %
-                          kwargs['NameOrPid'])
+                         kwargs['NameOrPid'])
             return super(BotoClient, self).describe_stack_resources(
-                         stack_name_or_id=None,
-                         logical_resource_id=kwargs['LogicalResourceId'],
-                         physical_resource_id=kwargs['NameOrPid'])
+                stack_name_or_id=None,
+                logical_resource_id=kwargs['LogicalResourceId'],
+                physical_resource_id=kwargs['NameOrPid'])
 
     def list_stack_resources(self, **kwargs):
         return super(BotoClient, self).list_stack_resources(
-                     kwargs['StackName'])
+            kwargs['StackName'])
 
     def validate_template(self, **kwargs):
         if 'TemplateUrl' in kwargs:
             return super(BotoClient, self).validate_template(
-                         template_url=kwargs['TemplateUrl'])
+                template_url=kwargs['TemplateUrl'])
         elif 'TemplateBody' in kwargs:
             return super(BotoClient, self).validate_template(
-                         template_body=kwargs['TemplateBody'])
+                template_body=kwargs['TemplateBody'])
         else:
             logger.error("Must specify TemplateUrl or TemplateBody!")
 
@@ -115,14 +119,14 @@ class BotoClient(CloudFormationConnection):
     def estimate_template_cost(self, **kwargs):
         if 'TemplateUrl' in kwargs:
             return super(BotoClient, self).estimate_template_cost(
-                         kwargs['StackName'],
-                         template_url=kwargs['TemplateUrl'],
-                         parameters=kwargs['Parameters'])
+                kwargs['StackName'],
+                template_url=kwargs['TemplateUrl'],
+                parameters=kwargs['Parameters'])
         elif 'TemplateBody' in kwargs:
             return super(BotoClient, self).estimate_template_cost(
-                         kwargs['StackName'],
-                         template_body=kwargs['TemplateBody'],
-                         parameters=kwargs['Parameters'])
+                kwargs['StackName'],
+                template_body=kwargs['TemplateBody'],
+                parameters=kwargs['Parameters'])
         else:
             logger.error("Must specify TemplateUrl or TemplateBody!")
 
@@ -139,7 +143,7 @@ class BotoClient(CloudFormationConnection):
             ret.append("ResourceProperties : %s" % event.resource_properties)
             ret.append("ResourceStatus : %s" % event.resource_status)
             ret.append("ResourceStatusReason : %s" %
-                        event.resource_status_reason)
+                       event.resource_status_reason)
             ret.append("ResourceType : %s" % event.resource_type)
             ret.append("StackId : %s" % event.stack_id)
             ret.append("StackName : %s" % event.stack_name)
@@ -180,7 +184,7 @@ class BotoClient(CloudFormationConnection):
             ret.append("PhysicalResourceId : %s" % res.physical_resource_id)
             ret.append("ResourceStatus : %s" % res.resource_status)
             ret.append("ResourceStatusReason : %s" %
-                        res.resource_status_reason)
+                       res.resource_status_reason)
             ret.append("ResourceType : %s" % res.resource_type)
             ret.append("StackId : %s" % res.stack_id)
             ret.append("StackName : %s" % res.stack_name)
@@ -196,12 +200,12 @@ class BotoClient(CloudFormationConnection):
         ret = []
         for res in resources:
             ret.append("LastUpdatedTimestamp : %s" %
-                        res.last_updated_timestamp)
+                       res.last_updated_timestamp)
             ret.append("LogicalResourceId : %s" % res.logical_resource_id)
             ret.append("PhysicalResourceId : %s" % res.physical_resource_id)
             ret.append("ResourceStatus : %s" % res.resource_status)
             ret.append("ResourceStatusReason : %s" %
-                        res.resource_status_reason)
+                       res.resource_status_reason)
             ret.append("ResourceType : %s" % res.resource_type)
             ret.append("--")
         return '\n'.join(ret)
@@ -218,7 +222,7 @@ class BotoClient(CloudFormationConnection):
         For now, we format the dict response as a workaround
         '''
         resource_detail = res['DescribeStackResourceResponse'][
-                   'DescribeStackResourceResult']['StackResourceDetail']
+            'DescribeStackResourceResult']['StackResourceDetail']
         ret = []
         for key in resource_detail:
             ret.append("%s : %s" % (key, resource_detail[key]))
@@ -285,8 +289,10 @@ def get_client(host, port=None, username=None,
     # Also note is_secure is defaulted to False as HTTPS connections
     # don't seem to work atm, FIXME
     cloudformation = BotoClient(aws_access_key_id=aws_access_key,
-        aws_secret_access_key=aws_secret_key, is_secure=False,
-        port=port, path="/v1")
+                                aws_secret_access_key=aws_secret_key,
+                                is_secure=False,
+                                port=port,
+                                path="/v1")
     if cloudformation:
         logger.debug("Got CF connection object OK")
     else:
index 76f0199aa8a33b5e35c080a7b53968edc4f328c5..2258bdd04021a30b39597fded717ceef21382c2d 100644 (file)
@@ -34,12 +34,12 @@ class BotoCWClient(CloudWatchConnection):
     # TODO : These should probably go in the CW API and be imported
     DEFAULT_NAMESPACE = "heat/unknown"
     METRIC_UNITS = ("Seconds", "Microseconds", "Milliseconds", "Bytes",
-                  "Kilobytes", "Megabytes", "Gigabytes", "Terabytes",
-                  "Bits", "Kilobits", "Megabits", "Gigabits", "Terabits",
-                  "Percent", "Count", "Bytes/Second", "Kilobytes/Second",
-                  "Megabytes/Second", "Gigabytes/Second", "Terabytes/Second",
-                  "Bits/Second", "Kilobits/Second", "Megabits/Second",
-                  "Gigabits/Second", "Terabits/Second", "Count/Second", None)
+                    "Kilobytes", "Megabytes", "Gigabytes", "Terabytes",
+                    "Bits", "Kilobits", "Megabits", "Gigabits", "Terabits",
+                    "Percent", "Count", "Bytes/Second", "Kilobytes/Second",
+                    "Megabytes/Second", "Gigabytes/Second", "Terabytes/Second",
+                    "Bits/Second", "Kilobits/Second", "Megabits/Second",
+                    "Gigabits/Second", "Terabits/Second", "Count/Second", None)
     METRIC_COMPARISONS = (">=", ">", "<", "<=")
     ALARM_STATES = ("OK", "ALARM", "INSUFFICIENT_DATA")
     METRIC_STATISTICS = ("Average", "Sum", "SampleCount", "Maximum", "Minimum")
@@ -56,7 +56,7 @@ class BotoCWClient(CloudWatchConnection):
         except KeyError:
             name = None
         return super(BotoCWClient, self).describe_alarms(
-                     alarm_names=[name])
+            alarm_names=[name])
 
     def list_metrics(self, **kwargs):
         # list_metrics returns non-null index in next_token if there
@@ -75,10 +75,10 @@ class BotoCWClient(CloudWatchConnection):
         token = None
         while True:
             results.append(super(BotoCWClient, self).list_metrics(
-                                 next_token=token,
-                                 dimensions=None,
-                                 metric_name=name,
-                                 namespace=None))
+                           next_token=token,
+                           dimensions=None,
+                           metric_name=name,
+                           namespace=None))
             if not token:
                 break
 
@@ -94,8 +94,8 @@ class BotoCWClient(CloudWatchConnection):
             metric_value = kwargs['MetricValue']
             metric_namespace = kwargs['Namespace']
         except KeyError:
-            logger.error("Must pass MetricName, MetricUnit, " +\
-                          "Namespace, MetricValue!")
+            logger.error("Must pass MetricName, MetricUnit, " +
+                         "Namespace, MetricValue!")
             return
 
         try:
@@ -116,20 +116,20 @@ class BotoCWClient(CloudWatchConnection):
             return
 
         return super(BotoCWClient, self).put_metric_data(
-                     namespace=metric_namespace,
-                     name=metric_name,
-                     value=metric_value,
-                     timestamp=None,  # This means use "now" in the engine
-                     unit=metric_unit,
-                     dimensions=metric_dims,
-                     statistics=None)
+            namespace=metric_namespace,
+            name=metric_name,
+            value=metric_value,
+            timestamp=None,  # This means use "now" in the engine
+            unit=metric_unit,
+            dimensions=metric_dims,
+            statistics=None)
 
     def set_alarm_state(self, **kwargs):
         return super(BotoCWClient, self).set_alarm_state(
-                     alarm_name=kwargs['AlarmName'],
-                     state_reason=kwargs['StateReason'],
-                     state_value=kwargs['StateValue'],
-                     state_reason_data=kwargs['StateReasonData'])
+            alarm_name=kwargs['AlarmName'],
+            state_reason=kwargs['StateReason'],
+            state_value=kwargs['StateValue'],
+            state_reason_data=kwargs['StateReasonData'])
 
     def format_metric_alarm(self, alarms):
         '''
@@ -144,19 +144,19 @@ class BotoCWClient(CloudWatchConnection):
             ret.append("AlarmActions : %s" % s.alarm_actions)
             ret.append("AlarmArn : %s" % s.alarm_arn)
             ret.append("AlarmConfigurationUpdatedTimestamp : %s" %
-                        s.last_updated)
+                       s.last_updated)
             ret.append("ComparisonOperator : %s" % s.comparison)
             ret.append("Dimensions : %s" % s.dimensions)
             ret.append("EvaluationPeriods : %s" % s.evaluation_periods)
             ret.append("InsufficientDataActions : %s" %
-                        s.insufficient_data_actions)
+                       s.insufficient_data_actions)
             ret.append("MetricName : %s" % s.metric)
             ret.append("Namespace : %s" % s.namespace)
             ret.append("OKActions : %s" % s.ok_actions)
             ret.append("Period : %s" % s.period)
             ret.append("StateReason : %s" % s.state_reason)
             ret.append("StateUpdatedTimestamp : %s" %
-                        s.last_updated)
+                       s.last_updated)
             ret.append("StateValue : %s" % s.state_value)
             ret.append("Statistic : %s" % s.statistic)
             ret.append("Threshold : %s" % s.threshold)
@@ -199,8 +199,10 @@ def get_client(port=None, aws_access_key=None, aws_secret_key=None):
     # Also note is_secure is defaulted to False as HTTPS connections
     # don't seem to work atm, FIXME
     cloudwatch = BotoCWClient(aws_access_key_id=aws_access_key,
-        aws_secret_access_key=aws_secret_key, is_secure=False,
-        port=port, path="/v1")
+                              aws_secret_access_key=aws_secret_key,
+                              is_secure=False,
+                              port=port,
+                              path="/v1")
     if cloudwatch:
         logger.debug("Got CW connection object OK")
     else:
index c00db9c51bd74c36e4809f869f5704ea74c186bb..49dc3e59f1af5f81c4250020c8c958c969060dcc 100644 (file)
@@ -31,7 +31,7 @@ SUPPORTED_PARAMS = ('StackName', 'TemplateBody', 'TemplateUrl',
                     'SignatureVersion', 'Timestamp', 'AWSAccessKeyId',
                     'Signature', 'TimeoutInMinutes',
                     'LogicalResourceId', 'PhysicalResourceId', 'NextToken',
-)
+                    )
 
 
 class V1Client(base_client.BaseClient):
@@ -85,7 +85,7 @@ class V1Client(base_client.BaseClient):
                 'LogicalResourceId': kwargs['LogicalResourceId']}
             try:
                 result = self.stack_request("DescribeStackResources", "GET",
-                                        **parameters)
+                                            **parameters)
             except Exception:
                 logger.debug("Failed to lookup resource details with key %s:%s"
                              % (lookup_key, lookup_value))
@@ -167,8 +167,7 @@ def get_client(host, port=None, username=None,
                  tenant=tenant,
                  auth_url=auth_url,
                  strategy=force_strategy or auth_strategy,
-                 region=region,
-    )
+                 region=region)
 
     if creds['strategy'] == 'keystone' and not creds['auth_url']:
         msg = ("--auth_url option or OS_AUTH_URL environment variable "
@@ -176,14 +175,14 @@ def get_client(host, port=None, username=None,
         raise exception.ClientConfigurationError(msg)
 
     use_ssl = (creds['auth_url'] is not None and
-        creds['auth_url'].find('https') != -1)
+               creds['auth_url'].find('https') != -1)
 
     client = HeatClient
 
     return client(host=host,
-                port=port,
-                use_ssl=use_ssl,
-                auth_tok=auth_token,
-                creds=creds,
-                insecure=insecure,
-                service_type='cloudformation')
+                  port=port,
+                  use_ssl=use_ssl,
+                  auth_tok=auth_token,
+                  creds=creds,
+                  insecure=insecure,
+                  service_type='cloudformation')
index 9370e27226f4a3b150769d35894ed6ac5bebe0eb..85e02224a665adaccc08c21e63184aac37f8fea3 100644 (file)
@@ -33,15 +33,14 @@ def catch_error(action):
                 return SUCCESS if ret is None else ret
             except exception.NotAuthorized:
                 LOG.error("Not authorized to make this request. Check " +
-                      "your credentials (OS_USERNAME, OS_PASSWORD, " +
-                      "OS_TENANT_NAME, OS_AUTH_URL and OS_AUTH_STRATEGY).")
+                          "your credentials (OS_USERNAME, OS_PASSWORD, " +
+                          "OS_TENANT_NAME, OS_AUTH_URL and OS_AUTH_STRATEGY).")
                 return FAILURE
             except exception.ClientConfigurationError:
                 raise
             except exception.KeystoneError, e:
                 LOG.error("Keystone did not finish the authentication and "
-                              "returned the following message:\n\n%s"
-                              % e.message)
+                          "returned the following message:\n\n%s" % e.message)
                 return FAILURE
             except Exception, e:
                 options = arguments[0]
index b4eb389222b3b2e365b1491da4dc87b674e75e5a..9d40a040bbf832f02c4979ea0423cbde0c05ca08 100644 (file)
@@ -196,7 +196,7 @@ class KeystoneStrategy(BaseStrategy):
             region_matches = lambda e: region is None or e['region'] == region
 
             endpoints = [ep for s in service_catalog if service_type_matches(s)
-                            for ep in s['endpoints'] if region_matches(ep)]
+                         for ep in s['endpoints'] if region_matches(ep)]
 
             if len(endpoints) > 1:
                 raise exception.RegionAmbiguity(region=region)
@@ -213,17 +213,14 @@ class KeystoneStrategy(BaseStrategy):
                 "tenantName": creds['tenant'],
                 "passwordCredentials": {
                     "username": creds['username'],
-                    "password": creds['password']
-                    }
-                }
-            }
+                    "password": creds['password']}}}
 
         headers = {}
         headers['Content-Type'] = 'application/json'
         req_body = json.dumps(creds)
 
         resp, resp_body = self._do_request(
-                token_url, 'POST', headers=headers, body=req_body)
+            token_url, 'POST', headers=headers, body=req_body)
 
         if resp.status == 200:
             resp_auth = json.loads(resp_body)['access']
index e7b4d6450230de16f2d10f8414bacbbe698ba20a..bf558b31f53854f3072b922b9a0be15eb7eb1902 100644 (file)
@@ -208,7 +208,7 @@ class AuthProtocol(object):
             'X-Role',
         )
         LOG.debug('Removing headers from request environment: %s' %
-                     ','.join(auth_headers))
+                  ','.join(auth_headers))
         self._remove_headers(env, auth_headers)
 
     def _get_user_token_from_header(self, env):
@@ -363,7 +363,7 @@ class AuthProtocol(object):
             self.admin_token = None
         else:
             LOG.error('Bad response code while validating token: %s' %
-                         response.status)
+                      response.status)
         if retry:
             LOG.info('Retrying validation')
             return self._validate_user_token(user_token, False)
index f24991557b38afc558e479dcae8b0c1cfc8199d4..b253194e85202dc97309e4659509a6134061f2fb 100644 (file)
@@ -301,21 +301,21 @@ class BaseClient(object):
                 raise exception.ClientConnectionError(msg)
 
             if (self.key_file is not None and
-                not os.path.exists(self.key_file)):
+                    not os.path.exists(self.key_file)):
                 msg = _("The key file you specified %s does not "
                         "exist") % self.key_file
                 raise exception.ClientConnectionError(msg)
             connect_kwargs['key_file'] = self.key_file
 
             if (self.cert_file is not None and
-                not os.path.exists(self.cert_file)):
+                    not os.path.exists(self.cert_file)):
                 msg = _("The cert file you specified %s does not "
                         "exist") % self.cert_file
                 raise exception.ClientConnectionError(msg)
             connect_kwargs['cert_file'] = self.cert_file
 
             if (self.ca_file is not None and
-                not os.path.exists(self.ca_file)):
+                    not os.path.exists(self.ca_file)):
                 msg = _("The CA file you specified %s does not "
                         "exist") % self.ca_file
                 raise exception.ClientConnectionError(msg)
index 16cf488a1b92b039d3f073605e74c2ef4746de1c..eb262327537e03bee53ecd581deac291aeda3b46 100644 (file)
@@ -34,80 +34,80 @@ DEFAULT_PORT = 8000
 paste_deploy_group = cfg.OptGroup('paste_deploy')
 paste_deploy_opts = [
     cfg.StrOpt('flavor'),
-    cfg.StrOpt('config_file'),
-    ]
+    cfg.StrOpt('config_file')]
 
 
-bind_opts = [cfg.IntOpt('bind_port', default=8000),
-             cfg.StrOpt('bind_host', default='127.0.0.1')]
+bind_opts = [
+    cfg.IntOpt('bind_port', default=8000),
+    cfg.StrOpt('bind_host', default='127.0.0.1')]
 
 service_opts = [
-cfg.IntOpt('report_interval',
-           default=10,
-           help='seconds between nodes reporting state to datastore'),
-cfg.IntOpt('periodic_interval',
-           default=60,
-           help='seconds between running periodic tasks'),
-cfg.StrOpt('ec2_listen',
-           default="0.0.0.0",
-           help='IP address for EC2 API to listen'),
-cfg.IntOpt('ec2_listen_port',
-           default=8773,
-           help='port for ec2 api to listen'),
-cfg.StrOpt('osapi_compute_listen',
-           default="0.0.0.0",
-           help='IP address for OpenStack API to listen'),
-cfg.IntOpt('osapi_compute_listen_port',
-           default=8774,
-           help='list port for osapi compute'),
-cfg.StrOpt('osapi_volume_listen',
-           default="0.0.0.0",
-           help='IP address for OpenStack Volume API to listen'),
-cfg.IntOpt('osapi_volume_listen_port',
-           default=8776,
-           help='port for os volume api to listen'),
-cfg.StrOpt('heat_metadata_server_url',
-           default="",
-           help='URL of the Heat metadata server'),
-cfg.StrOpt('heat_waitcondition_server_url',
-           default="",
-           help='URL of the Heat waitcondition server'),
-cfg.StrOpt('heat_watch_server_url',
-           default="",
-           help='URL of the Heat cloudwatch server'),
-cfg.StrOpt('heat_stack_user_role',
-           default="heat_stack_user",
-           help='Keystone role for heat template-defined users'),
-]
+    cfg.IntOpt('report_interval',
+               default=10,
+               help='seconds between nodes reporting state to datastore'),
+    cfg.IntOpt('periodic_interval',
+               default=60,
+               help='seconds between running periodic tasks'),
+    cfg.StrOpt('ec2_listen',
+               default="0.0.0.0",
+               help='IP address for EC2 API to listen'),
+    cfg.IntOpt('ec2_listen_port',
+               default=8773,
+               help='port for ec2 api to listen'),
+    cfg.StrOpt('osapi_compute_listen',
+               default="0.0.0.0",
+               help='IP address for OpenStack API to listen'),
+    cfg.IntOpt('osapi_compute_listen_port',
+               default=8774,
+               help='list port for osapi compute'),
+    cfg.StrOpt('osapi_volume_listen',
+               default="0.0.0.0",
+               help='IP address for OpenStack Volume API to listen'),
+    cfg.IntOpt('osapi_volume_listen_port',
+               default=8776,
+               help='port for os volume api to listen'),
+    cfg.StrOpt('heat_metadata_server_url',
+               default="",
+               help='URL of the Heat metadata server'),
+    cfg.StrOpt('heat_waitcondition_server_url',
+               default="",
+               help='URL of the Heat waitcondition server'),
+    cfg.StrOpt('heat_watch_server_url',
+               default="",
+               help='URL of the Heat cloudwatch server'),
+    cfg.StrOpt('heat_stack_user_role',
+               default="heat_stack_user",
+               help='Keystone role for heat template-defined users')]
+
 db_opts = [
-cfg.StrOpt('sql_connection',
-           default='mysql://heat:heat@localhost/heat',
-           help='The SQLAlchemy connection string used to connect to the '
-                'database'),
-cfg.IntOpt('sql_idle_timeout',
-           default=3600,
-           help='timeout before idle sql connections are reaped'),
-]
+    cfg.StrOpt('sql_connection',
+               default='mysql://heat:heat@localhost/heat',
+               help='The SQLAlchemy connection string used to connect to the '
+               'database'),
+    cfg.IntOpt('sql_idle_timeout',
+               default=3600,
+               help='timeout before idle sql connections are reaped')]
+
 engine_opts = [
-cfg.StrOpt('instance_driver',
-           default='heat.engine.nova',
-           help='Driver to use for controlling instances'),
-cfg.ListOpt('plugin_dirs',
-            default=['/usr/lib64/heat', '/usr/lib/heat'],
-            help='List of directories to search for Plugins'),
-]
+    cfg.StrOpt('instance_driver',
+               default='heat.engine.nova',
+               help='Driver to use for controlling instances'),
+    cfg.ListOpt('plugin_dirs',
+                default=['/usr/lib64/heat', '/usr/lib/heat'],
+                help='List of directories to search for Plugins')]
+
 rpc_opts = [
-cfg.StrOpt('host',
-           default=socket.gethostname(),
-           help='Name of the engine node.  This can be an opaque identifier.'
-                'It is not necessarily a hostname, FQDN, or IP address.'),
-cfg.StrOpt('control_exchange',
-           default='heat',
-           help='AMQP exchange to connect to if using RabbitMQ or Qpid'),
-cfg.StrOpt('engine_topic',
-           default='engine',
-           help='the topic engine nodes listen on')
-]
+    cfg.StrOpt('host',
+               default=socket.gethostname(),
+               help='Name of the engine node. '
+                    'This can be an opaque identifier.'
+                    'It is not necessarily a hostname, FQDN, or IP address.'),
+    cfg.StrOpt('control_exchange',
+               default='heat',
+               help='AMQP exchange to connect to if using RabbitMQ or Qpid'),
+    cfg.StrOpt('engine_topic',
+               default='engine',
+               help='the topic engine nodes listen on')]
 
 
 def register_api_opts():
index a135755d60ec5eb6fe66658471208d73ba3afd22..0bdca8e150ac13d95a91f4b08ec8f69455bd07a9 100644 (file)
@@ -114,10 +114,8 @@ def get_admin_context(read_deleted="no"):
 
 class ContextMiddleware(wsgi.Middleware):
 
-    opts = [
-        cfg.BoolOpt('owner_is_tenant', default=True),
-        cfg.StrOpt('admin_role', default='admin'),
-        ]
+    opts = [cfg.BoolOpt('owner_is_tenant', default=True),
+            cfg.StrOpt('admin_role', default='admin')]
 
     def __init__(self, app, conf, **local_conf):
         cfg.CONF.register_opts(self.opts)
index 43083caae56d5a70c8aa7464bced6c6d09404a9c..48444a53a12ada66814dd28f7eff8ef136e93f14 100644 (file)
@@ -73,7 +73,7 @@ class KeystoneClient(object):
         # deployed on an instance (hence are implicitly untrusted)
         roles = self.client.roles.list()
         stack_user_role = [r.id for r in roles
-                         if r.name == cfg.CONF.heat_stack_user_role]
+                           if r.name == cfg.CONF.heat_stack_user_role]
         if len(stack_user_role) == 1:
             role_id = stack_user_role[0]
             logger.debug("Adding user %s to role %s" % (user.id, role_id))
index 9739ee50ca023fbac4ec50590ab696657cef6e08..9e6f0e11f87193289a20f849befb2ba9782d581f 100644 (file)
@@ -75,7 +75,7 @@ class HeatIdentifier(collections.Mapping):
         # Sanity check the URL
         urlp = urlparse.urlparse(url)
         if (urlp.scheme not in ('http', 'https') or
-           not urlp.netloc or not urlp.path):
+                not urlp.netloc or not urlp.path):
             raise ValueError('"%s" is not a valid URL' % url)
 
         # Remove any query-string and extract the ARN
index 7e120ded4c52b2c1856c9d5e1a92c30121dacc69..e3679d28b4543cedd7c3cfe410f5187b93c11bfb 100644 (file)
@@ -43,10 +43,10 @@ def parse(tmpl_str):
         except yaml.scanner.ScannerError as e:
             raise ValueError(e)
         else:
-            if tpl == None:
+            if tpl is None:
                 tpl = {}
             default_for_missing(tpl, u'HeatTemplateFormatVersion',
-                HEAT_VERSIONS)
+                                HEAT_VERSIONS)
     return tpl
 
 
@@ -73,7 +73,7 @@ def convert_json_to_yaml(json_str):
     global key_order
     # Replace AWS format version with Heat format version
     json_str = re.sub('"AWSTemplateFormatVersion"\s*:\s*"[^"]+"\s*,',
-        '', json_str)
+                      '', json_str)
 
     # insert a sortable order into the key to preserve file ordering
     key_order = 0
index 53813e550dc85c5d0e46b3ee02a5311c6e955451..02a046dc5d264ab41df7a11a9f7fe176997f316e 100644 (file)
@@ -102,8 +102,8 @@ def get_socket(conf, default_port):
     # support IPv6 in getaddrinfo(). We need to get around this in the
     # future or monitor upstream for a fix
     address_family = [addr[0] for addr in socket.getaddrinfo(bind_addr[0],
-            bind_addr[1], socket.AF_UNSPEC, socket.SOCK_STREAM)
-            if addr[0] in (socket.AF_INET, socket.AF_INET6)][0]
+                      bind_addr[1], socket.AF_UNSPEC, socket.SOCK_STREAM)
+                      if addr[0] in (socket.AF_INET, socket.AF_INET6)][0]
 
     conf.register_opts(socket_opts)
 
@@ -239,10 +239,11 @@ class Server(object):
         eventlet.patcher.monkey_patch(all=False, socket=True)
         self.pool = eventlet.GreenPool(size=self.threads)
         try:
-            eventlet_wsgi_server(self.sock, self.application,
-                    custom_pool=self.pool,
-                    url_length_limit=URL_LENGTH_LIMIT,
-                    log=WritableLogger(self.logger))
+            eventlet_wsgi_server(self.sock,
+                                 self.application,
+                                 custom_pool=self.pool,
+                                 url_length_limit=URL_LENGTH_LIMIT,
+                                 log=WritableLogger(self.logger))
         except socket.error, err:
             if err[0] != errno.EINVAL:
                 raise
index abac96ca068cb6030c75d09ccb18d7d496e2cbea..dc93bf888c392f78c1df58f190481af2fa3e2897 100644 (file)
@@ -34,8 +34,7 @@ SQL_IDLE_TIMEOUT = 3600
 db_opts = [
     cfg.StrOpt('db_backend',
                default='sqlalchemy',
-               help='The backend to use for db'),
-    ]
+               help='The backend to use for db')]
 
 IMPL = utils.LazyPluggable('db_backend',
                            sqlalchemy='heat.db.sqlalchemy.api')
index 9cb14de3fdc86141a2e41c64cd452bf45cba7502..5f9b2ef9da9ca3610761e70b6ce99bd24fb33140 100644 (file)
@@ -69,8 +69,8 @@ def resource_get(context, resource_id):
 
 def resource_get_by_name_and_stack(context, resource_name, stack_id):
     result = model_query(context, models.Resource).\
-                        filter_by(name=resource_name).\
-                        filter_by(stack_id=stack_id).first()
+        filter_by(name=resource_name).\
+        filter_by(stack_id=stack_id).first()
 
     return result
 
@@ -80,7 +80,7 @@ def resource_get_by_physical_resource_id(context, physical_resource_id):
               .filter_by(nova_instance=physical_resource_id)
               .first())
     if (result is not None and context is not None and
-        result.stack.tenant != context.tenant_id):
+            result.stack.tenant != context.tenant_id):
         return None
     return result
 
@@ -103,7 +103,7 @@ def resource_create(context, values):
 
 def resource_get_all_by_stack(context, stack_id):
     results = model_query(context, models.Resource).\
-                filter_by(stack_id=stack_id).all()
+        filter_by(stack_id=stack_id).all()
 
     if not results:
         raise NotFound("no resources for stack_id %s were found" % stack_id)
@@ -113,9 +113,9 @@ def resource_get_all_by_stack(context, stack_id):
 
 def stack_get_by_name(context, stack_name, owner_id=None):
     query = model_query(context, models.Stack).\
-                        filter_by(tenant=context.tenant_id).\
-                        filter_by(name=stack_name).\
-                        filter_by(owner_id=owner_id)
+        filter_by(tenant=context.tenant_id).\
+        filter_by(name=stack_name).\
+        filter_by(owner_id=owner_id)
 
     return query.first()
 
@@ -129,7 +129,7 @@ def stack_get(context, stack_id, admin=False):
         return result
 
     if (result is not None and context is not None and
-        result.tenant != context.tenant_id):
+            result.tenant != context.tenant_id):
         return None
 
     return result
@@ -137,14 +137,14 @@ def stack_get(context, stack_id, admin=False):
 
 def stack_get_all(context):
     results = model_query(context, models.Stack).\
-                         filter_by(owner_id=None).all()
+        filter_by(owner_id=None).all()
     return results
 
 
 def stack_get_all_by_tenant(context):
     results = model_query(context, models.Stack).\
-                         filter_by(owner_id=None).\
-                         filter_by(tenant=context.tenant_id).all()
+        filter_by(owner_id=None).\
+        filter_by(tenant=context.tenant_id).all()
     return results
 
 
@@ -160,7 +160,7 @@ def stack_update(context, stack_id, values):
 
     if not stack:
         raise NotFound('Attempt to update a stack with id: %s %s' %
-                        (stack_id, 'that does not exist'))
+                      (stack_id, 'that does not exist'))
 
     old_template_id = stack.raw_template_id
 
@@ -180,7 +180,7 @@ def stack_delete(context, stack_id):
     s = stack_get(context, stack_id)
     if not s:
         raise NotFound('Attempt to delete a stack with id: %s %s' %
-                        (stack_id, 'that does not exist'))
+                      (stack_id, 'that does not exist'))
 
     session = Session.object_session(s)
 
@@ -236,18 +236,18 @@ def event_get_all(context):
 
 def event_get_all_by_tenant(context):
     stacks = model_query(context, models.Stack).\
-                          filter_by(tenant=context.tenant_id).all()
+        filter_by(tenant=context.tenant_id).all()
     results = []
     for stack in stacks:
         results.extend(model_query(context, models.Event).
-                                   filter_by(stack_id=stack.id).all())
+                       filter_by(stack_id=stack.id).all())
 
     return results
 
 
 def event_get_all_by_stack(context, stack_id):
     results = model_query(context, models.Event).\
-                        filter_by(stack_id=stack_id).all()
+        filter_by(stack_id=stack_id).all()
 
     return results
 
@@ -261,13 +261,13 @@ def event_create(context, values):
 
 def watch_rule_get(context, watch_rule_id):
     result = model_query(context, models.WatchRule).\
-                        filter_by(id=watch_rule_id).first()
+        filter_by(id=watch_rule_id).first()
     return result
 
 
 def watch_rule_get_by_name(context, watch_rule_name):
     result = model_query(context, models.WatchRule).\
-                        filter_by(name=watch_rule_name).first()
+        filter_by(name=watch_rule_name).first()
     return result
 
 
@@ -278,7 +278,7 @@ def watch_rule_get_all(context):
 
 def watch_rule_get_all_by_stack(context, stack_id):
     results = model_query(context, models.WatchRule).\
-                          filter_by(stack_id=stack_id).all()
+        filter_by(stack_id=stack_id).all()
     return results
 
 
@@ -294,7 +294,7 @@ def watch_rule_update(context, watch_id, values):
 
     if not wr:
         raise NotFound('Attempt to update a watch with id: %s %s' %
-                        (watch_id, 'that does not exist'))
+                      (watch_id, 'that does not exist'))
 
     wr.update(values)
     wr.save(_session(context))
@@ -302,11 +302,11 @@ def watch_rule_update(context, watch_id, values):
 
 def watch_rule_delete(context, watch_name):
     wr = model_query(context, models.WatchRule).\
-                        filter_by(name=watch_name).first()
+        filter_by(name=watch_name).first()
 
     if not wr:
         raise NotFound('Attempt to delete a watch_rule with name: %s %s' %
-                        (watch_name, 'that does not exist'))
+                      (watch_name, 'that does not exist'))
 
     session = Session.object_session(wr)
 
@@ -331,11 +331,11 @@ def watch_data_get_all(context):
 
 def watch_data_delete(context, watch_name):
     ds = model_query(context, models.WatchRule).\
-                     filter_by(name=watch_name).all()
+        filter_by(name=watch_name).all()
 
     if not ds:
         raise NotFound('Attempt to delete watch_data with name: %s %s' %
-                        (watch_name, 'that does not exist'))
+                      (watch_name, 'that does not exist'))
 
     session = Session.object_session(ds)
     for d in ds:
index fc2c13373fc0beec5f1639f9b379b4ab5abb9de1..84d901c5f06da07555c3bd662bba6e76577a7229 100644 (file)
@@ -19,9 +19,12 @@ def upgrade(migrate_engine):
         Column('id', Integer, primary_key=True),
         Column('created_at', DateTime(timezone=False)),
         Column('updated_at', DateTime(timezone=False)),
-        Column('name', String(length=255, convert_unicode=False,
-                      assert_unicode=None,
-                      unicode_error=None, _warn_on_bytestring=False)),
+        Column('name', String(
+            length=255,
+            convert_unicode=False,
+            assert_unicode=None,
+            unicode_error=None,
+            _warn_on_bytestring=False)),
         Column('raw_template_id', Integer, ForeignKey("raw_template.id"),
                nullable=False),
     )
@@ -32,27 +35,37 @@ def upgrade(migrate_engine):
         Column('stack_id', Integer, ForeignKey("stack.id"), nullable=False),
         Column('created_at', DateTime(timezone=False)),
         Column('updated_at', DateTime(timezone=False)),
-        Column('name', String(length=255, convert_unicode=False,
-                      assert_unicode=None,
-                      unicode_error=None, _warn_on_bytestring=False)),
+        Column('name', String(
+            length=255,
+            convert_unicode=False,
+            assert_unicode=None,
+            unicode_error=None,
+            _warn_on_bytestring=False)),
     )
 
     resource = Table(
         'resource', meta,
         Column('id', Integer, primary_key=True),
-        Column('nova_instance', String(length=255, convert_unicode=False,
-              assert_unicode=None,
-              unicode_error=None, _warn_on_bytestring=False)),
-        Column('name', String(length=255, convert_unicode=False,
-              assert_unicode=None,
-              unicode_error=None, _warn_on_bytestring=False)),
-
+        Column('nova_instance', String(
+            length=255,
+            convert_unicode=False,
+            assert_unicode=None,
+            unicode_error=None,
+            _warn_on_bytestring=False)),
+        Column('name', String(
+            length=255,
+            convert_unicode=False,
+            assert_unicode=None,
+            unicode_error=None,
+            _warn_on_bytestring=False)),
         Column('created_at', DateTime(timezone=False)),
         Column('updated_at', DateTime(timezone=False)),
-        Column('state', String(length=255, convert_unicode=False,
-                                           assert_unicode=None,
-                                           unicode_error=None,
-                                           _warn_on_bytestring=False)),
+        Column('state', String(
+            length=255,
+            convert_unicode=False,
+            assert_unicode=None,
+            unicode_error=None,
+            _warn_on_bytestring=False)),
         Column('state_description', String(length=255, convert_unicode=False,
                                            assert_unicode=None,
                                            unicode_error=None,
index ac4b89f7ed9f466a68ef00581e293aa0b3f7b788..ed93612195dddc9bcb8656d7e61a5201fd80beae 100644 (file)
@@ -11,9 +11,11 @@ def upgrade(migrate_engine):
         Column('id', Integer, primary_key=True),
         Column('created_at', DateTime(timezone=False)),
         Column('updated_at', DateTime(timezone=False)),
-        Column('stack_name', String(length=255, convert_unicode=False,
-                              assert_unicode=None,
-                              unicode_error=None, _warn_on_bytestring=False)),
+        Column('stack_name', String(length=255,
+                                    convert_unicode=False,
+                                    assert_unicode=None,
+                                    unicode_error=None,
+                                    _warn_on_bytestring=False)),
         Column('name', String(length=255, convert_unicode=False,
                               assert_unicode=None,
                               unicode_error=None, _warn_on_bytestring=False)),
index 4846c19dfa5b66b2d7edfbf9200d1811a375c22f..705862edf99c28afa897cad7928672ea12bf50a7 100644 (file)
@@ -33,9 +33,9 @@ def upgrade(migrate_engine):
         Column('auth_url', Text()),
         Column('aws_auth_url', Text()),
         Column('tenant_id', String(length=256, convert_unicode=False,
-                                assert_unicode=None,
-                                unicode_error=None,
-                                _warn_on_bytestring=False)),
+                                   assert_unicode=None,
+                                   unicode_error=None,
+                                   _warn_on_bytestring=False)),
         Column('aws_creds', Text())
     )
 
index e87e7f1c1e5bb627db3a1d68f9ffe60e4cebd003..b2127f781762cd1effdb9df44a04e8a21f1f4bf3 100644 (file)
@@ -16,18 +16,21 @@ def upgrade(migrate_engine):
         fkeys = list(event.c.stack_id.foreign_keys)
         if fkeys:
             fkey_name = fkeys[0].constraint.name
-            ForeignKeyConstraint(columns=[event.c.stack_id],
-                    refcolumns=[stack.c.id],
-                    name=fkey_name).drop()
+            ForeignKeyConstraint(
+                columns=[event.c.stack_id],
+                refcolumns=[stack.c.id],
+                name=fkey_name).drop()
 
         fkeys = list(resource.c.stack_id.foreign_keys)
         if fkeys:
             fkey_name = fkeys[0].constraint.name
-            ForeignKeyConstraint(columns=[resource.c.stack_id],
-                                 refcolumns=[stack.c.id],
-                                 name=fkey_name).drop()
+            ForeignKeyConstraint(
+                columns=[resource.c.stack_id],
+                refcolumns=[stack.c.id],
+                name=fkey_name).drop()
 
-    stack.c.id.alter(String(36), primary_key=True,
+    stack.c.id.alter(
+        String(36), primary_key=True,
         default=uuidutils.generate_uuid)
     event.c.stack_id.alter(String(36), nullable=False)
     resource.c.stack_id.alter(String(36), nullable=False)
@@ -35,16 +38,18 @@ def upgrade(migrate_engine):
     fkeys = list(event.c.stack_id.foreign_keys)
     if fkeys:
         fkey_name = fkeys[0].constraint.name
-        ForeignKeyConstraint(columns=[event.c.stack_id],
-                refcolumns=[stack.c.id],
-                name=fkey_name).create()
+        ForeignKeyConstraint(
+            columns=[event.c.stack_id],
+            refcolumns=[stack.c.id],
+            name=fkey_name).create()
 
     fkeys = list(resource.c.stack_id.foreign_keys)
     if fkeys:
         fkey_name = fkeys[0].constraint.name
-        ForeignKeyConstraint(columns=[resource.c.stack_id],
-                             refcolumns=[stack.c.id],
-                             name=fkey_name).create()
+        ForeignKeyConstraint(
+            columns=[resource.c.stack_id],
+            refcolumns=[stack.c.id],
+            name=fkey_name).create()
 
 
 def downgrade(migrate_engine):
@@ -63,18 +68,21 @@ def downgrade(migrate_engine):
     fkeys = list(event.c.stack_id.foreign_keys)
     if fkeys:
         fkey_name = fkeys[0].constraint.name
-        ForeignKeyConstraint(columns=[event.c.stack_id],
-                refcolumns=[stack.c.id],
-                name=fkey_name).drop()
+        ForeignKeyConstraint(
+            columns=[event.c.stack_id],
+            refcolumns=[stack.c.id],
+            name=fkey_name).drop()
 
     fkeys = list(resource.c.stack_id.foreign_keys)
     if fkeys:
         fkey_name = fkeys[0].constraint.name
-        ForeignKeyConstraint(columns=[resource.c.stack_id],
-                             refcolumns=[stack.c.id],
-                             name=fkey_name).drop()
+        ForeignKeyConstraint(
+            columns=[resource.c.stack_id],
+            refcolumns=[stack.c.id],
+            name=fkey_name).drop()
 
-    stack.c.id.alter(Integer, primary_key=True,
+    stack.c.id.alter(
+        Integer, primary_key=True,
         default=utils.generate_uuid)
     event.c.stack_id.alter(Integer, nullable=False)
     resource.c.stack_id.alter(Integer, nullable=False)
@@ -82,13 +90,15 @@ def downgrade(migrate_engine):
     fkeys = list(event.c.stack_id.foreign_keys)
     if fkeys:
         fkey_name = fkeys[0].constraint.name
-        ForeignKeyConstraint(columns=[event.c.stack_id],
-                refcolumns=[stack.c.id],
-                name=fkey_name).create()
+        ForeignKeyConstraint(
+            columns=[event.c.stack_id],
+            refcolumns=[stack.c.id],
+            name=fkey_name).create()
 
     fkeys = list(resource.c.stack_id.foreign_keys)
     if fkeys:
         fkey_name = fkeys[0].constraint.name
-        ForeignKeyConstraint(columns=[resource.c.stack_id],
-                             refcolumns=[stack.c.id],
-                             name=fkey_name).create()
+        ForeignKeyConstraint(
+            columns=[resource.c.stack_id],
+            refcolumns=[stack.c.id],
+            name=fkey_name).create()
index 1d5d6e2c763c925864d04d97fcd4cfa1beb85ac3..e0825c4cf8a6af2d37a25936c72ffdb6081a9c3e 100644 (file)
@@ -13,18 +13,20 @@ def upgrade(migrate_engine):
         fkeys = list(stack.c.owner_id.foreign_keys)
         if fkeys:
             fkey_name = fkeys[0].constraint.name
-            ForeignKeyConstraint(columns=[stack.c.owner_id],
-                    refcolumns=[stack.c.id],
-                    name=fkey_name).drop()
+            ForeignKeyConstraint(columns=[
+                stack.c.owner_id],
+                refcolumns=[stack.c.id],
+                name=fkey_name).drop()
 
     stack.c.owner_id.alter(String(36), nullable=True)
 
     fkeys = list(stack.c.owner_id.foreign_keys)
     if fkeys:
         fkey_name = fkeys[0].constraint.name
-        ForeignKeyConstraint(columns=[stack.c.owner_id],
-                refcolumns=[stack.c.id],
-                name=fkey_name).create()
+        ForeignKeyConstraint(
+            columns=[stack.c.owner_id],
+            refcolumns=[stack.c.id],
+            name=fkey_name).create()
 
 
 def downgrade(migrate_engine):
@@ -41,22 +43,25 @@ def downgrade(migrate_engine):
     fkeys = list(stack.c.owner_id.foreign_keys)
     if fkeys:
         fkey_name = fkeys[0].constraint.name
-        ForeignKeyConstraint(columns=[stack.c.owner_id],
-                refcolumns=[stack.c.id],
-                name=fkey_name).drop()
+        ForeignKeyConstraint(
+            columns=[stack.c.owner_id],
+            refcolumns=[stack.c.id],
+            name=fkey_name).drop()
 
     stack.c.owner_id.alter(Integer, nullable=True)
 
     fkeys = list(event.c.stack_id.foreign_keys)
     if fkeys:
         fkey_name = fkeys[0].constraint.name
-        ForeignKeyConstraint(columns=[event.c.stack_id],
-                refcolumns=[stack.c.id],
-                name=fkey_name).create()
+        ForeignKeyConstraint(
+            columns=[event.c.stack_id],
+            refcolumns=[stack.c.id],
+            name=fkey_name).create()
 
     fkeys = list(stack.c.owner_id.foreign_keys)
     if fkeys:
         fkey_name = fkeys[0].constraint.name
-        ForeignKeyConstraint(columns=[stack.c.owner_id],
-                refcolumns=[stack.c.id],
-                name=fkey_name).create()
+        ForeignKeyConstraint(
+            columns=[stack.c.owner_id],
+            refcolumns=[stack.c.id],
+            name=fkey_name).create()
index 6baa8d41a6b48cd9f6c621e2ff7f52a61022035d..707928d6de4f393715ca274922863cbeed355b53 100644 (file)
@@ -9,7 +9,7 @@ def upgrade(migrate_engine):
     watch_rule = Table('watch_rule', meta, autoload=True)
 
     Column('stack_id', String(length=36), ForeignKey("stack.id"),
-               nullable=False).create(watch_rule)
+           nullable=False).create(watch_rule)
 
     watch_rule.c.stack_name.drop()
 
index 95af8d91cbe4960e212d40001aa37c51ad94a760..4ac9d6dcdfbd90c26eee7cfc5d6ab452bfd69975 100644 (file)
@@ -43,8 +43,8 @@ def patched_with_engine(f, *a, **kw):
 #                on that version or higher, this can be removed
 MIN_PKG_VERSION = dist_version.StrictVersion('0.7.3')
 if (not hasattr(migrate, '__version__') or
-    dist_version.StrictVersion(migrate.__version__) < MIN_PKG_VERSION):
-    migrate_util.with_engine = patched_with_engine
+        dist_version.StrictVersion(migrate.__version__) < MIN_PKG_VERSION):
+        migrate_util.with_engine = patched_with_engine
 
 
 # NOTE(jkoelker) Delay importing migrate until we are patched
@@ -90,7 +90,7 @@ def db_version():
         meta.reflect(bind=engine)
         try:
             for table in ('stack', 'resource', 'event',
-                            'parsed_template', 'raw_template'):
+                          'parsed_template', 'raw_template'):
                 assert table in meta.tables
             return db_version_control(1)
         except AssertionError:
index 125e4bbd842d8a88314c6661ba42ea8f057bb7a7..6de48bf6b2dc5de74cd187950fa8c2c3a682420c 100644 (file)
@@ -144,20 +144,22 @@ class Stack(BASE, HeatBase):
 
     __tablename__ = 'stack'
 
-    id = Column(String, primary_key=True,
-        default=uuidutils.generate_uuid)
+    id = Column(String, primary_key=True, default=uuidutils.generate_uuid)
     name = Column(String)
-    raw_template_id = Column(Integer, ForeignKey('raw_template.id'),
-                            nullable=False)
-    raw_template = relationship(RawTemplate,
-        backref=backref('stack'))
+    raw_template_id = Column(
+        Integer,
+        ForeignKey('raw_template.id'),
+        nullable=False)
+    raw_template = relationship(RawTemplate, backref=backref('stack'))
     username = Column(String)
     tenant = Column(String)
     status = Column('status', String)
     status_reason = Column('status_reason', String)
     parameters = Column('parameters', Json)
-    user_creds_id = Column(Integer, ForeignKey('user_creds.id'),
-                           nullable=False)
+    user_creds_id = Column(
+        Integer,
+        ForeignKey('user_creds.id'),
+        nullable=False)
     owner_id = Column(Integer, nullable=True)
     timeout = Column(Integer)
 
@@ -180,8 +182,7 @@ class UserCreds(BASE, HeatBase):
     aws_auth_url = Column(String)
     tenant_id = Column(String)
     aws_creds = Column(String)
-    stack = relationship(Stack,
-        backref=backref('user_creds'))
+    stack = relationship(Stack, backref=backref('user_creds'))
 
 
 class Event(BASE, HeatBase):
@@ -190,10 +191,8 @@ class Event(BASE, HeatBase):
     __tablename__ = 'event'
 
     id = Column(Integer, primary_key=True)
-    stack_id = Column(String, ForeignKey('stack.id'),
-                        nullable=False)
-    stack = relationship(Stack,
-        backref=backref('events'))
+    stack_id = Column(String, ForeignKey('stack.id'), nullable=False)
+    stack = relationship(Stack, backref=backref('events'))
 
     name = Column(String)
     logical_resource_id = Column(String)
@@ -216,8 +215,7 @@ class Resource(BASE, HeatBase):
     # odd name as "metadata" is reserved
     rsrc_metadata = Column('rsrc_metadata', Json)
 
-    stack_id = Column(String, ForeignKey('stack.id'),
-                                 nullable=False)
+    stack_id = Column(String, ForeignKey('stack.id'), nullable=False)
     stack = relationship(Stack, backref=backref('resources'))
 
 
@@ -232,8 +230,7 @@ class WatchRule(BASE, HeatBase):
     state = Column('state', String)
     last_evaluated = Column(DateTime, default=timeutils.utcnow)
 
-    stack_id = Column(String, ForeignKey('stack.id'),
-                                 nullable=False)
+    stack_id = Column(String, ForeignKey('stack.id'), nullable=False)
     stack = relationship(Stack, backref=backref('watch_rule'))
 
 
@@ -245,6 +242,8 @@ class WatchData(BASE, HeatBase):
     id = Column(Integer, primary_key=True)
     data = Column('data', Json)
 
-    watch_rule_id = Column(Integer, ForeignKey('watch_rule.id'),
-                           nullable=False)
+    watch_rule_id = Column(
+        Integer,
+        ForeignKey('watch_rule.id'),
+        nullable=False)
     watch_rule = relationship(WatchRule, backref=backref('watch_data'))
index b2623163864c6d08ed7b7824f4fe9413f1b30de5..7764c0f8d26b24c329aab636d5679b2f4de158b6 100644 (file)
@@ -87,9 +87,10 @@ def get_engine():
 
 def get_maker(engine, autocommit=True, expire_on_commit=False):
     """Return a SQLAlchemy sessionmaker using the given engine."""
-    ses = sqlalchemy.orm.sessionmaker(bind=engine,
-                                       autocommit=autocommit,
-                                       expire_on_commit=expire_on_commit)
+    ses = sqlalchemy.orm.sessionmaker(
+        bind=engine,
+        autocommit=autocommit,
+        expire_on_commit=expire_on_commit)
     return sqlalchemy.orm.scoped_session(ses)
 
 
index db337e32da35fe9d114fbf0853adcbba442e06b7..95d8eb76425773ff74854b832fa1b3e223849850 100644 (file)
@@ -145,7 +145,7 @@ def format_watch(watch):
         WATCH_STATE_REASON: watch.rule.get(RULE_STATE_REASON),
         WATCH_STATE_REASON_DATA: watch.rule.get(RULE_STATE_REASON_DATA),
         WATCH_STATE_UPDATED_TIME: timeutils.isotime(
-                                  watch.rule.get(RULE_STATE_UPDATED_TIME)),
+            watch.rule.get(RULE_STATE_UPDATED_TIME)),
         WATCH_STATE_VALUE: watch.state,
         WATCH_STATISTIC: watch.rule.get(RULE_STATISTIC),
         WATCH_THRESHOLD: watch.rule.get(RULE_THRESHOLD),
index 5868313c48691cba3f6c12f3abef8b38e8c092e4..6ac05054f989eacd312b9807b2bab71dc8d45f98 100644 (file)
@@ -116,7 +116,7 @@ class Clients(object):
             # Lookup endpoint for object-store service type
             service_type = 'object-store'
             endpoints = self.keystone().service_catalog.get_endpoints(
-                        service_type=service_type)
+                service_type=service_type)
             if len(endpoints[service_type]) == 1:
                 args['preauthurl'] = endpoints[service_type][0]['publicURL']
             else:
@@ -155,7 +155,7 @@ class Clients(object):
             args['token'] = con.auth_token
         else:
             logger.error("Quantum connection failed, "
-                "no password or auth_token!")
+                         "no password or auth_token!")
             return None
         logger.debug('quantum args %s', args)
 
index da26625ac1ea488c1313e1b7f122e2c7a9e49db3..3b6d56b77bd9c054e02b86450b5301cfb2800d30 100644 (file)
@@ -262,8 +262,8 @@ class Parameters(collections.Mapping):
         Map the supplied filter function onto each Parameter (with an
         optional filter function) and return the resulting dictionary.
         '''
-        return dict((n, func(p)) for n, p in self.params.iteritems()
-                                 if filter_func(p))
+        return dict((n, func(p))
+                    for n, p in self.params.iteritems() if filter_func(p))
 
     def user_parameters(self):
         '''
index 56cb057a28c9a0f9b685b8f91311674c994a22b7..461e6c2e60a84dbd3c3d814a4f77b702259e8a57 100644 (file)
@@ -277,7 +277,7 @@ class Stack(object):
                 for res in reversed(self):
                     if not res.name in newstack.keys():
                         logger.debug("resource %s not found in updated stack"
-                                      % res.name + " definition, deleting")
+                                     % res.name + " definition, deleting")
                         result = res.destroy()
                         if result:
                             failures.append('Resource %s delete failed'
@@ -289,7 +289,7 @@ class Stack(object):
                 for res in newstack:
                     if not res.name in self.keys():
                         logger.debug("resource %s not found in current stack"
-                                      % res.name + " definition, adding")
+                                     % res.name + " definition, adding")
                         res.stack = self
                         self[res.name] = res
                         result = self[res.name].create()
@@ -313,8 +313,8 @@ class Stack(object):
                 # Currently all resource have a default handle_update method
                 # which returns "requires replacement" (res.UPDATE_REPLACE)
                 for res in newstack:
-                    if self.resolve_runtime_data(
-                        self[res.name].t) != self.resolve_runtime_data(res.t):
+                    if self.resolve_runtime_data(self[res.name].t) !=\
+                            self.resolve_runtime_data(res.t):
 
                         # Can fail if underlying resource class does not
                         # implement update logic or update requires replacement
@@ -337,7 +337,7 @@ class Stack(object):
                                                     % res.name)
                         else:
                             logger.warning("Cannot update resource %s," %
-                                            res.name + " reason %s" % retval)
+                                           res.name + " reason %s" % retval)
                             failures.append('Resource %s update failed'
                                             % res.name)
 
index a3af960ef213a8bb1b5243a449ed187e096b2f1d..00a657cc2357dd613950877956d545c8f6cf5e83 100644 (file)
@@ -29,8 +29,8 @@ class Property(object):
         for key in self.schema:
             assert key in SCHEMA_KEYS, 'Unknown schema key "%s"' % key
 
-        assert self.type() in SCHEMA_TYPES, \
-               'Unknown property type "%s"' % self.type()
+        assert self.type() in SCHEMA_TYPES,\
+            'Unknown property type "%s"' % self.type()
 
     def required(self):
         return self.schema.get(REQUIRED, False)
@@ -103,7 +103,7 @@ class Property(object):
 
     def _validate_list(self, value):
         if (not isinstance(value, collections.Sequence) or
-            isinstance(value, basestring)):
+                isinstance(value, basestring)):
             raise TypeError('"%s" is not a list' % repr(value))
 
         for v in value:
index a9f580363cd23d1ac719810093529592ff7006c8..9280620765b5ac4570f3db04cad1a5a7c8746557 100644 (file)
@@ -45,7 +45,7 @@ def _register_class(resource_type, resource_class):
     logger.info(_('Registering resource type %s') % resource_type)
     if resource_type in _resource_classes:
         logger.warning(_('Replacing existing resource type %s') %
-                resource_type)
+                       resource_type)
 
     _resource_classes[resource_type] = resource_class
 
index 492a9e715c0525f91407a2598cffde44b9991099..79a27a8bd1d5b95646030f5e7ea668161efa6fc4 100644 (file)
@@ -28,28 +28,49 @@ class CloudWatchAlarm(resource.Resource):
                          'AllowedValues': ['GreaterThanOrEqualToThreshold',
                          'GreaterThanThreshold', 'LessThanThreshold',
                          'LessThanOrEqualToThreshold']},
-        'AlarmDescription': {'Type': 'String'},
-        'EvaluationPeriods': {'Type': 'String'},
-        'MetricName': {'Type': 'String'},
-        'Namespace': {'Type': 'String'},
-        'Period': {'Type': 'String'},
-        'Statistic': {'Type': 'String',
-                      'AllowedValues': ['SampleCount', 'Average', 'Sum',
-                                        'Minimum', 'Maximum']},
-        'AlarmActions': {'Type': 'List'},
-        'OKActions': {'Type': 'List'},
-        'Dimensions': {'Type': 'List'},
-        'InsufficientDataActions': {'Type': 'List'},
-        'Threshold': {'Type': 'String'},
-        'Units': {'Type': 'String',
-                  'AllowedValues': ['Seconds', 'Microseconds', 'Milliseconds',
-                  'Bytes', 'Kilobytes', 'Megabytes', 'Gigabytes',
-                  'Terabytes', 'Bits', 'Kilobits', 'Megabits', 'Gigabits',
-                  'Terabits', 'Percent', 'Count', 'Bytes/Second',
-                  'Kilobytes/Second', 'Megabytes/Second', 'Gigabytes/Second',
-                  'Terabytes/Second', 'Bits/Second', 'Kilobits/Second',
-                  'Megabits/Second', 'Gigabits/Second', 'Terabits/Second',
-                  'Count/Second', None]}}
+                         'AlarmDescription': {'Type': 'String'},
+                         'EvaluationPeriods': {'Type': 'String'},
+                         'MetricName': {'Type': 'String'},
+                         'Namespace': {'Type': 'String'},
+                         'Period': {'Type': 'String'},
+                         'Statistic': {'Type': 'String',
+                                       'AllowedValues': ['SampleCount',
+                                                         'Average',
+                                                         'Sum',
+                                                         'Minimum',
+                                                         'Maximum']},
+                         'AlarmActions': {'Type': 'List'},
+                         'OKActions': {'Type': 'List'},
+                         'Dimensions': {'Type': 'List'},
+                         'InsufficientDataActions': {'Type': 'List'},
+                         'Threshold': {'Type': 'String'},
+                         'Units': {'Type': 'String',
+                                   'AllowedValues': ['Seconds',
+                                                     'Microseconds',
+                                                     'Milliseconds',
+                                                     'Bytes',
+                                                     'Kilobytes',
+                                                     'Megabytes',
+                                                     'Gigabytes',
+                                                     'Terabytes',
+                                                     'Bits',
+                                                     'Kilobits',
+                                                     'Megabits',
+                                                     'Gigabits',
+                                                     'Terabits',
+                                                     'Percent',
+                                                     'Count',
+                                                     'Bytes/Second',
+                                                     'Kilobytes/Second',
+                                                     'Megabytes/Second',
+                                                     'Gigabytes/Second',
+                                                     'Terabytes/Second',
+                                                     'Bits/Second',
+                                                     'Kilobits/Second',
+                                                     'Megabits/Second',
+                                                     'Gigabits/Second',
+                                                     'Terabits/Second',
+                                                     'Count/Second', None]}}
 
     strict_dependency = False
 
index e2fc243600885e3ef71d947a9eb84e2a2be49d75..5836c3a94292521e359e89a9a3d8844b070c224d 100644 (file)
@@ -51,7 +51,7 @@ class Restarter(resource.Resource):
 
         if victim is None:
             logger.info('%s Alarm, can not find instance %s' %
-                    (self.name, self.properties['InstanceId']))
+                       (self.name, self.properties['InstanceId']))
             return
 
         logger.info('%s Alarm, restarting resource: %s' %
@@ -67,9 +67,9 @@ class Instance(resource.Resource):
                              'Required': True}}
 
     properties_schema = {'ImageId': {'Type': 'String',
-                                    'Required': True},
+                                     'Required': True},
                          'InstanceType': {'Type': 'String',
-                                    'Required': True},
+                                          'Required': True},
                          'KeyName': {'Type': 'String',
                                      'Required': True},
                          'AvailabilityZone': {'Type': 'String',
@@ -92,7 +92,7 @@ class Instance(resource.Resource):
                          'SourceDestCheck': {'Type': 'Boolean',
                                              'Implemented': False},
                          'SubnetId': {'Type': 'String',
-                                       'Implemented': False},
+                                      'Implemented': False},
                          'Tags': {'Type': 'List',
                                   'Schema': {'Type': 'Map',
                                              'Schema': tags_schema}},
@@ -210,11 +210,11 @@ class Instance(resource.Resource):
         return self.mime_string
 
     def handle_create(self):
-        if self.properties.get('SecurityGroups') == None:
+        if self.properties.get('SecurityGroups') is None:
             security_groups = None
         else:
-            security_groups = [self.physical_resource_name_find(sg) for sg in
-                    self.properties.get('SecurityGroups')]
+            security_groups = [self.physical_resource_name_find(sg)
+                               for sg in self.properties.get('SecurityGroups')]
 
         userdata = self.properties['UserData'] or ''
         userdata += '\ntouch /var/lib/cloud/instance/provision-finished\n'
@@ -286,7 +286,7 @@ class Instance(resource.Resource):
         if res:
             return res
 
-        #check validity of key
+        # check validity of key
         try:
             key_name = self.properties['KeyName']
         except ValueError:
index bcb3cae1589173400b7452a6c3a264f1a61b4f99..2d5fea7c36fd54ba56fd05692e2d0a6cd5ebc4ae 100644 (file)
@@ -178,7 +178,7 @@ class LoadBalancer(stack.Stack):
         'Timeout': {'Type': 'Number',
                     'Required': True},
         'UnhealthyThreshold': {'Type': 'Number',
-                              'Required': True},
+                               'Required': True},
     }
 
     properties_schema = {
index 298a5a727a36f79c18d6fe7f843c4b7ecabc897e..c18af1eca608cb1cbbef0dae4853814f56037f0d 100644 (file)
@@ -22,12 +22,11 @@ logger = logging.getLogger(__name__)
 
 class FloatingIP(quantum.QuantumResource):
     properties_schema = {'floating_network_id': {'Type': 'String',
-                                    'Required': True},
-                        'value_specs': {'Type': 'Map',
-                                       'Default': {}},
-                        'port_id': {'Type': 'String'},
-                        'fixed_ip_address': {'Type': 'String'},
-    }
+                                                 'Required': True},
+                         'value_specs': {'Type': 'Map',
+                                         'Default': {}},
+                         'port_id': {'Type': 'String'},
+                         'fixed_ip_address': {'Type': 'String'}}
 
     def handle_create(self):
         props = self.prepare_properties(self.properties, self.name)
@@ -47,11 +46,10 @@ class FloatingIP(quantum.QuantumResource):
 
 class FloatingIPAssociation(quantum.QuantumResource):
     properties_schema = {'floatingip_id': {'Type': 'String',
-                                    'Required': True},
-                        'port_id': {'Type': 'String',
-                                    'Required': True},
-                        'fixed_ip_address': {'Type': 'String'}
-    }
+                                           'Required': True},
+                         'port_id': {'Type': 'String',
+                                     'Required': True},
+                         'fixed_ip_address': {'Type': 'String'}}
 
     def __init__(self, name, json_snippet, stack):
         super(FloatingIPAssociation, self).__init__(name, json_snippet, stack)
@@ -69,7 +67,7 @@ class FloatingIPAssociation(quantum.QuantumResource):
         client = self.quantum()
         (floatingip_id, port_id) = self.resource_id.split(':')
         client.update_floatingip(floatingip_id,
-            {'floatingip': {'port_id': None}})
+                                 {'floatingip': {'port_id': None}})
 
 
 def resource_mapping():
index d592f5c884a0cb31f4cdb1fb43efa0129cac3c1a..768b6da7192cc7856917bb86b19045a1773fcfed 100644 (file)
@@ -22,11 +22,10 @@ logger = logging.getLogger(__name__)
 
 class Net(quantum.QuantumResource):
     properties_schema = {'name': {'Type': 'String'},
-                        'value_specs': {'Type': 'Map',
-                                       'Default': {}},
-                        'admin_state_up': {'Default': True,
-                                          'Type': 'Boolean'},
-    }
+                         'value_specs': {'Type': 'Map',
+                                         'Default': {}},
+                         'admin_state_up': {'Default': True,
+                                            'Type': 'Boolean'}}
 
     def __init__(self, name, json_snippet, stack):
         super(Net, self).__init__(name, json_snippet, stack)
index abceb95ed3c0b8e801411daab8a94caf3423a789..eba805eea07b49439c48cfb13a1be0a988aa2241 100644 (file)
@@ -23,23 +23,22 @@ logger = logging.getLogger(__name__)
 class Port(quantum.QuantumResource):
 
     fixed_ip_schema = {'subnet_id': {'Type': 'String',
-                                  'Required': True},
-                        'ip_address': {'Type': 'String',
-                               'Required': True}}
+                                     'Required': True},
+                       'ip_address': {'Type': 'String',
+                                      'Required': True}}
 
     properties_schema = {'network_id': {'Type': 'String',
-                                    'Required': True},
-                        'name': {'Type': 'String'},
-                        'value_specs': {'Type': 'Map',
-                                       'Default': {}},
-                        'admin_state_up': {'Default': True,
-                                          'Type': 'Boolean'},
-                        'fixed_ips': {'Type': 'List',
-                                      'Schema': {'Type': 'Map',
-                                                 'Schema': fixed_ip_schema}},
-                        'mac_address': {'Type': 'String'},
-                        'device_id': {'Type': 'String'},
-    }
+                                        'Required': True},
+                         'name': {'Type': 'String'},
+                         'value_specs': {'Type': 'Map',
+                                         'Default': {}},
+                         'admin_state_up': {'Default': True,
+                                            'Type': 'Boolean'},
+                         'fixed_ips': {'Type': 'List',
+                                       'Schema': {'Type': 'Map',
+                                                  'Schema': fixed_ip_schema}},
+                         'mac_address': {'Type': 'String'},
+                         'device_id': {'Type': 'String'}}
 
     def __init__(self, name, json_snippet, stack):
         super(Port, self).__init__(name, json_snippet, stack)
index 40dfe2adf2d56e219fcd882cd0f665cea5d4eda8..d156815a7429d925a2e266c91c427ad30145854a 100644 (file)
@@ -61,7 +61,7 @@ class QuantumResource(resource.Resource):
         values.
         '''
         props = dict((k, v) for k, v in properties.items()
-            if v is not None and k != 'value_specs')
+                     if v is not None and k != 'value_specs')
 
         if 'name' in properties.keys():
             props.setdefault('name', name)
@@ -82,8 +82,7 @@ class QuantumResource(resource.Resource):
         if key in attributes.keys():
             return attributes[key]
 
-        raise exception.InvalidTemplateAttribute(resource=name,
-                                                     key=key)
+        raise exception.InvalidTemplateAttribute(resource=name, key=key)
 
     def handle_update(self):
         return self.UPDATE_REPLACE
index d7184183f674c5c743fa9c40a2a32a87d723f4ef..9f74d23f69e8855fdb517a2dc3cdb57fbae9f077 100644 (file)
@@ -23,11 +23,10 @@ logger = logging.getLogger(__name__)
 
 class Router(quantum.QuantumResource):
     properties_schema = {'name': {'Type': 'String'},
-                        'value_specs': {'Type': 'Map',
-                                       'Default': {}},
-                        'admin_state_up': {'Type': 'Boolean',
-                                          'Default': True},
-    }
+                         'value_specs': {'Type': 'Map',
+                                         'Default': {}},
+                         'admin_state_up': {'Type': 'Boolean',
+                                            'Default': True}}
 
     def __init__(self, name, json_snippet, stack):
         super(Router, self).__init__(name, json_snippet, stack)
@@ -49,10 +48,9 @@ class Router(quantum.QuantumResource):
 
 class RouterInterface(quantum.QuantumResource):
     properties_schema = {'router_id': {'Type': 'String',
-                                      'Required': True},
-                        'subnet_id': {'Type': 'String',
-                                      'Required': True},
-    }
+                                       'Required': True},
+                         'subnet_id': {'Type': 'String',
+                                       'Required': True}}
 
     def __init__(self, name, json_snippet, stack):
         super(RouterInterface, self).__init__(name, json_snippet, stack)
@@ -61,22 +59,21 @@ class RouterInterface(quantum.QuantumResource):
         router_id = self.properties.get('router_id')
         subnet_id = self.properties.get('subnet_id')
         self.quantum().add_interface_router(router_id,
-            {'subnet_id': subnet_id})
+                                            {'subnet_id': subnet_id})
         self.resource_id_set('%s:%s' % (router_id, subnet_id))
 
     def handle_delete(self):
         client = self.quantum()
         (router_id, subnet_id) = self.resource_id.split(':')
         client.remove_interface_router(router_id,
-            {'subnet_id': subnet_id})
+                                       {'subnet_id': subnet_id})
 
 
 class RouterGateway(quantum.QuantumResource):
     properties_schema = {'router_id': {'Type': 'String',
-                                      'Required': True},
-                        'network_id': {'Type': 'String',
-                                      'Required': True},
-    }
+                                       'Required': True},
+                         'network_id': {'Type': 'String',
+                                        'Required': True}}
 
     def __init__(self, name, json_snippet, stack):
         super(RouterGateway, self).__init__(name, json_snippet, stack)
@@ -85,7 +82,7 @@ class RouterGateway(quantum.QuantumResource):
         router_id = self.properties.get('router_id')
         network_id = self.properties.get('network_id')
         self.quantum().add_gateway_router(router_id,
-            {'network_id': network_id})
+                                          {'network_id': network_id})
         self.resource_id_set('%s:%s' % (router_id, network_id))
 
     def handle_delete(self):
index e00ba2f1239c044bcba93bdbb93368aca5c26bae..d6db25b85e72dcc3b7df38bc7bd2c6327aaad96d 100644 (file)
@@ -23,29 +23,28 @@ logger = logging.getLogger(__name__)
 class Subnet(quantum.QuantumResource):
 
     allocation_schema = {'start': {'Type': 'String',
-                                  'Required': True},
-                        'end': {'Type': 'String',
-                               'Required': True}}
+                                   'Required': True},
+                         'end': {'Type': 'String',
+                                 'Required': True}}
 
     properties_schema = {'network_id': {'Type': 'String',
-                                    'Required': True},
-                        'cidr': {'Type': 'String',
-                                'Required': True},
-                        'value_specs': {'Type': 'Map',
-                                       'Default': {}},
-                        'name': {'Type': 'String'},
-                        'admin_state_up': {'Default': True,
-                                          'Type': 'Boolean'},
-                        'ip_version': {'Type': 'Integer',
-                                      'AllowedValues': [4, 6],
-                                      'Default': 4},
-                        'gateway_ip': {'Type': 'String'},
-                        'allocation_pools': {'Type': 'List',
-                                             'Schema': {
-                                                 'Type': 'Map',
-                                                 'Schema': allocation_schema
-                                             }}
-    }
+                                        'Required': True},
+                         'cidr': {'Type': 'String',
+                                  'Required': True},
+                         'value_specs': {'Type': 'Map',
+                                         'Default': {}},
+                         'name': {'Type': 'String'},
+                         'admin_state_up': {'Default': True,
+                                            'Type': 'Boolean'},
+                         'ip_version': {'Type': 'Integer',
+                                        'AllowedValues': [4, 6],
+                                        'Default': 4},
+                         'gateway_ip': {'Type': 'String'},
+                         'allocation_pools': {'Type': 'List',
+                                              'Schema': {
+                                              'Type': 'Map',
+                                              'Schema': allocation_schema
+                                              }}}
 
     def __init__(self, name, json_snippet, stack):
         super(Subnet, self).__init__(name, json_snippet, stack)
index 4ca070f58eb3a90579439440a40ae90662146cbf..dbb9ad6fbb7b60200da4052449c881f0f0ae88da 100644 (file)
@@ -29,19 +29,18 @@ class S3Bucket(resource.Resource):
     website_schema = {'IndexDocument': {'Type': 'String'},
                       'ErrorDocument': {'Type': 'String'}}
     properties_schema = {'AccessControl': {
-                           'Type': 'String',
-                           'AllowedValues': ['Private',
-                                             'PublicRead',
-                                             'PublicReadWrite',
-                                             'AuthenticatedRead',
-                                             'BucketOwnerRead',
-                                             'BucketOwnerFullControl']},
-                        'DeletionPolicy': {
-                            'Type': 'String',
-                            'AllowedValues': ['Delete',
-                                              'Retain']},
-                        'WebsiteConfiguration': {'Type': 'Map',
-                                                 'Schema': website_schema}}
+                         'Type': 'String',
+                         'AllowedValues': ['Private',
+                                           'PublicRead',
+                                           'PublicReadWrite',
+                                           'AuthenticatedRead',
+                                           'BucketOwnerRead',
+                                           'BucketOwnerFullControl']},
+                         'DeletionPolicy': {
+                         'Type': 'String',
+                         'AllowedValues': ['Delete', 'Retain']},
+                         'WebsiteConfiguration': {'Type': 'Map',
+                                                  'Schema': website_schema}}
 
     def __init__(self, name, json_snippet, stack):
         super(S3Bucket, self).__init__(name, json_snippet, stack)
@@ -63,7 +62,7 @@ class S3Bucket(resource.Resource):
     def handle_create(self):
         """Create a bucket."""
         container = S3Bucket._create_container_name(
-                            self.physical_resource_name())
+            self.physical_resource_name())
         headers = {}
         logger.debug('S3Bucket create container %s with headers %s' %
                      (container, headers))
@@ -116,7 +115,7 @@ class S3Bucket(resource.Resource):
             return parsed[1].split(':')[0]
         elif key == 'WebsiteURL':
             return '%s://%s%s/%s' % (parsed[0], parsed[1], parsed[2],
-                                      self.resource_id)
+                                     self.resource_id)
         else:
             raise exception.InvalidTemplateAttribute(resource=self.name,
                                                      key=key)
index 287c0244d59559ba58beb995461fe84aae9adce0..981c00f88c5b8ed823b35a2158dcb5d50ad0024c 100644 (file)
@@ -28,7 +28,7 @@ class SecurityGroup(resource.Resource):
                                    'Implemented': False},
                          'SecurityGroupIngress': {'Type': 'List'},
                          'SecurityGroupEgress': {'Type': 'List',
-                                                  'Implemented': False}}
+                                                 'Implemented': False}}
 
     def __init__(self, name, json_snippet, stack):
         super(SecurityGroup, self).__init__(name, json_snippet, stack)
@@ -44,8 +44,8 @@ class SecurityGroup(resource.Resource):
 
         if not sec:
             sec = self.nova().security_groups.create(
-                                          self.physical_resource_name(),
-                                          self.properties['GroupDescription'])
+                self.physical_resource_name(),
+                self.properties['GroupDescription'])
 
         self.resource_id_set(sec.id)
         if self.properties['SecurityGroupIngress']:
index 45668d1abd4508df6a0a5659004514484ca245a0..a3ea872bf1811a8e067636fc4a2b4e3098dec1fa 100644 (file)
@@ -67,7 +67,7 @@ class Stack(resource.Resource):
         stack = self.nested()
         if op not in stack.outputs:
             raise exception.InvalidTemplateAttribute(
-                        resource=self.physical_resource_name(), key=key)
+                resource=self.physical_resource_name(), key=key)
 
         return stack.output(op)
 
@@ -102,7 +102,7 @@ class NestedStack(Stack):
     def FnGetAtt(self, key):
         if not key.startswith('Outputs.'):
             raise exception.InvalidTemplateAttribute(
-                        resource=self.physical_resource_name(), key=key)
+                resource=self.physical_resource_name(), key=key)
 
         prefix, dot, op = key.partition('.')
         return self.get_output(op)
index 66759ecf9d035ab0162a977158c803217d09e7d6..1d6938ece80abee9d91980cb994f875a87831c91 100644 (file)
@@ -33,7 +33,7 @@ class User(resource.Resource):
                          'LoginProfile': {'Type': 'Map',
                                           'Schema': {
                                               'Password': {'Type': 'String'}
-                                           }},
+                                          }},
                          'Policies': {'Type': 'List'}}
 
     def __init__(self, name, json_snippet, stack):
@@ -42,11 +42,11 @@ class User(resource.Resource):
     def handle_create(self):
         passwd = ''
         if self.properties['LoginProfile'] and \
-            'Password' in self.properties['LoginProfile']:
-            passwd = self.properties['LoginProfile']['Password']
+                'Password' in self.properties['LoginProfile']:
+                passwd = self.properties['LoginProfile']['Password']
 
         uid = self.keystone().create_stack_user(self.physical_resource_name(),
-                                                 passwd)
+                                                passwd)
         self.resource_id_set(uid)
 
     def handle_update(self):
@@ -64,7 +64,7 @@ class User(resource.Resource):
     def FnGetAtt(self, key):
         #TODO Implement Arn attribute
         raise exception.InvalidTemplateAttribute(
-                resource=self.physical_resource_name(), key=key)
+            resource=self.physical_resource_name(), key=key)
 
 
 class AccessKey(resource.Resource):
@@ -159,7 +159,7 @@ class AccessKey(resource.Resource):
             log_res = "<SANITIZED>"
         else:
             raise exception.InvalidTemplateAttribute(
-                        resource=self.physical_resource_name(), key=key)
+                resource=self.physical_resource_name(), key=key)
 
         logger.info('%s.GetAtt(%s) == %s' % (self.physical_resource_name(),
                                              key, log_res))
index f4b88f236a30055b8ae2f5b11de097ad8874c22e..70587c762ed19266c91d30eef0aa3ae135486eab 100644 (file)
@@ -34,9 +34,10 @@ class Volume(resource.Resource):
         super(Volume, self).__init__(name, json_snippet, stack)
 
     def handle_create(self):
-        vol = self.nova('volume').volumes.create(self.properties['Size'],
-                            display_name=self.physical_resource_name(),
-                            display_description=self.physical_resource_name())
+        vol = self.nova('volume').volumes.create(
+            self.properties['Size'],
+            display_name=self.physical_resource_name(),
+            display_description=self.physical_resource_name())
 
         while vol.status == 'creating':
             eventlet.sleep(1)
@@ -119,7 +120,7 @@ class VolumeAttachment(resource.Resource):
                 vol.get()
         except clients.novaclient.exceptions.NotFound as e:
             logger.warning('Deleting VolumeAttachment %s %s - not found' %
-                    (server_id, volume_id))
+                          (server_id, volume_id))
 
 
 def resource_mapping():
index 1e761020d44d61aab4ed4f97093f59b20fbec259..1b589cf51470ce84ed8c469b567f8be361fbf03f 100644 (file)
@@ -22,10 +22,10 @@ logger = logging.getLogger(__name__)
 class VPC(resource.Resource):
     properties_schema = {'CidrBlock': {'Type': 'String'},
                          'InstanceTenancy': {'Type': 'String',
-                            'AllowedValues': ['default', 'dedicated'],
-                            'Default': 'default',
-                            'Implemented': False}
-    }
+                                             'AllowedValues': ['default',
+                                                               'dedicated'],
+                                             'Default': 'default',
+                                             'Implemented': False}}
 
     def __init__(self, name, json_snippet, stack):
         super(VPC, self).__init__(name, json_snippet, stack)
index 0d849549b631b3675030e1d9eb3280bdd0c20da3..0bf139eadaedc19dbcff50bb113a582fa8ff16f1 100644 (file)
@@ -85,7 +85,7 @@ class WaitConditionHandle(resource.Resource):
     def handle_create(self):
         # Create a keystone user so we can create a signed URL via FnGetRefId
         user_id = self.keystone().create_stack_user(
-                   self.physical_resource_name())
+            self.physical_resource_name())
         kp = self.keystone().get_ec2_keypair(user_id)
         if not kp:
             raise exception.Error("Error creating ec2 keypair for user %s" %
@@ -129,8 +129,8 @@ class WaitCondition(resource.Resource):
     properties_schema = {'Handle': {'Type': 'String',
                                     'Required': True},
                          'Timeout': {'Type': 'Number',
-                                    'Required': True,
-                                    'MinValue': '1'},
+                                     'Required': True,
+                                     'MinValue': '1'},
                          'Count': {'Type': 'Number',
                                    'MinValue': '1'}}
 
index a2c35a17c4220af281d783f8d9ec5f1d27e74f0d..b4fb9957fcf3df2f8c082b73f8d19349a92f7dff 100644 (file)
@@ -168,7 +168,7 @@ class EngineService(service.Service):
             for s in stacks:
                 try:
                     stack = parser.Stack.load(context, stack=s,
-                        resolve_data=False)
+                                              resolve_data=False)
                 except exception.NotFound:
                     # The stack may have been deleted between listing
                     # and formatting
@@ -382,8 +382,8 @@ class EngineService(service.Service):
         if stack_identity is not None:
             s = self._get_stack(context, stack_identity)
         else:
-            rs = db_api.resource_get_by_physical_resource_id(context,
-                    physical_resource_id)
+            rs = db_api.resource_get_by_physical_resource_id(
+                context, physical_resource_id)
             if not rs:
                 msg = "The specified PhysicalResourceId doesn't exist"
                 raise AttributeError(msg)
@@ -400,8 +400,8 @@ class EngineService(service.Service):
             name_match = lambda r: True
 
         return [api.format_stack_resource(resource)
-                for resource in stack if resource.id is not None and
-                                         name_match(resource)]
+                for resource in stack
+                if resource.id is not None and name_match(resource)]
 
     @request_context
     def list_stack_resources(self, context, stack_identity):
@@ -443,7 +443,7 @@ class EngineService(service.Service):
         stack = db_api.stack_get(admin_context, sid, admin=True)
         if not stack:
             logger.error("Unable to retrieve stack %s for periodic task" %
-                        sid)
+                         sid)
             return
         user_creds = db_api.user_creds_get(stack.user_creds_id)
         stack_context = context.RequestContext.from_dict(user_creds)
@@ -502,7 +502,7 @@ class EngineService(service.Service):
         # DB API and schema does not yet allow us to easily query by
         # namespace/metric, but we will want this at some point
         # for now, the API can query all metric data and filter locally
-        if namespace != None or metric_name != None:
+        if namespace is not None or metric_name is not None:
             logger.error("Filtering by namespace/metric not yet supported")
             return
 
index dfcae8adae650f16651fd8d1b70b8073f4cf9b16..8d91e9312f1508d616c702b5350e24b63a83da58 100644 (file)
@@ -63,13 +63,13 @@ class WatchRule(object):
         '''
         Load the watchrule object, either by name or via an existing DB object
         '''
-        if watch == None:
+        if watch is None:
             try:
                 watch = db_api.watch_rule_get_by_name(context, watch_name)
             except Exception as ex:
                 logger.warn('WatchRule.load (%s) db error %s' %
                             (watch_name, str(ex)))
-        if watch == None:
+        if watch is None:
             raise AttributeError('Unknown watch name %s' % watch_name)
         else:
             return cls(context=context,
@@ -237,7 +237,7 @@ class WatchRule(object):
         else:
             s = db_api.stack_get(self.context, self.stack_id)
             if s and s.status in (parser.Stack.CREATE_COMPLETE,
-                                          parser.Stack.UPDATE_COMPLETE):
+                                  parser.Stack.UPDATE_COMPLETE):
                 stack = parser.Stack.load(self.context, stack=s)
                 for a in self.rule[self.ACTION_MAP[new_state]]:
                     greenpool.spawn_n(stack[a].alarm)
@@ -274,7 +274,7 @@ class WatchRule(object):
         if state != self.state:
             if self.rule_action(state):
                 logger.debug("Overriding state %s for watch %s with %s" %
-                         (self.state, self.name, state))
+                            (self.state, self.name, state))
             else:
                 logger.warning("Unable to override state %s for watch %s" %
-                         (self.state, self.name))
+                              (self.state, self.name))
index 241284449e32c0be0200df6eec92b46b632d4cca..9bf399f0c7c9f6098887715b8b7e8c360e7deecd 100644 (file)
@@ -54,7 +54,7 @@ class BaseParser(object):
 
         value = value.strip()
         if ((value and value[0] == value[-1]) and
-            (value[0] == "\"" or value[0] == "'")):
+                (value[0] == "\"" or value[0] == "'")):
             value = value[1:-1]
         return key.strip(), [value]
 
index 7c1faeee3d3f61be8a7abbd54dd295ab9212b4c8..c7777239790670b01dcb0912b37ce8552add6526 100644 (file)
@@ -258,7 +258,7 @@ class JSONFormatter(logging.Formatter):
 class PublishErrorsHandler(logging.Handler):
     def emit(self, record):
         if ('heat.openstack.common.notifier.log_notifier' in
-            CONF.notification_driver):
+                CONF.notification_driver):
             return
         notifier.api.notify(None, 'error.publisher',
                             'error_notification',
@@ -418,7 +418,7 @@ class LegacyFormatter(logging.Formatter):
             self._fmt = CONF.logging_default_format_string
 
         if (record.levelno == logging.DEBUG and
-            CONF.logging_debug_format_suffix):
+                CONF.logging_debug_format_suffix):
             self._fmt += " " + CONF.logging_debug_format_suffix
 
         # Cache this on the record, Logger will respect our formated copy
index 0a0586eaf972351d573f881ad5022af06f73a395..d8050b4b87f1229be3115edff4d4e257cef389c0 100644 (file)
@@ -19,9 +19,10 @@ from heat.openstack.common import importutils
 from heat.openstack.common import log as logging
 
 
-list_notifier_drivers_opt = cfg.MultiStrOpt('list_notifier_drivers',
-        default=['heat.openstack.common.notifier.no_op_notifier'],
-        help='List of drivers to send notifications')
+list_notifier_drivers_opt = cfg.MultiStrOpt(
+    'list_notifier_drivers',
+    default=['heat.openstack.common.notifier.no_op_notifier'],
+    help='List of drivers to send notifications')
 
 CONF = cfg.CONF
 CONF.register_opt(list_notifier_drivers_opt)
index b39583f0ab238953f01897ecb71899b428616f77..70852f88887f8fceaede30010076671baf4b444a 100644 (file)
@@ -52,8 +52,8 @@ class EngineClient(heat.openstack.common.rpc.proxy.RpcProxy):
 
     def __init__(self):
         super(EngineClient, self).__init__(
-                topic=FLAGS.engine_topic,
-                default_version=self.BASE_RPC_API_VERSION)
+            topic=FLAGS.engine_topic,
+            default_version=self.BASE_RPC_API_VERSION)
 
     def identify_stack(self, ctxt, stack_name):
         """
@@ -160,9 +160,10 @@ class EngineClient(heat.openstack.common.rpc.proxy.RpcProxy):
         :param params: Params passed from API.
         """
         rpc_method = self.cast if cast else self.call
-        return rpc_method(ctxt, self.make_msg('delete_stack',
-                                              stack_identity=stack_identity),
-                  topic=_engine_topic(self.topic, ctxt, None))
+        return rpc_method(ctxt,
+                          self.make_msg('delete_stack',
+                                        stack_identity=stack_identity),
+                          topic=_engine_topic(self.topic, ctxt, None))
 
     def list_resource_types(self, ctxt):
         """
index 82af7794529b814d3a80b63e135c00634c36604d..a475d315c40764c9e667b4f649c7e6a9aa6be716 100755 (executable)
@@ -122,19 +122,18 @@ class _Win32Colorizer(object):
     def __init__(self, stream):
         import win32console as win
         red, green, blue, bold = (win.FOREGROUND_RED, win.FOREGROUND_GREEN,
-                                 win.FOREGROUND_BLUE, win.FOREGROUND_INTENSITY)
+                                  win.FOREGROUND_BLUE, win.FOREGROUND_INTENSITY
+                                  )
         self.stream = stream
         self.screenBuffer = win.GetStdHandle(win.STD_OUT_HANDLE)
-        self._colors = {
-            'normal': red | green | blue,
-            'red': red | bold,
-            'green': green | bold,
-            'blue': blue | bold,
-            'yellow': red | green | bold,
-            'magenta': red | blue | bold,
-            'cyan': green | blue | bold,
-            'white': red | green | blue | bold
-            }
+        self._colors = {'normal': red | green | blue,
+                        'red': red | bold,
+                        'green': green | bold,
+                        'blue': blue | bold,
+                        'yellow': red | green | bold,
+                        'magenta': red | blue | bold,
+                        'cyan': green | blue | bold,
+                        'white': red | green | blue | bold}
 
     def supported(cls, stream=sys.stdout):
         try:
index ca03fcb0967134ba23a66d278664baa2b2d572e2..25143617c152507ce4dff80ac9a8f17a81a9ef79 100644 (file)
@@ -143,8 +143,8 @@ class Fedora(Distro):
 
 
 def get_distro():
-    if os.path.exists('/etc/fedora-release') or \
-       os.path.exists('/etc/redhat-release'):
+    if (os.path.exists('/etc/fedora-release') or
+            os.path.exists('/etc/redhat-release')):
         return Fedora()
     else:
         return Distro()
@@ -228,8 +228,9 @@ def parse_args():
     """Parse command-line arguments"""
     parser = optparse.OptionParser()
     parser.add_option("-n", "--no-site-packages", dest="no_site_packages",
-        default=False, action="store_true",
-        help="Do not inherit packages from global Python install")
+                      default=False, action="store_true",
+                      help=
+                      "Do not inherit packages from global Python install")
     return parser.parse_args()