review.fuel-infra Code Review - openstack-build/cinder-build.git/commitdiff
Update some Oslo Packages
author Walter A. Boring IV <walter.boring@hp.com>
Tue, 29 Jan 2013 17:32:41 +0000 (09:32 -0800)
committer Walter A. Boring IV <walter.boring@hp.com>
Tue, 29 Jan 2013 17:36:36 +0000 (09:36 -0800)
I excluded packages that caused unit tests to fail. We'll have
to tackle those individually.

Change-Id: I24c9c29decf1f171a214a837e69fc47d7e763a92

cinder/openstack/common/excutils.py
cinder/openstack/common/importutils.py
cinder/openstack/common/iniparser.py
cinder/openstack/common/jsonutils.py
cinder/openstack/common/lockutils.py
cinder/openstack/common/notifier/api.py
cinder/openstack/common/notifier/rabbit_notifier.py
cinder/openstack/common/setup.py
cinder/openstack/common/timeutils.py
cinder/openstack/common/uuidutils.py

cinder/openstack/common/excutils.py
index 5dd48301760e4778904bdc32f2d748e0e9ef7980..9cfabcf4a853ed8f63442155c447c30d175d6aef 100644
@@ -24,6 +24,8 @@ import logging
 import sys
 import traceback
 
+from cinder.openstack.common.gettextutils import _
+
 
 @contextlib.contextmanager
 def save_and_reraise_exception():
@@ -43,7 +45,7 @@ def save_and_reraise_exception():
     try:
         yield
     except Exception:
-        logging.error('Original exception being dropped: %s' %
-                      (traceback.format_exception(type_, value, tb)))
+        logging.error(_('Original exception being dropped: %s'),
+                      traceback.format_exception(type_, value, tb))
         raise
     raise type_, value, tb
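
The excutils hunk above switches to the deferred-formatting logging idiom: the translated template and its argument are handed to the logger separately, so interpolation only happens if the record is actually emitted. A minimal sketch of that idiom, with an illustrative logger and message:

    import logging

    LOG = logging.getLogger(__name__)

    def read_config(path):
        try:
            return open(path).read()
        except IOError:
            # template and argument are passed separately; the logger
            # interpolates them only when the record is actually emitted
            LOG.error('Could not read config file %s', path)
            raise
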
cinder/openstack/common/importutils.py
index f45372b4dba607251b5a79795be42d94b94da8f9..9dec764fb4097bf5860c562a514baec88571fe79 100644
@@ -29,7 +29,7 @@ def import_class(import_str):
     try:
         __import__(mod_str)
         return getattr(sys.modules[mod_str], class_str)
-    except (ValueError, AttributeError), exc:
+    except (ValueError, AttributeError):
         raise ImportError('Class %s cannot be found (%s)' %
                           (class_str,
                            traceback.format_exception(*sys.exc_info())))
@@ -57,3 +57,11 @@ def import_module(import_str):
     """Import a module."""
     __import__(import_str)
     return sys.modules[import_str]
+
+
+def try_import(import_str, default=None):
+    """Try to import a module and if it fails return default."""
+    try:
+        return import_module(import_str)
+    except ImportError:
+        return default
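
The new try_import() helper returns the module if it imports cleanly and the given default otherwise, which is handy for optional dependencies. A hypothetical use, where the 'netifaces' module name is purely illustrative:

    from cinder.openstack.common import importutils

    # module object if the library is installed, otherwise None
    netifaces = importutils.try_import('netifaces')

    def list_interfaces():
        if netifaces is None:
            # optional dependency is missing; degrade gracefully
            return []
        return netifaces.interfaces()
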
cinder/openstack/common/iniparser.py
index 241284449e32c0be0200df6eec92b46b632d4cca..9bf399f0c7c9f6098887715b8b7e8c360e7deecd 100644
@@ -54,7 +54,7 @@ class BaseParser(object):
 
         value = value.strip()
         if ((value and value[0] == value[-1]) and
-            (value[0] == "\"" or value[0] == "'")):
+                (value[0] == "\"" or value[0] == "'")):
             value = value[1:-1]
         return key.strip(), [value]
 
cinder/openstack/common/jsonutils.py
index bab6a365143a5bb3251357c7aa2fe6aa7a2f803e..d3026744407c00499e332f3aeb2dced241dfafea 100644
@@ -120,7 +120,7 @@ def to_primitive(value, convert_instances=False, level=0):
                                 level=level + 1)
         else:
             return value
-    except TypeError, e:
+    except TypeError:
         # Class objects are tricky since they may define something like
         # __iter__ defined but it isn't callable as list().
         return unicode(value)
cinder/openstack/common/lockutils.py
index 418bc3a50a2420f33dee9fe4e4f344f0a3d43dbf..16964edbd6932d299aeba17281dbf0f2c96374b3 100644
@@ -220,6 +220,11 @@ def synchronized(name, lock_file_prefix, external=False, lock_path=None):
                                        'method': f.__name__})
                             retval = f(*args, **kwargs)
                     finally:
+                        LOG.debug(_('Released file lock "%(lock)s" at %(path)s'
+                                    ' for method "%(method)s"...'),
+                                  {'lock': name,
+                                   'path': lock_file_path,
+                                   'method': f.__name__})
                         # NOTE(vish): This removes the tempdir if we needed
                         #             to create one. This is used to cleanup
                         #             the locks left behind by unit tests.
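
This hunk only adds a debug message when the file lock is released, mirroring the existing acquire-side logging. For context, the synchronized() decorator being modified is typically applied like this (the lock name and prefix are illustrative):

    import time

    from cinder.openstack.common import lockutils

    @lockutils.synchronized('volume-allocation', 'cinder-', external=True)
    def allocate_volume():
        # with external=True the lock is file-backed, so only one process
        # on the host can run this function at a time
        time.sleep(1)
        return 'allocated'
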
cinder/openstack/common/notifier/api.py
index 8acd5785cf43e10a0edc7442b875ee6373b01103..b1415d599917377c347f6e7b37cf9b083bc73565 100644
@@ -137,10 +137,11 @@ def notify(context, publisher_id, event_type, priority, payload):
     for driver in _get_drivers():
         try:
             driver.notify(context, msg)
-        except Exception, e:
+        except Exception as e:
             LOG.exception(_("Problem '%(e)s' attempting to "
                             "send to notification system. "
-                            "Payload=%(payload)s") % locals())
+                            "Payload=%(payload)s")
+                          % dict(e=e, payload=payload))
 
 
 _drivers = None
@@ -166,7 +167,7 @@ def add_driver(notification_driver):
         try:
             driver = importutils.import_module(notification_driver)
             _drivers[notification_driver] = driver
-        except ImportError as e:
+        except ImportError:
             LOG.exception(_("Failed to load notifier %s. "
                             "These notifications will not be sent.") %
                           notification_driver)
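
For context, notify() fans the message out to every configured driver, and a driver that raises is logged and skipped rather than aborting the loop; the hunks here modernize the exception syntax and replace the locals() interpolation with an explicit mapping. A hypothetical caller (publisher id and payload are illustrative):

    from cinder.openstack.common.notifier import api as notifier_api

    notifier_api.notify(context=None,
                        publisher_id='volume.host1',
                        event_type='volume.create.start',
                        priority='INFO',
                        payload={'volume_id': 'vol-1234'})
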
cinder/openstack/common/notifier/rabbit_notifier.py
index 89b69ca7ec282dd977a0b6d9ec98eb80bc82fee6..2ffe9524e903d97418f2eec993b12845379826ec 100644
@@ -1,4 +1,4 @@
-# Copyright 2011 OpenStack LLC.
+# Copyright 2012 Red Hat, Inc.
 # All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
 #    under the License.
 
 
-from cinder.openstack.common import cfg
-from cinder.openstack.common import context as req_context
 from cinder.openstack.common.gettextutils import _
 from cinder.openstack.common import log as logging
-from cinder.openstack.common import rpc
+from cinder.openstack.common.notifier import rpc_notifier
 
 LOG = logging.getLogger(__name__)
 
-notification_topic_opt = cfg.ListOpt(
-    'notification_topics', default=['notifications', ],
-    help='AMQP topic used for openstack notifications')
-
-CONF = cfg.CONF
-CONF.register_opt(notification_topic_opt)
-
 
 def notify(context, message):
-    """Sends a notification to the RabbitMQ"""
-    if not context:
-        context = req_context.get_admin_context()
-    priority = message.get('priority',
-                           CONF.default_notification_level)
-    priority = priority.lower()
-    for topic in CONF.notification_topics:
-        topic = '%s.%s' % (topic, priority)
-        try:
-            rpc.notify(context, topic, message)
-        except Exception, e:
-            LOG.exception(_("Could not send notification to %(topic)s. "
-                            "Payload=%(message)s"), locals())
+    """Deprecated in Grizzly. Please use rpc_notifier instead."""
+
+    LOG.deprecated(_("The rabbit_notifier is now deprecated."
+                     " Please use rpc_notifier instead."))
+    rpc_notifier.notify(context, message)
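
With this change the rabbit notifier becomes a thin deprecation shim: the AMQP topic handling moves to rpc_notifier, and every call logs a deprecation warning before forwarding. Roughly, for an existing caller (the message contents are illustrative):

    from cinder.openstack.common.notifier import rabbit_notifier

    message = {'event_type': 'volume.create.end',
               'priority': 'INFO',
               'payload': {'volume_id': 'vol-1234'}}

    # still works, but logs the deprecation warning and hands the message
    # straight to rpc_notifier.notify()
    rabbit_notifier.notify(None, message)
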
cinder/openstack/common/setup.py
index 4e2a57717fba1d0cc554bbc2608a4e6ad49e3e4f..cc8b99e3a8d0551526e39c178748915755514e4b 100644
@@ -1,6 +1,7 @@
 # vim: tabstop=4 shiftwidth=4 softtabstop=4
 
 # Copyright 2011 OpenStack LLC.
+# Copyright 2012-2013 Hewlett-Packard Development Company, L.P.
 # All Rights Reserved.
 #
 #    Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -19,7 +20,7 @@
 Utilities with minimum-depends for use in setup.py
 """
 
-import datetime
+import email
 import os
 import re
 import subprocess
@@ -33,11 +34,12 @@ def parse_mailmap(mailmap='.mailmap'):
     if os.path.exists(mailmap):
         with open(mailmap, 'r') as fp:
             for l in fp:
-                l = l.strip()
-                if not l.startswith('#') and ' ' in l:
-                    canonical_email, alias = [x for x in l.split(' ')
-                                              if x.startswith('<')]
-                    mapping[alias] = canonical_email
+                try:
+                    canonical_email, alias = re.match(
+                        r'[^#]*?(<.+>).*(<.+>).*', l).groups()
+                except AttributeError:
+                    continue
+                mapping[alias] = canonical_email
     return mapping
 
 
@@ -45,8 +47,8 @@ def canonicalize_emails(changelog, mapping):
     """Takes in a string and an email alias mapping and replaces all
        instances of the aliases in the string with their real email.
     """
-    for alias, email in mapping.iteritems():
-        changelog = changelog.replace(alias, email)
+    for alias, email_address in mapping.iteritems():
+        changelog = changelog.replace(alias, email_address)
     return changelog
 
 
@@ -106,19 +108,17 @@ def parse_dependency_links(requirements_files=['requirements.txt',
     return dependency_links
 
 
-def write_requirements():
-    venv = os.environ.get('VIRTUAL_ENV', None)
-    if venv is not None:
-        with open("requirements.txt", "w") as req_file:
-            output = subprocess.Popen(["pip", "-E", venv, "freeze", "-l"],
-                                      stdout=subprocess.PIPE)
-            requirements = output.communicate()[0].strip()
-            req_file.write(requirements)
-
-
-def _run_shell_command(cmd):
-    output = subprocess.Popen(["/bin/sh", "-c", cmd],
-                              stdout=subprocess.PIPE)
+def _run_shell_command(cmd, throw_on_error=False):
+    if os.name == 'nt':
+        output = subprocess.Popen(["cmd.exe", "/C", cmd],
+                                  stdout=subprocess.PIPE,
+                                  stderr=subprocess.PIPE)
+    else:
+        output = subprocess.Popen(["/bin/sh", "-c", cmd],
+                                  stdout=subprocess.PIPE,
+                                  stderr=subprocess.PIPE)
+    if output.returncode and throw_on_error:
+        raise Exception("%s returned %d" % cmd, output.returncode)
     out = output.communicate()
     if len(out) == 0:
         return None
@@ -127,55 +127,6 @@ def _run_shell_command(cmd):
     return out[0].strip()
 
 
-def _get_git_next_version_suffix(branch_name):
-    datestamp = datetime.datetime.now().strftime('%Y%m%d')
-    if branch_name == 'milestone-proposed':
-        revno_prefix = "r"
-    else:
-        revno_prefix = ""
-    _run_shell_command("git fetch origin +refs/meta/*:refs/remotes/meta/*")
-    milestone_cmd = "git show meta/openstack/release:%s" % branch_name
-    milestonever = _run_shell_command(milestone_cmd)
-    if not milestonever:
-        milestonever = ""
-    post_version = _get_git_post_version()
-    # post version should look like:
-    # 0.1.1.4.gcc9e28a
-    # where the bit after the last . is the short sha, and the bit between
-    # the last and second to last is the revno count
-    (revno, sha) = post_version.split(".")[-2:]
-    first_half = "%s~%s" % (milestonever, datestamp)
-    second_half = "%s%s.%s" % (revno_prefix, revno, sha)
-    return ".".join((first_half, second_half))
-
-
-def _get_git_current_tag():
-    return _run_shell_command("git tag --contains HEAD")
-
-
-def _get_git_tag_info():
-    return _run_shell_command("git describe --tags")
-
-
-def _get_git_post_version():
-    current_tag = _get_git_current_tag()
-    if current_tag is not None:
-        return current_tag
-    else:
-        tag_info = _get_git_tag_info()
-        if tag_info is None:
-            base_version = "0.0"
-            cmd = "git --no-pager log --oneline"
-            out = _run_shell_command(cmd)
-            revno = len(out.split("\n"))
-            sha = _run_shell_command("git describe --always")
-        else:
-            tag_infos = tag_info.split("-")
-            base_version = "-".join(tag_infos[:-2])
-            (revno, sha) = tag_infos[-2:]
-        return "%s.%s.%s" % (base_version, revno, sha)
-
-
 def write_git_changelog():
     """Write a changelog based on the git changelog."""
     new_changelog = 'ChangeLog'
@@ -221,26 +172,6 @@ _rst_template = """%(heading)s
 """
 
 
-def read_versioninfo(project):
-    """Read the versioninfo file. If it doesn't exist, we're in a github
-       zipball, and there's really no way to know what version we really
-       are, but that should be ok, because the utility of that should be
-       just about nil if this code path is in use in the first place."""
-    versioninfo_path = os.path.join(project, 'versioninfo')
-    if os.path.exists(versioninfo_path):
-        with open(versioninfo_path, 'r') as vinfo:
-            version = vinfo.read().strip()
-    else:
-        version = "0.0.0"
-    return version
-
-
-def write_versioninfo(project, version):
-    """Write a simple file containing the version of the package."""
-    with open(os.path.join(project, 'versioninfo'), 'w') as fil:
-        fil.write("%s\n" % version)
-
-
 def get_cmdclass():
     """Return dict of commands to run from setup.py."""
 
@@ -270,6 +201,9 @@ def get_cmdclass():
         from sphinx.setup_command import BuildDoc
 
         class LocalBuildDoc(BuildDoc):
+
+            builders = ['html', 'man']
+
             def generate_autoindex(self):
                 print "**Autodocumenting from %s" % os.path.abspath(os.curdir)
                 modules = {}
@@ -305,56 +239,97 @@ def get_cmdclass():
                 if not os.getenv('SPHINX_DEBUG'):
                     self.generate_autoindex()
 
-                for builder in ['html', 'man']:
+                for builder in self.builders:
                     self.builder = builder
                     self.finalize_options()
                     self.project = self.distribution.get_name()
                     self.version = self.distribution.get_version()
                     self.release = self.distribution.get_version()
                     BuildDoc.run(self)
+
+        class LocalBuildLatex(LocalBuildDoc):
+            builders = ['latex']
+
         cmdclass['build_sphinx'] = LocalBuildDoc
+        cmdclass['build_sphinx_latex'] = LocalBuildLatex
     except ImportError:
         pass
 
     return cmdclass
 
 
-def get_git_branchname():
-    for branch in _run_shell_command("git branch --color=never").split("\n"):
-        if branch.startswith('*'):
-            _branch_name = branch.split()[1].strip()
-    if _branch_name == "(no":
-        _branch_name = "no-branch"
-    return _branch_name
+def _get_revno():
+    """Return the number of commits since the most recent tag.
 
+    We use git-describe to find this out, but if there are no
+    tags then we fall back to counting commits since the beginning
+    of time.
+    """
+    describe = _run_shell_command("git describe --always")
+    if "-" in describe:
+        return describe.rsplit("-", 2)[-2]
 
-def get_pre_version(projectname, base_version):
-    """Return a version which is leading up to a version that will
-       be released in the future."""
-    if os.path.isdir('.git'):
-        current_tag = _get_git_current_tag()
-        if current_tag is not None:
-            version = current_tag
-        else:
-            branch_name = os.getenv('BRANCHNAME',
-                                    os.getenv('GERRIT_REFNAME',
-                                              get_git_branchname()))
-            version_suffix = _get_git_next_version_suffix(branch_name)
-            version = "%s~%s" % (base_version, version_suffix)
-        write_versioninfo(projectname, version)
-        return version
-    else:
-        version = read_versioninfo(projectname)
-    return version
+    # no tags found
+    revlist = _run_shell_command("git rev-list --abbrev-commit HEAD")
+    return len(revlist.splitlines())
 
 
-def get_post_version(projectname):
+def get_version_from_git(pre_version):
     """Return a version which is equal to the tag that's on the current
     revision if there is one, or tag plus number of additional revisions
     if the current revision has no tag."""
 
     if os.path.isdir('.git'):
-        version = _get_git_post_version()
-        write_versioninfo(projectname, version)
+        if pre_version:
+            try:
+                return  _run_shell_command(
+                    "git describe --exact-match",
+                    throw_on_error=True).replace('-', '.')
+            except Exception:
+                sha = _run_shell_command("git log -n1 --pretty=format:%h")
+                return "%s.a%s.g%s" % (pre_version, _get_revno(), sha)
+        else:
+            return _run_shell_command(
+                "git describe --always").replace('-', '.')
+    return None
+
+
+def get_version_from_pkg_info(package_name):
+    """Get the version from PKG-INFO file if we can."""
+    try:
+        pkg_info_file = open('PKG-INFO', 'r')
+    except (IOError, OSError):
+        return None
+    try:
+        pkg_info = email.message_from_file(pkg_info_file)
+    except email.MessageError:
+        return None
+    # Check to make sure we're in our own dir
+    if pkg_info.get('Name', None) != package_name:
+        return None
+    return pkg_info.get('Version', None)
+
+
+def get_version(package_name, pre_version=None):
+    """Get the version of the project. First, try getting it from PKG-INFO, if
+    it exists. If it does, that means we're in a distribution tarball or that
+    install has happened. Otherwise, if there is no PKG-INFO file, pull the
+    version from git.
+
+    We do not support setup.py version sanity in git archive tarballs, nor do
+    we support packagers directly sucking our git repo into theirs. We expect
+    that a source tarball be made from our git repo - or that if someone wants
+    to make a source tarball from a fork of our repo with additional tags in it
+    that they understand and desire the results of doing that.
+    """
+    version = os.environ.get("OSLO_PACKAGE_VERSION", None)
+    if version:
+        return version
+    version = get_version_from_pkg_info(package_name)
+    if version:
+        return version
+    version = get_version_from_git(pre_version)
+    if version:
         return version
-    return read_versioninfo(projectname)
+    raise Exception("Versioning for this project requires either an sdist"
+                    " tarball, or access to an upstream git repository.")
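
The setup.py rewrite drops the versioninfo-file machinery in favour of a single get_version() entry point that tries, in order, the OSLO_PACKAGE_VERSION environment variable, a PKG-INFO file (the sdist/installed case), and finally git metadata. A hypothetical setup.py consuming it might look like this (the package name is illustrative):

    import setuptools

    from cinder.openstack.common import setup as common_setup

    setuptools.setup(
        name='cinder',
        version=common_setup.get_version('cinder'),
        cmdclass=common_setup.get_cmdclass(),
    )
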
cinder/openstack/common/timeutils.py
index 86004391de06a7144b6dce653344880bc93ccbf0..5a011e8181fe9f3e6b42504acb26478d2dc8e1f3 100644
@@ -71,11 +71,15 @@ def normalize_time(timestamp):
 
 def is_older_than(before, seconds):
     """Return True if before is older than seconds."""
+    if isinstance(before, basestring):
+        before = parse_strtime(before).replace(tzinfo=None)
     return utcnow() - before > datetime.timedelta(seconds=seconds)
 
 
 def is_newer_than(after, seconds):
     """Return True if after is newer than seconds."""
+    if isinstance(after, basestring):
+        after = parse_strtime(after).replace(tzinfo=None)
     return after - utcnow() > datetime.timedelta(seconds=seconds)
 
 
@@ -87,22 +91,37 @@ def utcnow_ts():
 def utcnow():
     """Overridable version of utils.utcnow."""
     if utcnow.override_time:
-        return utcnow.override_time
+        try:
+            return utcnow.override_time.pop(0)
+        except AttributeError:
+            return utcnow.override_time
     return datetime.datetime.utcnow()
 
 
+def iso8601_from_timestamp(timestamp):
+    """Returns a iso8601 formated date from timestamp"""
+    return isotime(datetime.datetime.utcfromtimestamp(timestamp))
+
+
 utcnow.override_time = None
 
 
 def set_time_override(override_time=datetime.datetime.utcnow()):
-    """Override utils.utcnow to return a constant time."""
+    """
+    Override utils.utcnow to return a constant time or a list thereof,
+    one at a time.
+    """
     utcnow.override_time = override_time
 
 
 def advance_time_delta(timedelta):
     """Advance overridden time using a datetime.timedelta."""
     assert(not utcnow.override_time is None)
-    utcnow.override_time += timedelta
+    try:
+        for dt in utcnow.override_time:
+            dt += timedelta
+    except TypeError:
+        utcnow.override_time += timedelta
 
 
 def advance_time_seconds(seconds):
@@ -135,3 +154,29 @@ def unmarshall_time(tyme):
                              minute=tyme['minute'],
                              second=tyme['second'],
                              microsecond=tyme['microsecond'])
+
+
+def delta_seconds(before, after):
+    """
+    Compute the difference in seconds between two date, time, or
+    datetime objects (as a float, to microsecond resolution).
+    """
+    delta = after - before
+    try:
+        return delta.total_seconds()
+    except AttributeError:
+        return ((delta.days * 24 * 3600) + delta.seconds +
+                float(delta.microseconds) / (10 ** 6))
+
+
+def is_soon(dt, window):
+    """
+    Determines if time is going to happen in the next window seconds.
+
+    :params dt: the time
+    :params window: minimum seconds to remain to consider the time not soon
+
+    :return: True if expiration is within the given duration
+    """
+    soon = (utcnow() + datetime.timedelta(seconds=window))
+    return normalize_time(dt) < soon
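
The timeutils additions are small helpers: delta_seconds() returns the difference between two datetimes as a float, is_soon() checks whether a time falls within the next window seconds, iso8601_from_timestamp() formats a UNIX timestamp, and the test-time override can now be a list of datetimes consumed one per utcnow() call. A brief usage sketch (values are illustrative):

    import datetime

    from cinder.openstack.common import timeutils

    start = timeutils.utcnow()
    deadline = start + datetime.timedelta(seconds=30)

    # elapsed seconds as a float, to microsecond resolution
    elapsed = timeutils.delta_seconds(start, timeutils.utcnow())

    # True, since a deadline 30 seconds away is inside the 60-second window
    expiring = timeutils.is_soon(deadline, 60)
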
cinder/openstack/common/uuidutils.py
index 51042a798dfae767235541c0652227488c7c7526..7608acb9421fe93b28d8a0fffab2d66190548149 100644
@@ -22,6 +22,10 @@ UUID related utilities and helper functions.
 import uuid
 
 
+def generate_uuid():
+    return str(uuid.uuid4())
+
+
 def is_uuid_like(val):
     """Returns validation of a value as a UUID.