import six
-from cinder.openstack.common.gettextutils import _
+from cinder.openstack.common.gettextutils import _LE
class save_and_reraise_exception(object):
    """Save current exception, run some code and then re-raise.

    In some cases the exception context can be cleared, resulting in None
    being attempted to be re-raised after an exception handler is run. This
    can happen when eventlet switches greenthreads or when running an
    exception handler, code raises and catches an exception. In both
    cases the exception context will be cleared.

    To work around this, we save the exception state, run handler code, and
    then re-raise the original exception. If another exception occurs, the
    saved exception is logged and the new exception is re-raised.

    In some cases the caller may not want to re-raise the exception, and
    for those circumstances this context provides a reraise flag that
    can be used to suppress the exception.  For example::

      except Exception:
          with save_and_reraise_exception() as ctxt:
              decide_if_need_reraise()
              if not should_be_reraised:
                  ctxt.reraise = False

    If another exception occurs and reraise flag is False,
    the saved exception will not be logged.

    If the caller wants to raise new exception during exception handling
    he/she sets reraise to False initially with an ability to set it back to
    True if needed::

      except Exception:
          with save_and_reraise_exception(reraise=False) as ctxt:
              [if statements to determine whether to raise a new exception]
              # Not raising a new exception, so reraise
              ctxt.reraise = True
    """

    def __init__(self, reraise=True):
        # Whether to re-raise the saved exception on clean exit.
        self.reraise = reraise

    def __enter__(self):
        # Capture the exception currently being handled so it can be
        # re-raised after the body runs.
        self.type_, self.value, self.tb, = sys.exc_info()
        # Must return self: the documented usage binds the manager with
        # "as ctxt" and toggles ctxt.reraise inside the block.
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is not None:
            # A new exception was raised inside the block; only log the
            # original (now dropped) exception if we intended to re-raise it.
            if self.reraise:
                logging.error(_LE('Original exception being dropped: %s'),
                              traceback.format_exception(self.type_,
                                                         self.value,
                                                         self.tb))
            # Returning False propagates the new exception.
            return False
        if self.reraise:
            six.reraise(self.type_, self.value, self.tb)
if (cur_time - last_log_time > 60 or
this_exc_message != last_exc_message):
logging.exception(
- _('Unexpected exception occurred %d time(s)... '
- 'retrying.') % exc_count)
+ _LE('Unexpected exception occurred %d time(s)... '
+ 'retrying.') % exc_count)
last_log_time = cur_time
last_exc_message = this_exc_message
exc_count = 0
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# License for the specific language governing permissions and limitations
# under the License.
-
import contextlib
import errno
import os
+import tempfile
from cinder.openstack.common import excutils
-from cinder.openstack.common.gettextutils import _
from cinder.openstack.common import log as logging
LOG = logging.getLogger(__name__)
"""
global _FILE_CACHE
- if force_reload and filename in _FILE_CACHE:
- del _FILE_CACHE[filename]
+ if force_reload:
+ delete_cached_file(filename)
reloaded = False
mtime = os.path.getmtime(filename)
cache_info = _FILE_CACHE.setdefault(filename, {})
if not cache_info or mtime > cache_info.get('mtime', 0):
- LOG.debug(_("Reloading cached file %s") % filename)
+ LOG.debug("Reloading cached file %s" % filename)
with open(filename) as fap:
cache_info['data'] = fap.read()
cache_info['mtime'] = mtime
return (reloaded, cache_info['data'])
-def delete_if_exists(path):
def delete_cached_file(filename):
    """Delete cached file if present.

    :param filename: filename to delete
    """
    global _FILE_CACHE

    # pop() with a default removes the entry when present and is a
    # no-op otherwise — same effect as a membership test plus del.
    _FILE_CACHE.pop(filename, None)
def delete_if_exists(path, remove=os.unlink):
    """Delete a file, but ignore file not found error.

    :param path: File to delete
    :param remove: Optional function to remove passed path
    """
    try:
        remove(path)
    except OSError as err:
        # A missing file is fine; anything else is a real failure.
        if err.errno == errno.ENOENT:
            return
        raise
@contextlib.contextmanager
def remove_path_on_error(path, remove=delete_if_exists):
    """Protect code that wants to operate on PATH atomically.

    Any exception will cause PATH to be removed.

    :param path: File to work with
    :param remove: Optional function to remove passed path
    """

    try:
        yield
    except Exception:
        # Remove the path, then re-raise the original exception
        # (save_and_reraise_exception preserves it across the cleanup call).
        with excutils.save_and_reraise_exception():
            remove(path)
def file_open(*args, **kwargs):
    """Open a file by delegating to the built-in open().

    see built-in open() documentation for more details

    Note: The reason this is kept in a separate module is to easily
    be able to provide a stub module that doesn't alter system
    state at all (for unit tests)
    """
    return open(*args, **kwargs)
+
+
def write_to_tempfile(content, path=None, suffix='', prefix='tmp'):
    """Create temporary file or use existing file.

    This util is needed for creating temporary file with
    specified content, suffix and prefix. If path is not None,
    it will be used for writing content. If the path doesn't
    exist it'll be created.

    :param content: content for temporary file; bytes, or text
                    (text is encoded as UTF-8 before writing).
    :param path: same as parameter 'dir' for mkstemp
    :param suffix: same as parameter 'suffix' for mkstemp
    :param prefix: same as parameter 'prefix' for mkstemp
    :returns: path of the created file.

    For example: it can be used in database tests for creating
    configuration files.
    """
    if path:
        ensure_tree(path)

    # os.write() only accepts bytes; encode text content so callers
    # passing unicode strings don't crash (on py2, str is bytes and
    # is written unchanged).
    if not isinstance(content, bytes):
        content = content.encode('utf-8')

    (fd, path) = tempfile.mkstemp(suffix=suffix, dir=path, prefix=prefix)
    try:
        os.write(fd, content)
    finally:
        # Always release the descriptor, even if the write fails.
        os.close(fd)
    return path
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright (c) 2011 OpenStack Foundation
+# Copyright (c) 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# License for the specific language governing permissions and limitations
# under the License.
-"""Common Policy Engine Implementation"""
+"""
+Common Policy Engine Implementation
+
+Policies can be expressed in one of two forms: A list of lists, or a
+string written in the new policy language.
+
+In the list-of-lists representation, each check inside the innermost
+list is combined as with an "and" conjunction--for that check to pass,
+all the specified checks must pass. These innermost lists are then
+combined as with an "or" conjunction. This is the original way of
+expressing policies, but there now exists a new way: the policy
+language.
+
+In the policy language, each check is specified the same way as in the
+list-of-lists representation: a simple "a:b" pair that is matched to
+the correct code to perform that check. However, conjunction
+operators are available, allowing for more expressiveness in crafting
+policies.
+
+As an example, take the following rule, expressed in the list-of-lists
+representation::
+
+ [["role:admin"], ["project_id:%(project_id)s", "role:projectadmin"]]
+
+In the policy language, this becomes::
+
+ role:admin or (project_id:%(project_id)s and role:projectadmin)
+
+The policy language also has the "not" operator, allowing a richer
+policy rule::
+
+ project_id:%(project_id)s and not role:dunce
+
+It is possible to perform policy checks on the following user
+attributes (obtained through the token): user_id, domain_id or
+project_id::
+
+ domain_id:<some_value>
+
+Attributes sent along with API calls can be used by the policy engine
+(on the right side of the expression), by using the following syntax::
+
+ <some_value>:user.id
-import logging
-import urllib
-import urllib2
+Contextual attributes of objects identified by their IDs are loaded
+from the database. They are also available to the policy engine and
+can be checked through the `target` keyword::
-from cinder.openstack.common.gettextutils import _
+ <some_value>:target.role.name
+
+All these attributes (related to users, API calls, and context) can be
+checked against each other or against constants, be it literals (True,
+<a_number>) or strings.
+
+Finally, two special policy checks should be mentioned; the policy
+check "@" will always accept an access, and the policy check "!" will
+always reject an access. (Note that if a rule is either the empty
+list ("[]") or the empty string, this is equivalent to the "@" policy
+check.) Of these, the "!" policy check is probably the most useful,
+as it allows particular rules to be explicitly disabled.
+"""
+
+import abc
+import ast
+import re
+
+from oslo.config import cfg
+import six
+import six.moves.urllib.parse as urlparse
+import six.moves.urllib.request as urlrequest
+
+from cinder.openstack.common import fileutils
+from cinder.openstack.common.gettextutils import _, _LE
from cinder.openstack.common import jsonutils
+from cinder.openstack.common import log as logging
+
+
+policy_opts = [
+ cfg.StrOpt('policy_file',
+ default='policy.json',
+ help=_('The JSON file that defines policies.')),
+ cfg.StrOpt('policy_default_rule',
+ default='default',
+ help=_('Default rule. Enforced when a requested rule is not '
+ 'found.')),
+]
+CONF = cfg.CONF
+CONF.register_opts(policy_opts)
LOG = logging.getLogger(__name__)
+_checks = {}
+
+
class PolicyNotAuthorized(Exception):
    """Raised when a policy check refuses the requested action."""

    def __init__(self, rule):
        msg = _("Policy doesn't allow %s to be performed.") % rule
        super(PolicyNotAuthorized, self).__init__(msg)
+
class Rules(dict):
    """A store for rules. Handles the default_rule setting directly."""

    @classmethod
    def load_json(cls, data, default_rule=None):
        """Allow loading of JSON rule data."""

        # Parse every rule in the JSON blob into a Check tree.
        parsed = dict((name, parse_rule(check_str))
                      for name, check_str in jsonutils.loads(data).items())

        return cls(parsed, default_rule)

    def __init__(self, rules=None, default_rule=None):
        """Initialize the Rules store."""

        super(Rules, self).__init__(rules or {})
        self.default_rule = default_rule

    def __missing__(self, key):
        """Implements the default rule handling."""

        if isinstance(self.default_rule, dict):
            raise KeyError(key)

        # No usable default rule configured: behave like a plain dict.
        if not self.default_rule:
            raise KeyError(key)

        if isinstance(self.default_rule, BaseCheck):
            return self.default_rule

        # Guard against infinite recursion when the named default rule
        # is itself absent from the store.
        if self.default_rule not in self:
            raise KeyError(key)

        elif isinstance(self.default_rule, six.string_types):
            return self[self.default_rule]
        # NOTE(review): a non-string, non-BaseCheck default that IS present
        # in the store falls through and returns None here — preserved
        # as-is; confirm this is intended.

    def __str__(self):
        """Dumps a string representation of the rules."""

        # Build the canonical string for each rule; the singleton
        # TrueCheck serializes to the empty string.
        out_rules = {}
        for name, check in self.items():
            out_rules[name] = '' if isinstance(check, TrueCheck) else str(check)

        # Dump a pretty-printed JSON representation
        return jsonutils.dumps(out_rules, indent=4)
- Defaults use Brain() if not set.
class Enforcer(object):
    """Responsible for loading and enforcing rules.

    :param policy_file: Custom policy file to use, if none is
                        specified, `CONF.policy_file` will be
                        used.
    :param rules: Default dictionary / Rules to use. It will be
                  considered just in the first instantiation. If
                  `load_rules(True)`, `clear()` or `set_rules(True)`
                  is called this will be overwritten.
    :param default_rule: Default rule to use, CONF.default_rule will
                         be used if none is specified.
    :param use_conf: Whether to load rules from cache or config file.
    """

    def __init__(self, policy_file=None, rules=None,
                 default_rule=None, use_conf=True):
        self.rules = Rules(rules, default_rule)
        self.default_rule = default_rule or CONF.policy_default_rule

        # policy_path is resolved lazily by load_rules()/_get_policy_path().
        self.policy_path = None
        self.policy_file = policy_file or CONF.policy_file
        self.use_conf = use_conf

    def set_rules(self, rules, overwrite=True, use_conf=False):
        """Create a new Rules object based on the provided dict of rules.

        :param rules: New rules to use. It should be an instance of dict.
        :param overwrite: Whether to overwrite current rules or update them
                          with the new rules.
        :param use_conf: Whether to reload rules from cache or config file.
        """

        if not isinstance(rules, dict):
            raise TypeError(_("Rules must be an instance of dict or Rules, "
                            "got %s instead") % type(rules))
        self.use_conf = use_conf
        if overwrite:
            self.rules = Rules(rules, self.default_rule)
        else:
            self.rules.update(rules)

    def clear(self):
        """Clears Enforcer rules, policy's cache and policy's path."""
        self.set_rules({})
        # Drop the cached policy file contents so a later load re-reads it.
        fileutils.delete_cached_file(self.policy_path)
        self.default_rule = None
        self.policy_path = None

    def load_rules(self, force_reload=False):
        """Loads policy_path's rules.

        Policy file is cached and will be reloaded if modified.

        :param force_reload: Whether to overwrite current rules.
        """

        # A forced reload also switches the enforcer back to config-file
        # mode, overriding any earlier set_rules(use_conf=False).
        if force_reload:
            self.use_conf = force_reload

        if self.use_conf:
            if not self.policy_path:
                self.policy_path = self._get_policy_path()

            reloaded, data = fileutils.read_cached_file(
                self.policy_path, force_reload=force_reload)
            if reloaded or not self.rules:
                rules = Rules.load_json(data, self.default_rule)
                self.set_rules(rules)
                LOG.debug("Rules successfully reloaded")

    def _get_policy_path(self):
        """Locate the policy json data file.

        :param policy_file: Custom policy file to locate.

        :returns: The policy path

        :raises: ConfigFilesNotFoundError if the file couldn't
                 be located.
        """
        policy_file = CONF.find_file(self.policy_file)

        if policy_file:
            return policy_file

        raise cfg.ConfigFilesNotFoundError((self.policy_file,))

    def enforce(self, rule, target, creds, do_raise=False,
                exc=None, *args, **kwargs):
        """Checks authorization of a rule against the target and credentials.

        :param rule: A string or BaseCheck instance specifying the rule
                     to evaluate.
        :param target: As much information about the object being operated
                       on as possible, as a dictionary.
        :param creds: As much information about the user performing the
                      action as possible, as a dictionary.
        :param do_raise: Whether to raise an exception or not if check
                         fails.
        :param exc: Class of the exception to raise if the check fails.
                    Any remaining arguments passed to check() (both
                    positional and keyword arguments) will be passed to
                    the exception class. If not specified,
                    PolicyNotAuthorized will be used.

        :return: Returns False if the policy does not allow the action and
                 exc is not provided; otherwise, returns a value that
                 evaluates to True.  Note: for rules using the "case"
                 expression, this True value will be the specified string
                 from the expression.
        """

        # May pick up fresh rules from the policy file on every call.
        self.load_rules()

        # Allow the rule to be a Check tree
        if isinstance(rule, BaseCheck):
            result = rule(target, creds, self)
        elif not self.rules:
            # No rules to reference means we're going to fail closed
            result = False
        else:
            try:
                # Evaluate the rule
                result = self.rules[rule](target, creds, self)
            except KeyError:
                LOG.debug("Rule [%s] doesn't exist" % rule)
                # If the rule doesn't exist, fail closed
                result = False

        # If it is False, raise the exception if requested
        if do_raise and not result:
            if exc:
                raise exc(*args, **kwargs)

            raise PolicyNotAuthorized(rule)

        return result
- Target dicts contain as much information as we can about the object being
- operated on.
- :param credentials_dict: dict of actor properties
@six.add_metaclass(abc.ABCMeta)
class BaseCheck(object):
    """Abstract base class for Check classes."""

    @abc.abstractmethod
    def __str__(self):
        """Return the string representation of the Check tree at this node."""

    @abc.abstractmethod
    def __call__(self, target, cred, enforcer):
        """Perform the check.

        Returns False to reject the access, or a truthy value (not
        necessarily True) to accept it.
        """
+
+
class FalseCheck(BaseCheck):
    """A policy check that always returns False (disallow)."""

    def __str__(self):
        """Return a string representation of this check."""
        return "!"

    def __call__(self, target, cred, enforcer):
        """Check the policy; unconditionally deny."""
        return False
- return True
-class Brain(object):
- """Implements policy checking."""
class TrueCheck(BaseCheck):
    """A policy check that always returns True (allow)."""

    def __str__(self):
        """Return a string representation of this check."""
        return "@"

    def __call__(self, target, cred, enforcer):
        """Check the policy; unconditionally allow."""
        return True
- self.rules = rules or {}
- self.default_rule = default_rule
- def add_rule(self, key, match):
- self.rules[key] = match
class Check(BaseCheck):
    """A base class to allow for user-defined policy checks."""

    def __init__(self, kind, match):
        """Initiates Check instance.

        :param kind: The kind of the check, i.e., the field before the ':'.
        :param match: The match of the check, i.e., the field after the ':'.
        """
        self.kind = kind
        self.match = match

    def __str__(self):
        """Return the canonical "kind:match" form of this check."""
        return "%s:%s" % (self.kind, self.match)
+
+
class NotCheck(BaseCheck):
    """Implements the "not" logical operator.

    A policy check that inverts the result of another policy check.
    """

    def __init__(self, rule):
        """Initialize the 'not' check.

        :param rule: The rule to negate. Must be a Check.
        """
        self.rule = rule

    def __str__(self):
        """Return a string representation of this check."""
        return "not %s" % self.rule

    def __call__(self, target, cred, enforcer):
        """Check the policy; return the inverse of the wrapped check."""
        return not self.rule(target, cred, enforcer)
+
+
class AndCheck(BaseCheck):
    """Implements the "and" logical operator.

    A policy check that requires that a list of other checks all return True.
    """

    def __init__(self, rules):
        """Initialize the 'and' check.

        :param rules: A list of rules that will be tested.
        """
        self.rules = rules

    def __str__(self):
        """Return a string representation of this check."""
        return "(%s)" % ' and '.join(str(r) for r in self.rules)

    def __call__(self, target, cred, enforcer):
        """Check the policy.

        All sub-rules must accept; evaluation short-circuits on the
        first rejecting rule.
        """
        return all(rule(target, cred, enforcer) for rule in self.rules)

    def add_check(self, rule):
        """Adds rule to be tested.

        Allows addition of another rule to the list of rules that will
        be tested. Returns the AndCheck object for convenience.
        """
        self.rules.append(rule)
        return self
+
+
class OrCheck(BaseCheck):
    """Implements the "or" operator.

    A policy check that requires that at least one of a list of other
    checks returns True.
    """

    def __init__(self, rules):
        """Initialize the 'or' check.

        :param rules: A list of rules that will be tested.
        """
        self.rules = rules

    def __str__(self):
        """Return a string representation of this check."""
        return "(%s)" % ' or '.join(str(r) for r in self.rules)

    def __call__(self, target, cred, enforcer):
        """Check the policy.

        At least one sub-rule must accept; evaluation short-circuits
        on the first accepting rule.
        """
        return any(rule(target, cred, enforcer) for rule in self.rules)

    def add_check(self, rule):
        """Adds rule to be tested.

        Allows addition of another rule to the list of rules that will
        be tested. Returns the OrCheck object for convenience.
        """
        self.rules.append(rule)
        return self
+
-class HttpBrain(Brain):
- """A brain that can check external urls for policy.
def _parse_check(rule):
    """Parse a single base check rule into an appropriate Check object.

    :param rule: A "kind:match" string, or one of the special rules
                 "!" (always deny) and "@" (always allow).
    :returns: A Check instance; malformed or unhandled rules fail closed
              as a FalseCheck.
    """

    # Handle the special checks
    if rule == '!':
        return FalseCheck()
    elif rule == '@':
        return TrueCheck()

    try:
        kind, match = rule.split(':', 1)
    except Exception:
        # Lazy %-style logger arguments: the message is only formatted
        # when the record is actually emitted.
        LOG.exception(_LE("Failed to understand rule %s"), rule)
        # If the rule is invalid, we'll fail closed
        return FalseCheck()

    # Find what implements the check
    if kind in _checks:
        return _checks[kind](kind, match)
    elif None in _checks:
        # Fall back to the registered default check type, if any.
        return _checks[None](kind, match)
    else:
        LOG.error(_LE("No handler for matches of kind %s"), kind)
        return FalseCheck()
+
+
def _parse_list_rule(rule):
    """Translates the old list-of-lists syntax into a tree of Check objects.

    Provided for backwards compatibility.
    """

    # Empty rule defaults to True
    if not rule:
        return TrueCheck()

    # Outer list is joined by "or"; inner list by "and"
    or_list = []
    for inner_rule in rule:
        # Elide empty inner lists
        if not inner_rule:
            continue

        # Handle bare strings
        if isinstance(inner_rule, six.string_types):
            inner_rule = [inner_rule]

        # Parse the inner rules into Check objects
        and_list = [_parse_check(check_str) for check_str in inner_rule]

        # A single check needs no AndCheck wrapper
        or_list.append(and_list[0] if len(and_list) == 1
                       else AndCheck(and_list))

    # If we have only one check, omit the "or"
    if not or_list:
        return FalseCheck()
    if len(or_list) == 1:
        return or_list[0]
    return OrCheck(or_list)
+
+
# Used for tokenizing the policy language
_tokenize_re = re.compile(r'\s+')


def _parse_tokenize(rule):
    """Tokenizer for the policy language.

    Most of the single-character tokens are specified in the
    _tokenize_re; however, parentheses need to be handled specially,
    because they can appear inside a check string.  Thankfully, those
    parentheses that appear inside a check string can never occur at
    the very beginning or end ("%(variable)s" is the correct syntax).

    Yields (token_kind, value) pairs consumed by ParseState.shift().
    """

    for tok in _tokenize_re.split(rule):
        # Skip empty tokens
        if not tok or tok.isspace():
            continue

        # Handle leading parens on the token
        clean = tok.lstrip('(')
        for i in range(len(tok) - len(clean)):
            yield '(', '('

        # If it was only parentheses, continue
        if not clean:
            continue
        else:
            tok = clean

        # Handle trailing parens on the token (yielded after the token
        # itself, below)
        clean = tok.rstrip(')')
        trail = len(tok) - len(clean)

        # Yield the cleaned token
        lowered = clean.lower()
        if lowered in ('and', 'or', 'not'):
            # Special tokens
            yield lowered, clean
        elif clean:
            # Not a special token, but not composed solely of ')'
            if len(tok) >= 2 and ((tok[0], tok[-1]) in
                                  [('"', '"'), ("'", "'")]):
                # It's a quoted string
                yield 'string', tok[1:-1]
            else:
                yield 'check', _parse_check(clean)

        # Yield the trailing parens
        for i in range(trail):
            yield ')', ')'
+
+
class ParseStateMeta(type):
    """Metaclass for the ParseState class.

    Facilitates identifying reduction methods.
    """

    def __new__(mcs, name, bases, cls_dict):
        """Create the class.

        Injects the 'reducers' list, a list of tuples matching token
        sequences to the names of the corresponding reduction methods.
        """

        # Collect every (token-sequence, method-name) pair declared via
        # the @reducer decorator on the class's methods.
        reducers = [(reduction, attr_name)
                    for attr_name, attr in cls_dict.items()
                    if hasattr(attr, 'reducers')
                    for reduction in attr.reducers]

        cls_dict['reducers'] = reducers

        return super(ParseStateMeta, mcs).__new__(mcs, name, bases, cls_dict)
+
+
def reducer(*tokens):
    """Decorator for reduction methods.

    Arguments are a sequence of tokens, in order, which should trigger running
    this reduction method.
    """

    def decorator(func):
        # Lazily create the list of reducer sequences on first decoration,
        # so the decorator can be stacked.
        if not hasattr(func, 'reducers'):
            func.reducers = []
        func.reducers.append(list(tokens))
        return func

    return decorator
+
+
@six.add_metaclass(ParseStateMeta)
class ParseState(object):
    """Implement the core of parsing the policy language.

    Uses a greedy reduction algorithm to reduce a sequence of tokens into
    a single terminal, the value of which will be the root of the Check tree.

    Note: error reporting is rather lacking.  The best we can get with
    this parser formulation is an overall "parse failed" error.
    Fortunately, the policy language is simple enough that this
    shouldn't be that big a problem.
    """

    def __init__(self):
        """Initialize the ParseState."""

        # Parallel stacks: tokens[i] names the kind of values[i].
        self.tokens = []
        self.values = []

    def reduce(self):
        """Perform a greedy reduction of the token stream.

        If a reducer method matches, it will be executed, then the
        reduce() method will be called recursively to search for any more
        possible reductions.
        """

        # 'reducers' is injected by ParseStateMeta from @reducer decorators.
        for reduction, methname in self.reducers:
            if (len(self.tokens) >= len(reduction) and
                self.tokens[-len(reduction):] == reduction):
                # Get the reduction method
                meth = getattr(self, methname)

                # Reduce the token stream
                results = meth(*self.values[-len(reduction):])

                # Update the tokens and values
                self.tokens[-len(reduction):] = [r[0] for r in results]
                self.values[-len(reduction):] = [r[1] for r in results]

                # Check for any more reductions
                return self.reduce()

    def shift(self, tok, value):
        """Adds one more token to the state. Calls reduce()."""

        self.tokens.append(tok)
        self.values.append(value)

        # Do a greedy reduce...
        self.reduce()

    @property
    def result(self):
        """Obtain the final result of the parse.

        Raises ValueError if the parse failed to reduce to a single result.
        """

        if len(self.values) != 1:
            raise ValueError("Could not parse rule")
        return self.values[0]

    @reducer('(', 'check', ')')
    @reducer('(', 'and_expr', ')')
    @reducer('(', 'or_expr', ')')
    def _wrap_check(self, _p1, check, _p2):
        """Turn parenthesized expressions into a 'check' token."""

        return [('check', check)]

    @reducer('check', 'and', 'check')
    def _make_and_expr(self, check1, _and, check2):
        """Create an 'and_expr'.

        Join two checks by the 'and' operator.
        """

        return [('and_expr', AndCheck([check1, check2]))]

    @reducer('and_expr', 'and', 'check')
    def _extend_and_expr(self, and_expr, _and, check):
        """Extend an 'and_expr' by adding one more check."""

        return [('and_expr', and_expr.add_check(check))]

    @reducer('check', 'or', 'check')
    def _make_or_expr(self, check1, _or, check2):
        """Create an 'or_expr'.

        Join two checks by the 'or' operator.
        """

        return [('or_expr', OrCheck([check1, check2]))]

    @reducer('or_expr', 'or', 'check')
    def _extend_or_expr(self, or_expr, _or, check):
        """Extend an 'or_expr' by adding one more check."""

        return [('or_expr', or_expr.add_check(check))]

    @reducer('not', 'check')
    def _make_not_expr(self, _not, check):
        """Invert the result of another check."""

        return [('check', NotCheck(check))]
+
+
def _parse_text_rule(rule):
    """Parses policy to the tree.

    Translates a policy written in the policy language into a tree of
    Check objects.

    :param rule: The policy-language string to parse.
    :returns: The root Check of the parsed tree; unparsable rules fail
              closed as a FalseCheck.
    """

    # Empty rule means always accept
    if not rule:
        return TrueCheck()

    # Parse the token stream
    state = ParseState()
    for tok, value in _parse_tokenize(rule):
        state.shift(tok, value)

    try:
        return state.result
    except ValueError:
        # Couldn't parse the rule.  Lazy %-style logger arguments avoid
        # formatting the message unless the record is emitted.
        LOG.exception(_LE("Failed to understand rule %r"), rule)

    # Fail closed
    return FalseCheck()
+
+
def parse_rule(rule):
    """Parses a policy rule into a tree of Check objects.

    Strings use the policy language; any other value is treated as the
    legacy list-of-lists representation.
    """

    return (_parse_text_rule(rule)
            if isinstance(rule, six.string_types)
            else _parse_list_rule(rule))
+
+
def register(name, func=None):
    """Register a function or Check class as a policy check.

    :param name: Gives the name of the check type, e.g., 'rule',
                 'role', etc. If name is None, a default check type
                 will be registered.
    :param func: If given, provides the function or class to register.
                 If not given, returns a function taking one argument
                 to specify the function or class to register,
                 allowing use as a decorator.
    """

    def decorator(func):
        # Record the function or class under the given check kind and
        # hand it back, per the decorator protocol.
        _checks[name] = func
        return func

    # Direct-call form registers immediately; otherwise act as a decorator.
    if func:
        return decorator(func)
    return decorator
@register("rule")
-def _check_rule(brain, match_kind, match, target_dict, cred_dict):
- """Recursively checks credentials based on the brains rules."""
- try:
- new_match_list = brain.rules[match]
- except KeyError:
- if brain.default_rule and match != brain.default_rule:
- new_match_list = ('rule:%s' % brain.default_rule,)
- else:
- return False
+class RuleCheck(Check):
+ def __call__(self, target, creds, enforcer):
+ """Recursively checks credentials based on the defined rules."""
- return brain.check(new_match_list, target_dict, cred_dict)
+ try:
+ return enforcer.rules[self.match](target, creds, enforcer)
+ except KeyError:
+ # We don't have any matching rule; fail closed
+ return False
@register("role")
-def _check_role(brain, match_kind, match, target_dict, cred_dict):
- """Check that there is a matching role in the cred dict."""
- return match.lower() in [x.lower() for x in cred_dict['roles']]
+class RoleCheck(Check):
+ def __call__(self, target, creds, enforcer):
+ """Check that there is a matching role in the cred dict."""
+
+ return self.match.lower() in [x.lower() for x in creds['roles']]
@register('http')
-def _check_http(brain, match_kind, match, target_dict, cred_dict):
- """Check http: rules by calling to a remote server.
+class HttpCheck(Check):
+ def __call__(self, target, creds, enforcer):
+ """Check http: rules by calling to a remote server.
- This example implementation simply verifies that the response is
- exactly 'True'. A custom brain using response codes could easily
- be implemented.
+ This example implementation simply verifies that the response
+ is exactly 'True'.
+ """
- """
- url = 'http:' + (match % target_dict)
- data = {'target': jsonutils.dumps(target_dict),
- 'credentials': jsonutils.dumps(cred_dict)}
- post_data = urllib.urlencode(data)
- f = urllib2.urlopen(url, post_data)
- return f.read() == "True"
+ url = ('http:' + self.match) % target
+ data = {'target': jsonutils.dumps(target),
+ 'credentials': jsonutils.dumps(creds)}
+ post_data = urlparse.urlencode(data)
+ f = urlrequest.urlopen(url, post_data)
+ return f.read() == "True"
@register(None)
-def _check_generic(brain, match_kind, match, target_dict, cred_dict):
- """Check an individual match.
+class GenericCheck(Check):
+ def __call__(self, target, creds, enforcer):
+ """Check an individual match.
- Matches look like:
+ Matches look like:
- tenant:%(tenant_id)s
- role:compute:admin
+ tenant:%(tenant_id)s
+ role:compute:admin
+ True:%(user.enabled)s
+ 'Member':%(role.name)s
+ """
- """
+ # TODO(termie): do dict inspection via dot syntax
+ try:
+ match = self.match % target
+ except KeyError:
+                # If the key is not present in the target,
+                # fail closed and return False.
+ return False
- # TODO(termie): do dict inspection via dot syntax
- match = match % target_dict
- if match_kind in cred_dict:
- return match == unicode(cred_dict[match_kind])
- return False
+ try:
+ # Try to interpret self.kind as a literal
+ leftval = ast.literal_eval(self.kind)
+ except ValueError:
+ try:
+ leftval = creds[self.kind]
+ except KeyError:
+ return False
+ return match == six.text_type(leftval)
from oslo.config import cfg
from cinder import exception
-from cinder.i18n import _
from cinder.openstack.common import policy
-from cinder import utils
-
-
-policy_opts = [
- cfg.StrOpt('policy_file',
- default='policy.json',
- help=_('JSON file representing policy')),
- cfg.StrOpt('policy_default_rule',
- default='default',
- help=_('Rule checked when requested rule is not found')), ]
CONF = cfg.CONF
-CONF.register_opts(policy_opts)
-
-_POLICY_PATH = None
-_POLICY_CACHE = {}
-
-def reset():
- global _POLICY_PATH
- global _POLICY_CACHE
- _POLICY_PATH = None
- _POLICY_CACHE = {}
- policy.reset()
+_ENFORCER = None
def init():
- global _POLICY_PATH
- global _POLICY_CACHE
- if not _POLICY_PATH:
- _POLICY_PATH = utils.find_config(CONF.policy_file)
- utils.read_cached_file(_POLICY_PATH, _POLICY_CACHE,
- reload_func=_set_brain)
-
-
-def _set_brain(data):
- default_rule = CONF.policy_default_rule
- policy.set_brain(policy.Brain.load_json(data, default_rule))
+ global _ENFORCER
+ if not _ENFORCER:
+ _ENFORCER = policy.Enforcer()
def enforce_action(context, action):
applied to the given action using the policy enforcement api.
"""
- target = {
- 'project_id': context.project_id,
- 'user_id': context.user_id,
- }
- enforce(context, action, target)
+ return enforce(context, action, {'project_id': context.project_id,
+ 'user_id': context.user_id})
def enforce(context, action, target):
for object creation this should be a dictionary representing the
location of the object e.g. ``{'project_id': context.project_id}``
- :raises cinder.exception.PolicyNotAuthorized: if verification fails.
+ :raises PolicyNotAuthorized: if verification fails.
"""
init()
- match_list = ('rule:%s' % action,)
- credentials = context.to_dict()
-
- policy.enforce(match_list, target, credentials,
- exception.PolicyNotAuthorized, action=action)
+ return _ENFORCER.enforce(action, target, context.to_dict(),
+ do_raise=True,
+ exc=exception.PolicyNotAuthorized,
+ action=action)
def check_is_admin(roles):
"""
init()
- action = 'context_is_admin'
- match_list = ('rule:%s' % action,)
# include project_id on target to avoid KeyError if context_is_admin
# policy definition is missing, and default admin_or_owner rule
# attempts to apply. Since our credentials dict does not include a
target = {'project_id': ''}
credentials = {'roles': roles}
- return policy.enforce(match_list, target, credentials)
+ return _ENFORCER.enforce('context_is_admin', target, credentials)
CONF.set_override('fatal_exception_format_errors', True)
# This will be cleaned up by the NestedTempfile fixture
CONF.set_override('lock_path', tempfile.mkdtemp())
+ CONF.set_override('policy_file',
+ os.path.join(
+ os.path.abspath(
+ os.path.join(
+ os.path.dirname(__file__),
+ '..',
+ )
+ ),
+ 'cinder/tests/policy.json'))
def _common_cleanup(self):
"""Runs after each test method to tear down test environment."""
{
- "context_is_admin": [["role:admin"]],
- "admin_api": [["is_admin:True"]],
- "admin_or_owner": [["is_admin:True"], ["project_id:%(project_id)s"]],
+ "context_is_admin": "role:admin",
+ "admin_api": "is_admin:True",
+ "admin_or_owner": "is_admin:True or project_id:%(project_id)s",
- "volume:create": [],
- "volume:get": [["rule:admin_or_owner"]],
- "volume:get_all": [],
- "volume:get_volume_metadata": [],
- "volume:delete_volume_metadata": [],
- "volume:update_volume_metadata": [],
- "volume:get_volume_admin_metadata": [["rule:admin_api"]],
- "volume:delete_volume_admin_metadata": [["rule:admin_api"]],
- "volume:update_volume_admin_metadata": [["rule:admin_api"]],
- "volume:delete": [],
- "volume:update": [],
- "volume:attach": [],
- "volume:detach": [],
- "volume:reserve_volume": [],
- "volume:unreserve_volume": [],
- "volume:begin_detaching": [],
- "volume:roll_detaching": [],
- "volume:initialize_connection": [],
- "volume:terminate_connection": [],
- "volume:create_snapshot": [],
- "volume:delete_snapshot": [],
- "volume:get_snapshot": [],
- "volume:get_all_snapshots": [],
- "volume:update_snapshot": [],
- "volume:extend": [],
- "volume:migrate_volume": [["rule:admin_api"]],
- "volume:migrate_volume_completion": [["rule:admin_api"]],
- "volume:update_readonly_flag": [],
- "volume:retype": [],
- "volume:copy_volume_to_image": [],
+ "volume:create": "",
+ "volume:get": "rule:admin_or_owner",
+ "volume:get_all": "",
+ "volume:get_volume_metadata": "",
+ "volume:delete_volume_metadata": "",
+ "volume:update_volume_metadata": "",
+ "volume:get_volume_admin_metadata": "rule:admin_api",
+ "volume:delete_volume_admin_metadata": "rule:admin_api",
+ "volume:update_volume_admin_metadata": "rule:admin_api",
+ "volume:delete": "",
+ "volume:update": "",
+ "volume:attach": "",
+ "volume:detach": "",
+ "volume:reserve_volume": "",
+ "volume:unreserve_volume": "",
+ "volume:begin_detaching": "",
+ "volume:roll_detaching": "",
+ "volume:initialize_connection": "",
+ "volume:terminate_connection": "",
+ "volume:create_snapshot": "",
+ "volume:delete_snapshot": "",
+ "volume:get_snapshot": "",
+ "volume:get_all_snapshots": "",
+ "volume:update_snapshot": "",
+ "volume:extend": "",
+ "volume:migrate_volume": "rule:admin_api",
+ "volume:migrate_volume_completion": "rule:admin_api",
+ "volume:update_readonly_flag": "",
+ "volume:retype": "",
+ "volume:copy_volume_to_image": "",
- "volume_extension:volume_admin_actions:reset_status": [["rule:admin_api"]],
- "volume_extension:snapshot_admin_actions:reset_status": [["rule:admin_api"]],
- "volume_extension:volume_admin_actions:force_delete": [["rule:admin_api"]],
- "volume_extension:snapshot_admin_actions:force_delete": [["rule:admin_api"]],
- "volume_extension:volume_admin_actions:force_detach": [["rule:admin_api"]],
- "volume_extension:volume_admin_actions:migrate_volume": [["rule:admin_api"]],
- "volume_extension:volume_admin_actions:migrate_volume_completion": [["rule:admin_api"]],
- "volume_extension:volume_actions:upload_image": [],
- "volume_extension:types_manage": [],
- "volume_extension:types_extra_specs": [],
- "volume_extension:volume_type_encryption": [["rule:admin_api"]],
- "volume_extension:volume_encryption_metadata": [["rule:admin_or_owner"]],
- "volume_extension:qos_specs_manage": [],
- "volume_extension:extended_snapshot_attributes": [],
- "volume_extension:volume_image_metadata": [],
- "volume_extension:volume_host_attribute": [["rule:admin_api"]],
- "volume_extension:volume_tenant_attribute": [["rule:admin_api"]],
- "volume_extension:volume_mig_status_attribute": [["rule:admin_api"]],
- "volume_extension:hosts": [["rule:admin_api"]],
- "volume_extension:quotas:show": [],
- "volume_extension:quotas:update": [],
- "volume_extension:quotas:delete": [],
- "volume_extension:quota_classes": [],
- "volume_extension:volume_manage": [["rule:admin_api"]],
- "volume_extension:volume_unmanage": [["rule:admin_api"]],
+ "volume_extension:volume_admin_actions:reset_status": "rule:admin_api",
+ "volume_extension:snapshot_admin_actions:reset_status": "rule:admin_api",
+ "volume_extension:volume_admin_actions:force_delete": "rule:admin_api",
+ "volume_extension:snapshot_admin_actions:force_delete": "rule:admin_api",
+ "volume_extension:volume_admin_actions:force_detach": "rule:admin_api",
+ "volume_extension:volume_admin_actions:migrate_volume": "rule:admin_api",
+ "volume_extension:volume_admin_actions:migrate_volume_completion": "rule:admin_api",
+ "volume_extension:volume_actions:upload_image": "",
+ "volume_extension:types_manage": "",
+ "volume_extension:types_extra_specs": "",
+ "volume_extension:volume_type_encryption": "rule:admin_api",
+ "volume_extension:volume_encryption_metadata": "rule:admin_or_owner",
+ "volume_extension:qos_specs_manage": "",
+ "volume_extension:extended_snapshot_attributes": "",
+ "volume_extension:volume_image_metadata": "",
+ "volume_extension:volume_host_attribute": "rule:admin_api",
+ "volume_extension:volume_tenant_attribute": "rule:admin_api",
+ "volume_extension:volume_mig_status_attribute": "rule:admin_api",
+ "volume_extension:hosts": "rule:admin_api",
+ "volume_extension:quotas:show": "",
+ "volume_extension:quotas:update": "",
+ "volume_extension:quotas:delete": "",
+ "volume_extension:quota_classes": "",
+ "volume_extension:volume_manage": "rule:admin_api",
+ "volume_extension:volume_unmanage": "rule:admin_api",
- "limits_extension:used_limits": [],
+ "limits_extension:used_limits": "",
- "snapshot_extension:snapshot_actions:update_snapshot_status": [],
+ "snapshot_extension:snapshot_actions:update_snapshot_status": "",
- "volume:create_transfer": [],
- "volume:accept_transfer": [],
- "volume:delete_transfer": [],
- "volume:get_all_transfers": [],
+ "volume:create_transfer": "",
+ "volume:accept_transfer": "",
+ "volume:delete_transfer": "",
+ "volume:get_all_transfers": "",
- "backup:create" : [],
- "backup:delete": [],
- "backup:get": [],
- "backup:get_all": [],
- "backup:restore": [],
- "backup:backup-import": [["rule:admin_api"]],
- "backup:backup-export": [["rule:admin_api"]],
+    "backup:create": "",
+ "backup:delete": "",
+ "backup:get": "",
+ "backup:get_all": "",
+ "backup:restore": "",
+ "backup:backup-import": "rule:admin_api",
+ "backup:backup-export": "rule:admin_api",
- "volume_extension:replication:promote": [["rule:admin_api"]],
- "volume_extension:replication:reenable": [["rule:admin_api"]],
+ "volume_extension:replication:promote": "rule:admin_api",
+ "volume_extension:replication:reenable": "rule:admin_api",
- "consistencygroup:create" : [],
- "consistencygroup:delete": [],
- "consistencygroup:get": [],
- "consistencygroup:get_all": [],
+    "consistencygroup:create": "",
+ "consistencygroup:delete": "",
+ "consistencygroup:get": "",
+ "consistencygroup:get_all": "",
- "consistencygroup:create_cgsnapshot" : [],
- "consistencygroup:delete_cgsnapshot": [],
- "consistencygroup:get_cgsnapshot": [],
- "consistencygroup:get_all_cgsnapshots": []
+    "consistencygroup:create_cgsnapshot": "",
+ "consistencygroup:delete_cgsnapshot": "",
+ "consistencygroup:get_cgsnapshot": "",
+ "consistencygroup:get_all_cgsnapshots": ""
}
from cinder import context
from cinder import exception
from cinder.image import image_utils
+from cinder.openstack.common import fileutils
from cinder.openstack.common import processutils
from cinder.openstack.common import units
from cinder import test
def test_file_unlinked(self):
mox = self.mox
mox.StubOutWithMock(image_utils, 'create_temporary_file')
- mox.StubOutWithMock(image_utils.os, 'unlink')
+ mox.StubOutWithMock(fileutils, 'delete_if_exists')
image_utils.create_temporary_file().AndReturn('somefile')
- image_utils.os.unlink('somefile')
+ fileutils.delete_if_exists('somefile')
mox.ReplayAll()
def test_file_unlinked_on_error(self):
mox = self.mox
mox.StubOutWithMock(image_utils, 'create_temporary_file')
- mox.StubOutWithMock(image_utils.os, 'unlink')
+ mox.StubOutWithMock(fileutils, 'delete_if_exists')
image_utils.create_temporary_file().AndReturn('somefile')
- image_utils.os.unlink('somefile')
+ fileutils.delete_if_exists('somefile')
mox.ReplayAll()
mox.StubOutWithMock(image_utils, 'fix_vhd_chain')
mox.StubOutWithMock(image_utils, 'coalesce_chain')
mox.StubOutWithMock(image_utils.os, 'unlink')
+ mox.StubOutWithMock(fileutils, 'delete_if_exists')
mox.StubOutWithMock(image_utils, 'rename_file')
image_utils.temporary_dir().AndReturn(fake_context('somedir'))
image_utils.fix_vhd_chain(['somedir/0.vhd', 'somedir/1.vhd'])
image_utils.coalesce_chain(
['somedir/0.vhd', 'somedir/1.vhd']).AndReturn('somedir/1.vhd')
- image_utils.os.unlink('image')
+ fileutils.delete_if_exists('image')
image_utils.rename_file('somedir/1.vhd', 'image')
mox.ReplayAll()
+++ /dev/null
-
-# Copyright 2011 Piston Cloud Computing, Inc.
-# All Rights Reserved.
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""Test of Policy Engine For Cinder."""
-
-import os.path
-import urllib2
-
-from oslo.config import cfg
-import six
-
-from cinder import context
-from cinder import exception
-import cinder.openstack.common.policy
-from cinder.openstack.common import policy as common_policy
-from cinder import policy
-from cinder import test
-from cinder import utils
-
-
-CONF = cfg.CONF
-
-
-class PolicyFileTestCase(test.TestCase):
- def setUp(self):
- super(PolicyFileTestCase, self).setUp()
- # since is_admin is defined by policy, create context before reset
- self.context = context.RequestContext('fake', 'fake')
- policy.reset()
- self.target = {}
- self.addCleanup(policy.reset)
-
- def test_modified_policy_reloads(self):
- with utils.tempdir() as tmpdir:
- tmpfilename = os.path.join(tmpdir, 'policy')
- self.flags(policy_file=tmpfilename)
-
- action = "example:test"
- with open(tmpfilename, "w") as policyfile:
- policyfile.write("""{"example:test": []}""")
- policy.enforce(self.context, action, self.target)
- with open(tmpfilename, "w") as policyfile:
- policyfile.write("""{"example:test": ["false:false"]}""")
- # NOTE(vish): reset stored policy cache so we don't have to
- # sleep(1)
- policy._POLICY_CACHE = {}
- self.assertRaises(exception.PolicyNotAuthorized, policy.enforce,
- self.context, action, self.target)
-
-
-class PolicyTestCase(test.TestCase):
- def setUp(self):
- super(PolicyTestCase, self).setUp()
- policy.reset()
- # NOTE(vish): preload rules to circumvent reloading from file
- policy.init()
- rules = {
- "true": [],
- "example:allowed": [],
- "example:denied": [["false:false"]],
- "example:get_http": [["http:http://www.example.com"]],
- "example:my_file": [["role:compute_admin"],
- ["project_id:%(project_id)s"]],
- "example:early_and_fail": [["false:false", "rule:true"]],
- "example:early_or_success": [["rule:true"], ["false:false"]],
- "example:lowercase_admin": [["role:admin"], ["role:sysadmin"]],
- "example:uppercase_admin": [["role:ADMIN"], ["role:sysadmin"]],
- }
- # NOTE(vish): then overload underlying brain
- common_policy.set_brain(common_policy.Brain(rules))
- self.context = context.RequestContext('fake', 'fake', roles=['member'])
- self.target = {}
- self.addCleanup(policy.reset)
-
- def test_enforce_nonexistent_action_throws(self):
- action = "example:noexist"
- self.assertRaises(exception.PolicyNotAuthorized, policy.enforce,
- self.context, action, self.target)
-
- def test_enforce_bad_action_throws(self):
- action = "example:denied"
- self.assertRaises(exception.PolicyNotAuthorized, policy.enforce,
- self.context, action, self.target)
-
- def test_enforce_good_action(self):
- action = "example:allowed"
- policy.enforce(self.context, action, self.target)
-
- def test_enforce_http_true(self):
-
- def fakeurlopen(url, post_data):
- return six.StringIO("True")
- self.stubs.Set(urllib2, 'urlopen', fakeurlopen)
- action = "example:get_http"
- target = {}
- result = policy.enforce(self.context, action, target)
- self.assertIsNone(result)
-
- def test_enforce_http_false(self):
-
- def fakeurlopen(url, post_data):
- return six.StringIO("False")
- self.stubs.Set(urllib2, 'urlopen', fakeurlopen)
- action = "example:get_http"
- target = {}
- self.assertRaises(exception.PolicyNotAuthorized, policy.enforce,
- self.context, action, target)
-
- def test_templatized_enforcement(self):
- target_mine = {'project_id': 'fake'}
- target_not_mine = {'project_id': 'another'}
- action = "example:my_file"
- policy.enforce(self.context, action, target_mine)
- self.assertRaises(exception.PolicyNotAuthorized, policy.enforce,
- self.context, action, target_not_mine)
-
- def test_early_AND_enforcement(self):
- action = "example:early_and_fail"
- self.assertRaises(exception.PolicyNotAuthorized, policy.enforce,
- self.context, action, self.target)
-
- def test_early_OR_enforcement(self):
- action = "example:early_or_success"
- policy.enforce(self.context, action, self.target)
-
- def test_ignore_case_role_check(self):
- lowercase_action = "example:lowercase_admin"
- uppercase_action = "example:uppercase_admin"
- # NOTE(dprince) we mix case in the Admin role here to ensure
- # case is ignored
- admin_context = context.RequestContext('admin',
- 'fake',
- roles=['AdMiN'])
- policy.enforce(admin_context, lowercase_action, self.target)
- policy.enforce(admin_context, uppercase_action, self.target)
-
-
-class DefaultPolicyTestCase(test.TestCase):
-
- def setUp(self):
- super(DefaultPolicyTestCase, self).setUp()
- policy.reset()
- policy.init()
-
- self.rules = {
- "default": [],
- "example:exist": [["false:false"]]
- }
-
- self._set_brain('default')
-
- self.context = context.RequestContext('fake', 'fake')
-
- self.addCleanup(policy.reset)
-
- def _set_brain(self, default_rule):
- brain = cinder.openstack.common.policy.Brain(self.rules,
- default_rule)
- cinder.openstack.common.policy.set_brain(brain)
-
- def test_policy_called(self):
- self.assertRaises(exception.PolicyNotAuthorized, policy.enforce,
- self.context, "example:exist", {})
-
- def test_not_found_policy_calls_default(self):
- policy.enforce(self.context, "example:noexist", {})
-
- def test_default_not_found(self):
- self._set_brain("default_noexist")
- self.assertRaises(exception.PolicyNotAuthorized, policy.enforce,
- self.context, "example:noexist", {})
-
-
-class ContextIsAdminPolicyTestCase(test.TestCase):
-
- def setUp(self):
- super(ContextIsAdminPolicyTestCase, self).setUp()
- policy.reset()
- policy.init()
-
- def test_default_admin_role_is_admin(self):
- ctx = context.RequestContext('fake', 'fake', roles=['johnny-admin'])
- self.assertFalse(ctx.is_admin)
- ctx = context.RequestContext('fake', 'fake', roles=['admin'])
- self.assertTrue(ctx.is_admin)
-
- def test_custom_admin_role_is_admin(self):
- # define explicit rules for context_is_admin
- rules = {
- 'context_is_admin': [["role:administrator"], ["role:johnny-admin"]]
- }
- brain = common_policy.Brain(rules, CONF.policy_default_rule)
- common_policy.set_brain(brain)
- ctx = context.RequestContext('fake', 'fake', roles=['johnny-admin'])
- self.assertTrue(ctx.is_admin)
- ctx = context.RequestContext('fake', 'fake', roles=['administrator'])
- self.assertTrue(ctx.is_admin)
- # default rule no longer applies
- ctx = context.RequestContext('fake', 'fake', roles=['admin'])
- self.assertFalse(ctx.is_admin)
-
- def test_context_is_admin_undefined(self):
- rules = {
- "admin_or_owner": [["role:admin"], ["project_id:%(project_id)s"]],
- "default": [["rule:admin_or_owner"]],
- }
- brain = common_policy.Brain(rules, CONF.policy_default_rule)
- common_policy.set_brain(brain)
- ctx = context.RequestContext('fake', 'fake')
- self.assertFalse(ctx.is_admin)
- ctx = context.RequestContext('fake', 'fake', roles=['admin'])
- self.assertTrue(ctx.is_admin)
CONF.glance_port)
self.assertEqual(generated_url, actual_url)
- @mock.patch('__builtin__.open')
- @mock.patch('os.path.getmtime', return_value=1)
- def test_read_cached_file(self, mock_mtime, mock_open):
- fake_file = "/this/is/a/fake"
- cache_data = {"data": 1123, "mtime": 2}
- mock_open.return_value = _get_local_mock_open()
- data = utils.read_cached_file(fake_file, cache_data)
- self.assertEqual(cache_data["data"], data)
- mock_open.assert_called_once_with(fake_file)
-
- @mock.patch('__builtin__.open')
- @mock.patch('os.path.getmtime', return_value=1)
- def test_read_modified_cached_file(self, mock_mtime, mock_open):
- fake_data = 'lorem ipsum'
- fake_file = "/this/is/a/fake"
- mock_open.return_value = _get_local_mock_open(fake_data)
- cache_data = {"data": 'original data', "mtime": 2}
- mock_reload = mock.Mock()
- data = utils.read_cached_file(fake_file,
- cache_data,
- reload_func=mock_reload)
- self.assertEqual(data, fake_data)
- mock_reload.assert_called_once_with(fake_data)
- mock_open.assert_called_once_with(fake_file)
-
def test_read_file_as_root(self):
def fake_execute(*args, **kwargs):
if args[1] == 'bad':
def setUp(self):
super(VolumePolicyTestCase, self).setUp()
- cinder.policy.reset()
cinder.policy.init()
self.context = context.get_admin_context()
self.stubs.Set(brick_lvm.LVM, '_vg_exists', lambda x: True)
- self.addCleanup(cinder.policy.reset)
-
- def _set_rules(self, rules):
- cinder.common.policy.set_brain(cinder.common.policy.Brain(rules))
def test_check_policy(self):
self.mox.StubOutWithMock(cinder.policy, 'enforce')
mox.IgnoreArg())
windows_utils.WindowsUtils.change_disk_status(volume['name'],
mox.IsA(bool))
- os.unlink(mox.IsA(str))
vhdutils.VHDUtils.convert_vhd(fake_temp_path,
fake_volume_path,
constants.VHD_TYPE_FIXED)
return hostname
-def read_cached_file(filename, cache_info, reload_func=None):
- """Read from a file if it has been modified.
-
- :param cache_info: dictionary to hold opaque cache.
- :param reload_func: optional function to be called with data when
- file is reloaded due to a modification.
-
- :returns: data from file
-
- """
- mtime = os.path.getmtime(filename)
- if not cache_info or mtime != cache_info.get('mtime'):
- with open(filename) as fap:
- cache_info['data'] = fap.read()
- cache_info['mtime'] = mtime
- if reload_func:
- reload_func(cache_info['data'])
- return cache_info['data']
-
-
def hash_file(file_like_object):
"""Generate a hash for the contents of a file."""
checksum = hashlib.sha1()
#fatal_exception_format_errors=false
-#
-# Options defined in cinder.policy
-#
-
-# JSON file representing policy (string value)
-#policy_file=policy.json
-
-# Rule checked when requested rule is not found (string value)
-#policy_default_rule=default
-
-
#
# Options defined in cinder.quota
#
#run_external_periodic_tasks=true
+#
+# Options defined in cinder.openstack.common.policy
+#
+
+# The JSON file that defines policies. (string value)
+#policy_file=policy.json
+
+# Default rule. Enforced when a requested rule is not found.
+# (string value)
+#policy_default_rule=default
+
+
#
# Options defined in cinder.scheduler.driver
#
{
- "context_is_admin": [["role:admin"]],
- "admin_or_owner": [["is_admin:True"], ["project_id:%(project_id)s"]],
- "default": [["rule:admin_or_owner"]],
+ "context_is_admin": "role:admin",
+ "admin_or_owner": "is_admin:True or project_id:%(project_id)s",
+ "default": "rule:admin_or_owner",
- "admin_api": [["is_admin:True"]],
+ "admin_api": "is_admin:True",
- "volume:create": [],
- "volume:get_all": [],
- "volume:get_volume_metadata": [],
- "volume:get_volume_admin_metadata": [["rule:admin_api"]],
- "volume:delete_volume_admin_metadata": [["rule:admin_api"]],
- "volume:update_volume_admin_metadata": [["rule:admin_api"]],
- "volume:get_snapshot": [],
- "volume:get_all_snapshots": [],
- "volume:extend": [],
- "volume:update_readonly_flag": [],
- "volume:retype": [],
+ "volume:create": "",
+ "volume:get_all": "",
+ "volume:get_volume_metadata": "",
+ "volume:get_volume_admin_metadata": "rule:admin_api",
+ "volume:delete_volume_admin_metadata": "rule:admin_api",
+ "volume:update_volume_admin_metadata": "rule:admin_api",
+ "volume:get_snapshot": "",
+ "volume:get_all_snapshots": "",
+ "volume:extend": "",
+ "volume:update_readonly_flag": "",
+ "volume:retype": "",
- "volume_extension:types_manage": [["rule:admin_api"]],
- "volume_extension:types_extra_specs": [["rule:admin_api"]],
- "volume_extension:volume_type_encryption": [["rule:admin_api"]],
- "volume_extension:volume_encryption_metadata": [["rule:admin_or_owner"]],
- "volume_extension:extended_snapshot_attributes": [],
- "volume_extension:volume_image_metadata": [],
+ "volume_extension:types_manage": "rule:admin_api",
+ "volume_extension:types_extra_specs": "rule:admin_api",
+ "volume_extension:volume_type_encryption": "rule:admin_api",
+ "volume_extension:volume_encryption_metadata": "rule:admin_or_owner",
+ "volume_extension:extended_snapshot_attributes": "",
+ "volume_extension:volume_image_metadata": "",
- "volume_extension:quotas:show": [],
- "volume_extension:quotas:update": [["rule:admin_api"]],
- "volume_extension:quota_classes": [],
+ "volume_extension:quotas:show": "",
+ "volume_extension:quotas:update": "rule:admin_api",
+ "volume_extension:quota_classes": "",
- "volume_extension:volume_admin_actions:reset_status": [["rule:admin_api"]],
- "volume_extension:snapshot_admin_actions:reset_status": [["rule:admin_api"]],
- "volume_extension:volume_admin_actions:force_delete": [["rule:admin_api"]],
- "volume_extension:volume_admin_actions:force_detach": [["rule:admin_api"]],
- "volume_extension:snapshot_admin_actions:force_delete": [["rule:admin_api"]],
- "volume_extension:volume_admin_actions:migrate_volume": [["rule:admin_api"]],
- "volume_extension:volume_admin_actions:migrate_volume_completion": [["rule:admin_api"]],
+ "volume_extension:volume_admin_actions:reset_status": "rule:admin_api",
+ "volume_extension:snapshot_admin_actions:reset_status": "rule:admin_api",
+ "volume_extension:volume_admin_actions:force_delete": "rule:admin_api",
+ "volume_extension:volume_admin_actions:force_detach": "rule:admin_api",
+ "volume_extension:snapshot_admin_actions:force_delete": "rule:admin_api",
+ "volume_extension:volume_admin_actions:migrate_volume": "rule:admin_api",
+ "volume_extension:volume_admin_actions:migrate_volume_completion": "rule:admin_api",
- "volume_extension:volume_host_attribute": [["rule:admin_api"]],
- "volume_extension:volume_tenant_attribute": [["rule:admin_or_owner"]],
- "volume_extension:volume_mig_status_attribute": [["rule:admin_api"]],
- "volume_extension:hosts": [["rule:admin_api"]],
- "volume_extension:services": [["rule:admin_api"]],
+ "volume_extension:volume_host_attribute": "rule:admin_api",
+ "volume_extension:volume_tenant_attribute": "rule:admin_or_owner",
+ "volume_extension:volume_mig_status_attribute": "rule:admin_api",
+ "volume_extension:hosts": "rule:admin_api",
+ "volume_extension:services": "rule:admin_api",
- "volume_extension:volume_manage": [["rule:admin_api"]],
- "volume_extension:volume_unmanage": [["rule:admin_api"]],
+ "volume_extension:volume_manage": "rule:admin_api",
+ "volume_extension:volume_unmanage": "rule:admin_api",
- "volume:services": [["rule:admin_api"]],
+ "volume:services": "rule:admin_api",
- "volume:create_transfer": [],
- "volume:accept_transfer": [],
- "volume:delete_transfer": [],
- "volume:get_all_transfers": [],
+ "volume:create_transfer": "",
+ "volume:accept_transfer": "",
+ "volume:delete_transfer": "",
+ "volume:get_all_transfers": "",
- "volume_extension:replication:promote": ["rule:admin_api"],
- "volume_extension:replication:reenable": ["rule:admin_api"],
+ "volume_extension:replication:promote": "rule:admin_api",
+ "volume_extension:replication:reenable": "rule:admin_api",
- "backup:create" : [],
- "backup:delete": [],
- "backup:get": [],
- "backup:get_all": [],
- "backup:restore": [],
- "backup:backup-import": [["rule:admin_api"]],
- "backup:backup-export": [["rule:admin_api"]],
+    "backup:create": "",
+ "backup:delete": "",
+ "backup:get": "",
+ "backup:get_all": "",
+ "backup:restore": "",
+ "backup:backup-import": "rule:admin_api",
+ "backup:backup-export": "rule:admin_api",
- "snapshot_extension:snapshot_actions:update_snapshot_status": [],
+ "snapshot_extension:snapshot_actions:update_snapshot_status": "",
- "consistencygroup:create" : [["group:nobody"]],
- "consistencygroup:delete": [["group:nobody"]],
- "consistencygroup:get": [["group:nobody"]],
- "consistencygroup:get_all": [["group:nobody"]],
+    "consistencygroup:create": "group:nobody",
+ "consistencygroup:delete": "group:nobody",
+ "consistencygroup:get": "group:nobody",
+ "consistencygroup:get_all": "group:nobody",
- "consistencygroup:create_cgsnapshot" : [],
- "consistencygroup:delete_cgsnapshot": [],
- "consistencygroup:get_cgsnapshot": [],
- "consistencygroup:get_all_cgsnapshots": []
+    "consistencygroup:create_cgsnapshot": "",
+ "consistencygroup:delete_cgsnapshot": "",
+ "consistencygroup:get_cgsnapshot": "",
+ "consistencygroup:get_all_cgsnapshots": ""
}