Sync oslo-incubator code to latest

Currently, gettextutils has graduated from oslo-incubator, so we
need to port to the new library oslo.i18n; this sync is the first
step toward doing that.

Sync to latest:
    commit 62339865b57eeaa3212ce61427feb19a8c0b0c0e
    Add more to the release notes
    Change-Id: Ifad658f532b5d549144008170bfe2e4181038ddf

Change-Id: I3a2432ba2960c74db614e017e29bd95c560176f6
Partial-Bug: #1389546
This commit is contained in:
ZhiQiang Fan 2014-12-04 02:37:40 +08:00
parent 4bbdc368ff
commit 1707533e2f
7 changed files with 96 additions and 52 deletions

View File

@ -16,25 +16,30 @@ See http://docs.openstack.org/developer/oslo.i18n/usage.html
"""
import oslo.i18n
try:
import oslo.i18n
# NOTE(dhellmann): This reference to o-s-l-o will be replaced by the
# application name when this module is synced into the separate
# repository. It is OK to have more than one translation function
# using the same domain, since there will still only be one message
# catalog.
_translators = oslo.i18n.TranslatorFactory(domain='ceilometer')
# NOTE(dhellmann): This reference to o-s-l-o will be replaced by the
# application name when this module is synced into the separate
# repository. It is OK to have more than one translation function
# using the same domain, since there will still only be one message
# catalog.
_translators = oslo.i18n.TranslatorFactory(domain='ceilometer')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
except ImportError:
# NOTE(dims): Support for cases where a project wants to use
# code from ceilometer-incubator, but is not ready to be internationalized
# (like tempest)
_ = _LI = _LW = _LE = _LC = lambda x: x

View File

@ -117,10 +117,6 @@ def get_context_from_function_and_args(function, args, kwargs):
def is_user_context(context):
"""Indicates if the request context is a normal user."""
if not context:
if not context or context.is_admin:
return False
if context.is_admin:
return False
if not context.user_id or not context.project_id:
return False
return True
return context.user_id and context.project_id

View File

@ -16,6 +16,7 @@
from __future__ import print_function
import copy
import errno
import gc
import os
@ -49,6 +50,12 @@ CONF.register_opts(eventlet_backdoor_opts)
LOG = logging.getLogger(__name__)
def list_opts():
"""Entry point for oslo.config-generator.
"""
return [(None, copy.deepcopy(eventlet_backdoor_opts))]
class EventletBackdoorConfigValueError(Exception):
def __init__(self, port_range, help_msg, ex):
msg = ('Invalid backdoor_port configuration %(range)s: %(ex)s. '

View File

@ -15,13 +15,12 @@
import contextlib
import errno
import logging
import os
import tempfile
from oslo.utils import excutils
from ceilometer.openstack.common import log as logging
LOG = logging.getLogger(__name__)
_FILE_CACHE = {}

View File

@ -27,6 +27,7 @@ It also allows setting of formatting information through conf.
"""
import copy
import inspect
import itertools
import logging
@ -174,6 +175,16 @@ CONF.register_cli_opts(logging_cli_opts)
CONF.register_opts(generic_log_opts)
CONF.register_opts(log_opts)
def list_opts():
"""Entry point for oslo.config-generator."""
return [(None, copy.deepcopy(common_cli_opts)),
(None, copy.deepcopy(logging_cli_opts)),
(None, copy.deepcopy(generic_log_opts)),
(None, copy.deepcopy(log_opts)),
]
# our new audit level
# NOTE(jkoelker) Since we synthesized an audit level, make the logging
# module aware of it so it acts like other levels.
@ -498,14 +509,9 @@ def _setup_logging_from_conf(project, version):
log_root.addHandler(streamlog)
if CONF.publish_errors:
try:
handler = importutils.import_object(
"ceilometer.openstack.common.log_handler.PublishErrorsHandler",
logging.ERROR)
except ImportError:
handler = importutils.import_object(
"oslo.messaging.notify.log_handler.PublishErrorsHandler",
logging.ERROR)
handler = importutils.import_object(
"oslo.messaging.notify.log_handler.PublishErrorsHandler",
logging.ERROR)
log_root.addHandler(handler)
datefmt = CONF.log_date_format
@ -546,9 +552,11 @@ def _setup_logging_from_conf(project, version):
# TODO(bogdando) use the format provided by RFCSysLogHandler
# after existing syslog format deprecation in J
if CONF.use_syslog_rfc_format:
syslog = RFCSysLogHandler(facility=facility)
syslog = RFCSysLogHandler(address='/dev/log',
facility=facility)
else:
syslog = logging.handlers.SysLogHandler(facility=facility)
syslog = logging.handlers.SysLogHandler(address='/dev/log',
facility=facility)
log_root.addHandler(syslog)
except socket.error:
log_root.error('Unable to add syslog handler. Verify that syslog '

View File

@ -77,6 +77,7 @@ as it allows particular rules to be explicitly disabled.
import abc
import ast
import copy
import os
import re
@ -87,7 +88,7 @@ import six.moves.urllib.parse as urlparse
import six.moves.urllib.request as urlrequest
from ceilometer.openstack.common import fileutils
from ceilometer.openstack.common._i18n import _, _LE, _LW
from ceilometer.openstack.common._i18n import _, _LE, _LI
from ceilometer.openstack.common import log as logging
@ -101,8 +102,12 @@ policy_opts = [
'found.')),
cfg.MultiStrOpt('policy_dirs',
default=['policy.d'],
help=_('The directories of policy configuration files is '
'stored')),
help=_('Directories where policy configuration files are '
'stored. They can be relative to any directory '
'in the search path defined by the config_dir '
'option, or absolute paths. The file defined by '
'policy_file must exist for these directories to '
'be searched.')),
]
CONF = cfg.CONF
@ -113,6 +118,11 @@ LOG = logging.getLogger(__name__)
_checks = {}
def list_opts():
"""Entry point for oslo.config-generator."""
return [(None, copy.deepcopy(policy_opts))]
class PolicyNotAuthorized(Exception):
def __init__(self, rule):
@ -189,16 +199,19 @@ class Enforcer(object):
:param default_rule: Default rule to use, CONF.default_rule will
be used if none is specified.
:param use_conf: Whether to load rules from cache or config file.
:param overwrite: Whether to overwrite existing rules when reload rules
from config file.
"""
def __init__(self, policy_file=None, rules=None,
default_rule=None, use_conf=True):
self.rules = Rules(rules, default_rule)
default_rule=None, use_conf=True, overwrite=True):
self.default_rule = default_rule or CONF.policy_default_rule
self.rules = Rules(rules, self.default_rule)
self.policy_path = None
self.policy_file = policy_file or CONF.policy_file
self.use_conf = use_conf
self.overwrite = overwrite
def set_rules(self, rules, overwrite=True, use_conf=False):
"""Create a new Rules object based on the provided dict of rules.
@ -230,7 +243,7 @@ class Enforcer(object):
Policy file is cached and will be reloaded if modified.
:param force_reload: Whether to overwrite current rules.
:param force_reload: Whether to reload rules from config file.
"""
if force_reload:
@ -240,18 +253,20 @@ class Enforcer(object):
if not self.policy_path:
self.policy_path = self._get_policy_path(self.policy_file)
self._load_policy_file(self.policy_path, force_reload)
self._load_policy_file(self.policy_path, force_reload,
overwrite=self.overwrite)
for path in CONF.policy_dirs:
try:
path = self._get_policy_path(path)
except cfg.ConfigFilesNotFoundError:
LOG.warn(_LW("Can not find policy directories %s"), path)
LOG.info(_LI("Can not find policy directory: %s"), path)
continue
self._walk_through_policy_directory(path,
self._load_policy_file,
force_reload, False)
def _walk_through_policy_directory(self, path, func, *args):
@staticmethod
def _walk_through_policy_directory(path, func, *args):
# We do not iterate over sub-directories.
policy_files = next(os.walk(path))[2]
policy_files.sort()
@ -261,9 +276,9 @@ class Enforcer(object):
def _load_policy_file(self, path, force_reload, overwrite=True):
reloaded, data = fileutils.read_cached_file(
path, force_reload=force_reload)
if reloaded or not self.rules:
if reloaded or not self.rules or not overwrite:
rules = Rules.load_json(data, self.default_rule)
self.set_rules(rules, overwrite)
self.set_rules(rules, overwrite=overwrite, use_conf=True)
LOG.debug("Rules successfully reloaded")
def _get_policy_path(self, path):
@ -299,7 +314,7 @@ class Enforcer(object):
:param do_raise: Whether to raise an exception or not if check
fails.
:param exc: Class of the exception to raise if the check fails.
Any remaining arguments passed to check() (both
Any remaining arguments passed to enforce() (both
positional and keyword arguments) will be passed to
the exception class. If not specified, PolicyNotAuthorized
will be used.
@ -883,7 +898,17 @@ class HttpCheck(Check):
"""
url = ('http:' + self.match) % target
data = {'target': jsonutils.dumps(target),
# Convert instances of object() in target temporarily to
# empty dict to avoid circular reference detection
# errors in jsonutils.dumps().
temp_target = copy.deepcopy(target)
for key in target.keys():
element = target.get(key)
if type(element) is object:
temp_target[key] = {}
data = {'target': jsonutils.dumps(temp_target),
'credentials': jsonutils.dumps(creds)}
post_data = urlparse.urlencode(data)
f = urlrequest.urlopen(url, post_data)
@ -903,7 +928,6 @@ class GenericCheck(Check):
'Member':%(role.name)s
"""
# TODO(termie): do dict inspection via dot syntax
try:
match = self.match % target
except KeyError:
@ -916,7 +940,10 @@ class GenericCheck(Check):
leftval = ast.literal_eval(self.kind)
except ValueError:
try:
leftval = creds[self.kind]
kind_parts = self.kind.split('.')
leftval = creds
for kind_part in kind_parts:
leftval = leftval[kind_part]
except KeyError:
return False
return match == six.text_type(leftval)

View File

@ -96,6 +96,8 @@ class ThreadGroup(object):
continue
try:
x.stop()
except eventlet.greenlet.GreenletExit:
pass
except Exception as ex:
LOG.exception(ex)