From bad264dad9a77fd1cb25debf44f60a0cb65b3933 Mon Sep 17 00:00:00 2001 From: Flavio Percoco Date: Tue, 11 Mar 2014 10:39:46 +0100 Subject: [PATCH] Sync from oslo-incubator This patch is intended to bring in `config.generator`. It also syncs Marconi's modules to the latest oslo-incubator version for the sake of being up-to-date. Change-Id: I9cd3ccf0ce509349d561132025add42dec83ca0a --- marconi/openstack/common/__init__.py | 17 ++ marconi/openstack/common/config/__init__.py | 0 marconi/openstack/common/config/generator.py | 302 +++++++++++++++++++ marconi/openstack/common/excutils.py | 8 +- marconi/openstack/common/fileutils.py | 3 +- marconi/openstack/common/gettextutils.py | 39 +-- marconi/openstack/common/jsonutils.py | 10 +- marconi/openstack/common/lockutils.py | 145 ++++++--- marconi/openstack/common/log.py | 6 +- openstack-common.conf | 1 + requirements.txt | 1 + 11 files changed, 446 insertions(+), 86 deletions(-) create mode 100644 marconi/openstack/common/config/__init__.py create mode 100644 marconi/openstack/common/config/generator.py diff --git a/marconi/openstack/common/__init__.py b/marconi/openstack/common/__init__.py index e69de29bb..d1223eaf7 100644 --- a/marconi/openstack/common/__init__.py +++ b/marconi/openstack/common/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import six + + +six.add_move(six.MovedModule('mox', 'mox', 'mox3.mox')) diff --git a/marconi/openstack/common/config/__init__.py b/marconi/openstack/common/config/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/marconi/openstack/common/config/generator.py b/marconi/openstack/common/config/generator.py new file mode 100644 index 000000000..aa532a550 --- /dev/null +++ b/marconi/openstack/common/config/generator.py @@ -0,0 +1,302 @@ +# Copyright 2012 SINA Corporation +# Copyright 2014 Cisco Systems, Inc. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# + +"""Extracts OpenStack config option info from module(s).""" + +from __future__ import print_function + +import argparse +import imp +import os +import re +import socket +import sys +import textwrap + +from oslo.config import cfg +import six +import stevedore.named + +from marconi.openstack.common import gettextutils +from marconi.openstack.common import importutils + +gettextutils.install('marconi') + +STROPT = "StrOpt" +BOOLOPT = "BoolOpt" +INTOPT = "IntOpt" +FLOATOPT = "FloatOpt" +LISTOPT = "ListOpt" +DICTOPT = "DictOpt" +MULTISTROPT = "MultiStrOpt" + +OPT_TYPES = { + STROPT: 'string value', + BOOLOPT: 'boolean value', + INTOPT: 'integer value', + FLOATOPT: 'floating point value', + LISTOPT: 'list value', + DICTOPT: 'dict value', + MULTISTROPT: 'multi valued', +} + +OPTION_REGEX = re.compile(r"(%s)" % "|".join([STROPT, BOOLOPT, INTOPT, + FLOATOPT, LISTOPT, DICTOPT, + MULTISTROPT])) + +PY_EXT = ".py" +BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), + "../../../../")) +WORDWRAP_WIDTH = 60 + + +def generate(argv): + parser = argparse.ArgumentParser( + description='generate sample configuration file', + ) + parser.add_argument('-m', dest='modules', action='append') + parser.add_argument('-l', dest='libraries', action='append') + parser.add_argument('srcfiles', nargs='*') + parsed_args = parser.parse_args(argv) + + mods_by_pkg = dict() + for filepath in parsed_args.srcfiles: + pkg_name = filepath.split(os.sep)[1] + mod_str = '.'.join(['.'.join(filepath.split(os.sep)[:-1]), + os.path.basename(filepath).split('.')[0]]) + mods_by_pkg.setdefault(pkg_name, list()).append(mod_str) + # NOTE(lzyeval): place top level modules before packages + pkg_names = sorted(pkg for pkg in mods_by_pkg if pkg.endswith(PY_EXT)) + ext_names = sorted(pkg for pkg in mods_by_pkg if pkg not in pkg_names) + pkg_names.extend(ext_names) + + # opts_by_group is a mapping of group name to an options list + # The options list is a list of (module, options) tuples + opts_by_group = {'DEFAULT': []} + + if parsed_args.modules: + for module_name in parsed_args.modules: + module = _import_module(module_name) + if module: + for group, opts in _list_opts(module): + opts_by_group.setdefault(group, []).append((module_name, + opts)) + + # Look for entry points defined in libraries (or applications) for + # option discovery, and include their return values in the output. + # + # Each entry point should be a function returning an iterable + # of pairs with the group name (or None for the default group) + # and the list of Opt instances for that group. 
+ if parsed_args.libraries: + loader = stevedore.named.NamedExtensionManager( + 'oslo.config.opts', + names=list(set(parsed_args.libraries)), + invoke_on_load=False, + ) + for ext in loader: + for group, opts in ext.plugin(): + opt_list = opts_by_group.setdefault(group or 'DEFAULT', []) + opt_list.append((ext.name, opts)) + + for pkg_name in pkg_names: + mods = mods_by_pkg.get(pkg_name) + mods.sort() + for mod_str in mods: + if mod_str.endswith('.__init__'): + mod_str = mod_str[:mod_str.rfind(".")] + + mod_obj = _import_module(mod_str) + if not mod_obj: + raise RuntimeError("Unable to import module %s" % mod_str) + + for group, opts in _list_opts(mod_obj): + opts_by_group.setdefault(group, []).append((mod_str, opts)) + + print_group_opts('DEFAULT', opts_by_group.pop('DEFAULT', [])) + for group in sorted(opts_by_group.keys()): + print_group_opts(group, opts_by_group[group]) + + +def _import_module(mod_str): + try: + if mod_str.startswith('bin.'): + imp.load_source(mod_str[4:], os.path.join('bin', mod_str[4:])) + return sys.modules[mod_str[4:]] + else: + return importutils.import_module(mod_str) + except Exception as e: + sys.stderr.write("Error importing module %s: %s\n" % (mod_str, str(e))) + return None + + +def _is_in_group(opt, group): + "Check if opt is in group." + for value in group._opts.values(): + # NOTE(llu): Temporary workaround for bug #1262148, wait until + # newly released oslo.config support '==' operator. + if not(value['opt'] != opt): + return True + return False + + +def _guess_groups(opt, mod_obj): + # is it in the DEFAULT group? + if _is_in_group(opt, cfg.CONF): + return 'DEFAULT' + + # what other groups is it in? + for value in cfg.CONF.values(): + if isinstance(value, cfg.CONF.GroupAttr): + if _is_in_group(opt, value._group): + return value._group.name + + raise RuntimeError( + "Unable to find group for option %s, " + "maybe it's defined twice in the same group?" + % opt.name + ) + + +def _list_opts(obj): + def is_opt(o): + return (isinstance(o, cfg.Opt) and + not isinstance(o, cfg.SubCommandOpt)) + + opts = list() + for attr_str in dir(obj): + attr_obj = getattr(obj, attr_str) + if is_opt(attr_obj): + opts.append(attr_obj) + elif (isinstance(attr_obj, list) and + all(map(lambda x: is_opt(x), attr_obj))): + opts.extend(attr_obj) + + ret = {} + for opt in opts: + ret.setdefault(_guess_groups(opt, obj), []).append(opt) + return ret.items() + + +def print_group_opts(group, opts_by_module): + print("[%s]" % group) + print('') + for mod, opts in opts_by_module: + print('#') + print('# Options defined in %s' % mod) + print('#') + print('') + for opt in opts: + _print_opt(opt) + print('') + + +def _get_my_ip(): + try: + csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + csock.connect(('8.8.8.8', 80)) + (addr, port) = csock.getsockname() + csock.close() + return addr + except socket.error: + return None + + +def _sanitize_default(name, value): + """Set up a reasonably sensible default for pybasedir, my_ip and host.""" + if value.startswith(sys.prefix): + # NOTE(jd) Don't use os.path.join, because it is likely to think the + # second part is an absolute pathname and therefore drop the first + # part. 
+ value = os.path.normpath("/usr/" + value[len(sys.prefix):]) + elif value.startswith(BASEDIR): + return value.replace(BASEDIR, '/usr/lib/python/site-packages') + elif BASEDIR in value: + return value.replace(BASEDIR, '') + elif value == _get_my_ip(): + return '10.0.0.1' + elif value in (socket.gethostname(), socket.getfqdn()) and 'host' in name: + return 'marconi' + elif value.strip() != value: + return '"%s"' % value + return value + + +def _print_opt(opt): + opt_name, opt_default, opt_help = opt.dest, opt.default, opt.help + if not opt_help: + sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name) + opt_help = "" + opt_type = None + try: + opt_type = OPTION_REGEX.search(str(type(opt))).group(0) + except (ValueError, AttributeError) as err: + sys.stderr.write("%s\n" % str(err)) + sys.exit(1) + opt_help = u'%s (%s)' % (opt_help, + OPT_TYPES[opt_type]) + print('#', "\n# ".join(textwrap.wrap(opt_help, WORDWRAP_WIDTH))) + if opt.deprecated_opts: + for deprecated_opt in opt.deprecated_opts: + if deprecated_opt.name: + deprecated_group = (deprecated_opt.group if + deprecated_opt.group else "DEFAULT") + print('# Deprecated group/name - [%s]/%s' % + (deprecated_group, + deprecated_opt.name)) + try: + if opt_default is None: + print('#%s=' % opt_name) + elif opt_type == STROPT: + assert(isinstance(opt_default, six.string_types)) + print('#%s=%s' % (opt_name, _sanitize_default(opt_name, + opt_default))) + elif opt_type == BOOLOPT: + assert(isinstance(opt_default, bool)) + print('#%s=%s' % (opt_name, str(opt_default).lower())) + elif opt_type == INTOPT: + assert(isinstance(opt_default, int) and + not isinstance(opt_default, bool)) + print('#%s=%s' % (opt_name, opt_default)) + elif opt_type == FLOATOPT: + assert(isinstance(opt_default, float)) + print('#%s=%s' % (opt_name, opt_default)) + elif opt_type == LISTOPT: + assert(isinstance(opt_default, list)) + print('#%s=%s' % (opt_name, ','.join(opt_default))) + elif opt_type == DICTOPT: + assert(isinstance(opt_default, dict)) + opt_default_strlist = [str(key) + ':' + str(value) + for (key, value) in opt_default.items()] + print('#%s=%s' % (opt_name, ','.join(opt_default_strlist))) + elif opt_type == MULTISTROPT: + assert(isinstance(opt_default, list)) + if not opt_default: + opt_default = [''] + for default in opt_default: + print('#%s=%s' % (opt_name, default)) + print('') + except Exception: + sys.stderr.write('Error in option "%s"\n' % opt_name) + sys.exit(1) + + +def main(): + generate(sys.argv[1:]) + +if __name__ == '__main__': + main() diff --git a/marconi/openstack/common/excutils.py b/marconi/openstack/common/excutils.py index 500bc1818..c0817428d 100644 --- a/marconi/openstack/common/excutils.py +++ b/marconi/openstack/common/excutils.py @@ -24,7 +24,7 @@ import traceback import six -from marconi.openstack.common.gettextutils import _ +from marconi.openstack.common.gettextutils import _LE class save_and_reraise_exception(object): @@ -59,7 +59,7 @@ class save_and_reraise_exception(object): def __exit__(self, exc_type, exc_val, exc_tb): if exc_type is not None: - logging.error(_('Original exception being dropped: %s'), + logging.error(_LE('Original exception being dropped: %s'), traceback.format_exception(self.type_, self.value, self.tb)) @@ -88,8 +88,8 @@ def forever_retry_uncaught_exceptions(infunc): if (cur_time - last_log_time > 60 or this_exc_message != last_exc_message): logging.exception( - _('Unexpected exception occurred %d time(s)... ' - 'retrying.') % exc_count) + _LE('Unexpected exception occurred %d time(s)... 
' + 'retrying.') % exc_count) last_log_time = cur_time last_exc_message = this_exc_message exc_count = 0 diff --git a/marconi/openstack/common/fileutils.py b/marconi/openstack/common/fileutils.py index 715293bed..b7c2285ab 100644 --- a/marconi/openstack/common/fileutils.py +++ b/marconi/openstack/common/fileutils.py @@ -19,7 +19,6 @@ import os import tempfile from marconi.openstack.common import excutils -from marconi.openstack.common.gettextutils import _ from marconi.openstack.common import log as logging LOG = logging.getLogger(__name__) @@ -59,7 +58,7 @@ def read_cached_file(filename, force_reload=False): cache_info = _FILE_CACHE.setdefault(filename, {}) if not cache_info or mtime > cache_info.get('mtime', 0): - LOG.debug(_("Reloading cached file %s") % filename) + LOG.debug("Reloading cached file %s" % filename) with open(filename) as fap: cache_info['data'] = fap.read() cache_info['mtime'] = mtime diff --git a/marconi/openstack/common/gettextutils.py b/marconi/openstack/common/gettextutils.py index f3c0bf5c9..f5fba2e0f 100644 --- a/marconi/openstack/common/gettextutils.py +++ b/marconi/openstack/common/gettextutils.py @@ -28,7 +28,6 @@ import gettext import locale from logging import handlers import os -import re from babel import localedata import six @@ -248,43 +247,15 @@ class Message(six.text_type): if other is None: params = (other,) elif isinstance(other, dict): - params = self._trim_dictionary_parameters(other) + # Merge the dictionaries + params = {} + if isinstance(self.params, dict): + params.update(self._copy_param(self.params)) + params.update(self._copy_param(other)) else: params = self._copy_param(other) return params - def _trim_dictionary_parameters(self, dict_param): - """Return a dict that only has matching entries in the msgid.""" - # NOTE(luisg): Here we trim down the dictionary passed as parameters - # to avoid carrying a lot of unnecessary weight around in the message - # object, for example if someone passes in Message() % locals() but - # only some params are used, and additionally we prevent errors for - # non-deepcopyable objects by unicoding() them. - - # Look for %(param) keys in msgid; - # Skip %% and deal with the case where % is first character on the line - keys = re.findall('(?:[^%]|^)?%\((\w*)\)[a-z]', self.msgid) - - # If we don't find any %(param) keys but have a %s - if not keys and re.findall('(?:[^%]|^)%[a-z]', self.msgid): - # Apparently the full dictionary is the parameter - params = self._copy_param(dict_param) - else: - params = {} - # Save our existing parameters as defaults to protect - # ourselves from losing values if we are called through an - # (erroneous) chain that builds a valid Message with - # arguments, and then does something like "msg % kwds" - # where kwds is an empty dictionary. - src = {} - if isinstance(self.params, dict): - src.update(self.params) - src.update(dict_param) - for key in keys: - params[key] = self._copy_param(src[key]) - - return params - def _copy_param(self, param): try: return copy.deepcopy(param) diff --git a/marconi/openstack/common/jsonutils.py b/marconi/openstack/common/jsonutils.py index cb59417e0..43fe0c6ba 100644 --- a/marconi/openstack/common/jsonutils.py +++ b/marconi/openstack/common/jsonutils.py @@ -36,17 +36,9 @@ import functools import inspect import itertools import json -try: - import xmlrpclib -except ImportError: - # NOTE(jaypipes): xmlrpclib was renamed to xmlrpc.client in Python3 - # however the function and object call signatures - # remained the same. 
This whole try/except block should - # be removed and replaced with a call to six.moves once - # six 1.4.2 is released. See http://bit.ly/1bqrVzu - import xmlrpc.client as xmlrpclib import six +import six.moves.xmlrpc_client as xmlrpclib from marconi.openstack.common import gettextutils from marconi.openstack.common import importutils diff --git a/marconi/openstack/common/lockutils.py b/marconi/openstack/common/lockutils.py index ee69be400..963dd5705 100644 --- a/marconi/openstack/common/lockutils.py +++ b/marconi/openstack/common/lockutils.py @@ -15,6 +15,7 @@ import contextlib import errno +import fcntl import functools import os import shutil @@ -28,7 +29,7 @@ import weakref from oslo.config import cfg from marconi.openstack.common import fileutils -from marconi.openstack.common.gettextutils import _ +from marconi.openstack.common.gettextutils import _, _LE, _LI from marconi.openstack.common import log as logging @@ -52,7 +53,7 @@ def set_defaults(lock_path): cfg.set_defaults(util_opts, lock_path=lock_path) -class _InterProcessLock(object): +class _FileLock(object): """Lock implementation which allows multiple locks, working around issues like bugs.debian.org/cgi-bin/bugreport.cgi?bug=632857 and does not require any cleanup. Since the lock is always held on a file @@ -79,7 +80,7 @@ class _InterProcessLock(object): if not os.path.exists(basedir): fileutils.ensure_tree(basedir) - LOG.info(_('Created lock path: %s'), basedir) + LOG.info(_LI('Created lock path: %s'), basedir) self.lockfile = open(self.fname, 'w') @@ -90,7 +91,7 @@ class _InterProcessLock(object): # Also upon reading the MSDN docs for locking(), it seems # to have a laughable 10 attempts "blocking" mechanism. self.trylock() - LOG.debug(_('Got file lock "%s"'), self.fname) + LOG.debug('Got file lock "%s"', self.fname) return True except IOError as e: if e.errno in (errno.EACCES, errno.EAGAIN): @@ -114,14 +115,17 @@ class _InterProcessLock(object): try: self.unlock() self.lockfile.close() - LOG.debug(_('Released file lock "%s"'), self.fname) + LOG.debug('Released file lock "%s"', self.fname) except IOError: - LOG.exception(_("Could not release the acquired lock `%s`"), + LOG.exception(_LE("Could not release the acquired lock `%s`"), self.fname) def __exit__(self, exc_type, exc_val, exc_tb): self.release() + def exists(self): + return os.path.exists(self.fname) + def trylock(self): raise NotImplementedError() @@ -129,7 +133,7 @@ class _InterProcessLock(object): raise NotImplementedError() -class _WindowsLock(_InterProcessLock): +class _WindowsLock(_FileLock): def trylock(self): msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1) @@ -137,7 +141,7 @@ class _WindowsLock(_InterProcessLock): msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1) -class _PosixLock(_InterProcessLock): +class _FcntlLock(_FileLock): def trylock(self): fcntl.lockf(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB) @@ -145,35 +149,106 @@ class _PosixLock(_InterProcessLock): fcntl.lockf(self.lockfile, fcntl.LOCK_UN) +class _PosixLock(object): + def __init__(self, name): + # Hash the name because it's not valid to have POSIX semaphore + # names with things like / in them. Then use base64 to encode + # the digest() instead taking the hexdigest() because the + # result is shorter and most systems can't have shm sempahore + # names longer than 31 characters. 
+ h = hashlib.sha1() + h.update(name.encode('ascii')) + self.name = str((b'/' + base64.urlsafe_b64encode( + h.digest())).decode('ascii')) + + def acquire(self, timeout=None): + self.semaphore = posix_ipc.Semaphore(self.name, + flags=posix_ipc.O_CREAT, + initial_value=1) + self.semaphore.acquire(timeout) + return self + + def __enter__(self): + self.acquire() + return self + + def release(self): + self.semaphore.release() + self.semaphore.close() + + def __exit__(self, exc_type, exc_val, exc_tb): + self.release() + + def exists(self): + try: + semaphore = posix_ipc.Semaphore(self.name) + except posix_ipc.ExistentialError: + return False + else: + semaphore.close() + return True + + if os.name == 'nt': import msvcrt InterProcessLock = _WindowsLock + FileLock = _WindowsLock else: - import fcntl + import base64 + import hashlib + import posix_ipc InterProcessLock = _PosixLock + FileLock = _FcntlLock _semaphores = weakref.WeakValueDictionary() _semaphores_lock = threading.Lock() -def external_lock(name, lock_file_prefix=None): - with internal_lock(name): - LOG.debug(_('Attempting to grab external lock "%(lock)s"'), - {'lock': name}) +def _get_lock_path(name, lock_file_prefix, lock_path=None): + # NOTE(mikal): the lock name cannot contain directory + # separators + name = name.replace(os.sep, '_') + if lock_file_prefix: + sep = '' if lock_file_prefix.endswith('-') else '-' + name = '%s%s%s' % (lock_file_prefix, sep, name) - # NOTE(mikal): the lock name cannot contain directory - # separators - name = name.replace(os.sep, '_') - if lock_file_prefix: - sep = '' if lock_file_prefix.endswith('-') else '-' - name = '%s%s%s' % (lock_file_prefix, sep, name) + local_lock_path = lock_path or CONF.lock_path - if not CONF.lock_path: + if not local_lock_path: + # NOTE(bnemec): Create a fake lock path for posix locks so we don't + # unnecessarily raise the RequiredOptError below. + if InterProcessLock is not _PosixLock: raise cfg.RequiredOptError('lock_path') + local_lock_path = 'posixlock:/' - lock_file_path = os.path.join(CONF.lock_path, name) + return os.path.join(local_lock_path, name) - return InterProcessLock(lock_file_path) + +def external_lock(name, lock_file_prefix=None, lock_path=None): + LOG.debug('Attempting to grab external lock "%(lock)s"', + {'lock': name}) + + lock_file_path = _get_lock_path(name, lock_file_prefix, lock_path) + + # NOTE(bnemec): If an explicit lock_path was passed to us then it + # means the caller is relying on file-based locking behavior, so + # we can't use posix locks for those calls. 
+ if lock_path: + return FileLock(lock_file_path) + return InterProcessLock(lock_file_path) + + +def remove_external_lock_file(name, lock_file_prefix=None): + """Remove a external lock file when it's not used anymore + This will be helpful when we have a lot of lock files + """ + with internal_lock(name): + lock_file_path = _get_lock_path(name, lock_file_prefix) + try: + os.remove(lock_file_path) + except OSError: + LOG.info(_LI('Failed to remove file %(file)s'), + {'file': lock_file_path}) def internal_lock(name): @@ -184,12 +259,12 @@ def internal_lock(name): sem = threading.Semaphore() _semaphores[name] = sem - LOG.debug(_('Got semaphore "%(lock)s"'), {'lock': name}) + LOG.debug('Got semaphore "%(lock)s"', {'lock': name}) return sem @contextlib.contextmanager -def lock(name, lock_file_prefix=None, external=False): +def lock(name, lock_file_prefix=None, external=False, lock_path=None): """Context based lock This function yields a `threading.Semaphore` instance (if we don't use @@ -204,15 +279,17 @@ def lock(name, lock_file_prefix=None, external=False): workers both run a a method decorated with @synchronized('mylock', external=True), only one of them will execute at a time. """ - if external and not CONF.disable_process_locking: - lock = external_lock(name, lock_file_prefix) - else: - lock = internal_lock(name) - with lock: - yield lock + int_lock = internal_lock(name) + with int_lock: + if external and not CONF.disable_process_locking: + ext_lock = external_lock(name, lock_file_prefix, lock_path) + with ext_lock: + yield ext_lock + else: + yield int_lock -def synchronized(name, lock_file_prefix=None, external=False): +def synchronized(name, lock_file_prefix=None, external=False, lock_path=None): """Synchronization decorator. Decorating a method like so:: @@ -240,12 +317,12 @@ def synchronized(name, lock_file_prefix=None, external=False): @functools.wraps(f) def inner(*args, **kwargs): try: - with lock(name, lock_file_prefix, external): - LOG.debug(_('Got semaphore / lock "%(function)s"'), + with lock(name, lock_file_prefix, external, lock_path): + LOG.debug('Got semaphore / lock "%(function)s"', {'function': f.__name__}) return f(*args, **kwargs) finally: - LOG.debug(_('Semaphore / lock released "%(function)s"'), + LOG.debug('Semaphore / lock released "%(function)s"', {'function': f.__name__}) return inner return wrap diff --git a/marconi/openstack/common/log.py b/marconi/openstack/common/log.py index 9e058935b..826a1ab15 100644 --- a/marconi/openstack/common/log.py +++ b/marconi/openstack/common/log.py @@ -15,7 +15,7 @@ # License for the specific language governing permissions and limitations # under the License. -"""Openstack logging handler. +"""OpenStack logging handler. This module adds to logging functionality by adding the option to specify a context object when calling the various log methods. 
If the context object @@ -357,7 +357,7 @@ class ContextAdapter(BaseLoggerAdapter): extra.update(_dictify_context(context)) instance = kwargs.pop('instance', None) - instance_uuid = (extra.get('instance_uuid', None) or + instance_uuid = (extra.get('instance_uuid') or kwargs.pop('instance_uuid', None)) instance_extra = '' if instance: @@ -654,7 +654,7 @@ class ContextFormatter(logging.Formatter): if key not in record.__dict__: record.__dict__[key] = '' - if record.__dict__.get('request_id', None): + if record.__dict__.get('request_id'): self._fmt = CONF.logging_context_format_string else: self._fmt = CONF.logging_default_format_string diff --git a/openstack-common.conf b/openstack-common.conf index f13751052..727f694ab 100644 --- a/openstack-common.conf +++ b/openstack-common.conf @@ -1,5 +1,6 @@ [DEFAULT] module=cache +module=config.generator module=excutils module=fileutils module=gettextutils diff --git a/requirements.txt b/requirements.txt index de8023135..13abfead2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,6 +7,7 @@ jsonschema>=2.0.0,<3.0.0 iso8601>=0.1.8 msgpack-python MySQL-python +posix_ipc pymongo>=2.4 python-keystoneclient>=0.6.0 python-memcached>=1.48
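
Usage note (a sketch, not part of the patch): the newly synced generator walks a list of source files and/or importable modules, collects the cfg.Opt instances they define at module level, and prints a commented sample configuration grouped by [DEFAULT] and any other option groups it finds. The file paths below are illustrative assumptions about the Marconi tree, not something this change wires up:

    # Build a sample config from a couple of Marconi modules (illustrative).
    # Every option the scanned modules define must be resolvable on cfg.CONF
    # (or one of its groups), otherwise _guess_groups() raises RuntimeError.
    from marconi.openstack.common.config import generator

    generator.generate([
        'marconi/queues/bootstrap.py',              # hypothetical source file
        'marconi/queues/transport/wsgi/driver.py',  # hypothetical source file
    ])

    # Roughly equivalent shell form, redirecting the sample to a file:
    #   python -m marconi.openstack.common.config.generator \
    #       marconi/queues/bootstrap.py ... > etc/marconi.conf.sample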
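
The lockutils sync also changes external-lock behaviour: on POSIX systems external locks now default to posix_ipc semaphores (hence the new requirements.txt entry), and fall back to the old fcntl file locks only when a caller passes an explicit lock_path. A minimal sketch of the new keyword, assuming a hypothetical callable and lock directory:

    from marconi.openstack.common import lockutils

    # Passing lock_path forces the file-based (fcntl) locking behaviour;
    # omitting it uses a named POSIX semaphore derived from the lock name.
    @lockutils.synchronized('queue-gc', 'marconi-', external=True,
                            lock_path='/var/lock/marconi')
    def collect_garbage():
        pass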