Sync oslo-cache from oslo-incubator
This patch removes Marconi's local copy of the oslo-incubator cache work. The patch also removes the memcached backend, although the version coming from oslo doesn't have support for it yet. The backend will be pulled in again as soon as it is updated upstream. Change-Id: I487b2178d9657eb7abe020f06436b14531bdb993
This commit is contained in:
parent
b14c7e07b7
commit
48764e6999
13
marconi/common/cache/__init__.py
vendored
13
marconi/common/cache/__init__.py
vendored
@ -1,13 +0,0 @@
|
|||||||
# Copyright 2013 Red Hat, Inc.
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
||||||
# not use this file except in compliance with the License. You may obtain
|
|
||||||
# a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
||||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
||||||
# License for the specific language governing permissions and limitations
|
|
||||||
# under the License.
|
|
13
marconi/common/cache/_backends/__init__.py
vendored
13
marconi/common/cache/_backends/__init__.py
vendored
@ -1,13 +0,0 @@
|
|||||||
# Copyright 2013 Red Hat, Inc.
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
||||||
# not use this file except in compliance with the License. You may obtain
|
|
||||||
# a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
||||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
||||||
# License for the specific language governing permissions and limitations
|
|
||||||
# under the License.
|
|
105
marconi/common/cache/_backends/memcached.py
vendored
105
marconi/common/cache/_backends/memcached.py
vendored
@ -1,105 +0,0 @@
|
|||||||
# Copyright 2013 Red Hat, Inc.
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
||||||
# not use this file except in compliance with the License. You may obtain
|
|
||||||
# a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
||||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
||||||
# License for the specific language governing permissions and limitations
|
|
||||||
# under the License.
|
|
||||||
|
|
||||||
import time
|
|
||||||
|
|
||||||
import memcache
|
|
||||||
from oslo.config import cfg
|
|
||||||
|
|
||||||
from marconi.common.cache import backends
|
|
||||||
|
|
||||||
|
|
||||||
_memcache_opts = [
|
|
||||||
cfg.ListOpt('memcached_servers',
|
|
||||||
default=['127.0.0.1:11211'],
|
|
||||||
help='Memcached servers or None for in process cache.'),
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class MemcachedBackend(backends.BaseCache):
|
|
||||||
|
|
||||||
def __init__(self, conf, group, cache_namespace):
|
|
||||||
conf.register_opts(_memcache_opts, group=group)
|
|
||||||
super(MemcachedBackend, self).__init__(conf, group, cache_namespace)
|
|
||||||
self._client = None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def _cache(self):
|
|
||||||
if not self._client:
|
|
||||||
self._client = memcache.Client(self.conf.memcached_servers)
|
|
||||||
return self._client
|
|
||||||
|
|
||||||
def _get_ttl(self, ttl):
|
|
||||||
"""Correct ttl for memcached."""
|
|
||||||
|
|
||||||
if ttl > 2592000:
|
|
||||||
# NOTE(flaper87): If ttl is bigger than 30 days,
|
|
||||||
# it needs to be translated to timestamp.
|
|
||||||
#
|
|
||||||
# See http://code.google.com/p/memcached/wiki/FAQ
|
|
||||||
# "You can set expire times up to 30 days in the
|
|
||||||
# future. After that memcached interprets it as a
|
|
||||||
# date, and will expire the item after said date.
|
|
||||||
# This is a simple (but obscure) mechanic."
|
|
||||||
return ttl + int(time.time())
|
|
||||||
return ttl
|
|
||||||
|
|
||||||
def set(self, key, value, ttl=0):
|
|
||||||
key = self._prepare_key(key)
|
|
||||||
return self._cache.set(key, value, self._get_ttl(ttl))
|
|
||||||
|
|
||||||
def unset(self, key):
|
|
||||||
self._cache.delete(self._prepare_key(key))
|
|
||||||
|
|
||||||
def get(self, key, default=None):
|
|
||||||
key = self._prepare_key(key)
|
|
||||||
value = self._cache.get(key)
|
|
||||||
return value is None and default or value
|
|
||||||
|
|
||||||
def get_many(self, keys, default=None):
|
|
||||||
new_keys = map(self._prepare_key, keys)
|
|
||||||
ret = self._cache.get_multi(new_keys)
|
|
||||||
|
|
||||||
m = dict(zip(new_keys, keys))
|
|
||||||
for cache_key, key in m.items():
|
|
||||||
yield (key, ret.get(cache_key, default))
|
|
||||||
|
|
||||||
def set_many(self, data, ttl=0):
|
|
||||||
safe_data = {}
|
|
||||||
for key, value in data.items():
|
|
||||||
key = self._prepare_key(key)
|
|
||||||
safe_data[key] = value
|
|
||||||
self._cache.set_multi(safe_data, self._get_ttl(ttl))
|
|
||||||
|
|
||||||
def unset_many(self, keys, version=None):
|
|
||||||
self._cache.delete_multi(map(self._prepare_key, keys))
|
|
||||||
|
|
||||||
def incr(self, key, delta=1):
|
|
||||||
key = self._prepare_key(key)
|
|
||||||
|
|
||||||
try:
|
|
||||||
if delta < 0:
|
|
||||||
#NOTE(flaper87): memcached doesn't support a negative delta
|
|
||||||
return self._cache.decr(key, -delta)
|
|
||||||
|
|
||||||
return self._cache.incr(key, delta)
|
|
||||||
except ValueError:
|
|
||||||
return None
|
|
||||||
|
|
||||||
def add(self, key, value, ttl=0):
|
|
||||||
key = self._prepare_key(key)
|
|
||||||
return self._cache.add(key, value, self._get_ttl(ttl))
|
|
||||||
|
|
||||||
def flush(self):
|
|
||||||
self._cache.flush_all()
|
|
87
marconi/common/cache/_backends/memory.py
vendored
87
marconi/common/cache/_backends/memory.py
vendored
@ -1,87 +0,0 @@
|
|||||||
# Copyright 2013 Red Hat, Inc.
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
||||||
# not use this file except in compliance with the License. You may obtain
|
|
||||||
# a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
||||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
||||||
# License for the specific language governing permissions and limitations
|
|
||||||
# under the License.
|
|
||||||
|
|
||||||
|
|
||||||
from marconi.common.cache import backends
|
|
||||||
from marconi.openstack.common import lockutils
|
|
||||||
from marconi.openstack.common import timeutils
|
|
||||||
|
|
||||||
|
|
||||||
class MemoryBackend(backends.BaseCache):
|
|
||||||
|
|
||||||
def __init__(self, conf, group, cache_namespace):
|
|
||||||
super(MemoryBackend, self).__init__(conf, group, cache_namespace)
|
|
||||||
self._cache = {}
|
|
||||||
self._keys_expires = {}
|
|
||||||
|
|
||||||
def set(self, key, value, ttl=0):
|
|
||||||
key = self._prepare_key(key)
|
|
||||||
with lockutils.lock(key):
|
|
||||||
expires_at = 0
|
|
||||||
if ttl != 0:
|
|
||||||
expires_at = timeutils.utcnow_ts() + ttl
|
|
||||||
|
|
||||||
self._cache[key] = (expires_at, value)
|
|
||||||
|
|
||||||
if expires_at:
|
|
||||||
self._keys_expires.setdefault(expires_at, set()).add(key)
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
def get(self, key, default=None):
|
|
||||||
key = self._prepare_key(key)
|
|
||||||
with lockutils.lock(key):
|
|
||||||
now = timeutils.utcnow_ts()
|
|
||||||
|
|
||||||
try:
|
|
||||||
timeout, value = self._cache[key]
|
|
||||||
|
|
||||||
if timeout and now >= timeout:
|
|
||||||
del self._cache[key]
|
|
||||||
return default
|
|
||||||
|
|
||||||
return value
|
|
||||||
except KeyError:
|
|
||||||
return default
|
|
||||||
|
|
||||||
def _purge_expired(self):
|
|
||||||
"""Removes expired keys from the cache."""
|
|
||||||
|
|
||||||
now = timeutils.utcnow_ts()
|
|
||||||
for timeout in sorted(self._keys_expires.keys()):
|
|
||||||
|
|
||||||
# NOTE(flaper87): If timeout is greater
|
|
||||||
# than `now`, stop the iteration, remaining
|
|
||||||
# keys have not expired.
|
|
||||||
if now < timeout:
|
|
||||||
break
|
|
||||||
|
|
||||||
# NOTE(flaper87): Unset every key in
|
|
||||||
# this set from the cache if its timeout
|
|
||||||
# is equal to `timeout`. (They key might
|
|
||||||
# have been updated)
|
|
||||||
for subkey in self._keys_expires.pop(timeout):
|
|
||||||
if self._cache[subkey][0] == timeout:
|
|
||||||
del self._cache[subkey]
|
|
||||||
|
|
||||||
def unset(self, key):
|
|
||||||
self._purge_expired()
|
|
||||||
|
|
||||||
# NOTE(flaper87): Delete the key. Using pop
|
|
||||||
# since it could have been deleted already
|
|
||||||
self._cache.pop(self._prepare_key(key), None)
|
|
||||||
|
|
||||||
def flush(self):
|
|
||||||
self._cache = {}
|
|
||||||
self._keys_expires = {}
|
|
188
marconi/common/cache/backends.py
vendored
188
marconi/common/cache/backends.py
vendored
@ -1,188 +0,0 @@
|
|||||||
# Copyright 2013 Red Hat, Inc.
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
||||||
# not use this file except in compliance with the License. You may obtain
|
|
||||||
# a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
||||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
||||||
# License for the specific language governing permissions and limitations
|
|
||||||
# under the License.
|
|
||||||
|
|
||||||
import abc
|
|
||||||
import six
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class BaseCache(object):
|
|
||||||
|
|
||||||
def __init__(self, conf, group, cache_namespace):
|
|
||||||
self.conf = conf[group]
|
|
||||||
self._cache_namespace = cache_namespace
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def set(self, key, value, ttl=0):
|
|
||||||
"""Sets or updates a cache entry
|
|
||||||
|
|
||||||
:params key: Item key as string.
|
|
||||||
:params value: Value to assign to the key. This
|
|
||||||
can be anything that is handled
|
|
||||||
by the current backend.
|
|
||||||
:params ttl: Key's timeout in seconds.
|
|
||||||
|
|
||||||
:returns: True if the operation succeeds.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def get(self, key, default=None):
|
|
||||||
"""Gets one item from the cache
|
|
||||||
|
|
||||||
:params key: Key for the item to retrieve
|
|
||||||
from the cache.
|
|
||||||
:params default: The default value to return.
|
|
||||||
|
|
||||||
:returns: `key`'s value in the cache if it exists,
|
|
||||||
otherwise `default` should be returned.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def unset(self, key):
|
|
||||||
"""Removes an item from cache.
|
|
||||||
|
|
||||||
:params key: The key to remove.
|
|
||||||
|
|
||||||
:returns: The key value if there's one
|
|
||||||
"""
|
|
||||||
|
|
||||||
def _prepare_key(self, key):
|
|
||||||
"""Prepares the key
|
|
||||||
|
|
||||||
This method concatenates the cache_namespace
|
|
||||||
and the key so it can be used in the cache.
|
|
||||||
|
|
||||||
NOTE: All cache backends have to call it
|
|
||||||
explicitly where needed.
|
|
||||||
|
|
||||||
:param key: The key to be prefixed
|
|
||||||
"""
|
|
||||||
prepared = key
|
|
||||||
if self._cache_namespace:
|
|
||||||
prepared = ("%(prefix)s-%(key)s" %
|
|
||||||
{'prefix': self._cache_namespace, 'key': key})
|
|
||||||
|
|
||||||
# NOTE(cpp-cabrera): some caching backends (memcache) enforce
|
|
||||||
# that the key type must be bytes. This is here to ensure that
|
|
||||||
# this precondition is respected and that users can continue
|
|
||||||
# to use the caching layer transparently.
|
|
||||||
if isinstance(prepared, six.text_type):
|
|
||||||
prepared = prepared.encode('utf8')
|
|
||||||
|
|
||||||
return prepared
|
|
||||||
|
|
||||||
def add(self, key, value, ttl=0):
|
|
||||||
"""Sets the value for a key if it doesn't exist
|
|
||||||
|
|
||||||
:params key: Key to create as string.
|
|
||||||
:params value: Value to assign to the key. This
|
|
||||||
can be anything that is handled
|
|
||||||
by current backend.
|
|
||||||
:params ttl: Key's timeout in seconds.
|
|
||||||
|
|
||||||
:returns: False if the key exists, otherwise,
|
|
||||||
`set`'s result will be returned.
|
|
||||||
"""
|
|
||||||
|
|
||||||
if self.get(key) is not None:
|
|
||||||
return False
|
|
||||||
return self.set(key, value, ttl)
|
|
||||||
|
|
||||||
def get_many(self, keys, default=None):
|
|
||||||
"""Gets key's value from cache
|
|
||||||
|
|
||||||
:params keys: List of keys to retrieve.
|
|
||||||
:params default: The default value to return
|
|
||||||
for each key that is not in
|
|
||||||
the cache.
|
|
||||||
|
|
||||||
:returns: A generator of (key, value)
|
|
||||||
"""
|
|
||||||
for k in keys:
|
|
||||||
val = self.get(k, default=default)
|
|
||||||
if val is not None:
|
|
||||||
yield (k, val)
|
|
||||||
|
|
||||||
def has_key(self, key):
|
|
||||||
"""Verifies that a key exists.
|
|
||||||
|
|
||||||
:params key: The key to verify.
|
|
||||||
|
|
||||||
:returns: True if the key exists, otherwise
|
|
||||||
False.
|
|
||||||
"""
|
|
||||||
return self.get(key) is not None
|
|
||||||
|
|
||||||
def set_many(self, data, ttl=0):
|
|
||||||
"""Puts several items into the cache at once
|
|
||||||
|
|
||||||
Depending on the backend, this operation may or may
|
|
||||||
not be efficient. The default implementation calls
|
|
||||||
set for each (key, value) pair passed, other backends
|
|
||||||
support set_many operations as part of their protocols.
|
|
||||||
|
|
||||||
:params data: A dictionary like {key: val} to store
|
|
||||||
in the cache.
|
|
||||||
:params ttl: Key's timeout in seconds.
|
|
||||||
"""
|
|
||||||
for key, value in data.items():
|
|
||||||
self.set(key, value, ttl=ttl)
|
|
||||||
|
|
||||||
def unset_many(self, keys):
|
|
||||||
"""Removes several keys from the cache at once
|
|
||||||
|
|
||||||
:params keys: List of keys to retrieve.
|
|
||||||
"""
|
|
||||||
for key in keys:
|
|
||||||
self.unset(key)
|
|
||||||
|
|
||||||
def incr(self, key, delta=1):
|
|
||||||
"""Increments the value for a key
|
|
||||||
|
|
||||||
NOTE: This method is not synchronized because
|
|
||||||
get and set are.
|
|
||||||
|
|
||||||
:params key: The key for the value to be incremented
|
|
||||||
:params delta: Number of units by which to increment
|
|
||||||
the value. Pass a negative number to
|
|
||||||
decrement the value.
|
|
||||||
|
|
||||||
:returns: The new value
|
|
||||||
"""
|
|
||||||
value = self.get(key)
|
|
||||||
if value is None:
|
|
||||||
return None
|
|
||||||
new_value = value + delta
|
|
||||||
self.set(key, new_value)
|
|
||||||
return new_value
|
|
||||||
|
|
||||||
def append(self, key, tail):
|
|
||||||
"""Appends `value` to `key`'s value.
|
|
||||||
|
|
||||||
:params key: The key of the value to which
|
|
||||||
`tail` should be appended.
|
|
||||||
:params tail: The value to append to the
|
|
||||||
original.
|
|
||||||
|
|
||||||
:returns: The new value
|
|
||||||
"""
|
|
||||||
value = self.get(key)
|
|
||||||
if value is None:
|
|
||||||
return None
|
|
||||||
new_value = value + tail
|
|
||||||
self.set(key, new_value)
|
|
||||||
return new_value
|
|
||||||
|
|
||||||
def flush(self):
|
|
||||||
"""Flushes all items from the cache."""
|
|
61
marconi/common/cache/cache.py
vendored
61
marconi/common/cache/cache.py
vendored
@ -1,61 +0,0 @@
|
|||||||
# Copyright 2013 Red Hat, Inc.
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
||||||
# not use this file except in compliance with the License. You may obtain
|
|
||||||
# a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
||||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
||||||
# License for the specific language governing permissions and limitations
|
|
||||||
# under the License.
|
|
||||||
|
|
||||||
"""Cache library.
|
|
||||||
|
|
||||||
Supported configuration options:
|
|
||||||
|
|
||||||
`cache_backend`: Name of the cache backend to use.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from oslo.config import cfg
|
|
||||||
from stevedore import driver
|
|
||||||
|
|
||||||
|
|
||||||
_cache_options = [
|
|
||||||
cfg.StrOpt('cache_backend',
|
|
||||||
default='memory',
|
|
||||||
help='The cache driver to use, default value is `memory`.'),
|
|
||||||
cfg.StrOpt('cache_prefix',
|
|
||||||
default=None,
|
|
||||||
help='Prefix to use in every cache key'),
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def get_cache(conf):
|
|
||||||
"""Loads the cache backend
|
|
||||||
|
|
||||||
This function loads the cache backend
|
|
||||||
specified in the given configuration.
|
|
||||||
|
|
||||||
:param conf: Configuration instance to use
|
|
||||||
"""
|
|
||||||
|
|
||||||
# NOTE(flaper87): oslo.config checks if options
|
|
||||||
# exist before registering them. The code bellow
|
|
||||||
# should be safe.
|
|
||||||
cache_group = cfg.OptGroup(name='oslo_cache',
|
|
||||||
title='Cache options')
|
|
||||||
|
|
||||||
conf.register_group(cache_group)
|
|
||||||
conf.register_opts(_cache_options, group=cache_group)
|
|
||||||
|
|
||||||
kwargs = dict(cache_namespace=conf.oslo_cache.cache_prefix)
|
|
||||||
|
|
||||||
backend = conf.oslo_cache.cache_backend
|
|
||||||
mgr = driver.DriverManager('marconi.common.cache.backends', backend,
|
|
||||||
invoke_on_load=True,
|
|
||||||
invoke_args=[conf, cache_group.name],
|
|
||||||
invoke_kwds=kwargs)
|
|
||||||
return mgr.driver
|
|
0
marconi/openstack/common/cache/__init__.py
vendored
Normal file
0
marconi/openstack/common/cache/__init__.py
vendored
Normal file
0
marconi/openstack/common/cache/_backends/__init__.py
vendored
Normal file
0
marconi/openstack/common/cache/_backends/__init__.py
vendored
Normal file
165
marconi/openstack/common/cache/_backends/memory.py
vendored
Normal file
165
marconi/openstack/common/cache/_backends/memory.py
vendored
Normal file
@ -0,0 +1,165 @@
|
|||||||
|
# Copyright 2013 Red Hat, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
import collections
|
||||||
|
|
||||||
|
from marconi.openstack.common.cache import backends
|
||||||
|
from marconi.openstack.common import lockutils
|
||||||
|
from marconi.openstack.common import timeutils
|
||||||
|
|
||||||
|
|
||||||
|
class MemoryBackend(backends.BaseCache):
|
||||||
|
|
||||||
|
def __init__(self, parsed_url, options=None):
|
||||||
|
super(MemoryBackend, self).__init__(parsed_url, options)
|
||||||
|
self._clear()
|
||||||
|
|
||||||
|
def _set_unlocked(self, key, value, ttl=0):
|
||||||
|
expires_at = 0
|
||||||
|
if ttl != 0:
|
||||||
|
expires_at = timeutils.utcnow_ts() + ttl
|
||||||
|
|
||||||
|
self._cache[key] = (expires_at, value)
|
||||||
|
|
||||||
|
if expires_at:
|
||||||
|
self._keys_expires[expires_at].add(key)
|
||||||
|
|
||||||
|
def _set(self, key, value, ttl=0, not_exists=False):
|
||||||
|
with lockutils.lock(key):
|
||||||
|
|
||||||
|
# NOTE(flaper87): This is needed just in `set`
|
||||||
|
# calls, hence it's not in `_set_unlocked`
|
||||||
|
if not_exists and self._exists_unlocked(key):
|
||||||
|
return False
|
||||||
|
|
||||||
|
self._set_unlocked(key, value, ttl)
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _get_unlocked(self, key, default=None):
|
||||||
|
now = timeutils.utcnow_ts()
|
||||||
|
|
||||||
|
try:
|
||||||
|
timeout, value = self._cache[key]
|
||||||
|
except KeyError:
|
||||||
|
return (0, default)
|
||||||
|
|
||||||
|
if timeout and now >= timeout:
|
||||||
|
|
||||||
|
# NOTE(flaper87): Record expired,
|
||||||
|
# remove it from the cache but catch
|
||||||
|
# KeyError and ValueError in case
|
||||||
|
# _purge_expired removed this key already.
|
||||||
|
try:
|
||||||
|
del self._cache[key]
|
||||||
|
except KeyError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
try:
|
||||||
|
# NOTE(flaper87): Keys with ttl == 0
|
||||||
|
# don't exist in the _keys_expires dict
|
||||||
|
self._keys_expires[timeout].remove(key)
|
||||||
|
except (KeyError, ValueError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
return (0, default)
|
||||||
|
|
||||||
|
return (timeout, value)
|
||||||
|
|
||||||
|
def _get(self, key, default=None):
|
||||||
|
with lockutils.lock(key):
|
||||||
|
return self._get_unlocked(key, default)[1]
|
||||||
|
|
||||||
|
def _exists_unlocked(self, key):
|
||||||
|
now = timeutils.utcnow_ts()
|
||||||
|
try:
|
||||||
|
timeout = self._cache[key][0]
|
||||||
|
return not timeout or now <= timeout
|
||||||
|
except KeyError:
|
||||||
|
return False
|
||||||
|
|
||||||
|
def __contains__(self, key):
|
||||||
|
with lockutils.lock(key):
|
||||||
|
return self._exists_unlocked(key)
|
||||||
|
|
||||||
|
def _incr_append(self, key, other):
|
||||||
|
with lockutils.lock(key):
|
||||||
|
timeout, value = self._get_unlocked(key)
|
||||||
|
|
||||||
|
if value is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
ttl = timeutils.utcnow_ts() - timeout
|
||||||
|
new_value = value + other
|
||||||
|
self._set_unlocked(key, new_value, ttl)
|
||||||
|
return new_value
|
||||||
|
|
||||||
|
def _incr(self, key, delta):
|
||||||
|
if not isinstance(delta, int):
|
||||||
|
raise TypeError('delta must be an int instance')
|
||||||
|
|
||||||
|
return self._incr_append(key, delta)
|
||||||
|
|
||||||
|
def _append_tail(self, key, tail):
|
||||||
|
return self._incr_append(key, tail)
|
||||||
|
|
||||||
|
def _purge_expired(self):
|
||||||
|
"""Removes expired keys from the cache."""
|
||||||
|
|
||||||
|
now = timeutils.utcnow_ts()
|
||||||
|
for timeout in sorted(self._keys_expires.keys()):
|
||||||
|
|
||||||
|
# NOTE(flaper87): If timeout is greater
|
||||||
|
# than `now`, stop the iteration, remaining
|
||||||
|
# keys have not expired.
|
||||||
|
if now < timeout:
|
||||||
|
break
|
||||||
|
|
||||||
|
# NOTE(flaper87): Unset every key in
|
||||||
|
# this set from the cache if its timeout
|
||||||
|
# is equal to `timeout`. (The key might
|
||||||
|
# have been updated)
|
||||||
|
for subkey in self._keys_expires.pop(timeout):
|
||||||
|
try:
|
||||||
|
if self._cache[subkey][0] == timeout:
|
||||||
|
del self._cache[subkey]
|
||||||
|
except KeyError:
|
||||||
|
continue
|
||||||
|
|
||||||
|
def __delitem__(self, key):
|
||||||
|
self._purge_expired()
|
||||||
|
|
||||||
|
# NOTE(flaper87): Delete the key. Using pop
|
||||||
|
# since it could have been deleted already
|
||||||
|
value = self._cache.pop(key, None)
|
||||||
|
|
||||||
|
if value:
|
||||||
|
try:
|
||||||
|
# NOTE(flaper87): Keys with ttl == 0
|
||||||
|
# don't exist in the _keys_expires dict
|
||||||
|
self._keys_expires[value[0]].remove(value[1])
|
||||||
|
except (KeyError, ValueError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def _clear(self):
|
||||||
|
self._cache = {}
|
||||||
|
self._keys_expires = collections.defaultdict(set)
|
||||||
|
|
||||||
|
def _get_many(self, keys, default):
|
||||||
|
return super(MemoryBackend, self)._get_many(keys, default)
|
||||||
|
|
||||||
|
def _set_many(self, data, ttl=0):
|
||||||
|
return super(MemoryBackend, self)._set_many(data, ttl)
|
||||||
|
|
||||||
|
def _unset_many(self, keys):
|
||||||
|
return super(MemoryBackend, self)._unset_many(keys)
|
263
marconi/openstack/common/cache/backends.py
vendored
Normal file
263
marconi/openstack/common/cache/backends.py
vendored
Normal file
@ -0,0 +1,263 @@
|
|||||||
|
# Copyright 2013 Red Hat, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
import abc
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
|
||||||
|
NOTSET = object()
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class BaseCache(object):
|
||||||
|
"""Base Cache Abstraction
|
||||||
|
|
||||||
|
:params parsed_url: Parsed url object.
|
||||||
|
:params options: A dictionary with configuration parameters
|
||||||
|
for the cache. For example:
|
||||||
|
- default_ttl: An integer defining the default ttl
|
||||||
|
for keys.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, parsed_url, options=None):
|
||||||
|
self._parsed_url = parsed_url
|
||||||
|
self._options = options or {}
|
||||||
|
self._default_ttl = int(self._options.get('default_ttl', 0))
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _set(self, key, value, ttl, not_exists=False):
|
||||||
|
"""Implementations of this class have to override this method."""
|
||||||
|
|
||||||
|
def set(self, key, value, ttl, not_exists=False):
|
||||||
|
"""Sets or updates a cache entry
|
||||||
|
|
||||||
|
NOTE: Thread-safety is required and has to be
|
||||||
|
guaranteed by the backend implementation.
|
||||||
|
|
||||||
|
:params key: Item key as string.
|
||||||
|
:type key: `unicode string`
|
||||||
|
:params value: Value to assign to the key. This
|
||||||
|
can be anything that is handled
|
||||||
|
by the current backend.
|
||||||
|
:params ttl: Key's timeout in seconds. 0 means
|
||||||
|
no timeout.
|
||||||
|
:type ttl: int
|
||||||
|
:params not_exists: If True, the key will be set
|
||||||
|
if it doesn't exist. Otherwise,
|
||||||
|
it'll always be set.
|
||||||
|
:type not_exists: bool
|
||||||
|
|
||||||
|
:returns: True if the operation succeeds, False otherwise.
|
||||||
|
"""
|
||||||
|
if ttl is None:
|
||||||
|
ttl = self._default_ttl
|
||||||
|
|
||||||
|
return self._set(key, value, ttl, not_exists)
|
||||||
|
|
||||||
|
def __setitem__(self, key, value):
|
||||||
|
self.set(key, value, self._default_ttl)
|
||||||
|
|
||||||
|
def setdefault(self, key, value):
|
||||||
|
"""Sets the key value to `value` if it doesn't exist
|
||||||
|
|
||||||
|
:params key: Item key as string.
|
||||||
|
:type key: `unicode string`
|
||||||
|
:params value: Value to assign to the key. This
|
||||||
|
can be anything that is handled
|
||||||
|
by the current backend.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return self[key]
|
||||||
|
except KeyError:
|
||||||
|
self[key] = value
|
||||||
|
return value
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get(self, key, default):
|
||||||
|
"""Implementations of this class have to override this method."""
|
||||||
|
|
||||||
|
def get(self, key, default=None):
|
||||||
|
"""Gets one item from the cache
|
||||||
|
|
||||||
|
NOTE: Thread-safety is required and it has to be
|
||||||
|
guaranteed by the backend implementation.
|
||||||
|
|
||||||
|
:params key: Key for the item to retrieve
|
||||||
|
from the cache.
|
||||||
|
:params default: The default value to return.
|
||||||
|
|
||||||
|
:returns: `key`'s value in the cache if it exists,
|
||||||
|
otherwise `default` should be returned.
|
||||||
|
"""
|
||||||
|
return self._get(key, default)
|
||||||
|
|
||||||
|
def __getitem__(self, key):
|
||||||
|
value = self.get(key, NOTSET)
|
||||||
|
|
||||||
|
if value is NOTSET:
|
||||||
|
raise KeyError
|
||||||
|
|
||||||
|
return value
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def __delitem__(self, key):
|
||||||
|
"""Removes an item from cache.
|
||||||
|
|
||||||
|
NOTE: Thread-safety is required and it has to be
|
||||||
|
guaranteed by the backend implementation.
|
||||||
|
|
||||||
|
:params key: The key to remove.
|
||||||
|
|
||||||
|
:returns: The key value if there's one
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _clear(self):
|
||||||
|
"""Implementations of this class have to override this method."""
|
||||||
|
|
||||||
|
def clear(self):
|
||||||
|
"""Removes all items from the cache.
|
||||||
|
|
||||||
|
NOTE: Thread-safety is required and it has to be
|
||||||
|
guaranteed by the backend implementation.
|
||||||
|
"""
|
||||||
|
return self._clear()
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _incr(self, key, delta):
|
||||||
|
"""Implementations of this class have to override this method."""
|
||||||
|
|
||||||
|
def incr(self, key, delta=1):
|
||||||
|
"""Increments the value for a key
|
||||||
|
|
||||||
|
:params key: The key for the value to be incremented
|
||||||
|
:params delta: Number of units by which to increment
|
||||||
|
the value. Pass a negative number to
|
||||||
|
decrement the value.
|
||||||
|
|
||||||
|
:returns: The new value
|
||||||
|
"""
|
||||||
|
return self._incr(key, delta)
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _append_tail(self, key, tail):
|
||||||
|
"""Implementations of this class have to override this method."""
|
||||||
|
|
||||||
|
def append_tail(self, key, tail):
|
||||||
|
"""Appends `tail` to `key`'s value.
|
||||||
|
|
||||||
|
:params key: The key of the value to which
|
||||||
|
`tail` should be appended.
|
||||||
|
:params tail: The list of values to append to the
|
||||||
|
original.
|
||||||
|
|
||||||
|
:returns: The new value
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not hasattr(tail, "__iter__"):
|
||||||
|
raise TypeError('Tail must be an iterable')
|
||||||
|
|
||||||
|
if not isinstance(tail, list):
|
||||||
|
# NOTE(flaper87): Make sure we pass a list
|
||||||
|
# down to the implementation. Not all drivers
|
||||||
|
# have support for generators, sets or other
|
||||||
|
# iterables.
|
||||||
|
tail = list(tail)
|
||||||
|
|
||||||
|
return self._append_tail(key, tail)
|
||||||
|
|
||||||
|
def append(self, key, value):
|
||||||
|
"""Appends `value` to `key`'s value.
|
||||||
|
|
||||||
|
:params key: The key of the value to which
|
||||||
|
`tail` should be appended.
|
||||||
|
:params value: The value to append to the
|
||||||
|
original.
|
||||||
|
|
||||||
|
:returns: The new value
|
||||||
|
"""
|
||||||
|
return self.append_tail(key, [value])
|
||||||
|
|
||||||
|
@abc.abstractmethod
def __contains__(self, key):
    """Return True when `key` exists in the cache, otherwise False.

    :params key: The key to verify.
    """
|
@abc.abstractmethod
def _get_many(self, keys, default):
    """Backend hook for `get_many`; subclasses must override.

    The fallback implementation issues one `get` per key.
    """
    return ((key, self.get(key, default=default)) for key in keys)
|
def get_many(self, keys, default=NOTSET):
    """Fetch the values for several keys at once.

    :params keys: List of keys to retrieve.
    :params default: Value to return for every key
                     missing from the cache.

    :returns: A generator of (key, value)
    """
    return self._get_many(keys, default)
|
@abc.abstractmethod
def _set_many(self, data, ttl):
    """Backend hook for `set_many`; subclasses must override.

    The fallback implementation stores each pair with `set`.
    """
    for item_key, item_value in data.items():
        self.set(item_key, item_value, ttl=ttl)
|
def set_many(self, data, ttl=None):
    """Store several key/value pairs at once.

    Whether this is efficient depends on the backend: the default
    `_set_many` simply calls `set` for every pair, while other
    backends support a native multi-set as part of their protocols.

    :params data: A dictionary like {key: val} to store
                  in the cache.
    :params ttl: Key's timeout in seconds.
    """
    effective_ttl = self._default_ttl if ttl is None else ttl
    self._set_many(data, effective_ttl)
|
def update(self, **kwargs):
    """Sets several (key, value) pairs.

    Refer to the `set_many` docstring.

    :params kwargs: The (key, value) pairs to store.
    """
    self.set_many(kwargs, ttl=self._default_ttl)
|
@abc.abstractmethod
def _unset_many(self, keys):
    """Backend hook for `unset_many`; subclasses must override.

    The fallback implementation deletes the keys one at a time.
    """
    for key in keys:
        del self[key]
|
def unset_many(self, keys):
    """Remove each of `keys` from the cache at once.

    :params keys: List of keys to unset.
    """
    self._unset_many(keys)
|
79
marconi/openstack/common/cache/cache.py
vendored
Normal file
79
marconi/openstack/common/cache/cache.py
vendored
Normal file
@ -0,0 +1,79 @@
|
|||||||
|
# Copyright 2013 Red Hat, Inc.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
"""Cache library.
|
||||||
|
|
||||||
|
Supported configuration options:
|
||||||
|
|
||||||
|
`default_backend`: Name of the cache backend to use.
|
||||||
|
`key_namespace`: Namespace under which keys will be created.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from stevedore import driver
|
||||||
|
|
||||||
|
from marconi.openstack.common.py3kcompat import urlutils
|
||||||
|
|
||||||
|
|
||||||
|
def _get_olso_configs():
    """Return the oslo.config options to register.

    NOTE(review): the name keeps the historical "olso" spelling
    because callers in this module reference it by that name.
    """
    # NOTE(flaper87): oslo.config should be optional. Instead of a
    # try / except at the top of this file, import cfg lazily here
    # and assume the caller already took care of this dependency.
    from oslo.config import cfg

    return [
        cfg.StrOpt('cache_url', default='memory://',
                   help='Url to connect to the cache backend.')
    ]
|
||||||
|
def register_oslo_configs(conf):
    """Register the cache configuration options on the given object.

    :params conf: Config object.
    :type conf: `cfg.ConfigOptions`
    """
    conf.register_opts(_get_olso_configs())
|
|
||||||
|
def get_cache(url='memory://'):
    """Loads the cache backend

    This function loads the cache backend specified by the given URL:
    the scheme selects the backend and the query string is passed to
    it as an ``options`` dict.

    :param url: Cache backend URL, e.g. 'memory://' (defaults to an
                in-memory backend).
    :returns: the loaded backend driver instance
    """
    parsed = urlutils.urlparse(url)
    backend = parsed.scheme

    query = parsed.query
    # NOTE(flaper87): We need the following hack
    # for python versions < 2.7.5. Previous versions
    # of python parsed query params just for 'known'
    # schemes. This was changed in this patch:
    # http://hg.python.org/cpython/rev/79e6ff3d9afd
    if not query and '?' in parsed.path:
        query = parsed.path.split('?', 1)[-1]
    parameters = urlutils.parse_qsl(query)
    kwargs = {'options': dict(parameters)}

    mgr = driver.DriverManager('marconi.openstack.common.cache.backends',
                               backend,
                               invoke_on_load=True,
                               invoke_args=[parsed],
                               invoke_kwds=kwargs)
    return mgr.driver
|
@ -1,5 +1,3 @@
|
|||||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
|
||||||
|
|
||||||
# Copyright 2011 OpenStack Foundation.
|
# Copyright 2011 OpenStack Foundation.
|
||||||
# Copyright 2012, Red Hat, Inc.
|
# Copyright 2012, Red Hat, Inc.
|
||||||
#
|
#
|
||||||
@ -26,7 +24,7 @@ import traceback
|
|||||||
|
|
||||||
import six
|
import six
|
||||||
|
|
||||||
from marconi.openstack.common.gettextutils import _ # noqa
|
from marconi.openstack.common.gettextutils import _
|
||||||
|
|
||||||
|
|
||||||
class save_and_reraise_exception(object):
|
class save_and_reraise_exception(object):
|
||||||
@ -44,13 +42,13 @@ class save_and_reraise_exception(object):
|
|||||||
|
|
||||||
In some cases the caller may not want to re-raise the exception, and
|
In some cases the caller may not want to re-raise the exception, and
|
||||||
for those circumstances this context provides a reraise flag that
|
for those circumstances this context provides a reraise flag that
|
||||||
can be used to suppress the exception. For example:
|
can be used to suppress the exception. For example::
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
with save_and_reraise_exception() as ctxt:
|
with save_and_reraise_exception() as ctxt:
|
||||||
decide_if_need_reraise()
|
decide_if_need_reraise()
|
||||||
if not should_be_reraised:
|
if not should_be_reraised:
|
||||||
ctxt.reraise = False
|
ctxt.reraise = False
|
||||||
"""
|
"""
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.reraise = True
|
self.reraise = True
|
||||||
|
@ -1,5 +1,3 @@
|
|||||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
|
||||||
|
|
||||||
# Copyright 2011 OpenStack Foundation.
|
# Copyright 2011 OpenStack Foundation.
|
||||||
# All Rights Reserved.
|
# All Rights Reserved.
|
||||||
#
|
#
|
||||||
@ -15,13 +13,13 @@
|
|||||||
# License for the specific language governing permissions and limitations
|
# License for the specific language governing permissions and limitations
|
||||||
# under the License.
|
# under the License.
|
||||||
|
|
||||||
|
|
||||||
import contextlib
|
import contextlib
|
||||||
import errno
|
import errno
|
||||||
import os
|
import os
|
||||||
|
import tempfile
|
||||||
|
|
||||||
from marconi.openstack.common import excutils
|
from marconi.openstack.common import excutils
|
||||||
from marconi.openstack.common.gettextutils import _ # noqa
|
from marconi.openstack.common.gettextutils import _
|
||||||
from marconi.openstack.common import log as logging
|
from marconi.openstack.common import log as logging
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
LOG = logging.getLogger(__name__)
|
||||||
@ -109,3 +107,30 @@ def file_open(*args, **kwargs):
|
|||||||
state at all (for unit tests)
|
state at all (for unit tests)
|
||||||
"""
|
"""
|
||||||
return file(*args, **kwargs)
|
return file(*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def write_to_tempfile(content, path=None, suffix='', prefix='tmp'):
    """Write `content` into a freshly created temporary file.

    Useful, for example, in database tests for generating
    configuration files with a given content, suffix and prefix.
    When `path` is given it is used as the containing directory
    and is created first if it does not already exist.

    :param content: content for temporary file.
    :param path: same as parameter 'dir' for mkstemp
    :param suffix: same as parameter 'suffix' for mkstemp
    :param prefix: same as parameter 'prefix' for mkstemp
    """
    if path:
        ensure_tree(path)

    fd, tmp_path = tempfile.mkstemp(suffix=suffix, dir=path, prefix=prefix)
    try:
        os.write(fd, content)
    finally:
        os.close(fd)
    return tmp_path
|
||||||
|
@ -1,5 +1,3 @@
|
|||||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
|
||||||
|
|
||||||
# Copyright 2012 Red Hat, Inc.
|
# Copyright 2012 Red Hat, Inc.
|
||||||
# Copyright 2013 IBM Corp.
|
# Copyright 2013 IBM Corp.
|
||||||
# All Rights Reserved.
|
# All Rights Reserved.
|
||||||
@ -26,13 +24,10 @@ Usual usage in an openstack.common module:
|
|||||||
|
|
||||||
import copy
|
import copy
|
||||||
import gettext
|
import gettext
|
||||||
import logging
|
import locale
|
||||||
|
from logging import handlers
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
try:
|
|
||||||
import UserString as _userString
|
|
||||||
except ImportError:
|
|
||||||
import collections as _userString
|
|
||||||
|
|
||||||
from babel import localedata
|
from babel import localedata
|
||||||
import six
|
import six
|
||||||
@ -58,7 +53,7 @@ def enable_lazy():
|
|||||||
|
|
||||||
def _(msg):
|
def _(msg):
|
||||||
if USE_LAZY:
|
if USE_LAZY:
|
||||||
return Message(msg, 'marconi')
|
return Message(msg, domain='marconi')
|
||||||
else:
|
else:
|
||||||
if six.PY3:
|
if six.PY3:
|
||||||
return _t.gettext(msg)
|
return _t.gettext(msg)
|
||||||
@ -90,11 +85,6 @@ def install(domain, lazy=False):
|
|||||||
# messages in OpenStack. We override the standard _() function
|
# messages in OpenStack. We override the standard _() function
|
||||||
# and % (format string) operation to build Message objects that can
|
# and % (format string) operation to build Message objects that can
|
||||||
# later be translated when we have more information.
|
# later be translated when we have more information.
|
||||||
#
|
|
||||||
# Also included below is an example LocaleHandler that translates
|
|
||||||
# Messages to an associated locale, effectively allowing many logs,
|
|
||||||
# each with their own locale.
|
|
||||||
|
|
||||||
def _lazy_gettext(msg):
|
def _lazy_gettext(msg):
|
||||||
"""Create and return a Message object.
|
"""Create and return a Message object.
|
||||||
|
|
||||||
@ -105,7 +95,7 @@ def install(domain, lazy=False):
|
|||||||
Message encapsulates a string so that we can translate
|
Message encapsulates a string so that we can translate
|
||||||
it later when needed.
|
it later when needed.
|
||||||
"""
|
"""
|
||||||
return Message(msg, domain)
|
return Message(msg, domain=domain)
|
||||||
|
|
||||||
from six import moves
|
from six import moves
|
||||||
moves.builtins.__dict__['_'] = _lazy_gettext
|
moves.builtins.__dict__['_'] = _lazy_gettext
|
||||||
@ -120,182 +110,168 @@ def install(domain, lazy=False):
|
|||||||
unicode=True)
|
unicode=True)
|
||||||
|
|
||||||
|
|
||||||
class Message(_userString.UserString, object):
|
class Message(six.text_type):
|
||||||
"""Class used to encapsulate translatable messages."""
|
"""A Message object is a unicode object that can be translated.
|
||||||
def __init__(self, msg, domain):
|
|
||||||
# _msg is the gettext msgid and should never change
|
|
||||||
self._msg = msg
|
|
||||||
self._left_extra_msg = ''
|
|
||||||
self._right_extra_msg = ''
|
|
||||||
self._locale = None
|
|
||||||
self.params = None
|
|
||||||
self.domain = domain
|
|
||||||
|
|
||||||
@property
|
Translation of Message is done explicitly using the translate() method.
|
||||||
def data(self):
|
For all non-translation intents and purposes, a Message is simply unicode,
|
||||||
# NOTE(mrodden): this should always resolve to a unicode string
|
and can be treated as such.
|
||||||
# that best represents the state of the message currently
|
"""
|
||||||
|
|
||||||
localedir = os.environ.get(self.domain.upper() + '_LOCALEDIR')
|
def __new__(cls, msgid, msgtext=None, params=None, domain='marconi', *args):
|
||||||
if self.locale:
|
"""Create a new Message object.
|
||||||
lang = gettext.translation(self.domain,
|
|
||||||
localedir=localedir,
|
|
||||||
languages=[self.locale],
|
|
||||||
fallback=True)
|
|
||||||
else:
|
|
||||||
# use system locale for translations
|
|
||||||
lang = gettext.translation(self.domain,
|
|
||||||
localedir=localedir,
|
|
||||||
fallback=True)
|
|
||||||
|
|
||||||
|
In order for translation to work gettext requires a message ID, this
|
||||||
|
msgid will be used as the base unicode text. It is also possible
|
||||||
|
for the msgid and the base unicode text to be different by passing
|
||||||
|
the msgtext parameter.
|
||||||
|
"""
|
||||||
|
# If the base msgtext is not given, we use the default translation
|
||||||
|
# of the msgid (which is in English) just in case the system locale is
|
||||||
|
# not English, so that the base text will be in that locale by default.
|
||||||
|
if not msgtext:
|
||||||
|
msgtext = Message._translate_msgid(msgid, domain)
|
||||||
|
# We want to initialize the parent unicode with the actual object that
|
||||||
|
# would have been plain unicode if 'Message' was not enabled.
|
||||||
|
msg = super(Message, cls).__new__(cls, msgtext)
|
||||||
|
msg.msgid = msgid
|
||||||
|
msg.domain = domain
|
||||||
|
msg.params = params
|
||||||
|
return msg
|
||||||
|
|
||||||
|
def translate(self, desired_locale=None):
|
||||||
|
"""Translate this message to the desired locale.
|
||||||
|
|
||||||
|
:param desired_locale: The desired locale to translate the message to,
|
||||||
|
if no locale is provided the message will be
|
||||||
|
translated to the system's default locale.
|
||||||
|
|
||||||
|
:returns: the translated message in unicode
|
||||||
|
"""
|
||||||
|
|
||||||
|
translated_message = Message._translate_msgid(self.msgid,
|
||||||
|
self.domain,
|
||||||
|
desired_locale)
|
||||||
|
if self.params is None:
|
||||||
|
# No need for more translation
|
||||||
|
return translated_message
|
||||||
|
|
||||||
|
# This Message object may have been formatted with one or more
|
||||||
|
# Message objects as substitution arguments, given either as a single
|
||||||
|
# argument, part of a tuple, or as one or more values in a dictionary.
|
||||||
|
# When translating this Message we need to translate those Messages too
|
||||||
|
translated_params = _translate_args(self.params, desired_locale)
|
||||||
|
|
||||||
|
translated_message = translated_message % translated_params
|
||||||
|
|
||||||
|
return translated_message
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _translate_msgid(msgid, domain, desired_locale=None):
|
||||||
|
if not desired_locale:
|
||||||
|
system_locale = locale.getdefaultlocale()
|
||||||
|
# If the system locale is not available to the runtime use English
|
||||||
|
if not system_locale[0]:
|
||||||
|
desired_locale = 'en_US'
|
||||||
|
else:
|
||||||
|
desired_locale = system_locale[0]
|
||||||
|
|
||||||
|
locale_dir = os.environ.get(domain.upper() + '_LOCALEDIR')
|
||||||
|
lang = gettext.translation(domain,
|
||||||
|
localedir=locale_dir,
|
||||||
|
languages=[desired_locale],
|
||||||
|
fallback=True)
|
||||||
if six.PY3:
|
if six.PY3:
|
||||||
ugettext = lang.gettext
|
translator = lang.gettext
|
||||||
else:
|
else:
|
||||||
ugettext = lang.ugettext
|
translator = lang.ugettext
|
||||||
|
|
||||||
full_msg = (self._left_extra_msg +
|
translated_message = translator(msgid)
|
||||||
ugettext(self._msg) +
|
return translated_message
|
||||||
self._right_extra_msg)
|
|
||||||
|
|
||||||
if self.params is not None:
|
def __mod__(self, other):
|
||||||
full_msg = full_msg % self.params
|
# When we mod a Message we want the actual operation to be performed
|
||||||
|
# by the parent class (i.e. unicode()), the only thing we do here is
|
||||||
|
# save the original msgid and the parameters in case of a translation
|
||||||
|
params = self._sanitize_mod_params(other)
|
||||||
|
unicode_mod = super(Message, self).__mod__(params)
|
||||||
|
modded = Message(self.msgid,
|
||||||
|
msgtext=unicode_mod,
|
||||||
|
params=params,
|
||||||
|
domain=self.domain)
|
||||||
|
return modded
|
||||||
|
|
||||||
return six.text_type(full_msg)
|
def _sanitize_mod_params(self, other):
|
||||||
|
"""Sanitize the object being modded with this Message.
|
||||||
|
|
||||||
@property
|
- Add support for modding 'None' so translation supports it
|
||||||
def locale(self):
|
- Trim the modded object, which can be a large dictionary, to only
|
||||||
return self._locale
|
those keys that would actually be used in a translation
|
||||||
|
- Snapshot the object being modded, in case the message is
|
||||||
|
translated, it will be used as it was when the Message was created
|
||||||
|
"""
|
||||||
|
if other is None:
|
||||||
|
params = (other,)
|
||||||
|
elif isinstance(other, dict):
|
||||||
|
params = self._trim_dictionary_parameters(other)
|
||||||
|
else:
|
||||||
|
params = self._copy_param(other)
|
||||||
|
return params
|
||||||
|
|
||||||
@locale.setter
|
def _trim_dictionary_parameters(self, dict_param):
|
||||||
def locale(self, value):
|
"""Return a dict that only has matching entries in the msgid."""
|
||||||
self._locale = value
|
# NOTE(luisg): Here we trim down the dictionary passed as parameters
|
||||||
if not self.params:
|
# to avoid carrying a lot of unnecessary weight around in the message
|
||||||
return
|
# object, for example if someone passes in Message() % locals() but
|
||||||
|
# only some params are used, and additionally we prevent errors for
|
||||||
|
# non-deepcopyable objects by unicoding() them.
|
||||||
|
|
||||||
# This Message object may have been constructed with one or more
|
# Look for %(param) keys in msgid;
|
||||||
# Message objects as substitution parameters, given as a single
|
# Skip %% and deal with the case where % is first character on the line
|
||||||
# Message, or a tuple or Map containing some, so when setting the
|
keys = re.findall('(?:[^%]|^)?%\((\w*)\)[a-z]', self.msgid)
|
||||||
# locale for this Message we need to set it for those Messages too.
|
|
||||||
if isinstance(self.params, Message):
|
|
||||||
self.params.locale = value
|
|
||||||
return
|
|
||||||
if isinstance(self.params, tuple):
|
|
||||||
for param in self.params:
|
|
||||||
if isinstance(param, Message):
|
|
||||||
param.locale = value
|
|
||||||
return
|
|
||||||
if isinstance(self.params, dict):
|
|
||||||
for param in self.params.values():
|
|
||||||
if isinstance(param, Message):
|
|
||||||
param.locale = value
|
|
||||||
|
|
||||||
def _save_dictionary_parameter(self, dict_param):
|
# If we don't find any %(param) keys but have a %s
|
||||||
full_msg = self.data
|
if not keys and re.findall('(?:[^%]|^)%[a-z]', self.msgid):
|
||||||
# look for %(blah) fields in string;
|
# Apparently the full dictionary is the parameter
|
||||||
# ignore %% and deal with the
|
params = self._copy_param(dict_param)
|
||||||
# case where % is first character on the line
|
|
||||||
keys = re.findall('(?:[^%]|^)?%\((\w*)\)[a-z]', full_msg)
|
|
||||||
|
|
||||||
# if we don't find any %(blah) blocks but have a %s
|
|
||||||
if not keys and re.findall('(?:[^%]|^)%[a-z]', full_msg):
|
|
||||||
# apparently the full dictionary is the parameter
|
|
||||||
params = copy.deepcopy(dict_param)
|
|
||||||
else:
|
else:
|
||||||
params = {}
|
params = {}
|
||||||
|
# Save our existing parameters as defaults to protect
|
||||||
|
# ourselves from losing values if we are called through an
|
||||||
|
# (erroneous) chain that builds a valid Message with
|
||||||
|
# arguments, and then does something like "msg % kwds"
|
||||||
|
# where kwds is an empty dictionary.
|
||||||
|
src = {}
|
||||||
|
if isinstance(self.params, dict):
|
||||||
|
src.update(self.params)
|
||||||
|
src.update(dict_param)
|
||||||
for key in keys:
|
for key in keys:
|
||||||
try:
|
params[key] = self._copy_param(src[key])
|
||||||
params[key] = copy.deepcopy(dict_param[key])
|
|
||||||
except TypeError:
|
|
||||||
# cast uncopyable thing to unicode string
|
|
||||||
params[key] = six.text_type(dict_param[key])
|
|
||||||
|
|
||||||
return params
|
return params
|
||||||
|
|
||||||
def _save_parameters(self, other):
|
def _copy_param(self, param):
|
||||||
# we check for None later to see if
|
try:
|
||||||
# we actually have parameters to inject,
|
return copy.deepcopy(param)
|
||||||
# so encapsulate if our parameter is actually None
|
except TypeError:
|
||||||
if other is None:
|
# Fallback to casting to unicode this will handle the
|
||||||
self.params = (other, )
|
# python code-like objects that can't be deep-copied
|
||||||
elif isinstance(other, dict):
|
return six.text_type(param)
|
||||||
self.params = self._save_dictionary_parameter(other)
|
|
||||||
else:
|
|
||||||
# fallback to casting to unicode,
|
|
||||||
# this will handle the problematic python code-like
|
|
||||||
# objects that cannot be deep-copied
|
|
||||||
try:
|
|
||||||
self.params = copy.deepcopy(other)
|
|
||||||
except TypeError:
|
|
||||||
self.params = six.text_type(other)
|
|
||||||
|
|
||||||
return self
|
|
||||||
|
|
||||||
# overrides to be more string-like
|
|
||||||
def __unicode__(self):
|
|
||||||
return self.data
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
if six.PY3:
|
|
||||||
return self.__unicode__()
|
|
||||||
return self.data.encode('utf-8')
|
|
||||||
|
|
||||||
def __getstate__(self):
|
|
||||||
to_copy = ['_msg', '_right_extra_msg', '_left_extra_msg',
|
|
||||||
'domain', 'params', '_locale']
|
|
||||||
new_dict = self.__dict__.fromkeys(to_copy)
|
|
||||||
for attr in to_copy:
|
|
||||||
new_dict[attr] = copy.deepcopy(self.__dict__[attr])
|
|
||||||
|
|
||||||
return new_dict
|
|
||||||
|
|
||||||
def __setstate__(self, state):
|
|
||||||
for (k, v) in state.items():
|
|
||||||
setattr(self, k, v)
|
|
||||||
|
|
||||||
# operator overloads
|
|
||||||
def __add__(self, other):
|
def __add__(self, other):
|
||||||
copied = copy.deepcopy(self)
|
msg = _('Message objects do not support addition.')
|
||||||
copied._right_extra_msg += other.__str__()
|
raise TypeError(msg)
|
||||||
return copied
|
|
||||||
|
|
||||||
def __radd__(self, other):
|
def __radd__(self, other):
|
||||||
copied = copy.deepcopy(self)
|
return self.__add__(other)
|
||||||
copied._left_extra_msg += other.__str__()
|
|
||||||
return copied
|
|
||||||
|
|
||||||
def __mod__(self, other):
|
def __str__(self):
|
||||||
# do a format string to catch and raise
|
# NOTE(luisg): Logging in python 2.6 tries to str() log records,
|
||||||
# any possible KeyErrors from missing parameters
|
# and it expects specifically a UnicodeError in order to proceed.
|
||||||
self.data % other
|
msg = _('Message objects do not support str() because they may '
|
||||||
copied = copy.deepcopy(self)
|
'contain non-ascii characters. '
|
||||||
return copied._save_parameters(other)
|
'Please use unicode() or translate() instead.')
|
||||||
|
raise UnicodeError(msg)
|
||||||
def __mul__(self, other):
|
|
||||||
return self.data * other
|
|
||||||
|
|
||||||
def __rmul__(self, other):
|
|
||||||
return other * self.data
|
|
||||||
|
|
||||||
def __getitem__(self, key):
|
|
||||||
return self.data[key]
|
|
||||||
|
|
||||||
def __getslice__(self, start, end):
|
|
||||||
return self.data.__getslice__(start, end)
|
|
||||||
|
|
||||||
def __getattribute__(self, name):
|
|
||||||
# NOTE(mrodden): handle lossy operations that we can't deal with yet
|
|
||||||
# These override the UserString implementation, since UserString
|
|
||||||
# uses our __class__ attribute to try and build a new message
|
|
||||||
# after running the inner data string through the operation.
|
|
||||||
# At that point, we have lost the gettext message id and can just
|
|
||||||
# safely resolve to a string instead.
|
|
||||||
ops = ['capitalize', 'center', 'decode', 'encode',
|
|
||||||
'expandtabs', 'ljust', 'lstrip', 'replace', 'rjust', 'rstrip',
|
|
||||||
'strip', 'swapcase', 'title', 'translate', 'upper', 'zfill']
|
|
||||||
if name in ops:
|
|
||||||
return getattr(self.data, name)
|
|
||||||
else:
|
|
||||||
return _userString.UserString.__getattribute__(self, name)
|
|
||||||
|
|
||||||
|
|
||||||
def get_available_languages(domain):
|
def get_available_languages(domain):
|
||||||
@ -317,49 +293,147 @@ def get_available_languages(domain):
|
|||||||
# NOTE(luisg): Babel <1.0 used a function called list(), which was
|
# NOTE(luisg): Babel <1.0 used a function called list(), which was
|
||||||
# renamed to locale_identifiers() in >=1.0, the requirements master list
|
# renamed to locale_identifiers() in >=1.0, the requirements master list
|
||||||
# requires >=0.9.6, uncapped, so defensively work with both. We can remove
|
# requires >=0.9.6, uncapped, so defensively work with both. We can remove
|
||||||
# this check when the master list updates to >=1.0, and all projects udpate
|
# this check when the master list updates to >=1.0, and update all projects
|
||||||
list_identifiers = (getattr(localedata, 'list', None) or
|
list_identifiers = (getattr(localedata, 'list', None) or
|
||||||
getattr(localedata, 'locale_identifiers'))
|
getattr(localedata, 'locale_identifiers'))
|
||||||
locale_identifiers = list_identifiers()
|
locale_identifiers = list_identifiers()
|
||||||
|
|
||||||
for i in locale_identifiers:
|
for i in locale_identifiers:
|
||||||
if find(i) is not None:
|
if find(i) is not None:
|
||||||
language_list.append(i)
|
language_list.append(i)
|
||||||
|
|
||||||
|
# NOTE(luisg): Babel>=1.0,<1.3 has a bug where some OpenStack supported
|
||||||
|
# locales (e.g. 'zh_CN', and 'zh_TW') aren't supported even though they
|
||||||
|
# are perfectly legitimate locales:
|
||||||
|
# https://github.com/mitsuhiko/babel/issues/37
|
||||||
|
# In Babel 1.3 they fixed the bug and they support these locales, but
|
||||||
|
# they are still not explicitly "listed" by locale_identifiers().
|
||||||
|
# That is why we add the locales here explicitly if necessary so that
|
||||||
|
# they are listed as supported.
|
||||||
|
aliases = {'zh': 'zh_CN',
|
||||||
|
'zh_Hant_HK': 'zh_HK',
|
||||||
|
'zh_Hant': 'zh_TW',
|
||||||
|
'fil': 'tl_PH'}
|
||||||
|
for (locale, alias) in six.iteritems(aliases):
|
||||||
|
if locale in language_list and alias not in language_list:
|
||||||
|
language_list.append(alias)
|
||||||
|
|
||||||
_AVAILABLE_LANGUAGES[domain] = language_list
|
_AVAILABLE_LANGUAGES[domain] = language_list
|
||||||
return copy.copy(language_list)
|
return copy.copy(language_list)
|
||||||
|
|
||||||
|
|
||||||
def get_localized_message(message, user_locale):
|
def translate(obj, desired_locale=None):
|
||||||
"""Gets a localized version of the given message in the given locale."""
|
"""Gets the translated unicode representation of the given object.
|
||||||
|
|
||||||
|
If the object is not translatable it is returned as-is.
|
||||||
|
If the locale is None the object is translated to the system locale.
|
||||||
|
|
||||||
|
:param obj: the object to translate
|
||||||
|
:param desired_locale: the locale to translate the message to, if None the
|
||||||
|
default system locale will be used
|
||||||
|
:returns: the translated object in unicode, or the original object if
|
||||||
|
it could not be translated
|
||||||
|
"""
|
||||||
|
message = obj
|
||||||
|
if not isinstance(message, Message):
|
||||||
|
# If the object to translate is not already translatable,
|
||||||
|
# let's first get its unicode representation
|
||||||
|
message = six.text_type(obj)
|
||||||
if isinstance(message, Message):
|
if isinstance(message, Message):
|
||||||
if user_locale:
|
# Even after unicoding() we still need to check if we are
|
||||||
message.locale = user_locale
|
# running with translatable unicode before translating
|
||||||
return six.text_type(message)
|
return message.translate(desired_locale)
|
||||||
else:
|
return obj
|
||||||
return message
|
|
||||||
|
|
||||||
|
|
||||||
class LocaleHandler(logging.Handler):
|
def _translate_args(args, desired_locale=None):
|
||||||
"""Handler that can have a locale associated to translate Messages.
|
"""Translates all the translatable elements of the given arguments object.
|
||||||
|
|
||||||
A quick example of how to utilize the Message class above.
|
This method is used for translating the translatable values in method
|
||||||
LocaleHandler takes a locale and a target logging.Handler object
|
arguments which include values of tuples or dictionaries.
|
||||||
to forward LogRecord objects to after translating the internal Message.
|
If the object is not a tuple or a dictionary the object itself is
|
||||||
|
translated if it is translatable.
|
||||||
|
|
||||||
|
If the locale is None the object is translated to the system locale.
|
||||||
|
|
||||||
|
:param args: the args to translate
|
||||||
|
:param desired_locale: the locale to translate the args to, if None the
|
||||||
|
default system locale will be used
|
||||||
|
:returns: a new args object with the translated contents of the original
|
||||||
|
"""
|
||||||
|
if isinstance(args, tuple):
|
||||||
|
return tuple(translate(v, desired_locale) for v in args)
|
||||||
|
if isinstance(args, dict):
|
||||||
|
translated_dict = {}
|
||||||
|
for (k, v) in six.iteritems(args):
|
||||||
|
translated_v = translate(v, desired_locale)
|
||||||
|
translated_dict[k] = translated_v
|
||||||
|
return translated_dict
|
||||||
|
return translate(args, desired_locale)
|
||||||
|
|
||||||
|
|
||||||
|
class TranslationHandler(handlers.MemoryHandler):
|
||||||
|
"""Handler that translates records before logging them.
|
||||||
|
|
||||||
|
The TranslationHandler takes a locale and a target logging.Handler object
|
||||||
|
to forward LogRecord objects to after translating them. This handler
|
||||||
|
depends on Message objects being logged, instead of regular strings.
|
||||||
|
|
||||||
|
The handler can be configured declaratively in the logging.conf as follows:
|
||||||
|
|
||||||
|
[handlers]
|
||||||
|
keys = translatedlog, translator
|
||||||
|
|
||||||
|
[handler_translatedlog]
|
||||||
|
class = handlers.WatchedFileHandler
|
||||||
|
args = ('/var/log/api-localized.log',)
|
||||||
|
formatter = context
|
||||||
|
|
||||||
|
[handler_translator]
|
||||||
|
class = openstack.common.log.TranslationHandler
|
||||||
|
target = translatedlog
|
||||||
|
args = ('zh_CN',)
|
||||||
|
|
||||||
|
If the specified locale is not available in the system, the handler will
|
||||||
|
log in the default locale.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, locale, target):
|
def __init__(self, locale=None, target=None):
|
||||||
"""Initialize a LocaleHandler
|
"""Initialize a TranslationHandler
|
||||||
|
|
||||||
:param locale: locale to use for translating messages
|
:param locale: locale to use for translating messages
|
||||||
:param target: logging.Handler object to forward
|
:param target: logging.Handler object to forward
|
||||||
LogRecord objects to after translation
|
LogRecord objects to after translation
|
||||||
"""
|
"""
|
||||||
logging.Handler.__init__(self)
|
# NOTE(luisg): In order to allow this handler to be a wrapper for
|
||||||
|
# other handlers, such as a FileHandler, and still be able to
|
||||||
|
# configure it using logging.conf, this handler has to extend
|
||||||
|
# MemoryHandler because only the MemoryHandlers' logging.conf
|
||||||
|
# parsing is implemented such that it accepts a target handler.
|
||||||
|
handlers.MemoryHandler.__init__(self, capacity=0, target=target)
|
||||||
self.locale = locale
|
self.locale = locale
|
||||||
self.target = target
|
|
||||||
|
def setFormatter(self, fmt):
|
||||||
|
self.target.setFormatter(fmt)
|
||||||
|
|
||||||
def emit(self, record):
|
def emit(self, record):
|
||||||
if isinstance(record.msg, Message):
|
# We save the message from the original record to restore it
|
||||||
# set the locale and resolve to a string
|
# after translation, so other handlers are not affected by this
|
||||||
record.msg.locale = self.locale
|
original_msg = record.msg
|
||||||
|
original_args = record.args
|
||||||
|
|
||||||
|
try:
|
||||||
|
self._translate_and_log_record(record)
|
||||||
|
finally:
|
||||||
|
record.msg = original_msg
|
||||||
|
record.args = original_args
|
||||||
|
|
||||||
|
def _translate_and_log_record(self, record):
|
||||||
|
record.msg = translate(record.msg, self.locale)
|
||||||
|
|
||||||
|
# In addition to translating the message, we also need to translate
|
||||||
|
# arguments that were passed to the log method that were not part
|
||||||
|
# of the main message e.g., log.info(_('Some message %s'), this_one))
|
||||||
|
record.args = _translate_args(record.args, self.locale)
|
||||||
|
|
||||||
self.target.emit(record)
|
self.target.emit(record)
|
||||||
|
@ -1,5 +1,3 @@
|
|||||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
|
||||||
|
|
||||||
# Copyright 2011 OpenStack Foundation.
|
# Copyright 2011 OpenStack Foundation.
|
||||||
# All Rights Reserved.
|
# All Rights Reserved.
|
||||||
#
|
#
|
||||||
|
@ -1,5 +1,3 @@
|
|||||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
|
||||||
|
|
||||||
# Copyright 2010 United States Government as represented by the
|
# Copyright 2010 United States Government as represented by the
|
||||||
# Administrator of the National Aeronautics and Space Administration.
|
# Administrator of the National Aeronautics and Space Administration.
|
||||||
# Copyright 2011 Justin Santa Barbara
|
# Copyright 2011 Justin Santa Barbara
|
||||||
@ -41,11 +39,16 @@ import json
|
|||||||
try:
|
try:
|
||||||
import xmlrpclib
|
import xmlrpclib
|
||||||
except ImportError:
|
except ImportError:
|
||||||
# NOTE(jd): xmlrpclib is not shipped with Python 3
|
# NOTE(jaypipes): xmlrpclib was renamed to xmlrpc.client in Python3
|
||||||
xmlrpclib = None
|
# however the function and object call signatures
|
||||||
|
# remained the same. This whole try/except block should
|
||||||
|
# be removed and replaced with a call to six.moves once
|
||||||
|
# six 1.4.2 is released. See http://bit.ly/1bqrVzu
|
||||||
|
import xmlrpc.client as xmlrpclib
|
||||||
|
|
||||||
import six
|
import six
|
||||||
|
|
||||||
|
from marconi.openstack.common import gettextutils
|
||||||
from marconi.openstack.common import importutils
|
from marconi.openstack.common import importutils
|
||||||
from marconi.openstack.common import timeutils
|
from marconi.openstack.common import timeutils
|
||||||
|
|
||||||
@ -123,18 +126,20 @@ def to_primitive(value, convert_instances=False, convert_datetime=True,
|
|||||||
level=level,
|
level=level,
|
||||||
max_depth=max_depth)
|
max_depth=max_depth)
|
||||||
if isinstance(value, dict):
|
if isinstance(value, dict):
|
||||||
return dict((k, recursive(v)) for k, v in value.iteritems())
|
return dict((k, recursive(v)) for k, v in six.iteritems(value))
|
||||||
elif isinstance(value, (list, tuple)):
|
elif isinstance(value, (list, tuple)):
|
||||||
return [recursive(lv) for lv in value]
|
return [recursive(lv) for lv in value]
|
||||||
|
|
||||||
# It's not clear why xmlrpclib created their own DateTime type, but
|
# It's not clear why xmlrpclib created their own DateTime type, but
|
||||||
# for our purposes, make it a datetime type which is explicitly
|
# for our purposes, make it a datetime type which is explicitly
|
||||||
# handled
|
# handled
|
||||||
if xmlrpclib and isinstance(value, xmlrpclib.DateTime):
|
if isinstance(value, xmlrpclib.DateTime):
|
||||||
value = datetime.datetime(*tuple(value.timetuple())[:6])
|
value = datetime.datetime(*tuple(value.timetuple())[:6])
|
||||||
|
|
||||||
if convert_datetime and isinstance(value, datetime.datetime):
|
if convert_datetime and isinstance(value, datetime.datetime):
|
||||||
return timeutils.strtime(value)
|
return timeutils.strtime(value)
|
||||||
|
elif isinstance(value, gettextutils.Message):
|
||||||
|
return value.data
|
||||||
elif hasattr(value, 'iteritems'):
|
elif hasattr(value, 'iteritems'):
|
||||||
return recursive(dict(value.iteritems()), level=level + 1)
|
return recursive(dict(value.iteritems()), level=level + 1)
|
||||||
elif hasattr(value, '__iter__'):
|
elif hasattr(value, '__iter__'):
|
||||||
|
@ -1,5 +1,3 @@
|
|||||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
|
||||||
|
|
||||||
# Copyright 2011 OpenStack Foundation.
|
# Copyright 2011 OpenStack Foundation.
|
||||||
# All Rights Reserved.
|
# All Rights Reserved.
|
||||||
#
|
#
|
||||||
|
@ -1,5 +1,3 @@
|
|||||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
|
||||||
|
|
||||||
# Copyright 2011 OpenStack Foundation.
|
# Copyright 2011 OpenStack Foundation.
|
||||||
# All Rights Reserved.
|
# All Rights Reserved.
|
||||||
#
|
#
|
||||||
@ -15,7 +13,6 @@
|
|||||||
# License for the specific language governing permissions and limitations
|
# License for the specific language governing permissions and limitations
|
||||||
# under the License.
|
# under the License.
|
||||||
|
|
||||||
|
|
||||||
import contextlib
|
import contextlib
|
||||||
import errno
|
import errno
|
||||||
import functools
|
import functools
|
||||||
@ -31,8 +28,7 @@ import weakref
|
|||||||
from oslo.config import cfg
|
from oslo.config import cfg
|
||||||
|
|
||||||
from marconi.openstack.common import fileutils
|
from marconi.openstack.common import fileutils
|
||||||
from marconi.openstack.common.gettextutils import _ # noqa
|
from marconi.openstack.common.gettextutils import _
|
||||||
from marconi.openstack.common import local
|
|
||||||
from marconi.openstack.common import log as logging
|
from marconi.openstack.common import log as logging
|
||||||
|
|
||||||
|
|
||||||
@ -79,6 +75,12 @@ class _InterProcessLock(object):
|
|||||||
self.fname = name
|
self.fname = name
|
||||||
|
|
||||||
def __enter__(self):
|
def __enter__(self):
|
||||||
|
basedir = os.path.dirname(self.fname)
|
||||||
|
|
||||||
|
if not os.path.exists(basedir):
|
||||||
|
fileutils.ensure_tree(basedir)
|
||||||
|
LOG.info(_('Created lock path: %s'), basedir)
|
||||||
|
|
||||||
self.lockfile = open(self.fname, 'w')
|
self.lockfile = open(self.fname, 'w')
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
@ -88,6 +90,7 @@ class _InterProcessLock(object):
|
|||||||
# Also upon reading the MSDN docs for locking(), it seems
|
# Also upon reading the MSDN docs for locking(), it seems
|
||||||
# to have a laughable 10 attempts "blocking" mechanism.
|
# to have a laughable 10 attempts "blocking" mechanism.
|
||||||
self.trylock()
|
self.trylock()
|
||||||
|
LOG.debug(_('Got file lock "%s"'), self.fname)
|
||||||
return self
|
return self
|
||||||
except IOError as e:
|
except IOError as e:
|
||||||
if e.errno in (errno.EACCES, errno.EAGAIN):
|
if e.errno in (errno.EACCES, errno.EAGAIN):
|
||||||
@ -104,6 +107,7 @@ class _InterProcessLock(object):
|
|||||||
except IOError:
|
except IOError:
|
||||||
LOG.exception(_("Could not release the acquired lock `%s`"),
|
LOG.exception(_("Could not release the acquired lock `%s`"),
|
||||||
self.fname)
|
self.fname)
|
||||||
|
LOG.debug(_('Released file lock "%s"'), self.fname)
|
||||||
|
|
||||||
def trylock(self):
|
def trylock(self):
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
@ -139,26 +143,27 @@ _semaphores = weakref.WeakValueDictionary()
|
|||||||
_semaphores_lock = threading.Lock()
|
_semaphores_lock = threading.Lock()
|
||||||
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
def external_lock(name, lock_file_prefix=None):
|
||||||
def lock(name, lock_file_prefix=None, external=False, lock_path=None):
|
with internal_lock(name):
|
||||||
"""Context based lock
|
LOG.debug(_('Attempting to grab external lock "%(lock)s"'),
|
||||||
|
{'lock': name})
|
||||||
|
|
||||||
This function yields a `threading.Semaphore` instance (if we don't use
|
# NOTE(mikal): the lock name cannot contain directory
|
||||||
eventlet.monkey_patch(), else `semaphore.Semaphore`) unless external is
|
# separators
|
||||||
True, in which case, it'll yield an InterProcessLock instance.
|
name = name.replace(os.sep, '_')
|
||||||
|
if lock_file_prefix:
|
||||||
|
sep = '' if lock_file_prefix.endswith('-') else '-'
|
||||||
|
name = '%s%s%s' % (lock_file_prefix, sep, name)
|
||||||
|
|
||||||
:param lock_file_prefix: The lock_file_prefix argument is used to provide
|
if not CONF.lock_path:
|
||||||
lock files on disk with a meaningful prefix.
|
raise cfg.RequiredOptError('lock_path')
|
||||||
|
|
||||||
:param external: The external keyword argument denotes whether this lock
|
lock_file_path = os.path.join(CONF.lock_path, name)
|
||||||
should work across multiple processes. This means that if two different
|
|
||||||
workers both run a a method decorated with @synchronized('mylock',
|
|
||||||
external=True), only one of them will execute at a time.
|
|
||||||
|
|
||||||
:param lock_path: The lock_path keyword argument is used to specify a
|
return InterProcessLock(lock_file_path)
|
||||||
special location for external lock files to live. If nothing is set, then
|
|
||||||
CONF.lock_path is used as a default.
|
|
||||||
"""
|
def internal_lock(name):
|
||||||
with _semaphores_lock:
|
with _semaphores_lock:
|
||||||
try:
|
try:
|
||||||
sem = _semaphores[name]
|
sem = _semaphores[name]
|
||||||
@ -166,58 +171,35 @@ def lock(name, lock_file_prefix=None, external=False, lock_path=None):
|
|||||||
sem = threading.Semaphore()
|
sem = threading.Semaphore()
|
||||||
_semaphores[name] = sem
|
_semaphores[name] = sem
|
||||||
|
|
||||||
with sem:
|
LOG.debug(_('Got semaphore "%(lock)s"'), {'lock': name})
|
||||||
LOG.debug(_('Got semaphore "%(lock)s"'), {'lock': name})
|
return sem
|
||||||
|
|
||||||
# NOTE(mikal): I know this looks odd
|
|
||||||
if not hasattr(local.strong_store, 'locks_held'):
|
|
||||||
local.strong_store.locks_held = []
|
|
||||||
local.strong_store.locks_held.append(name)
|
|
||||||
|
|
||||||
try:
|
|
||||||
if external and not CONF.disable_process_locking:
|
|
||||||
LOG.debug(_('Attempting to grab file lock "%(lock)s"'),
|
|
||||||
{'lock': name})
|
|
||||||
|
|
||||||
# We need a copy of lock_path because it is non-local
|
|
||||||
local_lock_path = lock_path or CONF.lock_path
|
|
||||||
if not local_lock_path:
|
|
||||||
raise cfg.RequiredOptError('lock_path')
|
|
||||||
|
|
||||||
if not os.path.exists(local_lock_path):
|
|
||||||
fileutils.ensure_tree(local_lock_path)
|
|
||||||
LOG.info(_('Created lock path: %s'), local_lock_path)
|
|
||||||
|
|
||||||
def add_prefix(name, prefix):
|
|
||||||
if not prefix:
|
|
||||||
return name
|
|
||||||
sep = '' if prefix.endswith('-') else '-'
|
|
||||||
return '%s%s%s' % (prefix, sep, name)
|
|
||||||
|
|
||||||
# NOTE(mikal): the lock name cannot contain directory
|
|
||||||
# separators
|
|
||||||
lock_file_name = add_prefix(name.replace(os.sep, '_'),
|
|
||||||
lock_file_prefix)
|
|
||||||
|
|
||||||
lock_file_path = os.path.join(local_lock_path, lock_file_name)
|
|
||||||
|
|
||||||
try:
|
|
||||||
lock = InterProcessLock(lock_file_path)
|
|
||||||
with lock as lock:
|
|
||||||
LOG.debug(_('Got file lock "%(lock)s" at %(path)s'),
|
|
||||||
{'lock': name, 'path': lock_file_path})
|
|
||||||
yield lock
|
|
||||||
finally:
|
|
||||||
LOG.debug(_('Released file lock "%(lock)s" at %(path)s'),
|
|
||||||
{'lock': name, 'path': lock_file_path})
|
|
||||||
else:
|
|
||||||
yield sem
|
|
||||||
|
|
||||||
finally:
|
|
||||||
local.strong_store.locks_held.remove(name)
|
|
||||||
|
|
||||||
|
|
||||||
def synchronized(name, lock_file_prefix=None, external=False, lock_path=None):
|
@contextlib.contextmanager
|
||||||
|
def lock(name, lock_file_prefix=None, external=False):
|
||||||
|
"""Context based lock
|
||||||
|
|
||||||
|
This function yields a `threading.Semaphore` instance (if we don't use
|
||||||
|
eventlet.monkey_patch(), else `semaphore.Semaphore`) unless external is
|
||||||
|
True, in which case, it'll yield an InterProcessLock instance.
|
||||||
|
|
||||||
|
:param lock_file_prefix: The lock_file_prefix argument is used to provide
|
||||||
|
lock files on disk with a meaningful prefix.
|
||||||
|
|
||||||
|
:param external: The external keyword argument denotes whether this lock
|
||||||
|
should work across multiple processes. This means that if two different
|
||||||
|
workers both run a a method decorated with @synchronized('mylock',
|
||||||
|
external=True), only one of them will execute at a time.
|
||||||
|
"""
|
||||||
|
if external and not CONF.disable_process_locking:
|
||||||
|
lock = external_lock(name, lock_file_prefix)
|
||||||
|
else:
|
||||||
|
lock = internal_lock(name)
|
||||||
|
with lock:
|
||||||
|
yield lock
|
||||||
|
|
||||||
|
|
||||||
|
def synchronized(name, lock_file_prefix=None, external=False):
|
||||||
"""Synchronization decorator.
|
"""Synchronization decorator.
|
||||||
|
|
||||||
Decorating a method like so::
|
Decorating a method like so::
|
||||||
@ -245,7 +227,7 @@ def synchronized(name, lock_file_prefix=None, external=False, lock_path=None):
|
|||||||
@functools.wraps(f)
|
@functools.wraps(f)
|
||||||
def inner(*args, **kwargs):
|
def inner(*args, **kwargs):
|
||||||
try:
|
try:
|
||||||
with lock(name, lock_file_prefix, external, lock_path):
|
with lock(name, lock_file_prefix, external):
|
||||||
LOG.debug(_('Got semaphore / lock "%(function)s"'),
|
LOG.debug(_('Got semaphore / lock "%(function)s"'),
|
||||||
{'function': f.__name__})
|
{'function': f.__name__})
|
||||||
return f(*args, **kwargs)
|
return f(*args, **kwargs)
|
||||||
|
@ -1,5 +1,3 @@
|
|||||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
|
||||||
|
|
||||||
# Copyright 2011 OpenStack Foundation.
|
# Copyright 2011 OpenStack Foundation.
|
||||||
# Copyright 2010 United States Government as represented by the
|
# Copyright 2010 United States Government as represented by the
|
||||||
# Administrator of the National Aeronautics and Space Administration.
|
# Administrator of the National Aeronautics and Space Administration.
|
||||||
@ -35,13 +33,15 @@ import logging
|
|||||||
import logging.config
|
import logging.config
|
||||||
import logging.handlers
|
import logging.handlers
|
||||||
import os
|
import os
|
||||||
|
import re
|
||||||
import sys
|
import sys
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from oslo.config import cfg
|
from oslo.config import cfg
|
||||||
|
import six
|
||||||
from six import moves
|
from six import moves
|
||||||
|
|
||||||
from marconi.openstack.common.gettextutils import _ # noqa
|
from marconi.openstack.common.gettextutils import _
|
||||||
from marconi.openstack.common import importutils
|
from marconi.openstack.common import importutils
|
||||||
from marconi.openstack.common import jsonutils
|
from marconi.openstack.common import jsonutils
|
||||||
from marconi.openstack.common import local
|
from marconi.openstack.common import local
|
||||||
@ -49,6 +49,24 @@ from marconi.openstack.common import local
|
|||||||
|
|
||||||
_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
|
_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
|
||||||
|
|
||||||
|
_SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password', 'admin_password']
|
||||||
|
|
||||||
|
# NOTE(ldbragst): Let's build a list of regex objects using the list of
|
||||||
|
# _SANITIZE_KEYS we already have. This way, we only have to add the new key
|
||||||
|
# to the list of _SANITIZE_KEYS and we can generate regular expressions
|
||||||
|
# for XML and JSON automatically.
|
||||||
|
_SANITIZE_PATTERNS = []
|
||||||
|
_FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])',
|
||||||
|
r'(<%(key)s>).*?(</%(key)s>)',
|
||||||
|
r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])',
|
||||||
|
r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])']
|
||||||
|
|
||||||
|
for key in _SANITIZE_KEYS:
|
||||||
|
for pattern in _FORMAT_PATTERNS:
|
||||||
|
reg_ex = re.compile(pattern % {'key': key}, re.DOTALL)
|
||||||
|
_SANITIZE_PATTERNS.append(reg_ex)
|
||||||
|
|
||||||
|
|
||||||
common_cli_opts = [
|
common_cli_opts = [
|
||||||
cfg.BoolOpt('debug',
|
cfg.BoolOpt('debug',
|
||||||
short='d',
|
short='d',
|
||||||
@ -63,11 +81,13 @@ common_cli_opts = [
|
|||||||
]
|
]
|
||||||
|
|
||||||
logging_cli_opts = [
|
logging_cli_opts = [
|
||||||
cfg.StrOpt('log-config',
|
cfg.StrOpt('log-config-append',
|
||||||
metavar='PATH',
|
metavar='PATH',
|
||||||
help='If this option is specified, the logging configuration '
|
deprecated_name='log-config',
|
||||||
'file specified is used and overrides any other logging '
|
help='The name of logging configuration file. It does not '
|
||||||
'options specified. Please see the Python logging module '
|
'disable existing loggers, but just appends specified '
|
||||||
|
'logging configuration to any other existing logging '
|
||||||
|
'options. Please see the Python logging module '
|
||||||
'documentation for details on logging configuration '
|
'documentation for details on logging configuration '
|
||||||
'files.'),
|
'files.'),
|
||||||
cfg.StrOpt('log-format',
|
cfg.StrOpt('log-format',
|
||||||
@ -110,7 +130,7 @@ generic_log_opts = [
|
|||||||
log_opts = [
|
log_opts = [
|
||||||
cfg.StrOpt('logging_context_format_string',
|
cfg.StrOpt('logging_context_format_string',
|
||||||
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
|
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
|
||||||
'%(name)s [%(request_id)s %(user)s %(tenant)s] '
|
'%(name)s [%(request_id)s %(user_identity)s] '
|
||||||
'%(instance)s%(message)s',
|
'%(instance)s%(message)s',
|
||||||
help='format string to use for log messages with context'),
|
help='format string to use for log messages with context'),
|
||||||
cfg.StrOpt('logging_default_format_string',
|
cfg.StrOpt('logging_default_format_string',
|
||||||
@ -126,12 +146,13 @@ log_opts = [
|
|||||||
help='prefix each line of exception output with this format'),
|
help='prefix each line of exception output with this format'),
|
||||||
cfg.ListOpt('default_log_levels',
|
cfg.ListOpt('default_log_levels',
|
||||||
default=[
|
default=[
|
||||||
|
'amqp=WARN',
|
||||||
'amqplib=WARN',
|
'amqplib=WARN',
|
||||||
'sqlalchemy=WARN',
|
|
||||||
'boto=WARN',
|
'boto=WARN',
|
||||||
|
'qpid=WARN',
|
||||||
|
'sqlalchemy=WARN',
|
||||||
'suds=INFO',
|
'suds=INFO',
|
||||||
'keystone=INFO',
|
'iso8601=WARN',
|
||||||
'eventlet.wsgi.server=WARN'
|
|
||||||
],
|
],
|
||||||
help='list of logger=LEVEL pairs'),
|
help='list of logger=LEVEL pairs'),
|
||||||
cfg.BoolOpt('publish_errors',
|
cfg.BoolOpt('publish_errors',
|
||||||
@ -207,6 +228,42 @@ def _get_log_file_path(binary=None):
|
|||||||
binary = binary or _get_binary_name()
|
binary = binary or _get_binary_name()
|
||||||
return '%s.log' % (os.path.join(logdir, binary),)
|
return '%s.log' % (os.path.join(logdir, binary),)
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def mask_password(message, secret="***"):
|
||||||
|
"""Replace password with 'secret' in message.
|
||||||
|
|
||||||
|
:param message: The string which includes security information.
|
||||||
|
:param secret: value with which to replace passwords.
|
||||||
|
:returns: The unicode value of message with the password fields masked.
|
||||||
|
|
||||||
|
For example:
|
||||||
|
|
||||||
|
>>> mask_password("'adminPass' : 'aaaaa'")
|
||||||
|
"'adminPass' : '***'"
|
||||||
|
>>> mask_password("'admin_pass' : 'aaaaa'")
|
||||||
|
"'admin_pass' : '***'"
|
||||||
|
>>> mask_password('"password" : "aaaaa"')
|
||||||
|
'"password" : "***"'
|
||||||
|
>>> mask_password("'original_password' : 'aaaaa'")
|
||||||
|
"'original_password' : '***'"
|
||||||
|
>>> mask_password("u'original_password' : u'aaaaa'")
|
||||||
|
"u'original_password' : u'***'"
|
||||||
|
"""
|
||||||
|
message = six.text_type(message)
|
||||||
|
|
||||||
|
# NOTE(ldbragst): Check to see if anything in message contains any key
|
||||||
|
# specified in _SANITIZE_KEYS, if not then just return the message since
|
||||||
|
# we don't have to mask any passwords.
|
||||||
|
if not any(key in message for key in _SANITIZE_KEYS):
|
||||||
|
return message
|
||||||
|
|
||||||
|
secret = r'\g<1>' + secret + r'\g<2>'
|
||||||
|
for pattern in _SANITIZE_PATTERNS:
|
||||||
|
message = re.sub(pattern, secret, message)
|
||||||
|
return message
|
||||||
|
|
||||||
|
|
||||||
class BaseLoggerAdapter(logging.LoggerAdapter):
|
class BaseLoggerAdapter(logging.LoggerAdapter):
|
||||||
|
|
||||||
@ -249,6 +306,13 @@ class ContextAdapter(BaseLoggerAdapter):
|
|||||||
self.warn(stdmsg, *args, **kwargs)
|
self.warn(stdmsg, *args, **kwargs)
|
||||||
|
|
||||||
def process(self, msg, kwargs):
|
def process(self, msg, kwargs):
|
||||||
|
# NOTE(mrodden): catch any Message/other object and
|
||||||
|
# coerce to unicode before they can get
|
||||||
|
# to the python logging and possibly
|
||||||
|
# cause string encoding trouble
|
||||||
|
if not isinstance(msg, six.string_types):
|
||||||
|
msg = six.text_type(msg)
|
||||||
|
|
||||||
if 'extra' not in kwargs:
|
if 'extra' not in kwargs:
|
||||||
kwargs['extra'] = {}
|
kwargs['extra'] = {}
|
||||||
extra = kwargs['extra']
|
extra = kwargs['extra']
|
||||||
@ -268,10 +332,12 @@ class ContextAdapter(BaseLoggerAdapter):
|
|||||||
elif instance_uuid:
|
elif instance_uuid:
|
||||||
instance_extra = (CONF.instance_uuid_format
|
instance_extra = (CONF.instance_uuid_format
|
||||||
% {'uuid': instance_uuid})
|
% {'uuid': instance_uuid})
|
||||||
extra.update({'instance': instance_extra})
|
extra['instance'] = instance_extra
|
||||||
|
|
||||||
extra.update({"project": self.project})
|
extra.setdefault('user_identity', kwargs.pop('user_identity', None))
|
||||||
extra.update({"version": self.version})
|
|
||||||
|
extra['project'] = self.project
|
||||||
|
extra['version'] = self.version
|
||||||
extra['extra'] = extra.copy()
|
extra['extra'] = extra.copy()
|
||||||
return msg, kwargs
|
return msg, kwargs
|
||||||
|
|
||||||
@ -285,7 +351,7 @@ class JSONFormatter(logging.Formatter):
|
|||||||
def formatException(self, ei, strip_newlines=True):
|
def formatException(self, ei, strip_newlines=True):
|
||||||
lines = traceback.format_exception(*ei)
|
lines = traceback.format_exception(*ei)
|
||||||
if strip_newlines:
|
if strip_newlines:
|
||||||
lines = [itertools.ifilter(
|
lines = [moves.filter(
|
||||||
lambda x: x,
|
lambda x: x,
|
||||||
line.rstrip().splitlines()) for line in lines]
|
line.rstrip().splitlines()) for line in lines]
|
||||||
lines = list(itertools.chain(*lines))
|
lines = list(itertools.chain(*lines))
|
||||||
@ -323,11 +389,13 @@ class JSONFormatter(logging.Formatter):
|
|||||||
|
|
||||||
|
|
||||||
def _create_logging_excepthook(product_name):
|
def _create_logging_excepthook(product_name):
|
||||||
def logging_excepthook(type, value, tb):
|
def logging_excepthook(exc_type, value, tb):
|
||||||
extra = {}
|
extra = {}
|
||||||
if CONF.verbose:
|
if CONF.verbose or CONF.debug:
|
||||||
extra['exc_info'] = (type, value, tb)
|
extra['exc_info'] = (exc_type, value, tb)
|
||||||
getLogger(product_name).critical(str(value), **extra)
|
getLogger(product_name).critical(
|
||||||
|
"".join(traceback.format_exception_only(exc_type, value)),
|
||||||
|
**extra)
|
||||||
return logging_excepthook
|
return logging_excepthook
|
||||||
|
|
||||||
|
|
||||||
@ -344,17 +412,18 @@ class LogConfigError(Exception):
|
|||||||
err_msg=self.err_msg)
|
err_msg=self.err_msg)
|
||||||
|
|
||||||
|
|
||||||
def _load_log_config(log_config):
|
def _load_log_config(log_config_append):
|
||||||
try:
|
try:
|
||||||
logging.config.fileConfig(log_config)
|
logging.config.fileConfig(log_config_append,
|
||||||
|
disable_existing_loggers=False)
|
||||||
except moves.configparser.Error as exc:
|
except moves.configparser.Error as exc:
|
||||||
raise LogConfigError(log_config, str(exc))
|
raise LogConfigError(log_config_append, str(exc))
|
||||||
|
|
||||||
|
|
||||||
def setup(product_name):
|
def setup(product_name):
|
||||||
"""Setup logging."""
|
"""Setup logging."""
|
||||||
if CONF.log_config:
|
if CONF.log_config_append:
|
||||||
_load_log_config(CONF.log_config)
|
_load_log_config(CONF.log_config_append)
|
||||||
else:
|
else:
|
||||||
_setup_logging_from_conf()
|
_setup_logging_from_conf()
|
||||||
sys.excepthook = _create_logging_excepthook(product_name)
|
sys.excepthook = _create_logging_excepthook(product_name)
|
||||||
@ -410,7 +479,7 @@ def _setup_logging_from_conf():
|
|||||||
streamlog = ColorHandler()
|
streamlog = ColorHandler()
|
||||||
log_root.addHandler(streamlog)
|
log_root.addHandler(streamlog)
|
||||||
|
|
||||||
elif not CONF.log_file:
|
elif not logpath:
|
||||||
# pass sys.stdout as a positional argument
|
# pass sys.stdout as a positional argument
|
||||||
# python2.6 calls the argument strm, in 2.7 it's stream
|
# python2.6 calls the argument strm, in 2.7 it's stream
|
||||||
streamlog = logging.StreamHandler(sys.stdout)
|
streamlog = logging.StreamHandler(sys.stdout)
|
||||||
@ -494,7 +563,7 @@ class ContextFormatter(logging.Formatter):
|
|||||||
|
|
||||||
def format(self, record):
|
def format(self, record):
|
||||||
"""Uses contextstring if request_id is set, otherwise default."""
|
"""Uses contextstring if request_id is set, otherwise default."""
|
||||||
# NOTE(sdague): default the fancier formating params
|
# NOTE(sdague): default the fancier formatting params
|
||||||
# to an empty string so we don't throw an exception if
|
# to an empty string so we don't throw an exception if
|
||||||
# they get used
|
# they get used
|
||||||
for key in ('instance', 'color'):
|
for key in ('instance', 'color'):
|
||||||
@ -510,7 +579,7 @@ class ContextFormatter(logging.Formatter):
|
|||||||
CONF.logging_debug_format_suffix):
|
CONF.logging_debug_format_suffix):
|
||||||
self._fmt += " " + CONF.logging_debug_format_suffix
|
self._fmt += " " + CONF.logging_debug_format_suffix
|
||||||
|
|
||||||
# Cache this on the record, Logger will respect our formated copy
|
# Cache this on the record, Logger will respect our formatted copy
|
||||||
if record.exc_info:
|
if record.exc_info:
|
||||||
record.exc_text = self.formatException(record.exc_info, record)
|
record.exc_text = self.formatException(record.exc_info, record)
|
||||||
return logging.Formatter.format(self, record)
|
return logging.Formatter.format(self, record)
|
||||||
|
0
marconi/openstack/common/py3kcompat/__init__.py
Normal file
0
marconi/openstack/common/py3kcompat/__init__.py
Normal file
67
marconi/openstack/common/py3kcompat/urlutils.py
Normal file
67
marconi/openstack/common/py3kcompat/urlutils.py
Normal file
@ -0,0 +1,67 @@
|
|||||||
|
#
|
||||||
|
# Copyright 2013 Canonical Ltd.
|
||||||
|
# All Rights Reserved.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
#
|
||||||
|
|
||||||
|
"""
|
||||||
|
Python2/Python3 compatibility layer for OpenStack
|
||||||
|
"""
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
if six.PY3:
|
||||||
|
# python3
|
||||||
|
import urllib.error
|
||||||
|
import urllib.parse
|
||||||
|
import urllib.request
|
||||||
|
|
||||||
|
urlencode = urllib.parse.urlencode
|
||||||
|
urljoin = urllib.parse.urljoin
|
||||||
|
quote = urllib.parse.quote
|
||||||
|
quote_plus = urllib.parse.quote_plus
|
||||||
|
parse_qsl = urllib.parse.parse_qsl
|
||||||
|
unquote = urllib.parse.unquote
|
||||||
|
unquote_plus = urllib.parse.unquote_plus
|
||||||
|
urlparse = urllib.parse.urlparse
|
||||||
|
urlsplit = urllib.parse.urlsplit
|
||||||
|
urlunsplit = urllib.parse.urlunsplit
|
||||||
|
SplitResult = urllib.parse.SplitResult
|
||||||
|
|
||||||
|
urlopen = urllib.request.urlopen
|
||||||
|
URLError = urllib.error.URLError
|
||||||
|
pathname2url = urllib.request.pathname2url
|
||||||
|
else:
|
||||||
|
# python2
|
||||||
|
import urllib
|
||||||
|
import urllib2
|
||||||
|
import urlparse
|
||||||
|
|
||||||
|
urlencode = urllib.urlencode
|
||||||
|
quote = urllib.quote
|
||||||
|
quote_plus = urllib.quote_plus
|
||||||
|
unquote = urllib.unquote
|
||||||
|
unquote_plus = urllib.unquote_plus
|
||||||
|
|
||||||
|
parse = urlparse
|
||||||
|
parse_qsl = parse.parse_qsl
|
||||||
|
urljoin = parse.urljoin
|
||||||
|
urlparse = parse.urlparse
|
||||||
|
urlsplit = parse.urlsplit
|
||||||
|
urlunsplit = parse.urlunsplit
|
||||||
|
SplitResult = parse.SplitResult
|
||||||
|
|
||||||
|
urlopen = urllib2.urlopen
|
||||||
|
URLError = urllib2.URLError
|
||||||
|
pathname2url = urllib.pathname2url
|
@ -1,5 +1,3 @@
|
|||||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
|
||||||
|
|
||||||
# Copyright 2011 OpenStack Foundation.
|
# Copyright 2011 OpenStack Foundation.
|
||||||
# All Rights Reserved.
|
# All Rights Reserved.
|
||||||
#
|
#
|
||||||
@ -50,9 +48,9 @@ def parse_isotime(timestr):
|
|||||||
try:
|
try:
|
||||||
return iso8601.parse_date(timestr)
|
return iso8601.parse_date(timestr)
|
||||||
except iso8601.ParseError as e:
|
except iso8601.ParseError as e:
|
||||||
raise ValueError(unicode(e))
|
raise ValueError(six.text_type(e))
|
||||||
except TypeError as e:
|
except TypeError as e:
|
||||||
raise ValueError(unicode(e))
|
raise ValueError(six.text_type(e))
|
||||||
|
|
||||||
|
|
||||||
def strtime(at=None, fmt=PERFECT_TIME_FORMAT):
|
def strtime(at=None, fmt=PERFECT_TIME_FORMAT):
|
||||||
@ -79,6 +77,9 @@ def is_older_than(before, seconds):
|
|||||||
"""Return True if before is older than seconds."""
|
"""Return True if before is older than seconds."""
|
||||||
if isinstance(before, six.string_types):
|
if isinstance(before, six.string_types):
|
||||||
before = parse_strtime(before).replace(tzinfo=None)
|
before = parse_strtime(before).replace(tzinfo=None)
|
||||||
|
else:
|
||||||
|
before = before.replace(tzinfo=None)
|
||||||
|
|
||||||
return utcnow() - before > datetime.timedelta(seconds=seconds)
|
return utcnow() - before > datetime.timedelta(seconds=seconds)
|
||||||
|
|
||||||
|
|
||||||
@ -86,6 +87,9 @@ def is_newer_than(after, seconds):
|
|||||||
"""Return True if after is newer than seconds."""
|
"""Return True if after is newer than seconds."""
|
||||||
if isinstance(after, six.string_types):
|
if isinstance(after, six.string_types):
|
||||||
after = parse_strtime(after).replace(tzinfo=None)
|
after = parse_strtime(after).replace(tzinfo=None)
|
||||||
|
else:
|
||||||
|
after = after.replace(tzinfo=None)
|
||||||
|
|
||||||
return after - utcnow() > datetime.timedelta(seconds=seconds)
|
return after - utcnow() > datetime.timedelta(seconds=seconds)
|
||||||
|
|
||||||
|
|
||||||
@ -110,7 +114,7 @@ def utcnow():
|
|||||||
|
|
||||||
|
|
||||||
def iso8601_from_timestamp(timestamp):
|
def iso8601_from_timestamp(timestamp):
|
||||||
"""Returns a iso8601 formated date from timestamp."""
|
"""Returns a iso8601 formatted date from timestamp."""
|
||||||
return isotime(datetime.datetime.utcfromtimestamp(timestamp))
|
return isotime(datetime.datetime.utcfromtimestamp(timestamp))
|
||||||
|
|
||||||
|
|
||||||
@ -178,6 +182,15 @@ def delta_seconds(before, after):
|
|||||||
datetime objects (as a float, to microsecond resolution).
|
datetime objects (as a float, to microsecond resolution).
|
||||||
"""
|
"""
|
||||||
delta = after - before
|
delta = after - before
|
||||||
|
return total_seconds(delta)
|
||||||
|
|
||||||
|
|
||||||
|
def total_seconds(delta):
|
||||||
|
"""Return the total seconds of datetime.timedelta object.
|
||||||
|
|
||||||
|
Compute total seconds of datetime.timedelta, datetime.timedelta
|
||||||
|
doesn't have method total_seconds in Python2.6, calculate it manually.
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
return delta.total_seconds()
|
return delta.total_seconds()
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
@ -188,8 +201,8 @@ def delta_seconds(before, after):
|
|||||||
def is_soon(dt, window):
|
def is_soon(dt, window):
|
||||||
"""Determines if time is going to happen in the next window seconds.
|
"""Determines if time is going to happen in the next window seconds.
|
||||||
|
|
||||||
:params dt: the time
|
:param dt: the time
|
||||||
:params window: minimum seconds to remain to consider the time not soon
|
:param window: minimum seconds to remain to consider the time not soon
|
||||||
|
|
||||||
:return: True if expiration is within the given duration
|
:return: True if expiration is within the given duration
|
||||||
"""
|
"""
|
||||||
|
@ -16,9 +16,9 @@
|
|||||||
from oslo.config import cfg
|
from oslo.config import cfg
|
||||||
from stevedore import driver
|
from stevedore import driver
|
||||||
|
|
||||||
from marconi.common.cache import cache as oslo_cache
|
|
||||||
from marconi.common import decorators
|
from marconi.common import decorators
|
||||||
from marconi.common import errors
|
from marconi.common import errors
|
||||||
|
from marconi.openstack.common.cache import cache as oslo_cache
|
||||||
from marconi.openstack.common.gettextutils import _
|
from marconi.openstack.common.gettextutils import _
|
||||||
from marconi.openstack.common import log
|
from marconi.openstack.common import log
|
||||||
from marconi.queues.storage import pipeline
|
from marconi.queues.storage import pipeline
|
||||||
@ -90,7 +90,8 @@ class Bootstrap(object):
|
|||||||
def cache(self):
|
def cache(self):
|
||||||
LOG.debug(_(u'Loading proxy cache driver'))
|
LOG.debug(_(u'Loading proxy cache driver'))
|
||||||
try:
|
try:
|
||||||
mgr = oslo_cache.get_cache(self.conf)
|
oslo_cache.register_oslo_configs(self.conf)
|
||||||
|
mgr = oslo_cache.get_cache(self.conf.cache_url)
|
||||||
return mgr
|
return mgr
|
||||||
except RuntimeError as exc:
|
except RuntimeError as exc:
|
||||||
LOG.exception(exc)
|
LOG.exception(exc)
|
||||||
|
@ -21,7 +21,7 @@ import ddt
|
|||||||
import six
|
import six
|
||||||
from testtools import matchers
|
from testtools import matchers
|
||||||
|
|
||||||
from marconi.common.cache import cache as oslo_cache
|
from marconi.openstack.common.cache import cache as oslo_cache
|
||||||
from marconi.openstack.common import timeutils
|
from marconi.openstack.common import timeutils
|
||||||
from marconi.queues import storage
|
from marconi.queues import storage
|
||||||
from marconi.queues.storage import errors
|
from marconi.queues.storage import errors
|
||||||
@ -47,7 +47,8 @@ class ControllerBaseTest(testing.TestBase):
|
|||||||
self.controller_class,
|
self.controller_class,
|
||||||
self.controller_base_class))
|
self.controller_base_class))
|
||||||
|
|
||||||
cache = oslo_cache.get_cache(self.conf)
|
oslo_cache.register_oslo_configs(self.conf)
|
||||||
|
cache = oslo_cache.get_cache(self.conf.cache_url)
|
||||||
self.driver = self.driver_class(self.conf, cache)
|
self.driver = self.driver_class(self.conf, cache)
|
||||||
self._prepare_conf()
|
self._prepare_conf()
|
||||||
|
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
[DEFAULT]
|
[DEFAULT]
|
||||||
|
module=cache
|
||||||
module=excutils
|
module=excutils
|
||||||
module=fileutils
|
module=fileutils
|
||||||
module=gettextutils
|
module=gettextutils
|
||||||
|
@ -45,9 +45,8 @@ marconi.queues.public.transport =
|
|||||||
marconi.queues.admin.transport =
|
marconi.queues.admin.transport =
|
||||||
wsgi = marconi.queues.transport.wsgi.admin.driver:Driver
|
wsgi = marconi.queues.transport.wsgi.admin.driver:Driver
|
||||||
|
|
||||||
marconi.common.cache.backends =
|
marconi.openstack.common.cache.backends =
|
||||||
memory = marconi.common.cache._backends.memory:MemoryBackend
|
memory = marconi.openstack.common.cache._backends.memory:MemoryBackend
|
||||||
memcached = marconi.common.cache._backends.memcached:MemcachedBackend
|
|
||||||
|
|
||||||
[nosetests]
|
[nosetests]
|
||||||
where=tests
|
where=tests
|
||||||
|
@ -21,7 +21,7 @@ from pymongo import cursor
|
|||||||
import pymongo.errors
|
import pymongo.errors
|
||||||
from testtools import matchers
|
from testtools import matchers
|
||||||
|
|
||||||
from marconi.common.cache import cache as oslo_cache
|
from marconi.openstack.common.cache import cache as oslo_cache
|
||||||
from marconi.openstack.common import timeutils
|
from marconi.openstack.common import timeutils
|
||||||
from marconi.queues import storage
|
from marconi.queues import storage
|
||||||
from marconi.queues.storage import errors
|
from marconi.queues.storage import errors
|
||||||
|
@ -18,7 +18,7 @@ import uuid
|
|||||||
|
|
||||||
from oslo.config import cfg
|
from oslo.config import cfg
|
||||||
|
|
||||||
from marconi.common.cache import cache as oslo_cache
|
from marconi.openstack.common.cache import cache as oslo_cache
|
||||||
from marconi.queues.storage import sharding
|
from marconi.queues.storage import sharding
|
||||||
from marconi.queues.storage import sqlite
|
from marconi.queues.storage import sqlite
|
||||||
from marconi.queues.storage import utils
|
from marconi.queues.storage import utils
|
||||||
|
@ -19,7 +19,7 @@ import uuid
|
|||||||
|
|
||||||
from oslo.config import cfg
|
from oslo.config import cfg
|
||||||
|
|
||||||
from marconi.common.cache import cache as oslo_cache
|
from marconi.openstack.common.cache import cache as oslo_cache
|
||||||
from marconi.queues.storage import sharding
|
from marconi.queues.storage import sharding
|
||||||
from marconi.queues.storage import utils
|
from marconi.queues.storage import utils
|
||||||
from marconi import tests as testing
|
from marconi import tests as testing
|
||||||
|
Loading…
Reference in New Issue
Block a user