diff --git a/openstack-common.conf b/openstack-common.conf deleted file mode 100644 index b316531706..0000000000 --- a/openstack-common.conf +++ /dev/null @@ -1,11 +0,0 @@ -[DEFAULT] - -# The list of modules to copy from oslo-incubator -module=local -module=notifier -module=pastedeploy -module=rpc -module=versionutils - -# The base module to hold the copy of openstack.common -base=trove diff --git a/run_tests.py b/run_tests.py index a5cb80d008..0c87df56cb 100644 --- a/run_tests.py +++ b/run_tests.py @@ -59,7 +59,7 @@ def add_support_for_localization(): def initialize_trove(config_file): - from trove.openstack.common import pastedeploy + from trove.common import pastedeploy cfg.CONF(args=[], project='trove', diff --git a/trove/common/base_exception.py b/trove/common/base_exception.py index a806ee3a75..72f3b60a3f 100644 --- a/trove/common/base_exception.py +++ b/trove/common/base_exception.py @@ -19,7 +19,7 @@ Exceptions common to OpenStack projects import logging -from trove.openstack.common.gettextutils import _ +from trove.common.i18n import _ _FATAL_EXCEPTION_FORMAT_ERRORS = False diff --git a/trove/common/base_wsgi.py b/trove/common/base_wsgi.py index bbcf849a65..e96f0938c5 100644 --- a/trove/common/base_wsgi.py +++ b/trove/common/base_wsgi.py @@ -40,9 +40,9 @@ import webob.exc from xml.dom import minidom from xml.parsers import expat +from trove.common import base_exception +from trove.common.i18n import _ from trove.common import xmlutils -from trove.openstack.common import exception -from trove.openstack.common.gettextutils import _ socket_opts = [ cfg.IntOpt('backlog', @@ -341,7 +341,7 @@ class Request(webob.Request): self.default_request_content_types) if content_type not in allowed_content_types: - raise exception.InvalidContentType(content_type=content_type) + raise base_exception.InvalidContentType(content_type=content_type) return content_type @@ -380,10 +380,10 @@ class Resource(object): try: action, action_args, accept = self.deserialize_request(request) - except exception.InvalidContentType: + except base_exception.InvalidContentType: msg = _("Unsupported Content-Type") return webob.exc.HTTPUnsupportedMediaType(explanation=msg) - except exception.MalformedRequestBody: + except base_exception.MalformedRequestBody: msg = _("Malformed request body") return webob.exc.HTTPBadRequest(explanation=msg) @@ -616,7 +616,7 @@ class ResponseSerializer(object): try: return self.body_serializers[content_type] except (KeyError, TypeError): - raise exception.InvalidContentType(content_type=content_type) + raise base_exception.InvalidContentType(content_type=content_type) class RequestHeadersDeserializer(ActionDispatcher): @@ -675,7 +675,7 @@ class RequestDeserializer(object): try: content_type = request.get_content_type() - except exception.InvalidContentType: + except base_exception.InvalidContentType: LOG.debug(_("Unrecognized Content-Type provided in request")) raise @@ -685,7 +685,7 @@ class RequestDeserializer(object): try: deserializer = self.get_body_deserializer(content_type) - except exception.InvalidContentType: + except base_exception.InvalidContentType: LOG.debug(_("Unable to deserialize body as provided Content-Type")) raise @@ -695,7 +695,7 @@ class RequestDeserializer(object): try: return self.body_deserializers[content_type] except (KeyError, TypeError): - raise exception.InvalidContentType(content_type=content_type) + raise base_exception.InvalidContentType(content_type=content_type) def get_expected_content_type(self, request): return 
request.best_match_content_type(self.supported_content_types) @@ -737,7 +737,7 @@ class JSONDeserializer(TextDeserializer): return jsonutils.loads(datastring) except ValueError: msg = _("cannot understand JSON") - raise exception.MalformedRequestBody(reason=msg) + raise base_exception.MalformedRequestBody(reason=msg) def default(self, datastring): return {'body': self._from_json(datastring)} @@ -761,7 +761,7 @@ class XMLDeserializer(TextDeserializer): return {node.nodeName: self._from_xml_node(node, plurals)} except expat.ExpatError: msg = _("cannot understand XML") - raise exception.MalformedRequestBody(reason=msg) + raise base_exception.MalformedRequestBody(reason=msg) def _from_xml_node(self, node, listnames): """Convert a minidom node to a simple Python type. diff --git a/trove/common/context.py b/trove/common/context.py index 769aaf3927..4e918ea942 100644 --- a/trove/common/context.py +++ b/trove/common/context.py @@ -22,7 +22,7 @@ context or provide additional information in their specific WSGI pipeline. from oslo_context import context -from trove.openstack.common import local +from trove.common import local class TroveContext(context.RequestContext): diff --git a/trove/openstack/common/local.py b/trove/common/local.py similarity index 100% rename from trove/openstack/common/local.py rename to trove/common/local.py diff --git a/trove/openstack/common/pastedeploy.py b/trove/common/pastedeploy.py similarity index 93% rename from trove/openstack/common/pastedeploy.py rename to trove/common/pastedeploy.py index b2f343df1a..f604ba2ba2 100644 --- a/trove/openstack/common/pastedeploy.py +++ b/trove/common/pastedeploy.py @@ -18,7 +18,7 @@ import sys from paste import deploy -from trove.openstack.common import local +from trove.common import local class BasePasteFactory(object): @@ -66,7 +66,7 @@ class AppFactory(BasePasteFactory): WSGI app when invoked. The format of the name is : e.g. [app:myfooapp] - paste.app_factory = openstack.common.pastedeploy:app_factory + paste.app_factory = trove.common.pastedeploy:app_factory openstack.app_factory = myapp:Foo The WSGI app constructor must accept a data object and a local config @@ -89,7 +89,7 @@ class FilterFactory(AppFactory): returns a WSGI filter when invoked. The format is : e.g. [filter:myfoofilter] - paste.filter_factory = openstack.common.pastedeploy:filter_factory + paste.filter_factory = trove.common.pastedeploy:filter_factory openstack.filter_factory = myfilter:Foo The WSGI filter constructor must accept a WSGI app, a data object and @@ -129,11 +129,11 @@ def paste_deploy_app(paste_config_file, app_name, data): like this: [app:myapp] - paste.app_factory = openstack.common.pastedeploy:app_factory + paste.app_factory = trove.common.pastedeploy:app_factory openstack.app_factory = myapp:App ... 
[filter:myfilter] - paste.filter_factory = openstack.common.pastedeploy:filter_factory + paste.filter_factory = trove.common.pastedeploy:filter_factory openstack.filter_factory = myapp:Filter and then: diff --git a/trove/common/rpc/service.py b/trove/common/rpc/service.py index 08559813af..f5ff2af844 100644 --- a/trove/common/rpc/service.py +++ b/trove/common/rpc/service.py @@ -27,8 +27,8 @@ from oslo_utils import importutils from osprofiler import profiler from trove.common import cfg +from trove.common.i18n import _ from trove.common import profile -from trove.openstack.common.gettextutils import _ from trove import rpc diff --git a/trove/common/wsgi.py b/trove/common/wsgi.py index c924fc971b..8263b64a33 100644 --- a/trove/common/wsgi.py +++ b/trove/common/wsgi.py @@ -35,8 +35,8 @@ from trove.common import cfg from trove.common import context as rd_context from trove.common import exception from trove.common.i18n import _ +from trove.common import pastedeploy from trove.common import utils -from trove.openstack.common import pastedeploy CONTEXT_KEY = 'trove.context' Router = base_wsgi.Router diff --git a/trove/guestagent/datastore/experimental/db2/manager.py b/trove/guestagent/datastore/experimental/db2/manager.py index e3d5157dba..fde642b60c 100644 --- a/trove/guestagent/datastore/experimental/db2/manager.py +++ b/trove/guestagent/datastore/experimental/db2/manager.py @@ -18,10 +18,10 @@ from oslo_service import periodic_task from trove.common import cfg from trove.common import exception +from trove.common.i18n import _ from trove.guestagent.datastore.experimental.db2 import service from trove.guestagent import dbaas from trove.guestagent import volume -from trove.openstack.common.gettextutils import _ LOG = logging.getLogger(__name__) CONF = cfg.CONF diff --git a/trove/guestagent/datastore/experimental/db2/service.py b/trove/guestagent/datastore/experimental/db2/service.py index 1a3628b717..eccc8175f8 100644 --- a/trove/guestagent/datastore/experimental/db2/service.py +++ b/trove/guestagent/datastore/experimental/db2/service.py @@ -17,13 +17,13 @@ from oslo_log import log as logging from trove.common import cfg from trove.common import exception +from trove.common.i18n import _ from trove.common import instance as rd_instance from trove.common import utils as utils from trove.guestagent.common import operating_system from trove.guestagent.datastore.experimental.db2 import system from trove.guestagent.datastore import service from trove.guestagent.db import models -from trove.openstack.common.gettextutils import _ CONF = cfg.CONF LOG = logging.getLogger(__name__) diff --git a/trove/guestagent/strategies/replication/mysql_base.py b/trove/guestagent/strategies/replication/mysql_base.py index 45f9e439e9..fde798f01a 100644 --- a/trove/guestagent/strategies/replication/mysql_base.py +++ b/trove/guestagent/strategies/replication/mysql_base.py @@ -21,13 +21,13 @@ from oslo_log import log as logging from oslo_utils import netutils from trove.common import cfg +from trove.common.i18n import _ from trove.common import utils from trove.guestagent.backup.backupagent import BackupAgent from trove.guestagent.datastore.mysql.service import MySqlAdmin from trove.guestagent.db import models from trove.guestagent.strategies import backup from trove.guestagent.strategies.replication import base -from trove.openstack.common.gettextutils import _ AGENT = BackupAgent() CONF = cfg.CONF diff --git a/trove/openstack/__init__.py b/trove/openstack/__init__.py deleted file mode 100644 index 31285c4fc2..0000000000 
--- a/trove/openstack/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2011 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. diff --git a/trove/openstack/common/README b/trove/openstack/common/README deleted file mode 100644 index 04a616648b..0000000000 --- a/trove/openstack/common/README +++ /dev/null @@ -1,16 +0,0 @@ -oslo-incubator --------------- - -A number of modules from oslo-incubator are imported into this project. -You can clone the oslo-incubator repository using the following url: - - git://git.openstack.org/openstack/oslo-incubator - -These modules are "incubating" in oslo-incubator and are kept in sync -with the help of oslo-incubator's update.py script. See: - - https://wiki.openstack.org/wiki/Oslo#Syncing_Code_from_Incubator - -The copy of the code should never be directly modified here. Please -always update oslo-incubator first and then run the script to copy -the changes across. diff --git a/trove/openstack/common/__init__.py b/trove/openstack/common/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/trove/openstack/common/_i18n.py b/trove/openstack/common/_i18n.py deleted file mode 100644 index 6f0ff94013..0000000000 --- a/trove/openstack/common/_i18n.py +++ /dev/null @@ -1,45 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""oslo.i18n integration module. - -See http://docs.openstack.org/developer/oslo.i18n/usage.html - -""" - -try: - import oslo_i18n - - # NOTE(dhellmann): This reference to o-s-l-o will be replaced by the - # application name when this module is synced into the separate - # repository. It is OK to have more than one translation function - # using the same domain, since there will still only be one message - # catalog. - _translators = oslo_i18n.TranslatorFactory(domain='trove') - - # The primary translation function using the well-known name "_" - _ = _translators.primary - - # Translators for log levels. - # - # The abbreviated names are meant to reflect the usual use of a short - # name like '_'. The "L" is for "log" and the other letter comes from - # the level. 
- _LI = _translators.log_info - _LW = _translators.log_warning - _LE = _translators.log_error - _LC = _translators.log_critical -except ImportError: - # NOTE(dims): Support for cases where a project wants to use - # code from trove-incubator, but is not ready to be internationalized - # (like tempest) - _ = _LI = _LW = _LE = _LC = lambda x: x diff --git a/trove/openstack/common/crypto/__init__.py b/trove/openstack/common/crypto/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/trove/openstack/common/crypto/utils.py b/trove/openstack/common/crypto/utils.py deleted file mode 100644 index be69582618..0000000000 --- a/trove/openstack/common/crypto/utils.py +++ /dev/null @@ -1,179 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import base64 - -from Crypto.Hash import HMAC -from Crypto import Random - -from trove.openstack.common.gettextutils import _ # noqa -from trove.openstack.common import importutils - - -class CryptoutilsException(Exception): - """Generic Exception for Crypto utilities.""" - - message = _("An unknown error occurred in crypto utils.") - - -class CipherBlockLengthTooBig(CryptoutilsException): - """The block size is too big.""" - - def __init__(self, requested, permitted): - msg = _("Block size of %(given)d is too big, max = %(maximum)d") - message = msg % {'given': requested, 'maximum': permitted} - super(CryptoutilsException, self).__init__(message) - - -class HKDFOutputLengthTooLong(CryptoutilsException): - """The amount of Key Material asked is too much.""" - - def __init__(self, requested, permitted): - msg = _("Length of %(given)d is too long, max = %(maximum)d") - message = msg % {'given': requested, 'maximum': permitted} - super(CryptoutilsException, self).__init__(message) - - -class HKDF(object): - """An HMAC-based Key Derivation Function implementation (RFC5869) - - This class creates an object that allows to use HKDF to derive keys. - """ - - def __init__(self, hashtype='SHA256'): - self.hashfn = importutils.import_module('Crypto.Hash.' + hashtype) - self.max_okm_length = 255 * self.hashfn.digest_size - - def extract(self, ikm, salt=None): - """An extract function that can be used to derive a robust key given - weak Input Key Material (IKM) which could be a password. - Returns a pseudorandom key (of HashLen octets) - - :param ikm: input keying material (ex a password) - :param salt: optional salt value (a non-secret random value) - """ - if salt is None: - salt = '\x00' * self.hashfn.digest_size - - return HMAC.new(salt, ikm, self.hashfn).digest() - - def expand(self, prk, info, length): - """An expand function that will return arbitrary length output that can - be used as keys. - Returns a buffer usable as key material. 
- - :param prk: a pseudorandom key of at least HashLen octets - :param info: optional string (can be a zero-length string) - :param length: length of output keying material (<= 255 * HashLen) - """ - if length > self.max_okm_length: - raise HKDFOutputLengthTooLong(length, self.max_okm_length) - - N = (length + self.hashfn.digest_size - 1) / self.hashfn.digest_size - - okm = "" - tmp = "" - for block in range(1, N + 1): - tmp = HMAC.new(prk, tmp + info + chr(block), self.hashfn).digest() - okm += tmp - - return okm[:length] - - -MAX_CB_SIZE = 256 - - -class SymmetricCrypto(object): - """Symmetric Key Crypto object. - - This class creates a Symmetric Key Crypto object that can be used - to encrypt, decrypt, or sign arbitrary data. - - :param enctype: Encryption Cipher name (default: AES) - :param hashtype: Hash/HMAC type name (default: SHA256) - """ - - def __init__(self, enctype='AES', hashtype='SHA256'): - self.cipher = importutils.import_module('Crypto.Cipher.' + enctype) - self.hashfn = importutils.import_module('Crypto.Hash.' + hashtype) - - def new_key(self, size): - return Random.new().read(size) - - def encrypt(self, key, msg, b64encode=True): - """Encrypt the provided msg and returns the cyphertext optionally - base64 encoded. - - Uses AES-128-CBC with a Random IV by default. - - The plaintext is padded to reach blocksize length. - The last byte of the block is the length of the padding. - The length of the padding does not include the length byte itself. - - :param key: The Encryption key. - :param msg: the plain text. - - :returns encblock: a block of encrypted data. - """ - iv = Random.new().read(self.cipher.block_size) - cipher = self.cipher.new(key, self.cipher.MODE_CBC, iv) - - # CBC mode requires a fixed block size. Append padding and length of - # padding. - if self.cipher.block_size > MAX_CB_SIZE: - raise CipherBlockLengthTooBig(self.cipher.block_size, MAX_CB_SIZE) - r = len(msg) % self.cipher.block_size - padlen = self.cipher.block_size - r - 1 - msg += '\x00' * padlen - msg += chr(padlen) - - enc = iv + cipher.encrypt(msg) - if b64encode: - enc = base64.b64encode(enc) - return enc - - def decrypt(self, key, msg, b64decode=True): - """Decrypts the provided ciphertext, optionally base 64 encoded, and - returns the plaintext message, after padding is removed. - - Uses AES-128-CBC with an IV by default. - - :param key: The Encryption key. - :param msg: the ciphetext, the first block is the IV - """ - if b64decode: - msg = base64.b64decode(msg) - iv = msg[:self.cipher.block_size] - cipher = self.cipher.new(key, self.cipher.MODE_CBC, iv) - - padded = cipher.decrypt(msg[self.cipher.block_size:]) - l = ord(padded[-1]) + 1 - plain = padded[:-l] - return plain - - def sign(self, key, msg, b64encode=True): - """Signs a message string and returns a base64 encoded signature. - - Uses HMAC-SHA-256 by default. - - :param key: The Signing key. - :param msg: the message to sign. - """ - h = HMAC.new(key, msg, self.hashfn) - out = h.digest() - if b64encode: - out = base64.b64encode(out) - return out diff --git a/trove/openstack/common/exception.py b/trove/openstack/common/exception.py deleted file mode 100644 index b14c4613a1..0000000000 --- a/trove/openstack/common/exception.py +++ /dev/null @@ -1,142 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Exceptions common to OpenStack projects -""" - -import logging - -from trove.openstack.common.gettextutils import _ - -_FATAL_EXCEPTION_FORMAT_ERRORS = False - - -class Error(Exception): - def __init__(self, message=None): - super(Error, self).__init__(message) - - -class ApiError(Error): - def __init__(self, message='Unknown', code='Unknown'): - self.message = message - self.code = code - super(ApiError, self).__init__('%s: %s' % (code, message)) - - -class NotFound(Error): - pass - - -class UnknownScheme(Error): - - msg = "Unknown scheme '%s' found in URI" - - def __init__(self, scheme): - msg = self.__class__.msg % scheme - super(UnknownScheme, self).__init__(msg) - - -class BadStoreUri(Error): - - msg = "The Store URI %s was malformed. Reason: %s" - - def __init__(self, uri, reason): - msg = self.__class__.msg % (uri, reason) - super(BadStoreUri, self).__init__(msg) - - -class Duplicate(Error): - pass - - -class NotAuthorized(Error): - pass - - -class NotEmpty(Error): - pass - - -class Invalid(Error): - pass - - -class BadInputError(Exception): - """Error resulting from a client sending bad input to a server""" - pass - - -class MissingArgumentError(Error): - pass - - -class DatabaseMigrationError(Error): - pass - - -class ClientConnectionError(Exception): - """Error resulting from a client connecting to a server""" - pass - - -def wrap_exception(f): - def _wrap(*args, **kw): - try: - return f(*args, **kw) - except Exception as e: - if not isinstance(e, Error): - #exc_type, exc_value, exc_traceback = sys.exc_info() - logging.exception(_('Uncaught exception')) - #logging.error(traceback.extract_stack(exc_traceback)) - raise Error(str(e)) - raise - _wrap.func_name = f.func_name - return _wrap - - -class OpenstackException(Exception): - """ - Base Exception - - To correctly use this class, inherit from it and define - a 'message' property. That message will get printf'd - with the keyword arguments provided to the constructor. - """ - message = "An unknown exception occurred" - - def __init__(self, **kwargs): - try: - self._error_string = self.message % kwargs - - except Exception as e: - if _FATAL_EXCEPTION_FORMAT_ERRORS: - raise e - else: - # at least get the core message out if something happened - self._error_string = self.message - - def __str__(self): - return self._error_string - - -class MalformedRequestBody(OpenstackException): - message = "Malformed message body: %(reason)s" - - -class InvalidContentType(OpenstackException): - message = "Invalid content type %(content_type)s" diff --git a/trove/openstack/common/excutils.py b/trove/openstack/common/excutils.py deleted file mode 100644 index 7cad0a1741..0000000000 --- a/trove/openstack/common/excutils.py +++ /dev/null @@ -1,101 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# Copyright 2012, Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Exception related utilities. -""" - -import logging -import sys -import time -import traceback - -import six - -from trove.openstack.common.gettextutils import _ # noqa - - -class save_and_reraise_exception(object): - """Save current exception, run some code and then re-raise. - - In some cases the exception context can be cleared, resulting in None - being attempted to be re-raised after an exception handler is run. This - can happen when eventlet switches greenthreads or when running an - exception handler, code raises and catches an exception. In both - cases the exception context will be cleared. - - To work around this, we save the exception state, run handler code, and - then re-raise the original exception. If another exception occurs, the - saved exception is logged and the new exception is re-raised. - - In some cases the caller may not want to re-raise the exception, and - for those circumstances this context provides a reraise flag that - can be used to suppress the exception. For example: - - except Exception: - with save_and_reraise_exception() as ctxt: - decide_if_need_reraise() - if not should_be_reraised: - ctxt.reraise = False - """ - def __init__(self): - self.reraise = True - - def __enter__(self): - self.type_, self.value, self.tb, = sys.exc_info() - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type is not None: - logging.error(_('Original exception being dropped: %s'), - traceback.format_exception(self.type_, - self.value, - self.tb)) - return False - if self.reraise: - six.reraise(self.type_, self.value, self.tb) - - -def forever_retry_uncaught_exceptions(infunc): - def inner_func(*args, **kwargs): - last_log_time = 0 - last_exc_message = None - exc_count = 0 - while True: - try: - return infunc(*args, **kwargs) - except Exception as exc: - this_exc_message = unicode(exc) - if this_exc_message == last_exc_message: - exc_count += 1 - else: - exc_count = 1 - # Do not log any more frequently than once a minute unless - # the exception message changes - cur_time = int(time.time()) - if (cur_time - last_log_time > 60 or - this_exc_message != last_exc_message): - logging.exception( - _('Unexpected exception occurred %d time(s)... ' - 'retrying.') % exc_count) - last_log_time = cur_time - last_exc_message = this_exc_message - exc_count = 0 - # This should be a very rare event. In case it isn't, do - # a sleep. - time.sleep(1) - return inner_func diff --git a/trove/openstack/common/fileutils.py b/trove/openstack/common/fileutils.py deleted file mode 100644 index 4746ad4981..0000000000 --- a/trove/openstack/common/fileutils.py +++ /dev/null @@ -1,35 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack LLC. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -import errno -import os - - -def ensure_tree(path): - """Create a directory (and any ancestor directories required) - - :param path: Directory to create - """ - try: - os.makedirs(path) - except OSError as exc: - if exc.errno == errno.EEXIST: - if not os.path.isdir(path): - raise - else: - raise diff --git a/trove/openstack/common/gettextutils.py b/trove/openstack/common/gettextutils.py deleted file mode 100644 index a7079b81fd..0000000000 --- a/trove/openstack/common/gettextutils.py +++ /dev/null @@ -1,479 +0,0 @@ -# Copyright 2012 Red Hat, Inc. -# Copyright 2013 IBM Corp. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -gettext for openstack-common modules. - -Usual usage in an openstack.common module: - - from trove.openstack.common.gettextutils import _ -""" - -import copy -import gettext -import locale -from logging import handlers -import os - -from babel import localedata -import six - -_AVAILABLE_LANGUAGES = {} - -# FIXME(dhellmann): Remove this when moving to oslo.i18n. -USE_LAZY = False - - -class TranslatorFactory(object): - """Create translator functions - """ - - def __init__(self, domain, localedir=None): - """Establish a set of translation functions for the domain. - - :param domain: Name of translation domain, - specifying a message catalog. - :type domain: str - :param lazy: Delays translation until a message is emitted. - Defaults to False. - :type lazy: Boolean - :param localedir: Directory with translation catalogs. - :type localedir: str - """ - self.domain = domain - if localedir is None: - localedir = os.environ.get(domain.upper() + '_LOCALEDIR') - self.localedir = localedir - - def _make_translation_func(self, domain=None): - """Return a new translation function ready for use. - - Takes into account whether or not lazy translation is being - done. - - The domain can be specified to override the default from the - factory, but the localedir from the factory is always used - because we assume the log-level translation catalogs are - installed in the same directory as the main application - catalog. - - """ - if domain is None: - domain = self.domain - t = gettext.translation(domain, - localedir=self.localedir, - fallback=True) - # Use the appropriate method of the translation object based - # on the python version. - m = t.gettext if six.PY3 else t.ugettext - - def f(msg): - """oslo.i18n.gettextutils translation function.""" - if USE_LAZY: - return Message(msg, domain=domain) - return m(msg) - return f - - @property - def primary(self): - "The default translation function." 
- return self._make_translation_func() - - def _make_log_translation_func(self, level): - return self._make_translation_func(self.domain + '-log-' + level) - - @property - def log_info(self): - "Translate info-level log messages." - return self._make_log_translation_func('info') - - @property - def log_warning(self): - "Translate warning-level log messages." - return self._make_log_translation_func('warning') - - @property - def log_error(self): - "Translate error-level log messages." - return self._make_log_translation_func('error') - - @property - def log_critical(self): - "Translate critical-level log messages." - return self._make_log_translation_func('critical') - - -# NOTE(dhellmann): When this module moves out of the incubator into -# oslo.i18n, these global variables can be moved to an integration -# module within each application. - -# Create the global translation functions. -_translators = TranslatorFactory('trove') - -# The primary translation function using the well-known name "_" -_ = _translators.primary - -# Translators for log levels. -# -# The abbreviated names are meant to reflect the usual use of a short -# name like '_'. The "L" is for "log" and the other letter comes from -# the level. -_LI = _translators.log_info -_LW = _translators.log_warning -_LE = _translators.log_error -_LC = _translators.log_critical - -# NOTE(dhellmann): End of globals that will move to the application's -# integration module. - - -def enable_lazy(): - """Convenience function for configuring _() to use lazy gettext - - Call this at the start of execution to enable the gettextutils._ - function to use lazy gettext functionality. This is useful if - your project is importing _ directly instead of using the - gettextutils.install() way of importing the _ function. - """ - global USE_LAZY - USE_LAZY = True - - -def install(domain): - """Install a _() function using the given translation domain. - - Given a translation domain, install a _() function using gettext's - install() function. - - The main difference from gettext.install() is that we allow - overriding the default localedir (e.g. /usr/share/locale) using - a translation-domain-specific environment variable (e.g. - NOVA_LOCALEDIR). - - Note that to enable lazy translation, enable_lazy must be - called. - - :param domain: the translation domain - """ - from six import moves - tf = TranslatorFactory(domain) - moves.builtins.__dict__['_'] = tf.primary - - -class Message(six.text_type): - """A Message object is a unicode object that can be translated. - - Translation of Message is done explicitly using the translate() method. - For all non-translation intents and purposes, a Message is simply unicode, - and can be treated as such. - """ - - def __new__(cls, msgid, msgtext=None, params=None, - domain='trove', *args): - """Create a new Message object. - - In order for translation to work gettext requires a message ID, this - msgid will be used as the base unicode text. It is also possible - for the msgid and the base unicode text to be different by passing - the msgtext parameter. - """ - # If the base msgtext is not given, we use the default translation - # of the msgid (which is in English) just in case the system locale is - # not English, so that the base text will be in that locale by default. - if not msgtext: - msgtext = Message._translate_msgid(msgid, domain) - # We want to initialize the parent unicode with the actual object that - # would have been plain unicode if 'Message' was not enabled. 
- msg = super(Message, cls).__new__(cls, msgtext) - msg.msgid = msgid - msg.domain = domain - msg.params = params - return msg - - def translate(self, desired_locale=None): - """Translate this message to the desired locale. - - :param desired_locale: The desired locale to translate the message to, - if no locale is provided the message will be - translated to the system's default locale. - - :returns: the translated message in unicode - """ - - translated_message = Message._translate_msgid(self.msgid, - self.domain, - desired_locale) - if self.params is None: - # No need for more translation - return translated_message - - # This Message object may have been formatted with one or more - # Message objects as substitution arguments, given either as a single - # argument, part of a tuple, or as one or more values in a dictionary. - # When translating this Message we need to translate those Messages too - translated_params = _translate_args(self.params, desired_locale) - - translated_message = translated_message % translated_params - - return translated_message - - @staticmethod - def _translate_msgid(msgid, domain, desired_locale=None): - if not desired_locale: - system_locale = locale.getdefaultlocale() - # If the system locale is not available to the runtime use English - if not system_locale[0]: - desired_locale = 'en_US' - else: - desired_locale = system_locale[0] - - locale_dir = os.environ.get(domain.upper() + '_LOCALEDIR') - lang = gettext.translation(domain, - localedir=locale_dir, - languages=[desired_locale], - fallback=True) - if six.PY3: - translator = lang.gettext - else: - translator = lang.ugettext - - translated_message = translator(msgid) - return translated_message - - def __mod__(self, other): - # When we mod a Message we want the actual operation to be performed - # by the parent class (i.e. unicode()), the only thing we do here is - # save the original msgid and the parameters in case of a translation - params = self._sanitize_mod_params(other) - unicode_mod = super(Message, self).__mod__(params) - modded = Message(self.msgid, - msgtext=unicode_mod, - params=params, - domain=self.domain) - return modded - - def _sanitize_mod_params(self, other): - """Sanitize the object being modded with this Message. - - - Add support for modding 'None' so translation supports it - - Trim the modded object, which can be a large dictionary, to only - those keys that would actually be used in a translation - - Snapshot the object being modded, in case the message is - translated, it will be used as it was when the Message was created - """ - if other is None: - params = (other,) - elif isinstance(other, dict): - # Merge the dictionaries - # Copy each item in case one does not support deep copy. 
- params = {} - if isinstance(self.params, dict): - for key, val in self.params.items(): - params[key] = self._copy_param(val) - for key, val in other.items(): - params[key] = self._copy_param(val) - else: - params = self._copy_param(other) - return params - - def _copy_param(self, param): - try: - return copy.deepcopy(param) - except Exception: - # Fallback to casting to unicode this will handle the - # python code-like objects that can't be deep-copied - return six.text_type(param) - - def __add__(self, other): - msg = _('Message objects do not support addition.') - raise TypeError(msg) - - def __radd__(self, other): - return self.__add__(other) - - if six.PY2: - def __str__(self): - # NOTE(luisg): Logging in python 2.6 tries to str() log records, - # and it expects specifically a UnicodeError in order to proceed. - msg = _('Message objects do not support str() because they may ' - 'contain non-ascii characters. ' - 'Please use unicode() or translate() instead.') - raise UnicodeError(msg) - - -def get_available_languages(domain): - """Lists the available languages for the given translation domain. - - :param domain: the domain to get languages for - """ - if domain in _AVAILABLE_LANGUAGES: - return copy.copy(_AVAILABLE_LANGUAGES[domain]) - - localedir = '%s_LOCALEDIR' % domain.upper() - find = lambda x: gettext.find(domain, - localedir=os.environ.get(localedir), - languages=[x]) - - # NOTE(mrodden): en_US should always be available (and first in case - # order matters) since our in-line message strings are en_US - language_list = ['en_US'] - # NOTE(luisg): Babel <1.0 used a function called list(), which was - # renamed to locale_identifiers() in >=1.0, the requirements master list - # requires >=0.9.6, uncapped, so defensively work with both. We can remove - # this check when the master list updates to >=1.0, and update all projects - list_identifiers = (getattr(localedata, 'list', None) or - getattr(localedata, 'locale_identifiers')) - locale_identifiers = list_identifiers() - - for i in locale_identifiers: - if find(i) is not None: - language_list.append(i) - - # NOTE(luisg): Babel>=1.0,<1.3 has a bug where some OpenStack supported - # locales (e.g. 'zh_CN', and 'zh_TW') aren't supported even though they - # are perfectly legitimate locales: - # https://github.com/mitsuhiko/babel/issues/37 - # In Babel 1.3 they fixed the bug and they support these locales, but - # they are still not explicitly "listed" by locale_identifiers(). - # That is why we add the locales here explicitly if necessary so that - # they are listed as supported. - aliases = {'zh': 'zh_CN', - 'zh_Hant_HK': 'zh_HK', - 'zh_Hant': 'zh_TW', - 'fil': 'tl_PH'} - for (locale_, alias) in six.iteritems(aliases): - if locale_ in language_list and alias not in language_list: - language_list.append(alias) - - _AVAILABLE_LANGUAGES[domain] = language_list - return copy.copy(language_list) - - -def translate(obj, desired_locale=None): - """Gets the translated unicode representation of the given object. - - If the object is not translatable it is returned as-is. - If the locale is None the object is translated to the system locale. 
- - :param obj: the object to translate - :param desired_locale: the locale to translate the message to, if None the - default system locale will be used - :returns: the translated object in unicode, or the original object if - it could not be translated - """ - message = obj - if not isinstance(message, Message): - # If the object to translate is not already translatable, - # let's first get its unicode representation - message = six.text_type(obj) - if isinstance(message, Message): - # Even after unicoding() we still need to check if we are - # running with translatable unicode before translating - return message.translate(desired_locale) - return obj - - -def _translate_args(args, desired_locale=None): - """Translates all the translatable elements of the given arguments object. - - This method is used for translating the translatable values in method - arguments which include values of tuples or dictionaries. - If the object is not a tuple or a dictionary the object itself is - translated if it is translatable. - - If the locale is None the object is translated to the system locale. - - :param args: the args to translate - :param desired_locale: the locale to translate the args to, if None the - default system locale will be used - :returns: a new args object with the translated contents of the original - """ - if isinstance(args, tuple): - return tuple(translate(v, desired_locale) for v in args) - if isinstance(args, dict): - translated_dict = {} - for (k, v) in six.iteritems(args): - translated_v = translate(v, desired_locale) - translated_dict[k] = translated_v - return translated_dict - return translate(args, desired_locale) - - -class TranslationHandler(handlers.MemoryHandler): - """Handler that translates records before logging them. - - The TranslationHandler takes a locale and a target logging.Handler object - to forward LogRecord objects to after translating them. This handler - depends on Message objects being logged, instead of regular strings. - - The handler can be configured declaratively in the logging.conf as follows: - - [handlers] - keys = translatedlog, translator - - [handler_translatedlog] - class = handlers.WatchedFileHandler - args = ('/var/log/api-localized.log',) - formatter = context - - [handler_translator] - class = openstack.common.log.TranslationHandler - target = translatedlog - args = ('zh_CN',) - - If the specified locale is not available in the system, the handler will - log in the default locale. - """ - - def __init__(self, locale=None, target=None): - """Initialize a TranslationHandler - - :param locale: locale to use for translating messages - :param target: logging.Handler object to forward - LogRecord objects to after translation - """ - # NOTE(luisg): In order to allow this handler to be a wrapper for - # other handlers, such as a FileHandler, and still be able to - # configure it using logging.conf, this handler has to extend - # MemoryHandler because only the MemoryHandlers' logging.conf - # parsing is implemented such that it accepts a target handler. 
- handlers.MemoryHandler.__init__(self, capacity=0, target=target) - self.locale = locale - - def setFormatter(self, fmt): - self.target.setFormatter(fmt) - - def emit(self, record): - # We save the message from the original record to restore it - # after translation, so other handlers are not affected by this - original_msg = record.msg - original_args = record.args - - try: - self._translate_and_log_record(record) - finally: - record.msg = original_msg - record.args = original_args - - def _translate_and_log_record(self, record): - record.msg = translate(record.msg, self.locale) - - # In addition to translating the message, we also need to translate - # arguments that were passed to the log method that were not part - # of the main message e.g., log.info(_('Some message %s'), this_one)) - record.args = _translate_args(record.args, self.locale) - - self.target.emit(record) diff --git a/trove/openstack/common/importutils.py b/trove/openstack/common/importutils.py deleted file mode 100644 index a51c32751b..0000000000 --- a/trove/openstack/common/importutils.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Import related utilities and helper functions. -""" - -import sys -import traceback - - -def import_class(import_str): - """Returns a class from a string including module and class.""" - mod_str, _sep, class_str = import_str.rpartition('.') - __import__(mod_str) - try: - return getattr(sys.modules[mod_str], class_str) - except AttributeError: - raise ImportError('Class %s cannot be found (%s)' % - (class_str, - traceback.format_exception(*sys.exc_info()))) - - -def import_object(import_str, *args, **kwargs): - """Import a class and return an instance of it.""" - return import_class(import_str)(*args, **kwargs) - - -def import_object_ns(name_space, import_str, *args, **kwargs): - """Tries to import object from default namespace. - - Imports a class and return an instance of it, first by trying - to find the class in a default namespace, then failing back to - a full path if not found in the default namespace. 
- """ - import_value = "%s.%s" % (name_space, import_str) - try: - return import_class(import_value)(*args, **kwargs) - except ImportError: - return import_class(import_str)(*args, **kwargs) - - -def import_module(import_str): - """Import a module.""" - __import__(import_str) - return sys.modules[import_str] - - -def import_versioned_module(version, submodule=None): - module = 'trove.v%s' % version - if submodule: - module = '.'.join((module, submodule)) - return import_module(module) - - -def try_import(import_str, default=None): - """Try to import a module and if it fails return default.""" - try: - return import_module(import_str) - except ImportError: - return default diff --git a/trove/openstack/common/middleware/__init__.py b/trove/openstack/common/middleware/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/trove/openstack/common/middleware/context.py b/trove/openstack/common/middleware/context.py deleted file mode 100644 index 57409cdf30..0000000000 --- a/trove/openstack/common/middleware/context.py +++ /dev/null @@ -1,64 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Middleware that attaches a context to the WSGI request -""" - -from trove.openstack.common import context -from trove.openstack.common import importutils -from trove.openstack.common import wsgi - - -class ContextMiddleware(wsgi.Middleware): - def __init__(self, app, options): - self.options = options - super(ContextMiddleware, self).__init__(app) - - def make_context(self, *args, **kwargs): - """ - Create a context with the given arguments. - """ - - # Determine the context class to use - ctxcls = context.RequestContext - if 'context_class' in self.options: - ctxcls = importutils.import_class(self.options['context_class']) - - return ctxcls(*args, **kwargs) - - def process_request(self, req): - """ - Extract any authentication information in the request and - construct an appropriate context from it. - """ - # Use the default empty context, with admin turned on for - # backwards compatibility - req.context = self.make_context(is_admin=True) - - -def filter_factory(global_conf, **local_conf): - """ - Factory method for paste.deploy - """ - conf = global_conf.copy() - conf.update(local_conf) - - def filter(app): - return ContextMiddleware(app, conf) - - return filter diff --git a/trove/openstack/common/network_utils.py b/trove/openstack/common/network_utils.py deleted file mode 100644 index a257513b0c..0000000000 --- a/trove/openstack/common/network_utils.py +++ /dev/null @@ -1,81 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2012 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Network-related utilities and helper functions. -""" - -import six.moves.urllib.parse as urlparse - - -def parse_host_port(address, default_port=None): - """Interpret a string as a host:port pair. - - An IPv6 address MUST be escaped if accompanied by a port, - because otherwise ambiguity ensues: 2001:db8:85a3::8a2e:370:7334 - means both [2001:db8:85a3::8a2e:370:7334] and - [2001:db8:85a3::8a2e:370]:7334. - - >>> parse_host_port('server01:80') - ('server01', 80) - >>> parse_host_port('server01') - ('server01', None) - >>> parse_host_port('server01', default_port=1234) - ('server01', 1234) - >>> parse_host_port('[::1]:80') - ('::1', 80) - >>> parse_host_port('[::1]') - ('::1', None) - >>> parse_host_port('[::1]', default_port=1234) - ('::1', 1234) - >>> parse_host_port('2001:db8:85a3::8a2e:370:7334', default_port=1234) - ('2001:db8:85a3::8a2e:370:7334', 1234) - - """ - if address[0] == '[': - # Escaped ipv6 - _host, _port = address[1:].split(']') - host = _host - if ':' in _port: - port = _port.split(':')[1] - else: - port = default_port - else: - if address.count(':') == 1: - host, port = address.split(':') - else: - # 0 means ipv4, >1 means ipv6. - # We prohibit unescaped ipv6 addresses with port. - host = address - port = default_port - - return (host, None if port is None else int(port)) - - -def urlsplit(url, scheme='', allow_fragments=True): - """Parse a URL using urlparse.urlsplit(), splitting query and fragments. - This function papers over Python issue9374 when needed. - - The parameters are the same as urlparse.urlsplit. - """ - scheme, netloc, path, query, fragment = urlparse.urlsplit( - url, scheme, allow_fragments) - if allow_fragments and '#' in path: - path, fragment = path.split('#', 1) - if '?' in path: - path, query = path.split('?', 1) - return urlparse.SplitResult(scheme, netloc, path, query, fragment) diff --git a/trove/openstack/common/policy.py b/trove/openstack/common/policy.py deleted file mode 100644 index a10127e842..0000000000 --- a/trove/openstack/common/policy.py +++ /dev/null @@ -1,779 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright (c) 2012 OpenStack, LLC. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Common Policy Engine Implementation - -Policies can be expressed in one of two forms: A list of lists, or a -string written in the new policy language. - -In the list-of-lists representation, each check inside the innermost -list is combined as with an "and" conjunction--for that check to pass, -all the specified checks must pass. 
These innermost lists are then -combined as with an "or" conjunction. This is the original way of -expressing policies, but there now exists a new way: the policy -language. - -In the policy language, each check is specified the same way as in the -list-of-lists representation: a simple "a:b" pair that is matched to -the correct code to perform that check. However, conjunction -operators are available, allowing for more expressiveness in crafting -policies. - -As an example, take the following rule, expressed in the list-of-lists -representation:: - - [["role:admin"], ["project_id:%(project_id)s", "role:projectadmin"]] - -In the policy language, this becomes:: - - role:admin or (project_id:%(project_id)s and role:projectadmin) - -The policy language also has the "not" operator, allowing a richer -policy rule:: - - project_id:%(project_id)s and not role:dunce - -Finally, two special policy checks should be mentioned; the policy -check "@" will always accept an access, and the policy check "!" will -always reject an access. (Note that if a rule is either the empty -list ("[]") or the empty string, this is equivalent to the "@" policy -check.) Of these, the "!" policy check is probably the most useful, -as it allows particular rules to be explicitly disabled. -""" - -import abc -import logging -import re -import urllib - -import urllib2 - -from trove.openstack.common.gettextutils import _ -from trove.openstack.common import jsonutils - - -LOG = logging.getLogger(__name__) - - -_rules = None -_checks = {} - - -class Rules(dict): - """ - A store for rules. Handles the default_rule setting directly. - """ - - @classmethod - def load_json(cls, data, default_rule=None): - """ - Allow loading of JSON rule data. - """ - - # Suck in the JSON data and parse the rules - rules = dict((k, parse_rule(v)) for k, v in - jsonutils.loads(data).items()) - - return cls(rules, default_rule) - - def __init__(self, rules=None, default_rule=None): - """Initialize the Rules store.""" - - super(Rules, self).__init__(rules or {}) - self.default_rule = default_rule - - def __missing__(self, key): - """Implements the default rule handling.""" - - # If the default rule isn't actually defined, do something - # reasonably intelligent - if not self.default_rule or self.default_rule not in self: - raise KeyError(key) - - return self[self.default_rule] - - def __str__(self): - """Dumps a string representation of the rules.""" - - # Start by building the canonical strings for the rules - out_rules = {} - for key, value in self.items(): - # Use empty string for singleton TrueCheck instances - if isinstance(value, TrueCheck): - out_rules[key] = '' - else: - out_rules[key] = str(value) - - # Dump a pretty-printed JSON representation - return jsonutils.dumps(out_rules, indent=4) - - -# Really have to figure out a way to deprecate this -def set_rules(rules): - """Set the rules in use for policy checks.""" - - global _rules - - _rules = rules - - -# Ditto -def reset(): - """Clear the rules used for policy checks.""" - - global _rules - - _rules = None - - -def check(rule, target, creds, exc=None, *args, **kwargs): - """ - Checks authorization of a rule against the target and credentials. - - :param rule: The rule to evaluate. - :param target: As much information about the object being operated - on as possible, as a dictionary. - :param creds: As much information about the user performing the - action as possible, as a dictionary. - :param exc: Class of the exception to raise if the check fails. 
- Any remaining arguments passed to check() (both - positional and keyword arguments) will be passed to - the exception class. If exc is not provided, returns - False. - - :return: Returns False if the policy does not allow the action and - exc is not provided; otherwise, returns a value that - evaluates to True. Note: for rules using the "case" - expression, this True value will be the specified string - from the expression. - """ - - # Allow the rule to be a Check tree - if isinstance(rule, BaseCheck): - result = rule(target, creds) - elif not _rules: - # No rules to reference means we're going to fail closed - result = False - else: - try: - # Evaluate the rule - result = _rules[rule](target, creds) - except KeyError: - # If the rule doesn't exist, fail closed - result = False - - # If it is False, raise the exception if requested - if exc and result is False: - raise exc(*args, **kwargs) - - return result - - -class BaseCheck(object): - """ - Abstract base class for Check classes. - """ - - __metaclass__ = abc.ABCMeta - - @abc.abstractmethod - def __str__(self): - """ - Retrieve a string representation of the Check tree rooted at - this node. - """ - - pass - - @abc.abstractmethod - def __call__(self, target, cred): - """ - Perform the check. Returns False to reject the access or a - true value (not necessary True) to accept the access. - """ - - pass - - -class FalseCheck(BaseCheck): - """ - A policy check that always returns False (disallow). - """ - - def __str__(self): - """Return a string representation of this check.""" - - return "!" - - def __call__(self, target, cred): - """Check the policy.""" - - return False - - -class TrueCheck(BaseCheck): - """ - A policy check that always returns True (allow). - """ - - def __str__(self): - """Return a string representation of this check.""" - - return "@" - - def __call__(self, target, cred): - """Check the policy.""" - - return True - - -class Check(BaseCheck): - """ - A base class to allow for user-defined policy checks. - """ - - def __init__(self, kind, match): - """ - :param kind: The kind of the check, i.e., the field before the - ':'. - :param match: The match of the check, i.e., the field after - the ':'. - """ - - self.kind = kind - self.match = match - - def __str__(self): - """Return a string representation of this check.""" - - return "%s:%s" % (self.kind, self.match) - - -class NotCheck(BaseCheck): - """ - A policy check that inverts the result of another policy check. - Implements the "not" operator. - """ - - def __init__(self, rule): - """ - Initialize the 'not' check. - - :param rule: The rule to negate. Must be a Check. - """ - - self.rule = rule - - def __str__(self): - """Return a string representation of this check.""" - - return "not %s" % self.rule - - def __call__(self, target, cred): - """ - Check the policy. Returns the logical inverse of the wrapped - check. - """ - - return not self.rule(target, cred) - - -class AndCheck(BaseCheck): - """ - A policy check that requires that a list of other checks all - return True. Implements the "and" operator. - """ - - def __init__(self, rules): - """ - Initialize the 'and' check. - - :param rules: A list of rules that will be tested. - """ - - self.rules = rules - - def __str__(self): - """Return a string representation of this check.""" - - return "(%s)" % ' and '.join(str(r) for r in self.rules) - - def __call__(self, target, cred): - """ - Check the policy. Requires that all rules accept in order to - return True. 
- """ - - for rule in self.rules: - if not rule(target, cred): - return False - - return True - - def add_check(self, rule): - """ - Allows addition of another rule to the list of rules that will - be tested. Returns the AndCheck object for convenience. - """ - - self.rules.append(rule) - return self - - -class OrCheck(BaseCheck): - """ - A policy check that requires that at least one of a list of other - checks returns True. Implements the "or" operator. - """ - - def __init__(self, rules): - """ - Initialize the 'or' check. - - :param rules: A list of rules that will be tested. - """ - - self.rules = rules - - def __str__(self): - """Return a string representation of this check.""" - - return "(%s)" % ' or '.join(str(r) for r in self.rules) - - def __call__(self, target, cred): - """ - Check the policy. Requires that at least one rule accept in - order to return True. - """ - - for rule in self.rules: - if rule(target, cred): - return True - - return False - - def add_check(self, rule): - """ - Allows addition of another rule to the list of rules that will - be tested. Returns the OrCheck object for convenience. - """ - - self.rules.append(rule) - return self - - -def _parse_check(rule): - """ - Parse a single base check rule into an appropriate Check object. - """ - - # Handle the special checks - if rule == '!': - return FalseCheck() - elif rule == '@': - return TrueCheck() - - try: - kind, match = rule.split(':', 1) - except Exception: - LOG.exception(_("Failed to understand rule %(rule)s") % locals()) - # If the rule is invalid, we'll fail closed - return FalseCheck() - - # Find what implements the check - if kind in _checks: - return _checks[kind](kind, match) - elif None in _checks: - return _checks[None](kind, match) - else: - LOG.error(_("No handler for matches of kind %s") % kind) - return FalseCheck() - - -def _parse_list_rule(rule): - """ - Provided for backwards compatibility. Translates the old - list-of-lists syntax into a tree of Check objects. - """ - - # Empty rule defaults to True - if not rule: - return TrueCheck() - - # Outer list is joined by "or"; inner list by "and" - or_list = [] - for inner_rule in rule: - # Elide empty inner lists - if not inner_rule: - continue - - # Handle bare strings - if isinstance(inner_rule, basestring): - inner_rule = [inner_rule] - - # Parse the inner rules into Check objects - and_list = [_parse_check(r) for r in inner_rule] - - # Append the appropriate check to the or_list - if len(and_list) == 1: - or_list.append(and_list[0]) - else: - or_list.append(AndCheck(and_list)) - - # If we have only one check, omit the "or" - if len(or_list) == 0: - return FalseCheck() - elif len(or_list) == 1: - return or_list[0] - - return OrCheck(or_list) - - -# Used for tokenizing the policy language -_tokenize_re = re.compile(r'\s+') - - -def _parse_tokenize(rule): - """ - Tokenizer for the policy language. - - Most of the single-character tokens are specified in the - _tokenize_re; however, parentheses need to be handled specially, - because they can appear inside a check string. Thankfully, those - parentheses that appear inside a check string can never occur at - the very beginning or end ("%(variable)s" is the correct syntax). 
- """ - - for tok in _tokenize_re.split(rule): - # Skip empty tokens - if not tok or tok.isspace(): - continue - - # Handle leading parens on the token - clean = tok.lstrip('(') - for i in range(len(tok) - len(clean)): - yield '(', '(' - - # If it was only parentheses, continue - if not clean: - continue - else: - tok = clean - - # Handle trailing parens on the token - clean = tok.rstrip(')') - trail = len(tok) - len(clean) - - # Yield the cleaned token - lowered = clean.lower() - if lowered in ('and', 'or', 'not'): - # Special tokens - yield lowered, clean - elif clean: - # Not a special token, but not composed solely of ')' - if len(tok) >= 2 and ((tok[0], tok[-1]) in - [('"', '"'), ("'", "'")]): - # It's a quoted string - yield 'string', tok[1:-1] - else: - yield 'check', _parse_check(clean) - - # Yield the trailing parens - for i in range(trail): - yield ')', ')' - - -class ParseStateMeta(type): - """ - Metaclass for the ParseState class. Facilitates identifying - reduction methods. - """ - - def __new__(mcs, name, bases, cls_dict): - """ - Create the class. Injects the 'reducers' list, a list of - tuples matching token sequences to the names of the - corresponding reduction methods. - """ - - reducers = [] - - for key, value in cls_dict.items(): - if not hasattr(value, 'reducers'): - continue - for reduction in value.reducers: - reducers.append((reduction, key)) - - cls_dict['reducers'] = reducers - - return super(ParseStateMeta, mcs).__new__(mcs, name, bases, cls_dict) - - -def reducer(*tokens): - """ - Decorator for reduction methods. Arguments are a sequence of - tokens, in order, which should trigger running this reduction - method. - """ - - def decorator(func): - # Make sure we have a list of reducer sequences - if not hasattr(func, 'reducers'): - func.reducers = [] - - # Add the tokens to the list of reducer sequences - func.reducers.append(list(tokens)) - - return func - - return decorator - - -class ParseState(object): - """ - Implement the core of parsing the policy language. Uses a greedy - reduction algorithm to reduce a sequence of tokens into a single - terminal, the value of which will be the root of the Check tree. - - Note: error reporting is rather lacking. The best we can get with - this parser formulation is an overall "parse failed" error. - Fortunately, the policy language is simple enough that this - shouldn't be that big a problem. - """ - - __metaclass__ = ParseStateMeta - - def __init__(self): - """Initialize the ParseState.""" - - self.tokens = [] - self.values = [] - - def reduce(self): - """ - Perform a greedy reduction of the token stream. If a reducer - method matches, it will be executed, then the reduce() method - will be called recursively to search for any more possible - reductions. - """ - - for reduction, methname in self.reducers: - if (len(self.tokens) >= len(reduction) and - self.tokens[-len(reduction):] == reduction): - # Get the reduction method - meth = getattr(self, methname) - - # Reduce the token stream - results = meth(*self.values[-len(reduction):]) - - # Update the tokens and values - self.tokens[-len(reduction):] = [r[0] for r in results] - self.values[-len(reduction):] = [r[1] for r in results] - - # Check for any more reductions - return self.reduce() - - def shift(self, tok, value): - """Adds one more token to the state. Calls reduce().""" - - self.tokens.append(tok) - self.values.append(value) - - # Do a greedy reduce... - self.reduce() - - @property - def result(self): - """ - Obtain the final result of the parse. 
Raises ValueError if - the parse failed to reduce to a single result. - """ - - if len(self.values) != 1: - raise ValueError("Could not parse rule") - return self.values[0] - - @reducer('(', 'check', ')') - @reducer('(', 'and_expr', ')') - @reducer('(', 'or_expr', ')') - def _wrap_check(self, _p1, check, _p2): - """Turn parenthesized expressions into a 'check' token.""" - - return [('check', check)] - - @reducer('check', 'and', 'check') - def _make_and_expr(self, check1, _and, check2): - """ - Create an 'and_expr' from two checks joined by the 'and' - operator. - """ - - return [('and_expr', AndCheck([check1, check2]))] - - @reducer('and_expr', 'and', 'check') - def _extend_and_expr(self, and_expr, _and, check): - """ - Extend an 'and_expr' by adding one more check. - """ - - return [('and_expr', and_expr.add_check(check))] - - @reducer('check', 'or', 'check') - def _make_or_expr(self, check1, _or, check2): - """ - Create an 'or_expr' from two checks joined by the 'or' - operator. - """ - - return [('or_expr', OrCheck([check1, check2]))] - - @reducer('or_expr', 'or', 'check') - def _extend_or_expr(self, or_expr, _or, check): - """ - Extend an 'or_expr' by adding one more check. - """ - - return [('or_expr', or_expr.add_check(check))] - - @reducer('not', 'check') - def _make_not_expr(self, _not, check): - """Invert the result of another check.""" - - return [('check', NotCheck(check))] - - -def _parse_text_rule(rule): - """ - Translates a policy written in the policy language into a tree of - Check objects. - """ - - # Empty rule means always accept - if not rule: - return TrueCheck() - - # Parse the token stream - state = ParseState() - for tok, value in _parse_tokenize(rule): - state.shift(tok, value) - - try: - return state.result - except ValueError: - # Couldn't parse the rule - LOG.exception(_("Failed to understand rule %(rule)r") % locals()) - - # Fail closed - return FalseCheck() - - -def parse_rule(rule): - """ - Parses a policy rule into a tree of Check objects. - """ - - # If the rule is a string, it's in the policy language - if isinstance(rule, basestring): - return _parse_text_rule(rule) - return _parse_list_rule(rule) - - -def register(name, func=None): - """ - Register a function or Check class as a policy check. - - :param name: Gives the name of the check type, e.g., 'rule', - 'role', etc. If name is None, a default check type - will be registered. - :param func: If given, provides the function or class to register. - If not given, returns a function taking one argument - to specify the function or class to register, - allowing use as a decorator. - """ - - # Perform the actual decoration by registering the function or - # class. Returns the function or class for compliance with the - # decorator interface. - def decorator(func): - _checks[name] = func - return func - - # If the function or class is given, do the registration - if func: - return decorator(func) - - return decorator - - -@register("rule") -class RuleCheck(Check): - def __call__(self, target, creds): - """ - Recursively checks credentials based on the defined rules. 
- """ - - try: - return _rules[self.match](target, creds) - except KeyError: - # We don't have any matching rule; fail closed - return False - - -@register("role") -class RoleCheck(Check): - def __call__(self, target, creds): - """Check that there is a matching role in the cred dict.""" - - return self.match.lower() in [x.lower() for x in creds['roles']] - - -@register('http') -class HttpCheck(Check): - def __call__(self, target, creds): - """ - Check http: rules by calling to a remote server. - - This example implementation simply verifies that the response - is exactly 'True'. - """ - - url = ('http:' + self.match) % target - data = {'target': jsonutils.dumps(target), - 'credentials': jsonutils.dumps(creds)} - post_data = urllib.urlencode(data) - f = urllib2.urlopen(url, post_data) - return f.read() == "True" - - -@register(None) -class GenericCheck(Check): - def __call__(self, target, creds): - """ - Check an individual match. - - Matches look like: - - tenant:%(tenant_id)s - role:compute:admin - """ - - # TODO(termie): do dict inspection via dot syntax - match = self.match % target - if self.kind in creds: - return match == unicode(creds[self.kind]) - return False diff --git a/trove/openstack/common/timeutils.py b/trove/openstack/common/timeutils.py deleted file mode 100644 index c48da95f12..0000000000 --- a/trove/openstack/common/timeutils.py +++ /dev/null @@ -1,210 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Time related utilities and helper functions. 
-""" - -import calendar -import datetime -import time - -import iso8601 -import six - - -# ISO 8601 extended time format with microseconds -_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f' -_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' -PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND - - -def isotime(at=None, subsecond=False): - """Stringify time in ISO 8601 format.""" - if not at: - at = utcnow() - st = at.strftime(_ISO8601_TIME_FORMAT - if not subsecond - else _ISO8601_TIME_FORMAT_SUBSECOND) - tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' - st += ('Z' if tz == 'UTC' else tz) - return st - - -def parse_isotime(timestr): - """Parse time from ISO 8601 format.""" - try: - return iso8601.parse_date(timestr) - except iso8601.ParseError as e: - raise ValueError(six.text_type(e)) - except TypeError as e: - raise ValueError(six.text_type(e)) - - -def strtime(at=None, fmt=PERFECT_TIME_FORMAT): - """Returns formatted utcnow.""" - if not at: - at = utcnow() - return at.strftime(fmt) - - -def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT): - """Turn a formatted time back into a datetime.""" - return datetime.datetime.strptime(timestr, fmt) - - -def normalize_time(timestamp): - """Normalize time in arbitrary timezone to UTC naive object.""" - offset = timestamp.utcoffset() - if offset is None: - return timestamp - return timestamp.replace(tzinfo=None) - offset - - -def is_older_than(before, seconds): - """Return True if before is older than seconds.""" - if isinstance(before, six.string_types): - before = parse_strtime(before).replace(tzinfo=None) - else: - before = before.replace(tzinfo=None) - - return utcnow() - before > datetime.timedelta(seconds=seconds) - - -def is_newer_than(after, seconds): - """Return True if after is newer than seconds.""" - if isinstance(after, six.string_types): - after = parse_strtime(after).replace(tzinfo=None) - else: - after = after.replace(tzinfo=None) - - return after - utcnow() > datetime.timedelta(seconds=seconds) - - -def utcnow_ts(): - """Timestamp version of our utcnow function.""" - if utcnow.override_time is None: - # NOTE(kgriffs): This is several times faster - # than going through calendar.timegm(...) - return int(time.time()) - - return calendar.timegm(utcnow().timetuple()) - - -def utcnow(): - """Overridable version of utils.utcnow.""" - if utcnow.override_time: - try: - return utcnow.override_time.pop(0) - except AttributeError: - return utcnow.override_time - return datetime.datetime.utcnow() - - -def iso8601_from_timestamp(timestamp): - """Returns an iso8601 formatted date from timestamp.""" - return isotime(datetime.datetime.utcfromtimestamp(timestamp)) - - -utcnow.override_time = None - - -def set_time_override(override_time=None): - """Overrides utils.utcnow. - - Make it return a constant time or a list thereof, one at a time. - - :param override_time: datetime instance or list thereof. If not - given, defaults to the current UTC time. 
- """ - utcnow.override_time = override_time or datetime.datetime.utcnow() - - -def advance_time_delta(timedelta): - """Advance overridden time using a datetime.timedelta.""" - assert utcnow.override_time is not None - try: - for dt in utcnow.override_time: - dt += timedelta - except TypeError: - utcnow.override_time += timedelta - - -def advance_time_seconds(seconds): - """Advance overridden time by seconds.""" - advance_time_delta(datetime.timedelta(0, seconds)) - - -def clear_time_override(): - """Remove the overridden time.""" - utcnow.override_time = None - - -def marshall_now(now=None): - """Make an rpc-safe datetime with microseconds. - - Note: tzinfo is stripped, but not required for relative times. - """ - if not now: - now = utcnow() - return dict(day=now.day, month=now.month, year=now.year, hour=now.hour, - minute=now.minute, second=now.second, - microsecond=now.microsecond) - - -def unmarshall_time(tyme): - """Unmarshall a datetime dict.""" - return datetime.datetime(day=tyme['day'], - month=tyme['month'], - year=tyme['year'], - hour=tyme['hour'], - minute=tyme['minute'], - second=tyme['second'], - microsecond=tyme['microsecond']) - - -def delta_seconds(before, after): - """Return the difference between two timing objects. - - Compute the difference in seconds between two date, time, or - datetime objects (as a float, to microsecond resolution). - """ - delta = after - before - return total_seconds(delta) - - -def total_seconds(delta): - """Return the total seconds of datetime.timedelta object. - - Compute total seconds of datetime.timedelta, datetime.timedelta - doesn't have method total_seconds in Python2.6, calculate it manually. - """ - try: - return delta.total_seconds() - except AttributeError: - return ((delta.days * 24 * 3600) + delta.seconds + - float(delta.microseconds) / (10 ** 6)) - - -def is_soon(dt, window): - """Determines if time is going to happen in the next window seconds. - - :param dt: the time - :param window: minimum seconds to remain to consider the time not soon - - :return: True if expiration is within the given duration - """ - soon = (utcnow() + datetime.timedelta(seconds=window)) - return normalize_time(dt) <= soon diff --git a/trove/openstack/common/versionutils.py b/trove/openstack/common/versionutils.py deleted file mode 100644 index ca0b8036b8..0000000000 --- a/trove/openstack/common/versionutils.py +++ /dev/null @@ -1,203 +0,0 @@ -# Copyright (c) 2013 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Helpers for comparing version strings. -""" - -import functools -import inspect - -import pkg_resources -import six - -from trove.openstack.common._i18n import _ -from trove.openstack.common import log as logging - - -LOG = logging.getLogger(__name__) - - -class deprecated(object): - """A decorator to mark callables as deprecated. - - This decorator logs a deprecation message when the callable it decorates is - used. 
The message will include the release where the callable was - deprecated, the release where it may be removed and possibly an optional - replacement. - - Examples: - - 1. Specifying the required deprecated release - - >>> @deprecated(as_of=deprecated.ICEHOUSE) - ... def a(): pass - - 2. Specifying a replacement: - - >>> @deprecated(as_of=deprecated.ICEHOUSE, in_favor_of='f()') - ... def b(): pass - - 3. Specifying the release where the functionality may be removed: - - >>> @deprecated(as_of=deprecated.ICEHOUSE, remove_in=+1) - ... def c(): pass - - 4. Specifying the deprecated functionality will not be removed: - >>> @deprecated(as_of=deprecated.ICEHOUSE, remove_in=0) - ... def d(): pass - - 5. Specifying a replacement, deprecated functionality will not be removed: - >>> @deprecated(as_of=deprecated.ICEHOUSE, in_favor_of='f()', remove_in=0) - ... def e(): pass - - """ - - # NOTE(morganfainberg): Bexar is used for unit test purposes, it is - # expected we maintain a gap between Bexar and Folsom in this list. - BEXAR = 'B' - FOLSOM = 'F' - GRIZZLY = 'G' - HAVANA = 'H' - ICEHOUSE = 'I' - JUNO = 'J' - KILO = 'K' - - _RELEASES = { - # NOTE(morganfainberg): Bexar is used for unit test purposes, it is - # expected we maintain a gap between Bexar and Folsom in this list. - 'B': 'Bexar', - 'F': 'Folsom', - 'G': 'Grizzly', - 'H': 'Havana', - 'I': 'Icehouse', - 'J': 'Juno', - 'K': 'Kilo', - } - - _deprecated_msg_with_alternative = _( - '%(what)s is deprecated as of %(as_of)s in favor of ' - '%(in_favor_of)s and may be removed in %(remove_in)s.') - - _deprecated_msg_no_alternative = _( - '%(what)s is deprecated as of %(as_of)s and may be ' - 'removed in %(remove_in)s. It will not be superseded.') - - _deprecated_msg_with_alternative_no_removal = _( - '%(what)s is deprecated as of %(as_of)s in favor of %(in_favor_of)s.') - - _deprecated_msg_with_no_alternative_no_removal = _( - '%(what)s is deprecated as of %(as_of)s. It will not be superseded.') - - def __init__(self, as_of, in_favor_of=None, remove_in=2, what=None): - """Initialize decorator - - :param as_of: the release deprecating the callable. Constants - are define in this class for convenience. 
- :param in_favor_of: the replacement for the callable (optional) - :param remove_in: an integer specifying how many releases to wait - before removing (default: 2) - :param what: name of the thing being deprecated (default: the - callable's name) - - """ - self.as_of = as_of - self.in_favor_of = in_favor_of - self.remove_in = remove_in - self.what = what - - def __call__(self, func_or_cls): - if not self.what: - self.what = func_or_cls.__name__ + '()' - msg, details = self._build_message() - - if inspect.isfunction(func_or_cls): - - @six.wraps(func_or_cls) - def wrapped(*args, **kwargs): - LOG.deprecated(msg, details) - return func_or_cls(*args, **kwargs) - return wrapped - elif inspect.isclass(func_or_cls): - orig_init = func_or_cls.__init__ - - # TODO(tsufiev): change `functools` module to `six` as - # soon as six 1.7.4 (with fix for passing `assigned` - # argument to underlying `functools.wraps`) is released - # and added to the trove-incubator requrements - @functools.wraps(orig_init, assigned=('__name__', '__doc__')) - def new_init(self, *args, **kwargs): - LOG.deprecated(msg, details) - orig_init(self, *args, **kwargs) - func_or_cls.__init__ = new_init - return func_or_cls - else: - raise TypeError('deprecated can be used only with functions or ' - 'classes') - - def _get_safe_to_remove_release(self, release): - # TODO(dstanek): this method will have to be reimplemented once - # when we get to the X release because once we get to the Y - # release, what is Y+2? - new_release = chr(ord(release) + self.remove_in) - if new_release in self._RELEASES: - return self._RELEASES[new_release] - else: - return new_release - - def _build_message(self): - details = dict(what=self.what, - as_of=self._RELEASES[self.as_of], - remove_in=self._get_safe_to_remove_release(self.as_of)) - - if self.in_favor_of: - details['in_favor_of'] = self.in_favor_of - if self.remove_in > 0: - msg = self._deprecated_msg_with_alternative - else: - # There are no plans to remove this function, but it is - # now deprecated. - msg = self._deprecated_msg_with_alternative_no_removal - else: - if self.remove_in > 0: - msg = self._deprecated_msg_no_alternative - else: - # There are no plans to remove this function, but it is - # now deprecated. - msg = self._deprecated_msg_with_no_alternative_no_removal - return msg, details - - -def is_compatible(requested_version, current_version, same_major=True): - """Determine whether `requested_version` is satisfied by - `current_version`; in other words, `current_version` is >= - `requested_version`. - - :param requested_version: version to check for compatibility - :param current_version: version to check against - :param same_major: if True, the major version must be identical between - `requested_version` and `current_version`. This is used when a - major-version difference indicates incompatibility between the two - versions. Since this is the common-case in practice, the default is - True. - :returns: True if compatible, False if not - """ - requested_parts = pkg_resources.parse_version(requested_version) - current_parts = pkg_resources.parse_version(current_version) - - if same_major and (requested_parts[0] != current_parts[0]): - return False - - return current_parts >= requested_parts diff --git a/trove/openstack/common/wsgi.py b/trove/openstack/common/wsgi.py deleted file mode 100644 index 88676cf41d..0000000000 --- a/trove/openstack/common/wsgi.py +++ /dev/null @@ -1,801 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. 
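The deprecated decorator and is_compatible() removed above were applied roughly as follows; old_handler() and the replacement name are hypothetical and serve only to illustrate the decorator arguments::

    from trove.openstack.common import versionutils

    @versionutils.deprecated(as_of=versionutils.deprecated.JUNO,
                             in_favor_of='new_handler()',
                             remove_in=1)
    def old_handler(req):
        # The wrapper logs the deprecation message, then runs this body.
        return {'status': 'ok'}

    # is_compatible() treats current_version as satisfying requested_version
    # when current >= requested (and, by default, the major versions match).
    versionutils.is_compatible('2.1', '2.3')   # True
    versionutils.is_compatible('3.0', '2.3')   # False: major version differs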
-# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Utility methods for working with WSGI servers.""" - -from __future__ import print_function - -import eventlet -eventlet.patcher.monkey_patch(all=False, socket=True) - -import datetime -import errno -import socket -import sys -import time - -import eventlet.wsgi -from oslo_config import cfg -from oslo_service import service -from oslo_service import sslutils -import routes -import routes.middleware -#import six -import webob.dec -import webob.exc -from xml.dom import minidom -from xml.parsers import expat - -from trove.openstack.common import exception -from trove.openstack.common.gettextutils import _ -from trove.openstack.common import jsonutils -from trove.openstack.common import log as logging -from trove.openstack.common import xmlutils - -socket_opts = [ - cfg.IntOpt('backlog', - default=4096, - help="Number of backlog requests to configure the socket with"), - cfg.IntOpt('tcp_keepidle', - default=600, - help="Sets the value of TCP_KEEPIDLE in seconds for each " - "server socket. Not supported on OS X."), -] - -CONF = cfg.CONF -CONF.register_opts(socket_opts) - -LOG = logging.getLogger(__name__) - - -def run_server(application, port, **kwargs): - """Run a WSGI server with the given application.""" - sock = eventlet.listen(('0.0.0.0', port)) - eventlet.wsgi.server(sock, application, **kwargs) - - -class Service(service.Service): - """ - Provides a Service API for wsgi servers. - - This gives us the ability to launch wsgi servers with the - Launcher classes in service.py. - """ - - def __init__(self, application, port, - host='0.0.0.0', backlog=4096, threads=1000): - self.application = application - self._port = port - self._host = host - self._backlog = backlog if backlog else CONF.backlog - self._socket = self._get_socket(host, port, self._backlog) - super(Service, self).__init__(threads) - - def _get_socket(self, host, port, backlog): - # TODO(dims): eventlet's green dns/socket module does not actually - # support IPv6 in getaddrinfo(). 
We need to get around this in the - # future or monitor upstream for a fix - info = socket.getaddrinfo(host, - port, - socket.AF_UNSPEC, - socket.SOCK_STREAM)[0] - family = info[0] - bind_addr = info[-1] - - sock = None - retry_until = time.time() + 30 - while not sock and time.time() < retry_until: - try: - sock = eventlet.listen(bind_addr, - backlog=backlog, - family=family) - if sslutils.is_enabled(CONF): - sock = sslutils.wrap(CONF, sock) - - except socket.error as err: - if err.args[0] != errno.EADDRINUSE: - raise - eventlet.sleep(0.1) - if not sock: - raise RuntimeError(_("Could not bind to %(host)s:%(port)s " - "after trying for 30 seconds") % - {'host': host, 'port': port}) - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - # sockets can hang around forever without keepalive - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - - # This option isn't available in the OS X version of eventlet - if hasattr(socket, 'TCP_KEEPIDLE'): - sock.setsockopt(socket.IPPROTO_TCP, - socket.TCP_KEEPIDLE, - CONF.tcp_keepidle) - - return sock - - def start(self): - """Start serving this service using the provided server instance. - - :returns: None - - """ - super(Service, self).start() - self.tg.add_thread(self._run, self.application, self._socket) - - @property - def backlog(self): - return self._backlog - - @property - def host(self): - return self._socket.getsockname()[0] if self._socket else self._host - - @property - def port(self): - return self._socket.getsockname()[1] if self._socket else self._port - - def stop(self): - """Stop serving this API. - - :returns: None - - """ - super(Service, self).stop() - - def _run(self, application, socket): - """Start a WSGI server in a new green thread.""" - logger = logging.getLogger('eventlet.wsgi') - eventlet.wsgi.server(socket, - application, - custom_pool=self.tg.pool, - log=logging.WritableLogger(logger)) - - -class Middleware(object): - """ - Base WSGI middleware wrapper. These classes require an application to be - initialized that will be called next. By default the middleware will - simply call its wrapped app, or you can override __call__ to customize its - behavior. - """ - - def __init__(self, application): - self.application = application - - def process_request(self, req): - """ - Called on each request. - - If this returns None, the next application down the stack will be - executed. If it returns a response then that response will be returned - and execution will stop here. - """ - return None - - def process_response(self, response): - """Do whatever you'd like to the response.""" - return response - - @webob.dec.wsgify - def __call__(self, req): - response = self.process_request(req) - if response: - return response - response = req.get_response(self.application) - return self.process_response(response) - - -class Debug(Middleware): - """ - Helper class that can be inserted into any WSGI application chain - to get information about the request and response. - """ - - @webob.dec.wsgify - def __call__(self, req): - print(("*" * 40) + " REQUEST ENVIRON") - for key, value in req.environ.items(): - print(key, "=", value) - print() - resp = req.get_response(self.application) - - print(("*" * 40) + " RESPONSE HEADERS") - for (key, value) in resp.headers.iteritems(): - print(key, "=", value) - print() - - resp.app_iter = self.print_generator(resp.app_iter) - - return resp - - @staticmethod - def print_generator(app_iter): - """ - Iterator that prints the contents of a wrapper string iterator - when iterated. 
- """ - print(("*" * 40) + " BODY") - for part in app_iter: - sys.stdout.write(part) - sys.stdout.flush() - yield part - print() - - -class Router(object): - - """ - WSGI middleware that maps incoming requests to WSGI apps. - """ - - def __init__(self, mapper): - """ - Create a router for the given routes.Mapper. - - Each route in `mapper` must specify a 'controller', which is a - WSGI app to call. You'll probably want to specify an 'action' as - well and have your controller be a wsgi.Controller, who will route - the request to the action method. - - Examples: - mapper = routes.Mapper() - sc = ServerController() - - # Explicit mapping of one route to a controller+action - mapper.connect(None, "/svrlist", controller=sc, action="list") - - # Actions are all implicitly defined - mapper.resource("server", "servers", controller=sc) - - # Pointing to an arbitrary WSGI app. You can specify the - # {path_info:.*} parameter so the target app can be handed just that - # section of the URL. - mapper.connect(None, "/v1.0/{path_info:.*}", controller=BlogApp()) - """ - self.map = mapper - self._router = routes.middleware.RoutesMiddleware(self._dispatch, - self.map) - - @webob.dec.wsgify - def __call__(self, req): - """ - Route the incoming request to a controller based on self.map. - If no match, return a 404. - """ - return self._router - - @staticmethod - @webob.dec.wsgify - def _dispatch(req): - """ - Called by self._router after matching the incoming request to a route - and putting the information into req.environ. Either returns 404 - or the routed WSGI app's response. - """ - match = req.environ['wsgiorg.routing_args'][1] - if not match: - return webob.exc.HTTPNotFound() - app = match['controller'] - return app - - -class Request(webob.Request): - """Add some Openstack API-specific logic to the base webob.Request.""" - - default_request_content_types = ('application/json', 'application/xml') - default_accept_types = ('application/json', 'application/xml') - default_accept_type = 'application/json' - - def best_match_content_type(self, supported_content_types=None): - """Determine the requested response content-type. - - Based on the query extension then the Accept header. - Defaults to default_accept_type if we don't find a preference - - """ - supported_content_types = (supported_content_types or - self.default_accept_types) - - parts = self.path.rsplit('.', 1) - if len(parts) > 1: - ctype = 'application/{0}'.format(parts[1]) - if ctype in supported_content_types: - return ctype - - bm = self.accept.best_match(supported_content_types) - return bm or self.default_accept_type - - def get_content_type(self, allowed_content_types=None): - """Determine content type of the request body. - - Does not do any body introspection, only checks header - - """ - if "Content-Type" not in self.headers: - return None - - content_type = self.content_type - allowed_content_types = (allowed_content_types or - self.default_request_content_types) - - if content_type not in allowed_content_types: - raise exception.InvalidContentType(content_type=content_type) - return content_type - - -class Resource(object): - """ - WSGI app that handles (de)serialization and controller dispatch. - - Reads routing information supplied by RoutesMiddleware and calls - the requested action method upon its deserializer, controller, - and serializer. Those three objects may implement any of the basic - controller action methods (create, update, show, index, delete) - along with any that may be specified in the api router. 
A 'default' - method may also be implemented to be used in place of any - non-implemented actions. Deserializer methods must accept a request - argument and return a dictionary. Controller methods must accept a - request argument. Additionally, they must also accept keyword - arguments that represent the keys returned by the Deserializer. They - may raise a webob.exc exception or return a dict, which will be - serialized by requested content type. - """ - def __init__(self, controller, deserializer=None, serializer=None): - """ - :param controller: object that implement methods created by routes lib - :param deserializer: object that supports webob request deserialization - through controller-like actions - :param serializer: object that supports webob response serialization - through controller-like actions - """ - self.controller = controller - self.serializer = serializer or ResponseSerializer() - self.deserializer = deserializer or RequestDeserializer() - - @webob.dec.wsgify(RequestClass=Request) - def __call__(self, request): - """WSGI method that controls (de)serialization and method dispatch.""" - - try: - action, action_args, accept = self.deserialize_request(request) - except exception.InvalidContentType: - msg = _("Unsupported Content-Type") - return webob.exc.HTTPUnsupportedMediaType(explanation=msg) - except exception.MalformedRequestBody: - msg = _("Malformed request body") - return webob.exc.HTTPBadRequest(explanation=msg) - - action_result = self.execute_action(action, request, **action_args) - try: - return self.serialize_response(action, action_result, accept) - # return unserializable result (typically a webob exc) - except Exception: - return action_result - - def deserialize_request(self, request): - return self.deserializer.deserialize(request) - - def serialize_response(self, action, action_result, accept): - return self.serializer.serialize(action_result, accept, action) - - def execute_action(self, action, request, **action_args): - return self.dispatch(self.controller, action, request, **action_args) - - def dispatch(self, obj, action, *args, **kwargs): - """Find action-specific method on self and call it.""" - try: - method = getattr(obj, action) - except AttributeError: - method = getattr(obj, 'default') - - return method(*args, **kwargs) - - def get_action_args(self, request_environment): - """Parse dictionary created by routes library.""" - try: - args = request_environment['wsgiorg.routing_args'][1].copy() - except Exception: - return {} - - try: - del args['controller'] - except KeyError: - pass - - try: - del args['format'] - except KeyError: - pass - - return args - - -class ActionDispatcher(object): - """Maps method name to local methods through action name.""" - - def dispatch(self, *args, **kwargs): - """Find and call local method.""" - action = kwargs.pop('action', 'default') - action_method = getattr(self, str(action), self.default) - return action_method(*args, **kwargs) - - def default(self, data): - raise NotImplementedError() - - -class DictSerializer(ActionDispatcher): - """Default request body serialization""" - - def serialize(self, data, action='default'): - return self.dispatch(data, action=action) - - def default(self, data): - return "" - - -class JSONDictSerializer(DictSerializer): - """Default JSON request body serialization""" - - def default(self, data): - def sanitizer(obj): - if isinstance(obj, datetime.datetime): - _dtime = obj - datetime.timedelta(microseconds=obj.microsecond) - return _dtime.isoformat() - return obj -# return 
six.text_type(obj) - return jsonutils.dumps(data, default=sanitizer) - - -class XMLDictSerializer(DictSerializer): - - def __init__(self, metadata=None, xmlns=None): - """ - :param metadata: information needed to deserialize xml into - a dictionary. - :param xmlns: XML namespace to include with serialized xml - """ - super(XMLDictSerializer, self).__init__() - self.metadata = metadata or {} - self.xmlns = xmlns - - def default(self, data): - # We expect data to contain a single key which is the XML root. - root_key = data.keys()[0] - doc = minidom.Document() - node = self._to_xml_node(doc, self.metadata, root_key, data[root_key]) - - return self.to_xml_string(node) - - def to_xml_string(self, node, has_atom=False): - self._add_xmlns(node, has_atom) - return node.toprettyxml(indent=' ', encoding='UTF-8') - - #NOTE (ameade): the has_atom should be removed after all of the - # xml serializers and view builders have been updated to the current - # spec that required all responses include the xmlns:atom, the has_atom - # flag is to prevent current tests from breaking - def _add_xmlns(self, node, has_atom=False): - if self.xmlns is not None: - node.setAttribute('xmlns', self.xmlns) - if has_atom: - node.setAttribute('xmlns:atom', "http://www.w3.org/2005/Atom") - - def _to_xml_node(self, doc, metadata, nodename, data): - """Recursive method to convert data members to XML nodes.""" - result = doc.createElement(nodename) - - # Set the xml namespace if one is specified - # TODO(justinsb): We could also use prefixes on the keys - xmlns = metadata.get('xmlns', None) - if xmlns: - result.setAttribute('xmlns', xmlns) - - #TODO(bcwaldon): accomplish this without a type-check - if type(data) is list: - collections = metadata.get('list_collections', {}) - if nodename in collections: - metadata = collections[nodename] - for item in data: - node = doc.createElement(metadata['item_name']) - node.setAttribute(metadata['item_key'], str(item)) - result.appendChild(node) - return result - singular = metadata.get('plurals', {}).get(nodename, None) - if singular is None: - if nodename.endswith('s'): - singular = nodename[:-1] - else: - singular = 'item' - for item in data: - node = self._to_xml_node(doc, metadata, singular, item) - result.appendChild(node) - #TODO(bcwaldon): accomplish this without a type-check - elif type(data) is dict: - collections = metadata.get('dict_collections', {}) - if nodename in collections: - metadata = collections[nodename] - for k, v in data.items(): - node = doc.createElement(metadata['item_name']) - node.setAttribute(metadata['item_key'], str(k)) - text = doc.createTextNode(str(v)) - node.appendChild(text) - result.appendChild(node) - return result - attrs = metadata.get('attributes', {}).get(nodename, {}) - for k, v in data.items(): - if k in attrs: - result.setAttribute(k, str(v)) - else: - node = self._to_xml_node(doc, metadata, k, v) - result.appendChild(node) - else: - # Type is atom - node = doc.createTextNode(str(data)) - result.appendChild(node) - return result - - def _create_link_nodes(self, xml_doc, links): - link_nodes = [] - for link in links: - link_node = xml_doc.createElement('atom:link') - link_node.setAttribute('rel', link['rel']) - link_node.setAttribute('href', link['href']) - if 'type' in link: - link_node.setAttribute('type', link['type']) - link_nodes.append(link_node) - return link_nodes - - -class ResponseHeadersSerializer(ActionDispatcher): - """Default response headers serialization""" - - def serialize(self, response, data, action): - 
self.dispatch(response, data, action=action) - - def default(self, response, data): - response.status_int = 200 - - -class ResponseSerializer(object): - """Encode the necessary pieces into a response object""" - - def __init__(self, body_serializers=None, headers_serializer=None): - self.body_serializers = { - 'application/xml': XMLDictSerializer(), - 'application/json': JSONDictSerializer(), - } - self.body_serializers.update(body_serializers or {}) - - self.headers_serializer = (headers_serializer or - ResponseHeadersSerializer()) - - def serialize(self, response_data, content_type, action='default'): - """Serialize a dict into a string and wrap in a wsgi.Request object. - - :param response_data: dict produced by the Controller - :param content_type: expected mimetype of serialized response body - - """ - response = webob.Response() - self.serialize_headers(response, response_data, action) - self.serialize_body(response, response_data, content_type, action) - return response - - def serialize_headers(self, response, data, action): - self.headers_serializer.serialize(response, data, action) - - def serialize_body(self, response, data, content_type, action): - response.headers['Content-Type'] = content_type - if data is not None: - serializer = self.get_body_serializer(content_type) - response.body = serializer.serialize(data, action) - - def get_body_serializer(self, content_type): - try: - return self.body_serializers[content_type] - except (KeyError, TypeError): - raise exception.InvalidContentType(content_type=content_type) - - -class RequestHeadersDeserializer(ActionDispatcher): - """Default request headers deserializer""" - - def deserialize(self, request, action): - return self.dispatch(request, action=action) - - def default(self, request): - return {} - - -class RequestDeserializer(object): - """Break up a Request object into more useful pieces.""" - - def __init__(self, body_deserializers=None, headers_deserializer=None, - supported_content_types=None): - - self.supported_content_types = supported_content_types - - self.body_deserializers = { - 'application/xml': XMLDeserializer(), - 'application/json': JSONDeserializer(), - } - self.body_deserializers.update(body_deserializers or {}) - - self.headers_deserializer = (headers_deserializer or - RequestHeadersDeserializer()) - - def deserialize(self, request): - """Extract necessary pieces of the request. 
- - :param request: Request object - :returns: tuple of (expected controller action name, dictionary of - keyword arguments to pass to the controller, the expected - content type of the response) - - """ - action_args = self.get_action_args(request.environ) - action = action_args.pop('action', None) - - action_args.update(self.deserialize_headers(request, action)) - action_args.update(self.deserialize_body(request, action)) - - accept = self.get_expected_content_type(request) - - return (action, action_args, accept) - - def deserialize_headers(self, request, action): - return self.headers_deserializer.deserialize(request, action) - - def deserialize_body(self, request, action): - if not len(request.body) > 0: - LOG.debug(_("Empty body provided in request")) - return {} - - try: - content_type = request.get_content_type() - except exception.InvalidContentType: - LOG.debug(_("Unrecognized Content-Type provided in request")) - raise - - if content_type is None: - LOG.debug(_("No Content-Type provided in request")) - return {} - - try: - deserializer = self.get_body_deserializer(content_type) - except exception.InvalidContentType: - LOG.debug(_("Unable to deserialize body as provided Content-Type")) - raise - - return deserializer.deserialize(request.body, action) - - def get_body_deserializer(self, content_type): - try: - return self.body_deserializers[content_type] - except (KeyError, TypeError): - raise exception.InvalidContentType(content_type=content_type) - - def get_expected_content_type(self, request): - return request.best_match_content_type(self.supported_content_types) - - def get_action_args(self, request_environment): - """Parse dictionary created by routes library.""" - try: - args = request_environment['wsgiorg.routing_args'][1].copy() - except Exception: - return {} - - try: - del args['controller'] - except KeyError: - pass - - try: - del args['format'] - except KeyError: - pass - - return args - - -class TextDeserializer(ActionDispatcher): - """Default request body deserialization""" - - def deserialize(self, datastring, action='default'): - return self.dispatch(datastring, action=action) - - def default(self, datastring): - return {} - - -class JSONDeserializer(TextDeserializer): - - def _from_json(self, datastring): - try: - return jsonutils.loads(datastring) - except ValueError: - msg = _("cannot understand JSON") - raise exception.MalformedRequestBody(reason=msg) - - def default(self, datastring): - return {'body': self._from_json(datastring)} - - -class XMLDeserializer(TextDeserializer): - - def __init__(self, metadata=None): - """ - :param metadata: information needed to deserialize xml into - a dictionary. - """ - super(XMLDeserializer, self).__init__() - self.metadata = metadata or {} - - def _from_xml(self, datastring): - plurals = set(self.metadata.get('plurals', {})) - - try: - node = xmlutils.safe_minidom_parse_string(datastring).childNodes[0] - return {node.nodeName: self._from_xml_node(node, plurals)} - except expat.ExpatError: - msg = _("cannot understand XML") - raise exception.MalformedRequestBody(reason=msg) - - def _from_xml_node(self, node, listnames): - """Convert a minidom node to a simple Python type. - - :param listnames: list of XML node names whose subnodes should - be considered list items. 
- - """ - - if len(node.childNodes) == 1 and node.childNodes[0].nodeType == 3: - return node.childNodes[0].nodeValue - elif node.nodeName in listnames: - return [self._from_xml_node(n, listnames) for n in node.childNodes] - else: - result = dict() - for attr in node.attributes.keys(): - result[attr] = node.attributes[attr].nodeValue - for child in node.childNodes: - if child.nodeType != node.TEXT_NODE: - result[child.nodeName] = self._from_xml_node(child, - listnames) - return result - - def find_first_child_named(self, parent, name): - """Search a nodes children for the first child with a given name""" - for node in parent.childNodes: - if node.nodeName == name: - return node - return None - - def find_children_named(self, parent, name): - """Return all of a nodes children who have the given name""" - for node in parent.childNodes: - if node.nodeName == name: - yield node - - def extract_text(self, node): - """Get the text field contained by the given node""" - if len(node.childNodes) == 1: - child = node.childNodes[0] - if child.nodeType == child.TEXT_NODE: - return child.nodeValue - return "" - - def default(self, datastring): - return {'body': self._from_xml(datastring)}