Import copies of the required openstack.common modules
The APIs melange is using are "incubating" in openstack-common. See: http://wiki.openstack.org/CommonLibrary#Incubation A config file is included for openstack-common's copy-and-paste script to make it easy to update the code e.g. $> git clone .../melange $> git clone .../openstack-common $> cd openstack-common $> python update.py ../melange/ Note: the copies of the openstack.common modules should never be directly modified, instead you should always make the modifications in openstack-common and copy them across using the script. Change-Id: I0245e30e4b4a9a2214c4f8a291b7f2caa54ab97e
This commit is contained in:
parent
5b5e1ec321
commit
e23effda88
15
HACKING
15
HACKING
@ -158,3 +158,18 @@ Internationalization (i18n) Strings
|
|||||||
Example:
|
Example:
|
||||||
msg = _("The server with id %(s_id)s has no key %(m_key)s")
|
msg = _("The server with id %(s_id)s has no key %(m_key)s")
|
||||||
LOG.error(msg % {"s_id": "1234", "m_key": "imageId"})
|
LOG.error(msg % {"s_id": "1234", "m_key": "imageId"})
|
||||||
|
|
||||||
|
|
||||||
|
openstack-common
|
||||||
|
----------------
|
||||||
|
|
||||||
|
A number of modules from openstack-common are imported into the project.
|
||||||
|
|
||||||
|
These modules are "incubating" in openstack-common and are kept in sync
|
||||||
|
with the help of openstack-common's update.py script. See:
|
||||||
|
|
||||||
|
http://wiki.openstack.org/CommonLibrary#Incubation
|
||||||
|
|
||||||
|
The copy of the code should never be directly modified here. Please
|
||||||
|
always update openstack-common first and then run the script to copy
|
||||||
|
the changes across.
|
||||||
|
@ -18,7 +18,7 @@
|
|||||||
|
|
||||||
"""Routines for configuring Melange."""
|
"""Routines for configuring Melange."""
|
||||||
|
|
||||||
from openstack.common import config as openstack_config
|
from melange.openstack.common import config as openstack_config
|
||||||
|
|
||||||
|
|
||||||
parse_options = openstack_config.parse_options
|
parse_options = openstack_config.parse_options
|
||||||
|
@ -15,7 +15,7 @@
|
|||||||
# License for the specific language governing permissions and limitations
|
# License for the specific language governing permissions and limitations
|
||||||
# under the License.
|
# under the License.
|
||||||
|
|
||||||
from openstack.common import exception as openstack_exception
|
from melange.openstack.common import exception as openstack_exception
|
||||||
|
|
||||||
|
|
||||||
ClientConnectionError = openstack_exception.ClientConnectionError
|
ClientConnectionError = openstack_exception.ClientConnectionError
|
||||||
|
@ -16,7 +16,7 @@
|
|||||||
# License for the specific language governing permissions and limitations
|
# License for the specific language governing permissions and limitations
|
||||||
# under the License.
|
# under the License.
|
||||||
|
|
||||||
from openstack.common import extensions
|
from melange.openstack.common import extensions
|
||||||
|
|
||||||
|
|
||||||
def factory(global_config, **local_config):
|
def factory(global_config, **local_config):
|
||||||
|
@ -22,7 +22,7 @@ import inspect
|
|||||||
import re
|
import re
|
||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
from openstack.common import utils as openstack_utils
|
from melange.openstack.common import utils as openstack_utils
|
||||||
|
|
||||||
from melange.common import exception
|
from melange.common import exception
|
||||||
|
|
||||||
|
@ -27,7 +27,7 @@ import webob.dec
|
|||||||
import webob.exc
|
import webob.exc
|
||||||
|
|
||||||
|
|
||||||
from openstack.common import wsgi as openstack_wsgi
|
from melange.openstack.common import wsgi as openstack_wsgi
|
||||||
|
|
||||||
from melange.common import exception
|
from melange.common import exception
|
||||||
from melange.common import utils
|
from melange.common import utils
|
||||||
|
16
melange/openstack/__init__.py
Normal file
16
melange/openstack/__init__.py
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||||
|
|
||||||
|
# Copyright 2010-2011 OpenStack LLC.
|
||||||
|
# All Rights Reserved.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
13
melange/openstack/common/README
Normal file
13
melange/openstack/common/README
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
openstack-common
|
||||||
|
----------------
|
||||||
|
|
||||||
|
A number of modules from openstack-common are imported into this project.
|
||||||
|
|
||||||
|
These modules are "incubating" in openstack-common and are kept in sync
|
||||||
|
with the help of openstack-common's update.py script. See:
|
||||||
|
|
||||||
|
http://wiki.openstack.org/CommonLibrary#Incubation
|
||||||
|
|
||||||
|
The copy of the code should never be directly modified here. Please
|
||||||
|
always update openstack-common first and then run the script to copy
|
||||||
|
the changes across.
|
16
melange/openstack/common/__init__.py
Normal file
16
melange/openstack/common/__init__.py
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||||
|
|
||||||
|
# Copyright 2010-2011 OpenStack LLC.
|
||||||
|
# All Rights Reserved.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
337
melange/openstack/common/config.py
Normal file
337
melange/openstack/common/config.py
Normal file
@ -0,0 +1,337 @@
|
|||||||
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||||
|
|
||||||
|
# Copyright 2011 OpenStack LLC.
|
||||||
|
# All Rights Reserved.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
"""
|
||||||
|
Routines for configuring Openstack Projects
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import logging.config
|
||||||
|
import logging.handlers
|
||||||
|
import optparse
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from paste import deploy
|
||||||
|
|
||||||
|
DEFAULT_LOG_FORMAT = "%(asctime)s %(levelname)8s [%(name)s] %(message)s"
|
||||||
|
DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
|
||||||
|
|
||||||
|
|
||||||
|
def parse_options(parser, cli_args=None):
|
||||||
|
"""
|
||||||
|
Returns the parsed CLI options, command to run and its arguments, merged
|
||||||
|
with any same-named options found in a configuration file.
|
||||||
|
|
||||||
|
The function returns a tuple of (options, args), where options is a
|
||||||
|
mapping of option key/str(value) pairs, and args is the set of arguments
|
||||||
|
(not options) supplied on the command-line.
|
||||||
|
|
||||||
|
The reason that the option values are returned as strings only is that
|
||||||
|
ConfigParser and paste.deploy only accept string values...
|
||||||
|
|
||||||
|
:param parser: The option parser
|
||||||
|
:param cli_args: (Optional) Set of arguments to process. If not present,
|
||||||
|
sys.argv[1:] is used.
|
||||||
|
:retval tuple of (options, args)
|
||||||
|
"""
|
||||||
|
|
||||||
|
(options, args) = parser.parse_args(cli_args)
|
||||||
|
|
||||||
|
return (vars(options), args)
|
||||||
|
|
||||||
|
|
||||||
|
def add_common_options(parser):
|
||||||
|
"""
|
||||||
|
Given a supplied optparse.OptionParser, adds an OptionGroup that
|
||||||
|
represents all common configuration options.
|
||||||
|
|
||||||
|
:param parser: optparse.OptionParser
|
||||||
|
"""
|
||||||
|
help_text = "The following configuration options are common to "\
|
||||||
|
"this app's programs."
|
||||||
|
|
||||||
|
group = optparse.OptionGroup(parser, "Common Options", help_text)
|
||||||
|
group.add_option('-v', '--verbose', default=False, dest="verbose",
|
||||||
|
action="store_true",
|
||||||
|
help="Print more verbose output")
|
||||||
|
group.add_option('-d', '--debug', default=False, dest="debug",
|
||||||
|
action="store_true",
|
||||||
|
help="Print debugging output")
|
||||||
|
group.add_option('--config-file', default=None, metavar="PATH",
|
||||||
|
help="Path to the config file to use. When not specified "
|
||||||
|
"(the default), we generally look at the first "
|
||||||
|
"argument specified to be a config file, and if "
|
||||||
|
"that is also missing, we search standard "
|
||||||
|
"directories for a config file.")
|
||||||
|
parser.add_option_group(group)
|
||||||
|
|
||||||
|
|
||||||
|
def add_log_options(parser):
|
||||||
|
"""
|
||||||
|
Given a supplied optparse.OptionParser, adds an OptionGroup that
|
||||||
|
represents all the configuration options around logging.
|
||||||
|
|
||||||
|
:param parser: optparse.OptionParser
|
||||||
|
"""
|
||||||
|
help_text = "The following configuration options are specific to logging "\
|
||||||
|
"functionality for this program."
|
||||||
|
|
||||||
|
group = optparse.OptionGroup(parser, "Logging Options", help_text)
|
||||||
|
group.add_option('--log-config', default=None, metavar="PATH",
|
||||||
|
help="If this option is specified, the logging "
|
||||||
|
"configuration file specified is used and overrides "
|
||||||
|
"any other logging options specified. Please see "
|
||||||
|
"the Python logging module documentation for "
|
||||||
|
"details on logging configuration files.")
|
||||||
|
group.add_option('--log-date-format', metavar="FORMAT",
|
||||||
|
default=DEFAULT_LOG_DATE_FORMAT,
|
||||||
|
help="Format string for %(asctime)s in log records. "
|
||||||
|
"Default: %default")
|
||||||
|
group.add_option('--log-file', default=None, metavar="PATH",
|
||||||
|
help="(Optional) Name of log file to output to. "
|
||||||
|
"If not set, logging will go to stdout.")
|
||||||
|
group.add_option("--log-dir", default=None,
|
||||||
|
help="(Optional) The directory to keep log files in "
|
||||||
|
"(will be prepended to --logfile)")
|
||||||
|
group.add_option('--use-syslog', default=False, dest="use_syslog",
|
||||||
|
action="store_true",
|
||||||
|
help="Use syslog for logging.")
|
||||||
|
parser.add_option_group(group)
|
||||||
|
|
||||||
|
|
||||||
|
def setup_logging(options, conf):
|
||||||
|
"""
|
||||||
|
Sets up the logging options for a log with supplied name
|
||||||
|
|
||||||
|
:param options: Mapping of typed option key/values
|
||||||
|
:param conf: Mapping of untyped key/values from config file
|
||||||
|
"""
|
||||||
|
|
||||||
|
if options.get('log_config', None):
|
||||||
|
# Use a logging configuration file for all settings...
|
||||||
|
if os.path.exists(options['log_config']):
|
||||||
|
logging.config.fileConfig(options['log_config'])
|
||||||
|
return
|
||||||
|
else:
|
||||||
|
raise RuntimeError("Unable to locate specified logging "
|
||||||
|
"config file: %s" % options['log_config'])
|
||||||
|
|
||||||
|
# If either the CLI option or the conf value
|
||||||
|
# is True, we set to True
|
||||||
|
debug = options.get('debug') or \
|
||||||
|
get_option(conf, 'debug', type='bool', default=False)
|
||||||
|
verbose = options.get('verbose') or \
|
||||||
|
get_option(conf, 'verbose', type='bool', default=False)
|
||||||
|
root_logger = logging.root
|
||||||
|
if debug:
|
||||||
|
root_logger.setLevel(logging.DEBUG)
|
||||||
|
elif verbose:
|
||||||
|
root_logger.setLevel(logging.INFO)
|
||||||
|
else:
|
||||||
|
root_logger.setLevel(logging.WARNING)
|
||||||
|
|
||||||
|
# Set log configuration from options...
|
||||||
|
# Note that we use a hard-coded log format in the options
|
||||||
|
# because of Paste.Deploy bug #379
|
||||||
|
# http://trac.pythonpaste.org/pythonpaste/ticket/379
|
||||||
|
log_format = options.get('log_format', DEFAULT_LOG_FORMAT)
|
||||||
|
log_date_format = options.get('log_date_format', DEFAULT_LOG_DATE_FORMAT)
|
||||||
|
formatter = logging.Formatter(log_format, log_date_format)
|
||||||
|
|
||||||
|
logfile = options.get('log_file')
|
||||||
|
if not logfile:
|
||||||
|
logfile = conf.get('log_file')
|
||||||
|
|
||||||
|
use_syslog = options.get('use_syslog') or \
|
||||||
|
get_option(conf, 'use_syslog', type='bool', default=False)
|
||||||
|
|
||||||
|
if use_syslog:
|
||||||
|
handler = logging.handlers.SysLogHandler(address='/dev/log')
|
||||||
|
elif logfile:
|
||||||
|
logdir = options.get('log_dir')
|
||||||
|
if not logdir:
|
||||||
|
logdir = conf.get('log_dir')
|
||||||
|
if logdir:
|
||||||
|
logfile = os.path.join(logdir, logfile)
|
||||||
|
handler = logging.FileHandler(logfile)
|
||||||
|
else:
|
||||||
|
handler = logging.StreamHandler(sys.stdout)
|
||||||
|
|
||||||
|
handler.setFormatter(formatter)
|
||||||
|
root_logger.addHandler(handler)
|
||||||
|
|
||||||
|
|
||||||
|
def fix_path(path):
|
||||||
|
"""
|
||||||
|
Return the full absolute path
|
||||||
|
"""
|
||||||
|
return os.path.abspath(os.path.expanduser(path))
|
||||||
|
|
||||||
|
|
||||||
|
def find_config_file(app_name, options, args, config_dir=None):
|
||||||
|
"""
|
||||||
|
Return the first config file found for an application.
|
||||||
|
|
||||||
|
We search for the paste config file in the following order:
|
||||||
|
* If --config-file option is used, use that
|
||||||
|
* If args[0] is a file, use that
|
||||||
|
* Search for $app.conf in standard directories:
|
||||||
|
* .
|
||||||
|
* ~.config_dir/
|
||||||
|
* ~
|
||||||
|
* /etc/config_dir
|
||||||
|
* /etc
|
||||||
|
|
||||||
|
:retval Full path to config file, or None if no config file found
|
||||||
|
"""
|
||||||
|
config_dir = config_dir or app_name
|
||||||
|
|
||||||
|
if options.get('config_file'):
|
||||||
|
if os.path.exists(options['config_file']):
|
||||||
|
return fix_path(options['config_file'])
|
||||||
|
elif args:
|
||||||
|
if os.path.exists(args[0]):
|
||||||
|
return fix_path(args[0])
|
||||||
|
|
||||||
|
# Handle standard directory search for $app_name.conf
|
||||||
|
config_file_dirs = [fix_path(os.getcwd()),
|
||||||
|
fix_path(os.path.join('~', '.' + config_dir)),
|
||||||
|
fix_path('~'),
|
||||||
|
os.path.join('/etc', config_dir),
|
||||||
|
'/etc']
|
||||||
|
|
||||||
|
for cfg_dir in config_file_dirs:
|
||||||
|
cfg_file = os.path.join(cfg_dir, '%s.conf' % app_name)
|
||||||
|
if os.path.exists(cfg_file):
|
||||||
|
return cfg_file
|
||||||
|
|
||||||
|
|
||||||
|
def load_paste_config(app_name, options, args, config_dir=None):
|
||||||
|
"""
|
||||||
|
Looks for a config file to use for an app and returns the
|
||||||
|
config file path and a configuration mapping from a paste config file.
|
||||||
|
|
||||||
|
We search for the paste config file in the following order:
|
||||||
|
* If --config-file option is used, use that
|
||||||
|
* If args[0] is a file, use that
|
||||||
|
* Search for $app_name.conf in standard directories:
|
||||||
|
* .
|
||||||
|
* ~.config_dir/
|
||||||
|
* ~
|
||||||
|
* /etc/config_dir
|
||||||
|
* /etc
|
||||||
|
|
||||||
|
:param app_name: Name of the application to load config for, or None.
|
||||||
|
None signifies to only load the [DEFAULT] section of
|
||||||
|
the config file.
|
||||||
|
:param options: Set of typed options returned from parse_options()
|
||||||
|
:param args: Command line arguments from argv[1:]
|
||||||
|
:retval Tuple of (conf_file, conf)
|
||||||
|
|
||||||
|
:raises RuntimeError when config file cannot be located or there was a
|
||||||
|
problem loading the configuration file.
|
||||||
|
"""
|
||||||
|
conf_file = find_config_file(app_name, options, args, config_dir)
|
||||||
|
if not conf_file:
|
||||||
|
raise RuntimeError("Unable to locate any configuration file. "
|
||||||
|
"Cannot load application %s" % app_name)
|
||||||
|
try:
|
||||||
|
conf = deploy.appconfig("config:%s" % conf_file, name=app_name)
|
||||||
|
return conf_file, conf
|
||||||
|
except Exception, e:
|
||||||
|
raise RuntimeError("Error trying to load config %s: %s"
|
||||||
|
% (conf_file, e))
|
||||||
|
|
||||||
|
|
||||||
|
def load_paste_app(app_name, options, args, config_dir=None):
|
||||||
|
"""
|
||||||
|
Builds and returns a WSGI app from a paste config file.
|
||||||
|
|
||||||
|
We search for the paste config file in the following order:
|
||||||
|
* If --config-file option is used, use that
|
||||||
|
* If args[0] is a file, use that
|
||||||
|
* Search for $app_name.conf in standard directories:
|
||||||
|
* .
|
||||||
|
* ~.config_dir/
|
||||||
|
* ~
|
||||||
|
* /etc/config_dir
|
||||||
|
* /etc
|
||||||
|
|
||||||
|
:param app_name: Name of the application to load
|
||||||
|
:param options: Set of typed options returned from parse_options()
|
||||||
|
:param args: Command line arguments from argv[1:]
|
||||||
|
|
||||||
|
:raises RuntimeError when config file cannot be located or application
|
||||||
|
cannot be loaded from config file
|
||||||
|
"""
|
||||||
|
conf_file, conf = load_paste_config(app_name, options,
|
||||||
|
args, config_dir)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Setup logging early, supplying both the CLI options and the
|
||||||
|
# configuration mapping from the config file
|
||||||
|
setup_logging(options, conf)
|
||||||
|
|
||||||
|
# We only update the conf dict for the verbose and debug
|
||||||
|
# flags. Everything else must be set up in the conf file...
|
||||||
|
debug = options.get('debug') or \
|
||||||
|
get_option(conf, 'debug', type='bool', default=False)
|
||||||
|
verbose = options.get('verbose') or \
|
||||||
|
get_option(conf, 'verbose', type='bool', default=False)
|
||||||
|
conf['debug'] = debug
|
||||||
|
conf['verbose'] = verbose
|
||||||
|
|
||||||
|
# Log the options used when starting if we're in debug mode...
|
||||||
|
if debug:
|
||||||
|
logger = logging.getLogger(app_name)
|
||||||
|
logger.debug("*" * 80)
|
||||||
|
logger.debug("Configuration options gathered from config file:")
|
||||||
|
logger.debug(conf_file)
|
||||||
|
logger.debug("================================================")
|
||||||
|
items = dict([(k, v) for k, v in conf.items()
|
||||||
|
if k not in ('__file__', 'here')])
|
||||||
|
for key, value in sorted(items.items()):
|
||||||
|
logger.debug("%(key)-30s %(value)s" % locals())
|
||||||
|
logger.debug("*" * 80)
|
||||||
|
app = deploy.loadapp("config:%s" % conf_file, name=app_name)
|
||||||
|
except (LookupError, ImportError), e:
|
||||||
|
raise RuntimeError("Unable to load %(app_name)s from "
|
||||||
|
"configuration file %(conf_file)s."
|
||||||
|
"\nGot: %(e)r" % locals())
|
||||||
|
return conf, app
|
||||||
|
|
||||||
|
|
||||||
|
def get_option(options, option, **kwargs):
|
||||||
|
if option in options:
|
||||||
|
value = options[option]
|
||||||
|
type_ = kwargs.get('type', 'str')
|
||||||
|
if type_ == 'bool':
|
||||||
|
if hasattr(value, 'lower'):
|
||||||
|
return value.lower() == 'true'
|
||||||
|
else:
|
||||||
|
return value
|
||||||
|
elif type_ == 'int':
|
||||||
|
return int(value)
|
||||||
|
elif type_ == 'float':
|
||||||
|
return float(value)
|
||||||
|
else:
|
||||||
|
return value
|
||||||
|
elif 'default' in kwargs:
|
||||||
|
return kwargs['default']
|
||||||
|
else:
|
||||||
|
raise KeyError("option '%s' not found" % option)
|
147
melange/openstack/common/exception.py
Normal file
147
melange/openstack/common/exception.py
Normal file
@ -0,0 +1,147 @@
|
|||||||
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||||
|
|
||||||
|
# Copyright 2011 OpenStack LLC.
|
||||||
|
# All Rights Reserved.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
"""
|
||||||
|
Exceptions common to OpenStack projects
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
|
|
||||||
|
class ProcessExecutionError(IOError):
|
||||||
|
def __init__(self, stdout=None, stderr=None, exit_code=None, cmd=None,
|
||||||
|
description=None):
|
||||||
|
if description is None:
|
||||||
|
description = "Unexpected error while running command."
|
||||||
|
if exit_code is None:
|
||||||
|
exit_code = '-'
|
||||||
|
message = "%s\nCommand: %s\nExit code: %s\nStdout: %r\nStderr: %r" % (
|
||||||
|
description, cmd, exit_code, stdout, stderr)
|
||||||
|
IOError.__init__(self, message)
|
||||||
|
|
||||||
|
|
||||||
|
class Error(Exception):
|
||||||
|
def __init__(self, message=None):
|
||||||
|
super(Error, self).__init__(message)
|
||||||
|
|
||||||
|
|
||||||
|
class ApiError(Error):
|
||||||
|
def __init__(self, message='Unknown', code='Unknown'):
|
||||||
|
self.message = message
|
||||||
|
self.code = code
|
||||||
|
super(ApiError, self).__init__('%s: %s' % (code, message))
|
||||||
|
|
||||||
|
|
||||||
|
class NotFound(Error):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class UnknownScheme(Error):
|
||||||
|
|
||||||
|
msg = "Unknown scheme '%s' found in URI"
|
||||||
|
|
||||||
|
def __init__(self, scheme):
|
||||||
|
msg = self.__class__.msg % scheme
|
||||||
|
super(UnknownScheme, self).__init__(msg)
|
||||||
|
|
||||||
|
|
||||||
|
class BadStoreUri(Error):
|
||||||
|
|
||||||
|
msg = "The Store URI %s was malformed. Reason: %s"
|
||||||
|
|
||||||
|
def __init__(self, uri, reason):
|
||||||
|
msg = self.__class__.msg % (uri, reason)
|
||||||
|
super(BadStoreUri, self).__init__(msg)
|
||||||
|
|
||||||
|
|
||||||
|
class Duplicate(Error):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class NotAuthorized(Error):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class NotEmpty(Error):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class Invalid(Error):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class BadInputError(Exception):
|
||||||
|
"""Error resulting from a client sending bad input to a server"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class MissingArgumentError(Error):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class DatabaseMigrationError(Error):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class ClientConnectionError(Exception):
|
||||||
|
"""Error resulting from a client connecting to a server"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def wrap_exception(f):
|
||||||
|
def _wrap(*args, **kw):
|
||||||
|
try:
|
||||||
|
return f(*args, **kw)
|
||||||
|
except Exception, e:
|
||||||
|
if not isinstance(e, Error):
|
||||||
|
#exc_type, exc_value, exc_traceback = sys.exc_info()
|
||||||
|
logging.exception('Uncaught exception')
|
||||||
|
#logging.error(traceback.extract_stack(exc_traceback))
|
||||||
|
raise Error(str(e))
|
||||||
|
raise
|
||||||
|
_wrap.func_name = f.func_name
|
||||||
|
return _wrap
|
||||||
|
|
||||||
|
|
||||||
|
class OpenstackException(Exception):
|
||||||
|
"""
|
||||||
|
Base Exception
|
||||||
|
|
||||||
|
To correctly use this class, inherit from it and define
|
||||||
|
a 'message' property. That message will get printf'd
|
||||||
|
with the keyword arguments provided to the constructor.
|
||||||
|
"""
|
||||||
|
message = "An unknown exception occurred"
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
try:
|
||||||
|
self._error_string = self.message % kwargs
|
||||||
|
|
||||||
|
except Exception:
|
||||||
|
# at least get the core message out if something happened
|
||||||
|
self._error_string = self.message
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return self._error_string
|
||||||
|
|
||||||
|
|
||||||
|
class MalformedRequestBody(OpenstackException):
|
||||||
|
message = "Malformed message body: %(reason)s"
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidContentType(OpenstackException):
|
||||||
|
message = "Invalid content type %(content_type)s"
|
538
melange/openstack/common/extensions.py
Normal file
538
melange/openstack/common/extensions.py
Normal file
@ -0,0 +1,538 @@
|
|||||||
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||||
|
|
||||||
|
# Copyright 2011 OpenStack LLC.
|
||||||
|
# Copyright 2011 Justin Santa Barbara
|
||||||
|
# All Rights Reserved.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
import imp
|
||||||
|
import os
|
||||||
|
import routes
|
||||||
|
import webob.dec
|
||||||
|
import webob.exc
|
||||||
|
import logging
|
||||||
|
from lxml import etree
|
||||||
|
|
||||||
|
from melange.openstack.common import exception
|
||||||
|
from melange.openstack.common import wsgi
|
||||||
|
|
||||||
|
LOG = logging.getLogger('extensions')
|
||||||
|
DEFAULT_XMLNS = "http://docs.openstack.org/"
|
||||||
|
XMLNS_ATOM = "http://www.w3.org/2005/Atom"
|
||||||
|
|
||||||
|
|
||||||
|
class ExtensionDescriptor(object):
|
||||||
|
"""Base class that defines the contract for extensions.
|
||||||
|
|
||||||
|
Note that you don't have to derive from this class to have a valid
|
||||||
|
extension; it is purely a convenience.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def get_name(self):
|
||||||
|
"""The name of the extension.
|
||||||
|
|
||||||
|
e.g. 'Fox In Socks'
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def get_alias(self):
|
||||||
|
"""The alias for the extension.
|
||||||
|
|
||||||
|
e.g. 'FOXNSOX'
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def get_description(self):
|
||||||
|
"""Friendly description for the extension.
|
||||||
|
|
||||||
|
e.g. 'The Fox In Socks Extension'
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def get_namespace(self):
|
||||||
|
"""The XML namespace for the extension.
|
||||||
|
|
||||||
|
e.g. 'http://www.fox.in.socks/api/ext/pie/v1.0'
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def get_updated(self):
|
||||||
|
"""The timestamp when the extension was last updated.
|
||||||
|
|
||||||
|
e.g. '2011-01-22T13:25:27-06:00'
|
||||||
|
|
||||||
|
"""
|
||||||
|
# NOTE(justinsb): Not sure of the purpose of this is, vs the XML NS
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def get_resources(self):
|
||||||
|
"""List of extensions.ResourceExtension extension objects.
|
||||||
|
|
||||||
|
Resources define new nouns, and are accessible through URLs.
|
||||||
|
|
||||||
|
"""
|
||||||
|
resources = []
|
||||||
|
return resources
|
||||||
|
|
||||||
|
def get_actions(self):
|
||||||
|
"""List of extensions.ActionExtension extension objects.
|
||||||
|
|
||||||
|
Actions are verbs callable from the API.
|
||||||
|
|
||||||
|
"""
|
||||||
|
actions = []
|
||||||
|
return actions
|
||||||
|
|
||||||
|
def get_request_extensions(self):
|
||||||
|
"""List of extensions.RequestException extension objects.
|
||||||
|
|
||||||
|
Request extensions are used to handle custom request data.
|
||||||
|
|
||||||
|
"""
|
||||||
|
request_exts = []
|
||||||
|
return request_exts
|
||||||
|
|
||||||
|
|
||||||
|
class ActionExtensionController(object):
|
||||||
|
def __init__(self, application):
|
||||||
|
self.application = application
|
||||||
|
self.action_handlers = {}
|
||||||
|
|
||||||
|
def add_action(self, action_name, handler):
|
||||||
|
self.action_handlers[action_name] = handler
|
||||||
|
|
||||||
|
def action(self, req, id, body):
|
||||||
|
for action_name, handler in self.action_handlers.iteritems():
|
||||||
|
if action_name in body:
|
||||||
|
return handler(body, req, id)
|
||||||
|
# no action handler found (bump to downstream application)
|
||||||
|
res = self.application
|
||||||
|
return res
|
||||||
|
|
||||||
|
|
||||||
|
class ActionExtensionResource(wsgi.Resource):
|
||||||
|
|
||||||
|
def __init__(self, application):
|
||||||
|
controller = ActionExtensionController(application)
|
||||||
|
wsgi.Resource.__init__(self, controller)
|
||||||
|
|
||||||
|
def add_action(self, action_name, handler):
|
||||||
|
self.controller.add_action(action_name, handler)
|
||||||
|
|
||||||
|
|
||||||
|
class RequestExtensionController(object):
|
||||||
|
|
||||||
|
def __init__(self, application):
|
||||||
|
self.application = application
|
||||||
|
self.handlers = []
|
||||||
|
|
||||||
|
def add_handler(self, handler):
|
||||||
|
self.handlers.append(handler)
|
||||||
|
|
||||||
|
def process(self, req, *args, **kwargs):
|
||||||
|
res = req.get_response(self.application)
|
||||||
|
# currently request handlers are un-ordered
|
||||||
|
for handler in self.handlers:
|
||||||
|
res = handler(req, res)
|
||||||
|
return res
|
||||||
|
|
||||||
|
|
||||||
|
class RequestExtensionResource(wsgi.Resource):
|
||||||
|
|
||||||
|
def __init__(self, application):
|
||||||
|
controller = RequestExtensionController(application)
|
||||||
|
wsgi.Resource.__init__(self, controller)
|
||||||
|
|
||||||
|
def add_handler(self, handler):
|
||||||
|
self.controller.add_handler(handler)
|
||||||
|
|
||||||
|
|
||||||
|
class ExtensionsResource(wsgi.Resource):
    """REST resource that lists/shows the loaded API extensions.

    Acts as its own controller (note the ``self`` passed to the base
    class).  Extensions are read-only: create/delete return 404.
    """

    def __init__(self, extension_manager):
        self.extension_manager = extension_manager
        serializer = wsgi.ResponseSerializer(
            body_serializers={'application/xml': ExtensionsXMLSerializer()})
        # This resource is its own controller; no request deserializer.
        super(ExtensionsResource, self).__init__(self, None, serializer)

    def _translate(self, ext):
        """Convert an extension object into a plain dict for serialization."""
        return {
            'name': ext.get_name(),
            'alias': ext.get_alias(),
            'description': ext.get_description(),
            'namespace': ext.get_namespace(),
            'updated': ext.get_updated(),
            'links': [],  # TODO(dprince): implement extension links
        }

    def index(self, req):
        """Return every loaded extension as ``{'extensions': [...]}.``"""
        loaded = self.extension_manager.extensions.itervalues()
        return dict(extensions=[self._translate(ext) for ext in loaded])

    def show(self, req, id):
        """Show one extension; *id* is the extension's alias."""
        # NOTE(dprince): the extensions alias is used as the 'id' for show
        ext = self.extension_manager.extensions.get(id, None)
        if ext is None:
            raise webob.exc.HTTPNotFound(
                _("Extension with alias %s does not exist") % id)
        return dict(extension=self._translate(ext))

    def delete(self, req, id):
        """Extensions cannot be deleted through the API."""
        raise webob.exc.HTTPNotFound()

    def create(self, req):
        """Extensions cannot be created through the API."""
        raise webob.exc.HTTPNotFound()
|
||||||
|
|
||||||
|
|
||||||
|
class ExtensionMiddleware(wsgi.Middleware):
    """Extensions middleware for WSGI.

    Builds a routes.Mapper from all extended resources, actions and
    request extensions discovered by the ExtensionManager, and routes
    matching requests to them; unmatched requests fall through to the
    wrapped application (see _dispatch).
    """

    @classmethod
    def factory(cls, global_config, **local_config):
        """Paste factory."""
        def _factory(app):
            return cls(app, global_config, **local_config)
        return _factory

    def _action_ext_resources(self, application, ext_mgr, mapper):
        """Return a dict of ActionExtensionResource-s by collection."""
        action_resources = {}
        for action in ext_mgr.get_actions():
            # One shared resource per collection; all of that collection's
            # actions are later attached to it via add_action().
            if not action.collection in action_resources.keys():
                resource = ActionExtensionResource(application)
                # Route both the format-suffixed and plain action URLs.
                mapper.connect("/%s/:(id)/action.:(format)" %
                               action.collection,
                               action='action',
                               controller=resource,
                               conditions=dict(method=['POST']))
                mapper.connect("/%s/:(id)/action" %
                               action.collection,
                               action='action',
                               controller=resource,
                               conditions=dict(method=['POST']))
                action_resources[action.collection] = resource

        return action_resources

    def _request_ext_resources(self, application, ext_mgr, mapper):
        """Returns a dict of RequestExtensionResource-s by collection."""
        request_ext_resources = {}
        for req_ext in ext_mgr.get_request_extensions():
            # One shared resource per (method, url_route) key.
            if not req_ext.key in request_ext_resources.keys():
                resource = RequestExtensionResource(application)
                mapper.connect(req_ext.url_route + '.:(format)',
                               action='process',
                               controller=resource,
                               conditions=req_ext.conditions)

                mapper.connect(req_ext.url_route,
                               action='process',
                               controller=resource,
                               conditions=req_ext.conditions)
                request_ext_resources[req_ext.key] = resource

        return request_ext_resources

    def __init__(self, application, config, ext_mgr=None):
        # ext_mgr injection is primarily for tests.
        ext_mgr = ext_mgr or ExtensionManager(
            config['api_extensions_path'])
        mapper = routes.Mapper()

        # extended resources
        for resource_ext in ext_mgr.get_resources():
            LOG.debug(_('Extended resource: %s'), resource_ext.collection)
            controller_resource = wsgi.Resource(resource_ext.controller,
                                                resource_ext.deserializer,
                                                resource_ext.serializer)
            # Custom collection actions must be mapped before the generic
            # RESTful routes so they take precedence.
            self._map_custom_collection_actions(resource_ext, mapper,
                                                controller_resource)
            kargs = dict(controller=controller_resource,
                         collection=resource_ext.collection_actions,
                         member=resource_ext.member_actions)
            if resource_ext.parent:
                kargs['parent_resource'] = resource_ext.parent
            mapper.resource(resource_ext.collection,
                            resource_ext.collection, **kargs)

        # extended actions
        action_resources = self._action_ext_resources(application, ext_mgr,
                                                      mapper)
        for action in ext_mgr.get_actions():
            LOG.debug(_('Extended action: %s'), action.action_name)
            resource = action_resources[action.collection]
            resource.add_action(action.action_name, action.handler)

        # extended requests
        req_controllers = self._request_ext_resources(application, ext_mgr,
                                                      mapper)
        for request_ext in ext_mgr.get_request_extensions():
            LOG.debug(_('Extended request: %s'), request_ext.key)
            controller = req_controllers[request_ext.key]
            controller.add_handler(request_ext.handler)

        self._router = routes.middleware.RoutesMiddleware(self._dispatch,
                                                          mapper)

        super(ExtensionMiddleware, self).__init__(application)

    def _map_custom_collection_actions(self, resource_ext, mapper,
                                       controller_resource):
        """Connect routes for each custom collection-level action."""
        for action, method in resource_ext.collection_actions.iteritems():
            parent = resource_ext.parent
            conditions = dict(method=[method])
            path = "/%s/%s" % (resource_ext.collection, action)

            path_prefix = ""
            if parent:
                # Nest under the parent resource, e.g. /parents/{parent_id}.
                path_prefix = "/%s/{%s_id}" % (parent["collection_name"],
                                               parent["member_name"])

            with mapper.submapper(controller=controller_resource,
                                  action=action,
                                  path_prefix=path_prefix,
                                  conditions=conditions) as submap:
                submap.connect(path)
                submap.connect("%s.:(format)" % path)

    @webob.dec.wsgify(RequestClass=wsgi.Request)
    def __call__(self, req):
        """Route the incoming request with router."""
        # Stash the wrapped app so _dispatch can defer to it on no-match.
        req.environ['extended.app'] = self.application
        return self._router

    @staticmethod
    @webob.dec.wsgify(RequestClass=wsgi.Request)
    def _dispatch(req):
        """Dispatch the request.

        Returns the routed WSGI app's response or defers to the extended
        application.

        """
        match = req.environ['wsgiorg.routing_args'][1]
        if not match:
            return req.environ['extended.app']
        app = match['controller']
        return app
|
||||||
|
|
||||||
|
|
||||||
|
class ExtensionManager(object):
    """Load extensions from the configured extension path.

    See nova/tests/api/openstack/extensions/foxinsocks/extension.py for an
    example extension implementation.

    """

    def __init__(self, path):
        LOG.debug(_('Initializing extension manager.'))

        self.path = path
        # Maps extension alias -> extension object.
        self.extensions = {}
        self._load_all_extensions()

    def get_resources(self):
        """Returns a list of ResourceExtension objects."""
        resources = []
        # The extensions index/show API is itself exposed as a resource.
        extension_resource = ExtensionsResource(self)
        res_ext = ResourceExtension('extensions',
                                    extension_resource,
                                    serializer=extension_resource.serializer)
        resources.append(res_ext)
        for alias, ext in self.extensions.iteritems():
            try:
                resources.extend(ext.get_resources())
            except AttributeError:
                # NOTE(dprince): Extension aren't required to have resource
                # extensions
                pass
        return resources

    def get_actions(self):
        """Returns a list of ActionExtension objects."""
        actions = []
        for alias, ext in self.extensions.iteritems():
            try:
                actions.extend(ext.get_actions())
            except AttributeError:
                # NOTE(dprince): Extension aren't required to have action
                # extensions
                pass
        return actions

    def get_request_extensions(self):
        """Returns a list of RequestExtension objects."""
        request_exts = []
        for alias, ext in self.extensions.iteritems():
            try:
                request_exts.extend(ext.get_request_extensions())
            except AttributeError:
                # NOTE(dprince): Extension aren't required to have request
                # extensions
                pass
        return request_exts

    def _check_extension(self, extension):
        """Checks for required methods in extension objects.

        Returns False (and logs) if any descriptor method is missing.
        """
        try:
            LOG.debug(_('Ext name: %s'), extension.get_name())
            LOG.debug(_('Ext alias: %s'), extension.get_alias())
            LOG.debug(_('Ext description: %s'), extension.get_description())
            LOG.debug(_('Ext namespace: %s'), extension.get_namespace())
            LOG.debug(_('Ext updated: %s'), extension.get_updated())
        except AttributeError as ex:
            LOG.exception(_("Exception loading extension: %s"), unicode(ex))
            return False
        return True

    def _load_all_extensions(self):
        """Load extensions from the configured path.

        Load extensions from the configured path. The extension name is
        constructed from the module_name. If your extension module was named
        widgets.py the extension class within that module should be
        'Widgets'.

        In addition, extensions are loaded from the 'contrib' directory.

        See nova/tests/api/openstack/extensions/foxinsocks.py for an example
        extension implementation.

        """
        if os.path.exists(self.path):
            self._load_all_extensions_from_path(self.path)

        contrib_path = os.path.join(os.path.dirname(__file__), "contrib")
        if os.path.exists(contrib_path):
            self._load_all_extensions_from_path(contrib_path)

    def _load_all_extensions_from_path(self, path):
        """Import every non-private .py file in *path* as an extension."""
        for f in os.listdir(path):
            LOG.debug(_('Loading extension file: %s'), f)
            mod_name, file_ext = os.path.splitext(os.path.split(f)[-1])
            ext_path = os.path.join(path, f)
            if file_ext.lower() == '.py' and not mod_name.startswith('_'):
                mod = imp.load_source(mod_name, ext_path)
                # Expected class name is the capitalized module name.
                ext_name = mod_name[0].upper() + mod_name[1:]
                new_ext_class = getattr(mod, ext_name, None)
                if not new_ext_class:
                    LOG.warn(_('Did not find expected name '
                               '"%(ext_name)s" in %(file)s'),
                             {'ext_name': ext_name,
                              'file': ext_path})
                    continue
                new_ext = new_ext_class()
                self.add_extension(new_ext)

    def add_extension(self, ext):
        """Validate *ext* and register it under its alias."""
        # Do nothing if the extension doesn't check out
        if not self._check_extension(ext):
            return

        alias = ext.get_alias()
        LOG.debug(_('Loaded extension: %s'), alias)

        if alias in self.extensions:
            raise exception.Error("Found duplicate extension: %s" % alias)
        self.extensions[alias] = ext
|
||||||
|
|
||||||
|
|
||||||
|
class RequestExtension(object):
    """Descriptor for extending requests/responses of core API resources.

    Holds a handler that post-processes responses for an existing route,
    identified by HTTP method plus URL route.
    """

    def __init__(self, method, url_route, handler):
        self.url_route = url_route
        self.handler = handler
        self.conditions = {'method': [method]}
        # Unique key combining HTTP method and route.
        self.key = "%s-%s" % (method, url_route)
|
||||||
|
|
||||||
|
|
||||||
|
class ActionExtension(object):
    """Descriptor for a custom action on a core API collection."""

    def __init__(self, collection, action_name, handler):
        # Plain value object: collection name, action name, handler callable.
        self.collection, self.action_name, self.handler = (
            collection, action_name, handler)
|
||||||
|
|
||||||
|
|
||||||
|
class ResourceExtension(object):
    """Descriptor for a new top-level resource added to the API.

    Missing action maps default to empty dicts so callers can iterate
    them unconditionally.
    """

    def __init__(self, collection, controller, parent=None,
                 collection_actions=None, member_actions=None,
                 deserializer=None, serializer=None):
        self.collection = collection
        self.controller = controller
        self.parent = parent
        self.collection_actions = collection_actions or {}
        self.member_actions = member_actions or {}
        self.deserializer = deserializer
        self.serializer = serializer
|
||||||
|
|
||||||
|
|
||||||
|
class ExtensionsXMLSerializer(wsgi.XMLDictSerializer):
    """Serialize extension index/show dicts to XML with atom links."""

    def __init__(self):
        # Default namespace plus the atom namespace for link elements.
        self.nsmap = {None: DEFAULT_XMLNS, 'atom': XMLNS_ATOM}

    def show(self, ext_dict):
        """Serialize a single extension dict."""
        root = etree.Element('extension', nsmap=self.nsmap)
        self._populate_ext(root, ext_dict['extension'])
        return self._to_xml(root)

    def index(self, exts_dict):
        """Serialize a list of extension dicts."""
        root = etree.Element('extensions', nsmap=self.nsmap)
        for entry in exts_dict['extensions']:
            child = etree.SubElement(root, 'extension')
            self._populate_ext(child, entry)
        return self._to_xml(root)

    def _populate_ext(self, ext_elem, ext_dict):
        """Populate an extension xml element from a dict."""
        for attr in ('name', 'namespace', 'alias', 'updated'):
            ext_elem.set(attr, ext_dict[attr])
        desc = etree.Element('description')
        desc.text = ext_dict['description']
        ext_elem.append(desc)
        for link in ext_dict.get('links', []):
            elem = etree.SubElement(ext_elem, '{%s}link' % XMLNS_ATOM)
            for key in ('rel', 'href', 'type'):
                elem.set(key, link[key])
        return ext_elem

    def _to_xml(self, root):
        """Convert the xml object to an xml string."""
        return etree.tostring(root, encoding='UTF-8')
|
210
melange/openstack/common/utils.py
Normal file
210
melange/openstack/common/utils.py
Normal file
@ -0,0 +1,210 @@
|
|||||||
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||||
|
|
||||||
|
# Copyright 2011 OpenStack LLC.
|
||||||
|
# All Rights Reserved.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
"""
|
||||||
|
System-level utilities and helper functions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import random
|
||||||
|
import shlex
|
||||||
|
import sys
|
||||||
|
import types
|
||||||
|
|
||||||
|
from eventlet import greenthread
|
||||||
|
from eventlet.green import subprocess
|
||||||
|
|
||||||
|
from melange.openstack.common import exception
|
||||||
|
|
||||||
|
|
||||||
|
TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
|
||||||
|
LOG = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def int_from_bool_as_string(subject):
    """Interpret a string as a boolean and return either 1 or 0.

    Any string value in:
        ('True', 'true', 'On', 'on', '1')
    is interpreted as a boolean True.

    Useful for JSON-decoded stuff and config file parsing
    """
    return 1 if bool_from_string(subject) else 0
|
||||||
|
|
||||||
|
|
||||||
|
def bool_from_string(subject):
    """Interpret a string as a boolean.

    Any string value in:
        ('True', 'true', 'On', 'on', '1')
    (after stripping surrounding whitespace and lower-casing) is
    interpreted as boolean True.  Booleans pass through unchanged;
    every other value is False.

    Useful for JSON-decoded stuff and config file parsing
    """
    if isinstance(subject, bool):
        return subject
    try:
        # Duck-type the string check: works for both str and unicode
        # without the deprecated types.BooleanType/types.StringTypes
        # aliases (removed in Python 3).
        text = subject.strip().lower()
    except AttributeError:
        # Non-string, non-bool inputs are treated as False.
        return False
    return text in ('true', 'on', '1')
|
||||||
|
|
||||||
|
|
||||||
|
def execute(*cmd, **kwargs):
    """
    Helper method to execute command with optional retry.

    :cmd                Passed to subprocess.Popen.
    :process_input      Send to opened process.
    :check_exit_code    Defaults to 0. Raise exception.ProcessExecutionError
                        unless program exits with this code.
    :delay_on_retry     True | False. Defaults to True. If set to True, wait a
                        short amount of time before retrying.
    :attempts           How many times to retry cmd.
    :run_as_root        True | False. Defaults to False. If set to True,
                        the command is prefixed by the command specified
                        in the root_helper kwarg.
    :root_helper        command to prefix all cmd's with

    :raises exception.Error on receiving unknown arguments
    :raises exception.ProcessExecutionError
    """

    process_input = kwargs.pop('process_input', None)
    check_exit_code = kwargs.pop('check_exit_code', 0)
    delay_on_retry = kwargs.pop('delay_on_retry', True)
    attempts = kwargs.pop('attempts', 1)
    run_as_root = kwargs.pop('run_as_root', False)
    root_helper = kwargs.pop('root_helper', '')
    # Anything left over is a typo on the caller's side -- fail loudly.
    if len(kwargs):
        raise exception.Error(_('Got unknown keyword args '
                                'to utils.execute: %r') % kwargs)
    if run_as_root:
        cmd = shlex.split(root_helper) + list(cmd)
    # Popen requires string arguments; coerce ints/paths etc.
    cmd = map(str, cmd)

    while attempts > 0:
        attempts -= 1
        try:
            LOG.debug(_('Running cmd (subprocess): %s'), ' '.join(cmd))
            _PIPE = subprocess.PIPE  # pylint: disable=E1101
            obj = subprocess.Popen(cmd,
                                   stdin=_PIPE,
                                   stdout=_PIPE,
                                   stderr=_PIPE,
                                   close_fds=True)
            result = None
            if process_input is not None:
                result = obj.communicate(process_input)
            else:
                result = obj.communicate()
            obj.stdin.close()  # pylint: disable=E1101
            _returncode = obj.returncode  # pylint: disable=E1101
            if _returncode:
                LOG.debug(_('Result was %s') % _returncode)
                # Only enforce the exit code when an int was requested;
                # passing a non-int check_exit_code disables the check.
                if type(check_exit_code) == types.IntType \
                        and _returncode != check_exit_code:
                    (stdout, stderr) = result
                    raise exception.ProcessExecutionError(
                            exit_code=_returncode,
                            stdout=stdout,
                            stderr=stderr,
                            cmd=' '.join(cmd))
            # result is the (stdout, stderr) tuple from communicate().
            return result
        except exception.ProcessExecutionError:
            if not attempts:
                raise
            else:
                LOG.debug(_('%r failed. Retrying.'), cmd)
                if delay_on_retry:
                    # Sleep 0.2-2.0 seconds between retries.
                    greenthread.sleep(random.randint(20, 200) / 100.0)
        finally:
            # NOTE(termie): this appears to be necessary to let the subprocess
            #               call clean something up in between calls, without
            #               it two execute calls in a row hangs the second one
            greenthread.sleep(0)
|
||||||
|
|
||||||
|
|
||||||
|
def import_class(import_str):
    """Returns a class from a string including module and class"""
    module_name, _sep, class_name = import_str.rpartition('.')
    try:
        __import__(module_name)
        module = sys.modules[module_name]
        return getattr(module, class_name)
    except (ImportError, ValueError, AttributeError):
        raise exception.NotFound('Class %s cannot be found' % class_name)
|
||||||
|
|
||||||
|
|
||||||
|
def import_object(import_str):
    """Returns an object including a module or module and class"""
    try:
        __import__(import_str)
    except ImportError:
        # Not a plain module path -- treat the last segment as a class.
        return import_class(import_str)
    return sys.modules[import_str]
|
||||||
|
|
||||||
|
|
||||||
|
def isotime(at=None):
    """Format *at* (default: current UTC time) using TIME_FORMAT."""
    return (at or datetime.datetime.utcnow()).strftime(TIME_FORMAT)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_isotime(timestr):
    """Parse a TIME_FORMAT string (as produced by isotime) to a datetime."""
    return datetime.datetime.strptime(timestr, TIME_FORMAT)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_mailmap(mailmap='.mailmap'):
    """Parse a git .mailmap file into an {alias: canonical_email} dict.

    Comment lines (starting with '#') and lines without a space are
    skipped.  A missing file yields an empty mapping.
    """
    mapping = {}
    if os.path.exists(mailmap):
        # Use a context manager so the file handle is always closed
        # (the previous version never closed it).
        with open(mailmap, 'r') as fp:
            for line in fp:
                line = line.strip()
                if not line.startswith('#') and ' ' in line:
                    canonical_email, alias = line.split(' ')
                    mapping[alias] = canonical_email
    return mapping
|
||||||
|
|
||||||
|
|
||||||
|
def str_dict_replace(s, mapping):
    """Return *s* with each key of *mapping* replaced by its value.

    Replacements are applied sequentially, so the result can depend on
    the dict's iteration order when keys and values overlap.
    """
    # .items() instead of the Python-2-only .iteritems(): identical
    # behavior here and keeps the helper portable to Python 3.
    for pattern, replacement in mapping.items():
        s = s.replace(pattern, replacement)
    return s
|
||||||
|
|
||||||
|
|
||||||
|
def utcnow():
    """Overridable version of utils.utcnow.

    Returns utcnow.override_time when set (see set_time_override),
    otherwise the real current UTC time.
    """
    override = utcnow.override_time
    if override:
        return override
    return datetime.datetime.utcnow()


# When set, utcnow() returns this fixed value instead of the real time.
utcnow.override_time = None
|
||||||
|
|
||||||
|
|
||||||
|
def set_time_override(override_time=None):
    """Override utils.utcnow to return a constant time.

    If *override_time* is omitted, the current UTC time at the moment of
    this call is frozen in.  (The previous default of
    ``datetime.datetime.utcnow()`` in the signature was evaluated once at
    import time, so every no-argument call froze the module-load
    timestamp rather than "now".)
    """
    if override_time is None:
        override_time = datetime.datetime.utcnow()
    utcnow.override_time = override_time
|
||||||
|
|
||||||
|
|
||||||
|
def clear_time_override():
    """Remove the overridden time.

    After this call utcnow() returns the real current UTC time again.
    """
    utcnow.override_time = None
|
717
melange/openstack/common/wsgi.py
Normal file
717
melange/openstack/common/wsgi.py
Normal file
@ -0,0 +1,717 @@
|
|||||||
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||||
|
|
||||||
|
# Copyright 2011 OpenStack LLC.
|
||||||
|
# All Rights Reserved.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
"""Utility methods for working with WSGI servers."""
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
import eventlet
|
||||||
|
import eventlet.wsgi
|
||||||
|
|
||||||
|
eventlet.patcher.monkey_patch(all=False, socket=True)
|
||||||
|
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import sys
|
||||||
|
import routes
|
||||||
|
import routes.middleware
|
||||||
|
import webob.dec
|
||||||
|
import webob.exc
|
||||||
|
from xml.dom import minidom
|
||||||
|
from xml.parsers import expat
|
||||||
|
|
||||||
|
from melange.openstack.common import exception
|
||||||
|
|
||||||
|
|
||||||
|
LOG = logging.getLogger('wsgi')
|
||||||
|
|
||||||
|
|
||||||
|
class WritableLogger(object):
    """File-like adapter whose `write` calls forward to a logger.

    Lets libraries that expect a writable stream (e.g. eventlet.wsgi's
    `log` argument) emit through the standard logging machinery.
    """

    def __init__(self, logger, level=logging.DEBUG):
        self.logger = logger
        self.level = level

    def write(self, msg):
        # Trim newlines; the logging handler adds its own line ending.
        text = msg.strip("\n")
        self.logger.log(self.level, text)
|
||||||
|
|
||||||
|
|
||||||
|
def run_server(application, port):
    """Run a WSGI server with the given application.

    Binds to all interfaces on *port* and blocks, serving requests via
    eventlet's WSGI server.
    """
    sock = eventlet.listen(('0.0.0.0', port))
    eventlet.wsgi.server(sock, application)
|
||||||
|
|
||||||
|
|
||||||
|
class Server(object):
    """Server class to manage multiple WSGI sockets and applications."""

    def __init__(self, threads=1000):
        # Green-thread pool shared by every server started on this object.
        self.pool = eventlet.GreenPool(threads)

    def start(self, application, port, host='0.0.0.0', backlog=128):
        """Run a WSGI server with the given application.

        Non-blocking: the server runs in a green thread from the pool.
        """
        socket = eventlet.listen((host, port), backlog=backlog)
        self.pool.spawn_n(self._run, application, socket)

    def wait(self):
        """Wait until all servers have completed running."""
        try:
            self.pool.waitall()
        except KeyboardInterrupt:
            # Ctrl-C is the normal way to stop; exit quietly.
            pass

    def _run(self, application, socket):
        """Start a WSGI server in a new green thread."""
        logger = logging.getLogger('eventlet.wsgi.server')
        eventlet.wsgi.server(socket, application, custom_pool=self.pool,
                             log=WritableLogger(logger))
|
||||||
|
|
||||||
|
|
||||||
|
class Middleware(object):
    """
    Base WSGI middleware wrapper. These classes require an application to be
    initialized that will be called next. By default the middleware will
    simply call its wrapped app, or you can override __call__ to customize its
    behavior.
    """

    def __init__(self, application):
        self.application = application

    def process_request(self, req):
        """
        Called on each request.

        If this returns None, the next application down the stack will be
        executed. If it returns a response then that response will be returned
        and execution will stop here.
        """
        return None

    def process_response(self, response):
        """Do whatever you'd like to the response."""
        return response

    @webob.dec.wsgify
    def __call__(self, req):
        # Short-circuit if a subclass's process_request produced a response.
        response = self.process_request(req)
        if response:
            return response
        response = req.get_response(self.application)
        return self.process_response(response)
|
||||||
|
|
||||||
|
|
||||||
|
class Debug(Middleware):
    """
    Helper class that can be inserted into any WSGI application chain
    to get information about the request and response.

    Prints the request environ, response headers and (lazily) the body
    to stdout.  Python 2 only (uses print statements).
    """

    @webob.dec.wsgify
    def __call__(self, req):
        print ("*" * 40) + " REQUEST ENVIRON"
        for key, value in req.environ.items():
            print key, "=", value
        print
        resp = req.get_response(self.application)

        print ("*" * 40) + " RESPONSE HEADERS"
        for (key, value) in resp.headers.iteritems():
            print key, "=", value
        print

        # Wrap the body iterator so it is echoed as it is consumed.
        resp.app_iter = self.print_generator(resp.app_iter)

        return resp

    @staticmethod
    def print_generator(app_iter):
        """
        Iterator that prints the contents of a wrapper string iterator
        when iterated.
        """
        print ("*" * 40) + " BODY"
        for part in app_iter:
            sys.stdout.write(part)
            sys.stdout.flush()
            yield part
        print
|
||||||
|
|
||||||
|
|
||||||
|
class Router(object):

    """
    WSGI middleware that maps incoming requests to WSGI apps.
    """

    def __init__(self, mapper):
        """
        Create a router for the given routes.Mapper.

        Each route in `mapper` must specify a 'controller', which is a
        WSGI app to call. You'll probably want to specify an 'action' as
        well and have your controller be a wsgi.Controller, who will route
        the request to the action method.

        Examples:
          mapper = routes.Mapper()
          sc = ServerController()

          # Explicit mapping of one route to a controller+action
          mapper.connect(None, "/svrlist", controller=sc, action="list")

          # Actions are all implicitly defined
          mapper.resource("server", "servers", controller=sc)

          # Pointing to an arbitrary WSGI app. You can specify the
          # {path_info:.*} parameter so the target app can be handed just that
          # section of the URL.
          mapper.connect(None, "/v1.0/{path_info:.*}", controller=BlogApp())
        """
        self.map = mapper
        # RoutesMiddleware matches the route, then calls _dispatch.
        self._router = routes.middleware.RoutesMiddleware(self._dispatch,
                                                          self.map)

    @webob.dec.wsgify
    def __call__(self, req):
        """
        Route the incoming request to a controller based on self.map.
        If no match, return a 404.
        """
        return self._router

    @staticmethod
    @webob.dec.wsgify
    def _dispatch(req):
        """
        Called by self._router after matching the incoming request to a route
        and putting the information into req.environ. Either returns 404
        or the routed WSGI app's response.
        """
        match = req.environ['wsgiorg.routing_args'][1]
        if not match:
            return webob.exc.HTTPNotFound()
        app = match['controller']
        return app
|
||||||
|
|
||||||
|
|
||||||
|
class Request(webob.Request):
    """webob.Request with OpenStack API content-type negotiation helpers."""

    default_request_content_types = ('application/json', 'application/xml')
    default_accept_types = ('application/json', 'application/xml')
    default_accept_type = 'application/json'

    def best_match_content_type(self, supported_content_types=None):
        """Determine the requested response content-type.

        A file-style suffix on the URL path (e.g. '.json') wins over the
        Accept header; falls back to default_accept_type when neither
        names a supported type.
        """
        supported = supported_content_types or self.default_accept_types

        # A path suffix like '/servers.json' selects the type directly.
        _head, sep, extension = self.path.rpartition('.')
        if sep:
            candidate = 'application/{0}'.format(extension)
            if candidate in supported:
                return candidate

        best = self.accept.best_match(supported)
        return best or self.default_accept_type

    def get_content_type(self, allowed_content_types=None):
        """Return the request body's Content-Type, or None if absent.

        Only inspects the header, never the body.  Raises
        exception.InvalidContentType for a disallowed type.
        """
        if "Content-Type" not in self.headers:
            return None

        allowed = (allowed_content_types or
                   self.default_request_content_types)
        content_type = self.content_type
        if content_type not in allowed:
            raise exception.InvalidContentType(content_type=content_type)
        return content_type
|
||||||
|
|
||||||
|
|
||||||
|
class Resource(object):
    """
    WSGI app that handles (de)serialization and controller dispatch.

    Reads routing information supplied by RoutesMiddleware and calls
    the requested action method upon its deserializer, controller,
    and serializer. Those three objects may implement any of the basic
    controller action methods (create, update, show, index, delete)
    along with any that may be specified in the api router. A 'default'
    method may also be implemented to be used in place of any
    non-implemented actions. Deserializer methods must accept a request
    argument and return a dictionary. Controller methods must accept a
    request argument. Additionally, they must also accept keyword
    arguments that represent the keys returned by the Deserializer. They
    may raise a webob.exc exception or return a dict, which will be
    serialized by requested content type.
    """

    def __init__(self, controller, deserializer=None, serializer=None):
        """
        :param controller: object that implement methods created by routes lib
        :param deserializer: object that supports webob request deserialization
                             through controller-like actions
        :param serializer: object that supports webob response serialization
                           through controller-like actions
        """
        self.controller = controller
        self.serializer = serializer or ResponseSerializer()
        self.deserializer = deserializer or RequestDeserializer()

    @webob.dec.wsgify(RequestClass=Request)
    def __call__(self, request):
        """WSGI method that controls (de)serialization and method dispatch."""
        try:
            action, action_args, accept = self.deserialize_request(request)
        except exception.InvalidContentType:
            return webob.exc.HTTPUnsupportedMediaType(
                explanation=_("Unsupported Content-Type"))
        except exception.MalformedRequestBody:
            return webob.exc.HTTPBadRequest(
                explanation=_("Malformed request body"))

        result = self.execute_action(action, request, **action_args)
        try:
            return self.serialize_response(action, result, accept)
        except Exception:
            # The result could not be serialized; it is typically already
            # a webob exception, so hand it back unchanged.
            return result

    def deserialize_request(self, request):
        """Break the incoming request into action, kwargs and accept type."""
        return self.deserializer.deserialize(request)

    def serialize_response(self, action, action_result, accept):
        """Render the controller result for the negotiated content type."""
        return self.serializer.serialize(action_result, accept, action)

    def execute_action(self, action, request, **action_args):
        """Invoke the controller method chosen by the router."""
        return self.dispatch(self.controller, action, request, **action_args)

    def dispatch(self, obj, action, *args, **kwargs):
        """Find action-specific method on self and call it."""
        try:
            handler = getattr(obj, action)
        except AttributeError:
            # Fall back to the catch-all handler when the action is missing.
            handler = getattr(obj, 'default')

        return handler(*args, **kwargs)

    def get_action_args(self, request_environment):
        """Parse dictionary created by routes library."""
        try:
            args = request_environment['wsgiorg.routing_args'][1].copy()
        except Exception:
            return {}

        # Routing metadata is not a controller keyword argument.
        args.pop('controller', None)
        args.pop('format', None)

        return args
|
||||||
|
|
||||||
|
|
||||||
|
class ActionDispatcher(object):
    """Maps method name to local methods through action name."""

    def dispatch(self, *args, **kwargs):
        """Find and call local method."""
        action = kwargs.pop('action', 'default')
        handler = getattr(self, str(action), self.default)
        return handler(*args, **kwargs)

    def default(self, data):
        # Subclasses must provide the catch-all handler.
        raise NotImplementedError()
|
||||||
|
|
||||||
|
|
||||||
|
class DictSerializer(ActionDispatcher):
    """Default request body serialization"""

    def serialize(self, data, action='default'):
        """Serialize data using the handler registered for action."""
        return self.dispatch(data, action=action)

    def default(self, data):
        # With no action-specific serializer, emit an empty body.
        return ""
|
||||||
|
|
||||||
|
|
||||||
|
class JSONDictSerializer(DictSerializer):
    """Default JSON request body serialization"""

    def default(self, data):
        """Dump data as JSON, rendering datetimes as ISO 8601 strings."""
        return json.dumps(data, default=self._to_primitive)

    @staticmethod
    def _to_primitive(obj):
        # json cannot encode datetime objects itself; emit an ISO 8601
        # string with the microseconds dropped.
        if isinstance(obj, datetime.datetime):
            trimmed = obj - datetime.timedelta(microseconds=obj.microsecond)
            return trimmed.isoformat()
        return obj
|
||||||
|
|
||||||
|
|
||||||
|
class XMLDictSerializer(DictSerializer):
    """Serialize a dict (with a single root key) into an XML document."""

    def __init__(self, metadata=None, xmlns=None):
        """
        :param metadata: information needed to deserialize xml into
                         a dictionary.
        :param xmlns: XML namespace to include with serialized xml
        """
        super(XMLDictSerializer, self).__init__()
        self.metadata = metadata or {}
        self.xmlns = xmlns

    def default(self, data):
        # We expect data to contain a single key which is the XML root.
        # NOTE: list(data)[0] works on both Python 2 and Python 3;
        # data.keys()[0] fails on Python 3 where keys() is a view.
        root_key = list(data)[0]
        doc = minidom.Document()
        node = self._to_xml_node(doc, self.metadata, root_key, data[root_key])

        return self.to_xml_string(node)

    def to_xml_string(self, node, has_atom=False):
        """Render a node tree as a pretty-printed UTF-8 XML string."""
        self._add_xmlns(node, has_atom)
        return node.toprettyxml(indent='    ', encoding='UTF-8')

    #NOTE (ameade): the has_atom should be removed after all of the
    # xml serializers and view builders have been updated to the current
    # spec that required all responses include the xmlns:atom, the has_atom
    # flag is to prevent current tests from breaking
    def _add_xmlns(self, node, has_atom=False):
        if self.xmlns is not None:
            node.setAttribute('xmlns', self.xmlns)
        if has_atom:
            node.setAttribute('xmlns:atom', "http://www.w3.org/2005/Atom")

    def _to_xml_node(self, doc, metadata, nodename, data):
        """Recursive method to convert data members to XML nodes."""
        result = doc.createElement(nodename)

        # Set the xml namespace if one is specified
        # TODO(justinsb): We could also use prefixes on the keys
        xmlns = metadata.get('xmlns', None)
        if xmlns:
            result.setAttribute('xmlns', xmlns)

        #TODO(bcwaldon): accomplish this without a type-check
        if type(data) is list:
            collections = metadata.get('list_collections', {})
            if nodename in collections:
                # Metadata-described collection: each item becomes an
                # attribute-only element.
                metadata = collections[nodename]
                for item in data:
                    node = doc.createElement(metadata['item_name'])
                    node.setAttribute(metadata['item_key'], str(item))
                    result.appendChild(node)
                return result
            # Derive the singular element name from the plural node name.
            singular = metadata.get('plurals', {}).get(nodename, None)
            if singular is None:
                if nodename.endswith('s'):
                    singular = nodename[:-1]
                else:
                    singular = 'item'
            for item in data:
                node = self._to_xml_node(doc, metadata, singular, item)
                result.appendChild(node)
        #TODO(bcwaldon): accomplish this without a type-check
        elif type(data) is dict:
            collections = metadata.get('dict_collections', {})
            if nodename in collections:
                # Metadata-described mapping: key becomes an attribute,
                # value becomes the element text.
                metadata = collections[nodename]
                for k, v in data.items():
                    node = doc.createElement(metadata['item_name'])
                    node.setAttribute(metadata['item_key'], str(k))
                    text = doc.createTextNode(str(v))
                    node.appendChild(text)
                    result.appendChild(node)
                return result
            # Keys listed in the 'attributes' metadata render as XML
            # attributes; everything else becomes a child element.
            attrs = metadata.get('attributes', {}).get(nodename, {})
            for k, v in data.items():
                if k in attrs:
                    result.setAttribute(k, str(v))
                else:
                    node = self._to_xml_node(doc, metadata, k, v)
                    result.appendChild(node)
        else:
            # Type is atom
            node = doc.createTextNode(str(data))
            result.appendChild(node)
        return result

    def _create_link_nodes(self, xml_doc, links):
        """Build atom:link elements from a list of link dicts.

        Each link dict must have 'rel' and 'href' keys; 'type' is optional.
        """
        link_nodes = []
        for link in links:
            link_node = xml_doc.createElement('atom:link')
            link_node.setAttribute('rel', link['rel'])
            link_node.setAttribute('href', link['href'])
            if 'type' in link:
                link_node.setAttribute('type', link['type'])
            link_nodes.append(link_node)
        return link_nodes
|
||||||
|
|
||||||
|
|
||||||
|
class ResponseHeadersSerializer(ActionDispatcher):
    """Default response headers serialization"""

    def serialize(self, response, data, action):
        """Mutate response headers via the handler registered for action."""
        self.dispatch(response, data, action=action)

    def default(self, response, data):
        # Without an action-specific handler, just report success.
        response.status_int = 200
|
||||||
|
|
||||||
|
|
||||||
|
class ResponseSerializer(object):
    """Encode the necessary pieces into a response object"""

    def __init__(self, body_serializers=None, headers_serializer=None):
        """
        :param body_serializers: mapping of content type to body serializer,
                                 merged over the JSON and XML defaults
        :param headers_serializer: serializer for the response headers
        """
        serializers = {
            'application/json': JSONDictSerializer(),
            'application/xml': XMLDictSerializer(),
        }
        serializers.update(body_serializers or {})
        self.body_serializers = serializers

        self.headers_serializer = (headers_serializer or
                                   ResponseHeadersSerializer())

    def serialize(self, response_data, content_type, action='default'):
        """Serialize a dict into a string and wrap in a wsgi.Request object.

        :param response_data: dict produced by the Controller
        :param content_type: expected mimetype of serialized response body

        """
        response = webob.Response()
        self.serialize_headers(response, response_data, action)
        self.serialize_body(response, response_data, content_type, action)
        return response

    def serialize_headers(self, response, data, action):
        """Apply the headers serializer to the response."""
        self.headers_serializer.serialize(response, data, action)

    def serialize_body(self, response, data, content_type, action):
        """Set Content-Type and, when data is present, render the body."""
        response.headers['Content-Type'] = content_type
        if data is not None:
            serializer = self.get_body_serializer(content_type)
            response.body = serializer.serialize(data, action)

    def get_body_serializer(self, content_type):
        """Look up the body serializer registered for content_type.

        :raises exception.InvalidContentType: if no serializer is registered
        """
        try:
            return self.body_serializers[content_type]
        except (KeyError, TypeError):
            raise exception.InvalidContentType(content_type=content_type)
|
||||||
|
|
||||||
|
|
||||||
|
class RequestHeadersDeserializer(ActionDispatcher):
    """Default request headers deserializer"""

    def deserialize(self, request, action):
        """Extract controller kwargs from headers via the action handler."""
        return self.dispatch(request, action=action)

    def default(self, request):
        # By default headers contribute no controller arguments.
        return {}
|
||||||
|
|
||||||
|
|
||||||
|
class RequestDeserializer(object):
    """Break up a Request object into more useful pieces."""

    def __init__(self, body_deserializers=None, headers_deserializer=None,
                 supported_content_types=None):
        """
        :param body_deserializers: mapping of content type to body
                                   deserializer, merged over the default
                                   JSON and XML deserializers
        :param headers_deserializer: object that deserializes request
                                     headers through controller-like actions
        :param supported_content_types: response content types to negotiate
                                        among (None uses Request defaults)
        """
        self.supported_content_types = supported_content_types

        self.body_deserializers = {
            'application/xml': XMLDeserializer(),
            'application/json': JSONDeserializer(),
        }
        self.body_deserializers.update(body_deserializers or {})

        self.headers_deserializer = headers_deserializer or \
            RequestHeadersDeserializer()

    def deserialize(self, request):
        """Extract necessary pieces of the request.

        :param request: Request object
        :returns tuple of expected controller action name, dictionary of
                 keyword arguments to pass to the controller, the expected
                 content type of the response

        """
        action_args = self.get_action_args(request.environ)
        action = action_args.pop('action', None)

        action_args.update(self.deserialize_headers(request, action))
        action_args.update(self.deserialize_body(request, action))

        accept = self.get_expected_content_type(request)

        return (action, action_args, accept)

    def deserialize_headers(self, request, action):
        """Extract controller kwargs from the request headers."""
        return self.headers_deserializer.deserialize(request, action)

    def deserialize_body(self, request, action):
        """Extract controller kwargs from the request body.

        :raises exception.InvalidContentType: if the declared Content-Type
            has no registered deserializer
        :raises exception.MalformedRequestBody: if the body cannot be parsed
        """
        # An empty body contributes no arguments; this is not an error.
        if not request.body:
            LOG.debug(_("Empty body provided in request"))
            return {}

        try:
            content_type = request.get_content_type()
        except exception.InvalidContentType:
            LOG.debug(_("Unrecognized Content-Type provided in request"))
            raise

        if content_type is None:
            LOG.debug(_("No Content-Type provided in request"))
            return {}

        try:
            deserializer = self.get_body_deserializer(content_type)
        except exception.InvalidContentType:
            LOG.debug(_("Unable to deserialize body as provided Content-Type"))
            raise

        return deserializer.deserialize(request.body, action)

    def get_body_deserializer(self, content_type):
        """Look up the body deserializer registered for content_type.

        :raises exception.InvalidContentType: if none is registered
        """
        try:
            return self.body_deserializers[content_type]
        except (KeyError, TypeError):
            raise exception.InvalidContentType(content_type=content_type)

    def get_expected_content_type(self, request):
        """Negotiate the response content type for this request."""
        return request.best_match_content_type(self.supported_content_types)

    def get_action_args(self, request_environment):
        """Parse dictionary created by routes library."""
        try:
            args = request_environment['wsgiorg.routing_args'][1].copy()
        except Exception:
            return {}

        # Routing metadata is not a controller keyword argument.
        args.pop('controller', None)
        args.pop('format', None)

        return args
|
||||||
|
|
||||||
|
|
||||||
|
class TextDeserializer(ActionDispatcher):
    """Default request body deserialization"""

    def deserialize(self, datastring, action='default'):
        """Parse datastring using the handler registered for action."""
        return self.dispatch(datastring, action=action)

    def default(self, datastring):
        # Unknown formats deserialize to no controller arguments.
        return {}
|
||||||
|
|
||||||
|
|
||||||
|
class JSONDeserializer(TextDeserializer):
    """Deserialize a JSON request body into controller arguments."""

    def default(self, datastring):
        return {'body': self._from_json(datastring)}

    def _from_json(self, datastring):
        """Parse datastring as JSON.

        :raises exception.MalformedRequestBody: if it is not valid JSON
        """
        try:
            return json.loads(datastring)
        except ValueError:
            raise exception.MalformedRequestBody(
                reason=_("cannot understand JSON"))
|
||||||
|
|
||||||
|
|
||||||
|
class XMLDeserializer(TextDeserializer):
    """Deserialize an XML request body into nested dicts/lists/strings."""

    def __init__(self, metadata=None):
        """
        :param metadata: information needed to deserialize xml into
                         a dictionary.
        """
        super(XMLDeserializer, self).__init__()
        self.metadata = metadata or {}

    def _from_xml(self, datastring):
        """Parse datastring as XML rooted at its document element.

        :raises exception.MalformedRequestBody: if it is not valid XML
        """
        plurals = set(self.metadata.get('plurals', {}))

        try:
            root = minidom.parseString(datastring).childNodes[0]
            return {root.nodeName: self._from_xml_node(root, plurals)}
        except expat.ExpatError:
            raise exception.MalformedRequestBody(
                reason=_("cannot understand XML"))

    def _from_xml_node(self, node, listnames):
        """Convert a minidom node to a simple Python type.

        :param listnames: list of XML node names whose subnodes should
                          be considered list items.

        """
        children = node.childNodes

        # A lone text child (nodeType 3) means the node holds a scalar.
        if len(children) == 1 and children[0].nodeType == 3:
            return children[0].nodeValue

        if node.nodeName in listnames:
            return [self._from_xml_node(child, listnames)
                    for child in children]

        result = dict()
        for attr_name in node.attributes.keys():
            result[attr_name] = node.attributes[attr_name].nodeValue
        for child in children:
            if child.nodeType != node.TEXT_NODE:
                result[child.nodeName] = self._from_xml_node(child,
                                                             listnames)
        return result

    def find_first_child_named(self, parent, name):
        """Search a nodes children for the first child with a given name"""
        for match in self.find_children_named(parent, name):
            return match
        return None

    def find_children_named(self, parent, name):
        """Return all of a nodes children who have the given name"""
        for child in parent.childNodes:
            if child.nodeName == name:
                yield child

    def extract_text(self, node):
        """Get the text field contained by the given node"""
        if len(node.childNodes) != 1:
            return ""
        only_child = node.childNodes[0]
        if only_child.nodeType != only_child.TEXT_NODE:
            return ""
        return only_child.nodeValue

    def default(self, datastring):
        return {'body': self._from_xml(datastring)}
|
7
openstack-common.conf
Normal file
7
openstack-common.conf
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
[DEFAULT]
|
||||||
|
|
||||||
|
# The list of modules to copy from openstack-common
|
||||||
|
modules=config,exception,extensions,utils,wsgi
|
||||||
|
|
||||||
|
# The base module to hold the copy of openstack.common
|
||||||
|
base=melange
|
2
setup.py
2
setup.py
@ -38,7 +38,7 @@ except ImportError:
|
|||||||
|
|
||||||
gettext.install('melange', unicode=1)
|
gettext.install('melange', unicode=1)
|
||||||
|
|
||||||
from openstack.common.utils import parse_mailmap, str_dict_replace
|
from melange.openstack.common.utils import parse_mailmap, str_dict_replace
|
||||||
from melange import version
|
from melange import version
|
||||||
|
|
||||||
|
|
||||||
|
@ -17,4 +17,3 @@ nosexcover
|
|||||||
webtest
|
webtest
|
||||||
factory_boy
|
factory_boy
|
||||||
httplib2
|
httplib2
|
||||||
-e git+https://github.com/jkoelker/openstack-common.git@melange_compat#egg=openstack.common
|
|
||||||
|
Loading…
Reference in New Issue
Block a user