1. Fixed startup of the main executable ./bin/windc-api
2. Added a router to handle the /datacenters/ and /services/ URLs
3. Added stubs for windc/core/api
4. Fixed the service start-up process
-------------------------------------------------
The service now starts and responds to:
curl http://localhost:8181/tenant_id/datacenters/
curl http://localhost:8181/tenant_id/datacenters/dc_id/services
curl http://localhost:8181/tenant_id/datacenters/dc_id/services/service_id
parent 7d7d480d79
commit 8994ffcab1
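For context, a minimal sketch of what the new v1 router referenced by this commit's paste config (windc.app_factory = windc.api.v1.router:API) could look like. The module path comes from api-paste.ini below; the controller modules, actions, and route names are assumptions for illustration, not the actual implementation:

# windc/api/v1/router.py -- hypothetical sketch; controller modules are assumed
import routes

from windc.common import wsgi
from windc.api.v1 import datacenters  # assumed stub controller
from windc.api.v1 import services     # assumed stub controller


class API(wsgi.Router):
    """Maps the tenant-scoped datacenter and service URLs to controllers."""

    def __init__(self, conf, **local_conf):
        mapper = routes.Mapper()

        dc = wsgi.Resource(datacenters.Controller(conf),
                           wsgi.JSONRequestDeserializer(),
                           wsgi.JSONResponseSerializer())
        mapper.connect("/{tenant_id}/datacenters/",
                       controller=dc, action="index")

        srv = wsgi.Resource(services.Controller(conf),
                            wsgi.JSONRequestDeserializer(),
                            wsgi.JSONResponseSerializer())
        mapper.connect("/{tenant_id}/datacenters/{datacenter_id}/services",
                       controller=srv, action="index")
        mapper.connect("/{tenant_id}/datacenters/{datacenter_id}/services/{service_id}",
                       controller=srv, action="show")

        super(API, self).__init__(mapper)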
@@ -23,7 +23,7 @@ Windows DataCenter API Server
|
||||
import optparse
|
||||
import os
|
||||
import sys
|
||||
|
||||
import gettext
|
||||
# If ../windc/__init__.py exists, add ../ to Python search path, so that
|
||||
# it will override what happens to be installed in /usr/(local/)lib/python...
|
||||
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
|
||||
@@ -31,34 +31,62 @@ possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
|
||||
os.pardir))
|
||||
if os.path.exists(os.path.join(possible_topdir, 'windc', '__init__.py')):
|
||||
sys.path.insert(0, possible_topdir)
|
||||
|
||||
from openstack.common import config
|
||||
from openstack.common import wsgi
|
||||
from windc import version
|
||||
sys.path.insert(0, '.')
|
||||
|
||||
|
||||
def create_options(parser):
|
||||
"""
|
||||
Sets up the CLI and config-file options that may be
|
||||
parsed and program commands.
|
||||
from windc.common import cfg
|
||||
from windc.common import config
|
||||
from windc.common import wsgi
|
||||
from windc.db import session
|
||||
|
||||
:param parser: The option parser
|
||||
"""
|
||||
config.add_common_options(parser)
|
||||
config.add_log_options(parser)
|
||||
gettext.install('balancer', unicode=1)
|
||||
|
||||
dbsync_opt = cfg.BoolOpt('dbsync', default=False,
|
||||
help='Perform database schema synchronization')
|
||||
|
||||
if __name__ == '__main__':
|
||||
oparser = optparse.OptionParser(version='%%prog %s'
|
||||
% version.version_string())
|
||||
create_options(oparser)
|
||||
(options, args) = config.parse_options(oparser)
|
||||
|
||||
try:
|
||||
conf, app = config.load_paste_app('windc-api', options, args)
|
||||
conf = config.WindcConfigOpts()
|
||||
conf.register_cli_opt(dbsync_opt)
|
||||
conf()
|
||||
|
||||
server = wsgi.Server()
|
||||
server.start(app, int(conf['bind_port']), conf['bind_host'])
|
||||
server.wait()
|
||||
if conf.dbsync:
|
||||
config.setup_logging(conf)
|
||||
session.sync(conf)
|
||||
else:
|
||||
app = config.load_paste_app(conf)
|
||||
server = wsgi.Server()
|
||||
server.start(app, conf, default_port=8181)
|
||||
server.wait()
|
||||
except RuntimeError, e:
|
||||
sys.exit("ERROR: %s" % e)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# def create_options(parser):
|
||||
# """
|
||||
# Sets up the CLI and config-file options that may be
|
||||
# parsed and program commands.
|
||||
#
|
||||
# :param parser: The option parser
|
||||
# """
|
||||
# config.add_common_options(parser)
|
||||
# config.add_log_options(parser)
|
||||
#
|
||||
#
|
||||
# if __name__ == '__main__':
|
||||
# oparser = optparse.OptionParser(version='%%prog %s'
|
||||
# % version.version_string())
|
||||
# create_options(oparser)
|
||||
# (options, args) = config.parse_options(oparser)
|
||||
#
|
||||
# # try:
|
||||
# conf, app = config.load_paste_app('windc', options, args)
|
||||
#
|
||||
# server = wsgi.Server()
|
||||
# server.start(app, int(conf['bind_port']), conf['bind_host'])
|
||||
# server.wait()
|
||||
# # except RuntimeError, e:
|
||||
# # sys.exit("ERROR: %s" % e)
|
||||
|
26
windc/etc/windc-api-paste.ini
Normal file
@@ -0,0 +1,26 @@
|
||||
[pipeline:windc-api]
|
||||
pipeline = apiv1app
|
||||
# NOTE: use the following pipeline for keystone
|
||||
#pipeline = authtoken context apiv1app
|
||||
|
||||
[app:apiv1app]
|
||||
paste.app_factory = windc.common.wsgi:app_factory
|
||||
windc.app_factory = windc.api.v1.router:API
|
||||
|
||||
[filter:context]
|
||||
paste.filter_factory = windc.common.wsgi:filter_factory
|
||||
windc.filter_factory = windc.common.context:ContextMiddleware
|
||||
|
||||
[filter:authtoken]
|
||||
paste.filter_factory = keystone.middleware.auth_token:filter_factory
|
||||
auth_host = 172.18.67.57
|
||||
auth_port = 35357
|
||||
auth_protocol = http
|
||||
auth_uri = http://172.18.67.57:5000/v2.0/
|
||||
admin_tenant_name = service
|
||||
admin_user = windc
|
||||
admin_password = 000
|
||||
|
||||
[filter:auth-context]
|
||||
paste.filter_factory = windc.common.wsgi:filter_factory
|
||||
windc.filter_factory = keystone.middleware.balancer_auth_token:KeystoneContextMiddleware
|
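The [app:apiv1app] and [filter:context] entries above delegate through windc.common.wsgi. A plausible sketch of that factory shim, assuming it simply imports whatever class the windc.app_factory / windc.filter_factory options name (that file is not part of the diff shown here, so this is illustrative only):

# windc/common/wsgi.py -- hypothetical factory shim, assumed from the paste entries above
from openstack.common import utils


def app_factory(global_conf, **local_conf):
    """paste.app_factory entry point: build the app named by windc.app_factory."""
    conf = global_conf.copy()
    conf.update(local_conf)
    cls = utils.import_class(conf['windc.app_factory'])
    return cls(conf)


def filter_factory(global_conf, **local_conf):
    """paste.filter_factory entry point: wrap the app in the configured middleware."""
    conf = global_conf.copy()
    conf.update(local_conf)
    cls = utils.import_class(conf['windc.filter_factory'])

    def filter(app):
        return cls(app, conf)

    return filter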
@@ -3,7 +3,7 @@
|
||||
verbose = True
|
||||
|
||||
# Show debugging output in logs (sets DEBUG log level output)
|
||||
debug = False
|
||||
debug = True
|
||||
|
||||
# Address to bind the server to
|
||||
bind_host = 0.0.0.0
|
||||
@@ -13,7 +13,7 @@ bind_port = 8082
|
||||
|
||||
# Log to this file. Make sure the user running skeleton-api has
|
||||
# permissions to write to this file!
|
||||
log_file = api.log
|
||||
log_file = /tmp/api.log
|
||||
|
||||
[pipeline:windc-api]
|
||||
pipeline = versionnegotiation context apiv1app
|
||||
|
26
windc/etc/windc/api-paste.ini
Normal file
@@ -0,0 +1,26 @@
|
||||
[pipeline:windc-api]
|
||||
pipeline = apiv1app
|
||||
# NOTE: use the following pipeline for keystone
|
||||
#pipeline = authtoken context apiv1app
|
||||
|
||||
[app:apiv1app]
|
||||
paste.app_factory = windc.common.wsgi:app_factory
|
||||
windc.app_factory = windc.api.v1.router:API
|
||||
|
||||
[filter:context]
|
||||
paste.filter_factory = windc.common.wsgi:filter_factory
|
||||
windc.filter_factory = windc.common.context:ContextMiddleware
|
||||
|
||||
[filter:authtoken]
|
||||
paste.filter_factory = keystone.middleware.auth_token:filter_factory
|
||||
auth_host = 172.18.67.57
|
||||
auth_port = 35357
|
||||
auth_protocol = http
|
||||
auth_uri = http://172.18.67.57:5000/v2.0/
|
||||
admin_tenant_name = service
|
||||
admin_user = windc
|
||||
admin_password = 000
|
||||
|
||||
[filter:auth-context]
|
||||
paste.filter_factory = windc.common.wsgi:filter_factory
|
||||
windc.filter_factory = keystone.middleware.balancer_auth_token:KeystoneContextMiddleware
|
34
windc/etc/windc/windc.conf
Normal file
@@ -0,0 +1,34 @@
|
||||
[DEFAULT]
|
||||
# Show more verbose log output (sets INFO log level output)
|
||||
verbose = True
|
||||
|
||||
# Show debugging output in logs (sets DEBUG log level output)
|
||||
debug = True
|
||||
|
||||
# Address to bind the server to
|
||||
bind_host = 0.0.0.0
|
||||
|
||||
# Port the bind the server to
|
||||
bind_port = 8082
|
||||
|
||||
# Log to this file. Make sure the user running skeleton-api has
|
||||
# permissions to write to this file!
|
||||
log_file = /tmp/api.log
|
||||
|
||||
[pipeline:windc-api]
|
||||
pipeline = versionnegotiation context apiv1app
|
||||
|
||||
[pipeline:versions]
|
||||
pipeline = versionsapp
|
||||
|
||||
[app:versionsapp]
|
||||
paste.app_factory = windc.api.versions:app_factory
|
||||
|
||||
[app:apiv1app]
|
||||
paste.app_factory = windc.api.v1:app_factory
|
||||
|
||||
[filter:versionnegotiation]
|
||||
paste.filter_factory = windc.api.middleware.version_negotiation:filter_factory
|
||||
|
||||
[filter:context]
|
||||
paste.filter_factory = openstack.common.middleware.context:filter_factory
|
@@ -15,5 +15,5 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
# TODO(jaypipes) Code in this module is intended to be ported to the eventual
|
||||
# TODO(jaypipes) Code in this module is intended to be ported to the eventual
|
||||
# openstack-common library
|
||||
|
@@ -19,11 +19,13 @@
|
||||
Routines for configuring Openstack Projects
|
||||
"""
|
||||
|
||||
import ConfigParser
|
||||
import logging
|
||||
import logging.config
|
||||
import logging.handlers
|
||||
import optparse
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
from paste import deploy
|
||||
@@ -176,13 +178,6 @@ def setup_logging(options, conf):
|
||||
root_logger.addHandler(handler)
|
||||
|
||||
|
||||
def fix_path(path):
|
||||
"""
|
||||
Return the full absolute path
|
||||
"""
|
||||
return os.path.abspath(os.path.expanduser(path))
|
||||
|
||||
|
||||
def find_config_file(app_name, options, args, config_dir=None):
|
||||
"""
|
||||
Return the first config file found for an application.
|
||||
@@ -201,6 +196,7 @@ def find_config_file(app_name, options, args, config_dir=None):
|
||||
"""
|
||||
config_dir = config_dir or app_name
|
||||
|
||||
fix_path = lambda p: os.path.abspath(os.path.expanduser(p))
|
||||
if options.get('config_file'):
|
||||
if os.path.exists(options['config_file']):
|
||||
return fix_path(options['config_file'])
|
||||
@@ -251,6 +247,7 @@ def load_paste_config(app_name, options, args, config_dir=None):
|
||||
raise RuntimeError("Unable to locate any configuration file. "
|
||||
"Cannot load application %s" % app_name)
|
||||
try:
|
||||
app = wsgi.paste_deploy_app(conf_file, app_name, conf)
|
||||
conf = deploy.appconfig("config:%s" % conf_file, name=app_name)
|
||||
return conf_file, conf
|
||||
except Exception, e:
|
||||
|
@@ -139,9 +139,5 @@ class OpenstackException(Exception):
|
||||
return self._error_string
|
||||
|
||||
|
||||
class MalformedRequestBody(OpenstackException):
|
||||
message = "Malformed message body: %(reason)s"
|
||||
|
||||
|
||||
class InvalidContentType(OpenstackException):
|
||||
message = "Invalid content type %(content_type)s"
|
||||
|
@@ -20,21 +20,12 @@ System-level utilities and helper functions.
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import shlex
|
||||
import sys
|
||||
import types
|
||||
|
||||
from eventlet import greenthread
|
||||
from eventlet.green import subprocess
|
||||
|
||||
from openstack.common import exception
|
||||
|
||||
|
||||
TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def int_from_bool_as_string(subject):
|
||||
@@ -60,89 +51,14 @@ def bool_from_string(subject):
|
||||
|
||||
Useful for JSON-decoded stuff and config file parsing
|
||||
"""
|
||||
if isinstance(subject, types.BooleanType):
|
||||
if type(subject) == type(bool):
|
||||
return subject
|
||||
if isinstance(subject, types.StringTypes):
|
||||
if hasattr(subject, 'startswith'): # str or unicode...
|
||||
if subject.strip().lower() in ('true', 'on', '1'):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def execute(*cmd, **kwargs):
|
||||
"""
|
||||
Helper method to execute command with optional retry.
|
||||
|
||||
:cmd Passed to subprocess.Popen.
|
||||
:process_input Send to opened process.
|
||||
:check_exit_code Defaults to 0. Raise exception.ProcessExecutionError
|
||||
unless program exits with this code.
|
||||
:delay_on_retry True | False. Defaults to True. If set to True, wait a
|
||||
short amount of time before retrying.
|
||||
:attempts How many times to retry cmd.
|
||||
:run_as_root True | False. Defaults to False. If set to True,
|
||||
the command is prefixed by the command specified
|
||||
in the root_helper kwarg.
|
||||
:root_helper command to prefix all cmd's with
|
||||
|
||||
:raises exception.Error on receiving unknown arguments
|
||||
:raises exception.ProcessExecutionError
|
||||
"""
|
||||
|
||||
process_input = kwargs.pop('process_input', None)
|
||||
check_exit_code = kwargs.pop('check_exit_code', 0)
|
||||
delay_on_retry = kwargs.pop('delay_on_retry', True)
|
||||
attempts = kwargs.pop('attempts', 1)
|
||||
run_as_root = kwargs.pop('run_as_root', False)
|
||||
root_helper = kwargs.pop('root_helper', '')
|
||||
if len(kwargs):
|
||||
raise exception.Error(_('Got unknown keyword args '
|
||||
'to utils.execute: %r') % kwargs)
|
||||
if run_as_root:
|
||||
cmd = shlex.split(root_helper) + list(cmd)
|
||||
cmd = map(str, cmd)
|
||||
|
||||
while attempts > 0:
|
||||
attempts -= 1
|
||||
try:
|
||||
LOG.debug(_('Running cmd (subprocess): %s'), ' '.join(cmd))
|
||||
_PIPE = subprocess.PIPE # pylint: disable=E1101
|
||||
obj = subprocess.Popen(cmd,
|
||||
stdin=_PIPE,
|
||||
stdout=_PIPE,
|
||||
stderr=_PIPE,
|
||||
close_fds=True)
|
||||
result = None
|
||||
if process_input is not None:
|
||||
result = obj.communicate(process_input)
|
||||
else:
|
||||
result = obj.communicate()
|
||||
obj.stdin.close() # pylint: disable=E1101
|
||||
_returncode = obj.returncode # pylint: disable=E1101
|
||||
if _returncode:
|
||||
LOG.debug(_('Result was %s') % _returncode)
|
||||
if type(check_exit_code) == types.IntType \
|
||||
and _returncode != check_exit_code:
|
||||
(stdout, stderr) = result
|
||||
raise exception.ProcessExecutionError(
|
||||
exit_code=_returncode,
|
||||
stdout=stdout,
|
||||
stderr=stderr,
|
||||
cmd=' '.join(cmd))
|
||||
return result
|
||||
except exception.ProcessExecutionError:
|
||||
if not attempts:
|
||||
raise
|
||||
else:
|
||||
LOG.debug(_('%r failed. Retrying.'), cmd)
|
||||
if delay_on_retry:
|
||||
greenthread.sleep(random.randint(20, 200) / 100.0)
|
||||
finally:
|
||||
# NOTE(termie): this appears to be necessary to let the subprocess
|
||||
# call clean something up in between calls, without
|
||||
# it two execute calls in a row hangs the second one
|
||||
greenthread.sleep(0)
|
||||
|
||||
|
||||
def import_class(import_str):
|
||||
"""Returns a class from a string including module and class"""
|
||||
mod_str, _sep, class_str = import_str.rpartition('.')
|
||||
@@ -159,7 +75,8 @@ def import_object(import_str):
|
||||
__import__(import_str)
|
||||
return sys.modules[import_str]
|
||||
except ImportError:
|
||||
return import_class(import_str)
|
||||
cls = import_class(import_str)
|
||||
return cls()
|
||||
|
||||
|
||||
def isotime(at=None):
|
||||
@@ -170,41 +87,3 @@
|
||||
|
||||
def parse_isotime(timestr):
|
||||
return datetime.datetime.strptime(timestr, TIME_FORMAT)
|
||||
|
||||
|
||||
def parse_mailmap(mailmap='.mailmap'):
|
||||
mapping = {}
|
||||
if os.path.exists(mailmap):
|
||||
fp = open(mailmap, 'r')
|
||||
for l in fp:
|
||||
l = l.strip()
|
||||
if not l.startswith('#') and ' ' in l:
|
||||
canonical_email, alias = l.split(' ')
|
||||
mapping[alias] = canonical_email
|
||||
return mapping
|
||||
|
||||
|
||||
def str_dict_replace(s, mapping):
|
||||
for s1, s2 in mapping.iteritems():
|
||||
s = s.replace(s1, s2)
|
||||
return s
|
||||
|
||||
|
||||
def utcnow():
|
||||
"""Overridable version of utils.utcnow."""
|
||||
if utcnow.override_time:
|
||||
return utcnow.override_time
|
||||
return datetime.datetime.utcnow()
|
||||
|
||||
|
||||
utcnow.override_time = None
|
||||
|
||||
|
||||
def set_time_override(override_time=datetime.datetime.utcnow()):
|
||||
"""Override utils.utcnow to return a constant time."""
|
||||
utcnow.override_time = override_time
|
||||
|
||||
|
||||
def clear_time_override():
|
||||
"""Remove the overridden time."""
|
||||
utcnow.override_time = None
|
||||
|
@@ -15,28 +15,27 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Utility methods for working with WSGI servers."""
|
||||
|
||||
import datetime
|
||||
import eventlet
|
||||
import eventlet.wsgi
|
||||
|
||||
eventlet.patcher.monkey_patch(all=False, socket=True)
|
||||
"""
|
||||
Utility methods for working with WSGI servers
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
import datetime
|
||||
import urllib2
|
||||
|
||||
import eventlet
|
||||
import eventlet.wsgi
|
||||
eventlet.patcher.monkey_patch(all=False, socket=True)
|
||||
import routes
|
||||
import routes.middleware
|
||||
import webob.dec
|
||||
import webob.exc
|
||||
from xml.dom import minidom
|
||||
from xml.parsers import expat
|
||||
|
||||
from openstack.common import exception
|
||||
|
||||
|
||||
LOG = logging.getLogger('wsgi')
|
||||
logger = logging.getLogger('openstack.common.wsgi')
|
||||
|
||||
|
||||
class WritableLogger(object):
|
||||
@@ -210,47 +209,76 @@ class Router(object):
|
||||
|
||||
|
||||
class Request(webob.Request):
|
||||
|
||||
"""Add some Openstack API-specific logic to the base webob.Request."""
|
||||
|
||||
default_request_content_types = ('application/json', 'application/xml')
|
||||
default_accept_types = ('application/json', 'application/xml')
|
||||
default_accept_type = 'application/json'
|
||||
def best_match_content_type(self):
|
||||
"""Determine the requested response content-type."""
|
||||
supported = ('application/json',)
|
||||
bm = self.accept.best_match(supported)
|
||||
return bm or 'application/json'
|
||||
|
||||
def best_match_content_type(self, supported_content_types=None):
|
||||
"""Determine the requested response content-type.
|
||||
|
||||
Based on the query extension then the Accept header.
|
||||
Defaults to default_accept_type if we don't find a preference
|
||||
|
||||
"""
|
||||
supported_content_types = (supported_content_types or
|
||||
self.default_accept_types)
|
||||
|
||||
parts = self.path.rsplit('.', 1)
|
||||
if len(parts) > 1:
|
||||
ctype = 'application/{0}'.format(parts[1])
|
||||
if ctype in supported_content_types:
|
||||
return ctype
|
||||
|
||||
bm = self.accept.best_match(supported_content_types)
|
||||
return bm or self.default_accept_type
|
||||
|
||||
def get_content_type(self, allowed_content_types=None):
|
||||
"""Determine content type of the request body.
|
||||
|
||||
Does not do any body introspection, only checks header
|
||||
|
||||
"""
|
||||
def get_content_type(self, allowed_content_types):
|
||||
"""Determine content type of the request body."""
|
||||
if not "Content-Type" in self.headers:
|
||||
return None
|
||||
raise exception.InvalidContentType(content_type=None)
|
||||
|
||||
content_type = self.content_type
|
||||
allowed_content_types = (allowed_content_types or
|
||||
self.default_request_content_types)
|
||||
|
||||
if content_type not in allowed_content_types:
|
||||
raise exception.InvalidContentType(content_type=content_type)
|
||||
return content_type
|
||||
else:
|
||||
return content_type
|
||||
|
||||
|
||||
class JSONRequestDeserializer(object):
|
||||
def has_body(self, request):
|
||||
"""
|
||||
Returns whether a Webob.Request object will possess an entity body.
|
||||
|
||||
:param request: Webob.Request object
|
||||
"""
|
||||
if 'transfer-encoding' in request.headers:
|
||||
return True
|
||||
elif request.content_length > 0:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def from_json(self, datastring):
|
||||
return json.loads(datastring)
|
||||
|
||||
def default(self, request):
|
||||
msg = "Request deserialization: %s" % request
|
||||
logger.debug(msg)
|
||||
if self.has_body(request):
|
||||
logger.debug("Deserialization: request has body")
|
||||
if request.headers['Content-Type'] == 'application/x-www-form-urlencoded':
|
||||
body = urllib2.unquote(request.body)
|
||||
else:
|
||||
body = request.body
|
||||
msg = "Request body: %s" % body
|
||||
logger.debug(msg)
|
||||
return {'body': self.from_json(body)}
|
||||
else:
|
||||
logger.debug("Deserialization: request has NOT body")
|
||||
return {}
|
||||
|
||||
|
||||
class JSONResponseSerializer(object):
|
||||
|
||||
def to_json(self, data):
|
||||
def sanitizer(obj):
|
||||
if isinstance(obj, datetime.datetime):
|
||||
return obj.isoformat()
|
||||
return obj
|
||||
|
||||
return json.dumps(data, default=sanitizer)
|
||||
|
||||
def default(self, response, result):
|
||||
logger.debug("JSONSerializer default method called.")
|
||||
response.headers['Content-Type'] = 'application/json'
|
||||
response.body = self.to_json(result)
|
||||
|
||||
|
||||
class Resource(object):
|
||||
@@ -270,7 +298,7 @@ class Resource(object):
|
||||
may raise a webob.exc exception or return a dict, which will be
|
||||
serialized by requested content type.
|
||||
"""
|
||||
def __init__(self, controller, deserializer=None, serializer=None):
|
||||
def __init__(self, controller, deserializer, serializer):
|
||||
"""
|
||||
:param controller: object that implement methods created by routes lib
|
||||
:param deserializer: object that supports webob request deserialization
|
||||
@@ -279,34 +307,61 @@
|
||||
through controller-like actions
|
||||
"""
|
||||
self.controller = controller
|
||||
self.serializer = serializer or ResponseSerializer()
|
||||
self.deserializer = deserializer or RequestDeserializer()
|
||||
self.serializer = serializer
|
||||
self.deserializer = deserializer
|
||||
|
||||
# NOTE(yorik-sar): ugly fix for Routes misbehaviour
|
||||
def __add__(self, other):
|
||||
return other
|
||||
|
||||
@webob.dec.wsgify(RequestClass=Request)
|
||||
def __call__(self, request):
|
||||
"""WSGI method that controls (de)serialization and method dispatch."""
|
||||
logger.debug("Resource __call__ is invoked")
|
||||
action_args = self.get_action_args(request.environ)
|
||||
action = action_args.pop('action', None)
|
||||
|
||||
try:
|
||||
action, action_args, accept = self.deserialize_request(request)
|
||||
except exception.InvalidContentType:
|
||||
msg = _("Unsupported Content-Type")
|
||||
return webob.exc.HTTPUnsupportedMediaType(explanation=msg)
|
||||
except exception.MalformedRequestBody:
|
||||
msg = _("Malformed request body")
|
||||
return webob.exc.HTTPBadRequest(explanation=msg)
|
||||
|
||||
deserialized_params = self.deserialize_request(action, request)
|
||||
action_args.update(deserialized_params)
|
||||
action_result = self.execute_action(action, request, **action_args)
|
||||
|
||||
try:
|
||||
return self.serialize_response(action, action_result, accept)
|
||||
return self.serialize_response(action, action_result, request)
|
||||
|
||||
# return unserializable result (typically a webob exc)
|
||||
except Exception:
|
||||
return action_result
|
||||
|
||||
def deserialize_request(self, request):
|
||||
return self.deserializer.deserialize(request)
|
||||
def deserialize_request(self, action, request):
|
||||
return self.dispatch(self.deserializer, action, request)
|
||||
|
||||
def serialize_response(self, action, action_result, accept):
|
||||
return self.serializer.serialize(action_result, accept, action)
|
||||
def serialize_response(self, action, action_result, request):
|
||||
msg = "Called serialize response Action:%s Result:%s Request:%s" % (action, action_result, request)
|
||||
logger.debug(msg)
|
||||
|
||||
try:
|
||||
if not self.controller:
|
||||
meth = getattr(self, action)
|
||||
else:
|
||||
meth = getattr(self.controller, action)
|
||||
except AttributeError:
|
||||
raise
|
||||
|
||||
code = 200
|
||||
if hasattr(meth, 'wsgi_code'):
|
||||
code = meth.wsgi_code
|
||||
|
||||
response = webob.Response()
|
||||
response.status = code
|
||||
logger.debug("serializer: dispatching call")
|
||||
#TODO check why it fails with original openstack code
|
||||
#self.dispatch(self.serializer, action, response,
|
||||
# action_result, request)
|
||||
if action_result is not None:
|
||||
self.serializer.default(response, action_result)
|
||||
msg = "Response: %s" % response
|
||||
logger.debug(msg)
|
||||
return response
|
||||
|
||||
def execute_action(self, action, request, **action_args):
|
||||
return self.dispatch(self.controller, action, request, **action_args)
|
||||
@@ -338,380 +393,3 @@ class Resource(object):
|
||||
pass
|
||||
|
||||
return args
|
||||
|
||||
|
||||
class ActionDispatcher(object):
|
||||
"""Maps method name to local methods through action name."""
|
||||
|
||||
def dispatch(self, *args, **kwargs):
|
||||
"""Find and call local method."""
|
||||
action = kwargs.pop('action', 'default')
|
||||
action_method = getattr(self, str(action), self.default)
|
||||
return action_method(*args, **kwargs)
|
||||
|
||||
def default(self, data):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class DictSerializer(ActionDispatcher):
|
||||
"""Default request body serialization"""
|
||||
|
||||
def serialize(self, data, action='default'):
|
||||
return self.dispatch(data, action=action)
|
||||
|
||||
def default(self, data):
|
||||
return ""
|
||||
|
||||
|
||||
class JSONDictSerializer(DictSerializer):
|
||||
"""Default JSON request body serialization"""
|
||||
|
||||
def default(self, data):
|
||||
def sanitizer(obj):
|
||||
if isinstance(obj, datetime.datetime):
|
||||
_dtime = obj - datetime.timedelta(microseconds=obj.microsecond)
|
||||
return _dtime.isoformat()
|
||||
return obj
|
||||
return json.dumps(data, default=sanitizer)
|
||||
|
||||
|
||||
class XMLDictSerializer(DictSerializer):
|
||||
|
||||
def __init__(self, metadata=None, xmlns=None):
|
||||
"""
|
||||
:param metadata: information needed to deserialize xml into
|
||||
a dictionary.
|
||||
:param xmlns: XML namespace to include with serialized xml
|
||||
"""
|
||||
super(XMLDictSerializer, self).__init__()
|
||||
self.metadata = metadata or {}
|
||||
self.xmlns = xmlns
|
||||
|
||||
def default(self, data):
|
||||
# We expect data to contain a single key which is the XML root.
|
||||
root_key = data.keys()[0]
|
||||
doc = minidom.Document()
|
||||
node = self._to_xml_node(doc, self.metadata, root_key, data[root_key])
|
||||
|
||||
return self.to_xml_string(node)
|
||||
|
||||
def to_xml_string(self, node, has_atom=False):
|
||||
self._add_xmlns(node, has_atom)
|
||||
return node.toprettyxml(indent=' ', encoding='UTF-8')
|
||||
|
||||
#NOTE (ameade): the has_atom should be removed after all of the
|
||||
# xml serializers and view builders have been updated to the current
|
||||
# spec that required all responses include the xmlns:atom, the has_atom
|
||||
# flag is to prevent current tests from breaking
|
||||
def _add_xmlns(self, node, has_atom=False):
|
||||
if self.xmlns is not None:
|
||||
node.setAttribute('xmlns', self.xmlns)
|
||||
if has_atom:
|
||||
node.setAttribute('xmlns:atom', "http://www.w3.org/2005/Atom")
|
||||
|
||||
def _to_xml_node(self, doc, metadata, nodename, data):
|
||||
"""Recursive method to convert data members to XML nodes."""
|
||||
result = doc.createElement(nodename)
|
||||
|
||||
# Set the xml namespace if one is specified
|
||||
# TODO(justinsb): We could also use prefixes on the keys
|
||||
xmlns = metadata.get('xmlns', None)
|
||||
if xmlns:
|
||||
result.setAttribute('xmlns', xmlns)
|
||||
|
||||
#TODO(bcwaldon): accomplish this without a type-check
|
||||
if type(data) is list:
|
||||
collections = metadata.get('list_collections', {})
|
||||
if nodename in collections:
|
||||
metadata = collections[nodename]
|
||||
for item in data:
|
||||
node = doc.createElement(metadata['item_name'])
|
||||
node.setAttribute(metadata['item_key'], str(item))
|
||||
result.appendChild(node)
|
||||
return result
|
||||
singular = metadata.get('plurals', {}).get(nodename, None)
|
||||
if singular is None:
|
||||
if nodename.endswith('s'):
|
||||
singular = nodename[:-1]
|
||||
else:
|
||||
singular = 'item'
|
||||
for item in data:
|
||||
node = self._to_xml_node(doc, metadata, singular, item)
|
||||
result.appendChild(node)
|
||||
#TODO(bcwaldon): accomplish this without a type-check
|
||||
elif type(data) is dict:
|
||||
collections = metadata.get('dict_collections', {})
|
||||
if nodename in collections:
|
||||
metadata = collections[nodename]
|
||||
for k, v in data.items():
|
||||
node = doc.createElement(metadata['item_name'])
|
||||
node.setAttribute(metadata['item_key'], str(k))
|
||||
text = doc.createTextNode(str(v))
|
||||
node.appendChild(text)
|
||||
result.appendChild(node)
|
||||
return result
|
||||
attrs = metadata.get('attributes', {}).get(nodename, {})
|
||||
for k, v in data.items():
|
||||
if k in attrs:
|
||||
result.setAttribute(k, str(v))
|
||||
else:
|
||||
node = self._to_xml_node(doc, metadata, k, v)
|
||||
result.appendChild(node)
|
||||
else:
|
||||
# Type is atom
|
||||
node = doc.createTextNode(str(data))
|
||||
result.appendChild(node)
|
||||
return result
|
||||
|
||||
def _create_link_nodes(self, xml_doc, links):
|
||||
link_nodes = []
|
||||
for link in links:
|
||||
link_node = xml_doc.createElement('atom:link')
|
||||
link_node.setAttribute('rel', link['rel'])
|
||||
link_node.setAttribute('href', link['href'])
|
||||
if 'type' in link:
|
||||
link_node.setAttribute('type', link['type'])
|
||||
link_nodes.append(link_node)
|
||||
return link_nodes
|
||||
|
||||
|
||||
class ResponseHeadersSerializer(ActionDispatcher):
|
||||
"""Default response headers serialization"""
|
||||
|
||||
def serialize(self, response, data, action):
|
||||
self.dispatch(response, data, action=action)
|
||||
|
||||
def default(self, response, data):
|
||||
response.status_int = 200
|
||||
|
||||
|
||||
class ResponseSerializer(object):
|
||||
"""Encode the necessary pieces into a response object"""
|
||||
|
||||
def __init__(self, body_serializers=None, headers_serializer=None):
|
||||
self.body_serializers = {
|
||||
'application/xml': XMLDictSerializer(),
|
||||
'application/json': JSONDictSerializer(),
|
||||
}
|
||||
self.body_serializers.update(body_serializers or {})
|
||||
|
||||
self.headers_serializer = headers_serializer or \
|
||||
ResponseHeadersSerializer()
|
||||
|
||||
def serialize(self, response_data, content_type, action='default'):
|
||||
"""Serialize a dict into a string and wrap in a wsgi.Request object.
|
||||
|
||||
:param response_data: dict produced by the Controller
|
||||
:param content_type: expected mimetype of serialized response body
|
||||
|
||||
"""
|
||||
response = webob.Response()
|
||||
self.serialize_headers(response, response_data, action)
|
||||
self.serialize_body(response, response_data, content_type, action)
|
||||
return response
|
||||
|
||||
def serialize_headers(self, response, data, action):
|
||||
self.headers_serializer.serialize(response, data, action)
|
||||
|
||||
def serialize_body(self, response, data, content_type, action):
|
||||
response.headers['Content-Type'] = content_type
|
||||
if data is not None:
|
||||
serializer = self.get_body_serializer(content_type)
|
||||
response.body = serializer.serialize(data, action)
|
||||
|
||||
def get_body_serializer(self, content_type):
|
||||
try:
|
||||
return self.body_serializers[content_type]
|
||||
except (KeyError, TypeError):
|
||||
raise exception.InvalidContentType(content_type=content_type)
|
||||
|
||||
|
||||
class RequestHeadersDeserializer(ActionDispatcher):
|
||||
"""Default request headers deserializer"""
|
||||
|
||||
def deserialize(self, request, action):
|
||||
return self.dispatch(request, action=action)
|
||||
|
||||
def default(self, request):
|
||||
return {}
|
||||
|
||||
|
||||
class RequestDeserializer(object):
|
||||
"""Break up a Request object into more useful pieces."""
|
||||
|
||||
def __init__(self, body_deserializers=None, headers_deserializer=None,
|
||||
supported_content_types=None):
|
||||
|
||||
self.supported_content_types = supported_content_types
|
||||
|
||||
self.body_deserializers = {
|
||||
'application/xml': XMLDeserializer(),
|
||||
'application/json': JSONDeserializer(),
|
||||
}
|
||||
self.body_deserializers.update(body_deserializers or {})
|
||||
|
||||
self.headers_deserializer = headers_deserializer or \
|
||||
RequestHeadersDeserializer()
|
||||
|
||||
def deserialize(self, request):
|
||||
"""Extract necessary pieces of the request.
|
||||
|
||||
:param request: Request object
|
||||
:returns tuple of expected controller action name, dictionary of
|
||||
keyword arguments to pass to the controller, the expected
|
||||
content type of the response
|
||||
|
||||
"""
|
||||
action_args = self.get_action_args(request.environ)
|
||||
action = action_args.pop('action', None)
|
||||
|
||||
action_args.update(self.deserialize_headers(request, action))
|
||||
action_args.update(self.deserialize_body(request, action))
|
||||
|
||||
accept = self.get_expected_content_type(request)
|
||||
|
||||
return (action, action_args, accept)
|
||||
|
||||
def deserialize_headers(self, request, action):
|
||||
return self.headers_deserializer.deserialize(request, action)
|
||||
|
||||
def deserialize_body(self, request, action):
|
||||
if not len(request.body) > 0:
|
||||
LOG.debug(_("Empty body provided in request"))
|
||||
return {}
|
||||
|
||||
try:
|
||||
content_type = request.get_content_type()
|
||||
except exception.InvalidContentType:
|
||||
LOG.debug(_("Unrecognized Content-Type provided in request"))
|
||||
raise
|
||||
|
||||
if content_type is None:
|
||||
LOG.debug(_("No Content-Type provided in request"))
|
||||
return {}
|
||||
|
||||
try:
|
||||
deserializer = self.get_body_deserializer(content_type)
|
||||
except exception.InvalidContentType:
|
||||
LOG.debug(_("Unable to deserialize body as provided Content-Type"))
|
||||
raise
|
||||
|
||||
return deserializer.deserialize(request.body, action)
|
||||
|
||||
def get_body_deserializer(self, content_type):
|
||||
try:
|
||||
return self.body_deserializers[content_type]
|
||||
except (KeyError, TypeError):
|
||||
raise exception.InvalidContentType(content_type=content_type)
|
||||
|
||||
def get_expected_content_type(self, request):
|
||||
return request.best_match_content_type(self.supported_content_types)
|
||||
|
||||
def get_action_args(self, request_environment):
|
||||
"""Parse dictionary created by routes library."""
|
||||
try:
|
||||
args = request_environment['wsgiorg.routing_args'][1].copy()
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
try:
|
||||
del args['controller']
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
try:
|
||||
del args['format']
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
return args
|
||||
|
||||
|
||||
class TextDeserializer(ActionDispatcher):
|
||||
"""Default request body deserialization"""
|
||||
|
||||
def deserialize(self, datastring, action='default'):
|
||||
return self.dispatch(datastring, action=action)
|
||||
|
||||
def default(self, datastring):
|
||||
return {}
|
||||
|
||||
|
||||
class JSONDeserializer(TextDeserializer):
|
||||
|
||||
def _from_json(self, datastring):
|
||||
try:
|
||||
return json.loads(datastring)
|
||||
except ValueError:
|
||||
msg = _("cannot understand JSON")
|
||||
raise exception.MalformedRequestBody(reason=msg)
|
||||
|
||||
def default(self, datastring):
|
||||
return {'body': self._from_json(datastring)}
|
||||
|
||||
|
||||
class XMLDeserializer(TextDeserializer):
|
||||
|
||||
def __init__(self, metadata=None):
|
||||
"""
|
||||
:param metadata: information needed to deserialize xml into
|
||||
a dictionary.
|
||||
"""
|
||||
super(XMLDeserializer, self).__init__()
|
||||
self.metadata = metadata or {}
|
||||
|
||||
def _from_xml(self, datastring):
|
||||
plurals = set(self.metadata.get('plurals', {}))
|
||||
|
||||
try:
|
||||
node = minidom.parseString(datastring).childNodes[0]
|
||||
return {node.nodeName: self._from_xml_node(node, plurals)}
|
||||
except expat.ExpatError:
|
||||
msg = _("cannot understand XML")
|
||||
raise exception.MalformedRequestBody(reason=msg)
|
||||
|
||||
def _from_xml_node(self, node, listnames):
|
||||
"""Convert a minidom node to a simple Python type.
|
||||
|
||||
:param listnames: list of XML node names whose subnodes should
|
||||
be considered list items.
|
||||
|
||||
"""
|
||||
|
||||
if len(node.childNodes) == 1 and node.childNodes[0].nodeType == 3:
|
||||
return node.childNodes[0].nodeValue
|
||||
elif node.nodeName in listnames:
|
||||
return [self._from_xml_node(n, listnames) for n in node.childNodes]
|
||||
else:
|
||||
result = dict()
|
||||
for attr in node.attributes.keys():
|
||||
result[attr] = node.attributes[attr].nodeValue
|
||||
for child in node.childNodes:
|
||||
if child.nodeType != node.TEXT_NODE:
|
||||
result[child.nodeName] = self._from_xml_node(child,
|
||||
listnames)
|
||||
return result
|
||||
|
||||
def find_first_child_named(self, parent, name):
|
||||
"""Search a nodes children for the first child with a given name"""
|
||||
for node in parent.childNodes:
|
||||
if node.nodeName == name:
|
||||
return node
|
||||
return None
|
||||
|
||||
def find_children_named(self, parent, name):
|
||||
"""Return all of a nodes children who have the given name"""
|
||||
for node in parent.childNodes:
|
||||
if node.nodeName == name:
|
||||
yield node
|
||||
|
||||
def extract_text(self, node):
|
||||
"""Get the text field contained by the given node"""
|
||||
if len(node.childNodes) == 1:
|
||||
child = node.childNodes[0]
|
||||
if child.nodeType == child.TEXT_NODE:
|
||||
return child.nodeValue
|
||||
return ""
|
||||
|
||||
def default(self, datastring):
|
||||
return {'body': self._from_xml(datastring)}
|
||||
|
19
windc/openstack/oldcommon/__init__.py
Normal file
@@ -0,0 +1,19 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2011 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
# TODO(jaypipes) Code in this module is intended to be ported to the eventual
|
||||
# openstack-common library
|
337
windc/openstack/oldcommon/config.py
Normal file
@@ -0,0 +1,337 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2011 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Routines for configuring Openstack Projects
|
||||
"""
|
||||
|
||||
import logging
|
||||
import logging.config
|
||||
import logging.handlers
|
||||
import optparse
|
||||
import os
|
||||
import sys
|
||||
|
||||
from paste import deploy
|
||||
|
||||
DEFAULT_LOG_FORMAT = "%(asctime)s %(levelname)8s [%(name)s] %(message)s"
|
||||
DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
|
||||
|
||||
|
||||
def parse_options(parser, cli_args=None):
|
||||
"""
|
||||
Returns the parsed CLI options, command to run and its arguments, merged
|
||||
with any same-named options found in a configuration file.
|
||||
|
||||
The function returns a tuple of (options, args), where options is a
|
||||
mapping of option key/str(value) pairs, and args is the set of arguments
|
||||
(not options) supplied on the command-line.
|
||||
|
||||
The reason that the option values are returned as strings only is that
|
||||
ConfigParser and paste.deploy only accept string values...
|
||||
|
||||
:param parser: The option parser
|
||||
:param cli_args: (Optional) Set of arguments to process. If not present,
|
||||
sys.argv[1:] is used.
|
||||
:retval tuple of (options, args)
|
||||
"""
|
||||
|
||||
(options, args) = parser.parse_args(cli_args)
|
||||
|
||||
return (vars(options), args)
|
||||
|
||||
|
||||
def add_common_options(parser):
|
||||
"""
|
||||
Given a supplied optparse.OptionParser, adds an OptionGroup that
|
||||
represents all common configuration options.
|
||||
|
||||
:param parser: optparse.OptionParser
|
||||
"""
|
||||
help_text = "The following configuration options are common to "\
|
||||
"this app's programs."
|
||||
|
||||
group = optparse.OptionGroup(parser, "Common Options", help_text)
|
||||
group.add_option('-v', '--verbose', default=False, dest="verbose",
|
||||
action="store_true",
|
||||
help="Print more verbose output")
|
||||
group.add_option('-d', '--debug', default=False, dest="debug",
|
||||
action="store_true",
|
||||
help="Print debugging output")
|
||||
group.add_option('--config-file', default=None, metavar="PATH",
|
||||
help="Path to the config file to use. When not specified "
|
||||
"(the default), we generally look at the first "
|
||||
"argument specified to be a config file, and if "
|
||||
"that is also missing, we search standard "
|
||||
"directories for a config file.")
|
||||
parser.add_option_group(group)
|
||||
|
||||
|
||||
def add_log_options(parser):
|
||||
"""
|
||||
Given a supplied optparse.OptionParser, adds an OptionGroup that
|
||||
represents all the configuration options around logging.
|
||||
|
||||
:param parser: optparse.OptionParser
|
||||
"""
|
||||
help_text = "The following configuration options are specific to logging "\
|
||||
"functionality for this program."
|
||||
|
||||
group = optparse.OptionGroup(parser, "Logging Options", help_text)
|
||||
group.add_option('--log-config', default=None, metavar="PATH",
|
||||
help="If this option is specified, the logging "
|
||||
"configuration file specified is used and overrides "
|
||||
"any other logging options specified. Please see "
|
||||
"the Python logging module documentation for "
|
||||
"details on logging configuration files.")
|
||||
group.add_option('--log-date-format', metavar="FORMAT",
|
||||
default=DEFAULT_LOG_DATE_FORMAT,
|
||||
help="Format string for %(asctime)s in log records. "
|
||||
"Default: %default")
|
||||
group.add_option('--log-file', default=None, metavar="PATH",
|
||||
help="(Optional) Name of log file to output to. "
|
||||
"If not set, logging will go to stdout.")
|
||||
group.add_option("--log-dir", default=None,
|
||||
help="(Optional) The directory to keep log files in "
|
||||
"(will be prepended to --logfile)")
|
||||
group.add_option('--use-syslog', default=False, dest="use_syslog",
|
||||
action="store_true",
|
||||
help="Use syslog for logging.")
|
||||
parser.add_option_group(group)
|
||||
|
||||
|
||||
def setup_logging(options, conf):
|
||||
"""
|
||||
Sets up the logging options for a log with supplied name
|
||||
|
||||
:param options: Mapping of typed option key/values
|
||||
:param conf: Mapping of untyped key/values from config file
|
||||
"""
|
||||
|
||||
if options.get('log_config', None):
|
||||
# Use a logging configuration file for all settings...
|
||||
if os.path.exists(options['log_config']):
|
||||
logging.config.fileConfig(options['log_config'])
|
||||
return
|
||||
else:
|
||||
raise RuntimeError("Unable to locate specified logging "
|
||||
"config file: %s" % options['log_config'])
|
||||
|
||||
# If either the CLI option or the conf value
|
||||
# is True, we set to True
|
||||
debug = options.get('debug') or \
|
||||
get_option(conf, 'debug', type='bool', default=False)
|
||||
verbose = options.get('verbose') or \
|
||||
get_option(conf, 'verbose', type='bool', default=False)
|
||||
root_logger = logging.root
|
||||
if debug:
|
||||
root_logger.setLevel(logging.DEBUG)
|
||||
elif verbose:
|
||||
root_logger.setLevel(logging.INFO)
|
||||
else:
|
||||
root_logger.setLevel(logging.WARNING)
|
||||
|
||||
# Set log configuration from options...
|
||||
# Note that we use a hard-coded log format in the options
|
||||
# because of Paste.Deploy bug #379
|
||||
# http://trac.pythonpaste.org/pythonpaste/ticket/379
|
||||
log_format = options.get('log_format', DEFAULT_LOG_FORMAT)
|
||||
log_date_format = options.get('log_date_format', DEFAULT_LOG_DATE_FORMAT)
|
||||
formatter = logging.Formatter(log_format, log_date_format)
|
||||
|
||||
logfile = options.get('log_file')
|
||||
if not logfile:
|
||||
logfile = conf.get('log_file')
|
||||
|
||||
use_syslog = options.get('use_syslog') or \
|
||||
get_option(conf, 'use_syslog', type='bool', default=False)
|
||||
|
||||
if use_syslog:
|
||||
handler = logging.handlers.SysLogHandler(address='/dev/log')
|
||||
elif logfile:
|
||||
logdir = options.get('log_dir')
|
||||
if not logdir:
|
||||
logdir = conf.get('log_dir')
|
||||
if logdir:
|
||||
logfile = os.path.join(logdir, logfile)
|
||||
handler = logging.FileHandler(logfile)
|
||||
else:
|
||||
handler = logging.StreamHandler(sys.stdout)
|
||||
|
||||
handler.setFormatter(formatter)
|
||||
root_logger.addHandler(handler)
|
||||
|
||||
|
||||
def fix_path(path):
|
||||
"""
|
||||
Return the full absolute path
|
||||
"""
|
||||
return os.path.abspath(os.path.expanduser(path))
|
||||
|
||||
|
||||
def find_config_file(app_name, options, args, config_dir=None):
|
||||
"""
|
||||
Return the first config file found for an application.
|
||||
|
||||
We search for the paste config file in the following order:
|
||||
* If --config-file option is used, use that
|
||||
* If args[0] is a file, use that
|
||||
* Search for $app.conf in standard directories:
|
||||
* .
|
||||
* ~.config_dir/
|
||||
* ~
|
||||
* /etc/config_dir
|
||||
* /etc
|
||||
|
||||
:retval Full path to config file, or None if no config file found
|
||||
"""
|
||||
config_dir = config_dir or app_name
|
||||
|
||||
if options.get('config_file'):
|
||||
if os.path.exists(options['config_file']):
|
||||
return fix_path(options['config_file'])
|
||||
elif args:
|
||||
if os.path.exists(args[0]):
|
||||
return fix_path(args[0])
|
||||
|
||||
# Handle standard directory search for $app_name.conf
|
||||
config_file_dirs = [fix_path(os.getcwd()),
|
||||
fix_path(os.path.join('~', '.' + config_dir)),
|
||||
fix_path('~'),
|
||||
os.path.join('/etc', config_dir),
|
||||
'/etc']
|
||||
|
||||
for cfg_dir in config_file_dirs:
|
||||
cfg_file = os.path.join(cfg_dir, '%s.conf' % app_name)
|
||||
if os.path.exists(cfg_file):
|
||||
return cfg_file
|
||||
|
||||
|
||||
def load_paste_config(app_name, options, args, config_dir=None):
|
||||
"""
|
||||
Looks for a config file to use for an app and returns the
|
||||
config file path and a configuration mapping from a paste config file.
|
||||
|
||||
We search for the paste config file in the following order:
|
||||
* If --config-file option is used, use that
|
||||
* If args[0] is a file, use that
|
||||
* Search for $app_name.conf in standard directories:
|
||||
* .
|
||||
* ~.config_dir/
|
||||
* ~
|
||||
* /etc/config_dir
|
||||
* /etc
|
||||
|
||||
:param app_name: Name of the application to load config for, or None.
|
||||
None signifies to only load the [DEFAULT] section of
|
||||
the config file.
|
||||
:param options: Set of typed options returned from parse_options()
|
||||
:param args: Command line arguments from argv[1:]
|
||||
:retval Tuple of (conf_file, conf)
|
||||
|
||||
:raises RuntimeError when config file cannot be located or there was a
|
||||
problem loading the configuration file.
|
||||
"""
|
||||
conf_file = find_config_file(app_name, options, args, config_dir)
|
||||
if not conf_file:
|
||||
raise RuntimeError("Unable to locate any configuration file. "
|
||||
"Cannot load application %s" % app_name)
|
||||
try:
|
||||
conf = deploy.appconfig("config:%s" % conf_file, name=app_name)
|
||||
return conf_file, conf
|
||||
except Exception, e:
|
||||
raise RuntimeError("Error trying to load config %s: %s"
|
||||
% (conf_file, e))
|
||||
|
||||
|
||||
def load_paste_app(app_name, options, args, config_dir=None):
|
||||
"""
|
||||
Builds and returns a WSGI app from a paste config file.
|
||||
|
||||
We search for the paste config file in the following order:
|
||||
* If --config-file option is used, use that
|
||||
* If args[0] is a file, use that
|
||||
* Search for $app_name.conf in standard directories:
|
||||
* .
|
||||
* ~.config_dir/
|
||||
* ~
|
||||
* /etc/config_dir
|
||||
* /etc
|
||||
|
||||
:param app_name: Name of the application to load
|
||||
:param options: Set of typed options returned from parse_options()
|
||||
:param args: Command line arguments from argv[1:]
|
||||
|
||||
:raises RuntimeError when config file cannot be located or application
|
||||
cannot be loaded from config file
|
||||
"""
|
||||
conf_file, conf = load_paste_config(app_name, options,
|
||||
args, config_dir)
|
||||
|
||||
try:
|
||||
# Setup logging early, supplying both the CLI options and the
|
||||
# configuration mapping from the config file
|
||||
setup_logging(options, conf)
|
||||
|
||||
# We only update the conf dict for the verbose and debug
|
||||
# flags. Everything else must be set up in the conf file...
|
||||
debug = options.get('debug') or \
|
||||
get_option(conf, 'debug', type='bool', default=False)
|
||||
verbose = options.get('verbose') or \
|
||||
get_option(conf, 'verbose', type='bool', default=False)
|
||||
conf['debug'] = debug
|
||||
conf['verbose'] = verbose
|
||||
|
||||
# Log the options used when starting if we're in debug mode...
|
||||
if debug:
|
||||
logger = logging.getLogger(app_name)
|
||||
logger.debug("*" * 80)
|
||||
logger.debug("Configuration options gathered from config file:")
|
||||
logger.debug(conf_file)
|
||||
logger.debug("================================================")
|
||||
items = dict([(k, v) for k, v in conf.items()
|
||||
if k not in ('__file__', 'here')])
|
||||
for key, value in sorted(items.items()):
|
||||
logger.debug("%(key)-30s %(value)s" % locals())
|
||||
logger.debug("*" * 80)
|
||||
app = deploy.loadapp("config:%s" % conf_file, name=app_name)
|
||||
except (LookupError, ImportError), e:
|
||||
raise RuntimeError("Unable to load %(app_name)s from "
|
||||
"configuration file %(conf_file)s."
|
||||
"\nGot: %(e)r" % locals())
|
||||
return conf, app
|
||||
|
||||
|
||||
def get_option(options, option, **kwargs):
|
||||
if option in options:
|
||||
value = options[option]
|
||||
type_ = kwargs.get('type', 'str')
|
||||
if type_ == 'bool':
|
||||
if hasattr(value, 'lower'):
|
||||
return value.lower() == 'true'
|
||||
else:
|
||||
return value
|
||||
elif type_ == 'int':
|
||||
return int(value)
|
||||
elif type_ == 'float':
|
||||
return float(value)
|
||||
else:
|
||||
return value
|
||||
elif 'default' in kwargs:
|
||||
return kwargs['default']
|
||||
else:
|
||||
raise KeyError("option '%s' not found" % option)
|
40
windc/openstack/oldcommon/context.py
Normal file
@@ -0,0 +1,40 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2011 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Simple class that stores security context information in the web request.
|
||||
|
||||
Projects should subclass this class if they wish to enhance the request
|
||||
context or provide additional information in their specific WSGI pipeline.
|
||||
"""
|
||||
|
||||
|
||||
class RequestContext(object):
|
||||
|
||||
"""
|
||||
Stores information about the security context under which the user
|
||||
accesses the system, as well as additional request information.
|
||||
"""
|
||||
|
||||
def __init__(self, auth_tok=None, user=None, tenant=None, is_admin=False,
|
||||
read_only=False, show_deleted=False):
|
||||
self.auth_tok = auth_tok
|
||||
self.user = user
|
||||
self.tenant = tenant
|
||||
self.is_admin = is_admin
|
||||
self.read_only = read_only
|
||||
self.show_deleted = show_deleted
|
147
windc/openstack/oldcommon/exception.py
Normal file
@@ -0,0 +1,147 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2011 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Exceptions common to OpenStack projects
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
|
||||
class ProcessExecutionError(IOError):
|
||||
def __init__(self, stdout=None, stderr=None, exit_code=None, cmd=None,
|
||||
description=None):
|
||||
if description is None:
|
||||
description = "Unexpected error while running command."
|
||||
if exit_code is None:
|
||||
exit_code = '-'
|
||||
message = "%s\nCommand: %s\nExit code: %s\nStdout: %r\nStderr: %r" % (
|
||||
description, cmd, exit_code, stdout, stderr)
|
||||
IOError.__init__(self, message)
|
||||
|
||||
|
||||
class Error(Exception):
|
||||
def __init__(self, message=None):
|
||||
super(Error, self).__init__(message)
|
||||
|
||||
|
||||
class ApiError(Error):
|
||||
def __init__(self, message='Unknown', code='Unknown'):
|
||||
self.message = message
|
||||
self.code = code
|
||||
super(ApiError, self).__init__('%s: %s' % (code, message))
|
||||
|
||||
|
||||
class NotFound(Error):
|
||||
pass
|
||||
|
||||
|
||||
class UnknownScheme(Error):
|
||||
|
||||
msg = "Unknown scheme '%s' found in URI"
|
||||
|
||||
def __init__(self, scheme):
|
||||
msg = self.__class__.msg % scheme
|
||||
super(UnknownScheme, self).__init__(msg)
|
||||
|
||||
|
||||
class BadStoreUri(Error):
|
||||
|
||||
msg = "The Store URI %s was malformed. Reason: %s"
|
||||
|
||||
def __init__(self, uri, reason):
|
||||
msg = self.__class__.msg % (uri, reason)
|
||||
super(BadStoreUri, self).__init__(msg)
|
||||
|
||||
|
||||
class Duplicate(Error):
|
||||
pass
|
||||
|
||||
|
||||
class NotAuthorized(Error):
|
||||
pass
|
||||
|
||||
|
||||
class NotEmpty(Error):
|
||||
pass
|
||||
|
||||
|
||||
class Invalid(Error):
|
||||
pass
|
||||
|
||||
|
||||
class BadInputError(Exception):
|
||||
"""Error resulting from a client sending bad input to a server"""
|
||||
pass
|
||||
|
||||
|
||||
class MissingArgumentError(Error):
|
||||
pass
|
||||
|
||||
|
||||
class DatabaseMigrationError(Error):
|
||||
pass
|
||||
|
||||
|
||||
class ClientConnectionError(Exception):
|
||||
"""Error resulting from a client connecting to a server"""
|
||||
pass
|
||||
|
||||
|
||||
def wrap_exception(f):
|
||||
def _wrap(*args, **kw):
|
||||
try:
|
||||
return f(*args, **kw)
|
||||
except Exception, e:
|
||||
if not isinstance(e, Error):
|
||||
#exc_type, exc_value, exc_traceback = sys.exc_info()
|
||||
logging.exception('Uncaught exception')
|
||||
#logging.error(traceback.extract_stack(exc_traceback))
|
||||
raise Error(str(e))
|
||||
raise
|
||||
_wrap.func_name = f.func_name
|
||||
return _wrap
|
||||
|
||||
|
||||
class OpenstackException(Exception):
|
||||
"""
|
||||
Base Exception
|
||||
|
||||
To correctly use this class, inherit from it and define
|
||||
a 'message' property. That message will get printf'd
|
||||
with the keyword arguments provided to the constructor.
|
||||
"""
|
||||
message = "An unknown exception occurred"
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
try:
|
||||
self._error_string = self.message % kwargs
|
||||
|
||||
except Exception:
|
||||
# at least get the core message out if something happened
|
||||
self._error_string = self.message
|
||||
|
||||
def __str__(self):
|
||||
return self._error_string
|
||||
|
||||
|
||||
class MalformedRequestBody(OpenstackException):
|
||||
message = "Malformed message body: %(reason)s"
|
||||
|
||||
|
||||
class InvalidContentType(OpenstackException):
|
||||
message = "Invalid content type %(content_type)s"
|
0
windc/openstack/oldcommon/middleware/__init__.py
Normal file
64
windc/openstack/oldcommon/middleware/context.py
Normal file
@ -0,0 +1,64 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Middleware that attaches a context to the WSGI request
"""

from openstack.common import utils
from openstack.common import wsgi
from openstack.common import context


class ContextMiddleware(wsgi.Middleware):
    def __init__(self, app, options):
        self.options = options
        super(ContextMiddleware, self).__init__(app)

    def make_context(self, *args, **kwargs):
        """
        Create a context with the given arguments.
        """

        # Determine the context class to use
        ctxcls = context.RequestContext
        if 'context_class' in self.options:
            ctxcls = utils.import_class(self.options['context_class'])

        return ctxcls(*args, **kwargs)

    def process_request(self, req):
        """
        Extract any authentication information in the request and
        construct an appropriate context from it.
        """
        # Use the default empty context, with admin turned on for
        # backwards compatibility
        req.context = self.make_context(is_admin=True)


def filter_factory(global_conf, **local_conf):
    """
    Factory method for paste.deploy
    """
    conf = global_conf.copy()
    conf.update(local_conf)

    def filter(app):
        return ContextMiddleware(app, conf)

    return filter
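A wiring sketch for the factory above (inner_app is a placeholder for any downstream WSGI callable; the context_class value simply points at the RequestContext class this module already imports):

    # Build the filter exactly as paste.deploy would, then apply it by hand.
    make_filter = filter_factory({}, context_class='openstack.common.context.RequestContext')
    app = make_filter(inner_app)
    # Every request passing through `app` now carries req.context,
    # created with is_admin=True by default.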
210 windc/openstack/oldcommon/utils.py Normal file
@@ -0,0 +1,210 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2011 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
System-level utilities and helper functions.
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import shlex
|
||||
import sys
|
||||
import types
|
||||
|
||||
from eventlet import greenthread
|
||||
from eventlet.green import subprocess
|
||||
|
||||
from openstack.common import exception
|
||||
|
||||
|
||||
TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def int_from_bool_as_string(subject):
|
||||
"""
|
||||
Interpret a string as a boolean and return either 1 or 0.
|
||||
|
||||
Any string value in:
|
||||
('True', 'true', 'On', 'on', '1')
|
||||
is interpreted as a boolean True.
|
||||
|
||||
Useful for JSON-decoded stuff and config file parsing
|
||||
"""
|
||||
return bool_from_string(subject) and 1 or 0
|
||||
|
||||
|
||||
def bool_from_string(subject):
|
||||
"""
|
||||
Interpret a string as a boolean.
|
||||
|
||||
Any string value in:
|
||||
('True', 'true', 'On', 'on', '1')
|
||||
is interpreted as a boolean True.
|
||||
|
||||
Useful for JSON-decoded stuff and config file parsing
|
||||
"""
|
||||
if isinstance(subject, types.BooleanType):
|
||||
return subject
|
||||
if isinstance(subject, types.StringTypes):
|
||||
if subject.strip().lower() in ('true', 'on', '1'):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def execute(*cmd, **kwargs):
|
||||
"""
|
||||
Helper method to execute command with optional retry.
|
||||
|
||||
:cmd Passed to subprocess.Popen.
|
||||
:process_input Send to opened process.
|
||||
:check_exit_code Defaults to 0. Raise exception.ProcessExecutionError
|
||||
unless program exits with this code.
|
||||
:delay_on_retry True | False. Defaults to True. If set to True, wait a
|
||||
short amount of time before retrying.
|
||||
:attempts How many times to retry cmd.
|
||||
:run_as_root True | False. Defaults to False. If set to True,
|
||||
the command is prefixed by the command specified
|
||||
in the root_helper kwarg.
|
||||
:root_helper command to prefix all cmd's with
|
||||
|
||||
:raises exception.Error on receiving unknown arguments
|
||||
:raises exception.ProcessExecutionError
|
||||
"""
|
||||
|
||||
process_input = kwargs.pop('process_input', None)
|
||||
check_exit_code = kwargs.pop('check_exit_code', 0)
|
||||
delay_on_retry = kwargs.pop('delay_on_retry', True)
|
||||
attempts = kwargs.pop('attempts', 1)
|
||||
run_as_root = kwargs.pop('run_as_root', False)
|
||||
root_helper = kwargs.pop('root_helper', '')
|
||||
if len(kwargs):
|
||||
raise exception.Error(_('Got unknown keyword args '
|
||||
'to utils.execute: %r') % kwargs)
|
||||
if run_as_root:
|
||||
cmd = shlex.split(root_helper) + list(cmd)
|
||||
cmd = map(str, cmd)
|
||||
|
||||
while attempts > 0:
|
||||
attempts -= 1
|
||||
try:
|
||||
LOG.debug(_('Running cmd (subprocess): %s'), ' '.join(cmd))
|
||||
_PIPE = subprocess.PIPE # pylint: disable=E1101
|
||||
obj = subprocess.Popen(cmd,
|
||||
stdin=_PIPE,
|
||||
stdout=_PIPE,
|
||||
stderr=_PIPE,
|
||||
close_fds=True)
|
||||
result = None
|
||||
if process_input is not None:
|
||||
result = obj.communicate(process_input)
|
||||
else:
|
||||
result = obj.communicate()
|
||||
obj.stdin.close() # pylint: disable=E1101
|
||||
_returncode = obj.returncode # pylint: disable=E1101
|
||||
if _returncode:
|
||||
LOG.debug(_('Result was %s') % _returncode)
|
||||
if type(check_exit_code) == types.IntType \
|
||||
and _returncode != check_exit_code:
|
||||
(stdout, stderr) = result
|
||||
raise exception.ProcessExecutionError(
|
||||
exit_code=_returncode,
|
||||
stdout=stdout,
|
||||
stderr=stderr,
|
||||
cmd=' '.join(cmd))
|
||||
return result
|
||||
except exception.ProcessExecutionError:
|
||||
if not attempts:
|
||||
raise
|
||||
else:
|
||||
LOG.debug(_('%r failed. Retrying.'), cmd)
|
||||
if delay_on_retry:
|
||||
greenthread.sleep(random.randint(20, 200) / 100.0)
|
||||
finally:
|
||||
# NOTE(termie): this appears to be necessary to let the subprocess
|
||||
# call clean something up in between calls, without
|
||||
# it two execute calls in a row hangs the second one
|
||||
greenthread.sleep(0)
|
||||
|
||||
|
||||
def import_class(import_str):
|
||||
"""Returns a class from a string including module and class"""
|
||||
mod_str, _sep, class_str = import_str.rpartition('.')
|
||||
try:
|
||||
__import__(mod_str)
|
||||
return getattr(sys.modules[mod_str], class_str)
|
||||
except (ImportError, ValueError, AttributeError):
|
||||
raise exception.NotFound('Class %s cannot be found' % class_str)
|
||||
|
||||
|
||||
def import_object(import_str):
|
||||
"""Returns an object including a module or module and class"""
|
||||
try:
|
||||
__import__(import_str)
|
||||
return sys.modules[import_str]
|
||||
except ImportError:
|
||||
return import_class(import_str)
|
||||
|
||||
|
||||
def isotime(at=None):
|
||||
if not at:
|
||||
at = datetime.datetime.utcnow()
|
||||
return at.strftime(TIME_FORMAT)
|
||||
|
||||
|
||||
def parse_isotime(timestr):
|
||||
return datetime.datetime.strptime(timestr, TIME_FORMAT)
|
||||
|
||||
|
||||
def parse_mailmap(mailmap='.mailmap'):
|
||||
mapping = {}
|
||||
if os.path.exists(mailmap):
|
||||
fp = open(mailmap, 'r')
|
||||
for l in fp:
|
||||
l = l.strip()
|
||||
if not l.startswith('#') and ' ' in l:
|
||||
canonical_email, alias = l.split(' ')
|
||||
mapping[alias] = canonical_email
|
||||
return mapping
|
||||
|
||||
|
||||
def str_dict_replace(s, mapping):
|
||||
for s1, s2 in mapping.iteritems():
|
||||
s = s.replace(s1, s2)
|
||||
return s
|
||||
|
||||
|
||||
def utcnow():
|
||||
"""Overridable version of utils.utcnow."""
|
||||
if utcnow.override_time:
|
||||
return utcnow.override_time
|
||||
return datetime.datetime.utcnow()
|
||||
|
||||
|
||||
utcnow.override_time = None
|
||||
|
||||
|
||||
def set_time_override(override_time=datetime.datetime.utcnow()):
|
||||
"""Override utils.utcnow to return a constant time."""
|
||||
utcnow.override_time = override_time
|
||||
|
||||
|
||||
def clear_time_override():
|
||||
"""Remove the overridden time."""
|
||||
utcnow.override_time = None
|
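A short usage sketch for the helpers above (the command and values are examples only; the import path is assumed from this diff, and execute() expects the _() translation hook that the project installs via gettext):

    import gettext
    gettext.install('windc', unicode=1)      # execute() formats its debug messages with _()

    from openstack.common import utils

    utils.bool_from_string('On')             # True
    utils.int_from_bool_as_string('no')      # 0

    # Run a command, retrying once and raising ProcessExecutionError on a
    # non-zero exit code (check_exit_code defaults to 0).
    stdout, stderr = utils.execute('ls', '-l', '/tmp', attempts=2)

    # Freeze the overridable clock for a test, then restore it.
    utils.set_time_override()
    frozen = utils.isotime(utils.utcnow())
    utils.clear_time_override()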
717 windc/openstack/oldcommon/wsgi.py Normal file
@@ -0,0 +1,717 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2011 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Utility methods for working with WSGI servers."""
|
||||
|
||||
import datetime
|
||||
import eventlet
|
||||
import eventlet.wsgi
|
||||
|
||||
eventlet.patcher.monkey_patch(all=False, socket=True)
|
||||
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
import routes
|
||||
import routes.middleware
|
||||
import webob.dec
|
||||
import webob.exc
|
||||
from xml.dom import minidom
|
||||
from xml.parsers import expat
|
||||
|
||||
from openstack.common import exception
|
||||
|
||||
|
||||
LOG = logging.getLogger('wsgi')
|
||||
|
||||
|
||||
class WritableLogger(object):
|
||||
"""A thin wrapper that responds to `write` and logs."""
|
||||
|
||||
def __init__(self, logger, level=logging.DEBUG):
|
||||
self.logger = logger
|
||||
self.level = level
|
||||
|
||||
def write(self, msg):
|
||||
self.logger.log(self.level, msg.strip("\n"))
|
||||
|
||||
|
||||
def run_server(application, port):
|
||||
"""Run a WSGI server with the given application."""
|
||||
sock = eventlet.listen(('0.0.0.0', port))
|
||||
eventlet.wsgi.server(sock, application)
|
||||
|
||||
|
||||
class Server(object):
|
||||
"""Server class to manage multiple WSGI sockets and applications."""
|
||||
|
||||
def __init__(self, threads=1000):
|
||||
self.pool = eventlet.GreenPool(threads)
|
||||
|
||||
def start(self, application, port, host='0.0.0.0', backlog=128):
|
||||
"""Run a WSGI server with the given application."""
|
||||
socket = eventlet.listen((host, port), backlog=backlog)
|
||||
self.pool.spawn_n(self._run, application, socket)
|
||||
|
||||
def wait(self):
|
||||
"""Wait until all servers have completed running."""
|
||||
try:
|
||||
self.pool.waitall()
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
|
||||
def _run(self, application, socket):
|
||||
"""Start a WSGI server in a new green thread."""
|
||||
logger = logging.getLogger('eventlet.wsgi.server')
|
||||
eventlet.wsgi.server(socket, application, custom_pool=self.pool,
|
||||
log=WritableLogger(logger))
|
||||
|
||||
|
||||
class Middleware(object):
|
||||
"""
|
||||
Base WSGI middleware wrapper. These classes require an application to be
|
||||
initialized that will be called next. By default the middleware will
|
||||
simply call its wrapped app, or you can override __call__ to customize its
|
||||
behavior.
|
||||
"""
|
||||
|
||||
def __init__(self, application):
|
||||
self.application = application
|
||||
|
||||
def process_request(self, req):
|
||||
"""
|
||||
Called on each request.
|
||||
|
||||
If this returns None, the next application down the stack will be
|
||||
executed. If it returns a response then that response will be returned
|
||||
and execution will stop here.
|
||||
"""
|
||||
return None
|
||||
|
||||
def process_response(self, response):
|
||||
"""Do whatever you'd like to the response."""
|
||||
return response
|
||||
|
||||
@webob.dec.wsgify
|
||||
def __call__(self, req):
|
||||
response = self.process_request(req)
|
||||
if response:
|
||||
return response
|
||||
response = req.get_response(self.application)
|
||||
return self.process_response(response)
|
||||
|
||||
|
||||
class Debug(Middleware):
|
||||
"""
|
||||
Helper class that can be inserted into any WSGI application chain
|
||||
to get information about the request and response.
|
||||
"""
|
||||
|
||||
@webob.dec.wsgify
|
||||
def __call__(self, req):
|
||||
print ("*" * 40) + " REQUEST ENVIRON"
|
||||
for key, value in req.environ.items():
|
||||
print key, "=", value
|
||||
print
|
||||
resp = req.get_response(self.application)
|
||||
|
||||
print ("*" * 40) + " RESPONSE HEADERS"
|
||||
for (key, value) in resp.headers.iteritems():
|
||||
print key, "=", value
|
||||
print
|
||||
|
||||
resp.app_iter = self.print_generator(resp.app_iter)
|
||||
|
||||
return resp
|
||||
|
||||
@staticmethod
|
||||
def print_generator(app_iter):
|
||||
"""
|
||||
Iterator that prints the contents of a wrapper string iterator
|
||||
when iterated.
|
||||
"""
|
||||
print ("*" * 40) + " BODY"
|
||||
for part in app_iter:
|
||||
sys.stdout.write(part)
|
||||
sys.stdout.flush()
|
||||
yield part
|
||||
print
|
||||
|
||||
|
||||
class Router(object):
|
||||
|
||||
"""
|
||||
WSGI middleware that maps incoming requests to WSGI apps.
|
||||
"""
|
||||
|
||||
def __init__(self, mapper):
|
||||
"""
|
||||
Create a router for the given routes.Mapper.
|
||||
|
||||
Each route in `mapper` must specify a 'controller', which is a
|
||||
WSGI app to call. You'll probably want to specify an 'action' as
|
||||
well and have your controller be a wsgi.Controller, who will route
|
||||
the request to the action method.
|
||||
|
||||
Examples:
|
||||
mapper = routes.Mapper()
|
||||
sc = ServerController()
|
||||
|
||||
# Explicit mapping of one route to a controller+action
|
||||
mapper.connect(None, "/svrlist", controller=sc, action="list")
|
||||
|
||||
# Actions are all implicitly defined
|
||||
mapper.resource("server", "servers", controller=sc)
|
||||
|
||||
# Pointing to an arbitrary WSGI app. You can specify the
|
||||
# {path_info:.*} parameter so the target app can be handed just that
|
||||
# section of the URL.
|
||||
mapper.connect(None, "/v1.0/{path_info:.*}", controller=BlogApp())
|
||||
"""
|
||||
self.map = mapper
|
||||
self._router = routes.middleware.RoutesMiddleware(self._dispatch,
|
||||
self.map)
|
||||
|
||||
@webob.dec.wsgify
|
||||
def __call__(self, req):
|
||||
"""
|
||||
Route the incoming request to a controller based on self.map.
|
||||
If no match, return a 404.
|
||||
"""
|
||||
return self._router
|
||||
|
||||
@staticmethod
|
||||
@webob.dec.wsgify
|
||||
def _dispatch(req):
|
||||
"""
|
||||
Called by self._router after matching the incoming request to a route
|
||||
and putting the information into req.environ. Either returns 404
|
||||
or the routed WSGI app's response.
|
||||
"""
|
||||
match = req.environ['wsgiorg.routing_args'][1]
|
||||
if not match:
|
||||
return webob.exc.HTTPNotFound()
|
||||
app = match['controller']
|
||||
return app
|
||||
|
||||
|
||||
class Request(webob.Request):
|
||||
"""Add some Openstack API-specific logic to the base webob.Request."""
|
||||
|
||||
default_request_content_types = ('application/json', 'application/xml')
|
||||
default_accept_types = ('application/json', 'application/xml')
|
||||
default_accept_type = 'application/json'
|
||||
|
||||
def best_match_content_type(self, supported_content_types=None):
|
||||
"""Determine the requested response content-type.
|
||||
|
||||
Based on the query extension then the Accept header.
|
||||
Defaults to default_accept_type if we don't find a preference
|
||||
|
||||
"""
|
||||
supported_content_types = (supported_content_types or
|
||||
self.default_accept_types)
|
||||
|
||||
parts = self.path.rsplit('.', 1)
|
||||
if len(parts) > 1:
|
||||
ctype = 'application/{0}'.format(parts[1])
|
||||
if ctype in supported_content_types:
|
||||
return ctype
|
||||
|
||||
bm = self.accept.best_match(supported_content_types)
|
||||
return bm or self.default_accept_type
|
||||
|
||||
def get_content_type(self, allowed_content_types=None):
|
||||
"""Determine content type of the request body.
|
||||
|
||||
Does not do any body introspection, only checks header
|
||||
|
||||
"""
|
||||
if not "Content-Type" in self.headers:
|
||||
return None
|
||||
|
||||
content_type = self.content_type
|
||||
allowed_content_types = (allowed_content_types or
|
||||
self.default_request_content_types)
|
||||
|
||||
if content_type not in allowed_content_types:
|
||||
raise exception.InvalidContentType(content_type=content_type)
|
||||
return content_type
|
||||
|
||||
|
||||
class Resource(object):
|
||||
"""
|
||||
WSGI app that handles (de)serialization and controller dispatch.
|
||||
|
||||
Reads routing information supplied by RoutesMiddleware and calls
|
||||
the requested action method upon its deserializer, controller,
|
||||
and serializer. Those three objects may implement any of the basic
|
||||
controller action methods (create, update, show, index, delete)
|
||||
along with any that may be specified in the api router. A 'default'
|
||||
method may also be implemented to be used in place of any
|
||||
non-implemented actions. Deserializer methods must accept a request
|
||||
argument and return a dictionary. Controller methods must accept a
|
||||
request argument. Additionally, they must also accept keyword
|
||||
arguments that represent the keys returned by the Deserializer. They
|
||||
may raise a webob.exc exception or return a dict, which will be
|
||||
serialized by requested content type.
|
||||
"""
|
||||
def __init__(self, controller, deserializer=None, serializer=None):
|
||||
"""
|
||||
:param controller: object that implement methods created by routes lib
|
||||
:param deserializer: object that supports webob request deserialization
|
||||
through controller-like actions
|
||||
:param serializer: object that supports webob response serialization
|
||||
through controller-like actions
|
||||
"""
|
||||
self.controller = controller
|
||||
self.serializer = serializer or ResponseSerializer()
|
||||
self.deserializer = deserializer or RequestDeserializer()
|
||||
|
||||
@webob.dec.wsgify(RequestClass=Request)
|
||||
def __call__(self, request):
|
||||
"""WSGI method that controls (de)serialization and method dispatch."""
|
||||
|
||||
try:
|
||||
action, action_args, accept = self.deserialize_request(request)
|
||||
except exception.InvalidContentType:
|
||||
msg = _("Unsupported Content-Type")
|
||||
return webob.exc.HTTPUnsupportedMediaType(explanation=msg)
|
||||
except exception.MalformedRequestBody:
|
||||
msg = _("Malformed request body")
|
||||
return webob.exc.HTTPBadRequest(explanation=msg)
|
||||
|
||||
action_result = self.execute_action(action, request, **action_args)
|
||||
try:
|
||||
return self.serialize_response(action, action_result, accept)
|
||||
# return unserializable result (typically a webob exc)
|
||||
except Exception:
|
||||
return action_result
|
||||
|
||||
def deserialize_request(self, request):
|
||||
return self.deserializer.deserialize(request)
|
||||
|
||||
def serialize_response(self, action, action_result, accept):
|
||||
return self.serializer.serialize(action_result, accept, action)
|
||||
|
||||
def execute_action(self, action, request, **action_args):
|
||||
return self.dispatch(self.controller, action, request, **action_args)
|
||||
|
||||
def dispatch(self, obj, action, *args, **kwargs):
|
||||
"""Find action-specific method on self and call it."""
|
||||
try:
|
||||
method = getattr(obj, action)
|
||||
except AttributeError:
|
||||
method = getattr(obj, 'default')
|
||||
|
||||
return method(*args, **kwargs)
|
||||
|
||||
def get_action_args(self, request_environment):
|
||||
"""Parse dictionary created by routes library."""
|
||||
try:
|
||||
args = request_environment['wsgiorg.routing_args'][1].copy()
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
try:
|
||||
del args['controller']
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
try:
|
||||
del args['format']
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
return args
|
||||
|
||||
|
||||
class ActionDispatcher(object):
|
||||
"""Maps method name to local methods through action name."""
|
||||
|
||||
def dispatch(self, *args, **kwargs):
|
||||
"""Find and call local method."""
|
||||
action = kwargs.pop('action', 'default')
|
||||
action_method = getattr(self, str(action), self.default)
|
||||
return action_method(*args, **kwargs)
|
||||
|
||||
def default(self, data):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class DictSerializer(ActionDispatcher):
|
||||
"""Default request body serialization"""
|
||||
|
||||
def serialize(self, data, action='default'):
|
||||
return self.dispatch(data, action=action)
|
||||
|
||||
def default(self, data):
|
||||
return ""
|
||||
|
||||
|
||||
class JSONDictSerializer(DictSerializer):
|
||||
"""Default JSON request body serialization"""
|
||||
|
||||
def default(self, data):
|
||||
def sanitizer(obj):
|
||||
if isinstance(obj, datetime.datetime):
|
||||
_dtime = obj - datetime.timedelta(microseconds=obj.microsecond)
|
||||
return _dtime.isoformat()
|
||||
return obj
|
||||
return json.dumps(data, default=sanitizer)
|
||||
|
||||
|
||||
class XMLDictSerializer(DictSerializer):
|
||||
|
||||
def __init__(self, metadata=None, xmlns=None):
|
||||
"""
|
||||
:param metadata: information needed to deserialize xml into
|
||||
a dictionary.
|
||||
:param xmlns: XML namespace to include with serialized xml
|
||||
"""
|
||||
super(XMLDictSerializer, self).__init__()
|
||||
self.metadata = metadata or {}
|
||||
self.xmlns = xmlns
|
||||
|
||||
def default(self, data):
|
||||
# We expect data to contain a single key which is the XML root.
|
||||
root_key = data.keys()[0]
|
||||
doc = minidom.Document()
|
||||
node = self._to_xml_node(doc, self.metadata, root_key, data[root_key])
|
||||
|
||||
return self.to_xml_string(node)
|
||||
|
||||
def to_xml_string(self, node, has_atom=False):
|
||||
self._add_xmlns(node, has_atom)
|
||||
return node.toprettyxml(indent=' ', encoding='UTF-8')
|
||||
|
||||
#NOTE (ameade): the has_atom should be removed after all of the
|
||||
# xml serializers and view builders have been updated to the current
|
||||
# spec that required all responses include the xmlns:atom, the has_atom
|
||||
# flag is to prevent current tests from breaking
|
||||
def _add_xmlns(self, node, has_atom=False):
|
||||
if self.xmlns is not None:
|
||||
node.setAttribute('xmlns', self.xmlns)
|
||||
if has_atom:
|
||||
node.setAttribute('xmlns:atom', "http://www.w3.org/2005/Atom")
|
||||
|
||||
def _to_xml_node(self, doc, metadata, nodename, data):
|
||||
"""Recursive method to convert data members to XML nodes."""
|
||||
result = doc.createElement(nodename)
|
||||
|
||||
# Set the xml namespace if one is specified
|
||||
# TODO(justinsb): We could also use prefixes on the keys
|
||||
xmlns = metadata.get('xmlns', None)
|
||||
if xmlns:
|
||||
result.setAttribute('xmlns', xmlns)
|
||||
|
||||
#TODO(bcwaldon): accomplish this without a type-check
|
||||
if type(data) is list:
|
||||
collections = metadata.get('list_collections', {})
|
||||
if nodename in collections:
|
||||
metadata = collections[nodename]
|
||||
for item in data:
|
||||
node = doc.createElement(metadata['item_name'])
|
||||
node.setAttribute(metadata['item_key'], str(item))
|
||||
result.appendChild(node)
|
||||
return result
|
||||
singular = metadata.get('plurals', {}).get(nodename, None)
|
||||
if singular is None:
|
||||
if nodename.endswith('s'):
|
||||
singular = nodename[:-1]
|
||||
else:
|
||||
singular = 'item'
|
||||
for item in data:
|
||||
node = self._to_xml_node(doc, metadata, singular, item)
|
||||
result.appendChild(node)
|
||||
#TODO(bcwaldon): accomplish this without a type-check
|
||||
elif type(data) is dict:
|
||||
collections = metadata.get('dict_collections', {})
|
||||
if nodename in collections:
|
||||
metadata = collections[nodename]
|
||||
for k, v in data.items():
|
||||
node = doc.createElement(metadata['item_name'])
|
||||
node.setAttribute(metadata['item_key'], str(k))
|
||||
text = doc.createTextNode(str(v))
|
||||
node.appendChild(text)
|
||||
result.appendChild(node)
|
||||
return result
|
||||
attrs = metadata.get('attributes', {}).get(nodename, {})
|
||||
for k, v in data.items():
|
||||
if k in attrs:
|
||||
result.setAttribute(k, str(v))
|
||||
else:
|
||||
node = self._to_xml_node(doc, metadata, k, v)
|
||||
result.appendChild(node)
|
||||
else:
|
||||
# Type is atom
|
||||
node = doc.createTextNode(str(data))
|
||||
result.appendChild(node)
|
||||
return result
|
||||
|
||||
def _create_link_nodes(self, xml_doc, links):
|
||||
link_nodes = []
|
||||
for link in links:
|
||||
link_node = xml_doc.createElement('atom:link')
|
||||
link_node.setAttribute('rel', link['rel'])
|
||||
link_node.setAttribute('href', link['href'])
|
||||
if 'type' in link:
|
||||
link_node.setAttribute('type', link['type'])
|
||||
link_nodes.append(link_node)
|
||||
return link_nodes
|
||||
|
||||
|
||||
class ResponseHeadersSerializer(ActionDispatcher):
|
||||
"""Default response headers serialization"""
|
||||
|
||||
def serialize(self, response, data, action):
|
||||
self.dispatch(response, data, action=action)
|
||||
|
||||
def default(self, response, data):
|
||||
response.status_int = 200
|
||||
|
||||
|
||||
class ResponseSerializer(object):
|
||||
"""Encode the necessary pieces into a response object"""
|
||||
|
||||
def __init__(self, body_serializers=None, headers_serializer=None):
|
||||
self.body_serializers = {
|
||||
'application/xml': XMLDictSerializer(),
|
||||
'application/json': JSONDictSerializer(),
|
||||
}
|
||||
self.body_serializers.update(body_serializers or {})
|
||||
|
||||
self.headers_serializer = headers_serializer or \
|
||||
ResponseHeadersSerializer()
|
||||
|
||||
def serialize(self, response_data, content_type, action='default'):
|
||||
"""Serialize a dict into a string and wrap in a wsgi.Request object.
|
||||
|
||||
:param response_data: dict produced by the Controller
|
||||
:param content_type: expected mimetype of serialized response body
|
||||
|
||||
"""
|
||||
response = webob.Response()
|
||||
self.serialize_headers(response, response_data, action)
|
||||
self.serialize_body(response, response_data, content_type, action)
|
||||
return response
|
||||
|
||||
def serialize_headers(self, response, data, action):
|
||||
self.headers_serializer.serialize(response, data, action)
|
||||
|
||||
def serialize_body(self, response, data, content_type, action):
|
||||
response.headers['Content-Type'] = content_type
|
||||
if data is not None:
|
||||
serializer = self.get_body_serializer(content_type)
|
||||
response.body = serializer.serialize(data, action)
|
||||
|
||||
def get_body_serializer(self, content_type):
|
||||
try:
|
||||
return self.body_serializers[content_type]
|
||||
except (KeyError, TypeError):
|
||||
raise exception.InvalidContentType(content_type=content_type)
|
||||
|
||||
|
||||
class RequestHeadersDeserializer(ActionDispatcher):
|
||||
"""Default request headers deserializer"""
|
||||
|
||||
def deserialize(self, request, action):
|
||||
return self.dispatch(request, action=action)
|
||||
|
||||
def default(self, request):
|
||||
return {}
|
||||
|
||||
|
||||
class RequestDeserializer(object):
|
||||
"""Break up a Request object into more useful pieces."""
|
||||
|
||||
def __init__(self, body_deserializers=None, headers_deserializer=None,
|
||||
supported_content_types=None):
|
||||
|
||||
self.supported_content_types = supported_content_types
|
||||
|
||||
self.body_deserializers = {
|
||||
'application/xml': XMLDeserializer(),
|
||||
'application/json': JSONDeserializer(),
|
||||
}
|
||||
self.body_deserializers.update(body_deserializers or {})
|
||||
|
||||
self.headers_deserializer = headers_deserializer or \
|
||||
RequestHeadersDeserializer()
|
||||
|
||||
def deserialize(self, request):
|
||||
"""Extract necessary pieces of the request.
|
||||
|
||||
:param request: Request object
|
||||
:returns tuple of expected controller action name, dictionary of
|
||||
keyword arguments to pass to the controller, the expected
|
||||
content type of the response
|
||||
|
||||
"""
|
||||
action_args = self.get_action_args(request.environ)
|
||||
action = action_args.pop('action', None)
|
||||
|
||||
action_args.update(self.deserialize_headers(request, action))
|
||||
action_args.update(self.deserialize_body(request, action))
|
||||
|
||||
accept = self.get_expected_content_type(request)
|
||||
|
||||
return (action, action_args, accept)
|
||||
|
||||
def deserialize_headers(self, request, action):
|
||||
return self.headers_deserializer.deserialize(request, action)
|
||||
|
||||
def deserialize_body(self, request, action):
|
||||
if not len(request.body) > 0:
|
||||
LOG.debug(_("Empty body provided in request"))
|
||||
return {}
|
||||
|
||||
try:
|
||||
content_type = request.get_content_type()
|
||||
except exception.InvalidContentType:
|
||||
LOG.debug(_("Unrecognized Content-Type provided in request"))
|
||||
raise
|
||||
|
||||
if content_type is None:
|
||||
LOG.debug(_("No Content-Type provided in request"))
|
||||
return {}
|
||||
|
||||
try:
|
||||
deserializer = self.get_body_deserializer(content_type)
|
||||
except exception.InvalidContentType:
|
||||
LOG.debug(_("Unable to deserialize body as provided Content-Type"))
|
||||
raise
|
||||
|
||||
return deserializer.deserialize(request.body, action)
|
||||
|
||||
def get_body_deserializer(self, content_type):
|
||||
try:
|
||||
return self.body_deserializers[content_type]
|
||||
except (KeyError, TypeError):
|
||||
raise exception.InvalidContentType(content_type=content_type)
|
||||
|
||||
def get_expected_content_type(self, request):
|
||||
return request.best_match_content_type(self.supported_content_types)
|
||||
|
||||
def get_action_args(self, request_environment):
|
||||
"""Parse dictionary created by routes library."""
|
||||
try:
|
||||
args = request_environment['wsgiorg.routing_args'][1].copy()
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
try:
|
||||
del args['controller']
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
try:
|
||||
del args['format']
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
return args
|
||||
|
||||
|
||||
class TextDeserializer(ActionDispatcher):
|
||||
"""Default request body deserialization"""
|
||||
|
||||
def deserialize(self, datastring, action='default'):
|
||||
return self.dispatch(datastring, action=action)
|
||||
|
||||
def default(self, datastring):
|
||||
return {}
|
||||
|
||||
|
||||
class JSONDeserializer(TextDeserializer):
|
||||
|
||||
def _from_json(self, datastring):
|
||||
try:
|
||||
return json.loads(datastring)
|
||||
except ValueError:
|
||||
msg = _("cannot understand JSON")
|
||||
raise exception.MalformedRequestBody(reason=msg)
|
||||
|
||||
def default(self, datastring):
|
||||
return {'body': self._from_json(datastring)}
|
||||
|
||||
|
||||
class XMLDeserializer(TextDeserializer):
|
||||
|
||||
def __init__(self, metadata=None):
|
||||
"""
|
||||
:param metadata: information needed to deserialize xml into
|
||||
a dictionary.
|
||||
"""
|
||||
super(XMLDeserializer, self).__init__()
|
||||
self.metadata = metadata or {}
|
||||
|
||||
def _from_xml(self, datastring):
|
||||
plurals = set(self.metadata.get('plurals', {}))
|
||||
|
||||
try:
|
||||
node = minidom.parseString(datastring).childNodes[0]
|
||||
return {node.nodeName: self._from_xml_node(node, plurals)}
|
||||
except expat.ExpatError:
|
||||
msg = _("cannot understand XML")
|
||||
raise exception.MalformedRequestBody(reason=msg)
|
||||
|
||||
def _from_xml_node(self, node, listnames):
|
||||
"""Convert a minidom node to a simple Python type.
|
||||
|
||||
:param listnames: list of XML node names whose subnodes should
|
||||
be considered list items.
|
||||
|
||||
"""
|
||||
|
||||
if len(node.childNodes) == 1 and node.childNodes[0].nodeType == 3:
|
||||
return node.childNodes[0].nodeValue
|
||||
elif node.nodeName in listnames:
|
||||
return [self._from_xml_node(n, listnames) for n in node.childNodes]
|
||||
else:
|
||||
result = dict()
|
||||
for attr in node.attributes.keys():
|
||||
result[attr] = node.attributes[attr].nodeValue
|
||||
for child in node.childNodes:
|
||||
if child.nodeType != node.TEXT_NODE:
|
||||
result[child.nodeName] = self._from_xml_node(child,
|
||||
listnames)
|
||||
return result
|
||||
|
||||
def find_first_child_named(self, parent, name):
|
||||
"""Search a nodes children for the first child with a given name"""
|
||||
for node in parent.childNodes:
|
||||
if node.nodeName == name:
|
||||
return node
|
||||
return None
|
||||
|
||||
def find_children_named(self, parent, name):
|
||||
"""Return all of a nodes children who have the given name"""
|
||||
for node in parent.childNodes:
|
||||
if node.nodeName == name:
|
||||
yield node
|
||||
|
||||
def extract_text(self, node):
|
||||
"""Get the text field contained by the given node"""
|
||||
if len(node.childNodes) == 1:
|
||||
child = node.childNodes[0]
|
||||
if child.nodeType == child.TEXT_NODE:
|
||||
return child.nodeValue
|
||||
return ""
|
||||
|
||||
def default(self, datastring):
|
||||
return {'body': self._from_xml(datastring)}
|
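An end-to-end sketch of how these pieces compose (the controller, route names and port are illustrative, not taken from this commit): a plain controller wrapped in Resource, mounted on a routes.Mapper, dispatched by Router and served by Server.

    import routes

    from openstack.common import wsgi    # import path assumed from this diff

    class ThingController(object):
        def index(self, req, tenant_id):
            # Keyword arguments come from the matched route; returned dicts are
            # serialized to JSON or XML based on the Accept header / URL extension.
            return {'things': [], 'tenant': tenant_id}

    mapper = routes.Mapper()
    mapper.resource('thing', 'things',
                    controller=wsgi.Resource(ThingController()),
                    path_prefix='/{tenant_id}')

    server = wsgi.Server()
    server.start(wsgi.Router(mapper), port=8181)
    server.wait()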
3 windc/run_tests.log Normal file
@@ -0,0 +1,3 @@
nose.config: INFO: Ignoring files matching ['^\\.', '^_', '^setup\\.py$']
nose.selector: INFO: /Users/gokrokve/Keero/windc/run_tests.sh is executable; skipped
2013-02-11 13:35:01,989 DEBUG Initialized with method overriding = True, and path info altering = True
293 windc/run_tests.py Normal file
@@ -0,0 +1,293 @@
|
||||
#!/usr/bin/env python
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2010 OpenStack, LLC
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# Colorizer Code is borrowed from Twisted:
|
||||
# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
"""
|
||||
Unittest runner for balancer
|
||||
|
||||
To run all test::
|
||||
python run_tests.py
|
||||
|
||||
To run a single test::
|
||||
python run_tests.py test_stores:TestSwiftBackend.test_get
|
||||
|
||||
To run a single test module::
|
||||
python run_tests.py test_stores
|
||||
"""
|
||||
|
||||
import gettext
|
||||
import logging
|
||||
import os
|
||||
import unittest
|
||||
import sys
|
||||
|
||||
gettext.install('windc', unicode=1)
|
||||
|
||||
from nose import config
|
||||
from nose import result
|
||||
from nose import core
|
||||
|
||||
|
||||
class _AnsiColorizer(object):
|
||||
"""
|
||||
A colorizer is an object that loosely wraps around a stream, allowing
|
||||
callers to write text to the stream in a particular color.
|
||||
|
||||
Colorizer classes must implement C{supported()} and C{write(text, color)}.
|
||||
"""
|
||||
_colors = dict(black=30, red=31, green=32, yellow=33,
|
||||
blue=34, magenta=35, cyan=36, white=37)
|
||||
|
||||
def __init__(self, stream):
|
||||
self.stream = stream
|
||||
|
||||
def supported(cls, stream=sys.stdout):
|
||||
"""
|
||||
A class method that returns True if the current platform supports
|
||||
coloring terminal output using this method. Returns False otherwise.
|
||||
"""
|
||||
if not stream.isatty():
|
||||
return False # auto color only on TTYs
|
||||
try:
|
||||
import curses
|
||||
except ImportError:
|
||||
return False
|
||||
else:
|
||||
try:
|
||||
try:
|
||||
return curses.tigetnum("colors") > 2
|
||||
except curses.error:
|
||||
curses.setupterm()
|
||||
return curses.tigetnum("colors") > 2
|
||||
except:
|
||||
raise
|
||||
# guess false in case of error
|
||||
return False
|
||||
supported = classmethod(supported)
|
||||
|
||||
def write(self, text, color):
|
||||
"""
|
||||
Write the given text to the stream in the given color.
|
||||
|
||||
@param text: Text to be written to the stream.
|
||||
|
||||
@param color: A string label for a color. e.g. 'red', 'white'.
|
||||
"""
|
||||
color = self._colors[color]
|
||||
self.stream.write('\x1b[%s;1m%s\x1b[0m' % (color, text))
|
||||
|
||||
|
||||
class _Win32Colorizer(object):
|
||||
"""
|
||||
See _AnsiColorizer docstring.
|
||||
"""
|
||||
def __init__(self, stream):
|
||||
from win32console import GetStdHandle, STD_OUT_HANDLE, \
|
||||
FOREGROUND_RED, FOREGROUND_BLUE, FOREGROUND_GREEN, \
|
||||
FOREGROUND_INTENSITY
|
||||
red, green, blue, bold = (FOREGROUND_RED, FOREGROUND_GREEN,
|
||||
FOREGROUND_BLUE, FOREGROUND_INTENSITY)
|
||||
self.stream = stream
|
||||
self.screenBuffer = GetStdHandle(STD_OUT_HANDLE)
|
||||
self._colors = {
|
||||
'normal': red | green | blue,
|
||||
'red': red | bold,
|
||||
'green': green | bold,
|
||||
'blue': blue | bold,
|
||||
'yellow': red | green | bold,
|
||||
'magenta': red | blue | bold,
|
||||
'cyan': green | blue | bold,
|
||||
'white': red | green | blue | bold}
|
||||
|
||||
def supported(cls, stream=sys.stdout):
|
||||
try:
|
||||
import win32console
|
||||
screenBuffer = win32console.GetStdHandle(
|
||||
win32console.STD_OUT_HANDLE)
|
||||
except ImportError:
|
||||
return False
|
||||
import pywintypes
|
||||
try:
|
||||
screenBuffer.SetConsoleTextAttribute(
|
||||
win32console.FOREGROUND_RED |
|
||||
win32console.FOREGROUND_GREEN |
|
||||
win32console.FOREGROUND_BLUE)
|
||||
except pywintypes.error:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
supported = classmethod(supported)
|
||||
|
||||
def write(self, text, color):
|
||||
color = self._colors[color]
|
||||
self.screenBuffer.SetConsoleTextAttribute(color)
|
||||
self.stream.write(text)
|
||||
self.screenBuffer.SetConsoleTextAttribute(self._colors['normal'])
|
||||
|
||||
|
||||
class _NullColorizer(object):
|
||||
"""
|
||||
See _AnsiColorizer docstring.
|
||||
"""
|
||||
def __init__(self, stream):
|
||||
self.stream = stream
|
||||
|
||||
def supported(cls, stream=sys.stdout):
|
||||
return True
|
||||
supported = classmethod(supported)
|
||||
|
||||
def write(self, text, color):
|
||||
self.stream.write(text)
|
||||
|
||||
|
||||
class WindcTestResult(result.TextTestResult):
|
||||
def __init__(self, *args, **kw):
|
||||
result.TextTestResult.__init__(self, *args, **kw)
|
||||
self._last_case = None
|
||||
self.colorizer = None
|
||||
# NOTE(vish, tfukushima): reset stdout for the terminal check
|
||||
stdout = sys.stdout
|
||||
sys.stdout = sys.__stdout__
|
||||
for colorizer in [_Win32Colorizer, _AnsiColorizer, _NullColorizer]:
|
||||
if colorizer.supported():
|
||||
self.colorizer = colorizer(self.stream)
|
||||
break
|
||||
sys.stdout = stdout
|
||||
|
||||
def getDescription(self, test):
|
||||
return str(test)
|
||||
|
||||
# NOTE(vish, tfukushima): copied from unittest with edit to add color
|
||||
def addSuccess(self, test):
|
||||
unittest.TestResult.addSuccess(self, test)
|
||||
if self.showAll:
|
||||
self.colorizer.write("OK", 'green')
|
||||
self.stream.writeln()
|
||||
elif self.dots:
|
||||
self.stream.write('.')
|
||||
self.stream.flush()
|
||||
|
||||
# NOTE(vish, tfukushima): copied from unittest with edit to add color
|
||||
def addFailure(self, test, err):
|
||||
unittest.TestResult.addFailure(self, test, err)
|
||||
if self.showAll:
|
||||
self.colorizer.write("FAIL", 'red')
|
||||
self.stream.writeln()
|
||||
elif self.dots:
|
||||
self.stream.write('F')
|
||||
self.stream.flush()
|
||||
|
||||
# NOTE(vish, tfukushima): copied from unittest with edit to add color
|
||||
def addError(self, test, err):
|
||||
"""
|
||||
Overrides normal addError to add support for errorClasses.
|
||||
If the exception is a registered class, the error will be added
|
||||
to the list for that class, not errors.
|
||||
"""
|
||||
stream = getattr(self, 'stream', None)
|
||||
ec, ev, tb = err
|
||||
try:
|
||||
exc_info = self._exc_info_to_string(err, test)
|
||||
except TypeError:
|
||||
# This is for compatibility with Python 2.3.
|
||||
exc_info = self._exc_info_to_string(err)
|
||||
for cls, (storage, label, isfail) in self.errorClasses.items():
|
||||
if result.isclass(ec) and issubclass(ec, cls):
|
||||
if isfail:
|
||||
test.passed = False
|
||||
storage.append((test, exc_info))
|
||||
# Might get patched into a streamless result
|
||||
if stream is not None:
|
||||
if self.showAll:
|
||||
message = [label]
|
||||
detail = result._exception_detail(err[1])
|
||||
if detail:
|
||||
message.append(detail)
|
||||
stream.writeln(": ".join(message))
|
||||
elif self.dots:
|
||||
stream.write(label[:1])
|
||||
return
|
||||
self.errors.append((test, exc_info))
|
||||
test.passed = False
|
||||
if stream is not None:
|
||||
if self.showAll:
|
||||
self.colorizer.write("ERROR", 'red')
|
||||
self.stream.writeln()
|
||||
elif self.dots:
|
||||
stream.write('E')
|
||||
|
||||
def startTest(self, test):
|
||||
unittest.TestResult.startTest(self, test)
|
||||
current_case = test.test.__class__.__name__
|
||||
|
||||
if self.showAll:
|
||||
if current_case != self._last_case:
|
||||
self.stream.writeln(current_case)
|
||||
self._last_case = current_case
|
||||
|
||||
self.stream.write(
|
||||
' %s' % str(test.test._testMethodName).ljust(60))
|
||||
self.stream.flush()
|
||||
|
||||
|
||||
class WindcTestRunner(core.TextTestRunner):
|
||||
def _makeResult(self):
|
||||
return WindcTestResult(self.stream,
|
||||
self.descriptions,
|
||||
self.verbosity,
|
||||
self.config)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
logger = logging.getLogger()
|
||||
hdlr = logging.StreamHandler()
|
||||
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
|
||||
hdlr.setFormatter(formatter)
|
||||
logger.addHandler(hdlr)
|
||||
logger.setLevel(logging.INFO)
|
||||
|
||||
c = config.Config(stream=sys.stdout,
|
||||
env=os.environ,
|
||||
verbosity=3,
|
||||
plugins=core.DefaultPluginManager())
|
||||
|
||||
runner = WindcTestRunner(stream=c.stream,
|
||||
verbosity=c.verbosity,
|
||||
config=c)
|
||||
sys.exit(not core.run(config=c, testRunner=runner))
|
129 windc/run_tests.sh Executable file
@@ -0,0 +1,129 @@
|
||||
#!/bin/bash
|
||||
|
||||
function usage {
|
||||
echo "Usage: $0 [OPTION]..."
|
||||
echo "Run Loadbalancer's test suite(s)"
|
||||
echo ""
|
||||
echo " -V, --virtual-env Always use virtualenv. Install automatically if not present"
|
||||
echo " -N, --no-virtual-env Don't use virtualenv. Run tests in local environment"
|
||||
echo " -f, --force Force a clean re-build of the virtual environment. Useful when dependencies have been added."
|
||||
echo " --unittests-only Run unit tests only, exclude functional tests."
|
||||
echo " -c, --coverage Generate coverage report"
|
||||
echo " -p, --pep8 Just run pep8"
|
||||
echo " -h, --help Print this usage message"
|
||||
echo ""
|
||||
echo "Note: with no options specified, the script will try to run the tests in a virtual environment,"
|
||||
echo " If no virtualenv is found, the script will ask if you would like to create one. If you "
|
||||
echo " prefer to run tests NOT in a virtual environment, simply pass the -N option."
|
||||
exit
|
||||
}
|
||||
|
||||
function process_option {
|
||||
case "$1" in
|
||||
-h|--help) usage;;
|
||||
-V|--virtual-env) let always_venv=1; let never_venv=0;;
|
||||
-N|--no-virtual-env) let always_venv=0; let never_venv=1;;
|
||||
-p|--pep8) let just_pep8=1;;
|
||||
-f|--force) let force=1;;
|
||||
--unittests-only) noseopts="$noseopts --exclude-dir=windc/tests/functional";;
|
||||
-c|--coverage) coverage=1;;
|
||||
-*) noseopts="$noseopts $1";;
|
||||
*) noseargs="$noseargs $1"
|
||||
esac
|
||||
}
|
||||
|
||||
venv=.venv
|
||||
with_venv=tools/with_venv.sh
|
||||
always_venv=0
|
||||
never_venv=0
|
||||
force=0
|
||||
noseargs=
|
||||
noseopts=
|
||||
wrapper=""
|
||||
just_pep8=0
|
||||
coverage=0
|
||||
|
||||
for arg in "$@"; do
|
||||
process_option $arg
|
||||
done
|
||||
|
||||
# If enabled, tell nose to collect coverage data
|
||||
if [ $coverage -eq 1 ]; then
|
||||
noseopts="$noseopts --with-coverage --cover-package=windc --cover-inclusive"
|
||||
fi
|
||||
|
||||
function run_tests {
|
||||
# Just run the test suites in current environment
|
||||
${wrapper} $NOSETESTS 2> run_tests.log
|
||||
}
|
||||
|
||||
function run_pep8 {
|
||||
echo "Running pep8 ..."
|
||||
PEP8_OPTIONS="--exclude=$PEP8_EXCLUDE --repeat"
|
||||
PEP8_INCLUDE="bin/* windc tools setup.py run_tests.py"
|
||||
${wrapper} pep8 $PEP8_OPTIONS $PEP8_INCLUDE
|
||||
PEP_RESULT=$?
|
||||
case "$TERM" in
|
||||
*color* ) function out { printf "\033[3%d;1m%s\033[m\n" "$1" "$2"; } ;;
|
||||
* ) function out { printf "%s\n" "$2"; } ;;
|
||||
esac
|
||||
if [ $PEP_RESULT -eq 0 ]; then
|
||||
out 2 "PEP8 OK"
|
||||
else
|
||||
out 1 "PEP8 FAIL"
|
||||
fi
|
||||
return $PEP_RESULT
|
||||
}
|
||||
|
||||
|
||||
NOSETESTS="python run_tests.py $noseopts $noseargs"
|
||||
|
||||
if [ $never_venv -eq 0 ]
|
||||
then
|
||||
# Remove the virtual environment if --force used
|
||||
if [ $force -eq 1 ]; then
|
||||
echo "Cleaning virtualenv..."
|
||||
rm -rf ${venv}
|
||||
fi
|
||||
if [ -e ${venv} ]; then
|
||||
wrapper="${with_venv}"
|
||||
else
|
||||
if [ $always_venv -eq 1 ]; then
|
||||
# Automatically install the virtualenv
|
||||
python tools/install_venv.py || exit 1
|
||||
wrapper="${with_venv}"
|
||||
else
|
||||
echo -e "No virtual environment found...create one? (Y/n) \c"
|
||||
read use_ve
|
||||
if [ "x$use_ve" = "xY" -o "x$use_ve" = "x" -o "x$use_ve" = "xy" ]; then
|
||||
# Install the virtualenv and run the test suite in it
|
||||
python tools/install_venv.py || exit 1
|
||||
wrapper=${with_venv}
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Delete old coverage data from previous runs
|
||||
if [ $coverage -eq 1 ]; then
|
||||
${wrapper} coverage erase
|
||||
fi
|
||||
|
||||
if [ $just_pep8 -eq 1 ]; then
|
||||
run_pep8
|
||||
exit $?
|
||||
fi
|
||||
|
||||
run_tests
|
||||
TEST_RESULT=$?
|
||||
|
||||
if [ -z "$noseargs" ]; then
|
||||
run_pep8 || exit 1
|
||||
fi
|
||||
|
||||
if [ $coverage -eq 1 ]; then
|
||||
echo "Generating coverage report in covhtml/"
|
||||
${wrapper} coverage html -d covhtml -i --include='windc/*' --omit='windc/db/migrate_repo*,windc/common*,windc/tests*'
|
||||
fi
|
||||
|
||||
exit $TEST_RESULT
|
14 windc/tests/unit/test_base_driver.py Normal file
@@ -0,0 +1,14 @@
import unittest
import mock


from windc.api.v1.router import API


class TestBaseDriver(unittest.TestCase):
    def setUp(self):
        super(TestBaseDriver, self).setUp()
        self.conf = mock.Mock()

    def testAPI(self):
        api = API(None)
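A slightly fuller test sketch (an assumption, not part of the commit: API follows the usual Router pattern and tolerates a mock config object; the imports above are reused):

    class TestApiFactory(unittest.TestCase):
        def test_api_accepts_mock_conf(self):
            conf = mock.Mock()
            api = API(conf)
            self.assertTrue(api is not None)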
154 windc/tools/install_venv.py Normal file
@@ -0,0 +1,154 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2010 United States Government as represented by the
|
||||
# Administrator of the National Aeronautics and Space Administration.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Copyright 2010 OpenStack LLC.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Installation script for Glance's development virtualenv
|
||||
"""
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
|
||||
VENV = os.path.join(ROOT, '.venv')
|
||||
PIP_REQUIRES = os.path.join(ROOT, 'tools', 'pip-requires')
|
||||
TEST_REQUIRES = os.path.join(ROOT, 'tools', 'test-requires')
|
||||
|
||||
|
||||
def die(message, *args):
|
||||
print >> sys.stderr, message % args
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def run_command(cmd, redirect_output=True, check_exit_code=True):
|
||||
"""
|
||||
Runs a command in an out-of-process shell, returning the
|
||||
output of that command. Working directory is ROOT.
|
||||
"""
|
||||
if redirect_output:
|
||||
stdout = subprocess.PIPE
|
||||
else:
|
||||
stdout = None
|
||||
|
||||
proc = subprocess.Popen(cmd, cwd=ROOT, stdout=stdout)
|
||||
output = proc.communicate()[0]
|
||||
if check_exit_code and proc.returncode != 0:
|
||||
die('Command "%s" failed.\n%s', ' '.join(cmd), output)
|
||||
return output
|
||||
|
||||
|
||||
HAS_EASY_INSTALL = bool(run_command(['which', 'easy_install'],
|
||||
check_exit_code=False).strip())
|
||||
HAS_VIRTUALENV = bool(run_command(['which', 'virtualenv'],
|
||||
check_exit_code=False).strip())
|
||||
|
||||
|
||||
def check_dependencies():
|
||||
"""Make sure virtualenv is in the path."""
|
||||
|
||||
if not HAS_VIRTUALENV:
|
||||
print 'not found.'
|
||||
# Try installing it via easy_install...
|
||||
if HAS_EASY_INSTALL:
|
||||
print 'Installing virtualenv via easy_install...',
|
||||
if not run_command(['which', 'easy_install']):
|
||||
die('ERROR: virtualenv not found.\n\n'
|
||||
'Balancer development requires virtualenv, please install'
|
||||
' it using your favorite package management tool')
|
||||
print 'done.'
|
||||
print 'done.'
|
||||
|
||||
|
||||
def create_virtualenv(venv=VENV):
|
||||
"""
|
||||
Creates the virtual environment and installs PIP only into the
|
||||
virtual environment
|
||||
"""
|
||||
print 'Creating venv...',
|
||||
run_command(['virtualenv', '-q', '--no-site-packages', VENV])
|
||||
print 'done.'
|
||||
print 'Installing pip in virtualenv...',
|
||||
if not run_command(['tools/with_venv.sh', 'easy_install',
|
||||
'pip>1.0']).strip():
|
||||
die("Failed to install pip.")
|
||||
print 'done.'
|
||||
|
||||
|
||||
def pip_install(*args):
|
||||
run_command(['tools/with_venv.sh',
|
||||
'pip', 'install', '--upgrade'] + list(args),
|
||||
redirect_output=False)
|
||||
|
||||
|
||||
def install_dependencies(venv=VENV):
|
||||
print 'Installing dependencies with pip (this can take a while)...'
|
||||
|
||||
pip_install('pip')
|
||||
|
||||
pip_install('-r', PIP_REQUIRES)
|
||||
pip_install('-r', TEST_REQUIRES)
|
||||
|
||||
# Tell the virtual env how to "import glance"
|
||||
py_ver = _detect_python_version(venv)
|
||||
pthfile = os.path.join(venv, "lib", py_ver,
|
||||
"site-packages", "balancer.pth")
|
||||
f = open(pthfile, 'w')
|
||||
f.write("%s\n" % ROOT)
|
||||
|
||||
|
||||
def _detect_python_version(venv):
|
||||
lib_dir = os.path.join(venv, "lib")
|
||||
for pathname in os.listdir(lib_dir):
|
||||
if pathname.startswith('python'):
|
||||
return pathname
|
||||
raise Exception('Unable to detect Python version')
|
||||
|
||||
|
||||
def print_help():
|
||||
help = """
|
||||
Glance development environment setup is complete.
|
||||
|
||||
Glance development uses virtualenv to track and manage Python dependencies
|
||||
while in development and testing.
|
||||
|
||||
To activate the Glance virtualenv for the extent of your current shell session
|
||||
you can run:
|
||||
|
||||
$ source .venv/bin/activate
|
||||
|
||||
Or, if you prefer, you can run commands in the virtualenv on a case by case
|
||||
basis by running:
|
||||
|
||||
$ tools/with_venv.sh <your command>
|
||||
|
||||
Also, make test will automatically use the virtualenv.
|
||||
"""
|
||||
print help
|
||||
|
||||
|
||||
def main(argv):
|
||||
check_dependencies()
|
||||
create_virtualenv()
|
||||
install_dependencies()
|
||||
print_help()
|
||||
|
||||
if __name__ == '__main__':
|
||||
main(sys.argv)
|
22 windc/tools/pip-requires Normal file
@@ -0,0 +1,22 @@
# The greenlet package must be compiled with gcc and needs
# the Python.h headers. Make sure you install the python-dev
# package to get the right headers...
greenlet>=0.3.1

SQLAlchemy>=0.7
anyjson
eventlet>=0.9.12
PasteDeploy
Routes
webob==1.0.8
wsgiref
argparse
sqlalchemy-migrate>=0.7.2
httplib2
kombu
iso8601>=0.1.4

# For paste.util.template used in keystone.common.template
Paste

passlib
16 windc/tools/test-requires Normal file
@@ -0,0 +1,16 @@
# Packages needed for dev testing
# For translations processing
Babel

# Needed for testing
unittest2
mock==0.8.0
nose
nose-exclude
nosexcover
#openstack.nose_plugin
pep8==1.0.1
sphinx>=1.1.2
paramiko
ipaddr
pysqlite
4 windc/tools/with_venv.sh Executable file
@@ -0,0 +1,4 @@
#!/bin/bash
TOOLS=`dirname $0`
VENV=$TOOLS/../.venv
source $VENV/bin/activate && $@
81 windc/windc/api/v1/datacenters.py Normal file
@@ -0,0 +1,81 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2011 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import logging
|
||||
|
||||
from openstack.common import wsgi
|
||||
|
||||
from windc import utils
|
||||
from windc.core import api as core_api
|
||||
from windc.db import api as db_api
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Controller(object):
|
||||
def __init__(self, conf):
|
||||
LOG.debug("Creating data centers controller with config:"
|
||||
"datacenters.py %s", conf)
|
||||
self.conf = conf
|
||||
|
||||
@utils.verify_tenant
|
||||
def findLBforVM(self, req, tenant_id, vm_id):
|
||||
LOG.debug("Got index request. Request: %s", req)
|
||||
result = core_api.lb_find_for_vm(self.conf, tenant_id, vm_id)
|
||||
return {'loadbalancers': result}
|
||||
|
||||
@utils.verify_tenant
|
||||
def index(self, req, tenant_id):
|
||||
LOG.debug("Got index request. Request: %s", req)
|
||||
result = core_api.dc_get_index(self.conf, tenant_id)
|
||||
return {'datacenters': result}
|
||||
|
||||
@utils.http_success_code(202)
|
||||
@utils.verify_tenant
|
||||
def create(self, req, tenant_id, body):
|
||||
LOG.debug("Got create request. Request: %s", req)
|
||||
#here we need to decide which device should be used
|
||||
params = body.copy()
|
||||
LOG.debug("Headers: %s", req.headers)
|
||||
# We need to create DataCenter object and return its id
|
||||
params['tenant_id'] = tenant_id
|
||||
dc_id = core_api.create_dc(self.conf, params)
|
||||
return {'datacenter': {'id': dc_id}}
|
||||
|
||||
@utils.verify_tenant
|
||||
def delete(self, req, tenant_id, datacenter_id):
|
||||
LOG.debug("Got delete request. Request: %s", req)
|
||||
core_api.delete_dc(self.conf, tenant_id, datacenter_id)
|
||||
|
||||
@utils.verify_tenant
|
||||
def show(self, req, tenant_id, datacenter_id):
|
||||
LOG.debug("Got datacenter info request. Request: %s", req)
|
||||
result = core_api.dc_get_data(self.conf, tenant_id, datacenter_id)
|
||||
return {'datacenter': result}
|
||||
|
||||
@utils.verify_tenant
|
||||
def update(self, req, tenant_id, datacenter_id, body):
|
||||
LOG.debug("Got update request. Request: %s", req)
|
||||
core_api.update_dc(self.conf, tenant_id, datacenter_id, body)
|
||||
return {'datacenter': {'id': datacenter_id}}
|
||||
|
||||
|
||||
def create_resource(conf):
|
||||
"""Datacenters resource factory method"""
|
||||
deserializer = wsgi.JSONRequestDeserializer()
|
||||
serializer = wsgi.JSONResponseSerializer()
|
||||
return wsgi.Resource(Controller(conf), deserializer, serializer)
|
42
windc/windc/api/v1/filters.py
Normal file
@ -0,0 +1,42 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2012, Piston Cloud Computing, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
def validate(filter, value):
|
||||
return FILTER_FUNCTIONS.get(filter, lambda v: True)(value)
|
||||
|
||||
|
||||
def validate_int_in_range(min=0, max=None):
|
||||
def _validator(v):
|
||||
try:
|
||||
if max is None:
|
||||
return min <= int(v)
|
||||
return min <= int(v) <= max
|
||||
except ValueError:
|
||||
return False
|
||||
return _validator
|
||||
|
||||
|
||||
def validate_boolean(v):
|
||||
return v.lower() in ('none', 'true', 'false', '1', '0')
|
||||
|
||||
|
||||
FILTER_FUNCTIONS = {'size_max': validate_int_in_range(), # build validator
|
||||
'size_min': validate_int_in_range(), # build validator
|
||||
'min_ram': validate_int_in_range(), # build validator
|
||||
'protected': validate_boolean,
|
||||
'is_public': validate_boolean, }
|
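A quick usage sketch for these validators (illustrative only, not part of the change set): validate() applies the matching filter function and deliberately accepts values for filter names it does not know about.

from windc.api.v1 import filters

# Integer range filters parse the value and check the lower bound.
filters.validate('size_max', '100')    # True
filters.validate('size_max', 'abc')    # False, not an integer

# Boolean filters accept only a small set of literals.
filters.validate('protected', 'True')  # True
filters.validate('protected', 'yes')   # False

# Unknown filter names fall back to the permissive default.
filters.validate('owner', 'anything')  # True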
56
windc/windc/api/v1/router.py
Normal file
@ -0,0 +1,56 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
# Copyright 2011 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import logging
|
||||
|
||||
import routes
|
||||
|
||||
from windc.api.v1 import datacenters
|
||||
from windc.api.v1 import services
|
||||
|
||||
#from . import tasks
|
||||
|
||||
|
||||
from openstack.common import wsgi
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class API(wsgi.Router):
|
||||
|
||||
"""WSGI router for balancer v1 API requests."""
|
||||
|
||||
def __init__(self, conf, **local_conf):
|
||||
self.conf = conf
|
||||
mapper = routes.Mapper()
|
||||
tenant_mapper = mapper.submapper(path_prefix="/{tenant_id}")
|
||||
datacenter_resource = datacenters.create_resource(self.conf)
|
||||
datacenter_collection = tenant_mapper.collection(
|
||||
"datacenters", "datacenter",
|
||||
controller=datacenter_resource, member_prefix="/{datacenter_id}",
|
||||
formatted=False)
|
||||
service_resource = services.create_resource(self.conf)
|
||||
service_collection = datacenter_collection.member.collection('services', 'service',
|
||||
controller=service_resource, member_prefix="/{service_id}",
|
||||
formatted=False)
|
||||
service_collection.member.connect("/{status}", action="changeServiceStatus",
|
||||
conditions={'method': ["PUT"]})
|
||||
mapper.connect("/servicetypes",
|
||||
controller=datacenter_resource,
|
||||
action="show_servicetypes",
|
||||
conditions={'method': ["GET"]})
|
||||
super(API, self).__init__(mapper)
|
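As a rough sketch of the URL space the mapper above produces (assuming the standard routes.Mapper.match API and that openstack.common.wsgi.Router keeps the mapper on self.map; not part of the diff):

from windc.api.v1.router import API

api = API(conf={})   # conf contents are irrelevant for route matching
mapper = api.map     # assumption: wsgi.Router stores the routes mapper here

# Member route for a single service nested under a datacenter.
print mapper.match('/tenant1/datacenters/dc1/services/srv1',
                   environ={'REQUEST_METHOD': 'GET'})
# -> action 'show' with tenant_id, datacenter_id and service_id extracted

# Extra route connected on the service member for status changes.
print mapper.match('/tenant1/datacenters/dc1/services/srv1/START',
                   environ={'REQUEST_METHOD': 'PUT'})
# -> action 'changeServiceStatus' with status='START'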
86
windc/windc/api/v1/services.py
Normal file
@ -0,0 +1,86 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2011 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import logging
|
||||
|
||||
from openstack.common import wsgi
|
||||
|
||||
from windc import utils
|
||||
from windc.core import api as core_api
|
||||
from windc.db import api as db_api
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Controller(object):
|
||||
def __init__(self, conf):
|
||||
LOG.debug("Creating services controller with config:"
|
||||
"services.py %s", conf)
|
||||
self.conf = conf
|
||||
|
||||
@utils.verify_tenant
|
||||
def findLBforVM(self, req, tenant_id, vm_id):
|
||||
LOG.debug("Got index request. Request: %s", req)
|
||||
result = core_api.lb_find_for_vm(self.conf, tenant_id, vm_id)
|
||||
return {'loadbalancers': result}
|
||||
|
||||
@utils.verify_tenant
|
||||
def index(self, req, tenant_id, datacenter_id):
|
||||
LOG.debug("Got index request. Request: %s", req)
|
||||
result = core_api.service_get_index(self.conf, tenant_id, datacenter_id)
|
||||
return {'services': result}
|
||||
|
||||
@utils.http_success_code(202)
|
||||
@utils.verify_tenant
|
||||
def create(self, req, tenant_id, datacenter_id, body):
|
||||
LOG.debug("Got create request. Request: %s", req)
|
||||
#here we need to decide which device should be used
|
||||
params = body.copy()
|
||||
LOG.debug("Headers: %s", req.headers)
|
||||
# We need to create Service object and return its id
|
||||
params['tenant_id'] = tenant_id
|
||||
service_id = core_api.create_service(self.conf, params)
|
||||
return {'service': {'id': service_id}}
|
||||
|
||||
@utils.http_success_code(204)
|
||||
@utils.verify_tenant
|
||||
def delete(self, req, tenant_id, datacenter_id, service_id):
|
||||
LOG.debug("Got delete request. Request: %s", req)
|
||||
core_api.delete_service(self.conf, tenant_id, datacenter_id, service_id)
|
||||
|
||||
@utils.verify_tenant
|
||||
def show(self, req, tenant_id, datacenter_id, service_id):
|
||||
LOG.debug("Got loadbalancerr info request. Request: %s", req)
|
||||
result = core_api.service_get_data(self.conf, tenant_id, datacenter_id, service_id)
|
||||
return {'service': result}
|
||||
|
||||
@utils.http_success_code(202)
|
||||
@utils.verify_tenant
|
||||
def update(self, req, tenant_id, datacenter_id, service_id, body):
|
||||
LOG.debug("Got update request. Request: %s", req)
|
||||
core_api.update_service(self.conf, tenant_id, datacenter_id, service_id, body)
|
||||
return {'service': {'id': service_id}}
|
||||
|
||||
|
||||
def create_resource(conf):
|
||||
"""Services resource factory method"""
|
||||
deserializer = wsgi.JSONRequestDeserializer()
|
||||
serializer = wsgi.JSONResponseSerializer()
|
||||
return wsgi.Resource(Controller(conf), deserializer, serializer)
|
||||
|
||||
|
||||
|
66
windc/windc/api/versions.py
Normal file
@ -0,0 +1,66 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2011 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Controller that returns information on the Glance API versions
|
||||
"""
|
||||
|
||||
import httplib
|
||||
import json
|
||||
|
||||
import webob.dec
|
||||
|
||||
|
||||
class Controller(object):
|
||||
|
||||
"""
|
||||
A controller that produces information on the Glance API versions.
|
||||
"""
|
||||
|
||||
def __init__(self, conf):
|
||||
self.conf = conf
|
||||
|
||||
@webob.dec.wsgify
|
||||
def __call__(self, req):
|
||||
"""Respond to a request for all OpenStack API versions."""
|
||||
version_objs = [
|
||||
{
|
||||
"id": "v1.0",
|
||||
"status": "CURRENT",
|
||||
"links": [
|
||||
{
|
||||
"rel": "self",
|
||||
"href": self.get_href(req)}]},
|
||||
{
|
||||
"id": "v1.1",
|
||||
"status": "SUPPORTED",
|
||||
"links": [
|
||||
{
|
||||
"rel": "self",
|
||||
"href": self.get_href(req)}]}]
|
||||
|
||||
body = json.dumps(dict(versions=version_objs))
|
||||
|
||||
response = webob.Response(request=req,
|
||||
status=httplib.MULTIPLE_CHOICES,
|
||||
content_type='application/json')
|
||||
response.body = body
|
||||
|
||||
return response
|
||||
|
||||
def get_href(self, req):
|
||||
return "%s/v1/" % req.host_url
|
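For reference, a request against this controller is answered with HTTP 300 (Multiple Choices) and a JSON body of roughly the following shape (host shown as a placeholder):

{
    "versions": [
        {"id": "v1.0", "status": "CURRENT",
         "links": [{"rel": "self", "href": "http://<host>/v1/"}]},
        {"id": "v1.1", "status": "SUPPORTED",
         "links": [{"rel": "self", "href": "http://<host>/v1/"}]}
    ]
}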
16
windc/windc/common/__init__.py
Normal file
@ -0,0 +1,16 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2010-2011 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
1135
windc/windc/common/cfg.py
Normal file
File diff suppressed because it is too large
605
windc/windc/common/client.py
Normal file
@ -0,0 +1,605 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2010-2011 OpenStack, LLC
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
# HTTPSClientAuthConnection code comes courtesy of ActiveState website:
|
||||
# http://code.activestate.com/recipes/
|
||||
# 577548-https-httplib-client-connection-with-certificate-v/
|
||||
|
||||
import collections
|
||||
import errno
|
||||
import functools
|
||||
import httplib
|
||||
import os
|
||||
import select
|
||||
import urllib
|
||||
import urlparse
|
||||
|
||||
try:
|
||||
from eventlet.green import socket, ssl
|
||||
except ImportError:
|
||||
import socket
|
||||
import ssl
|
||||
|
||||
try:
|
||||
import sendfile
|
||||
SENDFILE_SUPPORTED = True
|
||||
except ImportError:
|
||||
SENDFILE_SUPPORTED = False
|
||||
|
||||
#from glance.common import auth
|
||||
#from glance.common import exception, utils
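# NOTE: auth, exception and utils are referenced further down (for example in
# handle_unauthorized, handle_redirects, make_auth_plugin and image_iterator),
# but their imports are commented out in this revision, so BaseClient is not
# usable until windc-local replacements for these glance.common modules exist.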
|
||||
|
||||
|
||||
# common chunk size for get and put
|
||||
CHUNKSIZE = 65536
|
||||
|
||||
|
||||
def handle_unauthorized(func):
|
||||
"""
|
||||
Wrap a function to re-authenticate and retry.
|
||||
"""
|
||||
@functools.wraps(func)
|
||||
def wrapped(self, *args, **kwargs):
|
||||
try:
|
||||
return func(self, *args, **kwargs)
|
||||
except exception.NotAuthorized:
|
||||
self._authenticate(force_reauth=True)
|
||||
return func(self, *args, **kwargs)
|
||||
return wrapped
|
||||
|
||||
|
||||
def handle_redirects(func):
|
||||
"""
|
||||
Wrap the _do_request function to handle HTTP redirects.
|
||||
"""
|
||||
MAX_REDIRECTS = 5
|
||||
|
||||
@functools.wraps(func)
|
||||
def wrapped(self, method, url, body, headers):
|
||||
for _ in xrange(MAX_REDIRECTS):
|
||||
try:
|
||||
return func(self, method, url, body, headers)
|
||||
except exception.RedirectException as redirect:
|
||||
if redirect.url is None:
|
||||
raise exception.InvalidRedirect()
|
||||
url = redirect.url
|
||||
raise exception.MaxRedirectsExceeded(redirects=MAX_REDIRECTS)
|
||||
return wrapped
|
||||
|
||||
|
||||
class ImageBodyIterator(object):
|
||||
|
||||
"""
|
||||
A class that acts as an iterator over an image file's
|
||||
chunks of data. This is returned as part of the result
|
||||
tuple from `glance.client.Client.get_image`
|
||||
"""
|
||||
|
||||
def __init__(self, source):
|
||||
"""
|
||||
Constructs the object from a readable image source
|
||||
(such as an HTTPResponse or file-like object)
|
||||
"""
|
||||
self.source = source
|
||||
|
||||
def __iter__(self):
|
||||
"""
|
||||
Exposes an iterator over the chunks of data in the
|
||||
image file.
|
||||
"""
|
||||
while True:
|
||||
chunk = self.source.read(CHUNKSIZE)
|
||||
if chunk:
|
||||
yield chunk
|
||||
else:
|
||||
break
|
||||
|
||||
|
||||
class SendFileIterator:
|
||||
"""
|
||||
Emulate iterator pattern over sendfile, in order to allow
|
||||
send progress to be followed by wrapping the iteration.
|
||||
"""
|
||||
def __init__(self, connection, body):
|
||||
self.connection = connection
|
||||
self.body = body
|
||||
self.offset = 0
|
||||
self.sending = True
|
||||
|
||||
def __iter__(self):
|
||||
class OfLength:
|
||||
def __init__(self, len):
|
||||
self.len = len
|
||||
|
||||
def __len__(self):
|
||||
return self.len
|
||||
|
||||
while self.sending:
|
||||
try:
|
||||
sent = sendfile.sendfile(self.connection.sock.fileno(),
|
||||
self.body.fileno(),
|
||||
self.offset,
|
||||
CHUNKSIZE)
|
||||
except OSError as e:
|
||||
# surprisingly, sendfile may fail transiently instead of
|
||||
# blocking, in which case we select on the socket in order
|
||||
# to wait on its return to a writeable state before resuming
|
||||
# the send loop
|
||||
if e.errno in (errno.EAGAIN, errno.EBUSY):
|
||||
wlist = [self.connection.sock.fileno()]
|
||||
rfds, wfds, efds = select.select([], wlist, [])
|
||||
if wfds:
|
||||
continue
|
||||
raise
|
||||
|
||||
self.sending = (sent != 0)
|
||||
self.offset += sent
|
||||
yield OfLength(sent)
|
||||
|
||||
|
||||
class HTTPSClientAuthConnection(httplib.HTTPSConnection):
|
||||
"""
|
||||
Class to make a HTTPS connection, with support for
|
||||
full client-based SSL Authentication
|
||||
|
||||
:see http://code.activestate.com/recipes/
|
||||
577548-https-httplib-client-connection-with-certificate-v/
|
||||
"""
|
||||
|
||||
def __init__(self, host, port, key_file, cert_file,
|
||||
ca_file, timeout=None, insecure=False):
|
||||
httplib.HTTPSConnection.__init__(self, host, port, key_file=key_file,
|
||||
cert_file=cert_file)
|
||||
self.key_file = key_file
|
||||
self.cert_file = cert_file
|
||||
self.ca_file = ca_file
|
||||
self.timeout = timeout
|
||||
self.insecure = insecure
|
||||
|
||||
def connect(self):
|
||||
"""
|
||||
Connect to a host on a given (SSL) port.
|
||||
If ca_file is pointing somewhere, use it to check Server Certificate.
|
||||
|
||||
Redefined/copied and extended from httplib.py:1105 (Python 2.6.x).
|
||||
This is needed to pass cert_reqs=ssl.CERT_REQUIRED as parameter to
|
||||
ssl.wrap_socket(), which forces SSL to check server certificate against
|
||||
our client certificate.
|
||||
"""
|
||||
sock = socket.create_connection((self.host, self.port), self.timeout)
|
||||
if self._tunnel_host:
|
||||
self.sock = sock
|
||||
self._tunnel()
|
||||
# Check CA file unless 'insecure' is specified
|
||||
if self.insecure is True:
|
||||
self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
|
||||
cert_reqs=ssl.CERT_NONE)
|
||||
else:
|
||||
self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
|
||||
ca_certs=self.ca_file,
|
||||
cert_reqs=ssl.CERT_REQUIRED)
|
||||
|
||||
|
||||
class BaseClient(object):
|
||||
|
||||
"""A base client class"""
|
||||
|
||||
DEFAULT_PORT = 80
|
||||
DEFAULT_DOC_ROOT = None
|
||||
# Standard CA file locations for Debian/Ubuntu, RedHat/Fedora,
|
||||
# Suse, FreeBSD/OpenBSD
|
||||
DEFAULT_CA_FILE_PATH = '/etc/ssl/certs/ca-certificates.crt:'\
|
||||
'/etc/pki/tls/certs/ca-bundle.crt:'\
|
||||
'/etc/ssl/ca-bundle.pem:'\
|
||||
'/etc/ssl/cert.pem'
|
||||
|
||||
OK_RESPONSE_CODES = (
|
||||
httplib.OK,
|
||||
httplib.CREATED,
|
||||
httplib.ACCEPTED,
|
||||
httplib.NO_CONTENT,
|
||||
)
|
||||
|
||||
REDIRECT_RESPONSE_CODES = (
|
||||
httplib.MOVED_PERMANENTLY,
|
||||
httplib.FOUND,
|
||||
httplib.SEE_OTHER,
|
||||
httplib.USE_PROXY,
|
||||
httplib.TEMPORARY_REDIRECT,
|
||||
)
|
||||
|
||||
def __init__(self, host, port=None, use_ssl=False, auth_tok=None,
|
||||
creds=None, doc_root=None, key_file=None,
|
||||
cert_file=None, ca_file=None, insecure=False,
|
||||
configure_via_auth=True):
|
||||
"""
|
||||
Creates a new client to some service.
|
||||
|
||||
:param host: The host where service resides
|
||||
:param port: The port where service resides
|
||||
:param use_ssl: Should we use HTTPS?
|
||||
:param auth_tok: The auth token to pass to the server
|
||||
:param creds: The credentials to pass to the auth plugin
|
||||
:param doc_root: Prefix for all URLs we request from host
|
||||
:param key_file: Optional PEM-formatted file that contains the private
|
||||
key.
|
||||
If use_ssl is True, and this param is None (the
|
||||
default), then an environ variable
|
||||
BALANCER_CLIENT_KEY_FILE is looked for. If no such
|
||||
environ variable is found, ClientConnectionError
|
||||
will be raised.
|
||||
:param cert_file: Optional PEM-formatted certificate chain file.
|
||||
If use_ssl is True, and this param is None (the
|
||||
default), then an environ variable
|
||||
BALANCER_CLIENT_CERT_FILE is looked for. If no such
|
||||
environ variable is found, ClientConnectionError
|
||||
will be raised.
|
||||
:param ca_file: Optional CA cert file to use in SSL connections
|
||||
If use_ssl is True, and this param is None (the
|
||||
default), then an environ variable
|
||||
BALANCER_CLIENT_CA_FILE is looked for.
|
||||
:param insecure: Optional. If set then the server's certificate
|
||||
will not be verified.
|
||||
"""
|
||||
self.host = host
|
||||
self.port = port or self.DEFAULT_PORT
|
||||
self.use_ssl = use_ssl
|
||||
self.auth_tok = auth_tok
|
||||
self.creds = creds or {}
|
||||
self.connection = None
|
||||
self.configure_via_auth = configure_via_auth
|
||||
# doc_root can be a nullstring, which is valid, and why we
|
||||
# cannot simply do doc_root or self.DEFAULT_DOC_ROOT below.
|
||||
self.doc_root = (doc_root if doc_root is not None
|
||||
else self.DEFAULT_DOC_ROOT)
|
||||
self.auth_plugin = self.make_auth_plugin(self.creds)
|
||||
|
||||
self.key_file = key_file
|
||||
self.cert_file = cert_file
|
||||
self.ca_file = ca_file
|
||||
self.insecure = insecure
|
||||
self.connect_kwargs = self.get_connect_kwargs()
|
||||
|
||||
def get_connect_kwargs(self):
|
||||
connect_kwargs = {}
|
||||
if self.use_ssl:
|
||||
if self.key_file is None:
|
||||
self.key_file = os.environ.get('BALANCER_CLIENT_KEY_FILE')
|
||||
if self.cert_file is None:
|
||||
self.cert_file = os.environ.get('BALANCER_CLIENT_CERT_FILE')
|
||||
if self.ca_file is None:
|
||||
self.ca_file = os.environ.get('BALANCER_CLIENT_CA_FILE')
|
||||
|
||||
# Check that key_file/cert_file are either both set or both unset
|
||||
if self.cert_file is not None and self.key_file is None:
|
||||
msg = _("You have selected to use SSL in connecting, "
|
||||
"and you have supplied a cert, "
|
||||
"however you have failed to supply either a "
|
||||
"key_file parameter or set the "
|
||||
"BALANCER_CLIENT_KEY_FILE environ variable")
|
||||
raise exception.ClientConnectionError(msg)
|
||||
|
||||
if self.key_file is not None and self.cert_file is None:
|
||||
msg = _("You have selected to use SSL in connecting, "
|
||||
"and you have supplied a key, "
|
||||
"however you have failed to supply either a "
|
||||
"cert_file parameter or set the "
|
||||
"BALANCER_CLIENT_CERT_FILE environ variable")
|
||||
raise exception.ClientConnectionError(msg)
|
||||
|
||||
if (self.key_file is not None and
|
||||
not os.path.exists(self.key_file)):
|
||||
msg = _("The key file you specified %s does not "
|
||||
"exist") % self.key_file
|
||||
raise exception.ClientConnectionError(msg)
|
||||
connect_kwargs['key_file'] = self.key_file
|
||||
|
||||
if (self.cert_file is not None and
|
||||
not os.path.exists(self.cert_file)):
|
||||
msg = _("The cert file you specified %s does not "
|
||||
"exist") % self.cert_file
|
||||
raise exception.ClientConnectionError(msg)
|
||||
connect_kwargs['cert_file'] = self.cert_file
|
||||
|
||||
if (self.ca_file is not None and
|
||||
not os.path.exists(self.ca_file)):
|
||||
msg = _("The CA file you specified %s does not "
|
||||
"exist") % self.ca_file
|
||||
raise exception.ClientConnectionError(msg)
|
||||
|
||||
if self.ca_file is None:
|
||||
for ca in self.DEFAULT_CA_FILE_PATH.split(":"):
|
||||
if os.path.exists(ca):
|
||||
self.ca_file = ca
|
||||
break
|
||||
|
||||
connect_kwargs['ca_file'] = self.ca_file
|
||||
connect_kwargs['insecure'] = self.insecure
|
||||
|
||||
return connect_kwargs
|
||||
|
||||
def set_auth_token(self, auth_tok):
|
||||
"""
|
||||
Updates the authentication token for this client connection.
|
||||
"""
|
||||
# FIXME(sirp): Nova image/glance.py currently calls this. Since this
|
||||
# method isn't really doing anything useful[1], we should go ahead and
|
||||
# rip it out, first in Nova, then here. Steps:
|
||||
#
|
||||
# 1. Change auth_tok in Glance to auth_token
|
||||
# 2. Change image/glance.py in Nova to use client.auth_token
|
||||
# 3. Remove this method
|
||||
#
|
||||
# [1] http://mail.python.org/pipermail/tutor/2003-October/025932.html
|
||||
self.auth_tok = auth_tok
|
||||
|
||||
def configure_from_url(self, url):
|
||||
"""
|
||||
Sets up the connection based on the given url.
|
||||
|
||||
The form is:
|
||||
|
||||
<http|https>://<host>:port/doc_root
|
||||
"""
|
||||
parsed = urlparse.urlparse(url)
|
||||
self.use_ssl = parsed.scheme == 'https'
|
||||
self.host = parsed.hostname
|
||||
self.port = parsed.port or 80
|
||||
self.doc_root = parsed.path
|
||||
|
||||
# ensure connection kwargs are re-evaluated after the service catalog
|
||||
# publicURL is parsed for potential SSL usage
|
||||
self.connect_kwargs = self.get_connect_kwargs()
|
||||
|
||||
def make_auth_plugin(self, creds):
|
||||
"""
|
||||
Returns an instantiated authentication plugin.
|
||||
"""
|
||||
strategy = creds.get('strategy', 'noauth')
|
||||
plugin = auth.get_plugin_from_strategy(strategy, creds)
|
||||
return plugin
|
||||
|
||||
def get_connection_type(self):
|
||||
"""
|
||||
Returns the proper connection type
|
||||
"""
|
||||
if self.use_ssl:
|
||||
return HTTPSClientAuthConnection
|
||||
else:
|
||||
return httplib.HTTPConnection
|
||||
|
||||
def _authenticate(self, force_reauth=False):
|
||||
"""
|
||||
Use the authentication plugin to authenticate and set the auth token.
|
||||
|
||||
:param force_reauth: For re-authentication to bypass cache.
|
||||
"""
|
||||
auth_plugin = self.auth_plugin
|
||||
|
||||
if not auth_plugin.is_authenticated or force_reauth:
|
||||
auth_plugin.authenticate()
|
||||
|
||||
self.auth_tok = auth_plugin.auth_token
|
||||
|
||||
management_url = auth_plugin.management_url
|
||||
if management_url and self.configure_via_auth:
|
||||
self.configure_from_url(management_url)
|
||||
|
||||
@handle_unauthorized
|
||||
def do_request(self, method, action, body=None, headers=None,
|
||||
params=None):
|
||||
"""
|
||||
Make a request, returning an HTTP response object.
|
||||
|
||||
:param method: HTTP verb (GET, POST, PUT, etc.)
|
||||
:param action: Requested path to append to self.doc_root
|
||||
:param body: Data to send in the body of the request
|
||||
:param headers: Headers to send with the request
|
||||
:param params: Key/value pairs to use in query string
|
||||
:returns: HTTP response object
|
||||
"""
|
||||
if not self.auth_tok:
|
||||
self._authenticate()
|
||||
|
||||
url = self._construct_url(action, params)
|
||||
return self._do_request(method=method, url=url, body=body,
|
||||
headers=headers)
|
||||
|
||||
def _construct_url(self, action, params=None):
|
||||
"""
|
||||
Create a URL object we can use to pass to _do_request().
|
||||
"""
|
||||
path = '/'.join([self.doc_root or '', action.lstrip('/')])
|
||||
scheme = "https" if self.use_ssl else "http"
|
||||
netloc = "%s:%d" % (self.host, self.port)
|
||||
|
||||
if isinstance(params, dict):
|
||||
for (key, value) in params.items():
|
||||
if value is None:
|
||||
del params[key]
|
||||
query = urllib.urlencode(params)
|
||||
else:
|
||||
query = None
|
||||
|
||||
return urlparse.ParseResult(scheme, netloc, path, '', query, '')
|
||||
|
||||
@handle_redirects
|
||||
def _do_request(self, method, url, body, headers):
|
||||
"""
|
||||
Connects to the server and issues a request. Handles converting
|
||||
any returned HTTP error status codes to OpenStack/Glance exceptions
|
||||
and closing the server connection. Returns the result data, or
|
||||
raises an appropriate exception.
|
||||
|
||||
:param method: HTTP method ("GET", "POST", "PUT", etc...)
|
||||
:param url: urlparse.ParseResult object with URL information
|
||||
:param body: data to send (as string, filelike or iterable),
|
||||
or None (default)
|
||||
:param headers: mapping of key/value pairs to add as headers
|
||||
|
||||
:note
|
||||
|
||||
If the body param has a read attribute, and method is either
|
||||
POST or PUT, this method will automatically conduct a chunked-transfer
|
||||
encoding and use the body as a file object or iterable, transferring
|
||||
chunks of data using the connection's send() method. This allows large
|
||||
objects to be transferred efficiently without buffering the entire
|
||||
body in memory.
|
||||
"""
|
||||
if url.query:
|
||||
path = url.path + "?" + url.query
|
||||
else:
|
||||
path = url.path
|
||||
|
||||
try:
|
||||
connection_type = self.get_connection_type()
|
||||
headers = headers or {}
|
||||
|
||||
if 'x-auth-token' not in headers and self.auth_tok:
|
||||
headers['x-auth-token'] = self.auth_tok
|
||||
|
||||
c = connection_type(url.hostname, url.port, **self.connect_kwargs)
|
||||
|
||||
def _pushing(method):
|
||||
return method.lower() in ('post', 'put')
|
||||
|
||||
def _simple(body):
|
||||
return body is None or isinstance(body, basestring)
|
||||
|
||||
def _filelike(body):
|
||||
return hasattr(body, 'read')
|
||||
|
||||
def _sendbody(connection, iter):
|
||||
connection.endheaders()
|
||||
for sent in iter:
|
||||
# iterator has done the heavy lifting
|
||||
pass
|
||||
|
||||
def _chunkbody(connection, iter):
|
||||
connection.putheader('Transfer-Encoding', 'chunked')
|
||||
connection.endheaders()
|
||||
for chunk in iter:
|
||||
connection.send('%x\r\n%s\r\n' % (len(chunk), chunk))
|
||||
connection.send('0\r\n\r\n')
|
||||
|
||||
# Do a simple request or a chunked request, depending
|
||||
# on whether the body param is file-like or iterable and
|
||||
# the method is PUT or POST
|
||||
#
|
||||
if not _pushing(method) or _simple(body):
|
||||
# Simple request...
|
||||
c.request(method, path, body, headers)
|
||||
elif _filelike(body) or self._iterable(body):
|
||||
c.putrequest(method, path)
|
||||
|
||||
for header, value in headers.items():
|
||||
c.putheader(header, value)
|
||||
|
||||
iter = self.image_iterator(c, headers, body)
|
||||
|
||||
if self._sendable(body):
|
||||
# send actual file without copying into userspace
|
||||
_sendbody(c, iter)
|
||||
else:
|
||||
# otherwise iterate and chunk
|
||||
_chunkbody(c, iter)
|
||||
else:
|
||||
raise TypeError('Unsupported image type: %s' % body.__class__)
|
||||
|
||||
res = c.getresponse()
|
||||
status_code = self.get_status_code(res)
|
||||
if status_code in self.OK_RESPONSE_CODES:
|
||||
return res
|
||||
elif status_code in self.REDIRECT_RESPONSE_CODES:
|
||||
raise exception.RedirectException(res.getheader('Location'))
|
||||
elif status_code == httplib.UNAUTHORIZED:
|
||||
raise exception.NotAuthorized(res.read())
|
||||
elif status_code == httplib.FORBIDDEN:
|
||||
raise exception.NotAuthorized(res.read())
|
||||
elif status_code == httplib.NOT_FOUND:
|
||||
raise exception.NotFound(res.read())
|
||||
elif status_code == httplib.CONFLICT:
|
||||
raise exception.Duplicate(res.read())
|
||||
elif status_code == httplib.BAD_REQUEST:
|
||||
raise exception.Invalid(res.read())
|
||||
elif status_code == httplib.MULTIPLE_CHOICES:
|
||||
raise exception.MultipleChoices(body=res.read())
|
||||
elif status_code == httplib.INTERNAL_SERVER_ERROR:
|
||||
raise Exception("Internal Server error: %s" % res.read())
|
||||
else:
|
||||
raise Exception("Unknown error occurred! %s" % res.read())
|
||||
|
||||
except (socket.error, IOError), e:
|
||||
raise exception.ClientConnectionError(e)
|
||||
|
||||
def _seekable(self, body):
|
||||
# pipes are not seekable, avoids sendfile() failure on e.g.
|
||||
# cat /path/to/image | glance add ...
|
||||
# or where add command is launched via popen
|
||||
try:
|
||||
os.lseek(body.fileno(), 0, os.SEEK_SET)
|
||||
return True
|
||||
except OSError as e:
|
||||
return (e.errno != errno.ESPIPE)
|
||||
|
||||
def _sendable(self, body):
|
||||
return (SENDFILE_SUPPORTED and hasattr(body, 'fileno') and
|
||||
self._seekable(body) and not self.use_ssl)
|
||||
|
||||
def _iterable(self, body):
|
||||
return isinstance(body, collections.Iterable)
|
||||
|
||||
def image_iterator(self, connection, headers, body):
|
||||
if self._sendable(body):
|
||||
return SendFileIterator(connection, body)
|
||||
elif self._iterable(body):
|
||||
return utils.chunkreadable(body)
|
||||
else:
|
||||
return ImageBodyIterator(body)
|
||||
|
||||
def get_status_code(self, response):
|
||||
"""
|
||||
Returns the integer status code from the response, which
|
||||
can be either a Webob.Response (used in testing) or httplib.Response
|
||||
"""
|
||||
if hasattr(response, 'status_int'):
|
||||
return response.status_int
|
||||
else:
|
||||
return response.status
|
||||
|
||||
def _extract_params(self, actual_params, allowed_params):
|
||||
"""
|
||||
Extract a subset of keys from a dictionary. The filters key
|
||||
will also be extracted, and each of its values will be returned
|
||||
as an individual param.
|
||||
|
||||
:param actual_params: dict of keys to filter
|
||||
:param allowed_params: list of keys that 'actual_params' will be
|
||||
reduced to
|
||||
:retval subset of 'params' dict
|
||||
"""
|
||||
try:
|
||||
# expect 'filters' param to be a dict here
|
||||
result = dict(actual_params.get('filters'))
|
||||
except TypeError:
|
||||
result = {}
|
||||
|
||||
for allowed_param in allowed_params:
|
||||
if allowed_param in actual_params:
|
||||
result[allowed_param] = actual_params[allowed_param]
|
||||
|
||||
return result
|
181
windc/windc/common/config.py
Normal file
@ -0,0 +1,181 @@
|
||||
#!/usr/bin/env python
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2011 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Routines for configuring balancer
|
||||
"""
|
||||
|
||||
import logging
|
||||
import logging.config
|
||||
import logging.handlers
|
||||
import os
|
||||
import sys
|
||||
|
||||
from windc.common import cfg
|
||||
from windc.common import wsgi
|
||||
from windc import version
|
||||
|
||||
|
||||
paste_deploy_group = cfg.OptGroup('paste_deploy')
|
||||
paste_deploy_opts = [
|
||||
cfg.StrOpt('flavor'),
|
||||
cfg.StrOpt('config_file')
|
||||
]
|
||||
|
||||
|
||||
class WindcConfigOpts(cfg.CommonConfigOpts):
|
||||
def __init__(self, default_config_files=None, **kwargs):
|
||||
super(WindcConfigOpts, self).__init__(
|
||||
project='windc',
|
||||
version='%%prog %s' % version.version_string(),
|
||||
default_config_files=default_config_files,
|
||||
**kwargs)
|
||||
|
||||
|
||||
class WindcCacheConfigOpts(WindcConfigOpts):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
config_files = cfg.find_config_files(project='windc',
|
||||
prog='windc-cache')
|
||||
super(WindcCacheConfigOpts, self).__init__(config_files, **kwargs)
|
||||
|
||||
|
||||
def setup_logging(conf):
|
||||
"""
|
||||
Sets up the logging options for a log with supplied name
|
||||
|
||||
:param conf: a cfg.ConfOpts object
|
||||
"""
|
||||
|
||||
if conf.log_config:
|
||||
# Use a logging configuration file for all settings...
|
||||
if os.path.exists(conf.log_config):
|
||||
logging.config.fileConfig(conf.log_config)
|
||||
return
|
||||
else:
|
||||
raise RuntimeError("Unable to locate specified logging "
|
||||
"config file: %s" % conf.log_config)
|
||||
|
||||
root_logger = logging.root
|
||||
if conf.debug:
|
||||
root_logger.setLevel(logging.DEBUG)
|
||||
elif conf.verbose:
|
||||
root_logger.setLevel(logging.INFO)
|
||||
else:
|
||||
root_logger.setLevel(logging.WARNING)
|
||||
|
||||
formatter = logging.Formatter(conf.log_format, conf.log_date_format)
|
||||
|
||||
if conf.use_syslog:
|
||||
try:
|
||||
facility = getattr(logging.handlers.SysLogHandler,
|
||||
conf.syslog_log_facility)
|
||||
except AttributeError:
|
||||
raise ValueError(_("Invalid syslog facility"))
|
||||
|
||||
handler = logging.handlers.SysLogHandler(address='/dev/log',
|
||||
facility=facility)
|
||||
elif conf.log_file:
|
||||
logfile = conf.log_file
|
||||
if conf.log_dir:
|
||||
logfile = os.path.join(conf.log_dir, logfile)
|
||||
handler = logging.handlers.WatchedFileHandler(logfile)
|
||||
else:
|
||||
handler = logging.StreamHandler(sys.stdout)
|
||||
|
||||
handler.setFormatter(formatter)
|
||||
root_logger.addHandler(handler)
|
||||
|
||||
|
||||
def _register_paste_deploy_opts(conf):
|
||||
"""
|
||||
Idempotent registration of paste_deploy option group
|
||||
|
||||
:param conf: a cfg.ConfigOpts object
|
||||
"""
|
||||
conf.register_group(paste_deploy_group)
|
||||
conf.register_opts(paste_deploy_opts, group=paste_deploy_group)
|
||||
|
||||
|
||||
def _get_deployment_flavor(conf):
|
||||
"""
|
||||
Retrieve the paste_deploy.flavor config item, formatted appropriately
|
||||
for appending to the application name.
|
||||
|
||||
:param conf: a cfg.ConfigOpts object
|
||||
"""
|
||||
_register_paste_deploy_opts(conf)
|
||||
flavor = conf.paste_deploy.flavor
|
||||
return '' if not flavor else ('-' + flavor)
|
||||
|
||||
|
||||
def _get_deployment_config_file(conf):
|
||||
"""
|
||||
Retrieve the deployment_config_file config item, formatted as an
|
||||
absolute pathname.
|
||||
|
||||
:param conf: a cfg.ConfigOpts object
|
||||
"""
|
||||
_register_paste_deploy_opts(conf)
|
||||
config_file = conf.paste_deploy.config_file
|
||||
if not config_file:
|
||||
# Assume paste config is in a paste.ini file corresponding
|
||||
# to the last config file
|
||||
path = conf.config_file[-1].replace(".conf", "-paste.ini")
|
||||
else:
|
||||
path = config_file
|
||||
return os.path.abspath(path)
|
||||
|
||||
|
||||
def load_paste_app(conf, app_name=None):
|
||||
"""
|
||||
Builds and returns a WSGI app from a paste config file.
|
||||
|
||||
We assume the last config file specified in the supplied ConfigOpts
|
||||
object is the paste config file.
|
||||
|
||||
:param conf: a cfg.ConfigOpts object
|
||||
:param app_name: name of the application to load
|
||||
|
||||
:raises RuntimeError when config file cannot be located or application
|
||||
cannot be loaded from config file
|
||||
"""
|
||||
if app_name is None:
|
||||
app_name = conf.prog
|
||||
|
||||
# append the deployment flavor to the application name,
|
||||
# in order to identify the appropriate paste pipeline
|
||||
app_name += _get_deployment_flavor(conf)
|
||||
|
||||
conf_file = _get_deployment_config_file(conf)
|
||||
|
||||
try:
|
||||
# Setup logging early
|
||||
setup_logging(conf)
|
||||
|
||||
app = wsgi.paste_deploy_app(conf_file, app_name, conf)
|
||||
|
||||
# Log the options used when starting if we're in debug mode...
|
||||
if conf.debug:
|
||||
conf.log_opt_values(logging.getLogger(app_name), logging.DEBUG)
|
||||
|
||||
return app
|
||||
except (LookupError, ImportError), e:
|
||||
raise RuntimeError("Unable to load %(app_name)s from "
|
||||
"configuration file %(conf_file)s."
|
||||
"\nGot: %(e)r" % locals())
|
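For illustration (hypothetical paths and flavor; conf stands for an already-parsed WindcConfigOpts), the two helpers above resolve the paste pipeline like this:

# Assuming conf.config_file == ['/etc/windc/windc.conf'] and the config
# file's [paste_deploy] section sets flavor = keystone:
print _get_deployment_flavor(conf)       # '-keystone'
print _get_deployment_config_file(conf)  # '/etc/windc/windc-paste.ini'
# load_paste_app(conf) then looks up the app '<prog>-keystone' in that ini.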
130
windc/windc/common/context.py
Normal file
@ -0,0 +1,130 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2011 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from windc.common import cfg
from windc.common import exception
from windc.common import utils
from windc.common import wsgi
|
||||
|
||||
|
||||
class RequestContext(object):
|
||||
"""
|
||||
Stores information about the security context under which the user
|
||||
accesses the system, as well as additional request information.
|
||||
"""
|
||||
|
||||
def __init__(self, auth_tok=None, user=None, user_id=None, tenant=None,
|
||||
tenant_id=None, roles=None, is_admin=False, read_only=False,
|
||||
show_deleted=False, owner_is_tenant=True):
|
||||
self.auth_tok = auth_tok
|
||||
self.user = user
|
||||
self.user_id = user_id
|
||||
self.tenant = tenant
|
||||
self.tenant_id = tenant_id
|
||||
self.roles = roles or []
|
||||
self.is_admin = is_admin
|
||||
self.read_only = read_only
|
||||
self._show_deleted = show_deleted
|
||||
self.owner_is_tenant = owner_is_tenant
|
||||
|
||||
@property
|
||||
def owner(self):
|
||||
"""Return the owner to correlate with an image."""
|
||||
return self.tenant if self.owner_is_tenant else self.user
|
||||
|
||||
@property
|
||||
def show_deleted(self):
|
||||
"""Admins can see deleted by default"""
|
||||
if self._show_deleted or self.is_admin:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
class ContextMiddleware(wsgi.Middleware):
|
||||
|
||||
opts = [
|
||||
cfg.BoolOpt('owner_is_tenant', default=True),
|
||||
]
|
||||
|
||||
def __init__(self, app, conf, **local_conf):
|
||||
self.conf = conf
|
||||
self.conf.register_opts(self.opts)
|
||||
|
||||
# Determine the context class to use
|
||||
self.ctxcls = RequestContext
|
||||
if 'context_class' in local_conf:
|
||||
self.ctxcls = utils.import_class(local_conf['context_class'])
|
||||
|
||||
super(ContextMiddleware, self).__init__(app)
|
||||
|
||||
def make_context(self, *args, **kwargs):
|
||||
"""
|
||||
Create a context with the given arguments.
|
||||
"""
|
||||
kwargs.setdefault('owner_is_tenant', self.conf.owner_is_tenant)
|
||||
|
||||
return self.ctxcls(*args, **kwargs)
|
||||
|
||||
def process_request(self, req):
|
||||
"""
|
||||
Extract any authentication information in the request and
|
||||
construct an appropriate context from it.
|
||||
|
||||
A few scenarios exist:
|
||||
|
||||
1. If X-Auth-Token is passed in, then consult TENANT and ROLE headers
|
||||
to determine permissions.
|
||||
|
||||
2. An X-Auth-Token was passed in, but the Identity-Status is not
|
||||
confirmed. For now, just raising a NotAuthorized exception.
|
||||
|
||||
3. X-Auth-Token is omitted. If we were using Keystone, then the
|
||||
tokenauth middleware would have rejected the request, so we must be
|
||||
using NoAuth. In that case, assume that is_admin=True.
|
||||
"""
|
||||
# TODO(sirp): should we be using the balancer_tokeauth shim from
|
||||
# Keystone here? If we do, we need to make sure it handles the NoAuth
|
||||
# case
|
||||
auth_tok = req.headers.get('X-Auth-Token',
|
||||
req.headers.get('X-Storage-Token'))
|
||||
if auth_tok:
|
||||
if req.headers.get('X-Identity-Status') == 'Confirmed':
|
||||
# 1. Auth-token is passed, check other headers
|
||||
user = req.headers.get('X-User-Name')
|
||||
user_id = req.headers.get('X-User-Id')
|
||||
tenant = req.headers.get('X-Tenant-Name')
|
||||
tenant_id = req.headers.get('X-Tenant-Id')
|
||||
roles = [r.strip()
|
||||
for r in req.headers.get('X-Role', '').split(',')]
|
||||
is_admin = any(role.lower() == 'admin' for role in roles)
|
||||
else:
|
||||
# 2. Identity-Status not confirmed
|
||||
# FIXME(sirp): not sure what the correct behavior in this case
|
||||
# is; just raising NotAuthorized for now
|
||||
raise exception.NotAuthorized()
|
||||
else:
|
||||
# 3. Auth-token is omitted, assume NoAuth
|
||||
user = None
|
||||
user_id = None
|
||||
tenant = None
|
||||
tenant_id = None
|
||||
roles = []
|
||||
is_admin = True
|
||||
|
||||
req.context = self.make_context(auth_tok=auth_tok, user=user,
|
||||
user_id=user_id, tenant=tenant, tenant_id=tenant_id,
|
||||
roles=roles, is_admin=is_admin)
|
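A minimal sketch of the ownership semantics above (values are hypothetical): owner follows the tenant by default, falls back to the user when owner_is_tenant is False, and show_deleted is implied for admins.

from windc.common.context import RequestContext

ctx = RequestContext(user='alice', tenant='tenant-a')
print ctx.owner          # 'tenant-a', owner_is_tenant defaults to True

ctx = RequestContext(user='alice', tenant='tenant-a', owner_is_tenant=False)
print ctx.owner          # 'alice'

ctx = RequestContext(is_admin=True)
print ctx.show_deleted   # True, admins see deleted records by default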
184
windc/windc/common/exception.py
Normal file
@ -0,0 +1,184 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2010 United States Government as represented by the
|
||||
# Administrator of the National Aeronautics and Space Administration.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Glance exception subclasses"""
|
||||
|
||||
import urlparse
|
||||
|
||||
|
||||
class RedirectException(Exception):
|
||||
def __init__(self, url):
|
||||
self.url = urlparse.urlparse(url)
|
||||
|
||||
|
||||
class GlanceException(Exception):
|
||||
"""
|
||||
Base Glance Exception
|
||||
|
||||
To correctly use this class, inherit from it and define
|
||||
a 'message' property. That message will get printf'd
|
||||
with the keyword arguments provided to the constructor.
|
||||
"""
|
||||
message = "An unknown exception occurred"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
try:
|
||||
self._error_string = self.message % kwargs
|
||||
except Exception:
|
||||
# at least get the core message out if something happened
|
||||
self._error_string = self.message
|
||||
if len(args) > 0:
|
||||
# If there is a non-kwarg parameter, assume it's the error
|
||||
# message or reason description and tack it on to the end
|
||||
# of the exception message
|
||||
# Convert all arguments into their string representations...
|
||||
args = ["%s" % arg for arg in args]
|
||||
self._error_string = (self._error_string +
|
||||
"\nDetails: %s" % '\n'.join(args))
|
||||
|
||||
def __str__(self):
|
||||
return self._error_string
|
||||
|
||||
|
||||
class MissingArgumentError(GlanceException):
|
||||
message = "Missing required argument."
|
||||
|
||||
|
||||
class MissingCredentialError(GlanceException):
|
||||
message = "Missing required credential: %(required)s"
|
||||
|
||||
|
||||
class BadAuthStrategy(GlanceException):
|
||||
message = "Incorrect auth strategy, expected \"%(expected)s\" but "
|
||||
|
||||
|
||||
class NotFound(GlanceException):
|
||||
message = "An object with the specified identifier was not found."
|
||||
|
||||
|
||||
class UnknownScheme(GlanceException):
|
||||
message = "Unknown scheme '%(scheme)s' found in URI"
|
||||
|
||||
|
||||
class BadStoreUri(GlanceException):
|
||||
message = "The Store URI %(uri)s was malformed. Reason: %(reason)s"
|
||||
|
||||
|
||||
class Duplicate(GlanceException):
|
||||
message = "An object with the same identifier already exists."
|
||||
|
||||
|
||||
class StorageFull(GlanceException):
|
||||
message = "There is not enough disk space on the image storage media."
|
||||
|
||||
|
||||
class StorageWriteDenied(GlanceException):
|
||||
message = "Permission to write image storage media denied."
|
||||
|
||||
|
||||
class ImportFailure(GlanceException):
|
||||
message = "Failed to import requested object/class: '%(import_str)s'. \
|
||||
Reason: %(reason)s"
|
||||
|
||||
|
||||
class AuthBadRequest(GlanceException):
|
||||
message = "Connect error/bad request to Auth service at URL %(url)s."
|
||||
|
||||
|
||||
class AuthUrlNotFound(GlanceException):
|
||||
message = "Auth service at URL %(url)s not found."
|
||||
|
||||
|
||||
class AuthorizationFailure(GlanceException):
|
||||
message = "Authorization failed."
|
||||
|
||||
|
||||
class NotAuthorized(GlanceException):
|
||||
message = "You are not authorized to complete this action."
|
||||
|
||||
|
||||
class NotAuthorizedPublicImage(NotAuthorized):
|
||||
message = "You are not authorized to complete this action."
|
||||
|
||||
|
||||
class Invalid(GlanceException):
|
||||
message = "Data supplied was not valid."
|
||||
|
||||
|
||||
class AuthorizationRedirect(GlanceException):
|
||||
message = "Redirecting to %(uri)s for authorization."
|
||||
|
||||
|
||||
class DatabaseMigrationError(GlanceException):
|
||||
message = "There was an error migrating the database."
|
||||
|
||||
|
||||
class ClientConnectionError(GlanceException):
|
||||
message = "There was an error connecting to a server"
|
||||
|
||||
|
||||
class ClientConfigurationError(GlanceException):
|
||||
message = "There was an error configuring the client."
|
||||
|
||||
|
||||
class MultipleChoices(GlanceException):
|
||||
message = "The request returned a 302 Multiple Choices. This generally "
|
||||
|
||||
|
||||
class InvalidContentType(GlanceException):
|
||||
message = "Invalid content type %(content_type)s"
|
||||
|
||||
|
||||
class BadRegistryConnectionConfiguration(GlanceException):
|
||||
message = "Registry was not configured correctly on API server. "
|
||||
|
||||
|
||||
class BadStoreConfiguration(GlanceException):
|
||||
message = "Store %(store_name)s could not be configured correctly. "
|
||||
|
||||
|
||||
class BadDriverConfiguration(GlanceException):
|
||||
message = "Driver %(driver_name)s could not be configured correctly. "
|
||||
|
||||
|
||||
class StoreDeleteNotSupported(GlanceException):
|
||||
message = "Deleting images from this store is not supported."
|
||||
|
||||
|
||||
class StoreAddDisabled(GlanceException):
|
||||
message = "Configuration for store failed. Adding images to this "
|
||||
|
||||
|
||||
class InvalidNotifierStrategy(GlanceException):
|
||||
message = "'%(strategy)s' is not an available notifier strategy."
|
||||
|
||||
|
||||
class MaxRedirectsExceeded(GlanceException):
|
||||
message = "Maximum redirects (%(redirects)s) was exceeded."
|
||||
|
||||
|
||||
class InvalidRedirect(GlanceException):
|
||||
message = "Received invalid HTTP redirect."
|
||||
|
||||
|
||||
class NoServiceEndpoint(GlanceException):
|
||||
message = "Response from Keystone does not contain a Glance endpoint."
|
||||
|
||||
|
||||
class RegionAmbiguity(GlanceException):
|
||||
message = "Multiple 'image' service matches for region %(region)s. This "
|
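A short usage sketch for the exception base class above (values are hypothetical): keyword arguments fill the %(...)s placeholders in the class message, and positional arguments are appended as details.

from windc.common import exception

try:
    raise exception.MissingCredentialError(required='password')
except exception.GlanceException, e:
    print e   # Missing required credential: password

try:
    raise exception.BadStoreUri('while parsing config',
                                uri='swift://bad', reason='no auth')
except exception.GlanceException, e:
    print e   # The Store URI swift://bad was malformed. Reason: no auth
              # Details: while parsing config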
182
windc/windc/common/policy.py
Normal file
@ -0,0 +1,182 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright (c) 2011 OpenStack, LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Common Policy Engine Implementation"""
|
||||
|
||||
import json
|
||||
|
||||
|
||||
class NotAuthorized(Exception):
|
||||
pass
|
||||
|
||||
|
||||
_BRAIN = None
|
||||
|
||||
|
||||
def set_brain(brain):
|
||||
"""Set the brain used by enforce().
|
||||
|
||||
Defaults to Brain() if not set.
|
||||
|
||||
"""
|
||||
global _BRAIN
|
||||
_BRAIN = brain
|
||||
|
||||
|
||||
def reset():
|
||||
"""Clear the brain used by enforce()."""
|
||||
global _BRAIN
|
||||
_BRAIN = None
|
||||
|
||||
|
||||
def enforce(match_list, target_dict, credentials_dict):
|
||||
"""Enforces authorization of some rules against credentials.
|
||||
|
||||
:param match_list: nested tuples of data to match against
|
||||
The basic brain supports three types of match lists:
|
||||
1) rules
|
||||
looks like: ('rule:compute:get_instance',)
|
||||
Retrieves the named rule from the rules dict and recursively
|
||||
checks against the contents of the rule.
|
||||
2) roles
|
||||
looks like: ('role:compute:admin',)
|
||||
Matches if the specified role is in credentials_dict['roles'].
|
||||
3) generic
|
||||
('tenant_id:%(tenant_id)s',)
|
||||
Substitutes values from the target dict into the match using
|
||||
the % operator and matches them against the creds dict.
|
||||
|
||||
Combining rules:
|
||||
The brain returns True if any of the outer tuple of rules match
|
||||
and also True if all of the inner tuples match. You can use this to
|
||||
perform simple boolean logic. For example, the following rule would
|
||||
return True if the creds contain the role 'admin' OR if the
|
||||
tenant_id matches the target dict AND the creds contain the
|
||||
role 'compute_sysadmin':
|
||||
|
||||
{
|
||||
"rule:combined": (
|
||||
'role:admin',
|
||||
('tenant_id:%(tenant_id)s', 'role:compute_sysadmin')
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
Note that rule and role are reserved words in the credentials match, so
|
||||
you can't match against properties with those names. Custom brains may
|
||||
also add new reserved words. For example, the HttpBrain adds http as a
|
||||
reserved word.
|
||||
|
||||
:param target_dict: dict of object properties
|
||||
Target dicts contain as much information as we can about the object being
|
||||
operated on.
|
||||
|
||||
:param credentials_dict: dict of actor properties
|
||||
Credentials dicts contain as much information as we can about the user
|
||||
performing the action.
|
||||
|
||||
:raises NotAuthorized if the check fails
|
||||
|
||||
"""
global _BRAIN
if not _BRAIN:
    _BRAIN = Brain()
if not _BRAIN.check(match_list, target_dict, credentials_dict):
    raise NotAuthorized()


class Brain(object):
    """Implements policy checking."""

    @classmethod
    def load_json(cls, data, default_rule=None):
        """Init a brain using json instead of a rules dictionary."""
        rules_dict = json.loads(data)
        return cls(rules=rules_dict, default_rule=default_rule)

    def __init__(self, rules=None, default_rule=None):
        self.rules = rules or {}
        self.default_rule = default_rule

    def add_rule(self, key, match):
        self.rules[key] = match

    def _check(self, match, target_dict, cred_dict):
        match_kind, match_value = match.split(':', 1)
        try:
            f = getattr(self, '_check_%s' % match_kind)
        except AttributeError:
            if not self._check_generic(match, target_dict, cred_dict):
                return False
        else:
            if not f(match_value, target_dict, cred_dict):
                return False
        return True

    def check(self, match_list, target_dict, cred_dict):
        """Checks authorization of some rules against credentials.

        Detailed description of the check with examples in policy.enforce().

        :param match_list: nested tuples of data to match against
        :param target_dict: dict of object properties
        :param credentials_dict: dict of actor properties

        :returns: True if the check passes

        """
        if not match_list:
            return True
        for and_list in match_list:
            if isinstance(and_list, basestring):
                and_list = (and_list,)
            if all([self._check(item, target_dict, cred_dict)
                    for item in and_list]):
                return True
        return False

    def _check_rule(self, match, target_dict, cred_dict):
        """Recursively checks credentials based on the brain's rules."""
        try:
            new_match_list = self.rules[match]
        except KeyError:
            if self.default_rule and match != self.default_rule:
                new_match_list = ('rule:%s' % self.default_rule,)
            else:
                return False

        return self.check(new_match_list, target_dict, cred_dict)

    def _check_role(self, match, target_dict, cred_dict):
        """Check that there is a matching role in the cred dict."""
        return match in cred_dict['roles']

    def _check_generic(self, match, target_dict, cred_dict):
        """Check an individual match.

        Matches look like:

            tenant:%(tenant_id)s
            role:compute:admin

        """
        # TODO(termie): do dict inspection via dot syntax
        match = match % target_dict
        key, value = match.split(':', 1)
        if key in cred_dict:
            return value == cred_dict[key]
        return False
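
For reference, a minimal usage sketch of the Brain defined above (the rule, target and credential values are made up, not part of this commit):

    brain = Brain(rules={'compute:get_instance': (
        ('role:admin',),
        ('tenant_id:%(tenant_id)s',),
    )})
    target = {'tenant_id': 'tenant-1'}
    creds = {'roles': ['member'], 'tenant_id': 'tenant-1'}
    # True: the second inner tuple matches via the generic tenant_id check
    print brain.check(('rule:compute:get_instance',), target, creds)
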
windc/windc/common/utils.py (new file, 421 lines)
@ -0,0 +1,421 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2010 United States Government as represented by the
|
||||
# Administrator of the National Aeronautics and Space Administration.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
System-level utilities and helper functions.
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import errno
|
||||
import logging
|
||||
import os
|
||||
import platform
|
||||
import subprocess
|
||||
import sys
|
||||
import uuid
|
||||
|
||||
import iso8601
|
||||
|
||||
from windc.common import exception
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
TIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
|
||||
|
||||
|
||||
class Singleton:
|
||||
"""
|
||||
A non-thread-safe helper class to ease implementing singletons.
|
||||
This should be used as a decorator -- not a metaclass -- to the
|
||||
class that should be a singleton.
|
||||
|
||||
The decorated class can define one `__init__` function that
takes the `self` and `conf` arguments (`Instance` passes `conf`
through to it). Other than that, there are no restrictions that
apply to the decorated class.
|
||||
|
||||
To get the singleton instance, use the `Instance` method. Trying
|
||||
to use `__call__` will result in a `TypeError` being raised.
|
||||
|
||||
Limitations: The decorated class cannot be inherited from and the
type of the singleton instance cannot be checked with `isinstance`.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, decorated):
|
||||
self._decorated = decorated
|
||||
|
||||
def Instance(self, conf):
|
||||
"""
|
||||
Returns the singleton instance. Upon its first call, it creates a
|
||||
new instance of the decorated class and calls its `__init__` method.
|
||||
On all subsequent calls, the already created instance is returned.
|
||||
|
||||
"""
|
||||
try:
|
||||
return self._instance
|
||||
except AttributeError:
|
||||
self._instance = self._decorated(conf)
|
||||
return self._instance
|
||||
|
||||
def __call__(self):
|
||||
"""
|
||||
Call method that raises an exception in order to prevent creation
|
||||
of multiple instances of the singleton. The `Instance` method should
|
||||
be used instead.
|
||||
|
||||
"""
|
||||
raise TypeError(
|
||||
'Singletons must be accessed through the `Instance` method.')
|
||||
|
||||
|
||||
def checkNone(obj):
    """Return True if obj is truthy and is not the string 'None'."""
    return bool(obj) and obj != 'None'
|
||||
|
||||
|
||||
def chunkreadable(iter, chunk_size=65536):
|
||||
"""
|
||||
Wrap a readable iterator with a reader yielding chunks of
|
||||
a preferred size, otherwise leave iterator unchanged.
|
||||
|
||||
:param iter: an iter which may also be readable
|
||||
:param chunk_size: maximum size of chunk
|
||||
"""
|
||||
return chunkiter(iter, chunk_size) if hasattr(iter, 'read') else iter
|
||||
|
||||
|
||||
def chunkiter(fp, chunk_size=65536):
|
||||
"""
|
||||
Return an iterator to a file-like obj which yields fixed size chunks
|
||||
|
||||
:param fp: a file-like object
|
||||
:param chunk_size: maximum size of chunk
|
||||
"""
|
||||
while True:
|
||||
chunk = fp.read(chunk_size)
|
||||
if chunk:
|
||||
yield chunk
|
||||
else:
|
||||
break
|
||||
|
||||
|
||||
def image_meta_to_http_headers(image_meta):
|
||||
"""
|
||||
Converts a mapping of image metadata into a dict
of HTTP headers that can be fed to either a Webob
Request object or an httplib.HTTP(S)Connection object
|
||||
|
||||
:param image_meta: Mapping of image metadata
|
||||
"""
|
||||
headers = {}
|
||||
for k, v in image_meta.items():
|
||||
if v is not None:
|
||||
if k == 'properties':
|
||||
for pk, pv in v.items():
|
||||
if pv is not None:
|
||||
headers["x-image-meta-property-%s"
|
||||
% pk.lower()] = unicode(pv)
|
||||
else:
|
||||
headers["x-image-meta-%s" % k.lower()] = unicode(v)
|
||||
return headers
|
||||
|
||||
|
||||
def add_features_to_http_headers(features, headers):
|
||||
"""
|
||||
Adds additional headers representing balancer features to be enabled.
|
||||
|
||||
:param headers: Base set of headers
|
||||
:param features: Map of enabled features
|
||||
"""
|
||||
if features:
|
||||
for k, v in features.items():
|
||||
if v is not None:
|
||||
headers[k.lower()] = unicode(v)
|
||||
|
||||
|
||||
def get_image_meta_from_headers(response):
|
||||
"""
|
||||
Processes HTTP headers from a supplied response that
match the x-image-meta and x-image-meta-property prefixes
and returns a mapping of image metadata and properties
|
||||
|
||||
:param response: Response to process
|
||||
"""
|
||||
result = {}
|
||||
properties = {}
|
||||
|
||||
if hasattr(response, 'getheaders'): # httplib.HTTPResponse
|
||||
headers = response.getheaders()
|
||||
else: # webob.Response
|
||||
headers = response.headers.items()
|
||||
|
||||
for key, value in headers:
|
||||
key = str(key.lower())
|
||||
if key.startswith('x-image-meta-property-'):
|
||||
field_name = key[len('x-image-meta-property-'):].replace('-', '_')
|
||||
properties[field_name] = value or None
|
||||
elif key.startswith('x-image-meta-'):
|
||||
field_name = key[len('x-image-meta-'):].replace('-', '_')
|
||||
result[field_name] = value or None
|
||||
result['properties'] = properties
|
||||
if 'size' in result:
|
||||
try:
|
||||
result['size'] = int(result['size'])
|
||||
except ValueError:
|
||||
raise exception.Invalid
|
||||
for key in ('is_public', 'deleted', 'protected'):
|
||||
if key in result:
|
||||
result[key] = bool_from_header_value(result[key])
|
||||
return result
|
||||
|
||||
|
||||
def bool_from_header_value(value):
|
||||
"""
|
||||
Returns True if value is a boolean True or the
|
||||
string 'true', case-insensitive, False otherwise
|
||||
"""
|
||||
if isinstance(value, bool):
|
||||
return value
|
||||
elif isinstance(value, (basestring, unicode)):
|
||||
if str(value).lower() == 'true':
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def bool_from_string(subject):
|
||||
"""
|
||||
Interpret a string as a boolean.
|
||||
|
||||
Any string value in:
|
||||
('True', 'true', 'On', 'on', '1')
|
||||
is interpreted as a boolean True.
|
||||
|
||||
Useful for JSON-decoded stuff and config file parsing
|
||||
"""
|
||||
if isinstance(subject, bool):
|
||||
return subject
|
||||
elif isinstance(subject, int):
|
||||
return subject == 1
|
||||
if hasattr(subject, 'startswith'): # str or unicode...
|
||||
if subject.strip().lower() in ('true', 'on', '1'):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def import_class(import_str):
|
||||
"""Returns a class from a string including module and class"""
|
||||
mod_str, _sep, class_str = import_str.rpartition('.')
|
||||
try:
|
||||
__import__(mod_str)
|
||||
return getattr(sys.modules[mod_str], class_str)
|
||||
except (ImportError, ValueError, AttributeError), e:
|
||||
raise exception.ImportFailure(import_str=import_str,
|
||||
reason=e)
|
||||
|
||||
|
||||
def import_object(import_str):
|
||||
"""Returns an object including a module or module and class"""
|
||||
try:
|
||||
__import__(import_str)
|
||||
return sys.modules[import_str]
|
||||
except ImportError:
|
||||
cls = import_class(import_str)
|
||||
return cls()
|
||||
|
||||
|
||||
def generate_uuid():
|
||||
return str(uuid.uuid4())
|
||||
|
||||
|
||||
def is_uuid_like(value):
|
||||
try:
|
||||
uuid.UUID(value)
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def isotime(at=None):
|
||||
"""Stringify time in ISO 8601 format"""
|
||||
if not at:
|
||||
at = datetime.datetime.utcnow()
|
||||
timestr = at.strftime(TIME_FORMAT)
tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC'
timestr += ('Z' if tz == 'UTC' else tz)
return timestr
|
||||
|
||||
|
||||
def parse_isotime(timestr):
|
||||
"""Parse time from ISO 8601 format"""
|
||||
try:
|
||||
return iso8601.parse_date(timestr)
|
||||
except iso8601.ParseError as e:
|
||||
raise ValueError(e.message)
|
||||
except TypeError as e:
|
||||
raise ValueError(e.message)
|
||||
|
||||
|
||||
def normalize_time(timestamp):
|
||||
"""Normalize time in arbitrary timezone to UTC"""
|
||||
offset = timestamp.utcoffset()
|
||||
return timestamp.replace(tzinfo=None) - offset if offset else timestamp
|
||||
|
||||
|
||||
def safe_mkdirs(path):
|
||||
try:
|
||||
os.makedirs(path)
|
||||
except OSError, e:
|
||||
if e.errno != errno.EEXIST:
|
||||
raise
|
||||
|
||||
|
||||
def safe_remove(path):
|
||||
try:
|
||||
os.remove(path)
|
||||
except OSError, e:
|
||||
if e.errno != errno.ENOENT:
|
||||
raise
|
||||
|
||||
|
||||
class PrettyTable(object):
|
||||
"""Creates an ASCII art table for use in bin/balancer
|
||||
|
||||
Example:
|
||||
|
||||
ID Name Size Hits
|
||||
--- ----------------- ------------ -----
|
||||
122 image 22 0
|
||||
"""
|
||||
def __init__(self):
|
||||
self.columns = []
|
||||
|
||||
def add_column(self, width, label="", just='l'):
|
||||
"""Add a column to the table
|
||||
|
||||
:param width: number of characters wide the column should be
|
||||
:param label: column heading
|
||||
:param just: justification for the column, 'l' for left,
|
||||
'r' for right
|
||||
"""
|
||||
self.columns.append((width, label, just))
|
||||
|
||||
def make_header(self):
|
||||
label_parts = []
|
||||
break_parts = []
|
||||
for width, label, _ in self.columns:
|
||||
# NOTE(sirp): headers are always left justified
|
||||
label_part = self._clip_and_justify(label, width, 'l')
|
||||
label_parts.append(label_part)
|
||||
|
||||
break_part = '-' * width
|
||||
break_parts.append(break_part)
|
||||
|
||||
label_line = ' '.join(label_parts)
|
||||
break_line = ' '.join(break_parts)
|
||||
return '\n'.join([label_line, break_line])
|
||||
|
||||
def make_row(self, *args):
|
||||
row = args
|
||||
row_parts = []
|
||||
for data, (width, _, just) in zip(row, self.columns):
|
||||
row_part = self._clip_and_justify(data, width, just)
|
||||
row_parts.append(row_part)
|
||||
|
||||
row_line = ' '.join(row_parts)
|
||||
return row_line
|
||||
|
||||
@staticmethod
|
||||
def _clip_and_justify(data, width, just):
|
||||
# clip field to column width
|
||||
clipped_data = str(data)[:width]
|
||||
|
||||
if just == 'r':
|
||||
# right justify
|
||||
justified = clipped_data.rjust(width)
|
||||
else:
|
||||
# left justify
|
||||
justified = clipped_data.ljust(width)
|
||||
|
||||
return justified
|
||||
|
||||
|
||||
def get_terminal_size():
|
||||
|
||||
def _get_terminal_size_posix():
|
||||
import fcntl
|
||||
import struct
|
||||
import termios
|
||||
|
||||
height_width = None
|
||||
|
||||
try:
|
||||
height_width = struct.unpack('hh', fcntl.ioctl(sys.stderr.fileno(),
|
||||
termios.TIOCGWINSZ,
|
||||
struct.pack('HH', 0, 0)))
|
||||
except:
|
||||
pass
|
||||
|
||||
if not height_width:
|
||||
try:
|
||||
p = subprocess.Popen(['stty', 'size'],
|
||||
shell=False,
|
||||
stdout=subprocess.PIPE)
|
||||
return tuple(int(x) for x in p.communicate()[0].split())
|
||||
except:
|
||||
pass
|
||||
|
||||
return height_width
|
||||
|
||||
def _get_terminal_size_win32():
|
||||
try:
|
||||
from ctypes import windll, create_string_buffer
|
||||
handle = windll.kernel32.GetStdHandle(-12)
|
||||
csbi = create_string_buffer(22)
|
||||
res = windll.kernel32.GetConsoleScreenBufferInfo(handle, csbi)
|
||||
except:
|
||||
return None
|
||||
if res:
|
||||
import struct
|
||||
unpack_tmp = struct.unpack("hhhhHhhhhhh", csbi.raw)
|
||||
(bufx, bufy, curx, cury, wattr,
|
||||
left, top, right, bottom, maxx, maxy) = unpack_tmp
|
||||
height = bottom - top + 1
|
||||
width = right - left + 1
|
||||
return (height, width)
|
||||
else:
|
||||
return None
|
||||
|
||||
def _get_terminal_size_unknownOS():
|
||||
raise NotImplementedError
|
||||
|
||||
func = {'posix': _get_terminal_size_posix,
|
||||
'win32': _get_terminal_size_win32}
|
||||
|
||||
height_width = func.get(platform.os.name, _get_terminal_size_unknownOS)()
|
||||
|
||||
if height_width is None:
|
||||
raise exception.Invalid()
|
||||
|
||||
for i in height_width:
|
||||
if not isinstance(i, int) or i <= 0:
|
||||
raise exception.Invalid()
|
||||
|
||||
return height_width[0], height_width[1]
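
For reference, a minimal usage sketch of the Singleton decorator above (DeviceMap and the conf stand-in are hypothetical, not part of this commit):

    @Singleton
    class DeviceMap(object):
        def __init__(self, conf):
            self.conf = conf

    conf = object()                      # stand-in for a real ConfigOpts object
    first = DeviceMap.Instance(conf)
    second = DeviceMap.Instance(conf)
    assert first is second               # the same instance on every call
    # DeviceMap() would raise TypeError; always go through Instance()
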
windc/windc/common/wsgi.py (new file, 652 lines)
@ -0,0 +1,652 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2010 United States Government as represented by the
|
||||
# Administrator of the National Aeronautics and Space Administration.
|
||||
# Copyright 2010 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Utility methods for working with WSGI servers
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import errno
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import signal
|
||||
import sys
|
||||
import time
|
||||
|
||||
import eventlet
|
||||
import eventlet.greenio
|
||||
from eventlet.green import socket, ssl
|
||||
import eventlet.wsgi
|
||||
from paste import deploy
|
||||
import routes
|
||||
import routes.middleware
|
||||
import webob.dec
|
||||
import webob.exc
|
||||
|
||||
from windc.common import cfg
|
||||
from windc.common import exception
|
||||
from windc.common import utils
|
||||
|
||||
|
||||
bind_opts = [
|
||||
cfg.StrOpt('bind_host', default='0.0.0.0'),
|
||||
cfg.IntOpt('bind_port'),
|
||||
]
|
||||
|
||||
socket_opts = [
|
||||
cfg.IntOpt('backlog', default=4096),
|
||||
cfg.StrOpt('cert_file'),
|
||||
cfg.StrOpt('key_file'),
|
||||
]
|
||||
|
||||
workers_opt = cfg.IntOpt('workers', default=0)
|
||||
|
||||
|
||||
class WritableLogger(object):
|
||||
"""A thin wrapper that responds to `write` and logs."""
|
||||
|
||||
def __init__(self, logger, level=logging.DEBUG):
|
||||
self.logger = logger
|
||||
self.level = level
|
||||
|
||||
def write(self, msg):
|
||||
self.logger.log(self.level, msg.strip("\n"))
|
||||
|
||||
|
||||
def get_bind_addr(conf, default_port=None):
|
||||
"""Return the host and port to bind to."""
|
||||
conf.register_opts(bind_opts)
|
||||
return (conf.bind_host, conf.bind_port or default_port)
|
||||
|
||||
|
||||
def get_socket(conf, default_port):
|
||||
"""
|
||||
Bind socket to bind ip:port in conf
|
||||
|
||||
note: Mostly comes from Swift with a few small changes...
|
||||
|
||||
:param conf: a cfg.ConfigOpts object
|
||||
:param default_port: port to bind to if none is specified in conf
|
||||
|
||||
:returns : a socket object as returned from socket.listen or
|
||||
ssl.wrap_socket if conf specifies cert_file
|
||||
"""
|
||||
bind_addr = get_bind_addr(conf, default_port)
|
||||
|
||||
# TODO(jaypipes): eventlet's greened socket module does not actually
|
||||
# support IPv6 in getaddrinfo(). We need to get around this in the
|
||||
# future or monitor upstream for a fix
|
||||
address_family = [addr[0] for addr in socket.getaddrinfo(bind_addr[0],
|
||||
bind_addr[1], socket.AF_UNSPEC, socket.SOCK_STREAM)
|
||||
if addr[0] in (socket.AF_INET, socket.AF_INET6)][0]
|
||||
|
||||
conf.register_opts(socket_opts)
|
||||
|
||||
cert_file = conf.cert_file
|
||||
key_file = conf.key_file
|
||||
use_ssl = cert_file or key_file
|
||||
if use_ssl and (not cert_file or not key_file):
|
||||
raise RuntimeError(_("When running server in SSL mode, you must "
|
||||
"specify both a cert_file and key_file "
|
||||
"option value in your configuration file"))
|
||||
|
||||
sock = None
|
||||
retry_until = time.time() + 30
|
||||
while not sock and time.time() < retry_until:
|
||||
try:
|
||||
sock = eventlet.listen(bind_addr, backlog=conf.backlog,
|
||||
family=address_family)
|
||||
if use_ssl:
|
||||
sock = ssl.wrap_socket(sock, certfile=cert_file,
|
||||
keyfile=key_file)
|
||||
except socket.error, err:
|
||||
if err.args[0] != errno.EADDRINUSE:
|
||||
raise
|
||||
eventlet.sleep(0.1)
|
||||
if not sock:
|
||||
raise RuntimeError(_("Could not bind to %s:%s after trying for 30 "
|
||||
"seconds") % bind_addr)
|
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
# in my experience, sockets can hang around forever without keepalive
|
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
|
||||
|
||||
# This option isn't available in the OS X version of eventlet
|
||||
if hasattr(socket, 'TCP_KEEPIDLE'):
|
||||
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 600)
|
||||
|
||||
return sock
|
||||
|
||||
|
||||
class Server(object):
|
||||
"""Server class to manage multiple WSGI sockets and applications."""
|
||||
|
||||
def __init__(self, threads=1000):
|
||||
self.threads = threads
|
||||
self.children = []
|
||||
self.running = True
|
||||
|
||||
def start(self, application, conf, default_port):
|
||||
"""
|
||||
Run a WSGI server with the given application.
|
||||
|
||||
:param application: The application to run in the WSGI server
|
||||
:param conf: a cfg.ConfigOpts object
|
||||
:param default_port: Port to bind to if none is specified in conf
|
||||
"""
|
||||
def kill_children(*args):
|
||||
"""Kills the entire process group."""
|
||||
self.logger.error(_('SIGTERM received'))
|
||||
signal.signal(signal.SIGTERM, signal.SIG_IGN)
|
||||
self.running = False
|
||||
os.killpg(0, signal.SIGTERM)
|
||||
|
||||
def hup(*args):
|
||||
"""
|
||||
Shuts down the server, but allows running requests to complete
|
||||
"""
|
||||
self.logger.error(_('SIGHUP received'))
|
||||
signal.signal(signal.SIGHUP, signal.SIG_IGN)
|
||||
self.running = False
|
||||
|
||||
self.application = application
|
||||
self.sock = get_socket(conf, default_port)
|
||||
conf.register_opt(workers_opt)
|
||||
|
||||
self.logger = logging.getLogger('eventlet.wsgi.server')
|
||||
|
||||
if conf.workers == 0:
|
||||
# Useful for profiling, test, debug etc.
|
||||
self.pool = eventlet.GreenPool(size=self.threads)
|
||||
self.pool.spawn_n(self._single_run, application, self.sock)
|
||||
return
|
||||
|
||||
self.logger.info(_("Starting %d workers") % conf.workers)
|
||||
signal.signal(signal.SIGTERM, kill_children)
|
||||
signal.signal(signal.SIGHUP, hup)
|
||||
while len(self.children) < conf.workers:
|
||||
self.run_child()
|
||||
|
||||
def wait_on_children(self):
|
||||
while self.running:
|
||||
try:
|
||||
pid, status = os.wait()
|
||||
if os.WIFEXITED(status) or os.WIFSIGNALED(status):
|
||||
self.logger.error(_('Removing dead child %s') % pid)
|
||||
self.children.remove(pid)
|
||||
self.run_child()
|
||||
except OSError, err:
|
||||
if err.errno not in (errno.EINTR, errno.ECHILD):
|
||||
raise
|
||||
except KeyboardInterrupt:
|
||||
sys.exit(1)
|
||||
self.logger.info(_('Caught keyboard interrupt. Exiting.'))
|
||||
break
|
||||
eventlet.greenio.shutdown_safe(self.sock)
|
||||
self.sock.close()
|
||||
self.logger.debug(_('Exited'))
|
||||
|
||||
def wait(self):
|
||||
"""Wait until all servers have completed running."""
|
||||
try:
|
||||
if self.children:
|
||||
self.wait_on_children()
|
||||
else:
|
||||
self.pool.waitall()
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
|
||||
def run_child(self):
|
||||
pid = os.fork()
|
||||
if pid == 0:
|
||||
signal.signal(signal.SIGHUP, signal.SIG_DFL)
|
||||
signal.signal(signal.SIGTERM, signal.SIG_DFL)
|
||||
self.run_server()
|
||||
self.logger.info(_('Child %d exiting normally') % os.getpid())
|
||||
return
|
||||
else:
|
||||
self.logger.info(_('Started child %s') % pid)
|
||||
self.children.append(pid)
|
||||
|
||||
def run_server(self):
|
||||
"""Run a WSGI server."""
|
||||
eventlet.wsgi.HttpProtocol.default_request_version = "HTTP/1.0"
|
||||
eventlet.hubs.use_hub('poll')
|
||||
eventlet.patcher.monkey_patch(all=False, socket=True)
|
||||
self.pool = eventlet.GreenPool(size=self.threads)
|
||||
try:
|
||||
eventlet.wsgi.server(self.sock, self.application,
|
||||
log=WritableLogger(self.logger), custom_pool=self.pool)
|
||||
except socket.error, err:
|
||||
if err[0] != errno.EINVAL:
|
||||
raise
|
||||
self.pool.waitall()
|
||||
|
||||
def _single_run(self, application, sock):
|
||||
"""Start a WSGI server in a new green thread."""
|
||||
self.logger.info(_("Starting single process server"))
|
||||
eventlet.wsgi.server(sock, application, custom_pool=self.pool,
|
||||
log=WritableLogger(self.logger))
|
||||
|
||||
|
||||
class Middleware(object):
|
||||
"""
|
||||
Base WSGI middleware wrapper. These classes require an application to be
|
||||
initialized that will be called next. By default the middleware will
|
||||
simply call its wrapped app, or you can override __call__ to customize its
|
||||
behavior.
|
||||
"""
|
||||
|
||||
def __init__(self, application):
|
||||
self.application = application
|
||||
|
||||
def process_request(self, req):
|
||||
"""
|
||||
Called on each request.
|
||||
|
||||
If this returns None, the next application down the stack will be
|
||||
executed. If it returns a response then that response will be returned
|
||||
and execution will stop here.
|
||||
|
||||
"""
|
||||
return None
|
||||
|
||||
def process_response(self, response):
|
||||
"""Do whatever you'd like to the response."""
|
||||
return response
|
||||
|
||||
@webob.dec.wsgify
|
||||
def __call__(self, req):
|
||||
response = self.process_request(req)
|
||||
if response:
|
||||
return response
|
||||
response = req.get_response(self.application)
|
||||
return self.process_response(response)
|
||||
|
||||
|
||||
class Debug(Middleware):
|
||||
"""
|
||||
Helper class that can be inserted into any WSGI application chain
|
||||
to get information about the request and response.
|
||||
"""
|
||||
|
||||
@webob.dec.wsgify
|
||||
def __call__(self, req):
|
||||
print ("*" * 40) + " REQUEST ENVIRON"
|
||||
for key, value in req.environ.items():
|
||||
print key, "=", value
|
||||
print
|
||||
resp = req.get_response(self.application)
|
||||
|
||||
print ("*" * 40) + " RESPONSE HEADERS"
|
||||
for (key, value) in resp.headers.iteritems():
|
||||
print key, "=", value
|
||||
print
|
||||
|
||||
resp.app_iter = self.print_generator(resp.app_iter)
|
||||
|
||||
return resp
|
||||
|
||||
@staticmethod
|
||||
def print_generator(app_iter):
|
||||
"""
|
||||
Iterator that prints the contents of a wrapped string iterator
|
||||
when iterated.
|
||||
"""
|
||||
print ("*" * 40) + " BODY"
|
||||
for part in app_iter:
|
||||
sys.stdout.write(part)
|
||||
sys.stdout.flush()
|
||||
yield part
|
||||
print
|
||||
|
||||
|
||||
class Router(object):
|
||||
"""
|
||||
WSGI middleware that maps incoming requests to WSGI apps.
|
||||
"""
|
||||
|
||||
def __init__(self, mapper):
|
||||
"""
|
||||
Create a router for the given routes.Mapper.
|
||||
|
||||
Each route in `mapper` must specify a 'controller', which is a
|
||||
WSGI app to call. You'll probably want to specify an 'action' as
|
||||
well and have your controller be a wsgi.Controller, who will route
|
||||
the request to the action method.
|
||||
|
||||
Examples:
|
||||
mapper = routes.Mapper()
|
||||
sc = ServerController()
|
||||
|
||||
# Explicit mapping of one route to a controller+action
|
||||
mapper.connect(None, "/svrlist", controller=sc, action="list")
|
||||
|
||||
# Actions are all implicitly defined
|
||||
mapper.resource("server", "servers", controller=sc)
|
||||
|
||||
# Pointing to an arbitrary WSGI app. You can specify the
|
||||
# {path_info:.*} parameter so the target app can be handed just that
|
||||
# section of the URL.
|
||||
mapper.connect(None, "/v1.0/{path_info:.*}", controller=BlogApp())
|
||||
"""
|
||||
self.map = mapper
|
||||
self._router = routes.middleware.RoutesMiddleware(self._dispatch,
|
||||
self.map)
|
||||
|
||||
@webob.dec.wsgify
|
||||
def __call__(self, req):
|
||||
"""
|
||||
Route the incoming request to a controller based on self.map.
|
||||
If no match, return a 404.
|
||||
"""
|
||||
return self._router
|
||||
|
||||
@staticmethod
|
||||
@webob.dec.wsgify
|
||||
def _dispatch(req):
|
||||
"""
|
||||
Called by self._router after matching the incoming request to a route
|
||||
and putting the information into req.environ. Either returns 404
|
||||
or the routed WSGI app's response.
|
||||
"""
|
||||
match = req.environ['wsgiorg.routing_args'][1]
|
||||
if not match:
|
||||
return webob.exc.HTTPNotFound()
|
||||
app = match['controller']
|
||||
return app
|
||||
|
||||
|
||||
class Request(webob.Request):
|
||||
"""Add some Openstack API-specific logic to the base webob.Request."""
|
||||
|
||||
def best_match_content_type(self):
|
||||
"""Determine the requested response content-type."""
|
||||
supported = ('application/json',)
|
||||
bm = self.accept.best_match(supported)
|
||||
return bm or 'application/json'
|
||||
|
||||
def get_content_type(self, allowed_content_types):
|
||||
"""Determine content type of the request body."""
|
||||
if not "Content-Type" in self.headers:
|
||||
raise exception.InvalidContentType(content_type=None)
|
||||
|
||||
content_type = self.content_type
|
||||
|
||||
if content_type not in allowed_content_types:
|
||||
raise exception.InvalidContentType(content_type=content_type)
|
||||
else:
|
||||
return content_type
|
||||
|
||||
|
||||
class JSONRequestDeserializer(object):
|
||||
def has_body(self, request):
|
||||
"""
|
||||
Returns whether a Webob.Request object will possess an entity body.
|
||||
|
||||
:param request: Webob.Request object
|
||||
"""
|
||||
if 'transfer-encoding' in request.headers:
|
||||
return True
|
||||
elif request.content_length > 0:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def from_json(self, datastring):
|
||||
return json.loads(datastring)
|
||||
|
||||
def default(self, request):
|
||||
if self.has_body(request):
|
||||
return {'body': self.from_json(request.body)}
|
||||
else:
|
||||
return {}
|
||||
|
||||
|
||||
class JSONResponseSerializer(object):
|
||||
|
||||
def to_json(self, data):
|
||||
def sanitizer(obj):
|
||||
if isinstance(obj, datetime.datetime):
|
||||
return obj.isoformat()
|
||||
return obj
|
||||
|
||||
return json.dumps(data, default=sanitizer)
|
||||
|
||||
def default(self, response, result):
|
||||
response.content_type = 'application/json'
|
||||
response.body = self.to_json(result)
|
||||
|
||||
|
||||
class Resource(object):
|
||||
"""
|
||||
WSGI app that handles (de)serialization and controller dispatch.
|
||||
|
||||
Reads routing information supplied by RoutesMiddleware and calls
|
||||
the requested action method upon its deserializer, controller,
|
||||
and serializer. Those three objects may implement any of the basic
|
||||
controller action methods (create, update, show, index, delete)
|
||||
along with any that may be specified in the api router. A 'default'
|
||||
method may also be implemented to be used in place of any
|
||||
non-implemented actions. Deserializer methods must accept a request
|
||||
argument and return a dictionary. Controller methods must accept a
|
||||
request argument. Additionally, they must also accept keyword
|
||||
arguments that represent the keys returned by the Deserializer. They
|
||||
may raise a webob.exc exception or return a dict, which will be
|
||||
serialized by requested content type.
|
||||
"""
|
||||
def __init__(self, controller, deserializer, serializer):
|
||||
"""
|
||||
:param controller: object that implements methods created by routes lib
|
||||
:param deserializer: object that supports webob request deserialization
|
||||
through controller-like actions
|
||||
:param serializer: object that supports webob response serialization
|
||||
through controller-like actions
|
||||
"""
|
||||
self.controller = controller
|
||||
self.serializer = serializer
|
||||
self.deserializer = deserializer
|
||||
|
||||
@webob.dec.wsgify(RequestClass=Request)
|
||||
def __call__(self, request):
|
||||
"""WSGI method that controls (de)serialization and method dispatch."""
|
||||
action_args = self.get_action_args(request.environ)
|
||||
action = action_args.pop('action', None)
|
||||
|
||||
deserialized_request = self.dispatch(self.deserializer,
|
||||
action, request)
|
||||
action_args.update(deserialized_request)
|
||||
|
||||
action_result = self.dispatch(self.controller, action,
|
||||
request, **action_args)
|
||||
try:
|
||||
response = webob.Response(request=request)
|
||||
self.dispatch(self.serializer, action, response, action_result)
|
||||
return response
|
||||
|
||||
# return unserializable result (typically a webob exc)
|
||||
except Exception:
|
||||
return action_result
|
||||
|
||||
def dispatch(self, obj, action, *args, **kwargs):
|
||||
"""Find action-specific method on self and call it."""
|
||||
try:
|
||||
method = getattr(obj, action)
|
||||
except AttributeError:
|
||||
method = getattr(obj, 'default')
|
||||
|
||||
return method(*args, **kwargs)
|
||||
|
||||
def get_action_args(self, request_environment):
|
||||
"""Parse dictionary created by routes library."""
|
||||
try:
|
||||
args = request_environment['wsgiorg.routing_args'][1].copy()
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
try:
|
||||
del args['controller']
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
try:
|
||||
del args['format']
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
return args
|
||||
|
||||
|
||||
class BasePasteFactory(object):
|
||||
|
||||
"""A base class for paste app and filter factories.
|
||||
|
||||
Sub-classes must override the KEY class attribute and provide
|
||||
a __call__ method.
|
||||
"""
|
||||
|
||||
KEY = None
|
||||
|
||||
def __init__(self, conf):
|
||||
self.conf = conf
|
||||
|
||||
def __call__(self, global_conf, **local_conf):
|
||||
raise NotImplementedError
|
||||
|
||||
def _import_factory(self, local_conf):
|
||||
"""Import an app/filter class.
|
||||
|
||||
Lookup the KEY from the PasteDeploy local conf and import the
|
||||
class named there. This class can then be used as an app or
|
||||
filter factory.
|
||||
|
||||
Note we support the <module>:<class> format.
|
||||
|
||||
Note also that if you do e.g.
|
||||
|
||||
key =
|
||||
value
|
||||
|
||||
then ConfigParser returns a value with a leading newline, so
|
||||
we strip() the value before using it.
|
||||
"""
|
||||
class_name = local_conf[self.KEY].replace(':', '.').strip()
|
||||
return utils.import_class(class_name)
|
||||
|
||||
|
||||
class AppFactory(BasePasteFactory):
|
||||
|
||||
"""A Generic paste.deploy app factory.
|
||||
|
||||
This requires balancer.app_factory to be set to a callable which returns a
|
||||
WSGI app when invoked. The format of the name is <module>:<callable> e.g.
|
||||
|
||||
[app:apiv1app]
|
||||
paste.app_factory = balancer.common.wsgi:app_factory
|
||||
balancer.app_factory = balancer.api.v1:API
|
||||
|
||||
The WSGI app constructor must accept a ConfigOpts object and a local config
|
||||
dict as its two arguments.
|
||||
"""
|
||||
print "DEBUG AppFactory start\n"
|
||||
KEY = 'windc.app_factory'
|
||||
|
||||
def __call__(self, global_conf, **local_conf):
|
||||
"""The actual paste.app_factory protocol method."""
|
||||
print "DEBUG Call factory"
|
||||
factory = self._import_factory(local_conf)
|
||||
return factory(self.conf, **local_conf)
|
||||
|
||||
|
||||
class FilterFactory(AppFactory):
|
||||
|
||||
"""A Generic paste.deploy filter factory.
|
||||
|
||||
This requires balancer.filter_factory to be set to a callable which returns
|
||||
a WSGI filter when invoked. The format is <module>:<callable> e.g.
|
||||
|
||||
[filter:cache]
|
||||
paste.filter_factory = balancer.common.wsgi:filter_factory
|
||||
balancer.filter_factory = balancer.api.middleware.cache:CacheFilter
|
||||
|
||||
The WSGI filter constructor must accept a WSGI app, a ConfigOpts object and
|
||||
a local config dict as its three arguments.
|
||||
"""
|
||||
|
||||
KEY = 'windc.filter_factory'
|
||||
|
||||
def __call__(self, global_conf, **local_conf):
|
||||
"""The actual paste.filter_factory protocol method."""
|
||||
factory = self._import_factory(local_conf)
|
||||
|
||||
def filter(app):
|
||||
return factory(app, self.conf, **local_conf)
|
||||
|
||||
return filter
|
||||
|
||||
|
||||
def setup_paste_factories(conf):
|
||||
"""Set up the generic paste app and filter factories.
|
||||
|
||||
Set things up so that:
|
||||
|
||||
paste.app_factory = windc.common.wsgi:app_factory
|
||||
|
||||
and
|
||||
|
||||
paste.filter_factory = windc.common.wsgi:filter_factory
|
||||
|
||||
work correctly while loading PasteDeploy configuration.
|
||||
|
||||
The app factories are constructed at runtime to allow us to pass a
|
||||
ConfigOpts object to the WSGI classes.
|
||||
|
||||
:param conf: a ConfigOpts object
|
||||
"""
|
||||
print "DEBUG Setup Factories\n"
|
||||
global app_factory, filter_factory
|
||||
app_factory = AppFactory(conf)
|
||||
filter_factory = FilterFactory(conf)
|
||||
|
||||
|
||||
def teardown_paste_factories():
|
||||
"""Reverse the effect of setup_paste_factories()."""
|
||||
global app_factory, filter_factory
|
||||
del app_factory
|
||||
del filter_factory
|
||||
|
||||
|
||||
def paste_deploy_app(paste_config_file, app_name, conf):
|
||||
"""Load a WSGI app from a PasteDeploy configuration.
|
||||
|
||||
Use deploy.loadapp() to load the app from the PasteDeploy configuration,
|
||||
ensuring that the supplied ConfigOpts object is passed to the app and
|
||||
filter constructors.
|
||||
|
||||
:param paste_config_file: a PasteDeploy config file
|
||||
:param app_name: the name of the app/pipeline to load from the file
|
||||
:param conf: a ConfigOpts object to supply to the app and its filters
|
||||
:returns: the WSGI app
|
||||
"""
|
||||
print "DEBUG Paste deploy\n"
|
||||
setup_paste_factories(conf)
|
||||
try:
|
||||
return deploy.loadapp("config:%s" % paste_config_file, name=app_name)
|
||||
finally:
|
||||
teardown_paste_factories()
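
For reference, a minimal wiring sketch showing how Router, Resource and the JSON (de)serializers above fit together (the controller and route are illustrative, not part of this commit):

    import routes
    from windc.common import wsgi

    class DatacenterController(object):
        def index(self, request, tenant_id):
            # routing args (here tenant_id) arrive as keyword arguments
            return {'datacenters': []}

    resource = wsgi.Resource(DatacenterController(),
                             wsgi.JSONRequestDeserializer(),
                             wsgi.JSONResponseSerializer())
    mapper = routes.Mapper()
    mapper.connect('/{tenant_id}/datacenters',
                   controller=resource, action='index')
    app = wsgi.Router(mapper)            # a WSGI app returning JSON responses
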
windc/windc/core/__init__.py (new file, 16 lines)
@ -0,0 +1,16 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2011 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
windc/windc/core/api.py (new file, 46 lines)
@ -0,0 +1,46 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2011 OpenStack LLC.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
def dc_get_index(conf, tenant_id):
|
||||
pass
|
||||
|
||||
def create_dc(conf, params):
|
||||
pass
|
||||
|
||||
def delete_dc(conf, tenant_id, dc_id):
|
||||
pass
|
||||
|
||||
def dc_get_data(conf, tenant_id, dc_id):
|
||||
pass
|
||||
|
||||
def update_dc(conf, tenant_id, dc_id, body):
|
||||
pass
|
||||
|
||||
def service_get_index(conf, tenant_id, datacenter_id):
|
||||
pass
|
||||
|
||||
def create_service(conf, params):
|
||||
pass
|
||||
|
||||
def delete_service(conf, tenant_id, datacenter_id, service_id):
|
||||
pass
|
||||
|
||||
def service_get_data(conf, tenant_id, datacenter_id, service_id):
|
||||
pass
|
||||
|
||||
def update_service(conf, tenant_id, datacenter_id, service_id, body):
|
||||
pass
windc/windc/core/service_status.py (new file, 25 lines)
@ -0,0 +1,25 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
|
||||
# Copyright 2010 United States Government as represented by the
|
||||
# Administrator of the National Aeronautics and Space Administration.
|
||||
# Copyright 2011 Piston Cloud Computing, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
"""Possible load balancer statuses."""
|
||||
|
||||
BUILD = "BUILD"
|
||||
ACTIVE = "ACTIVE"
|
||||
PENDING_UPDATE = "PENDING_UPDATE"
|
||||
ERROR = "ERROR"
windc/windc/db/__init__.py (new file, 1 line)
@ -0,0 +1 @@
|
||||
from windc.db.api import *
windc/windc/db/api.py (new file, 174 lines)
@ -0,0 +1,174 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
|
||||
# Copyright 2010 United States Government as represented by the
|
||||
# Administrator of the National Aeronautics and Space Administration.
|
||||
# Copyright 2011 Piston Cloud Computing, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
"""Database storage API."""
|
||||
|
||||
import functools
|
||||
import datetime
|
||||
|
||||
from windc.core import service_status
from windc.db import models
from windc.db.session import get_session
from windc import exception
|
||||
|
||||
|
||||
|
||||
# XXX(akscram): pack_ and unpack_ are helper methods for compatibility
|
||||
def pack_extra(model, values):
|
||||
obj_ref = model()
|
||||
pack_update(obj_ref, values)
|
||||
return obj_ref
|
||||
|
||||
|
||||
def unpack_extra(obj_ref):
|
||||
obj_dict = dict(obj_ref.iteritems())
|
||||
obj_dict.update(obj_dict.pop('extra', None) or {})
|
||||
return obj_dict
|
||||
|
||||
|
||||
def pack_update(obj_ref, values):
|
||||
obj_dict = values.copy()
|
||||
for k, v in values.iteritems():
|
||||
if k in obj_ref.keys():
|
||||
obj_ref[k] = obj_dict.pop(k)
|
||||
if obj_dict:
|
||||
if obj_ref['extra'] is not None:
|
||||
obj_ref['extra'].update(obj_dict)
|
||||
else:
|
||||
obj_ref['extra'] = obj_dict.copy()
|
||||
|
||||
|
||||
datacenter_pack_extra = functools.partial(pack_extra, models.DataCenter)
|
||||
service_pack_extra = functools.partial(pack_extra, models.Service)
|
||||
|
||||
|
||||
# Datacenter
|
||||
|
||||
|
||||
def datacenter_get(conf, datacenter_id, session=None):
|
||||
session = session or get_session(conf)
|
||||
datacenter_ref = session.query(models.DataCenter).\
|
||||
filter_by(id=datacenter_id).first()
|
||||
if not datacenter_ref:
|
||||
raise exception.DeviceNotFound(datacenter_id=datacenter_id)
|
||||
return datacenter_ref
|
||||
|
||||
|
||||
def datacenter_get_all(conf):
|
||||
session = get_session(conf)
|
||||
query = session.query(models.DataCenter)
|
||||
return query.all()
|
||||
|
||||
|
||||
def datacenter_create(conf, values):
|
||||
session = get_session(conf)
|
||||
with session.begin():
|
||||
datacenter_ref = models.DataCenter()
|
||||
datacenter_ref.update(values)
|
||||
session.add(datacenter_ref)
|
||||
return datacenter_ref
|
||||
|
||||
|
||||
def datacenter_update(conf, datacenter_id, values):
|
||||
session = get_session(conf)
|
||||
with session.begin():
|
||||
datacenter_ref = datacenter_get(conf, datacenter_id, session=session)
|
||||
datacenter_ref.update(values)
|
||||
return datacenter_ref
|
||||
|
||||
|
||||
def datacenter_destroy(conf, datacenter_id):
|
||||
session = get_session(conf)
|
||||
with session.begin():
|
||||
datacenter_ref = datacenter_get(conf, datacenter_id, session=session)
|
||||
session.delete(datacenter_ref)
|
||||
|
||||
# Service
|
||||
|
||||
|
||||
def service_get(conf, service_id, tenant_id=None, session=None):
|
||||
session = session or get_session(conf)
|
||||
query = session.query(models.Service).filter_by(id=service_id)
|
||||
if tenant_id:
|
||||
query = query.filter_by(tenant_id=tenant_id)
|
||||
service_ref = query.first()
|
||||
if not service_ref:
|
||||
raise exception.ServiceNotFound(service_ref=service_ref)
|
||||
return service_ref
|
||||
|
||||
|
||||
def service_get_all_by_project(conf, tenant_id):
|
||||
session = get_session(conf)
|
||||
query = session.query(models.Service).filter_by(tenant_id=tenant_id)
|
||||
return query.all()
|
||||
|
||||
|
||||
def service_get_all_by_vm_id(conf, tenant_id, vm_id):
|
||||
session = get_session(conf)
|
||||
query = session.query(models.Service).distinct().\
|
||||
filter_by(tenant_id=tenant_id).\
|
||||
filter(models.Service.vm_id == vm_id)
|
||||
return query.all()
|
||||
|
||||
|
||||
def service_get_all_by_datacenter_id(conf, datacenter_id):
|
||||
session = get_session(conf)
|
||||
query = session.query(models.Service).filter_by(datacenter_id=datacenter_id)
|
||||
service_refs = query.all()
|
||||
if not service_refs:
|
||||
raise exception.ServiceNotFound('No service '
|
||||
'for the datacenter %s found'
|
||||
% datacenter_id)
|
||||
return service_refs
|
||||
|
||||
|
||||
def service_create(conf, values):
|
||||
session = get_session(conf)
|
||||
with session.begin():
|
||||
service_ref = models.Service()
|
||||
service_ref.update(values)
|
||||
session.add(service_ref)
|
||||
return service_ref
|
||||
|
||||
|
||||
def service_update(conf, service_id, values):
|
||||
session = get_session(conf)
|
||||
with session.begin():
|
||||
service_ref = service_get(conf, service_id, session=session)
|
||||
service_ref.update(values)
|
||||
service_ref['updated_at'] = datetime.datetime.utcnow()
|
||||
return service_ref
|
||||
|
||||
|
||||
def service_destroy(conf, service_id):
|
||||
session = get_session(conf)
|
||||
with session.begin():
|
||||
service_ref = service_get(conf, service_id, session=session)
|
||||
session.delete(service_ref)
|
||||
|
||||
|
||||
def service_count_active_by_datacenter(conf, datacenter_id):
|
||||
session = get_session(conf)
|
||||
with session.begin():
|
||||
service_count = session.query(models.Service).\
|
||||
filter_by(datacenter_id=datacenter_id).\
|
||||
filter_by(status=service_status.ACTIVE).\
|
||||
count()
|
||||
return service_count
|
||||
|
||||
|
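
For reference, a minimal sketch of the pack_extra/unpack_extra helpers above (the values are made up): keys that are not model columns land in the 'extra' blob and are merged back on unpack.

    values = {'name': 'dc1', 'type': 'windows', 'custom_key': 'not-a-column'}
    dc_ref = datacenter_pack_extra(values)      # a models.DataCenter instance
    assert dc_ref['extra'] == {'custom_key': 'not-a-column'}
    flat = unpack_extra(dc_ref)
    assert flat['name'] == 'dc1' and flat['custom_key'] == 'not-a-column'
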
windc/windc/db/base.py (new file, 76 lines)
@ -0,0 +1,76 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
|
||||
# Copyright 2010 United States Government as represented by the
|
||||
# Administrator of the National Aeronautics and Space Administration.
|
||||
# Copyright 2011 Piston Cloud Computing, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
"""Base classes and custome fields for balancer models."""
|
||||
|
||||
import json
|
||||
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy.orm import object_mapper
|
||||
from sqlalchemy.types import TypeDecorator
|
||||
from sqlalchemy import Text
|
||||
|
||||
|
||||
Base = declarative_base()
|
||||
|
||||
|
||||
class DictBase(object):
|
||||
def to_dict(self):
|
||||
return dict(self.iteritems())
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
setattr(self, key, value)
|
||||
|
||||
def __getitem__(self, key):
|
||||
return getattr(self, key)
|
||||
|
||||
def get(self, key, default=None):
|
||||
return getattr(self, key, default)
|
||||
|
||||
def __iter__(self):
|
||||
return (col.name for col in object_mapper(self).columns)
|
||||
|
||||
def keys(self):
|
||||
return list(self)
|
||||
|
||||
def update(self, values):
|
||||
for key, value in values.iteritems():
|
||||
if isinstance(value, dict):
|
||||
value = value.copy()
|
||||
setattr(self, key, value)
|
||||
|
||||
def iteritems(self):
|
||||
items = []
|
||||
for key in self:
|
||||
value = getattr(self, key)
|
||||
if isinstance(value, dict):
|
||||
value = value.copy()
|
||||
items.append((key, value))
|
||||
return iter(items)
|
||||
|
||||
|
||||
class JsonBlob(TypeDecorator):
|
||||
|
||||
impl = Text
|
||||
|
||||
def process_bind_param(self, value, dialect):
|
||||
return json.dumps(value)
|
||||
|
||||
def process_result_value(self, value, dialect):
|
||||
return json.loads(value)
|
windc/windc/db/migrate_repo/README (new file, 4 lines)
@ -0,0 +1,4 @@
|
||||
This is a database migration repository.
|
||||
|
||||
More information at
|
||||
http://code.google.com/p/sqlalchemy-migrate/
|
windc/windc/db/migrate_repo/__init__.py (new, empty file)
windc/windc/db/migrate_repo/manage.py (new file, 5 lines)
@ -0,0 +1,5 @@
|
||||
#!/usr/bin/env python
|
||||
from migrate.versioning.shell import main
|
||||
|
||||
if __name__ == '__main__':
|
||||
main(debug='False')
|
windc/windc/db/migrate_repo/migrate.cfg (new file, 25 lines)
@ -0,0 +1,25 @@
|
||||
[db_settings]
|
||||
# Used to identify which repository this database is versioned under.
|
||||
# You can use the name of your project.
|
||||
repository_id=windc
|
||||
|
||||
# The name of the database table used to track the schema version.
|
||||
# This name shouldn't already be used by your project.
|
||||
# If this is changed once a database is under version control, you'll need to
|
||||
# change the table name in each database too.
|
||||
version_table=migrate_version
|
||||
|
||||
# When committing a change script, Migrate will attempt to generate the
|
||||
# sql for all supported databases; normally, if one of them fails - probably
|
||||
# because you don't have that database installed - it is ignored and the
|
||||
# commit continues, perhaps ending successfully.
|
||||
# Databases in this list MUST compile successfully during a commit, or the
|
||||
# entire commit will fail. List the databases your application will actually
|
||||
# be using to ensure your updates to that database work properly.
|
||||
# This must be a list; example: ['postgres','sqlite']
|
||||
required_dbs=[]
|
||||
|
||||
# When creating new change scripts, Migrate will stamp the new script with
|
||||
# a version number. By default this is latest_version + 1. You can set this
|
||||
# to 'true' to tell Migrate to use the UTC timestamp instead.
|
||||
use_timestamp_numbering=False
|
@ -0,0 +1,40 @@
|
||||
from sqlalchemy.schema import MetaData, Table, Column, ForeignKey
|
||||
from sqlalchemy.types import Integer, String, Text, DateTime
|
||||
|
||||
|
||||
meta = MetaData()
|
||||
|
||||
Table('datacenter', meta,
|
||||
Column('id', String(32), primary_key=True),
|
||||
Column('name', String(255)),
|
||||
Column('type', String(255)),
|
||||
Column('version', String(255)),
|
||||
Column('KMS', String(80)),
|
||||
Column('WSUS', String(80)),
|
||||
Column('extra', Text()),
|
||||
)
|
||||
|
||||
Table('service', meta,
|
||||
Column('id', String(32), primary_key=True),
|
||||
Column('datacenter_id', String(32), ForeignKey('datacenter.id')),
|
||||
Column('name', String(255)),
|
||||
Column('type', String(40)),
|
||||
Column('status', String(255)),
|
||||
Column('tenant_id', String(40)),
|
||||
Column('created_at', DateTime, nullable=False),
|
||||
Column('updated_at', DateTime, nullable=False),
|
||||
Column('deployed', String(40)),
|
||||
Column('vm_id',String(40)),
|
||||
Column('extra', Text()),
|
||||
)
|
||||
|
||||
|
||||
|
||||
def upgrade(migrate_engine):
|
||||
meta.bind = migrate_engine
|
||||
meta.create_all()
|
||||
|
||||
|
||||
def downgrade(migrate_engine):
|
||||
meta.bind = migrate_engine
|
||||
meta.drop_all()
|
windc/windc/db/migrate_repo/versions/__init__.py (new, empty file)
windc/windc/db/models.py (new file, 80 lines)
@ -0,0 +1,80 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright by Mirantis Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
"""SQLAlchemy models for balancer data."""
|
||||
|
||||
import datetime
|
||||
import uuid
|
||||
|
||||
from sqlalchemy.orm import relationship, backref
|
||||
from sqlalchemy import (Column, ForeignKey, Integer, String, Boolean,
|
||||
DateTime)
|
||||
|
||||
from windc.db.base import Base, DictBase, JsonBlob
|
||||
|
||||
|
||||
def create_uuid():
|
||||
return uuid.uuid4().hex
|
||||
|
||||
|
||||
class DataCenter(DictBase, Base):
|
||||
"""
|
||||
Represents a data center - a Windows Environment with different
|
||||
services in it.
|
||||
"""
|
||||
|
||||
__tablename__ = 'datacenter'
|
||||
id = Column(String(32), primary_key=True, default=create_uuid)
|
||||
name = Column(String(255))
|
||||
type = Column(String(255))
|
||||
version = Column(String(255))
|
||||
KMS = Column(String(80))
|
||||
WSUS = Column(String(80))
|
||||
extra = Column(JsonBlob())
|
||||
|
||||
|
||||
class Service(DictBase, Base):
|
||||
"""
|
||||
Represents an instance of service.
|
||||
|
||||
:var name: string
|
||||
:var type: string - type of service (e.g. Active Directory)
|
||||
:var tenant_id: string - OpenStack tenant ID
|
||||
:var extra: dictionary - additional attributes
|
||||
"""
|
||||
|
||||
__tablename__ = 'service'
|
||||
id = Column(String(32), primary_key=True, default=create_uuid)
|
||||
datacenter_id = Column(String(32), ForeignKey('datacenter.id'))
|
||||
name = Column(String(255))
|
||||
type = Column(String(40))
|
||||
status = Column(String(40))
|
||||
tenant_id = Column(String(40))
|
||||
created_at = Column(DateTime, default=datetime.datetime.utcnow,
|
||||
nullable=False)
|
||||
updated_at = Column(DateTime, default=datetime.datetime.utcnow,
|
||||
onupdate=datetime.datetime.utcnow,
|
||||
nullable=False)
|
||||
deployed = Column(String(40))
|
||||
vm_id = Column(String(40))
|
||||
extra = Column(JsonBlob())
|
||||
datacenter = relationship(DataCenter,
|
||||
backref=backref('service', order_by=id),
|
||||
uselist=False)
|
||||
|
||||
def register_models(engine):
|
||||
"""Create tables for models."""
|
||||
|
||||
Base.metadata.create_all(engine)
|
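
For reference, a minimal sketch of exercising the models above against an in-memory SQLite database (the values are made up, not part of this commit):

    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    engine = create_engine('sqlite:///:memory:')
    register_models(engine)                     # creates the two tables
    sa_session = sessionmaker(bind=engine)()
    dc = DataCenter(name='dc1', type='windows', version='2008R2', extra={})
    sa_session.add(dc)
    sa_session.commit()
    print dc.id                                 # 32-char uuid hex from create_uuid()
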
windc/windc/db/session.py (new file, 122 lines)
@ -0,0 +1,122 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Session management functions."""

import os
import logging

from migrate.versioning import api as versioning_api
from migrate import exceptions as versioning_exceptions
from sqlalchemy import create_engine
from sqlalchemy.engine.url import make_url
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import NullPool
from sqlalchemy.exc import DisconnectionError

from windc.common import cfg
from windc.db import migrate_repo


DB_GROUP_NAME = 'sql'
DB_OPTIONS = (
    cfg.IntOpt('idle_timeout', default=3600),
    cfg.StrOpt('connection', default='sqlite:///windc.sqlite'),
)

MAKER = None
ENGINE = None


class MySQLPingListener(object):
    """
    Ensures that MySQL connections checked out of the
    pool are alive.

    Borrowed from:
    http://groups.google.com/group/sqlalchemy/msg/a4ce563d802c929f

    Error codes caught:
    * 2006 MySQL server has gone away
    * 2013 Lost connection to MySQL server during query
    * 2014 Commands out of sync; you can't run this command now
    * 2045 Can't open shared memory; no answer from server (%lu)
    * 2055 Lost connection to MySQL server at '%s', system error: %d

    from http://dev.mysql.com/doc/refman/5.6/en/error-messages-client.html
    """

    def checkout(self, dbapi_con, con_record, con_proxy):
        try:
            dbapi_con.cursor().execute('select 1')
        except dbapi_con.OperationalError, ex:
            if ex.args[0] in (2006, 2013, 2014, 2045, 2055):
                logging.warn('Got mysql server has gone away: %s', ex)
                raise DisconnectionError("Database server went away")
            else:
                raise


def get_session(conf, autocommit=True, expire_on_commit=False):
    """Return a SQLAlchemy session."""
    global MAKER

    if MAKER is None:
        MAKER = sessionmaker(autocommit=autocommit,
                             expire_on_commit=expire_on_commit)
    engine = get_engine(conf)
    MAKER.configure(bind=engine)
    session = MAKER()
    return session


def get_engine(conf):
    """Return a SQLAlchemy engine."""
    global ENGINE

    register_conf_opts(conf)
    connection_url = make_url(conf.sql.connection)
    if ENGINE is None or not ENGINE.url == connection_url:
        engine_args = {'pool_recycle': conf.sql.idle_timeout,
                       'echo': False,
                       'convert_unicode': True
                       }
        if 'sqlite' in connection_url.drivername:
            engine_args['poolclass'] = NullPool
        if 'mysql' in connection_url.drivername:
            engine_args['listeners'] = [MySQLPingListener()]
        ENGINE = create_engine(conf.sql.connection, **engine_args)
    return ENGINE


def register_conf_opts(conf, options=DB_OPTIONS, group=DB_GROUP_NAME):
    """Register database options."""

    conf.register_group(cfg.OptGroup(name=group))
    conf.register_opts(options, group=group)


def sync(conf):
    """Upgrade the schema, placing the DB under version control if needed."""
    register_conf_opts(conf)
    repo_path = os.path.abspath(os.path.dirname(migrate_repo.__file__))
    try:
        versioning_api.upgrade(conf.sql.connection, repo_path)
    except versioning_exceptions.DatabaseNotControlledError:
        versioning_api.version_control(conf.sql.connection, repo_path)
        versioning_api.upgrade(conf.sql.connection, repo_path)
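For reference, a hypothetical caller would use these helpers roughly as follows (sketch only, not part of this commit; it assumes the models module is importable as windc.db.models and that conf is the service's cfg-based configuration object, so get_engine() can register the [sql] options, pick NullPool for sqlite URLs and attach MySQLPingListener for mysql URLs):

from windc.db import models, session

def list_services(conf, tenant_id):
    db_session = session.get_session(conf)   # builds/reuses the engine lazily
    return (db_session.query(models.Service)
                      .filter_by(tenant_id=tenant_id)
                      .all())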
50 windc/windc/exception.py Normal file
@ -0,0 +1,50 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""WinDC base exception handling."""

import webob.exc as exception


class NotFound(exception.HTTPNotFound):
    message = 'Resource not found.'

    def __init__(self, message=None, **kwargs):
        super(NotFound, self).__init__(message)
        self.kwargs = kwargs


class DeviceNotFound(NotFound):
    message = 'Device not found'


class NoValidDevice(NotFound):
    message = 'Suitable device not found'


class ServiceNotFound(NotFound):
    message = 'Service not found'


class DeviceConflict(exception.HTTPConflict):
    message = 'Conflict while deleting device'

    def __init__(self, message=None, **kwargs):
        super(DeviceConflict, self).__init__(message)
        self.kwargs = kwargs
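For reference, a sketch of how a handler would use these exceptions (not part of this commit; the plain dict stands in for a real database lookup). Because each class subclasses a webob.exc response, raising one from a WSGI controller yields the corresponding HTTP status:

from windc import exception

def show_service(services, service_id):
    service = services.get(service_id)   # `services` is a plain dict here
    if service is None:
        raise exception.ServiceNotFound(service_id=service_id)
    return service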
66 windc/windc/utils.py Normal file
@ -0,0 +1,66 @@
import contextlib
import functools
import logging
import sys

import webob.exc

LOG = logging.getLogger(__name__)


def http_success_code(code):
    """Attaches a response code to a method.

    This decorator associates a response code with a method. Note
    that the function attributes are directly manipulated; the method
    is not wrapped.
    """

    def decorator(func):
        func.wsgi_code = code
        return func
    return decorator


def verify_tenant(func):
    @functools.wraps(func)
    def __inner(self, req, tenant_id, *args, **kwargs):
        if hasattr(req, 'context') and tenant_id != req.context.tenant_id:
            LOG.info('User is not authorized to access this tenant.')
            raise webob.exc.HTTPUnauthorized
        return func(self, req, tenant_id, *args, **kwargs)
    return __inner


def require_admin(func):
    @functools.wraps(func)
    def __inner(self, req, *args, **kwargs):
        if hasattr(req, 'context') and not req.context.is_admin:
            LOG.info('User has no admin privileges.')
            raise webob.exc.HTTPUnauthorized
        return func(self, req, *args, **kwargs)
    return __inner


@contextlib.contextmanager
def save_and_reraise_exception():
    """Save the current exception, run some code and then re-raise.

    In some cases the exception context can be cleared, so that None is
    re-raised after an exception handler has run. This can happen when
    eventlet switches greenthreads, or when code inside an exception
    handler raises and catches another exception. In both cases the
    exception context is cleared.

    To work around this, we save the exception state, run handler code, and
    then re-raise the original exception. If another exception occurs, the
    saved exception is logged and the new exception is re-raised.
    """
    type_, value, traceback = sys.exc_info()
    try:
        yield
    except Exception:
        LOG.error('Original exception being dropped',
                  exc_info=(type_, value, traceback))
        raise
    raise type_, value, traceback
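For reference, a minimal sketch of how these helpers are meant to be combined in a controller (illustrative only, not part of this commit; ServiceController and _do_delete are hypothetical, and it assumes the WSGI layer honours the wsgi_code attribute set by http_success_code):

import logging

from windc import utils

LOG = logging.getLogger(__name__)


class ServiceController(object):

    @utils.http_success_code(202)   # serializer is assumed to read func.wsgi_code
    @utils.verify_tenant            # 401 unless tenant_id matches req.context
    def delete(self, req, tenant_id, service_id):
        try:
            self._do_delete(service_id)
        except Exception:
            # log cleanup details, then re-raise the original exception
            with utils.save_and_reraise_exception():
                LOG.error('Delete of service %s failed', service_id)

    def _do_delete(self, service_id):
        raise RuntimeError('backend failure')   # stand-in for real work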