Refactor api service
Change-Id: Id77ce5b6e4225a7fe6554575ae899ac7fd86a00b
This commit is contained in:
parent
45b10f82ec
commit
d1877662a4
@ -1,17 +1,14 @@
|
||||
[DEFAULT]
|
||||
|
||||
# The list of modules to copy from oslo-incubator.git
|
||||
module=config.generator
|
||||
module=log
|
||||
module=jsonutils
|
||||
module=lockutils
|
||||
# The list of modules to copy from oslo-incubator
|
||||
module=cliutils
|
||||
module=fileutils
|
||||
module=local
|
||||
module=loopingcall
|
||||
module=periodic_task
|
||||
module=service
|
||||
module=threadgroup
|
||||
module=timeutils
|
||||
module=importutils
|
||||
module=strutils
|
||||
module=uuidutils
|
||||
module=versionutils
|
||||
|
||||
# The base module to hold the copy of openstack.common
|
||||
base=mistral
|
||||
base=terracotta
|
||||
|
@ -1,6 +1,6 @@
|
||||
[metadata]
|
||||
name = terracotta
|
||||
summary = Dynamic Scheduling Serice for OpenStack Cloud
|
||||
summary = Dynamic Scheduling service for OpenStack Cloud
|
||||
description-file =
|
||||
README.rst
|
||||
license = Apache License, Version 2.0
|
||||
|
@ -1,6 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
# Copyright 2015 Huawei Technologies Co. Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@ -17,7 +16,7 @@
|
||||
"""Access Control API server."""
|
||||
|
||||
from keystonemiddleware import auth_token
|
||||
from oslo.config import cfg
|
||||
from oslo_config import cfg
|
||||
|
||||
|
||||
_ENFORCER = None
|
||||
|
@ -1,6 +1,4 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
# Copyright 2015 Huawei Technologies Co. Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@ -14,13 +12,11 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from oslo.config import cfg
|
||||
from oslo_config import cfg
|
||||
import pecan
|
||||
|
||||
from mistral.api import access_control
|
||||
from mistral import context as ctx
|
||||
from mistral.db.v2 import api as db_api_v2
|
||||
from mistral.services import periodic
|
||||
from terracotta.api import access_control
|
||||
from terracotta import context as ctx
|
||||
|
||||
|
||||
def get_pecan_config():
|
||||
@ -45,10 +41,6 @@ def setup_app(config=None):
|
||||
|
||||
app_conf = dict(config.app)
|
||||
|
||||
db_api_v2.setup_db()
|
||||
|
||||
periodic.setup()
|
||||
|
||||
app = pecan.make_app(
|
||||
app_conf.pop('root'),
|
||||
hooks=lambda: [ctx.ContextHook()],
|
||||
|
@ -1,6 +1,4 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
# Copyright 2015 Huawei Technologies Co. Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@ -18,9 +16,10 @@ import pecan
|
||||
from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from mistral.api.controllers import resource
|
||||
from mistral.api.controllers.v2 import root as v2_root
|
||||
from mistral.openstack.common import log as logging
|
||||
from oslo_log import log as logging
|
||||
|
||||
from terracotta.api.controllers import resource
|
||||
from terracotta.api.controllers.v1 import root as v1_root
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
@ -46,23 +45,23 @@ class APIVersion(resource.Resource):
|
||||
status='CURRENT',
|
||||
link=resource.Link(
|
||||
target_name='v1',
|
||||
href='http://example.com:9777/v1'
|
||||
href='http://example.com:9999/v1'
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class RootController(object):
|
||||
v2 = v2_root.Controller()
|
||||
v1 = v1_root.Controller()
|
||||
|
||||
@wsme_pecan.wsexpose([APIVersion])
|
||||
def index(self):
|
||||
LOG.debug("Fetching API versions.")
|
||||
|
||||
host_url_v2 = '%s/%s' % (pecan.request.host_url, 'v2')
|
||||
api_v2 = APIVersion(
|
||||
id='v2.0',
|
||||
host_url_v1 = '%s/%s' % (pecan.request.host_url, 'v1')
|
||||
api_v1 = APIVersion(
|
||||
id='v1.0',
|
||||
status='CURRENT',
|
||||
link=resource.Link(href=host_url_v2, target='v2')
|
||||
link=resource.Link(href=host_url_v1, target='v1')
|
||||
)
|
||||
|
||||
return [api_v2]
|
||||
return [api_v1]
|
||||
|
@ -1,3 +1,4 @@
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
# Copyright 2015 - StackStorm, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@ -13,29 +14,24 @@
|
||||
# limitations under the License.
|
||||
|
||||
import pecan
|
||||
from pecan import rest
|
||||
from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from mistral import exceptions as exc
|
||||
from mistral.openstack.common import log as logging
|
||||
from terracotta.api.controllers import resource
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
class RootResource(resource.Resource):
|
||||
"""Root resource for API version 2.
|
||||
|
||||
It references all other resources belonging to the API.
|
||||
"""
|
||||
|
||||
uri = wtypes.text
|
||||
|
||||
|
||||
class SpecValidationController(rest.RestController):
|
||||
class Controller(object):
|
||||
"""API root controller for version 1."""
|
||||
|
||||
def __init__(self, parser):
|
||||
super(SpecValidationController, self).__init__()
|
||||
self._parse_func = parser
|
||||
|
||||
@pecan.expose('json')
|
||||
def post(self):
|
||||
"""Validate a spec."""
|
||||
definition = pecan.request.text
|
||||
|
||||
try:
|
||||
self._parse_func(definition)
|
||||
except exc.DSLParsingException as e:
|
||||
return {'valid': False, 'error': e.message}
|
||||
|
||||
return {'valid': True}
|
||||
@wsme_pecan.wsexpose(RootResource)
|
||||
def index(self):
|
||||
return RootResource(uri='%s/%s' % (pecan.request.host_url, 'v1'))
|
@ -1,159 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2014 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import pecan
|
||||
from pecan import hooks
|
||||
from pecan import rest
|
||||
from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from mistral.api.controllers import resource
|
||||
from mistral.api.hooks import content_type as ct_hook
|
||||
from mistral.db.v2 import api as db_api
|
||||
from mistral import exceptions as exc
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.services import actions
|
||||
from mistral.utils import rest_utils
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
SCOPE_TYPES = wtypes.Enum(str, 'private', 'public')
|
||||
|
||||
|
||||
class Action(resource.Resource):
|
||||
"""Action resource.
|
||||
|
||||
NOTE: *name* is immutable. Note that name and description get inferred
|
||||
from action definition when Mistral service receives a POST request.
|
||||
So they can't be changed in another way.
|
||||
|
||||
"""
|
||||
|
||||
id = wtypes.text
|
||||
name = wtypes.text
|
||||
is_system = bool
|
||||
input = wtypes.text
|
||||
|
||||
description = wtypes.text
|
||||
tags = [wtypes.text]
|
||||
definition = wtypes.text
|
||||
scope = SCOPE_TYPES
|
||||
|
||||
created_at = wtypes.text
|
||||
updated_at = wtypes.text
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(id='123e4567-e89b-12d3-a456-426655440000',
|
||||
name='flow',
|
||||
definition='HERE GOES ACTION DEFINITION IN MISTRAL DSL v2',
|
||||
tags=['large', 'expensive'],
|
||||
scope='private',
|
||||
created_at='1970-01-01T00:00:00.000000',
|
||||
updated_at='1970-01-01T00:00:00.000000')
|
||||
|
||||
|
||||
class Actions(resource.ResourceList):
|
||||
"""A collection of Actions."""
|
||||
|
||||
actions = [Action]
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(actions=[Action.sample()])
|
||||
|
||||
|
||||
class ActionsController(rest.RestController, hooks.HookController):
|
||||
# TODO(nmakhotkin): Have a discussion with pecan/WSME folks in order
|
||||
# to have requests and response of different content types. Then
|
||||
# delete ContentTypeHook.
|
||||
__hooks__ = [ct_hook.ContentTypeHook("application/json", ['POST', 'PUT'])]
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Action, wtypes.text)
|
||||
def get(self, name):
|
||||
"""Return the named action."""
|
||||
LOG.info("Fetch action [name=%s]" % name)
|
||||
|
||||
db_model = db_api.get_action_definition(name)
|
||||
|
||||
return Action.from_dict(db_model.to_dict())
|
||||
|
||||
@rest_utils.wrap_pecan_controller_exception
|
||||
@pecan.expose(content_type="text/plain")
|
||||
def put(self):
|
||||
"""Update one or more actions.
|
||||
|
||||
NOTE: This text is allowed to have definitions
|
||||
of multiple actions. In this case they all will be updated.
|
||||
"""
|
||||
definition = pecan.request.text
|
||||
LOG.info("Update action(s) [definition=%s]" % definition)
|
||||
|
||||
db_acts = actions.update_actions(definition)
|
||||
models_dicts = [db_act.to_dict() for db_act in db_acts]
|
||||
|
||||
action_list = [Action.from_dict(act) for act in models_dicts]
|
||||
|
||||
return Actions(actions=action_list).to_string()
|
||||
|
||||
@rest_utils.wrap_pecan_controller_exception
|
||||
@pecan.expose(content_type="text/plain")
|
||||
def post(self):
|
||||
"""Create a new action.
|
||||
|
||||
NOTE: This text is allowed to have definitions
|
||||
of multiple actions. In this case they all will be created.
|
||||
"""
|
||||
definition = pecan.request.text
|
||||
pecan.response.status = 201
|
||||
|
||||
LOG.info("Create action(s) [definition=%s]" % definition)
|
||||
|
||||
db_acts = actions.create_actions(definition)
|
||||
models_dicts = [db_act.to_dict() for db_act in db_acts]
|
||||
|
||||
action_list = [Action.from_dict(act) for act in models_dicts]
|
||||
|
||||
return Actions(actions=action_list).to_string()
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(None, wtypes.text, status_code=204)
|
||||
def delete(self, name):
|
||||
"""Delete the named action."""
|
||||
LOG.info("Delete action [name=%s]" % name)
|
||||
|
||||
with db_api.transaction():
|
||||
db_model = db_api.get_action_definition(name)
|
||||
|
||||
if db_model.is_system:
|
||||
msg = "Attempt to delete a system action: %s" % name
|
||||
raise exc.DataAccessException(msg)
|
||||
|
||||
db_api.delete_action_definition(name)
|
||||
|
||||
@wsme_pecan.wsexpose(Actions)
|
||||
def get_all(self):
|
||||
"""Return all actions.
|
||||
|
||||
Where project_id is the same as the requester or
|
||||
project_id is different but the scope is public.
|
||||
"""
|
||||
LOG.info("Fetch actions.")
|
||||
|
||||
action_list = [Action.from_dict(db_model.to_dict())
|
||||
for db_model in db_api.get_action_definitions()]
|
||||
|
||||
return Actions(actions=action_list)
|
@ -1,204 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2015 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
|
||||
from pecan import rest
|
||||
from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from mistral.api.controllers import resource
|
||||
from mistral.db.v2 import api as db_api
|
||||
from mistral.engine import rpc
|
||||
from mistral import exceptions as exc
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.utils import rest_utils
|
||||
from mistral.workflow import states
|
||||
from mistral.workflow import utils as wf_utils
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ActionExecution(resource.Resource):
|
||||
"""ActionExecution resource."""
|
||||
|
||||
id = wtypes.text
|
||||
|
||||
workflow_name = wtypes.text
|
||||
task_name = wtypes.text
|
||||
task_execution_id = wtypes.text
|
||||
|
||||
state = wtypes.text
|
||||
|
||||
state_info = wtypes.text
|
||||
tags = [wtypes.text]
|
||||
name = wtypes.text
|
||||
accepted = bool
|
||||
input = wtypes.text
|
||||
output = wtypes.text
|
||||
created_at = wtypes.text
|
||||
updated_at = wtypes.text
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, d):
|
||||
e = cls()
|
||||
|
||||
for key, val in d.items():
|
||||
if hasattr(e, key):
|
||||
# Nonetype check for dictionary must be explicit.
|
||||
if val is not None and (
|
||||
key == 'input' or key == 'output'):
|
||||
val = json.dumps(val)
|
||||
setattr(e, key, val)
|
||||
|
||||
return e
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(
|
||||
id='123e4567-e89b-12d3-a456-426655440000',
|
||||
workflow_name='flow',
|
||||
task_name='task1',
|
||||
workflow_execution_id='653e4127-e89b-12d3-a456-426655440076',
|
||||
task_execution_id='343e45623-e89b-12d3-a456-426655440090',
|
||||
state=states.SUCCESS,
|
||||
state_info=states.SUCCESS,
|
||||
tags=['foo', 'fee'],
|
||||
definition_name='std.echo',
|
||||
accepted=True,
|
||||
input='{"first_name": "John", "last_name": "Doe"}',
|
||||
output='{"some_output": "Hello, John Doe!"}',
|
||||
created_at='1970-01-01T00:00:00.000000',
|
||||
updated_at='1970-01-01T00:00:00.000000'
|
||||
)
|
||||
|
||||
|
||||
class ActionExecutions(resource.Resource):
|
||||
"""A collection of action_executions."""
|
||||
|
||||
action_executions = [ActionExecution]
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(action_executions=[ActionExecution.sample()])
|
||||
|
||||
|
||||
def _load_deferred_output_field(action_ex):
|
||||
# We need to refer to this lazy-load field explicitly in
|
||||
# order to make sure that it is correctly loaded.
|
||||
hasattr(action_ex, 'output')
|
||||
|
||||
|
||||
def _get_action_execution(id):
|
||||
action_ex = db_api.get_action_execution(id)
|
||||
|
||||
return _get_action_execution_resource(action_ex)
|
||||
|
||||
|
||||
def _get_action_execution_resource(action_ex):
|
||||
_load_deferred_output_field(action_ex)
|
||||
|
||||
# TODO(nmakhotkin): Get rid of using dicts for constructing resources.
|
||||
# TODO(nmakhotkin): Use db_model for this instead.
|
||||
res = ActionExecution.from_dict(action_ex.to_dict())
|
||||
|
||||
setattr(res, 'task_name', action_ex.task_execution.name)
|
||||
|
||||
return res
|
||||
|
||||
|
||||
def _get_action_executions(task_execution_id=None):
|
||||
kwargs = {'type': 'action_execution'}
|
||||
|
||||
if task_execution_id:
|
||||
kwargs['task_execution_id'] = task_execution_id
|
||||
|
||||
action_executions = []
|
||||
|
||||
for action_ex in db_api.get_action_executions(**kwargs):
|
||||
action_executions.append(
|
||||
_get_action_execution_resource(action_ex)
|
||||
)
|
||||
|
||||
return ActionExecutions(action_executions=action_executions)
|
||||
|
||||
|
||||
class ActionExecutionsController(rest.RestController):
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(ActionExecution, wtypes.text)
|
||||
def get(self, id):
|
||||
"""Return the specified action_execution."""
|
||||
LOG.info("Fetch action_execution [id=%s]" % id)
|
||||
|
||||
return _get_action_execution(id)
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(ActionExecution, wtypes.text, body=ActionExecution)
|
||||
def put(self, id, action_execution):
|
||||
"""Update the specified action_execution."""
|
||||
LOG.info(
|
||||
"Update action_execution [id=%s, action_execution=%s]"
|
||||
% (id, action_execution)
|
||||
)
|
||||
|
||||
# Client must provide a valid json. It shouldn't necessarily be an
|
||||
# object but it should be json complaint so strings have to be escaped.
|
||||
output = None
|
||||
|
||||
if action_execution.output:
|
||||
try:
|
||||
output = json.loads(action_execution.output)
|
||||
except (ValueError, TypeError) as e:
|
||||
raise exc.InvalidResultException(str(e))
|
||||
|
||||
if action_execution.state == states.SUCCESS:
|
||||
result = wf_utils.Result(data=output)
|
||||
elif action_execution.state == states.ERROR:
|
||||
result = wf_utils.Result(error=output)
|
||||
else:
|
||||
raise exc.InvalidResultException(
|
||||
"Error. Expected on of %s, actual: %s" %
|
||||
([states.SUCCESS, states.ERROR], action_execution.state)
|
||||
)
|
||||
|
||||
values = rpc.get_engine_client().on_action_complete(id, result)
|
||||
|
||||
return ActionExecution.from_dict(values)
|
||||
|
||||
@wsme_pecan.wsexpose(ActionExecutions)
|
||||
def get_all(self):
|
||||
"""Return all action_executions within the execution."""
|
||||
LOG.info("Fetch action_executions")
|
||||
|
||||
return _get_action_executions()
|
||||
|
||||
|
||||
class TasksActionExecutionController(rest.RestController):
|
||||
@wsme_pecan.wsexpose(ActionExecutions, wtypes.text)
|
||||
def get_all(self, task_execution_id):
|
||||
"""Return all action executions within the task execution."""
|
||||
LOG.info("Fetch action executions")
|
||||
|
||||
return _get_action_executions(task_execution_id=task_execution_id)
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(ActionExecution, wtypes.text, wtypes.text)
|
||||
def get(self, task_execution_id, action_ex_id):
|
||||
"""Return the specified action_execution."""
|
||||
LOG.info("Fetch action_execution [id=%s]" % action_ex_id)
|
||||
|
||||
return _get_action_execution(action_ex_id)
|
@ -1,144 +0,0 @@
|
||||
# Copyright 2014 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
from pecan import rest
|
||||
from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from mistral.api.controllers import resource
|
||||
from mistral.db.v2 import api as db_api
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.services import triggers
|
||||
from mistral.utils import rest_utils
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
SCOPE_TYPES = wtypes.Enum(str, 'private', 'public')
|
||||
|
||||
|
||||
class CronTrigger(resource.Resource):
|
||||
"""CronTrigger resource."""
|
||||
|
||||
id = wtypes.text
|
||||
name = wtypes.text
|
||||
workflow_name = wtypes.text
|
||||
workflow_input = wtypes.text
|
||||
|
||||
scope = SCOPE_TYPES
|
||||
|
||||
pattern = wtypes.text
|
||||
remaining_executions = wtypes.IntegerType(minimum=1)
|
||||
first_execution_time = wtypes.text
|
||||
next_execution_time = wtypes.text
|
||||
|
||||
created_at = wtypes.text
|
||||
updated_at = wtypes.text
|
||||
|
||||
def to_dict(self):
|
||||
d = super(CronTrigger, self).to_dict()
|
||||
|
||||
if d.get('workflow_input'):
|
||||
d['workflow_input'] = json.loads(d['workflow_input'])
|
||||
|
||||
return d
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, d):
|
||||
e = cls()
|
||||
|
||||
for key, val in d.items():
|
||||
if hasattr(e, key):
|
||||
# Nonetype check for dictionary must be explicit.
|
||||
if key == 'workflow_input' and val is not None:
|
||||
val = json.dumps(val)
|
||||
|
||||
setattr(e, key, val)
|
||||
|
||||
return e
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(id='123e4567-e89b-12d3-a456-426655440000',
|
||||
name='my_trigger',
|
||||
workflow_name='my_wf',
|
||||
workflow_input={},
|
||||
scope='private',
|
||||
pattern='* * * * *',
|
||||
remaining_executions=42,
|
||||
created_at='1970-01-01T00:00:00.000000',
|
||||
updated_at='1970-01-01T00:00:00.000000')
|
||||
|
||||
|
||||
class CronTriggers(resource.Resource):
|
||||
"""A collection of cron triggers."""
|
||||
|
||||
cron_triggers = [CronTrigger]
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(cron_triggers=[CronTrigger.sample()])
|
||||
|
||||
|
||||
class CronTriggersController(rest.RestController):
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(CronTrigger, wtypes.text)
|
||||
def get(self, name):
|
||||
"""Returns the named cron_trigger."""
|
||||
|
||||
LOG.info('Fetch cron trigger [name=%s]' % name)
|
||||
|
||||
db_model = db_api.get_cron_trigger(name)
|
||||
|
||||
return CronTrigger.from_dict(db_model.to_dict())
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(CronTrigger, body=CronTrigger, status_code=201)
|
||||
def post(self, cron_trigger):
|
||||
"""Creates a new cron trigger."""
|
||||
|
||||
LOG.info('Create cron trigger: %s' % cron_trigger)
|
||||
|
||||
values = cron_trigger.to_dict()
|
||||
|
||||
db_model = triggers.create_cron_trigger(
|
||||
values['name'],
|
||||
values['workflow_name'],
|
||||
values.get('workflow_input'),
|
||||
values.get('pattern'),
|
||||
values.get('first_execution_time'),
|
||||
values.get('remaining_executions')
|
||||
)
|
||||
|
||||
return CronTrigger.from_dict(db_model.to_dict())
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(None, wtypes.text, status_code=204)
|
||||
def delete(self, name):
|
||||
"""Delete cron trigger."""
|
||||
LOG.info("Delete cron trigger [name=%s]" % name)
|
||||
|
||||
db_api.delete_cron_trigger(name)
|
||||
|
||||
@wsme_pecan.wsexpose(CronTriggers)
|
||||
def get_all(self):
|
||||
"""Return all cron triggers."""
|
||||
|
||||
LOG.info("Fetch cron triggers.")
|
||||
|
||||
_list = [
|
||||
CronTrigger.from_dict(db_model.to_dict())
|
||||
for db_model in db_api.get_cron_triggers()
|
||||
]
|
||||
|
||||
return CronTriggers(cron_triggers=_list)
|
@ -1,148 +0,0 @@
|
||||
# Copyright 2015 - StackStorm, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import uuid
|
||||
|
||||
from pecan import rest
|
||||
import six
|
||||
from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from mistral.api.controllers import resource
|
||||
from mistral.db.v2 import api as db_api
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.utils import rest_utils
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
SAMPLE = {
|
||||
'server': 'localhost',
|
||||
'database': 'temp',
|
||||
'timeout': 600,
|
||||
'verbose': True
|
||||
}
|
||||
|
||||
|
||||
class Environment(resource.Resource):
|
||||
"""Environment resource."""
|
||||
|
||||
id = wtypes.text
|
||||
name = wtypes.text
|
||||
description = wtypes.text
|
||||
variables = wtypes.text
|
||||
scope = wtypes.Enum(str, 'private', 'public')
|
||||
created_at = wtypes.text
|
||||
updated_at = wtypes.text
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(Environment, self).__init__()
|
||||
|
||||
for key, val in six.iteritems(kwargs):
|
||||
if key == 'variables' and val is not None:
|
||||
val = json.dumps(val)
|
||||
|
||||
setattr(self, key, val)
|
||||
|
||||
def to_dict(self):
|
||||
d = super(Environment, self).to_dict()
|
||||
|
||||
if d.get('variables'):
|
||||
d['variables'] = json.loads(d['variables'])
|
||||
|
||||
return d
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, d):
|
||||
return cls(**d)
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(id=str(uuid.uuid4()),
|
||||
name='sample',
|
||||
description='example environment entry',
|
||||
variables=json.dumps(SAMPLE),
|
||||
scope='private',
|
||||
created_at='1970-01-01T00:00:00.000000',
|
||||
updated_at='1970-01-01T00:00:00.000000')
|
||||
|
||||
|
||||
class Environments(resource.Resource):
|
||||
"""A collection of Environment resources."""
|
||||
|
||||
environments = [Environment]
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(environments=[Environment.sample()])
|
||||
|
||||
|
||||
class EnvironmentController(rest.RestController):
|
||||
|
||||
@wsme_pecan.wsexpose(Environments)
|
||||
def get_all(self):
|
||||
"""Return all environments.
|
||||
Where project_id is the same as the requestor or
|
||||
project_id is different but the scope is public.
|
||||
"""
|
||||
LOG.info("Fetch environments.")
|
||||
|
||||
environments = [Environment(**db_model.to_dict())
|
||||
for db_model in db_api.get_environments()]
|
||||
|
||||
return Environments(environments=environments)
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Environment, wtypes.text)
|
||||
def get(self, name):
|
||||
"""Return the named environment."""
|
||||
LOG.info("Fetch environment [name=%s]" % name)
|
||||
|
||||
db_model = db_api.get_environment(name)
|
||||
|
||||
return Environment(**db_model.to_dict())
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Environment, body=Environment, status_code=201)
|
||||
def post(self, environment):
|
||||
"""Create a new environment."""
|
||||
LOG.info("Create environment [env=%s]" % environment)
|
||||
|
||||
db_model = db_api.create_environment(environment.to_dict())
|
||||
|
||||
return Environment(**db_model.to_dict())
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Environment, body=Environment)
|
||||
def put(self, environment):
|
||||
"""Update an environment."""
|
||||
if not environment.name:
|
||||
raise ValueError('Name of the environment is not provided.')
|
||||
|
||||
LOG.info("Update environment [name=%s, env=%s]" %
|
||||
(environment.name, environment))
|
||||
|
||||
db_model = db_api.update_environment(environment.name,
|
||||
environment.to_dict())
|
||||
|
||||
return Environment(**db_model.to_dict())
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(None, wtypes.text, status_code=204)
|
||||
def delete(self, name):
|
||||
"""Delete the named environment."""
|
||||
LOG.info("Delete environment [name=%s]" % name)
|
||||
|
||||
db_api.delete_environment(name)
|
@ -1,203 +0,0 @@
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
# Copyright 2015 - StackStorm, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
from pecan import rest
|
||||
from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from mistral.api.controllers import resource
|
||||
from mistral.api.controllers.v2 import task
|
||||
from mistral.db.v2 import api as db_api
|
||||
from mistral.engine import rpc
|
||||
from mistral import exceptions as exc
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.utils import rest_utils
|
||||
from mistral.workflow import states
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
# TODO(rakhmerov): Make sure to make all needed renaming on public API.
|
||||
|
||||
|
||||
class Execution(resource.Resource):
|
||||
"""Execution resource."""
|
||||
|
||||
id = wtypes.text
|
||||
"id is immutable and auto assigned."
|
||||
|
||||
workflow_name = wtypes.text
|
||||
"reference to workflow definition"
|
||||
|
||||
params = wtypes.text
|
||||
"params define workflow type specific parameters. For example, reverse \
|
||||
workflow takes one parameter 'task_name' that defines a target task."
|
||||
|
||||
state = wtypes.text
|
||||
"state can be one of: RUNNING, SUCCESS, ERROR, PAUSED"
|
||||
|
||||
state_info = wtypes.text
|
||||
"an optional state information string"
|
||||
|
||||
input = wtypes.text
|
||||
"input is a JSON structure containing workflow input values."
|
||||
output = wtypes.text
|
||||
"output is a workflow output."
|
||||
|
||||
created_at = wtypes.text
|
||||
updated_at = wtypes.text
|
||||
|
||||
# Context is a JSON object but since WSME doesn't support arbitrary
|
||||
# dictionaries we have to use text type convert to json and back manually.
|
||||
def to_dict(self):
|
||||
d = super(Execution, self).to_dict()
|
||||
|
||||
if d.get('input'):
|
||||
d['input'] = json.loads(d['input'])
|
||||
|
||||
if d.get('output'):
|
||||
d['output'] = json.loads(d['output'])
|
||||
|
||||
if d.get('params'):
|
||||
d['params'] = json.loads(d['params'])
|
||||
|
||||
return d
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, d):
|
||||
e = cls()
|
||||
|
||||
for key, val in d.items():
|
||||
if hasattr(e, key):
|
||||
# Nonetype check for dictionary must be explicit
|
||||
if key in ['input', 'output', 'params'] and val is not None:
|
||||
val = json.dumps(val)
|
||||
setattr(e, key, val)
|
||||
|
||||
return e
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(id='123e4567-e89b-12d3-a456-426655440000',
|
||||
workflow_name='flow',
|
||||
state='SUCCESS',
|
||||
input='{}',
|
||||
output='{}',
|
||||
params='{"env": {"k1": "abc", "k2": 123}}',
|
||||
created_at='1970-01-01T00:00:00.000000',
|
||||
updated_at='1970-01-01T00:00:00.000000')
|
||||
|
||||
|
||||
class Executions(resource.Resource):
|
||||
"""A collection of Execution resources."""
|
||||
|
||||
executions = [Execution]
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(executions=[Execution.sample()])
|
||||
|
||||
|
||||
class ExecutionsController(rest.RestController):
|
||||
tasks = task.ExecutionTasksController()
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Execution, wtypes.text)
|
||||
def get(self, id):
|
||||
"""Return the specified Execution."""
|
||||
LOG.info("Fetch execution [id=%s]" % id)
|
||||
|
||||
return Execution.from_dict(db_api.get_workflow_execution(id).to_dict())
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Execution, wtypes.text, body=Execution)
|
||||
def put(self, id, execution):
|
||||
"""Update the specified Execution.
|
||||
|
||||
:param id: execution ID.
|
||||
:param execution: Execution objects
|
||||
"""
|
||||
LOG.info("Update execution [id=%s, execution=%s]" %
|
||||
(id, execution))
|
||||
db_api.ensure_workflow_execution_exists(id)
|
||||
|
||||
# Currently we can change only state.
|
||||
if not execution.state:
|
||||
raise exc.DataAccessException(
|
||||
"Only state of execution can change. "
|
||||
"Missing 'state' property."
|
||||
)
|
||||
|
||||
new_state = execution.state
|
||||
msg = execution.state_info
|
||||
|
||||
if new_state == states.PAUSED:
|
||||
wf_ex = rpc.get_engine_client().pause_workflow(id)
|
||||
elif new_state == states.RUNNING:
|
||||
wf_ex = rpc.get_engine_client().resume_workflow(id)
|
||||
elif new_state in [states.SUCCESS, states.ERROR]:
|
||||
wf_ex = rpc.get_engine_client().stop_workflow(id, new_state, msg)
|
||||
else:
|
||||
# To prevent changing state in other cases throw a message.
|
||||
raise exc.DataAccessException(
|
||||
"Can not change state to %s. Allowed states are: '%s" %
|
||||
(new_state, ", ".join([states.RUNNING, states.PAUSED,
|
||||
states.SUCCESS, states.ERROR]))
|
||||
)
|
||||
|
||||
return Execution.from_dict(
|
||||
wf_ex if isinstance(wf_ex, dict) else wf_ex.to_dict()
|
||||
)
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Execution, body=Execution, status_code=201)
|
||||
def post(self, execution):
|
||||
"""Create a new Execution.
|
||||
|
||||
:param execution: Execution object with input content.
|
||||
"""
|
||||
LOG.info("Create execution [execution=%s]" % execution)
|
||||
|
||||
engine = rpc.get_engine_client()
|
||||
exec_dict = execution.to_dict()
|
||||
|
||||
result = engine.start_workflow(
|
||||
exec_dict['workflow_name'],
|
||||
exec_dict.get('input'),
|
||||
**exec_dict.get('params') or {}
|
||||
)
|
||||
|
||||
return Execution.from_dict(result)
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(None, wtypes.text, status_code=204)
|
||||
def delete(self, id):
|
||||
"""Delete the specified Execution."""
|
||||
LOG.info("Delete execution [id=%s]" % id)
|
||||
|
||||
return db_api.delete_workflow_execution(id)
|
||||
|
||||
@wsme_pecan.wsexpose(Executions)
|
||||
def get_all(self):
|
||||
"""Return all Executions."""
|
||||
LOG.info("Fetch executions")
|
||||
|
||||
wf_executions = [
|
||||
Execution.from_dict(db_model.to_dict())
|
||||
for db_model in db_api.get_workflow_executions()
|
||||
]
|
||||
|
||||
return Executions(executions=wf_executions)
|
@ -1,58 +0,0 @@
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
# Copyright 2015 - StackStorm, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import pecan
|
||||
from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from mistral.api.controllers import resource
|
||||
from mistral.api.controllers.v2 import action
|
||||
from mistral.api.controllers.v2 import action_execution
|
||||
from mistral.api.controllers.v2 import cron_trigger
|
||||
from mistral.api.controllers.v2 import environment
|
||||
from mistral.api.controllers.v2 import execution
|
||||
from mistral.api.controllers.v2 import task
|
||||
from mistral.api.controllers.v2 import workbook
|
||||
from mistral.api.controllers.v2 import workflow
|
||||
|
||||
|
||||
class RootResource(resource.Resource):
|
||||
"""Root resource for API version 2.
|
||||
|
||||
It references all other resources belonging to the API.
|
||||
"""
|
||||
|
||||
uri = wtypes.text
|
||||
|
||||
# TODO(everyone): what else do we need here?
|
||||
# TODO(everyone): we need to collect all the links from API v2.0
|
||||
# and provide them.
|
||||
|
||||
|
||||
class Controller(object):
|
||||
"""API root controller for version 2."""
|
||||
|
||||
workbooks = workbook.WorkbooksController()
|
||||
actions = action.ActionsController()
|
||||
workflows = workflow.WorkflowsController()
|
||||
executions = execution.ExecutionsController()
|
||||
tasks = task.TasksController()
|
||||
cron_triggers = cron_trigger.CronTriggersController()
|
||||
environments = environment.EnvironmentController()
|
||||
action_executions = action_execution.ActionExecutionsController()
|
||||
|
||||
@wsme_pecan.wsexpose(RootResource)
|
||||
def index(self):
|
||||
return RootResource(uri='%s/%s' % (pecan.request.host_url, 'v2'))
|
@ -1,144 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
|
||||
from pecan import rest
|
||||
from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from mistral.api.controllers import resource
|
||||
from mistral.api.controllers.v2 import action_execution
|
||||
from mistral.db.v2 import api as db_api
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.utils import rest_utils
|
||||
from mistral.workflow import data_flow
|
||||
from mistral.workflow import states
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Task(resource.Resource):
|
||||
"""Task resource."""
|
||||
|
||||
id = wtypes.text
|
||||
name = wtypes.text
|
||||
|
||||
workflow_name = wtypes.text
|
||||
workflow_execution_id = wtypes.text
|
||||
|
||||
state = wtypes.text
|
||||
"state can take one of the following values: \
|
||||
IDLE, RUNNING, SUCCESS, ERROR, DELAYED"
|
||||
|
||||
result = wtypes.text
|
||||
published = wtypes.text
|
||||
|
||||
created_at = wtypes.text
|
||||
updated_at = wtypes.text
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, d):
|
||||
e = cls()
|
||||
|
||||
for key, val in d.items():
|
||||
if hasattr(e, key):
|
||||
# Nonetype check for dictionary must be explicit.
|
||||
if val is not None and key == 'published':
|
||||
val = json.dumps(val)
|
||||
setattr(e, key, val)
|
||||
|
||||
return e
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(
|
||||
id='123e4567-e89b-12d3-a456-426655440000',
|
||||
workflow_name='flow',
|
||||
workflow_execution_id='123e4567-e89b-12d3-a456-426655440000',
|
||||
name='task',
|
||||
description='tell when you are done',
|
||||
state=states.SUCCESS,
|
||||
tags=['foo', 'fee'],
|
||||
input='{"first_name": "John", "last_name": "Doe"}',
|
||||
output='{"task": {"build_greeting": '
|
||||
'{"greeting": "Hello, John Doe!"}}}',
|
||||
created_at='1970-01-01T00:00:00.000000',
|
||||
updated_at='1970-01-01T00:00:00.000000'
|
||||
)
|
||||
|
||||
|
||||
class Tasks(resource.Resource):
|
||||
"""A collection of tasks."""
|
||||
|
||||
tasks = [Task]
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(tasks=[Task.sample()])
|
||||
|
||||
|
||||
def _get_task_resources_with_results(wf_ex_id=None):
|
||||
filters = {}
|
||||
|
||||
if wf_ex_id:
|
||||
filters['workflow_execution_id'] = wf_ex_id
|
||||
|
||||
tasks = []
|
||||
task_execs = db_api.get_task_executions(**filters)
|
||||
for task_ex in task_execs:
|
||||
task = Task.from_dict(task_ex.to_dict())
|
||||
task.result = json.dumps(
|
||||
data_flow.get_task_execution_result(task_ex)
|
||||
)
|
||||
|
||||
tasks += [task]
|
||||
|
||||
return Tasks(tasks=tasks)
|
||||
|
||||
|
||||
class TasksController(rest.RestController):
|
||||
action_executions = action_execution.TasksActionExecutionController()
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Task, wtypes.text)
|
||||
def get(self, id):
|
||||
"""Return the specified task."""
|
||||
LOG.info("Fetch task [id=%s]" % id)
|
||||
|
||||
task_ex = db_api.get_task_execution(id)
|
||||
task = Task.from_dict(task_ex.to_dict())
|
||||
|
||||
task.result = json.dumps(data_flow.get_task_execution_result(task_ex))
|
||||
|
||||
return task
|
||||
|
||||
@wsme_pecan.wsexpose(Tasks)
|
||||
def get_all(self):
|
||||
"""Return all tasks within the execution."""
|
||||
LOG.info("Fetch tasks")
|
||||
|
||||
return _get_task_resources_with_results()
|
||||
|
||||
|
||||
class ExecutionTasksController(rest.RestController):
|
||||
@wsme_pecan.wsexpose(Tasks, wtypes.text)
|
||||
def get_all(self, workflow_execution_id):
|
||||
"""Return all tasks within the workflow execution."""
|
||||
LOG.info("Fetch tasks")
|
||||
|
||||
return _get_task_resources_with_results(workflow_execution_id)
|
@ -1,132 +0,0 @@
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
# Copyright 2015 - StackStorm, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import pecan
|
||||
from pecan import hooks
|
||||
from pecan import rest
|
||||
from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from mistral.api.controllers import resource
|
||||
from mistral.api.controllers.v2 import validation
|
||||
from mistral.api.hooks import content_type as ct_hook
|
||||
from mistral.db.v2 import api as db_api
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.services import workbooks
|
||||
from mistral.utils import rest_utils
|
||||
from mistral.workbook import parser as spec_parser
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
SCOPE_TYPES = wtypes.Enum(str, 'private', 'public')
|
||||
|
||||
|
||||
class Workbook(resource.Resource):
|
||||
"""Workbook resource."""
|
||||
|
||||
id = wtypes.text
|
||||
name = wtypes.text
|
||||
|
||||
definition = wtypes.text
|
||||
"workbook definition in Mistral v2 DSL"
|
||||
tags = [wtypes.text]
|
||||
scope = SCOPE_TYPES
|
||||
"'private' or 'public'"
|
||||
|
||||
created_at = wtypes.text
|
||||
updated_at = wtypes.text
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(id='123e4567-e89b-12d3-a456-426655440000',
|
||||
name='book',
|
||||
definition='HERE GOES'
|
||||
'WORKBOOK DEFINITION IN MISTRAL DSL v2',
|
||||
tags=['large', 'expensive'],
|
||||
scope='private',
|
||||
created_at='1970-01-01T00:00:00.000000',
|
||||
updated_at='1970-01-01T00:00:00.000000')
|
||||
|
||||
|
||||
class Workbooks(resource.Resource):
|
||||
"""A collection of Workbooks."""
|
||||
|
||||
workbooks = [Workbook]
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(workbooks=[Workbook.sample()])
|
||||
|
||||
|
||||
class WorkbooksController(rest.RestController, hooks.HookController):
|
||||
__hooks__ = [ct_hook.ContentTypeHook("application/json", ['POST', 'PUT'])]
|
||||
|
||||
validate = validation.SpecValidationController(
|
||||
spec_parser.get_workbook_spec_from_yaml)
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Workbook, wtypes.text)
|
||||
def get(self, name):
|
||||
"""Return the named workbook."""
|
||||
LOG.info("Fetch workbook [name=%s]" % name)
|
||||
|
||||
db_model = db_api.get_workbook(name)
|
||||
|
||||
return Workbook.from_dict(db_model.to_dict())
|
||||
|
||||
@rest_utils.wrap_pecan_controller_exception
|
||||
@pecan.expose(content_type="text/plain")
|
||||
def put(self):
|
||||
"""Update a workbook."""
|
||||
definition = pecan.request.text
|
||||
LOG.info("Update workbook [definition=%s]" % definition)
|
||||
|
||||
wb_db = workbooks.update_workbook_v2(definition)
|
||||
|
||||
return Workbook.from_dict(wb_db.to_dict()).to_string()
|
||||
|
||||
@rest_utils.wrap_pecan_controller_exception
|
||||
@pecan.expose(content_type="text/plain")
|
||||
def post(self):
|
||||
"""Create a new workbook."""
|
||||
definition = pecan.request.text
|
||||
LOG.info("Create workbook [definition=%s]" % definition)
|
||||
|
||||
wb_db = workbooks.create_workbook_v2(definition)
|
||||
pecan.response.status = 201
|
||||
|
||||
return Workbook.from_dict(wb_db.to_dict()).to_string()
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(None, wtypes.text, status_code=204)
|
||||
def delete(self, name):
|
||||
"""Delete the named workbook."""
|
||||
LOG.info("Delete workbook [name=%s]" % name)
|
||||
|
||||
db_api.delete_workbook(name)
|
||||
|
||||
@wsme_pecan.wsexpose(Workbooks)
|
||||
def get_all(self):
|
||||
"""Return all workbooks.
|
||||
|
||||
Where project_id is the same as the requestor or
|
||||
project_id is different but the scope is public.
|
||||
"""
|
||||
LOG.info("Fetch workbooks.")
|
||||
|
||||
workbooks_list = [Workbook.from_dict(db_model.to_dict())
|
||||
for db_model in db_api.get_workbooks()]
|
||||
|
||||
return Workbooks(workbooks=workbooks_list)
|
@ -1,174 +0,0 @@
|
||||
# Copyright 2013 - Mirantis, Inc.
|
||||
# Copyright 2015 - StackStorm, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import pecan
|
||||
from pecan import hooks
|
||||
from pecan import rest
|
||||
from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from mistral.api.controllers import resource
|
||||
from mistral.api.controllers.v2 import validation
|
||||
from mistral.api.hooks import content_type as ct_hook
|
||||
from mistral.db.v2 import api as db_api
|
||||
from mistral.openstack.common import log as logging
|
||||
from mistral.services import workflows
|
||||
from mistral.utils import rest_utils
|
||||
from mistral.workbook import parser as spec_parser
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
SCOPE_TYPES = wtypes.Enum(str, 'private', 'public')
|
||||
|
||||
|
||||
class Workflow(resource.Resource):
|
||||
"""Workflow resource."""
|
||||
|
||||
id = wtypes.text
|
||||
name = wtypes.text
|
||||
input = wtypes.text
|
||||
|
||||
definition = wtypes.text
|
||||
"Workflow definition in Mistral v2 DSL"
|
||||
tags = [wtypes.text]
|
||||
scope = SCOPE_TYPES
|
||||
"'private' or 'public'"
|
||||
|
||||
created_at = wtypes.text
|
||||
updated_at = wtypes.text
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(id='123e4567-e89b-12d3-a456-426655440000',
|
||||
name='flow',
|
||||
input='param1, param2',
|
||||
definition='HERE GOES'
|
||||
'WORKFLOW DEFINITION IN MISTRAL DSL v2',
|
||||
tags=['large', 'expensive'],
|
||||
scope='private',
|
||||
created_at='1970-01-01T00:00:00.000000',
|
||||
updated_at='1970-01-01T00:00:00.000000')
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, d):
|
||||
e = cls()
|
||||
input_list = []
|
||||
|
||||
for key, val in d.items():
|
||||
if hasattr(e, key):
|
||||
setattr(e, key, val)
|
||||
|
||||
input = d['spec'].get('input', [])
|
||||
for param in input:
|
||||
if isinstance(param, dict):
|
||||
for k, v in param.items():
|
||||
input_list.append("%s=%s" % (k, v))
|
||||
else:
|
||||
input_list.append(param)
|
||||
|
||||
setattr(e, 'input', ", ".join(input_list) if input_list else None)
|
||||
|
||||
return e
|
||||
|
||||
|
||||
class Workflows(resource.ResourceList):
|
||||
"""A collection of workflows."""
|
||||
|
||||
workflows = [Workflow]
|
||||
|
||||
@classmethod
|
||||
def sample(cls):
|
||||
return cls(workflows=[Workflow.sample()])
|
||||
|
||||
|
||||
class WorkflowsController(rest.RestController, hooks.HookController):
|
||||
# TODO(nmakhotkin): Have a discussion with pecan/WSME folks in order
|
||||
# to have requests and response of different content types. Then
|
||||
# delete ContentTypeHook.
|
||||
__hooks__ = [ct_hook.ContentTypeHook("application/json", ['POST', 'PUT'])]
|
||||
|
||||
validate = validation.SpecValidationController(
|
||||
spec_parser.get_workflow_list_spec_from_yaml)
|
||||
|
||||
@rest_utils.wrap_wsme_controller_exception
|
||||
@wsme_pecan.wsexpose(Workflow, wtypes.text)
|
||||
def get(self, name):
|
||||
"""Return the named workflow."""
|
||||
LOG.info("Fetch workflow [name=%s]" % name)
|
||||
|
||||
db_model = db_api.get_workflow_definition(name)
|
||||
|
||||
return Workflow.from_dict(db_model.to_dict())
|
||||
|
||||
@rest_utils.wrap_pecan_controller_exception
|
||||
@pecan.expose(content_type="text/plain")
|
||||
def put(self):
|
||||
"""Update one or more workflows.
|
||||
|
||||
NOTE: The text is allowed to have definitions
|
||||
of multiple workflows. In this case they all will be updated.
|
||||
"""
|
||||
definition = pecan.request.text
|
||||
|
||||
LOG.info("Update workflow(s) [definition=%s]" % definition)
|
||||
|
||||
db_wfs = workflows.update_workflows(definition)
|
||||
models_dicts = [db_wf.to_dict() for db_wf in db_wfs]
|
||||
|
||||
workflow_list = [Workflow.from_dict(wf) for wf in models_dicts]
|
||||
|
||||
return Workflows(workflows=workflow_list).to_string()
|
||||
|
||||
@rest_utils.wrap_pecan_controller_exception
|
||||
@pecan.expose(content_type="text/plain")
|
||||
def post(self):
|
||||
"""Create a new workflow.
|
||||
|
||||
NOTE: The text is allowed to have definitions
|
||||
of multiple workflows. In this case they all will be created.
|
||||
"""
|
||||
definition = pecan.request.text
|
||||
pecan.response.status = 201
|
||||
|
||||
LOG.info("Create workflow(s) [definition=%s]" % definition)
|
||||
|
||||
db_wfs = workflows.create_workflows(definition)
|
||||
models_dicts = [db_wf.to_dict() for db_wf in db_wfs]
|
||||
|
||||
workflow_list = [Workflow.from_dict(wf) for wf in models_dicts]
|
||||
|
||||
return Workflows(workflows=workflow_list).to_string()
|
||||
|
||||
@rest_utils.wrap_pecan_controller_exception
|
||||
@wsme_pecan.wsexpose(None, wtypes.text, status_code=204)
|
||||
def delete(self, name):
|
||||
"""Delete the named workflow."""
|
||||
LOG.info("Delete workflow [name=%s]" % name)
|
||||
|
||||
db_api.delete_workflow_definition(name)
|
||||
|
||||
@wsme_pecan.wsexpose(Workflows)
|
||||
def get_all(self):
|
||||
"""Return all workflows.
|
||||
|
||||
Where project_id is the same as the requester or
|
||||
project_id is different but the scope is public.
|
||||
"""
|
||||
LOG.info("Fetch workflows.")
|
||||
|
||||
workflows_list = [Workflow.from_dict(db_model.to_dict())
|
||||
for db_model in db_api.get_workflow_definitions()]
|
||||
|
||||
return Workflows(workflows=workflows_list)
|
@ -1,6 +1,4 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2014 - Mirantis, Inc.
|
||||
# Copyright 2015 Huawei Technologies Co. Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
|
@ -1,4 +1,4 @@
|
||||
# Copyright 2015 - StackStorm, Inc.
|
||||
# Copyright 2015 Huawei Technologies Co. Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@ -12,8 +12,8 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from mistral.api import app
|
||||
from mistral import config
|
||||
from terracotta.api import app
|
||||
from terracotta import config
|
||||
|
||||
config.parse_args()
|
||||
application = app.setup_app()
|
||||
|
@ -34,7 +34,9 @@ if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'terracotta', '__init__.py')):
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
import oslo_messaging as messaging
|
||||
from wsgiref import simple_server
|
||||
|
||||
from terracotta.api import app
|
||||
from terracotta import config
|
||||
from terracotta import rpc
|
||||
from terracotta.locals import collector
|
||||
@ -46,6 +48,22 @@ from terracotta import version
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def launch_api(transport):
|
||||
host = cfg.CONF.api.host
|
||||
port = cfg.CONF.api.port
|
||||
|
||||
server = simple_server.make_server(
|
||||
host,
|
||||
port,
|
||||
app.setup_app()
|
||||
)
|
||||
|
||||
LOG.info("Terracotta API is serving on http://%s:%s (PID=%s)" %
|
||||
(host, port, os.getpid()))
|
||||
|
||||
server.serve_forever()
|
||||
|
||||
|
||||
def launch_lm(transport):
|
||||
target = messaging.Target(
|
||||
topic=cfg.CONF.local_manager.topic,
|
||||
|
@ -1,4 +1,3 @@
|
||||
# Copyright 2012 Anton Beloglazov
|
||||
# Copyright 2015 - Huawei Technologies Co. Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@ -25,21 +24,21 @@ from terracotta import version
|
||||
|
||||
launch_opt = cfg.ListOpt(
|
||||
'server',
|
||||
default=['all'],
|
||||
help='Specifies which mistral server to start by the launch script. '
|
||||
default=['global-manager', 'local-manager', 'local-collector'],
|
||||
help='Specifies which terracotta server to start by the launch script. '
|
||||
'Valid options are all or any combination of '
|
||||
'api, engine, and executor.'
|
||||
'global-manager, local-manager, and local-collector.'
|
||||
)
|
||||
|
||||
api_opts = [
|
||||
cfg.StrOpt('host', default='0.0.0.0', help='Mistral API server host'),
|
||||
cfg.IntOpt('port', default=8989, help='Mistral API server port')
|
||||
cfg.StrOpt('host', default='0.0.0.0', help='Terracotta API server host'),
|
||||
cfg.IntOpt('port', default=9090, help='Terracotta API server port')
|
||||
]
|
||||
|
||||
pecan_opts = [
|
||||
cfg.StrOpt('root', default='mistral.api.controllers.root.RootController',
|
||||
cfg.StrOpt('root', default='terracotta.api.controllers.root.RootController',
|
||||
help='Pecan root controller'),
|
||||
cfg.ListOpt('modules', default=["mistral.api"],
|
||||
cfg.ListOpt('modules', default=["terracotta.api"],
|
||||
help='A list of modules where pecan will search for '
|
||||
'applications.'),
|
||||
cfg.BoolOpt('debug', default=False,
|
||||
|
@ -1,16 +0,0 @@
|
||||
oslo-incubator
|
||||
--------------
|
||||
|
||||
A number of modules from oslo-incubator are imported into this project.
|
||||
You can clone the oslo-incubator repository using the following url:
|
||||
|
||||
git://git.openstack.org/openstack/oslo-incubator
|
||||
|
||||
These modules are "incubating" in oslo-incubator and are kept in sync
|
||||
with the help of oslo-incubator's update.py script. See:
|
||||
|
||||
https://wiki.openstack.org/wiki/Oslo#Syncing_Code_from_Incubator
|
||||
|
||||
The copy of the code should never be directly modified here. Please
|
||||
always update oslo-incubator first and then run the script to copy
|
||||
the changes across.
|
@ -1,45 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""oslo.i18n integration module.
|
||||
|
||||
See http://docs.openstack.org/developer/oslo.i18n/usage.html
|
||||
|
||||
"""
|
||||
|
||||
try:
|
||||
import oslo_i18n
|
||||
|
||||
# NOTE(dhellmann): This reference to o-s-l-o will be replaced by the
|
||||
# application name when this module is synced into the separate
|
||||
# repository. It is OK to have more than one translation function
|
||||
# using the same domain, since there will still only be one message
|
||||
# catalog.
|
||||
_translators = oslo_i18n.TranslatorFactory(domain='nova')
|
||||
|
||||
# The primary translation function using the well-known name "_"
|
||||
_ = _translators.primary
|
||||
|
||||
# Translators for log levels.
|
||||
#
|
||||
# The abbreviated names are meant to reflect the usual use of a short
|
||||
# name like '_'. The "L" is for "log" and the other letter comes from
|
||||
# the level.
|
||||
_LI = _translators.log_info
|
||||
_LW = _translators.log_warning
|
||||
_LE = _translators.log_error
|
||||
_LC = _translators.log_critical
|
||||
except ImportError:
|
||||
# NOTE(dims): Support for cases where a project wants to use
|
||||
# code from oslo-incubator, but is not ready to be internationalized
|
||||
# (like tempest)
|
||||
_ = _LI = _LW = _LE = _LC = lambda x: x
|
@ -1,271 +0,0 @@
|
||||
# Copyright 2012 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
# W0603: Using the global statement
|
||||
# W0621: Redefining name %s from outer scope
|
||||
# pylint: disable=W0603,W0621
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import getpass
|
||||
import inspect
|
||||
import os
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
from oslo_utils import encodeutils
|
||||
from oslo_utils import strutils
|
||||
import prettytable
|
||||
import six
|
||||
from six import moves
|
||||
|
||||
from nova.openstack.common._i18n import _
|
||||
|
||||
|
||||
class MissingArgs(Exception):
|
||||
"""Supplied arguments are not sufficient for calling a function."""
|
||||
def __init__(self, missing):
|
||||
self.missing = missing
|
||||
msg = _("Missing arguments: %s") % ", ".join(missing)
|
||||
super(MissingArgs, self).__init__(msg)
|
||||
|
||||
|
||||
def validate_args(fn, *args, **kwargs):
|
||||
"""Check that the supplied args are sufficient for calling a function.
|
||||
|
||||
>>> validate_args(lambda a: None)
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
MissingArgs: Missing argument(s): a
|
||||
>>> validate_args(lambda a, b, c, d: None, 0, c=1)
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
MissingArgs: Missing argument(s): b, d
|
||||
|
||||
:param fn: the function to check
|
||||
:param arg: the positional arguments supplied
|
||||
:param kwargs: the keyword arguments supplied
|
||||
"""
|
||||
argspec = inspect.getargspec(fn)
|
||||
|
||||
num_defaults = len(argspec.defaults or [])
|
||||
required_args = argspec.args[:len(argspec.args) - num_defaults]
|
||||
|
||||
def isbound(method):
|
||||
return getattr(method, '__self__', None) is not None
|
||||
|
||||
if isbound(fn):
|
||||
required_args.pop(0)
|
||||
|
||||
missing = [arg for arg in required_args if arg not in kwargs]
|
||||
missing = missing[len(args):]
|
||||
if missing:
|
||||
raise MissingArgs(missing)
|
||||
|
||||
|
||||
def arg(*args, **kwargs):
|
||||
"""Decorator for CLI args.
|
||||
|
||||
Example:
|
||||
|
||||
>>> @arg("name", help="Name of the new entity")
|
||||
... def entity_create(args):
|
||||
... pass
|
||||
"""
|
||||
def _decorator(func):
|
||||
add_arg(func, *args, **kwargs)
|
||||
return func
|
||||
return _decorator
|
||||
|
||||
|
||||
def env(*args, **kwargs):
|
||||
"""Returns the first environment variable set.
|
||||
|
||||
If all are empty, defaults to '' or keyword arg `default`.
|
||||
"""
|
||||
for arg in args:
|
||||
value = os.environ.get(arg)
|
||||
if value:
|
||||
return value
|
||||
return kwargs.get('default', '')
|
||||
|
||||
|
||||
def add_arg(func, *args, **kwargs):
|
||||
"""Bind CLI arguments to a shell.py `do_foo` function."""
|
||||
|
||||
if not hasattr(func, 'arguments'):
|
||||
func.arguments = []
|
||||
|
||||
# NOTE(sirp): avoid dups that can occur when the module is shared across
|
||||
# tests.
|
||||
if (args, kwargs) not in func.arguments:
|
||||
# Because of the semantics of decorator composition if we just append
|
||||
# to the options list positional options will appear to be backwards.
|
||||
func.arguments.insert(0, (args, kwargs))
|
||||
|
||||
|
||||
def unauthenticated(func):
|
||||
"""Adds 'unauthenticated' attribute to decorated function.
|
||||
|
||||
Usage:
|
||||
|
||||
>>> @unauthenticated
|
||||
... def mymethod(f):
|
||||
... pass
|
||||
"""
|
||||
func.unauthenticated = True
|
||||
return func
|
||||
|
||||
|
||||
def isunauthenticated(func):
|
||||
"""Checks if the function does not require authentication.
|
||||
|
||||
Mark such functions with the `@unauthenticated` decorator.
|
||||
|
||||
:returns: bool
|
||||
"""
|
||||
return getattr(func, 'unauthenticated', False)
|
||||
|
||||
|
||||
def print_list(objs, fields, formatters=None, sortby_index=0,
               mixed_case_fields=None, field_labels=None):
    """Print a list or objects as a table, one row per object.

    :param objs: iterable of :class:`Resource`
    :param fields: attributes that correspond to columns, in order
    :param formatters: `dict` of callables for field formatting
    :param sortby_index: index of the field for sorting table rows
    :param mixed_case_fields: fields corresponding to object attributes that
        have mixed case names (e.g., 'serverId')
    :param field_labels: Labels to use in the heading of the table, default to
        fields.
    :raises ValueError: if field_labels and fields differ in length
    """
    formatters = formatters or {}
    mixed_case_fields = mixed_case_fields or []
    field_labels = field_labels or fields
    if len(field_labels) != len(fields):
        # Bug fix: the mapping was previously passed as a second positional
        # argument to ValueError and never interpolated into the message.
        raise ValueError(_("Field labels list %(labels)s has different number "
                           "of elements than fields list %(fields)s")
                         % {'labels': field_labels, 'fields': fields})

    if sortby_index is None:
        kwargs = {}
    else:
        kwargs = {'sortby': field_labels[sortby_index]}
    pt = prettytable.PrettyTable(field_labels)
    pt.align = 'l'

    for o in objs:
        row = []
        for field in fields:
            if field in formatters:
                row.append(formatters[field](o))
            else:
                # Derive the attribute name from the column label.
                if field in mixed_case_fields:
                    field_name = field.replace(' ', '_')
                else:
                    field_name = field.lower().replace(' ', '_')
                data = getattr(o, field_name, '')
                row.append(data)
        pt.add_row(row)

    if six.PY3:
        print(encodeutils.safe_encode(pt.get_string(**kwargs)).decode())
    else:
        print(encodeutils.safe_encode(pt.get_string(**kwargs)))
|
||||
|
||||
|
||||
def print_dict(dct, dict_property="Property", wrap=0):
    """Print a `dict` as a table of two columns.

    :param dct: `dict` to print
    :param dict_property: name of the first column
    :param wrap: wrapping for the second column
    """
    pt = prettytable.PrettyTable([dict_property, 'Value'])
    pt.align = 'l'
    for k, v in six.iteritems(dct):
        # convert dict to str to check length
        if isinstance(v, dict):
            v = six.text_type(v)
        if wrap > 0:
            v = textwrap.fill(six.text_type(v), wrap)
        # if value has a newline, add in multiple rows
        # e.g. fault with stacktrace
        # NOTE(review): r'\n' is the literal two-character sequence
        # backslash-n, not a real newline — confirm this is intended for
        # the escaped tracebacks being displayed here.
        if v and isinstance(v, six.string_types) and r'\n' in v:
            lines = v.strip().split(r'\n')
            col1 = k
            for line in lines:
                pt.add_row([col1, line])
                # Only the first row of a multi-line value shows the key.
                col1 = ''
        else:
            pt.add_row([k, v])

    if six.PY3:
        print(encodeutils.safe_encode(pt.get_string()).decode())
    else:
        print(encodeutils.safe_encode(pt.get_string()))
|
||||
|
||||
|
||||
def get_password(max_password_prompts=3):
    """Read password from TTY.

    Prompts up to *max_password_prompts* times; when the environment
    variable OS_VERIFY_PASSWORD is truthy, each entry must be typed
    twice and both entries must match.

    :param max_password_prompts: maximum number of prompt attempts
    :returns: the password string, or None if stdin is not a TTY, the
              user hit Ctrl-D, or no non-empty matching entry was given
    """
    verify = strutils.bool_from_string(env("OS_VERIFY_PASSWORD"))
    pw = None
    if hasattr(sys.stdin, "isatty") and sys.stdin.isatty():
        # Check for Ctrl-D
        try:
            for __ in moves.range(max_password_prompts):
                pw1 = getpass.getpass("OS Password: ")
                if verify:
                    pw2 = getpass.getpass("Please verify: ")
                else:
                    pw2 = pw1
                # Accept only a non-empty, matching entry.
                if pw1 == pw2 and pw1:
                    pw = pw1
                    break
        except EOFError:
            pass
    return pw
|
||||
|
||||
|
||||
def service_type(stype):
    """Adds 'service_type' attribute to decorated function.

    Usage:

    .. code-block:: python

       @service_type('volume')
       def mymethod(f):
           ...
    """
    def _attach(func):
        setattr(func, 'service_type', stype)
        return func
    return _attach
|
||||
|
||||
|
||||
def get_service_type(f):
    """Retrieves service type from function (None when unset)."""
    try:
        return f.service_type
    except AttributeError:
        return None
|
||||
|
||||
|
||||
def pretty_choice_list(l):
    """Render an iterable as a comma-separated list of quoted values."""
    quoted = ["'%s'" % item for item in l]
    return ', '.join(quoted)
|
||||
|
||||
|
||||
def exit(msg=''):
    """Print *msg* to stderr (when non-empty) and exit with status 1."""
    if msg:
        print(msg, file=sys.stderr)
    sys.exit(1)
|
@ -1,320 +0,0 @@
|
||||
# Copyright 2012 SINA Corporation
|
||||
# Copyright 2014 Cisco Systems, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
"""Extracts OpenStack config option info from module(s)."""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import imp
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_utils import importutils
|
||||
import six
|
||||
import stevedore.named
|
||||
|
||||
STROPT = "StrOpt"
|
||||
BOOLOPT = "BoolOpt"
|
||||
INTOPT = "IntOpt"
|
||||
FLOATOPT = "FloatOpt"
|
||||
LISTOPT = "ListOpt"
|
||||
DICTOPT = "DictOpt"
|
||||
MULTISTROPT = "MultiStrOpt"
|
||||
|
||||
OPT_TYPES = {
|
||||
STROPT: 'string value',
|
||||
BOOLOPT: 'boolean value',
|
||||
INTOPT: 'integer value',
|
||||
FLOATOPT: 'floating point value',
|
||||
LISTOPT: 'list value',
|
||||
DICTOPT: 'dict value',
|
||||
MULTISTROPT: 'multi valued',
|
||||
}
|
||||
|
||||
OPTION_REGEX = re.compile(r"(%s)" % "|".join([STROPT, BOOLOPT, INTOPT,
|
||||
FLOATOPT, LISTOPT, DICTOPT,
|
||||
MULTISTROPT]))
|
||||
|
||||
PY_EXT = ".py"
|
||||
BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
|
||||
"../../../../"))
|
||||
WORDWRAP_WIDTH = 60
|
||||
|
||||
|
||||
def raise_extension_exception(extmanager, ep, err):
    """on_load_failure_callback for stevedore: abort on any load failure.

    stevedore invokes this callback from inside the ``except`` block that
    caught the failure, so the bare ``raise`` re-raises the original
    exception instead of swallowing it.
    """
    raise
|
||||
|
||||
|
||||
def generate(argv):
    """Generate a sample configuration file and print it to stdout.

    Options are gathered from three places: modules named with ``-m``,
    libraries named with ``-l`` (via their ``oslo_config.opts`` entry
    points), and the source files listed as positional arguments. The
    DEFAULT group is printed first, then the rest alphabetically.

    :param argv: command-line arguments (excluding the program name)
    """
    parser = argparse.ArgumentParser(
        description='generate sample configuration file',
    )
    parser.add_argument('-m', dest='modules', action='append')
    parser.add_argument('-l', dest='libraries', action='append')
    parser.add_argument('srcfiles', nargs='*')
    parsed_args = parser.parse_args(argv)

    # Map package name -> list of dotted module strings derived from the
    # source file paths.
    mods_by_pkg = dict()
    for filepath in parsed_args.srcfiles:
        pkg_name = filepath.split(os.sep)[1]
        mod_str = '.'.join(['.'.join(filepath.split(os.sep)[:-1]),
                            os.path.basename(filepath).split('.')[0]])
        mods_by_pkg.setdefault(pkg_name, list()).append(mod_str)
    # NOTE(lzyeval): place top level modules before packages
    pkg_names = sorted(pkg for pkg in mods_by_pkg if pkg.endswith(PY_EXT))
    ext_names = sorted(pkg for pkg in mods_by_pkg if pkg not in pkg_names)
    pkg_names.extend(ext_names)

    # opts_by_group is a mapping of group name to an options list
    # The options list is a list of (module, options) tuples
    opts_by_group = {'DEFAULT': []}

    if parsed_args.modules:
        for module_name in parsed_args.modules:
            module = _import_module(module_name)
            if module:
                for group, opts in _list_opts(module):
                    opts_by_group.setdefault(group, []).append((module_name,
                                                                opts))

    # Look for entry points defined in libraries (or applications) for
    # option discovery, and include their return values in the output.
    #
    # Each entry point should be a function returning an iterable
    # of pairs with the group name (or None for the default group)
    # and the list of Opt instances for that group.
    if parsed_args.libraries:
        loader = stevedore.named.NamedExtensionManager(
            'oslo_config.opts',
            names=list(set(parsed_args.libraries)),
            invoke_on_load=False,
            on_load_failure_callback=raise_extension_exception
        )
        for ext in loader:
            for group, opts in ext.plugin():
                opt_list = opts_by_group.setdefault(group or 'DEFAULT', [])
                opt_list.append((ext.name, opts))

    for pkg_name in pkg_names:
        mods = mods_by_pkg.get(pkg_name)
        mods.sort()
        for mod_str in mods:
            # Treat a package __init__ as the package itself.
            if mod_str.endswith('.__init__'):
                mod_str = mod_str[:mod_str.rfind(".")]

            mod_obj = _import_module(mod_str)
            if not mod_obj:
                raise RuntimeError("Unable to import module %s" % mod_str)

            for group, opts in _list_opts(mod_obj):
                opts_by_group.setdefault(group, []).append((mod_str, opts))

    # DEFAULT always comes first; remaining groups are alphabetical.
    print_group_opts('DEFAULT', opts_by_group.pop('DEFAULT', []))
    for group in sorted(opts_by_group.keys()):
        print_group_opts(group, opts_by_group[group])
|
||||
|
||||
|
||||
def _import_module(mod_str):
    """Import *mod_str* and return the module object, or None on failure.

    Strings prefixed with ``bin.`` are loaded from the ``bin`` directory
    as source files (scripts without a package); everything else goes
    through the normal import machinery. Errors are reported on stderr
    rather than raised.
    """
    try:
        if mod_str.startswith('bin.'):
            imp.load_source(mod_str[4:], os.path.join('bin', mod_str[4:]))
            return sys.modules[mod_str[4:]]
        else:
            return importutils.import_module(mod_str)
    except Exception as e:
        sys.stderr.write("Error importing module %s: %s\n" % (mod_str, str(e)))
        return None
|
||||
|
||||
|
||||
def _is_in_group(opt, group):
    """Check if *opt* is registered in the option *group*."""
    for value in group._opts.values():
        # NOTE(llu): Temporary workaround for bug #1262148, wait until
        # newly released oslo.config support '==' operator.
        # The double negation deliberately goes through __ne__ instead of
        # __eq__ — do not "simplify" this to ==.
        if not(value['opt'] != opt):
            return True
    return False
|
||||
|
||||
|
||||
def _guess_groups(opt, mod_obj):
    """Return the name of the config group *opt* is registered under.

    :raises RuntimeError: when the option cannot be found in any group
        (e.g. because it was registered twice in the same group).
    """
    # is it in the DEFAULT group?
    if _is_in_group(opt, cfg.CONF):
        return 'DEFAULT'

    # what other groups is it in?
    for value in cfg.CONF.values():
        if isinstance(value, cfg.CONF.GroupAttr):
            if _is_in_group(opt, value._group):
                return value._group.name

    raise RuntimeError(
        "Unable to find group for option %s, "
        "maybe it's defined twice in the same group?"
        % opt.name
    )
|
||||
|
||||
|
||||
def _list_opts(obj):
    """Collect the config options exposed by a module or object.

    If *obj* provides a ``list_opts()`` callable, its result is returned
    verbatim. Otherwise the object's attributes are scanned for ``Opt``
    instances (and lists of them), which are grouped by config group.

    :returns: iterable of (group_name, [Opt, ...]) pairs
    """
    def is_opt(o):
        # SubCommandOpt has no sample-config representation, so skip it.
        return (isinstance(o, cfg.Opt) and
                not isinstance(o, cfg.SubCommandOpt))

    # Objects that declare their options explicitly win outright.
    if hasattr(obj, 'list_opts'):
        return obj.list_opts()

    opts = []
    for attr_str in dir(obj):
        attr_obj = getattr(obj, attr_str)
        if is_opt(attr_obj):
            opts.append(attr_obj)
        elif (isinstance(attr_obj, list) and
              all(is_opt(x) for x in attr_obj)):
            opts.extend(attr_obj)

    ret = {}
    for opt in opts:
        ret.setdefault(_guess_groups(opt, obj), []).append(opt)
    return ret.items()
|
||||
|
||||
|
||||
def print_group_opts(group, opts_by_module):
    """Print one sample-config section.

    Emits the ``[group]`` header, then for every contributing module a
    banner comment followed by that module's options.

    :param group: config group name
    :param opts_by_module: list of (module_name, [Opt, ...]) tuples
    """
    print("[%s]" % group)
    print('')
    for mod, opts in opts_by_module:
        print('#')
        print('# Options defined in %s' % mod)
        print('#')
        print('')
        for opt in opts:
            _print_opt(opt)
        print('')
|
||||
|
||||
|
||||
def _get_my_ip():
    """Best-effort discovery of this host's outbound IPv4 address.

    "Connects" a UDP socket to a public address (no packets are sent for
    a UDP connect) and reads back the local address the kernel selected.

    :returns: the local IP address string, or None when no route exists
    """
    try:
        probe = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        probe.connect(('8.8.8.8', 80))
        addr = probe.getsockname()[0]
        probe.close()
        return addr
    except socket.error:
        return None
|
||||
|
||||
|
||||
def _sanitize_default(name, value):
    """Set up a reasonably sensible default for pybasedir, my_ip and host."""
    if value.startswith(sys.prefix):
        # NOTE(jd) Don't use os.path.join, because it is likely to think the
        # second part is an absolute pathname and therefore drop the first
        # part.
        value = os.path.normpath("/usr/" + value[len(sys.prefix):])
    elif value.startswith(BASEDIR):
        return value.replace(BASEDIR, '/usr/lib/python/site-packages')
    elif BASEDIR in value:
        return value.replace(BASEDIR, '')
    elif value == _get_my_ip():
        # Hide the build host's real IP behind a placeholder.
        return '10.0.0.1'
    elif value in (socket.gethostname(), socket.getfqdn()) and 'host' in name:
        # Likewise for host names in host-related options.
        return 'nova'
    elif value.strip() != value:
        # Quote values with leading/trailing whitespace so it survives
        # round-tripping through the config file.
        return '"%s"' % value
    return value
|
||||
|
||||
|
||||
def _get_choice_text(choice):
    """Render one allowed-value choice for the sample config output."""
    if choice is None:
        return '<None>'
    if choice == '':
        return "''"
    return six.text_type(choice)
|
||||
|
||||
|
||||
def _print_opt(opt):
    """Print one option as commented-out sample-config lines.

    Emits the wrapped help text (with the option type appended), any
    deprecated group/name aliases, and a ``#name=default`` line whose
    formatting depends on the option type. Exits the process on
    unrecognized option types or malformed defaults.
    """
    opt_name, opt_default, opt_help = opt.dest, opt.default, opt.help
    if not opt_help:
        sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name)
        opt_help = ""
    opt_type = None
    try:
        # The type name (StrOpt, BoolOpt, ...) is recovered from the
        # option's class name via OPTION_REGEX.
        opt_type = OPTION_REGEX.search(str(type(opt))).group(0)
    except (ValueError, AttributeError) as err:
        sys.stderr.write("%s\n" % str(err))
        sys.exit(1)
    opt_help = u'%s (%s)' % (opt_help,
                             OPT_TYPES[opt_type])
    print('#', "\n# ".join(textwrap.wrap(opt_help, WORDWRAP_WIDTH)))
    if opt.deprecated_opts:
        for deprecated_opt in opt.deprecated_opts:
            if deprecated_opt.name:
                deprecated_group = (deprecated_opt.group if
                                    deprecated_opt.group else "DEFAULT")
                print('# Deprecated group/name - [%s]/%s' %
                      (deprecated_group,
                       deprecated_opt.name))
    try:
        if opt_default is None:
            print('#%s=<None>' % opt_name)
        elif opt_type == STROPT:
            assert(isinstance(opt_default, six.string_types))
            if (getattr(opt, 'type', None) and
                    getattr(opt.type, 'choices', None)):
                choices_text = ', '.join([_get_choice_text(choice)
                                          for choice in opt.type.choices])
                print('# Allowed values: %s' % choices_text)
            print('#%s=%s' % (opt_name, _sanitize_default(opt_name,
                                                          opt_default)))
        elif opt_type == BOOLOPT:
            assert(isinstance(opt_default, bool))
            print('#%s=%s' % (opt_name, str(opt_default).lower()))
        elif opt_type == INTOPT:
            # bool is a subclass of int, so exclude it explicitly.
            assert(isinstance(opt_default, int) and
                   not isinstance(opt_default, bool))
            print('#%s=%s' % (opt_name, opt_default))
        elif opt_type == FLOATOPT:
            assert(isinstance(opt_default, float))
            print('#%s=%s' % (opt_name, opt_default))
        elif opt_type == LISTOPT:
            assert(isinstance(opt_default, list))
            print('#%s=%s' % (opt_name, ','.join(opt_default)))
        elif opt_type == DICTOPT:
            assert(isinstance(opt_default, dict))
            opt_default_strlist = [str(key) + ':' + str(value)
                                   for (key, value) in opt_default.items()]
            print('#%s=%s' % (opt_name, ','.join(opt_default_strlist)))
        elif opt_type == MULTISTROPT:
            assert(isinstance(opt_default, list))
            if not opt_default:
                opt_default = ['']
            # Multi-valued options repeat the key once per value.
            for default in opt_default:
                print('#%s=%s' % (opt_name, default))
        print('')
    except Exception:
        sys.stderr.write('Error in option "%s"\n' % opt_name)
        sys.exit(1)
|
||||
|
||||
|
||||
def main():
    """Console entry point: forward CLI args to generate()."""
    generate(sys.argv[1:])

if __name__ == '__main__':
    main()
|
@ -1,151 +0,0 @@
|
||||
# Copyright (c) 2012 OpenStack Foundation.
|
||||
# Administrator of the National Aeronautics and Space Administration.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import copy
|
||||
import errno
|
||||
import gc
|
||||
import logging
|
||||
import os
|
||||
import pprint
|
||||
import socket
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
import eventlet.backdoor
|
||||
import greenlet
|
||||
from oslo_config import cfg
|
||||
|
||||
from nova.openstack.common._i18n import _LI
|
||||
|
||||
help_for_backdoor_port = (
|
||||
"Acceptable values are 0, <port>, and <start>:<end>, where 0 results "
|
||||
"in listening on a random tcp port number; <port> results in listening "
|
||||
"on the specified port number (and not enabling backdoor if that port "
|
||||
"is in use); and <start>:<end> results in listening on the smallest "
|
||||
"unused port number within the specified range of port numbers. The "
|
||||
"chosen port is displayed in the service's log file.")
|
||||
eventlet_backdoor_opts = [
|
||||
cfg.StrOpt('backdoor_port',
|
||||
help="Enable eventlet backdoor. %s" % help_for_backdoor_port)
|
||||
]
|
||||
|
||||
CONF = cfg.CONF
|
||||
CONF.register_opts(eventlet_backdoor_opts)
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def list_opts():
    """Entry point for oslo-config-generator.

    Returns the backdoor options under the default (None) group; the
    list is deep-copied so callers cannot mutate the module's master
    copy.
    """
    return [(None, copy.deepcopy(eventlet_backdoor_opts))]
|
||||
|
||||
|
||||
class EventletBackdoorConfigValueError(Exception):
    """Raised when the backdoor_port option cannot be parsed.

    NOTE(review): the signature is (port_range, help_msg, ex) but
    _parse_port_range() in this module passes (port_range, ex, help) —
    the last two arguments are swapped at the call site; confirm which
    side is intended before relying on the message layout.
    """

    def __init__(self, port_range, help_msg, ex):
        msg = ('Invalid backdoor_port configuration %(range)s: %(ex)s. '
               '%(help)s' %
               {'range': port_range, 'ex': ex, 'help': help_msg})
        super(EventletBackdoorConfigValueError, self).__init__(msg)
        # Kept so callers can inspect the offending configuration value.
        self.port_range = port_range
|
||||
|
||||
|
||||
def _dont_use_this():
    """Replacement for exit/quit in the backdoor shell.

    Prevents a stray ``exit`` typed into the backdoor from killing the
    whole service process; the operator should just disconnect instead.
    """
    print("Don't use this, just disconnect instead")
|
||||
|
||||
|
||||
def _find_objects(t):
|
||||
return [o for o in gc.get_objects() if isinstance(o, t)]
|
||||
|
||||
|
||||
def _print_greenthreads():
    """Backdoor helper: dump the stack of every live greenthread."""
    for i, gt in enumerate(_find_objects(greenlet.greenlet)):
        print(i, gt)
        traceback.print_stack(gt.gr_frame)
        print()
|
||||
|
||||
|
||||
def _print_nativethreads():
    """Backdoor helper: dump the stack of every native (OS) thread."""
    for thread_id, frame in sys._current_frames().items():
        print(thread_id)
        traceback.print_stack(frame)
        print()
|
||||
|
||||
|
||||
def _parse_port_range(port_range):
    """Parse a backdoor_port value into an inclusive (start, end) tuple.

    Accepts either a single port ("8080" -> (8080, 8080)) or a
    "<start>:<end>" range.

    :raises EventletBackdoorConfigValueError: for non-numeric values or
        a range whose end precedes its start
    """
    if ':' not in port_range:
        start, end = port_range, port_range
    else:
        start, end = port_range.split(':', 1)
    try:
        start, end = int(start), int(end)
        if end < start:
            raise ValueError
        return start, end
    except ValueError as ex:
        # Bug fix: EventletBackdoorConfigValueError's signature is
        # (port_range, help_msg, ex); the original call passed the help
        # text and the exception in the opposite order, producing a
        # garbled error message.
        raise EventletBackdoorConfigValueError(port_range,
                                               help_for_backdoor_port,
                                               ex)
|
||||
|
||||
|
||||
def _listen(host, start_port, end_port, listen_func):
    """Bind to the first free port in [start_port, end_port].

    :param host: interface to bind to
    :param start_port: first port to try
    :param end_port: last port to try (inclusive)
    :param listen_func: callable taking an (host, port) tuple, e.g.
        eventlet.listen
    :returns: whatever *listen_func* returns (a listening socket)
    :raises socket.error: when every port in the range is in use, or on
        any error other than "address in use"
    """
    try_port = start_port
    while True:
        try:
            return listen_func((host, try_port))
        except socket.error as exc:
            # Only keep scanning on EADDRINUSE while ports remain.
            if (exc.errno != errno.EADDRINUSE or
                    try_port >= end_port):
                raise
            try_port += 1
|
||||
|
||||
|
||||
def initialize_if_enabled():
    """Start the eventlet backdoor server when backdoor_port is configured.

    :returns: the port number actually listened on, or None when the
        backdoor is disabled (backdoor_port unset)
    """
    # Names made available inside the backdoor Python shell.
    backdoor_locals = {
        'exit': _dont_use_this,      # So we don't exit the entire process
        'quit': _dont_use_this,      # So we don't exit the entire process
        'fo': _find_objects,
        'pgt': _print_greenthreads,
        'pnt': _print_nativethreads,
    }

    if CONF.backdoor_port is None:
        return None

    start_port, end_port = _parse_port_range(str(CONF.backdoor_port))

    # NOTE(johannes): The standard sys.displayhook will print the value of
    # the last expression and set it to __builtin__._, which overwrites
    # the __builtin__._ that gettext sets. Let's switch to using pprint
    # since it won't interact poorly with gettext, and it's easier to
    # read the output too.
    def displayhook(val):
        if val is not None:
            pprint.pprint(val)
    sys.displayhook = displayhook

    sock = _listen('localhost', start_port, end_port, eventlet.listen)

    # In the case of backdoor port being zero, a port number is assigned by
    # listen(). In any case, pull the port number out here.
    port = sock.getsockname()[1]
    LOG.info(
        _LI('Eventlet backdoor listening on %(port)s for process %(pid)d') %
        {'port': port, 'pid': os.getpid()}
    )
    eventlet.spawn_n(eventlet.backdoor.backdoor_server, sock,
                     locals=backdoor_locals)
    return port
|
@ -1,149 +0,0 @@
|
||||
# Copyright 2011 OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import contextlib
|
||||
import errno
|
||||
import logging
|
||||
import os
|
||||
import stat
|
||||
import tempfile
|
||||
|
||||
from oslo_utils import excutils
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
_FILE_CACHE = {}
|
||||
DEFAULT_MODE = stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO
|
||||
|
||||
|
||||
def ensure_tree(path, mode=DEFAULT_MODE):
    """Create a directory (and any ancestor directories required)

    :param path: Directory to create
    :param mode: Directory creation permissions
    """
    try:
        os.makedirs(path, mode)
    except OSError as exc:
        # An already-existing directory is fine; anything else — including
        # an existing non-directory at *path* — propagates.
        if exc.errno != errno.EEXIST:
            raise
        if not os.path.isdir(path):
            raise
|
||||
|
||||
|
||||
def read_cached_file(filename, force_reload=False):
    """Read from a file if it has been modified.

    Contents are cached in the module-level _FILE_CACHE keyed by
    filename; the file is re-read only when its mtime is newer than the
    cached one.

    :param filename: path of the file to read
    :param force_reload: Whether to reload the file.
    :returns: A tuple with a boolean specifying if the data is fresh
              or not.
    """
    global _FILE_CACHE

    if force_reload:
        delete_cached_file(filename)

    reloaded = False
    mtime = os.path.getmtime(filename)
    cache_info = _FILE_CACHE.setdefault(filename, {})

    if not cache_info or mtime > cache_info.get('mtime', 0):
        LOG.debug("Reloading cached file %s" % filename)
        with open(filename) as fap:
            cache_info['data'] = fap.read()
        cache_info['mtime'] = mtime
        reloaded = True
    return (reloaded, cache_info['data'])
|
||||
|
||||
|
||||
def delete_cached_file(filename):
    """Delete cached file if present.

    :param filename: filename to delete
    """
    global _FILE_CACHE

    if filename in _FILE_CACHE:
        del _FILE_CACHE[filename]
|
||||
|
||||
|
||||
def delete_if_exists(path, remove=os.unlink):
    """Delete a file, but ignore file not found error.

    :param path: File to delete
    :param remove: Optional function to remove passed path
    """
    try:
        remove(path)
    except OSError as err:
        # A missing file is the desired end state; re-raise anything else.
        if err.errno == errno.ENOENT:
            return
        raise
|
||||
|
||||
|
||||
@contextlib.contextmanager
def remove_path_on_error(path, remove=delete_if_exists):
    """Protect code that wants to operate on PATH atomically.
    Any exception will cause PATH to be removed.

    :param path: File to work with
    :param remove: Optional function to remove passed path
    """

    try:
        yield
    except Exception:
        # save_and_reraise_exception re-raises the original exception
        # after the cleanup below, preserving its traceback.
        with excutils.save_and_reraise_exception():
            remove(path)
|
||||
|
||||
|
||||
def file_open(*args, **kwargs):
    """Thin wrapper around the built-in open(); see its docs for details.

    Note: The reason this is kept in a separate module is to easily
    be able to provide a stub module that doesn't alter system
    state at all (for unit tests)
    """
    return open(*args, **kwargs)
|
||||
|
||||
|
||||
def write_to_tempfile(content, path=None, suffix='', prefix='tmp'):
    """Create a temporary file holding *content* and return its path.

    If *path* is given it is used as the containing directory (created
    first if necessary); otherwise the system temp directory is used.

    :param content: bytes to write into the temporary file.
    :param path: same as parameter 'dir' for mkstemp
    :param suffix: same as parameter 'suffix' for mkstemp
    :param prefix: same as parameter 'prefix' for mkstemp
    :returns: absolute path of the created file

    For example: it can be used in database tests for creating
    configuration files.
    """
    if path:
        ensure_tree(path)

    fd, abspath = tempfile.mkstemp(suffix=suffix, dir=path, prefix=prefix)
    try:
        os.write(fd, content)
    finally:
        # Always release the descriptor, even when the write fails.
        os.close(fd)
    return abspath
|
@ -1,152 +0,0 @@
|
||||
# Copyright 2010 United States Government as represented by the
|
||||
# Administrator of the National Aeronautics and Space Administration.
|
||||
# All Rights Reserved.
|
||||
# Copyright (c) 2010 Citrix Systems, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Helper methods to deal with images.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from oslo_utils import strutils
|
||||
|
||||
from nova.openstack.common._i18n import _
|
||||
|
||||
|
||||
class QemuImgInfo(object):
    """Parsed representation of ``qemu-img info`` textual output.

    Attributes (all best-effort, None/[] when absent from the output):
    image, backing_file, file_format, virtual_size, cluster_size,
    disk_size, snapshots, encrypted. Sizes are normalized to integer
    byte counts where the output allows it.
    """

    # Matches "file (actual path: /real/file)" backing-file lines.
    BACKING_FILE_RE = re.compile((r"^(.*?)\s*\(actual\s+path\s*:"
                                  r"\s+(.*?)\)\s*$"), re.I)
    # Matches "key: value" top-level lines of the qemu-img output.
    TOP_LEVEL_RE = re.compile(r"^([\w\d\s\_\-]+):(.*)$")
    # Matches sizes like "1.5G (1610612736 bytes)".
    SIZE_RE = re.compile(r"(\d*\.?\d+)(\w+)?(\s*\(\s*(\d+)\s+bytes\s*\))?",
                         re.I)

    def __init__(self, cmd_output=None):
        """Parse *cmd_output* (the stdout of ``qemu-img info``)."""
        details = self._parse(cmd_output or '')
        self.image = details.get('image')
        self.backing_file = details.get('backing_file')
        self.file_format = details.get('file_format')
        self.virtual_size = details.get('virtual_size')
        self.cluster_size = details.get('cluster_size')
        self.disk_size = details.get('disk_size')
        self.snapshots = details.get('snapshot_list', [])
        self.encrypted = details.get('encrypted')

    def __str__(self):
        """Render the parsed fields in a qemu-img-like layout."""
        lines = [
            'image: %s' % self.image,
            'file_format: %s' % self.file_format,
            'virtual_size: %s' % self.virtual_size,
            'disk_size: %s' % self.disk_size,
            'cluster_size: %s' % self.cluster_size,
            'backing_file: %s' % self.backing_file,
        ]
        if self.snapshots:
            lines.append("snapshots: %s" % self.snapshots)
        if self.encrypted:
            lines.append("encrypted: %s" % self.encrypted)
        return "\n".join(lines)

    def _canonicalize(self, field):
        """Normalize a field name to lowercase snake_case."""
        # Standardize on underscores/lc/no dash and no spaces
        # since qemu seems to have mixed outputs here... and
        # this format allows for better integration with python
        # - i.e. for usage in kwargs and such...
        field = field.lower().strip()
        for c in (" ", "-"):
            field = field.replace(c, '_')
        return field

    def _extract_bytes(self, details):
        """Convert a size string to an integer byte count.

        :raises ValueError: when *details* does not look like a size
        """
        # Replace it with the byte amount
        real_size = self.SIZE_RE.search(details)
        if not real_size:
            raise ValueError(_('Invalid input value "%s".') % details)
        magnitude = real_size.group(1)
        unit_of_measure = real_size.group(2)
        bytes_info = real_size.group(3)
        if bytes_info:
            # Prefer the exact parenthesized byte count when present.
            return int(real_size.group(4))
        elif not unit_of_measure:
            return int(magnitude)
        return strutils.string_to_bytes('%s%sB' % (magnitude, unit_of_measure),
                                        return_int=True)

    def _extract_details(self, root_cmd, root_details, lines_after):
        """Post-process the value of one top-level field.

        *lines_after* (the remaining output lines) is consumed in place
        when a field, such as the snapshot list, spans multiple lines.
        """
        real_details = root_details
        if root_cmd == 'backing_file':
            # Replace it with the real backing file
            backing_match = self.BACKING_FILE_RE.match(root_details)
            if backing_match:
                real_details = backing_match.group(2).strip()
        elif root_cmd in ['virtual_size', 'cluster_size', 'disk_size']:
            # Replace it with the byte amount (if we can convert it)
            if root_details == 'None':
                real_details = 0
            else:
                real_details = self._extract_bytes(root_details)
        elif root_cmd == 'file_format':
            real_details = real_details.strip().lower()
        elif root_cmd == 'snapshot_list':
            # Next line should be a header, starting with 'ID'
            if not lines_after or not lines_after.pop(0).startswith("ID"):
                msg = _("Snapshot list encountered but no header found!")
                raise ValueError(msg)
            real_details = []
            # This is the sprintf pattern we will try to match
            # "%-10s%-20s%7s%20s%15s"
            # ID TAG VM SIZE DATE VM CLOCK (current header)
            while lines_after:
                line = lines_after[0]
                line_pieces = line.split()
                if len(line_pieces) != 6:
                    break
                # Check against this pattern in the final position
                # "%02d:%02d:%02d.%03d"
                date_pieces = line_pieces[5].split(":")
                if len(date_pieces) != 3:
                    break
                lines_after.pop(0)
                real_details.append({
                    'id': line_pieces[0],
                    'tag': line_pieces[1],
                    'vm_size': line_pieces[2],
                    'date': line_pieces[3],
                    'vm_clock': line_pieces[4] + " " + line_pieces[5],
                })
        return real_details

    def _parse(self, cmd_output):
        """Parse full ``qemu-img info`` output into a field dict."""
        # Analysis done of qemu-img.c to figure out what is going on here
        # Find all points start with some chars and then a ':' then a newline
        # and then handle the results of those 'top level' items in a separate
        # function.
        #
        # TODO(harlowja): newer versions might have a json output format
        # we should switch to that whenever possible.
        # see: http://bit.ly/XLJXDX
        contents = {}
        lines = [x for x in cmd_output.splitlines() if x.strip()]
        while lines:
            line = lines.pop(0)
            top_level = self.TOP_LEVEL_RE.match(line)
            if top_level:
                root = self._canonicalize(top_level.group(1))
                if not root:
                    continue
                root_details = top_level.group(2).strip()
                details = self._extract_details(root, root_details, lines)
                contents[root] = details
        return contents
|
@ -1,45 +0,0 @@
|
||||
# Copyright 2011 OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Local storage of variables using weak references"""
|
||||
|
||||
import threading
|
||||
import weakref
|
||||
|
||||
|
||||
class WeakLocal(threading.local):
    """Thread-local storage whose values are held via weak references.

    Every attribute assigned on an instance is wrapped in a weakref.ref,
    so the stored object can be garbage collected as soon as no strong
    references to it remain elsewhere; reading the attribute afterwards
    yields None.
    """

    def __getattribute__(self, attr):
        ref = super(WeakLocal, self).__getattribute__(attr)
        # NOTE(mikal): what is stored is a weak reference, not the value
        # itself, so it must be dereferenced before being returned.
        return ref() if ref else ref

    def __setattr__(self, attr, value):
        # Wrap each assigned value in a weak reference before storing it.
        return super(WeakLocal, self).__setattr__(attr, weakref.ref(value))
|
||||
|
||||
|
||||
# NOTE(mikal): the name "store" should be deprecated in the future
# (it behaves identically to weak_store below).
store = WeakLocal()

# A "weak" store uses weak references and allows an object to fall out of scope
# when it falls out of scope in the code that uses the thread local storage. A
# "strong" store will hold a reference to the object so that it never falls out
# of scope.
weak_store = WeakLocal()
strong_store = threading.local()
|
@ -1,147 +0,0 @@
|
||||
# Copyright 2010 United States Government as represented by the
|
||||
# Administrator of the National Aeronautics and Space Administration.
|
||||
# Copyright 2011 Justin Santa Barbara
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import logging
|
||||
import sys
|
||||
import time
|
||||
|
||||
from eventlet import event
|
||||
from eventlet import greenthread
|
||||
|
||||
from nova.openstack.common._i18n import _LE, _LW
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
# NOTE(zyluo): This lambda function was declared to avoid mocking collisions
|
||||
# with time.time() called in the standard logging module
|
||||
# during unittests.
|
||||
_ts = lambda: time.time()
|
||||
|
||||
|
||||
class LoopingCallDone(Exception):
    """Raised from a poll function to stop a LoopingCallBase normally.

    Somewhat analogous to StopIteration: the function passed to a looping
    call can raise this exception to end the loop cleanly.  An optional
    value carried by the exception becomes the result returned from
    LoopingCallBase.wait().
    """

    def __init__(self, retvalue=True):
        """:param retvalue: Value that LoopingCallBase.wait() should return."""
        self.retvalue = retvalue
|
||||
|
||||
|
||||
class LoopingCallBase(object):
    """Common state and control surface for looping-call helpers.

    Holds the wrapped callable with its arguments, a running flag, and the
    event ('done') that wait() blocks on once a subclass starts the loop.
    """

    def __init__(self, f=None, *args, **kw):
        self.f = f              # callable invoked on each iteration
        self.args = args        # positional arguments for f
        self.kw = kw            # keyword arguments for f
        self._running = False   # set True by subclasses' start()
        self.done = None        # event assigned by subclasses when started

    def stop(self):
        """Ask the loop to stop after the current iteration completes."""
        self._running = False

    def wait(self):
        """Block until the loop finishes and return its final value."""
        return self.done.wait()
|
||||
|
||||
|
||||
class FixedIntervalLoopingCall(LoopingCallBase):
    """A fixed interval looping call.

    The sleep between iterations is shortened by however long the wrapped
    function itself took, so iterations stay on a fixed cadence; overruns
    are logged as warnings.
    """

    def start(self, interval, initial_delay=None):
        """Spawn the loop in a green thread.

        :param interval: seconds between the starts of consecutive calls
        :param initial_delay: optional seconds to sleep before the first call
        :returns: the event that fires when the loop terminates
        """
        self._running = True
        done = event.Event()

        def _inner():
            if initial_delay:
                greenthread.sleep(initial_delay)

            try:
                while self._running:
                    start = _ts()
                    self.f(*self.args, **self.kw)
                    end = _ts()
                    if not self._running:
                        break
                    # delay > 0 means the call overran its interval.
                    delay = end - start - interval
                    if delay > 0:
                        # NOTE: Logger.warn() is a deprecated alias;
                        # warning() is the supported spelling.
                        LOG.warning(_LW('task %(func_name)r run outlasted '
                                        'interval by %(delay).2f sec'),
                                    {'func_name': self.f, 'delay': delay})
                    greenthread.sleep(-delay if delay < 0 else 0)
            except LoopingCallDone as e:
                # Normal termination requested by the wrapped function.
                self.stop()
                done.send(e.retvalue)
            except Exception:
                LOG.exception(_LE('in fixed duration looping call'))
                done.send_exception(*sys.exc_info())
                return
            else:
                done.send(True)

        self.done = done

        greenthread.spawn_n(_inner)
        return self.done
|
||||
|
||||
|
||||
class DynamicLoopingCall(LoopingCallBase):
    """A looping call which sleeps until the next known event.

    The function called should return how long to sleep for before being
    called again.
    """

    def start(self, initial_delay=None, periodic_interval_max=None):
        # Mark running before spawning so stop() is effective immediately.
        self._running = True
        done = event.Event()

        def _inner():
            if initial_delay:
                greenthread.sleep(initial_delay)

            try:
                while self._running:
                    # The wrapped function's return value is the idle time
                    # (seconds) until its next run.
                    idle = self.f(*self.args, **self.kw)
                    if not self._running:
                        break

                    if periodic_interval_max is not None:
                        # Never sleep longer than the configured cap.
                        idle = min(idle, periodic_interval_max)
                    LOG.debug('Dynamic looping call %(func_name)r sleeping '
                              'for %(idle).02f seconds',
                              {'func_name': self.f, 'idle': idle})
                    greenthread.sleep(idle)
            except LoopingCallDone as e:
                # Normal termination requested by the wrapped function.
                self.stop()
                done.send(e.retvalue)
            except Exception:
                LOG.exception(_LE('in dynamic looping call'))
                done.send_exception(*sys.exc_info())
                return
            else:
                done.send(True)

        self.done = done

        greenthread.spawn(_inner)
        return self.done
|
@ -1,97 +0,0 @@
|
||||
# Copyright 2010 United States Government as represented by the
|
||||
# Administrator of the National Aeronautics and Space Administration.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Super simple fake memcache client."""
|
||||
|
||||
import copy
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_utils import timeutils
|
||||
|
||||
# Configuration for choosing a memcached backend; when no servers are
# configured, the in-process fake Client defined below is used instead.
memcache_opts = [
    cfg.ListOpt('memcached_servers',
                help='Memcached servers or None for in process cache.'),
]

CONF = cfg.CONF
CONF.register_opts(memcache_opts)
|
||||
|
||||
|
||||
def list_opts():
    """Entry point for oslo-config-generator."""
    # deepcopy so generator consumers cannot mutate the registered options.
    opts = copy.deepcopy(memcache_opts)
    return [(None, opts)]
|
||||
|
||||
|
||||
def get_client(memcached_servers=None):
    """Return a memcache client, real or in-process fake.

    :param memcached_servers: explicit server list; when falsy, falls back
        to CONF.memcached_servers.
    :returns: a memcache.Client when servers are configured, otherwise an
        instance of the fake Client defined below.
    """
    client_cls = Client

    if not memcached_servers:
        memcached_servers = CONF.memcached_servers
    if memcached_servers:
        # Imported lazily so the python-memcached dependency is only
        # required when real servers are actually configured.
        import memcache
        client_cls = memcache.Client

    return client_cls(memcached_servers, debug=0)
|
||||
|
||||
|
||||
class Client(object):
    """In-process stand-in implementing a tiny subset of the memcached API.

    Entries are stored as ``key -> (expiry_timestamp, value)`` where an
    expiry of 0 means "never expires".  All constructor arguments are
    accepted and ignored, mirroring memcache.Client's signature.
    """

    def __init__(self, *args, **kwargs):
        """Accept and ignore any arguments."""
        self.cache = {}

    def get(self, key):
        """Return the value stored under key, or None.

        Every lookup also expunges whatever entries have expired.
        """
        now = timeutils.utcnow_ts()
        expired = [k for k, (timeout, _value) in list(self.cache.items())
                   if timeout and now >= timeout]
        for k in expired:
            del self.cache[k]

        return self.cache.get(key, (0, None))[1]

    def set(self, key, value, time=0, min_compress_len=0):
        """Store value under key; time is seconds until expiry (0 = never)."""
        expires_at = timeutils.utcnow_ts() + time if time != 0 else 0
        self.cache[key] = (expires_at, value)
        return True

    def add(self, key, value, time=0, min_compress_len=0):
        """Store value only when key is absent; return False otherwise."""
        if self.get(key) is not None:
            return False
        return self.set(key, value, time, min_compress_len)

    def incr(self, key, delta=1):
        """Add delta to the integer stored under key; None when absent."""
        current = self.get(key)
        if current is None:
            return None
        updated = int(current) + delta
        # Counters are stored as strings, matching memcached semantics.
        self.cache[key] = (self.cache[key][0], str(updated))
        return updated

    def delete(self, key, time=0):
        """Remove key if present; the time argument is ignored."""
        self.cache.pop(key, None)
|
@ -1,27 +0,0 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Compatibility shim for Kilo, while operators migrate to oslo.middleware."""
|
||||
|
||||
from oslo_middleware import request_id
|
||||
|
||||
from nova.openstack.common import versionutils
|
||||
|
||||
|
||||
# WSGI environ key under which the request id is stored.
ENV_REQUEST_ID = 'openstack.request_id'
# Response header that carries the request id back to the caller.
HTTP_RESP_HEADER_REQUEST_ID = 'x-openstack-request-id'
|
||||
|
||||
|
||||
@versionutils.deprecated(as_of=versionutils.deprecated.KILO,
                         in_favor_of='oslo.middleware.RequestId')
class RequestIdMiddleware(request_id.RequestId):
    """Deprecated alias kept so existing paste pipelines keep working.

    All behavior comes from oslo_middleware.request_id.RequestId; new
    deployments should reference that class directly.
    """
    pass
|
@ -1,232 +0,0 @@
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
import logging
|
||||
import random
|
||||
import time
|
||||
|
||||
from oslo_config import cfg
|
||||
import six
|
||||
|
||||
from nova.openstack.common._i18n import _, _LE, _LI
|
||||
|
||||
|
||||
# Option letting deployments move externally-runnable periodic tasks out
# of this process (see the external_process_ok decorator argument).
periodic_opts = [
    cfg.BoolOpt('run_external_periodic_tasks',
                default=True,
                help='Some periodic tasks can be run in a separate process. '
                     'Should we run them here?'),
]

CONF = cfg.CONF
CONF.register_opts(periodic_opts)

LOG = logging.getLogger(__name__)

# Interval (seconds) applied to tasks decorated with spacing == 0.
DEFAULT_INTERVAL = 60.0
|
||||
|
||||
|
||||
def list_opts():
    """Entry point for oslo-config-generator."""
    # deepcopy so generator consumers cannot mutate the registered options.
    return [(None, copy.deepcopy(periodic_opts))]
|
||||
|
||||
|
||||
class InvalidPeriodicTaskArg(Exception):
    """Raised when @periodic_task is used with a retired argument."""

    message = _("Unexpected argument for periodic task creation: %(arg)s.")
|
||||
|
||||
|
||||
def periodic_task(*args, **kwargs):
    """Decorator to indicate that a method is a periodic task.

    This decorator can be used in two ways:

    1. Without arguments '@periodic_task', this will be run on the default
       interval of 60 seconds.

    2. With arguments:
       @periodic_task(spacing=N [, run_immediately=[True|False]]
       [, name=[None|"string"])
       this will be run on approximately every N seconds. If this number is
       negative the periodic task will be disabled. If the run_immediately
       argument is provided and has a value of 'True', the first run of the
       task will be shortly after task scheduler starts.  If
       run_immediately is omitted or set to 'False', the first time the
       task runs will be approximately N seconds after the task scheduler
       starts. If name is not provided, __name__ of function is used.
    """
    def decorator(f):
        # Test for old style invocation
        if 'ticks_between_runs' in kwargs:
            raise InvalidPeriodicTaskArg(arg='ticks_between_runs')

        # Control if run at all
        f._periodic_task = True
        f._periodic_external_ok = kwargs.pop('external_process_ok', False)
        if f._periodic_external_ok and not CONF.run_external_periodic_tasks:
            f._periodic_enabled = False
        else:
            f._periodic_enabled = kwargs.pop('enabled', True)
        f._periodic_name = kwargs.pop('name', f.__name__)

        # Control frequency
        f._periodic_spacing = kwargs.pop('spacing', 0)
        f._periodic_immediate = kwargs.pop('run_immediately', False)
        if f._periodic_immediate:
            # None marks the task as never-run, so it fires on first pass.
            f._periodic_last_run = None
        else:
            f._periodic_last_run = time.time()
        return f

    # NOTE(sirp): The `if` is necessary to allow the decorator to be used with
    # and without parenthesis.
    #
    # In the 'with-parenthesis' case (with kwargs present), this function needs
    # to return a decorator function since the interpreter will invoke it like:
    #
    #   periodic_task(*args, **kwargs)(f)
    #
    # In the 'without-parenthesis' case, the original function will be passed
    # in as the first argument, like:
    #
    #   periodic_task(f)
    if kwargs:
        return decorator
    else:
        return decorator(args[0])
|
||||
|
||||
|
||||
class _PeriodicTasksMeta(type):
    """Metaclass that collects @periodic_task-decorated methods per class."""

    def _add_periodic_task(cls, task):
        """Add a periodic task to the list of periodic tasks.

        The task should already be decorated by @periodic_task.

        :return: whether task was actually enabled
        """
        name = task._periodic_name

        if task._periodic_spacing < 0:
            LOG.info(_LI('Skipping periodic task %(task)s because '
                         'its interval is negative'),
                     {'task': name})
            return False
        if not task._periodic_enabled:
            LOG.info(_LI('Skipping periodic task %(task)s because '
                         'it is disabled'),
                     {'task': name})
            return False

        # A periodic spacing of zero indicates that this task should
        # be run on the default interval to avoid running too
        # frequently.
        if task._periodic_spacing == 0:
            task._periodic_spacing = DEFAULT_INTERVAL

        cls._periodic_tasks.append((name, task))
        cls._periodic_spacing[name] = task._periodic_spacing
        return True

    def __init__(cls, names, bases, dict_):
        """Metaclass that allows us to collect decorated periodic tasks."""
        super(_PeriodicTasksMeta, cls).__init__(names, bases, dict_)

        # NOTE(sirp): if the attribute is not present then we must be the base
        # class, so, go ahead an initialize it. If the attribute is present,
        # then we're a subclass so make a copy of it so we don't step on our
        # parent's toes.
        try:
            cls._periodic_tasks = cls._periodic_tasks[:]
        except AttributeError:
            cls._periodic_tasks = []

        try:
            cls._periodic_spacing = cls._periodic_spacing.copy()
        except AttributeError:
            cls._periodic_spacing = {}

        # Register every decorated method found directly on this class.
        for value in cls.__dict__.values():
            if getattr(value, '_periodic_task', False):
                cls._add_periodic_task(value)
|
||||
|
||||
|
||||
def _nearest_boundary(last_run, spacing):
|
||||
"""Find nearest boundary which is in the past, which is a multiple of the
|
||||
spacing with the last run as an offset.
|
||||
|
||||
Eg if last run was 10 and spacing was 7, the new last run could be: 17, 24,
|
||||
31, 38...
|
||||
|
||||
0% to 5% of the spacing value will be added to this value to ensure tasks
|
||||
do not synchronize. This jitter is rounded to the nearest second, this
|
||||
means that spacings smaller than 20 seconds will not have jitter.
|
||||
"""
|
||||
current_time = time.time()
|
||||
if last_run is None:
|
||||
return current_time
|
||||
delta = current_time - last_run
|
||||
offset = delta % spacing
|
||||
# Add up to 5% jitter
|
||||
jitter = int(spacing * (random.random() / 20))
|
||||
return current_time - offset + jitter
|
||||
|
||||
|
||||
@six.add_metaclass(_PeriodicTasksMeta)
class PeriodicTasks(object):
    """Base class for services that run @periodic_task-decorated methods."""

    def __init__(self):
        super(PeriodicTasks, self).__init__()
        # Per-instance record of when each task last ran, seeded from the
        # values the decorator set at import time.
        self._periodic_last_run = {}
        for name, task in self._periodic_tasks:
            self._periodic_last_run[name] = task._periodic_last_run

    def add_periodic_task(self, task):
        """Add a periodic task to the list of periodic tasks.

        The task should already be decorated by @periodic_task.
        """
        if self.__class__._add_periodic_task(task):
            self._periodic_last_run[task._periodic_name] = (
                task._periodic_last_run)

    def run_periodic_tasks(self, context, raise_on_error=False):
        """Tasks to be run at a periodic interval.

        :param context: request context passed through to every task
        :param raise_on_error: re-raise task exceptions instead of logging
        :returns: seconds the caller may idle before the next task is due
        """
        idle_for = DEFAULT_INTERVAL
        for task_name, task in self._periodic_tasks:
            full_task_name = '.'.join([self.__class__.__name__, task_name])

            spacing = self._periodic_spacing[task_name]
            last_run = self._periodic_last_run[task_name]

            # Check if due, if not skip
            idle_for = min(idle_for, spacing)
            if last_run is not None:
                delta = last_run + spacing - time.time()
                if delta > 0:
                    idle_for = min(idle_for, delta)
                    continue

            LOG.debug("Running periodic task %(full_task_name)s",
                      {"full_task_name": full_task_name})
            # Record the run time aligned to the spacing grid (with jitter)
            # before invoking the task, so failures still count as a run.
            self._periodic_last_run[task_name] = _nearest_boundary(
                last_run, spacing)

            try:
                task(self, context)
            except Exception as e:
                if raise_on_error:
                    raise
                LOG.exception(_LE("Error during %(full_task_name)s: %(e)s"),
                              {"full_task_name": full_task_name, "e": e})
            # Yield to other green threads between tasks.
            time.sleep(0)

        return idle_for
|
@ -1,963 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (c) 2012 OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Common Policy Engine Implementation
|
||||
|
||||
Policies can be expressed in one of two forms: A list of lists, or a
|
||||
string written in the new policy language.
|
||||
|
||||
In the list-of-lists representation, each check inside the innermost
|
||||
list is combined as with an "and" conjunction--for that check to pass,
|
||||
all the specified checks must pass. These innermost lists are then
|
||||
combined as with an "or" conjunction. As an example, take the following
|
||||
rule, expressed in the list-of-lists representation::
|
||||
|
||||
[["role:admin"], ["project_id:%(project_id)s", "role:projectadmin"]]
|
||||
|
||||
This is the original way of expressing policies, but there now exists a
|
||||
new way: the policy language.
|
||||
|
||||
In the policy language, each check is specified the same way as in the
|
||||
list-of-lists representation: a simple "a:b" pair that is matched to
|
||||
the correct class to perform that check::
|
||||
|
||||
+===========================================================================+
|
||||
| TYPE | SYNTAX |
|
||||
+===========================================================================+
|
||||
|User's Role | role:admin |
|
||||
+---------------------------------------------------------------------------+
|
||||
|Rules already defined on policy | rule:admin_required |
|
||||
+---------------------------------------------------------------------------+
|
||||
|Against URL's¹ | http://my-url.org/check |
|
||||
+---------------------------------------------------------------------------+
|
||||
|User attributes² | project_id:%(target.project.id)s |
|
||||
+---------------------------------------------------------------------------+
|
||||
|Strings | <variable>:'xpto2035abc' |
|
||||
| | 'myproject':<variable> |
|
||||
+---------------------------------------------------------------------------+
|
||||
| | project_id:xpto2035abc |
|
||||
|Literals | domain_id:20 |
|
||||
| | True:%(user.enabled)s |
|
||||
+===========================================================================+
|
||||
|
||||
¹URL checking must return 'True' to be valid
|
||||
²User attributes (obtained through the token): user_id, domain_id or project_id
|
||||
|
||||
Conjunction operators are available, allowing for more expressiveness
|
||||
in crafting policies. So, in the policy language, the previous check in
|
||||
list-of-lists becomes::
|
||||
|
||||
role:admin or (project_id:%(project_id)s and role:projectadmin)
|
||||
|
||||
The policy language also has the "not" operator, allowing a richer
|
||||
policy rule::
|
||||
|
||||
project_id:%(project_id)s and not role:dunce
|
||||
|
||||
Attributes sent along with API calls can be used by the policy engine
|
||||
(on the right side of the expression), by using the following syntax::
|
||||
|
||||
<some_value>:%(user.id)s
|
||||
|
||||
Contextual attributes of objects identified by their IDs are loaded
|
||||
from the database. They are also available to the policy engine and
|
||||
can be checked through the `target` keyword::
|
||||
|
||||
<some_value>:%(target.role.name)s
|
||||
|
||||
Finally, two special policy checks should be mentioned; the policy
|
||||
check "@" will always accept an access, and the policy check "!" will
|
||||
always reject an access. (Note that if a rule is either the empty
|
||||
list ("[]") or the empty string, this is equivalent to the "@" policy
|
||||
check.) Of these, the "!" policy check is probably the most useful,
|
||||
as it allows particular rules to be explicitly disabled.
|
||||
"""
|
||||
|
||||
import abc
|
||||
import ast
|
||||
import copy
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_serialization import jsonutils
|
||||
import six
|
||||
import six.moves.urllib.parse as urlparse
|
||||
import six.moves.urllib.request as urlrequest
|
||||
|
||||
from nova.openstack.common import fileutils
|
||||
from nova.openstack.common._i18n import _, _LE
|
||||
|
||||
|
||||
# Options controlling where policy rules are loaded from.
policy_opts = [
    cfg.StrOpt('policy_file',
               default='policy.json',
               help=_('The JSON file that defines policies.')),
    cfg.StrOpt('policy_default_rule',
               default='default',
               help=_('Default rule. Enforced when a requested rule is not '
                      'found.')),
    cfg.MultiStrOpt('policy_dirs',
                    default=['policy.d'],
                    help=_('Directories where policy configuration files are '
                           'stored. They can be relative to any directory '
                           'in the search path defined by the config_dir '
                           'option, or absolute paths. The file defined by '
                           'policy_file must exist for these directories to '
                           'be searched. Missing or empty directories are '
                           'ignored.')),
]

CONF = cfg.CONF
CONF.register_opts(policy_opts)

LOG = logging.getLogger(__name__)

# Registry mapping check kinds to Check classes; presumably populated by a
# registration helper elsewhere in this module — TODO confirm.
_checks = {}
|
||||
|
||||
|
||||
def list_opts():
    """Entry point for oslo-config-generator."""
    # deepcopy so generator consumers cannot mutate the registered options.
    return [(None, copy.deepcopy(policy_opts))]
|
||||
|
||||
|
||||
class PolicyNotAuthorized(Exception):
    """Raised when policy enforcement rejects the requested action."""

    def __init__(self, rule):
        super(PolicyNotAuthorized, self).__init__(
            _("Policy doesn't allow %s to be performed.") % rule)
|
||||
|
||||
|
||||
class Rules(dict):
    """A store for rules. Handles the default_rule setting directly."""

    @classmethod
    def load_json(cls, data, default_rule=None):
        """Allow loading of JSON rule data.

        :param data: JSON text mapping rule names to policy expressions
        :param default_rule: rule consulted when a lookup misses
        """

        # Suck in the JSON data and parse the rules
        rules = dict((k, parse_rule(v)) for k, v in
                     jsonutils.loads(data).items())

        return cls(rules, default_rule)

    def __init__(self, rules=None, default_rule=None):
        """Initialize the Rules store."""

        super(Rules, self).__init__(rules or {})
        self.default_rule = default_rule

    def __missing__(self, key):
        """Implements the default rule handling."""

        # A dict default_rule cannot be evaluated as a rule.
        if isinstance(self.default_rule, dict):
            raise KeyError(key)

        # If the default rule isn't actually defined, do something
        # reasonably intelligent
        if not self.default_rule:
            raise KeyError(key)

        if isinstance(self.default_rule, BaseCheck):
            return self.default_rule

        # We need to check this or we can get infinite recursion
        if self.default_rule not in self:
            raise KeyError(key)

        elif isinstance(self.default_rule, six.string_types):
            return self[self.default_rule]

    def __str__(self):
        """Dumps a string representation of the rules."""

        # Start by building the canonical strings for the rules
        out_rules = {}
        for key, value in self.items():
            # Use empty string for singleton TrueCheck instances
            if isinstance(value, TrueCheck):
                out_rules[key] = ''
            else:
                out_rules[key] = str(value)

        # Dump a pretty-printed JSON representation
        return jsonutils.dumps(out_rules, indent=4)
|
||||
|
||||
|
||||
class Enforcer(object):
    """Responsible for loading and enforcing rules.

    :param policy_file: Custom policy file to use, if none is
                        specified, `CONF.policy_file` will be
                        used.
    :param rules: Default dictionary / Rules to use. It will be
                  considered just in the first instantiation. If
                  `load_rules(True)`, `clear()` or `set_rules(True)`
                  is called this will be overwritten.
    :param default_rule: Default rule to use, CONF.default_rule will
                         be used if none is specified.
    :param use_conf: Whether to load rules from cache or config file.
    :param overwrite: Whether to overwrite existing rules when reload rules
                      from config file.
    """

    def __init__(self, policy_file=None, rules=None,
                 default_rule=None, use_conf=True, overwrite=True):
        self.default_rule = default_rule or CONF.policy_default_rule
        self.rules = Rules(rules, self.default_rule)

        # Resolved lazily on first load_rules() call.
        self.policy_path = None
        self.policy_file = policy_file or CONF.policy_file
        self.use_conf = use_conf
        self.overwrite = overwrite

    def set_rules(self, rules, overwrite=True, use_conf=False):
        """Create a new Rules object based on the provided dict of rules.

        :param rules: New rules to use. It should be an instance of dict.
        :param overwrite: Whether to overwrite current rules or update them
                          with the new rules.
        :param use_conf: Whether to reload rules from cache or config file.
        """

        if not isinstance(rules, dict):
            raise TypeError(_("Rules must be an instance of dict or Rules, "
                              "got %s instead") % type(rules))
        self.use_conf = use_conf
        if overwrite:
            self.rules = Rules(rules, self.default_rule)
        else:
            self.rules.update(rules)

    def clear(self):
        """Clears Enforcer rules, policy's cache and policy's path."""
        self.set_rules({})
        fileutils.delete_cached_file(self.policy_path)
        self.default_rule = None
        self.policy_path = None

    def load_rules(self, force_reload=False):
        """Loads policy_path's rules.

        Policy file is cached and will be reloaded if modified.

        :param force_reload: Whether to reload rules from config file.
        """

        if force_reload:
            self.use_conf = force_reload

        if self.use_conf:
            if not self.policy_path:
                self.policy_path = self._get_policy_path(self.policy_file)

            self._load_policy_file(self.policy_path, force_reload,
                                   overwrite=self.overwrite)
            # Merge in rules from any configured policy.d-style directories;
            # missing directories are silently skipped.
            for path in CONF.policy_dirs:
                try:
                    path = self._get_policy_path(path)
                except cfg.ConfigFilesNotFoundError:
                    continue
                self._walk_through_policy_directory(path,
                                                    self._load_policy_file,
                                                    force_reload, False)

    @staticmethod
    def _walk_through_policy_directory(path, func, *args):
        # We do not iterate over sub-directories.
        policy_files = next(os.walk(path))[2]
        # Sorted so later (alphabetically) files win on conflicting keys.
        policy_files.sort()
        for policy_file in [p for p in policy_files if not p.startswith('.')]:
            func(os.path.join(path, policy_file), *args)

    def _load_policy_file(self, path, force_reload, overwrite=True):
        # read_cached_file only re-reads when the mtime changed or a reload
        # was forced.
        reloaded, data = fileutils.read_cached_file(
            path, force_reload=force_reload)
        if reloaded or not self.rules or not overwrite:
            rules = Rules.load_json(data, self.default_rule)
            self.set_rules(rules, overwrite=overwrite, use_conf=True)
            LOG.debug("Reloaded policy file: %(path)s",
                      {'path': path})

    def _get_policy_path(self, path):
        """Locate the policy json data file/path.

        :param path: It's value can be a full path or related path. When
                     full path specified, this function just returns the full
                     path. When related path specified, this function will
                     search configuration directories to find one that exists.

        :returns: The policy path

        :raises: ConfigFilesNotFoundError if the file/path couldn't
                 be located.
        """
        policy_path = CONF.find_file(path)

        if policy_path:
            return policy_path

        raise cfg.ConfigFilesNotFoundError((path,))

    def enforce(self, rule, target, creds, do_raise=False,
                exc=None, *args, **kwargs):
        """Checks authorization of a rule against the target and credentials.

        :param rule: A string or BaseCheck instance specifying the rule
                     to evaluate.
        :param target: As much information about the object being operated
                       on as possible, as a dictionary.
        :param creds: As much information about the user performing the
                      action as possible, as a dictionary.
        :param do_raise: Whether to raise an exception or not if check
                         fails.
        :param exc: Class of the exception to raise if the check fails.
                    Any remaining arguments passed to enforce() (both
                    positional and keyword arguments) will be passed to
                    the exception class. If not specified, PolicyNotAuthorized
                    will be used.

        :return: Returns False if the policy does not allow the action and
                 exc is not provided; otherwise, returns a value that
                 evaluates to True. Note: for rules using the "case"
                 expression, this True value will be the specified string
                 from the expression.
        """

        self.load_rules()

        # Allow the rule to be a Check tree
        if isinstance(rule, BaseCheck):
            result = rule(target, creds, self)
        elif not self.rules:
            # No rules to reference means we're going to fail closed
            result = False
        else:
            try:
                # Evaluate the rule
                result = self.rules[rule](target, creds, self)
            except KeyError:
                LOG.debug("Rule [%s] doesn't exist" % rule)
                # If the rule doesn't exist, fail closed
                result = False

        # If it is False, raise the exception if requested
        if do_raise and not result:
            if exc:
                raise exc(*args, **kwargs)

            raise PolicyNotAuthorized(rule)

        return result
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
class BaseCheck(object):
    """Abstract base class for Check classes."""

    @abc.abstractmethod
    def __str__(self):
        """Return the policy-language representation of this check tree."""

    @abc.abstractmethod
    def __call__(self, target, cred, enforcer):
        """Perform the check when the instance is called.

        Returns False to reject the access, or a true value (not
        necessarily True) to accept it.
        """
||||
class FalseCheck(BaseCheck):
    """A policy check that rejects every request."""

    def __str__(self):
        """Render the policy-language form of this check."""
        return "!"

    def __call__(self, target, cred, enforcer):
        """Always deny access."""
        return False
||||
class TrueCheck(BaseCheck):
    """A policy check that accepts every request."""

    def __str__(self):
        """Render the policy-language form of this check."""
        return "@"

    def __call__(self, target, cred, enforcer):
        """Always allow access."""
        return True
||||
class Check(BaseCheck):
    """Base class for user-defined checks of the form ``kind:match``."""

    def __init__(self, kind, match):
        """Store the two halves of a kind:match check string.

        :param kind: The kind of the check, i.e., the field before the
                     ':'.
        :param match: The match of the check, i.e., the field after
                      the ':'.
        """
        self.kind = kind
        self.match = match

    def __str__(self):
        """Render the check back into its kind:match form."""
        return "%s:%s" % (self.kind, self.match)
|
||||
class NotCheck(BaseCheck):
    """Implements the "not" logical operator.

    Wraps another policy check and inverts its result.
    """

    def __init__(self, rule):
        """Initialize the 'not' check.

        :param rule: The rule to negate.  Must be a Check.
        """
        self.rule = rule

    def __str__(self):
        """Render as 'not <rule>' in the policy language."""
        return "not %s" % self.rule

    def __call__(self, target, cred, enforcer):
        """Return the logical inverse of the wrapped check."""
        inner = self.rule(target, cred, enforcer)
        return not inner
||||
class AndCheck(BaseCheck):
    """Implements the "and" logical operator.

    Passes only when every one of its child checks passes.
    """

    def __init__(self, rules):
        """Initialize the 'and' check.

        :param rules: A list of rules that will be tested.
        """
        self.rules = rules

    def __str__(self):
        """Render as '(a and b and ...)' in the policy language."""
        return "(%s)" % ' and '.join(str(r) for r in self.rules)

    def __call__(self, target, cred, enforcer):
        """Require every child rule to accept; short-circuits on the
        first rejection.
        """
        return all(rule(target, cred, enforcer) for rule in self.rules)

    def add_check(self, rule):
        """Append one more rule to be tested.

        Returns the AndCheck object for convenience (chaining).
        """
        self.rules.append(rule)
        return self
||||
|
||||
class OrCheck(BaseCheck):
    """Implements the "or" operator.

    Passes when at least one of its child checks passes.
    """

    def __init__(self, rules):
        """Initialize the 'or' check.

        :param rules: A list of rules that will be tested.
        """
        self.rules = rules

    def __str__(self):
        """Render as '(a or b or ...)' in the policy language."""
        return "(%s)" % ' or '.join(str(r) for r in self.rules)

    def __call__(self, target, cred, enforcer):
        """Accept as soon as any child rule accepts; short-circuits on
        the first acceptance.
        """
        return any(rule(target, cred, enforcer) for rule in self.rules)

    def add_check(self, rule):
        """Append one more rule to be tested.

        Returns the OrCheck object for convenience (chaining).
        """
        self.rules.append(rule)
        return self
||||
|
||||
def _parse_check(rule):
    """Parse a single base check rule into an appropriate Check object.

    :param rule: a single check string, e.g. '!', '@' or 'kind:match'
    :returns: a Check instance; unparseable rules fail closed as
              FalseCheck
    """

    # Handle the special checks
    if rule == '!':
        return FalseCheck()
    elif rule == '@':
        return TrueCheck()

    try:
        kind, match = rule.split(':', 1)
    except Exception:
        # FIX: pass the rule as a lazy logging argument instead of
        # eagerly %-formatting the translated message (oslo logging
        # guideline; also avoids formatting work when the level is off).
        LOG.exception(_LE("Failed to understand rule %s"), rule)
        # If the rule is invalid, we'll fail closed
        return FalseCheck()

    # Find what implements the check
    if kind in _checks:
        return _checks[kind](kind, match)
    elif None in _checks:
        # Fall back to the generic handler registered under None.
        return _checks[None](kind, match)
    else:
        # FIX: lazy logging argument here as well.
        LOG.error(_LE("No handler for matches of kind %s"), kind)
        return FalseCheck()
||||
|
||||
def _parse_list_rule(rule):
    """Translate the old list-of-lists policy syntax into a Check tree.

    Provided for backwards compatibility: the outer list is joined by
    "or", each inner list by "and".
    """

    # An empty rule always accepts.
    if not rule:
        return TrueCheck()

    or_list = []
    for inner_rule in rule:
        # Elide empty inner lists
        if not inner_rule:
            continue

        # A bare string behaves like a one-element inner list.
        if isinstance(inner_rule, six.string_types):
            inner_rule = [inner_rule]

        # Parse the inner rules into Check objects and AND them together.
        checks = [_parse_check(r) for r in inner_rule]
        or_list.append(checks[0] if len(checks) == 1 else AndCheck(checks))

    # If we have only one check, omit the "or"
    if not or_list:
        return FalseCheck()
    elif len(or_list) == 1:
        return or_list[0]

    return OrCheck(or_list)
||||
|
||||
# Used for tokenizing the policy language: tokens are delimited by
# runs of whitespace.
_tokenize_re = re.compile(r'\s+')
|
||||
|
||||
def _parse_tokenize(rule):
    """Tokenizer for the policy language; yields (token_kind, value) pairs.

    Most of the single-character tokens are specified in the
    _tokenize_re; however, parentheses need to be handled specially,
    because they can appear inside a check string. Thankfully, those
    parentheses that appear inside a check string can never occur at
    the very beginning or end ("%(variable)s" is the correct syntax).
    """

    for tok in _tokenize_re.split(rule):
        # Skip empty tokens
        if not tok or tok.isspace():
            continue

        # Handle leading parens on the token: emit one '(' token per
        # stripped character.
        clean = tok.lstrip('(')
        for i in range(len(tok) - len(clean)):
            yield '(', '('

        # If it was only parentheses, continue
        if not clean:
            continue
        else:
            tok = clean

        # Handle trailing parens on the token; they are emitted AFTER
        # the token itself, at the bottom of the loop.
        clean = tok.rstrip(')')
        trail = len(tok) - len(clean)

        # Yield the cleaned token
        lowered = clean.lower()
        if lowered in ('and', 'or', 'not'):
            # Special tokens (logical operators)
            yield lowered, clean
        elif clean:
            # Not a special token, but not composed solely of ')'
            if len(tok) >= 2 and ((tok[0], tok[-1]) in
                                  [('"', '"'), ("'", "'")]):
                # It's a quoted string; strip the quotes
                yield 'string', tok[1:-1]
            else:
                # Anything else is a base check, parsed immediately
                yield 'check', _parse_check(clean)

        # Yield the trailing parens
        for i in range(trail):
            yield ')', ')'
||||
|
||||
class ParseStateMeta(type):
    """Metaclass for the ParseState class.

    Facilitates identifying reduction methods: collects every method
    decorated with @reducer into a class-level 'reducers' table.
    """

    def __new__(mcs, name, bases, cls_dict):
        """Create the class.

        Injects the 'reducers' list, a list of tuples matching token sequences
        to the names of the corresponding reduction methods.
        """

        reducers = []

        for key, value in cls_dict.items():
            # Only methods decorated with @reducer carry a 'reducers'
            # attribute (a list of trigger token sequences).
            if not hasattr(value, 'reducers'):
                continue
            for reduction in value.reducers:
                # Map each trigger sequence to the method's name; the
                # method itself is looked up via getattr at reduce time.
                reducers.append((reduction, key))

        cls_dict['reducers'] = reducers

        return super(ParseStateMeta, mcs).__new__(mcs, name, bases, cls_dict)
||||
|
||||
def reducer(*tokens):
    """Mark a method as a reduction rule for the policy parser.

    The positional arguments are the token sequence, in order, that
    should trigger running the decorated reduction method.  A method
    may be decorated several times to register several sequences.
    """

    def decorator(func):
        # Lazily create the per-function list of trigger sequences so
        # stacked @reducer decorators accumulate.
        sequences = getattr(func, 'reducers', None)
        if sequences is None:
            sequences = func.reducers = []
        sequences.append(list(tokens))
        return func

    return decorator
||||
|
||||
@six.add_metaclass(ParseStateMeta)
class ParseState(object):
    """Implement the core of parsing the policy language.

    Uses a greedy reduction algorithm to reduce a sequence of tokens into
    a single terminal, the value of which will be the root of the Check tree.

    The 'reducers' table consulted by reduce() is injected by
    ParseStateMeta from the @reducer-decorated methods below.

    Note: error reporting is rather lacking.  The best we can get with
    this parser formulation is an overall "parse failed" error.
    Fortunately, the policy language is simple enough that this
    shouldn't be that big a problem.
    """

    def __init__(self):
        """Initialize the ParseState with empty token/value stacks."""

        self.tokens = []
        self.values = []

    def reduce(self):
        """Perform a greedy reduction of the token stream.

        If a reducer method matches, it will be executed, then the
        reduce() method will be called recursively to search for any more
        possible reductions.
        """

        for reduction, methname in self.reducers:
            # Match when the top of the token stack equals the trigger
            # sequence registered for this reducer method.
            if (len(self.tokens) >= len(reduction) and
                    self.tokens[-len(reduction):] == reduction):
                # Get the reduction method
                meth = getattr(self, methname)

                # Reduce the token stream
                results = meth(*self.values[-len(reduction):])

                # Update the tokens and values: the matched suffix is
                # replaced by the reducer's (token, value) results.
                self.tokens[-len(reduction):] = [r[0] for r in results]
                self.values[-len(reduction):] = [r[1] for r in results]

                # Check for any more reductions
                return self.reduce()

    def shift(self, tok, value):
        """Adds one more token to the state.  Calls reduce()."""

        self.tokens.append(tok)
        self.values.append(value)

        # Do a greedy reduce...
        self.reduce()

    @property
    def result(self):
        """Obtain the final result of the parse.

        Raises ValueError if the parse failed to reduce to a single result.
        """

        if len(self.values) != 1:
            raise ValueError("Could not parse rule")
        return self.values[0]

    @reducer('(', 'check', ')')
    @reducer('(', 'and_expr', ')')
    @reducer('(', 'or_expr', ')')
    def _wrap_check(self, _p1, check, _p2):
        """Turn parenthesized expressions into a 'check' token."""

        return [('check', check)]

    @reducer('check', 'and', 'check')
    def _make_and_expr(self, check1, _and, check2):
        """Create an 'and_expr'.

        Join two checks by the 'and' operator.
        """

        return [('and_expr', AndCheck([check1, check2]))]

    @reducer('and_expr', 'and', 'check')
    def _extend_and_expr(self, and_expr, _and, check):
        """Extend an 'and_expr' by adding one more check."""

        return [('and_expr', and_expr.add_check(check))]

    @reducer('check', 'or', 'check')
    def _make_or_expr(self, check1, _or, check2):
        """Create an 'or_expr'.

        Join two checks by the 'or' operator.
        """

        return [('or_expr', OrCheck([check1, check2]))]

    @reducer('or_expr', 'or', 'check')
    def _extend_or_expr(self, or_expr, _or, check):
        """Extend an 'or_expr' by adding one more check."""

        return [('or_expr', or_expr.add_check(check))]

    @reducer('not', 'check')
    def _make_not_expr(self, _not, check):
        """Invert the result of another check."""

        return [('check', NotCheck(check))]
||||
|
||||
def _parse_text_rule(rule):
    """Parses policy to the tree.

    Translates a policy written in the policy language into a tree of
    Check objects.

    :param rule: the policy-language string to parse
    :returns: the root Check of the parsed tree; unparseable rules fail
              closed as FalseCheck
    """

    # Empty rule means always accept
    if not rule:
        return TrueCheck()

    # Parse the token stream
    state = ParseState()
    for tok, value in _parse_tokenize(rule):
        state.shift(tok, value)

    try:
        return state.result
    except ValueError:
        # Couldn't parse the rule
        # FIX: pass the rule as a lazy logging argument instead of
        # eagerly %-formatting the translated message (oslo logging
        # guideline).
        LOG.exception(_LE("Failed to understand rule %s"), rule)

        # Fail closed
        return FalseCheck()
||||
|
||||
def parse_rule(rule):
    """Parses a policy rule into a tree of Check objects.

    A string is interpreted in the policy language; any other value is
    assumed to be the legacy list-of-lists form.
    """

    if isinstance(rule, six.string_types):
        # New-style: the rule is written in the policy language.
        return _parse_text_rule(rule)
    # Old-style: list-of-lists syntax.
    return _parse_list_rule(rule)
||||
|
||||
def register(name, func=None):
    """Register a function or Check class as a policy check.

    :param name: Gives the name of the check type, e.g., 'rule',
                 'role', etc.  If name is None, a default check type
                 will be registered.
    :param func: If given, provides the function or class to register.
                 If not given, returns a function taking one argument
                 to specify the function or class to register,
                 allowing use as a decorator.
    """

    def decorator(check_impl):
        # Record the implementation and hand it back unchanged so this
        # also complies with the decorator interface.
        _checks[name] = check_impl
        return check_impl

    # Direct-call form registers immediately; otherwise return the
    # decorator for later application.
    return decorator(func) if func else decorator
||||
|
||||
@register("rule")
|
||||
class RuleCheck(Check):
|
||||
def __call__(self, target, creds, enforcer):
|
||||
"""Recursively checks credentials based on the defined rules."""
|
||||
|
||||
try:
|
||||
return enforcer.rules[self.match](target, creds, enforcer)
|
||||
except KeyError:
|
||||
# We don't have any matching rule; fail closed
|
||||
return False
|
||||
|
||||
|
||||
@register("role")
|
||||
class RoleCheck(Check):
|
||||
def __call__(self, target, creds, enforcer):
|
||||
"""Check that there is a matching role in the cred dict."""
|
||||
|
||||
return self.match.lower() in [x.lower() for x in creds['roles']]
|
||||
|
||||
|
||||
@register('http')
class HttpCheck(Check):
    def __call__(self, target, creds, enforcer):
        """Check http: rules by calling to a remote server.

        This example implementation simply verifies that the response
        body is exactly 'True'.

        :param target: dict describing the object being operated on;
                       also used to %-interpolate the URL
        :param creds: dict describing the requesting user
        :param enforcer: the active Enforcer (unused here)
        :returns: True only when the remote server replies "True"
        """

        url = ('http:' + self.match) % target

        # Convert instances of object() in target temporarily to
        # empty dict to avoid circular reference detection
        # errors in jsonutils.dumps().
        temp_target = copy.deepcopy(target)
        for key in target.keys():
            element = target.get(key)
            if type(element) is object:
                temp_target[key] = {}

        data = {'target': jsonutils.dumps(temp_target),
                'credentials': jsonutils.dumps(creds)}
        # FIX: urlopen() requires bytes for the POST body on Python 3,
        # and read() returns bytes there, so comparing against the text
        # "True" was always False.  Encode the request and decode the
        # response explicitly; both operations are no-ops-in-effect on
        # Python 2 str.
        post_data = urlparse.urlencode(data).encode('utf-8')
        f = urlrequest.urlopen(url, post_data)
        return f.read().decode('utf-8') == "True"
||||
|
||||
@register(None)
class GenericCheck(Check):
    # Fallback handler: used for any check kind without a dedicated class.
    def __call__(self, target, creds, enforcer):
        """Check an individual match.

        Matches look like:

            tenant:%(tenant_id)s
            role:compute:admin
            True:%(user.enabled)s
            'Member':%(role.name)s
        """

        try:
            # Interpolate values from the target into the right-hand side.
            match = self.match % target
        except KeyError:
            # While doing GenericCheck if key not
            # present in Target return false
            return False

        try:
            # Try to interpret self.kind as a literal
            leftval = ast.literal_eval(self.kind)
        except ValueError:
            try:
                # Not a literal: treat the kind as a dotted path into
                # the credentials dict (e.g. 'user.enabled').
                kind_parts = self.kind.split('.')
                leftval = creds
                for kind_part in kind_parts:
                    leftval = leftval[kind_part]
            except KeyError:
                # Credentials don't contain the requested key; fail closed.
                return False
        # Compare as text: the interpolated match is always a string.
        return match == six.text_type(leftval)
|
@ -1,25 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides a way to generate serializable reports
|
||||
|
||||
This package/module provides mechanisms for defining reports
|
||||
which may then be serialized into various data types. Each
|
||||
report ( :class:`openstack.common.report.report.BasicReport` )
|
||||
is composed of one or more report sections
|
||||
( :class:`openstack.common.report.report.BasicSection` ),
|
||||
which contain generators which generate data models
|
||||
( :class:`openstack.common.report.models.base.ReportModels` ),
|
||||
which are then serialized by views.
|
||||
"""
|
@ -1,21 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides Data Model Generators
|
||||
|
||||
This module defines classes for generating data models
|
||||
( :class:`openstack.common.report.models.base.ReportModel` ).
|
||||
A generator is any object which is callable with no parameters
|
||||
and returns a data model.
|
||||
"""
|
@ -1,44 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides OpenStack config generators
|
||||
|
||||
This module defines a class for configuration
|
||||
generators for generating the model in
|
||||
:mod:`openstack.common.report.models.conf`.
|
||||
"""
|
||||
|
||||
from oslo_config import cfg
|
||||
|
||||
from nova.openstack.common.report.models import conf as cm
|
||||
|
||||
|
||||
class ConfigReportGenerator(object):
    """A Configuration Data Generator

    Produces a
    :class:`openstack.common.report.models.conf.ConfigModel` from a
    configuration option object, defaulting to the global
    :attr:`oslo_config.cfg.CONF` where OpenStack stores everything.

    :param cnf: the configuration option object
    :type cnf: :class:`oslo_config.cfg.ConfigOpts`
    """

    def __init__(self, cnf=cfg.CONF):
        self.conf_obj = cnf

    def __call__(self):
        # Wrap the configuration object in its report model.
        return cm.ConfigModel(self.conf_obj)
|
@ -1,38 +0,0 @@
|
||||
# Copyright 2014 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides process-data generators
|
||||
|
||||
This modules defines a class for generating
|
||||
process data by way of the psutil package.
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
import psutil
|
||||
|
||||
from nova.openstack.common.report.models import process as pm
|
||||
|
||||
|
||||
class ProcessReportGenerator(object):
    """A Process Data Generator

    Produces a
    :class:`openstack.common.report.models.process.ProcessModel` for the
    current process (which will also include all subprocesses,
    recursively), backed by the :class:`psutil.Process` class.
    """

    def __call__(self):
        # Build the model around a psutil handle on our own PID.
        current_process = psutil.Process(os.getpid())
        return pm.ProcessModel(current_process)
|
@ -1,86 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides thread-related generators
|
||||
|
||||
This module defines classes for threading-related
|
||||
generators for generating the models in
|
||||
:mod:`openstack.common.report.models.threading`.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import sys
|
||||
import threading
|
||||
|
||||
import greenlet
|
||||
|
||||
from nova.openstack.common.report.models import threading as tm
|
||||
from nova.openstack.common.report.models import with_default_views as mwdv
|
||||
from nova.openstack.common.report import utils as rutils
|
||||
from nova.openstack.common.report.views.text import generic as text_views
|
||||
|
||||
|
||||
class ThreadReportGenerator(object):
    """A Thread Data Generator

    Produces one
    :class:`openstack.common.report.models.threading.ThreadModel` per
    python thread, discovered through :func:`sys._current_frames()`.
    The constructor may optionally be given a frame object; at
    generation time it replaces the stack trace of the thread in which
    this code is running.
    """

    def __init__(self, curr_thread_traceback=None):
        self.traceback = curr_thread_traceback

    def __call__(self):
        threadModels = {
            thread_id: tm.ThreadModel(thread_id, stack)
            for thread_id, stack in sys._current_frames().items()
        }

        if self.traceback is not None:
            # Substitute the caller-supplied traceback for this thread's
            # own (otherwise we'd just see this generator on the stack).
            curr_thread_id = threading.current_thread().ident
            threadModels[curr_thread_id] = tm.ThreadModel(curr_thread_id,
                                                          self.traceback)

        return mwdv.ModelWithDefaultViews(threadModels,
                                          text_view=text_views.MultiView())
||||
|
||||
class GreenThreadReportGenerator(object):
    """A Green Thread Data Generator

    Produces a collection of
    :class:`openstack.common.report.models.threading.GreenThreadModel`
    objects by sifting the python garbage-collection state for
    :class:`greenlet.greenlet` instances.

    .. seealso::

        Function :func:`openstack.common.report.utils._find_objects`
    """

    def __call__(self):
        models = [
            tm.GreenThreadModel(green.gr_frame)
            for green in rutils._find_objects(greenlet.greenlet)
        ]

        return mwdv.ModelWithDefaultViews(models,
                                          text_view=text_views.MultiView())
|
@ -1,46 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides OpenStack version generators
|
||||
|
||||
This module defines a class for OpenStack
|
||||
version and package information
|
||||
generators for generating the model in
|
||||
:mod:`openstack.common.report.models.version`.
|
||||
"""
|
||||
|
||||
from nova.openstack.common.report.models import version as vm
|
||||
|
||||
|
||||
class PackageReportGenerator(object):
    """A Package Information Data Generator

    Produces a
    :class:`openstack.common.report.models.version.PackageModel` from a
    version object following the general format defined in Nova's
    version information (i.e. it exposes the methods vendor_string,
    product_string, and version_string_with_package).

    :param version_obj: the version information object
    """

    def __init__(self, version_obj):
        self.version_obj = version_obj

    def __call__(self):
        version = self.version_obj
        return vm.PackageModel(version.vendor_string(),
                               version.product_string(),
                               version.version_string_with_package())
|
@ -1,226 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides Guru Meditation Report
|
||||
|
||||
This module defines the actual OpenStack Guru Meditation
|
||||
Report class.
|
||||
|
||||
This can be used in the OpenStack command definition files.
|
||||
For example, in a nova command module (under nova/cmd):
|
||||
|
||||
.. code-block:: python
|
||||
:emphasize-lines: 8,9,10
|
||||
|
||||
CONF = cfg.CONF
|
||||
# maybe import some options here...
|
||||
|
||||
def main():
|
||||
config.parse_args(sys.argv)
|
||||
logging.setup('blah')
|
||||
|
||||
TextGuruMeditation.register_section('Some Special Section',
|
||||
special_section_generator)
|
||||
TextGuruMeditation.setup_autorun(version_object)
|
||||
|
||||
server = service.Service.create(binary='some-service',
|
||||
topic=CONF.some_service_topic)
|
||||
service.serve(server)
|
||||
service.wait()
|
||||
|
||||
Then, you can do
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ kill -USR1 $SERVICE_PID
|
||||
|
||||
and get a Guru Meditation Report in the file or terminal
|
||||
where stderr is logged for that given service.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import inspect
|
||||
import os
|
||||
import signal
|
||||
import sys
|
||||
|
||||
from oslo_utils import timeutils
|
||||
|
||||
from nova.openstack.common.report.generators import conf as cgen
|
||||
from nova.openstack.common.report.generators import process as prgen
|
||||
from nova.openstack.common.report.generators import threading as tgen
|
||||
from nova.openstack.common.report.generators import version as pgen
|
||||
from nova.openstack.common.report import report
|
||||
|
||||
|
||||
class GuruMeditation(object):
|
||||
"""A Guru Meditation Report Mixin/Base Class
|
||||
|
||||
This class is a base class for Guru Meditation Reports.
|
||||
It provides facilities for registering sections and
|
||||
setting up functionality to auto-run the report on
|
||||
a certain signal.
|
||||
|
||||
This class should always be used in conjunction with
|
||||
a Report class via multiple inheritance. It should
|
||||
always come first in the class list to ensure the
|
||||
MRO is correct.
|
||||
"""
|
||||
|
||||
timestamp_fmt = "%Y%m%d%H%M%S"
|
||||
|
||||
def __init__(self, version_obj, sig_handler_tb=None, *args, **kwargs):
|
||||
self.version_obj = version_obj
|
||||
self.traceback = sig_handler_tb
|
||||
|
||||
super(GuruMeditation, self).__init__(*args, **kwargs)
|
||||
self.start_section_index = len(self.sections)
|
||||
|
||||
@classmethod
|
||||
def register_section(cls, section_title, generator):
|
||||
"""Register a New Section
|
||||
|
||||
This method registers a persistent section for the current
|
||||
class.
|
||||
|
||||
:param str section_title: the title of the section
|
||||
:param generator: the generator for the section
|
||||
"""
|
||||
|
||||
try:
|
||||
cls.persistent_sections.append([section_title, generator])
|
||||
except AttributeError:
|
||||
cls.persistent_sections = [[section_title, generator]]
|
||||
|
||||
@classmethod
|
||||
def setup_autorun(cls, version, service_name=None,
|
||||
log_dir=None, signum=None):
|
||||
"""Set Up Auto-Run
|
||||
|
||||
This method sets up the Guru Meditation Report to automatically
|
||||
get dumped to stderr or a file in a given dir when the given signal
|
||||
is received.
|
||||
|
||||
:param version: the version object for the current product
|
||||
:param service_name: this program name used to construct logfile name
|
||||
:param logdir: path to a log directory where to create a file
|
||||
:param signum: the signal to associate with running the report
|
||||
"""
|
||||
|
||||
if not signum and hasattr(signal, 'SIGUSR1'):
|
||||
# SIGUSR1 is not supported on all platforms
|
||||
signum = signal.SIGUSR1
|
||||
|
||||
if signum:
|
||||
signal.signal(signum,
|
||||
lambda sn, tb: cls.handle_signal(
|
||||
version, service_name, log_dir, tb))
|
||||
|
||||
@classmethod
|
||||
def handle_signal(cls, version, service_name, log_dir, traceback):
|
||||
"""The Signal Handler
|
||||
|
||||
This method (indirectly) handles receiving a registered signal and
|
||||
dumping the Guru Meditation Report to stderr or a file in a given dir.
|
||||
If service name and log dir are not None, the report will be dumped to
|
||||
a file named $service_name_gurumeditation_$current_time in the log_dir
|
||||
directory.
|
||||
This method is designed to be curried into a proper signal handler by
|
||||
currying out the version
|
||||
parameter.
|
||||
|
||||
:param version: the version object for the current product
|
||||
:param service_name: this program name used to construct logfile name
|
||||
:param logdir: path to a log directory where to create a file
|
||||
:param traceback: the traceback provided to the signal handler
|
||||
"""
|
||||
|
||||
try:
|
||||
res = cls(version, traceback).run()
|
||||
except Exception:
|
||||
print("Unable to run Guru Meditation Report!",
|
||||
file=sys.stderr)
|
||||
else:
|
||||
if log_dir:
|
||||
service_name = service_name or os.path.basename(
|
||||
inspect.stack()[-1][1])
|
||||
filename = "%s_gurumeditation_%s" % (
|
||||
service_name, timeutils.strtime(fmt=cls.timestamp_fmt))
|
||||
filepath = os.path.join(log_dir, filename)
|
||||
try:
|
||||
with open(filepath, "w") as dumpfile:
|
||||
dumpfile.write(res)
|
||||
except Exception:
|
||||
print("Unable to dump Guru Meditation Report to file %s" %
|
||||
(filepath,), file=sys.stderr)
|
||||
else:
|
||||
print(res, file=sys.stderr)
|
||||
|
||||
def _readd_sections(self):
|
||||
del self.sections[self.start_section_index:]
|
||||
|
||||
self.add_section('Package',
|
||||
pgen.PackageReportGenerator(self.version_obj))
|
||||
|
||||
self.add_section('Threads',
|
||||
tgen.ThreadReportGenerator(self.traceback))
|
||||
|
||||
self.add_section('Green Threads',
|
||||
tgen.GreenThreadReportGenerator())
|
||||
|
||||
self.add_section('Processes',
|
||||
prgen.ProcessReportGenerator())
|
||||
|
||||
self.add_section('Configuration',
|
||||
cgen.ConfigReportGenerator())
|
||||
|
||||
try:
|
||||
for section_title, generator in self.persistent_sections:
|
||||
self.add_section(section_title, generator)
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
def run(self):
|
||||
self._readd_sections()
|
||||
return super(GuruMeditation, self).run()
|
||||
|
||||
|
||||
# GuruMeditation must come first to get the correct MRO
|
||||
class TextGuruMeditation(GuruMeditation, report.TextReport):
|
||||
"""A Text Guru Meditation Report
|
||||
|
||||
This report is the basic human-readable Guru Meditation Report
|
||||
|
||||
It contains the following sections by default
|
||||
(in addition to any registered persistent sections):
|
||||
|
||||
- Package Information
|
||||
|
||||
- Threads List
|
||||
|
||||
- Green Threads List
|
||||
|
||||
- Process List
|
||||
|
||||
- Configuration Options
|
||||
|
||||
:param version_obj: the version object for the current product
|
||||
:param traceback: an (optional) frame object providing the actual
|
||||
traceback for the current thread
|
||||
"""
|
||||
|
||||
def __init__(self, version_obj, traceback=None):
|
||||
super(TextGuruMeditation, self).__init__(version_obj, traceback,
|
||||
'Guru Meditation')
|
@ -1,20 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides data models
|
||||
|
||||
This module provides both the base data model,
|
||||
as well as several predefined specific data models
|
||||
to be used in reports.
|
||||
"""
|
@ -1,162 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides the base report model
|
||||
|
||||
This module defines a class representing the basic report
|
||||
data model from which all data models should inherit (or
|
||||
at least implement similar functionality). Data models
|
||||
store unserialized data generated by generators during
|
||||
the report serialization process.
|
||||
"""
|
||||
|
||||
import collections as col
|
||||
import copy
|
||||
|
||||
import six
|
||||
|
||||
|
||||
class ReportModel(col.MutableMapping):
|
||||
"""A Report Data Model
|
||||
|
||||
A report data model contains data generated by some
|
||||
generator method or class. Data may be read or written
|
||||
using dictionary-style access, and may be read (but not
|
||||
written) using object-member-style access. Additionally,
|
||||
a data model may have an associated view. This view is
|
||||
used to serialize the model when str() is called on the
|
||||
model. An appropriate object for a view is callable with
|
||||
a single parameter: the model to be serialized.
|
||||
|
||||
If present, the object passed in as data will be transformed
|
||||
into a standard python dict. For mappings, this is fairly
|
||||
straightforward. For sequences, the indices become keys
|
||||
and the items become values.
|
||||
|
||||
:param data: a sequence or mapping of data to associate with the model
|
||||
:param attached_view: a view object to attach to this model
|
||||
"""
|
||||
|
||||
def __init__(self, data=None, attached_view=None):
|
||||
self.attached_view = attached_view
|
||||
|
||||
if data is not None:
|
||||
if isinstance(data, col.Mapping):
|
||||
self.data = dict(data)
|
||||
elif isinstance(data, col.Sequence):
|
||||
# convert a list [a, b, c] to a dict {0: a, 1: b, 2: c}
|
||||
self.data = dict(enumerate(data))
|
||||
else:
|
||||
raise TypeError('Data for the model must be a sequence '
|
||||
'or mapping.')
|
||||
else:
|
||||
self.data = {}
|
||||
|
||||
def __str__(self):
|
||||
self_cpy = copy.deepcopy(self)
|
||||
for key in self_cpy:
|
||||
if getattr(self_cpy[key], 'attached_view', None) is not None:
|
||||
self_cpy[key] = str(self_cpy[key])
|
||||
|
||||
if self.attached_view is not None:
|
||||
return self.attached_view(self_cpy)
|
||||
else:
|
||||
raise Exception("Cannot stringify model: no attached view")
|
||||
|
||||
def __repr__(self):
|
||||
if self.attached_view is not None:
|
||||
return ("<Model {cl.__module__}.{cl.__name__} {dt}"
|
||||
" with view {vw.__module__}."
|
||||
"{vw.__name__}>").format(cl=type(self),
|
||||
dt=self.data,
|
||||
vw=type(self.attached_view))
|
||||
else:
|
||||
return ("<Model {cl.__module__}.{cl.__name__} {dt}"
|
||||
" with no view>").format(cl=type(self),
|
||||
dt=self.data)
|
||||
|
||||
def __getitem__(self, attrname):
|
||||
return self.data[attrname]
|
||||
|
||||
def __setitem__(self, attrname, attrval):
|
||||
self.data[attrname] = attrval
|
||||
|
||||
def __delitem__(self, attrname):
|
||||
del self.data[attrname]
|
||||
|
||||
def __contains__(self, key):
|
||||
return self.data.__contains__(key)
|
||||
|
||||
def __getattr__(self, attrname):
|
||||
# Needed for deepcopy in Python3. That will avoid an infinite loop
|
||||
# in __getattr__ .
|
||||
if 'data' not in self.__dict__:
|
||||
self.data = {}
|
||||
|
||||
try:
|
||||
return self.data[attrname]
|
||||
except KeyError:
|
||||
# we don't have that key in data, and the
|
||||
# model class doesn't have that attribute
|
||||
raise AttributeError(
|
||||
"'{cl}' object has no attribute '{an}'".format(
|
||||
cl=type(self).__name__, an=attrname
|
||||
)
|
||||
)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.data)
|
||||
|
||||
def __iter__(self):
|
||||
return self.data.__iter__()
|
||||
|
||||
def set_current_view_type(self, tp, visited=None):
|
||||
"""Set the current view type
|
||||
|
||||
This method attempts to set the current view
|
||||
type for this model and all submodels by calling
|
||||
itself recursively on all values, traversing
|
||||
intervening sequences and mappings when possible,
|
||||
and ignoring all other objects.
|
||||
|
||||
:param tp: the type of the view ('text', 'json', 'xml', etc)
|
||||
:param visited: a set of object ids for which the corresponding objects
|
||||
have already had their view type set
|
||||
"""
|
||||
|
||||
if visited is None:
|
||||
visited = set()
|
||||
|
||||
def traverse_obj(obj):
|
||||
oid = id(obj)
|
||||
|
||||
# don't die on recursive structures,
|
||||
# and don't treat strings like sequences
|
||||
if oid in visited or isinstance(obj, six.string_types):
|
||||
return
|
||||
|
||||
visited.add(oid)
|
||||
|
||||
if hasattr(obj, 'set_current_view_type'):
|
||||
obj.set_current_view_type(tp, visited=visited)
|
||||
|
||||
if isinstance(obj, col.Sequence):
|
||||
for item in obj:
|
||||
traverse_obj(item)
|
||||
|
||||
elif isinstance(obj, col.Mapping):
|
||||
for val in six.itervalues(obj):
|
||||
traverse_obj(val)
|
||||
|
||||
traverse_obj(self)
|
@ -1,66 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides OpenStack Configuration Model
|
||||
|
||||
This module defines a class representing the data
|
||||
model for :mod:`oslo_config` configuration options
|
||||
"""
|
||||
|
||||
from nova.openstack.common.report.models import with_default_views as mwdv
|
||||
from nova.openstack.common.report.views.text import generic as generic_text_views
|
||||
|
||||
|
||||
class ConfigModel(mwdv.ModelWithDefaultViews):
|
||||
"""A Configuration Options Model
|
||||
|
||||
This model holds data about a set of configuration options
|
||||
from :mod:`oslo_config`. It supports both the default group
|
||||
of options and named option groups.
|
||||
|
||||
:param conf_obj: a configuration object
|
||||
:type conf_obj: :class:`oslo_config.cfg.ConfigOpts`
|
||||
"""
|
||||
|
||||
def __init__(self, conf_obj):
|
||||
kv_view = generic_text_views.KeyValueView(dict_sep=": ",
|
||||
before_dict='')
|
||||
super(ConfigModel, self).__init__(text_view=kv_view)
|
||||
|
||||
def opt_title(optname, co):
|
||||
return co._opts[optname]['opt'].name
|
||||
|
||||
def opt_value(opt_obj, value):
|
||||
if opt_obj['opt'].secret:
|
||||
return '***'
|
||||
else:
|
||||
return value
|
||||
|
||||
self['default'] = dict(
|
||||
(opt_title(optname, conf_obj),
|
||||
opt_value(conf_obj._opts[optname], conf_obj[optname]))
|
||||
for optname in conf_obj._opts
|
||||
)
|
||||
|
||||
groups = {}
|
||||
for groupname in conf_obj._groups:
|
||||
group_obj = conf_obj._groups[groupname]
|
||||
curr_group_opts = dict(
|
||||
(opt_title(optname, group_obj),
|
||||
opt_value(group_obj._opts[optname],
|
||||
conf_obj[groupname][optname]))
|
||||
for optname in group_obj._opts)
|
||||
groups[group_obj.name] = curr_group_opts
|
||||
|
||||
self.update(groups)
|
@ -1,62 +0,0 @@
|
||||
# Copyright 2014 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides a process model
|
||||
|
||||
This module defines a class representing a process,
|
||||
potentially with subprocesses.
|
||||
"""
|
||||
|
||||
import nova.openstack.common.report.models.with_default_views as mwdv
|
||||
import nova.openstack.common.report.views.text.process as text_views
|
||||
|
||||
|
||||
class ProcessModel(mwdv.ModelWithDefaultViews):
|
||||
"""A Process Model
|
||||
|
||||
This model holds data about a process,
|
||||
including references to any subprocesses
|
||||
|
||||
:param process: a :class:`psutil.Process` object
|
||||
"""
|
||||
|
||||
def __init__(self, process):
|
||||
super(ProcessModel, self).__init__(
|
||||
text_view=text_views.ProcessView())
|
||||
|
||||
self['pid'] = process.pid
|
||||
self['parent_pid'] = process.ppid
|
||||
if hasattr(process, 'uids'):
|
||||
self['uids'] = {'real': process.uids.real,
|
||||
'effective': process.uids.effective,
|
||||
'saved': process.uids.saved}
|
||||
else:
|
||||
self['uids'] = {'real': None,
|
||||
'effective': None,
|
||||
'saved': None}
|
||||
|
||||
if hasattr(process, 'gids'):
|
||||
self['gids'] = {'real': process.gids.real,
|
||||
'effective': process.gids.effective,
|
||||
'saved': process.gids.saved}
|
||||
else:
|
||||
self['gids'] = {'real': None,
|
||||
'effective': None,
|
||||
'saved': None}
|
||||
|
||||
self['username'] = process.username
|
||||
self['command'] = process.cmdline
|
||||
self['state'] = process.status
|
||||
|
||||
self['children'] = [ProcessModel(pr) for pr in process.get_children()]
|
@ -1,100 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides threading and stack-trace models
|
||||
|
||||
This module defines classes representing thread, green
|
||||
thread, and stack trace data models
|
||||
"""
|
||||
|
||||
import traceback
|
||||
|
||||
from nova.openstack.common.report.models import with_default_views as mwdv
|
||||
from nova.openstack.common.report.views.text import threading as text_views
|
||||
|
||||
|
||||
class StackTraceModel(mwdv.ModelWithDefaultViews):
|
||||
"""A Stack Trace Model
|
||||
|
||||
This model holds data from a python stack trace,
|
||||
commonly extracted from running thread information
|
||||
|
||||
:param stack_state: the python stack_state object
|
||||
"""
|
||||
|
||||
def __init__(self, stack_state):
|
||||
super(StackTraceModel, self).__init__(
|
||||
text_view=text_views.StackTraceView())
|
||||
|
||||
if (stack_state is not None):
|
||||
self['lines'] = [
|
||||
{'filename': fn, 'line': ln, 'name': nm, 'code': cd}
|
||||
for fn, ln, nm, cd in traceback.extract_stack(stack_state)
|
||||
]
|
||||
# FIXME(flepied): under Python3 f_exc_type doesn't exist
|
||||
# anymore so we lose information about exceptions
|
||||
if getattr(stack_state, 'f_exc_type', None) is not None:
|
||||
self['root_exception'] = {
|
||||
'type': stack_state.f_exc_type,
|
||||
'value': stack_state.f_exc_value}
|
||||
else:
|
||||
self['root_exception'] = None
|
||||
else:
|
||||
self['lines'] = []
|
||||
self['root_exception'] = None
|
||||
|
||||
|
||||
class ThreadModel(mwdv.ModelWithDefaultViews):
|
||||
"""A Thread Model
|
||||
|
||||
This model holds data for information about an
|
||||
individual thread. It holds both a thread id,
|
||||
as well as a stack trace for the thread
|
||||
|
||||
.. seealso::
|
||||
|
||||
Class :class:`StackTraceModel`
|
||||
|
||||
:param int thread_id: the id of the thread
|
||||
:param stack: the python stack state for the current thread
|
||||
"""
|
||||
|
||||
# threadId, stack in sys._current_frams().items()
|
||||
def __init__(self, thread_id, stack):
|
||||
super(ThreadModel, self).__init__(text_view=text_views.ThreadView())
|
||||
|
||||
self['thread_id'] = thread_id
|
||||
self['stack_trace'] = StackTraceModel(stack)
|
||||
|
||||
|
||||
class GreenThreadModel(mwdv.ModelWithDefaultViews):
|
||||
"""A Green Thread Model
|
||||
|
||||
This model holds data for information about an
|
||||
individual thread. Unlike the thread model,
|
||||
it holds just a stack trace, since green threads
|
||||
do not have thread ids.
|
||||
|
||||
.. seealso::
|
||||
|
||||
Class :class:`StackTraceModel`
|
||||
|
||||
:param stack: the python stack state for the green thread
|
||||
"""
|
||||
|
||||
# gr in greenpool.coroutines_running --> gr.gr_frame
|
||||
def __init__(self, stack):
|
||||
super(GreenThreadModel, self).__init__(
|
||||
{'stack_trace': StackTraceModel(stack)},
|
||||
text_view=text_views.GreenThreadView())
|
@ -1,44 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides OpenStack Version Info Model
|
||||
|
||||
This module defines a class representing the data
|
||||
model for OpenStack package and version information
|
||||
"""
|
||||
|
||||
from nova.openstack.common.report.models import with_default_views as mwdv
|
||||
from nova.openstack.common.report.views.text import generic as generic_text_views
|
||||
|
||||
|
||||
class PackageModel(mwdv.ModelWithDefaultViews):
|
||||
"""A Package Information Model
|
||||
|
||||
This model holds information about the current
|
||||
package. It contains vendor, product, and version
|
||||
information.
|
||||
|
||||
:param str vendor: the product vendor
|
||||
:param str product: the product name
|
||||
:param str version: the product version
|
||||
"""
|
||||
|
||||
def __init__(self, vendor, product, version):
|
||||
super(PackageModel, self).__init__(
|
||||
text_view=generic_text_views.KeyValueView()
|
||||
)
|
||||
|
||||
self['vendor'] = vendor
|
||||
self['product'] = product
|
||||
self['version'] = version
|
@ -1,81 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
|
||||
from nova.openstack.common.report.models import base as base_model
|
||||
from nova.openstack.common.report.views.json import generic as jsonviews
|
||||
from nova.openstack.common.report.views.text import generic as textviews
|
||||
from nova.openstack.common.report.views.xml import generic as xmlviews
|
||||
|
||||
|
||||
class ModelWithDefaultViews(base_model.ReportModel):
|
||||
"""A Model With Default Views of Various Types
|
||||
|
||||
A model with default views has several predefined views,
|
||||
each associated with a given type. This is often used for
|
||||
when a submodel should have an attached view, but the view
|
||||
differs depending on the serialization format
|
||||
|
||||
Parameters are as the superclass, except for any
|
||||
parameters ending in '_view': these parameters
|
||||
get stored as default views.
|
||||
|
||||
The default 'default views' are
|
||||
|
||||
text
|
||||
:class:`openstack.common.report.views.text.generic.KeyValueView`
|
||||
xml
|
||||
:class:`openstack.common.report.views.xml.generic.KeyValueView`
|
||||
json
|
||||
:class:`openstack.common.report.views.json.generic.KeyValueView`
|
||||
|
||||
.. function:: to_type()
|
||||
|
||||
('type' is one of the 'default views' defined for this model)
|
||||
Serializes this model using the default view for 'type'
|
||||
|
||||
:rtype: str
|
||||
:returns: this model serialized as 'type'
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.views = {
|
||||
'text': textviews.KeyValueView(),
|
||||
'json': jsonviews.KeyValueView(),
|
||||
'xml': xmlviews.KeyValueView()
|
||||
}
|
||||
|
||||
newargs = copy.copy(kwargs)
|
||||
for k in kwargs:
|
||||
if k.endswith('_view'):
|
||||
self.views[k[:-5]] = kwargs[k]
|
||||
del newargs[k]
|
||||
super(ModelWithDefaultViews, self).__init__(*args, **newargs)
|
||||
|
||||
def set_current_view_type(self, tp, visited=None):
|
||||
self.attached_view = self.views[tp]
|
||||
super(ModelWithDefaultViews, self).set_current_view_type(tp, visited)
|
||||
|
||||
def __getattr__(self, attrname):
|
||||
if attrname[:3] == 'to_':
|
||||
if self.views[attrname[3:]] is not None:
|
||||
return lambda: self.views[attrname[3:]](self)
|
||||
else:
|
||||
raise NotImplementedError((
|
||||
"Model {cn.__module__}.{cn.__name__} does not have" +
|
||||
" a default view for "
|
||||
"{tp}").format(cn=type(self), tp=attrname[3:]))
|
||||
else:
|
||||
return super(ModelWithDefaultViews, self).__getattr__(attrname)
|
@ -1,187 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides Report classes
|
||||
|
||||
This module defines various classes representing reports and report sections.
|
||||
All reports take the form of a report class containing various report
|
||||
sections.
|
||||
"""
|
||||
|
||||
from nova.openstack.common.report.views.text import header as header_views
|
||||
|
||||
|
||||
class BasicReport(object):
|
||||
"""A Basic Report
|
||||
|
||||
A Basic Report consists of a collection of :class:`ReportSection`
|
||||
objects, each of which contains a top-level model and generator.
|
||||
It collects these sections into a cohesive report which may then
|
||||
be serialized by calling :func:`run`.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.sections = []
|
||||
self._state = 0
|
||||
|
||||
def add_section(self, view, generator, index=None):
|
||||
"""Add a section to the report
|
||||
|
||||
This method adds a section with the given view and
|
||||
generator to the report. An index may be specified to
|
||||
insert the section at a given location in the list;
|
||||
If no index is specified, the section is appended to the
|
||||
list. The view is called on the model which results from
|
||||
the generator when the report is run. A generator is simply
|
||||
a method or callable object which takes no arguments and
|
||||
returns a :class:`openstack.common.report.models.base.ReportModel`
|
||||
or similar object.
|
||||
|
||||
:param view: the top-level view for the section
|
||||
:param generator: the method or class which generates the model
|
||||
:param index: the index at which to insert the section
|
||||
(or None to append it)
|
||||
:type index: int or None
|
||||
"""
|
||||
|
||||
if index is None:
|
||||
self.sections.append(ReportSection(view, generator))
|
||||
else:
|
||||
self.sections.insert(index, ReportSection(view, generator))
|
||||
|
||||
def run(self):
|
||||
"""Run the report
|
||||
|
||||
This method runs the report, having each section generate
|
||||
its data and serialize itself before joining the sections
|
||||
together. The BasicReport accomplishes the joining
|
||||
by joining the serialized sections together with newlines.
|
||||
|
||||
:rtype: str
|
||||
:returns: the serialized report
|
||||
"""
|
||||
|
||||
return "\n".join(str(sect) for sect in self.sections)
|
||||
|
||||
|
||||
class ReportSection(object):
|
||||
"""A Report Section
|
||||
|
||||
A report section contains a generator and a top-level view. When something
|
||||
attempts to serialize the section by calling str() on it, the section runs
|
||||
the generator and calls the view on the resulting model.
|
||||
|
||||
.. seealso::
|
||||
|
||||
Class :class:`BasicReport`
|
||||
:func:`BasicReport.add_section`
|
||||
|
||||
:param view: the top-level view for this section
|
||||
:param generator: the generator for this section
|
||||
(any callable object which takes no parameters and returns a data model)
|
||||
"""
|
||||
|
||||
def __init__(self, view, generator):
|
||||
self.view = view
|
||||
self.generator = generator
|
||||
|
||||
def __str__(self):
|
||||
return self.view(self.generator())
|
||||
|
||||
|
||||
class ReportOfType(BasicReport):
|
||||
"""A Report of a Certain Type
|
||||
|
||||
A ReportOfType has a predefined type associated with it.
|
||||
This type is automatically propagated down to the each of
|
||||
the sections upon serialization by wrapping the generator
|
||||
for each section.
|
||||
|
||||
.. seealso::
|
||||
|
||||
Class :class:`openstack.common.report.models.with_default_view.ModelWithDefaultView` # noqa
|
||||
(the entire class)
|
||||
|
||||
Class :class:`openstack.common.report.models.base.ReportModel`
|
||||
:func:`openstack.common.report.models.base.ReportModel.set_current_view_type` # noqa
|
||||
|
||||
:param str tp: the type of the report
|
||||
"""
|
||||
|
||||
def __init__(self, tp):
|
||||
self.output_type = tp
|
||||
super(ReportOfType, self).__init__()
|
||||
|
||||
def add_section(self, view, generator, index=None):
|
||||
def with_type(gen):
|
||||
def newgen():
|
||||
res = gen()
|
||||
try:
|
||||
res.set_current_view_type(self.output_type)
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
return res
|
||||
return newgen
|
||||
|
||||
super(ReportOfType, self).add_section(
|
||||
view,
|
||||
with_type(generator),
|
||||
index
|
||||
)
|
||||
|
||||
|
||||
class TextReport(ReportOfType):
|
||||
"""A Human-Readable Text Report
|
||||
|
||||
This class defines a report that is designed to be read by a human
|
||||
being. It has nice section headers, and a formatted title.
|
||||
|
||||
:param str name: the title of the report
|
||||
"""
|
||||
|
||||
def __init__(self, name):
|
||||
super(TextReport, self).__init__('text')
|
||||
self.name = name
|
||||
# add a title with a generator that creates an empty result model
|
||||
self.add_section(name, lambda: ('|' * 72) + "\n\n")
|
||||
|
||||
def add_section(self, heading, generator, index=None):
|
||||
"""Add a section to the report
|
||||
|
||||
This method adds a section with the given title, and
|
||||
generator to the report. An index may be specified to
|
||||
insert the section at a given location in the list;
|
||||
If no index is specified, the section is appended to the
|
||||
list. The view is called on the model which results from
|
||||
the generator when the report is run. A generator is simply
|
||||
a method or callable object which takes no arguments and
|
||||
returns a :class:`openstack.common.report.models.base.ReportModel`
|
||||
or similar object.
|
||||
|
||||
The model is told to serialize as text (if possible) at serialization
|
||||
time by wrapping the generator. The view model's attached view
|
||||
(if any) is wrapped in a
|
||||
:class:`openstack.common.report.views.text.header.TitledView`
|
||||
|
||||
:param str heading: the title for the section
|
||||
:param generator: the method or class which generates the model
|
||||
:param index: the index at which to insert the section
|
||||
(or None to append)
|
||||
:type index: int or None
|
||||
"""
|
||||
|
||||
super(TextReport, self).add_section(header_views.TitledView(heading),
|
||||
generator,
|
||||
index)
|
@ -1,46 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Various utilities for report generation
|
||||
|
||||
This module includes various utilities
|
||||
used in generating reports.
|
||||
"""
|
||||
|
||||
import gc
|
||||
|
||||
|
||||
class StringWithAttrs(str):
|
||||
"""A String that can have arbitrary attributes
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def _find_objects(t):
|
||||
"""Find Objects in the GC State
|
||||
|
||||
This horribly hackish method locates objects of a
|
||||
given class in the current python instance's garbage
|
||||
collection state. In case you couldn't tell, this is
|
||||
horribly hackish, but is necessary for locating all
|
||||
green threads, since they don't keep track of themselves
|
||||
like normal threads do in python.
|
||||
|
||||
:param class t: the class of object to locate
|
||||
:rtype: list
|
||||
:returns: a list of objects of the given type
|
||||
"""
|
||||
|
||||
return [o for o in gc.get_objects() if isinstance(o, t)]
|
@ -1,22 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides predefined views
|
||||
|
||||
This module provides a collection of predefined views
|
||||
for use in reports. It is separated by type (xml, json, or text).
|
||||
Each type contains a submodule called 'generic' containing
|
||||
several basic, universal views for that type. There is also
|
||||
a predefined view that utilizes Jinja.
|
||||
"""
|
@ -1,137 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides Jinja Views
|
||||
|
||||
This module provides views that utilize the Jinja templating
|
||||
system for serialization. For more information on Jinja, please
|
||||
see http://jinja.pocoo.org/ .
|
||||
"""
|
||||
|
||||
import copy
|
||||
|
||||
import jinja2
|
||||
|
||||
|
||||
class JinjaView(object):
|
||||
"""A Jinja View
|
||||
|
||||
This view renders the given model using the provided Jinja
|
||||
template. The template can be given in various ways.
|
||||
If the `VIEw_TEXT` property is defined, that is used as template.
|
||||
Othewise, if a `path` parameter is passed to the constructor, that
|
||||
is used to load a file containing the template. If the `path`
|
||||
parameter is None, the `text` parameter is used as the template.
|
||||
|
||||
The leading newline character and trailing newline character are stripped
|
||||
from the template (provided they exist). Baseline indentation is
|
||||
also stripped from each line. The baseline indentation is determined by
|
||||
checking the indentation of the first line, after stripping off the leading
|
||||
newline (if any).
|
||||
|
||||
:param str path: the path to the Jinja template
|
||||
:param str text: the text of the Jinja template
|
||||
"""
|
||||
|
||||
def __init__(self, path=None, text=None):
|
||||
try:
|
||||
self._text = self.VIEW_TEXT
|
||||
except AttributeError:
|
||||
if path is not None:
|
||||
with open(path, 'r') as f:
|
||||
self._text = f.read()
|
||||
elif text is not None:
|
||||
self._text = text
|
||||
else:
|
||||
self._text = ""
|
||||
|
||||
if self._text[0] == "\n":
|
||||
self._text = self._text[1:]
|
||||
|
||||
newtext = self._text.lstrip()
|
||||
amt = len(self._text) - len(newtext)
|
||||
if (amt > 0):
|
||||
base_indent = self._text[0:amt]
|
||||
lines = self._text.splitlines()
|
||||
newlines = []
|
||||
for line in lines:
|
||||
if line.startswith(base_indent):
|
||||
newlines.append(line[amt:])
|
||||
else:
|
||||
newlines.append(line)
|
||||
self._text = "\n".join(newlines)
|
||||
|
||||
if self._text[-1] == "\n":
|
||||
self._text = self._text[:-1]
|
||||
|
||||
self._regentemplate = True
|
||||
self._templatecache = None
|
||||
|
||||
def __call__(self, model):
|
||||
return self.template.render(**model)
|
||||
|
||||
def __deepcopy__(self, memodict):
|
||||
res = object.__new__(JinjaView)
|
||||
res._text = copy.deepcopy(self._text, memodict)
|
||||
|
||||
# regenerate the template on a deepcopy
|
||||
res._regentemplate = True
|
||||
res._templatecache = None
|
||||
|
||||
return res
|
||||
|
||||
@property
|
||||
def template(self):
|
||||
"""Get the Compiled Template
|
||||
|
||||
Gets the compiled template, using a cached copy if possible
|
||||
(stored in attr:`_templatecache`) or otherwise recompiling
|
||||
the template if the compiled template is not present or is
|
||||
invalid (due to attr:`_regentemplate` being set to True).
|
||||
|
||||
:returns: the compiled Jinja template
|
||||
:rtype: :class:`jinja2.Template`
|
||||
"""
|
||||
|
||||
if self._templatecache is None or self._regentemplate:
|
||||
self._templatecache = jinja2.Template(self._text)
|
||||
self._regentemplate = False
|
||||
|
||||
return self._templatecache
|
||||
|
||||
def _gettext(self):
|
||||
"""Get the Template Text
|
||||
|
||||
Gets the text of the current template
|
||||
|
||||
:returns: the text of the Jinja template
|
||||
:rtype: str
|
||||
"""
|
||||
|
||||
return self._text
|
||||
|
||||
def _settext(self, textval):
|
||||
"""Set the Template Text
|
||||
|
||||
Sets the text of the current template, marking it
|
||||
for recompilation next time the compiled template
|
||||
is retrived via attr:`template` .
|
||||
|
||||
:param str textval: the new text of the Jinja template
|
||||
"""
|
||||
|
||||
self._text = textval
|
||||
self.regentemplate = True
|
||||
|
||||
text = property(_gettext, _settext)
|
@ -1,19 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides basic JSON views
|
||||
|
||||
This module provides several basic views which serialize
|
||||
models into JSON.
|
||||
"""
|
@ -1,66 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides generic JSON views
|
||||
|
||||
This modules defines several basic views for serializing
|
||||
data to JSON. Submodels that have already been serialized
|
||||
as JSON may have their string values marked with `__is_json__
|
||||
= True` using :class:`openstack.common.report.utils.StringWithAttrs`
|
||||
(each of the classes within this module does this automatically,
|
||||
and non-naive serializers check for this attribute and handle
|
||||
such strings specially)
|
||||
"""
|
||||
|
||||
import copy
|
||||
|
||||
from oslo_serialization import jsonutils as json
|
||||
|
||||
from nova.openstack.common.report import utils as utils
|
||||
|
||||
|
||||
class BasicKeyValueView(object):
|
||||
"""A Basic Key-Value JSON View
|
||||
|
||||
This view performs a naive serialization of a model
|
||||
into JSON by simply calling :func:`json.dumps` on the model
|
||||
"""
|
||||
|
||||
def __call__(self, model):
|
||||
res = utils.StringWithAttrs(json.dumps(model.data))
|
||||
res.__is_json__ = True
|
||||
return res
|
||||
|
||||
|
||||
class KeyValueView(object):
|
||||
"""A Key-Value JSON View
|
||||
|
||||
This view performs advanced serialization to a model
|
||||
into JSON. It does so by first checking all values to
|
||||
see if they are marked as JSON. If so, they are deserialized
|
||||
using :func:`json.loads`. Then, the copy of the model with all
|
||||
JSON deserialized is reserialized into proper nested JSON using
|
||||
:func:`json.dumps`.
|
||||
"""
|
||||
|
||||
def __call__(self, model):
|
||||
# this part deals with subviews that were already serialized
|
||||
cpy = copy.deepcopy(model)
|
||||
for key in model.keys():
|
||||
if getattr(model[key], '__is_json__', False):
|
||||
cpy[key] = json.loads(model[key])
|
||||
|
||||
res = utils.StringWithAttrs(json.dumps(cpy.data, sort_keys=True))
|
||||
res.__is_json__ = True
|
||||
return res
|
@ -1,19 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides basic text views
|
||||
|
||||
This module provides several basic views which serialize
|
||||
models into human-readable text.
|
||||
"""
|
@ -1,202 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides generic text views
|
||||
|
||||
This modules provides several generic views for
|
||||
serializing models into human-readable text.
|
||||
"""
|
||||
|
||||
import collections as col
|
||||
|
||||
import six
|
||||
|
||||
|
||||
class MultiView(object):
|
||||
"""A Text View Containing Multiple Views
|
||||
|
||||
This view simply serializes each
|
||||
value in the data model, and then
|
||||
joins them with newlines (ignoring
|
||||
the key values altogether). This is
|
||||
useful for serializing lists of models
|
||||
(as array-like dicts).
|
||||
"""
|
||||
|
||||
def __call__(self, model):
|
||||
res = [str(model[key]) for key in model]
|
||||
return "\n".join(res)
|
||||
|
||||
|
||||
class BasicKeyValueView(object):
|
||||
"""A Basic Key-Value Text View
|
||||
|
||||
This view performs a naive serialization of a model into
|
||||
text using a basic key-value method, where each
|
||||
key-value pair is rendered as "key = str(value)"
|
||||
"""
|
||||
|
||||
def __call__(self, model):
|
||||
res = ""
|
||||
for key in model:
|
||||
res += "{key} = {value}\n".format(key=key, value=model[key])
|
||||
|
||||
return res
|
||||
|
||||
|
||||
class KeyValueView(object):
|
||||
"""A Key-Value Text View
|
||||
|
||||
This view performs an advanced serialization of a model
|
||||
into text by following the following set of rules:
|
||||
|
||||
key : text
|
||||
key = text
|
||||
|
||||
rootkey : Mapping
|
||||
::
|
||||
|
||||
rootkey =
|
||||
serialize(key, value)
|
||||
|
||||
key : Sequence
|
||||
::
|
||||
|
||||
key =
|
||||
serialize(item)
|
||||
|
||||
:param str indent_str: the string used to represent one "indent"
|
||||
:param str key_sep: the separator to use between keys and values
|
||||
:param str dict_sep: the separator to use after a dictionary root key
|
||||
:param str list_sep: the separator to use after a list root key
|
||||
:param str anon_dict: the "key" to use when there is a dict in a list
|
||||
(does not automatically use the dict separator)
|
||||
:param before_dict: content to place on the line(s) before the a dict
|
||||
root key (use None to avoid inserting an extra line)
|
||||
:type before_dict: str or None
|
||||
:param before_list: content to place on the line(s) before the a list
|
||||
root key (use None to avoid inserting an extra line)
|
||||
:type before_list: str or None
|
||||
"""
|
||||
|
||||
def __init__(self,
|
||||
indent_str=' ',
|
||||
key_sep=' = ',
|
||||
dict_sep=' = ',
|
||||
list_sep=' = ',
|
||||
anon_dict='[dict]',
|
||||
before_dict=None,
|
||||
before_list=None):
|
||||
self.indent_str = indent_str
|
||||
self.key_sep = key_sep
|
||||
self.dict_sep = dict_sep
|
||||
self.list_sep = list_sep
|
||||
self.anon_dict = anon_dict
|
||||
self.before_dict = before_dict
|
||||
self.before_list = before_list
|
||||
|
||||
def __call__(self, model):
|
||||
def serialize(root, rootkey, indent):
|
||||
res = []
|
||||
if rootkey is not None:
|
||||
res.append((self.indent_str * indent) + rootkey)
|
||||
|
||||
if isinstance(root, col.Mapping):
|
||||
if rootkey is None and indent > 0:
|
||||
res.append((self.indent_str * indent) + self.anon_dict)
|
||||
elif rootkey is not None:
|
||||
res[0] += self.dict_sep
|
||||
if self.before_dict is not None:
|
||||
res.insert(0, self.before_dict)
|
||||
|
||||
for key in sorted(root):
|
||||
res.extend(serialize(root[key], key, indent + 1))
|
||||
elif (isinstance(root, col.Sequence) and
|
||||
not isinstance(root, six.string_types)):
|
||||
if rootkey is not None:
|
||||
res[0] += self.list_sep
|
||||
if self.before_list is not None:
|
||||
res.insert(0, self.before_list)
|
||||
|
||||
for val in sorted(root, key=str):
|
||||
res.extend(serialize(val, None, indent + 1))
|
||||
else:
|
||||
str_root = str(root)
|
||||
if '\n' in str_root:
|
||||
# we are in a submodel
|
||||
if rootkey is not None:
|
||||
res[0] += self.dict_sep
|
||||
|
||||
list_root = [(self.indent_str * (indent + 1)) + line
|
||||
for line in str_root.split('\n')]
|
||||
res.extend(list_root)
|
||||
else:
|
||||
# just a normal key or list entry
|
||||
try:
|
||||
res[0] += self.key_sep + str_root
|
||||
except IndexError:
|
||||
res = [(self.indent_str * indent) + str_root]
|
||||
|
||||
return res
|
||||
|
||||
return "\n".join(serialize(model, None, -1))
|
||||
|
||||
|
||||
class TableView(object):
|
||||
"""A Basic Table Text View
|
||||
|
||||
This view performs serialization of data into a basic table with
|
||||
predefined column names and mappings. Column width is auto-calculated
|
||||
evenly, column values are automatically truncated accordingly. Values
|
||||
are centered in the columns.
|
||||
|
||||
:param [str] column_names: the headers for each of the columns
|
||||
:param [str] column_values: the item name to match each column to in
|
||||
each row
|
||||
:param str table_prop_name: the name of the property within the model
|
||||
containing the row models
|
||||
"""
|
||||
|
||||
def __init__(self, column_names, column_values, table_prop_name):
|
||||
self.table_prop_name = table_prop_name
|
||||
self.column_names = column_names
|
||||
self.column_values = column_values
|
||||
self.column_width = (72 - len(column_names) + 1) // len(column_names)
|
||||
|
||||
column_headers = "|".join(
|
||||
"{ch[" + str(n) + "]: ^" + str(self.column_width) + "}"
|
||||
for n in range(len(column_names))
|
||||
)
|
||||
|
||||
# correct for float-to-int roundoff error
|
||||
test_fmt = column_headers.format(ch=column_names)
|
||||
if len(test_fmt) < 72:
|
||||
column_headers += ' ' * (72 - len(test_fmt))
|
||||
|
||||
vert_divider = '-' * 72
|
||||
self.header_fmt_str = column_headers + "\n" + vert_divider + "\n"
|
||||
|
||||
self.row_fmt_str = "|".join(
|
||||
"{cv[" + str(n) + "]: ^" + str(self.column_width) + "}"
|
||||
for n in range(len(column_values))
|
||||
)
|
||||
|
||||
def __call__(self, model):
|
||||
res = self.header_fmt_str.format(ch=self.column_names)
|
||||
for raw_row in model[self.table_prop_name]:
|
||||
row = [str(raw_row[prop_name]) for prop_name in self.column_values]
|
||||
# double format is in case we have roundoff error
|
||||
res += '{0: <72}\n'.format(self.row_fmt_str.format(cv=row))
|
||||
|
||||
return res
|
@ -1,51 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Text Views With Headers
|
||||
|
||||
This package defines several text views with headers
|
||||
"""
|
||||
|
||||
|
||||
class HeaderView(object):
|
||||
"""A Text View With a Header
|
||||
|
||||
This view simply serializes the model and places the given
|
||||
header on top.
|
||||
|
||||
:param header: the header (can be anything on which str() can be called)
|
||||
"""
|
||||
|
||||
def __init__(self, header):
|
||||
self.header = header
|
||||
|
||||
def __call__(self, model):
|
||||
return str(self.header) + "\n" + str(model)
|
||||
|
||||
|
||||
class TitledView(HeaderView):
|
||||
"""A Text View With a Title
|
||||
|
||||
This view simply serializes the model, and places
|
||||
a preformatted header containing the given title
|
||||
text on top. The title text can be up to 64 characters
|
||||
long.
|
||||
|
||||
:param str title: the title of the view
|
||||
"""
|
||||
|
||||
FORMAT_STR = ('=' * 72) + "\n===={0: ^64}====\n" + ('=' * 72)
|
||||
|
||||
def __init__(self, title):
|
||||
super(TitledView, self).__init__(self.FORMAT_STR.format(title))
|
@ -1,38 +0,0 @@
|
||||
# Copyright 2014 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides process view
|
||||
|
||||
This module provides a view for
|
||||
visualizing processes in human-readable formm
|
||||
"""
|
||||
|
||||
import nova.openstack.common.report.views.jinja_view as jv
|
||||
|
||||
|
||||
class ProcessView(jv.JinjaView):
|
||||
"""A Process View
|
||||
|
||||
This view displays process models defined by
|
||||
:class:`openstack.common.report.models.process.ProcessModel`
|
||||
"""
|
||||
|
||||
VIEW_TEXT = (
|
||||
"Process {{ pid }} (under {{ parent_pid }}) "
|
||||
"[ run by: {{ username }} ({{ uids.real|default('unknown uid') }}),"
|
||||
" state: {{ state }} ]\n"
|
||||
"{% for child in children %}"
|
||||
" {{ child }}"
|
||||
"{% endfor %}"
|
||||
)
|
@ -1,80 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides thread and stack-trace views
|
||||
|
||||
This module provides a collection of views for
|
||||
visualizing threads, green threads, and stack traces
|
||||
in human-readable form.
|
||||
"""
|
||||
|
||||
from nova.openstack.common.report.views import jinja_view as jv
|
||||
|
||||
|
||||
class StackTraceView(jv.JinjaView):
|
||||
"""A Stack Trace View
|
||||
|
||||
This view displays stack trace models defined by
|
||||
:class:`openstack.common.report.models.threading.StackTraceModel`
|
||||
"""
|
||||
|
||||
VIEW_TEXT = (
|
||||
"{% if root_exception is not none %}"
|
||||
"Exception: {{ root_exception }}\n"
|
||||
"------------------------------------\n"
|
||||
"\n"
|
||||
"{% endif %}"
|
||||
"{% for line in lines %}\n"
|
||||
"{{ line.filename }}:{{ line.line }} in {{ line.name }}\n"
|
||||
" {% if line.code is not none %}"
|
||||
"`{{ line.code }}`"
|
||||
"{% else %}"
|
||||
"(source not found)"
|
||||
"{% endif %}\n"
|
||||
"{% else %}\n"
|
||||
"No Traceback!\n"
|
||||
"{% endfor %}"
|
||||
)
|
||||
|
||||
|
||||
class GreenThreadView(object):
|
||||
"""A Green Thread View
|
||||
|
||||
This view displays a green thread provided by the data
|
||||
model :class:`openstack.common.report.models.threading.GreenThreadModel`
|
||||
"""
|
||||
|
||||
FORMAT_STR = "------{thread_str: ^60}------" + "\n" + "{stack_trace}"
|
||||
|
||||
def __call__(self, model):
|
||||
return self.FORMAT_STR.format(
|
||||
thread_str=" Green Thread ",
|
||||
stack_trace=model.stack_trace
|
||||
)
|
||||
|
||||
|
||||
class ThreadView(object):
|
||||
"""A Thread Collection View
|
||||
|
||||
This view displays a python thread provided by the data
|
||||
model :class:`openstack.common.report.models.threading.ThreadModel` # noqa
|
||||
"""
|
||||
|
||||
FORMAT_STR = "------{thread_str: ^60}------" + "\n" + "{stack_trace}"
|
||||
|
||||
def __call__(self, model):
|
||||
return self.FORMAT_STR.format(
|
||||
thread_str=" Thread #{0} ".format(model.thread_id),
|
||||
stack_trace=model.stack_trace
|
||||
)
|
@ -1,19 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides basic XML views
|
||||
|
||||
This module provides several basic views which serialize
|
||||
models into XML.
|
||||
"""
|
@ -1,87 +0,0 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Provides generic XML views
|
||||
|
||||
This modules defines several basic views for serializing
|
||||
data to XML. Submodels that have already been serialized
|
||||
as XML may have their string values marked with `__is_xml__
|
||||
= True` using :class:`openstack.common.report.utils.StringWithAttrs`
|
||||
(each of the classes within this module does this automatically,
|
||||
and non-naive serializers check for this attribute and handle
|
||||
such strings specially)
|
||||
"""
|
||||
|
||||
import collections as col
|
||||
import copy
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
import six
|
||||
|
||||
from nova.openstack.common.report import utils as utils
|
||||
|
||||
|
||||
class KeyValueView(object):
|
||||
"""A Key-Value XML View
|
||||
|
||||
This view performs advanced serialization of a data model
|
||||
into XML. It first deserializes any values marked as XML so
|
||||
that they can be properly reserialized later. It then follows
|
||||
the following rules to perform serialization:
|
||||
|
||||
key : text/xml
|
||||
The tag name is the key name, and the contents are the text or xml
|
||||
key : Sequence
|
||||
A wrapper tag is created with the key name, and each item is placed
|
||||
in an 'item' tag
|
||||
key : Mapping
|
||||
A wrapper tag is created with the key name, and the serialize is called
|
||||
on each key-value pair (such that each key gets its own tag)
|
||||
|
||||
:param str wrapper_name: the name of the top-level element
|
||||
"""
|
||||
|
||||
def __init__(self, wrapper_name="model"):
|
||||
self.wrapper_name = wrapper_name
|
||||
|
||||
def __call__(self, model):
|
||||
# this part deals with subviews that were already serialized
|
||||
cpy = copy.deepcopy(model)
|
||||
for key, valstr in model.items():
|
||||
if getattr(valstr, '__is_xml__', False):
|
||||
cpy[key] = ET.fromstring(valstr)
|
||||
|
||||
def serialize(rootmodel, rootkeyname):
|
||||
res = ET.Element(rootkeyname)
|
||||
|
||||
if isinstance(rootmodel, col.Mapping):
|
||||
for key in sorted(rootmodel):
|
||||
res.append(serialize(rootmodel[key], key))
|
||||
elif (isinstance(rootmodel, col.Sequence)
|
||||
and not isinstance(rootmodel, six.string_types)):
|
||||
for val in sorted(rootmodel, key=str):
|
||||
res.append(serialize(val, 'item'))
|
||||
elif ET.iselement(rootmodel):
|
||||
res.append(rootmodel)
|
||||
else:
|
||||
res.text = str(rootmodel)
|
||||
|
||||
return res
|
||||
|
||||
str_ = ET.tostring(serialize(cpy,
|
||||
self.wrapper_name),
|
||||
encoding="utf-8").decode("utf-8")
|
||||
res = utils.StringWithAttrs(str_)
|
||||
res.__is_xml__ = True
|
||||
return res
|
@ -1,509 +0,0 @@
|
||||
# Copyright 2010 United States Government as represented by the
|
||||
# Administrator of the National Aeronautics and Space Administration.
|
||||
# Copyright 2011 Justin Santa Barbara
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Generic Node base class for all workers that run on hosts."""
|
||||
|
||||
import errno
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import signal
|
||||
import sys
|
||||
import time
|
||||
|
||||
try:
|
||||
# Importing just the symbol here because the io module does not
|
||||
# exist in Python 2.6.
|
||||
from io import UnsupportedOperation # noqa
|
||||
except ImportError:
|
||||
# Python 2.6
|
||||
UnsupportedOperation = None
|
||||
|
||||
import eventlet
|
||||
from eventlet import event
|
||||
from oslo_config import cfg
|
||||
|
||||
from nova.openstack.common import eventlet_backdoor
|
||||
from nova.openstack.common._i18n import _LE, _LI, _LW
|
||||
from nova.openstack.common import systemd
|
||||
from nova.openstack.common import threadgroup
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _sighup_supported():
|
||||
return hasattr(signal, 'SIGHUP')
|
||||
|
||||
|
||||
def _is_daemon():
|
||||
# The process group for a foreground process will match the
|
||||
# process group of the controlling terminal. If those values do
|
||||
# not match, or ioctl() fails on the stdout file handle, we assume
|
||||
# the process is running in the background as a daemon.
|
||||
# http://www.gnu.org/software/bash/manual/bashref.html#Job-Control-Basics
|
||||
try:
|
||||
is_daemon = os.getpgrp() != os.tcgetpgrp(sys.stdout.fileno())
|
||||
except OSError as err:
|
||||
if err.errno == errno.ENOTTY:
|
||||
# Assume we are a daemon because there is no terminal.
|
||||
is_daemon = True
|
||||
else:
|
||||
raise
|
||||
except UnsupportedOperation:
|
||||
# Could not get the fileno for stdout, so we must be a daemon.
|
||||
is_daemon = True
|
||||
return is_daemon
|
||||
|
||||
|
||||
def _is_sighup_and_daemon(signo):
|
||||
if not (_sighup_supported() and signo == signal.SIGHUP):
|
||||
# Avoid checking if we are a daemon, because the signal isn't
|
||||
# SIGHUP.
|
||||
return False
|
||||
return _is_daemon()
|
||||
|
||||
|
||||
def _signo_to_signame(signo):
|
||||
signals = {signal.SIGTERM: 'SIGTERM',
|
||||
signal.SIGINT: 'SIGINT'}
|
||||
if _sighup_supported():
|
||||
signals[signal.SIGHUP] = 'SIGHUP'
|
||||
return signals[signo]
|
||||
|
||||
|
||||
def _set_signals_handler(handler):
|
||||
signal.signal(signal.SIGTERM, handler)
|
||||
signal.signal(signal.SIGINT, handler)
|
||||
if _sighup_supported():
|
||||
signal.signal(signal.SIGHUP, handler)
|
||||
|
||||
|
||||
class Launcher(object):
    """Launch one or more services and wait for them to complete."""

    def __init__(self):
        """Initialize the service launcher.

        :returns: None

        """
        # Collection that runs each added service on its own greenthread.
        self.services = Services()
        # Eventlet backdoor port (or None when disabled); propagated to
        # every launched service so it can advertise it.
        self.backdoor_port = eventlet_backdoor.initialize_if_enabled()

    def launch_service(self, service):
        """Load and start the given service.

        :param service: The service you would like to start.
        :returns: None

        """
        service.backdoor_port = self.backdoor_port
        self.services.add(service)

    def stop(self):
        """Stop all services which are currently running.

        :returns: None

        """
        self.services.stop()

    def wait(self):
        """Waits until all services have been stopped, and then returns.

        :returns: None

        """
        self.services.wait()

    def restart(self):
        """Reload config files and restart service.

        :returns: None

        """
        # Re-read configuration before restarting so services pick up
        # any changes made while they were running.
        cfg.CONF.reload_config_files()
        self.services.restart()
|
||||
|
||||
|
||||
class SignalExit(SystemExit):
    """SystemExit variant that remembers which signal triggered the exit."""

    def __init__(self, signo, exccode=1):
        # The exit code travels through the normal SystemExit machinery;
        # the signal number rides along for handlers that need to report it.
        super(SignalExit, self).__init__(exccode)
        self.signo = signo
|
||||
|
||||
|
||||
class ServiceLauncher(Launcher):
    """Launcher that runs services in-process and converts signals to exits."""

    def _handle_signal(self, signo, frame):
        # Allow the process to be killed again and die from natural causes
        _set_signals_handler(signal.SIG_DFL)
        # Raising unwinds the eventlet wait below via SignalExit.
        raise SignalExit(signo)

    def handle_signal(self):
        # Route SIGTERM/SIGINT (and SIGHUP where supported) to _handle_signal.
        _set_signals_handler(self._handle_signal)

    def _wait_for_exit_or_signal(self, ready_callback=None):
        # Returns (exit_status, signal_number); signo stays 0 when the exit
        # was a plain SystemExit rather than a caught signal.
        status = None
        signo = 0

        LOG.debug('Full set of CONF:')
        CONF.log_opt_values(LOG, logging.DEBUG)

        try:
            if ready_callback:
                ready_callback()
            super(ServiceLauncher, self).wait()
        except SignalExit as exc:
            signame = _signo_to_signame(exc.signo)
            LOG.info(_LI('Caught %s, exiting'), signame)
            status = exc.code
            signo = exc.signo
        except SystemExit as exc:
            status = exc.code
        finally:
            # Always tear the services down, whatever caused the unwind.
            self.stop()

        return status, signo

    def wait(self, ready_callback=None):
        """Run until a non-SIGHUP exit; on SIGHUP (as a daemon) restart.

        :param ready_callback: optional callable invoked once the launcher
            is about to start waiting.
        :returns: the process exit status
        """
        # Tell systemd (if present) that we are up, exactly once.
        systemd.notify_once()
        while True:
            self.handle_signal()
            status, signo = self._wait_for_exit_or_signal(ready_callback)
            if not _is_sighup_and_daemon(signo):
                return status
            # SIGHUP while daemonized: reload config and start over.
            self.restart()
|
||||
|
||||
|
||||
class ServiceWrapper(object):
    """Bookkeeping for one service managed by ProcessLauncher."""

    def __init__(self, service, workers):
        self.service = service    # the Service instance children will run
        self.workers = workers    # desired number of worker processes
        self.children = set()     # pids of currently live children
        self.forktimes = []       # timestamps of recent forks (rate limit)
|
||||
|
||||
|
||||
class ProcessLauncher(object):
    """Launcher that forks worker processes and respawns them when they die."""

    # Handlers shared by every ProcessLauncher instance in this process:
    # the process-wide OS signal handler can only be installed once, so it
    # fans out to each registered instance handler.
    _signal_handlers_set = set()

    @classmethod
    def _handle_class_signals(cls, *args, **kwargs):
        # Dispatch the OS signal to every registered instance handler.
        for handler in cls._signal_handlers_set:
            handler(*args, **kwargs)

    def __init__(self):
        """Constructor."""

        self.children = {}       # pid -> ServiceWrapper
        self.sigcaught = None    # last signal seen by _handle_signal
        self.running = True
        # Pipe children use to detect that this parent died unexpectedly:
        # the read end blocks until the parent's write end is closed.
        rfd, self.writepipe = os.pipe()
        self.readpipe = eventlet.greenio.GreenPipe(rfd, 'r')
        self.handle_signal()

    def handle_signal(self):
        self._signal_handlers_set.add(self._handle_signal)
        _set_signals_handler(self._handle_class_signals)

    def _handle_signal(self, signo, frame):
        # Record the signal and let the wait() loop decide what to do.
        self.sigcaught = signo
        self.running = False

        # Allow the process to be killed again and die from natural causes
        _set_signals_handler(signal.SIG_DFL)

    def _pipe_watcher(self):
        # This will block until the write end is closed when the parent
        # dies unexpectedly
        self.readpipe.read()

        LOG.info(_LI('Parent process has died unexpectedly, exiting'))

        sys.exit(1)

    def _child_process_handle_signal(self):
        # Setup child signal handlers differently
        def _sigterm(*args):
            signal.signal(signal.SIGTERM, signal.SIG_DFL)
            raise SignalExit(signal.SIGTERM)

        def _sighup(*args):
            signal.signal(signal.SIGHUP, signal.SIG_DFL)
            raise SignalExit(signal.SIGHUP)

        signal.signal(signal.SIGTERM, _sigterm)
        if _sighup_supported():
            signal.signal(signal.SIGHUP, _sighup)
        # Block SIGINT and let the parent send us a SIGTERM
        signal.signal(signal.SIGINT, signal.SIG_IGN)

    def _child_wait_for_exit_or_signal(self, launcher):
        status = 0
        signo = 0

        # NOTE(johannes): All exceptions are caught to ensure this
        # doesn't fallback into the loop spawning children. It would
        # be bad for a child to spawn more children.
        try:
            launcher.wait()
        except SignalExit as exc:
            signame = _signo_to_signame(exc.signo)
            LOG.info(_LI('Child caught %s, exiting'), signame)
            status = exc.code
            signo = exc.signo
        except SystemExit as exc:
            status = exc.code
        except BaseException:
            LOG.exception(_LE('Unhandled exception'))
            status = 2
        finally:
            launcher.stop()

        return status, signo

    def _child_process(self, service):
        # Runs in the freshly-forked child: set up isolation from the
        # parent, then build a single-service Launcher.
        self._child_process_handle_signal()

        # Reopen the eventlet hub to make sure we don't share an epoll
        # fd with parent and/or siblings, which would be bad
        eventlet.hubs.use_hub()

        # Close write to ensure only parent has it open
        os.close(self.writepipe)
        # Create greenthread to watch for parent to close pipe
        eventlet.spawn_n(self._pipe_watcher)

        # Reseed random number generator
        random.seed()

        launcher = Launcher()
        launcher.launch_service(service)
        return launcher

    def _start_child(self, wrap):
        if len(wrap.forktimes) > wrap.workers:
            # Limit ourselves to one process a second (over the period of
            # number of workers * 1 second). This will allow workers to
            # start up quickly but ensure we don't fork off children that
            # die instantly too quickly.
            if time.time() - wrap.forktimes[0] < wrap.workers:
                LOG.info(_LI('Forking too fast, sleeping'))
                time.sleep(1)

            wrap.forktimes.pop(0)

        wrap.forktimes.append(time.time())

        pid = os.fork()
        if pid == 0:
            # Child: run the service until a terminal signal; restart the
            # launcher on SIGHUP when running as a daemon.
            launcher = self._child_process(wrap.service)
            while True:
                self._child_process_handle_signal()
                status, signo = self._child_wait_for_exit_or_signal(launcher)
                if not _is_sighup_and_daemon(signo):
                    break
                launcher.restart()

            # os._exit avoids running parent-process cleanup in the child.
            os._exit(status)

        LOG.info(_LI('Started child %d'), pid)

        wrap.children.add(pid)
        self.children[pid] = wrap

        return pid

    def launch_service(self, service, workers=1):
        """Fork *workers* child processes, each running *service*."""
        wrap = ServiceWrapper(service, workers)

        LOG.info(_LI('Starting %d workers'), wrap.workers)
        while self.running and len(wrap.children) < wrap.workers:
            self._start_child(wrap)

    def _wait_child(self):
        # Reap one dead child; returns its ServiceWrapper, or None when
        # there is nothing to reap (or the pid is unknown).
        try:
            # Block while any of child processes have exited
            pid, status = os.waitpid(0, 0)
            if not pid:
                return None
        except OSError as exc:
            # EINTR: interrupted by a signal; ECHILD: no children left.
            if exc.errno not in (errno.EINTR, errno.ECHILD):
                raise
            return None

        if os.WIFSIGNALED(status):
            sig = os.WTERMSIG(status)
            LOG.info(_LI('Child %(pid)d killed by signal %(sig)d'),
                     dict(pid=pid, sig=sig))
        else:
            code = os.WEXITSTATUS(status)
            LOG.info(_LI('Child %(pid)s exited with status %(code)d'),
                     dict(pid=pid, code=code))

        if pid not in self.children:
            LOG.warning(_LW('pid %d not in child list'), pid)
            return None

        wrap = self.children.pop(pid)
        wrap.children.remove(pid)
        return wrap

    def _respawn_children(self):
        while self.running:
            wrap = self._wait_child()
            if not wrap:
                continue
            # Top this wrapper's workers back up to the configured count.
            while self.running and len(wrap.children) < wrap.workers:
                self._start_child(wrap)

    def wait(self):
        """Loop waiting on children to die and respawning as necessary."""

        systemd.notify_once()
        LOG.debug('Full set of CONF:')
        CONF.log_opt_values(LOG, logging.DEBUG)

        try:
            while True:
                self.handle_signal()
                self._respawn_children()
                # No signal means that stop was called. Don't clean up here.
                if not self.sigcaught:
                    return

                signame = _signo_to_signame(self.sigcaught)
                LOG.info(_LI('Caught %s, stopping children'), signame)
                if not _is_sighup_and_daemon(self.sigcaught):
                    break

                # SIGHUP while daemonized: reload config, reset services
                # and forward SIGHUP to every child.
                cfg.CONF.reload_config_files()
                for service in set(
                        [wrap.service for wrap in self.children.values()]):
                    service.reset()

                for pid in self.children:
                    os.kill(pid, signal.SIGHUP)

                self.running = True
                self.sigcaught = None
        except eventlet.greenlet.GreenletExit:
            LOG.info(_LI("Wait called after thread killed. Cleaning up."))

        self.stop()

    def stop(self):
        """Terminate child processes and wait on each."""
        self.running = False
        for pid in self.children:
            try:
                os.kill(pid, signal.SIGTERM)
            except OSError as exc:
                # ESRCH: child already gone; anything else is unexpected.
                if exc.errno != errno.ESRCH:
                    raise

        # Wait for children to die
        if self.children:
            LOG.info(_LI('Waiting on %d children to exit'), len(self.children))
            while self.children:
                self._wait_child()
|
||||
|
||||
|
||||
class Service(object):
    """Service object for binaries running on hosts."""

    def __init__(self, threads=1000):
        # Pool of greenthreads this service may spawn work onto.
        self.tg = threadgroup.ThreadGroup(threads)

        # signal that the service is done shutting itself down:
        self._done = event.Event()

    def reset(self):
        """Prepare the service to be started again after a restart."""
        # NOTE(Fengqian): docs for Event.reset() recommend against using it
        self._done = event.Event()

    def start(self):
        """Start the service; subclasses override to do real work."""
        pass

    def stop(self, graceful=False):
        """Stop the thread group and signal that shutdown completed.

        :param graceful: when True, wait for threads to finish rather than
            killing them.
        """
        self.tg.stop(graceful)
        self.tg.wait()
        # Signal that service cleanup is done:
        if not self._done.ready():
            self._done.send()

    def wait(self):
        """Block until stop() has finished its cleanup."""
        self._done.wait()
|
||||
|
||||
|
||||
class Services(object):
    """Container that runs each added service on its own greenthread."""

    def __init__(self):
        self.services = []
        self.tg = threadgroup.ThreadGroup()
        # Event used to tell the run_service wrapper threads to exit.
        self.done = event.Event()

    def add(self, service):
        """Track *service* and start it on a new thread."""
        self.services.append(service)
        self.tg.add_thread(self.run_service, service, self.done)

    def stop(self):
        # wait for graceful shutdown of services:
        for service in self.services:
            service.stop()
            service.wait()

        # Each service has performed cleanup, now signal that the run_service
        # wrapper threads can now die:
        if not self.done.ready():
            self.done.send()

        # reap threads:
        self.tg.stop()

    def wait(self):
        """Block until all service threads have finished."""
        self.tg.wait()

    def restart(self):
        """Stop every service, then reset and start each one again."""
        self.stop()
        # A fresh event is required: the previous one was already sent.
        self.done = event.Event()
        for restart_service in self.services:
            restart_service.reset()
            self.tg.add_thread(self.run_service, restart_service, self.done)

    @staticmethod
    def run_service(service, done):
        """Service start wrapper.

        :param service: service to run
        :param done: event to wait on until a shutdown is triggered
        :returns: None

        """
        service.start()
        done.wait()
|
||||
|
||||
|
||||
def launch(service, workers=1):
    """Start *service* and return the launcher driving it.

    A single worker (or ``workers=None``) runs in-process; more than one
    worker forks child processes.
    """
    single = workers is None or workers == 1
    if single:
        launcher = ServiceLauncher()
        launcher.launch_service(service)
        return launcher

    launcher = ProcessLauncher()
    launcher.launch_service(service, workers=workers)
    return launcher
|
@ -1,81 +0,0 @@
|
||||
# Copyright 2013 IBM Corp.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
import os
|
||||
import ssl
|
||||
|
||||
from oslo_config import cfg
|
||||
|
||||
from nova.openstack.common._i18n import _
|
||||
|
||||
|
||||
ssl_opts = [
|
||||
cfg.StrOpt('ca_file',
|
||||
help="CA certificate file to use to verify "
|
||||
"connecting clients."),
|
||||
cfg.StrOpt('cert_file',
|
||||
help="Certificate file to use when starting "
|
||||
"the server securely."),
|
||||
cfg.StrOpt('key_file',
|
||||
help="Private key file to use when starting "
|
||||
"the server securely."),
|
||||
]
|
||||
|
||||
CONF = cfg.CONF
|
||||
config_section = 'ssl'
|
||||
CONF.register_opts(ssl_opts, config_section)
|
||||
|
||||
|
||||
def list_opts():
    """Entry point for oslo-config-generator."""
    # Deep-copy so the generator cannot mutate the module-level options.
    opts = copy.deepcopy(ssl_opts)
    return [(config_section, opts)]
|
||||
|
||||
|
||||
def is_enabled():
    """Validate the SSL configuration and report whether SSL is enabled.

    :returns: truthy when a cert/key pair is configured (SSL enabled).
    :raises RuntimeError: if any configured file does not exist, or only
        one of cert_file/key_file is provided.
    """
    cert_file = CONF.ssl.cert_file
    key_file = CONF.ssl.key_file
    ca_file = CONF.ssl.ca_file
    # SSL is considered requested as soon as either half of the pair is set;
    # the completeness check below then enforces both.
    use_ssl = cert_file or key_file

    if cert_file and not os.path.exists(cert_file):
        raise RuntimeError(_("Unable to find cert_file : %s") % cert_file)

    if ca_file and not os.path.exists(ca_file):
        raise RuntimeError(_("Unable to find ca_file : %s") % ca_file)

    if key_file and not os.path.exists(key_file):
        raise RuntimeError(_("Unable to find key_file : %s") % key_file)

    if use_ssl and (not cert_file or not key_file):
        raise RuntimeError(_("When running server in SSL mode, you must "
                             "specify both a cert_file and key_file "
                             "option value in your configuration file"))

    return use_ssl
|
||||
|
||||
|
||||
def wrap(sock):
    """Wrap *sock* as a server-side SSL socket using the configured files.

    :param sock: the plain socket to wrap
    :returns: an SSL-wrapped socket object
    """
    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12;
    # build an explicit SSLContext with equivalent settings instead.
    context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    context.load_cert_chain(certfile=CONF.ssl.cert_file,
                            keyfile=CONF.ssl.key_file)

    if CONF.ssl.ca_file:
        # A CA file means clients must present a certificate we can verify.
        context.load_verify_locations(cafile=CONF.ssl.ca_file)
        context.verify_mode = ssl.CERT_REQUIRED
    else:
        context.verify_mode = ssl.CERT_NONE

    return context.wrap_socket(sock, server_side=True)
|
@ -1,105 +0,0 @@
|
||||
# Copyright 2012-2014 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Helper module for systemd service readiness notification.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import socket
|
||||
import sys
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _abstractify(socket_name):
|
||||
if socket_name.startswith('@'):
|
||||
# abstract namespace socket
|
||||
socket_name = '\0%s' % socket_name[1:]
|
||||
return socket_name
|
||||
|
||||
|
||||
def _sd_notify(unset_env, msg):
|
||||
notify_socket = os.getenv('NOTIFY_SOCKET')
|
||||
if notify_socket:
|
||||
sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
|
||||
try:
|
||||
sock.connect(_abstractify(notify_socket))
|
||||
sock.sendall(msg)
|
||||
if unset_env:
|
||||
del os.environ['NOTIFY_SOCKET']
|
||||
except EnvironmentError:
|
||||
LOG.debug("Systemd notification failed", exc_info=True)
|
||||
finally:
|
||||
sock.close()
|
||||
|
||||
|
||||
def notify():
    """Send notification to Systemd that service is ready.

    For details see
    http://www.freedesktop.org/software/systemd/man/sd_notify.html
    """
    # Keep NOTIFY_SOCKET set so further notifications remain possible.
    _sd_notify(False, 'READY=1')
|
||||
|
||||
|
||||
def notify_once():
    """Send notification once to Systemd that service is ready.

    Systemd sets NOTIFY_SOCKET environment variable with the name of the
    socket listening for notifications from services.
    This method removes the NOTIFY_SOCKET environment variable to ensure
    notification is sent only once.
    """
    # unset_env=True drops NOTIFY_SOCKET after a successful send.
    _sd_notify(True, 'READY=1')
|
||||
|
||||
|
||||
def onready(notify_socket, timeout):
    """Wait for systemd style notification on the socket.

    :param notify_socket: local socket address
    :type notify_socket: string
    :param timeout: socket timeout
    :type timeout: float
    :returns: 0 service ready
              1 service not ready
              2 timeout occurred
    """
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
    sock.settimeout(timeout)
    sock.bind(_abstractify(notify_socket))
    try:
        msg = sock.recv(512)
    except socket.timeout:
        return 2
    finally:
        sock.close()
    # Python 3 recv() returns bytes; the original str containment test
    # ('READY=1' in msg) raises TypeError there, so compare as bytes.
    if isinstance(msg, str):
        msg = msg.encode('utf-8')
    if b'READY=1' in msg:
        return 0
    else:
        return 1
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # simple CLI for testing
    if len(sys.argv) == 1:
        # No arguments: act as the notifying service.
        notify()
    elif len(sys.argv) >= 2:
        # First argument is the wait timeout in seconds; wait for a
        # readiness message on the socket named by NOTIFY_SOCKET.
        timeout = float(sys.argv[1])
        notify_socket = os.getenv('NOTIFY_SOCKET')
        if notify_socket:
            retval = onready(notify_socket, timeout)
            sys.exit(retval)
|
@ -1,149 +0,0 @@
|
||||
# Copyright 2012 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import logging
|
||||
import threading
|
||||
|
||||
import eventlet
|
||||
from eventlet import greenpool
|
||||
|
||||
from nova.openstack.common import loopingcall
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _thread_done(gt, *args, **kwargs):
|
||||
"""Callback function to be passed to GreenThread.link() when we spawn()
|
||||
Calls the :class:`ThreadGroup` to notify if.
|
||||
|
||||
"""
|
||||
kwargs['group'].thread_done(kwargs['thread'])
|
||||
|
||||
|
||||
class Thread(object):
    """Wrapper around a greenthread that reports back to its ThreadGroup.

    The wrapped greenthread notifies the owning :class:`ThreadGroup` when
    it finishes, so the group can drop it from its thread list.
    """

    def __init__(self, thread, group):
        self.thread = thread
        # Arrange for the group to be told when the greenthread exits.
        self.thread.link(_thread_done, group=group, thread=self)

    def stop(self):
        """Kill the underlying greenthread."""
        self.thread.kill()

    def wait(self):
        """Block until the greenthread finishes; return its result."""
        return self.thread.wait()

    def link(self, func, *args, **kwargs):
        """Attach an additional exit callback to the greenthread."""
        self.thread.link(func, *args, **kwargs)
|
||||
|
||||
|
||||
class ThreadGroup(object):
    """The point of the ThreadGroup class is to:

    * keep track of timers and greenthreads (making it easier to stop them
      when need be).
    * provide an easy API to add timers.
    """
    def __init__(self, thread_pool_size=10):
        # Pool capping how many greenthreads may run concurrently.
        self.pool = greenpool.GreenPool(thread_pool_size)
        self.threads = []
        self.timers = []

    def add_dynamic_timer(self, callback, initial_delay=None,
                          periodic_interval_max=None, *args, **kwargs):
        # Timer whose next interval is decided by the callback's return.
        timer = loopingcall.DynamicLoopingCall(callback, *args, **kwargs)
        timer.start(initial_delay=initial_delay,
                    periodic_interval_max=periodic_interval_max)
        self.timers.append(timer)

    def add_timer(self, interval, callback, initial_delay=None,
                  *args, **kwargs):
        # Timer that fires at a fixed interval.
        pulse = loopingcall.FixedIntervalLoopingCall(callback, *args, **kwargs)
        pulse.start(interval=interval,
                    initial_delay=initial_delay)
        self.timers.append(pulse)

    def add_thread(self, callback, *args, **kwargs):
        # Spawn callback on the pool and track it for later stop()/wait().
        gt = self.pool.spawn(callback, *args, **kwargs)
        th = Thread(gt, self)
        self.threads.append(th)
        return th

    def thread_done(self, thread):
        # Called via Thread's link callback when a greenthread finishes.
        self.threads.remove(thread)

    def _stop_threads(self):
        current = threading.current_thread()

        # Iterate over a copy of self.threads so thread_done doesn't
        # modify the list while we're iterating
        for x in self.threads[:]:
            if x is current:
                # don't kill the current thread.
                continue
            try:
                x.stop()
            except eventlet.greenlet.GreenletExit:
                pass
            except Exception as ex:
                LOG.exception(ex)

    def stop_timers(self):
        # Best-effort: keep stopping the rest even if one timer fails.
        for x in self.timers:
            try:
                x.stop()
            except Exception as ex:
                LOG.exception(ex)
        self.timers = []

    def stop(self, graceful=False):
        """stop function has the option of graceful=True/False.

        * In case of graceful=True, wait for all threads to be finished.
          Never kill threads.
        * In case of graceful=False, kill threads immediately.
        """
        self.stop_timers()
        if graceful:
            # In case of graceful=True, wait for all threads to be
            # finished, never kill threads
            self.wait()
        else:
            # In case of graceful=False(Default), kill threads
            # immediately
            self._stop_threads()

    def wait(self):
        # Wait for timers first, then for the remaining greenthreads.
        for x in self.timers:
            try:
                x.wait()
            except eventlet.greenlet.GreenletExit:
                pass
            except Exception as ex:
                LOG.exception(ex)
        current = threading.current_thread()

        # Iterate over a copy of self.threads so thread_done doesn't
        # modify the list while we're iterating
        for x in self.threads[:]:
            if x is current:
                continue
            try:
                x.wait()
            except eventlet.greenlet.GreenletExit:
                pass
            except Exception as ex:
                LOG.exception(ex)
|
@ -1,262 +0,0 @@
|
||||
# Copyright (c) 2013 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Helpers for comparing version strings.
|
||||
"""
|
||||
|
||||
import copy
|
||||
import functools
|
||||
import inspect
|
||||
import logging
|
||||
|
||||
from oslo_config import cfg
|
||||
import pkg_resources
|
||||
import six
|
||||
|
||||
from nova.openstack.common._i18n import _
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
deprecated_opts = [
|
||||
cfg.BoolOpt('fatal_deprecations',
|
||||
default=False,
|
||||
help='Enables or disables fatal status of deprecations.'),
|
||||
]
|
||||
|
||||
|
||||
def list_opts():
    """Entry point for oslo.config-generator.
    """
    # Deep-copy so the generator cannot mutate the module-level options;
    # None registers these options in the DEFAULT group.
    opts = copy.deepcopy(deprecated_opts)
    return [(None, opts)]
|
||||
|
||||
|
||||
class deprecated(object):
    """A decorator to mark callables as deprecated.

    This decorator logs a deprecation message when the callable it decorates is
    used. The message will include the release where the callable was
    deprecated, the release where it may be removed and possibly an optional
    replacement.

    Examples:

    1. Specifying the required deprecated release

    >>> @deprecated(as_of=deprecated.ICEHOUSE)
    ... def a(): pass

    2. Specifying a replacement:

    >>> @deprecated(as_of=deprecated.ICEHOUSE, in_favor_of='f()')
    ... def b(): pass

    3. Specifying the release where the functionality may be removed:

    >>> @deprecated(as_of=deprecated.ICEHOUSE, remove_in=+1)
    ... def c(): pass

    4. Specifying the deprecated functionality will not be removed:
    >>> @deprecated(as_of=deprecated.ICEHOUSE, remove_in=0)
    ... def d(): pass

    5. Specifying a replacement, deprecated functionality will not be removed:
    >>> @deprecated(as_of=deprecated.ICEHOUSE, in_favor_of='f()', remove_in=0)
    ... def e(): pass

    """

    # NOTE(morganfainberg): Bexar is used for unit test purposes, it is
    # expected we maintain a gap between Bexar and Folsom in this list.
    BEXAR = 'B'
    FOLSOM = 'F'
    GRIZZLY = 'G'
    HAVANA = 'H'
    ICEHOUSE = 'I'
    JUNO = 'J'
    KILO = 'K'
    LIBERTY = 'L'

    _RELEASES = {
        # NOTE(morganfainberg): Bexar is used for unit test purposes, it is
        # expected we maintain a gap between Bexar and Folsom in this list.
        'B': 'Bexar',
        'F': 'Folsom',
        'G': 'Grizzly',
        'H': 'Havana',
        'I': 'Icehouse',
        'J': 'Juno',
        'K': 'Kilo',
        'L': 'Liberty',
    }

    _deprecated_msg_with_alternative = _(
        '%(what)s is deprecated as of %(as_of)s in favor of '
        '%(in_favor_of)s and may be removed in %(remove_in)s.')

    _deprecated_msg_no_alternative = _(
        '%(what)s is deprecated as of %(as_of)s and may be '
        'removed in %(remove_in)s. It will not be superseded.')

    _deprecated_msg_with_alternative_no_removal = _(
        '%(what)s is deprecated as of %(as_of)s in favor of %(in_favor_of)s.')

    _deprecated_msg_with_no_alternative_no_removal = _(
        '%(what)s is deprecated as of %(as_of)s. It will not be superseded.')

    def __init__(self, as_of, in_favor_of=None, remove_in=2, what=None):
        """Initialize decorator

        :param as_of: the release deprecating the callable. Constants
            are define in this class for convenience.
        :param in_favor_of: the replacement for the callable (optional)
        :param remove_in: an integer specifying how many releases to wait
            before removing (default: 2)
        :param what: name of the thing being deprecated (default: the
            callable's name)

        """
        self.as_of = as_of
        self.in_favor_of = in_favor_of
        self.remove_in = remove_in
        self.what = what

    def __call__(self, func_or_cls):
        # Works on both plain functions and classes; anything else is an
        # error because we have nowhere to hook the warning.
        if not self.what:
            self.what = func_or_cls.__name__ + '()'
        msg, details = self._build_message()

        if inspect.isfunction(func_or_cls):

            @six.wraps(func_or_cls)
            def wrapped(*args, **kwargs):
                report_deprecated_feature(LOG, msg, details)
                return func_or_cls(*args, **kwargs)
            return wrapped
        elif inspect.isclass(func_or_cls):
            orig_init = func_or_cls.__init__

            # TODO(tsufiev): change `functools` module to `six` as
            # soon as six 1.7.4 (with fix for passing `assigned`
            # argument to underlying `functools.wraps`) is released
            # and added to the oslo-incubator requrements
            @functools.wraps(orig_init, assigned=('__name__', '__doc__'))
            def new_init(self, *args, **kwargs):
                report_deprecated_feature(LOG, msg, details)
                orig_init(self, *args, **kwargs)
            func_or_cls.__init__ = new_init
            return func_or_cls
        else:
            raise TypeError('deprecated can be used only with functions or '
                            'classes')

    def _get_safe_to_remove_release(self, release):
        # TODO(dstanek): this method will have to be reimplemented once
        # when we get to the X release because once we get to the Y
        # release, what is Y+2?
        new_release = chr(ord(release) + self.remove_in)
        if new_release in self._RELEASES:
            return self._RELEASES[new_release]
        else:
            # Beyond the known list: fall back to the raw release letter.
            return new_release

    def _build_message(self):
        # Pick the message template matching (has replacement?, will be
        # removed?) and fill in the release names.
        details = dict(what=self.what,
                       as_of=self._RELEASES[self.as_of],
                       remove_in=self._get_safe_to_remove_release(self.as_of))

        if self.in_favor_of:
            details['in_favor_of'] = self.in_favor_of
            if self.remove_in > 0:
                msg = self._deprecated_msg_with_alternative
            else:
                # There are no plans to remove this function, but it is
                # now deprecated.
                msg = self._deprecated_msg_with_alternative_no_removal
        else:
            if self.remove_in > 0:
                msg = self._deprecated_msg_no_alternative
            else:
                # There are no plans to remove this function, but it is
                # now deprecated.
                msg = self._deprecated_msg_with_no_alternative_no_removal
        return msg, details
|
||||
|
||||
|
||||
def is_compatible(requested_version, current_version, same_major=True):
    """Determine whether `requested_version` is satisfied by
    `current_version`; in other words, `current_version` is >=
    `requested_version`.

    :param requested_version: version to check for compatibility
    :param current_version: version to check against
    :param same_major: if True, the major version must be identical between
        `requested_version` and `current_version`. This is used when a
        major-version difference indicates incompatibility between the two
        versions. Since this is the common-case in practice, the default is
        True.
    :returns: True if compatible, False if not
    """
    requested_parts = pkg_resources.parse_version(requested_version)
    current_parts = pkg_resources.parse_version(current_version)

    # NOTE(review): indexing the parse_version() result assumes the legacy
    # tuple-like return value; newer setuptools returns a Version object
    # that is not indexable -- confirm the pinned setuptools supports this.
    if same_major and (requested_parts[0] != current_parts[0]):
        return False

    return current_parts >= requested_parts
|
||||
|
||||
|
||||
# Track the messages we have sent already. See
|
||||
# report_deprecated_feature().
|
||||
_deprecated_messages_sent = {}
|
||||
|
||||
|
||||
def report_deprecated_feature(logger, msg, *args, **kwargs):
    """Call this function when a deprecated feature is used.

    If the system is configured for fatal deprecations then the message
    is logged at the 'critical' level and :class:`DeprecatedConfig` will
    be raised.

    Otherwise, the message will be logged (once) at the 'warn' level.

    :raises: :class:`DeprecatedConfig` if the system is configured for
             fatal deprecations.
    """
    stdmsg = _("Deprecated: %s") % msg
    CONF.register_opts(deprecated_opts)
    if CONF.fatal_deprecations:
        logger.critical(stdmsg, *args, **kwargs)
        raise DeprecatedConfig(msg=stdmsg)

    # Warn only once per unique (message, args) pair. A list is used
    # because a tuple containing a dict can't be stored in a set.
    previously_sent = _deprecated_messages_sent.setdefault(msg, list())
    if args in previously_sent:
        # Already logged this message, so don't log it again.
        return
    previously_sent.append(args)
    logger.warn(stdmsg, *args, **kwargs)
|
||||
|
||||
|
||||
class DeprecatedConfig(Exception):
    """Raised when a deprecated feature is hit with fatal deprecations on."""

    message = _("Fatal call to deprecated config: %(msg)s")

    def __init__(self, msg):
        formatted = self.message % dict(msg=msg)
        # NOTE: mirrors the original's super(Exception, self) call, which
        # skips any intermediate classes in the MRO on purpose.
        super(Exception, self).__init__(formatted)
|
Loading…
x
Reference in New Issue
Block a user