Migrate code base to oslo.config and oslo.log
In order to migrate the code base to the "OpenStack way":

* Replace ConfigParser by oslo.config
* Replace logging by oslo.log
* Use testtools as base class for unit tests
* Add tox -e genconfig to generate config file
* Start to organize the file structure like other projects
* Define 2 cli entry points: almanach-collector and almanach-api
* The docker-compose.yml is now used to run integration tests
* Integration tests will be moved to tempest in the future
* Docker configs should be deprecated and moved to Kolla

Change-Id: I89a89a92c7bdb3125cc568323db0f9488e1380db
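The two new console scripts map onto the main() functions introduced in this change (almanach.api.main:main and almanach.collector.main:main). The packaging wiring is not shown in this diff, so the setuptools sketch below is an assumption about how the entry points would be declared:

    # Hypothetical packaging sketch; the actual setup.cfg/setup.py is not part of this diff.
    from setuptools import setup, find_packages

    setup(
        name='almanach',
        packages=find_packages(),
        entry_points={
            'console_scripts': [
                # Assumed mapping to the main() functions added by this commit.
                'almanach-api = almanach.api.main:main',
                'almanach-collector = almanach.collector.main:main',
            ],
        },
    )

Once installed, each service would then be started as almanach-api or almanach-collector with the usual oslo.config flags such as --config-file.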
parent 13d383a50c
commit e0561271cf
13  .dockerignore  Normal file
@@ -0,0 +1,13 @@
.eggs
.tox
.idea
.gitreview
.gitignore
*.egg-info
AUTHORS
ChangeLog
devstack
doc
Dockerfile
Dockerfile.integration-tests
docker-compose.yml
23  Dockerfile
@@ -1,21 +1,10 @@
FROM python:3.4
FROM python:3.5

RUN mkdir -p /opt/almanach/src
ADD almanach /opt/almanach/src/almanach
ADD setup.* /opt/almanach/src/
ADD README.rst /opt/almanach/src/
ADD requirements.txt /opt/almanach/src/
ADD LICENSE /opt/almanach/src/
ADD almanach/resources/config/almanach.cfg /etc/almanach.cfg
COPY docker-entrypoint.sh /opt/almanach/entrypoint.sh

RUN cd /opt/almanach/src && \
ADD . /usr/local/src/
RUN cd /usr/local/src && \
    pip install -r requirements.txt && \
    PBR_VERSION=2.0.dev0 python setup.py install && \
    chmod +x /opt/almanach/entrypoint.sh

VOLUME /opt/almanach
    python setup.py install && \
    mkdir -p /etc/almanach && \
    cp /usr/local/src/etc/almanach/almanach.docker.conf /etc/almanach/almanach.conf

USER nobody

ENTRYPOINT ["/opt/almanach/entrypoint.sh"]
@@ -11,13 +11,15 @@ Almanach stores the utilization of OpenStack resources (instances and volumes) f
What is Almanach?
-----------------

The main purpose of this software is to bill customers based on their usage of the cloud infrastructure.
The main purpose of this software is to record the usage of the cloud resources of each tenant.

Almanach is composed of two parts:

- **Collector**: Listens for OpenStack events and stores the relevant information in the database.
- **REST API**: Exposes the information collected to external systems.

At the moment, Almanach is only able to record the usage of instances and volumes.

Resources
---------
@@ -1,46 +0,0 @@
# Copyright 2016 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging

from almanach.auth import keystone_auth
from almanach.auth import mixed_auth
from almanach.auth import private_key_auth
from almanach import config


class AuthenticationAdapter(object):
    @staticmethod
    def factory():
        if config.auth_strategy() == "keystone":
            logging.info("Loading Keystone authentication backend")
            return keystone_auth.KeystoneAuthentication(keystone_auth.KeystoneTokenManagerFactory(
                username=config.keystone_username(),
                password=config.keystone_password(),
                auth_url=config.keystone_url(),
                tenant_name=config.keystone_tenant_name()
            ))
        elif all(auth_method in config.auth_strategy() for auth_method in ['token', 'keystone']):
            logging.info("Loading Keystone authentication backend")
            auths = [private_key_auth.PrivateKeyAuthentication(config.auth_private_key()),
                     keystone_auth.KeystoneAuthentication(keystone_auth.KeystoneTokenManagerFactory(
                         username=config.keystone_username(),
                         password=config.keystone_password(),
                         auth_url=config.keystone_url(),
                         tenant_name=config.keystone_tenant_name()
                     ))]
            return mixed_auth.MixedAuthentication(auths)
        else:
            logging.info("Loading PrivateKey authentication backend")
            return private_key_auth.PrivateKeyAuthentication(config.auth_private_key())
@@ -15,16 +15,16 @@
from keystoneclient.v2_0 import client as keystone_client
from keystoneclient.v2_0 import tokens

from almanach.auth import base_auth
from almanach.api.auth import base_auth
from almanach.core import exception


class KeystoneTokenManagerFactory(object):
    def __init__(self, username, password, auth_url, tenant_name):
        self.tenant_name = tenant_name
        self.auth_url = auth_url
        self.password = password
        self.username = username

    def __init__(self, config):
        self.auth_url = config.auth.keystone_url
        self.tenant_name = config.auth.keystone_tenant
        self.username = config.auth.keystone_username
        self.password = config.auth.keystone_password

    def get_manager(self):
        return tokens.TokenManager(keystone_client.Client(
@@ -12,23 +12,25 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from oslo_log import log

from almanach.auth import base_auth
from almanach.api.auth import base_auth
from almanach.core import exception

LOG = log.getLogger(__name__)


class MixedAuthentication(base_auth.BaseAuth):
    def __init__(self, authentication_methods):
        self.authentication_methods = authentication_methods
    def __init__(self, authentication_adapters):
        self.authentication_adapters = authentication_adapters

    def validate(self, token):
        for method in self.authentication_methods:
        for adapter in self.authentication_adapters:
            try:
                valid = method.validate(token)
                valid = adapter.validate(token)
                if valid:
                    logging.debug('Validated token with auth {0}'.format(method.__class__))
                    LOG.debug('Validated token with adapter: %s', adapter.__class__)
                    return True
            except exception.AuthenticationFailureException:
                logging.debug('Failed to validate with auth {0}'.format(method.__class__))
        raise exception.AuthenticationFailureException('No valid auth method matching token')
                LOG.debug('Failed to validate with adapter: %s', adapter.__class__)
        raise exception.AuthenticationFailureException('Unable to authenticate against any authentication adapters')
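The adapter chain above tries each authentication backend in order and only fails once every adapter has rejected the token. A self-contained sketch of that control flow, using toy stand-ins rather than the real Almanach adapters (it also broadens the except clause for brevity; the real code only catches AuthenticationFailureException):

    class AlwaysFails(object):
        def validate(self, token):
            raise RuntimeError('bad token')

    class StaticKey(object):
        def __init__(self, key):
            self.key = key

        def validate(self, token):
            return token == self.key

    def validate_with_adapters(adapters, token):
        # Mirrors MixedAuthentication.validate(): the first adapter that
        # accepts the token wins, failures fall through to the next one.
        for adapter in adapters:
            try:
                if adapter.validate(token):
                    return True
            except Exception:
                continue
        raise ValueError('no adapter accepted the token')

    assert validate_with_adapters([AlwaysFails(), StaticKey('secret')], 'secret')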
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from almanach.auth import base_auth
from almanach.api.auth import base_auth
from almanach.core import exception
43  almanach/api/auth_adapter.py  Normal file
@@ -0,0 +1,43 @@
# Copyright 2016 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from oslo_log import log

from almanach.api.auth import keystone_auth
from almanach.api.auth import mixed_auth
from almanach.api.auth import private_key_auth

LOG = log.getLogger(__name__)


class AuthenticationAdapter(object):
    def __init__(self, config):
        self.config = config

    def get_authentication_adapter(self):
        if self.config.auth.strategy == 'keystone':
            return self._get_keystone_auth()
        elif all(auth_method in self.config.auth.strategy for auth_method in ['token', 'keystone']):
            adapters = [self._get_private_key_auth(), self._get_keystone_auth()]
            return mixed_auth.MixedAuthentication(adapters)
        return self._get_private_key_auth()

    def _get_private_key_auth(self):
        LOG.info('Loading PrivateKey authentication adapter')
        return private_key_auth.PrivateKeyAuthentication(self.config.auth.private_key)

    def _get_keystone_auth(self):
        LOG.info('Loading Keystone authentication backend')
        token_manager = keystone_auth.KeystoneTokenManagerFactory(self.config)
        return keystone_auth.KeystoneAuthentication(token_manager)
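Note that the strategy check in get_authentication_adapter() is substring-based: a value such as 'token,keystone' satisfies the all(...) predicate and produces a MixedAuthentication, while anything else falls through to private key auth. A tiny sketch of that predicate with assumed strategy strings:

    def is_mixed_strategy(strategy):
        # Same predicate as get_authentication_adapter(): both words must appear.
        return all(method in strategy for method in ['token', 'keystone'])

    assert is_mixed_strategy('token,keystone')
    assert not is_mixed_strategy('keystone')      # caught earlier by the 'keystone' branch
    assert not is_mixed_strategy('private_key')   # falls through to private key auth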
51  almanach/api/main.py  Normal file
@@ -0,0 +1,51 @@
# Copyright 2016 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


import sys

from flask import Flask
from oslo_log import log

from almanach.api import auth_adapter
from almanach.api.v1 import routes
from almanach.core import controller
from almanach.core import opts
from almanach.storage import database_adapter

LOG = log.getLogger(__name__)

app = Flask('almanach')
app.register_blueprint(routes.api)


def main():
    try:
        opts.CONF(sys.argv[1:])
        config = opts.CONF

        storage = database_adapter.DatabaseAdapter(config)
        storage.connect()

        routes.controller = controller.Controller(config, storage)
        routes.auth_adapter = auth_adapter.AuthenticationAdapter(config).get_authentication_adapter()

        LOG.info('Listening on %s:%d', config.api.bind_ip, config.api.bind_port)
        app.run(host=config.api.bind_ip, port=config.api.bind_port)
    except Exception as e:
        LOG.exception(e)
        sys.exit(100)


if __name__ == '__main__':
    main()
@@ -14,15 +14,16 @@

from datetime import datetime
from functools import wraps
import logging

import flask
import jsonpickle
from oslo_log import log
from oslo_serialization import jsonutils
from werkzeug import wrappers

from almanach.core import exception

LOG = log.getLogger(__name__)
api = flask.Blueprint("api", __name__)
controller = None
auth_adapter = None

@@ -39,31 +40,31 @@ def to_json(api_call):
            return result if isinstance(result, wrappers.BaseResponse) \
                else flask.Response(encode(result), 200, {"Content-Type": "application/json"})
        except exception.DateFormatException as e:
            logging.warning(e.message)
            LOG.warning(e.message)
            return flask.Response(encode({"error": e.message}), 400, {"Content-Type": "application/json"})
        except KeyError as e:
            message = "The {param} param is mandatory for the request you have made.".format(param=e)
            logging.warning(message)
            LOG.warning(message)
            return encode({"error": message}), 400, {"Content-Type": "application/json"}
        except TypeError:
            message = "The request you have made must have data. None was given."
            logging.warning(message)
            LOG.warning(message)
            return encode({"error": message}), 400, {"Content-Type": "application/json"}
        except exception.InvalidAttributeException as e:
            logging.warning(e.get_error_message())
            LOG.warning(e.get_error_message())
            return encode({"error": e.get_error_message()}), 400, {"Content-Type": "application/json"}
        except exception.MultipleEntitiesMatchingQueryException as e:
            logging.warning(e.message)
            LOG.warning(e.message)
            return encode({"error": "Multiple entities found while updating closed"}), 400, {
                "Content-Type": "application/json"}
        except exception.AlmanachEntityNotFoundException as e:
            logging.warning(e.message)
            LOG.warning(e.message)
            return encode({"error": "Entity not found"}), 404, {"Content-Type": "application/json"}
        except exception.AlmanachException as e:
            logging.exception(e)
            LOG.exception(e)
            return flask.Response(encode({"error": e.message}), 500, {"Content-Type": "application/json"})
        except Exception as e:
            logging.exception(e)
            LOG.exception(e)
            return flask.Response(encode({"error": e}), 500, {"Content-Type": "application/json"})

    return decorator

@@ -76,7 +77,7 @@ def authenticated(api_call):
            auth_adapter.validate(flask.request.headers.get('X-Auth-Token'))
            return api_call(*args, **kwargs)
        except exception.AuthenticationFailureException as e:
            logging.error("Authentication failure: {0}".format(e))
            LOG.error("Authentication failure: %s", e)
            return flask.Response('Unauthorized', 401)

    return decorator

@@ -89,7 +90,6 @@ def get_info():

    :code 200 OK: Service is available
    """
    logging.info("Get application info")
    return controller.get_application_info()

@@ -113,7 +113,7 @@ def create_instance(project_id):
    :code 404 Not Found: If tenant does not exist
    """
    instance = jsonutils.loads(flask.request.data)
    logging.info("Creating instance for tenant %s with data %s", project_id, instance)
    LOG.info("Creating instance for tenant %s with data %s", project_id, instance)
    controller.create_instance(
        tenant_id=project_id,
        instance_id=instance['id'],

@@ -143,7 +143,7 @@ def delete_instance(instance_id):
    :code 404 Not Found: If instance does not exist
    """
    data = jsonutils.loads(flask.request.data)
    logging.info("Deleting instance with id %s with data %s", instance_id, data)
    LOG.info("Deleting instance with id %s with data %s", instance_id, data)
    controller.delete_instance(
        instance_id=instance_id,
        delete_date=data['date']

@@ -167,7 +167,7 @@ def resize_instance(instance_id):
    :code 404 Not Found: If instance does not exist
    """
    instance = jsonutils.loads(flask.request.data)
    logging.info("Resizing instance with id %s with data %s", instance_id, instance)
    LOG.info("Resizing instance with id %s with data %s", instance_id, instance)
    controller.resize_instance(
        instance_id=instance_id,
        resize_date=instance['date'],

@@ -194,7 +194,7 @@ def rebuild_instance(instance_id):
    :code 404 Not Found: If instance does not exist
    """
    instance = jsonutils.loads(flask.request.data)
    logging.info("Rebuilding instance with id %s with data %s", instance_id, instance)
    LOG.info("Rebuilding instance with id %s with data %s", instance_id, instance)
    controller.rebuild_instance(
        instance_id=instance_id,
        distro=instance['distro'],

@@ -221,7 +221,7 @@ def list_instances(project_id):
    :code 404 Not Found: If tenant does not exist.
    """
    start, end = get_period()
    logging.info("Listing instances between %s and %s", start, end)
    LOG.info("Listing instances between %s and %s", start, end)
    return controller.list_instances(project_id, start, end)

@@ -244,7 +244,7 @@ def create_volume(project_id):
    :code 404 Not Found: If tenant does not exist.
    """
    volume = jsonutils.loads(flask.request.data)
    logging.info("Creating volume for tenant %s with data %s", project_id, volume)
    LOG.info("Creating volume for tenant %s with data %s", project_id, volume)
    controller.create_volume(
        project_id=project_id,
        volume_id=volume['volume_id'],

@@ -272,7 +272,7 @@ def delete_volume(volume_id):
    :code 404 Not Found: If volume does not exist.
    """
    data = jsonutils.loads(flask.request.data)
    logging.info("Deleting volume with id %s with data %s", volume_id, data)
    LOG.info("Deleting volume with id %s with data %s", volume_id, data)
    controller.delete_volume(
        volume_id=volume_id,
        delete_date=data['date']

@@ -296,7 +296,7 @@ def resize_volume(volume_id):
    :code 404 Not Found: If volume does not exist.
    """
    volume = jsonutils.loads(flask.request.data)
    logging.info("Resizing volume with id %s with data %s", volume_id, volume)
    LOG.info("Resizing volume with id %s with data %s", volume_id, volume)
    controller.resize_volume(
        volume_id=volume_id,
        size=volume['size'],

@@ -321,7 +321,7 @@ def attach_volume(volume_id):
    :code 404 Not Found: If volume does not exist.
    """
    volume = jsonutils.loads(flask.request.data)
    logging.info("Attaching volume with id %s with data %s", volume_id, volume)
    LOG.info("Attaching volume with id %s with data %s", volume_id, volume)
    controller.attach_volume(
        volume_id=volume_id,
        date=volume['date'],

@@ -346,7 +346,7 @@ def detach_volume(volume_id):
    :code 404 Not Found: If volume does not exist.
    """
    volume = jsonutils.loads(flask.request.data)
    logging.info("Detaching volume with id %s with data %s", volume_id, volume)
    LOG.info("Detaching volume with id %s with data %s", volume_id, volume)
    controller.detach_volume(
        volume_id=volume_id,
        date=volume['date'],

@@ -371,7 +371,7 @@ def list_volumes(project_id):
    :code 404 Not Found: If tenant does not exist.
    """
    start, end = get_period()
    logging.info("Listing volumes between %s and %s", start, end)
    LOG.info("Listing volumes between %s and %s", start, end)
    return controller.list_volumes(project_id, start, end)

@@ -390,7 +390,7 @@ def list_entity(project_id):
    :code 404 Not Found: If tenant does not exist.
    """
    start, end = get_period()
    logging.info("Listing entities between %s and %s", start, end)
    LOG.info("Listing entities between %s and %s", start, end)
    return controller.list_entities(project_id, start, end)

@@ -409,7 +409,7 @@ def update_instance_entity(instance_id):
    :code 404 Not Found: If instance does not exist.
    """
    data = jsonutils.loads(flask.request.data)
    logging.info("Updating instance entity with id %s with data %s", instance_id, data)
    LOG.info("Updating instance entity with id %s with data %s", instance_id, data)
    if 'start' in flask.request.args:
        start, end = get_period()
        result = controller.update_inactive_entity(instance_id=instance_id, start=start, end=end, **data)

@@ -428,7 +428,7 @@ def entity_exists(entity_id):
    :code 200 OK: if the entity exists
    :code 404 Not Found: if the entity does not exist
    """
    logging.info("Does entity with id %s exists", entity_id)
    LOG.info("Does entity with id %s exists", entity_id)
    response = flask.Response('', 404)
    if controller.entity_exists(entity_id=entity_id):
        response = flask.Response('', 200)

@@ -457,7 +457,7 @@ def list_volume_types():

    :code 200 OK: Volume types exist
    """
    logging.info("Listing volumes types")
    LOG.info("Listing volumes types")
    return controller.list_volume_types()

@@ -473,7 +473,7 @@ def get_volume_type(type_id):
    :code 400 Bad Request: If request data has an invalid or missing field
    :code 404 Not Found: If the volume type does not exist
    """
    logging.info("Get volumes type for id %s", type_id)
    LOG.info("Get volumes type for id %s", type_id)
    return controller.get_volume_type(type_id)

@@ -490,7 +490,7 @@ def create_volume_type():
    :code 400 Bad Request: If request data has an invalid or missing field
    """
    volume_type = jsonutils.loads(flask.request.data)
    logging.info("Creating volume type with data '%s'", volume_type)
    LOG.info("Creating volume type with data '%s'", volume_type)
    controller.create_volume_type(
        volume_type_id=volume_type['type_id'],
        volume_type_name=volume_type['type_name']

@@ -509,7 +509,7 @@ def delete_volume_type(type_id):
    :code 202 Accepted: Volume successfully deleted
    :code 404 Not Found: If volume type does not exist.
    """
    logging.info("Deleting volume type with id '%s'", type_id)
    LOG.info("Deleting volume type with id '%s'", type_id)
    controller.delete_volume_type(type_id)
    return flask.Response(status=202)
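All of the routes.py changes above follow the same oslo.log conversion pattern: a module-level LOG from oslo_log replaces the stdlib logging module, and str.format() calls give way to lazy %-style arguments that are only interpolated when the level is enabled. A minimal standalone sketch of that setup (the 'almanach' domain matches the DOMAIN constant in opts.py further down):

    import sys

    from oslo_config import cfg
    from oslo_log import log

    CONF = cfg.CONF
    log.register_options(CONF)   # adds --debug, --log-file, and friends
    CONF(sys.argv[1:])
    log.setup(CONF, 'almanach')

    LOG = log.getLogger(__name__)
    # Arguments are interpolated lazily, only if INFO is enabled.
    LOG.info('Listing instances between %s and %s', '2016-01-01', '2016-02-01')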
@@ -1,52 +0,0 @@
# Copyright 2016 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import logging
import logging.config as logging_config
import sys

from almanach import api
from almanach import collector
from almanach import config


def run():
    parser = argparse.ArgumentParser()
    parser.add_argument("service", help="Service to execute: 'api' or 'collector'", choices=["api", "collector"])
    parser.add_argument("config_file", help="Config file path")
    parser.add_argument("--logging", help="Logger configuration")
    parser.add_argument("--port", help="API HTTP port (default is 8000)", default=8000)
    parser.add_argument("--host", help="API hostname to listen on (default is 127.0.0.1)", default="127.0.0.1")
    args = parser.parse_args()

    config.read(args.config_file)

    if args.logging:
        print("Loading logger configuration from {0}".format(args.logging))
        logging_config.fileConfig(args.logging, disable_existing_loggers=False)
    else:
        logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
        logging.debug("Logging to stdout")

    if args.service == "api":
        almanach_api = api.AlmanachApi()
        almanach_api.run(host=args.host, port=args.port)
    else:
        almanach_collector = collector.AlmanachCollector()
        almanach_collector.run()


if __name__ == "__main__":
    run()
@@ -1,36 +0,0 @@
# Copyright 2016 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging

import kombu

from almanach.adapters import bus_adapter
from almanach.adapters import database_adapter
from almanach.adapters import retry_adapter
from almanach import config
from almanach.core import controller


class AlmanachCollector(object):
    def __init__(self):
        self._controller = controller.Controller(database_adapter.DatabaseAdapter())
        _connection = kombu.Connection(config.rabbitmq_url(), heartbeat=540)
        retry_adapter_instance = retry_adapter.RetryAdapter(_connection)
        self._busAdapter = bus_adapter.BusAdapter(self._controller, _connection,
                                                  retry_adapter_instance)

    def run(self):
        logging.info("Listening for incoming events")
        self._busAdapter.run()
@@ -13,20 +13,22 @@
# limitations under the License.

import kombu
import logging
import six

from kombu import mixins
from oslo_log import log
from oslo_serialization import jsonutils

from almanach.adapters import instance_bus_adapter
from almanach.adapters import volume_bus_adapter
from almanach import config
from almanach.collector.handlers import instance_handler
from almanach.collector.handlers import volume_handler

LOG = log.getLogger(__name__)


class BusAdapter(mixins.ConsumerMixin):
    def __init__(self, controller, connection, retry_adapter):
    def __init__(self, config, controller, connection, retry_adapter):
        super(BusAdapter, self).__init__()
        self.config = config
        self.controller = controller
        self.connection = connection
        self.retry_adapter = retry_adapter

@@ -35,8 +37,8 @@ class BusAdapter(mixins.ConsumerMixin):
        try:
            self._process_notification(notification)
        except Exception as e:
            logging.warning("Sending notification to retry letter exchange {0}".format(jsonutils.dumps(notification)))
            logging.exception(e)
            LOG.warning('Sending notification to retry letter exchange %s', jsonutils.dumps(notification))
            LOG.exception(e)
            self.retry_adapter.publish_to_dead_letter(message)
        message.ack()

@@ -44,13 +46,14 @@
        if isinstance(notification, six.string_types):
            notification = jsonutils.loads(notification)

        event_type = notification.get("event_type")
        logging.info("Received event: '{0}'".format(event_type))
        instance_bus_adapter.InstanceBusAdapter(self.controller).handle_events(event_type, notification)
        volume_bus_adapter.VolumeBusAdapter(self.controller).handle_events(event_type, notification)
        event_type = notification.get('event_type')
        LOG.info('Received event: %s', event_type)

        instance_handler.InstanceHandler(self.controller).handle_events(event_type, notification)
        volume_handler.VolumeHandler(self.controller).handle_events(event_type, notification)

    def get_consumers(self, consumer, channel):
        queue = kombu.Queue(config.rabbitmq_queue(), routing_key=config.rabbitmq_routing_key())
        queue = kombu.Queue(self.config.collector.queue, routing_key=self.config.collector.routing_key)
        return [consumer(
            [queue],
            callbacks=[self.on_message],
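BusAdapter builds on kombu's ConsumerMixin, which only requires a connection attribute and a get_consumers() implementation; run() then drives the consume loop. A stripped-down sketch of the same shape, with placeholder queue and broker names rather than Almanach's configured values:

    import kombu
    from kombu import mixins

    class EchoConsumer(mixins.ConsumerMixin):
        def __init__(self, connection):
            self.connection = connection  # required by ConsumerMixin

        def get_consumers(self, Consumer, channel):
            queue = kombu.Queue('demo.info', routing_key='demo.info')
            return [Consumer([queue], callbacks=[self.on_message])]

        def on_message(self, body, message):
            print('received: %s' % body)
            message.ack()

    # EchoConsumer(kombu.Connection('amqp://guest:guest@localhost:5672//')).run()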
0  almanach/collector/handlers/__init__.py  Normal file
@@ -13,7 +13,7 @@
# limitations under the License.


class InstanceBusAdapter(object):
class InstanceHandler(object):
    def __init__(self, controller):
        self.controller = controller
@@ -13,7 +13,7 @@
# limitations under the License.


class VolumeBusAdapter(object):
class VolumeHandler(object):
    def __init__(self, controller):
        self.controller = controller
53  almanach/collector/main.py  Normal file
@@ -0,0 +1,53 @@
# Copyright 2016 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import kombu
from oslo_log import log
import sys

from almanach.collector import bus_adapter
from almanach.collector import retry_adapter
from almanach.core import controller
from almanach.core import opts
from almanach.storage import database_adapter

LOG = log.getLogger(__name__)


def main():
    try:
        opts.CONF(sys.argv[1:])
        config = opts.CONF

        storage = database_adapter.DatabaseAdapter(config)
        storage.connect()

        application_controller = controller.Controller(config, storage)
        connection = kombu.Connection(hostname=config.collector.rabbit_host,
                                      port=config.collector.rabbit_port,
                                      userid=config.collector.rabbit_username,
                                      password=config.collector.rabbit_password,
                                      heartbeat=540)
        retry_listener = retry_adapter.RetryAdapter(config, connection)
        bus_listener = bus_adapter.BusAdapter(config, application_controller,
                                              connection, retry_listener)

        LOG.info('Listening for incoming events')
        bus_listener.run()
    except Exception as e:
        LOG.exception(e)
        sys.exit(100)


if __name__ == '__main__':
    main()
@@ -12,61 +12,70 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import logging

import kombu
from oslo_serialization import jsonutils

from almanach import config
from oslo_log import log

LOG = log.getLogger(__name__)


class RetryAdapter(object):
    def __init__(self, connection):
    def __init__(self, config, connection):
        self.config = config
        self.connection = connection

        retry_exchange = self._configure_retry_exchanges(self.connection)
        dead_exchange = self._configure_dead_exchange(self.connection)

        self._retry_producer = kombu.Producer(self.connection, exchange=retry_exchange)
        self._dead_producer = kombu.Producer(self.connection, exchange=dead_exchange)

    def publish_to_dead_letter(self, message):
        death_count = self._rejected_count(message)
        logging.info("Message has been dead {0} times".format(death_count))
        if death_count < config.rabbitmq_retry():
            logging.info("Publishing to retry queue")
        LOG.info('Message die %d times', death_count)

        if death_count < self.config.collector.max_retries:
            LOG.info('Publishing message to retry queue')
            self._publish_message(self._retry_producer, message)
            logging.info("Published to retry queue")
        else:
            logging.info("Publishing to dead letter queue")
            LOG.info('Publishing message to dead letter queue')
            self._publish_message(self._dead_producer, message)
            logging.info("Publishing notification to dead letter queue: {0}".format(jsonutils.dumps(message.body)))

    def _configure_retry_exchanges(self, connection):
        def declare_queues():
            channel = connection.channel()
            almanach_exchange = kombu.Exchange(name=config.rabbitmq_retry_return_exchange(),
                                               type='direct',
                                               channel=channel)
            retry_exchange = kombu.Exchange(name=config.rabbitmq_retry_exchange(),
                                            type='direct',
                                            channel=channel)
            retry_queue = kombu.Queue(name=config.rabbitmq_retry_queue(),
                                      exchange=retry_exchange,
                                      routing_key=config.rabbitmq_routing_key(),
                                      queue_arguments=self._get_queue_arguments(),
                                      channel=channel)
            almanach_queue = kombu.Queue(name=config.rabbitmq_queue(),
                                         exchange=almanach_exchange,
                                         durable=False,
                                         routing_key=config.rabbitmq_routing_key(),
                                         channel=channel)

            retry_exchange = kombu.Exchange(
                name=self.config.collector.retry_exchange,
                type='direct',
                channel=channel)

            retry_queue = kombu.Queue(
                name=self.config.collector.retry_queue,
                exchange=retry_exchange,
                routing_key=self.config.collector.routing_key,
                queue_arguments=self._get_queue_arguments(),
                channel=channel)

            main_exchange = kombu.Exchange(
                name=self.config.collector.retry_return_exchange,
                type='direct',
                channel=channel)

            main_queue = kombu.Queue(
                name=self.config.collector.queue,
                exchange=main_exchange,
                durable=False,
                routing_key=self.config.collector.routing_key,
                channel=channel)

            retry_queue.declare()
            almanach_queue.declare()
            main_queue.declare()

            return retry_exchange

        def error_callback(exception, interval):
            logging.error('Failed to declare queues and exchanges, retrying in %d seconds. %r' % (interval, exception))
            LOG.error('Failed to declare queues and exchanges, retrying in %d seconds. %r', interval, exception)

        declare_queues = connection.ensure(connection, declare_queues, errback=error_callback,
                                           interval_start=0, interval_step=5, interval_max=30)

@@ -75,33 +84,35 @@ class RetryAdapter(object):
    def _configure_dead_exchange(self, connection):
        def declare_dead_queue():
            channel = connection.channel()
            dead_exchange = kombu.Exchange(name=config.rabbitmq_dead_exchange(),
                                           type='direct',
                                           channel=channel)
            dead_queue = kombu.Queue(name=config.rabbitmq_dead_queue(),
                                     routing_key=config.rabbitmq_routing_key(),
                                     exchange=dead_exchange,
                                     channel=channel)
            dead_exchange = kombu.Exchange(
                name=self.config.collector.dead_exchange,
                type='direct',
                channel=channel)

            dead_queue = kombu.Queue(
                name=self.config.collector.dead_queue,
                routing_key=self.config.collector.routing_key,
                exchange=dead_exchange,
                channel=channel)

            dead_queue.declare()

            return dead_exchange

        def error_callback(exception, interval):
            logging.error('Failed to declare dead queue and exchange, retrying in %d seconds. %r' %
                          (interval, exception))
            LOG.error('Failed to declare dead queue and exchange, retrying in %d seconds. %r',
                      interval, exception)

        declare_dead_queue = connection.ensure(connection, declare_dead_queue, errback=error_callback,
                                               interval_start=0, interval_step=5, interval_max=30)
        return declare_dead_queue()

    def _get_queue_arguments(self):
        return {"x-message-ttl": self._get_time_to_live_in_seconds(),
                "x-dead-letter-exchange": config.rabbitmq_retry_return_exchange(),
                "x-dead-letter-routing-key": config.rabbitmq_routing_key()}

    def _get_time_to_live_in_seconds(self):
        return config.rabbitmq_time_to_live() * 1000
        return {
            "x-message-ttl": self.config.collector.retry_ttl * 1000,
            "x-dead-letter-exchange": self.config.collector.retry_return_exchange,
            "x-dead-letter-routing-key": self.config.collector.routing_key,
        }

    def _rejected_count(self, message):
        if 'x-death' in message.headers:

@@ -118,5 +129,5 @@ class RetryAdapter(object):
                              content_encoding=message.content_encoding)

    def _error_callback(self, exception, interval):
        logging.error('Failed to publish message to dead letter queue, retrying in %d seconds. %r'
                      % (interval, exception))
        LOG.error('Failed to publish message to dead letter queue, retrying in %d seconds. %r',
                  interval, exception)
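The retry flow above is the standard RabbitMQ dead-letter pattern: messages published to the retry queue carry a per-message TTL and, on expiry, are dead-lettered back to the main exchange for another attempt; only after max_retries rejections do they land in the dead queue. Note the unit conversion: retry_ttl is configured in seconds while x-message-ttl expects milliseconds, hence the * 1000. A distilled sketch of just the queue arguments, with placeholder names:

    import kombu

    RETRY_TTL_SECONDS = 10  # matches the retry_ttl default declared in opts.py

    retry_queue = kombu.Queue(
        'demo.retry',
        kombu.Exchange('demo.retry', type='direct'),
        routing_key='demo.info',
        queue_arguments={
            # The broker expects milliseconds; the config value is in seconds.
            'x-message-ttl': RETRY_TTL_SECONDS * 1000,
            # Where expired messages are re-published (back to the main exchange).
            'x-dead-letter-exchange': 'demo',
            'x-dead-letter-routing-key': 'demo.info',
        },
    )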
@@ -1,141 +0,0 @@
# Copyright 2016 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import os.path as os_path
import six

from almanach.core import exception

if six.PY2:
    from ConfigParser import RawConfigParser
else:
    from configparser import RawConfigParser

configuration = RawConfigParser()


def read(filename):
    if not os_path.isfile(filename):
        raise exception.AlmanachException("Config file '{0}' not found".format(filename))

    print("Loading configuration file {0}".format(filename))
    configuration.read(filename)


def get(section, option, default=None):
    value = os.environ.get(section + "_" + option.upper())

    if value:
        return value

    try:
        return configuration.get(section, option)
    except Exception:
        return default


def volume_existence_threshold():
    return int(get("ALMANACH", "volume_existence_threshold"))


def auth_strategy():
    return get("ALMANACH", "auth_strategy", "private_key")


def auth_private_key():
    return get("ALMANACH", "auth_token")


def keystone_username():
    return get("KEYSTONE", "username")


def keystone_password():
    return get("KEYSTONE", "password")


def keystone_url():
    return get("KEYSTONE", "auth_url")


def keystone_tenant_name():
    return get("KEYSTONE", "tenant_name")


def device_metadata_whitelist():
    return get("ALMANACH", "device_metadata_whitelist").split(',')


def mongodb_url():
    return get("MONGODB", "url", default=None)


def mongodb_database():
    return get("MONGODB", "database", default="almanach")


def mongodb_indexes():
    return get('MONGODB', 'indexes').split(',')


def rabbitmq_url():
    return get("RABBITMQ", "url", default=None)


def rabbitmq_queue():
    return get("RABBITMQ", "queue", default=None)


def rabbitmq_exchange():
    return get("RABBITMQ", "exchange", default=None)


def rabbitmq_routing_key():
    return get("RABBITMQ", "routing.key", default=None)


def rabbitmq_retry():
    return int(get("RABBITMQ", "retry.maximum", default=None))


def rabbitmq_retry_exchange():
    return get("RABBITMQ", "retry.exchange", default=None)


def rabbitmq_retry_return_exchange():
    return get("RABBITMQ", "retry.return.exchange", default=None)


def rabbitmq_retry_queue():
    return get("RABBITMQ", "retry.queue", default=None)


def rabbitmq_dead_queue():
    return get("RABBITMQ", "dead.queue", default=None)


def rabbitmq_dead_exchange():
    return get("RABBITMQ", "dead.exchange", default=None)


def rabbitmq_time_to_live():
    return int(get("RABBITMQ", "retry.time.to.live", default=None))


def _read_file(filename):
    f = open(filename, "r")
    content = f.read()
    f.close()
    return content
@@ -14,23 +14,22 @@

from datetime import timedelta
from dateutil import parser as date_parser
import logging
from oslo_log import log
from pkg_resources import get_distribution

import pytz

from almanach import config
from almanach.core import exception
from almanach.core import model
from almanach.validators import instance_validator

LOG = log.getLogger(__name__)


class Controller(object):
    def __init__(self, database_adapter):
    def __init__(self, config, database_adapter):
        self.database_adapter = database_adapter
        self.metadata_whitelist = config.device_metadata_whitelist()

        self.volume_existence_threshold = timedelta(0, config.volume_existence_threshold())
        self.metadata_whitelist = config.resources.device_metadata_whitelist
        self.volume_existence_threshold = timedelta(0, config.resources.volume_existence_threshold)

    def get_application_info(self):
        return {

@@ -41,10 +40,11 @@ class Controller(object):

    def create_instance(self, instance_id, tenant_id, create_date, flavor, os_type, distro, version, name, metadata):
        create_date = self._validate_and_parse_date(create_date)
        logging.info("instance %s created in project %s (flavor %s; distro %s %s %s) on %s" % (
            instance_id, tenant_id, flavor, os_type, distro, version, create_date))
        LOG.info("instance %s created in project %s (flavor %s; distro %s %s %s) on %s",
                 instance_id, tenant_id, flavor, os_type, distro, version, create_date)

        if self._fresher_entity_exists(instance_id, create_date):
            logging.warning("instance %s already exists with a more recent entry", instance_id)
            LOG.warning("instance %s already exists with a more recent entry", instance_id)
            return

        filtered_metadata = self._filter_metadata_with_whitelist(metadata)

@@ -61,12 +61,12 @@ class Controller(object):
                "InstanceId: {0} Not Found".format(instance_id))

        delete_date = self._validate_and_parse_date(delete_date)
        logging.info("instance %s deleted on %s" % (instance_id, delete_date))
        LOG.info("instance %s deleted on %s", instance_id, delete_date)
        self.database_adapter.close_active_entity(instance_id, delete_date)

    def resize_instance(self, instance_id, flavor, resize_date):
        resize_date = self._validate_and_parse_date(resize_date)
        logging.info("instance %s resized to flavor %s on %s" % (instance_id, flavor, resize_date))
        LOG.info("instance %s resized to flavor %s on %s", instance_id, flavor, resize_date)
        try:
            instance = self.database_adapter.get_active_entity(instance_id)
            if flavor != instance.flavor:

@@ -77,15 +77,15 @@ class Controller(object):
            instance.last_event = resize_date
            self.database_adapter.insert_entity(instance)
        except KeyError as e:
            logging.error("Trying to resize an instance with id '%s' not in the database yet." % instance_id)
            LOG.error("Trying to resize an instance with id '%s' not in the database yet.", instance_id)
            raise e

    def rebuild_instance(self, instance_id, distro, version, os_type, rebuild_date):
        rebuild_date = self._validate_and_parse_date(rebuild_date)
        instance = self.database_adapter.get_active_entity(instance_id)
        logging.info("instance %s rebuilded in project %s to os %s %s %s on %s" % (instance_id, instance.project_id,
                                                                                   os_type, distro, version,
                                                                                   rebuild_date))
        LOG.info("instance %s rebuilded in project %s to os %s %s %s on %s",
                 instance_id, instance.project_id, os_type, distro, version, rebuild_date)

        if instance.os.distro != distro or instance.os.version != version:
            self.database_adapter.close_active_entity(instance_id, rebuild_date)

@@ -119,7 +119,7 @@ class Controller(object):
            self.database_adapter.update_active_entity(instance)
            return instance
        except KeyError as e:
            logging.error("Instance '{0}' is not in the database yet.".format(instance_id))
            LOG.error("Instance %s is not in the database yet.", instance_id)
            raise e

    def entity_exists(self, entity_id):

@@ -132,16 +132,16 @@ class Controller(object):

    def attach_volume(self, volume_id, date, attachments):
        date = self._validate_and_parse_date(date)
        logging.info("volume %s attached to %s on %s" % (volume_id, attachments, date))
        LOG.info("Volume %s attached to %s on %s", volume_id, attachments, date)
        try:
            self._volume_attach_instance(volume_id, date, attachments)
        except KeyError as e:
            logging.error("Trying to attach a volume with id '%s' not in the database yet." % volume_id)
            LOG.error("Trying to attach a volume with id '%s' not in the database yet.", volume_id)
            raise e

    def create_volume(self, volume_id, project_id, start, volume_type, size, volume_name, attached_to=None):
        start = self._validate_and_parse_date(start)
        logging.info("volume %s created in project %s to size %s on %s" % (volume_id, project_id, size, start))
        LOG.info("volume %s created in project %s to size %s on %s", volume_id, project_id, size, start)
        if self._fresher_entity_exists(volume_id, start):
            return

@@ -153,29 +153,30 @@ class Controller(object):

    def detach_volume(self, volume_id, date, attachments):
        date = self._validate_and_parse_date(date)
        logging.info("volume %s detached on %s" % (volume_id, date))
        LOG.info("volume %s detached on %s", volume_id, date)
        try:
            self._volume_detach_instance(volume_id, date, attachments)
        except KeyError as e:
            logging.error("Trying to detach a volume with id '%s' not in the database yet." % volume_id)
            LOG.error("Trying to detach a volume with id '%s' not in the database yet.", volume_id)
            raise e

    def rename_volume(self, volume_id, volume_name):
        try:
            volume = self.database_adapter.get_active_entity(volume_id)
            if volume and volume.name != volume_name:
                logging.info("volume %s renamed from %s to %s" % (volume_id, volume.name, volume_name))
                LOG.info("volume %s renamed from %s to %s", volume_id, volume.name, volume_name)
                volume.name = volume_name
                self.database_adapter.update_active_entity(volume)
        except KeyError:
            logging.error("Trying to update a volume with id '%s' not in the database yet." % volume_id)
            LOG.error("Trying to update a volume with id '%s' not in the database yet.", volume_id)

    def resize_volume(self, volume_id, size, update_date):
        update_date = self._validate_and_parse_date(update_date)
        try:
            volume = self.database_adapter.get_active_entity(volume_id)
            logging.info("volume %s updated in project %s to size %s on %s" % (volume_id, volume.project_id, size,
                                                                               update_date))
            LOG.info("volume %s updated in project %s to size %s on %s",
                     volume_id, volume.project_id, size, update_date)

            self.database_adapter.close_active_entity(volume_id, update_date)

            volume.size = size

@@ -184,12 +185,12 @@ class Controller(object):
            volume.last_event = update_date
            self.database_adapter.insert_entity(volume)
        except KeyError as e:
            logging.error("Trying to update a volume with id '%s' not in the database yet." % volume_id)
            LOG.error("Trying to update a volume with id '%s' not in the database yet.", volume_id)
            raise e

    def delete_volume(self, volume_id, delete_date):
        delete_date = self._localize_date(self._validate_and_parse_date(delete_date))
        logging.info("volume %s deleted on %s" % (volume_id, delete_date))
        LOG.info("volume %s deleted on %s", volume_id, delete_date)
        try:
            if self.database_adapter.count_entity_entries(volume_id) > 1:
                volume = self.database_adapter.get_active_entity(volume_id)

@@ -198,11 +199,11 @@ class Controller(object):
                    return
            self.database_adapter.close_active_entity(volume_id, delete_date)
        except KeyError as e:
            logging.error("Trying to delete a volume with id '%s' not in the database yet." % volume_id)
            LOG.error("Trying to delete a volume with id '%s' not in the database yet.", volume_id)
            raise e

    def create_volume_type(self, volume_type_id, volume_type_name):
        logging.info("volume type %s with name %s created" % (volume_type_id, volume_type_name))
        LOG.info("volume type %s with name %s created", volume_type_id, volume_type_name)
        volume_type = model.VolumeType(volume_type_id, volume_type_name)
        self.database_adapter.insert_volume_type(volume_type)

@@ -245,7 +246,7 @@ class Controller(object):
    def _update_instance_object(self, instance, **kwargs):
        for key, value in self._transform_attribute_to_match_entity_attribute(**kwargs).items():
            setattr(instance, key, value)
            logging.info("Updating entity for instance '{0}' with {1}={2}".format(instance.entity_id, key, value))
            LOG.info("Updating entity for instance '%s' with %s=%s", instance.entity_id, key, value)

    def _transform_attribute_to_match_entity_attribute(self, **kwargs):
        entity = {}

@@ -297,7 +298,7 @@ class Controller(object):
        try:
            date = date_parser.parse(date)
            return self._localize_date(date)
        except TypeError:
        except (TypeError, ValueError):
            raise exception.DateFormatException()

    @staticmethod
128  almanach/core/opts.py  Normal file
@@ -0,0 +1,128 @@
# Copyright 2016 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from oslo_config import cfg
from oslo_log import log as logging

LOG = logging.getLogger(__name__)
CONF = cfg.CONF
DOMAIN = 'almanach'

database_opts = [
    cfg.StrOpt('driver',
               default='mongodb',
               help='Database driver'),
    cfg.StrOpt('connection_url',
               default='mongodb://almanach:almanach@localhost:27017/almanach',
               help='Database connection URL'),
]

api_opts = [
    cfg.IPOpt('bind_ip',
              default='127.0.0.1',
              help='IP address to listen on'),
    cfg.PortOpt('bind_port',
                default=8000,
                help='TCP port number to listen on'),
]

collector_opts = [
    cfg.HostnameOpt('rabbit_host',
                    default='localhost',
                    help='RabbitMQ Hostname'),
    cfg.PortOpt('rabbit_port',
                default=5672,
                help='RabbitMQ TCP port'),
    cfg.StrOpt('rabbit_username',
               help='RabbitMQ Username'),
    cfg.StrOpt('rabbit_password',
               help='RabbitMQ Password'),
    cfg.StrOpt('queue',
               default='almanach.info',
               help='Default queue name'),
    cfg.StrOpt('exchange',
               default='almanach.info',
               help='Default exchange name'),
    cfg.StrOpt('routing_key',
               default='almanach.info',
               help='Default queue routing key'),
    cfg.StrOpt('retry_queue',
               default='almanach.retry',
               help='Retry queue name'),
    cfg.StrOpt('retry_exchange',
               default='almanach.retry',
               help='Retry exchange name'),
    cfg.StrOpt('retry_return_exchange',
               default='almanach',
               help='Retry return exchange name'),
    cfg.IntOpt('retry_ttl',
               default=10,
               help='Time to live value of messages sent on the retry queue'),
    cfg.IntOpt('max_retries',
               default=3,
               help='Maximal number of message retries'),
    cfg.StrOpt('dead_queue',
               default='almanach.dead',
               help='Dead queue name'),
    cfg.StrOpt('dead_exchange',
               default='almanach.dead',
               help='Dead exchange name'),
]

auth_opts = [
    cfg.StrOpt('strategy',
               default='private_key',
               help='Authentication driver for the API'),
    cfg.StrOpt('private_key',
               default='secret',
               help='Private key for private key authentication'),
    cfg.StrOpt('keystone_username',
               help='Keystone service username'),
    cfg.StrOpt('keystone_password',
               help='Keystone service password'),
    cfg.StrOpt('keystone_tenant',
               help='Keystone service tenant'),
    cfg.StrOpt('keystone_url',
               default='http://keystone_url:5000/v2.0',
               help='Keystone URL'),
]

resource_opts = [
    cfg.IntOpt('volume_existence_threshold',
               default=60,
               help='Volume existence threshold'),
    cfg.ListOpt('device_metadata_whitelist',
                default=[],
                deprecated_for_removal=True,
                help='Metadata to include in entity'),
]

CONF.register_opts(database_opts, group='database')
CONF.register_opts(api_opts, group='api')
CONF.register_opts(collector_opts, group='collector')
CONF.register_opts(auth_opts, group='auth')
CONF.register_opts(resource_opts, group='resources')

logging.register_options(CONF)
logging.setup(CONF, DOMAIN)


def list_opts():
    return [
        ('database', database_opts),
        ('api', api_opts),
        ('collector', collector_opts),
        ('api.auth', auth_opts),
        ('resources', resource_opts),
    ]
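list_opts() is the hook that tox -e genconfig would feed to oslo-config-generator to produce a sample configuration file; the generator wiring itself is not part of this diff. Assuming the module path above, the registered options can be inspected directly, which gives a rough preview of what genconfig would emit:

    from almanach.core import opts

    # Walk the same (group, options) structure oslo-config-generator consumes.
    for group, options in opts.list_opts():
        print('[%s]' % group)
        for opt in options:
            print('# %s' % opt.help)
            print('#%s = %s' % (opt.name, opt.default))
        print()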
@@ -1,29 +0,0 @@
[ALMANACH]
auth_token=secret
auth_strategy=private_key
volume_existence_threshold=60
device_metadata_whitelist=metering.billing_mode

[MONGODB]
url=mongodb://almanach:almanach@localhost:27017/almanach
database=almanach
indexes=project_id,start,end

[RABBITMQ]
url=amqp://openstack:openstack@localhost:5672
queue=almanach.info
exchange=almanach.info
routing.key=almanach.info
retry.time.to.live=10
retry.exchange=almanach.retry
retry.maximum=3
retry.queue=almanach.retry
retry.return.exchange=almanach
dead.queue=almanach.dead
dead.exchange=almanach.dead

[KEYSTONE]
username=my_service_username
password=my_service_password
tenant_name=my_service_tenant_name
auth_url=http://keystone_url:5000/v2.0
@@ -1,26 +0,0 @@
[loggers]
keys=root

[logger_root]
handlers=consoleHandler,fileHandler
level=DEBUG

[handlers]
keys=consoleHandler,fileHandler

[handler_consoleHandler]
class=StreamHandler
formatter=defaultFormatter
args=(sys.stdout,)

[handler_fileHandler]
class=handlers.WatchedFileHandler
args=('/var/log/almanach/almanach.log','a')
formatter=defaultFormatter

[formatters]
keys=defaultFormatter

[formatter_defaultFormatter]
format=%(asctime)s [%(process)d] [%(levelname)s] [%(module)s] %(message)s
datefmt=%Y-%m-%d %H:%M:%S
@@ -1,13 +0,0 @@
[ALMANACH]
device_metadata_whitelist=a_metadata.to_filter

[MONGODB]
url=localhost:27017,localhost:37017
database=almanach_test
indexes="project_id,start,end"

[RABBITMQ]
url=amqp://guest:guest@localhost:5672
queue=almanach.test
exchange=almanach.test
routing.key=almanach.test
0
almanach/storage/__init__.py
Normal file
@@ -12,76 +12,44 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import logging

from oslo_log import log
import pymongo
from pymongo import errors

from almanach import config
from almanach.core import exception
from almanach.core import model
from almanach.core.model import build_entity_from_dict


def database(function):
    def _connection(self, *args, **kwargs):
        try:
            if not self.db:
                connection = pymongo.MongoClient(config.mongodb_url(), tz_aware=True)
                self.db = connection[config.mongodb_database()]
                ensureindex(self.db)
            return function(self, *args, **kwargs)
        except KeyError as e:
            raise e
        except exception.VolumeTypeNotFoundException as e:
            raise e
        except NotImplementedError as e:
            raise e
        except errors.ConfigurationError as e:
            logging.exception("DB Connection, make sure username and password doesn't contain the following :+&/ "
                              "character")
            raise e
        except Exception as e:
            logging.exception(e)
            raise e

    return _connection


def ensureindex(db):
    db.entity.ensure_index(
        [(index, pymongo.ASCENDING)
         for index in config.mongodb_indexes()])
LOG = log.getLogger(__name__)


class DatabaseAdapter(object):
    def __init__(self):
        self.db = None
    def __init__(self, config, db=None):
        self.db = db
        self.config = config

    def connect(self):
        connection = pymongo.MongoClient(self.config.database.connection_url, tz_aware=True)
        connection_options = pymongo.uri_parser.parse_uri(self.config.database.connection_url)
        self.db = connection[connection_options['database']]

    @database
    def get_active_entity(self, entity_id):
        entity = self._get_one_entity_from_db({"entity_id": entity_id, "end": None})
        if not entity:
            raise KeyError("Unable to find entity id %s" % entity_id)
        return build_entity_from_dict(entity)

    @database
    def count_entities(self):
        return self.db.entity.count()

    @database
    def count_active_entities(self):
        return self.db.entity.find({"end": None}).count()

    @database
    def count_entity_entries(self, entity_id):
        return self.db.entity.find({"entity_id": entity_id}).count()

    @database
    def has_active_entity(self, entity_id):
        return self.db.entity.find({"entity_id": entity_id, "end": None}).count() == 1

    @database
    def list_entities(self, project_id, start, end, entity_type=None):
        args = {"project_id": project_id, "start": {"$lte": end}, "$or": [{"end": None}, {"end": {"$gte": start}}]}
        if entity_type:
@@ -89,12 +57,10 @@ class DatabaseAdapter(object):
        entities = self._get_entities_from_db(args)
        return [build_entity_from_dict(entity) for entity in entities]

    @database
    def get_all_entities_by_id(self, entity_id):
        entities = self.db.entity.find({"entity_id": entity_id}, {"_id": 0})
        return [build_entity_from_dict(entity) for entity in entities]

    @database
    def list_entities_by_id(self, entity_id, start, end):
        entities = self.db.entity.find({"entity_id": entity_id,
                                        "start": {"$gte": start},
@@ -105,58 +71,45 @@
        }, {"_id": 0})
        return [build_entity_from_dict(entity) for entity in entities]

    @database
    def update_closed_entity(self, entity, data):
        self.db.entity.update({"entity_id": entity.entity_id, "start": entity.start, "end": entity.end},
                              {"$set": data})

    @database
    def insert_entity(self, entity):
        self._insert_entity(entity.as_dict())

    @database
    def insert_volume_type(self, volume_type):
        self.db.volume_type.insert(volume_type.__dict__)

    @database
    def get_volume_type(self, volume_type_id):
        volume_type = self.db.volume_type.find_one({"volume_type_id": volume_type_id})
        if not volume_type:
            logging.error("Trying to get a volume type not in the database.")
            raise exception.VolumeTypeNotFoundException(volume_type_id=volume_type_id)

        return model.VolumeType(volume_type_id=volume_type["volume_type_id"],
                                volume_type_name=volume_type["volume_type_name"])

    @database
    def delete_volume_type(self, volume_type_id):
        if volume_type_id is None:
            error = "Trying to delete all volume types which is not permitted."
            logging.error(error)
            raise exception.AlmanachException(error)
            raise exception.AlmanachException("Trying to delete all volume types which is not permitted.")
        returned_value = self.db.volume_type.remove({"volume_type_id": volume_type_id})
        if returned_value['n'] == 1:
            logging.info("Deleted volume type with id '%s' successfully." % volume_type_id)
            LOG.info("Deleted volume type with id '%s' successfully.", volume_type_id)
        else:
            error = "Volume type with id '%s' doesn't exist in the database." % volume_type_id
            logging.error(error)
            raise exception.AlmanachException(error)
            raise exception.AlmanachException(
                "Volume type with id {} doesn't exist in the database.".format(volume_type_id))

    @database
    def list_volume_types(self):
        volume_types = self.db.volume_type.find()
        return [model.VolumeType(volume_type_id=volume_type["volume_type_id"],
                                 volume_type_name=volume_type["volume_type_name"]) for volume_type in volume_types]

    @database
    def close_active_entity(self, entity_id, end):
        self.db.entity.update({"entity_id": entity_id, "end": None}, {"$set": {"end": end, "last_event": end}})

    @database
    def update_active_entity(self, entity):
        self.db.entity.update({"entity_id": entity.entity_id, "end": None}, {"$set": entity.as_dict()})

    @database
    def delete_active_entity(self, entity_id):
        self.db.entity.remove({"entity_id": entity_id, "end": None})
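The rewritten adapter receives the oslo.config object directly instead of reading the old module-level config. A minimal usage sketch, assuming :code:`connection_url` points at a reachable MongoDB:

.. code:: python

    from almanach.core import opts
    from almanach.storage.database_adapter import DatabaseAdapter

    conf = opts.CONF
    conf(['--config-file', '/etc/almanach/almanach.conf'], project='almanach')

    adapter = DatabaseAdapter(conf)
    adapter.connect()  # parses conf.database.connection_url and selects the database
    print(adapter.count_entities())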
@@ -20,7 +20,7 @@ Almanach stores the utilization of OpenStack resources (instances and volumes) f
What is Almanach?
-----------------

The main purpose of this software is to bill customers based on their usage of the cloud infrastructure.
The main purpose of this software is to record the usage of the cloud resources of each tenant.

Almanach is composed of two parts:

@@ -32,41 +32,34 @@ Requirements

- OpenStack infrastructure installed (Nova, Cinder...)
- MongoDB
- Python 2.7
- Python 2.7, 3.4 or 3.5

Command line usage
------------------

Usage:
Generate config file with default values
----------------------------------------

.. code:: bash

    usage: almanach [-h] [--logging LOGGING] {api,collector} config_file
    tox -e genconfig


Command line usage
------------------

Start the API daemon:

.. code:: bash

    almanach api /path/to/almanach.cfg
    almanach-api --config-file /etc/almanach/almanach.conf


Start the collector:

.. code:: bash

    almanach collector /path/to/almanach.cfg
    almanach-collector --config-file /etc/almanach/almanach.conf


Custom logging configuration:

.. code:: bash

    almanach collector /path/to/almanach.cfg --logging /path/to/logging.cfg


The syntax of the logging configuration file is available in the official [Python documentation](https://docs.python.org/2/library/logging.config.html).

Authentication
--------------

@@ -95,8 +88,9 @@ In your config file, you have to define your private key in the field :code:`aut

::

    [ALMANACH]
    auth_token=my secret token
    [auth]
    strategy = private_key
    private_key = secret

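Once the private key is configured, a client passes it with each API request. An illustrative sketch with the :code:`requests` library; the :code:`X-Auth-Token` header name and the :code:`/info` endpoint are assumptions here, not confirmed by this change:

.. code:: python

    import requests

    response = requests.get(
        'http://localhost:8000/info',
        headers={'X-Auth-Token': 'secret'},  # the private_key from the [auth] section
    )
    print(response.status_code, response.text)
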
Keystone Authentication
@@ -107,43 +101,13 @@ To use this authentication backend you have to define the authentication strateg

::

    [ALMANACH]
    auth_strategy=keystone
    [auth]
    strategy = keystone
    keystone_username = my_service_username
    keystone_password = my_service_password
    keystone_tenant = my_service_tenant_name
    keystone_url = http://keystone_url:5000/v2.0

    [KEYSTONE]
    username=my_service_username
    password=my_service_password
    tenant_name=my_service_tenant_name
    auth_url=http://keystone_url:5000/v2.0


Environment variables
---------------------

You can override the configuration parameters by using environment variables:

.. code:: bash

    export RABBITMQ_URL="amqp://openstack:openstack@hostname:5672"
    almanach collector /path/to/almanach.cfg


Running Almanach with Docker
----------------------------

The current Docker configuration assumes that you already have RabbitMQ (mandatory for OpenStack) and MongoDB configured for Almanach.

.. code:: bash

    export RABBITMQ_URL="amqp://guest:guest@messaging:5672/"
    export MONGODB_URL="mongodb://almanach:almanach@database:27017/almanach"

    docker-compose build
    docker-compose up


The command :code:`docker-compose up` starts 2 containers: the collector and the API server.
The environment variables :code:`RABBITMQ_URL` and :code:`MONGODB_URL` are mandatory.

RabbitMQ configuration
----------------------
@@ -264,6 +228,6 @@ Almanach will process those events:
API documentation
-----------------

.. autoflask:: almanach.api:app
.. autoflask:: almanach.api.main:app
   :undoc-static:
   :include-empty-docstring:

@@ -1,46 +0,0 @@
version: '2'
services:
  api:
    build:
      context: .
      dockerfile: Dockerfile
    command: api
    depends_on:
      - messaging
      - database
    environment:
      MONGODB_URL: mongodb://database:27017/almanach
    ports:
      - "80:8000"
  collector:
    build:
      context: .
      dockerfile: Dockerfile
    command: collector
    depends_on:
      - database
      - messaging
    environment:
      MONGODB_URL: mongodb://database:27017/almanach
      RABBITMQ_URL: amqp://guest:guest@messaging:5672
  messaging:
    image: rabbitmq
    ports:
      - "5672:5672"
  database:
    image: mongo
    ports:
      - "27017:27017"
  test:
    build:
      context: .
      dockerfile: Dockerfile.integration-tests
      args:
        SKIP_TOX: "true"
    environment:
      TEST_CONTAINER: "true"
    depends_on:
      - api
      - collector
      - database
      - messaging
@@ -4,16 +4,38 @@ services:
    build:
      context: .
      dockerfile: Dockerfile
    command: api
    environment:
      MONGODB_URL: ${MONGODB_URL}
    command: almanach-api --config-file /etc/almanach/almanach.conf
    depends_on:
      - messaging
      - database
    ports:
      - "80:8000"
  collector:
    build:
      context: .
      dockerfile: Dockerfile
    command: collector
    command: almanach-collector --config-file /etc/almanach/almanach.conf
    depends_on:
      - database
      - messaging
  messaging:
    image: rabbitmq
    ports:
      - "5672:5672"
  database:
    image: mongo
    ports:
      - "27017:27017"
  test:
    build:
      context: .
      dockerfile: Dockerfile.integration-tests
      args:
        SKIP_TOX: "true"
    environment:
      MONGODB_URL: ${MONGODB_URL}
      RABBITMQ_URL: ${RABBITMQ_URL}
      TEST_CONTAINER: "true"
    depends_on:
      - api
      - collector
      - database
      - messaging

@@ -1,13 +0,0 @@
#!/usr/bin/env bash
set -e

echo "Entering the entrypoint"
if [ "$1" = 'api' ]; then
    echo "Starting the api"
    almanach api /etc/almanach.cfg --host 0.0.0.0
elif [ "$1" = 'collector' ]; then
    echo "Starting the collector"
    almanach collector /etc/almanach.cfg
fi

exec "$@"
118
etc/almanach/almanach.docker.conf
Normal file
@@ -0,0 +1,118 @@
[DEFAULT]


[api]

#
# From almanach
#

# IP address to listen on (IP address value)
bind_ip = 0.0.0.0

# TCP port number to listen on (port value)
# Minimum value: 0
# Maximum value: 65535
bind_port = 8000


[auth]

#
# From almanach
#

# Authentication driver for the API (string value)
#strategy = private_key

# Private key for private key authentication (string value)
#private_key = secret

# Keystone service username (string value)
#keystone_username = <None>

# Keystone service password (string value)
#keystone_password = <None>

# Keystone service tenant (string value)
#keystone_tenant = <None>

# Keystone URL (string value)
#keystone_url = http://keystone_url:5000/v2.0


[collector]

#
# From almanach
#

# RabbitMQ Hostname (hostname value)
rabbit_host = messaging

# RabbitMQ TCP port (port value)
# Minimum value: 0
# Maximum value: 65535
#rabbit_port = 5672

# RabbitMQ Username (string value)
rabbit_username = guest

# RabbitMQ Password (string value)
rabbit_password = guest

# Default queue name (string value)
#default_queue = almanach.info

# Default exchange name (string value)
#default_exchange = almanach.info

# Default queue routing key (string value)
#default_routing_key = almanach.info

# Retry queue name (string value)
#retry_queue = almanach.retry

# Retry exchange name (string value)
#retry_exchange = almanach.retry

# Time to live value of messages sent on the retry queue (integer
# value)
#retry_ttl = 10

# Maximal number of message retries (integer value)
#max_retries = 3

# Dead queue name (string value)
#dead_queue = almanach.dead

# Dead exchange name (string value)
#dead_exchange = almanach.dead


[database]

#
# From almanach
#

# Database driver (string value)
#driver = mongodb

# Database connection URL (string value)
connection_url = mongodb://database:27017/almanach


[resources]

#
# From almanach
#

# Volume existence threshold (integer value)
#volume_existence_threshold = 60

# DEPRECATED: Metadata to include in entity (string value)
# This option is deprecated for removal.
# Its value may be silently ignored in the future.
device_metadata_whitelist = metering.billing_mode
@@ -2,11 +2,12 @@ pbr>=1.10.0
Flask==0.10.1
PyYAML==3.11
jsonpickle==0.7.1
pymongo==2.7.2
pymongo>=3.0.2,!=3.1 # Apache-2.0
kombu>=3.0.30
python-dateutil==2.2
pytz>=2014.10
voluptuous==0.8.11
python-keystoneclient>=1.6.0
six>=1.9.0 # MIT
oslo.serialization>=1.10.0 # Apache-2.0
oslo.config>=3.14.0 # Apache-2.0
oslo.log>=3.11.0 # Apache-2.0
@@ -23,8 +23,11 @@ packages =
    almanach

[entry_points]
oslo.config.opts =
    almanach = almanach.core.opts:list_opts
console_scripts =
    almanach = almanach.cli:run
    almanach-api = almanach.api.main:main
    almanach-collector = almanach.collector.main:main

[nosetests]
no-path-adjustment = 1

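Each console script above maps to a :code:`main()` callable. A hedged sketch of the shape such an entry point takes with oslo.config (illustrative only; the actual contents of almanach.api.main may differ):

.. code:: python

    import sys

    from almanach.core import opts


    def main():
        # Parse --config-file and friends; logging setup lives in almanach.core.opts.
        opts.CONF(sys.argv[1:], project='almanach')
        # ...construct the Flask app and serve on conf.api.bind_ip / conf.api.bind_port...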
@@ -11,3 +11,4 @@ sphinx>=1.2.1,!=1.3b1,<1.3 # BSD
sphinxcontrib-httpdomain # BSD
flake8>=2.5.4,<2.6.0 # MIT
hacking<0.12,>=0.11.0 # Apache-2.0
testtools>=1.4.0 # MIT
@@ -1,45 +0,0 @@
# Copyright 2016 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

from flexmock import flexmock
from flexmock import flexmock_teardown
from hamcrest import assert_that
from hamcrest import instance_of

from almanach.adapters.auth_adapter import AuthenticationAdapter
from almanach.auth.keystone_auth import KeystoneAuthentication
from almanach.auth.mixed_auth import MixedAuthentication
from almanach.auth.private_key_auth import PrivateKeyAuthentication
from almanach import config


class AuthenticationAdapterTest(unittest.TestCase):
    def tearDown(self):
        flexmock_teardown()

    def test_assert_that_the_default_backend_is_private_key(self):
        adapter = AuthenticationAdapter().factory()
        assert_that(adapter, instance_of(PrivateKeyAuthentication))

    def test_get_keystone_auth_backend(self):
        flexmock(config).should_receive("auth_strategy").and_return("keystone")
        adapter = AuthenticationAdapter().factory()
        assert_that(adapter, instance_of(KeystoneAuthentication))

    def test_get_mixed_auth_backend(self):
        flexmock(config).should_receive("auth_strategy").and_return("token,keystone")
        adapter = AuthenticationAdapter().factory()
        assert_that(adapter, instance_of(MixedAuthentication))
0
tests/api/auth/__init__.py
Normal file
@@ -12,27 +12,25 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

from flexmock import flexmock
from flexmock import flexmock_teardown
from hamcrest import assert_that
from hamcrest import calling
from hamcrest import equal_to
from hamcrest import raises

from almanach.auth.keystone_auth import KeystoneAuthentication
from almanach.api.auth import keystone_auth
from almanach.core import exception

from tests import base


class KeystoneAuthenticationTest(base.BaseTestCase):

class KeystoneAuthenticationTest(unittest.TestCase):
    def setUp(self):
        super(KeystoneAuthenticationTest, self).setUp()
        self.token_manager_factory = flexmock()
        self.keystone_token_manager = flexmock()
        self.auth_backend = KeystoneAuthentication(self.token_manager_factory)

    def tearDown(self):
        flexmock_teardown()
        self.auth_backend = keystone_auth.KeystoneAuthentication(self.token_manager_factory)

    def test_with_correct_token(self):
        token = "my token"
@@ -12,27 +12,25 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

from flexmock import flexmock
from flexmock import flexmock_teardown
from hamcrest import assert_that
from hamcrest import calling
from hamcrest import equal_to
from hamcrest import raises

from almanach.auth.mixed_auth import MixedAuthentication
from almanach.api.auth import mixed_auth
from almanach.core import exception

from tests import base


class MixedAuthenticationTest(base.BaseTestCase):

class MixedAuthenticationTest(unittest.TestCase):
    def setUp(self):
        super(MixedAuthenticationTest, self).setUp()
        self.auth_one = flexmock()
        self.auth_two = flexmock()
        self.auth_backend = MixedAuthentication([self.auth_one, self.auth_two])

    def tearDown(self):
        flexmock_teardown()
        self.auth_backend = mixed_auth.MixedAuthentication([self.auth_one, self.auth_two])

    def test_with_token_valid_with_auth_one(self):
        token = "my token"
@@ -12,20 +12,22 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

from hamcrest import assert_that
from hamcrest import calling
from hamcrest import equal_to
from hamcrest import raises

from almanach.auth.private_key_auth import PrivateKeyAuthentication
from almanach.api.auth import private_key_auth
from almanach.core import exception

from tests import base


class PrivateKeyAuthenticationTest(base.BaseTestCase):

class PrivateKeyAuthenticationTest(unittest.TestCase):
    def setUp(self):
        self.auth_backend = PrivateKeyAuthentication("my token")
        super(PrivateKeyAuthenticationTest, self).setUp()
        self.auth_backend = private_key_auth.PrivateKeyAuthentication("my token")

    def test_with_correct_token(self):
        assert_that(self.auth_backend.validate("my token"), equal_to(True))
@@ -13,48 +13,36 @@
# limitations under the License.

from datetime import datetime
import json
from unittest import TestCase

import flask
from flexmock import flexmock
from flexmock import flexmock_teardown
import json
import oslo_serialization

from almanach.adapters import api_route_v1 as api_route
from almanach import config
from almanach.api.v1 import routes
from almanach.core import exception
from tests import base


class BaseApi(TestCase):
class BaseApi(base.BaseTestCase):

    def setUp(self):
        super(BaseApi, self).setUp()
        self.prepare()
        self.prepare_with_successful_authentication()

    def tearDown(self):
        flexmock_teardown()

    @staticmethod
    def having_config(key, value):
        (flexmock(config)
         .should_receive(key)
         .and_return(value))

    def prepare(self):
        self.controller = flexmock()
        self.auth_adapter = flexmock()
        api_route.controller = self.controller
        api_route.auth_adapter = self.auth_adapter
        routes.controller = self.controller
        routes.auth_adapter = self.auth_adapter

        self.app = flask.Flask("almanach")
        self.app.register_blueprint(api_route.api)
        self.app.register_blueprint(routes.api)

    def prepare_with_successful_authentication(self):
        self.having_config('auth_private_key', 'some token value')
        self.auth_adapter.should_receive("validate").and_return(True)

    def prepare_with_failed_authentication(self):
        self.having_config('auth_private_key', 'some token value')
        self.auth_adapter.should_receive("validate")\
            .and_raise(exception.AuthenticationFailureException("Wrong credentials"))

@@ -19,7 +19,9 @@ from tests.api.base_api import BaseApi


class ApiAuthenticationTest(BaseApi):

    def setUp(self):
        super(ApiAuthenticationTest, self).setUp()
        self.prepare()
        self.prepare_with_failed_authentication()


@@ -26,6 +26,7 @@ from tests.builder import instance


class ApiEntityTest(BaseApi):

    def test_update_instance_entity_with_a_new_start_date(self):
        data = {
            "start_date": "2014-01-01 00:00:00.0000",

@@ -20,6 +20,7 @@ from tests.api.base_api import BaseApi


class ApiInfoTest(BaseApi):

    def test_info(self):
        self.controller.should_receive('get_application_info').and_return({
            'info': {'version': '1.0'},

@@ -27,6 +27,7 @@ from tests.builder import instance


class ApiInstanceTest(BaseApi):

    def test_get_instances(self):
        self.controller.should_receive('list_instances') \
            .with_args('TENANT_ID', a_date_matching("2014-01-01 00:00:00.0000"),
@@ -80,7 +81,6 @@ class ApiInstanceTest(BaseApi):
        assert_that(result['flavor'], is_(some_new_flavor))

    def test_successful_instance_create(self):
        self.having_config('auth_private_key', 'some token value')
        data = dict(id="INSTANCE_ID",
                    created_at="CREATED_AT",
                    name="INSTANCE_NAME",
@@ -109,7 +109,6 @@ class ApiInstanceTest(BaseApi):
        assert_that(code, equal_to(201))

    def test_instance_create_missing_a_param_returns_bad_request_code(self):
        self.having_config('auth_private_key', 'some token value')
        data = dict(id="INSTANCE_ID",
                    created_at="CREATED_AT",
                    name="INSTANCE_NAME",
@@ -129,7 +128,6 @@ class ApiInstanceTest(BaseApi):
        assert_that(code, equal_to(400))

    def test_instance_create_bad_date_format_returns_bad_request_code(self):
        self.having_config('auth_private_key', 'some token value')
        data = dict(id="INSTANCE_ID",
                    created_at="A_BAD_DATE",
                    name="INSTANCE_NAME",

@@ -23,6 +23,7 @@ from tests.api.base_api import BaseApi


class ApiVolumeTest(BaseApi):

    def test_successful_volume_create(self):
        data = dict(volume_id="VOLUME_ID",
                    start="START_DATE",

@@ -26,6 +26,7 @@ from tests.builder import volume_type


class ApiVolumeTypeTest(BaseApi):

    def test_get_volume_types(self):
        self.controller.should_receive('list_volume_types') \
            .and_return([a(volume_type().with_volume_type_name('some_volume_type_name'))]) \

43
tests/api/test_auth_adapter.py
Normal file
@@ -0,0 +1,43 @@
# Copyright 2016 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from hamcrest import assert_that
from hamcrest import instance_of

from almanach.api.auth import keystone_auth
from almanach.api.auth import mixed_auth
from almanach.api.auth import private_key_auth
from almanach.api import auth_adapter

from tests import base


class AuthenticationAdapterTest(base.BaseTestCase):
    def setUp(self):
        super(AuthenticationAdapterTest, self).setUp()
        self.auth_adapter = auth_adapter.AuthenticationAdapter(self.config)

    def test_assert_that_the_default_backend_is_private_key(self):
        adapter = self.auth_adapter.get_authentication_adapter()
        assert_that(adapter, instance_of(private_key_auth.PrivateKeyAuthentication))

    def test_get_keystone_auth_backend(self):
        self.config_fixture.config(strategy='keystone', group='auth')
        adapter = self.auth_adapter.get_authentication_adapter()
        assert_that(adapter, instance_of(keystone_auth.KeystoneAuthentication))

    def test_get_mixed_auth_backend(self):
        self.config_fixture.config(strategy='token,keystone', group='auth')
        adapter = self.auth_adapter.get_authentication_adapter()
        assert_that(adapter, instance_of(mixed_auth.MixedAuthentication))
@@ -12,20 +12,20 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from flask import Flask
from flexmock import flexmock_teardown
from oslo_config import fixture
import testtools

from almanach.adapters import api_route_v1 as api_route
from almanach.adapters import auth_adapter
from almanach.adapters import database_adapter
from almanach.core import controller

app = Flask("almanach")
app.register_blueprint(api_route.api)
from almanach.core import opts


class AlmanachApi(object):
    def run(self, host, port):
        api_route.controller = controller.Controller(database_adapter.DatabaseAdapter())
        api_route.auth_adapter = auth_adapter.AuthenticationAdapter().factory()
class BaseTestCase(testtools.TestCase):

        return app.run(host=host, port=port)
    def setUp(self):
        super(BaseTestCase, self).setUp()
        self.config_fixture = self.useFixture(fixture.Config(conf=opts.CONF))
        self.config = self.config_fixture.conf

    def tearDown(self):
        super(BaseTestCase, self).tearDown()
        flexmock_teardown()
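The oslo_config fixture gives each test an isolated view of the options: values set through it are restored automatically on cleanup. A minimal sketch of the pattern (the test name and overridden option are illustrative, mirroring tests/api/test_auth_adapter.py above):

.. code:: python

    from tests import base


    class ExampleConfigTest(base.BaseTestCase):

        def test_override_collector_option(self):
            # The override lasts only for the duration of this test.
            self.config_fixture.config(max_retries=5, group='collector')
            self.assertEqual(5, self.config.collector.max_retries)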
0
tests/collector/__init__.py
Normal file
0
tests/collector/handlers/__init__.py
Normal file
@@ -12,24 +12,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

from flexmock import flexmock
from flexmock import flexmock_teardown

from almanach.adapters.instance_bus_adapter import InstanceBusAdapter
from almanach.collector.handlers import instance_handler
from integration_tests.builders import messages
from tests import base


class InstanceBusAdapterTest(unittest.TestCase):
class InstanceHandlerTest(base.BaseTestCase):

    def setUp(self):
        super(InstanceHandlerTest, self).setUp()

        self.controller = flexmock()
        self.retry = flexmock()
        self.instance_bus_adapter = InstanceBusAdapter(self.controller)

    def tearDown(self):
        flexmock_teardown()
        self.instance_bus_adapter = instance_handler.InstanceHandler(self.controller)

    def test_deleted_instance(self):
        notification = messages.get_instance_delete_end_sample()
@@ -13,42 +13,40 @@
# limitations under the License.

from datetime import datetime
import unittest

from flexmock import flexmock
from flexmock import flexmock_teardown
import pytz

from almanach.adapters.volume_bus_adapter import VolumeBusAdapter
from almanach.collector.handlers import volume_handler
from integration_tests.builders import messages
from tests import base


class VolumeBusAdapterTest(unittest.TestCase):
class VolumeHandlerTest(base.BaseTestCase):

    def setUp(self):
        super(VolumeHandlerTest, self).setUp()

        self.controller = flexmock()
        self.retry = flexmock()
        self.volume_bus_adapter = VolumeBusAdapter(self.controller)

    def tearDown(self):
        flexmock_teardown()
        self.volume_handler = volume_handler.VolumeHandler(self.controller)

    def test_deleted_volume(self):
        notification = messages.get_volume_delete_end_sample()

        self.controller.should_receive('delete_volume').once()
        self.volume_bus_adapter.on_volume_deleted(notification)
        self.volume_handler.on_volume_deleted(notification)

    def test_resize_volume(self):
        notification = messages.get_volume_update_end_sample()

        self.controller.should_receive('resize_volume').once()
        self.volume_bus_adapter.on_volume_resized(notification)
        self.volume_handler.on_volume_resized(notification)

    def test_updated_volume(self):
        notification = messages.get_volume_update_end_sample()

        self.controller.should_receive('resize_volume').once()
        self.volume_bus_adapter.on_volume_resized(notification)
        self.volume_handler.on_volume_resized(notification)

    def test_attach_volume_with_kilo_payload_and_empty_attachments(self):
        notification = messages.get_volume_attach_kilo_end_sample(
@@ -62,7 +60,7 @@ class VolumeBusAdapterTest(unittest.TestCase):
            .with_args("my-volume-id", "2014-02-14T17:18:36.000000Z", []) \
            .once()

        self.volume_bus_adapter.on_volume_attached(notification)
        self.volume_handler.on_volume_attached(notification)

    def test_detached_volume(self):
        notification = messages.get_volume_detach_end_sample()
@@ -71,7 +69,7 @@ class VolumeBusAdapterTest(unittest.TestCase):
            .should_receive('detach_volume')
            .once())

        self.volume_bus_adapter.on_volume_detached(notification)
        self.volume_handler.on_volume_detached(notification)

    def test_renamed_volume_with_volume_update_end(self):
        notification = messages.get_volume_update_end_sample()
@@ -80,13 +78,13 @@ class VolumeBusAdapterTest(unittest.TestCase):
            .should_receive('rename_volume') \
            .once()

        self.volume_bus_adapter.on_volume_renamed(notification)
        self.volume_handler.on_volume_renamed(notification)

    def test_renamed_volume_with_volume_exists(self):
        notification = messages.get_volume_exists_sample()

        self.controller.should_receive('rename_volume').once()
        self.volume_bus_adapter.on_volume_renamed(notification)
        self.volume_handler.on_volume_renamed(notification)

    def test_attach_volume_with_icehouse_payload(self):
        notification = messages.get_volume_attach_icehouse_end_sample(
@@ -99,7 +97,7 @@ class VolumeBusAdapterTest(unittest.TestCase):
            .with_args("my-volume-id", "2014-02-14T17:18:36.000000Z", ["my-instance-id"]) \
            .once()

        self.volume_bus_adapter.on_volume_attached(notification)
        self.volume_handler.on_volume_attached(notification)

    def test_attach_volume_with_kilo_payload(self):
        notification = messages.get_volume_attach_kilo_end_sample(
@@ -113,20 +111,20 @@ class VolumeBusAdapterTest(unittest.TestCase):
            .with_args("my-volume-id", "2014-02-14T17:18:36.000000Z", ["I1"]) \
            .once()

        self.volume_bus_adapter.on_volume_attached(notification)
        self.volume_handler.on_volume_attached(notification)

    def test_get_attached_instances(self):
        self.assertEqual(["truc"], self.volume_bus_adapter._get_attached_instances({"instance_uuid": "truc"}))
        self.assertEqual([], self.volume_bus_adapter._get_attached_instances({"instance_uuid": None}))
        self.assertEqual([], self.volume_bus_adapter._get_attached_instances({}))
        self.assertEqual(["truc"], self.volume_handler._get_attached_instances({"instance_uuid": "truc"}))
        self.assertEqual([], self.volume_handler._get_attached_instances({"instance_uuid": None}))
        self.assertEqual([], self.volume_handler._get_attached_instances({}))
        self.assertEqual(
            ["a", "b"],
            self.volume_bus_adapter._get_attached_instances(
            self.volume_handler._get_attached_instances(
                {"volume_attachment": [{"instance_uuid": "a"}, {"instance_uuid": "b"}]}
            )
        )
        self.assertEqual(
            ["a"],
            self.volume_bus_adapter._get_attached_instances({"volume_attachment": [{"instance_uuid": "a"}]})
            self.volume_handler._get_attached_instances({"volume_attachment": [{"instance_uuid": "a"}]})
        )
        self.assertEqual([], self.volume_bus_adapter._get_attached_instances({"volume_attachment": []}))
        self.assertEqual([], self.volume_handler._get_attached_instances({"volume_attachment": []}))
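The assertions above pin down how attachments are extracted from both the old icehouse-style payload (a single :code:`instance_uuid`) and the kilo-style payload (a :code:`volume_attachment` list). A sketch of :code:`_get_attached_instances` reconstructed purely from those assertions (the actual implementation may differ in detail):

.. code:: python

    # Reconstructed from the test assertions above; illustrative only.
    def _get_attached_instances(payload):
        if 'volume_attachment' in payload:
            # Kilo payload: a list of attachment dicts.
            return [a['instance_uuid'] for a in payload['volume_attachment']]
        if payload.get('instance_uuid'):
            # Icehouse payload: a single attached instance.
            return [payload['instance_uuid']]
        return []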
@@ -13,26 +13,23 @@
# limitations under the License.

from datetime import datetime
import unittest

from flexmock import flexmock
from flexmock import flexmock_teardown

import pytz

from almanach.adapters.bus_adapter import BusAdapter
from almanach.collector import bus_adapter
from almanach.core import exception
from integration_tests.builders import messages
from tests import base


class BusAdapterTest(unittest.TestCase):
class BusAdapterTest(base.BaseTestCase):

    def setUp(self):
        super(BusAdapterTest, self).setUp()
        self.controller = flexmock()
        self.retry = flexmock()
        self.bus_adapter = BusAdapter(self.controller, None, retry_adapter=self.retry)

    def tearDown(self):
        flexmock_teardown()
        self.bus_adapter = bus_adapter.BusAdapter(self.config, self.controller, None, retry_adapter=self.retry)

    def test_on_message(self):
        instance_id = "e7d44dea-21c1-452c-b50c-cbab0d07d7d3"
@@ -12,28 +12,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

from flexmock import flexmock
from flexmock import flexmock_teardown
from kombu import Connection
from kombu.tests import mocks
from kombu.transport import pyamqp

from almanach.adapters.retry_adapter import RetryAdapter
from almanach import config
from almanach.collector import retry_adapter
from tests import base


class BusAdapterTest(unittest.TestCase):
class BusAdapterTest(base.BaseTestCase):

    def setUp(self):
        super(BusAdapterTest, self).setUp()
        self.setup_connection_mock()
        self.setup_config_mock()

        self.retry_adapter = RetryAdapter(self.connection)

    def tearDown(self):
        flexmock_teardown()
        self.retry_adapter = retry_adapter.RetryAdapter(self.config, self.connection)

    def setup_connection_mock(self):
        mocks.Transport.recoverable_connection_errors = pyamqp.Transport.recoverable_connection_errors
@@ -41,17 +34,6 @@ class BusAdapterTest(unittest.TestCase):
        self.channel_mock = flexmock(self.connection.default_channel)
        self.connection.should_receive('channel').and_return(self.channel_mock)

    def setup_config_mock(self):
        self.config_mock = flexmock(config)
        self.config_mock.should_receive('rabbitmq_time_to_live').and_return(10)
        self.config_mock.should_receive('rabbitmq_routing_key').and_return('almanach.info')
        self.config_mock.should_receive('rabbitmq_retry_queue').and_return('almanach.retry')
        self.config_mock.should_receive('rabbitmq_dead_queue').and_return('almanach.dead')
        self.config_mock.should_receive('rabbitmq_queue').and_return('almanach.info')
        self.config_mock.should_receive('rabbitmq_retry_return_exchange').and_return('almanach')
        self.config_mock.should_receive('rabbitmq_retry_exchange').and_return('almanach.retry')
        self.config_mock.should_receive('rabbitmq_dead_exchange').and_return('almanach.dead')

    def test_declare_retry_exchanges_retries_if_it_fails(self):
        connection = flexmock(Connection(transport=mocks.Transport))
        connection.should_receive('_establish_connection').times(3)\
@@ -59,20 +41,17 @@ class BusAdapterTest(unittest.TestCase):
            .and_raise(IOError)\
            .and_return(connection.transport.establish_connection())

        self.retry_adapter = RetryAdapter(connection)
        self.retry_adapter = retry_adapter.RetryAdapter(self.config, connection)

    def test_publish_to_retry_queue_happy_path(self):
        message = self.build_message()

        self.config_mock.should_receive('rabbitmq_retry').and_return(1)
        self.expect_publish_with(message, 'almanach.retry').once()

        self.retry_adapter.publish_to_dead_letter(message)

    def test_publish_to_retry_queue_retries_if_it_fails(self):
        message = self.build_message()

        self.config_mock.should_receive('rabbitmq_retry').and_return(2)
        self.expect_publish_with(message, 'almanach.retry').times(4)\
            .and_raise(IOError)\
            .and_raise(IOError)\
@@ -93,7 +72,6 @@ class BusAdapterTest(unittest.TestCase):
    def test_publish_to_dead_letter_messages_retried_more_than_twice(self):
        message = self.build_message(headers={'x-death': [0, 1, 2, 3]})

        self.config_mock.should_receive('rabbitmq_retry').and_return(2)
        self.expect_publish_with(message, 'almanach.dead').once()

        self.retry_adapter.publish_to_dead_letter(message)
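For context on what these tests exercise: every failed delivery grows the :code:`x-death` header, and the adapter re-publishes to the TTL-based retry queue until the retry budget is exhausted, after which the message is dead-lettered. A simplified sketch of that routing decision, inferred from the tests (the real adapter logic may differ):

.. code:: python

    # Simplified routing decision, assuming the x-death header convention above.
    def pick_exchange(headers, max_retries=3):
        death_count = len(headers.get('x-death', []))
        if death_count > max_retries:
            return 'almanach.dead'   # budget spent: dead-letter the message
        return 'almanach.retry'      # re-queue through the retry exchange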
@@ -16,46 +16,32 @@ from copy import copy
from datetime import datetime
from datetime import timedelta
from dateutil.parser import parse
import logging
import sys
import unittest

from flexmock import flexmock
from flexmock import flexmock_teardown
from hamcrest import assert_that
from hamcrest import calling
from hamcrest import equal_to
from hamcrest import raises
from nose.tools import assert_raises
import pytz

from almanach import config
from almanach.core.controller import Controller
from almanach.core import controller
from almanach.core import exception
from almanach.core import model
from almanach.storage import database_adapter

from tests import base
from tests.builder import a
from tests.builder import instance
from tests.builder import volume
from tests.builder import volume_type

logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)

class ControllerTest(base.BaseTestCase):

class ControllerTest(unittest.TestCase):
    def setUp(self):
        self.database_adapter = flexmock()

        (flexmock(config)
         .should_receive("volume_existence_threshold")
         .and_return(10))
        (flexmock(config)
         .should_receive("device_metadata_whitelist")
         .and_return(["a_metadata.to_filter"]))

        self.controller = Controller(self.database_adapter)

    def tearDown(self):
        flexmock_teardown()
        super(ControllerTest, self).setUp()
        self.database_adapter = flexmock(database_adapter.DatabaseAdapter)
        self.controller = controller.Controller(self.config, self.database_adapter)

    def test_instance_created(self):
        fake_instance = a(instance().with_all_dates_in_string())
@@ -66,14 +52,8 @@ class ControllerTest(unittest.TestCase):
         .and_raise(KeyError)
         .once())

        expected_instance = a(instance()
                              .with_id(fake_instance.entity_id)
                              .with_project_id(fake_instance.project_id)
                              .with_metadata({"a_metadata.to_filter": "include.this"}))

        (flexmock(self.database_adapter)
         .should_receive("insert_entity")
         .with_args(expected_instance)
         .once())

        self.controller.create_instance(fake_instance.entity_id, fake_instance.project_id, fake_instance.start,
@@ -271,14 +251,8 @@ class ControllerTest(unittest.TestCase):
         .and_raise(NotImplementedError)  # The db adapter found garbage in the database, we will ignore this entry
         .once())

        expected_instance = a(instance()
                              .with_id(fake_instance.entity_id)
                              .with_project_id(fake_instance.project_id)
                              .with_metadata({"a_metadata.to_filter": "include.this"}))

        (flexmock(self.database_adapter)
         .should_receive("insert_entity")
         .with_args(expected_instance)
         .once())

        self.controller.create_instance(fake_instance.entity_id, fake_instance.project_id, fake_instance.start,
@@ -306,8 +280,9 @@ class ControllerTest(unittest.TestCase):
         .and_return(False)
         .once())

        with self.assertRaises(exception.AlmanachEntityNotFoundException):
            self.controller.delete_instance("id1", "2015-10-21T16:25:00.000000Z")
        self.assertRaises(exception.AlmanachEntityNotFoundException,
                          self.controller.delete_instance,
                          "id1", "2015-10-21T16:25:00.000000Z")

    def test_volume_deleted(self):
        fake_volume = a(volume())
@@ -438,7 +413,7 @@ class ControllerTest(unittest.TestCase):
    def test_create_volume_raises_bad_date_format(self):
        some_volume = a(volume())

        assert_raises(
        self.assertRaises(
            exception.DateFormatException,
            self.controller.create_volume,
            some_volume.entity_id,
@@ -481,6 +456,7 @@ class ControllerTest(unittest.TestCase):

    def test_create_volume_with_invalid_volume_type(self):
        some_volume_type = a(volume_type())

        (flexmock(self.database_adapter)
         .should_receive("get_volume_type")
         .with_args(some_volume_type.volume_type_id)
@@ -494,16 +470,17 @@ class ControllerTest(unittest.TestCase):
        (flexmock(self.database_adapter)
         .should_receive("insert_entity")
         .never())

        (flexmock(self.database_adapter)
         .should_receive("get_active_entity")
         .with_args(some_volume.entity_id)
         .and_return(None)
         .once())

        with self.assertRaises(KeyError):
            self.controller.create_volume(some_volume.entity_id, some_volume.project_id, some_volume.start,
                                          some_volume_type.volume_type_id, some_volume.size, some_volume.name,
                                          some_volume.attached_to)
        self.assertRaises(KeyError, self.controller.create_volume,
                          some_volume.entity_id, some_volume.project_id, some_volume.start,
                          some_volume_type.volume_type_id, some_volume.size, some_volume.name,
                          some_volume.attached_to)

    def test_create_volume_but_its_an_old_event(self):
        some_volume = a(volume().with_last_event(pytz.utc.localize(datetime(2015, 10, 21, 16, 29, 0))))

@@ -13,39 +13,33 @@
# limitations under the License.

from datetime import datetime
import pkg_resources
import unittest

from flexmock import flexmock
from flexmock import flexmock_teardown
from hamcrest import assert_that
from hamcrest import contains_inanyorder
import mongomock
from pymongo import MongoClient
import pymongo
import pytz

from almanach.adapters.database_adapter import DatabaseAdapter
from almanach import config
from flexmock import flexmock
from hamcrest import assert_that
from hamcrest import contains_inanyorder

from almanach.core import exception
from almanach.core import model
from almanach.storage.database_adapter import DatabaseAdapter

from tests import base
from tests.builder import a
from tests.builder import instance
from tests.builder import volume
from tests.builder import volume_type


class DatabaseAdapterTest(unittest.TestCase):
class DatabaseAdapterTest(base.BaseTestCase):
    def setUp(self):
        config.read(pkg_resources.resource_filename("almanach", "resources/config/test.cfg"))
        super(DatabaseAdapterTest, self).setUp()
        mongo_connection = mongomock.Connection()

        self.adapter = DatabaseAdapter()
        self.db = mongo_connection[config.mongodb_database()]

        flexmock(MongoClient).new_instances(mongo_connection)

    def tearDown(self):
        flexmock_teardown()
        self.db = mongo_connection['almanach']
        self.adapter = DatabaseAdapter(self.config, self.db)
        flexmock(pymongo.MongoClient).new_instances(mongo_connection)

    def test_insert_instance(self):
        fake_instance = a(instance())
@@ -87,8 +81,9 @@ class DatabaseAdapterTest(unittest.TestCase):
        self.assert_entities_metadata_have_been_sanitize([entity])

    def test_get_instance_entity_will_not_found(self):
        with self.assertRaises(KeyError):
            self.adapter.get_active_entity("will_not_found")
        self.assertRaises(KeyError,
                          self.adapter.get_active_entity,
                          "will_not_found")

    def test_get_instance_entity_with_unknown_type(self):
        fake_entity = a(instance())
@@ -96,8 +91,9 @@ class DatabaseAdapterTest(unittest.TestCase):

        self.db.entity.insert(model.todict(fake_entity))

        with self.assertRaises(NotImplementedError):
            self.adapter.get_active_entity(fake_entity.entity_id)
        self.assertRaises(NotImplementedError,
                          self.adapter.get_active_entity,
                          fake_entity.entity_id)

    def test_count_entities(self):
        fake_active_entities = [
@@ -297,8 +293,9 @@ class DatabaseAdapterTest(unittest.TestCase):
    def test_get_volume_type_not_exist(self):
        fake_volume_type = a(volume_type())

        with self.assertRaises(exception.VolumeTypeNotFoundException):
            self.adapter.get_volume_type(fake_volume_type.volume_type_id)
        self.assertRaises(exception.VolumeTypeNotFoundException,
                          self.adapter.get_volume_type,
                          fake_volume_type.volume_type_id)

    def test_delete_volume_type(self):
        fake_volume_type = a(volume_type())
@@ -308,12 +305,14 @@ class DatabaseAdapterTest(unittest.TestCase):
        self.assertEqual(0, self.db.volume_type.count())

    def test_delete_volume_type_not_in_database(self):
        with self.assertRaises(exception.AlmanachException):
            self.adapter.delete_volume_type("not_in_database_id")
        self.assertRaises(exception.AlmanachException,
                          self.adapter.delete_volume_type,
                          "not_in_database_id")

    def test_delete_all_volume_types_not_permitted(self):
        with self.assertRaises(exception.AlmanachException):
            self.adapter.delete_volume_type(None)
        self.assertRaises(exception.AlmanachException,
                          self.adapter.delete_volume_type,
                          None)

    def test_list_volume_types(self):
        fake_volume_types = [a(volume_type()), a(volume_type())]
0
tests/storage/__init__.py
Normal file
@@ -1,41 +0,0 @@
# Copyright 2016 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import pkg_resources
from unittest import TestCase

from hamcrest import assert_that
from hamcrest import equal_to

from almanach import config


class ConfigTest(TestCase):

    def setUp(self):
        config.read(pkg_resources.resource_filename("almanach", "resources/config/test.cfg"))

    def test_get_config_file_value(self):
        assert_that("amqp://guest:guest@localhost:5672", equal_to(config.rabbitmq_url()))

    def test_get_value_from_environment_variable(self):
        url = "amqp://openstack:openstack@hostname:5672"
        token = "my_secret"

        os.environ['RABBITMQ_URL'] = url
        os.environ['ALMANACH_AUTH_TOKEN'] = token

        assert_that(url, equal_to(config.rabbitmq_url()))
        assert_that(token, equal_to(config.auth_private_key()))