Add Python 3 compatibility

Change-Id: I20251e3dbe495c60a2a17751d84a395d10d38817
parent 4f9fe41f51
commit 7f33dcc7df
@@ -1,9 +1,9 @@
-FROM python:2.7
+FROM python:3.4
 
 RUN mkdir -p /opt/almanach/src
 ADD almanach /opt/almanach/src/almanach
 ADD setup.* /opt/almanach/src/
-ADD README.md /opt/almanach/src/
+ADD README.rst /opt/almanach/src/
 ADD requirements.txt /opt/almanach/src/
 ADD LICENSE /opt/almanach/src/
 ADD almanach/resources/config/almanach.cfg /etc/almanach.cfg
@@ -13,16 +13,17 @@
 # limitations under the License.
 
 import logging
-import json
 from datetime import datetime
 from functools import wraps
 
 import jsonpickle
 
 from flask import Blueprint, Response, request
+from oslo_serialization import jsonutils
 
 from werkzeug.wrappers import BaseResponse
 
+from almanach.common.exceptions.almanach_exception import AlmanachException
 from almanach.common.exceptions.almanach_entity_not_found_exception import AlmanachEntityNotFoundException
 from almanach.common.exceptions.authentication_failure_exception import AuthenticationFailureException
 from almanach.common.exceptions.multiple_entities_matching_query import MultipleEntitiesMatchingQuery
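The route handlers below swap the stdlib json module for oslo_serialization's jsonutils, which keeps the familiar loads/dumps surface while also accepting bytes payloads, as Flask hands them over on Python 3. A minimal sketch of the substitution (the request body shown is illustrative, not taken from the patch):

    from oslo_serialization import jsonutils

    # jsonutils mirrors json.loads/json.dumps, so call sites only need the
    # module swapped; a bytes body is decoded transparently.
    body = jsonutils.loads(b'{"flavor": "small", "name": "vm01"}')
    print(jsonutils.dumps(body))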
@@ -48,7 +49,7 @@ def to_json(api_call):
             logging.warning(e.message)
             return Response(encode({"error": e.message}), 400, {"Content-Type": "application/json"})
         except KeyError as e:
-            message = "The '{param}' param is mandatory for the request you have made.".format(param=e.message)
+            message = "The {param} param is mandatory for the request you have made.".format(param=e)
             logging.warning(message)
             return encode({"error": message}), 400, {"Content-Type": "application/json"}
         except TypeError:
@@ -65,10 +66,12 @@ def to_json(api_call):
         except AlmanachEntityNotFoundException as e:
             logging.warning(e.message)
             return encode({"error": "Entity not found"}), 404, {"Content-Type": "application/json"}
-        except Exception as e:
+        except AlmanachException as e:
             logging.exception(e)
             return Response(encode({"error": e.message}), 500, {"Content-Type": "application/json"})
+        except Exception as e:
+            logging.exception(e)
+            return Response(encode({"error": e}), 500, {"Content-Type": "application/json"})
 
     return decorator
 
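The handlers also stop reading e.message, which Python 3 removed from BaseException; formatting or logging the exception object itself behaves the same on both interpreters. An illustrative check, not part of the patch:

    try:
        {}["flavor"]
    except KeyError as e:
        # On Python 3, e.message would raise AttributeError; str(e) and
        # "{param}".format(param=e) are portable and render "'flavor'".
        print("The {param} param is mandatory for the request you have made.".format(param=e))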
@@ -80,7 +83,7 @@ def authenticated(api_call):
             auth_adapter.validate(request.headers.get('X-Auth-Token'))
             return api_call(*args, **kwargs)
         except AuthenticationFailureException as e:
-            logging.error("Authentication failure: {0}".format(e.message))
+            logging.error("Authentication failure: {0}".format(e))
             return Response('Unauthorized', 401)
 
     return decorator
@@ -97,7 +100,7 @@ def get_info():
 @authenticated
 @to_json
 def create_instance(project_id):
-    instance = json.loads(request.data)
+    instance = jsonutils.loads(request.data)
     logging.info("Creating instance for tenant %s with data %s", project_id, instance)
     controller.create_instance(
         tenant_id=project_id,
@@ -118,7 +121,7 @@ def create_instance(project_id):
 @authenticated
 @to_json
 def delete_instance(instance_id):
-    data = json.loads(request.data)
+    data = jsonutils.loads(request.data)
     logging.info("Deleting instance with id %s with data %s", instance_id, data)
     controller.delete_instance(
         instance_id=instance_id,
@@ -132,7 +135,7 @@ def delete_instance(instance_id):
 @authenticated
 @to_json
 def resize_instance(instance_id):
-    instance = json.loads(request.data)
+    instance = jsonutils.loads(request.data)
     logging.info("Resizing instance with id %s with data %s", instance_id, instance)
     controller.resize_instance(
         instance_id=instance_id,
@@ -147,7 +150,7 @@ def resize_instance(instance_id):
 @authenticated
 @to_json
 def rebuild_instance(instance_id):
-    instance = json.loads(request.data)
+    instance = jsonutils.loads(request.data)
     logging.info("Rebuilding instance with id %s with data %s", instance_id, instance)
     controller.rebuild_instance(
         instance_id=instance_id,
@@ -173,7 +176,7 @@ def list_instances(project_id):
 @authenticated
 @to_json
 def create_volume(project_id):
-    volume = json.loads(request.data)
+    volume = jsonutils.loads(request.data)
     logging.info("Creating volume for tenant %s with data %s", project_id, volume)
     controller.create_volume(
         project_id=project_id,
@@ -192,7 +195,7 @@ def create_volume(project_id):
 @authenticated
 @to_json
 def delete_volume(volume_id):
-    data = json.loads(request.data)
+    data = jsonutils.loads(request.data)
     logging.info("Deleting volume with id %s with data %s", volume_id, data)
     controller.delete_volume(
         volume_id=volume_id,
@@ -206,7 +209,7 @@ def delete_volume(volume_id):
 @authenticated
 @to_json
 def resize_volume(volume_id):
-    volume = json.loads(request.data)
+    volume = jsonutils.loads(request.data)
     logging.info("Resizing volume with id %s with data %s", volume_id, volume)
     controller.resize_volume(
         volume_id=volume_id,
@@ -221,7 +224,7 @@ def resize_volume(volume_id):
 @authenticated
 @to_json
 def attach_volume(volume_id):
-    volume = json.loads(request.data)
+    volume = jsonutils.loads(request.data)
     logging.info("Attaching volume with id %s with data %s", volume_id, volume)
     controller.attach_volume(
         volume_id=volume_id,
@@ -236,7 +239,7 @@ def attach_volume(volume_id):
 @authenticated
 @to_json
 def detach_volume(volume_id):
-    volume = json.loads(request.data)
+    volume = jsonutils.loads(request.data)
     logging.info("Detaching volume with id %s with data %s", volume_id, volume)
     controller.detach_volume(
         volume_id=volume_id,
@@ -269,7 +272,7 @@ def list_entity(project_id):
 @authenticated
 @to_json
 def update_instance_entity(instance_id):
-    data = json.loads(request.data)
+    data = jsonutils.loads(request.data)
     logging.info("Updating instance entity with id %s with data %s", instance_id, data)
     if 'start' in request.args:
         start, end = get_period()
@@ -316,7 +319,7 @@ def get_volume_type(type_id):
 @authenticated
 @to_json
 def create_volume_type():
-    volume_type = json.loads(request.data)
+    volume_type = jsonutils.loads(request.data)
     logging.info("Creating volume type with data '%s'", volume_type)
     controller.create_volume_type(
         volume_type_id=volume_type['type_id'],
@@ -12,11 +12,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import json
 import logging
 import kombu
+import six
 
 from kombu.mixins import ConsumerMixin
+from oslo_serialization import jsonutils
 
 from almanach import config
 from almanach.adapters.instance_bus_adapter import InstanceBusAdapter
 from almanach.adapters.volume_bus_adapter import VolumeBusAdapter
@@ -34,14 +36,14 @@ class BusAdapter(ConsumerMixin):
         try:
             self._process_notification(notification)
         except Exception as e:
-            logging.warning("Sending notification to retry letter exchange {0}".format(json.dumps(notification)))
-            logging.exception(e.message)
+            logging.warning("Sending notification to retry letter exchange {0}".format(jsonutils.dumps(notification)))
+            logging.exception(e)
             self.retry_adapter.publish_to_dead_letter(message)
         message.ack()
 
     def _process_notification(self, notification):
-        if isinstance(notification, basestring):
-            notification = json.loads(notification)
+        if isinstance(notification, six.string_types):
+            notification = jsonutils.loads(notification)
 
         event_type = notification.get("event_type")
         logging.info("Received event: '{0}'".format(event_type))
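basestring does not exist on Python 3, so the bus adapter now tests incoming notifications against six.string_types, which resolves to (str, unicode) on Python 2 and (str,) on Python 3. A small sketch of the idiom; the helper name normalize and the sample payloads are mine, not the adapter's:

    import six
    from oslo_serialization import jsonutils

    def normalize(notification):
        # Raw AMQP payloads may arrive as text; already-parsed ones stay dicts.
        if isinstance(notification, six.string_types):
            notification = jsonutils.loads(notification)
        return notification

    print(normalize('{"event_type": "compute.instance.create.end"}'))
    print(normalize({"event_type": "compute.instance.delete.end"}))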
@@ -16,7 +16,6 @@ import logging
 
 import pymongo
 from pymongo.errors import ConfigurationError
-from pymongomodem.utils import decode_output, encode_input
 
 from almanach import config
 from almanach.common.exceptions.almanach_exception import AlmanachException
@@ -161,14 +160,11 @@ class DatabaseAdapter(object):
     def delete_active_entity(self, entity_id):
         self.db.entity.remove({"entity_id": entity_id, "end": None})
 
-    @encode_input
     def _insert_entity(self, entity):
         self.db.entity.insert(entity)
 
-    @decode_output
     def _get_entities_from_db(self, args):
         return list(self.db.entity.find(args, {"_id": 0}))
 
-    @decode_output
     def _get_one_entity_from_db(self, args):
         return self.db.entity.find_one(args, {"_id": 0})
@@ -12,10 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import json
 import logging
 
 from kombu import Exchange, Queue, Producer
+from oslo_serialization import jsonutils
 
 from almanach import config
 
 
@@ -38,7 +39,7 @@ class RetryAdapter:
         else:
             logging.info("Publishing to dead letter queue")
             self._publish_message(self._dead_producer, message)
-            logging.info("Publishing notification to dead letter queue: {0}".format(json.dumps(message.body)))
+            logging.info("Publishing notification to dead letter queue: {0}".format(jsonutils.dumps(message.body)))
 
     def _configure_retry_exchanges(self, connection):
         def declare_queues():
@@ -46,6 +46,6 @@ class KeystoneAuthentication(BaseAuth):
         try:
             self.token_manager_factory.get_manager().validate(token)
         except Exception as e:
-            raise AuthenticationFailureException(e.message)
+            raise AuthenticationFailureException(e)
 
         return True
@@ -14,4 +14,5 @@
 
 
 class AlmanachException(Exception):
-    pass
+    def __init__(self, message=None):
+        self.message = message
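Python 3 dropped the implicit message attribute on exceptions, so AlmanachException stores it explicitly; callers that read e.message keep working on both interpreters. Illustration of the usage (the error text is made up):

    class AlmanachException(Exception):
        def __init__(self, message=None):
            self.message = message

    try:
        raise AlmanachException("backend unreachable")
    except AlmanachException as e:
        # Portable on Python 2 and 3 thanks to the explicit attribute.
        print(e.message)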
@@ -12,13 +12,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import ConfigParser
 import os
 import os.path as os_path
+import six
 
 from almanach.common.exceptions.almanach_exception import AlmanachException
 
-configuration = ConfigParser.RawConfigParser()
+if six.PY2:
+    from ConfigParser import RawConfigParser
+else:
+    from configparser import RawConfigParser
+
+configuration = RawConfigParser()
 
 
 def read(filename):
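The configuration module picks the right parser at import time: the module was renamed from ConfigParser to configparser in Python 3, and six.PY2 gates the import so one code path serves both interpreters. A standalone sketch of the same shim (the file path is only an example):

    import six

    if six.PY2:
        from ConfigParser import RawConfigParser
    else:
        from configparser import RawConfigParser

    configuration = RawConfigParser()
    # read() skips missing files and returns the list of files it parsed.
    print(configuration.read(["/etc/almanach.cfg"]))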
@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import six
 
 
 class Entity(object):
@@ -45,6 +46,10 @@ class Instance(Entity):
         self.metadata = metadata
         self.os = OS(**os)
 
+    def as_dict(self):
+        _replace_metadata_name_with_dot_instead_of_circumflex(self)
+        return todict(self)
+
     def __eq__(self, other):
         return (super(Instance, self).__eq__(other) and
                 other.flavor == self.flavor and
@@ -95,6 +100,7 @@ class VolumeType(object):
 
 def build_entity_from_dict(entity_dict):
     if entity_dict.get("entity_type") == Instance.TYPE:
+        _replace_metadata_name_with_circumflex_instead_of_dot(entity_dict)
         return Instance(**entity_dict)
     elif entity_dict.get("entity_type") == Volume.TYPE:
         return Volume(**entity_dict)
@@ -102,13 +108,34 @@ def build_entity_from_dict(entity_dict):
 
 
 def todict(obj):
-    if isinstance(obj, dict):
+    if isinstance(obj, dict) or isinstance(obj, six.text_type):
         return obj
     elif hasattr(obj, "__iter__"):
         return [todict(v) for v in obj]
     elif hasattr(obj, "__dict__"):
         return dict([(key, todict(value))
-                     for key, value in obj.__dict__.iteritems()
+                     for key, value in obj.__dict__.items()
                      if not callable(value) and not key.startswith('_')])
     else:
         return obj
+
+
+def _replace_metadata_name_with_dot_instead_of_circumflex(instance):
+    if instance.metadata:
+        cleaned_metadata = dict()
+        for key, value in instance.metadata.items():
+            if '.' in key:
+                key = key.replace(".", "^")
+            cleaned_metadata[key] = value
+        instance.metadata = cleaned_metadata
+
+
+def _replace_metadata_name_with_circumflex_instead_of_dot(entity_dict):
+    metadata = entity_dict.get("metadata")
+    if metadata:
+        dirty_metadata = dict()
+        for key, value in metadata.items():
+            if '^' in key:
+                key = key.replace("^", ".")
+            dirty_metadata[key] = value
+        entity_dict["metadata"] = dirty_metadata
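Two portability details drive the todict changes: dict.iteritems() is gone in Python 3 (items() works on both, at the cost of materialising a list on Python 2), and unicode is spelled six.text_type. A reduced sketch of the same traversal under those assumptions; the Flavor class is invented for the demo:

    import six

    def todict(obj):
        if isinstance(obj, dict) or isinstance(obj, six.text_type):
            return obj
        elif hasattr(obj, "__iter__"):
            return [todict(v) for v in obj]
        elif hasattr(obj, "__dict__"):
            # items() exists on both interpreters; iteritems() raises
            # AttributeError under Python 3.
            return dict([(key, todict(value))
                         for key, value in obj.__dict__.items()
                         if not callable(value) and not key.startswith('_')])
        else:
            return obj

    class Flavor(object):
        def __init__(self):
            self.name = u"small"
            self.vcpus = 2

    print(todict(Flavor()))  # e.g. {'name': 'small', 'vcpus': 2}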
@@ -1,3 +1,4 @@
+import six
 from voluptuous import Schema, MultipleInvalid, Datetime, Required
 
 from almanach.common.exceptions.validation_exception import InvalidAttributeException
@@ -6,12 +7,12 @@ from almanach.common.exceptions.validation_exception import InvalidAttributeExce
 class InstanceValidator(object):
     def __init__(self):
         self.schema = Schema({
-            'name': unicode,
-            'flavor': unicode,
+            'name': six.text_type,
+            'flavor': six.text_type,
             'os': {
-                Required('distro'): unicode,
-                Required('version'): unicode,
-                Required('os_type'): unicode,
+                Required('distro'): six.text_type,
+                Required('version'): six.text_type,
+                Required('os_type'): six.text_type,
             },
             'metadata': dict,
             'start_date': Datetime(),
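In the validator, the bare unicode name disappears under Python 3; six.text_type points at unicode on Python 2 and str on Python 3, so the voluptuous schema validates text identically on both. A cut-down sketch (field names mirror the patch, the payload is invented):

    import six
    from voluptuous import Schema, Required

    schema = Schema({
        'name': six.text_type,
        Required('distro'): six.text_type,
    })

    # Passes on both interpreters as long as the values are text.
    print(schema({'name': u'vm01', 'distro': u'ubuntu'}))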
@@ -33,7 +33,7 @@ def get_instance_create_end_sample(instance_id=None, tenant_id=None, flavor_name
         "os_version": os_version or "6.4",
         "created_at": creation_timestamp if creation_timestamp else datetime(2014, 2, 14, 16, 29, 58, tzinfo=pytz.utc),
         "launched_at": creation_timestamp + timedelta(seconds=1) if creation_timestamp else datetime(2014, 2, 14, 16,
-                                                                                                      30, 02,
+                                                                                                      30, 2,
                                                                                                       tzinfo=pytz.utc),
         "terminated_at": None,
         "deleted_at": None,
@@ -56,7 +56,7 @@ def get_instance_delete_end_sample(instance_id=None, tenant_id=None, flavor_name
         "os_version": os_version or "6.4",
         "created_at": creation_timestamp if creation_timestamp else datetime(2014, 2, 14, 16, 29, 58, tzinfo=pytz.utc),
         "launched_at": creation_timestamp + timedelta(seconds=1) if creation_timestamp else datetime(2014, 2, 14, 16,
-                                                                                                      30, 02,
+                                                                                                      30, 2,
                                                                                                       tzinfo=pytz.utc),
         "terminated_at": deletion_timestamp if deletion_timestamp else datetime(2014, 2, 18, 12, 5, 23,
                                                                                 tzinfo=pytz.utc),
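The zero-padded datetime arguments are not cosmetic: a leading-zero integer literal such as 02 is Python 2 octal syntax and a SyntaxError under Python 3, which only accepts the explicit 0o prefix. A quick illustration (the values are arbitrary):

    from datetime import datetime
    import pytz

    # Python 3 rejects `datetime(2014, 02, 14, ...)` at parse time;
    # plain 2 (or 0o2 for a real octal) is valid on both interpreters.
    print(datetime(2014, 2, 14, 16, 30, 2, tzinfo=pytz.utc))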
@@ -41,7 +41,9 @@ class ApiInstanceEntityTest(BaseApiTestCase):
         )
 
         assert_that(response.status_code, equal_to(400))
-        assert_that(response.json(), equal_to({"error": {"flavor": "expected unicode", "os": "expected a dictionary"}}))
+        error_dict = response.json()['error']
+        assert_that(len(error_dict), equal_to(2))
+        assert_that(sorted(error_dict.keys()), equal_to(["flavor", "os"]))
 
     def test_update_entity_instance_with_one_attribute(self):
         instance_id = self._create_instance_entity()
integration_tests/test_metadata_instance_create.py (new file, 29 lines)
@@ -0,0 +1,29 @@
+from uuid import uuid4
+from datetime import datetime
+from hamcrest import assert_that, has_entry
+from hamcrest import equal_to
+from integration_tests.base_api_testcase import BaseApiTestCase
+from integration_tests.builders.messages import get_instance_create_end_sample
+import pytz
+
+
+class MetadataInstanceCreateTest(BaseApiTestCase):
+    def test_instance_create_with_metadata(self):
+        instance_id = str(uuid4())
+        tenant_id = str(uuid4())
+
+        self.rabbitMqHelper.push(
+            get_instance_create_end_sample(
+                instance_id=instance_id,
+                tenant_id=tenant_id,
+                creation_timestamp=datetime(2016, 2, 1, 9, 0, 0, tzinfo=pytz.utc),
+                metadata={"metering.billing_mode": "42"}
+            ))
+
+        self.assert_that_instance_entity_is_created_and_have_proper_metadata(instance_id, tenant_id)
+
+    def assert_that_instance_entity_is_created_and_have_proper_metadata(self, instance_id, tenant_id):
+        entities = self.almanachHelper.get_entities(tenant_id, "2016-01-01 00:00:00.000")
+        assert_that(len(entities), equal_to(1))
+        assert_that(entities[0], has_entry("entity_id", instance_id))
+        assert_that(entities[0], has_entry("metadata", {'metering.billing_mode': '42'}))
@@ -5,7 +5,8 @@ jsonpickle==0.7.1
 pymongo==2.7.2
 kombu>=3.0.30
 python-dateutil==2.2
-python-pymongomodem==0.0.3
 pytz>=2014.10
 voluptuous==0.8.11
 python-keystoneclient>=1.6.0
+six>=1.9.0 # MIT
+oslo.serialization>=1.10.0 # Apache-2.0
@@ -36,7 +36,7 @@ class BusAdapterTest(unittest.TestCase):
         instance_id = "e7d44dea-21c1-452c-b50c-cbab0d07d7d3"
         tenant_id = "0be9215b503b43279ae585d50a33aed8"
         instance_type = "myflavor"
-        timestamp = datetime(2014, 02, 14, 16, 30, 10, tzinfo=pytz.utc)
+        timestamp = datetime(2014, 2, 14, 16, 30, 10, tzinfo=pytz.utc)
         hostname = "some hostname"
         metadata = {"a_metadata.to_filter": "filtered_value", }
 
@@ -72,7 +72,7 @@ class BusAdapterTest(unittest.TestCase):
         instance_id = "e7d44dea-21c1-452c-b50c-cbab0d07d7d3"
         tenant_id = "0be9215b503b43279ae585d50a33aed8"
         instance_type = "myflavor"
-        timestamp = datetime(2014, 02, 14, 16, 30, 10, tzinfo=pytz.utc)
+        timestamp = datetime(2014, 2, 14, 16, 30, 10, tzinfo=pytz.utc)
         hostname = "some hostname"
 
         notification = messages.get_instance_create_end_sample(instance_id=instance_id, tenant_id=tenant_id,
@@ -173,7 +173,7 @@ class BusAdapterTest(unittest.TestCase):
     def test_on_message_with_volume(self):
         volume_id = "vol_id"
         tenant_id = "tenant_id"
-        timestamp_datetime = datetime(2014, 02, 14, 16, 30, 10, tzinfo=pytz.utc)
+        timestamp_datetime = datetime(2014, 2, 14, 16, 30, 10, tzinfo=pytz.utc)
         volume_type = "SF400"
         volume_size = 100000
         some_volume = "volume_name"
@@ -61,12 +61,7 @@ class BusAdapterTest(unittest.TestCase):
         self.retry_adapter = RetryAdapter(connection)
 
     def test_publish_to_retry_queue_happy_path(self):
-        message = MyObject
-        message.headers = []
-        message.body = 'omnomnom'
-        message.delivery_info = {'routing_key': 42}
-        message.content_type = 'xml/rapture'
-        message.content_encoding = 'iso8859-1'
+        message = self.build_message()
 
         self.config_mock.should_receive('rabbitmq_retry').and_return(1)
         self.expect_publish_with(message, 'almanach.retry').once()
@@ -74,12 +69,7 @@ class BusAdapterTest(unittest.TestCase):
         self.retry_adapter.publish_to_dead_letter(message)
 
     def test_publish_to_retry_queue_retries_if_it_fails(self):
-        message = MyObject
-        message.headers = {}
-        message.body = 'omnomnom'
-        message.delivery_info = {'routing_key': 42}
-        message.content_type = 'xml/rapture'
-        message.content_encoding = 'iso8859-1'
+        message = self.build_message()
 
         self.config_mock.should_receive('rabbitmq_retry').and_return(2)
         self.expect_publish_with(message, 'almanach.retry').times(4)\
@@ -90,13 +80,17 @@ class BusAdapterTest(unittest.TestCase):
 
         self.retry_adapter.publish_to_dead_letter(message)
 
-    def test_publish_to_dead_letter_messages_retried_more_than_twice(self):
-        message = MyObject
-        message.headers = {'x-death': [0, 1, 2, 3]}
-        message.body = 'omnomnom'
-        message.delivery_info = {'routing_key': ''}
+    def build_message(self, headers=dict()):
+        message = MyObject()
+        message.headers = headers
+        message.body = b'Now that the worst is behind you, it\'s time we get you back. - Mr. Robot'
+        message.delivery_info = {'routing_key': 42}
         message.content_type = 'xml/rapture'
         message.content_encoding = 'iso8859-1'
+        return message
+
+    def test_publish_to_dead_letter_messages_retried_more_than_twice(self):
+        message = self.build_message(headers={'x-death': [0, 1, 2, 3]})
 
         self.config_mock.should_receive('rabbitmq_retry').and_return(2)
         self.expect_publish_with(message, 'almanach.dead').once()
@@ -117,4 +111,8 @@ class BusAdapterTest(unittest.TestCase):
 
 
 class MyObject(object):
-    pass
+    headers = None
+    body = None
+    delivery_info = None
+    content_type = None
+    content_encoding = None
@@ -18,6 +18,7 @@ import flask
 from unittest import TestCase
 from datetime import datetime
 from flexmock import flexmock, flexmock_teardown
+import oslo_serialization
 
 from almanach import config
 from almanach.adapters import api_route_v1 as api_route
@@ -76,7 +77,7 @@ class BaseApi(TestCase):
         headers = {}
         headers['Accept'] = accept
         result = getattr(http_client, method)(url, data=json.dumps(data), query_string=query_string, headers=headers)
-        return_data = json.loads(result.data) \
+        return_data = oslo_serialization.jsonutils.loads(result.data) \
             if result.headers.get('Content-Type') == 'application/json' \
             else result.data
         return result.status_code, return_data
@@ -31,7 +31,7 @@ class ApiEntityTest(BaseApi):
             .with_args(
                 instance_id="INSTANCE_ID",
                 start_date=data["start_date"],
-            ).and_return(a(instance().with_id('INSTANCE_ID').with_start(2014, 01, 01, 00, 0, 00)))
+            ).and_return(a(instance().with_id('INSTANCE_ID').with_start(2014, 1, 1, 0, 0, 0)))
 
         code, result = self.api_put(
             '/entity/instance/INSTANCE_ID',
@@ -54,8 +54,8 @@ class ApiInstanceTest(BaseApi):
         ).and_return(a(
             instance().
             with_id('INSTANCE_ID').
-            with_start(2016, 03, 01, 00, 0, 00).
-            with_end(2016, 03, 03, 00, 0, 00).
+            with_start(2016, 3, 1, 0, 0, 0).
+            with_end(2016, 3, 3, 0, 0, 0).
             with_flavor(some_new_flavor))
         )
 