Remove XML API from Trove

The XML API is no longer a requirement, as dictated by the TC.

Implements blueprint destroy-xml-api
Change-Id: Ib4669155c19562ae1dda75cd30ad1a8f92b6be6a
parent fba8cabea3
commit 88e599f597

tox.ini
@@ -11,7 +11,6 @@ deps = -r{toxinidir}/requirements.txt
       -r{toxinidir}/test-requirements.txt
       setuptools_git>=0.4
commands = {envpython} run_tests.py
           {envpython} run_tests.py --test-config=etc/tests/xml.localhost.test.conf
           python setup.py testr --slowest

[tox:jenkins]
@@ -246,7 +246,7 @@ common_opts = [
                help='Allow insecure logging while '
                     'executing queries through SQLAlchemy.'),
    cfg.ListOpt('expected_filetype_suffixes',
                default=['atom', 'json', 'xml'],
                default=['json'],
                help='Filetype endings not to be reattached to an id '
                     'by the utils method correct_id_with_req.'),
    cfg.ListOpt('default_neutron_networks',
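The option above feeds Trove's id-suffix handling; since only '.json' remains a recognized format ending, here is a hypothetical standalone sketch of the behaviour it governs (names and logic are illustrative, not the real correct_id_with_req):

# Hypothetical sketch of what the option controls; this is not Trove's actual
# correct_id_with_req implementation. Suffixes in the list are treated as
# format markers and stay stripped; anything else is put back on the id.
EXPECTED_FILETYPE_SUFFIXES = ['json']   # previously ['atom', 'json', 'xml']


def reattach_suffix(stripped_id, requested_path):
    suffix = requested_path.rsplit('.', 1)[-1] if '.' in requested_path else ''
    if suffix and suffix not in EXPECTED_FILETYPE_SUFFIXES:
        return '%s.%s' % (stripped_id, suffix)
    return stripped_id


# reattach_suffix('backup-01', '/backups/backup-01.json') -> 'backup-01'
# reattach_suffix('backup-01', '/backups/backup-01.tar')  -> 'backup-01.tar'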
@@ -24,11 +24,23 @@ from trove.common import wsgi
LOG = logging.getLogger(__name__)

ExtensionsDescriptor = extensions.ExtensionDescriptor
ResourceExtension = extensions.ResourceExtension

CONF = cfg.CONF


class ResourceExtension(extensions.ResourceExtension):
    def __init__(self, collection, controller, parent=None,
                 collection_actions=None, member_actions=None,
                 deserializer=None, serializer=None):
        super(ResourceExtension, self).__init__(
            collection, controller,
            parent=parent,
            collection_actions=collection_actions,
            member_actions=member_actions,
            deserializer=wsgi.RequestDeserializer(),
            serializer=wsgi.TroveResponseSerializer())


class TroveExtensionMiddleware(extensions.ExtensionMiddleware):

    def __init__(self, application, ext_mgr=None):
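The new subclass pins every extension resource to the JSON deserializer/serializer pair, which is why the route modules later in this diff drop their serializer keyword arguments; a condensed sketch of the calling pattern they switch to (abstract descriptor methods omitted):

from trove.common import extensions
from trove.extensions.account import service


class Account(extensions.ExtensionsDescriptor):

    def get_resources(self):
        # No deserializer/serializer kwargs: ResourceExtension now
        # hard-wires wsgi.RequestDeserializer / wsgi.TroveResponseSerializer.
        resource = extensions.ResourceExtension(
            '{tenant_id}/mgmt/accounts',
            service.AccountController())
        return [resource]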
@@ -25,8 +25,6 @@ import uuid
import webob
import webob.dec
import webob.exc
from lxml import etree
from xml.dom import minidom

from trove.common import context as rd_context
from trove.common import exception
@@ -45,8 +43,6 @@ Router = openstack_wsgi.Router
Debug = openstack_wsgi.Debug
Middleware = openstack_wsgi.Middleware
JSONDictSerializer = openstack_wsgi.JSONDictSerializer
XMLDictSerializer = openstack_wsgi.XMLDictSerializer
XMLDeserializer = openstack_wsgi.XMLDeserializer
RequestDeserializer = openstack_wsgi.RequestDeserializer

CONF = cfg.CONF
@ -57,68 +53,7 @@ eventlet.patcher.monkey_patch(all=False, socket=True)
|
||||
|
||||
LOG = logging.getLogger('trove.common.wsgi')
|
||||
|
||||
XMLNS = 'http://docs.openstack.org/database/api/v1.0'
|
||||
CUSTOM_PLURALS_METADATA = {'databases': '', 'users': ''}
|
||||
CUSTOM_SERIALIZER_METADATA = {
|
||||
'instance': {
|
||||
'status': '',
|
||||
'hostname': '',
|
||||
'id': '',
|
||||
'name': '',
|
||||
'created': '',
|
||||
'updated': '',
|
||||
'host': '',
|
||||
'server_id': '',
|
||||
#mgmt/instance
|
||||
'local_id': '',
|
||||
'task_description': '',
|
||||
'deleted': '',
|
||||
'deleted_at': '',
|
||||
'tenant_id': '',
|
||||
},
|
||||
'volume': {
|
||||
'size': '',
|
||||
'used': '',
|
||||
#mgmt/instance
|
||||
'id': '',
|
||||
},
|
||||
'configuration': {
|
||||
'id': '',
|
||||
'name': '',
|
||||
'description': '',
|
||||
'datastore_version_id': ''
|
||||
},
|
||||
'flavor': {'id': '', 'ram': '', 'name': ''},
|
||||
'link': {'href': '', 'rel': ''},
|
||||
'database': {'name': ''},
|
||||
'user': {'name': '', 'password': '', 'host': ''},
|
||||
'account': {'id': ''},
|
||||
'security_group': {'id': '', 'name': '', 'description': '', 'user': '',
|
||||
'tenant_id': ''},
|
||||
'security_group_rule': {'id': '', 'group_id': '', 'protocol': '',
|
||||
'from_port': '', 'to_port': '', 'cidr': ''},
|
||||
'security_group_instance_association': {'id': '', 'security_group_id': '',
|
||||
'instance_id': ''},
|
||||
# mgmt/host
|
||||
'host': {'instanceCount': '', 'name': '', 'usedRAM': '', 'totalRAM': '',
|
||||
'percentUsed': ''},
|
||||
# mgmt/storage
|
||||
'capacity': {'available': '', 'total': ''},
|
||||
'provision': {'available': '', 'total': '', 'percent': ''},
|
||||
'device': {'used': '', 'name': '', 'type': ''},
|
||||
# mgmt/account
|
||||
'account': {'id': '', 'num_instances': ''},
|
||||
# mgmt/quotas
|
||||
'quotas': {'instances': '', 'volumes': '', 'backups': ''},
|
||||
#mgmt/instance
|
||||
'guest_status': {'state_description': ''},
|
||||
#mgmt/instance/diagnostics
|
||||
'diagnostics': {'vmHwm': '', 'vmPeak': '', 'vmSize': '', 'threads': '',
|
||||
'version': '', 'vmRss': '', 'fdSize': ''},
|
||||
#mgmt/instance/root
|
||||
'root_history': {'enabled': '', 'id': '', 'user': ''},
|
||||
|
||||
}
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
def versioned_urlmap(*args, **kwargs):
|
||||
@@ -248,14 +183,12 @@ class Request(openstack_wsgi.Request):

        if len(parts) > 1:
            format = parts[1]
            if format in ['json', 'xml']:
            if format in ['json']:
                return 'application/{0}'.format(parts[1])

        ctypes = {
            'application/vnd.openstack.trove+json': "application/json",
            'application/vnd.openstack.trove+xml': "application/xml",
            'application/json': "application/json",
            'application/xml': "application/xml",
        }
        bm = self.accept.best_match(ctypes.keys())
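What is left of content negotiation is small enough to state on its own; a standalone sketch of the JSON-only matching this hunk leaves behind (simplified, not the Request class itself):

ACCEPTED_TYPES = {
    'application/vnd.openstack.trove+json': 'application/json',
    'application/json': 'application/json',
}


def best_match_content_type(path, accept_header='*/*'):
    # A '.json' extension on the URL wins over the Accept header.
    parts = path.rsplit('.', 1)
    if len(parts) > 1 and parts[1] == 'json':
        return 'application/json'
    for offer in accept_header.split(','):
        mime = offer.split(';')[0].strip()
        if mime in ACCEPTED_TYPES:
            return ACCEPTED_TYPES[mime]
    return 'application/json'


# best_match_content_type('/v1.0/abc/instances.json') -> 'application/json'
# best_match_content_type('/v1.0/abc/instances', 'application/xml')
#     -> 'application/json' (XML is simply no longer offered)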
@@ -279,13 +212,7 @@ class Request(openstack_wsgi.Request):


class Result(object):
    """A result whose serialization is compatible with JSON and XML.

    This class is used by TroveResponseSerializer, which calls the
    data method to grab a JSON or XML specific dictionary which it then
    passes on to be serialized.

    """
    """A result whose serialization is compatible with JSON."""

    def __init__(self, data, status=200):
        self._data = data
@@ -293,15 +220,10 @@ class Result(object):

    def data(self, serialization_type):
        """Return an appropriate serialized type for the body.

        In both cases a dictionary is returned. With JSON it maps directly,
        while with XML the dictionary is expected to have a single key value
        which becomes the root element.

        serialization_type is not used presently, but may be
        in the future, so it stays.
        """
        if (serialization_type == "application/xml" and
                hasattr(self._data, "data_for_xml")):
            return self._data.data_for_xml()

        if hasattr(self._data, "data_for_json"):
            return self._data.data_for_json()
        return self._data
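Result now has a single code path; a small usage sketch (the view class here is hypothetical, standing in for Trove's view objects that expose data_for_json, and the Result shown is a condensed copy of the class as it reads after this hunk):

class FakeVersionsView(object):
    """Stand-in for a Trove view object; only data_for_json matters now."""

    def data_for_json(self):
        return {'versions': [{'id': 'v1.0', 'status': 'CURRENT'}]}


class Result(object):
    """Condensed copy of the post-change class."""

    def __init__(self, data, status=200):
        self._data = data
        self.status = status

    def data(self, serialization_type):
        # serialization_type is kept for interface compatibility only.
        if hasattr(self._data, "data_for_json"):
            return self._data.data_for_json()
        return self._data


result = Result(FakeVersionsView())
assert result.data("application/json") == {
    'versions': [{'id': 'v1.0', 'status': 'CURRENT'}]}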
@@ -472,12 +394,10 @@ class Controller(object):
            raise exception.BadRequest(message=error_msg)

    def create_resource(self):
        serializer = TroveResponseSerializer(
            body_serializers={'application/xml': TroveXMLDictSerializer()})
        return Resource(
            self,
            TroveRequestDeserializer(),
            serializer,
            RequestDeserializer(),
            TroveResponseSerializer(),
            self.exception_map)

    def _extract_limits(self, params):
@ -485,107 +405,6 @@ class Controller(object):
|
||||
if key in ["limit", "marker"]])
|
||||
|
||||
|
||||
class TroveRequestDeserializer(RequestDeserializer):
|
||||
"""Break up a Request object into more useful pieces."""
|
||||
|
||||
def __init__(self, body_deserializers=None, headers_deserializer=None,
|
||||
supported_content_types=None):
|
||||
super(TroveRequestDeserializer, self).__init__(
|
||||
body_deserializers,
|
||||
headers_deserializer,
|
||||
supported_content_types)
|
||||
|
||||
self.body_deserializers['application/xml'] = TroveXMLDeserializer()
|
||||
|
||||
|
||||
class TroveXMLDeserializer(XMLDeserializer):
|
||||
def __init__(self, metadata=None):
|
||||
"""
|
||||
:param metadata: information needed to deserialize xml into
|
||||
a dictionary.
|
||||
"""
|
||||
metadata = metadata or {}
|
||||
metadata['plurals'] = CUSTOM_PLURALS_METADATA
|
||||
super(TroveXMLDeserializer, self).__init__(metadata)
|
||||
|
||||
def default(self, datastring):
|
||||
# Sanitize the newlines
|
||||
# hub-cap: This feels wrong but minidom keeps the newlines
|
||||
# and spaces as childNodes which is expected behavior.
|
||||
return {'body': self._from_xml(re.sub(r'((?<=>)\s+)*\n*(\s+(?=<))*',
|
||||
'', datastring))}
|
||||
|
||||
def _from_xml_node(self, node, listnames):
|
||||
"""Convert a minidom node to a simple Python type.
|
||||
|
||||
Overridden from openstack deserializer to skip xmlns attributes and
|
||||
remove certain unicode characters
|
||||
|
||||
:param listnames: list of XML node names whose subnodes should
|
||||
be considered list items.
|
||||
|
||||
"""
|
||||
|
||||
if len(node.childNodes) == 1 and node.childNodes[0].nodeType == 3:
|
||||
return node.childNodes[0].nodeValue
|
||||
elif node.nodeName in listnames:
|
||||
return [self._from_xml_node(n, listnames) for n in node.childNodes]
|
||||
else:
|
||||
result = dict()
|
||||
for attr in node.attributes.keys():
|
||||
if attr == 'xmlns':
|
||||
continue
|
||||
result[attr] = node.attributes[attr].nodeValue
|
||||
for child in node.childNodes:
|
||||
if child.nodeType != node.TEXT_NODE:
|
||||
result[child.nodeName] = self._from_xml_node(child,
|
||||
listnames)
|
||||
return result
|
||||
|
||||
|
||||
class TroveXMLDictSerializer(openstack_wsgi.XMLDictSerializer):
|
||||
def __init__(self, metadata=None, xmlns=None):
|
||||
super(TroveXMLDictSerializer, self).__init__(metadata, XMLNS)
|
||||
|
||||
def default(self, data):
|
||||
# We expect data to be a dictionary containing a single key as the XML
|
||||
# root, or two keys, the later being "links."
|
||||
# We expect data to contain a single key which is the XML root,
|
||||
has_links = False
|
||||
root_key = None
|
||||
for key in data:
|
||||
if key == "links":
|
||||
has_links = True
|
||||
elif root_key is None:
|
||||
root_key = key
|
||||
else:
|
||||
msg = "Xml issue: multiple root keys found in dict!: %s" % data
|
||||
LOG.error(msg)
|
||||
raise RuntimeError(msg)
|
||||
if root_key is None:
|
||||
msg = "Missing root key in dict: %s" % data
|
||||
LOG.error(msg)
|
||||
raise RuntimeError(msg)
|
||||
doc = minidom.Document()
|
||||
node = self._to_xml_node(doc, self.metadata, root_key, data[root_key])
|
||||
if has_links:
|
||||
# Create a links element, and mix it into the node element.
|
||||
links_node = self._to_xml_node(doc, self.metadata,
|
||||
'links', data['links'])
|
||||
node.appendChild(links_node)
|
||||
return self.to_xml_string(node)
|
||||
|
||||
def _to_xml_node(self, doc, metadata, nodename, data):
|
||||
metadata['attributes'] = CUSTOM_SERIALIZER_METADATA
|
||||
if hasattr(data, "to_xml"):
|
||||
return data.to_xml()
|
||||
return super(TroveXMLDictSerializer, self)._to_xml_node(
|
||||
doc,
|
||||
metadata,
|
||||
nodename,
|
||||
data)
|
||||
|
||||
|
||||
class TroveResponseSerializer(openstack_wsgi.ResponseSerializer):
|
||||
def serialize_body(self, response, data, content_type, action):
|
||||
"""Overrides body serialization in openstack_wsgi.ResponseSerializer.
|
||||
@@ -669,11 +488,8 @@ class Fault(webob.exc.HTTPException):
        else:
            fault_data[fault_name]['message'] = self.wrapped_exc.explanation

        # 'code' is an attribute on the fault tag itself
        metadata = {'attributes': {fault_name: 'code'}}
        content_type = req.best_match_content_type()
        serializer = {
            'application/xml': openstack_wsgi.XMLDictSerializer(metadata),
            'application/json': openstack_wsgi.JSONDictSerializer(),
        }[content_type]

@@ -779,11 +595,8 @@ class OverLimitFault(webob.exc.HTTPException):
        error format.
        """
        content_type = request.best_match_content_type()
        metadata = {"attributes": {"overLimit": ["code", "retryAfter"]}}

        xml_serializer = XMLDictSerializer(metadata, XMLNS)
        serializer = {'application/xml': xml_serializer,
                      'application/json': JSONDictSerializer(),
        serializer = {'application/json': JSONDictSerializer(),
                      }[content_type]

        content = serializer.serialize(self.content)
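Both fault classes now index a dictionary with a single JSON entry; stripped of the webob plumbing, the remaining serialization step amounts to the following sketch (stdlib json stands in for jsonutils, and the message text is borrowed from the rate-limit tests later in this diff):

import json


class JSONDictSerializer(object):
    def serialize(self, data):
        return json.dumps(data)


content_type = 'application/json'   # the only type negotiation can return now
serializer = {'application/json': JSONDictSerializer()}[content_type]
body = serializer.serialize(
    {'overLimit': {'code': 413, 'retryAfter': '60',
                   'message': 'Only 1 GET request(s) can be made to * '
                              'every minute.'}})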
@ -821,108 +634,3 @@ class JSONDictSerializer(DictSerializer):
|
||||
|
||||
def default(self, data):
|
||||
return jsonutils.dumps(data)
|
||||
|
||||
|
||||
class XMLDictSerializer(DictSerializer):
|
||||
def __init__(self, metadata=None, xmlns=None):
|
||||
"""
|
||||
:param metadata: information needed to deserialize xml into
|
||||
a dictionary.
|
||||
:param xmlns: XML namespace to include with serialized xml
|
||||
"""
|
||||
super(XMLDictSerializer, self).__init__()
|
||||
self.metadata = metadata or {}
|
||||
self.xmlns = xmlns
|
||||
|
||||
def default(self, data):
|
||||
# We expect data to contain a single key which is the XML root.
|
||||
root_key = data.keys()[0]
|
||||
doc = minidom.Document()
|
||||
node = self._to_xml_node(doc, self.metadata, root_key, data[root_key])
|
||||
|
||||
return self.to_xml_string(node)
|
||||
|
||||
def to_xml_string(self, node, has_atom=False):
|
||||
self._add_xmlns(node, has_atom)
|
||||
return node.toxml('UTF-8')
|
||||
|
||||
#NOTE (ameade): the has_atom should be removed after all of the
|
||||
# xml serializers and view builders have been updated to the current
|
||||
# spec that required all responses include the xmlns:atom, the has_atom
|
||||
# flag is to prevent current tests from breaking
|
||||
def _add_xmlns(self, node, has_atom=False):
|
||||
if self.xmlns is not None:
|
||||
node.setAttribute('xmlns', self.xmlns)
|
||||
if has_atom:
|
||||
node.setAttribute('xmlns:atom', "http://www.w3.org/2005/Atom")
|
||||
|
||||
def _to_xml_node(self, doc, metadata, nodename, data):
|
||||
"""Recursive method to convert data members to XML nodes."""
|
||||
result = doc.createElement(nodename)
|
||||
|
||||
# Set the xml namespace if one is specified
|
||||
# TODO(justinsb): We could also use prefixes on the keys
|
||||
xmlns = metadata.get('xmlns', None)
|
||||
if xmlns:
|
||||
result.setAttribute('xmlns', xmlns)
|
||||
|
||||
#TODO(bcwaldon): accomplish this without a type-check
|
||||
if isinstance(data, list):
|
||||
collections = metadata.get('list_collections', {})
|
||||
if nodename in collections:
|
||||
metadata = collections[nodename]
|
||||
for item in data:
|
||||
node = doc.createElement(metadata['item_name'])
|
||||
node.setAttribute(metadata['item_key'], str(item))
|
||||
result.appendChild(node)
|
||||
return result
|
||||
singular = metadata.get('plurals', {}).get(nodename, None)
|
||||
if singular is None:
|
||||
if nodename.endswith('s'):
|
||||
singular = nodename[:-1]
|
||||
else:
|
||||
singular = 'item'
|
||||
for item in data:
|
||||
node = self._to_xml_node(doc, metadata, singular, item)
|
||||
result.appendChild(node)
|
||||
#TODO(bcwaldon): accomplish this without a type-check
|
||||
elif isinstance(data, dict):
|
||||
collections = metadata.get('dict_collections', {})
|
||||
if nodename in collections:
|
||||
metadata = collections[nodename]
|
||||
for k, v in data.items():
|
||||
node = doc.createElement(metadata['item_name'])
|
||||
node.setAttribute(metadata['item_key'], str(k))
|
||||
text = doc.createTextNode(str(v))
|
||||
node.appendChild(text)
|
||||
result.appendChild(node)
|
||||
return result
|
||||
attrs = metadata.get('attributes', {}).get(nodename, {})
|
||||
for k, v in data.items():
|
||||
if k in attrs:
|
||||
result.setAttribute(k, str(v))
|
||||
else:
|
||||
if k == "deleted":
|
||||
v = str(bool(v))
|
||||
node = self._to_xml_node(doc, metadata, k, v)
|
||||
result.appendChild(node)
|
||||
else:
|
||||
# Type is atom
|
||||
node = doc.createTextNode(str(data))
|
||||
result.appendChild(node)
|
||||
return result
|
||||
|
||||
def _create_link_nodes(self, xml_doc, links):
|
||||
link_nodes = []
|
||||
for link in links:
|
||||
link_node = xml_doc.createElement('atom:link')
|
||||
link_node.setAttribute('rel', link['rel'])
|
||||
link_node.setAttribute('href', link['href'])
|
||||
if 'type' in link:
|
||||
link_node.setAttribute('type', link['type'])
|
||||
link_nodes.append(link_node)
|
||||
return link_nodes
|
||||
|
||||
def _to_xml(self, root):
|
||||
"""Convert the xml object to an xml string."""
|
||||
return etree.tostring(root, encoding='UTF-8', xml_declaration=True)
|
||||
|
@@ -16,7 +16,6 @@
from trove.openstack.common import log as logging

from trove.common import extensions
from trove.common import wsgi
from trove.extensions.account import service


@@ -42,14 +41,9 @@ class Account(extensions.ExtensionsDescriptor):

    def get_resources(self):
        resources = []
        serializer = wsgi.TroveResponseSerializer(
            body_serializers={'application/xml':
                              wsgi.TroveXMLDictSerializer()})
        resource = extensions.ResourceExtension(
            '{tenant_id}/mgmt/accounts',
            service.AccountController(),
            deserializer=wsgi.RequestDeserializer(),
            serializer=serializer)
            service.AccountController())
        resources.append(resource)

        return resources
@ -16,7 +16,6 @@
|
||||
from trove.openstack.common import log as logging
|
||||
|
||||
from trove.common import extensions
|
||||
from trove.common import wsgi
|
||||
from trove.extensions.mgmt.instances.service import MgmtInstanceController
|
||||
from trove.extensions.mgmt.host.service import HostController
|
||||
from trove.extensions.mgmt.quota.service import QuotaController
|
||||
@ -46,14 +45,9 @@ class Mgmt(extensions.ExtensionsDescriptor):
|
||||
|
||||
def get_resources(self):
|
||||
resources = []
|
||||
serializer = wsgi.TroveResponseSerializer(
|
||||
body_serializers={'application/xml':
|
||||
wsgi.TroveXMLDictSerializer()})
|
||||
instances = extensions.ResourceExtension(
|
||||
'{tenant_id}/mgmt/instances',
|
||||
MgmtInstanceController(),
|
||||
deserializer=wsgi.TroveRequestDeserializer(),
|
||||
serializer=serializer,
|
||||
member_actions={'root': 'GET',
|
||||
'diagnostics': 'GET',
|
||||
'hwinfo': 'GET',
|
||||
@ -63,24 +57,18 @@ class Mgmt(extensions.ExtensionsDescriptor):
|
||||
hosts = extensions.ResourceExtension(
|
||||
'{tenant_id}/mgmt/hosts',
|
||||
HostController(),
|
||||
deserializer=wsgi.RequestDeserializer(),
|
||||
serializer=serializer,
|
||||
member_actions={})
|
||||
resources.append(hosts)
|
||||
|
||||
quota = extensions.ResourceExtension(
|
||||
'{tenant_id}/mgmt/quotas',
|
||||
QuotaController(),
|
||||
deserializer=wsgi.RequestDeserializer(),
|
||||
serializer=serializer,
|
||||
member_actions={})
|
||||
resources.append(quota)
|
||||
|
||||
storage = extensions.ResourceExtension(
|
||||
'{tenant_id}/mgmt/storage',
|
||||
StorageController(),
|
||||
deserializer=wsgi.RequestDeserializer(),
|
||||
serializer=serializer,
|
||||
member_actions={})
|
||||
resources.append(storage)
|
||||
|
||||
@ -89,8 +77,6 @@ class Mgmt(extensions.ExtensionsDescriptor):
|
||||
hostservice.HostInstanceController(),
|
||||
parent={'member_name': 'host',
|
||||
'collection_name': '{tenant_id}/mgmt/hosts'},
|
||||
deserializer=wsgi.RequestDeserializer(),
|
||||
serializer=serializer,
|
||||
collection_actions={'action': 'POST'})
|
||||
resources.append(host_instances)
|
||||
|
||||
|
@ -16,7 +16,6 @@
|
||||
from trove.openstack.common import log as logging
|
||||
|
||||
from trove.common import extensions
|
||||
from trove.common import wsgi
|
||||
from trove.extensions.mysql import service
|
||||
|
||||
|
||||
@ -42,17 +41,12 @@ class Mysql(extensions.ExtensionsDescriptor):
|
||||
|
||||
def get_resources(self):
|
||||
resources = []
|
||||
serializer = wsgi.TroveResponseSerializer(
|
||||
body_serializers={'application/xml':
|
||||
wsgi.TroveXMLDictSerializer()})
|
||||
|
||||
resource = extensions.ResourceExtension(
|
||||
'databases',
|
||||
service.SchemaController(),
|
||||
parent={'member_name': 'instance',
|
||||
'collection_name': '{tenant_id}/instances'},
|
||||
deserializer=wsgi.TroveRequestDeserializer(),
|
||||
serializer=serializer)
|
||||
'collection_name': '{tenant_id}/instances'})
|
||||
resources.append(resource)
|
||||
|
||||
resource = extensions.ResourceExtension(
|
||||
@ -60,9 +54,6 @@ class Mysql(extensions.ExtensionsDescriptor):
|
||||
service.UserController(),
|
||||
parent={'member_name': 'instance',
|
||||
'collection_name': '{tenant_id}/instances'},
|
||||
# deserializer=extensions.ExtensionsXMLSerializer()
|
||||
deserializer=wsgi.TroveRequestDeserializer(),
|
||||
serializer=serializer,
|
||||
member_actions={'update': 'PUT'},
|
||||
collection_actions={'update_all': 'PUT'})
|
||||
resources.append(resource)
|
||||
@ -73,8 +64,6 @@ class Mysql(extensions.ExtensionsDescriptor):
|
||||
service.UserAccessController(),
|
||||
parent={'member_name': 'user',
|
||||
'collection_name': collection_url},
|
||||
deserializer=wsgi.TroveRequestDeserializer(),
|
||||
serializer=serializer,
|
||||
collection_actions={'update': 'PUT'})
|
||||
resources.append(resource)
|
||||
|
||||
@ -82,9 +71,7 @@ class Mysql(extensions.ExtensionsDescriptor):
|
||||
'root',
|
||||
service.RootController(),
|
||||
parent={'member_name': 'instance',
|
||||
'collection_name': '{tenant_id}/instances'},
|
||||
deserializer=wsgi.TroveRequestDeserializer(),
|
||||
serializer=serializer)
|
||||
'collection_name': '{tenant_id}/instances'})
|
||||
resources.append(resource)
|
||||
|
||||
return resources
|
||||
|
@@ -17,7 +17,6 @@
from trove.openstack.common import log as logging

from trove.common import extensions
from trove.common import wsgi
from trove.common import cfg
from trove.extensions.security_group import service

@@ -49,23 +48,16 @@ security groups and manage security group rules."

    def get_resources(self):
        resources = []
        serializer = wsgi.TroveResponseSerializer(
            body_serializers={'application/xml':
                              wsgi.TroveXMLDictSerializer()})

        if CONF.trove_security_groups_support:
            security_groups = extensions.ResourceExtension(
                '{tenant_id}/security-groups',
                service.SecurityGroupController(),
                deserializer=wsgi.TroveRequestDeserializer(),
                serializer=serializer)
                service.SecurityGroupController())
            resources.append(security_groups)

            security_group_rules = extensions.ResourceExtension(
                '{tenant_id}/security-group-rules',
                service.SecurityGroupRuleController(),
                deserializer=wsgi.TroveRequestDeserializer(),
                serializer=serializer)
                service.SecurityGroupRuleController())
            resources.append(security_group_rules)

        return resources
@ -19,7 +19,6 @@ from proboscis.asserts import assert_true
|
||||
from proboscis import test
|
||||
from proboscis import SkipTest
|
||||
from proboscis.decorators import time_out
|
||||
import troveclient.compat
|
||||
from trove.common.utils import poll_until
|
||||
from trove.common.utils import generate_uuid
|
||||
from trove.tests.util import create_dbaas_client
|
||||
@ -63,15 +62,13 @@ class CreateBackups(object):
|
||||
except exceptions.BadRequest as e:
|
||||
resp, body = instance_info.dbaas.client.last_response
|
||||
assert_equal(resp.status, 400)
|
||||
if not isinstance(instance_info.dbaas.client,
|
||||
troveclient.compat.xml.TroveXmlClient):
|
||||
assert_equal(e.message,
|
||||
"Validation error: "
|
||||
"backup['instance'] u'%s' does not match "
|
||||
"'^([0-9a-fA-F]){8}-([0-9a-fA-F]){4}-"
|
||||
"([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-"
|
||||
"([0-9a-fA-F]){12}$'" %
|
||||
invalid_inst_id)
|
||||
assert_equal(e.message,
|
||||
"Validation error: "
|
||||
"backup['instance'] u'%s' does not match "
|
||||
"'^([0-9a-fA-F]){8}-([0-9a-fA-F]){4}-"
|
||||
"([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-"
|
||||
"([0-9a-fA-F]){12}$'" %
|
||||
invalid_inst_id)
|
||||
|
||||
@test
|
||||
def test_backup_create_instance_not_found(self):
|
||||
|
@ -17,13 +17,11 @@
|
||||
#
|
||||
|
||||
from proboscis import test
|
||||
from proboscis import asserts
|
||||
from proboscis import SkipTest
|
||||
from functools import wraps
|
||||
|
||||
from troveclient.compat.client import TroveHTTPClient
|
||||
from trove.tests.api.versions import Versions
|
||||
from troveclient.compat import exceptions
|
||||
|
||||
|
||||
@test(groups=['dbaas.api.headers'])
|
||||
@ -52,9 +50,5 @@ def must_work_with_blank_accept_headers():
|
||||
# run versions to make sure the API still returns JSON even though the
|
||||
# header type is blank
|
||||
versions.test_list_versions_index()
|
||||
# now change headers to XML to make sure the test fails
|
||||
morph_content_type_to('application/xml')
|
||||
asserts.assert_raises(exceptions.ResponseFormatError,
|
||||
versions.test_list_versions_index)
|
||||
finally:
|
||||
client.client.morph_request = original_morph_request
|
||||
|
@ -63,7 +63,6 @@ from trove.common.utils import poll_until
|
||||
from trove.tests.util.check import AttrCheck
|
||||
from trove.tests.util.check import TypeCheck
|
||||
from trove.tests.util import test_config
|
||||
from trove.tests.util import skip_if_xml
|
||||
|
||||
FAKE = test_config.values['fake_mode']
|
||||
|
||||
@ -359,8 +358,6 @@ class CreateInstanceFail(object):
|
||||
|
||||
@test
|
||||
def test_create_with_bad_nics(self):
|
||||
# FIXME: (steve-leon) Remove this once xml is yanked out
|
||||
skip_if_xml()
|
||||
instance_name = "instance-failure-with-bad-nics"
|
||||
if VOLUME_SUPPORT:
|
||||
volume = {'size': 1}
|
||||
|
@ -26,7 +26,6 @@ from trove.tests.api.instances import CreateInstance
|
||||
from trove.tests.config import CONFIG
|
||||
from trove.tests.util import create_dbaas_client
|
||||
from trove.tests.util.users import Requirements
|
||||
from trove.tests.util import skip_if_xml
|
||||
from trove.tests import DBAAS_API
|
||||
from trove.tests import PRE_INSTANCES
|
||||
from trove.tests import INSTANCES
|
||||
@ -186,8 +185,6 @@ class HostsMgmtCommands(object):
|
||||
|
||||
@test
|
||||
def test_update_hosts(self):
|
||||
# FIXME: (rmyers) Update hosts is broken in xml
|
||||
skip_if_xml()
|
||||
ids = self._get_ids()
|
||||
assert_not_equal(ids, [], "No active instances found")
|
||||
before_versions = {}
|
||||
|
@ -124,12 +124,6 @@ class RestartTaskStatusTests(MgmtInstanceBase):
|
||||
out = resp.data("application/json")
|
||||
assert_equal(out, None)
|
||||
|
||||
@test
|
||||
def mgmt_restart_task_returns_xml(self):
|
||||
resp = self.reset_task_status()
|
||||
out = resp.data("application/xml")
|
||||
assert_equal(out, None)
|
||||
|
||||
@test
|
||||
def mgmt_restart_task_changes_status_to_none(self):
|
||||
self._change_task_status_to(InstanceTasks.BUILDING)
|
||||
|
@ -21,14 +21,12 @@ from proboscis import test
|
||||
from proboscis import asserts
|
||||
from proboscis import after_class
|
||||
from proboscis import before_class
|
||||
import troveclient.compat
|
||||
from trove.tests.config import CONFIG
|
||||
from trove.tests.api.instances import instance_info
|
||||
from trove.tests.api.instances import VOLUME_SUPPORT
|
||||
|
||||
from trove.tests.util.users import Requirements
|
||||
from trove.tests.util import create_dbaas_client
|
||||
import trove.tests.util as tests_utils
|
||||
from trove.common.utils import poll_until
|
||||
|
||||
|
||||
@ -66,21 +64,18 @@ class MalformedJson(object):
|
||||
asserts.assert_equal(httpCode, 400,
|
||||
"Create instance failed with code %s,"
|
||||
" exception %s" % (httpCode, e))
|
||||
if not isinstance(self.dbaas.client,
|
||||
troveclient.compat.xml.TroveXmlClient):
|
||||
databases = "u'foo'"
|
||||
users = "u'bar'"
|
||||
asserts.assert_equal(e.message,
|
||||
"Validation error: "
|
||||
"instance['databases'] %s is not of type"
|
||||
" 'array'; instance['users'] %s is not of"
|
||||
" type 'array'; instance['volume'] 3 is "
|
||||
"not of type 'object'"
|
||||
% (databases, users))
|
||||
databases = "u'foo'"
|
||||
users = "u'bar'"
|
||||
asserts.assert_equal(e.message,
|
||||
"Validation error: "
|
||||
"instance['databases'] %s is not of type"
|
||||
" 'array'; instance['users'] %s is not of"
|
||||
" type 'array'; instance['volume'] 3 is "
|
||||
"not of type 'object'"
|
||||
% (databases, users))
|
||||
|
||||
@test
|
||||
def test_bad_database_data(self):
|
||||
tests_utils.skip_if_xml()
|
||||
_bad_db_data = "{foo}"
|
||||
try:
|
||||
self.dbaas.databases.create(self.instance.id, _bad_db_data)
|
||||
@ -90,9 +85,7 @@ class MalformedJson(object):
|
||||
asserts.assert_equal(httpCode, 400,
|
||||
"Create database failed with code %s, "
|
||||
"exception %s" % (httpCode, e))
|
||||
if not isinstance(self.dbaas.client,
|
||||
troveclient.compat.xml.TroveXmlClient):
|
||||
_bad_db_data = "u'{foo}'"
|
||||
_bad_db_data = "u'{foo}'"
|
||||
asserts.assert_equal(e.message,
|
||||
"Validation error: "
|
||||
"databases %s is not of type 'array'" %
|
||||
@ -194,16 +187,14 @@ class MalformedJson(object):
|
||||
asserts.assert_equal(httpCode, 400,
|
||||
"Change usr/passwd failed with code %s, "
|
||||
"exception %s" % (httpCode, e))
|
||||
if not isinstance(self.dbaas.client,
|
||||
troveclient.compat.xml.TroveXmlClient):
|
||||
password = "u''"
|
||||
asserts.assert_equal(e.message,
|
||||
"Validation error: users[0] 'password' is"
|
||||
" a required property; "
|
||||
"users[0]['name'] %s is too short; "
|
||||
"users[0]['name'] %s does not match "
|
||||
"'^.*[0-9a-zA-Z]+.*$'"
|
||||
% (password, password))
|
||||
password = "u''"
|
||||
asserts.assert_equal(e.message,
|
||||
"Validation error: users[0] 'password' is"
|
||||
" a required property; "
|
||||
"users[0]['name'] %s is too short; "
|
||||
"users[0]['name'] %s does not match "
|
||||
"'^.*[0-9a-zA-Z]+.*$'"
|
||||
% (password, password))
|
||||
|
||||
@test
|
||||
def test_bad_grant_user_access(self):
|
||||
@ -250,7 +241,6 @@ class MalformedJson(object):
|
||||
|
||||
@test
|
||||
def test_bad_body_flavorid_create_instance(self):
|
||||
tests_utils.skip_if_xml()
|
||||
|
||||
flavorId = ["?"]
|
||||
try:
|
||||
@ -263,23 +253,19 @@ class MalformedJson(object):
|
||||
asserts.assert_equal(httpCode, 400,
|
||||
"Create instance failed with code %s, "
|
||||
"exception %s" % (httpCode, e))
|
||||
|
||||
if not isinstance(self.dbaas.client,
|
||||
troveclient.compat.xml.TroveXmlClient):
|
||||
flavorId = [u'?']
|
||||
asserts.assert_equal(e.message,
|
||||
"Validation error: "
|
||||
"instance['flavorRef'] %s is not valid "
|
||||
"under any of the given schemas; %s is "
|
||||
"not of type 'string'; %s is not of type"
|
||||
" 'string'; %s is not of type 'integer'; "
|
||||
"instance['volume'] 2 is not of"
|
||||
" type 'object'" %
|
||||
(flavorId, flavorId, flavorId, flavorId))
|
||||
flavorId = [u'?']
|
||||
asserts.assert_equal(e.message,
|
||||
"Validation error: "
|
||||
"instance['flavorRef'] %s is not valid "
|
||||
"under any of the given schemas; %s is "
|
||||
"not of type 'string'; %s is not of type"
|
||||
" 'string'; %s is not of type 'integer'; "
|
||||
"instance['volume'] 2 is not of"
|
||||
" type 'object'" %
|
||||
(flavorId, flavorId, flavorId, flavorId))
|
||||
|
||||
@test
|
||||
def test_bad_body_datastore_create_instance(self):
|
||||
tests_utils.skip_if_xml()
|
||||
|
||||
datastore = "*"
|
||||
datastore_version = "*"
|
||||
@ -294,17 +280,14 @@ class MalformedJson(object):
|
||||
asserts.assert_equal(httpCode, 400,
|
||||
"Create instance failed with code %s, "
|
||||
"exception %s" % (httpCode, e))
|
||||
|
||||
if not isinstance(self.dbaas.client,
|
||||
troveclient.compat.xml.TroveXmlClient):
|
||||
asserts.assert_equal(e.message,
|
||||
"Validation error: "
|
||||
"instance['datastore']['type']"
|
||||
" u'%s' does not match"
|
||||
" '^.*[0-9a-zA-Z]+.*$'; "
|
||||
"instance['datastore']['version'] u'%s' "
|
||||
"does not match '^.*[0-9a-zA-Z]+.*$'" %
|
||||
(datastore, datastore_version))
|
||||
asserts.assert_equal(e.message,
|
||||
"Validation error: "
|
||||
"instance['datastore']['type']"
|
||||
" u'%s' does not match"
|
||||
" '^.*[0-9a-zA-Z]+.*$'; "
|
||||
"instance['datastore']['version'] u'%s' "
|
||||
"does not match '^.*[0-9a-zA-Z]+.*$'" %
|
||||
(datastore, datastore_version))
|
||||
|
||||
@test
|
||||
def test_bad_body_volsize_create_instance(self):
|
||||
@ -319,10 +302,8 @@ class MalformedJson(object):
|
||||
asserts.assert_equal(httpCode, 400,
|
||||
"Create instance failed with code %s, "
|
||||
"exception %s" % (httpCode, e))
|
||||
if not isinstance(self.dbaas.client,
|
||||
troveclient.compat.xml.TroveXmlClient):
|
||||
volsize = "u'h3ll0'"
|
||||
asserts.assert_equal(e.message,
|
||||
"Validation error: "
|
||||
"instance['volume'] %s is not of "
|
||||
"type 'object'" % volsize)
|
||||
volsize = "u'h3ll0'"
|
||||
asserts.assert_equal(e.message,
|
||||
"Validation error: "
|
||||
"instance['volume'] %s is not of "
|
||||
"type 'object'" % volsize)
|
||||
|
@ -19,7 +19,6 @@ Tests dealing with HTTP rate-limiting.
|
||||
|
||||
import httplib
|
||||
import StringIO
|
||||
from xml.dom import minidom
|
||||
from trove.quota.models import Quota
|
||||
import testtools
|
||||
import webob
|
||||
@ -231,30 +230,6 @@ class LimitMiddlewareTest(BaseLimitTestSuite):
|
||||
retryAfter = body["overLimit"]["retryAfter"]
|
||||
self.assertEqual(retryAfter, "60")
|
||||
|
||||
def test_limited_request_xml(self):
|
||||
# Test a rate-limited (413) response as XML.
|
||||
request = webob.Request.blank("/")
|
||||
response = request.get_response(self.app)
|
||||
self.assertEqual(200, response.status_int)
|
||||
|
||||
request = webob.Request.blank("/")
|
||||
request.accept = "application/xml"
|
||||
response = request.get_response(self.app)
|
||||
self.assertEqual(response.status_int, 413)
|
||||
|
||||
root = minidom.parseString(response.body).childNodes[0]
|
||||
expected = "Only 1 GET request(s) can be made to * every minute."
|
||||
|
||||
self.assertNotEqual(root.attributes.getNamedItem("retryAfter"), None)
|
||||
retryAfter = root.attributes.getNamedItem("retryAfter").value
|
||||
self.assertEqual(retryAfter, "60")
|
||||
|
||||
details = root.getElementsByTagName("details")
|
||||
self.assertEqual(details.length, 1)
|
||||
|
||||
value = details.item(0).firstChild.data.strip()
|
||||
self.assertEqual(value, expected)
|
||||
|
||||
|
||||
class LimitTest(BaseLimitTestSuite):
|
||||
"""
|
||||
|
@ -60,35 +60,6 @@ class VersionsControllerTest(testtools.TestCase):
|
||||
self.assertEqual('2012-08-01T00:00:00Z', json_data['updated'],
|
||||
'Version updated value is incorrect')
|
||||
|
||||
def test_index_xml(self):
|
||||
request = Mock()
|
||||
result = self.controller.index(request)
|
||||
self.assertIsNotNone(result, 'Result was None')
|
||||
|
||||
id = VERSIONS['1.0']['id']
|
||||
status = VERSIONS['1.0']['status']
|
||||
base_url = BASE_URL
|
||||
updated = VERSIONS['1.0']['updated']
|
||||
version = Version(id, status, base_url, updated)
|
||||
|
||||
result._data = Mock()
|
||||
result._data.data_for_xml = lambda: {'versions': [version]}
|
||||
|
||||
xml_data = result.data("application/xml")
|
||||
self.assertIsNotNone(xml_data, 'Result xml_data was None')
|
||||
|
||||
versions = xml_data['versions']
|
||||
self.assertIsNotNone(versions, "Versions was None")
|
||||
self.assertTrue(len(versions) == 1, "Versions length was != 1")
|
||||
v = versions[0]
|
||||
|
||||
self.assertEqual('v1.0', v.id,
|
||||
'Version id is incorrect')
|
||||
self.assertEqual('CURRENT', v.status,
|
||||
'Version status is incorrect')
|
||||
self.assertEqual('2012-08-01T00:00:00Z', v.updated,
|
||||
'Version updated value is incorrect')
|
||||
|
||||
def test_show_json(self):
|
||||
request = Mock()
|
||||
request.url_version = '1.0'
|
||||
@ -107,23 +78,6 @@ class VersionsControllerTest(testtools.TestCase):
|
||||
"Version updated was not '2012-08-01T00:00:00Z'")
|
||||
self.assertEqual('v1.0', version['id'], "Version id was not 'v1.0'")
|
||||
|
||||
def test_show_xml(self):
|
||||
request = Mock()
|
||||
request.url_version = '1.0'
|
||||
result = self.controller.show(request)
|
||||
self.assertIsNotNone(result,
|
||||
'Result was None')
|
||||
xml_data = result.data("application/xml")
|
||||
self.assertIsNotNone(xml_data, "XML data was None")
|
||||
|
||||
version = xml_data.get('version', None)
|
||||
self.assertIsNotNone(version, "Version was None")
|
||||
self.assertEqual('CURRENT', version.status,
|
||||
"Version status was not 'CURRENT'")
|
||||
self.assertEqual('2012-08-01T00:00:00Z', version.updated,
|
||||
"Version updated was not '2012-08-01T00:00:00Z'")
|
||||
self.assertEqual('v1.0', version.id, "Version id was not 'v1.0'")
|
||||
|
||||
|
||||
class BaseVersionTestCase(testtools.TestCase):
|
||||
|
||||
@ -158,28 +112,6 @@ class BaseVersionTestCase(testtools.TestCase):
|
||||
self.assertEqual('http://localhost/v1.0/', url,
|
||||
"Base Version url is incorrect")
|
||||
|
||||
def test_to_xml(self):
|
||||
xml = self.base_version.to_xml()
|
||||
self.assertIsNotNone(xml, 'XML was None')
|
||||
|
||||
self.assertEqual('v1.0', xml.getAttribute('id'),
|
||||
"XML Version is not v1.0")
|
||||
self.assertEqual('CURRENT', xml.getAttribute('status'),
|
||||
"XML status was not 'CURRENT'")
|
||||
self.assertEqual('2012-08-01T00:00:00Z', xml.getAttribute('updated'),
|
||||
"XML updated value was not 2012-08-01T00:00:00Z")
|
||||
|
||||
links = xml.getElementsByTagName("link")
|
||||
self.assertIsNotNone(links, "XML links element was None")
|
||||
|
||||
link = links[0]
|
||||
self.assertIsNotNone(link, "XML link element was None")
|
||||
|
||||
self.assertEqual('http://localhost/v1.0/', link.getAttribute("href"),
|
||||
"XML link href is not 'http://localhost/v1.0/'")
|
||||
self.assertEqual('self', link.getAttribute("rel"),
|
||||
"XML link rel is not self")
|
||||
|
||||
|
||||
class VersionTestCase(testtools.TestCase):
|
||||
|
||||
@ -244,19 +176,6 @@ class VersionDataViewTestCase(testtools.TestCase):
|
||||
self.assertEqual('v1.0', data['id'],
|
||||
"Data status was not 'v1.0'")
|
||||
|
||||
def test_data_for_xml(self):
|
||||
xml_data = self.version_data_view.data_for_xml()
|
||||
self.assertIsNotNone(xml_data, "XML data is None")
|
||||
self.assertTrue(type(xml_data) is dict,
|
||||
"XML version data is not a dict")
|
||||
self.assertIsNotNone(xml_data.get('version', None),
|
||||
"Dict xml_data has no key 'version'")
|
||||
version = xml_data['version']
|
||||
self.assertIsNotNone(version, "Version was None")
|
||||
|
||||
self.assertEqual(self.version.id, version.id,
|
||||
"Version ids are not equal")
|
||||
|
||||
|
||||
class VersionsDataViewTestCase(testtools.TestCase):
|
||||
|
||||
@ -300,18 +219,6 @@ class VersionsDataViewTestCase(testtools.TestCase):
|
||||
self.assertEqual(d1['id'], d2['id'],
|
||||
"Version ids are not equal")
|
||||
|
||||
def test_data_for_xml(self):
|
||||
xml_data = self.versions_data_view.data_for_xml()
|
||||
self.assertIsNotNone(xml_data, "XML data was None")
|
||||
self.assertTrue(type(xml_data) is dict, "XML data was not a dict")
|
||||
versions = xml_data.get('versions', None)
|
||||
self.assertIsNotNone(versions, "Versions is None")
|
||||
self.assertTrue(type(versions) is list, "Versions is not a list")
|
||||
self.assertTrue(len(versions) == 1, "Versions length != 1")
|
||||
|
||||
v = versions[0]
|
||||
self.assertEqual(v.id, self.version.id)
|
||||
|
||||
|
||||
class VersionAPITestCase(testtools.TestCase):
|
||||
|
||||
|
@ -183,12 +183,6 @@ class TestInstanceController(TestCase):
|
||||
validator = jsonschema.Draft4Validator(schema)
|
||||
self.assertTrue(validator.is_valid(body))
|
||||
|
||||
def test_validate_resize_instance_int_xml(self):
|
||||
body = {"resize": {"flavorRef": "2"}}
|
||||
schema = self.controller.get_schema('action', body)
|
||||
validator = jsonschema.Draft4Validator(schema)
|
||||
self.assertTrue(validator.is_valid(body))
|
||||
|
||||
def test_validate_resize_instance_empty_url(self):
|
||||
body = {"resize": {"flavorRef": ""}}
|
||||
schema = self.controller.get_schema('action', body)
|
||||
|
@ -18,8 +18,6 @@
|
||||
|
||||
import pprint
|
||||
|
||||
from lxml import etree
|
||||
|
||||
|
||||
class DictKeysMismatch(object):
|
||||
def __init__(self, d1only, d2only):
|
||||
@ -205,244 +203,3 @@ class FunctionCallMatcher(object):
|
||||
def match(self):
|
||||
dict_list_matcher = DictListMatches(self.expected_func_calls)
|
||||
return dict_list_matcher.match(self.actual_func_calls)
|
||||
|
||||
|
||||
class XMLMismatch(object):
|
||||
"""Superclass for XML mismatch."""
|
||||
|
||||
def __init__(self, state):
|
||||
self.path = str(state)
|
||||
self.expected = state.expected
|
||||
self.actual = state.actual
|
||||
|
||||
def describe(self):
|
||||
return "%(path)s: XML does not match" % self.__dict__
|
||||
|
||||
def get_details(self):
|
||||
return {
|
||||
'expected': self.expected,
|
||||
'actual': self.actual,
|
||||
}
|
||||
|
||||
|
||||
class XMLTagMismatch(XMLMismatch):
|
||||
"""XML tags don't match."""
|
||||
|
||||
def __init__(self, state, idx, expected_tag, actual_tag):
|
||||
super(XMLTagMismatch, self).__init__(state)
|
||||
self.idx = idx
|
||||
self.expected_tag = expected_tag
|
||||
self.actual_tag = actual_tag
|
||||
|
||||
def describe(self):
|
||||
return ("%(path)s: XML tag mismatch at index %(idx)d: "
|
||||
"expected tag <%(expected_tag)s>; "
|
||||
"actual tag <%(actual_tag)s>" % self.__dict__)
|
||||
|
||||
|
||||
class XMLAttrKeysMismatch(XMLMismatch):
|
||||
"""XML attribute keys don't match."""
|
||||
|
||||
def __init__(self, state, expected_only, actual_only):
|
||||
super(XMLAttrKeysMismatch, self).__init__(state)
|
||||
self.expected_only = ', '.join(sorted(expected_only))
|
||||
self.actual_only = ', '.join(sorted(actual_only))
|
||||
|
||||
def describe(self):
|
||||
return ("%(path)s: XML attributes mismatch: "
|
||||
"keys only in expected: %(expected_only)s; "
|
||||
"keys only in actual: %(actual_only)s" % self.__dict__)
|
||||
|
||||
|
||||
class XMLAttrValueMismatch(XMLMismatch):
|
||||
"""XML attribute values don't match."""
|
||||
|
||||
def __init__(self, state, key, expected_value, actual_value):
|
||||
super(XMLAttrValueMismatch, self).__init__(state)
|
||||
self.key = key
|
||||
self.expected_value = expected_value
|
||||
self.actual_value = actual_value
|
||||
|
||||
def describe(self):
|
||||
return ("%(path)s: XML attribute value mismatch: "
|
||||
"expected value of attribute %(key)s: %(expected_value)r; "
|
||||
"actual value: %(actual_value)r" % self.__dict__)
|
||||
|
||||
|
||||
class XMLTextValueMismatch(XMLMismatch):
|
||||
"""XML text values don't match."""
|
||||
|
||||
def __init__(self, state, expected_text, actual_text):
|
||||
super(XMLTextValueMismatch, self).__init__(state)
|
||||
self.expected_text = expected_text
|
||||
self.actual_text = actual_text
|
||||
|
||||
def describe(self):
|
||||
return ("%(path)s: XML text value mismatch: "
|
||||
"expected text value: %(expected_text)r; "
|
||||
"actual value: %(actual_text)r" % self.__dict__)
|
||||
|
||||
|
||||
class XMLUnexpectedChild(XMLMismatch):
|
||||
"""Unexpected child present in XML."""
|
||||
|
||||
def __init__(self, state, tag, idx):
|
||||
super(XMLUnexpectedChild, self).__init__(state)
|
||||
self.tag = tag
|
||||
self.idx = idx
|
||||
|
||||
def describe(self):
|
||||
return ("%(path)s: XML unexpected child element <%(tag)s> "
|
||||
"present at index %(idx)d" % self.__dict__)
|
||||
|
||||
|
||||
class XMLExpectedChild(XMLMismatch):
|
||||
"""Expected child not present in XML."""
|
||||
|
||||
def __init__(self, state, tag, idx):
|
||||
super(XMLExpectedChild, self).__init__(state)
|
||||
self.tag = tag
|
||||
self.idx = idx
|
||||
|
||||
def describe(self):
|
||||
return ("%(path)s: XML expected child element <%(tag)s> "
|
||||
"not present at index %(idx)d" % self.__dict__)
|
||||
|
||||
|
||||
class XMLMatchState(object):
|
||||
"""
|
||||
Maintain some state for matching.
|
||||
|
||||
Tracks the XML node path and saves the expected and actual full
|
||||
XML text, for use by the XMLMismatch subclasses.
|
||||
"""
|
||||
|
||||
def __init__(self, expected, actual):
|
||||
self.path = []
|
||||
self.expected = expected
|
||||
self.actual = actual
|
||||
|
||||
def __enter__(self):
|
||||
pass
|
||||
|
||||
def __exit__(self, exc_type, exc_value, exc_tb):
|
||||
self.path.pop()
|
||||
return False
|
||||
|
||||
def __str__(self):
|
||||
return '/' + '/'.join(self.path)
|
||||
|
||||
def node(self, tag, idx):
|
||||
"""
|
||||
Adds tag and index to the path; they will be popped off when
|
||||
the corresponding 'with' statement exits.
|
||||
|
||||
:param tag: The element tag
|
||||
:param idx: If not None, the integer index of the element
|
||||
within its parent. Not included in the path
|
||||
element if None.
|
||||
"""
|
||||
|
||||
if idx is not None:
|
||||
self.path.append("%s[%d]" % (tag, idx))
|
||||
else:
|
||||
self.path.append(tag)
|
||||
return self
|
||||
|
||||
|
||||
class XMLMatches(object):
|
||||
"""Compare XML strings. More complete than string comparison."""
|
||||
|
||||
def __init__(self, expected):
|
||||
self.expected_xml = expected
|
||||
self.expected = etree.fromstring(expected)
|
||||
|
||||
def __str__(self):
|
||||
return 'XMLMatches(%r)' % self.expected_xml
|
||||
|
||||
def match(self, actual_xml):
|
||||
actual = etree.fromstring(actual_xml)
|
||||
|
||||
state = XMLMatchState(self.expected_xml, actual_xml)
|
||||
result = self._compare_node(self.expected, actual, state, None)
|
||||
|
||||
if result is False:
|
||||
return XMLMismatch(state)
|
||||
elif result is not True:
|
||||
return result
|
||||
|
||||
def _compare_node(self, expected, actual, state, idx):
|
||||
"""Recursively compares nodes within the XML tree."""
|
||||
|
||||
# Start by comparing the tags
|
||||
if expected.tag != actual.tag:
|
||||
return XMLTagMismatch(state, idx, expected.tag, actual.tag)
|
||||
|
||||
with state.node(expected.tag, idx):
|
||||
# Compare the attribute keys
|
||||
expected_attrs = set(expected.attrib.keys())
|
||||
actual_attrs = set(actual.attrib.keys())
|
||||
if expected_attrs != actual_attrs:
|
||||
expected_only = expected_attrs - actual_attrs
|
||||
actual_only = actual_attrs - expected_attrs
|
||||
return XMLAttrKeysMismatch(state, expected_only, actual_only)
|
||||
|
||||
# Compare the attribute values
|
||||
for key in expected_attrs:
|
||||
expected_value = expected.attrib[key]
|
||||
actual_value = actual.attrib[key]
|
||||
|
||||
if 'DONTCARE' in (expected_value, actual_value):
|
||||
continue
|
||||
elif expected_value != actual_value:
|
||||
return XMLAttrValueMismatch(state, key, expected_value,
|
||||
actual_value)
|
||||
|
||||
# Compare the contents of the node
|
||||
if len(expected) == 0 and len(actual) == 0:
|
||||
# No children, compare text values
|
||||
if ('DONTCARE' not in (expected.text, actual.text) and
|
||||
expected.text != actual.text):
|
||||
return XMLTextValueMismatch(state, expected.text,
|
||||
actual.text)
|
||||
else:
|
||||
expected_idx = 0
|
||||
actual_idx = 0
|
||||
while (expected_idx < len(expected) and
|
||||
actual_idx < len(actual)):
|
||||
# Ignore comments and processing instructions
|
||||
# TODO(Vek): may interpret PIs in the future, to
|
||||
# allow for, say, arbitrary ordering of some
|
||||
# elements
|
||||
if (expected[expected_idx].tag in
|
||||
(etree.Comment, etree.ProcessingInstruction)):
|
||||
expected_idx += 1
|
||||
continue
|
||||
|
||||
# Compare the nodes
|
||||
result = self._compare_node(expected[expected_idx],
|
||||
actual[actual_idx], state,
|
||||
actual_idx)
|
||||
if result is not True:
|
||||
return result
|
||||
|
||||
# Step on to comparing the next nodes...
|
||||
expected_idx += 1
|
||||
actual_idx += 1
|
||||
|
||||
# Make sure we consumed all nodes in actual
|
||||
if actual_idx < len(actual):
|
||||
return XMLUnexpectedChild(state, actual[actual_idx].tag,
|
||||
actual_idx)
|
||||
|
||||
# Make sure we consumed all nodes in expected
|
||||
if expected_idx < len(expected):
|
||||
for node in expected[expected_idx:]:
|
||||
if (node.tag in
|
||||
(etree.Comment, etree.ProcessingInstruction)):
|
||||
continue
|
||||
|
||||
return XMLExpectedChild(state, node.tag, actual_idx)
|
||||
|
||||
# The nodes match
|
||||
return True
|
||||
|
@@ -181,7 +181,6 @@ def dns_checker(mgmt_instance):
    Uses a helper class which, given a mgmt instance (returned by the mgmt
    API) can confirm that the DNS record provisioned correctly.
    """
    skip_if_xml()  # The mgmt instance won't look the same, so skip this.
    if CONFIG.values.get('trove_dns_checker') is not None:
        checker = import_class(CONFIG.trove_dns_checker)
        checker()(mgmt_instance)
@@ -196,11 +195,6 @@ def process(cmd):
    return result


def skip_if_xml():
    if "xml" in CONFIG.values.get('trove_client_cls', ''):
        raise SkipTest("This feature does not work with XML.")


def string_in_list(str, substr_list):
    """Returns True if the string appears in the list."""
    return any([str.find(x) >= 0 for x in substr_list])
@ -28,8 +28,6 @@
|
||||
from proboscis import asserts
|
||||
|
||||
from trove.tests.config import CONFIG
|
||||
from troveclient.compat.xml import TroveXmlClient
|
||||
from trove.openstack.common import processutils
|
||||
|
||||
|
||||
def add_report_event_to(home, name):
|
||||
@ -104,30 +102,3 @@ class TestClient(object):
|
||||
|
||||
def __getattr__(self, item):
|
||||
return getattr(self.real_client, item)
|
||||
|
||||
|
||||
def call_xmllint(name, body):
|
||||
try:
|
||||
with open(CONFIG.xml_temp_file, 'w') as file:
|
||||
file.write(body)
|
||||
|
||||
#if CONFIG.get('xml_xsd', None):
|
||||
args = [CONFIG.xml_temp_file]
|
||||
if CONFIG.get('xml_xsd', None):
|
||||
args += ["--schema", CONFIG.xml_xsd]
|
||||
processutils.execute(CONFIG.xmllint_bin, *args,
|
||||
check_exit_code=0, shell=False)
|
||||
except processutils.ProcessExecutionError as pe:
|
||||
fail("Error validating XML! %s" % pe)
|
||||
|
||||
|
||||
class XmlLintClient(TroveXmlClient):
|
||||
|
||||
content_type = 'xml'
|
||||
|
||||
def http_log(self, args, kwargs, resp, body):
|
||||
#self.pretty_log(args, kwargs, resp, body)
|
||||
if kwargs.get('body', None):
|
||||
call_xmllint("request", kwargs['body'])
|
||||
if body:
|
||||
call_xmllint("response", body)
|
||||
|
@@ -15,7 +15,6 @@

import os
import routes
from xml.dom import minidom

from trove.common import wsgi

@@ -74,20 +73,6 @@ class BaseVersion(object):
            return url + "/"
        return url

    def to_xml(self):
        doc = minidom.Document()
        version_elem = doc.createElement("version")
        version_elem.setAttribute("id", self.id)
        version_elem.setAttribute("status", self.status)
        version_elem.setAttribute("updated", self.updated)
        links_elem = doc.createElement("links")
        link_elem = doc.createElement("link")
        link_elem.setAttribute("href", self.url())
        link_elem.setAttribute("rel", "self")
        links_elem.appendChild(link_elem)
        version_elem.appendChild(links_elem)
        return version_elem


class Version(BaseVersion):

@@ -105,9 +90,6 @@ class VersionDataView(object):
    def data_for_json(self):
        return {'version': self.version.data()}

    def data_for_xml(self):
        return {'version': self.version}


class VersionsDataView(object):

@@ -117,9 +99,6 @@ class VersionsDataView(object):
    def data_for_json(self):
        return {'versions': [version.data() for version in self.versions]}

    def data_for_xml(self):
        return {'versions': self.versions}


class VersionsAPI(wsgi.Router):
    def __init__(self):
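With to_xml and the data_for_xml hooks gone, the versions API only ever emits dictionaries; for reference, the JSON shape the remaining data_for_json path produces looks roughly like this (values taken from the unit-test expectations removed above):

# Illustrative payload only; field values mirror the removed test fixtures.
versions_body = {
    'versions': [{
        'id': 'v1.0',
        'status': 'CURRENT',
        'updated': '2012-08-01T00:00:00Z',
        'links': [{'rel': 'self', 'href': 'http://localhost/v1.0/'}],
    }]
}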