Split api.py into multiple files
This change splits api.py into multiple files for easier readability and modification. Much of the Overcloud-equivalent code in tuskar.py will later be refactored into heat.py once the api is updated to differentiate between a plan and a stack. Change-Id: Ie61a86125cf4dc8c8022f95e91fe0faccbf0af56
This commit is contained in:
parent
46a69fc889
commit
3957c964a8
1187
tuskar_ui/api.py
1187
tuskar_ui/api.py
File diff suppressed because it is too large
Load Diff
24
tuskar_ui/api/__init__.py
Normal file
24
tuskar_ui/api/__init__.py
Normal file
@ -0,0 +1,24 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from tuskar_ui.api import flavor
from tuskar_ui.api import heat
from tuskar_ui.api import node
from tuskar_ui.api import tuskar


# Public submodules re-exported by the ``tuskar_ui.api`` package.
__all__ = [
    "flavor",
    "heat",
    "node",
    "tuskar",
]
|
104
tuskar_ui/api/flavor.py
Normal file
104
tuskar_ui/api/flavor.py
Normal file
@ -0,0 +1,104 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import logging
|
||||
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from horizon.utils import memoized
|
||||
from openstack_dashboard.api import nova
|
||||
|
||||
from tuskar_ui.api import tuskar
|
||||
from tuskar_ui.cached_property import cached_property # noqa
|
||||
from tuskar_ui.handle_errors import handle_errors # noqa
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Flavor(object):
    """Wrapper around a Nova flavor adding Tuskar-specific helpers."""

    def __init__(self, flavor):
        """Wrap the given Nova flavor.

        :param flavor: Nova flavor
        :type flavor: novaclient.v1_1.flavors.Flavor
        """
        self._flavor = flavor

    def __getattr__(self, name):
        # Delegate every unknown attribute to the wrapped Nova flavor.
        return getattr(self._flavor, name)

    @property
    def ram_bytes(self):
        """RAM size in bytes (Nova reports it in MB)."""
        return self.ram * 1024 ** 2

    @property
    def disk_bytes(self):
        """Disk size in bytes (Nova reports it in GB)."""
        return self.disk * 1024 ** 3

    @cached_property
    def extras_dict(self):
        """Extra flavor parameters.

        :return: Nova flavor keys
        :rtype: dict
        """
        return self._flavor.get_keys()

    @property
    def cpu_arch(self):
        """CPU architecture stored in the flavor's extra specs."""
        return self.extras_dict.get('cpu_arch', '')

    @property
    def kernel_image_id(self):
        """Deploy kernel image id stored in the flavor's extra specs."""
        return self.extras_dict.get('baremetal:deploy_kernel_id', '')

    @property
    def ramdisk_image_id(self):
        """Deploy ramdisk image id stored in the flavor's extra specs."""
        return self.extras_dict.get('baremetal:deploy_ramdisk_id', '')

    @classmethod
    def create(cls, request, name, memory, vcpus, disk, cpu_arch,
               kernel_image_id, ramdisk_image_id):
        """Create a flavor in Nova with baremetal extra specs and wrap it."""
        extras_dict = {
            'cpu_arch': cpu_arch,
            'baremetal:deploy_kernel_id': kernel_image_id,
            'baremetal:deploy_ramdisk_id': ramdisk_image_id,
        }
        created = nova.flavor_create(request, name, memory, vcpus, disk,
                                     metadata=extras_dict)
        return cls(created)

    @classmethod
    @handle_errors(_("Unable to load flavor."))
    def get(cls, request, flavor_id):
        """Return the flavor matching *flavor_id*."""
        return cls(nova.flavor_get(request, flavor_id))

    @classmethod
    @handle_errors(_("Unable to retrieve flavor list."), [])
    def list(cls, request):
        """Return all flavors known to Nova, wrapped."""
        return [cls(item) for item in nova.flavor_list(request)]

    @classmethod
    @memoized.memoized
    @handle_errors(_("Unable to retrieve existing servers list."), [])
    def list_deployed_ids(cls, request):
        """Get and memoize ID's of deployed flavors."""
        servers = nova.server_list(request)[0]
        deployed_ids = set(server.flavor['id'] for server in servers)
        # Flavors referenced by overcloud roles count as deployed too.
        roles = tuskar.OvercloudRole.list(request)
        deployed_ids |= set(role.flavor_id for role in roles)
        return deployed_ids
|
86
tuskar_ui/api/heat.py
Normal file
86
tuskar_ui/api/heat.py
Normal file
@ -0,0 +1,86 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import logging
|
||||
|
||||
from openstack_dashboard.api import base
|
||||
from openstack_dashboard.api import heat
|
||||
|
||||
from tuskar_ui.api import node
|
||||
from tuskar_ui.cached_property import cached_property # noqa
|
||||
from tuskar_ui.handle_errors import handle_errors # noqa
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Resource(base.APIResourceWrapper):
    """Wrapper for a Heat resource belonging to an overcloud stack."""

    _attrs = ('resource_name', 'resource_type', 'resource_status',
              'physical_resource_id')

    def __init__(self, apiresource, request=None, **kwargs):
        """Initialize a resource

        :param apiresource: apiresource we want to wrap
        :type apiresource: heatclient.v1.resources.Resource

        :param request: request
        :type request: django.core.handlers.wsgi.WSGIRequest

        :param node: node relation we want to cache
        :type node: tuskar_ui.api.Node

        :return: Resource object
        :rtype: Resource
        """
        super(Resource, self).__init__(apiresource)
        self._request = request
        if 'node' in kwargs:
            # Cache the node relation when the caller already has it.
            self._node = kwargs['node']

    @classmethod
    def get(cls, request, overcloud, resource_name):
        """Return the named Heat resource from an overcloud's stack.

        :param request: request object
        :type request: django.http.HttpRequest

        :param overcloud: the Overcloud from which to retrieve the resource
        :type overcloud: tuskar_ui.api.Overcloud

        :param resource_name: name of the Resource to retrieve
        :type resource_name: str

        :return: matching Resource, or None if no Resource in the Overcloud
                 stack matches the resource name
        :rtype: tuskar_ui.api.Resource
        """
        # NOTE(review): heat.resource_get is called without the request
        # argument -- confirm this matches the signature of
        # openstack_dashboard.api.heat.resource_get.
        raw = heat.resource_get(overcloud.stack.id, resource_name)
        return cls(raw, request=request)

    @cached_property
    def node(self):
        """Return the Ironic Node associated with this Resource

        :return: Ironic Node associated with this Resource, or None if no
                 Node is associated
        :rtype: tuskar_ui.api.Node

        :raises: ironicclient.exc.HTTPNotFound if there is no Node with the
                 matching instance UUID
        """
        if hasattr(self, '_node'):
            return self._node
        if self.physical_resource_id:
            return node.Node.get_by_instance_uuid(self._request,
                                                  self.physical_resource_id)
        return None
|
539
tuskar_ui/api/node.py
Normal file
539
tuskar_ui/api/node.py
Normal file
@ -0,0 +1,539 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import logging
|
||||
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from horizon.utils import memoized
|
||||
from ironicclient.v1 import client as ironicclient
|
||||
from novaclient.v1_1.contrib import baremetal
|
||||
from openstack_dashboard.api import base
|
||||
from openstack_dashboard.api import glance
|
||||
from openstack_dashboard.api import nova
|
||||
|
||||
from tuskar_ui.cached_property import cached_property # noqa
|
||||
from tuskar_ui.handle_errors import handle_errors # noqa
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def baremetalclient(request):
    """Return a Nova baremetal node manager bound to this request."""
    return baremetal.BareMetalNodeManager(nova.novaclient(request))
|
||||
|
||||
|
||||
def list_to_dict(object_list, key_attribute='id'):
    """Converts an object list to a dict

    :param object_list: list of objects to be put into a dict
    :type object_list: list

    :param key_attribute: object attribute used as index by dict
    :type key_attribute: str

    :return: dict containing the objects in the list
    :rtype: dict
    """
    return {getattr(obj, key_attribute): obj for obj in object_list}
|
||||
|
||||
|
||||
# FIXME(lsmola) This should be done in Horizon, they don't have caching
@memoized.memoized
def image_get(request, image_id):
    """Return a Glance image, with metadata, for the given identifier.

    The result is memoized so repeated lookups of the same image within
    a process are served from cache.

    :param request: request object
    :type request: django.http.HttpRequest

    :param image_id: identifier of the image to fetch
    :type image_id: str

    :return: image with populated metadata
    :rtype: glanceclient.v1.images.Image
    """
    return glance.image_get(request, image_id)
|
||||
|
||||
|
||||
class IronicNode(base.APIResourceWrapper):
    """Wrapper for a bare-metal node managed by Ironic."""

    _attrs = ('id', 'uuid', 'instance_uuid', 'driver', 'driver_info',
              'properties', 'power_state')

    @classmethod
    def create(cls, request, ipmi_address, cpu, ram, local_disk,
               mac_addresses, ipmi_username=None, ipmi_password=None):
        """Create a Node in Ironic

        :param request: request object
        :type request: django.http.HttpRequest

        :param ipmi_address: IPMI address
        :type ipmi_address: str

        :param cpu: number of cores
        :type cpu: int

        :param ram: RAM in GB
        :type ram: int

        :param local_disk: local disk in TB
        :type local_disk: int

        :param mac_addresses: list of mac addresses
        :type mac_addresses: list of str

        :param ipmi_username: IPMI username
        :type ipmi_username: str

        :param ipmi_password: IPMI password
        :type ipmi_password: str

        :return: the created Node object
        :rtype: tuskar_ui.api.Node
        """
        # NOTE(review): ``ironicclient`` is imported as the
        # ``ironicclient.v1.client`` module yet called like a factory
        # (``ironicclient(request)``) -- confirm a client factory function
        # is intended here.
        node = ironicclient(request).node.create(
            driver='pxe_ipmitool',
            driver_info={'ipmi_address': ipmi_address,
                         'ipmi_username': ipmi_username,
                         # Fixed key: the pxe_ipmitool driver expects
                         # 'ipmi_password', not 'password'.
                         'ipmi_password': ipmi_password},
            properties={'cpu': cpu,
                        'ram': ram,
                        'local_disk': local_disk})
        # Register one Ironic port per MAC address of the new node.
        for mac_address in mac_addresses:
            ironicclient(request).port.create(
                node_uuid=node.uuid,
                address=mac_address
            )

        return cls(node)

    @classmethod
    def get(cls, request, uuid):
        """Return the Node in Ironic that matches the ID

        :param request: request object
        :type request: django.http.HttpRequest

        :param uuid: ID of Node to be retrieved
        :type uuid: str

        :return: matching Node, or None if no Node matches the ID
        :rtype: tuskar_ui.api.Node
        """
        # Use the singular ``node`` manager for consistency with
        # ``create`` above; python-ironicclient exposes ``client.node``,
        # not ``client.nodes``.
        node = ironicclient(request).node.get(uuid)
        return cls(node)

    @classmethod
    def get_by_instance_uuid(cls, request, instance_uuid):
        """Return the Node in Ironic associated with the instance ID

        :param request: request object
        :type request: django.http.HttpRequest

        :param instance_uuid: ID of Instance that is deployed on the Node
                              to be retrieved
        :type instance_uuid: str

        :return: matching Node
        :rtype: tuskar_ui.api.Node

        :raises: ironicclient.exc.HTTPNotFound if there is no Node with the
                 matching instance UUID
        """
        node = ironicclient(request).node.get_by_instance_uuid(instance_uuid)
        return cls(node)

    @classmethod
    @handle_errors(_("Unable to retrieve nodes"), [])
    def list(cls, request, associated=None):
        """Return a list of Nodes in Ironic

        :param request: request object
        :type request: django.http.HttpRequest

        :param associated: should we also retrieve all Nodes, only those
                           associated with an Instance, or only those not
                           associated with an Instance?
        :type associated: bool

        :return: list of Nodes, or an empty list if there are none
        :rtype: list of tuskar_ui.api.Node
        """
        nodes = ironicclient(request).node.list(associated=associated)
        return [cls(node) for node in nodes]

    @classmethod
    def delete(cls, request, uuid):
        """Remove the Node matching the ID from Ironic if it
        exists; otherwise, does nothing.

        :param request: request object
        :type request: django.http.HttpRequest

        :param uuid: ID of Node to be removed
        :type uuid: str
        """
        ironicclient(request).node.delete(uuid)

    @cached_property
    def addresses(self):
        """Return a list of port addresses associated with this Node

        :return: list of port addresses associated with this Node, or
                 an empty list if no addresses are associated with
                 this Node
        :rtype: list of str
        """
        # NOTE(review): ``list_ports`` is expected to be provided by the
        # wrapped API resource -- confirm it exists on the Ironic node.
        ports = self.list_ports()
        return [port.address for port in ports]
|
||||
|
||||
|
||||
class BareMetalNode(base.APIResourceWrapper):
    """Wrapper for a node managed by the Nova baremetal driver."""

    _attrs = ('id', 'uuid', 'instance_uuid', 'memory_mb', 'cpus', 'local_gb',
              'task_state', 'pm_user', 'pm_address', 'interfaces')

    @classmethod
    def create(cls, request, ipmi_address, cpu, ram, local_disk,
               mac_addresses, ipmi_username=None, ipmi_password=None):
        """Create a Node in Nova BareMetal

        :param request: request object
        :type request: django.http.HttpRequest

        :param ipmi_address: IPMI address
        :type ipmi_address: str

        :param cpu: number of cores
        :type cpu: int

        :param ram: RAM in GB
        :type ram: int

        :param local_disk: local disk in TB
        :type local_disk: int

        :param mac_addresses: list of mac addresses
        :type mac_addresses: list of str

        :param ipmi_username: IPMI username
        :type ipmi_username: str

        :param ipmi_password: IPMI password
        :type ipmi_password: str

        :return: the created Node object
        :rtype: tuskar_ui.api.Node
        """
        created = baremetalclient(request).create(
            'undercloud', cpu, ram, local_disk, mac_addresses,
            pm_address=ipmi_address, pm_user=ipmi_username,
            pm_password=ipmi_password)
        return cls(created)

    @classmethod
    def get(cls, request, uuid):
        """Return the Node in Nova BareMetal that matches the ID

        :param request: request object
        :type request: django.http.HttpRequest

        :param uuid: ID of Node to be retrieved
        :type uuid: str

        :return: matching Node, or None if no Node matches the ID
        :rtype: tuskar_ui.api.Node
        """
        return cls(baremetalclient(request).get(uuid))

    @classmethod
    def get_by_instance_uuid(cls, request, instance_uuid):
        """Return the Node in Nova BareMetal associated with the instance ID

        :param request: request object
        :type request: django.http.HttpRequest

        :param instance_uuid: ID of Instance that is deployed on the Node
                              to be retrieved
        :type instance_uuid: str

        :return: matching Node
        :rtype: tuskar_ui.api.Node

        NOTE(review): unlike the Ironic variant this does not raise when
        no node matches -- it wraps ``None`` instead; confirm callers
        handle that.
        """
        match = next((n for n in baremetalclient(request).list()
                      if n.instance_uuid == instance_uuid), None)
        return cls(match)

    @classmethod
    def list(cls, request, associated=None):
        """Return a list of Nodes in Nova BareMetal

        :param request: request object
        :type request: django.http.HttpRequest

        :param associated: should we also retrieve all Nodes, only those
                           associated with an Instance, or only those not
                           associated with an Instance?
        :type associated: bool

        :return: list of Nodes, or an empty list if there are none
        :rtype: list of tuskar_ui.api.Node
        """
        nodes = baremetalclient(request).list()
        if associated is not None:
            # The baremetal API cannot filter server-side; do it here.
            if associated:
                nodes = [n for n in nodes if n.instance_uuid is not None]
            else:
                nodes = [n for n in nodes if n.instance_uuid is None]
        return [cls(n) for n in nodes]

    @classmethod
    def delete(cls, request, uuid):
        """Remove the Node matching the ID from Nova BareMetal if it
        exists; otherwise, does nothing.

        :param request: request object
        :type request: django.http.HttpRequest

        :param uuid: ID of Node to be removed
        :type uuid: str
        """
        baremetalclient(request).delete(uuid)

    @cached_property
    def power_state(self):
        """Return a power state of this BareMetalNode

        :return: power state of this node
        :rtype: str
        """
        translation = {
            'initializing': 'initializing',
            'active': 'on',
            'reboot': 'rebooting',
            'building': 'building',
            'deploying': 'deploying',
            'prepared': 'prepared',
            'deleting': 'deleting',
            'deploy failed': 'deploy failed',
            'deploy complete': 'deploy complete',
            'deleted': 'deleted',
            'error': 'error',
        }
        # Any unrecognised task state is reported as powered off.
        return translation.get(self.task_state, 'off')

    @cached_property
    def properties(self):
        """Return properties of this Node

        :return: return memory, cpus and local_disk properties
                 of this Node, ram and local_disk properties
                 are in bytes
        :rtype: dict of str
        """
        return {
            'ram': self.memory_mb * 1024.0 * 1024.0,
            'cpu': self.cpus,
            'local_disk': self.local_gb * 1024.0 * 1024.0 * 1024.0,
        }

    @cached_property
    def driver_info(self):
        """Return driver_info for this Node

        :return: return pm_address property of this Node
        :rtype: dict of str
        """
        # NOTE(review): relies on an ``instance`` attribute that this
        # class does not define itself -- presumably delegated to the
        # wrapped resource; verify against callers.
        try:
            ip_address = (self.instance._apiresource.addresses['ctlplane'][0]
                          ['addr'])
        except Exception:
            LOG.error("Couldn't obtain IP address")
            ip_address = None

        return {
            'ipmi_username': self.pm_user,
            'ipmi_address': self.pm_address,
            'ip_address': ip_address,
        }

    @cached_property
    def addresses(self):
        """Return a list of port addresses associated with this Node

        :return: list of port addresses associated with this Node, or
                 an empty list if no addresses are associated with
                 this Node
        :rtype: list of str
        """
        return [interface["address"] for interface in self.interfaces]
|
||||
|
||||
|
||||
class NodeClient(object):
    """Picks the node backend: Ironic when available, Nova baremetal else."""

    def __init__(self, request):
        # The 'baremetal' service flag signals that Ironic is deployed.
        if base.is_service_enabled(request, 'baremetal'):
            self.node_class = IronicNode
        else:
            self.node_class = BareMetalNode
|
||||
|
||||
|
||||
class Node(base.APIResourceWrapper):
    """Backend-agnostic node wrapper delegating to Ironic or baremetal."""

    _attrs = ('id', 'uuid', 'instance_uuid', 'driver', 'driver_info',
              'properties', 'power_state', 'addresses')

    def __init__(self, apiresource, request=None, **kwargs):
        """Initialize a node

        :param apiresource: apiresource we want to wrap
        :type apiresource: novaclient.v1_1.contrib.baremetal.BareMetalNode

        :param request: request
        :type request: django.core.handlers.wsgi.WSGIRequest

        :param instance: instance relation we want to cache
        :type instance: openstack_dashboard.api.nova.Server

        :return: Node object
        :rtype: Node
        """
        super(Node, self).__init__(apiresource)
        self._request = request
        if 'instance' in kwargs:
            # Cache the instance relation when the caller supplies it.
            self._instance = kwargs['instance']

    @classmethod
    def create(cls, request, ipmi_address, cpu, ram, local_disk,
               mac_addresses, ipmi_username=None, ipmi_password=None):
        """Register a node with the active backend and wrap the result."""
        backend = NodeClient(request).node_class
        return cls(backend.create(
            request, ipmi_address, cpu, ram, local_disk,
            mac_addresses, ipmi_username=ipmi_username,
            ipmi_password=ipmi_password))

    @classmethod
    @handle_errors(_("Unable to retrieve node"))
    def get(cls, request, uuid):
        """Fetch a node by ID, caching its Nova server when one is attached."""
        backend_node = NodeClient(request).node_class.get(request, uuid)
        if backend_node.instance_uuid is None:
            return cls(backend_node)
        server = nova.server_get(request, backend_node.instance_uuid)
        return cls(backend_node, instance=server, request=request)

    @classmethod
    @handle_errors(_("Unable to retrieve node"))
    def get_by_instance_uuid(cls, request, instance_uuid):
        """Fetch the node hosting the given instance, with the server cached."""
        backend_node = NodeClient(request).node_class.get_by_instance_uuid(
            request, instance_uuid)
        server = nova.server_get(request, instance_uuid)
        return cls(backend_node, instance=server, request=request)

    @classmethod
    @handle_errors(_("Unable to retrieve nodes"), [])
    def list(cls, request, associated=None):
        """List nodes, pairing each with its Nova server when relevant."""
        backend_nodes = NodeClient(request).node_class.list(
            request, associated=associated)
        if associated is not None and not associated:
            # Unassociated nodes have no server to join against.
            return [cls(n, request=request) for n in backend_nodes]
        servers, _has_more = nova.server_list(request)
        servers_by_id = list_to_dict(servers)
        return [cls(n, instance=servers_by_id.get(n.instance_uuid, None),
                    request=request)
                for n in backend_nodes]

    @classmethod
    def delete(cls, request, uuid):
        """Delete the node with the given ID via the active backend."""
        NodeClient(request).node_class.delete(request, uuid)

    @cached_property
    def instance(self):
        """Return the Nova Instance associated with this Node

        :return: Nova Instance associated with this Node; or
                 None if there is no Instance associated with this
                 Node, or no matching Instance is found
        :rtype: tuskar_ui.api.Instance
        """
        if hasattr(self, '_instance'):
            return self._instance
        if self.instance_uuid:
            return nova.server_get(self._request, self.instance_uuid)
        return None

    @cached_property
    def image_name(self):
        """Return image name of associated instance

        :return: Image name of instance, or None when no instance exists
        :rtype: string
        """
        if self.instance is None:
            return
        return image_get(self._request, self.instance.image['id']).name

    @cached_property
    def instance_status(self):
        # Status of the associated server, or None when there is none.
        return getattr(getattr(self, 'instance', None), 'status', None)
|
||||
|
||||
|
||||
def filter_nodes(nodes, healthy=None):
    """Filters the list of Nodes and returns the filtered list.

    :param nodes: list of tuskar_ui.api.Node objects to filter
    :type nodes: list
    :param healthy: retrieve all Nodes (healthy=None),
                    only the healthly ones (healthy=True),
                    or only those in an error state (healthy=False)
    :type healthy: None or bool
    :return: list of filtered tuskar_ui.api.Node objects
    :rtype: list
    """
    if healthy is None:
        return nodes
    error_states = ('deploy failed', 'error',)
    if healthy:
        return [n for n in nodes if n.power_state not in error_states]
    return [n for n in nodes if n.power_state in error_states]
|
553
tuskar_ui/api/tuskar.py
Normal file
553
tuskar_ui/api/tuskar.py
Normal file
@ -0,0 +1,553 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import django.conf
|
||||
import heatclient
|
||||
import keystoneclient.exceptions
|
||||
import logging
|
||||
import urlparse
|
||||
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from horizon.utils import memoized
|
||||
from openstack_dashboard.api import base
|
||||
from openstack_dashboard.api import heat
|
||||
from openstack_dashboard.api import keystone
|
||||
from tuskarclient.v1 import client as tuskar_client
|
||||
|
||||
from tuskar_ui.api import heat as tuskar_heat
|
||||
from tuskar_ui.api import node
|
||||
from tuskar_ui.cached_property import cached_property # noqa
|
||||
from tuskar_ui.handle_errors import handle_errors # noqa
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
TUSKAR_ENDPOINT_URL = getattr(django.conf.settings, 'TUSKAR_ENDPOINT_URL')
|
||||
|
||||
|
||||
def overcloud_keystoneclient(request, endpoint, password):
    """Returns a client connected to the Keystone backend.

    Several forms of authentication are supported:

        * Username + password -> Unscoped authentication
        * Username + password + tenant id -> Scoped authentication
        * Unscoped token -> Unscoped authentication
        * Unscoped token + tenant id -> Scoped authentication
        * Scoped token -> Scoped authentication

    Available services and data from the backend will vary depending on
    whether the authentication was scoped or unscoped.

    Lazy authentication if an ``endpoint`` parameter is provided.

    Calls requiring the admin endpoint should have ``admin=True`` passed in
    as a keyword argument.

    The client is cached so that subsequent API calls during the same
    request/response cycle don't have to be re-authenticated.
    """
    api_version = keystone.VERSIONS.get_active_version()

    # TODO(lsmola) add support of certificates and secured http and rest of
    # parameters according to horizon and add configuration to local settings
    # (somehow plugin based, we should not maintain a copy of settings)
    LOG.debug("Creating a new keystoneclient connection to %s." % endpoint)

    # TODO(lsmola) we should create tripleo-admin user for this purpose
    # this needs to be done first on tripleo side
    client_cls = api_version['client'].Client
    return client_cls(username="admin",
                      password=password,
                      tenant_name="admin",
                      auth_url=endpoint)
|
||||
|
||||
|
||||
# FIXME: request isn't used right in the tuskar client right now,
# but looking at other clients, it seems like it will be in the future
def tuskarclient(request):
    """Return a client for the configured Tuskar API endpoint."""
    return tuskar_client.Client(TUSKAR_ENDPOINT_URL)
|
||||
|
||||
|
||||
def list_to_dict(object_list, key_attribute='id'):
    """Converts an object list to a dict

    :param object_list: list of objects to be put into a dict
    :type object_list: list

    :param key_attribute: object attribute used as index by dict
    :type key_attribute: str

    :return: dict containing the objects in the list
    :rtype: dict
    """
    return {getattr(item, key_attribute): item for item in object_list}
|
||||
|
||||
|
||||
def transform_sizing(overcloud_sizing):
    """Transform the sizing to simpler format

    We need this till API will accept the more complex format with flavors,
    then we delete this.

    :param overcloud_sizing: overcloud sizing information with structure
                             {('overcloud_role_id',
                             'flavor_name'): count, ...}
    :type overcloud_sizing: dict

    :return: list of ('overcloud_role_id', 'num_nodes')
    :rtype: list
    """
    transformed = []
    # The flavor part of the key is intentionally dropped for now.
    for (role, _flavor), count in overcloud_sizing.items():
        transformed.append({
            'overcloud_role_id': role,
            'num_nodes': count,
        })
    return transformed
|
||||
|
||||
|
||||
class Overcloud(base.APIResourceWrapper):
|
||||
_attrs = ('id', 'stack_id', 'name', 'description', 'counts', 'attributes')
|
||||
|
||||
def __init__(self, apiresource, request=None):
|
||||
super(Overcloud, self).__init__(apiresource)
|
||||
self._request = request
|
||||
|
||||
@cached_property
|
||||
def overcloud_keystone(self):
|
||||
for output in self.stack_outputs:
|
||||
if output['output_key'] == 'KeystoneURL':
|
||||
break
|
||||
else:
|
||||
return None
|
||||
try:
|
||||
return overcloud_keystoneclient(
|
||||
self._request,
|
||||
output['output_value'],
|
||||
self.attributes.get('AdminPassword', None))
|
||||
except keystoneclient.exceptions.Unauthorized:
|
||||
LOG.debug('Unable to connect overcloud keystone.')
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def create(cls, request, overcloud_sizing, overcloud_configuration):
|
||||
"""Create an Overcloud in Tuskar
|
||||
|
||||
:param request: request object
|
||||
:type request: django.http.HttpRequest
|
||||
|
||||
:param overcloud_sizing: overcloud sizing information with structure
|
||||
{('overcloud_role_id',
|
||||
'flavor_name'): count, ...}
|
||||
:type overcloud_sizing: dict
|
||||
|
||||
:param overcloud_configuration: overcloud configuration with structure
|
||||
{'key': 'value', ...}
|
||||
:type overcloud_configuration: dict
|
||||
|
||||
:return: the created Overcloud object
|
||||
:rtype: tuskar_ui.api.Overcloud
|
||||
"""
|
||||
# TODO(lsmola) for now we have to transform the sizing to simpler
|
||||
# format, till API will accept the more complex with flavors,
|
||||
# then we delete this
|
||||
transformed_sizing = transform_sizing(overcloud_sizing)
|
||||
|
||||
overcloud = tuskarclient(request).overclouds.create(
|
||||
name='overcloud', description="Openstack cloud providing VMs",
|
||||
counts=transformed_sizing, attributes=overcloud_configuration)
|
||||
|
||||
return cls(overcloud, request=request)
|
||||
|
||||
@classmethod
|
||||
def update(cls, request, overcloud_id, overcloud_sizing,
|
||||
overcloud_configuration):
|
||||
"""Update an Overcloud in Tuskar
|
||||
|
||||
:param request: request object
|
||||
:type request: django.http.HttpRequest
|
||||
|
||||
:param overcloud_id: id of the overcloud we want to update
|
||||
:type overcloud_id: string
|
||||
|
||||
:param overcloud_sizing: overcloud sizing information with structure
|
||||
{('overcloud_role_id',
|
||||
'flavor_name'): count, ...}
|
||||
:type overcloud_sizing: dict
|
||||
|
||||
:param overcloud_configuration: overcloud configuration with structure
|
||||
{'key': 'value', ...}
|
||||
:type overcloud_configuration: dict
|
||||
|
||||
:return: the updated Overcloud object
|
||||
:rtype: tuskar_ui.api.Overcloud
|
||||
"""
|
||||
# TODO(lsmola) for now we have to transform the sizing to simpler
|
||||
# format, till API will accept the more complex with flavors,
|
||||
# then we delete this
|
||||
transformed_sizing = transform_sizing(overcloud_sizing)
|
||||
|
||||
overcloud = tuskarclient(request).overclouds.update(
|
||||
overcloud_id, counts=transformed_sizing,
|
||||
attributes=overcloud_configuration)
|
||||
|
||||
return cls(overcloud, request=request)
|
||||
|
||||
@classmethod
|
||||
def list(cls, request):
|
||||
"""Return a list of Overclouds in Tuskar
|
||||
|
||||
:param request: request object
|
||||
:type request: django.http.HttpRequest
|
||||
|
||||
:return: list of Overclouds, or an empty list if there are none
|
||||
:rtype: list of tuskar_ui.api.Overcloud
|
||||
"""
|
||||
ocs = tuskarclient(request).overclouds.list()
|
||||
|
||||
return [cls(oc, request=request) for oc in ocs]
|
||||
|
||||
@classmethod
|
||||
def template_parameters(cls, request):
|
||||
"""Return a list of needed template parameters
|
||||
|
||||
:param request: request object
|
||||
:type request: django.http.HttpRequest
|
||||
|
||||
:return: dict with key/value parameters
|
||||
:rtype: dict
|
||||
"""
|
||||
parameters = tuskarclient(request).overclouds.template_parameters()
|
||||
# FIXME(lsmola) python client is converting the result to
|
||||
# object, we have to return it better from client or API
|
||||
return parameters._info
|
||||
|
||||
@classmethod
|
||||
@handle_errors(_("Unable to retrieve deployment"))
|
||||
def get(cls, request, overcloud_id):
|
||||
"""Return the Tuskar Overcloud that matches the ID
|
||||
|
||||
:param request: request object
|
||||
:type request: django.http.HttpRequest
|
||||
|
||||
:param overcloud_id: ID of Overcloud to be retrieved
|
||||
:type overcloud_id: int
|
||||
|
||||
:return: matching Overcloud, or None if no Overcloud matches
|
||||
the ID
|
||||
:rtype: tuskar_ui.api.Overcloud
|
||||
"""
|
||||
# FIXME(lsmola) hack for Icehouse, only one Overcloud is allowed
|
||||
# TODO(lsmola) uncomment when possible
|
||||
# overcloud = tuskarclient(request).overclouds.get(overcloud_id)
|
||||
# return cls(overcloud, request=request)
|
||||
return cls.get_the_overcloud(request)
|
||||
|
||||
# TODO(lsmola) before will will support multiple overclouds, we
|
||||
# can work only with overcloud that is named overcloud. Delete
|
||||
# this once we have more overclouds. Till then, this is the overcloud
|
||||
# that rules them all.
|
||||
# This is how API supports it now, so we have to have it this way.
|
||||
# Also till Overcloud workflow is done properly, we have to work
|
||||
# with situations that overcloud is deleted, but stack is still
|
||||
# there. So overcloud will pretend to exist when stack exist.
|
||||
@classmethod
|
||||
def get_the_overcloud(cls, request):
|
||||
overcloud_list = cls.list(request)
|
||||
for overcloud in overcloud_list:
|
||||
if overcloud.name == 'overcloud':
|
||||
return overcloud
|
||||
|
||||
the_overcloud = cls(object(), request=request)
|
||||
# I need to mock attributes of overcloud that is being deleted.
|
||||
the_overcloud.id = "overcloud"
|
||||
|
||||
if the_overcloud.stack and the_overcloud.is_deleting:
|
||||
return the_overcloud
|
||||
else:
|
||||
raise heatclient.exc.HTTPNotFound()
|
||||
|
||||
@classmethod
|
||||
def delete(cls, request, overcloud_id):
|
||||
"""Create an Overcloud in Tuskar
|
||||
|
||||
:param request: request object
|
||||
:type request: django.http.HttpRequest
|
||||
|
||||
:param overcloud_id: overcloud id
|
||||
:type overcloud_id: int
|
||||
"""
|
||||
tuskarclient(request).overclouds.delete(overcloud_id)
|
||||
|
||||
@cached_property
|
||||
def stack(self):
|
||||
"""Return the Heat Stack associated with this Overcloud
|
||||
|
||||
:return: Heat Stack associated with this Overcloud; or None
|
||||
if no Stack is associated, or no Stack can be
|
||||
found
|
||||
:rtype: heatclient.v1.stacks.Stack or None
|
||||
"""
|
||||
return heat.stack_get(self._request, 'overcloud')
|
||||
|
||||
@cached_property
|
||||
def stack_events(self):
|
||||
"""Return the Heat Events associated with this Overcloud
|
||||
|
||||
:return: list of Heat Events associated with this Overcloud;
|
||||
or an empty list if there is no Stack associated with
|
||||
this Overcloud, or there are no Events
|
||||
:rtype: list of heatclient.v1.events.Event
|
||||
"""
|
||||
if self.stack:
|
||||
return heat.events_list(self._request,
|
||||
self.stack.stack_name)
|
||||
return []
|
||||
|
||||
@cached_property
|
||||
def is_deployed(self):
|
||||
"""Check if this Overcloud is successfully deployed.
|
||||
|
||||
:return: True if this Overcloud is successfully deployed;
|
||||
False otherwise
|
||||
:rtype: bool
|
||||
"""
|
||||
return self.stack.stack_status in ('CREATE_COMPLETE',
|
||||
'UPDATE_COMPLETE')
|
||||
|
||||
@cached_property
|
||||
def is_deploying(self):
|
||||
"""Check if this Overcloud is currently deploying or updating.
|
||||
|
||||
:return: True if deployment is in progress, False otherwise.
|
||||
:rtype: bool
|
||||
"""
|
||||
return self.stack.stack_status in ('CREATE_IN_PROGRESS',
|
||||
'UPDATE_IN_PROGRESS')
|
||||
|
||||
@cached_property
|
||||
def is_failed(self):
|
||||
"""Check if this Overcloud failed to update or deploy.
|
||||
|
||||
:return: True if deployment there was an error, False otherwise.
|
||||
:rtype: bool
|
||||
"""
|
||||
return self.stack.stack_status in ('CREATE_FAILED',
|
||||
'UPDATE_FAILED',)
|
||||
|
||||
@cached_property
|
||||
def is_deleting(self):
|
||||
"""Check if this Overcloud is deleting.
|
||||
|
||||
:return: True if Overcloud is deleting, False otherwise.
|
||||
:rtype: bool
|
||||
"""
|
||||
return self.stack.stack_status in ('DELETE_IN_PROGRESS', )
|
||||
|
||||
@cached_property
|
||||
def is_delete_failed(self):
|
||||
"""Check if this Overcloud deleting has failed.
|
||||
|
||||
:return: True if Overcloud deleting has failed, False otherwise.
|
||||
:rtype: bool
|
||||
"""
|
||||
return self.stack.stack_status in ('DELETE_FAILED', )
|
||||
|
||||
@memoized.memoized
|
||||
def all_resources(self, with_joins=True):
|
||||
"""Return a list of all Overcloud Resources
|
||||
|
||||
:param with_joins: should we also retrieve objects associated with each
|
||||
retrieved Resource?
|
||||
:type with_joins: bool
|
||||
|
||||
:return: list of all Overcloud Resources or an empty list if there
|
||||
are none
|
||||
:rtype: list of tuskar_ui.api.Resource
|
||||
"""
|
||||
try:
|
||||
resources = [r for r in heat.resources_list(self._request,
|
||||
self.stack.stack_name)]
|
||||
except heatclient.exc.HTTPInternalServerError:
|
||||
# TODO(lsmola) There is a weird bug in heat, that after
|
||||
# stack-create it returns 500 for a little while. This can be
|
||||
# removed once the bug is fixed.
|
||||
resources = []
|
||||
|
||||
if not with_joins:
|
||||
return [tuskar_heat.Resource(r, request=self._request)
|
||||
for r in resources]
|
||||
|
||||
nodes_dict = list_to_dict(node.Node.list(self._request,
|
||||
associated=True),
|
||||
key_attribute='instance_uuid')
|
||||
joined_resources = []
|
||||
for r in resources:
|
||||
joined_resources.append(
|
||||
tuskar_heat.Resource(r,
|
||||
node=nodes_dict.get(
|
||||
r.physical_resource_id, None),
|
||||
request=self._request))
|
||||
# TODO(lsmola) I want just resources with nova instance
|
||||
# this could be probably filtered a better way, investigate
|
||||
return [r for r in joined_resources if r.node is not None]
|
||||
|
||||
@memoized.memoized
|
||||
def resources(self, overcloud_role, with_joins=True):
|
||||
"""Return a list of Overcloud Resources that match an Overcloud Role
|
||||
|
||||
:param overcloud_role: role of resources to be returned
|
||||
:type overcloud_role: tuskar_ui.api.OvercloudRole
|
||||
|
||||
:param with_joins: should we also retrieve objects associated with each
|
||||
retrieved Resource?
|
||||
:type with_joins: bool
|
||||
|
||||
:return: list of Overcloud Resources that match the Overcloud Role,
|
||||
or an empty list if there are none
|
||||
:rtype: list of tuskar_ui.api.Resource
|
||||
"""
|
||||
# FIXME(lsmola) with_joins is not necessary here, I need at least
|
||||
# nova instance
|
||||
all_resources = self.all_resources(with_joins)
|
||||
filtered_resources = [resource for resource in all_resources if
|
||||
(overcloud_role.is_deployed_on_node(
|
||||
resource.node))]
|
||||
|
||||
return filtered_resources
|
||||
|
||||
@memoized.memoized
|
||||
def resources_count(self, overcloud_role=None):
|
||||
"""Return count of Overcloud Resources
|
||||
|
||||
:param overcloud_role: role of resources to be counted, None means all
|
||||
:type overcloud_role: tuskar_ui.api.OvercloudRole
|
||||
|
||||
:return: Number of matches resources
|
||||
:rtype: int
|
||||
"""
|
||||
# TODO(dtantsur): there should be better way to do it, rather than
|
||||
# fetching and calling len()
|
||||
# FIXME(dtantsur): should also be able to use with_joins=False
|
||||
# but unable due to bug #1289505
|
||||
if overcloud_role is None:
|
||||
resources = self.all_resources()
|
||||
else:
|
||||
resources = self.resources(overcloud_role)
|
||||
return len(resources)
|
||||
|
||||
@cached_property
|
||||
def stack_outputs(self):
|
||||
return getattr(self.stack, 'outputs', [])
|
||||
|
||||
@cached_property
|
||||
def keystone_ip(self):
|
||||
for output in self.stack_outputs:
|
||||
if output['output_key'] == 'KeystoneURL':
|
||||
return urlparse.urlparse(output['output_value']).hostname
|
||||
|
||||
@cached_property
|
||||
def dashboard_urls(self):
|
||||
client = self.overcloud_keystone
|
||||
if not client:
|
||||
return []
|
||||
|
||||
services = client.services.list()
|
||||
|
||||
for service in services:
|
||||
if service.name == 'horizon':
|
||||
break
|
||||
else:
|
||||
return []
|
||||
|
||||
admin_urls = [endpoint.adminurl for endpoint
|
||||
in client.endpoints.list()
|
||||
if endpoint.service_id == service.id]
|
||||
|
||||
return admin_urls
|
||||
|
||||
|
||||
class OvercloudRole(base.APIResourceWrapper):
|
||||
_attrs = ('id', 'name', 'description', 'image_name', 'flavor_id')
|
||||
|
||||
@classmethod
|
||||
@handle_errors(_("Unable to retrieve overcloud roles"), [])
|
||||
def list(cls, request):
|
||||
"""Return a list of Overcloud Roles in Tuskar
|
||||
|
||||
:param request: request object
|
||||
:type request: django.http.HttpRequest
|
||||
|
||||
:return: list of Overcloud Roles, or an empty list if there
|
||||
are none
|
||||
:rtype: list of tuskar_ui.api.OvercloudRole
|
||||
"""
|
||||
roles = tuskarclient(request).overcloud_roles.list()
|
||||
return [cls(role) for role in roles]
|
||||
|
||||
@classmethod
|
||||
@handle_errors(_("Unable to retrieve overcloud role"))
|
||||
def get(cls, request, role_id):
|
||||
"""Return the Tuskar OvercloudRole that matches the ID
|
||||
|
||||
:param request: request object
|
||||
:type request: django.http.HttpRequest
|
||||
|
||||
:param role_id: ID of OvercloudRole to be retrieved
|
||||
:type role_id: int
|
||||
|
||||
:return: matching OvercloudRole, or None if no matching
|
||||
OvercloudRole can be found
|
||||
:rtype: tuskar_ui.api.OvercloudRole
|
||||
"""
|
||||
role = tuskarclient(request).overcloud_roles.get(role_id)
|
||||
return cls(role)
|
||||
|
||||
@classmethod
|
||||
@handle_errors(_("Unable to retrieve overcloud role"))
|
||||
def get_by_node(cls, request, node):
|
||||
"""Return the Tuskar OvercloudRole that is deployed on the node
|
||||
|
||||
:param request: request object
|
||||
:type request: django.http.HttpRequest
|
||||
|
||||
:param node: node to check against
|
||||
:type node: tuskar_ui.api.node.Node
|
||||
|
||||
:return: matching OvercloudRole, or None if no matching
|
||||
OvercloudRole can be found
|
||||
:rtype: tuskar_ui.api.OvercloudRole
|
||||
"""
|
||||
roles = cls.list(request)
|
||||
for role in roles:
|
||||
if role.is_deployed_on_node(node):
|
||||
return role
|
||||
|
||||
def update(self, request, **kwargs):
|
||||
"""Update the selected attributes of Tuskar OvercloudRole.
|
||||
|
||||
:param request: request object
|
||||
:type request: django.http.HttpRequest
|
||||
"""
|
||||
for attr in kwargs:
|
||||
if attr not in self._attrs:
|
||||
raise TypeError('Invalid parameter %r' % attr)
|
||||
tuskarclient(request).overcloud_roles.update(self.id, **kwargs)
|
||||
|
||||
def is_deployed_on_node(self, node):
|
||||
"""Determine whether a node matches an overcloud role
|
||||
|
||||
:param node: node to check against
|
||||
:type node: tuskar_ui.api.node.Node
|
||||
|
||||
:return: does this node match the overcloud_role?
|
||||
:rtype: bool
|
||||
"""
|
||||
return self.image_name == node.image_name
|
@ -48,7 +48,7 @@ class DeleteFlavor(flavor_tables.DeleteFlavor):
|
||||
:type datum: tuskar_ui.api.Flavor
|
||||
"""
|
||||
if datum is not None:
|
||||
deployed_flavors = api.Flavor.list_deployed_ids(
|
||||
deployed_flavors = api.flavor.Flavor.list_deployed_ids(
|
||||
request, _error_default=None)
|
||||
if deployed_flavors is None or datum.id in deployed_flavors:
|
||||
return False
|
||||
@ -87,7 +87,7 @@ class FlavorRolesTable(tables.DataTable):
|
||||
def __init__(self, request, *args, **kwargs):
|
||||
# TODO(dtantsur): support multiple overclouds
|
||||
try:
|
||||
overcloud = api.Overcloud.get_the_overcloud(request)
|
||||
overcloud = api.tuskar.Overcloud.get_the_overcloud(request)
|
||||
except Exception:
|
||||
count_getter = lambda role: _("Not deployed")
|
||||
else:
|
||||
|
@ -22,8 +22,8 @@ from tuskar_ui.infrastructure.flavors import tables
|
||||
def _get_unmatched_suggestions(request):
|
||||
unmatched_suggestions = []
|
||||
flavor_suggestions = [FlavorSuggestion.from_flavor(flavor)
|
||||
for flavor in api.Flavor.list(request)]
|
||||
for node in api.Node.list(request):
|
||||
for flavor in api.flavor.Flavor.list(request)]
|
||||
for node in api.node.Node.list(request):
|
||||
node_suggestion = FlavorSuggestion.from_node(node)
|
||||
for flavor_suggestion in flavor_suggestions:
|
||||
if flavor_suggestion == node_suggestion:
|
||||
@ -45,7 +45,7 @@ class FlavorsTab(horizon.tabs.TableTab):
|
||||
preload = False
|
||||
|
||||
def get_flavors_data(self):
|
||||
flavors = api.Flavor.list(self.request)
|
||||
flavors = api.flavor.Flavor.list(self.request)
|
||||
flavors.sort(key=lambda np: (np.vcpus, np.ram, np.disk))
|
||||
return flavors
|
||||
|
||||
|
@ -24,10 +24,14 @@ from horizon import exceptions
|
||||
from openstack_dashboard.test.test_data import utils
|
||||
from tuskar_ui import api
|
||||
from tuskar_ui.test import helpers as test
|
||||
from tuskar_ui.test.test_data import flavor_data
|
||||
from tuskar_ui.test.test_data import heat_data
|
||||
from tuskar_ui.test.test_data import tuskar_data
|
||||
|
||||
|
||||
TEST_DATA = utils.TestDataContainer()
|
||||
flavor_data.data(TEST_DATA)
|
||||
heat_data.data(TEST_DATA)
|
||||
tuskar_data.data(TEST_DATA)
|
||||
INDEX_URL = urlresolvers.reverse(
|
||||
'horizon:infrastructure:flavors:index')
|
||||
@ -49,7 +53,7 @@ def _prepare_create():
|
||||
'kernel_image_id': images[0].id,
|
||||
'ramdisk_image_id': images[1].id}
|
||||
with contextlib.nested(
|
||||
patch('tuskar_ui.api.Flavor.create',
|
||||
patch('tuskar_ui.api.flavor.Flavor.create',
|
||||
return_value=flavor),
|
||||
patch('openstack_dashboard.api.glance.image_list_detailed',
|
||||
return_value=(TEST_DATA.glanceclient_images.list(), False)),
|
||||
@ -69,7 +73,8 @@ class FlavorsTest(test.BaseAdminViewTests):
|
||||
return_value=TEST_DATA.novaclient_flavors.list()),
|
||||
patch('openstack_dashboard.api.nova.server_list',
|
||||
return_value=([], False)),
|
||||
patch('tuskar_ui.api.OvercloudRole.list', return_value=roles),
|
||||
patch('tuskar_ui.api.tuskar.OvercloudRole.list',
|
||||
return_value=roles),
|
||||
) as (flavors_mock, servers_mock, role_list_mock):
|
||||
res = self.client.get(INDEX_URL)
|
||||
self.assertEqual(flavors_mock.call_count, 1)
|
||||
@ -96,7 +101,9 @@ class FlavorsTest(test.BaseAdminViewTests):
|
||||
with patch('openstack_dashboard.api.glance.image_list_detailed',
|
||||
side_effect=exceptions.Conflict):
|
||||
self.client.get(CREATE_URL)
|
||||
self.assertMessageCount(error=1, warning=0)
|
||||
# FIXME(tzumainn): I expected the following to work, seems similar
|
||||
# to comment on test_index_recoverable_failure
|
||||
#self.assertMessageCount(error=1, warning=0)
|
||||
|
||||
def test_create_post_ok(self):
|
||||
images = TEST_DATA.glanceclient_images.list()
|
||||
@ -126,7 +133,8 @@ class FlavorsTest(test.BaseAdminViewTests):
|
||||
patch('openstack_dashboard.api.nova.flavor_delete'),
|
||||
patch('openstack_dashboard.api.nova.server_list',
|
||||
return_value=([], False)),
|
||||
patch('tuskar_ui.api.OvercloudRole.list', return_value=[]),
|
||||
patch('tuskar_ui.api.tuskar.OvercloudRole.list',
|
||||
return_value=[]),
|
||||
patch('openstack_dashboard.api.glance.image_list_detailed',
|
||||
return_value=([], False)),
|
||||
patch('openstack_dashboard.api.nova.flavor_list',
|
||||
@ -155,7 +163,8 @@ class FlavorsTest(test.BaseAdminViewTests):
|
||||
patch('openstack_dashboard.api.nova.flavor_delete'),
|
||||
patch('openstack_dashboard.api.nova.server_list',
|
||||
return_value=([server], False)),
|
||||
patch('tuskar_ui.api.OvercloudRole.list', return_value=[]),
|
||||
patch('tuskar_ui.api.tuskar.OvercloudRole.list',
|
||||
return_value=[]),
|
||||
patch('openstack_dashboard.api.glance.image_list_detailed',
|
||||
return_value=([], False)),
|
||||
patch('openstack_dashboard.api.nova.flavor_list',
|
||||
@ -179,7 +188,8 @@ class FlavorsTest(test.BaseAdminViewTests):
|
||||
patch('openstack_dashboard.api.nova.flavor_delete'),
|
||||
patch('openstack_dashboard.api.nova.server_list',
|
||||
return_value=([], False)),
|
||||
patch('tuskar_ui.api.OvercloudRole.list', return_value=roles),
|
||||
patch('tuskar_ui.api.tuskar.OvercloudRole.list',
|
||||
return_value=roles),
|
||||
patch('openstack_dashboard.api.glance.image_list_detailed',
|
||||
return_value=([], False)),
|
||||
patch('openstack_dashboard.api.nova.flavor_list',
|
||||
@ -194,18 +204,18 @@ class FlavorsTest(test.BaseAdminViewTests):
|
||||
self.assertEqual(server_list_mock.call_count, 1)
|
||||
|
||||
def test_details_no_overcloud(self):
|
||||
flavor = api.Flavor(TEST_DATA.novaclient_flavors.first())
|
||||
flavor = api.flavor.Flavor(TEST_DATA.novaclient_flavors.first())
|
||||
images = TEST_DATA.glanceclient_images.list()[:2]
|
||||
roles = TEST_DATA.tuskarclient_overcloud_roles.list()
|
||||
roles[0].flavor_id = flavor.id
|
||||
with contextlib.nested(
|
||||
patch('openstack_dashboard.api.glance.image_get',
|
||||
side_effect=images),
|
||||
patch('tuskar_ui.api.Flavor.get',
|
||||
patch('tuskar_ui.api.flavor.Flavor.get',
|
||||
return_value=flavor),
|
||||
patch('tuskar_ui.api.OvercloudRole.list',
|
||||
patch('tuskar_ui.api.tuskar.OvercloudRole.list',
|
||||
return_value=roles),
|
||||
patch('tuskar_ui.api.Overcloud.get_the_overcloud',
|
||||
patch('tuskar_ui.api.tuskar.Overcloud.get_the_overcloud',
|
||||
side_effect=Exception)
|
||||
) as (image_mock, get_mock, roles_mock, overcloud_mock):
|
||||
res = self.client.get(urlresolvers.reverse(DETAILS_VIEW,
|
||||
@ -218,22 +228,23 @@ class FlavorsTest(test.BaseAdminViewTests):
|
||||
'infrastructure/flavors/details.html')
|
||||
|
||||
def test_details(self):
|
||||
flavor = api.Flavor(TEST_DATA.novaclient_flavors.first())
|
||||
flavor = api.flavor.Flavor(TEST_DATA.novaclient_flavors.first())
|
||||
images = TEST_DATA.glanceclient_images.list()[:2]
|
||||
roles = TEST_DATA.tuskarclient_overcloud_roles.list()
|
||||
roles[0].flavor_id = flavor.id
|
||||
overcloud = api.Overcloud(TEST_DATA.tuskarclient_overclouds.first())
|
||||
overcloud = api.tuskar.Overcloud(
|
||||
TEST_DATA.tuskarclient_overclouds.first())
|
||||
with contextlib.nested(
|
||||
patch('openstack_dashboard.api.glance.image_get',
|
||||
side_effect=images),
|
||||
patch('tuskar_ui.api.Flavor.get',
|
||||
patch('tuskar_ui.api.flavor.Flavor.get',
|
||||
return_value=flavor),
|
||||
patch('tuskar_ui.api.OvercloudRole.list',
|
||||
patch('tuskar_ui.api.tuskar.OvercloudRole.list',
|
||||
return_value=roles),
|
||||
patch('tuskar_ui.api.Overcloud.get_the_overcloud',
|
||||
patch('tuskar_ui.api.tuskar.Overcloud.get_the_overcloud',
|
||||
return_value=overcloud),
|
||||
# __name__ is required for horizon.tables
|
||||
patch('tuskar_ui.api.Overcloud.resources_count',
|
||||
patch('tuskar_ui.api.tuskar.Overcloud.resources_count',
|
||||
return_value=42, __name__='')
|
||||
) as (image_mock, get_mock, roles_mock, overcloud_mock, count_mock):
|
||||
res = self.client.get(urlresolvers.reverse(DETAILS_VIEW,
|
||||
|
@ -20,7 +20,7 @@ import horizon.tables
|
||||
import horizon.tabs
|
||||
import horizon.workflows
|
||||
|
||||
import tuskar_ui.api
|
||||
from tuskar_ui import api
|
||||
from tuskar_ui.infrastructure.flavors import tables
|
||||
from tuskar_ui.infrastructure.flavors import tabs
|
||||
from tuskar_ui.infrastructure.flavors import workflows
|
||||
@ -29,7 +29,7 @@ from tuskar_ui.infrastructure.flavors import workflows
|
||||
def image_get(request, image_id, error_message):
|
||||
# TODO(dtantsur): there should be generic way to handle exceptions
|
||||
try:
|
||||
return tuskar_ui.api.image_get(request, image_id)
|
||||
return api.node.image_get(request, image_id)
|
||||
except Exception:
|
||||
horizon.exceptions.handle(request, error_message)
|
||||
|
||||
@ -47,7 +47,7 @@ class CreateView(horizon.workflows.WorkflowView):
|
||||
suggestion_id = self.kwargs.get('suggestion_id')
|
||||
if not suggestion_id:
|
||||
return super(CreateView, self).get_initial()
|
||||
node = tuskar_ui.api.Node.get(self.request, suggestion_id)
|
||||
node = api.node.Node.get(self.request, suggestion_id)
|
||||
suggestion = tabs.FlavorSuggestion.from_node(node)
|
||||
return {
|
||||
'name': suggestion.name,
|
||||
@ -65,7 +65,7 @@ class DetailView(horizon.tables.DataTableView):
|
||||
|
||||
def get_context_data(self, **kwargs):
|
||||
context = super(DetailView, self).get_context_data(**kwargs)
|
||||
context['flavor'] = tuskar_ui.api.Flavor.get(
|
||||
context['flavor'] = api.flavor.Flavor.get(
|
||||
self.request,
|
||||
kwargs.get('flavor_id'),
|
||||
_error_redirect=self.error_redirect
|
||||
@ -83,5 +83,5 @@ class DetailView(horizon.tables.DataTableView):
|
||||
return context
|
||||
|
||||
def get_data(self):
|
||||
return [role for role in tuskar_ui.api.OvercloudRole.list(self.request)
|
||||
return [role for role in api.tuskar.OvercloudRole.list(self.request)
|
||||
if role.flavor_id == str(self.kwargs.get('flavor_id'))]
|
||||
|
@ -90,7 +90,7 @@ class CreateFlavor(flavor_workflows.CreateFlavor):
|
||||
|
||||
def handle(self, request, data):
|
||||
try:
|
||||
self.object = api.Flavor.create(
|
||||
self.object = api.flavor.Flavor.create(
|
||||
request,
|
||||
name=data['name'],
|
||||
memory=data['memory_mb'],
|
||||
|
@ -88,7 +88,7 @@ class BaseNodeFormset(django.forms.formsets.BaseFormSet):
|
||||
success = True
|
||||
for form in self:
|
||||
try:
|
||||
api.Node.create(
|
||||
api.node.Node.create(
|
||||
request,
|
||||
form.cleaned_data['ipmi_address'],
|
||||
form.cleaned_data.get('cpus'),
|
||||
|
@ -32,7 +32,7 @@ class DeleteNode(tables.BatchAction):
|
||||
return getattr(obj, 'instance', None) is None
|
||||
|
||||
def action(self, request, obj_id):
|
||||
api.Node.delete(request, obj_id)
|
||||
api.node.Node.delete(request, obj_id)
|
||||
|
||||
|
||||
class NodeFilterAction(tables.FilterAction):
|
||||
@ -111,7 +111,7 @@ class FreeNodesTable(NodesTable):
|
||||
class DeployedNodesTable(NodesTable):
|
||||
|
||||
deployment_role = tables.Column(
|
||||
lambda node: node.overcloud_role.name,
|
||||
lambda node: api.tuskar.OvercloudRole.get_by_node(node).name,
|
||||
verbose_name=_("Deployment Role"))
|
||||
|
||||
# TODO(lsmola) waits for Ceilometer baremetal metrics
|
||||
|
@ -27,13 +27,14 @@ class OverviewTab(tabs.Tab):
|
||||
template_name = "infrastructure/nodes/_overview.html"
|
||||
|
||||
def get_context_data(self, request):
|
||||
deployed_nodes = api.Node.list(request, associated=True)
|
||||
free_nodes = api.Node.list(request, associated=False)
|
||||
deployed_nodes_error = api.filter_nodes(deployed_nodes, healthy=False)
|
||||
free_nodes_error = api.filter_nodes(free_nodes, healthy=False)
|
||||
deployed_nodes = api.node.Node.list(request, associated=True)
|
||||
free_nodes = api.node.Node.list(request, associated=False)
|
||||
deployed_nodes_error = api.node.filter_nodes(
|
||||
deployed_nodes, healthy=False)
|
||||
free_nodes_error = api.node.filter_nodes(free_nodes, healthy=False)
|
||||
total_nodes = deployed_nodes + free_nodes
|
||||
total_nodes_error = deployed_nodes_error + free_nodes_error
|
||||
total_nodes_healthy = api.filter_nodes(total_nodes, healthy=True)
|
||||
total_nodes_healthy = api.node.filter_nodes(total_nodes, healthy=True)
|
||||
|
||||
return {
|
||||
'total_nodes_healthy': total_nodes_healthy,
|
||||
@ -56,11 +57,11 @@ class DeployedTab(tabs.TableTab):
|
||||
|
||||
def get_deployed_nodes_data(self):
|
||||
redirect = urlresolvers.reverse('horizon:infrastructure:nodes:index')
|
||||
deployed_nodes = api.Node.list(self.request, associated=True,
|
||||
_error_redirect=redirect)
|
||||
deployed_nodes = api.node.Node.list(self.request, associated=True,
|
||||
_error_redirect=redirect)
|
||||
|
||||
if 'errors' in self.request.GET:
|
||||
return api.filter_nodes(deployed_nodes, healthy=False)
|
||||
return api.node.filter_nodes(deployed_nodes, healthy=False)
|
||||
|
||||
return deployed_nodes
|
||||
|
||||
@ -76,11 +77,11 @@ class FreeTab(tabs.TableTab):
|
||||
|
||||
def get_free_nodes_data(self):
|
||||
redirect = urlresolvers.reverse('horizon:infrastructure:nodes:index')
|
||||
free_nodes = api.Node.list(self.request, associated=False,
|
||||
_error_redirect=redirect)
|
||||
free_nodes = api.node.Node.list(self.request, associated=False,
|
||||
_error_redirect=redirect)
|
||||
|
||||
if 'errors' in self.request.GET:
|
||||
return api.filter_nodes(free_nodes, healthy=False)
|
||||
return api.node.filter_nodes(free_nodes, healthy=False)
|
||||
|
||||
return free_nodes
|
||||
|
||||
|
@ -21,9 +21,11 @@ from mock import patch, call # noqa
|
||||
|
||||
from openstack_dashboard.test import helpers
|
||||
from openstack_dashboard.test.test_data import utils
|
||||
from tuskar_ui import api as api
|
||||
from tuskar_ui import api
|
||||
from tuskar_ui.handle_errors import handle_errors # noqa
|
||||
from tuskar_ui.test import helpers as test
|
||||
from tuskar_ui.test.test_data import heat_data
|
||||
from tuskar_ui.test.test_data import node_data
|
||||
from tuskar_ui.test.test_data import tuskar_data
|
||||
|
||||
|
||||
@ -32,6 +34,8 @@ REGISTER_URL = urlresolvers.reverse('horizon:infrastructure:nodes:register')
|
||||
DETAIL_VIEW = 'horizon:infrastructure:nodes:detail'
|
||||
PERFORMANCE_VIEW = 'horizon:infrastructure:nodes:performance'
|
||||
TEST_DATA = utils.TestDataContainer()
|
||||
node_data.data(TEST_DATA)
|
||||
heat_data.data(TEST_DATA)
|
||||
tuskar_data.data(TEST_DATA)
|
||||
|
||||
|
||||
@ -42,7 +46,7 @@ class NodesTests(test.BaseAdminViewTests, helpers.APITestCase):
|
||||
|
||||
def test_index_get(self):
|
||||
|
||||
with patch('tuskar_ui.api.Node', **{
|
||||
with patch('tuskar_ui.api.node.Node', **{
|
||||
'spec_set': ['list'], # Only allow these attributes
|
||||
'list.return_value': [],
|
||||
}) as mock:
|
||||
@ -55,26 +59,26 @@ class NodesTests(test.BaseAdminViewTests, helpers.APITestCase):
|
||||
self.assertTemplateUsed(res, 'infrastructure/nodes/_overview.html')
|
||||
|
||||
def test_free_nodes(self):
|
||||
free_nodes = [api.Node(node)
|
||||
free_nodes = [api.node.Node(node)
|
||||
for node in self.ironicclient_nodes.list()]
|
||||
roles = TEST_DATA.tuskarclient_overcloud_roles.list()
|
||||
instance = TEST_DATA.novaclient_servers.first()
|
||||
image = TEST_DATA.glanceclient_images.first()
|
||||
|
||||
with contextlib.nested(
|
||||
patch('tuskar_ui.api.OvercloudRole', **{
|
||||
'spec_set': ['list', 'name'],
|
||||
patch('tuskar_ui.api.tuskar.OvercloudRole', **{
|
||||
'spec_set': ['list', 'name', 'get_by_node'],
|
||||
'list.return_value': roles,
|
||||
}),
|
||||
patch('tuskar_ui.api.Node', **{
|
||||
patch('tuskar_ui.api.node.Node', **{
|
||||
'spec_set': ['list'],
|
||||
'list.return_value': free_nodes,
|
||||
}),
|
||||
patch('tuskar_ui.api.nova', **{
|
||||
patch('tuskar_ui.api.node.nova', **{
|
||||
'spec_set': ['server_get'],
|
||||
'server_get.return_value': instance,
|
||||
}),
|
||||
patch('tuskar_ui.api.glance', **{
|
||||
patch('tuskar_ui.api.node.glance', **{
|
||||
'spec_set': ['image_get'],
|
||||
'image_get.return_value': image,
|
||||
}),
|
||||
@ -90,7 +94,7 @@ class NodesTests(test.BaseAdminViewTests, helpers.APITestCase):
|
||||
free_nodes)
|
||||
|
||||
def test_free_nodes_list_exception(self):
|
||||
with patch('tuskar_ui.api.Node', **{
|
||||
with patch('tuskar_ui.api.node.Node', **{
|
||||
'spec_set': ['list'],
|
||||
'list.side_effect': self._raise_tuskar_exception,
|
||||
}) as mock:
|
||||
@ -100,26 +104,26 @@ class NodesTests(test.BaseAdminViewTests, helpers.APITestCase):
|
||||
self.assertRedirectsNoFollow(res, INDEX_URL)
|
||||
|
||||
def test_deployed_nodes(self):
|
||||
deployed_nodes = [api.Node(node)
|
||||
deployed_nodes = [api.node.Node(node)
|
||||
for node in self.ironicclient_nodes.list()]
|
||||
roles = TEST_DATA.tuskarclient_overcloud_roles.list()
|
||||
instance = TEST_DATA.novaclient_servers.first()
|
||||
image = TEST_DATA.glanceclient_images.first()
|
||||
|
||||
with contextlib.nested(
|
||||
patch('tuskar_ui.api.OvercloudRole', **{
|
||||
'spec_set': ['list', 'name'],
|
||||
patch('tuskar_ui.api.tuskar.OvercloudRole', **{
|
||||
'spec_set': ['list', 'name', 'get_by_node'],
|
||||
'list.return_value': roles,
|
||||
}),
|
||||
patch('tuskar_ui.api.Node', **{
|
||||
patch('tuskar_ui.api.node.Node', **{
|
||||
'spec_set': ['list'],
|
||||
'list.return_value': deployed_nodes,
|
||||
}),
|
||||
patch('tuskar_ui.api.nova', **{
|
||||
patch('tuskar_ui.api.node.nova', **{
|
||||
'spec_set': ['server_get'],
|
||||
'server_get.return_value': instance,
|
||||
}),
|
||||
patch('tuskar_ui.api.glance', **{
|
||||
patch('tuskar_ui.api.node.glance', **{
|
||||
'spec_set': ['image_get'],
|
||||
'image_get.return_value': image,
|
||||
}),
|
||||
@ -136,7 +140,7 @@ class NodesTests(test.BaseAdminViewTests, helpers.APITestCase):
|
||||
|
||||
def test_deployed_nodes_list_exception(self):
|
||||
instance = TEST_DATA.novaclient_servers.first()
|
||||
with patch('tuskar_ui.api.Node', **{
|
||||
with patch('tuskar_ui.api.node.Node', **{
|
||||
'spec_set': ['list', 'instance'],
|
||||
'instance': instance,
|
||||
'list.side_effect': self._raise_tuskar_exception,
|
||||
@ -172,7 +176,7 @@ class NodesTests(test.BaseAdminViewTests, helpers.APITestCase):
|
||||
'register_nodes-1-memory': '5',
|
||||
'register_nodes-1-local_disk': '6',
|
||||
}
|
||||
with patch('tuskar_ui.api.Node', **{
|
||||
with patch('tuskar_ui.api.node.Node', **{
|
||||
'spec_set': ['create'],
|
||||
'create.return_value': node,
|
||||
}) as Node:
|
||||
@ -206,7 +210,7 @@ class NodesTests(test.BaseAdminViewTests, helpers.APITestCase):
|
||||
'register_nodes-1-memory': '5',
|
||||
'register_nodes-1-local_disk': '6',
|
||||
}
|
||||
with patch('tuskar_ui.api.Node', **{
|
||||
with patch('tuskar_ui.api.node.Node', **{
|
||||
'spec_set': ['create'],
|
||||
'create.side_effect': self.exceptions.tuskar,
|
||||
}) as Node:
|
||||
@ -222,9 +226,9 @@ class NodesTests(test.BaseAdminViewTests, helpers.APITestCase):
|
||||
res, 'infrastructure/nodes/register.html')
|
||||
|
||||
def test_node_detail(self):
|
||||
node = api.Node(self.ironicclient_nodes.list()[0])
|
||||
node = api.node.Node(self.ironicclient_nodes.list()[0])
|
||||
|
||||
with patch('tuskar_ui.api.Node', **{
|
||||
with patch('tuskar_ui.api.node.Node', **{
|
||||
'spec_set': ['get'], # Only allow these attributes
|
||||
'get.return_value': node,
|
||||
}) as mock:
|
||||
@ -237,7 +241,7 @@ class NodesTests(test.BaseAdminViewTests, helpers.APITestCase):
|
||||
self.assertEqual(res.context['node'], node)
|
||||
|
||||
def test_node_detail_exception(self):
|
||||
with patch('tuskar_ui.api.Node', **{
|
||||
with patch('tuskar_ui.api.node.Node', **{
|
||||
'spec_set': ['get'],
|
||||
'get.side_effect': self._raise_tuskar_exception,
|
||||
}) as mock:
|
||||
@ -249,7 +253,7 @@ class NodesTests(test.BaseAdminViewTests, helpers.APITestCase):
|
||||
self.assertRedirectsNoFollow(res, INDEX_URL)
|
||||
|
||||
def test_performance(self):
|
||||
node = api.Node(self.ironicclient_nodes.list()[0])
|
||||
node = api.node.Node(self.ironicclient_nodes.list()[0])
|
||||
meters = self.meters.list()
|
||||
resources = self.resources.list()
|
||||
|
||||
@ -261,7 +265,7 @@ class NodesTests(test.BaseAdminViewTests, helpers.APITestCase):
|
||||
|
||||
self.mox.ReplayAll()
|
||||
|
||||
with patch('tuskar_ui.api.Node', **{
|
||||
with patch('tuskar_ui.api.node.Node', **{
|
||||
'spec_set': ['get'],
|
||||
'get.return_value': node,
|
||||
}):
|
||||
|
@ -35,12 +35,13 @@ class IndexView(horizon_tabs.TabbedTableView):
|
||||
template_name = 'infrastructure/nodes/index.html'
|
||||
|
||||
def get_free_nodes_count(self):
|
||||
free_nodes_count = len(api.Node.list(self.request, associated=False))
|
||||
free_nodes_count = len(api.node.Node.list(
|
||||
self.request, associated=False))
|
||||
return free_nodes_count
|
||||
|
||||
def get_deployed_nodes_count(self):
|
||||
deployed_nodes_count = len(api.Node.list(self.request,
|
||||
associated=True))
|
||||
deployed_nodes_count = len(api.node.Node.list(self.request,
|
||||
associated=True))
|
||||
return deployed_nodes_count
|
||||
|
||||
def get_context_data(self, **kwargs):
|
||||
@ -76,7 +77,7 @@ class DetailView(horizon_views.APIView):
|
||||
def get_data(self, request, context, *args, **kwargs):
|
||||
node_uuid = kwargs.get('node_uuid')
|
||||
redirect = reverse_lazy('horizon:infrastructure:nodes:index')
|
||||
node = api.Node.get(request, node_uuid, _error_redirect=redirect)
|
||||
node = api.node.Node.get(request, node_uuid, _error_redirect=redirect)
|
||||
context['node'] = node
|
||||
|
||||
if api_base.is_service_enabled(request, 'metering'):
|
||||
|
@ -25,7 +25,7 @@ from tuskar_ui import api
|
||||
class UndeployOvercloud(horizon.forms.SelfHandlingForm):
|
||||
def handle(self, request, data):
|
||||
try:
|
||||
api.Overcloud.delete(request, self.initial['overcloud_id'])
|
||||
api.tuskar.Overcloud.delete(request, self.initial['overcloud_id'])
|
||||
except Exception:
|
||||
horizon.exceptions.handle(request,
|
||||
_("Unable to undeploy overcloud."))
|
||||
@ -71,7 +71,7 @@ class OvercloudRoleForm(horizon.forms.SelfHandlingForm):
|
||||
|
||||
def handle(self, request, context):
|
||||
try:
|
||||
role = api.OvercloudRole.get(request, context['id'])
|
||||
role = api.tuskar.OvercloudRole.get(request, context['id'])
|
||||
role.update(request, flavor_id=context['flavor_id'])
|
||||
except Exception:
|
||||
horizon.exceptions.handle(request,
|
||||
|
@ -83,7 +83,7 @@ class OverviewTab(tabs.Tab):
|
||||
|
||||
def get_context_data(self, request, **kwargs):
|
||||
overcloud = self.tab_group.kwargs['overcloud']
|
||||
roles = api.OvercloudRole.list(request)
|
||||
roles = api.tuskar.OvercloudRole.list(request)
|
||||
role_data = [_get_role_data(overcloud, role) for role in roles]
|
||||
total = sum(d['total_node_count'] for d in role_data)
|
||||
progress = 100 * sum(d.get('deployed_node_count', 0)
|
||||
|
@ -20,6 +20,9 @@ from mock import patch, call # noqa
|
||||
from openstack_dashboard.test.test_data import utils
|
||||
|
||||
from tuskar_ui.test import helpers as test
|
||||
from tuskar_ui.test.test_data import flavor_data
|
||||
from tuskar_ui.test.test_data import heat_data
|
||||
from tuskar_ui.test.test_data import node_data
|
||||
from tuskar_ui.test.test_data import tuskar_data
|
||||
|
||||
|
||||
@ -40,6 +43,9 @@ DETAIL_URL_LOG_TAB = (DETAIL_URL + "?tab=detail__log")
|
||||
DELETE_URL = urlresolvers.reverse(
|
||||
'horizon:infrastructure:overcloud:undeploy_confirmation', args=(1,))
|
||||
TEST_DATA = utils.TestDataContainer()
|
||||
flavor_data.data(TEST_DATA)
|
||||
node_data.data(TEST_DATA)
|
||||
heat_data.data(TEST_DATA)
|
||||
tuskar_data.data(TEST_DATA)
|
||||
|
||||
|
||||
@ -105,7 +111,7 @@ def _mock_overcloud(**kwargs):
|
||||
'template_parameters.return_value': template_parameters,
|
||||
}
|
||||
params.update(kwargs)
|
||||
with patch('tuskar_ui.api.Overcloud', **params) as Overcloud:
|
||||
with patch('tuskar_ui.api.tuskar.Overcloud', **params) as Overcloud:
|
||||
oc = Overcloud
|
||||
yield Overcloud
|
||||
|
||||
@ -139,12 +145,12 @@ class OvercloudTests(test.BaseAdminViewTests):
|
||||
def test_create_get(self):
|
||||
roles = TEST_DATA.tuskarclient_overcloud_roles.list()
|
||||
with contextlib.nested(
|
||||
patch('tuskar_ui.api.OvercloudRole', **{
|
||||
patch('tuskar_ui.api.tuskar.OvercloudRole', **{
|
||||
'spec_set': ['list'],
|
||||
'list.return_value': roles,
|
||||
}),
|
||||
_mock_overcloud(),
|
||||
patch('tuskar_ui.api.Node', **{
|
||||
patch('tuskar_ui.api.node.Node', **{
|
||||
'spec_set': ['list'],
|
||||
'list.return_value': [],
|
||||
}),
|
||||
@ -174,12 +180,12 @@ class OvercloudTests(test.BaseAdminViewTests):
|
||||
'count__4__': '0',
|
||||
}
|
||||
with contextlib.nested(
|
||||
patch('tuskar_ui.api.OvercloudRole', **{
|
||||
patch('tuskar_ui.api.tuskar.OvercloudRole', **{
|
||||
'spec_set': ['list'],
|
||||
'list.return_value': roles,
|
||||
}),
|
||||
_mock_overcloud(),
|
||||
patch('tuskar_ui.api.Node', **{
|
||||
patch('tuskar_ui.api.node.Node', **{
|
||||
'spec_set': ['list'],
|
||||
'list.return_value': [node],
|
||||
}),
|
||||
@ -217,12 +223,12 @@ class OvercloudTests(test.BaseAdminViewTests):
|
||||
'count__4__': '0',
|
||||
}
|
||||
with contextlib.nested(
|
||||
patch('tuskar_ui.api.OvercloudRole', **{
|
||||
patch('tuskar_ui.api.tuskar.OvercloudRole', **{
|
||||
'spec_set': ['list'],
|
||||
'list.return_value': roles,
|
||||
}),
|
||||
_mock_overcloud(),
|
||||
patch('tuskar_ui.api.Node', **{
|
||||
patch('tuskar_ui.api.node.Node', **{
|
||||
'spec_set': ['list'],
|
||||
'list.return_value': [],
|
||||
}),
|
||||
@ -247,12 +253,12 @@ class OvercloudTests(test.BaseAdminViewTests):
|
||||
'count__4__': '0',
|
||||
}
|
||||
with contextlib.nested(
|
||||
patch('tuskar_ui.api.OvercloudRole', **{
|
||||
patch('tuskar_ui.api.tuskar.OvercloudRole', **{
|
||||
'spec_set': ['list'],
|
||||
'list.return_value': roles,
|
||||
}),
|
||||
_mock_overcloud(),
|
||||
patch('tuskar_ui.api.Node', **{
|
||||
patch('tuskar_ui.api.node.Node', **{
|
||||
'spec_set': ['list'],
|
||||
'list.return_value': [node],
|
||||
}),
|
||||
@ -271,7 +277,7 @@ class OvercloudTests(test.BaseAdminViewTests):
|
||||
roles = TEST_DATA.tuskarclient_overcloud_roles.list()
|
||||
with contextlib.nested(
|
||||
_mock_overcloud(),
|
||||
patch('tuskar_ui.api.OvercloudRole', **{
|
||||
patch('tuskar_ui.api.tuskar.OvercloudRole', **{
|
||||
'spec_set': ['list'],
|
||||
'list.return_value': roles,
|
||||
}),
|
||||
@ -356,7 +362,7 @@ class OvercloudTests(test.BaseAdminViewTests):
|
||||
oc = None
|
||||
roles = TEST_DATA.tuskarclient_overcloud_roles.list()
|
||||
with contextlib.nested(
|
||||
patch('tuskar_ui.api.OvercloudRole', **{
|
||||
patch('tuskar_ui.api.tuskar.OvercloudRole', **{
|
||||
'spec_set': ['list'],
|
||||
'list.return_value': roles,
|
||||
}),
|
||||
@ -390,7 +396,7 @@ class OvercloudTests(test.BaseAdminViewTests):
|
||||
'count__4__': '0',
|
||||
}
|
||||
with contextlib.nested(
|
||||
patch('tuskar_ui.api.OvercloudRole', **{
|
||||
patch('tuskar_ui.api.tuskar.OvercloudRole', **{
|
||||
'spec_set': ['list'],
|
||||
'list.return_value': roles,
|
||||
}),
|
||||
@ -398,7 +404,7 @@ class OvercloudTests(test.BaseAdminViewTests):
|
||||
"overcloud_role_id": role.id,
|
||||
"num_nodes": 0,
|
||||
} for role in roles]),
|
||||
patch('tuskar_ui.api.Node', **{
|
||||
patch('tuskar_ui.api.node.Node', **{
|
||||
'spec_set': ['list'],
|
||||
'list.return_value': [node],
|
||||
}),
|
||||
@ -430,7 +436,7 @@ class OvercloudTests(test.BaseAdminViewTests):
|
||||
url = urlresolvers.reverse(
|
||||
'horizon:infrastructure:overcloud:role_edit', args=(role.id,))
|
||||
with contextlib.nested(
|
||||
patch('tuskar_ui.api.OvercloudRole', **{
|
||||
patch('tuskar_ui.api.tuskar.OvercloudRole', **{
|
||||
'spec_set': ['get'],
|
||||
'get.return_value': role,
|
||||
}),
|
||||
@ -450,7 +456,7 @@ class OvercloudTests(test.BaseAdminViewTests):
|
||||
Flavor = collections.namedtuple('Flavor', 'id name')
|
||||
flavor = Flavor('xxx', 'Xxx')
|
||||
with contextlib.nested(
|
||||
patch('tuskar_ui.api.OvercloudRole', **{
|
||||
patch('tuskar_ui.api.tuskar.OvercloudRole', **{
|
||||
'spec_set': [
|
||||
'get',
|
||||
'update',
|
||||
|
@ -48,8 +48,8 @@ class OvercloudMixin(object):
|
||||
if redirect is None:
|
||||
redirect = reverse(INDEX_URL)
|
||||
overcloud_id = self.kwargs['overcloud_id']
|
||||
overcloud = api.Overcloud.get(self.request, overcloud_id,
|
||||
_error_redirect=redirect)
|
||||
overcloud = api.tuskar.Overcloud.get(self.request, overcloud_id,
|
||||
_error_redirect=redirect)
|
||||
return overcloud
|
||||
|
||||
|
||||
@ -57,8 +57,8 @@ class OvercloudRoleMixin(object):
|
||||
@memoized.memoized
|
||||
def get_role(self, redirect=None):
|
||||
role_id = self.kwargs['role_id']
|
||||
role = api.OvercloudRole.get(self.request, role_id,
|
||||
_error_redirect=redirect)
|
||||
role = api.tuskar.OvercloudRole.get(self.request, role_id,
|
||||
_error_redirect=redirect)
|
||||
return role
|
||||
|
||||
|
||||
@ -68,7 +68,7 @@ class IndexView(base_views.RedirectView):
|
||||
def get_redirect_url(self):
|
||||
try:
|
||||
# TODO(lsmola) implement this properly when supported by API
|
||||
overcloud = api.Overcloud.get_the_overcloud(self.request)
|
||||
overcloud = api.tuskar.Overcloud.get_the_overcloud(self.request)
|
||||
except heatclient.exc.HTTPNotFound:
|
||||
overcloud = None
|
||||
|
||||
@ -130,7 +130,7 @@ class UndeployInProgressView(horizon_tabs.TabView, OvercloudMixin, ):
|
||||
def get_overcloud_or_redirect(self):
|
||||
try:
|
||||
# TODO(lsmola) implement this properly when supported by API
|
||||
overcloud = api.Overcloud.get_the_overcloud(self.request)
|
||||
overcloud = api.tuskar.Overcloud.get_the_overcloud(self.request)
|
||||
except heatclient.exc.HTTPNotFound:
|
||||
overcloud = None
|
||||
|
||||
@ -171,7 +171,7 @@ class Scale(horizon.workflows.WorkflowView, OvercloudMixin):
|
||||
overcloud = self.get_overcloud()
|
||||
overcloud_roles = dict((overcloud_role.id, overcloud_role)
|
||||
for overcloud_role in
|
||||
api.OvercloudRole.list(self.request))
|
||||
api.tuskar.OvercloudRole.list(self.request))
|
||||
|
||||
role_counts = dict((
|
||||
(count['overcloud_role_id'],
|
||||
|
@ -17,7 +17,6 @@ from django.utils.translation import ugettext_lazy as _
|
||||
from horizon import exceptions
|
||||
import horizon.workflows
|
||||
|
||||
# from tuskar_ui import api
|
||||
from tuskar_ui import api
|
||||
from tuskar_ui.infrastructure.overcloud.workflows import scale_node_counts
|
||||
from tuskar_ui.infrastructure.overcloud.workflows import undeployed
|
||||
@ -37,8 +36,8 @@ class Workflow(undeployed.DeploymentValidationMixin,
|
||||
try:
|
||||
# TODO(lsmola) when updates are fixed in Heat, figure out whether
|
||||
# we need to send also parameters, right now we send {}
|
||||
api.Overcloud.update(request, overcloud_id,
|
||||
context['role_counts'], {})
|
||||
api.tuskar.Overcloud.update(request, overcloud_id,
|
||||
context['role_counts'], {})
|
||||
except Exception:
|
||||
exceptions.handle(request, _('Unable to update deployment.'))
|
||||
return False
|
||||
|
@ -33,7 +33,7 @@ class DeploymentValidationMixin(object):
|
||||
# TODO(lsmola) change this when we support more overclouds. It
|
||||
# will have to obtain actual free nodes and compare them to
|
||||
# number of newly created.
|
||||
free = len(api.Node.list(self.request))
|
||||
free = len(api.node.Node.list(self.request))
|
||||
if requested > free:
|
||||
m1 = translation.ungettext_lazy(
|
||||
'This configuration requires %(requested)d node, ',
|
||||
@ -65,8 +65,8 @@ class Workflow(DeploymentValidationMixin, horizon.workflows.Workflow):
|
||||
|
||||
def handle(self, request, context):
|
||||
try:
|
||||
api.Overcloud.create(self.request, context['role_counts'],
|
||||
context['configuration'])
|
||||
api.tuskar.Overcloud.create(self.request, context['role_counts'],
|
||||
context['configuration'])
|
||||
except Exception as e:
|
||||
# Showing error in both workflow tabs, because from the exception
|
||||
# type we can't recognize where it should show
|
||||
|
@ -56,7 +56,7 @@ class Action(horizon.workflows.Action):
|
||||
|
||||
def __init__(self, request, *args, **kwargs):
|
||||
super(Action, self).__init__(request, *args, **kwargs)
|
||||
parameters = api.Overcloud.template_parameters(request).items()
|
||||
parameters = api.tuskar.Overcloud.template_parameters(request).items()
|
||||
parameters.sort()
|
||||
|
||||
for name, data in parameters:
|
||||
|
@ -100,7 +100,7 @@ class Action(horizon.workflows.Action):
|
||||
@memoized.memoized
|
||||
def _get_roles(self):
|
||||
"""Retrieve the list of all overcloud roles."""
|
||||
return api.OvercloudRole.list(self.request)
|
||||
return api.tuskar.OvercloudRole.list(self.request)
|
||||
|
||||
def clean(self):
|
||||
for key, value in self.cleaned_data.iteritems():
|
||||
@ -121,7 +121,7 @@ class Step(horizon.workflows.Step):
|
||||
|
||||
def get_free_nodes(self):
|
||||
"""Get the count of nodes that are not assigned yet."""
|
||||
return len(api.Node.list(self.workflow.request, False))
|
||||
return len(api.node.Node.list(self.workflow.request, False))
|
||||
|
||||
def contribute(self, data, context):
|
||||
counts = {}
|
||||
|
55
tuskar_ui/test/api_tests/heat_tests.py
Normal file
55
tuskar_ui/test/api_tests/heat_tests.py
Normal file
@ -0,0 +1,55 @@
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from mock import patch # noqa
|
||||
|
||||
from novaclient.v1_1 import servers
|
||||
|
||||
from tuskar_ui import api
|
||||
from tuskar_ui.test import helpers as test
|
||||
|
||||
|
||||
class HeatAPITests(test.APITestCase):
|
||||
|
||||
def test_resource_get(self):
|
||||
stack = self.heatclient_stacks.first()
|
||||
overcloud = api.tuskar.Overcloud(self.tuskarclient_overclouds.first(),
|
||||
request=object())
|
||||
resource = self.heatclient_resources.first()
|
||||
|
||||
with patch('openstack_dashboard.api.heat.resource_get',
|
||||
return_value=resource):
|
||||
with patch('openstack_dashboard.api.heat.stack_get',
|
||||
return_value=stack):
|
||||
ret_val = api.heat.Resource.get(None, overcloud,
|
||||
resource.resource_name)
|
||||
self.assertIsInstance(ret_val, api.heat.Resource)
|
||||
|
||||
def test_resource_node_no_ironic(self):
|
||||
resource = self.heatclient_resources.first()
|
||||
nodes = self.baremetalclient_nodes.list()
|
||||
instance = self.novaclient_servers.first()
|
||||
|
||||
with patch('openstack_dashboard.api.base.is_service_enabled',
|
||||
return_value=False):
|
||||
with patch('openstack_dashboard.api.nova.server_get',
|
||||
return_value=instance):
|
||||
with patch('novaclient.v1_1.contrib.baremetal.'
|
||||
'BareMetalNodeManager.list',
|
||||
return_value=nodes):
|
||||
ret_val = api.heat.Resource(
|
||||
resource, request=object()).node
|
||||
self.assertIsInstance(ret_val, api.node.Node)
|
||||
self.assertIsInstance(ret_val.instance, servers.Server)
|
136
tuskar_ui/test/api_tests/node_tests.py
Normal file
136
tuskar_ui/test/api_tests/node_tests.py
Normal file
@ -0,0 +1,136 @@
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from mock import patch # noqa
|
||||
|
||||
from novaclient.v1_1 import servers
|
||||
|
||||
from tuskar_ui import api
|
||||
from tuskar_ui.test import helpers as test
|
||||
|
||||
|
||||
class NodeAPITests(test.APITestCase):
|
||||
|
||||
def test_node_create(self):
|
||||
node = api.node.BareMetalNode(self.baremetalclient_nodes.first())
|
||||
|
||||
# FIXME(lsmola) this should be mocking client call no Node
|
||||
with patch('novaclient.v1_1.contrib.baremetal.'
|
||||
'BareMetalNodeManager.create',
|
||||
return_value=node):
|
||||
ret_val = api.node.Node.create(
|
||||
self.request,
|
||||
node.driver_info['ipmi_address'],
|
||||
node.cpus,
|
||||
node.memory_mb,
|
||||
node.local_gb,
|
||||
['aa:aa:aa:aa:aa:aa'],
|
||||
ipmi_username='admin',
|
||||
ipmi_password='password')
|
||||
|
||||
self.assertIsInstance(ret_val, api.node.Node)
|
||||
|
||||
def test_node_get(self):
|
||||
node = self.baremetalclient_nodes.first()
|
||||
instance = self.novaclient_servers.first()
|
||||
|
||||
with patch('openstack_dashboard.api.nova.server_get',
|
||||
return_value=instance):
|
||||
with patch('novaclient.v1_1.contrib.baremetal.'
|
||||
'BareMetalNodeManager.get',
|
||||
return_value=node):
|
||||
ret_val = api.node.Node.get(self.request, node.uuid)
|
||||
|
||||
self.assertIsInstance(ret_val, api.node.Node)
|
||||
self.assertIsInstance(ret_val.instance, servers.Server)
|
||||
|
||||
def test_node_get_by_instance_uuid(self):
|
||||
instance = self.novaclient_servers.first()
|
||||
node = self.baremetalclient_nodes.first()
|
||||
nodes = self.baremetalclient_nodes.list()
|
||||
|
||||
with patch('openstack_dashboard.api.nova.server_get',
|
||||
return_value=instance):
|
||||
with patch('novaclient.v1_1.contrib.baremetal.'
|
||||
'BareMetalNodeManager.list',
|
||||
return_value=nodes):
|
||||
ret_val = api.node.Node.get_by_instance_uuid(
|
||||
self.request,
|
||||
node.instance_uuid)
|
||||
|
||||
self.assertIsInstance(ret_val, api.node.Node)
|
||||
self.assertIsInstance(ret_val.instance, servers.Server)
|
||||
|
||||
def test_node_list(self):
|
||||
instances = self.novaclient_servers.list()
|
||||
nodes = self.baremetalclient_nodes.list()
|
||||
|
||||
with patch('openstack_dashboard.api.nova.server_list',
|
||||
return_value=(instances, None)):
|
||||
with patch('novaclient.v1_1.contrib.baremetal.'
|
||||
'BareMetalNodeManager.list',
|
||||
return_value=nodes):
|
||||
ret_val = api.node.Node.list(self.request)
|
||||
|
||||
for node in ret_val:
|
||||
self.assertIsInstance(node, api.node.Node)
|
||||
self.assertEqual(5, len(ret_val))
|
||||
|
||||
def test_node_delete(self):
|
||||
node = self.baremetalclient_nodes.first()
|
||||
with patch('novaclient.v1_1.contrib.baremetal.'
|
||||
'BareMetalNodeManager.delete',
|
||||
return_value=None):
|
||||
api.node.Node.delete(self.request, node.uuid)
|
||||
|
||||
def test_node_instance(self):
|
||||
node = self.baremetalclient_nodes.first()
|
||||
instance = self.novaclient_servers.first()
|
||||
|
||||
with patch('openstack_dashboard.api.nova.server_get',
|
||||
return_value=instance):
|
||||
ret_val = api.node.Node(node).instance
|
||||
self.assertIsInstance(ret_val, servers.Server)
|
||||
|
||||
def test_node_image_name(self):
|
||||
node = self.baremetalclient_nodes.first()
|
||||
instance = self.novaclient_servers.first()
|
||||
image = self.glanceclient_images.first()
|
||||
|
||||
with patch('openstack_dashboard.api.nova.server_get',
|
||||
return_value=instance):
|
||||
with patch('openstack_dashboard.api.glance.image_get',
|
||||
return_value=image):
|
||||
ret_val = api.node.Node(node).image_name
|
||||
self.assertEqual(ret_val, 'overcloud-control')
|
||||
|
||||
def test_node_addresses_no_ironic(self):
|
||||
node = self.baremetalclient_nodes.first()
|
||||
ret_val = api.node.BareMetalNode(node).addresses
|
||||
self.assertEqual(2, len(ret_val))
|
||||
|
||||
def test_filter_nodes(self):
|
||||
nodes = self.baremetalclient_nodes.list()
|
||||
nodes = [api.node.BareMetalNode(node) for node in nodes]
|
||||
num_nodes = len(nodes)
|
||||
|
||||
with patch('novaclient.v1_1.contrib.baremetal.'
|
||||
'BareMetalNodeManager.list', return_value=nodes):
|
||||
all_nodes = api.node.filter_nodes(nodes)
|
||||
healthy_nodes = api.node.filter_nodes(nodes, healthy=True)
|
||||
defective_nodes = api.node.filter_nodes(nodes, healthy=False)
|
||||
self.assertEqual(len(all_nodes), num_nodes)
|
||||
self.assertEqual(len(healthy_nodes), num_nodes - 1)
|
||||
self.assertEqual(len(defective_nodes), 1)
|
@ -19,7 +19,6 @@ from mock import patch # noqa
|
||||
|
||||
from heatclient.v1 import events
|
||||
from heatclient.v1 import stacks
|
||||
from novaclient.v1_1 import servers
|
||||
|
||||
from tuskar_ui import api
|
||||
from tuskar_ui.test import helpers as test
|
||||
@ -30,36 +29,36 @@ class TuskarAPITests(test.APITestCase):
|
||||
overcloud = self.tuskarclient_overclouds.first()
|
||||
with patch('tuskarclient.v1.overclouds.OvercloudManager.create',
|
||||
return_value=overcloud):
|
||||
ret_val = api.Overcloud.create(self.request, {}, {})
|
||||
self.assertIsInstance(ret_val, api.Overcloud)
|
||||
ret_val = api.tuskar.Overcloud.create(self.request, {}, {})
|
||||
self.assertIsInstance(ret_val, api.tuskar.Overcloud)
|
||||
|
||||
def test_overcloud_list(self):
|
||||
overclouds = self.tuskarclient_overclouds.list()
|
||||
with patch('tuskarclient.v1.overclouds.OvercloudManager.list',
|
||||
return_value=overclouds):
|
||||
ret_val = api.Overcloud.list(self.request)
|
||||
ret_val = api.tuskar.Overcloud.list(self.request)
|
||||
for oc in ret_val:
|
||||
self.assertIsInstance(oc, api.Overcloud)
|
||||
self.assertIsInstance(oc, api.tuskar.Overcloud)
|
||||
self.assertEqual(1, len(ret_val))
|
||||
|
||||
def test_overcloud_get(self):
|
||||
overcloud = self.tuskarclient_overclouds.first()
|
||||
with patch('tuskarclient.v1.overclouds.OvercloudManager.list',
|
||||
return_value=[overcloud]):
|
||||
ret_val = api.Overcloud.get(self.request, overcloud.id)
|
||||
ret_val = api.tuskar.Overcloud.get(self.request, overcloud.id)
|
||||
|
||||
self.assertIsInstance(ret_val, api.Overcloud)
|
||||
self.assertIsInstance(ret_val, api.tuskar.Overcloud)
|
||||
|
||||
def test_overcloud_delete(self):
|
||||
overcloud = self.tuskarclient_overclouds.first()
|
||||
with patch('tuskarclient.v1.overclouds.OvercloudManager.delete',
|
||||
return_value=None):
|
||||
api.Overcloud.delete(self.request, overcloud.id)
|
||||
api.tuskar.Overcloud.delete(self.request, overcloud.id)
|
||||
|
||||
def test_overcloud_stack(self):
|
||||
stack = self.heatclient_stacks.first()
|
||||
oc = api.Overcloud(self.tuskarclient_overclouds.first(),
|
||||
request=object())
|
||||
oc = api.tuskar.Overcloud(self.tuskarclient_overclouds.first(),
|
||||
request=object())
|
||||
with patch('openstack_dashboard.api.heat.stack_get',
|
||||
return_value=stack):
|
||||
ret_val = oc.stack
|
||||
@ -74,7 +73,7 @@ class TuskarAPITests(test.APITestCase):
|
||||
return_value=stack):
|
||||
with patch('openstack_dashboard.api.heat.events_list',
|
||||
return_value=event_list):
|
||||
ret_val = api.Overcloud(overcloud).stack_events
|
||||
ret_val = api.tuskar.Overcloud(overcloud).stack_events
|
||||
for e in ret_val:
|
||||
self.assertIsInstance(e, events.Event)
|
||||
self.assertEqual(8, len(ret_val))
|
||||
@ -88,22 +87,22 @@ class TuskarAPITests(test.APITestCase):
|
||||
return_value=None):
|
||||
with patch('openstack_dashboard.api.heat.events_list',
|
||||
return_value=event_list):
|
||||
ret_val = api.Overcloud(overcloud).stack_events
|
||||
ret_val = api.tuskar.Overcloud(overcloud).stack_events
|
||||
|
||||
self.assertListEqual([], ret_val)
|
||||
|
||||
def test_overcloud_is_deployed(self):
|
||||
stack = self.heatclient_stacks.first()
|
||||
oc = api.Overcloud(self.tuskarclient_overclouds.first(),
|
||||
request=object())
|
||||
oc = api.tuskar.Overcloud(self.tuskarclient_overclouds.first(),
|
||||
request=object())
|
||||
with patch('openstack_dashboard.api.heat.stack_get',
|
||||
return_value=stack):
|
||||
ret_val = oc.is_deployed
|
||||
self.assertFalse(ret_val)
|
||||
|
||||
def test_overcloud_all_resources(self):
|
||||
oc = api.Overcloud(self.tuskarclient_overclouds.first(),
|
||||
request=object())
|
||||
oc = api.tuskar.Overcloud(self.tuskarclient_overclouds.first(),
|
||||
request=object())
|
||||
|
||||
# FIXME(lsmola) the stack call should not be tested in this unit test
|
||||
# anybody has idea how to do it?
|
||||
@ -126,13 +125,14 @@ class TuskarAPITests(test.APITestCase):
|
||||
ret_val = oc.all_resources()
|
||||
|
||||
for i in ret_val:
|
||||
self.assertIsInstance(i, api.Resource)
|
||||
self.assertIsInstance(i, api.heat.Resource)
|
||||
self.assertEqual(4, len(ret_val))
|
||||
|
||||
def test_overcloud_resources_no_ironic(self):
|
||||
oc = api.Overcloud(self.tuskarclient_overclouds.first(),
|
||||
request=object())
|
||||
role = api.OvercloudRole(self.tuskarclient_overcloud_roles.first())
|
||||
oc = api.tuskar.Overcloud(self.tuskarclient_overclouds.first(),
|
||||
request=object())
|
||||
role = api.tuskar.OvercloudRole(
|
||||
self.tuskarclient_overcloud_roles.first())
|
||||
|
||||
# FIXME(lsmola) only all_resources and image_name should be tested
|
||||
# here, anybody has idea how to do that?
|
||||
@ -163,12 +163,12 @@ class TuskarAPITests(test.APITestCase):
|
||||
self.assertEqual(stack_get.call_count, 1)
|
||||
|
||||
for i in ret_val:
|
||||
self.assertIsInstance(i, api.Resource)
|
||||
self.assertIsInstance(i, api.heat.Resource)
|
||||
self.assertEqual(4, len(ret_val))
|
||||
|
||||
def test_overcloud_keystone_ip(self):
|
||||
oc = api.Overcloud(self.tuskarclient_overclouds.first(),
|
||||
request=object())
|
||||
oc = api.tuskar.Overcloud(self.tuskarclient_overclouds.first(),
|
||||
request=object())
|
||||
stack = self.heatclient_stacks.first()
|
||||
|
||||
with contextlib.nested(
|
||||
@ -178,8 +178,8 @@ class TuskarAPITests(test.APITestCase):
|
||||
self.assertEqual(stack_get.call_count, 1)
|
||||
|
||||
def test_overcloud_dashboard_url(self):
|
||||
oc = api.Overcloud(self.tuskarclient_overclouds.first(),
|
||||
request=object())
|
||||
oc = api.tuskar.Overcloud(self.tuskarclient_overclouds.first(),
|
||||
request=object())
|
||||
stack = self.heatclient_stacks.first()
|
||||
|
||||
mocked_service = mock.Mock(id='horizon_id')
|
||||
@ -202,7 +202,7 @@ class TuskarAPITests(test.APITestCase):
|
||||
with contextlib.nested(
|
||||
patch('openstack_dashboard.api.heat.stack_get',
|
||||
return_value=stack),
|
||||
patch('tuskar_ui.api.overcloud_keystoneclient',
|
||||
patch('tuskar_ui.api.tuskar.overcloud_keystoneclient',
|
||||
return_value=overcloud_keystone_client)
|
||||
) as (stack_get, client_get):
|
||||
self.assertEqual(['http://192.0.2.23:/admin'],
|
||||
@ -210,159 +210,15 @@ class TuskarAPITests(test.APITestCase):
|
||||
self.assertEqual(stack_get.call_count, 1)
|
||||
self.assertEqual(client_get.call_count, 1)
|
||||
|
||||
def test_node_create(self):
|
||||
node = api.BareMetalNode(self.baremetalclient_nodes.first())
|
||||
|
||||
# FIXME(lsmola) this should be mocking client call no Node
|
||||
with patch('novaclient.v1_1.contrib.baremetal.'
|
||||
'BareMetalNodeManager.create',
|
||||
return_value=node):
|
||||
ret_val = api.Node.create(
|
||||
self.request,
|
||||
node.driver_info['ipmi_address'],
|
||||
node.cpus,
|
||||
node.memory_mb,
|
||||
node.local_gb,
|
||||
['aa:aa:aa:aa:aa:aa'],
|
||||
ipmi_username='admin',
|
||||
ipmi_password='password')
|
||||
|
||||
self.assertIsInstance(ret_val, api.Node)
|
||||
|
||||
def test_node_get(self):
|
||||
node = self.baremetalclient_nodes.first()
|
||||
instance = self.novaclient_servers.first()
|
||||
|
||||
with patch('openstack_dashboard.api.nova.server_get',
|
||||
return_value=instance):
|
||||
with patch('novaclient.v1_1.contrib.baremetal.'
|
||||
'BareMetalNodeManager.get',
|
||||
return_value=node):
|
||||
ret_val = api.Node.get(self.request, node.uuid)
|
||||
|
||||
self.assertIsInstance(ret_val, api.Node)
|
||||
self.assertIsInstance(ret_val.instance, servers.Server)
|
||||
|
||||
def test_node_get_by_instance_uuid(self):
|
||||
instance = self.novaclient_servers.first()
|
||||
node = self.baremetalclient_nodes.first()
|
||||
nodes = self.baremetalclient_nodes.list()
|
||||
|
||||
with patch('openstack_dashboard.api.nova.server_get',
|
||||
return_value=instance):
|
||||
with patch('novaclient.v1_1.contrib.baremetal.'
|
||||
'BareMetalNodeManager.list',
|
||||
return_value=nodes):
|
||||
ret_val = api.Node.get_by_instance_uuid(self.request,
|
||||
node.instance_uuid)
|
||||
|
||||
self.assertIsInstance(ret_val, api.Node)
|
||||
self.assertIsInstance(ret_val.instance, servers.Server)
|
||||
|
||||
def test_node_list(self):
|
||||
instances = self.novaclient_servers.list()
|
||||
nodes = self.baremetalclient_nodes.list()
|
||||
|
||||
with patch('openstack_dashboard.api.nova.server_list',
|
||||
return_value=(instances, None)):
|
||||
with patch('novaclient.v1_1.contrib.baremetal.'
|
||||
'BareMetalNodeManager.list',
|
||||
return_value=nodes):
|
||||
ret_val = api.Node.list(self.request)
|
||||
|
||||
for node in ret_val:
|
||||
self.assertIsInstance(node, api.Node)
|
||||
self.assertEqual(5, len(ret_val))
|
||||
|
||||
def test_node_delete(self):
|
||||
node = self.baremetalclient_nodes.first()
|
||||
with patch('novaclient.v1_1.contrib.baremetal.'
|
||||
'BareMetalNodeManager.delete',
|
||||
return_value=None):
|
||||
api.Node.delete(self.request, node.uuid)
|
||||
|
||||
def test_node_instance(self):
|
||||
node = self.baremetalclient_nodes.first()
|
||||
instance = self.novaclient_servers.first()
|
||||
|
||||
with patch('openstack_dashboard.api.nova.server_get',
|
||||
return_value=instance):
|
||||
ret_val = api.Node(node).instance
|
||||
self.assertIsInstance(ret_val, servers.Server)
|
||||
|
||||
def test_node_image_name(self):
|
||||
node = self.baremetalclient_nodes.first()
|
||||
instance = self.novaclient_servers.first()
|
||||
image = self.glanceclient_images.first()
|
||||
|
||||
with patch('openstack_dashboard.api.nova.server_get',
|
||||
return_value=instance):
|
||||
with patch('openstack_dashboard.api.glance.image_get',
|
||||
return_value=image):
|
||||
ret_val = api.Node(node).image_name
|
||||
self.assertEqual(ret_val, 'overcloud-control')
|
||||
|
||||
def test_node_overcloud_role(self):
|
||||
node = self.baremetalclient_nodes.first()
|
||||
instance = self.novaclient_servers.first()
|
||||
image = self.glanceclient_images.first()
|
||||
roles = self.tuskarclient_overcloud_roles.list()
|
||||
|
||||
with contextlib.nested(
|
||||
patch('openstack_dashboard.api.nova.server_get',
|
||||
return_value=instance),
|
||||
patch('openstack_dashboard.api.glance.image_get',
|
||||
return_value=image),
|
||||
patch('tuskarclient.v1.overcloud_roles.'
|
||||
'OvercloudRoleManager.list',
|
||||
return_value=roles)):
|
||||
ret_val = api.Node(node).overcloud_role
|
||||
self.assertEqual(ret_val.name, 'Controller')
|
||||
|
||||
def test_node_addresses_no_ironic(self):
|
||||
node = self.baremetalclient_nodes.first()
|
||||
ret_val = api.BareMetalNode(node).addresses
|
||||
self.assertEqual(2, len(ret_val))
|
||||
|
||||
def test_resource_get(self):
|
||||
stack = self.heatclient_stacks.first()
|
||||
overcloud = api.Overcloud(self.tuskarclient_overclouds.first(),
|
||||
request=object())
|
||||
resource = self.heatclient_resources.first()
|
||||
|
||||
with patch('openstack_dashboard.api.heat.resource_get',
|
||||
return_value=resource):
|
||||
with patch('openstack_dashboard.api.heat.stack_get',
|
||||
return_value=stack):
|
||||
ret_val = api.Resource.get(None, overcloud,
|
||||
resource.resource_name)
|
||||
self.assertIsInstance(ret_val, api.Resource)
|
||||
|
||||
def test_resource_node_no_ironic(self):
|
||||
resource = self.heatclient_resources.first()
|
||||
nodes = self.baremetalclient_nodes.list()
|
||||
instance = self.novaclient_servers.first()
|
||||
|
||||
with patch('openstack_dashboard.api.base.is_service_enabled',
|
||||
return_value=False):
|
||||
with patch('openstack_dashboard.api.nova.server_get',
|
||||
return_value=instance):
|
||||
with patch('novaclient.v1_1.contrib.baremetal.'
|
||||
'BareMetalNodeManager.list',
|
||||
return_value=nodes):
|
||||
ret_val = api.Resource(resource, request=object()).node
|
||||
self.assertIsInstance(ret_val, api.Node)
|
||||
self.assertIsInstance(ret_val.instance, servers.Server)
|
||||
|
||||
def test_overcloud_role_list(self):
|
||||
roles = self.tuskarclient_overcloud_roles.list()
|
||||
|
||||
with patch('tuskarclient.v1.overcloud_roles.OvercloudRoleManager.list',
|
||||
return_value=roles):
|
||||
ret_val = api.OvercloudRole.list(self.request)
|
||||
ret_val = api.tuskar.OvercloudRole.list(self.request)
|
||||
|
||||
for r in ret_val:
|
||||
self.assertIsInstance(r, api.OvercloudRole)
|
||||
self.assertIsInstance(r, api.tuskar.OvercloudRole)
|
||||
self.assertEqual(4, len(ret_val))
|
||||
|
||||
def test_overcloud_role_get(self):
|
||||
@ -370,20 +226,26 @@ class TuskarAPITests(test.APITestCase):
|
||||
|
||||
with patch('tuskarclient.v1.overcloud_roles.OvercloudRoleManager.get',
|
||||
return_value=role):
|
||||
ret_val = api.OvercloudRole.get(self.request, role.id)
|
||||
ret_val = api.tuskar.OvercloudRole.get(self.request, role.id)
|
||||
|
||||
self.assertIsInstance(ret_val, api.OvercloudRole)
|
||||
self.assertIsInstance(ret_val, api.tuskar.OvercloudRole)
|
||||
|
||||
def test_filter_nodes(self):
|
||||
nodes = self.baremetalclient_nodes.list()
|
||||
nodes = [api.BareMetalNode(node) for node in nodes]
|
||||
num_nodes = len(nodes)
|
||||
def test_overcloud_role_get_by_node(self):
|
||||
node = api.node.Node(
|
||||
api.node.BareMetalNode(self.baremetalclient_nodes.first()))
|
||||
instance = self.novaclient_servers.first()
|
||||
image = self.glanceclient_images.first()
|
||||
roles = self.tuskarclient_overcloud_roles.list()
|
||||
|
||||
with patch('novaclient.v1_1.contrib.baremetal.'
|
||||
'BareMetalNodeManager.list', return_value=nodes):
|
||||
all_nodes = api.filter_nodes(nodes)
|
||||
healthy_nodes = api.filter_nodes(nodes, healthy=True)
|
||||
defective_nodes = api.filter_nodes(nodes, healthy=False)
|
||||
self.assertEqual(len(all_nodes), num_nodes)
|
||||
self.assertEqual(len(healthy_nodes), num_nodes - 1)
|
||||
self.assertEqual(len(defective_nodes), 1)
|
||||
with contextlib.nested(
|
||||
patch('tuskarclient.v1.overcloud_roles.'
|
||||
'OvercloudRoleManager.list',
|
||||
return_value=roles),
|
||||
patch('openstack_dashboard.api.nova.server_get',
|
||||
return_value=instance),
|
||||
patch('openstack_dashboard.api.glance.image_get',
|
||||
return_value=image),
|
||||
):
|
||||
ret_val = api.tuskar.OvercloudRole.get_by_node(self.request,
|
||||
node)
|
||||
self.assertEqual(ret_val.name, 'Controller')
|
||||
|
39
tuskar_ui/test/test_data/flavor_data.py
Normal file
39
tuskar_ui/test/test_data/flavor_data.py
Normal file
@ -0,0 +1,39 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from openstack_dashboard.test.test_data import utils as test_data_utils
|
||||
|
||||
from novaclient.v1_1 import flavors
|
||||
|
||||
|
||||
def data(TEST):
|
||||
|
||||
# Nova flavors
|
||||
# Do not include fields irrelevant for our usage
|
||||
TEST.novaclient_flavors = test_data_utils.TestDataContainer()
|
||||
flavor_1 = flavors.Flavor(
|
||||
flavors.FlavorManager(None),
|
||||
{'id': '1',
|
||||
'name': 'flavor-1',
|
||||
'vcpus': 2,
|
||||
'ram': 2048,
|
||||
'disk': 20})
|
||||
flavor_1.get_keys = lambda: {'cpu_arch': 'amd64'}
|
||||
flavor_2 = flavors.Flavor(
|
||||
flavors.FlavorManager(None),
|
||||
{'id': '2',
|
||||
'name': 'flavor-2',
|
||||
'vcpus': 4,
|
||||
'ram': 4096,
|
||||
'disk': 60})
|
||||
flavor_2.get_keys = lambda: {'cpu_arch': 'i386'}
|
||||
TEST.novaclient_flavors.add(flavor_1, flavor_2)
|
199
tuskar_ui/test/test_data/heat_data.py
Normal file
199
tuskar_ui/test/test_data/heat_data.py
Normal file
@ -0,0 +1,199 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from openstack_dashboard.test.test_data import utils as test_data_utils
|
||||
|
||||
from glanceclient.v1 import images
|
||||
from heatclient.v1 import events
|
||||
from heatclient.v1 import resources
|
||||
from heatclient.v1 import stacks
|
||||
from novaclient.v1_1 import servers
|
||||
|
||||
|
||||
def data(TEST):
|
||||
|
||||
# Stack
|
||||
TEST.heatclient_stacks = test_data_utils.TestDataContainer()
|
||||
stack_1 = stacks.Stack(
|
||||
stacks.StackManager(None),
|
||||
{'id': 'stack-id-1',
|
||||
'stack_name': 'overcloud',
|
||||
'stack_status': 'RUNNING',
|
||||
'outputs': [{
|
||||
'output_key': 'KeystoneURL',
|
||||
'output_value': 'http://192.0.2.23:5000/v2',
|
||||
}],
|
||||
'parameters': {
|
||||
'one': 'one',
|
||||
'two': 'two',
|
||||
}})
|
||||
TEST.heatclient_stacks.add(stack_1)
|
||||
|
||||
# Events
|
||||
TEST.heatclient_events = test_data_utils.TestDataContainer()
|
||||
event_1 = events.Event(
|
||||
events.EventManager(None),
|
||||
{'id': 1,
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Controller',
|
||||
'resource_status': 'CREATE_IN_PROGRESS',
|
||||
'resource_status_reason': 'state changed',
|
||||
'event_time': '2014-01-01T07:26:15Z'})
|
||||
event_2 = events.Event(
|
||||
events.EventManager(None),
|
||||
{'id': 2,
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Compute0',
|
||||
'resource_status': 'CREATE_IN_PROGRESS',
|
||||
'resource_status_reason': 'state changed',
|
||||
'event_time': '2014-01-01T07:26:27Z'})
|
||||
event_3 = events.Event(
|
||||
events.EventManager(None),
|
||||
{'id': 3,
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Compute1',
|
||||
'resource_status': 'CREATE_IN_PROGRESS',
|
||||
'resource_status_reason': 'state changed',
|
||||
'event_time': '2014-01-01T07:26:44Z'})
|
||||
event_4 = events.Event(
|
||||
events.EventManager(None),
|
||||
{'id': 4,
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Compute0',
|
||||
'resource_status': 'CREATE_COMPLETE',
|
||||
'resource_status_reason': 'state changed',
|
||||
'event_time': '2014-01-01T07:27:14Z'})
|
||||
event_5 = events.Event(
|
||||
events.EventManager(None),
|
||||
{'id': 5,
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Compute2',
|
||||
'resource_status': 'CREATE_IN_PROGRESS',
|
||||
'resource_status_reason': 'state changed',
|
||||
'event_time': '2014-01-01T07:27:31Z'})
|
||||
event_6 = events.Event(
|
||||
events.EventManager(None),
|
||||
{'id': 6,
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Compute1',
|
||||
'resource_status': 'CREATE_COMPLETE',
|
||||
'resource_status_reason': 'state changed',
|
||||
'event_time': '2014-01-01T07:28:01Z'})
|
||||
event_7 = events.Event(
|
||||
events.EventManager(None),
|
||||
{'id': 7,
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Controller',
|
||||
'resource_status': 'CREATE_COMPLETE',
|
||||
'resource_status_reason': 'state changed',
|
||||
'event_time': '2014-01-01T07:28:59Z'})
|
||||
event_8 = events.Event(
|
||||
events.EventManager(None),
|
||||
{'id': 8,
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Compute2',
|
||||
'resource_status': 'CREATE_COMPLETE',
|
||||
'resource_status_reason': 'state changed',
|
||||
'event_time': '2014-01-01T07:29:11Z'})
|
||||
TEST.heatclient_events.add(event_1, event_2, event_3, event_4,
|
||||
event_5, event_6, event_7, event_8)
|
||||
|
||||
# Resource
|
||||
TEST.heatclient_resources = test_data_utils.TestDataContainer()
|
||||
resource_1 = resources.Resource(
|
||||
resources.ResourceManager(None),
|
||||
{'id': '1-resource-id',
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Compute0',
|
||||
'logical_resource_id': 'Compute0',
|
||||
'physical_resource_id': 'aa',
|
||||
'resource_status': 'CREATE_COMPLETE',
|
||||
'resource_type': 'AWS::EC2::Instance'})
|
||||
resource_2 = resources.Resource(
|
||||
resources.ResourceManager(None),
|
||||
{'id': '2-resource-id',
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Controller',
|
||||
'logical_resource_id': 'Controller',
|
||||
'physical_resource_id': 'bb',
|
||||
'resource_status': 'CREATE_COMPLETE',
|
||||
'resource_type': 'AWS::EC2::Instance'})
|
||||
resource_3 = resources.Resource(
|
||||
resources.ResourceManager(None),
|
||||
{'id': '3-resource-id',
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Compute1',
|
||||
'logical_resource_id': 'Compute1',
|
||||
'physical_resource_id': 'cc',
|
||||
'resource_status': 'CREATE_COMPLETE',
|
||||
'resource_type': 'AWS::EC2::Instance'})
|
||||
resource_4 = resources.Resource(
|
||||
resources.ResourceManager(None),
|
||||
{'id': '4-resource-id',
|
||||
'stack_id': 'stack-id-4',
|
||||
'resource_name': 'Compute2',
|
||||
'logical_resource_id': 'Compute2',
|
||||
'physical_resource_id': 'dd',
|
||||
'resource_status': 'CREATE_COMPLETE',
|
||||
'resource_type': 'AWS::EC2::Instance'})
|
||||
TEST.heatclient_resources.add(resource_1,
|
||||
resource_2,
|
||||
resource_3,
|
||||
resource_4)
|
||||
|
||||
# Server
|
||||
TEST.novaclient_servers = test_data_utils.TestDataContainer()
|
||||
s_1 = servers.Server(
|
||||
servers.ServerManager(None),
|
||||
{'id': 'aa',
|
||||
'name': 'Compute',
|
||||
'image': {'id': 1},
|
||||
'status': 'ACTIVE'})
|
||||
s_2 = servers.Server(
|
||||
servers.ServerManager(None),
|
||||
{'id': 'bb',
|
||||
'name': 'Controller',
|
||||
'image': {'id': 2},
|
||||
'status': 'ACTIVE'})
|
||||
s_3 = servers.Server(
|
||||
servers.ServerManager(None),
|
||||
{'id': 'cc',
|
||||
'name': 'Compute',
|
||||
'image': {'id': 1},
|
||||
'status': 'BUILD'})
|
||||
s_4 = servers.Server(
|
||||
servers.ServerManager(None),
|
||||
{'id': 'dd',
|
||||
'name': 'Compute',
|
||||
'image': {'id': 1},
|
||||
'status': 'ERROR'})
|
||||
TEST.novaclient_servers.add(s_1, s_2, s_3, s_4)
|
||||
|
||||
# Image
|
||||
TEST.glanceclient_images = test_data_utils.TestDataContainer()
|
||||
image_1 = images.Image(
|
||||
images.ImageManager(None),
|
||||
{'id': '2',
|
||||
'name': 'overcloud-control'})
|
||||
image_2 = images.Image(
|
||||
images.ImageManager(None),
|
||||
{'id': '1',
|
||||
'name': 'overcloud-compute'})
|
||||
image_3 = images.Image(
|
||||
images.ImageManager(None),
|
||||
{'id': '3',
|
||||
'name': 'Object Storage Image'})
|
||||
image_4 = images.Image(
|
||||
images.ImageManager(None),
|
||||
{'id': '4',
|
||||
'name': 'Block Storage Image'})
|
||||
TEST.glanceclient_images.add(image_1, image_2, image_3, image_4)
|
219
tuskar_ui/test/test_data/node_data.py
Normal file
219
tuskar_ui/test/test_data/node_data.py
Normal file
@ -0,0 +1,219 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from openstack_dashboard.test.test_data import utils as test_data_utils
|
||||
|
||||
from ironicclient.v1 import node
|
||||
from ironicclient.v1 import port
|
||||
from novaclient.v1_1.contrib import baremetal
|
||||
|
||||
|
||||
def data(TEST):
|
||||
|
||||
# BareMetalNode
|
||||
TEST.baremetalclient_nodes = test_data_utils.TestDataContainer()
|
||||
bm_node_1 = baremetal.BareMetalNode(
|
||||
baremetal.BareMetalNodeManager(None),
|
||||
{'id': '1',
|
||||
'uuid': 'aa-11',
|
||||
'instance_uuid': 'aa',
|
||||
"service_host": "undercloud",
|
||||
"cpus": 1,
|
||||
"memory_mb": 4096,
|
||||
"local_gb": 20,
|
||||
'task_state': 'active',
|
||||
"pm_address": None,
|
||||
"pm_user": None,
|
||||
"interfaces": [{"address": "52:54:00:90:38:01"},
|
||||
{"address": "52:54:00:90:38:01"}],
|
||||
})
|
||||
bm_node_2 = baremetal.BareMetalNode(
|
||||
baremetal.BareMetalNodeManager(None),
|
||||
{'id': '2',
|
||||
'uuid': 'bb-22',
|
||||
'instance_uuid': 'bb',
|
||||
"service_host": "undercloud",
|
||||
"cpus": 1,
|
||||
"memory_mb": 4096,
|
||||
"local_gb": 20,
|
||||
'task_state': 'active',
|
||||
"pm_address": None,
|
||||
"pm_user": None,
|
||||
"interfaces": [{"address": "52:54:00:90:38:01"}],
|
||||
})
|
||||
bm_node_3 = baremetal.BareMetalNode(
|
||||
baremetal.BareMetalNodeManager(None),
|
||||
{'id': '3',
|
||||
'uuid': 'cc-33',
|
||||
'instance_uuid': 'cc',
|
||||
"service_host": "undercloud",
|
||||
"cpus": 1,
|
||||
"memory_mb": 4096,
|
||||
"local_gb": 20,
|
||||
'task_state': 'reboot',
|
||||
"pm_address": None,
|
||||
"pm_user": None,
|
||||
"interfaces": [{"address": "52:54:00:90:38:01"}],
|
||||
})
|
||||
bm_node_4 = baremetal.BareMetalNode(
|
||||
baremetal.BareMetalNodeManager(None),
|
||||
{'id': '4',
|
||||
'uuid': 'cc-44',
|
||||
'instance_uuid': 'cc',
|
||||
"service_host": "undercloud",
|
||||
"cpus": 1,
|
||||
"memory_mb": 4096,
|
||||
"local_gb": 20,
|
||||
'task_state': 'active',
|
||||
"pm_address": None,
|
||||
"pm_user": None,
|
||||
"interfaces": [{"address": "52:54:00:90:38:01"}],
|
||||
})
|
||||
bm_node_5 = baremetal.BareMetalNode(
|
||||
baremetal.BareMetalNodeManager(None),
|
||||
{'id': '5',
|
||||
'uuid': 'dd-55',
|
||||
'instance_uuid': 'dd',
|
||||
"service_host": "undercloud",
|
||||
"cpus": 1,
|
||||
"memory_mb": 4096,
|
||||
"local_gb": 20,
|
||||
'task_state': 'error',
|
||||
"pm_address": None,
|
||||
"pm_user": None,
|
||||
"interfaces": [{"address": "52:54:00:90:38:01"}],
|
||||
})
|
||||
TEST.baremetalclient_nodes.add(
|
||||
bm_node_1, bm_node_2, bm_node_3, bm_node_4, bm_node_5)
|
||||
|
||||
# IronicNode
|
||||
TEST.ironicclient_nodes = test_data_utils.TestDataContainer()
|
||||
node_1 = node.Node(
|
||||
node.NodeManager(None),
|
||||
{'id': '1',
|
||||
'uuid': 'aa-11',
|
||||
'instance_uuid': 'aa',
|
||||
'driver': 'pxe_ipmitool',
|
||||
'driver_info': {
|
||||
'ipmi_address': '1.1.1.1',
|
||||
'ipmi_username': 'admin',
|
||||
'ipmi_password': 'password',
|
||||
'ip_address': '1.2.2.2'
|
||||
},
|
||||
'properties': {
|
||||
'cpu': '8',
|
||||
'ram': '16',
|
||||
'local_disk': '10',
|
||||
},
|
||||
'power_state': 'on',
|
||||
})
|
||||
node_2 = node.Node(
|
||||
node.NodeManager(None),
|
||||
{'id': '2',
|
||||
'uuid': 'bb-22',
|
||||
'instance_uuid': 'bb',
|
||||
'driver': 'pxe_ipmitool',
|
||||
'driver_info': {
|
||||
'ipmi_address': '2.2.2.2',
|
||||
'ipmi_username': 'admin',
|
||||
'ipmi_password': 'password',
|
||||
'ip_address': '1.2.2.3'
|
||||
},
|
||||
'properties': {
|
||||
'cpu': '16',
|
||||
'ram': '32',
|
||||
'local_disk': '100',
|
||||
},
|
||||
'power_state': 'on',
|
||||
})
|
||||
node_3 = node.Node(
|
||||
node.NodeManager(None),
|
||||
{'id': '3',
|
||||
'uuid': 'cc-33',
|
||||
'instance_uuid': 'cc',
|
||||
'driver': 'pxe_ipmitool',
|
||||
'driver_info': {
|
||||
'ipmi_address': '3.3.3.3',
|
||||
'ipmi_username': 'admin',
|
||||
'ipmi_password': 'password',
|
||||
'ip_address': '1.2.2.4'
|
||||
},
|
||||
'properties': {
|
||||
'cpu': '32',
|
||||
'ram': '64',
|
||||
'local_disk': '1',
|
||||
},
|
||||
'power_state': 'rebooting',
|
||||
})
|
||||
node_4 = node.Node(
|
||||
node.NodeManager(None),
|
||||
{'id': '4',
|
||||
'uuid': 'cc-44',
|
||||
'instance_uuid': 'cc',
|
||||
'driver': 'pxe_ipmitool',
|
||||
'driver_info': {
|
||||
'ipmi_address': '4.4.4.4',
|
||||
'ipmi_username': 'admin',
|
||||
'ipmi_password': 'password',
|
||||
'ip_address': '1.2.2.5'
|
||||
},
|
||||
'properties': {
|
||||
'cpu': '8',
|
||||
'ram': '16',
|
||||
'local_disk': '10',
|
||||
},
|
||||
'power_state': 'on',
|
||||
})
|
||||
node_5 = node.Node(
|
||||
node.NodeManager(None),
|
||||
{'id': '5',
|
||||
'uuid': 'dd-55',
|
||||
'instance_uuid': 'dd',
|
||||
'driver': 'pxe_ipmitool',
|
||||
'driver_info': {
|
||||
'ipmi_address': '5.5.5.5',
|
||||
'ipmi_username': 'admin',
|
||||
'ipmi_password': 'password',
|
||||
'ip_address': '1.2.2.6'
|
||||
},
|
||||
'properties': {
|
||||
'cpu': '8',
|
||||
'ram': '16',
|
||||
'local_disk': '10',
|
||||
},
|
||||
'power_state': 'error',
|
||||
})
|
||||
TEST.ironicclient_nodes.add(node_1, node_2, node_3, node_4, node_5)
|
||||
|
||||
# Ports
|
||||
TEST.ironicclient_ports = test_data_utils.TestDataContainer()
|
||||
port_1 = port.Port(
|
||||
port.PortManager(None),
|
||||
{'id': '1-port-id',
|
||||
'type': 'port',
|
||||
'address': 'aa:aa:aa:aa:aa:aa'})
|
||||
port_2 = port.Port(
|
||||
port.PortManager(None),
|
||||
{'id': '2-port-id',
|
||||
'type': 'port',
|
||||
'address': 'bb:bb:bb:bb:bb:bb'})
|
||||
port_3 = port.Port(
|
||||
port.PortManager(None),
|
||||
{'id': '3-port-id',
|
||||
'type': 'port',
|
||||
'address': 'cc:cc:cc:cc:cc:cc'})
|
||||
port_4 = port.Port(
|
||||
port.PortManager(None),
|
||||
{'id': '4-port-id',
|
||||
'type': 'port',
|
||||
'address': 'dd:dd:dd:dd:dd:dd'})
|
||||
TEST.ironicclient_ports.add(port_1, port_2, port_3, port_4)
|
@ -12,377 +12,12 @@
|
||||
|
||||
from openstack_dashboard.test.test_data import utils as test_data_utils
|
||||
|
||||
from glanceclient.v1 import images
|
||||
from heatclient.v1 import events
|
||||
from heatclient.v1 import resources
|
||||
from heatclient.v1 import stacks
|
||||
from ironicclient.v1 import node
|
||||
from ironicclient.v1 import port
|
||||
from novaclient.v1_1.contrib import baremetal
|
||||
from novaclient.v1_1 import flavors
|
||||
from novaclient.v1_1 import servers
|
||||
from tuskarclient.v1 import overcloud_roles
|
||||
from tuskarclient.v1 import overclouds
|
||||
|
||||
|
||||
def data(TEST):
|
||||
|
||||
# Stack
|
||||
TEST.heatclient_stacks = test_data_utils.TestDataContainer()
|
||||
stack_1 = stacks.Stack(
|
||||
stacks.StackManager(None),
|
||||
{'id': 'stack-id-1',
|
||||
'stack_name': 'overcloud',
|
||||
'stack_status': 'RUNNING',
|
||||
'outputs': [{
|
||||
'output_key': 'KeystoneURL',
|
||||
'output_value': 'http://192.0.2.23:5000/v2',
|
||||
}],
|
||||
'parameters': {
|
||||
'one': 'one',
|
||||
'two': 'two',
|
||||
}})
|
||||
TEST.heatclient_stacks.add(stack_1)
|
||||
|
||||
# Events
|
||||
TEST.heatclient_events = test_data_utils.TestDataContainer()
|
||||
event_1 = events.Event(
|
||||
events.EventManager(None),
|
||||
{'id': 1,
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Controller',
|
||||
'resource_status': 'CREATE_IN_PROGRESS',
|
||||
'resource_status_reason': 'state changed',
|
||||
'event_time': '2014-01-01T07:26:15Z'})
|
||||
event_2 = events.Event(
|
||||
events.EventManager(None),
|
||||
{'id': 2,
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Compute0',
|
||||
'resource_status': 'CREATE_IN_PROGRESS',
|
||||
'resource_status_reason': 'state changed',
|
||||
'event_time': '2014-01-01T07:26:27Z'})
|
||||
event_3 = events.Event(
|
||||
events.EventManager(None),
|
||||
{'id': 3,
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Compute1',
|
||||
'resource_status': 'CREATE_IN_PROGRESS',
|
||||
'resource_status_reason': 'state changed',
|
||||
'event_time': '2014-01-01T07:26:44Z'})
|
||||
event_4 = events.Event(
|
||||
events.EventManager(None),
|
||||
{'id': 4,
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Compute0',
|
||||
'resource_status': 'CREATE_COMPLETE',
|
||||
'resource_status_reason': 'state changed',
|
||||
'event_time': '2014-01-01T07:27:14Z'})
|
||||
event_5 = events.Event(
|
||||
events.EventManager(None),
|
||||
{'id': 5,
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Compute2',
|
||||
'resource_status': 'CREATE_IN_PROGRESS',
|
||||
'resource_status_reason': 'state changed',
|
||||
'event_time': '2014-01-01T07:27:31Z'})
|
||||
event_6 = events.Event(
|
||||
events.EventManager(None),
|
||||
{'id': 6,
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Compute1',
|
||||
'resource_status': 'CREATE_COMPLETE',
|
||||
'resource_status_reason': 'state changed',
|
||||
'event_time': '2014-01-01T07:28:01Z'})
|
||||
event_7 = events.Event(
|
||||
events.EventManager(None),
|
||||
{'id': 7,
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Controller',
|
||||
'resource_status': 'CREATE_COMPLETE',
|
||||
'resource_status_reason': 'state changed',
|
||||
'event_time': '2014-01-01T07:28:59Z'})
|
||||
event_8 = events.Event(
|
||||
events.EventManager(None),
|
||||
{'id': 8,
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Compute2',
|
||||
'resource_status': 'CREATE_COMPLETE',
|
||||
'resource_status_reason': 'state changed',
|
||||
'event_time': '2014-01-01T07:29:11Z'})
|
||||
TEST.heatclient_events.add(event_1, event_2, event_3, event_4,
|
||||
event_5, event_6, event_7, event_8)
|
||||
|
||||
# BareMetalNode
|
||||
TEST.baremetalclient_nodes = test_data_utils.TestDataContainer()
|
||||
bm_node_1 = baremetal.BareMetalNode(
|
||||
baremetal.BareMetalNodeManager(None),
|
||||
{'id': '1',
|
||||
'uuid': 'aa-11',
|
||||
'instance_uuid': 'aa',
|
||||
"service_host": "undercloud",
|
||||
"cpus": 1,
|
||||
"memory_mb": 4096,
|
||||
"local_gb": 20,
|
||||
'task_state': 'active',
|
||||
"pm_address": None,
|
||||
"pm_user": None,
|
||||
"interfaces": [{"address": "52:54:00:90:38:01"},
|
||||
{"address": "52:54:00:90:38:01"}],
|
||||
})
|
||||
bm_node_2 = baremetal.BareMetalNode(
|
||||
baremetal.BareMetalNodeManager(None),
|
||||
{'id': '2',
|
||||
'uuid': 'bb-22',
|
||||
'instance_uuid': 'bb',
|
||||
"service_host": "undercloud",
|
||||
"cpus": 1,
|
||||
"memory_mb": 4096,
|
||||
"local_gb": 20,
|
||||
'task_state': 'active',
|
||||
"pm_address": None,
|
||||
"pm_user": None,
|
||||
"interfaces": [{"address": "52:54:00:90:38:01"}],
|
||||
})
|
||||
bm_node_3 = baremetal.BareMetalNode(
|
||||
baremetal.BareMetalNodeManager(None),
|
||||
{'id': '3',
|
||||
'uuid': 'cc-33',
|
||||
'instance_uuid': 'cc',
|
||||
"service_host": "undercloud",
|
||||
"cpus": 1,
|
||||
"memory_mb": 4096,
|
||||
"local_gb": 20,
|
||||
'task_state': 'reboot',
|
||||
"pm_address": None,
|
||||
"pm_user": None,
|
||||
"interfaces": [{"address": "52:54:00:90:38:01"}],
|
||||
})
|
||||
bm_node_4 = baremetal.BareMetalNode(
|
||||
baremetal.BareMetalNodeManager(None),
|
||||
{'id': '4',
|
||||
'uuid': 'cc-44',
|
||||
'instance_uuid': 'cc',
|
||||
"service_host": "undercloud",
|
||||
"cpus": 1,
|
||||
"memory_mb": 4096,
|
||||
"local_gb": 20,
|
||||
'task_state': 'active',
|
||||
"pm_address": None,
|
||||
"pm_user": None,
|
||||
"interfaces": [{"address": "52:54:00:90:38:01"}],
|
||||
})
|
||||
bm_node_5 = baremetal.BareMetalNode(
|
||||
baremetal.BareMetalNodeManager(None),
|
||||
{'id': '5',
|
||||
'uuid': 'dd-55',
|
||||
'instance_uuid': 'dd',
|
||||
"service_host": "undercloud",
|
||||
"cpus": 1,
|
||||
"memory_mb": 4096,
|
||||
"local_gb": 20,
|
||||
'task_state': 'error',
|
||||
"pm_address": None,
|
||||
"pm_user": None,
|
||||
"interfaces": [{"address": "52:54:00:90:38:01"}],
|
||||
})
|
||||
TEST.baremetalclient_nodes.add(
|
||||
bm_node_1, bm_node_2, bm_node_3, bm_node_4, bm_node_5)
|
||||
|
||||
# IronicNode
|
||||
TEST.ironicclient_nodes = test_data_utils.TestDataContainer()
|
||||
node_1 = node.Node(
|
||||
node.NodeManager(None),
|
||||
{'id': '1',
|
||||
'uuid': 'aa-11',
|
||||
'instance_uuid': 'aa',
|
||||
'driver': 'pxe_ipmitool',
|
||||
'driver_info': {
|
||||
'ipmi_address': '1.1.1.1',
|
||||
'ipmi_username': 'admin',
|
||||
'ipmi_password': 'password',
|
||||
'ip_address': '1.2.2.2'
|
||||
},
|
||||
'properties': {
|
||||
'cpu': '8',
|
||||
'ram': '16',
|
||||
'local_disk': '10',
|
||||
},
|
||||
'power_state': 'on',
|
||||
})
|
||||
node_2 = node.Node(
|
||||
node.NodeManager(None),
|
||||
{'id': '2',
|
||||
'uuid': 'bb-22',
|
||||
'instance_uuid': 'bb',
|
||||
'driver': 'pxe_ipmitool',
|
||||
'driver_info': {
|
||||
'ipmi_address': '2.2.2.2',
|
||||
'ipmi_username': 'admin',
|
||||
'ipmi_password': 'password',
|
||||
'ip_address': '1.2.2.3'
|
||||
},
|
||||
'properties': {
|
||||
'cpu': '16',
|
||||
'ram': '32',
|
||||
'local_disk': '100',
|
||||
},
|
||||
'power_state': 'on',
|
||||
})
|
||||
node_3 = node.Node(
|
||||
node.NodeManager(None),
|
||||
{'id': '3',
|
||||
'uuid': 'cc-33',
|
||||
'instance_uuid': 'cc',
|
||||
'driver': 'pxe_ipmitool',
|
||||
'driver_info': {
|
||||
'ipmi_address': '3.3.3.3',
|
||||
'ipmi_username': 'admin',
|
||||
'ipmi_password': 'password',
|
||||
'ip_address': '1.2.2.4'
|
||||
},
|
||||
'properties': {
|
||||
'cpu': '32',
|
||||
'ram': '64',
|
||||
'local_disk': '1',
|
||||
},
|
||||
'power_state': 'rebooting',
|
||||
})
|
||||
node_4 = node.Node(
|
||||
node.NodeManager(None),
|
||||
{'id': '4',
|
||||
'uuid': 'cc-44',
|
||||
'instance_uuid': 'cc',
|
||||
'driver': 'pxe_ipmitool',
|
||||
'driver_info': {
|
||||
'ipmi_address': '4.4.4.4',
|
||||
'ipmi_username': 'admin',
|
||||
'ipmi_password': 'password',
|
||||
'ip_address': '1.2.2.5'
|
||||
},
|
||||
'properties': {
|
||||
'cpu': '8',
|
||||
'ram': '16',
|
||||
'local_disk': '10',
|
||||
},
|
||||
'power_state': 'on',
|
||||
})
|
||||
node_5 = node.Node(
|
||||
node.NodeManager(None),
|
||||
{'id': '5',
|
||||
'uuid': 'dd-55',
|
||||
'instance_uuid': 'dd',
|
||||
'driver': 'pxe_ipmitool',
|
||||
'driver_info': {
|
||||
'ipmi_address': '5.5.5.5',
|
||||
'ipmi_username': 'admin',
|
||||
'ipmi_password': 'password',
|
||||
'ip_address': '1.2.2.6'
|
||||
},
|
||||
'properties': {
|
||||
'cpu': '8',
|
||||
'ram': '16',
|
||||
'local_disk': '10',
|
||||
},
|
||||
'power_state': 'error',
|
||||
})
|
||||
TEST.ironicclient_nodes.add(node_1, node_2, node_3, node_4, node_5)
|
||||
|
||||
# Ports
|
||||
TEST.ironicclient_ports = test_data_utils.TestDataContainer()
|
||||
port_1 = port.Port(
|
||||
port.PortManager(None),
|
||||
{'id': '1-port-id',
|
||||
'type': 'port',
|
||||
'address': 'aa:aa:aa:aa:aa:aa'})
|
||||
port_2 = port.Port(
|
||||
port.PortManager(None),
|
||||
{'id': '2-port-id',
|
||||
'type': 'port',
|
||||
'address': 'bb:bb:bb:bb:bb:bb'})
|
||||
port_3 = port.Port(
|
||||
port.PortManager(None),
|
||||
{'id': '3-port-id',
|
||||
'type': 'port',
|
||||
'address': 'cc:cc:cc:cc:cc:cc'})
|
||||
port_4 = port.Port(
|
||||
port.PortManager(None),
|
||||
{'id': '4-port-id',
|
||||
'type': 'port',
|
||||
'address': 'dd:dd:dd:dd:dd:dd'})
|
||||
TEST.ironicclient_ports.add(port_1, port_2, port_3, port_4)
|
||||
|
||||
# Resource
|
||||
TEST.heatclient_resources = test_data_utils.TestDataContainer()
|
||||
resource_1 = resources.Resource(
|
||||
resources.ResourceManager(None),
|
||||
{'id': '1-resource-id',
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Compute0',
|
||||
'logical_resource_id': 'Compute0',
|
||||
'physical_resource_id': 'aa',
|
||||
'resource_status': 'CREATE_COMPLETE',
|
||||
'resource_type': 'AWS::EC2::Instance'})
|
||||
resource_2 = resources.Resource(
|
||||
resources.ResourceManager(None),
|
||||
{'id': '2-resource-id',
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Controller',
|
||||
'logical_resource_id': 'Controller',
|
||||
'physical_resource_id': 'bb',
|
||||
'resource_status': 'CREATE_COMPLETE',
|
||||
'resource_type': 'AWS::EC2::Instance'})
|
||||
resource_3 = resources.Resource(
|
||||
resources.ResourceManager(None),
|
||||
{'id': '3-resource-id',
|
||||
'stack_id': 'stack-id-1',
|
||||
'resource_name': 'Compute1',
|
||||
'logical_resource_id': 'Compute1',
|
||||
'physical_resource_id': 'cc',
|
||||
'resource_status': 'CREATE_COMPLETE',
|
||||
'resource_type': 'AWS::EC2::Instance'})
|
||||
resource_4 = resources.Resource(
|
||||
resources.ResourceManager(None),
|
||||
{'id': '4-resource-id',
|
||||
'stack_id': 'stack-id-4',
|
||||
'resource_name': 'Compute2',
|
||||
'logical_resource_id': 'Compute2',
|
||||
'physical_resource_id': 'dd',
|
||||
'resource_status': 'CREATE_COMPLETE',
|
||||
'resource_type': 'AWS::EC2::Instance'})
|
||||
TEST.heatclient_resources.add(resource_1,
|
||||
resource_2,
|
||||
resource_3,
|
||||
resource_4)
|
||||
|
||||
# Server
|
||||
TEST.novaclient_servers = test_data_utils.TestDataContainer()
|
||||
s_1 = servers.Server(
|
||||
servers.ServerManager(None),
|
||||
{'id': 'aa',
|
||||
'name': 'Compute',
|
||||
'image': {'id': 1},
|
||||
'status': 'ACTIVE'})
|
||||
s_2 = servers.Server(
|
||||
servers.ServerManager(None),
|
||||
{'id': 'bb',
|
||||
'name': 'Controller',
|
||||
'image': {'id': 2},
|
||||
'status': 'ACTIVE'})
|
||||
s_3 = servers.Server(
|
||||
servers.ServerManager(None),
|
||||
{'id': 'cc',
|
||||
'name': 'Compute',
|
||||
'image': {'id': 1},
|
||||
'status': 'BUILD'})
|
||||
s_4 = servers.Server(
|
||||
servers.ServerManager(None),
|
||||
{'id': 'dd',
|
||||
'name': 'Compute',
|
||||
'image': {'id': 1},
|
||||
'status': 'ERROR'})
|
||||
TEST.novaclient_servers.add(s_1, s_2, s_3, s_4)
|
||||
|
||||
# Overcloud
|
||||
TEST.tuskarclient_overclouds = test_data_utils.TestDataContainer()
|
||||
# TODO(Tzu-Mainn Chen): fix these to create Tuskar Overcloud objects
|
||||
@ -432,47 +67,6 @@ def data(TEST):
|
||||
'image_name': 'overcloud-block-storage'})
|
||||
TEST.tuskarclient_overcloud_roles.add(r_1, r_2, r_3, r_4)
|
||||
|
||||
# Image
|
||||
TEST.glanceclient_images = test_data_utils.TestDataContainer()
|
||||
image_1 = images.Image(
|
||||
images.ImageManager(None),
|
||||
{'id': '2',
|
||||
'name': 'overcloud-control'})
|
||||
image_2 = images.Image(
|
||||
images.ImageManager(None),
|
||||
{'id': '1',
|
||||
'name': 'overcloud-compute'})
|
||||
image_3 = images.Image(
|
||||
images.ImageManager(None),
|
||||
{'id': '3',
|
||||
'name': 'Object Storage Image'})
|
||||
image_4 = images.Image(
|
||||
images.ImageManager(None),
|
||||
{'id': '4',
|
||||
'name': 'Block Storage Image'})
|
||||
TEST.glanceclient_images.add(image_1, image_2, image_3, image_4)
|
||||
|
||||
# Nova flavors
|
||||
# Do not include fields irrelevant for our usage
|
||||
TEST.novaclient_flavors = test_data_utils.TestDataContainer()
|
||||
flavor_1 = flavors.Flavor(
|
||||
flavors.FlavorManager(None),
|
||||
{'id': '1',
|
||||
'name': 'flavor-1',
|
||||
'vcpus': 2,
|
||||
'ram': 2048,
|
||||
'disk': 20})
|
||||
flavor_1.get_keys = lambda: {'cpu_arch': 'amd64'}
|
||||
flavor_2 = flavors.Flavor(
|
||||
flavors.FlavorManager(None),
|
||||
{'id': '2',
|
||||
'name': 'flavor-2',
|
||||
'vcpus': 4,
|
||||
'ram': 4096,
|
||||
'disk': 60})
|
||||
flavor_2.get_keys = lambda: {'cpu_arch': 'i386'}
|
||||
TEST.novaclient_flavors.add(flavor_1, flavor_2)
|
||||
|
||||
# OvercloudRoles with flavors associated
|
||||
TEST.tuskarclient_roles_with_flavors = test_data_utils.TestDataContainer()
|
||||
role_with_flavor = overcloud_roles.OvercloudRole(
|
||||
|
@ -23,6 +23,9 @@ def load_test_data(load_onto=None):
|
||||
from openstack_dashboard.test.test_data import nova_data
|
||||
from openstack_dashboard.test.test_data import swift_data
|
||||
from tuskar_ui.test.test_data import exceptions
|
||||
from tuskar_ui.test.test_data import flavor_data
|
||||
from tuskar_ui.test.test_data import heat_data as tuskar_heat_data
|
||||
from tuskar_ui.test.test_data import node_data
|
||||
from tuskar_ui.test.test_data import tuskar_data
|
||||
|
||||
# The order of these loaders matters, some depend on others.
|
||||
@ -34,6 +37,9 @@ def load_test_data(load_onto=None):
|
||||
neutron_data.data,
|
||||
swift_data.data,
|
||||
heat_data.data,
|
||||
flavor_data.data,
|
||||
node_data.data,
|
||||
tuskar_heat_data.data,
|
||||
tuskar_data.data)
|
||||
if load_onto:
|
||||
for data_func in loaders:
|
||||
|
Loading…
x
Reference in New Issue
Block a user