New Drydock APIs for nodes and tasks
- Added a new query param 'layers=x' to /tasks/{task_id} where x is a number. This param allows the API to drill down to all the subtasks. If x is -1 it will return all subtasks, otherwise it will return x layers of subtasks. - Added a new query param 'subtaskerrors=true' to /tasks/{task_id}. If true then any errors from subtasks will be included in the subtask_errors element in the response. - Added a POST to /nodes that requires node_filter and site_design in the body. Then uses those parameters to return a list of nodes. Change-Id: I9b4c06dd1c9de2b0500092aa4b4bfacac08eac54
This commit is contained in:
parent
97f9fbd12b
commit
b138b3c179
@ -29,6 +29,13 @@ GET nodes
|
||||
The Nodes API will provide a report of current nodes as known by the node provisioner
|
||||
and their status with a few hardware details.
|
||||
|
||||
POST nodes
|
||||
^^^^^^^^^^
|
||||
|
||||
The Nodes API will provide a report of current nodes as known by the node provisioner
|
||||
and their status with a few hardware details. This API requires node_filter and site_design
|
||||
in the POST body to return the proper node list.
|
||||
|
||||
GET nodes/hostname/builddata
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
|
@ -165,3 +165,10 @@ collected by this task.::
|
||||
"data_element": "{ \"id\": \"foo\", \"class\": \"system\" ...}"
|
||||
}
|
||||
]
|
||||
|
||||
Adding the parameter ``subtaskerrors=true`` in the query string will add one additional field
|
||||
with an object of subtask errors keyed by task_id.
|
||||
|
||||
Adding the parameter ``layers=x`` where x is -1 for all or a positive number to limit the number
|
||||
of layers. Will convert the response into an object of tasks and all subtasks keyed by task_id.
|
||||
It will also include the field init_task_id with the top task_id.
|
||||
|
@ -81,7 +81,9 @@ def start_api(state_manager=None, ingester=None, orchestrator=None):
|
||||
state_manager=state_manager, orchestrator=orchestrator)),
|
||||
|
||||
# API to list current MaaS nodes
|
||||
('/nodes', NodesResource()),
|
||||
('/nodes',
|
||||
NodesResource(state_manager=state_manager,
|
||||
orchestrator=orchestrator)),
|
||||
# API to get build data for a node
|
||||
('/nodes/{hostname}/builddata',
|
||||
NodeBuildDataResource(state_manager=state_manager)),
|
||||
|
@ -1,4 +1,4 @@
|
||||
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
|
||||
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@ -20,12 +20,17 @@ from drydock_provisioner import config
|
||||
from drydock_provisioner.drivers.node.maasdriver.api_client import MaasRequestFactory
|
||||
from drydock_provisioner.drivers.node.maasdriver.models.machine import Machines
|
||||
|
||||
from .base import BaseResource, StatefulResource
|
||||
from .base import StatefulResource
|
||||
|
||||
|
||||
class NodesResource(BaseResource):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
class NodesResource(StatefulResource):
|
||||
def __init__(self, orchestrator=None, **kwargs):
    """Initialize the resource.

    :param orchestrator: orchestrator.Orchestrator instance used to
        resolve node filters against a site design
    """
    super().__init__(**kwargs)
    self.orchestrator = orchestrator
|
||||
|
||||
@policy.ApiEnforcer('physical_provisioner:read_data')
|
||||
def on_get(self, req, resp):
|
||||
@ -58,6 +63,37 @@ class NodesResource(BaseResource):
|
||||
self.return_error(
|
||||
resp, falcon.HTTP_500, message="Unknown error", retry=False)
|
||||
|
||||
@policy.ApiEnforcer('physical_provisioner:read_data')
def on_post(self, req, resp):
    """Handler for POST method.

    Expects a JSON body with ``node_filter`` and ``site_design`` and
    responds with the list of nodes matching the filter. Missing body
    values produce a 400 response.
    """
    try:
        json_data = self.req_json(req)
        node_filter = json_data.get('node_filter', None)
        site_design = json_data.get('site_design', None)
        if node_filter is None or site_design is None:
            # Collect the names of whichever required inputs are absent.
            not_provided = [
                name for name, value in (('node_filter', node_filter),
                                         ('site_design', site_design))
                if value is None
            ]
            self.info(req.context,
                      'Missing required input value(s) %s' % not_provided)
            self.return_error(
                resp,
                falcon.HTTP_400,
                message='Missing input required value(s) %s' % not_provided,
                retry=False)
            return
        nodes = self.orchestrator.process_node_filter(
            node_filter=node_filter, site_design=site_design)
        # Guarantees an empty list is returned if there are no nodes
        resp.body = json.dumps(nodes if nodes else [])
        resp.status = falcon.HTTP_200
    except Exception as ex:
        self.error(req.context, "Unknown error: %s" % str(ex), exc_info=ex)
        self.return_error(
            resp, falcon.HTTP_500, message="Unknown error", retry=False)
|
||||
|
||||
|
||||
class NodeBuildDataResource(StatefulResource):
|
||||
"""Resource for returning build data for a node."""
|
||||
|
@ -1,4 +1,4 @@
|
||||
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
|
||||
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@ -299,27 +299,89 @@ class TaskResource(StatefulResource):
|
||||
def on_get(self, req, resp, task_id):
|
||||
"""Handler for GET method."""
|
||||
try:
|
||||
task = self.state_manager.get_task(uuid.UUID(task_id))
|
||||
if task is None:
|
||||
builddata = req.get_param_as_bool('builddata')
|
||||
subtask_errors = req.get_param_as_bool('subtaskerrors')
|
||||
try:
|
||||
layers = int(req.params.get('layers', '0'))
|
||||
except Exception as ex:
|
||||
layers = 0
|
||||
|
||||
first_task = self.get_task(req, resp, task_id, builddata)
|
||||
|
||||
if first_task is None:
|
||||
self.info(req.context, "Task %s does not exist" % task_id)
|
||||
self.return_error(
|
||||
resp,
|
||||
falcon.HTTP_404,
|
||||
message="Task %s does not exist" % task_id,
|
||||
retry=False)
|
||||
return
|
||||
else:
|
||||
# If layers is passed in then it returns a dict of tasks instead of the task dict.
|
||||
if layers:
|
||||
resp_data, errors = self.handle_layers(req, resp, task_id, builddata, subtask_errors, layers,
|
||||
first_task)
|
||||
# Includes subtask_errors if the query param 'subtaskerrors' is passed in as true.
|
||||
if (subtask_errors):
|
||||
resp_data['subtask_errors'] = errors
|
||||
else:
|
||||
resp_data = first_task
|
||||
# Includes subtask_errors if the query param 'subtaskerrors' is passed in as true.
|
||||
if (subtask_errors):
|
||||
_, errors = self.handle_layers(req, resp, task_id, False, subtask_errors, 1,
|
||||
first_task)
|
||||
resp_data['subtask_errors'] = errors
|
||||
|
||||
resp_data = task.to_dict()
|
||||
builddata = req.params.get('builddata', 'false').upper()
|
||||
|
||||
if builddata == "TRUE":
|
||||
task_bd = self.state_manager.get_build_data(
|
||||
task_id=task.get_id())
|
||||
resp_data['build_data'] = [bd.to_dict() for bd in task_bd]
|
||||
|
||||
resp.body = json.dumps(resp_data)
|
||||
resp.status = falcon.HTTP_200
|
||||
resp.body = json.dumps(resp_data)
|
||||
resp.status = falcon.HTTP_200
|
||||
except Exception as ex:
|
||||
self.error(req.context, "Unknown error: %s" % (str(ex)))
|
||||
self.return_error(
|
||||
resp, falcon.HTTP_500, message="Unknown error", retry=False)
|
||||
|
||||
def get_task(self, req, resp, task_id, builddata):
    """Retrieve a single task rendered as a dictionary.

    :param task_id: string UUID of the task to retrieve
    :param builddata: when truthy, attach the task's build data under
        the ``build_data`` key
    :return: the task as a dict, or None when no such task exists
    """
    try:
        record = self.state_manager.get_task(uuid.UUID(task_id))
        if record is None:
            return None

        rendered = record.to_dict()

        if builddata:
            build_records = self.state_manager.get_build_data(
                task_id=record.get_id())
            rendered['build_data'] = [bd.to_dict() for bd in build_records]

        return rendered
    except Exception as ex:
        self.error(req.context, "Unknown error: %s" % (str(ex)))
        self.return_error(
            resp, falcon.HTTP_500, message="Unknown error", retry=False)
|
||||
|
||||
def handle_layers(self, req, resp, task_id, builddata, subtask_errors,
                  layers, first_task):
    """Walk a task's subtask tree breadth-first.

    :param task_id: string UUID of the top-level task
    :param builddata: passed through to get_task for each subtask
    :param subtask_errors: when truthy, collect subtask results that
        report a non-zero errorCount (and force a full tree walk)
    :param layers: number of layers to include; -1 means all layers
    :param first_task: dict for the top-level task (layer 1)
    :return: tuple of (dict of tasks keyed by task_id plus
        ``init_task_id``, dict of error results keyed by task_id)
    """
    resp_data = {
        'init_task_id': task_id,
        first_task['task_id']: first_task,
    }
    errors = {}
    # first_task is layer 1
    current_layer = 1
    # Tolerate a task dict with no subtask_id_list key.
    queued_ids = list(first_task.get('subtask_id_list', []))
    # Each pass of the while loop handles one layer. When subtask_errors
    # is requested the whole tree is walked regardless of layer depth.
    while queued_ids and (current_layer < layers or layers == -1
                          or subtask_errors):
        # Take the current layer and queue up the next one.
        processing_ids = queued_ids
        queued_ids = []
        for subtask_id in processing_ids:
            subtask = self.get_task(req, resp, subtask_id, builddata)
            # Only include the subtask itself when within the layer range.
            if current_layer < layers or layers == -1:
                resp_data[subtask_id] = subtask
            if subtask:
                queued_ids.extend(subtask.get('subtask_id_list', []))
                result = subtask.get('result', {})
                if (subtask_errors and
                        result.get('details', {}).get('errorCount', 0) > 0):
                    # Tag the result with its task id for the error map.
                    result['task_id'] = subtask_id
                    errors[subtask_id] = result
        # Finished this layer, incrementing for the next while loop.
        current_layer = current_layer + 1
    return resp_data, errors
|
||||
|
93
tests/unit/test_api_nodes_unit.py
Normal file
93
tests/unit/test_api_nodes_unit.py
Normal file
@ -0,0 +1,93 @@
|
||||
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Test Nodes API"""
|
||||
from falcon import testing
|
||||
from unittest.mock import Mock
|
||||
|
||||
import pytest
|
||||
import json
|
||||
import logging
|
||||
|
||||
from drydock_provisioner import policy
|
||||
from drydock_provisioner.control.api import start_api
|
||||
|
||||
import falcon
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TestNodesApiUnit(object):
    """Unit tests for the /nodes API endpoints."""

    def test_post_nodes_resp(self, input_files, falcontest,
                             mock_process_node_filter):
        """A POST providing node_filter and site_design returns 200."""
        input_file = input_files.join("deckhand_fullsite.yaml")
        design_ref = "file://%s" % str(input_file)

        url = '/api/v1.0/nodes'
        hdr = self.get_standard_header()
        body = {
            'node_filter': 'filters',
            'site_design': design_ref,
        }

        result = falcontest.simulate_post(
            url, headers=hdr, body=json.dumps(body))

        LOG.debug(result.text)
        assert result.status == falcon.HTTP_200

    def test_input_error(self, falcontest):
        """A POST missing the required body values returns 400."""
        url = '/api/v1.0/nodes'
        hdr = self.get_standard_header()
        body = {}

        result = falcontest.simulate_post(
            url, headers=hdr, body=json.dumps(body))

        LOG.debug(result.text)
        assert result.status == falcon.HTTP_400

    def get_standard_header(self):
        """Return headers for an authenticated admin request."""
        return {
            'Content-Type': 'application/json',
            'X-IDENTITY-STATUS': 'Confirmed',
            'X-USER-NAME': 'Test',
            'X-ROLES': 'admin'
        }

    @pytest.fixture()
    def falcontest(self, drydock_state, deckhand_ingester,
                   deckhand_orchestrator, mock_get_build_data):
        """Create a test harness for the Falcon API framework."""
        policy.policy_engine = policy.DrydockPolicy()
        policy.policy_engine.register_policy()

        return testing.TestClient(
            start_api(
                state_manager=drydock_state,
                ingester=deckhand_ingester,
                orchestrator=deckhand_orchestrator))
||||
|
||||
@pytest.fixture()
def mock_process_node_filter(deckhand_orchestrator):
    """Replace process_node_filter with a stub returning an empty list."""
    def side_effect(**kwargs):
        # The API only serializes the result, so an empty list suffices.
        return []

    deckhand_orchestrator.real_process_node_filter = \
        deckhand_orchestrator.process_node_filter
    deckhand_orchestrator.process_node_filter = Mock(side_effect=side_effect)

    yield
    # Restore the real implementation for subsequent tests. (The previous
    # intermediate assignment of a fresh Mock here was dead code.)
    deckhand_orchestrator.process_node_filter = \
        deckhand_orchestrator.real_process_node_filter
|
263
tests/unit/test_api_tasks_unit.py
Normal file
263
tests/unit/test_api_tasks_unit.py
Normal file
@ -0,0 +1,263 @@
|
||||
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Test Tasks API"""
|
||||
from falcon import testing
|
||||
from unittest.mock import Mock
|
||||
|
||||
import pytest
|
||||
import json
|
||||
import logging
|
||||
|
||||
from drydock_provisioner import policy
|
||||
from drydock_provisioner.control.api import start_api
|
||||
import drydock_provisioner.objects as objects
|
||||
import drydock_provisioner.objects.fields as hd_fields
|
||||
|
||||
import falcon
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TestTasksApiUnit(object):
    """Unit tests for the /tasks/{task_id} API."""

    def test_get_tasks_id_resp(self, falcontest):
        """A plain GET returns the task without optional fields."""
        url = '/api/v1.0/tasks/11111111-1111-1111-1111-111111111111'
        hdr = self.get_standard_header()

        result = falcontest.simulate_get(url, headers=hdr)

        assert result.status == falcon.HTTP_200
        response_json = json.loads(result.text)
        assert response_json['task_id'] == '11111111-1111-1111-1111-111111111111'
        # Optional fields only appear when their query params are set.
        assert 'build_data' not in response_json
        assert 'subtask_errors' not in response_json

    def test_get_tasks_id_subtaskerror_noerrors_resp(self, falcontest):
        """subtaskerrors=true adds an empty error map when none exist."""
        url = '/api/v1.0/tasks/11111111-1111-1111-1111-111111111111'
        hdr = self.get_standard_header()

        result = falcontest.simulate_get(
            url, headers=hdr, query_string='subtaskerrors=true')

        assert result.status == falcon.HTTP_200
        response_json = json.loads(result.text)
        assert response_json['task_id'] == '11111111-1111-1111-1111-111111111111'
        assert response_json['subtask_errors'] == {}

    def test_get_tasks_id_subtaskerror_errors_resp(self, falcontest):
        """subtaskerrors=true surfaces errors from nested subtasks."""
        url = '/api/v1.0/tasks/11111111-1111-1111-1111-111111111113'
        hdr = self.get_standard_header()

        result = falcontest.simulate_get(
            url, headers=hdr, query_string='subtaskerrors=true')

        assert result.status == falcon.HTTP_200
        response_json = json.loads(result.text)
        assert response_json['task_id'] == '11111111-1111-1111-1111-111111111113'
        assert response_json['subtask_errors'][
            '11111111-1111-1111-1111-111111111116'][
                'details']['errorCount'] == 1

    def test_get_tasks_id_builddata_resp(self, falcontest):
        """builddata=true attaches build data to the task."""
        url = '/api/v1.0/tasks/11111111-1111-1111-1111-111111111111'
        hdr = self.get_standard_header()

        result = falcontest.simulate_get(
            url, headers=hdr, query_string='builddata=true')

        LOG.debug(result.text)
        assert result.status == falcon.HTTP_200
        response_json = json.loads(result.text)
        assert response_json['build_data']
        assert 'subtask_errors' not in response_json

    def test_get_tasks_id_builddata_subtaskerrors_resp(self, falcontest):
        """builddata and subtaskerrors can be combined on one request."""
        url = '/api/v1.0/tasks/11111111-1111-1111-1111-111111111111'
        hdr = self.get_standard_header()

        result = falcontest.simulate_get(
            url, headers=hdr, query_string='builddata=true&subtaskerrors=true')

        LOG.debug(result.text)
        assert result.status == falcon.HTTP_200
        response_json = json.loads(result.text)
        assert response_json['build_data']
        assert response_json['subtask_errors'] == {}

    def test_get_tasks_id_layers_resp(self, falcontest):
        """layers=2 returns the task plus one layer of subtasks only."""
        url = '/api/v1.0/tasks/11111111-1111-1111-1111-111111111113'
        hdr = self.get_standard_header()

        result = falcontest.simulate_get(
            url, headers=hdr, query_string='layers=2')

        LOG.debug(result.text)
        assert result.status == falcon.HTTP_200
        response_json = json.loads(result.text)
        init_task_id = '11111111-1111-1111-1111-111111111113'
        sub_task_id_1 = '11111111-1111-1111-1111-111111111114'
        sub_task_id_2 = '11111111-1111-1111-1111-111111111115'
        assert response_json['init_task_id'] == init_task_id
        assert response_json[init_task_id]['task_id'] == init_task_id
        assert response_json[sub_task_id_1]['task_id'] == sub_task_id_1
        assert response_json[sub_task_id_2]['task_id'] == sub_task_id_2
        # The third layer lies beyond the requested depth.
        assert '11111111-1111-1111-1111-111111111116' not in response_json

    def test_get_tasks_id_layers_all_noerrors_resp(self, falcontest):
        """layers=-1 returns every layer; no error map unless requested."""
        url = '/api/v1.0/tasks/11111111-1111-1111-1111-111111111113'
        hdr = self.get_standard_header()

        result = falcontest.simulate_get(
            url, headers=hdr, query_string='layers=-1')

        LOG.debug(result.text)
        assert result.status == falcon.HTTP_200
        response_json = json.loads(result.text)
        init_task_id = '11111111-1111-1111-1111-111111111113'
        sub_task_id_1 = '11111111-1111-1111-1111-111111111114'
        sub_task_id_2 = '11111111-1111-1111-1111-111111111115'
        assert response_json['init_task_id'] == init_task_id
        assert response_json[init_task_id]['task_id'] == init_task_id
        assert response_json[sub_task_id_1]['task_id'] == sub_task_id_1
        assert response_json[sub_task_id_2]['task_id'] == sub_task_id_2
        # With layers=-1 the deepest subtask is included.
        assert '11111111-1111-1111-1111-111111111116' in response_json
        assert 'subtask_errors' not in response_json

    def test_get_tasks_id_layers_all_errors_resp(self, falcontest):
        """layers=-1 with subtaskerrors=true returns tree plus errors."""
        url = '/api/v1.0/tasks/11111111-1111-1111-1111-111111111113'
        hdr = self.get_standard_header()

        result = falcontest.simulate_get(
            url, headers=hdr, query_string='layers=-1&subtaskerrors=true')

        LOG.debug(result.text)
        assert result.status == falcon.HTTP_200
        response_json = json.loads(result.text)
        init_task_id = '11111111-1111-1111-1111-111111111113'
        sub_task_id_1 = '11111111-1111-1111-1111-111111111114'
        sub_task_id_2 = '11111111-1111-1111-1111-111111111115'
        assert response_json['init_task_id'] == init_task_id
        assert response_json[init_task_id]['task_id'] == init_task_id
        assert response_json[sub_task_id_1]['task_id'] == sub_task_id_1
        assert response_json[sub_task_id_2]['task_id'] == sub_task_id_2
        assert '11111111-1111-1111-1111-111111111116' in response_json
        assert response_json['subtask_errors'][
            '11111111-1111-1111-1111-111111111116'][
                'details']['errorCount'] == 1

    def test_input_not_found(self, falcontest):
        """Requesting an unknown task id returns 404."""
        url = '/api/v1.0/tasks/11111111-1111-1111-1111-111111111112'
        hdr = self.get_standard_header()

        result = falcontest.simulate_get(url, headers=hdr)

        LOG.debug(result.text)
        assert result.status == falcon.HTTP_404

    @pytest.fixture()
    def falcontest(self, drydock_state, deckhand_ingester,
                   deckhand_orchestrator, mock_get_build_data, mock_get_task):
        """Create a test harness for the Falcon API framework."""
        policy.policy_engine = policy.DrydockPolicy()
        policy.policy_engine.register_policy()

        return testing.TestClient(
            start_api(
                state_manager=drydock_state,
                ingester=deckhand_ingester,
                orchestrator=deckhand_orchestrator))

    def get_standard_header(self):
        """Return headers for an authenticated admin request."""
        hdr = {
            'Content-Type': 'application/json',
            'X-IDENTITY-STATUS': 'Confirmed',
            'X-USER-NAME': 'Test',
            'X-ROLES': 'admin'
        }
        return hdr
|
||||
|
||||
@pytest.fixture()
def mock_get_task(drydock_state):
    """Replace drydock_state.get_task with a canned task hierarchy."""
    def _task(task_id, subtasks=None, failed=False):
        # Build a minimal objects.Task for the canned responses.
        new_task = objects.Task()
        new_task.task_id = task_id
        if subtasks is not None:
            new_task.subtask_id_list = subtasks
        if failed:
            new_task.result = objects.TaskStatus()
            new_task.result.set_status(hd_fields.ActionResult.Failure)
            new_task.result.add_status_msg(
                msg='Test', error=True, ctx_type='N/A', ctx='N/A')
        return new_task

    def side_effect(*args):
        task_id = str(args[0])
        LOG.debug(task_id)
        # Basic task carrying a failure result
        if task_id == '11111111-1111-1111-1111-111111111111':
            return _task(task_id, failed=True)
        # Task not found
        if task_id == '11111111-1111-1111-1111-111111111112':
            return None
        # Root of the layered task hierarchy
        if task_id == '11111111-1111-1111-1111-111111111113':
            return _task(task_id,
                         subtasks=['11111111-1111-1111-1111-111111111114',
                                   '11111111-1111-1111-1111-111111111115'])
        if task_id == '11111111-1111-1111-1111-111111111114':
            return _task(task_id)
        if task_id == '11111111-1111-1111-1111-111111111115':
            return _task(task_id,
                         subtasks=['11111111-1111-1111-1111-111111111116',
                                   '11111111-1111-1111-1111-111111111117'])
        # Deepest subtask, carrying an error for subtaskerrors tests
        if task_id == '11111111-1111-1111-1111-111111111116':
            return _task(task_id, failed=True)
        LOG.debug('returning None')
        return None

    drydock_state.real_get_task = drydock_state.get_task
    drydock_state.get_task = Mock(side_effect=side_effect)

    yield
    # Restore the real implementation for subsequent tests. (The previous
    # intermediate assignment of a fresh Mock here was dead code.)
    drydock_state.get_task = drydock_state.real_get_task
|
Loading…
x
Reference in New Issue
Block a user