Merge "Deploy templates: conductor and API nits"

Zuul 2019-03-14 16:36:52 +00:00 committed by Gerrit Code Review
commit 9625b8aa71
13 changed files with 319 additions and 182 deletions


@ -325,12 +325,8 @@ class ChassisController(rest.RestController):
policy.authorize('baremetal:chassis:update', cdict, cdict)
rpc_chassis = objects.Chassis.get_by_uuid(context, chassis_uuid)
try:
chassis = Chassis(
**api_utils.apply_jsonpatch(rpc_chassis.as_dict(), patch))
except api_utils.JSONPATCH_EXCEPTIONS as e:
raise exception.PatchError(patch=patch, reason=e)
chassis = Chassis(
**api_utils.apply_jsonpatch(rpc_chassis.as_dict(), patch))
# Update only the fields that have changed
for field in objects.Chassis.fields:


@ -116,7 +116,8 @@ class DeployTemplate(base.APIBase):
# The name must also be a valid trait.
api_utils.validate_trait(
value.name, _("Deploy template name must be a valid trait"))
value.name,
error_prefix=_("Deploy template name must be a valid trait"))
# There must be at least one step.
if not value.steps:
@ -396,12 +397,9 @@ class DeployTemplatesController(rest.RestController):
rpc_template = api_utils.get_rpc_deploy_template_with_suffix(
template_ident)
try:
template_dict = rpc_template.as_dict()
template = DeployTemplate(
**api_utils.apply_jsonpatch(template_dict, patch))
except api_utils.JSONPATCH_EXCEPTIONS as e:
raise exception.PatchError(patch=patch, reason=e)
template_dict = rpc_template.as_dict()
template = DeployTemplate(
**api_utils.apply_jsonpatch(template_dict, patch))
template.validate(template)
self._update_changed_fields(template, rpc_template)


@ -2149,16 +2149,14 @@ class NodesController(rest.RestController):
% node_ident)
error_msg += "'%(name)s'"
self._check_names_acceptable(names, error_msg)
try:
node_dict = rpc_node.as_dict()
# NOTE(lucasagomes):
# 1) Remove chassis_id because it's an internal value and
# not present in the API object
# 2) Add chassis_uuid
node_dict['chassis_uuid'] = node_dict.pop('chassis_id', None)
node = Node(**api_utils.apply_jsonpatch(node_dict, patch))
except api_utils.JSONPATCH_EXCEPTIONS as e:
raise exception.PatchError(patch=patch, reason=e)
node_dict = rpc_node.as_dict()
# NOTE(lucasagomes):
# 1) Remove chassis_id because it's an internal value and
# not present in the API object
# 2) Add chassis_uuid
node_dict['chassis_uuid'] = node_dict.pop('chassis_id', None)
node = Node(**api_utils.apply_jsonpatch(node_dict, patch))
self._update_changed_fields(node, rpc_node)
# NOTE(deva): we calculate the rpc topic here in case node.driver
# has changed, so that update is sent to the


@ -677,21 +677,18 @@ class PortsController(rest.RestController):
self._check_allowed_port_fields(fields_to_check)
rpc_port = objects.Port.get_by_uuid(context, port_uuid)
try:
port_dict = rpc_port.as_dict()
# NOTE(lucasagomes):
# 1) Remove node_id because it's an internal value and
# not present in the API object
# 2) Add node_uuid
port_dict['node_uuid'] = port_dict.pop('node_id', None)
# NOTE(vsaienko):
# 1) Remove portgroup_id because it's an internal value and
# not present in the API object
# 2) Add portgroup_uuid
port_dict['portgroup_uuid'] = port_dict.pop('portgroup_id', None)
port = Port(**api_utils.apply_jsonpatch(port_dict, patch))
except api_utils.JSONPATCH_EXCEPTIONS as e:
raise exception.PatchError(patch=patch, reason=e)
port_dict = rpc_port.as_dict()
# NOTE(lucasagomes):
# 1) Remove node_id because it's an internal value and
# not present in the API object
# 2) Add node_uuid
port_dict['node_uuid'] = port_dict.pop('node_id', None)
# NOTE(vsaienko):
# 1) Remove portgroup_id because it's an internal value and
# not present in the API object
# 2) Add portgroup_uuid
port_dict['portgroup_uuid'] = port_dict.pop('portgroup_id', None)
port = Port(**api_utils.apply_jsonpatch(port_dict, patch))
api_utils.handle_patch_port_like_extra_vif(rpc_port, port, patch)


@ -545,17 +545,14 @@ class PortgroupsController(pecan.rest.RestController):
raise wsme.exc.ClientSideError(
error_msg, status_code=http_client.BAD_REQUEST)
try:
portgroup_dict = rpc_portgroup.as_dict()
# NOTE:
# 1) Remove node_id because it's an internal value and
# not present in the API object
# 2) Add node_uuid
portgroup_dict['node_uuid'] = portgroup_dict.pop('node_id', None)
portgroup = Portgroup(**api_utils.apply_jsonpatch(portgroup_dict,
patch))
except api_utils.JSONPATCH_EXCEPTIONS as e:
raise exception.PatchError(patch=patch, reason=e)
portgroup_dict = rpc_portgroup.as_dict()
# NOTE:
# 1) Remove node_id because it's an internal value and
# not present in the API object
# 2) Add node_uuid
portgroup_dict['node_uuid'] = portgroup_dict.pop('node_id', None)
portgroup = Portgroup(**api_utils.apply_jsonpatch(portgroup_dict,
patch))
api_utils.handle_patch_port_like_extra_vif(rpc_portgroup, portgroup,
patch)


@ -42,10 +42,10 @@ from ironic import objects
CONF = cfg.CONF
JSONPATCH_EXCEPTIONS = (jsonpatch.JsonPatchException,
jsonpatch.JsonPointerException,
KeyError,
IndexError)
_JSONPATCH_EXCEPTIONS = (jsonpatch.JsonPatchException,
jsonpatch.JsonPointerException,
KeyError,
IndexError)
# Minimum API version to use for certain verbs
@ -96,7 +96,7 @@ def validate_sort_dir(sort_dir):
return sort_dir
def validate_trait(trait, error_prefix='Invalid trait'):
def validate_trait(trait, error_prefix=_('Invalid trait')):
error = wsme.exc.ClientSideError(
_('%(error_prefix)s. A valid trait must be no longer than 255 '
'characters. Standard traits are defined in the os_traits library. '
@ -117,13 +117,32 @@ def validate_trait(trait, error_prefix='Invalid trait'):
def apply_jsonpatch(doc, patch):
"""Apply a JSON patch, one operation at a time.
If the patch fails to apply, this allows us to determine which operation
failed, making the error message a little less cryptic.
:param doc: The JSON document to patch.
:param patch: The JSON patch to apply.
:returns: The result of the patch operation.
:raises: PatchError if the patch fails to apply.
:raises: wsme.exc.ClientSideError if the patch adds a new root attribute.
"""
# Prevent removal of root attributes.
for p in patch:
if p['op'] == 'add' and p['path'].count('/') == 1:
if p['path'].lstrip('/') not in doc:
msg = _('Adding a new attribute (%s) to the root of '
'the resource is not allowed')
raise wsme.exc.ClientSideError(msg % p['path'])
return jsonpatch.apply_patch(doc, jsonpatch.JsonPatch(patch))
# Apply operations one at a time, to improve error reporting.
for patch_op in patch:
try:
doc = jsonpatch.apply_patch(doc, jsonpatch.JsonPatch([patch_op]))
except _JSONPATCH_EXCEPTIONS as e:
raise exception.PatchError(patch=patch_op, reason=e)
return doc
def get_patch_values(patch, path):
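
For context, a short self-contained sketch of the per-operation behaviour the reworked apply_jsonpatch introduces (illustrative only; PatchError below is a stand-in for ironic's exception class, and the helper is simplified from the change above):

    import jsonpatch

    class PatchError(Exception):
        """Stand-in for ironic.common.exception.PatchError."""

    def apply_jsonpatch(doc, patch):
        # Apply operations one at a time so a failure identifies the exact op.
        for patch_op in patch:
            try:
                doc = jsonpatch.apply_patch(doc, jsonpatch.JsonPatch([patch_op]))
            except (jsonpatch.JsonPatchException,
                    jsonpatch.JsonPointerException,
                    KeyError, IndexError) as e:
                raise PatchError('cannot apply %s: %s' % (patch_op, e))
        return doc

    doc = {'foo': {'bar': 'baz'}}
    print(apply_jsonpatch(doc, [{'op': 'add', 'path': '/foo/answer', 'value': 42}]))
    # {'foo': {'bar': 'baz', 'answer': 42}}
    apply_jsonpatch(doc, [{'op': 'remove', 'path': '/missing'}])
    # raises PatchError naming only the failing remove operation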


@ -421,17 +421,14 @@ class VolumeConnectorsController(rest.RestController):
rpc_connector = objects.VolumeConnector.get_by_uuid(context,
connector_uuid)
try:
connector_dict = rpc_connector.as_dict()
# NOTE(smoriya):
# 1) Remove node_id because it's an internal value and
# not present in the API object
# 2) Add node_uuid
connector_dict['node_uuid'] = connector_dict.pop('node_id', None)
connector = VolumeConnector(
**api_utils.apply_jsonpatch(connector_dict, patch))
except api_utils.JSONPATCH_EXCEPTIONS as e:
raise exception.PatchError(patch=patch, reason=e)
connector_dict = rpc_connector.as_dict()
# NOTE(smoriya):
# 1) Remove node_id because it's an internal value and
# not present in the API object
# 2) Add node_uuid
connector_dict['node_uuid'] = connector_dict.pop('node_id', None)
connector = VolumeConnector(
**api_utils.apply_jsonpatch(connector_dict, patch))
# Update only the fields that have changed.
for field in objects.VolumeConnector.fields:


@ -432,17 +432,14 @@ class VolumeTargetsController(rest.RestController):
raise exception.InvalidUUID(message=message)
rpc_target = objects.VolumeTarget.get_by_uuid(context, target_uuid)
try:
target_dict = rpc_target.as_dict()
# NOTE(smoriya):
# 1) Remove node_id because it's an internal value and
# not present in the API object
# 2) Add node_uuid
target_dict['node_uuid'] = target_dict.pop('node_id', None)
target = VolumeTarget(
**api_utils.apply_jsonpatch(target_dict, patch))
except api_utils.JSONPATCH_EXCEPTIONS as e:
raise exception.PatchError(patch=patch, reason=e)
target_dict = rpc_target.as_dict()
# NOTE(smoriya):
# 1) Remove node_id because it's an internal value and
# not present in the API object
# 2) Add node_uuid
target_dict['node_uuid'] = target_dict.pop('node_id', None)
target = VolumeTarget(
**api_utils.apply_jsonpatch(target_dict, patch))
# Update only the fields that have changed.
for field in objects.VolumeTarget.fields:


@ -785,25 +785,59 @@ def _get_deployment_templates(task):
instance_traits)
def _get_steps_from_deployment_templates(task):
def _get_steps_from_deployment_templates(task, templates):
"""Get deployment template steps for task.node.
Given a list of deploy template objects, return a list of all deploy steps
combined.
:param task: A TaskManager object
:param templates: a list of deploy templates
:returns: A list of deploy step dictionaries
"""
steps = []
# NOTE(mgoddard): The steps from the object include id, created_at, etc.,
# which we don't want to include when we assign them to
# node.driver_internal_info. Include only the relevant fields.
step_fields = ('interface', 'step', 'args', 'priority')
for template in templates:
steps.extend([{key: step[key] for key in step_fields}
for step in template.steps])
return steps
def _get_validated_steps_from_templates(task):
"""Return a list of validated deploy steps from deploy templates.
Deployment template steps are those steps defined in deployment templates
where the name of the deployment template matches one of the node's
instance traits (the subset of the node's traits requested by the user via
a flavor or image). There may be many such matching templates, each with a
list of steps to execute.
This method gathers the steps from all matching deploy templates for a
node, and validates those steps against the node's driver interfaces,
raising an error if validation fails.
:param task: A TaskManager object
:returns: A list of deploy step dictionaries
:raises: InvalidParameterValue if validation of steps fails.
:raises: InstanceDeployFailure if there was a problem getting the
deploy steps.
:returns: A list of validated deploy step dictionaries
"""
# Gather deploy templates matching the node's instance traits.
templates = _get_deployment_templates(task)
steps = []
step_fields = ('interface', 'step', 'args', 'priority')
for template in templates:
steps.extend([{key: step[key] for key in step_fields}
for step in template.steps])
return steps
# Gather deploy steps from deploy templates.
user_steps = _get_steps_from_deployment_templates(task, templates)
# Validate the steps.
error_prefix = (_('Validation of deploy steps from deploy templates '
'matching this node\'s instance traits failed. Matching '
'deploy templates: %(templates)s. Errors: ') %
{'templates': ','.join(t.name for t in templates)})
return _validate_user_deploy_steps(task, user_steps,
error_prefix=error_prefix)
def _get_all_deployment_steps(task):
@ -817,8 +851,11 @@ def _get_all_deployment_steps(task):
deploy steps.
:returns: A list of deploy step dictionaries
"""
# Gather deploy steps from deploy templates.
user_steps = _get_steps_from_deployment_templates(task)
# Gather deploy steps from deploy templates and validate.
# NOTE(mgoddard): although we've probably just validated the templates in
# do_node_deploy, they may have changed in the DB since we last checked, so
# validate again.
user_steps = _get_validated_steps_from_templates(task)
# Gather enabled deploy steps from drivers.
driver_steps = _get_deployment_steps(task, enabled=True, sort=False)
@ -852,6 +889,17 @@ def set_node_deployment_steps(task):
node.save()
def _step_id(step):
"""Return the 'ID' of a deploy step.
The ID is a string, <interface>.<step>.
:param step: the step dictionary.
:return: the step's ID string.
"""
return '.'.join([step['interface'], step['step']])
def _validate_deploy_steps_unique(user_steps):
"""Validate that deploy steps from deploy templates are unique.
@ -860,28 +908,29 @@ def _validate_deploy_steps_unique(user_steps):
{ 'interface': <driver_interface>,
'step': <name_of_step>,
'args': {<arg1>: <value1>, ..., <argn>: <valuen>} }
'args': {<arg1>: <value1>, ..., <argn>: <valuen>},
'priority': <priority_of_step> }
For example::
For example::
{ 'interface': deploy',
'step': 'upgrade_firmware',
'args': {'force': True} }
'args': {'force': True},
'priority': 10 }
:return: a list of validation error strings for the steps.
"""
# Check for duplicate steps. Each interface/step combination can be
# specified at most once.
errors = []
counter = collections.Counter((step['interface'], step['step'])
for step in user_steps)
for (interface, step), count in counter.items():
if count > 1:
err = (_('duplicate deploy steps for %(interface)s.%(step)s. '
'Deploy steps from all deploy templates matching a '
'node\'s instance traits cannot have the same interface '
'and step') %
{'interface': interface, 'step': step})
errors.append(err)
counter = collections.Counter(_step_id(step) for step in user_steps)
duplicates = {step_id for step_id, count in counter.items() if count > 1}
if duplicates:
err = (_('deploy steps from all deploy templates matching this '
'node\'s instance traits cannot have the same interface '
'and step. Duplicate deploy steps for %(duplicates)s') %
{'duplicates': ', '.join(duplicates)})
errors.append(err)
return errors
@ -897,12 +946,14 @@ def _validate_user_step(task, user_step, driver_step, step_type):
'args': {<arg1>: <value1>, ..., <argn>: <valuen>},
'priority': <optional_priority> }
For example::
For example::
{ 'interface': deploy',
'step': 'upgrade_firmware',
'args': {'force': True} }
:param driver_step: a driver step dictionary::
{ 'interface': <driver_interface>,
'step': <name_of_step>,
'priority': <integer>
@ -912,9 +963,9 @@ def _validate_user_step(task, user_step, driver_step, step_type):
{<arg_name>:<arg_info_dict>} entries.
<arg_info_dict> is a dictionary with
{ 'description': <description>,
'required': <Boolean> }
}
For example::
'required': <Boolean> } }
For example::
{ 'interface': deploy',
'step': 'upgrade_firmware',
@ -923,6 +974,7 @@ def _validate_user_step(task, user_step, driver_step, step_type):
'argsinfo': {
'force': { 'description': 'Whether to force the upgrade',
'required': False } } }
:param step_type: either 'clean' or 'deploy'.
:return: a list of validation error strings for the step.
"""
@ -930,12 +982,12 @@ def _validate_user_step(task, user_step, driver_step, step_type):
# Check that the user-specified arguments are valid
argsinfo = driver_step.get('argsinfo') or {}
user_args = user_step.get('args') or {}
invalid = set(user_args) - set(argsinfo)
if invalid:
error = (_('%(type)s step %(step)s has these invalid arguments: '
'%(invalid)s') %
unexpected = set(user_args) - set(argsinfo)
if unexpected:
error = (_('%(type)s step %(step)s has these unexpected arguments: '
'%(unexpected)s') %
{'type': step_type, 'step': user_step,
'invalid': ', '.join(invalid)})
'unexpected': ', '.join(unexpected)})
errors.append(error)
if step_type == 'clean' or user_step['priority'] > 0:
@ -949,7 +1001,7 @@ def _validate_user_step(task, user_step, driver_step, step_type):
missing.append(msg)
if missing:
error = (_('%(type)s step %(step)s is missing these required '
'keyword arguments: %(miss)s') %
'arguments: %(miss)s') %
{'type': step_type, 'step': user_step,
'miss': ', '.join(missing)})
errors.append(error)
@ -960,19 +1012,24 @@ def _validate_user_step(task, user_step, driver_step, step_type):
user_step['priority'] = driver_step.get('priority', 0)
elif user_step['priority'] > 0:
# 'core' deploy steps can only be disabled.
# NOTE(mgoddard): we'll need something a little more sophisticated to
# track core steps once we split out the single core step.
is_core = (driver_step['interface'] == 'deploy' and
driver_step['step'] == 'deploy')
if is_core:
error = (_('deploy step %(step)s is a core step and '
'cannot be overridden by user steps. It may be '
'disabled by setting the priority to 0')
% {'step': user_step})
error = (_('deploy step %(step)s on interface %(interface)s is a '
'core step and cannot be overridden by user steps. It '
'may be disabled by setting the priority to 0') %
{'step': user_step['step'],
'interface': user_step['interface']})
errors.append(error)
return errors
def _validate_user_steps(task, user_steps, driver_steps, step_type):
def _validate_user_steps(task, user_steps, driver_steps, step_type,
error_prefix=None):
"""Validate the user-specified steps.
:param task: A TaskManager object
@ -985,12 +1042,14 @@ def _validate_user_steps(task, user_steps, driver_steps, step_type):
'args': {<arg1>: <value1>, ..., <argn>: <valuen>},
'priority': <optional_priority> }
For example::
For example::
{ 'interface': deploy',
'step': 'upgrade_firmware',
'args': {'force': True} }
:param driver_steps: a list of driver steps::
{ 'interface': <driver_interface>,
'step': <name_of_step>,
'priority': <integer>
@ -1000,9 +1059,9 @@ def _validate_user_steps(task, user_steps, driver_steps, step_type):
{<arg_name>:<arg_info_dict>} entries.
<arg_info_dict> is a dictionary with
{ 'description': <description>,
'required': <Boolean> }
}
For example::
'required': <Boolean> } }
For example::
{ 'interface': deploy',
'step': 'upgrade_firmware',
@ -1011,25 +1070,25 @@ def _validate_user_steps(task, user_steps, driver_steps, step_type):
'argsinfo': {
'force': { 'description': 'Whether to force the upgrade',
'required': False } } }
:param step_type: either 'clean' or 'deploy'.
:param error_prefix: String to use as a prefix for exception messages, or
None.
:raises: InvalidParameterValue if validation of steps fails.
:raises: NodeCleaningFailure or InstanceDeployFailure if
there was a problem getting the steps from the driver.
:return: validated steps updated with information from the driver
"""
def step_id(step):
return '.'.join([step['step'], step['interface']])
errors = []
# Convert driver steps to a dict.
driver_steps = {step_id(s): s for s in driver_steps}
driver_steps = {_step_id(s): s for s in driver_steps}
for user_step in user_steps:
# Check if this user_specified step isn't supported by the driver
# Check if this user-specified step isn't supported by the driver
try:
driver_step = driver_steps[step_id(user_step)]
driver_step = driver_steps[_step_id(user_step)]
except KeyError:
error = (_('node does not support this %(type)s step: %(step)s')
% {'type': step_type, 'step': user_step})
@ -1046,9 +1105,11 @@ def _validate_user_steps(task, user_steps, driver_steps, step_type):
errors.extend(dup_errors)
if errors:
raise exception.InvalidParameterValue('; '.join(errors))
err = error_prefix or ''
err += '; '.join(errors)
raise exception.InvalidParameterValue(err=err)
return user_steps[:]
return user_steps
def _validate_user_clean_steps(task, user_steps):
@ -1076,7 +1137,7 @@ def _validate_user_clean_steps(task, user_steps):
return _validate_user_steps(task, user_steps, driver_steps, 'clean')
def _validate_user_deploy_steps(task, user_steps):
def _validate_user_deploy_steps(task, user_steps, error_prefix=None):
"""Validate the user-specified deploy steps.
:param task: A TaskManager object
@ -1094,13 +1155,16 @@ def _validate_user_deploy_steps(task, user_steps):
'step': 'apply_configuration',
'args': { 'settings': [ { 'foo': 'bar' } ] },
'priority': 150 }
:param error_prefix: String to use as a prefix for exception messages, or
None.
:raises: InvalidParameterValue if validation of deploy steps fails.
:raises: InstanceDeployFailure if there was a problem getting the deploy
steps from the driver.
:return: validated deploy steps update with information from the driver
"""
driver_steps = _get_deployment_steps(task, enabled=False, sort=False)
return _validate_user_steps(task, user_steps, driver_steps, 'deploy')
return _validate_user_steps(task, user_steps, driver_steps, 'deploy',
error_prefix=error_prefix)
@task_manager.require_exclusive_lock
@ -1315,9 +1379,8 @@ def validate_deploy_templates(task):
:raises: InstanceDeployFailure if there was a problem getting the deploy
steps from the driver.
"""
# Gather deploy steps from matching deploy templates, validate them.
user_steps = _get_steps_from_deployment_templates(task)
_validate_user_deploy_steps(task, user_steps)
# Gather deploy steps from matching deploy templates and validate them.
_get_validated_steps_from_templates(task)
def build_configdrive(node, configdrive):


@ -175,19 +175,16 @@ class TestListDeployTemplates(BaseDeployTemplatesAPITest):
def test_detail_query_false(self):
obj_utils.create_test_deploy_template(self.context)
data1 = self.get_json(
'/deploy_templates',
headers={api_base.Version.string: str(api_v1.max_version())})
data1 = self.get_json('/deploy_templates', headers=self.headers)
data2 = self.get_json(
'/deploy_templates?detail=False',
headers={api_base.Version.string: str(api_v1.max_version())})
'/deploy_templates?detail=False', headers=self.headers)
self.assertEqual(data1['deploy_templates'], data2['deploy_templates'])
def test_detail_using_query_false_and_fields(self):
obj_utils.create_test_deploy_template(self.context)
data = self.get_json(
'/deploy_templates?detail=False&fields=steps',
headers={api_base.Version.string: str(api_v1.max_version())})
headers=self.headers)
self.assertIn('steps', data['deploy_templates'][0])
self.assertNotIn('uuid', data['deploy_templates'][0])
self.assertNotIn('extra', data['deploy_templates'][0])
@ -195,8 +192,7 @@ class TestListDeployTemplates(BaseDeployTemplatesAPITest):
def test_detail_using_query_and_fields(self):
obj_utils.create_test_deploy_template(self.context)
response = self.get_json(
'/deploy_templates?detail=True&fields=name',
headers={api_base.Version.string: str(api_v1.max_version())},
'/deploy_templates?detail=True&fields=name', headers=self.headers,
expect_errors=True)
self.assertEqual(http_client.BAD_REQUEST, response.status_int)
@ -397,7 +393,14 @@ class TestPatch(BaseDeployTemplatesAPITest):
def test_update_name_standard_trait(self, mock_save):
name = 'HW_CPU_X86_VMX'
patch = [{'path': '/name', 'value': name, 'op': 'replace'}]
self._test_update_ok(mock_save, patch)
response = self._test_update_ok(mock_save, patch)
self.assertEqual(name, response.json['name'])
def test_update_name_custom_trait(self, mock_save):
name = 'CUSTOM_DT2'
patch = [{'path': '/name', 'value': name, 'op': 'replace'}]
response = self._test_update_ok(mock_save, patch)
self.assertEqual(name, response.json['name'])
def test_update_invalid_name(self, mock_save):
self._test_update_bad_request(
@ -441,12 +444,6 @@ class TestPatch(BaseDeployTemplatesAPITest):
self.assertTrue(response.json['error_message'])
self.assertFalse(mock_save.called)
def test_replace_singular(self, mock_save):
name = 'CUSTOM_DT2'
patch = [{'path': '/name', 'value': name, 'op': 'replace'}]
response = self._test_update_ok(mock_save, patch)
self.assertEqual(name, response.json['name'])
@mock.patch.object(notification_utils, '_emit_api_notification',
autospec=True)
def test_replace_name_already_exist(self, mock_notify, mock_save):
@ -582,7 +579,7 @@ class TestPatch(BaseDeployTemplatesAPITest):
patch = []
for i, step in enumerate(steps):
patch.append({'path': '/steps/%s' % i,
'value': steps[i],
'value': step,
'op': 'replace'})
response = self.patch_json('/deploy_templates/%s' % template.uuid,
patch, headers=self.headers)


@ -86,6 +86,36 @@ class TestApiUtils(base.TestCase):
"spongebob",
utils.validate_trait, "invalid", "spongebob")
def test_apply_jsonpatch(self):
doc = {"foo": {"bar": "baz"}}
patch = [{"op": "add", "path": "/foo/answer", "value": 42}]
result = utils.apply_jsonpatch(doc, patch)
expected = {"foo": {"bar": "baz", "answer": 42}}
self.assertEqual(expected, result)
def test_apply_jsonpatch_no_add_root_attr(self):
doc = {}
patch = [{"op": "add", "path": "/foo", "value": 42}]
self.assertRaisesRegex(wsme.exc.ClientSideError,
"Adding a new attribute",
utils.apply_jsonpatch, doc, patch)
def test_apply_jsonpatch_remove_non_existent(self):
# Raises a KeyError.
doc = {}
patch = [{"op": "remove", "path": "/foo"}]
self.assertRaisesRegex(exception.PatchError,
"can't remove non-existent object 'foo'",
utils.apply_jsonpatch, doc, patch)
def test_apply_jsonpatch_replace_non_existent_list_item(self):
# Raises an IndexError.
doc = []
patch = [{"op": "replace", "path": "/0", "value": 42}]
self.assertRaisesRegex(exception.PatchError,
"list assignment index out of range",
utils.apply_jsonpatch, doc, patch)
def test_get_patch_values_no_path(self):
patch = [{'path': '/name', 'op': 'update', 'value': 'node-0'}]
path = '/invalid'


@ -1078,7 +1078,7 @@ class NodeDeployStepsTestCase(db_base.DbTestCase):
traits = ['CUSTOM_DT1', 'CUSTOM_DT2']
node = obj_utils.create_test_node(
self.context, uuid=uuidutils.generate_uuid(),
driver='fake-hardware', instance_info={'traits': traits})
instance_info={'traits': traits})
template1 = obj_utils.get_test_deploy_template(self.context)
template2 = obj_utils.get_test_deploy_template(
self.context, name='CUSTOM_DT2', uuid=uuidutils.generate_uuid(),
@ -1092,15 +1092,12 @@ class NodeDeployStepsTestCase(db_base.DbTestCase):
self.assertEqual(expected, templates)
mock_list.assert_called_once_with(task.context, traits)
@mock.patch.object(conductor_utils, '_get_deployment_templates',
autospec=True)
def test__get_steps_from_deployment_templates(self, mock_templates):
def test__get_steps_from_deployment_templates(self):
template1 = obj_utils.get_test_deploy_template(self.context)
template2 = obj_utils.get_test_deploy_template(
self.context, name='CUSTOM_DT2', uuid=uuidutils.generate_uuid(),
steps=[{'interface': 'bios', 'step': 'apply_configuration',
'args': {}, 'priority': 1}])
mock_templates.return_value = [template1, template2]
step1 = template1.steps[0]
step2 = template2.steps[0]
expected = [
@ -1119,24 +1116,25 @@ class NodeDeployStepsTestCase(db_base.DbTestCase):
]
with task_manager.acquire(
self.context, self.node.uuid, shared=False) as task:
steps = conductor_utils._get_steps_from_deployment_templates(task)
steps = conductor_utils._get_steps_from_deployment_templates(
task, [template1, template2])
self.assertEqual(expected, steps)
mock_templates.assert_called_once_with(task)
@mock.patch.object(conductor_utils, '_get_steps_from_deployment_templates',
@mock.patch.object(conductor_utils, '_get_validated_steps_from_templates',
autospec=True)
@mock.patch.object(conductor_utils, '_get_deployment_steps', autospec=True)
def _test__get_all_deployment_steps(self, user_steps, driver_steps,
expected_steps, mock_gds, mock_gsfdt):
mock_gsfdt.return_value = user_steps
mock_gds.return_value = driver_steps
expected_steps, mock_steps,
mock_validated):
mock_validated.return_value = user_steps
mock_steps.return_value = driver_steps
with task_manager.acquire(
self.context, self.node.uuid, shared=False) as task:
steps = conductor_utils._get_all_deployment_steps(task)
self.assertEqual(expected_steps, steps)
mock_gsfdt.assert_called_once_with(task)
mock_gds.assert_called_once_with(task, enabled=True, sort=False)
mock_validated.assert_called_once_with(task)
mock_steps.assert_called_once_with(task, enabled=True, sort=False)
def test__get_all_deployment_steps_no_steps(self):
# Nothing in -> nothing out.
@ -1206,6 +1204,19 @@ class NodeDeployStepsTestCase(db_base.DbTestCase):
self._test__get_all_deployment_steps(user_steps, driver_steps,
expected_steps)
@mock.patch.object(conductor_utils, '_get_validated_steps_from_templates',
autospec=True)
@mock.patch.object(conductor_utils, '_get_deployment_steps', autospec=True)
def test__get_all_deployment_steps_error(self, mock_steps, mock_validated):
mock_validated.side_effect = exception.InvalidParameterValue('foo')
with task_manager.acquire(
self.context, self.node.uuid, shared=False) as task:
self.assertRaises(exception.InvalidParameterValue,
conductor_utils._get_all_deployment_steps, task)
mock_validated.assert_called_once_with(task)
self.assertFalse(mock_steps.called)
@mock.patch.object(conductor_utils, '_get_all_deployment_steps',
autospec=True)
def test_set_node_deployment_steps(self, mock_steps):
@ -1280,7 +1291,7 @@ class NodeDeployStepsTestCase(db_base.DbTestCase):
with task_manager.acquire(self.context, self.node.uuid) as task:
self.assertRaisesRegex(exception.InvalidParameterValue,
"power_one.*invalid.*arg1",
"power_one.*unexpected.*arg1",
conductor_utils._validate_user_deploy_steps,
task, user_steps)
mock_steps.assert_called_once_with(task, enabled=False, sort=False)
@ -1356,7 +1367,7 @@ class NodeDeployStepsTestCase(db_base.DbTestCase):
with task_manager.acquire(self.context, self.node.uuid) as task:
self.assertRaisesRegex(exception.InvalidParameterValue,
"duplicate deploy steps for "
"Duplicate deploy steps for "
"power.power_one",
conductor_utils._validate_user_deploy_steps,
task, user_steps)
@ -1560,7 +1571,7 @@ class NodeCleaningStepsTestCase(db_base.DbTestCase):
with task_manager.acquire(self.context, node.uuid) as task:
self.assertRaisesRegex(exception.InvalidParameterValue,
"update_firmware.*invalid.*arg1",
"update_firmware.*unexpected.*arg1",
conductor_utils._validate_user_clean_steps,
task, user_steps)
mock_steps.assert_called_once_with(task, enabled=False, sort=False)
@ -2455,10 +2466,56 @@ class ValidateInstanceInfoTraitsTestCase(tests_base.TestCase):
self.node)
@mock.patch.object(conductor_utils, '_get_deployment_templates',
autospec=True)
@mock.patch.object(conductor_utils, '_get_steps_from_deployment_templates',
autospec=True)
@mock.patch.object(conductor_utils, '_validate_user_deploy_steps',
autospec=True)
class GetValidatedStepsFromTemplatesTestCase(db_base.DbTestCase):
def setUp(self):
super(GetValidatedStepsFromTemplatesTestCase, self).setUp()
self.node = obj_utils.create_test_node(self.context,
driver='fake-hardware')
self.template = obj_utils.get_test_deploy_template(self.context)
def test_ok(self, mock_validate, mock_steps, mock_templates):
mock_templates.return_value = [self.template]
steps = [db_utils.get_test_deploy_template_step()]
mock_steps.return_value = steps
mock_validate.return_value = steps
with task_manager.acquire(
self.context, self.node.uuid, shared=False) as task:
result = conductor_utils._get_validated_steps_from_templates(task)
self.assertEqual(steps, result)
mock_templates.assert_called_once_with(task)
mock_steps.assert_called_once_with(task, [self.template])
mock_validate.assert_called_once_with(task, steps, mock.ANY)
def test_invalid_parameter_value(self, mock_validate, mock_steps,
mock_templates):
mock_templates.return_value = [self.template]
mock_validate.side_effect = exception.InvalidParameterValue('fake')
with task_manager.acquire(
self.context, self.node.uuid, shared=False) as task:
self.assertRaises(
exception.InvalidParameterValue,
conductor_utils._get_validated_steps_from_templates, task)
def test_instance_deploy_failure(self, mock_validate, mock_steps,
mock_templates):
mock_templates.return_value = [self.template]
mock_validate.side_effect = exception.InstanceDeployFailure('foo')
with task_manager.acquire(
self.context, self.node.uuid, shared=False) as task:
self.assertRaises(
exception.InstanceDeployFailure,
conductor_utils._get_validated_steps_from_templates, task)
@mock.patch.object(conductor_utils, '_get_validated_steps_from_templates',
autospec=True)
class ValidateDeployTemplatesTestCase(db_base.DbTestCase):
def setUp(self):
@ -2466,26 +2523,17 @@ class ValidateDeployTemplatesTestCase(db_base.DbTestCase):
self.node = obj_utils.create_test_node(self.context,
driver='fake-hardware')
def test_validate_deploy_templates(self, mock_validate, mock_get):
steps = [db_utils.get_test_deploy_template_step()]
mock_get.return_value = steps
def test_ok(self, mock_validated):
with task_manager.acquire(
self.context, self.node.uuid, shared=False) as task:
conductor_utils.validate_deploy_templates(task)
mock_validate.assert_called_once_with(task, steps)
result = conductor_utils.validate_deploy_templates(task)
self.assertIsNone(result)
mock_validated.assert_called_once_with(task)
def test_validate_deploy_templates_invalid_parameter_value(
self, mock_validate, mock_get):
mock_validate.side_effect = exception.InvalidParameterValue('fake')
def test_error(self, mock_validated):
with task_manager.acquire(
self.context, self.node.uuid, shared=False) as task:
mock_validated.side_effect = exception.InvalidParameterValue('foo')
self.assertRaises(exception.InvalidParameterValue,
conductor_utils.validate_deploy_templates, task)
def test_validate_deploy_templates_instance_deploy_failure(
self, mock_validate, mock_get):
mock_validate.side_effect = exception.InstanceDeployFailure('foo')
with task_manager.acquire(
self.context, self.node.uuid, shared=False) as task:
self.assertRaises(exception.InstanceDeployFailure,
conductor_utils.validate_deploy_templates, task)
mock_validated.assert_called_once_with(task)


@ -5,8 +5,8 @@ features:
the node deployment process, each specifying a list of deploy steps to
execute with configurable priority and arguments.
Introduces the following new API endpoints, available from Bare Metal REST
API version 1.55:
Introduces the following new API endpoints, available from Bare Metal API
version 1.55:
* ``GET /v1/deploy_templates``
* ``GET /v1/deploy_templates/<deploy template identifier>``