Migrate from ironic-lib
Migrates used code from ironic-lib. Contains a binary pathing fix for grenade. Change-Id: I96c0057c78bf484b5fe3a708579a966cd9635e56
This commit is contained in:
parent
ecf549524d
commit
570f007b94
@ -137,7 +137,7 @@ function inspector_iniset {
|
||||
function install_inspector {
|
||||
setup_develop $IRONIC_INSPECTOR_DIR
|
||||
# Check if things look okay
|
||||
ironic-inspector-status upgrade check
|
||||
$IRONIC_INSPECTOR_BIN_DIR/ironic-inspector-status upgrade check
|
||||
if [[ "$IRONIC_INSPECTOR_STANDALONE" == "False" ]]; then
|
||||
install_apache_wsgi
|
||||
# NOTE(rpittau) since devstack doesn't install test-requirements
|
||||
|
204
ironic_inspector/common/auth_basic.py
Normal file
204
ironic_inspector/common/auth_basic.py
Normal file
@ -0,0 +1,204 @@
|
||||
# Copyright 2020 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import base64
|
||||
import binascii
|
||||
import logging
|
||||
|
||||
import bcrypt
|
||||
import webob
|
||||
|
||||
from ironic_inspector.common import exception
|
||||
from ironic_inspector.common.i18n import _
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BasicAuthMiddleware(object):
    """Middleware which performs HTTP basic authentication on requests

    Wraps a WSGI application and rejects requests whose Authorization
    header does not carry valid Basic credentials matching an entry in
    the configured Apache-style user file.
    """
    def __init__(self, app, auth_file):
        # Validate the auth file eagerly so malformed entries surface at
        # startup instead of on the first authenticated request.
        self.app = app
        self.auth_file = auth_file
        validate_auth_file(auth_file)

    def format_exception(self, e):
        # Render an IronicException as a JSON error response, carrying
        # over any exception-specific headers (e.g. WWW-Authenticate).
        result = {'error': {'message': str(e), 'code': e.code}}
        headers = list(e.headers.items()) + [
            ('Content-Type', 'application/json')
        ]
        return webob.Response(content_type='application/json',
                              status_code=e.code,
                              json_body=result,
                              headerlist=headers)

    def __call__(self, env, start_response):
        # Authenticate, then pass the request downstream with the
        # authenticated identity merged into the WSGI environment.
        try:
            token = parse_header(env)
            username, password = parse_token(token)
            env.update(authenticate(self.auth_file, username, password))

            return self.app(env, start_response)

        except exception.IronicException as e:
            response = self.format_exception(e)
            return response(env, start_response)
||||
def authenticate(auth_file, username, password):
    """Look up a username/password pair in an Apache style user auth file.

    The user auth file format is expected to comply with Apache
    documentation[1]; however the bcrypt password digest is the *only*
    digest format supported.

    [1] https://httpd.apache.org/docs/current/misc/password_encryptions.html

    :param: auth_file: Path to user auth file
    :param: username: Username to authenticate
    :param: password: Password encoded as bytes
    :returns: A dictionary of WSGI environment values to append to the request
    :raises: Unauthorized, if no file entries match supplied username/password
    """
    prefix = '%s:' % username
    try:
        with open(auth_file, 'r') as user_file:
            for raw_line in user_file:
                candidate = raw_line.strip()
                if not candidate:
                    continue
                if candidate.startswith(prefix):
                    return auth_entry(candidate, password)
    except OSError as exc:
        LOG.error('Problem reading auth user file: %s', exc)
        raise exception.ConfigInvalid(
            error_msg=_('Problem reading auth user file'))

    # No entry matched the supplied username: reject the request.
    LOG.info('User %s not found', username)
    unauthorized()
||||
def auth_entry(entry, password):
    """Compare a password with a single user auth file entry.

    :param: entry: Line from auth user file to use for authentication
    :param: password: Password encoded as bytes
    :raises: Unauthorized, if the entry doesn't match supplied password or
        if the entry is crypted with a method other than bcrypt
    :returns: A dictionary of WSGI environment values to append to the request
    """
    user, digest = parse_entry(entry)

    if bcrypt.checkpw(password, digest):
        # Expose the authenticated identity to downstream WSGI apps.
        return {
            'HTTP_X_USER': user,
            'HTTP_X_USER_NAME': user
        }

    LOG.info('Password for %s does not match', user)
    unauthorized()
||||
def validate_auth_file(auth_file):
    """Read the auth user file and validate its correctness.

    :param: auth_file: Path to user auth file
    :raises: ConfigInvalid on validation error
    """
    try:
        with open(auth_file, 'r') as user_file:
            for raw_line in user_file:
                candidate = raw_line.strip()
                # Only lines shaped like "user:digest" count as entries;
                # parse_entry raises ConfigInvalid on bad digests.
                if candidate and ':' in candidate:
                    parse_entry(candidate)
    except OSError:
        raise exception.ConfigInvalid(
            error_msg=_('Problem reading auth user file: %s') % auth_file)
||||
def parse_entry(entry):
    """Extract the username and crypted password from a user auth file entry.

    :param: entry: Line from auth user file to use for authentication
    :returns: a tuple of username and crypted password
    :raises: ConfigInvalid if the password is not in the supported bcrypt
        format
    """
    username, crypted_str = entry.split(':', maxsplit=1)
    digest = crypted_str.encode('utf-8')

    # bcrypt digests are recognised by their "$2?$" version prefix.
    if not digest.startswith((b'$2y$', b'$2a$', b'$2b$')):
        error_msg = _('Only bcrypt digested passwords are supported for '
                      '%(username)s') % {'username': username}
        raise exception.ConfigInvalid(error_msg=error_msg)

    return username, digest
||||
def parse_token(token):
    """Parse the token portion of the Authentication header value.

    :param: token: Token value from basic authorization header
    :returns: tuple of username, password
    :raises: Unauthorized, if username and password could not be parsed for
        any reason
    """
    try:
        # Accept both str and bytes tokens; b64decode needs bytes.
        raw = token.encode('utf-8') if isinstance(token, str) else token
        decoded = base64.b64decode(raw, validate=True)
        # Only the first ':' separates the username; passwords may
        # themselves contain colons.
        name_bytes, password = decoded.split(b':', maxsplit=1)
        return name_bytes.decode('utf-8'), password
    except (TypeError, binascii.Error, ValueError) as exc:
        LOG.info('Could not decode authorization token: %s', exc)
        raise exception.BadRequest(_('Could not decode authorization token'))
||||
def parse_header(env):
    """Parse WSGI environment for an Authorization header of type Basic.

    The header is removed from the environment as a side effect.

    :param: env: WSGI environment to get header from
    :returns: Token portion of the header value
    :raises: Unauthorized, if header is missing or if the type is not Basic
    """
    try:
        header = env.pop('HTTP_AUTHORIZATION')
    except KeyError:
        LOG.info('No authorization token received')
        unauthorized(_('Authorization required'))

    try:
        auth_type, token = header.strip().split(maxsplit=1)
    except (ValueError, AttributeError) as exc:
        LOG.info('Could not parse Authorization header: %s', exc)
        raise exception.BadRequest(_('Could not parse Authorization header'))

    # Only the Basic scheme is handled by this middleware.
    if auth_type.lower() != 'basic':
        msg = _('Unsupported authorization type "%s"') % auth_type
        LOG.info(msg)
        raise exception.BadRequest(msg)

    return token
||||
def unauthorized(message=None):
    """Raise an Unauthorized exception to prompt for basic authentication.

    :param: message: Optional message for exception
    :raises: Unauthorized with WWW-Authenticate header set
    """
    # Fall back to a generic message so callers never leak detail about
    # whether the user or the password was wrong.
    raise exception.Unauthorized(
        message or _('Incorrect username or password'))
322
ironic_inspector/common/device_hints.py
Normal file
322
ironic_inspector/common/device_hints.py
Normal file
@ -0,0 +1,322 @@
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
import logging
|
||||
import re
|
||||
from urllib import parse as urlparse
|
||||
|
||||
from oslo_utils import specs_matcher
|
||||
from oslo_utils import strutils
|
||||
from oslo_utils import units
|
||||
|
||||
from ironic_inspector.common.i18n import _
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# A dictionary in the form {hint name: hint type}
|
||||
VALID_ROOT_DEVICE_HINTS = {
|
||||
'size': int, 'model': str, 'wwn': str, 'serial': str, 'vendor': str,
|
||||
'wwn_with_extension': str, 'wwn_vendor_extension': str, 'name': str,
|
||||
'rotational': bool, 'hctl': str, 'by_path': str,
|
||||
}
|
||||
|
||||
|
||||
ROOT_DEVICE_HINTS_GRAMMAR = specs_matcher.make_grammar()
|
||||
|
||||
|
||||
def _extract_hint_operator_and_values(hint_expression, hint_name):
    """Extract the operator and value(s) of a root device hint expression.

    A root device hint expression may contain one or more values depending
    on its operator; this splits the expression into both parts.

    :param hint_expression: The hint expression string containing value(s)
        and operator (optionally).
    :param hint_name: The name of the hint. Used for logging.
    :raises: ValueError if the hint_expression is empty.
    :returns: A dictionary containing:

        :op: The operator. An empty string in case of None.
        :values: A list of values stripped and converted to lowercase.
    """
    expr = str(hint_expression).strip().lower()
    if not expr:
        raise ValueError(
            _('Root device hint "%s" expression is empty') % hint_name)

    # The grammar tokenizes the expression; when an operator is present
    # it is always the first token produced.
    tokens = ROOT_DEVICE_HINTS_GRAMMAR.parseString(expr)
    if len(tokens) > 1:
        operator = tokens[0]
        pieces = re.split(operator, expr)
        return {'op': operator,
                'values': [piece.strip() for piece in pieces if piece]}

    # No operator present: the whole expression is the single value.
    return {'op': '', 'values': [expr]}
||||
def _normalize_hint_expression(hint_expression, hint_name):
    """Normalize a string type hint expression.

    A string-type hint expression contains one or more operators and one
    or more values: [<op>] <value> [<op> <value>]*. The values are
    normalized by url-encoding white space and special characters; the
    operators are left untouched. For example: "<or> foo bar <or> bar"
    becomes "<or> foo%20bar <or> bar".

    :param hint_expression: The hint expression string containing value(s)
        and operator (optionally).
    :param hint_name: The name of the hint. Used for logging.
    :raises: ValueError if the hint_expression is empty.
    :returns: A normalized string.
    """
    parsed = _extract_hint_operator_and_values(hint_expression, hint_name)
    op = parsed['op']
    quoted = [' %s ' % urlparse.quote(value) for value in parsed['values']]
    # Re-join the quoted values with the original operator, restoring the
    # leading operator if one was present.
    return (op + op.join(quoted)).strip()
||||
def _append_operator_to_hints(root_device):
    """Add an equality (s== or ==) operator to operator-less hints.

    For backwards compatibility, a root device hint without an operator
    means equality; this adds the explicit operator expected by the
    oslo.utils.specs_matcher methods.

    :param root_device: The root device hints dictionary.
    """
    for hint_name, hint_expr in root_device.items():
        # NOTE(lucasagomes): The specs_matcher from oslo.utils does not
        # support boolean, so we don't need to append any operator
        # for it.
        if VALID_ROOT_DEVICE_HINTS[hint_name] is bool:
            continue

        hint_expr = str(hint_expr)
        # More than one parsed token means an operator is already present.
        if len(ROOT_DEVICE_HINTS_GRAMMAR.parseString(hint_expr)) > 1:
            continue

        if VALID_ROOT_DEVICE_HINTS[hint_name] is str:
            root_device[hint_name] = 's== %s' % hint_expr
        else:
            root_device[hint_name] = '== %s' % hint_expr

    return root_device
||||
def parse_root_device_hints(root_device):
    """Parse the root_device property of a node.

    Parses and validates the root_device property of a node. These are
    hints for how a node's root device is created. The 'size' hint
    should be a positive integer. The 'rotational' hint should be a
    Boolean value.

    :param root_device: the root_device dictionary from the node's property.
    :returns: a dictionary with the root device hints parsed or
        None if there are no hints.
    :raises: ValueError, if some information is invalid.

    """
    if not root_device:
        return

    # Values are rewritten in place below; work on a copy so the caller's
    # dictionary is left untouched.
    root_device = copy.deepcopy(root_device)

    invalid_hints = set(root_device) - set(VALID_ROOT_DEVICE_HINTS)
    if invalid_hints:
        raise ValueError(
            _('The hints "%(invalid_hints)s" are invalid. '
              'Valid hints are: "%(valid_hints)s"') %
            {'invalid_hints': ', '.join(invalid_hints),
             'valid_hints': ', '.join(VALID_ROOT_DEVICE_HINTS)})

    # Validate (and for str/bool hints, normalize) each value according
    # to the declared hint type.
    for name, expression in root_device.items():
        hint_type = VALID_ROOT_DEVICE_HINTS[name]
        if hint_type is str:
            if not isinstance(expression, str):
                raise ValueError(
                    _('Root device hint "%(name)s" is not a string value. '
                      'Hint expression: %(expression)s') %
                    {'name': name, 'expression': expression})
            root_device[name] = _normalize_hint_expression(expression, name)

        elif hint_type is int:
            # An int hint may carry several values behind one operator;
            # every one of them must be a positive integer.
            for v in _extract_hint_operator_and_values(expression,
                                                       name)['values']:
                try:
                    integer = int(v)
                except ValueError:
                    raise ValueError(
                        _('Root device hint "%(name)s" is not an integer '
                          'value. Current value: %(expression)s') %
                        {'name': name, 'expression': expression})

                if integer <= 0:
                    raise ValueError(
                        _('Root device hint "%(name)s" should be a positive '
                          'integer. Current value: %(expression)s') %
                        {'name': name, 'expression': expression})

        elif hint_type is bool:
            try:
                root_device[name] = strutils.bool_from_string(
                    expression, strict=True)
            except ValueError:
                raise ValueError(
                    _('Root device hint "%(name)s" is not a Boolean value. '
                      'Current value: %(expression)s') %
                    {'name': name, 'expression': expression})

    return _append_operator_to_hints(root_device)
||||
def find_devices_by_hints(devices, root_device_hints):
    """Find all devices that match the root device hints.

    Try to find devices that match the root device hints. In order
    for a device to be matched it needs to satisfy all the given hints.

    :param devices: A list of dictionaries representing the devices
        containing one or more of the following keys:

        :name: (String) The device name, e.g /dev/sda
        :size: (Integer) Size of the device in *bytes*
        :model: (String) Device model
        :vendor: (String) Device vendor name
        :serial: (String) Device serial number
        :wwn: (String) Unique storage identifier
        :wwn_with_extension: (String): Unique storage identifier with
            the vendor extension appended
        :wwn_vendor_extension: (String): United vendor storage identifier
        :rotational: (Boolean) Whether it's a rotational device or
            not. Useful to distinguish HDDs (rotational) and SSDs
            (not rotational).
        :hctl: (String): The SCSI address: Host, channel, target and lun.
            For example: '1:0:0:0'.
        :by_path: (String): The alternative device name,
            e.g. /dev/disk/by-path/pci-0000:00

    :param root_device_hints: A dictionary with the root device hints.
    :raises: ValueError, if some information is invalid.
    :returns: A generator with all matching devices as dictionaries.
    """
    LOG.debug('Trying to find devices from "%(devs)s" that match the '
              'device hints "%(hints)s"',
              {'devs': ', '.join([d.get('name') for d in devices]),
               'hints': root_device_hints})
    parsed_hints = parse_root_device_hints(root_device_hints)
    # A device is yielded only if the inner loop completes without a
    # break, i.e. every hint matched (for/else idiom).
    for dev in devices:
        device_name = dev.get('name')

        for hint in parsed_hints:
            hint_type = VALID_ROOT_DEVICE_HINTS[hint]
            device_value = dev.get(hint)
            hint_value = parsed_hints[hint]

            if hint_type is str:
                # Normalize the device's attribute the same way the hint
                # was normalized so the comparison is apples-to-apples.
                try:
                    device_value = _normalize_hint_expression(device_value,
                                                              hint)
                except ValueError:
                    LOG.warning(
                        'The attribute "%(attr)s" of the device "%(dev)s" '
                        'has an empty value. Skipping device.',
                        {'attr': hint, 'dev': device_name})
                    break

            if hint == 'size':
                # Since we don't support units yet we expect the size
                # in GiB for now
                device_value = device_value / units.Gi

            LOG.debug('Trying to match the device hint "%(hint)s" '
                      'with a value of "%(hint_value)s" against the same '
                      'device\'s (%(dev)s) attribute with a value of '
                      '"%(dev_value)s"', {'hint': hint, 'dev': device_name,
                                          'hint_value': hint_value,
                                          'dev_value': device_value})

            # NOTE(lucasagomes): Boolean hints are not supported by
            # specs_matcher.match(), so we need to do the comparison
            # ourselves
            if hint_type is bool:
                try:
                    device_value = strutils.bool_from_string(device_value,
                                                             strict=True)
                except ValueError:
                    LOG.warning('The attribute "%(attr)s" (with value '
                                '"%(value)s") of device "%(dev)s" is not '
                                'a valid Boolean. Skipping device.',
                                {'attr': hint, 'value': device_value,
                                 'dev': device_name})
                    break
                if device_value == hint_value:
                    continue

            elif specs_matcher.match(device_value, hint_value):
                continue

            # Neither comparison path matched: reject this device.
            LOG.debug('The attribute "%(attr)s" (with value "%(value)s") '
                      'of device "%(dev)s" does not match the hint %(hint)s',
                      {'attr': hint, 'value': device_value,
                       'dev': device_name, 'hint': hint_value})
            break
        else:
            yield dev
||||
def match_root_device_hints(devices, root_device_hints):
    """Try to find a device that matches the root device hints.

    In order for a device to be matched it needs to satisfy all the
    given hints.

    :param devices: A list of dictionaries representing the devices
        containing one or more of the following keys:

        :name: (String) The device name, e.g /dev/sda
        :size: (Integer) Size of the device in *bytes*
        :model: (String) Device model
        :vendor: (String) Device vendor name
        :serial: (String) Device serial number
        :wwn: (String) Unique storage identifier
        :wwn_with_extension: (String): Unique storage identifier with
            the vendor extension appended
        :wwn_vendor_extension: (String): United vendor storage identifier
        :rotational: (Boolean) Whether it's a rotational device or
            not. Useful to distinguish HDDs (rotational) and SSDs
            (not rotational).
        :hctl: (String): The SCSI address: Host, channel, target and lun.
            For example: '1:0:0:0'.
        :by_path: (String): The alternative device name,
            e.g. /dev/disk/by-path/pci-0000:00

    :param root_device_hints: A dictionary with the root device hints.
    :raises: ValueError, if some information is invalid.
    :returns: The first device to match all the hints or None.
    """
    matches = find_devices_by_hints(devices, root_device_hints)
    dev = next(matches, None)
    if dev is None:
        LOG.warning('No device found that matches the root device hints %s',
                    root_device_hints)
        return None
    LOG.info('Root device found! The device "%s" matches the root '
             'device hints %s', dev, root_device_hints)
    return dev
155
ironic_inspector/common/exception.py
Normal file
155
ironic_inspector/common/exception.py
Normal file
@ -0,0 +1,155 @@
|
||||
# Copyright 2010 United States Government as represented by the
|
||||
# Administrator of the National Aeronautics and Space Administration.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Ironic base exception handling.
|
||||
|
||||
Includes decorator for re-raising Ironic-type exceptions.
|
||||
|
||||
SHOULD include dedicated exception logging.
|
||||
|
||||
"""
|
||||
|
||||
import collections
|
||||
from http import client as http_client
|
||||
import json
|
||||
import logging
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_utils import excutils
|
||||
|
||||
from ironic_inspector.common.i18n import _
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
def _ensure_exception_kwargs_serializable(exc_class_name, kwargs):
    """Ensure that kwargs are serializable

    Ensure that all kwargs passed to exception constructor can be passed over
    RPC, by trying to convert them to JSON, or, as a last resort, to string.
    If it is not possible, unserializable kwargs will be removed, letting the
    receiver to handle the exception string as it is configured to.

    :param exc_class_name: a IronicException class name.
    :param kwargs: a dictionary of keyword arguments passed to the exception
        constructor.
    :returns: a dictionary of serializable keyword arguments.
    """
    # Try JSON first, then fall back to str(); per-key failures are
    # collected so they can be reported if neither serializer works.
    serializers = [(json.dumps, _('when converting to JSON')),
                   (str, _('when converting to string'))]
    exceptions = collections.defaultdict(list)
    serializable_kwargs = {}
    for k, v in kwargs.items():
        for serializer, msg in serializers:
            try:
                serializable_kwargs[k] = serializer(v)
                # A later serializer succeeded: discard failures recorded
                # for earlier attempts on this key.
                exceptions.pop(k, None)
                break
            except Exception as e:
                exceptions[k].append(
                    '(%(serializer_type)s) %(e_type)s: %(e_contents)s' %
                    {'serializer_type': msg, 'e_contents': e,
                     'e_type': e.__class__.__name__})
    if exceptions:
        LOG.error("One or more arguments passed to the %(exc_class)s "
                  "constructor as kwargs can not be serialized. The "
                  "serialized arguments: %(serialized)s. These "
                  "unserialized kwargs were dropped because of the "
                  "exceptions encountered during their "
                  "serialization:\n%(errors)s",
                  dict(errors=';\n'.join("%s: %s" % (k, '; '.join(v))
                                         for k, v in exceptions.items()),
                       exc_class=exc_class_name,
                       serialized=serializable_kwargs))
        # We might be able to actually put the following keys' values into
        # format string, but there is no guarantee, drop it just in case.
        for k in exceptions:
            del kwargs[k]
    return serializable_kwargs
||||
class IronicException(Exception):
    """Base Ironic Exception

    To correctly use this class, inherit from it and define
    a '_msg_fmt' property. That _msg_fmt will get printf'd
    with the keyword arguments provided to the constructor.

    If you need to access the message from an exception you should use
    str(exc)

    """

    # Default message format, HTTP-style status code, extra response
    # headers and safety flag; subclasses override as needed.
    _msg_fmt = _("An unknown exception occurred.")
    code = 500
    headers = {}
    safe = False

    def __init__(self, message=None, **kwargs):
        # kwargs may later cross an RPC boundary: drop anything that can
        # neither be JSON- nor str-serialized.
        self.kwargs = _ensure_exception_kwargs_serializable(
            self.__class__.__name__, kwargs)

        if 'code' not in self.kwargs:
            try:
                self.kwargs['code'] = self.code
            except AttributeError:
                pass
        else:
            # An explicit 'code' kwarg overrides the class attribute.
            self.code = int(kwargs['code'])

        if not message:
            try:
                message = self._msg_fmt % kwargs

            except Exception:
                with excutils.save_and_reraise_exception() as ctxt:
                    # kwargs doesn't match a variable in the message
                    # log the issue and the kwargs
                    prs = ', '.join('%s=%s' % pair for pair in kwargs.items())
                    LOG.exception('Exception in string format operation '
                                  '(arguments %s)', prs)
                    if not CONF.exception.fatal_exception_format_errors:
                        # at least get the core message out if something
                        # happened
                        message = self._msg_fmt
                        ctxt.reraise = False

        super(IronicException, self).__init__(message)
||||
class ServiceLookupFailure(IronicException):
    """Raised when a service cannot be found through multicast lookup."""
    _msg_fmt = _("Cannot find %(service)s service through multicast")
||||
|
||||
|
||||
class ServiceRegistrationFailure(IronicException):
    """Raised when registering a service (e.g. via mDNS) fails."""
    _msg_fmt = _("Cannot register %(service)s service: %(error)s")
||||
|
||||
|
||||
class BadRequest(IronicException):
    """Client error resulting in an HTTP 400 response."""
    code = http_client.BAD_REQUEST
||||
|
||||
|
||||
class Unauthorized(IronicException):
    """Authentication failure: HTTP 401 with a Basic auth challenge."""
    code = http_client.UNAUTHORIZED
    # Prompts clients to retry the request with Basic credentials.
    headers = {'WWW-Authenticate': 'Basic realm="Baremetal API"'}
||||
|
||||
|
||||
class ConfigInvalid(IronicException):
    """Raised for invalid or unreadable configuration input."""
    _msg_fmt = _("Invalid configuration file. %(error_msg)s")
290
ironic_inspector/common/mdns.py
Normal file
290
ironic_inspector/common/mdns.py
Normal file
@ -0,0 +1,290 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Multicast DNS implementation for API discovery.
|
||||
|
||||
This implementation follows RFC 6763 as clarified by the API SIG guideline
|
||||
https://review.opendev.org/651222.
|
||||
"""
|
||||
|
||||
import collections
|
||||
import ipaddress
|
||||
import logging
|
||||
import socket
|
||||
import time
|
||||
from urllib import parse as urlparse
|
||||
|
||||
from oslo_config import cfg
|
||||
import zeroconf
|
||||
|
||||
from ironic_inspector.common import exception
|
||||
from ironic_inspector.common.i18n import _
|
||||
from ironic_inspector import utils
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
_MDNS_DOMAIN = '_openstack._tcp.local.'
|
||||
_endpoint = collections.namedtuple('Endpoint',
|
||||
['addresses', 'hostname', 'port', 'params'])
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
class Zeroconf(object):
|
||||
"""Multicast DNS implementation client and server.
|
||||
|
||||
Uses threading internally, so there is no start method. It starts
|
||||
automatically on creation.
|
||||
|
||||
.. warning::
|
||||
The underlying library does not yet support IPv6.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize and start the mDNS server."""
|
||||
interfaces = (CONF.mdns.interfaces if CONF.mdns.interfaces
|
||||
else zeroconf.InterfaceChoice.All)
|
||||
# If interfaces are set, let zeroconf auto-detect the version
|
||||
ip_version = None if CONF.mdns.interfaces else zeroconf.IPVersion.All
|
||||
self._zc = zeroconf.Zeroconf(interfaces=interfaces,
|
||||
ip_version=ip_version)
|
||||
self._registered = []
|
||||
|
||||
def register_service(self, service_type, endpoint, params=None):
|
||||
"""Register a service.
|
||||
|
||||
This call announces the new services via multicast and instructs the
|
||||
built-in server to respond to queries about it.
|
||||
|
||||
:param service_type: OpenStack service type, e.g. "baremetal".
|
||||
:param endpoint: full endpoint to reach the service.
|
||||
:param params: optional properties as a dictionary.
|
||||
:raises: :exc:`.ServiceRegistrationFailure` if the service cannot be
|
||||
registered, e.g. because of conflicts.
|
||||
"""
|
||||
parsed = _parse_endpoint(endpoint, service_type)
|
||||
|
||||
all_params = CONF.mdns.params.copy()
|
||||
if params:
|
||||
all_params.update(params)
|
||||
all_params.update(parsed.params)
|
||||
|
||||
properties = {
|
||||
(key.encode('utf-8') if isinstance(key, str) else key):
|
||||
(value.encode('utf-8') if isinstance(value, str) else value)
|
||||
for key, value in all_params.items()
|
||||
}
|
||||
|
||||
# TODO(dtantsur): allow overriding TTL values via configuration
|
||||
info = zeroconf.ServiceInfo(_MDNS_DOMAIN,
|
||||
'%s.%s' % (service_type, _MDNS_DOMAIN),
|
||||
addresses=parsed.addresses,
|
||||
port=parsed.port,
|
||||
properties=properties,
|
||||
server=parsed.hostname)
|
||||
|
||||
LOG.debug('Registering %s via mDNS', info)
|
||||
# Work around a potential race condition in the registration code:
|
||||
# https://github.com/jstasiak/python-zeroconf/issues/163
|
||||
delay = 0.1
|
||||
try:
|
||||
for attempt in range(CONF.mdns.registration_attempts):
|
||||
try:
|
||||
self._zc.register_service(info)
|
||||
except zeroconf.NonUniqueNameException:
|
||||
LOG.debug('Could not register %s - conflict', info)
|
||||
if attempt == CONF.mdns.registration_attempts - 1:
|
||||
raise
|
||||
# reset the cache to purge learned records and retry
|
||||
self._zc.cache = zeroconf.DNSCache()
|
||||
time.sleep(delay)
|
||||
delay *= 2
|
||||
else:
|
||||
break
|
||||
except zeroconf.Error as exc:
|
||||
raise exception.ServiceRegistrationFailure(
|
||||
service=service_type, error=exc)
|
||||
|
||||
self._registered.append(info)
|
||||
|
||||
    def get_endpoint(self, service_type, skip_loopback=True,  # noqa: C901
                     skip_link_local=False):
        """Get an endpoint and its properties from mDNS.

        If the requested endpoint is already in the built-in server cache, and
        its TTL is not exceeded, the cached value is returned.

        :param service_type: OpenStack service type.
        :param skip_loopback: Whether to ignore loopback addresses.
        :param skip_link_local: Whether to ignore link local V6 addresses.
        :returns: tuple (endpoint URL, properties as a dict).
        :raises: :exc:`.ServiceLookupFailure` if the service cannot be found.
        """
        # Retry the lookup with exponential back-off; only raise after the
        # last configured attempt fails.
        delay = 0.1
        for attempt in range(CONF.mdns.lookup_attempts):
            name = '%s.%s' % (service_type, _MDNS_DOMAIN)
            info = self._zc.get_service_info(name, name)
            if info is not None:
                break
            elif attempt == CONF.mdns.lookup_attempts - 1:
                raise exception.ServiceLookupFailure(service=service_type)
            else:
                time.sleep(delay)
                delay *= 2

        all_addr = info.parsed_addresses()

        # Try to find the first routable address
        fallback = None
        for addr in all_addr:
            try:
                loopback = ipaddress.ip_address(addr).is_loopback
            except ValueError:
                LOG.debug('Skipping invalid IP address %s', addr)
                continue
            else:
                if loopback and skip_loopback:
                    LOG.debug('Skipping loopback IP address %s', addr)
                    continue

            if utils.get_route_source(addr, skip_link_local):
                address = addr
                break
            elif fallback is None:
                # Remember the first non-routable candidate in case no
                # routable address is found at all.
                fallback = addr
        else:
            # for/else: no break happened, i.e. no routable address found.
            if fallback is None:
                raise exception.ServiceLookupFailure(
                    _('None of addresses %(addr)s for service %(service)s '
                      'are valid')
                    % {'addr': all_addr, 'service': service_type})
            else:
                LOG.warning('None of addresses %s seem routable, using %s',
                            all_addr, fallback)
                address = fallback

        # mDNS TXT records may carry bytes; keys must decode to text,
        # values are kept as bytes if they do not decode.
        properties = {}
        for key, value in info.properties.items():
            try:
                if isinstance(key, bytes):
                    key = key.decode('utf-8')
            except UnicodeError as exc:
                raise exception.ServiceLookupFailure(
                    _('Invalid properties for service %(svc)s. Cannot decode '
                      'key %(key)r: %(exc)r') %
                    {'svc': service_type, 'key': key, 'exc': exc})

            try:
                if isinstance(value, bytes):
                    value = value.decode('utf-8')
            except UnicodeError as exc:
                LOG.debug('Cannot convert value %(value)r for key %(key)s '
                          'to string, assuming binary: %(exc)s',
                          {'key': key, 'value': value, 'exc': exc})

            properties[key] = value

        path = properties.pop('path', '')
        protocol = properties.pop('protocol', None)
        if not protocol:
            # No explicit protocol recorded: infer it from the port.
            if info.port == 80:
                protocol = 'http'
            else:
                protocol = 'https'

        if info.server.endswith('.local.'):
            # Local hostname means that the catalog lists an IP address,
            # so use it
            host = address
            if int(ipaddress.ip_address(host).version) == 6:
                # IPv6 literals must be bracketed inside a URL.
                host = '[%s]' % host
        else:
            # Otherwise use the provided hostname.
            host = info.server.rstrip('.')

        return ('{proto}://{host}:{port}{path}'.format(proto=protocol,
                                                       host=host,
                                                       port=info.port,
                                                       path=path),
                properties)
|
||||
|
||||
def close(self):
|
||||
"""Shut down mDNS and unregister services.
|
||||
|
||||
.. note::
|
||||
If another server is running for the same services, it will
|
||||
re-register them immediately.
|
||||
"""
|
||||
for info in self._registered:
|
||||
try:
|
||||
self._zc.unregister_service(info)
|
||||
except Exception:
|
||||
LOG.exception('Could not unregister mDNS service %s', info)
|
||||
self._zc.close()
|
||||
|
||||
    def __enter__(self):
        # Context-manager support: entering returns the server itself.
        return self
|
||||
|
||||
    def __exit__(self, *args):
        # Context-manager support: leaving the block unregisters all
        # services and shuts the server down via close().
        self.close()
|
||||
|
||||
|
||||
def _parse_endpoint(endpoint, service_type=None):
    """Parse an endpoint URL into the pieces needed for mDNS registration.

    Resolves the endpoint's hostname to network-format addresses and
    derives the optional ``path`` and ``protocol`` parameters that cannot
    be reconstructed from the record itself.

    :param endpoint: full endpoint URL to parse.
    :param service_type: OpenStack service type, used only in error
        messages.
    :returns: an ``_endpoint`` tuple (addresses, hostname, port, params).
    :raises: ServiceRegistrationFailure if the hostname cannot be resolved
        or yields no usable addresses.
    """
    params = {}
    url = urlparse.urlparse(endpoint)
    port = url.port

    # Fall back to the scheme's default port when none is given explicitly.
    if port is None:
        if url.scheme == 'https':
            port = 443
        else:
            port = 80

    addresses = []
    hostname = url.hostname
    try:
        infos = socket.getaddrinfo(hostname, port, 0, socket.IPPROTO_TCP)
    except socket.error as exc:
        raise exception.ServiceRegistrationFailure(
            service=service_type,
            error=_('Could not resolve hostname %(host)s: %(exc)s') %
            {'host': hostname, 'exc': exc})

    for info in infos:
        ip = info[4][0]
        if ip == hostname:
            # we need a host name for the service record. if what we have in
            # the catalog is an IP address, use the local hostname instead
            hostname = None
        # zeroconf requires addresses in network format
        ip = socket.inet_pton(info[0], ip)
        if ip not in addresses:
            addresses.append(ip)
    if not addresses:
        raise exception.ServiceRegistrationFailure(
            service=service_type,
            error=_('No suitable addresses found for %s') % url.hostname)

    # avoid storing information that can be derived from existing data
    if url.path not in ('', '/'):
        params['path'] = url.path

    # Only record the protocol when it cannot be inferred from the port
    # (http/80 and https/443 are the assumed defaults on lookup).
    if (not (port == 80 and url.scheme == 'http')
            and not (port == 443 and url.scheme == 'https')):
        params['protocol'] = url.scheme

    # zeroconf is pretty picky about having the trailing dot
    if hostname is not None and not hostname.endswith('.'):
        hostname += '.'

    return _endpoint(addresses, hostname, port, params)
|
@ -16,7 +16,6 @@ import traceback as traceback_mod
|
||||
|
||||
from eventlet import semaphore
|
||||
from futurist import periodics
|
||||
from ironic_lib import mdns
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log
|
||||
import oslo_messaging as messaging
|
||||
@ -28,6 +27,7 @@ from ironic_inspector.common import coordination
|
||||
from ironic_inspector.common.i18n import _
|
||||
from ironic_inspector.common import ironic as ir_utils
|
||||
from ironic_inspector.common import keystone
|
||||
from ironic_inspector.common import mdns
|
||||
from ironic_inspector.db import api as dbapi
|
||||
from ironic_inspector import introspect
|
||||
from ironic_inspector import node_cache
|
||||
|
@ -18,10 +18,12 @@ from ironic_inspector.conf import coordination
|
||||
from ironic_inspector.conf import default
|
||||
from ironic_inspector.conf import discovery
|
||||
from ironic_inspector.conf import dnsmasq_pxe_filter
|
||||
from ironic_inspector.conf import exception
|
||||
from ironic_inspector.conf import extra_hardware
|
||||
from ironic_inspector.conf import healthcheck
|
||||
from ironic_inspector.conf import iptables
|
||||
from ironic_inspector.conf import ironic
|
||||
from ironic_inspector.conf import mdns
|
||||
from ironic_inspector.conf import pci_devices
|
||||
from ironic_inspector.conf import port_physnet
|
||||
from ironic_inspector.conf import processing
|
||||
@ -39,10 +41,12 @@ coordination.register_opts(CONF)
|
||||
discovery.register_opts(CONF)
|
||||
default.register_opts(CONF)
|
||||
dnsmasq_pxe_filter.register_opts(CONF)
|
||||
exception.register_opts(CONF)
|
||||
extra_hardware.register_opts(CONF)
|
||||
healthcheck.register_opts(CONF)
|
||||
iptables.register_opts(CONF)
|
||||
ironic.register_opts(CONF)
|
||||
mdns.register_opts(CONF)
|
||||
pci_devices.register_opts(CONF)
|
||||
port_physnet.register_opts(CONF)
|
||||
processing.register_opts(CONF)
|
||||
|
43
ironic_inspector/conf/exception.py
Normal file
43
ironic_inspector/conf/exception.py
Normal file
@ -0,0 +1,43 @@
|
||||
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Configuration options for exception message formatting.

This module only declares and registers oslo.config options; the
exception classes themselves live elsewhere.
"""
from oslo_config import cfg

from ironic_inspector.common.i18n import _


opts = [
    cfg.BoolOpt('fatal_exception_format_errors',
                default=False,
                help=_('Used if there is a formatting error when generating '
                       'an exception message (a programming error). If True, '
                       'raise an exception; if False, use the unformatted '
                       'message.'),
                # Previously lived in ironic-lib's [ironic_lib] section;
                # kept readable from there for upgrade compatibility.
                deprecated_group='ironic_lib'),
]

CONF = cfg.CONF


def register_opts(conf):
    # Register the options under the [exception] config section.
    conf.register_opts(opts, group='exception')
|
44
ironic_inspector/conf/mdns.py
Normal file
44
ironic_inspector/conf/mdns.py
Normal file
@ -0,0 +1,44 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Configuration options for multicast DNS (mDNS) service publishing."""

from oslo_config import cfg
from oslo_config import types as cfg_types

opts = [
    cfg.IntOpt('registration_attempts',
               min=1, default=5,
               help='Number of attempts to register a service. Currently '
                    'has to be larger than 1 because of race conditions '
                    'in the zeroconf library.'),
    cfg.IntOpt('lookup_attempts',
               min=1, default=3,
               help='Number of attempts to lookup a service.'),
    cfg.Opt('params',
            # This is required for values that contain commas.
            type=cfg_types.Dict(cfg_types.String(quotes=True)),
            default={},
            help='Additional parameters to pass for the registered '
                 'service.'),
    cfg.ListOpt('interfaces',
                help='List of IP addresses of interfaces to use for mDNS. '
                     'Defaults to all interfaces on the system.'),
]

CONF = cfg.CONF
opt_group = cfg.OptGroup(name='mdns', title='Options for multicast DNS')


def register_opts(conf):
    # Register the group and its options under the [mdns] config section.
    conf.register_group(opt_group)
    conf.register_opts(opts, group=opt_group)
|
@ -69,10 +69,12 @@ def list_opts():
|
||||
('discovery', ironic_inspector.conf.discovery.list_opts()),
|
||||
('dnsmasq_pxe_filter',
|
||||
ironic_inspector.conf.dnsmasq_pxe_filter.list_opts()),
|
||||
('exception', ironic_inspector.conf.exception.opts),
|
||||
('extra_hardware', ironic_inspector.conf.extra_hardware.list_opts()),
|
||||
('healthcheck', ironic_inspector.conf.healthcheck.list_opts()),
|
||||
('ironic', ironic_inspector.conf.ironic.list_opts()),
|
||||
('iptables', ironic_inspector.conf.iptables.list_opts()),
|
||||
('mdns', ironic_inspector.conf.mdns.opts),
|
||||
('port_physnet', ironic_inspector.conf.port_physnet.list_opts()),
|
||||
('processing', ironic_inspector.conf.processing.list_opts()),
|
||||
('pci_devices', ironic_inspector.conf.pci_devices.list_opts()),
|
||||
|
@ -13,13 +13,13 @@
|
||||
|
||||
"""Standard set of plugins."""
|
||||
|
||||
from ironic_lib import utils as il_utils
|
||||
import netaddr
|
||||
from oslo_config import cfg
|
||||
from oslo_utils import netutils
|
||||
from oslo_utils import units
|
||||
|
||||
|
||||
from ironic_inspector.common import device_hints
|
||||
from ironic_inspector.common.i18n import _
|
||||
from ironic_inspector.common import ironic as ir_utils
|
||||
from ironic_inspector.plugins import base
|
||||
@ -43,7 +43,8 @@ class RootDiskSelectionHook(base.ProcessingHook):
|
||||
return
|
||||
skip_list = set()
|
||||
for hint in skip_list_hints:
|
||||
found_devs = il_utils.find_devices_by_hints(block_devices, hint)
|
||||
found_devs = device_hints.find_devices_by_hints(
|
||||
block_devices, hint)
|
||||
excluded_devs = {dev['name'] for dev in found_devs}
|
||||
skipped_devices = excluded_devs.difference(skip_list)
|
||||
skip_list = skip_list.union(excluded_devs)
|
||||
@ -67,8 +68,8 @@ class RootDiskSelectionHook(base.ProcessingHook):
|
||||
inventory['disks'] = [d for d in inventory['disks']
|
||||
if d['name'] not in skip_list]
|
||||
try:
|
||||
device = il_utils.match_root_device_hints(inventory['disks'],
|
||||
hints)
|
||||
device = device_hints.match_root_device_hints(inventory['disks'],
|
||||
hints)
|
||||
except (TypeError, ValueError) as e:
|
||||
raise utils.Error(
|
||||
_('No disks could be found using the root device hints '
|
||||
|
223
ironic_inspector/test/unit/test_basic_auth.py
Normal file
223
ironic_inspector/test/unit/test_basic_auth.py
Normal file
@ -0,0 +1,223 @@
|
||||
# Copyright 2020 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import base64
|
||||
import json
|
||||
import os
|
||||
import tempfile
|
||||
from unittest import mock
|
||||
|
||||
from ironic_inspector.common import auth_basic
|
||||
from ironic_inspector.common import exception
|
||||
from ironic_inspector.test import base
|
||||
|
||||
|
||||
class TestAuthBasic(base.BaseTest):
    """Unit tests for the HTTP basic-auth middleware and its helpers."""

    def write_auth_file(self, data=None):
        """Write *data* to a temp htpasswd-style file and return its path.

        The file is removed automatically at test cleanup.
        """
        if not data:
            data = '\n'
        with tempfile.NamedTemporaryFile(mode='w', delete=False) as f:
            f.write(data)
            self.addCleanup(os.remove, f.name)
            return f.name

    def test_middleware_authenticate(self):
        auth_file = self.write_auth_file(
            'myName:$2y$05$lE3eGtyj41jZwrzS87KTqe6.'
            'JETVCWBkc32C63UP2aYrGoYOEpbJm\n\n\n')
        app = mock.Mock()
        start_response = mock.Mock()
        middleware = auth_basic.BasicAuthMiddleware(app, auth_file)
        # base64 of "myName:myPassword"
        env = {
            'HTTP_AUTHORIZATION': 'Basic bXlOYW1lOm15UGFzc3dvcmQ='
        }

        result = middleware(env, start_response)
        self.assertEqual(app.return_value, result)
        start_response.assert_not_called()

    def test_middleware_unauthenticated(self):
        auth_file = self.write_auth_file(
            'myName:$2y$05$lE3eGtyj41jZwrzS87KTqe6.'
            'JETVCWBkc32C63UP2aYrGoYOEpbJm\n\n\n')
        app = mock.Mock()
        start_response = mock.Mock()
        middleware = auth_basic.BasicAuthMiddleware(app, auth_file)
        # No Authorization header: the middleware must answer 401 itself
        # without calling the wrapped app.
        env = {'REQUEST_METHOD': 'GET'}

        body = middleware(env, start_response)
        decoded = json.loads(body[0].decode())
        self.assertEqual({'error': {'message': 'Authorization required',
                                    'code': 401}}, decoded)

        start_response.assert_called_once_with(
            '401 Unauthorized',
            [('WWW-Authenticate', 'Basic realm="Baremetal API"'),
             ('Content-Type', 'application/json'),
             ('Content-Length', str(len(body[0])))]
        )
        app.assert_not_called()

    def test_authenticate(self):
        auth_file = self.write_auth_file(
            'foo:bar\nmyName:$2y$05$lE3eGtyj41jZwrzS87KTqe6.'
            'JETVCWBkc32C63UP2aYrGoYOEpbJm\n\n\n')

        # test basic auth
        self.assertEqual(
            {'HTTP_X_USER': 'myName', 'HTTP_X_USER_NAME': 'myName'},
            auth_basic.authenticate(
                auth_file, 'myName', b'myPassword')
        )

        # test failed auth
        e = self.assertRaises(exception.ConfigInvalid,
                              auth_basic.authenticate,
                              auth_file, 'foo', b'bar')
        self.assertEqual('Invalid configuration file. Only bcrypt digested '
                         'passwords are supported for foo', str(e))

        # test problem reading user data file
        auth_file = auth_file + '.missing'
        e = self.assertRaises(exception.ConfigInvalid,
                              auth_basic.authenticate,
                              auth_file, 'myName',
                              b'myPassword')
        self.assertEqual('Invalid configuration file. Problem reading '
                         'auth user file', str(e))

    def test_auth_entry(self):
        entry_pass = ('myName:$2y$05$lE3eGtyj41jZwrzS87KTqe6.'
                      'JETVCWBkc32C63UP2aYrGoYOEpbJm')
        entry_pass_2a = ('myName:$2a$10$I9Fi3DM1sbxQP0560MK9'
                         'tec1dUdytBtIqXfDCyTNfDUabtGvQjW1S')
        entry_pass_2b = ('myName:$2b$12$dWLBxT6aMxpVTfUNAyOu'
                         'IusHXewu8m6Hrsxw4/e95WGBelFn0oOMW')
        entry_fail = 'foo:bar'

        # success
        self.assertEqual(
            {'HTTP_X_USER': 'myName', 'HTTP_X_USER_NAME': 'myName'},
            auth_basic.auth_entry(
                entry_pass, b'myPassword')
        )

        # success with a bcrypt implementations other than htpasswd
        self.assertEqual(
            {'HTTP_X_USER': 'myName', 'HTTP_X_USER_NAME': 'myName'},
            auth_basic.auth_entry(
                entry_pass_2a, b'myPassword')
        )
        self.assertEqual(
            {'HTTP_X_USER': 'myName', 'HTTP_X_USER_NAME': 'myName'},
            auth_basic.auth_entry(
                entry_pass_2b, b'myPassword')
        )

        # failed, unknown digest format
        e = self.assertRaises(exception.ConfigInvalid,
                              auth_basic.auth_entry, entry_fail, b'bar')
        self.assertEqual('Invalid configuration file. Only bcrypt digested '
                         'passwords are supported for foo', str(e))

        # failed, incorrect password
        e = self.assertRaises(exception.Unauthorized,
                              auth_basic.auth_entry, entry_pass, b'bar')
        self.assertEqual('Incorrect username or password', str(e))

    def test_validate_auth_file(self):
        auth_file = self.write_auth_file(
            'myName:$2y$05$lE3eGtyj41jZwrzS87KTqe6.'
            'JETVCWBkc32C63UP2aYrGoYOEpbJm\n\n\n')
        # success, valid config
        auth_basic.validate_auth_file(auth_file)

        # failed, missing auth file
        auth_file = auth_file + '.missing'
        self.assertRaises(exception.ConfigInvalid,
                          auth_basic.validate_auth_file, auth_file)

        # failed, invalid entry
        auth_file = self.write_auth_file(
            'foo:bar\nmyName:$2y$05$lE3eGtyj41jZwrzS87KTqe6.'
            'JETVCWBkc32C63UP2aYrGoYOEpbJm\n\n\n')
        self.assertRaises(exception.ConfigInvalid,
                          auth_basic.validate_auth_file, auth_file)

    def test_parse_token(self):

        # success with bytes
        token = base64.b64encode(b'myName:myPassword')
        self.assertEqual(
            ('myName', b'myPassword'),
            auth_basic.parse_token(token)
        )

        # success with string
        token = str(token, encoding='utf-8')
        self.assertEqual(
            ('myName', b'myPassword'),
            auth_basic.parse_token(token)
        )

        # failed, invalid base64
        e = self.assertRaises(exception.BadRequest,
                              auth_basic.parse_token, token[:-1])
        self.assertEqual('Could not decode authorization token', str(e))

        # failed, no colon in token
        # NOTE(review): token[:-1] corrupts the base64 padding, so this
        # actually exercises the decode-failure path rather than the
        # missing-colon path — confirm whether passing the intact token
        # was intended here.
        token = str(base64.b64encode(b'myNamemyPassword'), encoding='utf-8')
        e = self.assertRaises(exception.BadRequest,
                              auth_basic.parse_token, token[:-1])
        self.assertEqual('Could not decode authorization token', str(e))

    def test_parse_header(self):
        auth_value = 'Basic bXlOYW1lOm15UGFzc3dvcmQ='

        # success
        self.assertEqual(
            'bXlOYW1lOm15UGFzc3dvcmQ=',
            auth_basic.parse_header({
                'HTTP_AUTHORIZATION': auth_value
            })
        )

        # failed, missing Authorization header
        e = self.assertRaises(exception.Unauthorized,
                              auth_basic.parse_header,
                              {})
        self.assertEqual('Authorization required', str(e))

        # failed missing token
        e = self.assertRaises(exception.BadRequest,
                              auth_basic.parse_header,
                              {'HTTP_AUTHORIZATION': 'Basic'})
        self.assertEqual('Could not parse Authorization header', str(e))

        # failed, type other than Basic
        digest_value = 'Digest username="myName" nonce="foobar"'
        e = self.assertRaises(exception.BadRequest,
                              auth_basic.parse_header,
                              {'HTTP_AUTHORIZATION': digest_value})
        self.assertEqual('Unsupported authorization type "Digest"', str(e))

    def test_unauthorized(self):
        e = self.assertRaises(exception.Unauthorized,
                              auth_basic.unauthorized, 'ouch')
        self.assertEqual('ouch', str(e))
        # The raised exception must carry the challenge header expected
        # by HTTP basic-auth clients.
        self.assertEqual({
            'WWW-Authenticate': 'Basic realm="Baremetal API"'
        }, e.headers)
320
ironic_inspector/test/unit/test_device_hints.py
Normal file
320
ironic_inspector/test/unit/test_device_hints.py
Normal file
@ -0,0 +1,320 @@
|
||||
# Copyright 2020 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import copy
|
||||
from unittest import mock
|
||||
|
||||
from ironic_inspector.common import device_hints as hints
|
||||
from ironic_inspector.test.base import BaseTest
|
||||
|
||||
|
||||
class ParseRootDeviceTestCase(BaseTest):
    """Unit tests for root-device-hint parsing and normalization helpers."""

    def test_parse_root_device_hints_without_operators(self):
        root_device = {
            'wwn': '123456', 'model': 'FOO model', 'size': 12345,
            'serial': 'foo-serial', 'vendor': 'foo VENDOR with space',
            'name': '/dev/sda', 'wwn_with_extension': '123456111',
            'wwn_vendor_extension': '111', 'rotational': True,
            'hctl': '1:0:0:0', 'by_path': '/dev/disk/by-path/1:0:0:0'}
        result = hints.parse_root_device_hints(root_device)
        # Bare values get a default operator (s== / ==), strings are
        # lower-cased and URL-quoted.
        expected = {
            'wwn': 's== 123456', 'model': 's== foo%20model',
            'size': '== 12345', 'serial': 's== foo-serial',
            'vendor': 's== foo%20vendor%20with%20space',
            'name': 's== /dev/sda', 'wwn_with_extension': 's== 123456111',
            'wwn_vendor_extension': 's== 111', 'rotational': True,
            'hctl': 's== 1%3A0%3A0%3A0',
            'by_path': 's== /dev/disk/by-path/1%3A0%3A0%3A0'}
        self.assertEqual(expected, result)

    def test_parse_root_device_hints_with_operators(self):
        root_device = {
            'wwn': 's== 123456', 'model': 's== foo MODEL', 'size': '>= 12345',
            'serial': 's!= foo-serial', 'vendor': 's== foo VENDOR with space',
            'name': '<or> /dev/sda <or> /dev/sdb',
            'wwn_with_extension': 's!= 123456111',
            'wwn_vendor_extension': 's== 111', 'rotational': True,
            'hctl': 's== 1:0:0:0', 'by_path': 's== /dev/disk/by-path/1:0:0:0'}

        # Validate strings being normalized
        expected = copy.deepcopy(root_device)
        expected['model'] = 's== foo%20model'
        expected['vendor'] = 's== foo%20vendor%20with%20space'
        expected['hctl'] = 's== 1%3A0%3A0%3A0'
        expected['by_path'] = 's== /dev/disk/by-path/1%3A0%3A0%3A0'

        result = hints.parse_root_device_hints(root_device)
        # The hints already contain the operators, make sure we keep it
        self.assertEqual(expected, result)

    def test_parse_root_device_hints_string_compare_operator_name(self):
        root_device = {'name': 's== /dev/sdb'}
        # Validate strings being normalized
        expected = copy.deepcopy(root_device)
        result = hints.parse_root_device_hints(root_device)
        # The hints already contain the operators, make sure we keep it
        self.assertEqual(expected, result)

    def test_parse_root_device_hints_no_hints(self):
        result = hints.parse_root_device_hints({})
        self.assertIsNone(result)

    def test_parse_root_device_hints_convert_size(self):
        for size in (12345, '12345'):
            result = hints.parse_root_device_hints({'size': size})
            self.assertEqual({'size': '== 12345'}, result)

    def test_parse_root_device_hints_invalid_size(self):
        for value in ('not-int', -123, 0):
            self.assertRaises(ValueError, hints.parse_root_device_hints,
                              {'size': value})

    def test_parse_root_device_hints_int_or(self):
        expr = '<or> 123 <or> 456 <or> 789'
        result = hints.parse_root_device_hints({'size': expr})
        self.assertEqual({'size': expr}, result)

    def test_parse_root_device_hints_int_or_invalid(self):
        expr = '<or> 123 <or> non-int <or> 789'
        self.assertRaises(ValueError, hints.parse_root_device_hints,
                          {'size': expr})

    def test_parse_root_device_hints_string_or_space(self):
        expr = '<or> foo <or> foo bar <or> bar'
        expected = '<or> foo <or> foo%20bar <or> bar'
        result = hints.parse_root_device_hints({'model': expr})
        self.assertEqual({'model': expected}, result)

    def _parse_root_device_hints_convert_rotational(self, values,
                                                    expected_value):
        # Helper: every value in *values* must normalize to
        # *expected_value* for the boolean 'rotational' hint.
        for value in values:
            result = hints.parse_root_device_hints({'rotational': value})
            self.assertEqual({'rotational': expected_value}, result)

    def test_parse_root_device_hints_convert_rotational(self):
        self._parse_root_device_hints_convert_rotational(
            (True, 'true', 'on', 'y', 'yes'), True)

        self._parse_root_device_hints_convert_rotational(
            (False, 'false', 'off', 'n', 'no'), False)

    def test_parse_root_device_hints_invalid_rotational(self):
        self.assertRaises(ValueError, hints.parse_root_device_hints,
                          {'rotational': 'not-bool'})

    def test_parse_root_device_hints_invalid_wwn(self):
        self.assertRaises(ValueError, hints.parse_root_device_hints,
                          {'wwn': 123})

    def test_parse_root_device_hints_invalid_wwn_with_extension(self):
        self.assertRaises(ValueError, hints.parse_root_device_hints,
                          {'wwn_with_extension': 123})

    def test_parse_root_device_hints_invalid_wwn_vendor_extension(self):
        self.assertRaises(ValueError, hints.parse_root_device_hints,
                          {'wwn_vendor_extension': 123})

    def test_parse_root_device_hints_invalid_model(self):
        self.assertRaises(ValueError, hints.parse_root_device_hints,
                          {'model': 123})

    def test_parse_root_device_hints_invalid_serial(self):
        self.assertRaises(ValueError, hints.parse_root_device_hints,
                          {'serial': 123})

    def test_parse_root_device_hints_invalid_vendor(self):
        self.assertRaises(ValueError, hints.parse_root_device_hints,
                          {'vendor': 123})

    def test_parse_root_device_hints_invalid_name(self):
        self.assertRaises(ValueError, hints.parse_root_device_hints,
                          {'name': 123})

    def test_parse_root_device_hints_invalid_hctl(self):
        self.assertRaises(ValueError, hints.parse_root_device_hints,
                          {'hctl': 123})

    def test_parse_root_device_hints_invalid_by_path(self):
        self.assertRaises(ValueError, hints.parse_root_device_hints,
                          {'by_path': 123})

    def test_parse_root_device_hints_non_existent_hint(self):
        self.assertRaises(ValueError, hints.parse_root_device_hints,
                          {'non-existent': 'foo'})

    def test_extract_hint_operator_and_values_single_value(self):
        expected = {'op': '>=', 'values': ['123']}
        self.assertEqual(
            expected, hints._extract_hint_operator_and_values(
                '>= 123', 'size'))

    def test_extract_hint_operator_and_values_multiple_values(self):
        expected = {'op': '<or>', 'values': ['123', '456', '789']}
        expr = '<or> 123 <or> 456 <or> 789'
        self.assertEqual(
            expected, hints._extract_hint_operator_and_values(expr, 'size'))

    def test_extract_hint_operator_and_values_multiple_values_space(self):
        expected = {'op': '<or>', 'values': ['foo', 'foo bar', 'bar']}
        expr = '<or> foo <or> foo bar <or> bar'
        self.assertEqual(
            expected, hints._extract_hint_operator_and_values(expr, 'model'))

    def test_extract_hint_operator_and_values_no_operator(self):
        expected = {'op': '', 'values': ['123']}
        self.assertEqual(
            expected, hints._extract_hint_operator_and_values('123', 'size'))

    def test_extract_hint_operator_and_values_empty_value(self):
        self.assertRaises(
            ValueError, hints._extract_hint_operator_and_values, '', 'size')

    def test_extract_hint_operator_and_values_integer(self):
        expected = {'op': '', 'values': ['123']}
        self.assertEqual(
            expected, hints._extract_hint_operator_and_values(123, 'size'))

    def test__append_operator_to_hints(self):
        root_device = {'serial': 'foo', 'size': 12345,
                       'model': 'foo model', 'rotational': True}
        expected = {'serial': 's== foo', 'size': '== 12345',
                    'model': 's== foo model', 'rotational': True}

        result = hints._append_operator_to_hints(root_device)
        self.assertEqual(expected, result)

    def test_normalize_hint_expression_or(self):
        expr = '<or> foo <or> foo bar <or> bar'
        expected = '<or> foo <or> foo%20bar <or> bar'
        result = hints._normalize_hint_expression(expr, 'model')
        self.assertEqual(expected, result)

    def test_normalize_hint_expression_in(self):
        expr = '<in> foo <in> foo bar <in> bar'
        expected = '<in> foo <in> foo%20bar <in> bar'
        result = hints._normalize_hint_expression(expr, 'model')
        self.assertEqual(expected, result)

    def test_normalize_hint_expression_op_space(self):
        expr = 's== test string with space'
        expected = 's== test%20string%20with%20space'
        result = hints._normalize_hint_expression(expr, 'model')
        self.assertEqual(expected, result)

    def test_normalize_hint_expression_op_no_space(self):
        expr = 's!= SpongeBob'
        expected = 's!= spongebob'
        result = hints._normalize_hint_expression(expr, 'model')
        self.assertEqual(expected, result)

    def test_normalize_hint_expression_no_op_space(self):
        expr = 'no operators'
        expected = 'no%20operators'
        result = hints._normalize_hint_expression(expr, 'model')
        self.assertEqual(expected, result)

    def test_normalize_hint_expression_no_op_no_space(self):
        expr = 'NoSpace'
        expected = 'nospace'
        result = hints._normalize_hint_expression(expr, 'model')
        self.assertEqual(expected, result)

    def test_normalize_hint_expression_empty_value(self):
        self.assertRaises(
            ValueError, hints._normalize_hint_expression, '', 'size')
||||
|
||||
class MatchRootDeviceTestCase(BaseTest):
|
||||
|
||||
def setUp(self):
|
||||
super(MatchRootDeviceTestCase, self).setUp()
|
||||
self.devices = [
|
||||
{'name': '/dev/sda', 'size': 64424509440, 'model': 'ok model',
|
||||
'serial': 'fakeserial'},
|
||||
{'name': '/dev/sdb', 'size': 128849018880, 'model': 'big model',
|
||||
'serial': 'veryfakeserial', 'rotational': 'yes'},
|
||||
{'name': '/dev/sdc', 'size': 10737418240, 'model': 'small model',
|
||||
'serial': 'veryveryfakeserial', 'rotational': False},
|
||||
]
|
||||
|
||||
def test_match_root_device_hints_one_hint(self):
|
||||
root_device_hints = {'size': '>= 70'}
|
||||
dev = hints.match_root_device_hints(self.devices, root_device_hints)
|
||||
self.assertEqual('/dev/sdb', dev['name'])
|
||||
|
||||
def test_match_root_device_hints_rotational(self):
|
||||
root_device_hints = {'rotational': False}
|
||||
dev = hints.match_root_device_hints(self.devices, root_device_hints)
|
||||
self.assertEqual('/dev/sdc', dev['name'])
|
||||
|
||||
def test_match_root_device_hints_rotational_convert_devices_bool(self):
|
||||
root_device_hints = {'size': '>=100', 'rotational': True}
|
||||
dev = hints.match_root_device_hints(self.devices, root_device_hints)
|
||||
self.assertEqual('/dev/sdb', dev['name'])
|
||||
|
||||
def test_match_root_device_hints_multiple_hints(self):
|
||||
root_device_hints = {'size': '>= 50', 'model': 's==big model',
|
||||
'serial': 's==veryfakeserial'}
|
||||
dev = hints.match_root_device_hints(self.devices, root_device_hints)
|
||||
self.assertEqual('/dev/sdb', dev['name'])
|
||||
|
||||
def test_match_root_device_hints_multiple_hints2(self):
|
||||
root_device_hints = {
|
||||
'size': '<= 20',
|
||||
'model': '<or> model 5 <or> foomodel <or> small model <or>',
|
||||
'serial': 's== veryveryfakeserial'}
|
||||
dev = hints.match_root_device_hints(self.devices, root_device_hints)
|
||||
self.assertEqual('/dev/sdc', dev['name'])
|
||||
|
||||
def test_match_root_device_hints_multiple_hints3(self):
|
||||
root_device_hints = {'rotational': False, 'model': '<in> small'}
|
||||
dev = hints.match_root_device_hints(self.devices, root_device_hints)
|
||||
self.assertEqual('/dev/sdc', dev['name'])
|
||||
|
||||
def test_match_root_device_hints_no_operators(self):
|
||||
root_device_hints = {'size': '120', 'model': 'big model',
|
||||
'serial': 'veryfakeserial'}
|
||||
dev = hints.match_root_device_hints(self.devices, root_device_hints)
|
||||
self.assertEqual('/dev/sdb', dev['name'])
|
||||
|
||||
def test_match_root_device_hints_no_device_found(self):
|
||||
root_device_hints = {'size': '>=50', 'model': 's==foo'}
|
||||
dev = hints.match_root_device_hints(self.devices, root_device_hints)
|
||||
self.assertIsNone(dev)
|
||||
|
||||
@mock.patch.object(hints.LOG, 'warning', autospec=True)
|
||||
def test_match_root_device_hints_empty_device_attribute(self, mock_warn):
|
||||
empty_dev = [{'name': '/dev/sda', 'model': ' '}]
|
||||
dev = hints.match_root_device_hints(empty_dev, {'model': 'foo'})
|
||||
self.assertIsNone(dev)
|
||||
self.assertTrue(mock_warn.called)
|
||||
|
||||
def test_find_devices_all(self):
|
||||
root_device_hints = {'size': '>= 10'}
|
||||
devs = list(hints.find_devices_by_hints(self.devices,
|
||||
root_device_hints))
|
||||
self.assertEqual(self.devices, devs)
|
||||
|
||||
def test_find_devices_none(self):
|
||||
root_device_hints = {'size': '>= 100500'}
|
||||
devs = list(hints.find_devices_by_hints(self.devices,
|
||||
root_device_hints))
|
||||
self.assertEqual([], devs)
|
||||
|
||||
def test_find_devices_name(self):
|
||||
root_device_hints = {'name': 's== /dev/sda'}
|
||||
devs = list(hints.find_devices_by_hints(self.devices,
|
||||
root_device_hints))
|
||||
self.assertEqual([self.devices[0]], devs)
|
@ -15,12 +15,12 @@ import json
|
||||
from unittest import mock
|
||||
|
||||
import fixtures
|
||||
from ironic_lib import mdns
|
||||
import oslo_messaging as messaging
|
||||
import tooz
|
||||
|
||||
from ironic_inspector.common import coordination
|
||||
from ironic_inspector.common import keystone
|
||||
from ironic_inspector.common import mdns
|
||||
from ironic_inspector.common import swift
|
||||
from ironic_inspector.conductor import manager
|
||||
import ironic_inspector.conf
|
||||
|
143
ironic_inspector/test/unit/test_mdns.py
Normal file
143
ironic_inspector/test/unit/test_mdns.py
Normal file
@ -0,0 +1,143 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import socket
|
||||
from unittest import mock
|
||||
|
||||
from oslo_config import cfg
|
||||
import zeroconf
|
||||
|
||||
from ironic_inspector.common import exception
|
||||
from ironic_inspector.common import mdns
|
||||
from ironic_inspector.test import base
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
@mock.patch.object(zeroconf, 'Zeroconf', autospec=True)
|
||||
class RegisterServiceTestCase(base.BaseTest):
|
||||
|
||||
def test_ok(self, mock_zc):
|
||||
zc = mdns.Zeroconf()
|
||||
zc.register_service('baremetal', 'https://127.0.0.1/baremetal')
|
||||
mock_zc.assert_called_once_with(
|
||||
interfaces=zeroconf.InterfaceChoice.All,
|
||||
ip_version=zeroconf.IPVersion.All)
|
||||
mock_zc.return_value.register_service.assert_called_once_with(mock.ANY)
|
||||
info = mock_zc.return_value.register_service.call_args[0][0]
|
||||
self.assertEqual('_openstack._tcp.local.', info.type)
|
||||
self.assertEqual('baremetal._openstack._tcp.local.', info.name)
|
||||
self.assertEqual('127.0.0.1', socket.inet_ntoa(info.addresses[0]))
|
||||
self.assertEqual({b'path': b'/baremetal'}, info.properties)
|
||||
|
||||
def test_with_params(self, mock_zc):
|
||||
CONF.set_override('params', {'answer': 'none', 'foo': 'bar'},
|
||||
group='mdns')
|
||||
zc = mdns.Zeroconf()
|
||||
zc.register_service('baremetal', 'https://127.0.0.1/baremetal',
|
||||
params={'answer': b'42'})
|
||||
mock_zc.return_value.register_service.assert_called_once_with(mock.ANY)
|
||||
info = mock_zc.return_value.register_service.call_args[0][0]
|
||||
self.assertEqual('_openstack._tcp.local.', info.type)
|
||||
self.assertEqual('baremetal._openstack._tcp.local.', info.name)
|
||||
self.assertEqual('127.0.0.1', socket.inet_ntoa(info.addresses[0]))
|
||||
self.assertEqual({b'path': b'/baremetal',
|
||||
b'answer': b'42',
|
||||
b'foo': b'bar'},
|
||||
info.properties)
|
||||
|
||||
@mock.patch.object(mdns.time, 'sleep', autospec=True)
|
||||
def test_with_race(self, mock_sleep, mock_zc):
|
||||
mock_zc.return_value.register_service.side_effect = [
|
||||
zeroconf.NonUniqueNameException,
|
||||
zeroconf.NonUniqueNameException,
|
||||
zeroconf.NonUniqueNameException,
|
||||
None
|
||||
]
|
||||
zc = mdns.Zeroconf()
|
||||
zc.register_service('baremetal', 'https://127.0.0.1/baremetal')
|
||||
mock_zc.return_value.register_service.assert_called_with(mock.ANY)
|
||||
self.assertEqual(4, mock_zc.return_value.register_service.call_count)
|
||||
mock_sleep.assert_has_calls([mock.call(i) for i in (0.1, 0.2, 0.4)])
|
||||
|
||||
def test_with_interfaces(self, mock_zc):
|
||||
CONF.set_override('interfaces', ['10.0.0.1', '192.168.1.1'],
|
||||
group='mdns')
|
||||
zc = mdns.Zeroconf()
|
||||
zc.register_service('baremetal', 'https://127.0.0.1/baremetal')
|
||||
mock_zc.assert_called_once_with(interfaces=['10.0.0.1', '192.168.1.1'],
|
||||
ip_version=None)
|
||||
mock_zc.return_value.register_service.assert_called_once_with(mock.ANY)
|
||||
info = mock_zc.return_value.register_service.call_args[0][0]
|
||||
self.assertEqual('_openstack._tcp.local.', info.type)
|
||||
self.assertEqual('baremetal._openstack._tcp.local.', info.name)
|
||||
self.assertEqual('127.0.0.1', socket.inet_ntoa(info.addresses[0]))
|
||||
self.assertEqual({b'path': b'/baremetal'}, info.properties)
|
||||
|
||||
@mock.patch.object(mdns.time, 'sleep', autospec=True)
|
||||
def test_failure(self, mock_sleep, mock_zc):
|
||||
mock_zc.return_value.register_service.side_effect = (
|
||||
zeroconf.NonUniqueNameException
|
||||
)
|
||||
zc = mdns.Zeroconf()
|
||||
self.assertRaises(exception.ServiceRegistrationFailure,
|
||||
zc.register_service,
|
||||
'baremetal', 'https://127.0.0.1/baremetal')
|
||||
mock_zc.return_value.register_service.assert_called_with(mock.ANY)
|
||||
self.assertEqual(CONF.mdns.registration_attempts,
|
||||
mock_zc.return_value.register_service.call_count)
|
||||
self.assertEqual(CONF.mdns.registration_attempts - 1,
|
||||
mock_sleep.call_count)
|
||||
|
||||
|
||||
class ParseEndpointTestCase(base.BaseTest):
|
||||
|
||||
def test_simple(self):
|
||||
endpoint = mdns._parse_endpoint('http://127.0.0.1')
|
||||
self.assertEqual(1, len(endpoint.addresses))
|
||||
self.assertEqual('127.0.0.1', socket.inet_ntoa(endpoint.addresses[0]))
|
||||
self.assertEqual(80, endpoint.port)
|
||||
self.assertEqual({}, endpoint.params)
|
||||
self.assertIsNone(endpoint.hostname)
|
||||
|
||||
def test_simple_https(self):
|
||||
endpoint = mdns._parse_endpoint('https://127.0.0.1')
|
||||
self.assertEqual(1, len(endpoint.addresses))
|
||||
self.assertEqual('127.0.0.1', socket.inet_ntoa(endpoint.addresses[0]))
|
||||
self.assertEqual(443, endpoint.port)
|
||||
self.assertEqual({}, endpoint.params)
|
||||
self.assertIsNone(endpoint.hostname)
|
||||
|
||||
def test_with_path_and_port(self):
|
||||
endpoint = mdns._parse_endpoint('http://127.0.0.1:8080/bm')
|
||||
self.assertEqual(1, len(endpoint.addresses))
|
||||
self.assertEqual('127.0.0.1', socket.inet_ntoa(endpoint.addresses[0]))
|
||||
self.assertEqual(8080, endpoint.port)
|
||||
self.assertEqual({'path': '/bm', 'protocol': 'http'}, endpoint.params)
|
||||
self.assertIsNone(endpoint.hostname)
|
||||
|
||||
@mock.patch.object(socket, 'getaddrinfo', autospec=True)
|
||||
def test_resolve(self, mock_resolve):
|
||||
mock_resolve.return_value = [
|
||||
(socket.AF_INET, None, None, None, ('1.2.3.4',)),
|
||||
(socket.AF_INET6, None, None, None, ('::2', 'scope')),
|
||||
]
|
||||
endpoint = mdns._parse_endpoint('http://example.com')
|
||||
self.assertEqual(2, len(endpoint.addresses))
|
||||
self.assertEqual('1.2.3.4', socket.inet_ntoa(endpoint.addresses[0]))
|
||||
self.assertEqual('::2', socket.inet_ntop(socket.AF_INET6,
|
||||
endpoint.addresses[1]))
|
||||
self.assertEqual(80, endpoint.port)
|
||||
self.assertEqual({}, endpoint.params)
|
||||
self.assertEqual('example.com.', endpoint.hostname)
|
||||
mock_resolve.assert_called_once_with('example.com', 80, mock.ANY,
|
||||
socket.IPPROTO_TCP)
|
@ -12,19 +12,25 @@
|
||||
# limitations under the License.
|
||||
|
||||
import datetime
|
||||
import errno
|
||||
import ipaddress
|
||||
import logging as pylog
|
||||
import os
|
||||
import warnings
|
||||
|
||||
import futurist
|
||||
from ironic_lib import auth_basic
|
||||
from ironic_lib import exception
|
||||
from keystonemiddleware import auth_token
|
||||
from openstack.baremetal.v1 import node
|
||||
from oslo_concurrency import processutils
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log
|
||||
from oslo_middleware import cors as cors_middleware
|
||||
from oslo_middleware import healthcheck as healthcheck_middleware
|
||||
from oslo_utils import excutils
|
||||
import webob
|
||||
|
||||
from ironic_inspector.common import auth_basic
|
||||
from ironic_inspector.common import exception
|
||||
from ironic_inspector.common.i18n import _
|
||||
from ironic_inspector import policy
|
||||
|
||||
@ -339,3 +345,92 @@ def iso_timestamp(timestamp=None, tz=datetime.timezone.utc):
|
||||
return None
|
||||
date = datetime.datetime.fromtimestamp(timestamp, tz=tz)
|
||||
return date.isoformat()
|
||||
|
||||
|
||||
def execute(*cmd, use_standard_locale=False, log_stdout=True, **kwargs):
|
||||
"""Convenience wrapper around oslo's execute() method.
|
||||
|
||||
Executes and logs results from a system command. See docs for
|
||||
oslo_concurrency.processutils.execute for usage.
|
||||
|
||||
:param cmd: positional arguments to pass to processutils.execute()
|
||||
:param use_standard_locale: Defaults to False. If set to True,
|
||||
execute command with standard locale
|
||||
added to environment variables.
|
||||
:param log_stdout: Defaults to True. If set to True, logs the output.
|
||||
:param kwargs: keyword arguments to pass to processutils.execute()
|
||||
:returns: (stdout, stderr) from process execution
|
||||
:raises: UnknownArgumentError on receiving unknown arguments
|
||||
:raises: ProcessExecutionError
|
||||
:raises: OSError
|
||||
"""
|
||||
if use_standard_locale:
|
||||
env = kwargs.pop('env_variables', os.environ.copy())
|
||||
env['LC_ALL'] = 'C'
|
||||
kwargs['env_variables'] = env
|
||||
|
||||
if kwargs.pop('run_as_root', False):
|
||||
warnings.warn("run_as_root is deprecated and has no effect",
|
||||
DeprecationWarning)
|
||||
|
||||
def _log(stdout, stderr):
|
||||
if log_stdout:
|
||||
try:
|
||||
LOG.debug('Command stdout is: "%s"', stdout)
|
||||
except UnicodeEncodeError:
|
||||
LOG.debug('stdout contains invalid UTF-8 characters')
|
||||
stdout = (stdout.encode('utf8', 'surrogateescape')
|
||||
.decode('utf8', 'ignore'))
|
||||
LOG.debug('Command stdout is: "%s"', stdout)
|
||||
try:
|
||||
LOG.debug('Command stderr is: "%s"', stderr)
|
||||
except UnicodeEncodeError:
|
||||
LOG.debug('stderr contains invalid UTF-8 characters')
|
||||
stderr = (stderr.encode('utf8', 'surrogateescape')
|
||||
.decode('utf8', 'ignore'))
|
||||
LOG.debug('Command stderr is: "%s"', stderr)
|
||||
|
||||
try:
|
||||
result = processutils.execute(*cmd, **kwargs)
|
||||
except FileNotFoundError:
|
||||
with excutils.save_and_reraise_exception():
|
||||
LOG.debug('Command not found: "%s"', ' '.join(map(str, cmd)))
|
||||
except processutils.ProcessExecutionError as exc:
|
||||
with excutils.save_and_reraise_exception():
|
||||
_log(exc.stdout, exc.stderr)
|
||||
else:
|
||||
_log(result[0], result[1])
|
||||
return result
|
||||
|
||||
|
||||
def get_route_source(dest, ignore_link_local=True):
|
||||
"""Get the IP address to send packages to destination."""
|
||||
try:
|
||||
out, _err = execute('ip', 'route', 'get', dest)
|
||||
except (EnvironmentError, processutils.ProcessExecutionError) as e:
|
||||
LOG.warning('Cannot get route to host %(dest)s: %(err)s',
|
||||
{'dest': dest, 'err': e})
|
||||
return
|
||||
|
||||
try:
|
||||
source = out.strip().split('\n')[0].split('src')[1].split()[0]
|
||||
if (ipaddress.ip_address(source).is_link_local
|
||||
and ignore_link_local):
|
||||
LOG.debug('Ignoring link-local source to %(dest)s: %(rec)s',
|
||||
{'dest': dest, 'rec': out})
|
||||
return
|
||||
return source
|
||||
except (IndexError, ValueError):
|
||||
LOG.debug('No route to host %(dest)s, route record: %(rec)s',
|
||||
{'dest': dest, 'rec': out})
|
||||
|
||||
|
||||
def unlink_without_raise(path):
|
||||
try:
|
||||
os.unlink(path)
|
||||
except OSError as e:
|
||||
if e.errno == errno.ENOENT:
|
||||
return
|
||||
else:
|
||||
LOG.warning("Failed to unlink %(path)s, error: %(e)s",
|
||||
{'path': path, 'e': e})
|
||||
|
@ -12,13 +12,13 @@
|
||||
|
||||
import socket
|
||||
|
||||
from ironic_lib import utils as il_utils
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log
|
||||
from oslo_service import service
|
||||
from oslo_service import wsgi
|
||||
|
||||
from ironic_inspector import main
|
||||
from ironic_inspector import utils
|
||||
|
||||
LOG = log.getLogger(__name__)
|
||||
CONF = cfg.CONF
|
||||
@ -30,7 +30,7 @@ class WSGIService(service.Service):
|
||||
def __init__(self):
|
||||
self.app = main.get_app()
|
||||
if CONF.listen_unix_socket:
|
||||
il_utils.unlink_without_raise(CONF.listen_unix_socket)
|
||||
utils.unlink_without_raise(CONF.listen_unix_socket)
|
||||
self.server = wsgi.Server(CONF, 'ironic_inspector',
|
||||
self.app,
|
||||
socket_family=socket.AF_UNIX,
|
||||
@ -58,7 +58,7 @@ class WSGIService(service.Service):
|
||||
"""
|
||||
self.server.stop()
|
||||
if CONF.listen_unix_socket:
|
||||
il_utils.unlink_without_raise(CONF.listen_unix_socket)
|
||||
utils.unlink_without_raise(CONF.listen_unix_socket)
|
||||
|
||||
def wait(self):
|
||||
"""Wait for the service to stop serving this API.
|
||||
|
@ -8,7 +8,6 @@ construct>=2.9.39 # MIT
|
||||
eventlet>=0.27.0 # MIT
|
||||
Flask>=1.1.0 # BSD
|
||||
futurist>=1.2.0 # Apache-2.0
|
||||
ironic-lib>=4.3.0 # Apache-2.0
|
||||
jsonpath-rw>=1.2.0 # Apache-2.0
|
||||
jsonschema>=3.2.0 # MIT
|
||||
keystoneauth1>=4.2.0 # Apache-2.0
|
||||
@ -36,3 +35,5 @@ stevedore>=1.20.0 # Apache-2.0
|
||||
SQLAlchemy>=1.4.0 # MIT
|
||||
tooz>=2.5.1 # Apache-2.0
|
||||
microversion_parse>=1.0.1 # Apache-2.0
|
||||
zeroconf>=0.24.0 # LGPL
|
||||
bcrypt>=3.1.3 # Apache-2.0
|
||||
|
@ -1,7 +1,6 @@
|
||||
[DEFAULT]
|
||||
output_file = example.conf
|
||||
namespace = ironic_inspector
|
||||
namespace = ironic_lib.mdns
|
||||
namespace = keystonemiddleware.auth_token
|
||||
namespace = oslo.db
|
||||
namespace = oslo.log
|
||||
|
Loading…
x
Reference in New Issue
Block a user