Migrate ironic_lib to ironic

This migrates the ironic-lib code and its usages into ironic.common.
Relevant unit tests were migrated as well.

Also removes support for ironic-lib from CI and devstack.

Change-Id: Ic96a09735f04ff98c6fec23d782566da3061c409
Jay Faulkner 2024-12-17 15:50:24 -08:00
parent 9479347cf4
commit 74d239d4eb
106 changed files with 4032 additions and 263 deletions

View File

@ -35,7 +35,6 @@ set +o pipefail
# Set up default directories
GITDIR["python-ironicclient"]=$DEST/python-ironicclient
GITDIR["ironic-lib"]=$DEST/ironic-lib
GITREPO["pyghmi"]=${PYGHMI_REPO:-${GIT_BASE}/x/pyghmi}
GITBRANCH["pyghmi"]=${PYGHMI_BRANCH:-master}
@ -1200,11 +1199,6 @@ function install_ironic {
done
fi
if use_library_from_git "ironic-lib"; then
git_clone_by_name "ironic-lib"
setup_dev_lib "ironic-lib"
fi
setup_develop $IRONIC_DIR
install_apache_wsgi
@ -2956,9 +2950,6 @@ function build_tinyipa_ramdisk {
export AUTHORIZE_SSH=true
export SSH_PUBLIC_KEY=$IRONIC_ANSIBLE_SSH_KEY.pub
fi
if [ -e $DEST/ironic-lib ]; then
export IRONIC_LIB_SOURCE="$DEST/ironic-lib"
fi
make
cp tinyipa.gz $ramdisk_path
cp tinyipa.vmlinuz $kernel_path
@ -3009,12 +3000,6 @@ function build_ipa_dib_ramdisk {
install_diskimage_builder
fi
if [ -e $DEST/ironic-lib ]; then
export IRONIC_LIB_FROM_SOURCE=true
export DIB_REPOLOCATION_ironic_lib=$DEST/ironic-lib
export DIB_REPOREF_ironic_lib=$TARGET_BRANCH
fi
echo "Building IPA ramdisk with DIB options: $IRONIC_DIB_RAMDISK_OPTIONS"
if is_deploy_iso_required; then
IRONIC_DIB_RAMDISK_OPTIONS+=" iso"

View File

@ -87,8 +87,6 @@ or via notifier plugin (such as is done with ironic-prometheus-exporter).
Ironic service model. A separate webserver process presently does not have
the capability of triggering the call to retrieve and transmit the data.
.. NOTE::
This functionality requires ironic-lib version 5.4.0 to be installed.
Types of Metrics Emitted
========================

View File

@ -15,7 +15,6 @@
# License for the specific language governing permissions and limitations
# under the License.
from ironic_lib import auth_basic
import keystonemiddleware.audit as audit_middleware
from keystonemiddleware import auth_token
from oslo_config import cfg
@ -31,6 +30,7 @@ from ironic.api import hooks
from ironic.api import middleware
from ironic.api.middleware import auth_public_routes
from ironic.api.middleware import json_ext
from ironic.common import auth_basic
from ironic.common import exception
from ironic.conf import CONF

View File

@ -12,7 +12,6 @@
from http import client as http_client
from ironic_lib import metrics_utils
from oslo_config import cfg
from oslo_utils import uuidutils
import pecan
@ -27,6 +26,7 @@ from ironic.api import method
from ironic.common import args
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic import objects

View File

@ -13,7 +13,6 @@
# License for the specific language governing permissions and limitations
# under the License.
from ironic_lib import metrics_utils
from pecan import rest
from ironic import api
@ -21,6 +20,7 @@ from ironic.api.controllers.v1 import utils as api_utils
from ironic.api import method
from ironic.common import args
from ironic.common import exception
from ironic.common import metrics_utils
from ironic import objects
METRICS = metrics_utils.get_metrics_logger(__name__)
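
For context, a hedged sketch of how the migrated metrics_utils import is typically used in these API controllers; the controller and method names below are hypothetical, only the get_metrics_logger()/METRICS.timer() pattern comes from this change.

from ironic.common import metrics_utils

METRICS = metrics_utils.get_metrics_logger(__name__)


class ExampleController(object):

    @METRICS.timer('ExampleController.get_all')
    def get_all(self):
        # the decorator emits the call duration (in ms) under the
        # 'ExampleController.get_all' metric name
        return []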

View File

@ -15,7 +15,6 @@
from http import client as http_client
from ironic_lib import metrics_utils
from oslo_utils import uuidutils
from pecan import rest
@ -29,6 +28,7 @@ from ironic.api import method
from ironic.common import args
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic import objects
METRICS = metrics_utils.get_metrics_logger(__name__)

View File

@ -10,7 +10,6 @@
# License for the specific language governing permissions and limitations
# under the License.
from ironic_lib import metrics_utils
from oslo_log import log
from oslo_utils import timeutils
from pecan import rest
@ -22,6 +21,7 @@ from ironic.api import method
from ironic.common import args
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
import ironic.conf
from ironic import objects

View File

@ -12,7 +12,6 @@
from http import client as http_client
from ironic_lib import metrics_utils
from oslo_log import log
from oslo_utils import strutils
from oslo_utils import uuidutils
@ -29,6 +28,7 @@ from ironic.api import method
from ironic.common import args
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
import ironic.conf
from ironic import objects

View File

@ -15,7 +15,6 @@
from http import client as http_client
from ironic_lib import metrics_utils
from pecan import rest
from ironic import api
@ -25,6 +24,7 @@ from ironic.api import method
from ironic.common import args
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.drivers import base as driver_base

View File

@ -12,7 +12,6 @@
from http import client as http_client
from ironic_lib import metrics_utils
from oslo_log import log
import pecan
@ -20,6 +19,7 @@ from ironic.api.controllers.v1 import utils as api_utils
from ironic.api import method
from ironic.common import args
from ironic.common import exception
from ironic.common import metrics_utils
METRICS = metrics_utils.get_metrics_logger(__name__)

View File

@ -13,13 +13,13 @@
# License for the specific language governing permissions and limitations
# under the License.
from ironic_lib import metrics_utils
from pecan import rest
from ironic import api
from ironic.api.controllers.v1 import utils as api_utils
from ironic.api import method
from ironic.common import args
from ironic.common import metrics_utils
from ironic import objects
METRICS = metrics_utils.get_metrics_logger(__name__)

View File

@ -19,7 +19,6 @@ from http import client as http_client
import json
import urllib.parse
from ironic_lib import metrics_utils
import jsonschema
from jsonschema import exceptions as json_schema_exc
from oslo_log import log
@ -46,6 +45,7 @@ from ironic.common import boot_devices
from ironic.common import boot_modes
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import policy
from ironic.common import states as ir_states
from ironic.conductor import steps as conductor_steps

View File

@ -15,7 +15,6 @@
from http import client as http_client
from ironic_lib import metrics_utils
from oslo_log import log
from oslo_utils import uuidutils
from pecan import rest
@ -29,6 +28,7 @@ from ironic.api import method
from ironic.common import args
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import states as ir_states
from ironic import objects

View File

@ -12,7 +12,6 @@
from http import client as http_client
from ironic_lib import metrics_utils
from oslo_utils import uuidutils
import pecan
@ -26,6 +25,7 @@ from ironic.api import method
from ironic.common import args
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import states as ir_states
from ironic import objects

View File

@ -12,7 +12,6 @@
from http import client as http_client
from ironic_lib import metrics_utils
from oslo_log import log
from oslo_utils import strutils
from oslo_utils import uuidutils
@ -29,6 +28,7 @@ from ironic.api import method
from ironic.common import args
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
import ironic.conf
from ironic import objects

View File

@ -10,7 +10,6 @@
# License for the specific language governing permissions and limitations
# under the License.
from ironic_lib import metrics_utils
from oslo_config import cfg
import pecan
@ -20,6 +19,7 @@ from ironic.api.controllers.v1 import versions
from ironic.api import method
from ironic.api import validation
from ironic.common.i18n import _
from ironic.common import metrics_utils
CONF = cfg.CONF

View File

@ -14,7 +14,6 @@
from http import client as http_client
from ironic_lib import metrics_utils
from oslo_utils import uuidutils
from pecan import rest
@ -27,6 +26,7 @@ from ironic.api import method
from ironic.common import args
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic import objects
METRICS = metrics_utils.get_metrics_logger(__name__)

View File

@ -14,7 +14,6 @@
from http import client as http_client
from ironic_lib import metrics_utils
from oslo_utils import uuidutils
from pecan import rest
@ -27,6 +26,7 @@ from ironic.api import method
from ironic.common import args
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import policy
from ironic import objects

ironic/common/auth_basic.py (new file, 203 lines)
View File

@ -0,0 +1,203 @@
# Copyright 2020 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import binascii
import logging
import bcrypt
import webob
from ironic.common import exception
from ironic.common.i18n import _
LOG = logging.getLogger(__name__)
class BasicAuthMiddleware(object):
"""Middleware which performs HTTP basic authentication on requests
"""
def __init__(self, app, auth_file):
self.app = app
self.auth_file = auth_file
validate_auth_file(auth_file)
def format_exception(self, e):
result = {'error': {'message': str(e), 'code': e.code}}
headers = list(e.headers.items()) + [
('Content-Type', 'application/json')
]
return webob.Response(content_type='application/json',
status_code=e.code,
json_body=result,
headerlist=headers)
def __call__(self, env, start_response):
try:
token = parse_header(env)
username, password = parse_token(token)
env.update(authenticate(self.auth_file, username, password))
return self.app(env, start_response)
except exception.IronicException as e:
response = self.format_exception(e)
return response(env, start_response)
def authenticate(auth_file, username, password):
"""Finds username and password match in Apache style user auth file
The user auth file format is expected to comply with the Apache
documentation [1]; however, the bcrypt password digest is the *only*
digest format supported.
[1] https://httpd.apache.org/docs/current/misc/password_encryptions.html
:param: auth_file: Path to user auth file
:param: username: Username to authenticate
:param: password: Password encoded as bytes
:returns: A dictionary of WSGI environment values to append to the request
:raises: Unauthorized, if no file entries match supplied username/password
"""
line_prefix = username + ':'
try:
with open(auth_file, 'r') as f:
for line in f:
entry = line.strip()
if entry and entry.startswith(line_prefix):
return auth_entry(entry, password)
except OSError as exc:
LOG.error('Problem reading auth user file: %s', exc)
raise exception.ConfigInvalid(
error_msg=_('Problem reading auth user file'))
# reached end of file with no matches
LOG.info('User %s not found', username)
unauthorized()
def auth_entry(entry, password):
"""Compare a password with a single user auth file entry
:param: entry: Line from auth user file to use for authentication
:param: password: Password encoded as bytes
:returns: A dictionary of WSGI environment values to append to the request
:raises: Unauthorized, if the entry doesn't match supplied password or
if the entry is encrypted with a method other than bcrypt
"""
username, encrypted = parse_entry(entry)
if not bcrypt.checkpw(password, encrypted):
LOG.info('Password for %s does not match', username)
unauthorized()
return {
'HTTP_X_USER': username,
'HTTP_X_USER_NAME': username
}
def validate_auth_file(auth_file):
"""Read the auth user file and validate its correctness
:param: auth_file: Path to user auth file
:raises: ConfigInvalid on validation error
"""
try:
with open(auth_file, 'r') as f:
for line in f:
entry = line.strip()
if entry and ':' in entry:
parse_entry(entry)
except OSError:
raise exception.ConfigInvalid(
error_msg=_('Problem reading auth user file: %s') % auth_file)
def parse_entry(entry):
"""Extrace the username and encrypted password from a user auth file entry
:param: entry: Line from auth user file to use for authentication
:returns: a tuple of username and encrypted password
:raises: ConfigInvalid if the password is not in the supported bcrypt
format
"""
username, encrypted_str = entry.split(':', maxsplit=1)
encrypted = encrypted_str.encode('utf-8')
if encrypted[:4] not in (b'$2y$', b'$2a$', b'$2b$'):
error_msg = _('Only bcrypt digested passwords are supported for '
'%(username)s') % {'username': username}
raise exception.ConfigInvalid(error_msg=error_msg)
return username, encrypted
def parse_token(token):
"""Parse the token portion of the Authentication header value
:param: token: Token value from basic authorization header
:returns: tuple of username, password
:raises: Unauthorized, if username and password could not be parsed for any
reason
"""
try:
if isinstance(token, str):
token = token.encode('utf-8')
auth_pair = base64.b64decode(token, validate=True)
(username, password) = auth_pair.split(b':', maxsplit=1)
return (username.decode('utf-8'), password)
except (TypeError, binascii.Error, ValueError) as exc:
LOG.info('Could not decode authorization token: %s', exc)
raise exception.BadRequest(_('Could not decode authorization token'))
def parse_header(env):
"""Parse WSGI environment for Authorization header of type Basic
:param: env: WSGI environment to get header from
:returns: Token portion of the header value
:raises: Unauthorized, if header is missing or if the type is not Basic
"""
try:
auth_header = env.pop('HTTP_AUTHORIZATION')
except KeyError:
LOG.info('No authorization token received')
unauthorized(_('Authorization required'))
try:
auth_type, token = auth_header.strip().split(maxsplit=1)
except (ValueError, AttributeError) as exc:
LOG.info('Could not parse Authorization header: %s', exc)
raise exception.BadRequest(_('Could not parse Authorization header'))
if auth_type.lower() != 'basic':
msg = _('Unsupported authorization type "%s"') % auth_type
LOG.info(msg)
raise exception.BadRequest(msg)
return token
def unauthorized(message=None):
"""Raise an Unauthorized exception to prompt for basic authentication
:param: message: Optional message for the exception
:raises: Unauthorized with WWW-Authenticate header set
"""
if not message:
message = _('Incorrect username or password')
raise exception.Unauthorized(message)
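
A minimal usage sketch (not part of the change): creating a bcrypt htpasswd-style entry that authenticate() above accepts; the file path and credentials are made-up examples.

import bcrypt

from ironic.common import auth_basic

password = b's3cret'
entry = 'admin:' + bcrypt.hashpw(password, bcrypt.gensalt()).decode('utf-8')
with open('/tmp/htpasswd', 'w') as f:
    f.write(entry + '\n')

# Returns {'HTTP_X_USER': 'admin', 'HTTP_X_USER_NAME': 'admin'} on success
# and raises Unauthorized on a mismatch.
print(auth_basic.authenticate('/tmp/htpasswd', 'admin', password))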

View File

@ -15,17 +15,117 @@
# under the License.
"""Ironic specific exceptions list."""
import collections
from http import client as http_client
import json
from ironic_lib.exception import IronicException
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from ironic.common.i18n import _
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
def _ensure_exception_kwargs_serializable(exc_class_name, kwargs):
"""Ensure that kwargs are serializable
Ensure that all kwargs passed to exception constructor can be passed over
RPC, by trying to convert them to JSON, or, as a last resort, to string.
If it is not possible, unserializable kwargs will be removed, letting the
receiver handle the exception string as it is configured to.
:param exc_class_name: an IronicException class name.
:param kwargs: a dictionary of keyword arguments passed to the exception
constructor.
:returns: a dictionary of serializable keyword arguments.
"""
serializers = [(json.dumps, _('when converting to JSON')),
(str, _('when converting to string'))]
exceptions = collections.defaultdict(list)
serializable_kwargs = {}
for k, v in kwargs.items():
for serializer, msg in serializers:
try:
serializable_kwargs[k] = serializer(v)
exceptions.pop(k, None)
break
except Exception as e:
exceptions[k].append(
'(%(serializer_type)s) %(e_type)s: %(e_contents)s' %
{'serializer_type': msg, 'e_contents': e,
'e_type': e.__class__.__name__})
if exceptions:
LOG.error("One or more arguments passed to the %(exc_class)s "
"constructor as kwargs can not be serialized. The "
"serialized arguments: %(serialized)s. These "
"unserialized kwargs were dropped because of the "
"exceptions encountered during their "
"serialization:\n%(errors)s",
dict(errors=';\n'.join("%s: %s" % (k, '; '.join(v))
for k, v in exceptions.items()),
exc_class=exc_class_name,
serialized=serializable_kwargs))
# We might be able to actually put the following keys' values into
# format string, but there is no guarantee, drop it just in case.
for k in exceptions:
del kwargs[k]
return serializable_kwargs
class IronicException(Exception):
"""Base Ironic Exception
To correctly use this class, inherit from it and define
a '_msg_fmt' property. That _msg_fmt will get printf'd
with the keyword arguments provided to the constructor.
If you need to access the message from an exception you should use
str(exc)
"""
_msg_fmt = _("An unknown exception occurred.")
code = 500
headers = {}
safe = False
def __init__(self, message=None, **kwargs):
self.kwargs = _ensure_exception_kwargs_serializable(
self.__class__.__name__, kwargs)
if 'code' not in self.kwargs:
try:
self.kwargs['code'] = self.code
except AttributeError:
pass
else:
self.code = int(kwargs['code'])
if not message:
try:
message = self._msg_fmt % kwargs
except Exception:
with excutils.save_and_reraise_exception() as ctxt:
# kwargs doesn't match a variable in the message
# log the issue and the kwargs
prs = ', '.join('%s=%s' % pair for pair in kwargs.items())
LOG.exception('Exception in string format operation '
'(arguments %s)', prs)
if not CONF.errors.fatal_exception_format_errors:
# at least get the core message out if something
# happened
message = self._msg_fmt
ctxt.reraise = False
super(IronicException, self).__init__(message)
class NotAuthorized(IronicException):
_msg_fmt = _("Not authorized.")
code = http_client.FORBIDDEN
@ -861,8 +961,7 @@ class ImageRefIsARedirect(IronicException):
def __init__(self, image_ref=None, redirect_url=None, msg=None):
self.redirect_url = redirect_url
# Kwargs are expected by ironic_lib's IronicException to convert
# the message.
# Kwargs are expected by IronicException to convert the message.
super(ImageRefIsARedirect, self).__init__(
message=msg,
image_ref=image_ref,
@ -945,3 +1044,21 @@ class ChildNodeLocked(Conflict):
"and we are unable to perform any action on it at this "
"time. Please retry after the current operation is "
"completed.")
class MetricsNotSupported(IronicException):
_msg_fmt = _("Metrics action is not supported. You may need to "
"adjust the [metrics] section in ironic.conf.")
class ServiceLookupFailure(IronicException):
_msg_fmt = _("Cannot find %(service)s service through multicast.")
class ServiceRegistrationFailure(IronicException):
_msg_fmt = _("Cannot register %(service)s service: %(error)s")
class Unauthorized(IronicException):
code = http_client.UNAUTHORIZED
headers = {'WWW-Authenticate': 'Basic realm="Baremetal API"'}
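
An illustrative sketch of how IronicException subclasses format their messages; ExampleNotFound is a hypothetical class, not part of the migrated code.

from http import client as http_client

from ironic.common import exception
from ironic.common.i18n import _


class ExampleNotFound(exception.IronicException):
    _msg_fmt = _("Example %(name)s could not be found.")
    code = http_client.NOT_FOUND


try:
    raise ExampleNotFound(name='node-1')
except ExampleNotFound as exc:
    # kwargs are interpolated into _msg_fmt; a serializable copy is kept
    # on exc.kwargs for RPC transport
    print(str(exc), exc.code)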

View File

@ -15,7 +15,6 @@ import os.path
import shutil
from urllib import parse as urlparse
from ironic_lib import utils as ironic_utils
from oslo_log import log
from ironic.common import exception
@ -114,7 +113,7 @@ class LocalPublisher(AbstractPublisher):
def unpublish(self, file_name):
published_file = os.path.join(
CONF.deploy.http_root, self.image_subdir, file_name)
ironic_utils.unlink_without_raise(published_file)
utils.unlink_without_raise(published_file)
class SwiftPublisher(AbstractPublisher):

View File

@ -101,7 +101,7 @@ def create_vfat_image(output_file, files_info=None, parameters=None,
mounting, creating filesystem, copying files, etc.
"""
try:
# TODO(sbaker): use ironic_lib.utils.dd when rootwrap has been removed
# TODO(sbaker): use utils.dd when rootwrap has been removed
utils.execute('dd', 'if=/dev/zero', 'of=%s' % output_file, 'count=1',
'bs=%dKiB' % fs_size_kib)
except processutils.ProcessExecutionError as e:
@ -113,8 +113,7 @@ def create_vfat_image(output_file, files_info=None, parameters=None,
# The label helps ramdisks to find the partition containing
# the parameters (by using /dev/disk/by-label/ir-vfd-dev).
# NOTE: FAT filesystem label can be up to 11 characters long.
# TODO(sbaker): use ironic_lib.utils.mkfs when rootwrap has been
# removed
# TODO(sbaker): use utils.mkfs when rootwrap has been removed
utils.execute('mkfs', '-t', 'vfat', '-n',
'ir-vfd-dev', output_file)
except processutils.ProcessExecutionError as e:

View File

View File

@ -0,0 +1,241 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""A simple JSON RPC client.
This client is compatible with any JSON RPC 2.0 implementation, including ours.
"""
import logging
from oslo_config import cfg
from oslo_utils import importutils
from oslo_utils import netutils
from oslo_utils import strutils
from oslo_utils import uuidutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import keystone
from ironic.conf import json_rpc
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
_SESSION = None
def _get_session():
global _SESSION
if _SESSION is None:
kwargs = {}
auth_strategy = json_rpc.auth_strategy()
if auth_strategy != 'keystone':
auth_type = 'none' if auth_strategy == 'noauth' else auth_strategy
CONF.set_default('auth_type', auth_type, group='json_rpc')
# Deprecated, remove in W
if auth_strategy == 'http_basic':
if CONF.json_rpc.http_basic_username:
kwargs['username'] = CONF.json_rpc.http_basic_username
if CONF.json_rpc.http_basic_password:
kwargs['password'] = CONF.json_rpc.http_basic_password
auth = keystone.get_auth('json_rpc', **kwargs)
session = keystone.get_session('json_rpc', auth=auth)
headers = {
'Content-Type': 'application/json'
}
# Adds options like connect_retries
_SESSION = keystone.get_adapter('json_rpc', session=session,
additional_headers=headers)
return _SESSION
class Client(object):
"""JSON RPC client with ironic exception handling."""
allowed_exception_namespaces = [
"ironic.common.exception.",
"ironic_inspector.utils.",
]
def __init__(self, serializer, version_cap=None):
self.serializer = serializer
self.version_cap = version_cap
def can_send_version(self, version):
return _can_send_version(version, self.version_cap)
def prepare(self, topic, version=None):
"""Prepare the client to transmit a request.
:param topic: Topic which is being addressed. Typically this
is the hostname of the remote json-rpc service.
:param version: The RPC API version to utilize.
"""
host = topic.split('.', 1)[1]
host, port = netutils.parse_host_port(host)
return _CallContext(
host, self.serializer, version=version,
version_cap=self.version_cap,
allowed_exception_namespaces=self.allowed_exception_namespaces,
port=port)
class _CallContext(object):
"""Wrapper object for compatibility with oslo.messaging API."""
def __init__(self, host, serializer, version=None, version_cap=None,
allowed_exception_namespaces=(), port=None):
if not port:
self.port = CONF.json_rpc.port
else:
self.port = int(port)
self.host = host
self.serializer = serializer
self.version = version
self.version_cap = version_cap
self.allowed_exception_namespaces = allowed_exception_namespaces
def _is_known_exception(self, class_name):
for ns in self.allowed_exception_namespaces:
if class_name.startswith(ns):
return True
return False
def _handle_error(self, error):
if not error:
return
message = error['message']
try:
cls = error['data']['class']
except KeyError:
LOG.error("Unexpected error from RPC: %s", error)
raise exception.IronicException(
_("Unexpected error raised by RPC"))
else:
if not self._is_known_exception(cls):
# NOTE(dtantsur): protect against arbitrary code execution
LOG.error("Unexpected error from RPC: %s", error)
raise exception.IronicException(
_("Unexpected error raised by RPC"))
raise importutils.import_object(cls, message,
code=error.get('code', 500))
def call(self, context, method, version=None, **kwargs):
"""Call conductor RPC.
Versioned objects are automatically serialized and deserialized.
:param context: Security context.
:param method: Method name.
:param version: RPC API version to use.
:param kwargs: Keyword arguments to pass.
:return: RPC result (if any).
"""
return self._request(context, method, cast=False, version=version,
**kwargs)
def cast(self, context, method, version=None, **kwargs):
"""Call conductor RPC asynchronously.
Versioned objects are automatically serialized and deserialized.
:param context: Security context.
:param method: Method name.
:param version: RPC API version to use.
:param kwargs: Keyword arguments to pass.
:return: None
"""
return self._request(context, method, cast=True, version=version,
**kwargs)
def _request(self, context, method, cast=False, version=None, **kwargs):
"""Call conductor RPC.
Versioned objects are automatically serialized and deserialized.
:param context: Security context.
:param method: Method name.
:param cast: If true, use a JSON RPC notification.
:param version: RPC API version to use.
:param kwargs: Keyword arguments to pass.
:return: RPC result (if any).
"""
params = {key: self.serializer.serialize_entity(context, value)
for key, value in kwargs.items()}
params['context'] = context.to_dict()
if version is None:
version = self.version
if version is not None:
_check_version(version, self.version_cap)
params['rpc.version'] = version
body = {
"jsonrpc": "2.0",
"method": method,
"params": params,
}
if not cast:
body['id'] = (getattr(context, 'request_id', None)
or uuidutils.generate_uuid())
scheme = 'http'
if CONF.json_rpc.use_ssl:
scheme = 'https'
url = '%s://%s:%d' % (scheme,
netutils.escape_ipv6(self.host),
self.port)
LOG.debug("RPC %s to %s with %s", method, url,
strutils.mask_dict_password(body))
try:
result = _get_session().post(url, json=body)
except Exception as exc:
LOG.debug('RPC %s to %s failed with %s', method, url, exc)
raise
LOG.debug('RPC %s to %s returned %s', method, url,
strutils.mask_password(result.text or '<None>'))
if not cast:
result = result.json()
self._handle_error(result.get('error'))
result = self.serializer.deserialize_entity(context,
result['result'])
return result
def _can_send_version(requested, version_cap):
if requested is None or version_cap is None:
return True
requested_parts = [int(item) for item in requested.split('.', 1)]
version_cap_parts = [int(item) for item in version_cap.split('.', 1)]
if requested_parts[0] != version_cap_parts[0]:
return False # major version mismatch
else:
return requested_parts[1] <= version_cap_parts[1]
def _check_version(requested, version_cap):
if not _can_send_version(requested, version_cap):
raise RuntimeError(_("Cannot send RPC request: requested version "
"%(requested)s, maximum allowed version is "
"%(version_cap)s") % {'requested': requested,
'version_cap': version_cap})
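
A hedged sketch of driving the client above; it assumes the module lands at ironic.common.json_rpc.client, uses a trivial pass-through serializer instead of ironic's RPC object serializer, and the topic and method names are made up.

from ironic.common.json_rpc import client as rpc_client


class PassthroughSerializer(object):
    def serialize_entity(self, context, entity):
        return entity

    def deserialize_entity(self, context, entity):
        return entity


client = rpc_client.Client(PassthroughSerializer(), version_cap='1.0')
cctxt = client.prepare('ironic.conductor.example.com', version='1.0')
# 'context' must provide to_dict(); 'example_method' stands in for a real
# conductor RPC method:
# result = cctxt.call(context, 'example_method', node_id='node-1')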

View File

@ -0,0 +1,281 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implementation of JSON RPC for communication between API and conductors.
This module implements a subset of JSON RPC 2.0 as defined in
https://www.jsonrpc.org/specification. Main differences:
* No support for batched requests.
* No support for positional arguments passing.
* No JSON RPC 1.0 fallback.
"""
import json
import logging
from keystonemiddleware import auth_token
from oslo_config import cfg
import oslo_messaging
from oslo_utils import strutils
import webob
from ironic.common import auth_basic
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common.json_rpc import wsgi
from ironic.conf import json_rpc
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
_DENY_LIST = {'init_host', 'del_host', 'target', 'iter_nodes'}
def _build_method_map(manager):
"""Build mapping from method names to their bodies.
:param manager: A conductor manager.
:return: dict with mapping
"""
result = {}
for method in dir(manager):
if method.startswith('_') or method in _DENY_LIST:
continue
func = getattr(manager, method)
if not callable(func):
continue
LOG.debug('Adding RPC method %s', method)
result[method] = func
return result
class JsonRpcError(exception.IronicException):
pass
class ParseError(JsonRpcError):
code = -32700
_msg_fmt = _("Invalid JSON received by RPC server")
class InvalidRequest(JsonRpcError):
code = -32600
_msg_fmt = _("Invalid request object received by RPC server")
class MethodNotFound(JsonRpcError):
code = -32601
_msg_fmt = _("Method %(name)s was not found")
class InvalidParams(JsonRpcError):
code = -32602
_msg_fmt = _("Params %(params)s are invalid for %(method)s: %(error)s")
class EmptyContext:
request_id = None
def __init__(self, src):
self.__dict__.update(src)
def to_dict(self):
return self.__dict__.copy()
class WSGIService(wsgi.WSGIService):
"""Provides ability to launch JSON RPC as a WSGI application."""
def __init__(self, manager, serializer, context_class=EmptyContext):
"""Create a JSON RPC service.
:param manager: Object from which to expose methods.
:param serializer: A serializer that supports calls serialize_entity
and deserialize_entity.
:param context_class: A context class - a callable accepting a dict
received from network.
"""
self.manager = manager
self.serializer = serializer
self.context_class = context_class
self._method_map = _build_method_map(manager)
auth_strategy = json_rpc.auth_strategy()
if auth_strategy == 'keystone':
conf = dict(CONF.keystone_authtoken)
app = auth_token.AuthProtocol(self._application, conf)
elif auth_strategy == 'http_basic':
app = auth_basic.BasicAuthMiddleware(
self._application,
cfg.CONF.json_rpc.http_basic_auth_user_file)
else:
app = self._application
super().__init__('ironic-json-rpc', app, CONF.json_rpc)
def _application(self, environment, start_response):
"""WSGI application for conductor JSON RPC."""
request = webob.Request(environment)
if request.method != 'POST':
body = {'error': {'code': 405,
'message': _('Only POST method can be used')}}
return webob.Response(status_code=405, json_body=body)(
environment, start_response)
if json_rpc.auth_strategy() == 'keystone':
roles = (request.headers.get('X-Roles') or '').split(',')
allowed_roles = CONF.json_rpc.allowed_roles
if set(roles).isdisjoint(allowed_roles):
LOG.debug('Roles %s do not contain any of %s, rejecting '
'request', roles, allowed_roles)
body = {'error': {'code': 403, 'message': _('Forbidden')}}
return webob.Response(status_code=403, json_body=body)(
environment, start_response)
result = self._call(request)
if result is not None:
response = webob.Response(content_type='application/json',
charset='UTF-8',
json_body=result)
else:
response = webob.Response(status_code=204)
return response(environment, start_response)
def _handle_error(self, exc, request_id=None):
"""Generate a JSON RPC 2.0 error body.
:param exc: Exception object.
:param request_id: ID of the request (if any).
:return: dict with response body
"""
if isinstance(exc, oslo_messaging.ExpectedException):
exc = exc.exc_info[1]
expected = isinstance(exc, exception.IronicException)
cls = exc.__class__
if expected:
LOG.debug('RPC error %s: %s', cls.__name__, exc)
else:
LOG.exception('Unexpected RPC exception %s', cls.__name__)
response = {
"jsonrpc": "2.0",
"id": request_id,
"error": {
"code": getattr(exc, 'code', 500),
"message": str(exc),
}
}
if expected and not isinstance(exc, JsonRpcError):
# Allow de-serializing the correct class for expected errors.
response['error']['data'] = {
'class': '%s.%s' % (cls.__module__, cls.__name__)
}
return response
def _call(self, request):
"""Process a JSON RPC request.
:param request: ``webob.Request`` object.
:return: dict with response body.
"""
request_id = None
try:
try:
body = json.loads(request.text)
except ValueError:
LOG.error('Cannot parse JSON RPC request as JSON')
raise ParseError()
if not isinstance(body, dict):
LOG.error('JSON RPC request %s is not an object (batched '
'requests are not supported)', body)
raise InvalidRequest()
request_id = body.get('id')
params = body.get('params', {})
if (body.get('jsonrpc') != '2.0'
or not body.get('method')
or not isinstance(params, dict)):
LOG.error('JSON RPC request %s is invalid', body)
raise InvalidRequest()
except Exception as exc:
# We do not treat malformed requests as notifications and return
# a response even when request_id is None. This seems in agreement
# with the examples in the specification.
return self._handle_error(exc, request_id)
try:
method = body['method']
try:
func = self._method_map[method]
except KeyError:
raise MethodNotFound(name=method)
result = self._handle_requests(func, method, params)
if request_id is not None:
return {
"jsonrpc": "2.0",
"result": result,
"id": request_id
}
except Exception as exc:
result = self._handle_error(exc, request_id)
# We treat correctly formed requests without "id" as notifications
# and do not return any errors.
if request_id is not None:
return result
def _handle_requests(self, func, name, params):
"""Convert arguments and call a method.
:param func: Callable object.
:param name: RPC call name for logging.
:param params: Keyword arguments.
:return: call result as JSON.
"""
# TODO(dtantsur): server-side version check?
params.pop('rpc.version', None)
logged_params = strutils.mask_dict_password(params)
try:
context = params.pop('context')
except KeyError:
context = None
else:
# A valid context is required for deserialization
if not isinstance(context, dict):
raise InvalidParams(
_("Context must be a dictionary, if provided"))
context = self.context_class(context)
params = {key: self.serializer.deserialize_entity(context, value)
for key, value in params.items()}
params['context'] = context
LOG.debug('RPC %s with %s', name, logged_params)
try:
result = func(**params)
# FIXME(dtantsur): we could use the inspect module, but
# oslo_messaging.expected_exceptions messes up signatures.
except TypeError as exc:
raise InvalidParams(params=', '.join(params),
method=name, error=exc)
if context is not None:
# Currently it seems that we can serialize even with invalid
# context, but I'm not sure it's guaranteed to be the case.
result = self.serializer.serialize_entity(context, result)
LOG.debug('RPC %s returned %s', name,
strutils.mask_dict_password(result)
if isinstance(result, dict) else result)
return result
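
For reference, a sketch of the JSON RPC 2.0 wire format that _call() above accepts and produces; the method name and values are illustrative. A request without an "id" is treated as a notification and gets no response body.

request_body = {
    "jsonrpc": "2.0",
    "method": "example_method",               # must exist on the manager
    "params": {
        "node_id": "node-1",
        "context": {"request_id": "req-123"},  # wrapped via context_class
        "rpc.version": "1.0",
    },
    "id": "req-123",
}

success_response = {
    "jsonrpc": "2.0",
    "result": "example result",
    "id": "req-123",
}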

View File

@ -0,0 +1,77 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import socket
from oslo_config import cfg
from oslo_service import service
from oslo_service import wsgi
from ironic.common import utils
CONF = cfg.CONF
class WSGIService(service.ServiceBase):
def __init__(self, name, app, conf):
"""Initialize, but do not start the WSGI server.
:param name: The name of the WSGI server given to the loader.
:param app: WSGI application to run.
:param conf: Object to load configuration from.
:returns: None
"""
self.name = name
self._conf = conf
if conf.unix_socket:
utils.unlink_without_raise(conf.unix_socket)
self.server = wsgi.Server(CONF, name, app,
socket_family=socket.AF_UNIX,
socket_file=conf.unix_socket,
socket_mode=conf.unix_socket_mode,
use_ssl=conf.use_ssl)
else:
self.server = wsgi.Server(CONF, name, app,
host=conf.host_ip,
port=conf.port,
use_ssl=conf.use_ssl)
def start(self):
"""Start serving this service using loaded configuration.
:returns: None
"""
self.server.start()
def stop(self):
"""Stop serving this API.
:returns: None
"""
self.server.stop()
if self._conf.unix_socket:
utils.unlink_without_raise(self._conf.unix_socket)
def wait(self):
"""Wait for the service to stop serving this API.
:returns: None
"""
self.server.wait()
def reset(self):
"""Reset server greenpool size to default.
:returns: None
"""
self.server.reset()
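
A minimal sketch of wrapping a trivial WSGI app with the service class above; CONF.json_rpc is borrowed as the config group here only because the JSON RPC server does the same, and any group exposing unix_socket/host_ip/port/use_ssl options would work.

from oslo_config import cfg

from ironic.common.json_rpc import wsgi


def hello_app(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'ok']


service = wsgi.WSGIService('example-wsgi', hello_app, cfg.CONF.json_rpc)
service.start()   # serves on CONF.json_rpc.host_ip:port (or a unix socket)
service.stop()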

View File

@ -14,20 +14,26 @@
"""Central place for handling Keystone authorization and service lookup."""
import copy
import functools
from keystoneauth1 import exceptions as ks_exception
from keystoneauth1 import loading as ks_loading
from keystoneauth1 import service_token
from keystoneauth1 import token_endpoint
import os_service_types
from oslo_config import cfg
from oslo_log import log as logging
from ironic.common import exception
from ironic.conf import CONF
LOG = logging.getLogger(__name__)
DEFAULT_VALID_INTERFACES = ['internal', 'public']
CONF = cfg.CONF
def ks_exceptions(f):
"""Wraps keystoneclient functions and centralizes exception handling."""
@ -157,3 +163,59 @@ def get_service_auth(context, endpoint, service_auth,
user_auth = service_auth
return service_token.ServiceTokenAuthWrapper(user_auth=user_auth,
service_auth=service_auth)
def register_auth_opts(conf, group, service_type=None):
"""Register session- and auth-related options
Registers only basic auth options shared by all auth plugins.
The rest are registered at runtime depending on the auth plugin used.
"""
ks_loading.register_session_conf_options(conf, group)
ks_loading.register_auth_conf_options(conf, group)
CONF.set_default('auth_type', default='password', group=group)
ks_loading.register_adapter_conf_options(conf, group)
conf.set_default('valid_interfaces', DEFAULT_VALID_INTERFACES, group=group)
if service_type:
conf.set_default('service_type', service_type, group=group)
else:
types = os_service_types.get_service_types()
key = 'ironic-inspector' if group == 'inspector' else group
service_types = types.service_types_by_project.get(key)
if service_types:
conf.set_default('service_type', service_types[0], group=group)
def add_auth_opts(options, service_type=None):
"""Add auth options to sample config
As these are dynamically registered at runtime,
this adds options for the most commonly used auth plugins
when generating the sample config.
"""
def add_options(opts, opts_to_add):
for new_opt in opts_to_add:
for opt in opts:
if opt.name == new_opt.name:
break
else:
opts.append(new_opt)
opts = copy.deepcopy(options)
opts.insert(0, ks_loading.get_auth_common_conf_options()[0])
# NOTE(dims): There are a lot of auth plugins, we just generate
# the config options for a few common ones
plugins = ['password', 'v2password', 'v3password']
for name in plugins:
plugin = ks_loading.get_plugin_loader(name)
add_options(opts, ks_loading.get_auth_plugin_conf_options(plugin))
add_options(opts, ks_loading.get_session_conf_options())
if service_type:
adapter_opts = ks_loading.get_adapter_conf_options(
include_deprecated=False)
# adding defaults for valid interfaces
cfg.set_defaults(adapter_opts, service_type=service_type,
valid_interfaces=DEFAULT_VALID_INTERFACES)
add_options(opts, adapter_opts)
opts.sort(key=lambda x: x.name)
return opts
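
A hedged sketch of wiring a new service client with the helpers above; the 'example_service' group name is made up, and get_auth/get_session/get_adapter are the existing ironic.common.keystone helpers used elsewhere in this change.

from oslo_config import cfg

from ironic.common import keystone

CONF = cfg.CONF

# Register session, auth and adapter options under [example_service].
keystone.register_auth_opts(CONF, 'example_service', service_type='baremetal')

# Later, at runtime:
# auth = keystone.get_auth('example_service')
# session = keystone.get_session('example_service', auth=auth)
# adapter = keystone.get_adapter('example_service', session=session)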

View File

@ -17,12 +17,12 @@ import io
import os
import tempfile
from ironic_lib import utils as ironic_utils
from oslo_log import log as logging
import pycdlib
import requests
from ironic.common import exception
from ironic.common import utils
from ironic.conf import CONF
LOG = logging.getLogger(__name__)
@ -153,7 +153,7 @@ def prepare_config_drive(task,
if not config_drive:
return ks_config_drive
if ironic_utils.is_http_url(config_drive):
if utils.is_http_url(config_drive):
config_drive = _fetch_config_drive_from_url(config_drive)
if not isinstance(config_drive, dict):

ironic/common/mdns.py (new file, 185 lines)
View File

@ -0,0 +1,185 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Multicast DNS implementation for API discovery.
This implementation follows RFC 6763 as clarified by the API SIG guideline
https://review.opendev.org/651222.
"""
import collections
import logging
import socket
import time
from urllib import parse as urlparse
import zeroconf
from ironic.common import exception
from ironic.common.i18n import _
from ironic.conf import CONF
LOG = logging.getLogger(__name__)
_MDNS_DOMAIN = '_openstack._tcp.local.'
_endpoint = collections.namedtuple('Endpoint',
['addresses', 'hostname', 'port', 'params'])
class Zeroconf(object):
"""Multicast DNS implementation client and server.
Uses threading internally, so there is no start method. It starts
automatically on creation.
.. warning::
The underlying library does not yet support IPv6.
"""
def __init__(self):
"""Initialize and start the mDNS server."""
interfaces = (CONF.mdns.interfaces if CONF.mdns.interfaces
else zeroconf.InterfaceChoice.All)
# If interfaces are set, let zeroconf auto-detect the version
ip_version = None if CONF.mdns.interfaces else zeroconf.IPVersion.All
self._zc = zeroconf.Zeroconf(interfaces=interfaces,
ip_version=ip_version)
self._registered = []
def register_service(self, service_type, endpoint, params=None):
"""Register a service.
This call announces the new services via multicast and instructs the
built-in server to respond to queries about it.
:param service_type: OpenStack service type, e.g. "baremetal".
:param endpoint: full endpoint to reach the service.
:param params: optional properties as a dictionary.
:raises: :exc:`.ServiceRegistrationFailure` if the service cannot be
registered, e.g. because of conflicts.
"""
parsed = _parse_endpoint(endpoint, service_type)
all_params = CONF.mdns.params.copy()
if params:
all_params.update(params)
all_params.update(parsed.params)
properties = {
(key.encode('utf-8') if isinstance(key, str) else key):
(value.encode('utf-8') if isinstance(value, str) else value)
for key, value in all_params.items()
}
# TODO(dtantsur): allow overriding TTL values via configuration
info = zeroconf.ServiceInfo(_MDNS_DOMAIN,
'%s.%s' % (service_type, _MDNS_DOMAIN),
addresses=parsed.addresses,
port=parsed.port,
properties=properties,
server=parsed.hostname)
LOG.debug('Registering %s via mDNS', info)
# Work around a potential race condition in the registration code:
# https://github.com/jstasiak/python-zeroconf/issues/163
delay = 0.1
try:
for attempt in range(CONF.mdns.registration_attempts):
try:
self._zc.register_service(info)
except zeroconf.NonUniqueNameException:
LOG.debug('Could not register %s - conflict', info)
if attempt == CONF.mdns.registration_attempts - 1:
raise
# reset the cache to purge learned records and retry
self._zc.cache = zeroconf.DNSCache()
time.sleep(delay)
delay *= 2
else:
break
except zeroconf.Error as exc:
raise exception.ServiceRegistrationFailure(
service=service_type, error=exc)
self._registered.append(info)
def close(self):
"""Shut down mDNS and unregister services.
.. note::
If another server is running for the same services, it will
re-register them immediately.
"""
for info in self._registered:
try:
self._zc.unregister_service(info)
except Exception:
LOG.exception('Could not unregister mDNS service %s', info)
self._zc.close()
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
def _parse_endpoint(endpoint, service_type=None):
params = {}
url = urlparse.urlparse(endpoint)
port = url.port
if port is None:
if url.scheme == 'https':
port = 443
else:
port = 80
addresses = []
hostname = url.hostname
try:
infos = socket.getaddrinfo(hostname, port, 0, socket.IPPROTO_TCP)
except socket.error as exc:
raise exception.ServiceRegistrationFailure(
service=service_type,
error=_('Could not resolve hostname %(host)s: %(exc)s') %
{'host': hostname, 'exc': exc})
for info in infos:
ip = info[4][0]
if ip == hostname:
# we need a host name for the service record. if what we have in
# the catalog is an IP address, use the local hostname instead
hostname = None
# zeroconf requires addresses in network format
ip = socket.inet_pton(info[0], ip)
if ip not in addresses:
addresses.append(ip)
if not addresses:
raise exception.ServiceRegistrationFailure(
service=service_type,
error=_('No suitable addresses found for %s') % url.hostname)
# avoid storing information that can be derived from existing data
if url.path not in ('', '/'):
params['path'] = url.path
if (not (port == 80 and url.scheme == 'http')
and not (port == 443 and url.scheme == 'https')):
params['protocol'] = url.scheme
# zeroconf is pretty picky about having the trailing dot
if hostname is not None and not hostname.endswith('.'):
hostname += '.'
return _endpoint(addresses, hostname, port, params)
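
A minimal sketch of announcing an API endpoint with the class above; the endpoint URL and the api_version parameter are made-up examples.

from ironic.common import mdns

with mdns.Zeroconf() as zc:
    zc.register_service('baremetal',
                        'https://ironic.example.com/baremetal',
                        params={'api_version': '1.78'})
    # ... keep the process running; registered services are unregistered
    # when the context manager exits.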

ironic/common/metrics.py (new file, 307 lines)
View File

@ -0,0 +1,307 @@
# Copyright 2016 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import functools
import random
import time
from ironic.common import exception
from ironic.common.i18n import _
class Timer(object):
"""A timer decorator and context manager.
This metric type times the decorated method or code running inside the
context manager, and emits the time as the metric value. It is bound to
this MetricLogger. For example::
from ironic.common import metrics_utils
METRICS = metrics_utils.get_metrics_logger()
@METRICS.timer('foo')
def foo(bar, baz):
print(bar, baz)
with METRICS.timer('foo'):
do_something()
"""
def __init__(self, metrics, name):
"""Init the decorator / context manager.
:param metrics: The metric logger
:param name: The metric name
"""
if not isinstance(name, str):
raise TypeError(_("The metric name is expected to be a string. "
"Value is %s") % name)
self.metrics = metrics
self.name = name
self._start = None
def __call__(self, f):
@functools.wraps(f)
def wrapped(*args, **kwargs):
start = _time()
result = f(*args, **kwargs)
duration = _time() - start
# Log the timing data (in ms)
self.metrics.send_timer(self.metrics.get_metric_name(self.name),
duration * 1000)
return result
return wrapped
def __enter__(self):
self._start = _time()
def __exit__(self, exc_type, exc_val, exc_tb):
duration = _time() - self._start
# Log the timing data (in ms)
self.metrics.send_timer(self.metrics.get_metric_name(self.name),
duration * 1000)
class Counter(object):
"""A counter decorator and context manager.
This metric type increments a counter every time the decorated method or
context manager is executed. It is bound to this MetricLogger. For
example::
from ironic.common import metrics_utils
METRICS = metrics_utils.get_metrics_logger()
@METRICS.counter('foo')
def foo(bar, baz):
print(bar, baz)
with METRICS.counter('foo'):
do_something()
"""
def __init__(self, metrics, name, sample_rate):
"""Init the decorator / context manager.
:param metrics: The metric logger
:param name: The metric name
:param sample_rate: Probabilistic rate at which the values will be sent
"""
if not isinstance(name, str):
raise TypeError(_("The metric name is expected to be a string. "
"Value is %s") % name)
if (sample_rate is not None
and (sample_rate < 0.0 or sample_rate > 1.0)):
msg = _("sample_rate is set to %s. Value must be None "
"or in the interval [0.0, 1.0]") % sample_rate
raise ValueError(msg)
self.metrics = metrics
self.name = name
self.sample_rate = sample_rate
def __call__(self, f):
@functools.wraps(f)
def wrapped(*args, **kwargs):
self.metrics.send_counter(
self.metrics.get_metric_name(self.name),
1, sample_rate=self.sample_rate)
result = f(*args, **kwargs)
return result
return wrapped
def __enter__(self):
self.metrics.send_counter(self.metrics.get_metric_name(self.name),
1, sample_rate=self.sample_rate)
def __exit__(self, exc_type, exc_val, exc_tb):
pass
class Gauge(object):
"""A gauge decorator.
This metric type returns the value of the decorated method as a metric
every time the method is executed. It is bound to this MetricLogger. For
example::
from ironic.common import metrics_utils
METRICS = metrics_utils.get_metrics_logger()
@METRICS.gauge('foo')
def add_foo(bar, baz):
return (bar + baz)
"""
def __init__(self, metrics, name):
"""Init the decorator / context manager.
:param metrics: The metric logger
:param name: The metric name
"""
if not isinstance(name, str):
raise TypeError(_("The metric name is expected to be a string. "
"Value is %s") % name)
self.metrics = metrics
self.name = name
def __call__(self, f):
@functools.wraps(f)
def wrapped(*args, **kwargs):
result = f(*args, **kwargs)
self.metrics.send_gauge(self.metrics.get_metric_name(self.name),
result)
return result
return wrapped
def _time():
"""Wraps time.time() for simpler testing."""
return time.time()
class MetricLogger(object, metaclass=abc.ABCMeta):
"""Abstract class representing a metrics logger.
A MetricLogger sends data to a backend (noop or statsd).
The data can be a gauge, a counter, or a timer.
The data sent to the backend is composed of:
- a full metric name
- a numeric value
The format of the full metric name is:
_prefix<delim>name
where:
- _prefix: [global_prefix<delim>][uuid<delim>][host_name<delim>]prefix
- name: the name of this metric
- <delim>: the delimiter. Default is '.'
"""
def __init__(self, prefix='', delimiter='.'):
"""Init a MetricLogger.
:param prefix: Prefix for this metric logger. This string will prefix
all metric names.
:param delimiter: Delimiter used to generate the full metric name.
"""
self._prefix = prefix
self._delimiter = delimiter
def get_metric_name(self, name):
"""Get the full metric name.
The format of the full metric name is:
_prefix<delim>name
where:
- _prefix: [global_prefix<delim>][uuid<delim>][host_name<delim>]
prefix
- name: the name of this metric
- <delim>: the delimiter. Default is '.'
:param name: The metric name.
:return: The full metric name, with logger prefix, as a string.
"""
if not self._prefix:
return name
return self._delimiter.join([self._prefix, name])
def send_gauge(self, name, value):
"""Send gauge metric data.
Gauges are simple values.
The backend will set the value of gauge 'name' to 'value'.
:param name: Metric name
:param value: Metric numeric value that will be sent to the backend
"""
self._gauge(name, value)
def send_counter(self, name, value, sample_rate=None):
"""Send counter metric data.
Counters are used to count how many times an event occurred.
The backend will increment the counter 'name' by the value 'value'.
Optionally, specify sample_rate in the interval [0.0, 1.0] to
sample data probabilistically where::
P(send metric data) = sample_rate
If sample_rate is None, then always send metric data, but do not
have the backend send sample rate information (if supported).
:param name: Metric name
:param value: Metric numeric value that will be sent to the backend
:param sample_rate: Probabilistic rate at which the values will be
sent. Value must be None or in the interval [0.0, 1.0].
"""
if (sample_rate is None or random.random() < sample_rate):
return self._counter(name, value,
sample_rate=sample_rate)
def send_timer(self, name, value):
"""Send timer data.
Timers are used to measure how long it took to do something.
:param name: Metric name
:param value: Metric numeric value that will be sent to the backend
"""
self._timer(name, value)
def timer(self, name):
return Timer(self, name)
def counter(self, name, sample_rate=None):
return Counter(self, name, sample_rate)
def gauge(self, name):
return Gauge(self, name)
@abc.abstractmethod
def _gauge(self, name, value):
"""Abstract method for backends to implement gauge behavior."""
@abc.abstractmethod
def _counter(self, name, value, sample_rate=None):
"""Abstract method for backends to implement counter behavior."""
@abc.abstractmethod
def _timer(self, name, value):
"""Abstract method for backends to implement timer behavior."""
def get_metrics_data(self):
"""Return the metrics collection, if available."""
raise exception.MetricsNotSupported()
class NoopMetricLogger(MetricLogger):
"""Noop metric logger that throws away all metric data."""
def _gauge(self, name, value):
pass
def _counter(self, name, value, sample_rate=None):
pass
def _timer(self, m_name, value):
pass
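
An illustrative sketch of a custom backend built on the MetricLogger base class above; PrintMetricLogger is hypothetical and simply prints instead of sending data anywhere.

from ironic.common import metrics


class PrintMetricLogger(metrics.MetricLogger):
    def _gauge(self, name, value):
        print('gauge %s=%s' % (name, value))

    def _counter(self, name, value, sample_rate=None):
        print('counter %s+=%s' % (name, value))

    def _timer(self, name, value):
        print('timer %s=%sms' % (name, value))


METRICS = PrintMetricLogger(prefix='example')


@METRICS.timer('do_work')
def do_work():
    return 42


do_work()                      # emits timer 'example.do_work'
with METRICS.counter('events'):
    pass                       # increments counter 'example.events'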

View File

@ -0,0 +1,120 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_concurrency import lockutils
from oslo_config import cfg
from ironic.common import metrics
CONF = cfg.CONF
STATISTIC_DATA = {}
class DictCollectionMetricLogger(metrics.MetricLogger):
"""Metric logger that collects internal counters."""
# These are internal typing labels in Ironic-lib.
GAUGE_TYPE = 'g'
COUNTER_TYPE = 'c'
TIMER_TYPE = 'ms'
def __init__(self, prefix, delimiter='.'):
"""Initialize the Collection Metrics Logger.
The logger stores metrics data in a dictionary which the program
using it can retrieve whenever needed via a get_metrics_data call.
:param prefix: Prefix for this metric logger.
:param delimiter: Delimiter used to generate the full metric name.
"""
super(DictCollectionMetricLogger, self).__init__(
prefix, delimiter=delimiter)
@lockutils.synchronized('statistics-update')
def _send(self, name, value, metric_type, sample_rate=None):
"""Send the metrics to be stored in memory.
This method updates the internal dictionary so that statistics are
collected and can later be retrieved by consumers or plugins in
Ironic via the `get_metrics_data` method.
:param name: Metric name
:param value: Metric value
:param metric_type: Metric type (GAUGE_TYPE, COUNTER_TYPE, or
TIMER_TYPE).
:param sample_rate: Not Applicable.
"""
global STATISTIC_DATA
if metric_type == self.TIMER_TYPE:
if name in STATISTIC_DATA:
STATISTIC_DATA[name] = {
'count': STATISTIC_DATA[name]['count'] + 1,
'sum': STATISTIC_DATA[name]['sum'] + value,
'type': 'timer'
}
else:
# Set initial data value.
STATISTIC_DATA[name] = {
'count': 1,
'sum': value,
'type': 'timer'
}
elif metric_type == self.GAUGE_TYPE:
STATISTIC_DATA[name] = {
'value': value,
'type': 'gauge'
}
elif metric_type == self.COUNTER_TYPE:
if name in STATISTIC_DATA:
# NOTE(TheJulia): Value is hard coded for counter
# data types as a value of 1.
STATISTIC_DATA[name] = {
'count': STATISTIC_DATA[name]['count'] + 1,
'type': 'counter'
}
else:
STATISTIC_DATA[name] = {
'count': 1,
'type': 'counter'
}
def _gauge(self, name, value):
return self._send(name, value, self.GAUGE_TYPE)
def _counter(self, name, value, sample_rate=None):
return self._send(name, value, self.COUNTER_TYPE,
sample_rate=sample_rate)
def _timer(self, name, value):
return self._send(name, value, self.TIMER_TYPE)
def get_metrics_data(self):
"""Return the metrics collection dictionary.
:returns: Dictionary containing the keys and values of
data stored via the metrics collection hooks.
The values themselves are dictionaries which
contain a type field, indicating if the statistic
is a counter, gauge, or timer. A counter has a
`count` field, a gauge value has a `value` field,
and a 'timer' field has 'count' and 'sum' fields.
The multiple fields for a timer type allow additional
statistics to be derived from the data once collected
and compared over time.
"""
return STATISTIC_DATA
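
A minimal usage sketch of the collector backend above, assuming the migrated get_metrics_logger() helper (shown later in this change) and the timer() decorator from the MetricLogger base class:

from ironic.common import metrics_utils

# Select the DictCollectionMetricLogger explicitly; normally the backend
# comes from the [metrics] backend option.
METRICS = metrics_utils.get_metrics_logger('ironic.example', backend='collector')

@METRICS.timer('do_work')
def do_work():
    return 42

do_work()
# Roughly: {'ironic.example.do_work': {'count': 1, 'sum': <ms>, 'type': 'timer'}}
print(METRICS.get_metrics_data())
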

View File

@ -0,0 +1,92 @@
# Copyright 2016 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import logging
import socket
from oslo_config import cfg
from ironic.common import metrics
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
class StatsdMetricLogger(metrics.MetricLogger):
"""Metric logger that reports data via the statsd protocol."""
GAUGE_TYPE = 'g'
COUNTER_TYPE = 'c'
TIMER_TYPE = 'ms'
def __init__(self, prefix, delimiter='.', host=None, port=None):
"""Initialize a StatsdMetricLogger
The logger uses the given prefix list, delimiter, host, and port.
:param prefix: Prefix for this metric logger.
:param delimiter: Delimiter used to generate the full metric name.
:param host: The statsd host
:param port: The statsd port
"""
super(StatsdMetricLogger, self).__init__(prefix,
delimiter=delimiter)
self._host = host or CONF.metrics_statsd.statsd_host
self._port = port or CONF.metrics_statsd.statsd_port
self._target = (self._host, self._port)
def _send(self, name, value, metric_type, sample_rate=None):
"""Send metrics to the statsd backend
:param name: Metric name
:param value: Metric value
:param metric_type: Metric type (GAUGE_TYPE, COUNTER_TYPE,
or TIMER_TYPE)
:param sample_rate: Probabilistic rate at which the values will be sent
"""
if sample_rate is None:
metric = '%s:%s|%s' % (name, value, metric_type)
else:
metric = '%s:%s|%s@%s' % (name, value, metric_type, sample_rate)
# Ideally, we'd cache a sending socket in self, but that
# results in a socket getting shared by multiple green threads.
with contextlib.closing(self._open_socket()) as sock:
try:
sock.settimeout(0.0)
sock.sendto(metric.encode(), self._target)
except socket.error as e:
LOG.warning("Failed to send the metric value to host "
"%(host)s, port %(port)s. Error: %(error)s",
{'host': self._host, 'port': self._port,
'error': e})
def _open_socket(self):
return socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def _gauge(self, name, value):
return self._send(name, value, self.GAUGE_TYPE)
def _counter(self, name, value, sample_rate=None):
return self._send(name, value, self.COUNTER_TYPE,
sample_rate=sample_rate)
def _timer(self, name, value):
return self._send(name, value, self.TIMER_TYPE)
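
For illustration, the wire format assembled by _send() above, using only the string formatting shown:

name, value, metric_type = 'ironic.api.get_node', 27.5, 'ms'
print('%s:%s|%s' % (name, value, metric_type))    # ironic.api.get_node:27.5|ms
print('%s:%s|%s@%s' % (name, 1, 'c', 0.5))        # ironic.api.get_node:1|c@0.5
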

View File

@ -0,0 +1,76 @@
# Copyright 2016 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics
from ironic.common import metrics_collector
from ironic.common import metrics_statsd
CONF = cfg.CONF
def get_metrics_logger(prefix='', backend=None, host=None, delimiter='.'):
"""Return a metric logger with the specified prefix.
The format of the prefix is:
[global_prefix<delim>][host_name<delim>]prefix
where <delim> is the delimiter (default is '.')
:param prefix: Prefix for this metric logger.
Value should be a string or None.
:param backend: Backend to use for the metrics system.
Possible values are 'noop' and 'statsd'.
:param host: Name of this node.
:param delimiter: Delimiter to use for the metrics name.
:return: The new MetricLogger.
"""
if not isinstance(prefix, str):
msg = (_("This metric prefix (%s) is of unsupported type. "
"Value should be a string or None")
% str(prefix))
raise exception.InvalidMetricConfig(msg)
if CONF.metrics.prepend_host and host:
if CONF.metrics.prepend_host_reverse:
host = '.'.join(reversed(host.split('.')))
if prefix:
prefix = delimiter.join([host, prefix])
else:
prefix = host
if CONF.metrics.global_prefix:
if prefix:
prefix = delimiter.join([CONF.metrics.global_prefix, prefix])
else:
prefix = CONF.metrics.global_prefix
backend = backend or CONF.metrics.backend
if backend == 'statsd':
return metrics_statsd.StatsdMetricLogger(prefix, delimiter=delimiter)
elif backend == 'noop':
return metrics.NoopMetricLogger(prefix, delimiter=delimiter)
elif backend == 'collector':
return metrics_collector.DictCollectionMetricLogger(
prefix, delimiter=delimiter)
else:
msg = (_("The backend is set to an unsupported type: "
"%s. Value should be 'noop' or 'statsd'.")
% backend)
raise exception.InvalidMetricConfig(msg)
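
A short sketch of how get_metrics_logger() above composes the prefix; the option values and host name are hypothetical:

from ironic.common import metrics_utils

# With [metrics] global_prefix = 'prod', prepend_host = True and
# prepend_host_reverse = True, a host of 'cond1.example.com' yields metric
# names of the form: prod.com.example.cond1.ironic.api.<metric_name>
METRICS = metrics_utils.get_metrics_logger('ironic.api',
                                           host='cond1.example.com')
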

View File

@ -20,7 +20,6 @@ import shutil
import tempfile
from urllib import parse as urlparse
from ironic_lib import utils as ironic_utils
import jinja2
from oslo_concurrency import processutils
from oslo_log import log as logging
@ -108,7 +107,7 @@ def _link_mac_pxe_configs(task, ipxe_enabled=False):
"""
def create_link(mac_path):
ironic_utils.unlink_without_raise(mac_path)
utils.unlink_without_raise(mac_path)
relative_source_path = os.path.relpath(
pxe_config_file_path, os.path.dirname(mac_path))
utils.create_link_without_raise(relative_source_path, mac_path)
@ -159,7 +158,7 @@ def _link_ip_address_pxe_configs(task, ipxe_enabled=False):
ip_addrs = []
for port_ip_address in ip_addrs:
ip_address_path = _get_pxe_ip_address_path(port_ip_address)
ironic_utils.unlink_without_raise(ip_address_path)
utils.unlink_without_raise(ip_address_path)
relative_source_path = os.path.relpath(
pxe_config_file_path, os.path.dirname(ip_address_path))
utils.create_link_without_raise(relative_source_path,
@ -429,18 +428,19 @@ def clean_up_pxe_config(task, ipxe_enabled=False):
except exception.FailedToGetIPAddressOnPort:
continue
# Cleaning up config files created for grub2.
ironic_utils.unlink_without_raise(ip_address_path)
utils.unlink_without_raise(ip_address_path)
for port in task.ports:
client_id = port.extra.get('client-id')
# syslinux, ipxe, etc.
ironic_utils.unlink_without_raise(
_get_pxe_mac_path(port.address, client_id=client_id,
utils.unlink_without_raise(
_get_pxe_mac_path(port.address,
client_id=client_id,
ipxe_enabled=ipxe_enabled))
# Grub2 MAC address based configuration
for path in _get_pxe_grub_mac_path(port.address,
ipxe_enabled=ipxe_enabled):
ironic_utils.unlink_without_raise(path)
utils.unlink_without_raise(path)
utils.rmtree_without_raise(os.path.join(_get_root_dir(ipxe_enabled),
task.node.uuid))
@ -1348,7 +1348,7 @@ def clean_up_pxe_env(task, images_info, ipxe_enabled=False):
"""
for label in images_info:
path = images_info[label][1]
ironic_utils.unlink_without_raise(path)
utils.unlink_without_raise(path)
clean_up_pxe_config(task, ipxe_enabled=ipxe_enabled)
TFTPImageCache().clean_up()

View File

@ -17,7 +17,6 @@
import sys
import time
from ironic_lib.json_rpc import server as json_rpc
from oslo_config import cfg
from oslo_log import log
import oslo_messaging as messaging
@ -25,6 +24,7 @@ from oslo_service import service
from oslo_utils import importutils
from ironic.common import context
from ironic.common.json_rpc import server as json_rpc
from ironic.common import rpc
from ironic.objects import base as objects_base

View File

@ -20,6 +20,7 @@
from collections import abc
import contextlib
import copy
import datetime
import errno
import hashlib
@ -30,23 +31,39 @@ import shlex
import shutil
import tempfile
import time
from urllib import parse as urlparse
import warnings
import jinja2
from oslo_concurrency import processutils
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import excutils
from oslo_utils import fileutils
from oslo_utils import netutils
from oslo_utils import specs_matcher
from oslo_utils import strutils
from oslo_utils import timeutils
from oslo_utils import units
import psutil
from ironic.common import exception
from ironic.common.i18n import _
from ironic.conf import CONF
LOG = logging.getLogger(__name__)
# A dictionary in the form {hint name: hint type}
VALID_ROOT_DEVICE_HINTS = {
'size': int, 'model': str, 'wwn': str, 'serial': str, 'vendor': str,
'wwn_with_extension': str, 'wwn_vendor_extension': str, 'name': str,
'rotational': bool, 'hctl': str, 'by_path': str,
}
ROOT_DEVICE_HINTS_GRAMMAR = specs_matcher.make_grammar()
DATE_RE = r'(?P<year>-?\d{4,})-(?P<month>\d{2})-(?P<day>\d{2})'
TIME_RE = r'(?P<hour>\d{2}):(?P<min>\d{2}):(?P<sec>\d{2})' + \
r'(\.(?P<sec_frac>\d+))?'
@ -59,29 +76,60 @@ DATETIME_RE = re.compile(
USING_SQLITE = None
def execute(*cmd, **kwargs):
def execute(*cmd, use_standard_locale=False, log_stdout=True, **kwargs):
"""Convenience wrapper around oslo's execute() method.
:param cmd: Passed to processutils.execute.
:param use_standard_locale: True | False. Defaults to False. If set to
True, execute command with standard locale
added to environment variables.
:returns: (stdout, stderr) from process execution
:raises: UnknownArgumentError
:raises: ProcessExecutionError
"""
Executes and logs results from a system command. See docs for
oslo_concurrency.processutils.execute for usage.
use_standard_locale = kwargs.pop('use_standard_locale', False)
:param cmd: positional arguments to pass to processutils.execute()
:param use_standard_locale: Defaults to False. If set to True,
execute command with standard locale
added to environment variables.
:param log_stdout: Defaults to True. If set to True, logs the output.
:param kwargs: keyword arguments to pass to processutils.execute()
:returns: (stdout, stderr) from process execution
:raises: UnknownArgumentError on receiving unknown arguments
:raises: ProcessExecutionError
:raises: OSError
"""
if use_standard_locale:
env = kwargs.pop('env_variables', os.environ.copy())
env['LC_ALL'] = 'C'
kwargs['env_variables'] = env
result = processutils.execute(*cmd, **kwargs)
LOG.debug('Execution completed, command line is "%s"',
' '.join(map(str, cmd)))
LOG.debug('Command stdout is: "%s"', result[0])
LOG.debug('Command stderr is: "%s"', result[1])
return result
if kwargs.pop('run_as_root', False):
warnings.warn("run_as_root is deprecated and has no effect",
DeprecationWarning)
def _log(stdout, stderr):
if log_stdout:
try:
LOG.debug('Command stdout is: "%s"', stdout)
except UnicodeEncodeError:
LOG.debug('stdout contains invalid UTF-8 characters')
stdout = (stdout.encode('utf8', 'surrogateescape')
.decode('utf8', 'ignore'))
LOG.debug('Command stdout is: "%s"', stdout)
try:
LOG.debug('Command stderr is: "%s"', stderr)
except UnicodeEncodeError:
LOG.debug('stderr contains invalid UTF-8 characters')
stderr = (stderr.encode('utf8', 'surrogateescape')
.decode('utf8', 'ignore'))
LOG.debug('Command stderr is: "%s"', stderr)
try:
result = processutils.execute(*cmd, **kwargs)
except FileNotFoundError:
with excutils.save_and_reraise_exception():
LOG.debug('Command not found: "%s"', ' '.join(map(str, cmd)))
except processutils.ProcessExecutionError as exc:
with excutils.save_and_reraise_exception():
_log(exc.stdout, exc.stderr)
else:
_log(result[0], result[1])
return result
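
A minimal usage sketch of the wrapper above; the command is only an example:

from ironic.common import utils

# Runs with LC_ALL=C so output parsing is locale independent; stdout and
# stderr are logged at debug level unless log_stdout=False is passed.
out, err = utils.execute('uname', '-r', use_standard_locale=True)
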
def is_valid_datapath_id(datapath_id):
@ -705,3 +753,397 @@ def is_ironic_using_sqlite():
# return the value.
USING_SQLITE = 'sqlite' in CONF.database.connection.lower()
return USING_SQLITE
def try_execute(*cmd, **kwargs):
"""The same as execute but returns None on error.
Executes and logs results from a system command. See docs for
oslo_concurrency.processutils.execute for usage.
Instead of raising an exception on failure, this method simply
returns None in case of failure.
:param cmd: positional arguments to pass to processutils.execute()
:param kwargs: keyword arguments to pass to processutils.execute()
:raises: UnknownArgumentError on receiving unknown arguments
:returns: tuple of (stdout, stderr) or None in some error cases
"""
try:
return execute(*cmd, **kwargs)
except (processutils.ProcessExecutionError, OSError) as e:
LOG.debug('Command failed: %s', e)
def mkfs(fs, path, label=None):
"""Format a file or block device
:param fs: Filesystem type (examples include 'swap', 'ext3', 'ext4'
'btrfs', etc.)
:param path: Path to file or block device to format
:param label: Volume label to use
"""
if fs == 'swap':
args = ['mkswap']
else:
args = ['mkfs', '-t', fs]
# add -F to force non-interactive execution on a non-block device.
if fs in ('ext3', 'ext4'):
args.extend(['-F'])
if label:
if fs in ('msdos', 'vfat'):
label_opt = '-n'
else:
label_opt = '-L'
args.extend([label_opt, label])
args.append(path)
try:
execute(*args, use_standard_locale=True)
except processutils.ProcessExecutionError as e:
with excutils.save_and_reraise_exception() as ctx:
if os.strerror(errno.ENOENT) in e.stderr:
ctx.reraise = False
LOG.exception('Failed to make file system. '
'File system %s is not supported.', fs)
raise exception.FileSystemNotSupported(fs=fs)
else:
LOG.exception('Failed to create a file system '
'in %(path)s. Error: %(error)s',
{'path': path, 'error': e})
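
A small sketch of mkfs() above; the device path and label are hypothetical:

from ironic.common import utils

# Formats the partition as ext4 with a volume label; raises
# FileSystemNotSupported if the mkfs tool for that type is missing.
utils.mkfs('ext4', '/dev/sdb1', label='ephemeral0')
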
def unlink_without_raise(path):
try:
os.unlink(path)
except OSError as e:
if e.errno == errno.ENOENT:
return
else:
LOG.warning("Failed to unlink %(path)s, error: %(e)s",
{'path': path, 'e': e})
def dd(src, dst, *args):
"""Execute dd from src to dst.
:param src: the input file for dd command.
:param dst: the output file for dd command.
:param args: a tuple containing the arguments to be
passed to dd command.
:raises: processutils.ProcessExecutionError if it failed
to run the process.
"""
LOG.debug("Starting dd process.")
execute('dd', 'if=%s' % src, 'of=%s' % dst, *args,
use_standard_locale=True)
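
And a sketch of dd() above; paths and extra arguments are hypothetical:

from ironic.common import utils

# Any extra positional arguments are appended to the dd command line.
utils.dd('/tmp/deploy.img', '/dev/sdb', 'bs=1M', 'oflag=direct')
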
def is_http_url(url):
url = url.lower()
return url.startswith('http://') or url.startswith('https://')
def _extract_hint_operator_and_values(hint_expression, hint_name):
"""Extract the operator and value(s) of a root device hint expression.
A root device hint expression could contain one or more values
depending on the operator. This method extracts the operator and
value(s) and returns a dictionary containing both.
:param hint_expression: The hint expression string containing value(s)
and operator (optionally).
:param hint_name: The name of the hint. Used for logging.
:raises: ValueError if the hint_expression is empty.
:returns: A dictionary containing:
:op: The operator. An empty string in case of None.
:values: A list of values stripped and converted to lowercase.
"""
expression = str(hint_expression).strip().lower()
if not expression:
raise ValueError(
_('Root device hint "%s" expression is empty') % hint_name)
# parseString() returns a list of tokens in which the operator (if
# present) is always the first element.
ast = ROOT_DEVICE_HINTS_GRAMMAR.parseString(expression)
if len(ast) <= 1:
# hint_expression had no operator
return {'op': '', 'values': [expression]}
op = ast[0]
return {'values': [v.strip() for v in re.split(op, expression) if v],
'op': op}
def _normalize_hint_expression(hint_expression, hint_name):
"""Normalize a string type hint expression.
A string-type hint expression contains one or more operators and
one or more values: [<op>] <value> [<op> <value>]*. This normalizes
the values by url-encoding white spaces and special characters. The
operators are not normalized. For example: the hint value of "<or>
foo bar <or> bar" will become "<or> foo%20bar <or> bar".
:param hint_expression: The hint expression string containing value(s)
and operator (optionally).
:param hint_name: The name of the hint. Used for logging.
:raises: ValueError if the hint_expression is empty.
:returns: A normalized string.
"""
hdict = _extract_hint_operator_and_values(hint_expression, hint_name)
result = hdict['op'].join([' %s ' % urlparse.quote(t)
for t in hdict['values']])
return (hdict['op'] + result).strip()
def _append_operator_to_hints(root_device):
"""Add an equal (s== or ==) operator to the hints.
For backwards compatibility, for root device hints where no operator
means equal, this method adds the equal operator to the hint. This is
needed when using oslo.utils.specs_matcher methods.
:param root_device: The root device hints dictionary.
"""
for name, expression in root_device.items():
# NOTE(lucasagomes): The specs_matcher from oslo.utils does not
# support boolean, so we don't need to append any operator
# for it.
if VALID_ROOT_DEVICE_HINTS[name] is bool:
continue
expression = str(expression)
ast = ROOT_DEVICE_HINTS_GRAMMAR.parseString(expression)
if len(ast) > 1:
continue
op = 's== %s' if VALID_ROOT_DEVICE_HINTS[name] is str else '== %s'
root_device[name] = op % expression
return root_device
def parse_root_device_hints(root_device):
"""Parse the root_device property of a node.
Parses and validates the root_device property of a node. These are
hints for how a node's root device is created. The 'size' hint
should be a positive integer. The 'rotational' hint should be a
Boolean value.
:param root_device: the root_device dictionary from the node's property.
:returns: a dictionary with the root device hints parsed or
None if there are no hints.
:raises: ValueError, if some information is invalid.
"""
if not root_device:
return
root_device = copy.deepcopy(root_device)
invalid_hints = set(root_device) - set(VALID_ROOT_DEVICE_HINTS)
if invalid_hints:
raise ValueError(
_('The hints "%(invalid_hints)s" are invalid. '
'Valid hints are: "%(valid_hints)s"') %
{'invalid_hints': ', '.join(invalid_hints),
'valid_hints': ', '.join(VALID_ROOT_DEVICE_HINTS)})
for name, expression in root_device.items():
hint_type = VALID_ROOT_DEVICE_HINTS[name]
if hint_type is str:
if not isinstance(expression, str):
raise ValueError(
_('Root device hint "%(name)s" is not a string value. '
'Hint expression: %(expression)s') %
{'name': name, 'expression': expression})
root_device[name] = _normalize_hint_expression(expression, name)
elif hint_type is int:
for v in _extract_hint_operator_and_values(expression,
name)['values']:
try:
integer = int(v)
except ValueError:
raise ValueError(
_('Root device hint "%(name)s" is not an integer '
'value. Current value: %(expression)s') %
{'name': name, 'expression': expression})
if integer <= 0:
raise ValueError(
_('Root device hint "%(name)s" should be a positive '
'integer. Current value: %(expression)s') %
{'name': name, 'expression': expression})
elif hint_type is bool:
try:
root_device[name] = strutils.bool_from_string(
expression, strict=True)
except ValueError:
raise ValueError(
_('Root device hint "%(name)s" is not a Boolean value. '
'Current value: %(expression)s') %
{'name': name, 'expression': expression})
return _append_operator_to_hints(root_device)
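
A short illustration of parse_root_device_hints() above; the exact normalized output is an approximation:

from ironic.common import utils

hints = utils.parse_root_device_hints({'size': 120,
                                       'model': 'Fastable SD100',
                                       'rotational': True})
# Roughly: {'size': '== 120', 'model': 's== fastable%20sd100',
#           'rotational': True}
print(hints)
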
def find_devices_by_hints(devices, root_device_hints):
"""Find all devices that match the root device hints.
Try to find devices that match the root device hints. In order
for a device to be matched it needs to satisfy all the given hints.
:param devices: A list of dictionaries representing the devices
containing one or more of the following keys:
:name: (String) The device name, e.g /dev/sda
:size: (Integer) Size of the device in *bytes*
:model: (String) Device model
:vendor: (String) Device vendor name
:serial: (String) Device serial number
:wwn: (String) Unique storage identifier
:wwn_with_extension: (String): Unique storage identifier with
the vendor extension appended
:wwn_vendor_extension: (String): Unique vendor storage identifier
:rotational: (Boolean) Whether it's a rotational device or
not. Useful to distinguish HDDs (rotational) and SSDs
(not rotational).
:hctl: (String): The SCSI address: Host, channel, target and lun.
For example: '1:0:0:0'.
:by_path: (String): The alternative device name,
e.g. /dev/disk/by-path/pci-0000:00
:param root_device_hints: A dictionary with the root device hints.
:raises: ValueError, if some information is invalid.
:returns: A generator with all matching devices as dictionaries.
"""
LOG.debug('Trying to find devices from "%(devs)s" that match the '
'device hints "%(hints)s"',
{'devs': ', '.join([d.get('name') for d in devices]),
'hints': root_device_hints})
parsed_hints = parse_root_device_hints(root_device_hints)
for dev in devices:
device_name = dev.get('name')
for hint in parsed_hints:
hint_type = VALID_ROOT_DEVICE_HINTS[hint]
device_value = dev.get(hint)
hint_value = parsed_hints[hint]
if hint_type is str:
try:
device_value = _normalize_hint_expression(device_value,
hint)
except ValueError:
LOG.warning(
'The attribute "%(attr)s" of the device "%(dev)s" '
'has an empty value. Skipping device.',
{'attr': hint, 'dev': device_name})
break
if hint == 'size':
# Since we don't support units yet we expect the size
# in GiB for now
device_value = device_value / units.Gi
LOG.debug('Trying to match the device hint "%(hint)s" '
'with a value of "%(hint_value)s" against the same '
'device\'s (%(dev)s) attribute with a value of '
'"%(dev_value)s"', {'hint': hint, 'dev': device_name,
'hint_value': hint_value,
'dev_value': device_value})
# NOTE(lucasagomes): Boolean hints are not supported by
# specs_matcher.match(), so we need to do the comparison
# ourselves
if hint_type is bool:
try:
device_value = strutils.bool_from_string(device_value,
strict=True)
except ValueError:
LOG.warning('The attribute "%(attr)s" (with value '
'"%(value)s") of device "%(dev)s" is not '
'a valid Boolean. Skipping device.',
{'attr': hint, 'value': device_value,
'dev': device_name})
break
if device_value == hint_value:
continue
elif specs_matcher.match(device_value, hint_value):
continue
LOG.debug('The attribute "%(attr)s" (with value "%(value)s") '
'of device "%(dev)s" does not match the hint %(hint)s',
{'attr': hint, 'value': device_value,
'dev': device_name, 'hint': hint_value})
break
else:
yield dev
def match_root_device_hints(devices, root_device_hints):
"""Try to find a device that matches the root device hints.
Try to find a device that matches the root device hints. In order
for a device to be matched it needs to satisfy all the given hints.
:param devices: A list of dictionaries representing the devices
containing one or more of the following keys:
:name: (String) The device name, e.g /dev/sda
:size: (Integer) Size of the device in *bytes*
:model: (String) Device model
:vendor: (String) Device vendor name
:serial: (String) Device serial number
:wwn: (String) Unique storage identifier
:wwn_with_extension: (String): Unique storage identifier with
the vendor extension appended
:wwn_vendor_extension: (String): Unique vendor storage identifier
:rotational: (Boolean) Whether it's a rotational device or
not. Useful to distinguish HDDs (rotational) and SSDs
(not rotational).
:hctl: (String): The SCSI address: Host, channel, target and lun.
For example: '1:0:0:0'.
:by_path: (String): The alternative device name,
e.g. /dev/disk/by-path/pci-0000:00
:param root_device_hints: A dictionary with the root device hints.
:raises: ValueError, if some information is invalid.
:returns: The first device to match all the hints or None.
"""
try:
dev = next(find_devices_by_hints(devices, root_device_hints))
except StopIteration:
LOG.warning('No device found that matches the root device hints %s',
root_device_hints)
else:
LOG.info('Root device found! The device "%s" matches the root '
'device hints %s', dev, root_device_hints)
return dev
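
A sketch of matching devices against hints, with made-up device data:

from ironic.common import utils

devices = [
    {'name': '/dev/sda', 'size': 64424509440, 'rotational': False,
     'model': 'Fastable SD100', 'wwn': '0x1234', 'serial': 'abc',
     'vendor': 'ACME'},
    {'name': '/dev/sdb', 'size': 1099511627776, 'rotational': True,
     'model': 'Spinner X', 'wwn': '0x5678', 'serial': 'def',
     'vendor': 'ACME'},
]
# Size hints are compared in GiB, as noted in find_devices_by_hints().
dev = utils.match_root_device_hints(devices, {'rotational': True,
                                              'size': '>= 100'})
# dev should be the /dev/sdb entry, or None if nothing matched.
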
def get_route_source(dest, ignore_link_local=True):
"""Get the IP address to send packages to destination."""
try:
out, _err = execute('ip', 'route', 'get', dest)
except (EnvironmentError, processutils.ProcessExecutionError) as e:
LOG.warning('Cannot get route to host %(dest)s: %(err)s',
{'dest': dest, 'err': e})
return
try:
source = out.strip().split('\n')[0].split('src')[1].split()[0]
if (ipaddress.ip_address(source).is_link_local
and ignore_link_local):
LOG.debug('Ignoring link-local source to %(dest)s: %(rec)s',
{'dest': dest, 'rec': out})
return
return source
except (IndexError, ValueError):
LOG.debug('No route to host %(dest)s, route record: %(rec)s',
{'dest': dest, 'rec': out})
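
And a trivial sketch of get_route_source() above:

from ironic.common import utils

# Returns the local source IP used to reach the destination, or None when
# the route lookup fails or only a link-local source is found.
source_ip = utils.get_route_source('192.0.2.10')
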

View File

@ -12,7 +12,6 @@
import socket
from ironic_lib import utils as il_utils
from oslo_concurrency import processutils
from oslo_service import service
from oslo_service import wsgi
@ -20,6 +19,7 @@ from oslo_service import wsgi
from ironic.api import app
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import utils
from ironic.conf import CONF
@ -50,7 +50,7 @@ class WSGIService(service.ServiceBase):
"must be greater than 0.") % self.workers)
if CONF.api.unix_socket:
il_utils.unlink_without_raise(CONF.api.unix_socket)
utils.unlink_without_raise(CONF.api.unix_socket)
self.server = wsgi.Server(CONF, name, self.app,
socket_family=socket.AF_UNIX,
socket_file=CONF.api.unix_socket,
@ -76,7 +76,7 @@ class WSGIService(service.ServiceBase):
"""
self.server.stop()
if CONF.api.unix_socket:
il_utils.unlink_without_raise(CONF.api.unix_socket)
utils.unlink_without_raise(CONF.api.unix_socket)
def wait(self):
"""Wait for the service to stop serving this API.

View File

@ -14,7 +14,6 @@
import random
from ironic_lib import metrics_utils
from oslo_config import cfg
from oslo_log import log
from oslo_utils import excutils
@ -22,6 +21,7 @@ import tenacity
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import states
from ironic.conductor import task_manager
from ironic import objects

View File

@ -20,7 +20,6 @@ import eventlet
import futurist
from futurist import periodics
from futurist import rejection
from ironic_lib import mdns
from oslo_db import exception as db_exception
from oslo_log import log
from oslo_utils import excutils
@ -32,6 +31,7 @@ from ironic.common import driver_factory
from ironic.common import exception
from ironic.common import hash_ring
from ironic.common.i18n import _
from ironic.common import mdns
from ironic.common import release_mappings as versions
from ironic.common import rpc
from ironic.common import states

View File

@ -14,7 +14,6 @@
import tempfile
from ironic_lib import metrics_utils
from oslo_db import exception as db_exception
from oslo_log import log
from oslo_utils import excutils
@ -24,6 +23,7 @@ from ironic.common import exception
from ironic.common.glance_service import service_utils as glance_utils
from ironic.common.i18n import _
from ironic.common import lessee_sources
from ironic.common import metrics_utils
from ironic.common import states
from ironic.common import swift
from ironic.conductor import notification_utils as notify_utils

View File

@ -45,7 +45,6 @@ import queue
import eventlet
from futurist import waiters
from ironic_lib import metrics_utils
from oslo_log import log
import oslo_messaging as messaging
from oslo_utils import excutils
@ -57,6 +56,7 @@ from ironic.common import driver_factory
from ironic.common import exception
from ironic.common import faults
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import network
from ironic.common import nova
from ironic.common import rpc
@ -2741,15 +2741,6 @@ class ConductorManager(base_manager.BaseConductorManager):
ev_type = 'ironic.metrics'
message['event_type'] = ev_type + '.update'
sensors_data = METRICS.get_metrics_data()
except AttributeError:
# TODO(TheJulia): Remove this at some point, but right now
# don't inherently break on version mismatches when people
# disregard requirements.
LOG.warning(
'get_sensors_data has been configured to collect '
'conductor metrics, however the installed ironic-lib '
'library lacks the functionality. Please update '
'ironic-lib to a minimum of version 5.4.0.')
except Exception as e:
LOG.exception(
"An unknown error occurred while attempting to collect "

View File

@ -18,10 +18,10 @@ import inspect
import eventlet
from futurist import periodics
from ironic_lib import metrics_utils
from oslo_log import log
from ironic.common import exception
from ironic.common import metrics_utils
from ironic.conductor import base_manager
from ironic.conductor import task_manager
from ironic.drivers import base as driver_base

View File

@ -20,13 +20,13 @@ Client side of the conductor RPC API.
import random
from ironic_lib.json_rpc import client as json_rpc
from oslo_log import log
import oslo_messaging as messaging
from ironic.common import exception
from ironic.common import hash_ring
from ironic.common.i18n import _
from ironic.common.json_rpc import client as json_rpc
from ironic.common import release_mappings as versions
from ironic.common import rpc
from ironic.conf import CONF

View File

@ -30,6 +30,7 @@ from ironic.conf import dhcp
from ironic.conf import disk_utils
from ironic.conf import dnsmasq
from ironic.conf import drac
from ironic.conf import exception
from ironic.conf import fake
from ironic.conf import glance
from ironic.conf import healthcheck
@ -38,8 +39,9 @@ from ironic.conf import inspector
from ironic.conf import inventory
from ironic.conf import ipmi
from ironic.conf import irmc
from ironic.conf import json_rpc
from ironic.conf import mdns
from ironic.conf import metrics
from ironic.conf import metrics_statsd
from ironic.conf import molds
from ironic.conf import neutron
from ironic.conf import nova
@ -67,6 +69,7 @@ drac.register_opts(CONF)
dhcp.register_opts(CONF)
disk_utils.register_opts(CONF)
dnsmasq.register_opts(CONF)
exception.register_opts(CONF)
fake.register_opts(CONF)
glance.register_opts(CONF)
healthcheck.register_opts(CONF)
@ -75,8 +78,9 @@ inspector.register_opts(CONF)
inventory.register_opts(CONF)
ipmi.register_opts(CONF)
irmc.register_opts(CONF)
json_rpc.register_opts(CONF)
mdns.register_opts(CONF)
metrics.register_opts(CONF)
metrics_statsd.register_opts(CONF)
molds.register_opts(CONF)
neutron.register_opts(CONF)
nova.register_opts(CONF)

View File

@ -14,7 +14,7 @@ from oslo_config import cfg
# NOTE(TheJulia): If you make *any* chance to this code, you may need
# to make an identitical or similar change to ironic-python-agent.
# to make an identical or similar change to ironic-python-agent.
# These options were originally taken from ironic-lib upon the decision
# to move the qemu-img image conversion calls into the projects in
# order to simplify fixes related to them.

View File

@ -1,6 +1,3 @@
# Copyright 2016 Intel Corporation
# Copyright 2014 Rackspace, Inc.
# Copyright 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@ -20,17 +17,15 @@ from ironic.common.i18n import _
opts = [
cfg.StrOpt('agent_statsd_host',
default='localhost',
help=_('Host for the agent ramdisk to use with the statsd '
'backend. This must be accessible from networks the '
'agent is booted on.')),
cfg.PortOpt('agent_statsd_port',
default=8125,
help=_('Port for the agent ramdisk to use with the statsd '
'backend.')),
cfg.BoolOpt('fatal_exception_format_errors',
default=False,
help=_('Used if there is a formatting error when generating '
'an exception message (a programming error). If True, '
'raise an exception; if False, use the unformatted '
'message.'),
deprecated_group='ironic_lib'),
]
def register_opts(conf):
conf.register_opts(opts, group='metrics_statsd')
conf.register_opts(opts, group='errors')

ironic/conf/json_rpc.py Normal file
View File

@ -0,0 +1,79 @@
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from ironic.common.i18n import _
from ironic.common import keystone
from ironic.conf.api import Octal
CONF = cfg.CONF
opts = [
cfg.StrOpt('auth_strategy',
choices=[('noauth', _('no authentication')),
('keystone', _('use the Identity service for '
'authentication')),
('http_basic', _('HTTP basic authentication'))],
help=_('Authentication strategy used by JSON RPC. Defaults to '
'the global auth_strategy setting.')),
cfg.StrOpt('http_basic_auth_user_file',
default='/etc/ironic/htpasswd-json-rpc',
help=_('Path to Apache format user authentication file used '
'when auth_strategy=http_basic')),
cfg.HostAddressOpt('host_ip',
default='::',
help=_('The IP address or hostname on which JSON RPC '
'will listen.')),
cfg.PortOpt('port',
default=8089,
help=_('The port to use for JSON RPC')),
cfg.BoolOpt('use_ssl',
default=False,
help=_('Whether to use TLS for JSON RPC')),
cfg.StrOpt('http_basic_username',
deprecated_for_removal=True,
deprecated_reason=_("Use username instead"),
help=_("Name of the user to use for HTTP Basic authentication "
"client requests.")),
cfg.StrOpt('http_basic_password',
deprecated_for_removal=True,
deprecated_reason=_("Use password instead"),
secret=True,
help=_("Password to use for HTTP Basic authentication "
"client requests.")),
cfg.ListOpt('allowed_roles',
default=['admin'],
help=_("List of roles allowed to use JSON RPC")),
cfg.StrOpt('unix_socket',
help=_('Unix socket to listen on. Disables host_ip and port.')),
cfg.Opt('unix_socket_mode', type=Octal(),
help=_('File mode (an octal number) of the unix socket to '
'listen on. Ignored if unix_socket is not set.')),
]
def register_opts(conf):
conf.register_opts(opts, group='json_rpc')
keystone.register_auth_opts(conf, 'json_rpc')
conf.set_default('timeout', 120, group='json_rpc')
def list_opts():
return keystone.add_auth_opts(opts)
def auth_strategy():
return CONF.json_rpc.auth_strategy or CONF.auth_strategy
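
The fallback above can be exercised directly; a sketch assuming the option groups are registered:

from ironic.conf import json_rpc

# Falls back to the global auth_strategy when [json_rpc]auth_strategy is
# unset.
strategy = json_rpc.auth_strategy()
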

ironic/conf/mdns.py Normal file
View File

@ -0,0 +1,44 @@
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from oslo_config import types as cfg_types
from ironic.common.i18n import _
opts = [
cfg.IntOpt('registration_attempts',
min=1, default=5,
help=_('Number of attempts to register a service. Currently '
'has to be larger than 1 because of race conditions '
'in the zeroconf library.')),
cfg.IntOpt('lookup_attempts',
min=1, default=3,
help=_('Number of attempts to lookup a service.')),
cfg.Opt('params',
# This is required for values that contain commas.
type=cfg_types.Dict(cfg_types.String(quotes=True)),
default={},
help=_('Additional parameters to pass for the registered '
'service.')),
cfg.ListOpt('interfaces',
help=_('List of IP addresses of interfaces to use for mDNS. '
'Defaults to all interfaces on the system.')),
]
def register_opts(conf):
group = cfg.OptGroup(name='mdns', title=_('Options for multicast DNS'))
conf.register_group(group)
conf.register_opts(opts, group)

View File

@ -20,6 +20,30 @@ from ironic.common.i18n import _
opts = [
cfg.StrOpt('backend',
default='noop',
choices=[
('noop', 'Do nothing in relation to metrics.'),
('statsd', 'Transmits metrics data to a statsd backend.'),
('collector', 'Collects metrics data and saves it in '
'memory for use by the running application.'),
],
help='Backend to use for the metrics system.'),
cfg.BoolOpt('prepend_host',
default=False,
help='Prepend the hostname to all metric names. '
'The format of metric names is '
'[global_prefix.][host_name.]prefix.metric_name.'),
cfg.BoolOpt('prepend_host_reverse',
default=True,
help='Split the prepended host value by "." and reverse it '
'(to better match the reverse hierarchical form of '
'domain names).'),
cfg.StrOpt('global_prefix',
help='Prefix all metric names with this value. '
'By default, there is no global prefix. '
'The format of metric names is '
'[global_prefix.][host_name.]prefix.metric_name.'),
# IPA config options: used by IPA to configure how it reports metric data
cfg.StrOpt('agent_backend',
default='noop',
@ -51,5 +75,29 @@ opts = [
]
statsd_opts = [
cfg.StrOpt('statsd_host',
default='localhost',
help='Host for use with the statsd backend.'),
cfg.PortOpt('statsd_port',
default=8125,
help='Port to use with the statsd backend.'),
cfg.StrOpt('agent_statsd_host',
default='localhost',
help=_('Host for the agent ramdisk to use with the statsd '
'backend. This must be accessible from networks the '
'agent is booted on.')),
cfg.PortOpt('agent_statsd_port',
default=8125,
help=_('Port for the agent ramdisk to use with the statsd '
'backend.')),
]
def register_opts(conf):
conf.register_opts(opts, group='metrics')
conf.register_opts(statsd_opts, group='metrics_statsd')
def list_opts():
return [opts, statsd_opts]

View File

@ -15,9 +15,11 @@ from oslo_log import log
import ironic.conf
# NOTE(JayF): Please keep this in ABC order by group (with DEFAULT first)
_opts = [
('DEFAULT', ironic.conf.default.list_opts()),
('agent', ironic.conf.agent.opts),
('anaconda', ironic.conf.anaconda.opts),
('ansible', ironic.conf.ansible.opts),
('api', ironic.conf.api.opts),
('audit', ironic.conf.audit.opts),
@ -29,6 +31,7 @@ _opts = [
('dhcp', ironic.conf.dhcp.opts),
('disk_utils', ironic.conf.disk_utils.opts),
('drac', ironic.conf.drac.opts),
('errors', ironic.conf.exception.opts),
('glance', ironic.conf.glance.list_opts()),
('healthcheck', ironic.conf.healthcheck.opts),
('ilo', ironic.conf.ilo.opts),
@ -36,9 +39,10 @@ _opts = [
('inventory', ironic.conf.inventory.opts),
('ipmi', ironic.conf.ipmi.opts),
('irmc', ironic.conf.irmc.opts),
('anaconda', ironic.conf.anaconda.opts),
('json_rpc', ironic.conf.json_rpc.list_opts()),
('mdns', ironic.conf.mdns.opts),
('metrics', ironic.conf.metrics.opts),
('metrics_statsd', ironic.conf.metrics_statsd.opts),
('metrics_statsd', ironic.conf.metrics.statsd_opts),
('molds', ironic.conf.molds.opts),
('neutron', ironic.conf.neutron.list_opts()),
('nova', ironic.conf.nova.list_opts()),

View File

@ -14,7 +14,6 @@
from urllib import parse as urlparse
from ironic_lib import metrics_utils
from oslo_log import log
from oslo_utils import strutils
from oslo_utils import units
@ -27,6 +26,7 @@ from ironic.common.glance_service import service_utils
from ironic.common.i18n import _
from ironic.common import image_service
from ironic.common import images
from ironic.common import metrics_utils
from ironic.common import raid
from ironic.common import states
from ironic.common import utils

View File

@ -18,13 +18,13 @@
import collections
from ironic_lib import metrics_utils
from oslo_log import log
from ironic.common import async_steps
from ironic.common import dhcp_factory
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import states
from ironic.common import utils
from ironic.conductor import cleaning

View File

@ -17,7 +17,6 @@ import os
import ssl
import time
from ironic_lib import metrics_utils
from oslo_log import log
from oslo_serialization import jsonutils
from oslo_utils import excutils
@ -27,6 +26,7 @@ import tenacity
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import utils
from ironic.conf import CONF

View File

@ -20,8 +20,6 @@ import os
import shlex
from urllib import parse as urlparse
from ironic_lib import metrics_utils
from ironic_lib import utils as irlib_utils
from oslo_concurrency import processutils
from oslo_log import log
from oslo_utils import strutils
@ -34,6 +32,7 @@ from ironic.common import exception
from ironic.common import faults
from ironic.common.i18n import _
from ironic.common import images
from ironic.common import metrics_utils
from ironic.common import states
from ironic.common import utils
from ironic.conductor import steps as conductor_steps
@ -511,8 +510,7 @@ class AnsibleDeploy(agent_base.HeartbeatMixin,
task.driver.boot.clean_up_ramdisk(task)
provider = dhcp_factory.DHCPFactory()
provider.clean_dhcp(task)
irlib_utils.unlink_without_raise(
_get_configdrive_path(task.node.uuid))
utils.unlink_without_raise(_get_configdrive_path(task.node.uuid))
def take_over(self, task):
LOG.error("Ansible deploy does not support take over. "

View File

@ -28,7 +28,6 @@ import socket
import subprocess
import time
from ironic_lib import utils as ironic_utils
from oslo_concurrency import lockutils
from oslo_log import log as logging
from oslo_service import loopingcall
@ -134,7 +133,7 @@ def _stop_console(node_uuid):
LOG.warning("Console process for node %s is not running "
"but pid file exists.", node_uuid)
finally:
ironic_utils.unlink_without_raise(_get_console_pid_file(node_uuid))
utils.unlink_without_raise(_get_console_pid_file(node_uuid))
def make_persistent_password_file(path, password):

View File

@ -17,8 +17,6 @@
import os
import re
from ironic_lib import metrics_utils
from ironic_lib import utils as il_utils
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import fileutils
@ -34,6 +32,7 @@ from ironic.common.i18n import _
from ironic.common import image_service
from ironic.common import images
from ironic.common import keystone
from ironic.common import metrics_utils
from ironic.common import states
from ironic.common import utils
from ironic.conductor import utils as manager_utils
@ -1131,7 +1130,7 @@ def destroy_images(node_uuid):
:param node_uuid: the uuid of the ironic node.
"""
il_utils.unlink_without_raise(_get_image_file_path(node_uuid))
utils.unlink_without_raise(_get_image_file_path(node_uuid))
utils.rmtree_without_raise(_get_image_dir_path(node_uuid))
InstanceImageCache().clean_up()
@ -1148,7 +1147,7 @@ def compute_image_checksum(image_path, algorithm='md5'):
def remove_http_instance_symlink(node_uuid):
symlink_path = _get_http_image_symlink_file_path(node_uuid)
il_utils.unlink_without_raise(symlink_path)
utils.unlink_without_raise(symlink_path)
def destroy_http_instance_images(node):
@ -1231,7 +1230,7 @@ def _validate_image_url(node, url, secret=False, inspect_image=None,
expected_format=expected_format)
# NOTE(TheJulia): We explicitly delete this file because it has no use
# in the cache after this point.
il_utils.unlink_without_raise(image_path)
utils.unlink_without_raise(image_path)
image_info['disk_format'] = img_format
return image_info
@ -1699,7 +1698,7 @@ def get_root_device_for_deploy(node):
source = 'instance_info'
try:
return il_utils.parse_root_device_hints(hints)
return utils.parse_root_device_hints(hints)
except ValueError as e:
raise exception.InvalidParameterValue(
_('Failed to validate the root device hints %(hints)s (from the '

View File

@ -22,7 +22,6 @@ DRAC management interface
import json
from ironic_lib import metrics_utils
import jsonschema
from jsonschema import exceptions as json_schema_exc
from oslo_log import log as logging
@ -31,6 +30,7 @@ import sushy
from ironic.common import boot_devices
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import molds
from ironic.common import states
from ironic.conductor import periodics

View File

@ -15,7 +15,6 @@
DRAC RAID specific methods
"""
from ironic_lib import metrics_utils
from oslo_log import log as logging
from oslo_utils import importutils
import sushy
@ -23,6 +22,7 @@ import tenacity
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import states
from ironic.conductor import periodics
from ironic.conductor import utils as manager_utils

View File

@ -15,12 +15,12 @@
iLO BIOS Interface
"""
from ironic_lib import metrics_utils
from oslo_log import log as logging
from oslo_utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.drivers import base
from ironic.drivers.modules import deploy_utils
from ironic.drivers.modules.ilo import common as ilo_common

View File

@ -15,7 +15,6 @@
Boot Interface for iLO drivers and its supporting methods.
"""
from ironic_lib import metrics_utils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
@ -27,6 +26,7 @@ from ironic.common.glance_service import service_utils
from ironic.common.i18n import _
from ironic.common import image_service
from ironic.common import images
from ironic.common import metrics_utils
from ironic.common import states
from ironic.conductor import utils as manager_utils
from ironic.drivers import base

View File

@ -22,7 +22,6 @@ import shutil
import tempfile
from urllib import parse as urlparse
from ironic_lib import utils as ironic_utils
from oslo_log import log as logging
from oslo_utils import fileutils
from oslo_utils import importutils
@ -218,7 +217,7 @@ def remove_image_from_web_server(object_name):
from the web server root.
"""
image_path = os.path.join(CONF.deploy.http_root, object_name)
ironic_utils.unlink_without_raise(image_path)
utils.unlink_without_raise(image_path)
def copy_image_to_swift(source_file_path, destination_object_name):
@ -901,10 +900,10 @@ def remove_single_or_list_of_files(file_location):
# file_location is a list of files
if isinstance(file_location, list):
for location in file_location:
ironic_utils.unlink_without_raise(location)
utils.unlink_without_raise(location)
# file_location is a single file path
elif isinstance(file_location, str):
ironic_utils.unlink_without_raise(file_location)
utils.unlink_without_raise(file_location)
def verify_image_checksum(image_location, expected_checksum):

View File

@ -15,10 +15,9 @@
iLO Deploy Driver(s) and supporting methods.
"""
from ironic_lib import metrics_utils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.drivers.modules.ilo import common as ilo_common
from ironic.drivers.modules import ipmitool

View File

@ -12,12 +12,12 @@
"""
iLO Inspect Interface
"""
from ironic_lib import metrics_utils
from oslo_log import log as logging
from oslo_utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import states
from ironic.common import utils
from ironic.conductor import utils as conductor_utils

View File

@ -18,7 +18,6 @@ import os
import shutil
from urllib import parse as urlparse
from ironic_lib import metrics_utils
from oslo_log import log as logging
from oslo_service import loopingcall
from oslo_utils import excutils
@ -29,6 +28,7 @@ from ironic.common import boot_devices
from ironic.common import boot_modes
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import states
from ironic.conductor import task_manager
from ironic.conductor import utils as manager_utils

View File

@ -16,7 +16,6 @@
iLO Power Driver
"""
from ironic_lib import metrics_utils
from oslo_log import log as logging
from oslo_service import loopingcall
from oslo_utils import importutils
@ -24,6 +23,7 @@ from oslo_utils import importutils
from ironic.common import boot_devices
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import states
from ironic.conductor import task_manager
from ironic.conductor import utils as manager_utils

View File

@ -16,12 +16,12 @@
iLO5 RAID specific methods
"""
from ironic_lib import metrics_utils
from oslo_log import log as logging
from oslo_utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import raid
from ironic.common import states
from ironic.conductor import utils as manager_utils

View File

@ -16,10 +16,9 @@
Vendor Interface for iLO drivers and its supporting methods.
"""
from ironic_lib import metrics_utils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import states
from ironic.conductor import task_manager
from ironic.conductor import utils as manager_utils

View File

@ -11,13 +11,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from ironic_lib import utils as il_utils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import units
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import utils
from ironic.drivers.modules.inspector.hooks import base
CONF = cfg.CONF
@ -34,7 +34,7 @@ class RootDeviceHook(base.InspectionHook):
skip_list = set()
for hint in skip_list_hints:
skipped_devs = il_utils.find_devices_by_hints(block_devices, hint)
skipped_devs = utils.find_devices_by_hints(block_devices, hint)
excluded_devs = {dev['name'] for dev in skipped_devs}
skipped_devices = excluded_devs.difference(skip_list)
skip_list = skip_list.union(excluded_devs)
@ -61,8 +61,8 @@ class RootDeviceHook(base.InspectionHook):
inventory_disks = inventory['disks']
try:
root_device = il_utils.match_root_device_hints(inventory_disks,
hints)
root_device = utils.match_root_device_hints(inventory_disks,
hints)
except (TypeError, ValueError) as e:
raise exception.HardwareInspectionFailure(
_('No disks could be found using root device hints %(hints)s '

View File

@ -37,8 +37,6 @@ import tempfile
import time
from eventlet.green import subprocess as green_subprocess
from ironic_lib import metrics_utils
from ironic_lib import utils as ironic_utils
from oslo_concurrency import processutils
from oslo_log import log as logging
from oslo_utils import excutils
@ -47,6 +45,7 @@ from oslo_utils import strutils
from ironic.common import boot_devices
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import states
from ironic.common import utils
from ironic.conductor import task_manager
@ -1660,8 +1659,7 @@ class IPMIShellinaboxConsole(IPMIConsole):
try:
console_utils.stop_shellinabox_console(task.node.uuid)
finally:
ironic_utils.unlink_without_raise(
_console_pwfile_path(task.node.uuid))
utils.unlink_without_raise(_console_pwfile_path(task.node.uuid))
_release_allocated_port(task)
def _exec_stop_console(self, driver_info):
@ -1727,8 +1725,7 @@ class IPMISocatConsole(IPMIConsole):
try:
console_utils.stop_socat_console(task.node.uuid)
finally:
ironic_utils.unlink_without_raise(
_console_pwfile_path(task.node.uuid))
utils.unlink_without_raise(_console_pwfile_path(task.node.uuid))
self._exec_stop_console(driver_info)
_release_allocated_port(task)

View File

@ -15,11 +15,11 @@
"""
iRMC BIOS configuration specific methods
"""
from ironic_lib import metrics_utils
from oslo_log import log as logging
from oslo_utils import importutils
from ironic.common import exception
from ironic.common import metrics_utils
from ironic.drivers import base
from ironic.drivers.modules.irmc import common as irmc_common
from ironic import objects

View File

@ -21,8 +21,6 @@ import shutil
import tempfile
from urllib import parse as urlparse
from ironic_lib import metrics_utils
from ironic_lib import utils as ironic_utils
from oslo_log import log as logging
from oslo_utils import importutils
from oslo_utils import netutils
@ -33,7 +31,9 @@ from ironic.common.glance_service import service_utils
from ironic.common.i18n import _
from ironic.common import image_service
from ironic.common import images
from ironic.common import metrics_utils
from ironic.common import states
from ironic.common import utils
from ironic.conductor import utils as manager_utils
from ironic.conf import CONF
from ironic.drivers import base
@ -450,7 +450,7 @@ def _remove_share_file(share_filename):
"""
share_fullpathname = os.path.join(
CONF.irmc.remote_image_share_root, share_filename)
ironic_utils.unlink_without_raise(share_fullpathname)
utils.unlink_without_raise(share_fullpathname)
def _attach_virtual_cd(node, bootable_iso_filename):

View File

@ -16,13 +16,13 @@ iRMC Inspect Interface
"""
import re
from ironic_lib import metrics_utils
from oslo_log import log as logging
from oslo_utils import importutils
from ironic.common import boot_devices
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import states
from ironic.common import utils
from ironic.conductor import utils as manager_utils

View File

@ -15,13 +15,13 @@
iRMC Management Driver
"""
from ironic_lib import metrics_utils
from oslo_log import log as logging
from oslo_utils import importutils
from ironic.common import boot_devices
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import states
from ironic.conductor import task_manager
from ironic.conductor import utils as manager_utils

View File

@ -15,13 +15,13 @@
"""
iRMC Power Driver using the Base Server Profile
"""
from ironic_lib import metrics_utils
from oslo_log import log as logging
from oslo_service import loopingcall
from oslo_utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import states
from ironic.conductor import task_manager
from ironic.conf import CONF

View File

@ -15,11 +15,11 @@
"""
Irmc RAID specific methods
"""
from ironic_lib import metrics_utils
from oslo_log import log as logging
from oslo_utils import importutils
from ironic.common import exception
from ironic.common import metrics_utils
from ironic.common import raid as raid_common
from ironic.common import states
from ironic.conductor import periodics

View File

@ -15,11 +15,11 @@
PXE Boot Interface
"""
from ironic_lib import metrics_utils
from oslo_log import log as logging
from ironic.common import boot_devices
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import pxe_utils
from ironic.common import states
from ironic.conductor import task_manager

View File

@ -13,7 +13,6 @@
Base PXE Interface Methods
"""
from ironic_lib import metrics_utils
from oslo_config import cfg
from oslo_log import log as logging
@ -21,6 +20,7 @@ from ironic.common import boot_devices
from ironic.common import dhcp_factory
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import pxe_utils
from ironic.common import states
from ironic.conductor import periodics

View File

@ -14,11 +14,11 @@
Ramdisk Deploy Interface
"""
from ironic_lib import metrics_utils
from oslo_log import log as logging
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import states
from ironic.conductor import task_manager
from ironic.conductor import utils as manager_utils

View File

@ -12,12 +12,12 @@
# License for the specific language governing permissions and limitations
# under the License.
from ironic_lib import metrics_utils
from oslo_log import log
import sushy
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.drivers import base
from ironic.drivers.modules import deploy_utils
from ironic.drivers.modules.redfish import utils as redfish_utils

View File

@ -13,13 +13,13 @@
from urllib.parse import urlparse
from ironic_lib import metrics_utils
from oslo_log import log
from oslo_utils import timeutils
import sushy
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import states
from ironic.conductor import periodics
from ironic.conductor import utils as manager_utils

View File

@ -17,7 +17,6 @@ import collections
import time
from urllib.parse import urlparse
from ironic_lib import metrics_utils
from oslo_log import log
from oslo_utils import timeutils
import sushy
@ -28,6 +27,7 @@ from ironic.common import components
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import indicator_states
from ironic.common import metrics_utils
from ironic.common import states
from ironic.common import utils
from ironic.conductor import periodics

View File

@ -16,14 +16,13 @@
import itertools
import math
from ironic_lib import metrics_utils
from oslo_log import log
from oslo_utils import units
import sushy
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import raid as raid_common
from ironic.common import metrics_utils
from ironic.common import raid
from ironic.common import states
from ironic.conductor import periodics
@ -705,7 +704,7 @@ def update_raid_config(node):
"type: %(vol_no_raid_type)s",
{'vol_no_raid_type': ", ".join(vol_no_raid_type)})
raid_common.update_raid_info(node, {'logical_disks': logical_disks})
raid.update_raid_info(node, {'logical_disks': logical_disks})
class RedfishRAID(base.RAIDInterface):

View File

@ -15,13 +15,13 @@
Vendor Interface for Redfish drivers and its supporting methods.
"""
from ironic_lib import metrics_utils
from oslo_log import log
import rfc3986
import sushy
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.drivers import base
from ironic.drivers.modules.redfish import boot as redfish_boot
from ironic.drivers.modules.redfish import utils as redfish_utils

View File

@ -15,10 +15,10 @@ import time
import eventlet
from eventlet import event
from ironic_lib import metrics_utils
from oslo_log import log
from ironic.common.i18n import _
from ironic.common import metrics_utils
from ironic.common import states
from ironic.conf import CONF
from ironic.db import api as dbapi

View File

@ -31,7 +31,6 @@ import warnings
import eventlet
eventlet.monkey_patch(os=False)
import fixtures
from ironic_lib import utils
from oslo_concurrency import processutils
from oslo_config import fixture as config_fixture
from oslo_log import log as logging
@ -45,7 +44,7 @@ from ironic.common import context as ironic_context
from ironic.common import driver_factory
from ironic.common import hash_ring
from ironic.common import rpc
from ironic.common import utils as common_utils
from ironic.common import utils
from ironic.conf import CONF
from ironic.drivers import base as drivers_base
from ironic.objects import base as objects_base
@ -264,7 +263,7 @@ class TestCase(oslo_test_base.BaseTestCase):
def config_temp_dir(self, option, group=None):
"""Override a config option with a temporary directory."""
temp_dir = tempfile.mkdtemp()
self.addCleanup(lambda: common_utils.rmtree_without_raise(temp_dir))
self.addCleanup(lambda: utils.rmtree_without_raise(temp_dir))
self.config(**{option: temp_dir, 'group': group})
def set_defaults(self, **kw):
@ -315,7 +314,7 @@ class TestCase(oslo_test_base.BaseTestCase):
def do_not_call(*args, **kwargs):
"""Helper function to raise an exception if it is called"""
raise Exception(
"Don't call ironic_lib.utils.execute() / "
"Don't call utils.execute() / "
"processutils.execute() or similar functions in tests!")

View File

@ -0,0 +1,73 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from unittest import mock
from oslo_config import cfg
from ironic.common import exception
from ironic.tests import base
CONF = cfg.CONF
class Unserializable(object):
def __str__(self):
raise NotImplementedError('nostr')
class TestException(exception.IronicException):
_msg_fmt = 'Some exception: %(spam)s, %(ham)s'
class TestIronicException(base.TestCase):
def test___init___json_serializable(self):
exc = TestException(spam=[1, 2, 3], ham='eggs')
self.assertIn('[1, 2, 3]', str(exc))
self.assertEqual('[1, 2, 3]', exc.kwargs['spam'])
def test___init___string_serializable(self):
exc = TestException(
spam=type('ni', (object,), dict(a=1, b=2))(), ham='eggs'
)
check_str = 'ni object at'
self.assertIn(check_str, str(exc))
self.assertIn(check_str, exc.kwargs['spam'])
@mock.patch.object(exception.LOG, 'error', autospec=True)
def test___init___invalid_kwarg(self, log_mock):
CONF.set_override('fatal_exception_format_errors', False,
group='errors')
e = TestException(spam=Unserializable(), ham='eggs')
message = \
log_mock.call_args_list[0][0][0] % log_mock.call_args_list[0][0][1]
self.assertIsNotNone(
re.search('spam: .*JSON.* NotImplementedError: nostr', message),
message
)
self.assertEqual({'ham': '"eggs"', 'code': 500}, e.kwargs)
@mock.patch.object(exception.LOG, 'error', autospec=True)
def test___init___invalid_kwarg_reraise(self, log_mock):
CONF.set_override('fatal_exception_format_errors', True,
group='errors')
self.assertRaises(KeyError, TestException, spam=Unserializable(),
ham='eggs')
message = \
log_mock.call_args_list[0][0][0] % log_mock.call_args_list[0][0][1]
self.assertIsNotNone(
re.search('spam: .*JSON.* NotImplementedError: nostr', message),
message
)

View File

@ -14,8 +14,6 @@ import os
import shutil
from unittest import mock
from ironic_lib import utils as ironic_utils
from ironic.common import image_publisher
from ironic.common import utils
from ironic.tests.unit.db import base as db_base
@ -174,7 +172,7 @@ class LocalPublisherTestCase(db_base.DbTestCase):
mock_chmod.assert_called_once_with('/httpboot/redfish/boot.iso',
0o644)
@mock.patch.object(ironic_utils, 'unlink_without_raise', autospec=True)
@mock.patch.object(utils, 'unlink_without_raise', autospec=True)
def test_unpublish_local(self, mock_unlink):
object_name = 'boot.iso'
expected_file = '/httpboot/redfish/' + object_name

View File

@ -0,0 +1,734 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import os
import tempfile
from unittest import mock
import fixtures
import oslo_messaging
import webob
from ironic.common import exception
from ironic.common.json_rpc import client
from ironic.common.json_rpc import server
from ironic.tests.base import TestCase
class FakeContext(server.EmptyContext):
request_id = 'abcd'
class FakeManager(object):
def success(self, context, x, y=0):
assert isinstance(context, FakeContext)
assert context.user_name == 'admin'
return x - y
def no_result(self, context):
assert isinstance(context, FakeContext)
return None
def no_context(self):
return 42
def fail(self, context, message):
assert isinstance(context, FakeContext)
raise exception.IronicException(message)
@oslo_messaging.expected_exceptions(exception.BadRequest)
def expected(self, context, message):
assert isinstance(context, FakeContext)
raise exception.BadRequest(message)
def crash(self, context):
raise RuntimeError('boom')
def copy(self, context, data):
return copy.deepcopy(data)
def init_host(self, context):
assert False, "This should not be exposed"
def _private(self, context):
assert False, "This should not be exposed"
# This should not be exposed either
value = 42
class FakeSerializer:
def serialize_entity(self, context, entity):
return entity
def deserialize_entity(self, context, data):
return data
class TestService(TestCase):
def setUp(self):
super(TestService, self).setUp()
self.config(auth_strategy='noauth', group='json_rpc')
self.server_mock = self.useFixture(fixtures.MockPatch(
'oslo_service.wsgi.Server', autospec=True)).mock
self.serializer = FakeSerializer()
self.service = server.WSGIService(FakeManager(), self.serializer,
FakeContext)
self.app = self.service._application
self.ctx = {'user_name': 'admin'}
def _request(self, name=None, params=None, expected_error=None,
request_id='abcd', **kwargs):
body = {
'jsonrpc': '2.0',
}
if request_id is not None:
body['id'] = request_id
if name is not None:
body['method'] = name
if params is not None:
body['params'] = params
if 'json_body' not in kwargs:
kwargs['json_body'] = body
kwargs.setdefault('method', 'POST')
kwargs.setdefault('headers', {'Content-Type': 'application/json'})
request = webob.Request.blank("/", **kwargs)
response = request.get_response(self.app)
self.assertEqual(response.status_code,
expected_error or (200 if request_id else 204))
if request_id is not None:
if expected_error:
self.assertEqual(expected_error,
response.json_body['error']['code'])
else:
return response.json_body
else:
return response.text
def _check(self, body, result=None, error=None, request_id='abcd'):
self.assertEqual('2.0', body.pop('jsonrpc'))
self.assertEqual(request_id, body.pop('id'))
if error is not None:
self.assertEqual({'error': error}, body)
else:
self.assertEqual({'result': result}, body)
def _setup_http_basic(self):
with tempfile.NamedTemporaryFile(mode='w', delete=False) as f:
f.write('myName:$2y$05$lE3eGtyj41jZwrzS87KTqe6.'
'JETVCWBkc32C63UP2aYrGoYOEpbJm\n\n\n')
self.addCleanup(os.remove, f.name)
self.config(http_basic_auth_user_file=f.name, group='json_rpc')
self.config(auth_strategy='http_basic', group='json_rpc')
# self.config(http_basic_username='myUser', group='json_rpc')
# self.config(http_basic_password='myPassword', group='json_rpc')
self.service = server.WSGIService(FakeManager(), self.serializer,
FakeContext)
self.app = self.server_mock.call_args[0][2]
def test_http_basic_not_authenticated(self):
self._setup_http_basic()
self._request('success', {'context': self.ctx, 'x': 42},
request_id=None, expected_error=401)
def test_http_basic(self):
self._setup_http_basic()
headers = {
'Content-Type': 'application/json',
'Authorization': 'Basic bXlOYW1lOm15UGFzc3dvcmQ='
}
body = self._request('success', {'context': self.ctx, 'x': 42},
headers=headers)
self._check(body, result=42)
def test_success(self):
body = self._request('success', {'context': self.ctx, 'x': 42})
self._check(body, result=42)
def test_success_no_result(self):
body = self._request('no_result', {'context': self.ctx})
self._check(body, result=None)
def test_notification(self):
body = self._request('no_result', {'context': self.ctx},
request_id=None)
self.assertEqual('', body)
def test_no_context(self):
body = self._request('no_context')
self._check(body, result=42)
def test_non_json_body(self):
for body in (b'', b'???', b"\xc3\x28"):
request = webob.Request.blank("/", method='POST', body=body)
response = request.get_response(self.app)
self._check(
response.json_body,
error={
'message': server.ParseError._msg_fmt,
'code': -32700,
},
request_id=None)
def test_invalid_requests(self):
bodies = [
# Invalid requests with request ID.
{'method': 'no_result', 'id': 'abcd',
'params': {'context': self.ctx}},
{'jsonrpc': '2.0', 'id': 'abcd', 'params': {'context': self.ctx}},
# These do not count as notifications, since they're malformed.
{'method': 'no_result', 'params': {'context': self.ctx}},
{'jsonrpc': '2.0', 'params': {'context': self.ctx}},
42,
# We do not implement batched requests.
[],
[{'jsonrpc': '2.0', 'method': 'no_result',
'params': {'context': self.ctx}}],
]
for body in bodies:
body = self._request(json_body=body)
self._check(
body,
error={
'message': server.InvalidRequest._msg_fmt,
'code': -32600,
},
request_id=body.get('id'))
def test_malformed_context(self):
body = self._request(json_body={'jsonrpc': '2.0', 'id': 'abcd',
'method': 'no_result',
'params': {'context': 42}})
self._check(
body,
error={
'message': 'Context must be a dictionary, if provided',
'code': -32602,
})
def test_expected_failure(self):
body = self._request('fail', {'context': self.ctx,
'message': 'some error'})
self._check(body,
error={
'message': 'some error',
'code': 500,
'data': {
'class': 'ironic.common.exception.IronicException'
}
})
def test_expected_failure_oslo(self):
# Check that exceptions wrapped by oslo's expected_exceptions get
# unwrapped correctly.
body = self._request('expected', {'context': self.ctx,
'message': 'some error'})
self._check(body,
error={
'message': 'some error',
'code': 400,
'data': {
'class': 'ironic.common.exception.Invalid'
}
})
@mock.patch.object(server.LOG, 'exception', autospec=True)
def test_unexpected_failure(self, mock_log):
body = self._request('crash', {'context': self.ctx})
self._check(body,
error={
'message': 'boom',
'code': 500,
})
self.assertTrue(mock_log.called)
def test_method_not_found(self):
body = self._request('banana', {'context': self.ctx})
self._check(body,
error={
'message': 'Method banana was not found',
'code': -32601,
})
def test_no_deny_methods(self):
for name in ('__init__', '_private', 'init_host', 'value'):
body = self._request(name, {'context': self.ctx})
self._check(body,
error={
'message': 'Method %s was not found' % name,
'code': -32601,
})
def test_missing_argument(self):
body = self._request('success', {'context': self.ctx})
# The exact error message depends on the Python version
self.assertEqual(-32602, body['error']['code'])
self.assertNotIn('result', body)
def test_method_not_post(self):
self._request('success', {'context': self.ctx, 'x': 42},
method='GET', expected_error=405)
def test_authenticated(self):
self.config(auth_strategy='keystone', group='json_rpc')
self.service = server.WSGIService(FakeManager(), self.serializer,
FakeContext)
self.app = self.server_mock.call_args[0][2]
self._request('success', {'context': self.ctx, 'x': 42},
expected_error=401)
def test_authenticated_with_allowed_role(self):
self.config(auth_strategy='keystone', group='json_rpc')
self.config(allowed_roles=['allowed', 'ignored'], group='json_rpc')
self.service = server.WSGIService(FakeManager(), self.serializer,
FakeContext)
self.app = self.server_mock.call_args[0][2]
self._request('success', {'context': self.ctx, 'x': 42},
expected_error=401,
headers={'Content-Type': 'application/json',
'X-Roles': 'allowed,denied'})
def test_authenticated_no_admin_role(self):
self.config(auth_strategy='keystone', group='json_rpc')
self._request('success', {'context': self.ctx, 'x': 42},
expected_error=403)
def test_authenticated_no_allowed_role(self):
self.config(auth_strategy='keystone', group='json_rpc')
self.config(allowed_roles=['allowed', 'ignored'], group='json_rpc')
self._request('success', {'context': self.ctx, 'x': 42},
expected_error=403,
headers={'Content-Type': 'application/json',
'X-Roles': 'denied,notallowed'})
@mock.patch.object(server.LOG, 'debug', autospec=True)
def test_mask_secrets(self, mock_log):
data = {'ipmi_username': 'admin', 'ipmi_password': 'secret'}
node = self.serializer.serialize_entity(self.ctx, data)
body = self._request('copy', {'context': self.ctx, 'data': data})
self.assertIsNone(body.get('error'))
node = self.serializer.deserialize_entity(self.ctx, body['result'])
logged_params = mock_log.call_args_list[0][0][2]
logged_node = logged_params['data']
self.assertEqual({'ipmi_username': 'admin', 'ipmi_password': '***'},
logged_node)
logged_resp = mock_log.call_args_list[1][0][2]
self.assertEqual({'ipmi_username': 'admin', 'ipmi_password': '***'},
logged_resp)
# The result is not affected, only logging
self.assertEqual(data, node)
@mock.patch.object(client, '_get_session', autospec=True)
class TestClient(TestCase):
def setUp(self):
super(TestClient, self).setUp()
self.serializer = FakeSerializer()
self.client = client.Client(self.serializer)
self.context = FakeContext({'user_name': 'admin'})
self.ctx_json = self.context.to_dict()
def test_can_send_version(self, mock_session):
self.assertTrue(self.client.can_send_version('1.42'))
self.client = client.Client(self.serializer, version_cap='1.42')
self.assertTrue(self.client.can_send_version('1.42'))
self.assertTrue(self.client.can_send_version('1.0'))
self.assertFalse(self.client.can_send_version('1.99'))
self.assertFalse(self.client.can_send_version('2.0'))
def test_call_success(self, mock_session):
response = mock_session.return_value.post.return_value
response.json.return_value = {
'jsonrpc': '2.0',
'result': 42
}
cctx = self.client.prepare('foo.example.com')
self.assertEqual('example.com', cctx.host)
result = cctx.call(self.context, 'do_something', answer=42)
self.assertEqual(42, result)
mock_session.return_value.post.assert_called_once_with(
'http://example.com:8089',
json={'jsonrpc': '2.0',
'method': 'do_something',
'params': {'answer': 42, 'context': self.ctx_json},
'id': self.context.request_id})
def test_call_ipv4_success(self, mock_session):
response = mock_session.return_value.post.return_value
response.json.return_value = {
'jsonrpc': '2.0',
'result': 42
}
cctx = self.client.prepare('foo.192.0.2.1')
self.assertEqual('192.0.2.1', cctx.host)
result = cctx.call(self.context, 'do_something', answer=42)
self.assertEqual(42, result)
mock_session.return_value.post.assert_called_once_with(
'http://192.0.2.1:8089',
json={'jsonrpc': '2.0',
'method': 'do_something',
'params': {'answer': 42, 'context': self.ctx_json},
'id': self.context.request_id})
def test_call_ipv6_success(self, mock_session):
response = mock_session.return_value.post.return_value
response.json.return_value = {
'jsonrpc': '2.0',
'result': 42
}
cctx = self.client.prepare('foo.2001:db8::1')
self.assertEqual('2001:db8::1', cctx.host)
self.assertEqual(8089, cctx.port)
result = cctx.call(self.context, 'do_something', answer=42)
self.assertEqual(42, result)
mock_session.return_value.post.assert_called_once_with(
'http://[2001:db8::1]:8089',
json={'jsonrpc': '2.0',
'method': 'do_something',
'params': {'answer': 42, 'context': self.ctx_json},
'id': self.context.request_id})
def test_call_ipv6_success_rfc2732(self, mock_session):
response = mock_session.return_value.post.return_value
response.json.return_value = {
'jsonrpc': '2.0',
'result': 42
}
cctx = self.client.prepare('foo.[2001:db8::1]:8192')
self.assertEqual('2001:db8::1', cctx.host)
result = cctx.call(self.context, 'do_something', answer=42)
self.assertEqual(42, result)
mock_session.return_value.post.assert_called_once_with(
'http://[2001:db8::1]:8192',
json={'jsonrpc': '2.0',
'method': 'do_something',
'params': {'answer': 42, 'context': self.ctx_json},
'id': self.context.request_id})
def test_call_success_with_version(self, mock_session):
response = mock_session.return_value.post.return_value
response.json.return_value = {
'jsonrpc': '2.0',
'result': 42
}
cctx = self.client.prepare('foo.example.com:8192', version='1.42')
self.assertEqual('example.com', cctx.host)
result = cctx.call(self.context, 'do_something', answer=42)
self.assertEqual(42, result)
mock_session.return_value.post.assert_called_once_with(
'http://example.com:8192',
json={'jsonrpc': '2.0',
'method': 'do_something',
'params': {'answer': 42, 'context': self.ctx_json,
'rpc.version': '1.42'},
'id': self.context.request_id})
def test_call_success_with_version_and_cap(self, mock_session):
self.client = client.Client(self.serializer, version_cap='1.99')
response = mock_session.return_value.post.return_value
response.json.return_value = {
'jsonrpc': '2.0',
'result': 42
}
cctx = self.client.prepare('foo.example.com', version='1.42')
self.assertEqual('example.com', cctx.host)
result = cctx.call(self.context, 'do_something', answer=42)
self.assertEqual(42, result)
mock_session.return_value.post.assert_called_once_with(
'http://example.com:8089',
json={'jsonrpc': '2.0',
'method': 'do_something',
'params': {'answer': 42, 'context': self.ctx_json,
'rpc.version': '1.42'},
'id': self.context.request_id})
def test_call_with_ssl(self, mock_session):
self.config(use_ssl=True, group='json_rpc')
response = mock_session.return_value.post.return_value
response.json.return_value = {
'jsonrpc': '2.0',
'result': 42
}
cctx = self.client.prepare('foo.example.com')
self.assertEqual('example.com', cctx.host)
result = cctx.call(self.context, 'do_something', answer=42)
self.assertEqual(42, result)
mock_session.return_value.post.assert_called_once_with(
'https://example.com:8089',
json={'jsonrpc': '2.0',
'method': 'do_something',
'params': {'answer': 42, 'context': self.ctx_json},
'id': self.context.request_id})
def test_cast_success(self, mock_session):
cctx = self.client.prepare('foo.example.com')
self.assertEqual('example.com', cctx.host)
result = cctx.cast(self.context, 'do_something', answer=42)
self.assertIsNone(result)
mock_session.return_value.post.assert_called_once_with(
'http://example.com:8089',
json={'jsonrpc': '2.0',
'method': 'do_something',
'params': {'answer': 42, 'context': self.ctx_json}})
def test_cast_success_with_version(self, mock_session):
cctx = self.client.prepare('foo.example.com', version='1.42')
self.assertEqual('example.com', cctx.host)
result = cctx.cast(self.context, 'do_something', answer=42)
self.assertIsNone(result)
mock_session.return_value.post.assert_called_once_with(
'http://example.com:8089',
json={'jsonrpc': '2.0',
'method': 'do_something',
'params': {'answer': 42, 'context': self.ctx_json,
'rpc.version': '1.42'}})
def test_call_failure(self, mock_session):
response = mock_session.return_value.post.return_value
response.json.return_value = {
'jsonrpc': '2.0',
'error': {
'code': 418,
'message': 'I am a teapot',
'data': {
'class': 'ironic.common.exception.Invalid'
}
}
}
cctx = self.client.prepare('foo.example.com')
self.assertEqual('example.com', cctx.host)
# Make sure that the class is restored correctly for expected errors.
exc = self.assertRaises(exception.BadRequest,
cctx.call,
self.context, 'do_something', answer=42)
# Code from the body has priority over the one in the class.
self.assertEqual(418, exc.code)
self.assertIn('I am a teapot', str(exc))
mock_session.return_value.post.assert_called_once_with(
'http://example.com:8089',
json={'jsonrpc': '2.0',
'method': 'do_something',
'params': {'answer': 42, 'context': self.ctx_json},
'id': self.context.request_id})
def test_call_unexpected_failure(self, mock_session):
response = mock_session.return_value.post.return_value
response.json.return_value = {
'jsonrpc': '2.0',
'error': {
'code': 500,
'message': 'AttributeError',
}
}
cctx = self.client.prepare('foo.example.com')
self.assertEqual('example.com', cctx.host)
exc = self.assertRaises(exception.IronicException,
cctx.call,
self.context, 'do_something', answer=42)
self.assertEqual(500, exc.code)
self.assertIn('Unexpected error', str(exc))
mock_session.return_value.post.assert_called_once_with(
'http://example.com:8089',
json={'jsonrpc': '2.0',
'method': 'do_something',
'params': {'answer': 42, 'context': self.ctx_json},
'id': self.context.request_id})
def test_call_failure_with_foreign_class(self, mock_session):
# This should not happen, but provide an additional safeguard
response = mock_session.return_value.post.return_value
response.json.return_value = {
'jsonrpc': '2.0',
'error': {
'code': 500,
'message': 'AttributeError',
'data': {
'class': 'AttributeError'
}
}
}
cctx = self.client.prepare('foo.example.com')
self.assertEqual('example.com', cctx.host)
exc = self.assertRaises(exception.IronicException,
cctx.call,
self.context, 'do_something', answer=42)
self.assertEqual(500, exc.code)
self.assertIn('Unexpected error', str(exc))
mock_session.return_value.post.assert_called_once_with(
'http://example.com:8089',
json={'jsonrpc': '2.0',
'method': 'do_something',
'params': {'answer': 42, 'context': self.ctx_json},
'id': self.context.request_id})
def test_cast_failure(self, mock_session):
# Cast cannot return normal failures, but make sure we ignore them even
# if the server sends something in violation of the protocol (or because
# it's a low-level error like HTTP Forbidden).
response = mock_session.return_value.post.return_value
response.json.return_value = {
'jsonrpc': '2.0',
'error': {
'code': 418,
'message': 'I am a teapot',
'data': {
'class': 'ironic.common.exception.IronicException'
}
}
}
cctx = self.client.prepare('foo.example.com')
self.assertEqual('example.com', cctx.host)
result = cctx.cast(self.context, 'do_something', answer=42)
self.assertIsNone(result)
mock_session.return_value.post.assert_called_once_with(
'http://example.com:8089',
json={'jsonrpc': '2.0',
'method': 'do_something',
'params': {'answer': 42, 'context': self.ctx_json}})
def test_call_failure_with_version_and_cap(self, mock_session):
self.client = client.Client(self.serializer, version_cap='1.42')
cctx = self.client.prepare('foo.example.com', version='1.99')
self.assertRaisesRegex(RuntimeError,
"requested version 1.99, maximum allowed "
"version is 1.42",
cctx.call, self.context, 'do_something',
answer=42)
self.assertFalse(mock_session.return_value.post.called)
@mock.patch.object(client.LOG, 'debug', autospec=True)
def test_mask_secrets(self, mock_log, mock_session):
request = {
'redfish_username': 'admin',
'redfish_password': 'passw0rd'
}
body = """{
"jsonrpc": "2.0",
"result": {
"driver_info": {
"ipmi_username": "admin",
"ipmi_password": "passw0rd"
}
}
}"""
response = mock_session.return_value.post.return_value
response.text = body
cctx = self.client.prepare('foo.example.com')
cctx.cast(self.context, 'do_something', node=request)
mock_session.return_value.post.assert_called_once_with(
'http://example.com:8089',
json={'jsonrpc': '2.0',
'method': 'do_something',
'params': {'node': request, 'context': self.ctx_json}})
self.assertEqual(2, mock_log.call_count)
node = mock_log.call_args_list[0][0][3]['params']['node']
self.assertEqual(node, {'redfish_username': 'admin',
'redfish_password': '***'})
resp_text = mock_log.call_args_list[1][0][3]
self.assertEqual(body.replace('passw0rd', '***'), resp_text)
@mock.patch('ironic.common.json_rpc.client.keystone', autospec=True)
class TestSession(TestCase):
def setUp(self):
super(TestSession, self).setUp()
client._SESSION = None
def test_noauth(self, mock_keystone):
self.config(auth_strategy='noauth', group='json_rpc')
session = client._get_session()
mock_keystone.get_auth.assert_called_once_with('json_rpc')
auth = mock_keystone.get_auth.return_value
mock_keystone.get_session.assert_called_once_with(
'json_rpc', auth=auth)
internal_session = mock_keystone.get_session.return_value
mock_keystone.get_adapter.assert_called_once_with(
'json_rpc',
session=internal_session,
additional_headers={
'Content-Type': 'application/json'
})
self.assertEqual(mock_keystone.get_adapter.return_value, session)
def test_keystone(self, mock_keystone):
self.config(auth_strategy='keystone', group='json_rpc')
session = client._get_session()
mock_keystone.get_auth.assert_called_once_with('json_rpc')
auth = mock_keystone.get_auth.return_value
mock_keystone.get_session.assert_called_once_with(
'json_rpc', auth=auth)
internal_session = mock_keystone.get_session.return_value
mock_keystone.get_adapter.assert_called_once_with(
'json_rpc',
session=internal_session,
additional_headers={
'Content-Type': 'application/json'
})
self.assertEqual(mock_keystone.get_adapter.return_value, session)
def test_http_basic(self, mock_keystone):
self.config(auth_strategy='http_basic', group='json_rpc')
session = client._get_session()
mock_keystone.get_auth.assert_called_once_with('json_rpc')
auth = mock_keystone.get_auth.return_value
mock_keystone.get_session.assert_called_once_with(
'json_rpc', auth=auth)
internal_session = mock_keystone.get_session.return_value
mock_keystone.get_adapter.assert_called_once_with(
'json_rpc',
session=internal_session,
additional_headers={
'Content-Type': 'application/json'
})
self.assertEqual(mock_keystone.get_adapter.return_value, session)
def test_http_basic_deprecated(self, mock_keystone):
self.config(auth_strategy='http_basic', group='json_rpc')
self.config(http_basic_username='myName', group='json_rpc')
self.config(http_basic_password='myPassword', group='json_rpc')
session = client._get_session()
mock_keystone.get_auth.assert_called_once_with(
'json_rpc', username='myName', password='myPassword')
auth = mock_keystone.get_auth.return_value
mock_keystone.get_session.assert_called_once_with(
'json_rpc', auth=auth)
internal_session = mock_keystone.get_session.return_value
mock_keystone.get_adapter.assert_called_once_with(
'json_rpc',
session=internal_session,
additional_headers={
'Content-Type': 'application/json'
})
self.assertEqual(mock_keystone.get_adapter.return_value, session)
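The cases above exercise plain JSON-RPC 2.0 framing; as a reading aid, an illustrative request and error body matching what these tests send and expect (values are made up, and the error codes -32700/-32600/-32601/-32602 are the ones asserted above):

# Illustrative payloads only; no behaviour beyond what the tests assert.
request = {
    'jsonrpc': '2.0',
    'method': 'do_something',
    'params': {'answer': 42, 'context': {'user_name': 'admin'}},
    'id': 'abcd',  # omitted entirely for cast()/notification calls
}
error_response = {
    'jsonrpc': '2.0',
    'id': 'abcd',
    'error': {'code': -32601, 'message': 'Method do_something was not found'},
}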

View File

@ -1,5 +1,3 @@
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
@ -16,23 +14,20 @@ from unittest import mock
from keystoneauth1 import loading as ks_loading
from oslo_config import cfg
from oslo_config import fixture
from ironic.common import context
from ironic.common import exception
from ironic.common import keystone
from ironic.conf import auth as ironic_auth
from ironic.tests import base
from ironic.tests.base import TestCase
class KeystoneTestCase(base.TestCase):
class KeystoneTestCase(TestCase):
def setUp(self):
super(KeystoneTestCase, self).setUp()
self.test_group = 'test_group'
self.cfg_fixture.conf.register_group(cfg.OptGroup(self.test_group))
ironic_auth.register_auth_opts(self.cfg_fixture.conf, self.test_group,
service_type='vikings')
keystone.register_auth_opts(self.cfg_fixture.conf, self.test_group,
service_type='vikings')
self.config(auth_type='password',
group=self.test_group)
# NOTE(pas-ha) this is due to auth_plugin options
@ -47,10 +42,6 @@ class KeystoneTestCase(base.TestCase):
project_name='fake_tenant',
group=self.test_group)
def _set_config(self):
self.cfg_fixture = self.useFixture(fixture.Config())
self.addCleanup(cfg.CONF.reset)
def test_get_session(self):
self.config(timeout=10, group=self.test_group)
session = keystone.get_session(self.test_group, timeout=20)
@ -82,10 +73,49 @@ class KeystoneTestCase(base.TestCase):
autospec=True)
@mock.patch('keystoneauth1.token_endpoint.Token', autospec=True)
def test_get_service_auth(self, token_mock, service_auth_mock):
ctxt = context.RequestContext(auth_token='spam')
ctxt = mock.Mock(spec=['auth_token'], auth_token='spam')
mock_auth = mock.Mock()
self.assertEqual(service_auth_mock.return_value,
keystone.get_service_auth(ctxt, 'ham', mock_auth))
token_mock.assert_called_once_with('ham', 'spam')
service_auth_mock.assert_called_once_with(
user_auth=token_mock.return_value, service_auth=mock_auth)
class AuthConfTestCase(TestCase):
def setUp(self):
super(AuthConfTestCase, self).setUp()
self.test_group = 'test_group'
self.cfg_fixture.conf.register_group(cfg.OptGroup(self.test_group))
keystone.register_auth_opts(self.cfg_fixture.conf, self.test_group)
self.config(auth_type='password',
group=self.test_group)
# NOTE(pas-ha) this is due to auth_plugin options
# being dynamically registered on first load,
# but we need to set the config before
plugin = ks_loading.get_plugin_loader('password')
opts = ks_loading.get_auth_plugin_conf_options(plugin)
self.cfg_fixture.register_opts(opts, group=self.test_group)
self.config(auth_url='http://127.0.0.1:9898',
username='fake_user',
password='fake_pass',
project_name='fake_tenant',
group=self.test_group)
def test_add_auth_opts(self):
opts = keystone.add_auth_opts([])
# check that there are no duplicates
names = {o.dest for o in opts}
self.assertEqual(len(names), len(opts))
# NOTE(pas-ha) checking for most standard auth and session ones only
expected = {'timeout', 'insecure', 'cafile', 'certfile', 'keyfile',
'auth_type', 'auth_url', 'username', 'password',
'tenant_name', 'project_name', 'trust_id',
'domain_id', 'user_domain_id', 'project_domain_id'}
self.assertTrue(expected.issubset(names))
def test_os_service_types_alias(self):
keystone.register_auth_opts(self.cfg_fixture.conf, 'barbican')
self.assertEqual(self.cfg_fixture.conf.barbican.service_type,
'key-manager')

View File

@ -0,0 +1,143 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import socket
from unittest import mock
from oslo_config import cfg
import zeroconf
from ironic.common import exception
from ironic.common import mdns
from ironic.tests.base import TestCase
CONF = cfg.CONF
@mock.patch.object(zeroconf, 'Zeroconf', autospec=True)
class RegisterServiceTestCase(TestCase):
def test_ok(self, mock_zc):
zc = mdns.Zeroconf()
zc.register_service('baremetal', 'https://127.0.0.1/baremetal')
mock_zc.assert_called_once_with(
interfaces=zeroconf.InterfaceChoice.All,
ip_version=zeroconf.IPVersion.All)
mock_zc.return_value.register_service.assert_called_once_with(mock.ANY)
info = mock_zc.return_value.register_service.call_args[0][0]
self.assertEqual('_openstack._tcp.local.', info.type)
self.assertEqual('baremetal._openstack._tcp.local.', info.name)
self.assertEqual('127.0.0.1', socket.inet_ntoa(info.addresses[0]))
self.assertEqual({b'path': b'/baremetal'}, info.properties)
def test_with_params(self, mock_zc):
CONF.set_override('params', {'answer': 'none', 'foo': 'bar'},
group='mdns')
zc = mdns.Zeroconf()
zc.register_service('baremetal', 'https://127.0.0.1/baremetal',
params={'answer': b'42'})
mock_zc.return_value.register_service.assert_called_once_with(mock.ANY)
info = mock_zc.return_value.register_service.call_args[0][0]
self.assertEqual('_openstack._tcp.local.', info.type)
self.assertEqual('baremetal._openstack._tcp.local.', info.name)
self.assertEqual('127.0.0.1', socket.inet_ntoa(info.addresses[0]))
self.assertEqual({b'path': b'/baremetal',
b'answer': b'42',
b'foo': b'bar'},
info.properties)
@mock.patch.object(mdns.time, 'sleep', autospec=True)
def test_with_race(self, mock_sleep, mock_zc):
mock_zc.return_value.register_service.side_effect = [
zeroconf.NonUniqueNameException,
zeroconf.NonUniqueNameException,
zeroconf.NonUniqueNameException,
None
]
zc = mdns.Zeroconf()
zc.register_service('baremetal', 'https://127.0.0.1/baremetal')
mock_zc.return_value.register_service.assert_called_with(mock.ANY)
self.assertEqual(4, mock_zc.return_value.register_service.call_count)
mock_sleep.assert_has_calls([mock.call(i) for i in (0.1, 0.2, 0.4)])
def test_with_interfaces(self, mock_zc):
CONF.set_override('interfaces', ['10.0.0.1', '192.168.1.1'],
group='mdns')
zc = mdns.Zeroconf()
zc.register_service('baremetal', 'https://127.0.0.1/baremetal')
mock_zc.assert_called_once_with(interfaces=['10.0.0.1', '192.168.1.1'],
ip_version=None)
mock_zc.return_value.register_service.assert_called_once_with(mock.ANY)
info = mock_zc.return_value.register_service.call_args[0][0]
self.assertEqual('_openstack._tcp.local.', info.type)
self.assertEqual('baremetal._openstack._tcp.local.', info.name)
self.assertEqual('127.0.0.1', socket.inet_ntoa(info.addresses[0]))
self.assertEqual({b'path': b'/baremetal'}, info.properties)
@mock.patch.object(mdns.time, 'sleep', autospec=True)
def test_failure(self, mock_sleep, mock_zc):
mock_zc.return_value.register_service.side_effect = (
zeroconf.NonUniqueNameException
)
zc = mdns.Zeroconf()
self.assertRaises(exception.ServiceRegistrationFailure,
zc.register_service,
'baremetal', 'https://127.0.0.1/baremetal')
mock_zc.return_value.register_service.assert_called_with(mock.ANY)
self.assertEqual(CONF.mdns.registration_attempts,
mock_zc.return_value.register_service.call_count)
self.assertEqual(CONF.mdns.registration_attempts - 1,
mock_sleep.call_count)
class ParseEndpointTestCase(TestCase):
def test_simple(self):
endpoint = mdns._parse_endpoint('http://127.0.0.1')
self.assertEqual(1, len(endpoint.addresses))
self.assertEqual('127.0.0.1', socket.inet_ntoa(endpoint.addresses[0]))
self.assertEqual(80, endpoint.port)
self.assertEqual({}, endpoint.params)
self.assertIsNone(endpoint.hostname)
def test_simple_https(self):
endpoint = mdns._parse_endpoint('https://127.0.0.1')
self.assertEqual(1, len(endpoint.addresses))
self.assertEqual('127.0.0.1', socket.inet_ntoa(endpoint.addresses[0]))
self.assertEqual(443, endpoint.port)
self.assertEqual({}, endpoint.params)
self.assertIsNone(endpoint.hostname)
def test_with_path_and_port(self):
endpoint = mdns._parse_endpoint('http://127.0.0.1:8080/bm')
self.assertEqual(1, len(endpoint.addresses))
self.assertEqual('127.0.0.1', socket.inet_ntoa(endpoint.addresses[0]))
self.assertEqual(8080, endpoint.port)
self.assertEqual({'path': '/bm', 'protocol': 'http'}, endpoint.params)
self.assertIsNone(endpoint.hostname)
@mock.patch.object(socket, 'getaddrinfo', autospec=True)
def test_resolve(self, mock_resolve):
mock_resolve.return_value = [
(socket.AF_INET, None, None, None, ('1.2.3.4',)),
(socket.AF_INET6, None, None, None, ('::2', 'scope')),
]
endpoint = mdns._parse_endpoint('http://example.com')
self.assertEqual(2, len(endpoint.addresses))
self.assertEqual('1.2.3.4', socket.inet_ntoa(endpoint.addresses[0]))
self.assertEqual('::2', socket.inet_ntop(socket.AF_INET6,
endpoint.addresses[1]))
self.assertEqual(80, endpoint.port)
self.assertEqual({}, endpoint.params)
self.assertEqual('example.com.', endpoint.hostname)
mock_resolve.assert_called_once_with('example.com', 80, mock.ANY,
socket.IPPROTO_TCP)

View File

@ -20,7 +20,6 @@ import shutil
import tempfile
from unittest import mock
from ironic_lib import utils as ironic_utils
from oslo_config import cfg
from oslo_utils import fileutils
from oslo_utils import uuidutils
@ -381,7 +380,7 @@ class TestPXEUtils(db_base.DbTestCase):
self.assertEqual(str(expected_template), rendered_template)
@mock.patch('ironic.common.utils.create_link_without_raise', autospec=True)
@mock.patch('ironic_lib.utils.unlink_without_raise', autospec=True)
@mock.patch('ironic.common.utils.unlink_without_raise', autospec=True)
def test__write_mac_pxe_configs(self, unlink_mock, create_link_mock):
port_1 = object_utils.create_test_port(
self.context, node_id=self.node.id,
@ -419,7 +418,7 @@ class TestPXEUtils(db_base.DbTestCase):
create_link_mock.assert_has_calls(create_link_calls)
@mock.patch('ironic.common.utils.create_link_without_raise', autospec=True)
@mock.patch('ironic_lib.utils.unlink_without_raise', autospec=True)
@mock.patch('ironic.common.utils.unlink_without_raise', autospec=True)
def test__write_infiniband_mac_pxe_configs(
self, unlink_mock, create_link_mock):
client_id1 = (
@ -464,7 +463,7 @@ class TestPXEUtils(db_base.DbTestCase):
create_link_mock.assert_has_calls(create_link_calls)
@mock.patch('ironic.common.utils.create_link_without_raise', autospec=True)
@mock.patch('ironic_lib.utils.unlink_without_raise', autospec=True)
@mock.patch('ironic.common.utils.unlink_without_raise', autospec=True)
def test__write_mac_ipxe_configs(self, unlink_mock, create_link_mock):
port_1 = object_utils.create_test_port(
self.context, node_id=self.node.id,
@ -549,7 +548,7 @@ class TestPXEUtils(db_base.DbTestCase):
create_link_mock.mock_calls)
@mock.patch('ironic.common.utils.create_link_without_raise', autospec=True)
@mock.patch('ironic_lib.utils.unlink_without_raise', autospec=True)
@mock.patch('ironic.common.utils.unlink_without_raise', autospec=True)
@mock.patch('ironic.common.dhcp_factory.DHCPFactory.provider',
autospec=True)
def test__link_ip_address_pxe_configs(self, provider_mock, unlink_mock,
@ -801,7 +800,7 @@ class TestPXEUtils(db_base.DbTestCase):
@mock.patch('ironic.dhcp.neutron.NeutronDHCPApi.get_ip_addresses',
autospec=True)
@mock.patch('ironic.common.utils.rmtree_without_raise', autospec=True)
@mock.patch('ironic_lib.utils.unlink_without_raise', autospec=True)
@mock.patch('ironic.common.utils.unlink_without_raise', autospec=True)
def test_clean_up_pxe_config(self, unlink_mock, rmtree_mock,
mock_get_ip_addr):
address = "aa:aa:aa:aa:aa:aa"
@ -1096,7 +1095,7 @@ class TestPXEUtils(db_base.DbTestCase):
ipxe_enabled=True)
@mock.patch('ironic.common.utils.rmtree_without_raise', autospec=True)
@mock.patch('ironic_lib.utils.unlink_without_raise', autospec=True)
@mock.patch('ironic.common.utils.unlink_without_raise', autospec=True)
@mock.patch('ironic.common.dhcp_factory.DHCPFactory.provider',
autospec=True)
def test_clean_up_pxe_config_uefi(self, provider_mock, unlink_mock,
@ -1124,7 +1123,7 @@ class TestPXEUtils(db_base.DbTestCase):
os.path.join(CONF.pxe.tftp_root, self.node.uuid))
@mock.patch('ironic.common.utils.rmtree_without_raise', autospec=True)
@mock.patch('ironic_lib.utils.unlink_without_raise', autospec=True)
@mock.patch('ironic.common.utils.unlink_without_raise', autospec=True)
@mock.patch('ironic.common.dhcp_factory.DHCPFactory.provider',
autospec=True)
def test_clean_up_pxe_config_uefi_mac_address(
@ -1154,7 +1153,7 @@ class TestPXEUtils(db_base.DbTestCase):
os.path.join(CONF.pxe.tftp_root, self.node.uuid))
@mock.patch('ironic.common.utils.rmtree_without_raise', autospec=True)
@mock.patch('ironic_lib.utils.unlink_without_raise', autospec=True)
@mock.patch('ironic.common.utils.unlink_without_raise', autospec=True)
@mock.patch('ironic.common.dhcp_factory.DHCPFactory.provider',
autospec=True)
def test_clean_up_pxe_config_uefi_instance_info(self,
@ -1182,7 +1181,7 @@ class TestPXEUtils(db_base.DbTestCase):
os.path.join(CONF.pxe.tftp_root, self.node.uuid))
@mock.patch('ironic.common.utils.rmtree_without_raise', autospec=True)
@mock.patch('ironic_lib.utils.unlink_without_raise', autospec=True)
@mock.patch('ironic.common.utils.unlink_without_raise', autospec=True)
@mock.patch('ironic.common.dhcp_factory.DHCPFactory.provider',
autospec=True)
def test_clean_up_pxe_config_uefi_no_ipaddress(self, provider_mock,
@ -2524,7 +2523,7 @@ class iPXEBuildConfigOptionsTestCase(db_base.DbTestCase):
self._test_build_pxe_config_options_ipxe(iso_boot=True)
@mock.patch('ironic.common.utils.rmtree_without_raise', autospec=True)
@mock.patch('ironic_lib.utils.unlink_without_raise', autospec=True)
@mock.patch('ironic.common.utils.unlink_without_raise', autospec=True)
def test_clean_up_ipxe_config_uefi(self, unlink_mock, rmtree_mock):
self.config(http_root='/httpboot', group='deploy')
address = "aa:aa:aa:aa:aa:aa"
@ -2604,7 +2603,7 @@ class iPXEBuildServicePXEConfigTestCase(db_base.DbTestCase):
mock_switch.assert_called()
@mock.patch.object(ironic_utils, 'unlink_without_raise', autospec=True)
@mock.patch.object(utils, 'unlink_without_raise', autospec=True)
@mock.patch.object(pxe_utils, 'clean_up_pxe_config', autospec=True)
@mock.patch.object(pxe_utils, 'TFTPImageCache', autospec=True)
class CleanUpPxeEnvTestCase(db_base.DbTestCase):

View File

@ -13,6 +13,7 @@
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
import errno
import os
@ -50,33 +51,6 @@ class BareMetalUtilsTestCase(base.TestCase):
symlink_mock.assert_called_once_with("/fake/source", "/fake/link")
class ExecuteTestCase(base.TestCase):
@mock.patch.object(processutils, 'execute', autospec=True)
@mock.patch.object(os.environ, 'copy', return_value={}, autospec=True)
def test_execute_use_standard_locale_no_env_variables(self, env_mock,
execute_mock):
utils.execute('foo', use_standard_locale=True)
execute_mock.assert_called_once_with('foo',
env_variables={'LC_ALL': 'C'})
@mock.patch.object(processutils, 'execute', autospec=True)
def test_execute_use_standard_locale_with_env_variables(self,
execute_mock):
utils.execute('foo', use_standard_locale=True,
env_variables={'foo': 'bar'})
execute_mock.assert_called_once_with('foo',
env_variables={'LC_ALL': 'C',
'foo': 'bar'})
@mock.patch.object(processutils, 'execute', autospec=True)
def test_execute_not_use_standard_locale(self, execute_mock):
utils.execute('foo', use_standard_locale=False,
env_variables={'foo': 'bar'})
execute_mock.assert_called_once_with('foo',
env_variables={'foo': 'bar'})
class GenericUtilsTestCase(base.TestCase):
@mock.patch.object(utils, 'hashlib', autospec=True)
def test__get_hash_object(self, hashlib_mock):
@ -627,3 +601,481 @@ class ValidateConductorGroupTestCase(base.TestCase):
utils.validate_conductor_group, object())
self.assertRaises(exception.InvalidConductorGroup,
utils.validate_conductor_group, None)
class UnlinkTestCase(base.TestCase):
def test_unlink(self):
with mock.patch.object(os, "unlink", autospec=True) as unlink_mock:
unlink_mock.return_value = None
utils.unlink_without_raise("/fake/path")
unlink_mock.assert_called_once_with("/fake/path")
def test_unlink_ENOENT(self):
with mock.patch.object(os, "unlink", autospec=True) as unlink_mock:
unlink_mock.side_effect = OSError(errno.ENOENT)
utils.unlink_without_raise("/fake/path")
unlink_mock.assert_called_once_with("/fake/path")
class ExecuteTestCase(base.TestCase):
# Allow calls to utils.execute() and related functions
block_execute = False
@mock.patch.object(processutils, 'execute', autospec=True)
@mock.patch.object(os.environ, 'copy', return_value={}, autospec=True)
def test_execute_use_standard_locale_no_env_variables(self, env_mock,
execute_mock):
utils.execute('foo', use_standard_locale=True)
execute_mock.assert_called_once_with('foo',
env_variables={'LC_ALL': 'C'})
@mock.patch.object(processutils, 'execute', autospec=True)
def test_execute_use_standard_locale_with_env_variables(self,
execute_mock):
utils.execute('foo', use_standard_locale=True,
env_variables={'foo': 'bar'})
execute_mock.assert_called_once_with('foo',
env_variables={'LC_ALL': 'C',
'foo': 'bar'})
@mock.patch.object(processutils, 'execute', autospec=True)
def test_execute_not_use_standard_locale(self, execute_mock):
utils.execute('foo', use_standard_locale=False,
env_variables={'foo': 'bar'})
execute_mock.assert_called_once_with('foo',
env_variables={'foo': 'bar'})
@mock.patch.object(utils, 'LOG', autospec=True)
def _test_execute_with_log_stdout(self, log_mock, log_stdout=None):
with mock.patch.object(
processutils, 'execute', autospec=True) as execute_mock:
execute_mock.return_value = ('stdout', 'stderr')
if log_stdout is not None:
utils.execute('foo', log_stdout=log_stdout)
else:
utils.execute('foo')
execute_mock.assert_called_once_with('foo')
name, args, kwargs = log_mock.debug.mock_calls[0]
if log_stdout is False:
self.assertEqual(1, log_mock.debug.call_count)
self.assertNotIn('stdout', args[0])
else:
self.assertEqual(2, log_mock.debug.call_count)
self.assertIn('stdout', args[0])
def test_execute_with_log_stdout_default(self):
self._test_execute_with_log_stdout()
def test_execute_with_log_stdout_true(self):
self._test_execute_with_log_stdout(log_stdout=True)
def test_execute_with_log_stdout_false(self):
self._test_execute_with_log_stdout(log_stdout=False)
@mock.patch.object(utils, 'LOG', autospec=True)
@mock.patch.object(processutils, 'execute', autospec=True)
def test_execute_command_not_found(self, execute_mock, log_mock):
execute_mock.side_effect = FileNotFoundError
self.assertRaises(FileNotFoundError, utils.execute, 'foo')
execute_mock.assert_called_once_with('foo')
name, args, kwargs = log_mock.debug.mock_calls[0]
self.assertEqual(1, log_mock.debug.call_count)
self.assertIn('not found', args[0])
class MkfsTestCase(base.TestCase):
@mock.patch.object(utils, 'execute', autospec=True)
def test_mkfs(self, execute_mock):
utils.mkfs('ext4', '/my/block/dev')
utils.mkfs('msdos', '/my/msdos/block/dev')
utils.mkfs('swap', '/my/swap/block/dev')
expected = [mock.call('mkfs', '-t', 'ext4', '-F', '/my/block/dev',
use_standard_locale=True),
mock.call('mkfs', '-t', 'msdos', '/my/msdos/block/dev',
use_standard_locale=True),
mock.call('mkswap', '/my/swap/block/dev',
use_standard_locale=True)]
self.assertEqual(expected, execute_mock.call_args_list)
@mock.patch.object(utils, 'execute', autospec=True)
def test_mkfs_with_label(self, execute_mock):
utils.mkfs('ext4', '/my/block/dev', 'ext4-vol')
utils.mkfs('msdos', '/my/msdos/block/dev', 'msdos-vol')
utils.mkfs('swap', '/my/swap/block/dev', 'swap-vol')
expected = [mock.call('mkfs', '-t', 'ext4', '-F', '-L', 'ext4-vol',
'/my/block/dev',
use_standard_locale=True),
mock.call('mkfs', '-t', 'msdos', '-n', 'msdos-vol',
'/my/msdos/block/dev',
use_standard_locale=True),
mock.call('mkswap', '-L', 'swap-vol',
'/my/swap/block/dev',
use_standard_locale=True)]
self.assertEqual(expected, execute_mock.call_args_list)
@mock.patch.object(utils, 'execute', autospec=True,
side_effect=processutils.ProcessExecutionError(
stderr=os.strerror(errno.ENOENT)))
def test_mkfs_with_unsupported_fs(self, execute_mock):
self.assertRaises(exception.FileSystemNotSupported,
utils.mkfs, 'foo', '/my/block/dev')
@mock.patch.object(utils, 'execute', autospec=True,
side_effect=processutils.ProcessExecutionError(
stderr='fake'))
def test_mkfs_with_unexpected_error(self, execute_mock):
self.assertRaises(processutils.ProcessExecutionError, utils.mkfs,
'ext4', '/my/block/dev', 'ext4-vol')
class IsHttpUrlTestCase(base.TestCase):
def test_is_http_url(self):
self.assertTrue(utils.is_http_url('http://127.0.0.1'))
self.assertTrue(utils.is_http_url('https://127.0.0.1'))
self.assertTrue(utils.is_http_url('HTTP://127.1.2.3'))
self.assertTrue(utils.is_http_url('HTTPS://127.3.2.1'))
self.assertFalse(utils.is_http_url('Zm9vYmFy'))
self.assertFalse(utils.is_http_url('11111111'))
class ParseRootDeviceTestCase(base.TestCase):
def test_parse_root_device_hints_without_operators(self):
root_device = {
'wwn': '123456', 'model': 'FOO model', 'size': 12345,
'serial': 'foo-serial', 'vendor': 'foo VENDOR with space',
'name': '/dev/sda', 'wwn_with_extension': '123456111',
'wwn_vendor_extension': '111', 'rotational': True,
'hctl': '1:0:0:0', 'by_path': '/dev/disk/by-path/1:0:0:0'}
result = utils.parse_root_device_hints(root_device)
expected = {
'wwn': 's== 123456', 'model': 's== foo%20model',
'size': '== 12345', 'serial': 's== foo-serial',
'vendor': 's== foo%20vendor%20with%20space',
'name': 's== /dev/sda', 'wwn_with_extension': 's== 123456111',
'wwn_vendor_extension': 's== 111', 'rotational': True,
'hctl': 's== 1%3A0%3A0%3A0',
'by_path': 's== /dev/disk/by-path/1%3A0%3A0%3A0'}
self.assertEqual(expected, result)
def test_parse_root_device_hints_with_operators(self):
root_device = {
'wwn': 's== 123456', 'model': 's== foo MODEL', 'size': '>= 12345',
'serial': 's!= foo-serial', 'vendor': 's== foo VENDOR with space',
'name': '<or> /dev/sda <or> /dev/sdb',
'wwn_with_extension': 's!= 123456111',
'wwn_vendor_extension': 's== 111', 'rotational': True,
'hctl': 's== 1:0:0:0', 'by_path': 's== /dev/disk/by-path/1:0:0:0'}
# Validate strings being normalized
expected = copy.deepcopy(root_device)
expected['model'] = 's== foo%20model'
expected['vendor'] = 's== foo%20vendor%20with%20space'
expected['hctl'] = 's== 1%3A0%3A0%3A0'
expected['by_path'] = 's== /dev/disk/by-path/1%3A0%3A0%3A0'
result = utils.parse_root_device_hints(root_device)
# The hints already contain the operators, make sure we keep them
self.assertEqual(expected, result)
def test_parse_root_device_hints_string_compare_operator_name(self):
root_device = {'name': 's== /dev/sdb'}
# Validate strings being normalized
expected = copy.deepcopy(root_device)
result = utils.parse_root_device_hints(root_device)
# The hints already contain the operators, make sure we keep them
self.assertEqual(expected, result)
def test_parse_root_device_hints_no_hints(self):
result = utils.parse_root_device_hints({})
self.assertIsNone(result)
def test_parse_root_device_hints_convert_size(self):
for size in (12345, '12345'):
result = utils.parse_root_device_hints({'size': size})
self.assertEqual({'size': '== 12345'}, result)
def test_parse_root_device_hints_invalid_size(self):
for value in ('not-int', -123, 0):
self.assertRaises(ValueError, utils.parse_root_device_hints,
{'size': value})
def test_parse_root_device_hints_int_or(self):
expr = '<or> 123 <or> 456 <or> 789'
result = utils.parse_root_device_hints({'size': expr})
self.assertEqual({'size': expr}, result)
def test_parse_root_device_hints_int_or_invalid(self):
expr = '<or> 123 <or> non-int <or> 789'
self.assertRaises(ValueError, utils.parse_root_device_hints,
{'size': expr})
def test_parse_root_device_hints_string_or_space(self):
expr = '<or> foo <or> foo bar <or> bar'
expected = '<or> foo <or> foo%20bar <or> bar'
result = utils.parse_root_device_hints({'model': expr})
self.assertEqual({'model': expected}, result)
def _parse_root_device_hints_convert_rotational(self, values,
expected_value):
for value in values:
result = utils.parse_root_device_hints({'rotational': value})
self.assertEqual({'rotational': expected_value}, result)
def test_parse_root_device_hints_convert_rotational(self):
self._parse_root_device_hints_convert_rotational(
(True, 'true', 'on', 'y', 'yes'), True)
self._parse_root_device_hints_convert_rotational(
(False, 'false', 'off', 'n', 'no'), False)
def test_parse_root_device_hints_invalid_rotational(self):
self.assertRaises(ValueError, utils.parse_root_device_hints,
{'rotational': 'not-bool'})
def test_parse_root_device_hints_invalid_wwn(self):
self.assertRaises(ValueError, utils.parse_root_device_hints,
{'wwn': 123})
def test_parse_root_device_hints_invalid_wwn_with_extension(self):
self.assertRaises(ValueError, utils.parse_root_device_hints,
{'wwn_with_extension': 123})
def test_parse_root_device_hints_invalid_wwn_vendor_extension(self):
self.assertRaises(ValueError, utils.parse_root_device_hints,
{'wwn_vendor_extension': 123})
def test_parse_root_device_hints_invalid_model(self):
self.assertRaises(ValueError, utils.parse_root_device_hints,
{'model': 123})
def test_parse_root_device_hints_invalid_serial(self):
self.assertRaises(ValueError, utils.parse_root_device_hints,
{'serial': 123})
def test_parse_root_device_hints_invalid_vendor(self):
self.assertRaises(ValueError, utils.parse_root_device_hints,
{'vendor': 123})
def test_parse_root_device_hints_invalid_name(self):
self.assertRaises(ValueError, utils.parse_root_device_hints,
{'name': 123})
def test_parse_root_device_hints_invalid_hctl(self):
self.assertRaises(ValueError, utils.parse_root_device_hints,
{'hctl': 123})
def test_parse_root_device_hints_invalid_by_path(self):
self.assertRaises(ValueError, utils.parse_root_device_hints,
{'by_path': 123})
def test_parse_root_device_hints_non_existent_hint(self):
self.assertRaises(ValueError, utils.parse_root_device_hints,
{'non-existent': 'foo'})
    def test_extract_hint_operator_and_values_single_value(self):
        expected = {'op': '>=', 'values': ['123']}
        self.assertEqual(
            expected, utils._extract_hint_operator_and_values(
                '>= 123', 'size'))

    def test_extract_hint_operator_and_values_multiple_values(self):
        expected = {'op': '<or>', 'values': ['123', '456', '789']}
        expr = '<or> 123 <or> 456 <or> 789'
        self.assertEqual(
            expected, utils._extract_hint_operator_and_values(expr, 'size'))

    def test_extract_hint_operator_and_values_multiple_values_space(self):
        expected = {'op': '<or>', 'values': ['foo', 'foo bar', 'bar']}
        expr = '<or> foo <or> foo bar <or> bar'
        self.assertEqual(
            expected, utils._extract_hint_operator_and_values(expr, 'model'))

    def test_extract_hint_operator_and_values_no_operator(self):
        expected = {'op': '', 'values': ['123']}
        self.assertEqual(
            expected, utils._extract_hint_operator_and_values('123', 'size'))

    def test_extract_hint_operator_and_values_empty_value(self):
        self.assertRaises(
            ValueError, utils._extract_hint_operator_and_values, '', 'size')

    def test_extract_hint_operator_and_values_integer(self):
        expected = {'op': '', 'values': ['123']}
        self.assertEqual(
            expected, utils._extract_hint_operator_and_values(123, 'size'))

    def test__append_operator_to_hints(self):
        root_device = {'serial': 'foo', 'size': 12345,
                       'model': 'foo model', 'rotational': True}
        expected = {'serial': 's== foo', 'size': '== 12345',
                    'model': 's== foo model', 'rotational': True}
        result = utils._append_operator_to_hints(root_device)
        self.assertEqual(expected, result)

    def test_normalize_hint_expression_or(self):
        expr = '<or> foo <or> foo bar <or> bar'
        expected = '<or> foo <or> foo%20bar <or> bar'
        result = utils._normalize_hint_expression(expr, 'model')
        self.assertEqual(expected, result)

    def test_normalize_hint_expression_in(self):
        expr = '<in> foo <in> foo bar <in> bar'
        expected = '<in> foo <in> foo%20bar <in> bar'
        result = utils._normalize_hint_expression(expr, 'model')
        self.assertEqual(expected, result)

    def test_normalize_hint_expression_op_space(self):
        expr = 's== test string with space'
        expected = 's== test%20string%20with%20space'
        result = utils._normalize_hint_expression(expr, 'model')
        self.assertEqual(expected, result)

    def test_normalize_hint_expression_op_no_space(self):
        expr = 's!= SpongeBob'
        expected = 's!= spongebob'
        result = utils._normalize_hint_expression(expr, 'model')
        self.assertEqual(expected, result)

    def test_normalize_hint_expression_no_op_space(self):
        expr = 'no operators'
        expected = 'no%20operators'
        result = utils._normalize_hint_expression(expr, 'model')
        self.assertEqual(expected, result)

    def test_normalize_hint_expression_no_op_no_space(self):
        expr = 'NoSpace'
        expected = 'nospace'
        result = utils._normalize_hint_expression(expr, 'model')
        self.assertEqual(expected, result)

    def test_normalize_hint_expression_empty_value(self):
        self.assertRaises(
            ValueError, utils._normalize_hint_expression, '', 'size')


class MatchRootDeviceTestCase(base.TestCase):

    def setUp(self):
        super(MatchRootDeviceTestCase, self).setUp()
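        # NOTE: device sizes below are expressed in bytes (roughly 60,
        # 120 and 10 GiB); 'size' hints are matched against the GiB values.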
        self.devices = [
            {'name': '/dev/sda', 'size': 64424509440, 'model': 'ok model',
             'serial': 'fakeserial'},
            {'name': '/dev/sdb', 'size': 128849018880, 'model': 'big model',
             'serial': 'veryfakeserial', 'rotational': 'yes'},
            {'name': '/dev/sdc', 'size': 10737418240, 'model': 'small model',
             'serial': 'veryveryfakeserial', 'rotational': False},
        ]

    def test_match_root_device_hints_one_hint(self):
        root_device_hints = {'size': '>= 70'}
        dev = utils.match_root_device_hints(self.devices, root_device_hints)
        self.assertEqual('/dev/sdb', dev['name'])

    def test_match_root_device_hints_rotational(self):
        root_device_hints = {'rotational': False}
        dev = utils.match_root_device_hints(self.devices, root_device_hints)
        self.assertEqual('/dev/sdc', dev['name'])

    def test_match_root_device_hints_rotational_convert_devices_bool(self):
        root_device_hints = {'size': '>=100', 'rotational': True}
        dev = utils.match_root_device_hints(self.devices, root_device_hints)
        self.assertEqual('/dev/sdb', dev['name'])

    def test_match_root_device_hints_multiple_hints(self):
        root_device_hints = {'size': '>= 50', 'model': 's==big model',
                             'serial': 's==veryfakeserial'}
        dev = utils.match_root_device_hints(self.devices, root_device_hints)
        self.assertEqual('/dev/sdb', dev['name'])

    def test_match_root_device_hints_multiple_hints2(self):
        root_device_hints = {
            'size': '<= 20',
            'model': '<or> model 5 <or> foomodel <or> small model <or>',
            'serial': 's== veryveryfakeserial'}
        dev = utils.match_root_device_hints(self.devices, root_device_hints)
        self.assertEqual('/dev/sdc', dev['name'])

    def test_match_root_device_hints_multiple_hints3(self):
        root_device_hints = {'rotational': False, 'model': '<in> small'}
        dev = utils.match_root_device_hints(self.devices, root_device_hints)
        self.assertEqual('/dev/sdc', dev['name'])

    def test_match_root_device_hints_no_operators(self):
        root_device_hints = {'size': '120', 'model': 'big model',
                             'serial': 'veryfakeserial'}
        dev = utils.match_root_device_hints(self.devices, root_device_hints)
        self.assertEqual('/dev/sdb', dev['name'])

    def test_match_root_device_hints_no_device_found(self):
        root_device_hints = {'size': '>=50', 'model': 's==foo'}
        dev = utils.match_root_device_hints(self.devices, root_device_hints)
        self.assertIsNone(dev)

    @mock.patch.object(utils.LOG, 'warning', autospec=True)
    def test_match_root_device_hints_empty_device_attribute(self, mock_warn):
        empty_dev = [{'name': '/dev/sda', 'model': ' '}]
        dev = utils.match_root_device_hints(empty_dev, {'model': 'foo'})
        self.assertIsNone(dev)
        self.assertTrue(mock_warn.called)

    def test_find_devices_all(self):
        root_device_hints = {'size': '>= 10'}
        devs = list(utils.find_devices_by_hints(self.devices,
                                                root_device_hints))
        self.assertEqual(self.devices, devs)

    def test_find_devices_none(self):
        root_device_hints = {'size': '>= 100500'}
        devs = list(utils.find_devices_by_hints(self.devices,
                                                root_device_hints))
        self.assertEqual([], devs)

    def test_find_devices_name(self):
        root_device_hints = {'name': 's== /dev/sda'}
        devs = list(utils.find_devices_by_hints(self.devices,
                                                root_device_hints))
        self.assertEqual([self.devices[0]], devs)


@mock.patch.object(utils, 'execute', autospec=True)
class GetRouteSourceTestCase(base.TestCase):
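    # get_route_source() is expected to parse the 'src' field out of
    # 'ip route get <destination>' output; IPv6 link-local sources are
    # skipped unless ignore_link_local=False is passed.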

    def test_get_route_source_ipv4(self, mock_execute):
        mock_execute.return_value = ('XXX src 1.2.3.4 XXX\n cache', None)
        source = utils.get_route_source('XXX')
        self.assertEqual('1.2.3.4', source)

    def test_get_route_source_ipv6(self, mock_execute):
        mock_execute.return_value = ('XXX src 1:2::3:4 metric XXX\n cache',
                                     None)
        source = utils.get_route_source('XXX')
        self.assertEqual('1:2::3:4', source)

    def test_get_route_source_ipv6_linklocal(self, mock_execute):
        mock_execute.return_value = (
            'XXX src fe80::1234:1234:1234:1234 metric XXX\n cache', None)
        source = utils.get_route_source('XXX')
        self.assertIsNone(source)

    def test_get_route_source_ipv6_linklocal_allowed(self, mock_execute):
        mock_execute.return_value = (
            'XXX src fe80::1234:1234:1234:1234 metric XXX\n cache', None)
        source = utils.get_route_source('XXX', ignore_link_local=False)
        self.assertEqual('fe80::1234:1234:1234:1234', source)

    def test_get_route_source_indexerror(self, mock_execute):
        mock_execute.return_value = ('XXX src \n cache', None)
        source = utils.get_route_source('XXX')
        self.assertIsNone(source)

View File

@ -19,13 +19,13 @@ import uuid
import eventlet
import futurist
from futurist import periodics
from ironic_lib import mdns
from oslo_config import cfg
from oslo_db import exception as db_exception
from oslo_utils import uuidutils
from ironic.common import driver_factory
from ironic.common import exception
from ironic.common import mdns
from ironic.common import states
from ironic.common import utils as common_utils
from ironic.conductor import base_manager

View File

@ -26,7 +26,6 @@ from unittest import mock
import eventlet
from futurist import waiters
from ironic_lib import metrics as ironic_metrics
from oslo_config import cfg
import oslo_messaging as messaging
from oslo_utils import uuidutils
@ -42,6 +41,7 @@ from ironic.common import exception
from ironic.common import faults
from ironic.common import images
from ironic.common import indicator_states
from ironic.common import metrics as ironic_metrics
from ironic.common import nova
from ironic.common import states
from ironic.conductor import cleaning

View File

@ -21,7 +21,6 @@ Unit Tests for :py:class:`ironic.conductor.rpcapi.ConductorAPI`.
import copy
from unittest import mock
from ironic_lib.json_rpc import client as json_rpc
from oslo_config import cfg
import oslo_messaging as messaging
from oslo_messaging import _utils as messaging_utils
@ -31,6 +30,7 @@ from ironic.common import boot_modes
from ironic.common import components
from ironic.common import exception
from ironic.common import indicator_states
from ironic.common.json_rpc import client as json_rpc
from ironic.common import release_mappings
from ironic.common import rpc
from ironic.common import states

View File

@ -13,7 +13,6 @@
import json
from unittest import mock
from ironic_lib import utils as irlib_utils
from oslo_concurrency import processutils
from ironic.common import exception
@ -676,7 +675,7 @@ class TestAnsibleDeploy(AnsibleDeployTestCaseBase):
@mock.patch.object(ansible_deploy, '_get_configdrive_path',
return_value='/path/test', autospec=True)
@mock.patch.object(irlib_utils, 'unlink_without_raise', autospec=True)
@mock.patch.object(com_utils, 'unlink_without_raise', autospec=True)
@mock.patch.object(pxe.PXEBoot, 'clean_up_ramdisk', autospec=True)
def test_clean_up(self, pxe_clean_up_mock, unlink_mock,
get_cfdrive_path_mock):

View File

@ -24,7 +24,6 @@ import shutil
import tempfile
from unittest import mock
from ironic_lib import utils as ironic_utils
from oslo_config import cfg
from oslo_utils import importutils
from oslo_utils import uuidutils
@ -34,6 +33,7 @@ from ironic.common import exception
from ironic.common import image_service
from ironic.common import images
from ironic.common import swift
from ironic.common import utils
from ironic.conductor import task_manager
from ironic.conductor import utils as manager_utils
from ironic.drivers.modules.ilo import common as ilo_common
@ -980,7 +980,7 @@ class IloCommonMethodsTestCase(BaseIloTest):
copy_mock.assert_called_once_with(source, image_path)
self.assertFalse(chmod_mock.called)
@mock.patch.object(ilo_common, 'ironic_utils', autospec=True)
@mock.patch.object(ilo_common, 'utils', autospec=True)
def test_remove_image_from_web_server(self, utils_mock):
# | GIVEN |
CONF.set_override('http_url', 'http://x.y.z.a/webserver/', 'deploy')
@ -1087,7 +1087,7 @@ class IloCommonMethodsTestCase(BaseIloTest):
"alice_in_wonderland"),
'err': raised_exc})
@mock.patch.object(ironic_utils, 'unlink_without_raise', spec_set=True,
@mock.patch.object(utils, 'unlink_without_raise', spec_set=True,
autospec=True)
@mock.patch.object(ilo_common, '_get_floppy_image_name', spec_set=True,
autospec=True)
@ -1133,7 +1133,7 @@ class IloCommonMethodsTestCase(BaseIloTest):
func_set_boot_device.assert_called_once_with(task,
boot_devices.CDROM)
@mock.patch.object(ironic_utils, 'unlink_without_raise', spec_set=True,
@mock.patch.object(utils, 'unlink_without_raise', spec_set=True,
autospec=True)
def test_remove_single_or_list_of_files_with_file_list(self, unlink_mock):
# | GIVEN |
@ -1148,7 +1148,7 @@ class IloCommonMethodsTestCase(BaseIloTest):
mock.call('/any_path3/any_file3')]
unlink_mock.assert_has_calls(calls)
@mock.patch.object(ironic_utils, 'unlink_without_raise', spec_set=True,
@mock.patch.object(utils, 'unlink_without_raise', spec_set=True,
autospec=True)
def test_remove_single_or_list_of_files_with_file_str(self, unlink_mock):
# | GIVEN |

View File

@ -23,7 +23,6 @@ import tempfile
import unittest
from unittest import mock
from ironic_lib import utils as ironic_utils
from oslo_config import cfg
from oslo_utils import uuidutils
@ -33,6 +32,7 @@ from ironic.common.glance_service import service_utils
from ironic.common.i18n import _
from ironic.common import images
from ironic.common import states
from ironic.common import utils
from ironic.conductor import task_manager
from ironic.conductor import utils as manager_utils
from ironic.drivers.modules import boot_mode_utils
@ -828,7 +828,7 @@ class IRMCDeployPrivateMethodsTestCase(test_common.BaseIRMCTest):
mock.call(_get_iso_name_mock(task.node, label='deploy')),
mock.call(_get_iso_name_mock(task.node, label='rescue'))])
@mock.patch.object(ironic_utils, 'unlink_without_raise', spec_set=True,
@mock.patch.object(utils, 'unlink_without_raise', spec_set=True,
autospec=True)
def test__remove_share_file(self, unlink_without_raise_mock,
check_share_fs_mounted_mock):

View File

@ -30,12 +30,12 @@ import tempfile
import time
from unittest import mock
from ironic_lib import utils as ironic_utils
from oslo_config import cfg
from oslo_service import loopingcall
import psutil
from ironic.common import exception
from ironic.common import utils
from ironic.drivers.modules import console_utils
from ironic.drivers.modules import ipmitool as ipmi
from ironic.tests.unit.db import base as db_base
@ -147,7 +147,7 @@ class ConsoleUtilsTestCase(db_base.DbTestCase):
console_utils._get_console_pid,
self.info['uuid'])
@mock.patch.object(ironic_utils, 'unlink_without_raise', autospec=True)
@mock.patch.object(utils, 'unlink_without_raise', autospec=True)
@mock.patch.object(os, 'kill', autospec=True)
@mock.patch.object(console_utils, '_get_console_pid', autospec=True)
def test__stop_console(self, mock_pid, mock_kill, mock_unlink):
@ -164,7 +164,7 @@ class ConsoleUtilsTestCase(db_base.DbTestCase):
mock_kill.assert_any_call(mock_pid.return_value, signal.SIGTERM)
mock_unlink.assert_called_once_with(pid_file)
@mock.patch.object(ironic_utils, 'unlink_without_raise', autospec=True)
@mock.patch.object(utils, 'unlink_without_raise', autospec=True)
@mock.patch.object(os, 'kill', autospec=True)
@mock.patch.object(psutil, 'pid_exists', autospec=True, return_value=True)
@mock.patch.object(console_utils, '_get_console_pid', autospec=True)
@ -181,7 +181,7 @@ class ConsoleUtilsTestCase(db_base.DbTestCase):
mock_kill.assert_any_call(mock_pid.return_value, signal.SIGKILL)
mock_unlink.assert_called_once_with(pid_file)
@mock.patch.object(ironic_utils, 'unlink_without_raise', autospec=True)
@mock.patch.object(utils, 'unlink_without_raise', autospec=True)
@mock.patch.object(os, 'kill', autospec=True)
@mock.patch.object(console_utils, '_get_console_pid', autospec=True)
def test__stop_console_nopid(self, mock_pid, mock_kill, mock_unlink):
@ -196,7 +196,7 @@ class ConsoleUtilsTestCase(db_base.DbTestCase):
self.assertFalse(mock_kill.called)
mock_unlink.assert_called_once_with(pid_file)
@mock.patch.object(ironic_utils, 'unlink_without_raise', autospec=True)
@mock.patch.object(utils, 'unlink_without_raise', autospec=True)
@mock.patch.object(os, 'kill', autospec=True)
@mock.patch.object(console_utils, '_get_console_pid', autospec=True)
def test__stop_console_shellinabox_not_running(self, mock_pid,
@ -212,7 +212,7 @@ class ConsoleUtilsTestCase(db_base.DbTestCase):
signal.SIGTERM)
mock_unlink.assert_called_once_with(pid_file)
@mock.patch.object(ironic_utils, 'unlink_without_raise', autospec=True)
@mock.patch.object(utils, 'unlink_without_raise', autospec=True)
@mock.patch.object(os, 'kill', autospec=True)
@mock.patch.object(console_utils, '_get_console_pid', autospec=True)
def test__stop_console_exception(self, mock_pid, mock_kill, mock_unlink):

View File

@ -31,7 +31,6 @@ import types
from unittest import mock
import fixtures
from ironic_lib import utils as ironic_utils
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_config import fixture as cfg_fixture
@ -4463,10 +4462,8 @@ class IPMIToolSocatDriverTestCase(IPMIToolShellinaboxTestCase):
@mock.patch.object(ipmi.IPMISocatConsole, '_exec_stop_console',
autospec=True)
@mock.patch.object(ironic_utils, 'unlink_without_raise',
autospec=True)
@mock.patch.object(console_utils, 'stop_socat_console',
autospec=True)
@mock.patch.object(utils, 'unlink_without_raise', autospec=True)
@mock.patch.object(console_utils, 'stop_socat_console', autospec=True)
def test_stop_console_fail(self, mock_stop, mock_unlink, mock_exec_stop):
mock_stop.side_effect = exception.ConsoleError()

Some files were not shown because too many files have changed in this diff.