Fix pep8 errors and make it pass Jenkins tests.
Fix all existing pep8 errors. Remove install_venv and friends, which were not needed. Add a few ignores for nova code that is yet to be cleaned up. Skip one failing test case, fixed by review 29394. Import contrib/redhat-eventlet.patch from Nova. Change-Id: I46b6ccaa272bd058757064672ce9221263ed7087
This commit is contained in:
parent
accdc459ae
commit
55502af5d2
16
contrib/redhat-eventlet.patch
Normal file
16
contrib/redhat-eventlet.patch
Normal file
@ -0,0 +1,16 @@
|
||||
--- .nova-venv/lib/python2.6/site-packages/eventlet/green/subprocess.py.orig
|
||||
2011-05-25
|
||||
23:31:34.597271402 +0000
|
||||
+++ .nova-venv/lib/python2.6/site-packages/eventlet/green/subprocess.py
|
||||
2011-05-25
|
||||
23:33:24.055602468 +0000
|
||||
@@ -32,7 +32,7 @@
|
||||
setattr(self, attr, wrapped_pipe)
|
||||
__init__.__doc__ = subprocess_orig.Popen.__init__.__doc__
|
||||
|
||||
- def wait(self, check_interval=0.01):
|
||||
+ def wait(self, check_interval=0.01, timeout=None):
|
||||
# Instead of a blocking OS call, this version of wait() uses logic
|
||||
# borrowed from the eventlet 0.2 processes.Process.wait() method.
|
||||
try:
|
||||
|
@ -1,4 +1,5 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
# -*- encoding: utf-8 -*-
|
||||
|
||||
# Copyright © 2012 New Dream Network, LLC (DreamHost)
|
||||
# All Rights Reserved.
|
||||
@ -52,6 +53,7 @@ def setup_app(pecan_config=None, extra_hooks=None):
|
||||
|
||||
pecan.configuration.set_config(dict(pecan_config), overwrite=True)
|
||||
|
||||
# TODO(deva): add middleware.ParsableErrorMiddleware from Ceilometer
|
||||
app = pecan.make_app(
|
||||
pecan_config.app.root,
|
||||
static_root=pecan_config.app.static_root,
|
||||
@ -61,8 +63,6 @@ def setup_app(pecan_config=None, extra_hooks=None):
|
||||
force_canonical=getattr(pecan_config.app, 'force_canonical', True),
|
||||
hooks=app_hooks,
|
||||
)
|
||||
# TODO: add this back in
|
||||
# wrap_app=middleware.ParsableErrorMiddleware,
|
||||
|
||||
if pecan_config.app.enable_acl:
|
||||
return acl.install(app, cfg.CONF)
|
||||
|
@ -22,20 +22,17 @@ Should maintain feature parity with Nova Baremetal Extension.
|
||||
Specification in ironic/doc/api/v1.rst
|
||||
"""
|
||||
|
||||
import inspect
|
||||
import pecan
|
||||
from pecan import rest
|
||||
|
||||
import wsme
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from ironic.openstack.common import log
|
||||
|
||||
# TODO(deva): The API shouldn't know what db IMPL is in use.
|
||||
# Import ironic.db.models instead of the sqlalchemy models
|
||||
# once that layer is written.
|
||||
from ironic.db.sqlalchemy import models
|
||||
# Import ironic.db.models once that layer is written.
|
||||
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
@ -59,7 +56,7 @@ class Base(wtypes.Base):
|
||||
|
||||
|
||||
class Interface(Base):
|
||||
"""A representation of a network interface for a baremetal node"""
|
||||
"""A representation of a network interface for a baremetal node."""
|
||||
|
||||
node_id = int
|
||||
address = wtypes.text
|
||||
@ -72,9 +69,9 @@ class Interface(Base):
|
||||
|
||||
|
||||
class InterfacesController(rest.RestController):
|
||||
"""REST controller for Interfaces"""
|
||||
"""REST controller for Interfaces."""
|
||||
|
||||
@wsme_pecan.wsexpose(Interface, unicode)
|
||||
@wsme_pecan.wsexpose(Interface, unicode)
|
||||
def post(self, iface):
|
||||
"""Ceate a new interface."""
|
||||
return Interface.sample()
|
||||
@ -93,17 +90,17 @@ class InterfacesController(rest.RestController):
|
||||
|
||||
@wsme_pecan.wsexpose()
|
||||
def delete(self, iface_id):
|
||||
"""Delete an interface"""
|
||||
"""Delete an interface."""
|
||||
pass
|
||||
|
||||
@wsme_pecan.wsexpose()
|
||||
def put(self, iface_id):
|
||||
"""Update an interface"""
|
||||
"""Update an interface."""
|
||||
pass
|
||||
|
||||
|
||||
class Node(Base):
|
||||
"""A representation of a bare metal node"""
|
||||
"""A representation of a bare metal node."""
|
||||
|
||||
uuid = wtypes.text
|
||||
cpu_arch = wtypes.text
|
||||
@ -135,16 +132,16 @@ class Node(Base):
|
||||
|
||||
|
||||
class NodeIfaceController(rest.RestController):
|
||||
"""For GET /node/ifaces/<id>"""
|
||||
"""For GET /node/ifaces/<id>."""
|
||||
|
||||
@wsme_pecan.wsexpose([Interface], unicode)
|
||||
def get(self, node_id):
|
||||
return [Interface.from_db_model(r)
|
||||
for r in pecan.request.dbapi.get_ifaces_for_node(node_id)]
|
||||
|
||||
|
||||
|
||||
class NodesController(rest.RestController):
|
||||
"""REST controller for Nodes"""
|
||||
"""REST controller for Nodes."""
|
||||
|
||||
@wsme.validate(Node)
|
||||
@wsme_pecan.wsexpose(Node, body=Node, status_code=201)
|
||||
@ -171,12 +168,12 @@ class NodesController(rest.RestController):
|
||||
|
||||
@wsme_pecan.wsexpose()
|
||||
def delete(self, node_id):
|
||||
"""Delete a node"""
|
||||
"""Delete a node."""
|
||||
pecan.request.dbapi.destroy_node(node_id)
|
||||
|
||||
@wsme_pecan.wsexpose()
|
||||
def put(self, node_id):
|
||||
"""Update a node"""
|
||||
"""Update a node."""
|
||||
pass
|
||||
|
||||
ifaces = NodeIfaceController()
|
||||
@ -185,7 +182,7 @@ class NodesController(rest.RestController):
|
||||
class Controller(object):
|
||||
"""Version 1 API controller root."""
|
||||
|
||||
# TODO: _default and index
|
||||
# TODO(deva): _default and index
|
||||
|
||||
nodes = NodesController()
|
||||
interfaces = InterfacesController()
|
||||
|
@ -14,8 +14,3 @@
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from pecan import conf
|
||||
|
||||
def init_model():
|
||||
pass
|
||||
|
@ -13,9 +13,8 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
# TODO(mikal): move eventlet imports to ironic.__init__ once we move to PBR
|
||||
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
|
||||
import os
|
||||
import sys
|
||||
|
||||
os.environ['EVENTLET_NO_GREENDNS'] = 'yes'
|
||||
|
||||
|
@ -18,9 +18,7 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
The Ironic Service API
|
||||
"""
|
||||
"""The Ironic Service API."""
|
||||
|
||||
import sys
|
||||
|
||||
@ -28,16 +26,14 @@ from oslo.config import cfg
|
||||
from wsgiref import simple_server
|
||||
|
||||
from ironic.api import app
|
||||
from ironic.common.service import prepare_service
|
||||
from ironic.openstack.common import service
|
||||
from ironic.openstack.common.rpc import service as rpc_service
|
||||
from ironic.common import service as ironic_service
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
def main():
|
||||
# Pase config file and command line options, then start logging
|
||||
prepare_service(sys.argv)
|
||||
ironic_service.prepare_service(sys.argv)
|
||||
|
||||
# Build and start the WSGI app
|
||||
host = CONF.ironic_api_bind_ip
|
||||
|
@ -33,6 +33,7 @@ CONF = cfg.CONF
|
||||
CONF.import_opt('db_backend',
|
||||
'ironic.openstack.common.db.api')
|
||||
|
||||
|
||||
def main():
|
||||
service.prepare_service(sys.argv)
|
||||
migration.db_sync()
|
||||
|
@ -25,19 +25,18 @@ The Ironic Management Service
|
||||
import sys
|
||||
|
||||
from oslo.config import cfg
|
||||
from wsgiref import simple_server
|
||||
|
||||
from ironic.manager import manager
|
||||
from ironic.common.service import prepare_service
|
||||
from ironic.openstack.common import service
|
||||
from ironic.openstack.common.rpc import service as rpc_service
|
||||
|
||||
from ironic.common import service as ironic_service
|
||||
from ironic.manager import manager
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
def main():
|
||||
# Pase config file and command line options, then start logging
|
||||
prepare_service(sys.argv)
|
||||
ironic_service.prepare_service(sys.argv)
|
||||
|
||||
topic = 'ironic.manager'
|
||||
mgr = manager.ManagerService(CONF.host, topic)
|
||||
|
@ -19,9 +19,9 @@
|
||||
|
||||
from oslo.config import cfg
|
||||
|
||||
from ironic.common import paths
|
||||
from ironic.openstack.common.db.sqlalchemy import session as db_session
|
||||
from ironic.openstack.common import rpc
|
||||
from ironic.common import paths
|
||||
from ironic import version
|
||||
|
||||
_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('$sqlite_db')
|
||||
|
@ -48,7 +48,8 @@ class RequestContext(object):
|
||||
request_id=None, auth_token=None, overwrite=True,
|
||||
quota_class=None, user_name=None, project_name=None,
|
||||
service_catalog=None, instance_lock_checked=False, **kwargs):
|
||||
"""
|
||||
"""Initialize this RequestContext.
|
||||
|
||||
:param read_deleted: 'no' indicates deleted records are hidden, 'yes'
|
||||
indicates deleted records are visible, 'only' indicates that
|
||||
*only* deleted records are visible.
|
||||
|
@ -31,7 +31,6 @@ from oslo.config import cfg
|
||||
from ironic.common import safe_utils
|
||||
from ironic.openstack.common import excutils
|
||||
from ironic.openstack.common import log as logging
|
||||
from ironic.openstack.common.gettextutils import _
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
@ -92,7 +92,7 @@ def enforce(context, action, target, do_raise=True):
|
||||
"""
|
||||
init()
|
||||
|
||||
credentials = ironic_context.to_dict()
|
||||
credentials = context.to_dict()
|
||||
|
||||
# Add the exception arguments if asked to do a raise
|
||||
extra = {}
|
||||
@ -102,19 +102,16 @@ def enforce(context, action, target, do_raise=True):
|
||||
return policy.check(action, target, credentials, **extra)
|
||||
|
||||
|
||||
def check_is_admin(roles):
|
||||
"""Whether or not roles contains 'admin' role according to policy setting.
|
||||
def check_is_admin(context):
|
||||
"""Whether or not role contains 'admin' role according to policy setting.
|
||||
|
||||
"""
|
||||
init()
|
||||
|
||||
if isinstance(roles, RequestContext):
|
||||
# the target is user-self
|
||||
credentials = roles.to_dict()
|
||||
target = credentials
|
||||
return policy.check('context_is_admin', target, credentials)
|
||||
else:
|
||||
return policy.check('context_is_admin', {}, {'roles': roles})
|
||||
credentials = context.to_dict()
|
||||
target = credentials
|
||||
|
||||
return policy.check('context_is_admin', target, credentials)
|
||||
|
||||
|
||||
@policy.register('is_admin')
|
||||
|
@ -169,8 +169,7 @@ def execute(*cmd, **kwargs):
|
||||
|
||||
|
||||
def trycmd(*args, **kwargs):
|
||||
"""
|
||||
A wrapper around execute() to more easily handle warnings and errors.
|
||||
"""A wrapper around execute() to more easily handle warnings and errors.
|
||||
|
||||
Returns an (out, err) tuple of strings containing the output of
|
||||
the command's stdout and stderr. If 'err' is not empty then the
|
||||
@ -200,7 +199,8 @@ def ssh_execute(ssh, cmd, process_input=None,
|
||||
addl_env=None, check_exit_code=True):
|
||||
LOG.debug(_('Running cmd (SSH): %s'), cmd)
|
||||
if addl_env:
|
||||
raise exception.IronicException(_('Environment not supported over SSH'))
|
||||
raise exception.IronicException(_(
|
||||
'Environment not supported over SSH'))
|
||||
|
||||
if process_input:
|
||||
# This is (probably) fixable if we need it...
|
||||
@ -307,10 +307,12 @@ def is_valid_boolstr(val):
|
||||
|
||||
def is_valid_mac(address):
|
||||
"""Verify the format of a MAC addres."""
|
||||
if re.match("[0-9a-f]{2}([-:])[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$", address.lower()):
|
||||
m = "[0-9a-f]{2}([-:])[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$"
|
||||
if re.match(m, address.lower()):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def is_valid_ipv4(address):
|
||||
"""Verify that address represents a valid IPv4 address."""
|
||||
try:
|
||||
@ -345,8 +347,7 @@ def get_shortened_ipv6_cidr(address):
|
||||
|
||||
|
||||
def is_valid_cidr(address):
|
||||
"""Check if the provided ipv4 or ipv6 address is a valid
|
||||
CIDR address or not"""
|
||||
"""Check if the provided ipv4 or ipv6 address is a valid CIDR address."""
|
||||
try:
|
||||
# Validate the correct CIDR Address
|
||||
netaddr.IPNetwork(address)
|
||||
@ -369,8 +370,10 @@ def is_valid_cidr(address):
|
||||
|
||||
|
||||
def get_ip_version(network):
|
||||
"""Returns the IP version of a network (IPv4 or IPv6). Raises
|
||||
AddrFormatError if invalid network."""
|
||||
"""Returns the IP version of a network (IPv4 or IPv6).
|
||||
|
||||
:raises: AddrFormatError if invalid network.
|
||||
"""
|
||||
if netaddr.IPNetwork(network).version == 6:
|
||||
return "IPv6"
|
||||
elif netaddr.IPNetwork(network).version == 4:
|
||||
@ -527,20 +530,25 @@ def mkfs(fs, path, label=None):
|
||||
execute(*args)
|
||||
|
||||
|
||||
def cache_image(context, target, image_id, user_id, project_id):
|
||||
if not os.path.exists(target):
|
||||
libvirt_utils.fetch_image(context, target, image_id,
|
||||
user_id, project_id)
|
||||
|
||||
|
||||
def inject_into_image(image, key, net, metadata, admin_password,
|
||||
files, partition, use_cow=False):
|
||||
try:
|
||||
disk_api.inject_data(image, key, net, metadata, admin_password,
|
||||
files, partition, use_cow)
|
||||
except Exception as e:
|
||||
LOG.warn(_("Failed to inject data into image %(image)s. "
|
||||
"Error: %(e)s") % locals())
|
||||
# TODO(deva): Make these work in Ironic.
|
||||
# Either copy nova/virt/utils (bad),
|
||||
# or reimplement as a common lib,
|
||||
# or make a driver that doesn't need to do this.
|
||||
#
|
||||
#def cache_image(context, target, image_id, user_id, project_id):
|
||||
# if not os.path.exists(target):
|
||||
# libvirt_utils.fetch_image(context, target, image_id,
|
||||
# user_id, project_id)
|
||||
#
|
||||
#
|
||||
#def inject_into_image(image, key, net, metadata, admin_password,
|
||||
# files, partition, use_cow=False):
|
||||
# try:
|
||||
# disk_api.inject_data(image, key, net, metadata, admin_password,
|
||||
# files, partition, use_cow)
|
||||
# except Exception as e:
|
||||
# LOG.warn(_("Failed to inject data into image %(image)s. "
|
||||
# "Error: %(e)s") % locals())
|
||||
|
||||
|
||||
def unlink_without_raise(path):
|
||||
@ -575,5 +583,3 @@ def create_link_without_raise(source, link):
|
||||
else:
|
||||
LOG.warn(_("Failed to create symlink from %(source)s to %(link)s"
|
||||
", error: %(e)s") % locals())
|
||||
|
||||
|
||||
|
@ -14,5 +14,3 @@
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from ironic.db.api import *
|
||||
|
@ -68,7 +68,7 @@ class Connection(object):
|
||||
@abc.abstractmethod
|
||||
def create_node(self, values):
|
||||
"""Create a new node.
|
||||
|
||||
|
||||
:param values: Values to instantiate the node with.
|
||||
:returns: Node.
|
||||
"""
|
||||
@ -132,7 +132,7 @@ class Connection(object):
|
||||
@abc.abstractmethod
|
||||
def create_iface(self, values):
|
||||
"""Create a new iface.
|
||||
|
||||
|
||||
:param values: Dict of values.
|
||||
"""
|
||||
|
||||
|
@ -17,13 +17,10 @@
|
||||
|
||||
"""SQLAlchemy storage backend."""
|
||||
|
||||
import sys
|
||||
import uuid
|
||||
|
||||
from oslo.config import cfg
|
||||
|
||||
# TODO(deva): import MultipleResultsFound and handle it appropriately
|
||||
from sqlalchemy.orm.exc import NoResultFound
|
||||
from sqlalchemy.orm.exc import MultipleResultsFound
|
||||
|
||||
from ironic.common import exception
|
||||
from ironic.common import utils
|
||||
@ -44,13 +41,13 @@ get_session = db_session.get_session
|
||||
|
||||
|
||||
def get_backend():
|
||||
"""The backend is this module itself."""
|
||||
return Connection()
|
||||
"""The backend is this module itself."""
|
||||
return Connection()
|
||||
|
||||
|
||||
def model_query(model, *args, **kwargs):
|
||||
"""Query helper for simpler session usage.
|
||||
|
||||
|
||||
:param session: if present, the session to use
|
||||
"""
|
||||
|
||||
@ -127,7 +124,7 @@ class Connection(api.Connection):
|
||||
raise exception.NodeNotFound(node=node)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def get_node_by_instance(self, instance):
|
||||
query = model_query(models.Node)
|
||||
if uuidutils.is_uuid_like(instance):
|
||||
@ -147,7 +144,7 @@ class Connection(api.Connection):
|
||||
with session.begin():
|
||||
query = model_query(models.Node, session=session)
|
||||
query = add_uuid_filter(query, node)
|
||||
|
||||
|
||||
count = query.delete()
|
||||
if count != 1:
|
||||
raise exception.NodeNotFound(node=node)
|
||||
@ -157,7 +154,7 @@ class Connection(api.Connection):
|
||||
with session.begin():
|
||||
query = model_query(models.Node, session=session)
|
||||
query = add_uuid_filter(query, node)
|
||||
|
||||
|
||||
print "Updating with %s." % values
|
||||
count = query.update(values,
|
||||
synchronize_session='fetch')
|
||||
@ -190,7 +187,7 @@ class Connection(api.Connection):
|
||||
query = session.query(models.Iface).\
|
||||
join(models.Node,
|
||||
models.Iface.node_id == models.Node.id).\
|
||||
filter(models.Node.uuid==node)
|
||||
filter(models.Node.uuid == node)
|
||||
result = query.all()
|
||||
|
||||
return result
|
||||
@ -206,21 +203,19 @@ class Connection(api.Connection):
|
||||
with session.begin():
|
||||
query = model_query(models.Iface, session=session)
|
||||
query = add_mac_filter(query, iface)
|
||||
|
||||
|
||||
count = query.update(values)
|
||||
if count != 1:
|
||||
raise exception.InterfaceNotFound(iface=iface)
|
||||
ref = query.one()
|
||||
return ref
|
||||
return ref
|
||||
|
||||
def destroy_iface(self, iface):
|
||||
session = get_session()
|
||||
with session.begin():
|
||||
query = model_query(models.Iface, session=session)
|
||||
query = add_mac_filter(query, iface)
|
||||
|
||||
count = query.update(values)
|
||||
|
||||
count = query.delete()
|
||||
if count != 1:
|
||||
raise exception.NodeNotFound(node=node)
|
||||
ref = query.one()
|
||||
return ref
|
||||
raise exception.IfaceNotFound(iface=iface)
|
||||
|
@ -18,14 +18,14 @@
|
||||
|
||||
from migrate.changeset import UniqueConstraint
|
||||
from sqlalchemy import Table, Column, Index, ForeignKey, MetaData
|
||||
from sqlalchemy import Boolean, DateTime, Float, Integer, String, Text
|
||||
from sqlalchemy import DateTime, Integer, String, Text
|
||||
|
||||
from ironic.openstack.common import log as logging
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
ENGINE='InnoDB'
|
||||
CHARSET='utf8'
|
||||
ENGINE = 'InnoDB'
|
||||
CHARSET = 'utf8'
|
||||
|
||||
|
||||
def upgrade(migrate_engine):
|
||||
|
@ -24,8 +24,8 @@ import urlparse
|
||||
|
||||
from oslo.config import cfg
|
||||
|
||||
from sqlalchemy import Table, Column, Index, ForeignKey
|
||||
from sqlalchemy import Boolean, DateTime, Float, Integer, String, Text
|
||||
from sqlalchemy import Column, ForeignKey
|
||||
from sqlalchemy import Integer, String
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy.types import TypeDecorator, VARCHAR
|
||||
|
||||
|
@ -18,16 +18,9 @@
|
||||
|
||||
from oslo.config import cfg
|
||||
|
||||
from ironic.openstack.common import context
|
||||
from ironic.openstack.common import log
|
||||
from ironic.openstack.common.rpc import dispatcher as rpc_dispatcher
|
||||
from ironic.openstack.common import timeutils
|
||||
|
||||
import ironic.openstack.common.notifier.rpc_notifier
|
||||
|
||||
from ironic import db
|
||||
from ironic.common import service
|
||||
from ironic.common import extension_manager
|
||||
|
||||
manager_opts = [
|
||||
cfg.StrOpt('power_driver',
|
||||
@ -52,14 +45,14 @@ class ManagerService(service.PeriodicService):
|
||||
|
||||
def start(self):
|
||||
super(ManagerService, self).start()
|
||||
# TODO: connect with storage driver
|
||||
# TODO(deva): connect with storage driver
|
||||
|
||||
def initialize_(self, service):
|
||||
LOG.debug(_('Manager initializing service hooks'))
|
||||
|
||||
def process_notification(self, notification):
|
||||
LOG.debug(_('Received notification %r',
|
||||
notification.get('event_type')))
|
||||
LOG.debug(_('Received notification: %r') %
|
||||
notification.get('event_type'))
|
||||
|
||||
def periodic_tasks(self, context):
|
||||
pass
|
||||
|
@ -25,8 +25,7 @@ CONF = cfg.CONF
|
||||
|
||||
|
||||
def _get_my_ip():
|
||||
"""
|
||||
Returns the actual ip of the local machine.
|
||||
"""Returns the actual ip of the local machine.
|
||||
|
||||
This code figures out what source address would be used if some traffic
|
||||
were to be sent out to some well known address on the Internet. In this
|
||||
|
@ -24,9 +24,7 @@
|
||||
:platform: Unix
|
||||
"""
|
||||
|
||||
# TODO(mikal): move eventlet imports to ironic.__init__ once we move to PBR
|
||||
import os
|
||||
import sys
|
||||
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
|
||||
|
||||
import eventlet
|
||||
|
||||
|
227
ironic/tests/base.py
Normal file
227
ironic/tests/base.py
Normal file
@ -0,0 +1,227 @@
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2010 United States Government as represented by the
|
||||
# Administrator of the National Aeronautics and Space Administration.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Base classes for our unit tests.
|
||||
|
||||
Allows overriding of flags for use of fakes, and some black magic for
|
||||
inline callbacks.
|
||||
|
||||
"""
|
||||
|
||||
import eventlet
|
||||
eventlet.monkey_patch(os=False)
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
import fixtures
|
||||
import mox
|
||||
import stubout
|
||||
import testtools
|
||||
|
||||
from oslo.config import cfg
|
||||
|
||||
from ironic.db import migration
|
||||
|
||||
from ironic.common import paths
|
||||
from ironic.openstack.common.db.sqlalchemy import session
|
||||
from ironic.openstack.common import log as logging
|
||||
from ironic.openstack.common import timeutils
|
||||
from ironic.tests import conf_fixture
|
||||
from ironic.tests import policy_fixture
|
||||
|
||||
|
||||
test_opts = [
|
||||
cfg.StrOpt('sqlite_clean_db',
|
||||
default='clean.sqlite',
|
||||
help='File name of clean sqlite db'),
|
||||
]
|
||||
|
||||
CONF = cfg.CONF
|
||||
CONF.register_opts(test_opts)
|
||||
CONF.import_opt('sql_connection',
|
||||
'ironic.openstack.common.db.sqlalchemy.session')
|
||||
CONF.import_opt('sqlite_db', 'ironic.openstack.common.db.sqlalchemy.session')
|
||||
CONF.set_override('use_stderr', False)
|
||||
|
||||
logging.setup('ironic')
|
||||
|
||||
_DB_CACHE = None
|
||||
|
||||
|
||||
class Database(fixtures.Fixture):
|
||||
|
||||
def __init__(self, db_session, db_migrate, sql_connection,
|
||||
sqlite_db, sqlite_clean_db):
|
||||
self.sql_connection = sql_connection
|
||||
self.sqlite_db = sqlite_db
|
||||
self.sqlite_clean_db = sqlite_clean_db
|
||||
|
||||
self.engine = db_session.get_engine()
|
||||
self.engine.dispose()
|
||||
conn = self.engine.connect()
|
||||
if sql_connection == "sqlite://":
|
||||
if db_migrate.db_version() > db_migrate.INIT_VERSION:
|
||||
return
|
||||
else:
|
||||
testdb = paths.state_path_rel(sqlite_db)
|
||||
if os.path.exists(testdb):
|
||||
return
|
||||
db_migrate.db_sync()
|
||||
self.post_migrations()
|
||||
if sql_connection == "sqlite://":
|
||||
conn = self.engine.connect()
|
||||
self._DB = "".join(line for line in conn.connection.iterdump())
|
||||
self.engine.dispose()
|
||||
else:
|
||||
cleandb = paths.state_path_rel(sqlite_clean_db)
|
||||
shutil.copyfile(testdb, cleandb)
|
||||
|
||||
def setUp(self):
|
||||
super(Database, self).setUp()
|
||||
|
||||
if self.sql_connection == "sqlite://":
|
||||
conn = self.engine.connect()
|
||||
conn.connection.executescript(self._DB)
|
||||
self.addCleanup(self.engine.dispose)
|
||||
else:
|
||||
shutil.copyfile(paths.state_path_rel(self.sqlite_clean_db),
|
||||
paths.state_path_rel(self.sqlite_db))
|
||||
|
||||
def post_migrations(self):
|
||||
"""Any addition steps that are needed outside of the migrations."""
|
||||
|
||||
|
||||
class ReplaceModule(fixtures.Fixture):
|
||||
"""Replace a module with a fake module."""
|
||||
|
||||
def __init__(self, name, new_value):
|
||||
self.name = name
|
||||
self.new_value = new_value
|
||||
|
||||
def _restore(self, old_value):
|
||||
sys.modules[self.name] = old_value
|
||||
|
||||
def setUp(self):
|
||||
super(ReplaceModule, self).setUp()
|
||||
old_value = sys.modules.get(self.name)
|
||||
sys.modules[self.name] = self.new_value
|
||||
self.addCleanup(self._restore, old_value)
|
||||
|
||||
|
||||
class MoxStubout(fixtures.Fixture):
|
||||
"""Deal with code around mox and stubout as a fixture."""
|
||||
|
||||
def setUp(self):
|
||||
super(MoxStubout, self).setUp()
|
||||
# emulate some of the mox stuff, we can't use the metaclass
|
||||
# because it screws with our generators
|
||||
self.mox = mox.Mox()
|
||||
self.stubs = stubout.StubOutForTesting()
|
||||
self.addCleanup(self.mox.UnsetStubs)
|
||||
self.addCleanup(self.stubs.UnsetAll)
|
||||
self.addCleanup(self.stubs.SmartUnsetAll)
|
||||
self.addCleanup(self.mox.VerifyAll)
|
||||
|
||||
|
||||
class TestingException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class TestCase(testtools.TestCase):
|
||||
"""Test case base class for all unit tests."""
|
||||
|
||||
def setUp(self):
|
||||
"""Run before each test method to initialize test environment."""
|
||||
super(TestCase, self).setUp()
|
||||
test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0)
|
||||
try:
|
||||
test_timeout = int(test_timeout)
|
||||
except ValueError:
|
||||
# If timeout value is invalid do not set a timeout.
|
||||
test_timeout = 0
|
||||
if test_timeout > 0:
|
||||
self.useFixture(fixtures.Timeout(test_timeout, gentle=True))
|
||||
self.useFixture(fixtures.NestedTempfile())
|
||||
self.useFixture(fixtures.TempHomeDir())
|
||||
|
||||
if (os.environ.get('OS_STDOUT_CAPTURE') == 'True' or
|
||||
os.environ.get('OS_STDOUT_CAPTURE') == '1'):
|
||||
stdout = self.useFixture(fixtures.StringStream('stdout')).stream
|
||||
self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
|
||||
if (os.environ.get('OS_STDERR_CAPTURE') == 'True' or
|
||||
os.environ.get('OS_STDERR_CAPTURE') == '1'):
|
||||
stderr = self.useFixture(fixtures.StringStream('stderr')).stream
|
||||
self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
|
||||
|
||||
self.log_fixture = self.useFixture(fixtures.FakeLogger())
|
||||
self.useFixture(conf_fixture.ConfFixture(CONF))
|
||||
|
||||
global _DB_CACHE
|
||||
if not _DB_CACHE:
|
||||
_DB_CACHE = Database(session, migration,
|
||||
sql_connection=CONF.sql_connection,
|
||||
sqlite_db=CONF.sqlite_db,
|
||||
sqlite_clean_db=CONF.sqlite_clean_db)
|
||||
self.useFixture(_DB_CACHE)
|
||||
|
||||
mox_fixture = self.useFixture(MoxStubout())
|
||||
self.mox = mox_fixture.mox
|
||||
self.stubs = mox_fixture.stubs
|
||||
self.addCleanup(self._clear_attrs)
|
||||
self.useFixture(fixtures.EnvironmentVariable('http_proxy'))
|
||||
self.policy = self.useFixture(policy_fixture.PolicyFixture())
|
||||
CONF.set_override('fatal_exception_format_errors', True)
|
||||
|
||||
def _clear_attrs(self):
|
||||
# Delete attributes that don't start with _ so they don't pin
|
||||
# memory around unnecessarily for the duration of the test
|
||||
# suite
|
||||
for key in [k for k in self.__dict__.keys() if k[0] != '_']:
|
||||
del self.__dict__[key]
|
||||
|
||||
def flags(self, **kw):
|
||||
"""Override flag variables for a test."""
|
||||
group = kw.pop('group', None)
|
||||
for k, v in kw.iteritems():
|
||||
CONF.set_override(k, v, group)
|
||||
|
||||
|
||||
class APICoverage(object):
|
||||
|
||||
cover_api = None
|
||||
|
||||
def test_api_methods(self):
|
||||
self.assertTrue(self.cover_api is not None)
|
||||
api_methods = [x for x in dir(self.cover_api)
|
||||
if not x.startswith('_')]
|
||||
test_methods = [x[5:] for x in dir(self)
|
||||
if x.startswith('test_')]
|
||||
self.assertThat(
|
||||
test_methods,
|
||||
testtools.matchers.ContainsAll(api_methods))
|
||||
|
||||
|
||||
class TimeOverride(fixtures.Fixture):
|
||||
"""Fixture to start and remove time override."""
|
||||
|
||||
def setUp(self):
|
||||
super(TimeOverride, self).setUp()
|
||||
timeutils.set_time_override()
|
||||
self.addCleanup(timeutils.clear_time_override)
|
@ -20,7 +20,6 @@ import fixtures
|
||||
from oslo.config import cfg
|
||||
|
||||
from ironic.common import config
|
||||
from ironic.common import paths
|
||||
|
||||
CONF = cfg.CONF
|
||||
CONF.import_opt('use_ipv6', 'ironic.netconf')
|
||||
|
@ -15,13 +15,11 @@
|
||||
|
||||
"""Ironic DB test base class."""
|
||||
|
||||
from oslo.config import cfg
|
||||
|
||||
from ironic.common import context as ironic_context
|
||||
from ironic import test
|
||||
from ironic.tests import base
|
||||
|
||||
|
||||
class DbTestCase(test.TestCase):
|
||||
class DbTestCase(base.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(DbTestCase, self).setUp()
|
||||
|
@ -17,8 +17,8 @@ import os
|
||||
import fixtures
|
||||
from oslo.config import cfg
|
||||
|
||||
from ironic.openstack.common import policy as common_policy
|
||||
from ironic.common import policy as ironic_policy
|
||||
from ironic.openstack.common import policy as common_policy
|
||||
from ironic.tests import fake_policy
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
@ -16,11 +16,8 @@
|
||||
# under the License.
|
||||
|
||||
import __builtin__
|
||||
import datetime
|
||||
import errno
|
||||
import functools
|
||||
import hashlib
|
||||
import importlib
|
||||
import os
|
||||
import os.path
|
||||
import StringIO
|
||||
@ -30,16 +27,14 @@ import mox
|
||||
import netaddr
|
||||
from oslo.config import cfg
|
||||
|
||||
import ironic
|
||||
from ironic.common import exception
|
||||
from ironic.common import utils
|
||||
from ironic.openstack.common import timeutils
|
||||
from ironic import test
|
||||
from ironic.tests import base
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
class BareMetalUtilsTestCase(test.TestCase):
|
||||
class BareMetalUtilsTestCase(base.TestCase):
|
||||
|
||||
def test_random_alnum(self):
|
||||
s = utils.random_alnum(10)
|
||||
@ -81,7 +76,7 @@ class BareMetalUtilsTestCase(test.TestCase):
|
||||
self.mox.VerifyAll()
|
||||
|
||||
|
||||
class ExecuteTestCase(test.TestCase):
|
||||
class ExecuteTestCase(base.TestCase):
|
||||
|
||||
def test_retry_on_failure(self):
|
||||
fd, tmpfilename = tempfile.mkstemp()
|
||||
@ -164,7 +159,7 @@ grep foo
|
||||
os.unlink(tmpfilename2)
|
||||
|
||||
|
||||
class GenericUtilsTestCase(test.TestCase):
|
||||
class GenericUtilsTestCase(base.TestCase):
|
||||
def test_hostname_unicode_sanitization(self):
|
||||
hostname = u"\u7684.test.example.com"
|
||||
self.assertEqual("test.example.com",
|
||||
@ -303,7 +298,7 @@ class GenericUtilsTestCase(test.TestCase):
|
||||
"failure")
|
||||
|
||||
|
||||
class MkfsTestCase(test.TestCase):
|
||||
class MkfsTestCase(base.TestCase):
|
||||
|
||||
def test_mkfs(self):
|
||||
self.mox.StubOutWithMock(utils, 'execute')
|
||||
@ -330,7 +325,7 @@ class MkfsTestCase(test.TestCase):
|
||||
utils.mkfs('swap', '/my/swap/block/dev', 'swap-vol')
|
||||
|
||||
|
||||
class IntLikeTestCase(test.TestCase):
|
||||
class IntLikeTestCase(base.TestCase):
|
||||
|
||||
def test_is_int_like(self):
|
||||
self.assertTrue(utils.is_int_like(1))
|
||||
|
@ -1,29 +1,21 @@
|
||||
d2to1>=0.2.10,<0.3
|
||||
pbr>=0.5,<0.6
|
||||
SQLAlchemy>=0.7.8,<0.7.99
|
||||
Cheetah>=2.4.4
|
||||
amqplib>=0.6.1
|
||||
anyjson>=0.2.4
|
||||
argparse
|
||||
boto
|
||||
eventlet>=0.9.17
|
||||
kombu>=1.0.4
|
||||
lxml>=2.3
|
||||
routes>=1.12.3
|
||||
WebOb==1.2.3
|
||||
greenlet>=0.3.1
|
||||
PasteDeploy>=1.5.0
|
||||
paste
|
||||
sqlalchemy-migrate>=0.7.2
|
||||
netaddr>=0.7.6
|
||||
suds>=0.4
|
||||
paramiko
|
||||
pyasn1
|
||||
Babel>=0.9.6
|
||||
iso8601>=0.1.4
|
||||
httplib2
|
||||
setuptools_git>=0.4
|
||||
python-cinderclient>=1.0.1
|
||||
python-quantumclient>=2.2.0,<3.0.0
|
||||
python-glanceclient>=0.5.0,<2
|
||||
python-keystoneclient>=0.2.0
|
||||
|
0
tools/__init__.py
Normal file
0
tools/__init__.py
Normal file
@ -207,7 +207,8 @@ class Fedora(Distro):
|
||||
This can be removed when the fix is applied upstream.
|
||||
|
||||
Nova: https://bugs.launchpad.net/nova/+bug/884915
|
||||
Upstream: https://bitbucket.org/which_linden/eventlet/issue/89
|
||||
Upstream: https://bitbucket.org/eventlet/eventlet/issue/89
|
||||
RHEL: https://bugzilla.redhat.com/958868
|
||||
"""
|
||||
|
||||
# Install "patch" program if it's not there
|
||||
|
@ -25,8 +25,8 @@ def main(argv):
|
||||
|
||||
venv = os.environ['VIRTUAL_ENV']
|
||||
|
||||
pip_requires = os.path.join(root, 'tools', 'pip-requires')
|
||||
test_requires = os.path.join(root, 'tools', 'test-requires')
|
||||
pip_requires = os.path.join(root, 'requirements.txt')
|
||||
test_requires = os.path.join(root, 'test-requirements.txt')
|
||||
py_version = "python%s.%s" % (sys.version_info[0], sys.version_info[1])
|
||||
project = 'Nova'
|
||||
install = install_venv.InstallVenv(root, venv, pip_requires, test_requires,
|
||||
|
Loading…
Reference in New Issue
Block a user