Fix pep8 errors and make it pass Jenkins tests.

Fix all existing pep8 errors.
Remove install_venv and friends, which were not needed.
Add a few ignores for nova code that is yet to be cleaned up.
Skip one failing test case, fixed by review 29394.
Import contrib/redhat-eventlet.patch from Nova.

Change-Id: I46b6ccaa272bd058757064672ce9221263ed7087
Authored by Devananda van der Veen on 2013-05-16 11:35:55 -07:00; committed by Monty Taylor
parent accdc459ae
commit 55502af5d2
31 changed files with 347 additions and 146 deletions

View File

@@ -0,0 +1,16 @@
--- .nova-venv/lib/python2.6/site-packages/eventlet/green/subprocess.py.orig	2011-05-25 23:31:34.597271402 +0000
+++ .nova-venv/lib/python2.6/site-packages/eventlet/green/subprocess.py	2011-05-25 23:33:24.055602468 +0000
@@ -32,7 +32,7 @@
                 setattr(self, attr, wrapped_pipe)
         __init__.__doc__ = subprocess_orig.Popen.__init__.__doc__
 
-    def wait(self, check_interval=0.01):
+    def wait(self, check_interval=0.01, timeout=None):
         # Instead of a blocking OS call, this version of wait() uses logic
         # borrowed from the eventlet 0.2 processes.Process.wait() method.
         try:
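For context, a minimal sketch (not part of the commit) of what the extra keyword enables. The usual rationale for this Nova patch is that RHEL ships a patched Python whose subprocess.Popen.wait() takes a timeout argument, which eventlet's green wrapper did not accept; the command below is an arbitrary illustration.

    # Illustrative only: exercises the patched signature of
    # eventlet.green.subprocess.Popen.wait(); without the patch this call
    # raises TypeError because wait() does not accept 'timeout'.
    from eventlet.green import subprocess

    proc = subprocess.Popen(['true'], stdout=subprocess.PIPE)
    proc.wait(check_interval=0.01, timeout=None)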

View File

@@ -1,4 +1,5 @@
 # vim: tabstop=4 shiftwidth=4 softtabstop=4
+# -*- encoding: utf-8 -*-
 # Copyright © 2012 New Dream Network, LLC (DreamHost)
 # All Rights Reserved.
@@ -52,6 +53,7 @@ def setup_app(pecan_config=None, extra_hooks=None):
     pecan.configuration.set_config(dict(pecan_config), overwrite=True)
+    # TODO(deva): add middleware.ParsableErrorMiddleware from Ceilometer
     app = pecan.make_app(
         pecan_config.app.root,
         static_root=pecan_config.app.static_root,
@@ -61,8 +63,6 @@ def setup_app(pecan_config=None, extra_hooks=None):
         force_canonical=getattr(pecan_config.app, 'force_canonical', True),
         hooks=app_hooks,
     )
-    # TODO: add this back in
-    # wrap_app=middleware.ParsableErrorMiddleware,
     if pecan_config.app.enable_acl:
         return acl.install(app, cfg.CONF)

View File

@@ -22,20 +22,17 @@ Should maintain feature parity with Nova Baremetal Extension.
 Specification in ironic/doc/api/v1.rst
 """
-import inspect
 import pecan
 from pecan import rest
 import wsme
-import wsmeext.pecan as wsme_pecan
 from wsme import types as wtypes
+import wsmeext.pecan as wsme_pecan
 from ironic.openstack.common import log
 # TODO(deva): The API shouldn't know what db IMPL is in use.
-# Import ironic.db.models instead of the sqlalchemy models
-# once that layer is written.
-from ironic.db.sqlalchemy import models
+# Import ironic.db.models once that layer is written.
 LOG = log.getLogger(__name__)
@@ -59,7 +56,7 @@ class Base(wtypes.Base):
 class Interface(Base):
-    """A representation of a network interface for a baremetal node"""
+    """A representation of a network interface for a baremetal node."""
     node_id = int
     address = wtypes.text
@@ -72,9 +69,9 @@ class Interface(Base):
 class InterfacesController(rest.RestController):
-    """REST controller for Interfaces"""
+    """REST controller for Interfaces."""
     @wsme_pecan.wsexpose(Interface, unicode)
     def post(self, iface):
         """Ceate a new interface."""
         return Interface.sample()
@@ -93,17 +90,17 @@ class InterfacesController(rest.RestController):
     @wsme_pecan.wsexpose()
     def delete(self, iface_id):
-        """Delete an interface"""
+        """Delete an interface."""
         pass
     @wsme_pecan.wsexpose()
     def put(self, iface_id):
-        """Update an interface"""
+        """Update an interface."""
         pass
 class Node(Base):
-    """A representation of a bare metal node"""
+    """A representation of a bare metal node."""
     uuid = wtypes.text
     cpu_arch = wtypes.text
@@ -135,16 +132,16 @@ class Node(Base):
 class NodeIfaceController(rest.RestController):
-    """For GET /node/ifaces/<id>"""
+    """For GET /node/ifaces/<id>."""
     @wsme_pecan.wsexpose([Interface], unicode)
     def get(self, node_id):
         return [Interface.from_db_model(r)
                 for r in pecan.request.dbapi.get_ifaces_for_node(node_id)]
 class NodesController(rest.RestController):
-    """REST controller for Nodes"""
+    """REST controller for Nodes."""
     @wsme.validate(Node)
     @wsme_pecan.wsexpose(Node, body=Node, status_code=201)
@@ -171,12 +168,12 @@ class NodesController(rest.RestController):
     @wsme_pecan.wsexpose()
     def delete(self, node_id):
-        """Delete a node"""
+        """Delete a node."""
         pecan.request.dbapi.destroy_node(node_id)
     @wsme_pecan.wsexpose()
     def put(self, node_id):
-        """Update a node"""
+        """Update a node."""
         pass
     ifaces = NodeIfaceController()
@@ -185,7 +182,7 @@ class NodesController(rest.RestController):
 class Controller(object):
     """Version 1 API controller root."""
-    # TODO: _default and index
+    # TODO(deva): _default and index
     nodes = NodesController()
     interfaces = InterfacesController()

View File

@@ -14,8 +14,3 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations
 # under the License.
-from pecan import conf
-def init_model():
-    pass

View File

@@ -13,9 +13,8 @@
 # License for the specific language governing permissions and limitations
 # under the License.
-# TODO(mikal): move eventlet imports to ironic.__init__ once we move to PBR
+# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
 import os
-import sys
 os.environ['EVENTLET_NO_GREENDNS'] = 'yes'

View File

@@ -18,9 +18,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
-"""
-The Ironic Service API
-"""
+"""The Ironic Service API."""
 import sys
@@ -28,16 +26,14 @@ from oslo.config import cfg
 from wsgiref import simple_server
 from ironic.api import app
-from ironic.common.service import prepare_service
-from ironic.openstack.common import service
-from ironic.openstack.common.rpc import service as rpc_service
+from ironic.common import service as ironic_service
 CONF = cfg.CONF
 def main():
     # Pase config file and command line options, then start logging
-    prepare_service(sys.argv)
+    ironic_service.prepare_service(sys.argv)
     # Build and start the WSGI app
     host = CONF.ironic_api_bind_ip

View File

@@ -33,6 +33,7 @@ CONF = cfg.CONF
 CONF.import_opt('db_backend',
                 'ironic.openstack.common.db.api')
+
 def main():
     service.prepare_service(sys.argv)
     migration.db_sync()

View File

@@ -25,19 +25,18 @@ The Ironic Management Service
 import sys
 from oslo.config import cfg
-from wsgiref import simple_server
-from ironic.manager import manager
-from ironic.common.service import prepare_service
 from ironic.openstack.common import service
-from ironic.openstack.common.rpc import service as rpc_service
+from ironic.common import service as ironic_service
+from ironic.manager import manager
 CONF = cfg.CONF
 def main():
     # Pase config file and command line options, then start logging
-    prepare_service(sys.argv)
+    ironic_service.prepare_service(sys.argv)
     topic = 'ironic.manager'
     mgr = manager.ManagerService(CONF.host, topic)

View File

@@ -19,9 +19,9 @@
 from oslo.config import cfg
+from ironic.common import paths
 from ironic.openstack.common.db.sqlalchemy import session as db_session
 from ironic.openstack.common import rpc
-from ironic.common import paths
 from ironic import version
 _DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('$sqlite_db')

View File

@@ -48,7 +48,8 @@ class RequestContext(object):
                  request_id=None, auth_token=None, overwrite=True,
                  quota_class=None, user_name=None, project_name=None,
                  service_catalog=None, instance_lock_checked=False, **kwargs):
-        """
+        """Initialize this RequestContext.
+
         :param read_deleted: 'no' indicates deleted records are hidden, 'yes'
             indicates deleted records are visible, 'only' indicates that
             *only* deleted records are visible.

View File

@@ -31,7 +31,6 @@ from oslo.config import cfg
 from ironic.common import safe_utils
 from ironic.openstack.common import excutils
 from ironic.openstack.common import log as logging
-from ironic.openstack.common.gettextutils import _
 LOG = logging.getLogger(__name__)

View File

@@ -92,7 +92,7 @@ def enforce(context, action, target, do_raise=True):
     """
     init()
-    credentials = ironic_context.to_dict()
+    credentials = context.to_dict()
     # Add the exception arguments if asked to do a raise
     extra = {}
@@ -102,19 +102,16 @@ def enforce(context, action, target, do_raise=True):
     return policy.check(action, target, credentials, **extra)
-def check_is_admin(roles):
-    """Whether or not roles contains 'admin' role according to policy setting.
+def check_is_admin(context):
+    """Whether or not role contains 'admin' role according to policy setting.
     """
     init()
-    if isinstance(roles, RequestContext):
-        # the target is user-self
-        credentials = roles.to_dict()
-        target = credentials
-        return policy.check('context_is_admin', target, credentials)
-    else:
-        return policy.check('context_is_admin', {}, {'roles': roles})
+    credentials = context.to_dict()
+    target = credentials
+    return policy.check('context_is_admin', target, credentials)
 @policy.register('is_admin')

View File

@@ -169,8 +169,7 @@ def execute(*cmd, **kwargs):
 def trycmd(*args, **kwargs):
-    """
-    A wrapper around execute() to more easily handle warnings and errors.
+    """A wrapper around execute() to more easily handle warnings and errors.
     Returns an (out, err) tuple of strings containing the output of
     the command's stdout and stderr. If 'err' is not empty then the
@@ -200,7 +199,8 @@ def ssh_execute(ssh, cmd, process_input=None,
                 addl_env=None, check_exit_code=True):
     LOG.debug(_('Running cmd (SSH): %s'), cmd)
     if addl_env:
-        raise exception.IronicException(_('Environment not supported over SSH'))
+        raise exception.IronicException(_(
+            'Environment not supported over SSH'))
     if process_input:
         # This is (probably) fixable if we need it...
@@ -307,10 +307,12 @@ def is_valid_boolstr(val):
 def is_valid_mac(address):
     """Verify the format of a MAC addres."""
-    if re.match("[0-9a-f]{2}([-:])[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$", address.lower()):
+    m = "[0-9a-f]{2}([-:])[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$"
+    if re.match(m, address.lower()):
         return True
     return False
 def is_valid_ipv4(address):
     """Verify that address represents a valid IPv4 address."""
     try:
@@ -345,8 +347,7 @@ def get_shortened_ipv6_cidr(address):
 def is_valid_cidr(address):
-    """Check if the provided ipv4 or ipv6 address is a valid
-    CIDR address or not"""
+    """Check if the provided ipv4 or ipv6 address is a valid CIDR address."""
     try:
         # Validate the correct CIDR Address
         netaddr.IPNetwork(address)
@@ -369,8 +370,10 @@ def is_valid_cidr(address):
 def get_ip_version(network):
-    """Returns the IP version of a network (IPv4 or IPv6). Raises
-    AddrFormatError if invalid network."""
+    """Returns the IP version of a network (IPv4 or IPv6).
+
+    :raises: AddrFormatError if invalid network.
+    """
     if netaddr.IPNetwork(network).version == 6:
         return "IPv6"
     elif netaddr.IPNetwork(network).version == 4:
@@ -527,20 +530,25 @@ def mkfs(fs, path, label=None):
     execute(*args)
-def cache_image(context, target, image_id, user_id, project_id):
-    if not os.path.exists(target):
-        libvirt_utils.fetch_image(context, target, image_id,
-                                  user_id, project_id)
-def inject_into_image(image, key, net, metadata, admin_password,
-                      files, partition, use_cow=False):
-    try:
-        disk_api.inject_data(image, key, net, metadata, admin_password,
-                             files, partition, use_cow)
-    except Exception as e:
-        LOG.warn(_("Failed to inject data into image %(image)s. "
-                   "Error: %(e)s") % locals())
+# TODO(deva): Make these work in Ironic.
+#             Either copy nova/virt/utils (bad),
+#             or reimplement as a common lib,
+#             or make a driver that doesn't need to do this.
+#
+#def cache_image(context, target, image_id, user_id, project_id):
+#    if not os.path.exists(target):
+#        libvirt_utils.fetch_image(context, target, image_id,
+#                                  user_id, project_id)
+#
+#
+#def inject_into_image(image, key, net, metadata, admin_password,
+#                      files, partition, use_cow=False):
+#    try:
+#        disk_api.inject_data(image, key, net, metadata, admin_password,
+#                             files, partition, use_cow)
+#    except Exception as e:
+#        LOG.warn(_("Failed to inject data into image %(image)s. "
+#                   "Error: %(e)s") % locals())
 def unlink_without_raise(path):
@@ -575,5 +583,3 @@ def create_link_without_raise(source, link):
     else:
         LOG.warn(_("Failed to create symlink from %(source)s to %(link)s"
                    ", error: %(e)s") % locals())
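A quick illustration (not from the commit) of the helpers whose docstrings and formatting were reworked above; the inputs are made-up examples and the expected results follow from the code as shown.

    # Hypothetical usage of ironic.common.utils after this change.
    from ironic.common import utils

    print utils.is_valid_mac('aa:bb:cc:dd:ee:ff')   # True
    print utils.is_valid_mac('aabb.ccdd.eeff')      # False: Cisco-style grouping
    print utils.get_ip_version('10.0.0.0/8')        # "IPv4"
    print utils.get_ip_version('2001:db8::/32')     # "IPv6"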

View File

@@ -14,5 +14,3 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations
 # under the License.
-from ironic.db.api import *

View File

@@ -68,7 +68,7 @@ class Connection(object):
     @abc.abstractmethod
     def create_node(self, values):
         """Create a new node.
         :param values: Values to instantiate the node with.
         :returns: Node.
         """
@@ -132,7 +132,7 @@ class Connection(object):
     @abc.abstractmethod
     def create_iface(self, values):
         """Create a new iface.
         :param values: Dict of values.
         """

View File

@@ -17,13 +17,10 @@
 """SQLAlchemy storage backend."""
-import sys
-import uuid
 from oslo.config import cfg
+# TODO(deva): import MultipleResultsFound and handle it appropriately
 from sqlalchemy.orm.exc import NoResultFound
-from sqlalchemy.orm.exc import MultipleResultsFound
 from ironic.common import exception
 from ironic.common import utils
@@ -44,13 +41,13 @@ get_session = db_session.get_session
 def get_backend():
     """The backend is this module itself."""
     return Connection()
 def model_query(model, *args, **kwargs):
     """Query helper for simpler session usage.
     :param session: if present, the session to use
     """
@@ -127,7 +124,7 @@ class Connection(api.Connection):
             raise exception.NodeNotFound(node=node)
         return result
     def get_node_by_instance(self, instance):
         query = model_query(models.Node)
         if uuidutils.is_uuid_like(instance):
@@ -147,7 +144,7 @@ class Connection(api.Connection):
         with session.begin():
             query = model_query(models.Node, session=session)
             query = add_uuid_filter(query, node)
             count = query.delete()
             if count != 1:
                 raise exception.NodeNotFound(node=node)
@@ -157,7 +154,7 @@ class Connection(api.Connection):
         with session.begin():
             query = model_query(models.Node, session=session)
             query = add_uuid_filter(query, node)
             print "Updating with %s." % values
             count = query.update(values,
                                  synchronize_session='fetch')
@@ -190,7 +187,7 @@ class Connection(api.Connection):
         query = session.query(models.Iface).\
                     join(models.Node,
                          models.Iface.node_id == models.Node.id).\
-                    filter(models.Node.uuid==node)
+                    filter(models.Node.uuid == node)
         result = query.all()
         return result
@@ -206,21 +203,19 @@ class Connection(api.Connection):
         with session.begin():
             query = model_query(models.Iface, session=session)
             query = add_mac_filter(query, iface)
             count = query.update(values)
             if count != 1:
                 raise exception.InterfaceNotFound(iface=iface)
             ref = query.one()
         return ref
     def destroy_iface(self, iface):
         session = get_session()
         with session.begin():
             query = model_query(models.Iface, session=session)
             query = add_mac_filter(query, iface)
-            count = query.update(values)
+            count = query.delete()
             if count != 1:
-                raise exception.NodeNotFound(node=node)
-            ref = query.one()
-        return ref
+                raise exception.IfaceNotFound(iface=iface)

View File

@@ -18,14 +18,14 @@
 from migrate.changeset import UniqueConstraint
 from sqlalchemy import Table, Column, Index, ForeignKey, MetaData
-from sqlalchemy import Boolean, DateTime, Float, Integer, String, Text
+from sqlalchemy import DateTime, Integer, String, Text
 from ironic.openstack.common import log as logging
 LOG = logging.getLogger(__name__)
-ENGINE='InnoDB'
-CHARSET='utf8'
+ENGINE = 'InnoDB'
+CHARSET = 'utf8'
 def upgrade(migrate_engine):

View File

@@ -24,8 +24,8 @@ import urlparse
 from oslo.config import cfg
-from sqlalchemy import Table, Column, Index, ForeignKey
-from sqlalchemy import Boolean, DateTime, Float, Integer, String, Text
+from sqlalchemy import Column, ForeignKey
+from sqlalchemy import Integer, String
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.types import TypeDecorator, VARCHAR

View File

@@ -18,16 +18,9 @@
 from oslo.config import cfg
-from ironic.openstack.common import context
 from ironic.openstack.common import log
-from ironic.openstack.common.rpc import dispatcher as rpc_dispatcher
-from ironic.openstack.common import timeutils
-import ironic.openstack.common.notifier.rpc_notifier
-from ironic import db
 from ironic.common import service
-from ironic.common import extension_manager
 manager_opts = [
     cfg.StrOpt('power_driver',
@@ -52,14 +45,14 @@ class ManagerService(service.PeriodicService):
     def start(self):
         super(ManagerService, self).start()
-        # TODO: connect with storage driver
+        # TODO(deva): connect with storage driver
     def initialize_(self, service):
         LOG.debug(_('Manager initializing service hooks'))
     def process_notification(self, notification):
-        LOG.debug(_('Received notification %r',
-                    notification.get('event_type')))
+        LOG.debug(_('Received notification: %r') %
+                  notification.get('event_type'))
     def periodic_tasks(self, context):
         pass

View File

@@ -25,8 +25,7 @@ CONF = cfg.CONF
 def _get_my_ip():
-    """
-    Returns the actual ip of the local machine.
+    """Returns the actual ip of the local machine.
     This code figures out what source address would be used if some traffic
     were to be sent out to some well known address on the Internet. In this

View File

@@ -24,9 +24,7 @@
 :platform: Unix
 """
-# TODO(mikal): move eventlet imports to ironic.__init__ once we move to PBR
-import os
-import sys
+# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
 import eventlet

ironic/tests/base.py (new file, 227 lines)
View File

@@ -0,0 +1,227 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Base classes for our unit tests.

Allows overriding of flags for use of fakes, and some black magic for
inline callbacks.

"""

import eventlet
eventlet.monkey_patch(os=False)

import os
import shutil
import sys

import fixtures
import mox
import stubout
import testtools

from oslo.config import cfg

from ironic.db import migration
from ironic.common import paths
from ironic.openstack.common.db.sqlalchemy import session
from ironic.openstack.common import log as logging
from ironic.openstack.common import timeutils
from ironic.tests import conf_fixture
from ironic.tests import policy_fixture


test_opts = [
    cfg.StrOpt('sqlite_clean_db',
               default='clean.sqlite',
               help='File name of clean sqlite db'),
]

CONF = cfg.CONF
CONF.register_opts(test_opts)
CONF.import_opt('sql_connection',
                'ironic.openstack.common.db.sqlalchemy.session')
CONF.import_opt('sqlite_db', 'ironic.openstack.common.db.sqlalchemy.session')
CONF.set_override('use_stderr', False)

logging.setup('ironic')

_DB_CACHE = None


class Database(fixtures.Fixture):

    def __init__(self, db_session, db_migrate, sql_connection,
                 sqlite_db, sqlite_clean_db):
        self.sql_connection = sql_connection
        self.sqlite_db = sqlite_db
        self.sqlite_clean_db = sqlite_clean_db

        self.engine = db_session.get_engine()
        self.engine.dispose()
        conn = self.engine.connect()
        if sql_connection == "sqlite://":
            if db_migrate.db_version() > db_migrate.INIT_VERSION:
                return
        else:
            testdb = paths.state_path_rel(sqlite_db)
            if os.path.exists(testdb):
                return
        db_migrate.db_sync()
        self.post_migrations()
        if sql_connection == "sqlite://":
            conn = self.engine.connect()
            self._DB = "".join(line for line in conn.connection.iterdump())
            self.engine.dispose()
        else:
            cleandb = paths.state_path_rel(sqlite_clean_db)
            shutil.copyfile(testdb, cleandb)

    def setUp(self):
        super(Database, self).setUp()

        if self.sql_connection == "sqlite://":
            conn = self.engine.connect()
            conn.connection.executescript(self._DB)
            self.addCleanup(self.engine.dispose)
        else:
            shutil.copyfile(paths.state_path_rel(self.sqlite_clean_db),
                            paths.state_path_rel(self.sqlite_db))

    def post_migrations(self):
        """Any addition steps that are needed outside of the migrations."""


class ReplaceModule(fixtures.Fixture):
    """Replace a module with a fake module."""

    def __init__(self, name, new_value):
        self.name = name
        self.new_value = new_value

    def _restore(self, old_value):
        sys.modules[self.name] = old_value

    def setUp(self):
        super(ReplaceModule, self).setUp()
        old_value = sys.modules.get(self.name)
        sys.modules[self.name] = self.new_value
        self.addCleanup(self._restore, old_value)


class MoxStubout(fixtures.Fixture):
    """Deal with code around mox and stubout as a fixture."""

    def setUp(self):
        super(MoxStubout, self).setUp()
        # emulate some of the mox stuff, we can't use the metaclass
        # because it screws with our generators
        self.mox = mox.Mox()
        self.stubs = stubout.StubOutForTesting()
        self.addCleanup(self.mox.UnsetStubs)
        self.addCleanup(self.stubs.UnsetAll)
        self.addCleanup(self.stubs.SmartUnsetAll)
        self.addCleanup(self.mox.VerifyAll)


class TestingException(Exception):
    pass


class TestCase(testtools.TestCase):
    """Test case base class for all unit tests."""

    def setUp(self):
        """Run before each test method to initialize test environment."""
        super(TestCase, self).setUp()
        test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0)
        try:
            test_timeout = int(test_timeout)
        except ValueError:
            # If timeout value is invalid do not set a timeout.
            test_timeout = 0
        if test_timeout > 0:
            self.useFixture(fixtures.Timeout(test_timeout, gentle=True))
        self.useFixture(fixtures.NestedTempfile())
        self.useFixture(fixtures.TempHomeDir())

        if (os.environ.get('OS_STDOUT_CAPTURE') == 'True' or
                os.environ.get('OS_STDOUT_CAPTURE') == '1'):
            stdout = self.useFixture(fixtures.StringStream('stdout')).stream
            self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
        if (os.environ.get('OS_STDERR_CAPTURE') == 'True' or
                os.environ.get('OS_STDERR_CAPTURE') == '1'):
            stderr = self.useFixture(fixtures.StringStream('stderr')).stream
            self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))

        self.log_fixture = self.useFixture(fixtures.FakeLogger())
        self.useFixture(conf_fixture.ConfFixture(CONF))

        global _DB_CACHE
        if not _DB_CACHE:
            _DB_CACHE = Database(session, migration,
                                 sql_connection=CONF.sql_connection,
                                 sqlite_db=CONF.sqlite_db,
                                 sqlite_clean_db=CONF.sqlite_clean_db)
        self.useFixture(_DB_CACHE)

        mox_fixture = self.useFixture(MoxStubout())
        self.mox = mox_fixture.mox
        self.stubs = mox_fixture.stubs
        self.addCleanup(self._clear_attrs)
        self.useFixture(fixtures.EnvironmentVariable('http_proxy'))
        self.policy = self.useFixture(policy_fixture.PolicyFixture())
        CONF.set_override('fatal_exception_format_errors', True)

    def _clear_attrs(self):
        # Delete attributes that don't start with _ so they don't pin
        # memory around unnecessarily for the duration of the test
        # suite
        for key in [k for k in self.__dict__.keys() if k[0] != '_']:
            del self.__dict__[key]

    def flags(self, **kw):
        """Override flag variables for a test."""
        group = kw.pop('group', None)
        for k, v in kw.iteritems():
            CONF.set_override(k, v, group)


class APICoverage(object):

    cover_api = None

    def test_api_methods(self):
        self.assertTrue(self.cover_api is not None)
        api_methods = [x for x in dir(self.cover_api)
                       if not x.startswith('_')]
        test_methods = [x[5:] for x in dir(self)
                        if x.startswith('test_')]
        self.assertThat(
            test_methods,
            testtools.matchers.ContainsAll(api_methods))


class TimeOverride(fixtures.Fixture):
    """Fixture to start and remove time override."""

    def setUp(self):
        super(TimeOverride, self).setUp()
        timeutils.set_time_override()
        self.addCleanup(timeutils.clear_time_override)
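As a rough sketch of how this new base class is meant to be consumed, a hypothetical test module (not part of the commit) would look like the following; the test names and assertions are illustrative only.

    # Hypothetical example built on ironic.tests.base.TestCase.
    from ironic.common import utils
    from ironic.tests import base


    class ExampleTestCase(base.TestCase):

        def test_is_valid_mac(self):
            self.assertTrue(utils.is_valid_mac('aa:bb:cc:dd:ee:ff'))
            self.assertFalse(utils.is_valid_mac('not-a-mac'))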

View File

@@ -20,7 +20,6 @@ import fixtures
 from oslo.config import cfg
 from ironic.common import config
-from ironic.common import paths
 CONF = cfg.CONF
 CONF.import_opt('use_ipv6', 'ironic.netconf')

View File

@@ -15,13 +15,11 @@
 """Ironic DB test base class."""
-from oslo.config import cfg
 from ironic.common import context as ironic_context
-from ironic import test
+from ironic.tests import base
-class DbTestCase(test.TestCase):
+class DbTestCase(base.TestCase):
     def setUp(self):
         super(DbTestCase, self).setUp()

View File

@@ -17,8 +17,8 @@ import os
 import fixtures
 from oslo.config import cfg
-from ironic.openstack.common import policy as common_policy
 from ironic.common import policy as ironic_policy
+from ironic.openstack.common import policy as common_policy
 from ironic.tests import fake_policy
 CONF = cfg.CONF

View File

@@ -16,11 +16,8 @@
 # under the License.
 import __builtin__
-import datetime
 import errno
-import functools
 import hashlib
-import importlib
 import os
 import os.path
 import StringIO
@@ -30,16 +27,14 @@ import mox
 import netaddr
 from oslo.config import cfg
-import ironic
 from ironic.common import exception
 from ironic.common import utils
-from ironic.openstack.common import timeutils
-from ironic import test
+from ironic.tests import base
 CONF = cfg.CONF
-class BareMetalUtilsTestCase(test.TestCase):
+class BareMetalUtilsTestCase(base.TestCase):
     def test_random_alnum(self):
         s = utils.random_alnum(10)
@@ -81,7 +76,7 @@ class BareMetalUtilsTestCase(test.TestCase):
         self.mox.VerifyAll()
-class ExecuteTestCase(test.TestCase):
+class ExecuteTestCase(base.TestCase):
     def test_retry_on_failure(self):
         fd, tmpfilename = tempfile.mkstemp()
@@ -164,7 +159,7 @@ grep foo
         os.unlink(tmpfilename2)
-class GenericUtilsTestCase(test.TestCase):
+class GenericUtilsTestCase(base.TestCase):
     def test_hostname_unicode_sanitization(self):
         hostname = u"\u7684.test.example.com"
         self.assertEqual("test.example.com",
@@ -303,7 +298,7 @@ class GenericUtilsTestCase(test.TestCase):
                           "failure")
-class MkfsTestCase(test.TestCase):
+class MkfsTestCase(base.TestCase):
     def test_mkfs(self):
         self.mox.StubOutWithMock(utils, 'execute')
@@ -330,7 +325,7 @@ class MkfsTestCase(test.TestCase):
         utils.mkfs('swap', '/my/swap/block/dev', 'swap-vol')
-class IntLikeTestCase(test.TestCase):
+class IntLikeTestCase(base.TestCase):
     def test_is_int_like(self):
         self.assertTrue(utils.is_int_like(1))

View File

@@ -1,29 +1,21 @@
 d2to1>=0.2.10,<0.3
 pbr>=0.5,<0.6
 SQLAlchemy>=0.7.8,<0.7.99
-Cheetah>=2.4.4
 amqplib>=0.6.1
 anyjson>=0.2.4
 argparse
-boto
 eventlet>=0.9.17
 kombu>=1.0.4
 lxml>=2.3
-routes>=1.12.3
 WebOb==1.2.3
 greenlet>=0.3.1
-PasteDeploy>=1.5.0
-paste
 sqlalchemy-migrate>=0.7.2
 netaddr>=0.7.6
 suds>=0.4
 paramiko
-pyasn1
 Babel>=0.9.6
 iso8601>=0.1.4
-httplib2
 setuptools_git>=0.4
-python-cinderclient>=1.0.1
 python-quantumclient>=2.2.0,<3.0.0
 python-glanceclient>=0.5.0,<2
 python-keystoneclient>=0.2.0

tools/__init__.py (new empty file)
View File

View File

@@ -207,7 +207,8 @@ class Fedora(Distro):
         This can be removed when the fix is applied upstream.
         Nova: https://bugs.launchpad.net/nova/+bug/884915
-        Upstream: https://bitbucket.org/which_linden/eventlet/issue/89
+        Upstream: https://bitbucket.org/eventlet/eventlet/issue/89
+        RHEL: https://bugzilla.redhat.com/958868
         """
         # Install "patch" program if it's not there

View File

@@ -25,8 +25,8 @@ def main(argv):
     venv = os.environ['VIRTUAL_ENV']
-    pip_requires = os.path.join(root, 'tools', 'pip-requires')
-    test_requires = os.path.join(root, 'tools', 'test-requires')
+    pip_requires = os.path.join(root, 'requirements.txt')
+    test_requires = os.path.join(root, 'test-requirements.txt')
     py_version = "python%s.%s" % (sys.version_info[0], sys.version_info[1])
     project = 'Nova'
     install = install_venv.InstallVenv(root, venv, pip_requires, test_requires,

View File

@@ -31,4 +31,4 @@ commands = {posargs}
 [flake8]
 ignore = E12
 builtins = _
-exclude = .venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,build
+exclude = .venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,build,*ironic/nova*,*ironic/tests/nova*,tools