Make HACKING compliant

Make all the source and tests HACKING compliant and
enable tox -e hacking by default.

Relative directory checks are not enabled (yet).

Change-Id: I8803f67c49b4d16caebe76ae690092ae5c9a6dd3
Sandy Walsh 2013-03-06 15:20:48 -04:00 committed by Sandy Walsh
parent b345cef2f3
commit 6a7633c62a
41 changed files with 396 additions and 326 deletions
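
Most of the churn below applies the HACKING import and docstring rules rather than changing behaviour. A short, hedged sketch of the conventions being enforced; the module names come from the diff, the unused imports only illustrate grouping, and the function is illustrative only:

# Grouped and alphabetized imports: stdlib, then third-party, then project,
# one module per line, modules only, no relative imports.
import datetime

from oslo.config import cfg

from ceilometer.openstack.common import log
from ceilometer import storage

# Lines the updated checks would flag:
#   from ceilometer import storage, counter    # N301: one import per line
#   from os.path import dirname as dirname2    # N302: import only modules
#   from os.path import *                      # N303: no wildcard imports
#   from . import v2                           # N304: no relative imports


def utcnow_iso():
    """Docstrings start flush against the quotes and end with a period."""
    return datetime.datetime.utcnow().isoformat()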

View File

@@ -90,12 +90,12 @@ pipeline_manager = pipeline.setup_pipeline(publish_manager)
with pipeline_manager.publisher(context.get_admin_context(),
                                cfg.CONF.counter_source) as p:
    p([counter.Counter(name=cfg.CONF.counter_name,
                       type=cfg.CONF.counter_type,
                       unit=cfg.CONF.counter_unit,
                       volume=cfg.CONF.counter_volume,
                       user_id=cfg.CONF.counter_user,
                       project_id=cfg.CONF.counter_project,
                       resource_id=cfg.CONF.counter_resource,
                       timestamp=cfg.CONF.counter_timestamp,
                       resource_metadata=cfg.CONF.counter_metadata
                       and eval(cfg.CONF.counter_metadata))])

View File

@@ -21,6 +21,7 @@ import itertools
from oslo.config import cfg
from stevedore import dispatch
from ceilometer.openstack.common import context
from ceilometer.openstack.common import log
from ceilometer import pipeline

@@ -65,7 +66,7 @@ class AgentManager(object):
    @abc.abstractmethod
    def create_polling_task(self):
-        """Create an empty polling task"""
+        """Create an empty polling task."""
    def setup_polling_tasks(self):
        polling_tasks = {}

View File

@@ -15,15 +15,17 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
-"""Set up the ACL to acces the API server."""
-import keystoneclient.middleware.auth_token as auth_token
+"""Access Control Lists (ACL's) control access the API server."""
+from keystoneclient.middleware import auth_token
from oslo.config import cfg
from pecan import hooks
from webob import exc
from ceilometer import policy
OPT_GROUP_NAME = 'keystone_authtoken'

View File

@@ -17,11 +17,10 @@
# under the License.
from oslo.config import cfg
-from pecan import make_app
-from pecan import configuration
-from ceilometer.api import config as api_config
+import pecan
from ceilometer.api import acl
+from ceilometer.api import config as api_config
from ceilometer.api import hooks
from ceilometer.api import middleware

@@ -29,7 +28,7 @@ from ceilometer.api import middleware
def get_pecan_config():
    # Set up the pecan configuration
    filename = api_config.__file__.replace('.pyc', '.py')
-    return configuration.conf_from_file(filename)
+    return pecan.configuration.conf_from_file(filename)
def setup_app(pecan_config=None, extra_hooks=None):

@@ -45,9 +44,9 @@ def setup_app(pecan_config=None, extra_hooks=None):
    if pecan_config.app.enable_acl:
        app_hooks.append(acl.AdminAuthHook())
-    configuration.set_config(dict(pecan_config), overwrite=True)
-    app = make_app(
+    pecan.configuration.set_config(dict(pecan_config), overwrite=True)
+    app = pecan.make_app(
        pecan_config.app.root,
        static_root=pecan_config.app.static_root,
        template_path=pecan_config.app.template_path,

View File

@@ -16,16 +16,16 @@
# License for the specific language governing permissions and limitations
# under the License.
-from pecan import expose
-from . import v2
+import pecan
+from ceilometer.api.controllers import v2
class RootController(object):
    v2 = v2.V2Controller()
-    @expose(generic=True, template='index.html')
+    @pecan.expose(generic=True, template='index.html')
    def index(self):
        # FIXME: Return version information
        return dict()

View File

@@ -32,25 +32,24 @@
import datetime
import inspect
import pecan
-from pecan import request
-from pecan.rest import RestController
+from pecan import rest
import wsme
import wsmeext.pecan as wsme_pecan
-from wsme.types import Base, text, Enum
+from wsme import types as wtypes
-from ceilometer.openstack.common import log as logging
+from ceilometer.openstack.common import log
from ceilometer.openstack.common import timeutils
from ceilometer import storage
-LOG = logging.getLogger(__name__)
+LOG = log.getLogger(__name__)
-operation_kind = Enum(str, 'lt', 'le', 'eq', 'ne', 'ge', 'gt')
+operation_kind = wtypes.Enum(str, 'lt', 'le', 'eq', 'ne', 'ge', 'gt')
-class Query(Base):
+class Query(wtypes.Base):
    """Query filter.
    """

@@ -62,7 +61,7 @@ class Query(Base):
    def set_op(self, value):
        self._op = value
-    field = text
+    field = wtypes.text
    "The name of the field to test"
    #op = wsme.wsattr(operation_kind, default='eq')

@@ -70,7 +69,7 @@ class Query(Base):
    op = wsme.wsproperty(operation_kind, get_op, set_op)
    "The comparison operator. Defaults to 'eq'."
-    value = text
+    value = wtypes.text
    "The value to compare against the stored data"
    def __repr__(self):

@@ -200,44 +199,44 @@ def _flatten_metadata(metadata):
               if type(v) not in set([list, dict, set]))
-class Sample(Base):
+class Sample(wtypes.Base):
    """A single measurement for a given meter and resource.
    """
-    source = text
+    source = wtypes.text
    "An identity source ID"
-    counter_name = text
+    counter_name = wtypes.text
    "The name of the meter"
    # FIXME(dhellmann): Make this meter_name?
-    counter_type = text
+    counter_type = wtypes.text
    "The type of the meter (see :ref:`measurements`)"
    # FIXME(dhellmann): Make this meter_type?
-    counter_unit = text
+    counter_unit = wtypes.text
    "The unit of measure for the value in counter_volume"
    # FIXME(dhellmann): Make this meter_unit?
    counter_volume = float
    "The actual measured value"
-    user_id = text
+    user_id = wtypes.text
    "The ID of the user who last triggered an update to the resource"
-    project_id = text
+    project_id = wtypes.text
    "The ID of the project or tenant that owns the resource"
-    resource_id = text
+    resource_id = wtypes.text
    "The ID of the :class:`Resource` for which the measurements are taken"
    timestamp = datetime.datetime
    "UTC date and time when the measurement was made"
-    resource_metadata = {text: text}
+    resource_metadata = {wtypes.text: wtypes.text}
    "Arbitrary metadata associated with the resource"
-    message_id = text
+    message_id = wtypes.text
    "A unique identifier for the sample"
    def __init__(self, counter_volume=None, resource_metadata={}, **kwds):

@@ -265,7 +264,7 @@ class Sample(Base):
    )
-class Statistics(Base):
+class Statistics(wtypes.Base):
    """Computed statistics for a query.
    """

@@ -353,7 +352,7 @@ class Statistics(Base):
    )
-class MeterController(RestController):
+class MeterController(rest.RestController):
    """Manages operations on a single meter.
    """
    _custom_actions = {

@@ -361,7 +360,7 @@ class MeterController(RestController):
    }
    def __init__(self, meter_id):
-        request.context['meter_id'] = meter_id
+        pecan.request.context['meter_id'] = meter_id
        self._id = meter_id
    @wsme_pecan.wsexpose([Sample], [Query])

@@ -374,7 +373,7 @@ class MeterController(RestController):
        kwargs['meter'] = self._id
        f = storage.EventFilter(**kwargs)
        return [Sample(**e)
-                for e in request.storage_conn.get_raw_events(f)
+                for e in pecan.request.storage_conn.get_raw_events(f)
                ]
    @wsme_pecan.wsexpose([Statistics], [Query], int)

@@ -389,7 +388,7 @@ class MeterController(RestController):
        kwargs = _query_to_kwargs(q, storage.EventFilter.__init__)
        kwargs['meter'] = self._id
        f = storage.EventFilter(**kwargs)
-        computed = request.storage_conn.get_meter_statistics(f, period)
+        computed = pecan.request.storage_conn.get_meter_statistics(f, period)
        # Find the original timestamp in the query to use for clamping
        # the duration returned in the statistics.
        start = end = None

@@ -405,27 +404,27 @@ class MeterController(RestController):
            for c in computed]
-class Meter(Base):
+class Meter(wtypes.Base):
    """One category of measurements.
    """
-    name = text
+    name = wtypes.text
    "The unique name for the meter"
    # FIXME(dhellmann): Make this an enum?
-    type = text
+    type = wtypes.text
    "The meter type (see :ref:`measurements`)"
-    unit = text
+    unit = wtypes.text
    "The unit of measure"
-    resource_id = text
+    resource_id = wtypes.text
    "The ID of the :class:`Resource` for which the measurements are taken"
-    project_id = text
+    project_id = wtypes.text
    "The ID of the project or tenant that owns the resource"
-    user_id = text
+    user_id = wtypes.text
    "The ID of the user who last triggered an update to the resource"
    @classmethod

@@ -439,7 +438,7 @@ class Meter(Base):
    )
-class MetersController(RestController):
+class MetersController(rest.RestController):
    """Works on meters."""
    @pecan.expose()

@@ -452,28 +451,28 @@ class MetersController(RestController):
        :param q: Filter rules for the meters to be returned.
        """
-        kwargs = _query_to_kwargs(q, request.storage_conn.get_meters)
+        kwargs = _query_to_kwargs(q, pecan.request.storage_conn.get_meters)
        return [Meter(**m)
-                for m in request.storage_conn.get_meters(**kwargs)]
+                for m in pecan.request.storage_conn.get_meters(**kwargs)]
-class Resource(Base):
+class Resource(wtypes.Base):
    """An externally defined object for which samples have been received.
    """
-    resource_id = text
+    resource_id = wtypes.text
    "The unique identifier for the resource"
-    project_id = text
+    project_id = wtypes.text
    "The ID of the owning project or tenant"
-    user_id = text
+    user_id = wtypes.text
    "The ID of the user who created the resource or updated it last"
    timestamp = datetime.datetime
    "UTC date and time of the last update to any meter for the resource"
-    metadata = {text: text}
+    metadata = {wtypes.text: wtypes.text}
    "Arbitrary metadata associated with the resource"
    def __init__(self, metadata={}, **kwds):

@@ -491,7 +490,7 @@ class Resource(Base):
    )
-class ResourcesController(RestController):
+class ResourcesController(rest.RestController):
    """Works on resources."""
    @wsme_pecan.wsexpose(Resource, unicode)

@@ -500,7 +499,8 @@ class ResourcesController(RestController):
        :param resource_id: The UUID of the resource.
        """
-        r = list(request.storage_conn.get_resources(resource=resource_id))[0]
+        r = list(pecan.request.storage_conn.get_resources(
+            resource=resource_id))[0]
        return Resource(**r)
    @wsme_pecan.wsexpose([Resource], [Query])

@@ -509,10 +509,10 @@ class ResourcesController(RestController):
        :param q: Filter rules for the resources to be returned.
        """
-        kwargs = _query_to_kwargs(q, request.storage_conn.get_resources)
+        kwargs = _query_to_kwargs(q, pecan.request.storage_conn.get_resources)
        resources = [
            Resource(**r)
-            for r in request.storage_conn.get_resources(**kwargs)]
+            for r in pecan.request.storage_conn.get_resources(**kwargs)]
        return resources
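
The hunks above replace direct imports of Base, text and Enum with the namespaced wsme types module. A condensed, self-contained restatement of the Query pattern shown above (attribute docstrings omitted; the wsattr default mirrors the commented-out line in the hunk):

import wsme
from wsme import types as wtypes

operation_kind = wtypes.Enum(str, 'lt', 'le', 'eq', 'ne', 'ge', 'gt')


class Query(wtypes.Base):
    """A single filter expression used by the V2 API."""

    field = wtypes.text
    op = wsme.wsattr(operation_kind, default='eq')
    value = wtypes.text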

View File

@@ -15,16 +15,15 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
-"""Set up the API server application instance
-"""
+"""Set up the API server application instance."""
import flask
from oslo.config import cfg
+from ceilometer.api import acl
+from ceilometer.api.v1 import blueprint as v1_blueprint
from ceilometer.openstack.common import jsonutils
from ceilometer import storage
-from ceilometer.api.v1 import blueprint as v1_blueprint
-from ceilometer.api import acl
storage.register_opts(cfg.CONF)

View File

@@ -21,12 +21,8 @@ from stevedore import dispatch
from ceilometer.collector import meter as meter_api
from ceilometer import extension_manager
-from ceilometer import pipeline
-from ceilometer import service
-from ceilometer import storage
from ceilometer.openstack.common import context
from ceilometer.openstack.common import log
-from ceilometer.openstack.common import timeutils
from ceilometer.openstack.common.rpc import dispatcher as rpc_dispatcher
# Import rpc_notifier to register `notification_topics` flag so that

@@ -34,6 +30,11 @@ from ceilometer.openstack.common.rpc import dispatcher as rpc_dispatcher
# FIXME(dhellmann): Use option importing feature of oslo.config instead.
import ceilometer.openstack.common.notifier.rpc_notifier
+from ceilometer.openstack.common import timeutils
+from ceilometer import pipeline
+from ceilometer import service
+from ceilometer import storage
OPTS = [
    cfg.ListOpt('disabled_notification_listeners',
                default=[],

@@ -43,7 +44,6 @@ OPTS = [
cfg.CONF.register_opts(OPTS)
LOG = log.getLogger(__name__)

View File

@@ -19,9 +19,9 @@
from oslo.config import cfg
from ceilometer import agent
+from ceilometer.compute.virt import inspector as virt_inspector
from ceilometer import extension_manager
from ceilometer import nova_client
-from ceilometer.compute.virt import inspector as virt_inspector
from ceilometer.openstack.common import log

@@ -67,7 +67,7 @@ class AgentManager(agent.AgentManager):
        return PollingTask(self)
    def setup_notifier_task(self):
-        """For nova notifier usage"""
+        """For nova notifier usage."""
        task = PollingTask(self)
        for pollster in self.pollster_manager.extensions:
            task.add(

View File

@@ -20,9 +20,9 @@
from oslo.config import cfg
+from ceilometer.compute import instance
from ceilometer import counter
from ceilometer import plugin
-from ceilometer.compute import instance
OPTS = [

View File

@@ -21,14 +21,12 @@ __all__ = [
    'initialize_manager',
]
+from nova import db as instance_info_source
from oslo.config import cfg
+from ceilometer.compute import manager as compute_manager
from ceilometer.openstack.common import log as logging
-from ceilometer.compute.manager import AgentManager
-from nova import db as instance_info_source
# This module runs inside the nova compute
# agent, which only configures the "nova" logger.
# We use a fake logger name in that namespace

@@ -44,7 +42,7 @@ def initialize_manager(agent_manager=None):
    if not agent_manager:
        cfg.CONF(args=[], project='ceilometer', prog='ceilometer-agent')
        # Instantiate a manager
-        _agent_manager = AgentManager()
+        _agent_manager = compute_manager.AgentManager()
    else:
        _agent_manager = agent_manager
    _agent_manager.setup_notifier_task()

View File

@@ -21,9 +21,9 @@
import copy
import datetime
-from ceilometer import counter
-from ceilometer.compute import plugin
from ceilometer.compute import instance as compute_instance
+from ceilometer.compute import plugin
+from ceilometer import counter
from ceilometer.openstack.common import log
from ceilometer.openstack.common import timeutils

@@ -31,7 +31,7 @@ LOG = log.getLogger(__name__)
def _instance_name(instance):
-    """Shortcut to get instance name"""
+    """Shortcut to get instance name."""
    return getattr(instance, 'OS-EXT-SRV-ATTR:instance_name', None)

View File

@@ -16,7 +16,7 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
-"""Inspector abstraction for read-only access to hypervisors"""
+"""Inspector abstraction for read-only access to hypervisors."""
import collections

View File

@@ -15,7 +15,7 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
-"""Implementation of Inspector abstraction for libvirt"""
+"""Implementation of Inspector abstraction for libvirt."""
from lxml import etree
from oslo.config import cfg

View File

@@ -26,6 +26,7 @@ import collections
from oslo.config import cfg
OPTS = [
    cfg.StrOpt('counter_source',
               default='openstack',

View File

@@ -16,14 +16,13 @@
import datetime
+from keystoneclient import exceptions
import requests
-from ceilometer import counter
from ceilometer.central import plugin
+from ceilometer import counter
from ceilometer.openstack.common import log
-from keystoneclient import exceptions
LOG = log.getLogger(__name__)

View File

@@ -21,12 +21,11 @@
from __future__ import absolute_import
import itertools
import glanceclient
-from ceilometer import plugin
from ceilometer import counter
from ceilometer.openstack.common import timeutils
+from ceilometer import plugin
class _Base(plugin.PollsterBase):

View File

@@ -139,7 +139,7 @@ class ImageSize(ImageCRUDBase):
class ImageDownload(ImageBase):
-    """ Emit image_download counter when an image is downloaded. """
+    """Emit image_download counter when an image is downloaded."""
    metadata_keys = ['destination_ip', 'owner_id']

@@ -167,7 +167,7 @@ class ImageDownload(ImageBase):
class ImageServe(ImageBase):
-    """ Emit image_serve counter when an image is served out. """
+    """Emit image_serve counter when an image is served out."""
    metadata_keys = ['destination_ip', 'receiver_user_id',
                     'receiver_tenant_id']

View File

@@ -19,9 +19,9 @@
from ceilometer.openstack.common import log
from ceilometer.openstack.common import timeutils
+from ceilometer.central import plugin
from ceilometer import counter
from ceilometer import nova_client
-from ceilometer.central import plugin
class FloatingIPPollster(plugin.CentralPollster):

View File

@@ -23,9 +23,8 @@
from oslo.config import cfg
from ceilometer import counter
+from ceilometer.openstack.common import log
from ceilometer import plugin
-from ceilometer.openstack.common import log as logging
OPTS = [
    cfg.StrOpt('quantum_control_exchange',

@@ -33,10 +32,9 @@ OPTS = [
               help="Exchange name for Quantum notifications"),
]
cfg.CONF.register_opts(OPTS)
-LOG = logging.getLogger(__name__)
+LOG = log.getLogger(__name__)
class NetworkNotificationBase(plugin.NotificationBase):

View File

@@ -14,7 +14,7 @@
# License for the specific language governing permissions and limitations
# under the License.
-from functools import wraps
+import functools
from novaclient.v1_1 import client as nova_client
from oslo.config import cfg

@@ -27,7 +27,7 @@ LOG = log.getLogger(__name__)
def logged(func):
-    @wraps(func)
+    @functools.wraps(func)
    def with_logging(*args, **kwargs):
        try:
            return func(*args, **kwargs)

@@ -41,7 +41,7 @@ def logged(func):
class Client(object):
    def __init__(self):
-        """Returns nova client"""
+        """Returns a nova Client object."""
        conf = cfg.CONF
        tenant = conf.os_tenant_id and conf.os_tenant_id or conf.os_tenant_name
        self.nova_client = nova_client.Client(username=cfg.CONF.os_username,

@@ -62,7 +62,7 @@ class Client(object):
    @logged
    def instance_get_all_by_host(self, hostname):
-        """Returns list of instances on particular host"""
+        """Returns list of instances on particular host."""
        search_opts = {'host': hostname, 'all_tenants': True}
        return self._with_flavor(self.nova_client.servers.list(
            detailed=True,

@@ -70,5 +70,5 @@ class Client(object):
    @logged
    def floating_ip_get_all(self):
-        """Returns all floating ips"""
+        """Returns all floating ips."""
        return self.nova_client.floating_ips.list()
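
The decorator hunk above is the standard functools pattern. A minimal, self-contained sketch of the same idea; the logger setup and the sample function are illustrative, not project code:

import functools
import logging

LOG = logging.getLogger(__name__)


def logged(func):
    @functools.wraps(func)  # keep the wrapped function's name and docstring
    def with_logging(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception:
            LOG.exception('call to %s failed', func.__name__)
            raise
    return with_logging


@logged
def floating_ip_get_all():
    """Returns all floating ips."""
    return []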

View File

@@ -25,10 +25,11 @@ import abc
from oslo.config import cfg
from swiftclient import client as swift
-from ceilometer import plugin
from ceilometer import counter
-from ceilometer.openstack.common import timeutils
from ceilometer.openstack.common import log
+from ceilometer.openstack.common import timeutils
+from ceilometer import plugin
LOG = log.getLogger(__name__)

View File

@@ -22,6 +22,23 @@ from __future__ import absolute_import
from oslo.config import cfg
from stevedore import dispatch
+from swift.common.utils import split_path
+import webob
+REQUEST = webob
+try:
+    # Swift >= 1.7.5
+    import swift.common.swob
+    REQUEST = swift.common.swob
+except ImportError:
+    pass
+try:
+    # Swift > 1.7.5 ... module exists but doesn't contain class.
+    from swift.common.utils import InputProxy
+except ImportError:
+    # Swift <= 1.7.5 ... module exists and has class.
+    from swift.common.middleware.proxy_logging import InputProxy
from ceilometer import counter
from ceilometer.openstack.common import context

@@ -29,21 +46,6 @@ from ceilometer.openstack.common import timeutils
from ceilometer import pipeline
from ceilometer import service
-from swift.common.utils import split_path
-try:
-    # Swift >= 1.7.5
-    from swift.common.swob import Request
-except ImportError:
-    from webob import Request
-try:
-    # Swift > 1.7.5
-    from swift.common.utils import InputProxy
-except ImportError:
-    # Swift <= 1.7.5
-    from swift.common.middleware.proxy_logging import InputProxy
class CeilometerMiddleware(object):
    """

@@ -92,7 +94,7 @@ class CeilometerMiddleware(object):
        return iter_response(iterable)
    def publish_counter(self, env, bytes_received, bytes_sent):
-        req = Request(env)
+        req = REQUEST.Request(env)
        version, account, container, obj = split_path(req.path, 1, 4, True)
        now = timeutils.utcnow().isoformat()
        with pipeline.PublishContext(
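
The relocated compatibility block picks a request implementation once at import time, and the handler then goes through the module alias. A stripped-down sketch of that pattern, assuming only webob is installed as the fallback:

import webob

REQUEST = webob
try:
    # Prefer Swift's own request implementation when it is installed.
    import swift.common.swob
    REQUEST = swift.common.swob
except ImportError:
    pass


def request_path(env):
    # Both modules expose a Request class with the same constructor.
    return REQUEST.Request(env).path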

View File

@@ -307,7 +307,7 @@ class PipelineManager(object):
    """
    def __init__(self, cfg, publisher_manager):
-        """Create the pipeline manager"""
+        """Create the pipeline manager."""
        self._setup_pipelines(cfg, publisher_manager)
    def _setup_pipelines(self, cfg, publisher_manager):

View File

@@ -19,10 +19,11 @@
"""
import abc
-from collections import namedtuple
+import collections
-ExchangeTopics = namedtuple('ExchangeTopics', ['exchange', 'topics'])
+ExchangeTopics = collections.namedtuple('ExchangeTopics',
+                                        ['exchange', 'topics'])
class PluginBase(object):

View File

@@ -15,14 +15,14 @@
# License for the specific language governing permissions and limitations
# under the License.
-"""Policy Engine For Ceilometer"""
+"""Policy Engine For Ceilometer."""
import os
from oslo.config import cfg
-from ceilometer import utils
from ceilometer.openstack.common import policy
+from ceilometer import utils
OPTS = [

View File

@@ -22,9 +22,9 @@ import socket
from oslo.config import cfg
-from ceilometer.openstack.common import rpc
from ceilometer.openstack.common import context
from ceilometer.openstack.common import log
+from ceilometer.openstack.common import rpc
from ceilometer.openstack.common.rpc import service as rpc_service

View File

@@ -18,8 +18,9 @@
"""Storage backend management
"""
-from datetime import datetime
-from urlparse import urlparse
+import datetime
+import urlparse
from oslo.config import cfg
from stevedore import driver

@@ -27,6 +28,7 @@ from stevedore import driver
from ceilometer.openstack.common import log
from ceilometer.openstack.common import timeutils
LOG = log.getLogger(__name__)
STORAGE_ENGINE_NAMESPACE = 'ceilometer.storage'

@@ -43,16 +45,14 @@ cfg.CONF.register_opts(STORAGE_OPTS)
def register_opts(conf):
-    """Register any options for the storage system.
-    """
+    """Register any options for the storage system."""
    p = get_engine(conf)
    p.register_opts(conf)
def get_engine(conf):
-    """Load the configured engine and return an instance.
-    """
-    engine_name = urlparse(conf.database_connection).scheme
+    """Load the configured engine and return an instance."""
+    engine_name = urlparse.urlparse(conf.database_connection).scheme
    LOG.debug('looking for %r driver in %r',
              engine_name, STORAGE_ENGINE_NAMESPACE)
    mgr = driver.DriverManager(STORAGE_ENGINE_NAMESPACE,

@@ -62,8 +62,7 @@ def get_engine(conf):
def get_connection(conf):
-    """Return an open connection to the database.
-    """
+    """Return an open connection to the database."""
    engine = get_engine(conf)
    engine.register_opts(conf)
    db = engine.get_connection(conf)

@@ -94,9 +93,9 @@ class EventFilter(object):
        self.metaquery = metaquery
    def _sanitize_timestamp(self, timestamp):
-        """Return a naive utc datetime object"""
+        """Return a naive utc datetime object."""
        if not timestamp:
            return timestamp
-        if not isinstance(timestamp, datetime):
+        if not isinstance(timestamp, datetime.datetime):
            timestamp = timeutils.parse_isotime(timestamp)
        return timeutils.normalize_time(timestamp)
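
get_engine() now uses the module-qualified urlparse call. A tiny Python 2 example of the same scheme lookup with an illustrative connection URL:

import urlparse  # Python 2 stdlib; urllib.parse provides the same call on Python 3

url = 'mongodb://localhost:27017/ceilometer'  # illustrative database_connection value
engine_name = urlparse.urlparse(url).scheme   # -> 'mongodb'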

View File

@@ -26,31 +26,27 @@ LOG = log.getLogger(__name__)
class StorageEngine(object):
-    """Base class for storage engines.
-    """
+    """Base class for storage engines."""
    __metaclass__ = abc.ABCMeta
    @abc.abstractmethod
    def register_opts(self, conf):
-        """Register any configuration options used by this engine.
-        """
+        """Register any configuration options used by this engine."""
    @abc.abstractmethod
    def get_connection(self, conf):
-        """Return a Connection instance based on the configuration settings.
-        """
+        """Return a Connection instance based on the configuration settings."""
class Connection(object):
-    """Base class for storage system connections.
-    """
+    """Base class for storage system connections."""
    __metaclass__ = abc.ABCMeta
    @abc.abstractmethod
    def __init__(self, conf):
-        """Constructor"""
+        """Constructor."""
    @abc.abstractmethod
    def upgrade(self, version=None):

View File

@@ -20,15 +20,15 @@
import copy
import datetime
+import re
+import urlparse
+import bson.code
+import pymongo
from ceilometer.openstack.common import log
from ceilometer.storage import base
-import bson.code
-import pymongo
-import re
-from urlparse import urlparse
LOG = log.getLogger(__name__)

@@ -283,7 +283,7 @@ class Connection(base.Connection):
    def _parse_connection_url(self, url):
        opts = {}
-        result = urlparse(url)
+        result = urlparse.urlparse(url)
        opts['dbtype'] = result.scheme
        opts['dbname'] = result.path.replace('/', '')
        netloc_match = re.match(r'(?:(\w+:\w+)@)?(.*)', result.netloc)

View File

@@ -13,29 +13,30 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
-"""SQLAlchemy storage backend
-"""
+"""SQLAlchemy storage backend."""
from __future__ import absolute_import
import copy
import datetime
import math
from sqlalchemy import func
from ceilometer.openstack.common import log
from ceilometer.openstack.common import timeutils
from ceilometer.storage import base
+from ceilometer.storage.sqlalchemy import migration
from ceilometer.storage.sqlalchemy.models import Meter, Project, Resource
from ceilometer.storage.sqlalchemy.models import Source, User, Base
import ceilometer.storage.sqlalchemy.session as sqlalchemy_session
-from ceilometer.storage.sqlalchemy import migration
LOG = log.getLogger(__name__)
class SQLAlchemyStorage(base.StorageEngine):
-    """Put the data into a SQLAlchemy database
+    """Put the data into a SQLAlchemy database.
    Tables::

@@ -82,8 +83,7 @@ class SQLAlchemyStorage(base.StorageEngine):
    OPTIONS = []
    def register_opts(self, conf):
-        """Register any configuration options used by this engine.
-        """
+        """Register any configuration options used by this engine."""
        conf.register_opts(self.OPTIONS)
    @staticmethod

@@ -127,8 +127,7 @@ def make_query_from_filter(query, event_filter, require_meter=True):
class Connection(base.Connection):
-    """SqlAlchemy connection.
-    """
+    """SqlAlchemy connection."""
    def __init__(self, conf):
        LOG.info('connecting to %s', conf.database_connection)

@@ -144,8 +143,7 @@ class Connection(base.Connection):
            engine.execute(table.delete())
    def _get_connection(self, conf):
-        """Return a connection to the database.
-        """
+        """Return a connection to the database."""
        return sqlalchemy_session.get_session()
    def record_metering_data(self, data):

@@ -353,7 +351,7 @@ class Connection(base.Connection):
            yield e
    def _make_volume_query(self, event_filter, counter_volume_func):
-        """Returns complex Meter counter_volume query for max and sum"""
+        """Returns complex Meter counter_volume query for max and sum."""
        subq = model_query(Meter.id, session=self.session)
        subq = make_query_from_filter(subq, event_filter, require_meter=False)
        subq = subq.subquery()

@@ -464,7 +462,7 @@ class Connection(base.Connection):
def model_query(*args, **kwargs):
-    """Query helper
+    """Query helper.
    :param session: if present, the session to use
    """

View File

@@ -17,16 +17,16 @@
import distutils.version as dist_version
import os
-from ceilometer.storage.sqlalchemy.session import get_engine
-from ceilometer.openstack.common import log as logging
import migrate
from migrate.versioning import util as migrate_util
import sqlalchemy
+from ceilometer.openstack.common import log
+from ceilometer.storage.sqlalchemy import session
INIT_VERSION = 1
-LOG = logging.getLogger(__name__)
+LOG = log.getLogger(__name__)
@migrate_util.decorator

@@ -78,20 +78,20 @@ def db_sync(engine, version=None):
def db_version():
    repository = _find_migrate_repo()
    try:
-        return versioning_api.db_version(get_engine(), repository)
+        return versioning_api.db_version(session.get_engine(), repository)
    except versioning_exceptions.DatabaseNotControlledError:
        meta = sqlalchemy.MetaData()
-        engine = get_engine()
+        engine = session.get_engine()
        meta.reflect(bind=engine)
        tables = meta.tables
        if len(tables) == 0:
            db_version_control(0)
-            return versioning_api.db_version(get_engine(), repository)
+            return versioning_api.db_version(session.get_engine(), repository)
def db_version_control(version=None):
    repository = _find_migrate_repo()
-    versioning_api.version_control(get_engine(), repository, version)
+    versioning_api.version_control(session.get_engine(), repository, version)
    return version

View File

@@ -15,16 +15,15 @@
# under the License.
"""
-SQLAlchemy models for nova data.
+SQLAlchemy models for Ceilometer data.
"""
import json
-from urlparse import urlparse
+import urlparse
from oslo.config import cfg
-from sqlalchemy import Column, Integer, String, Table
+from sqlalchemy import Column, Integer, String, Table, ForeignKey, DateTime
from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy import ForeignKey, DateTime
from sqlalchemy.orm import relationship
from sqlalchemy.types import TypeDecorator, VARCHAR

@@ -40,7 +39,7 @@ cfg.CONF.register_opts(sql_opts)
def table_args():
-    engine_name = urlparse(cfg.CONF.database_connection).scheme
+    engine_name = urlparse.urlparse(cfg.CONF.database_connection).scheme
    if engine_name == 'mysql':
        return {'mysql_engine': cfg.CONF.mysql_engine,
                'mysql_charset': "utf8"}

@@ -97,7 +96,7 @@ class Source(Base):
class Meter(Base):
-    """Metering data"""
+    """Metering data."""
    __tablename__ = 'meter'
    id = Column(Integer, primary_key=True)

View File

@@ -23,13 +23,14 @@ import time
from oslo.config import cfg
import sqlalchemy
-from sqlalchemy.exc import DisconnectionError, OperationalError
+import sqlalchemy.exc as exc
import sqlalchemy.orm
-from sqlalchemy.pool import NullPool, StaticPool
+import sqlalchemy.pool as pool
-import ceilometer.openstack.common.log as logging
+from ceilometer.openstack.common import log
-LOG = logging.getLogger(__name__)
+LOG = log.getLogger(__name__)
_MAKER = None
_ENGINE = None

@@ -73,7 +74,7 @@ def get_session(autocommit=True, expire_on_commit=False, autoflush=True):
def synchronous_switch_listener(dbapi_conn, connection_rec):
-    """Switch sqlite connections to non-synchronous mode"""
+    """Switch sqlite connections to non-synchronous mode."""
    dbapi_conn.execute("PRAGMA synchronous = OFF")

@@ -99,7 +100,7 @@ def ping_listener(dbapi_conn, connection_rec, connection_proxy):
    except dbapi_conn.OperationalError, ex:
        if ex.args[0] in (2006, 2013, 2014, 2045, 2055):
            LOG.warn('Got mysql server has gone away: %s', ex)
-            raise DisconnectionError("Database server went away")
+            raise exc.DisconnectionError("Database server went away")
        else:
            raise

@@ -135,10 +136,10 @@ def get_engine():
            engine_args['echo'] = True
        if "sqlite" in connection_dict.drivername:
-            engine_args["poolclass"] = NullPool
+            engine_args["poolclass"] = pool.NullPool
            if cfg.CONF.database_connection == "sqlite://":
-                engine_args["poolclass"] = StaticPool
+                engine_args["poolclass"] = pool.StaticPool
                engine_args["connect_args"] = {'check_same_thread': False}
        _ENGINE = sqlalchemy.create_engine(cfg.CONF.database_connection,

@@ -160,7 +161,7 @@ def get_engine():
        try:
            _ENGINE.connect()
-        except OperationalError, e:
+        except exc.OperationalError, e:
            if not is_db_connection_error(e.args[0]):
                raise

@@ -176,7 +177,7 @@ def get_engine():
            try:
                _ENGINE.connect()
                break
-            except OperationalError, e:
+            except exc.OperationalError, e:
                if (remaining != 'infinite' and remaining == 0) \
                        or not is_db_connection_error(e.args[0]):
                    raise

View File

@@ -24,14 +24,14 @@ import urllib
import flask
from oslo.config import cfg
-from pecan import set_config
-from pecan.testing import load_test_app
+import pecan
+import pecan.testing
-from ceilometer import storage
from ceilometer.api.v1 import app as v1_app
from ceilometer.api.v1 import blueprint as v1_blueprint
-from ceilometer.tests import db as db_test_base
+from ceilometer import storage
from ceilometer.tests import base
+from ceilometer.tests import db as db_test_base
class TestBase(db_test_base.TestBase):

@@ -123,11 +123,11 @@ class FunctionalTest(db_test_base.TestBase):
            },
        }
-        return load_test_app(self.config)
+        return pecan.testing.load_test_app(self.config)
    def tearDown(self):
        super(FunctionalTest, self).tearDown()
-        set_config({}, overwrite=True)
+        pecan.set_config({}, overwrite=True)
    def get_json(self, path, expect_errors=False, headers=None,
                 q=[], **params):

View File

@@ -17,8 +17,8 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
-"""Base classes for API tests.
-"""
+"""Base classes for API tests."""
from ming import mim
from nose.plugins import skip

@@ -28,6 +28,10 @@ from ceilometer import storage
from ceilometer.tests import base as test_base
+class BaseException(Exception):
+    """A base exception for avoiding false positives."""
class TestBase(test_base.TestCase):
    # Default tests use test:// (MIM)

@@ -49,6 +53,6 @@ def require_map_reduce(conn):
    # skip these tests unless we aren't using mim.
    try:
        import spidermonkey
-    except:
+    except BaseException:
        if isinstance(conn.conn, mim.Connection):
            raise skip.SkipTest('requires spidermonkey')

View File

@@ -17,9 +17,9 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
-import textwrap
import os
import setuptools
+import textwrap
from ceilometer.openstack.common import setup as common_setup

View File

@@ -17,11 +17,11 @@
# under the License.
"""Test API against SQLAlchemy.
"""
-from . import compute_duration_by_resource as cdbr
-from . import list_events
-from . import list_meters
-from . import list_projects
-from . import list_users
+import compute_duration_by_resource as cdbr
+import list_events
+import list_meters
+import list_projects
+import list_users
class TestListEvents(list_events.TestListEvents):

View File

@@ -18,6 +18,7 @@
# under the License.
import mock
from ceilometer.central import manager
from ceilometer.objectstore import swift
from ceilometer.tests import base

View File

@ -16,11 +16,12 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
"""nova HACKING file compliance testing """Ceilometer HACKING file compliance testing
Built on top of pep8.py Built on top of pep8.py
""" """
import imp
import inspect import inspect
import logging import logging
import os import os
@ -28,7 +29,7 @@ import re
import subprocess import subprocess
import sys import sys
import tokenize import tokenize
import warnings import traceback
import pep8 import pep8
@ -43,8 +44,12 @@ logging.disable('LOG')
#N6xx calling methods #N6xx calling methods
#N7xx localization #N7xx localization
#N8xx git commit messages #N8xx git commit messages
#N9xx other
IMPORT_EXCEPTIONS = ['sqlalchemy', 'migrate', 'nova.db.sqlalchemy.session'] IMPORT_EXCEPTIONS = ['sqlalchemy', 'migrate',
'ceilometer.storage.sqlalchemy.session',
'ceilometer.storage.sqlalchemy.models']
# Paste is missing a __init__ in top level directory
START_DOCSTRING_TRIPLE = ['u"""', 'r"""', '"""', "u'''", "r'''", "'''"] START_DOCSTRING_TRIPLE = ['u"""', 'r"""', '"""', "u'''", "r'''", "'''"]
END_DOCSTRING_TRIPLE = ['"""', "'''"] END_DOCSTRING_TRIPLE = ['"""', "'''"]
VERBOSE_MISSING_IMPORT = os.getenv('HACKING_VERBOSE_MISSING_IMPORT', 'False') VERBOSE_MISSING_IMPORT = os.getenv('HACKING_VERBOSE_MISSING_IMPORT', 'False')
@ -149,111 +154,124 @@ def nova_except_format_assert(logical_line):
yield 1, "N202: assertRaises Exception too broad" yield 1, "N202: assertRaises Exception too broad"
def nova_one_import_per_line(logical_line): modules_cache = dict((mod, True) for mod in tuple(sys.modules.keys())
r"""Check for import format. + sys.builtin_module_names)
RE_RELATIVE_IMPORT = re.compile('^from\s*[.]')
def nova_import_rules(logical_line):
r"""Check for imports.
nova HACKING guide recommends one import per line: nova HACKING guide recommends one import per line:
Do not import more than one module per line Do not import more than one module per line
Examples: Examples:
Okay: from nova.rpc.common import RemoteError Okay: from nova.compute import api
N301: from nova.rpc.common import RemoteError, LOG N301: from nova.compute import api, utils
"""
pos = logical_line.find(',')
parts = logical_line.split()
if (pos > -1 and (parts[0] == "import" or
parts[0] == "from" and parts[2] == "import") and
not is_import_exception(parts[1])):
yield pos, "N301: one import per line"
def nova_import_module_only(logical_line): Imports should usually be on separate lines.
r"""Check for import module only.
nova HACKING guide recommends importing only modules: nova HACKING guide recommends importing only modules:
Do not import objects, only modules Do not import objects, only modules
Examples:
Okay: from os import path Okay: from os import path
Okay: from os import path as p
Okay: from os import (path as p)
Okay: import os.path Okay: import os.path
Okay: from nova.compute import rpcapi
N302: from os.path import dirname as dirname2 N302: from os.path import dirname as dirname2
N303 from os.path import * N302: from os.path import (dirname as dirname2)
N304 import flakes N303: from os.path import *
N304: from .compute import rpcapi
""" """
# N302 import only modules #NOTE(afazekas): An old style relative import example will not be able to
# N303 Invalid Import # pass the doctest, since the relativity depends on the file's locality
# N304 Relative Import
# TODO(sdague) actually get these tests working def is_module_for_sure(mod, search_path=sys.path):
# TODO(jogo) simplify this code mod = mod.replace('(', '') # Ignore parentheses
def import_module_check(mod, parent=None, added=False):
"""Checks for relative, modules and invalid imports.
-        If can't find module on first try, recursively check for relative
-        imports.
-        When parsing 'from x import y,' x is the parent.
-        """
-        current_path = os.path.dirname(pep8.current_file)
-        try:
-            with warnings.catch_warnings():
-                warnings.simplefilter('ignore', DeprecationWarning)
-                valid = True
-                if parent:
-                    parent_mod = __import__(parent, globals(), locals(),
-                                            [mod], -1)
-                    valid = inspect.ismodule(getattr(parent_mod, mod))
-                else:
-                    __import__(mod, globals(), locals(), [], -1)
-                    valid = inspect.ismodule(sys.modules[mod])
-                if not valid:
-                    if added:
-                        sys.path.pop()
-                        added = False
-                        return logical_line.find(mod), ("N304: No "
-                            "relative imports. '%s' is a relative import"
-                            % logical_line)
-                    return logical_line.find(mod), ("N302: import only "
-                        "modules. '%s' does not import a module"
-                        % logical_line)
-        except (ImportError, NameError) as exc:
-            if not added:
-                added = True
-                sys.path.append(current_path)
-                return import_module_check(mod, parent, added)
-            else:
-                name = logical_line.split()[1]
-                if name not in _missingImport:
-                    if VERBOSE_MISSING_IMPORT != 'False':
-                        print >> sys.stderr, ("ERROR: import '%s' in %s "
-                                              "failed: %s" %
-                                              (name, pep8.current_file, exc))
-                    _missingImport.add(name)
-                added = False
-                sys.path.pop()
-                return
-        except AttributeError:
-            # Invalid import
-            if "import *" in logical_line:
-                # TODO(jogo): handle "from x import *, by checking all
-                # "objects in x"
-                return
-            return logical_line.find(mod), ("N303: Invalid import, "
-                                            "%s" % mod)
+        try:
+            mod_name = mod
+            while '.' in mod_name:
+                pack_name, _sep, mod_name = mod.partition('.')
+                f, p, d = imp.find_module(pack_name, search_path)
+                search_path = [p]
+            imp.find_module(mod_name, search_path)
+        except ImportError:
+            try:
+                # NOTE(vish): handle namespace modules
+                module = __import__(mod)
+            except ImportError, exc:
+                # NOTE(vish): the import error might be due
+                # to a missing dependency
+                missing = str(exc).split()[-1]
+                if (missing != mod.split('.')[-1] or
+                        "cannot import" in str(exc)):
+                    _missingImport.add(missing)
+                    return True
+                return False
+            except Exception, exc:
+                # NOTE(jogo) don't stack trace if unexpected import error,
+                # log and continue.
+                traceback.print_exc()
+                return False
+        return True
+
+    def is_module(mod):
+        """Checks for non module imports."""
+        if mod in modules_cache:
+            return modules_cache[mod]
+        res = is_module_for_sure(mod)
+        modules_cache[mod] = res
+        return res
+
+    current_path = os.path.dirname(pep8.current_file)
+    current_mod = os.path.basename(pep8.current_file)
+    if current_mod[-3:] == ".py":
+        current_mod = current_mod[:-3]

     split_line = logical_line.split()
-    if (", " not in logical_line and
-            split_line[0] in ('import', 'from') and
-            (len(split_line) in (2, 4, 6)) and
-            split_line[1] != "__future__"):
-        if is_import_exception(split_line[1]):
-            return
-        if "from" == split_line[0]:
-            rval = import_module_check(split_line[3], parent=split_line[1])
-        else:
-            rval = import_module_check(split_line[1])
-        if rval is not None:
-            yield rval
+    split_line_len = len(split_line)
+    if (split_line[0] in ('import', 'from') and split_line_len > 1 and
+            not is_import_exception(split_line[1])):
+        pos = logical_line.find(',')
+        if pos != -1:
+            if split_line[0] == 'from':
+                yield pos, "N301: one import per line"
+            return  # ',' is not supported by the N302 checker yet
+        pos = logical_line.find('*')
+        if pos != -1:
+            yield pos, "N303: No wildcard (*) import."
+            return
+        if split_line_len in (2, 4, 6) and split_line[1] != "__future__":
+            if 'from' == split_line[0] and split_line_len > 3:
+                mod = '.'.join((split_line[1], split_line[3]))
+                if is_import_exception(mod):
+                    return
+                if RE_RELATIVE_IMPORT.search(logical_line):
+                    yield logical_line.find('.'), ("N304: No "
+                        "relative imports. '%s' is a relative import"
+                        % logical_line)
+                    return
+                if not is_module(mod):
+                    yield 0, ("N302: import only modules."
+                              "'%s' does not import a module" % logical_line)
+                return
+            #NOTE(afazekas): import searches first in the package
+            # The import keyword just imports modules
+            # The guestfs module now imports guestfs
+            mod = split_line[1]
+            if (current_mod != mod and
+                    not is_module(mod) and
+                    is_module_for_sure(mod, [current_path])):
+                yield 0, ("N304: No relative imports."
+                          " '%s' is a relative import"
+                          % logical_line)

     #TODO(jogo): import template: N305
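For orientation, here is a small set of hypothetical import lines and the code each branch above is intended to assign. The module names are illustrative only and the samples are written as data rather than live imports so the snippet stays runnable; none of this is taken from the commit or its tests.

    # Sample logical lines and the check each one is expected to trip,
    # based on the N301/N302/N303/N304 branches above (illustrative only).
    samples = [
        ("import os", "Okay: imports a module"),
        ("import os, sys", "N301: one import per line"),
        ("from os.path import *", "N303: no wildcard (*) import"),
        ("from os.path import dirname", "N302: imports an object, not a module"),
        ("from .utils import helper", "N304: relative import"),
    ]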
@ -293,12 +311,26 @@ def nova_import_no_db_in_virt(logical_line, filename):
     """
     if "nova/virt" in filename and not filename.endswith("fake.py"):
         if logical_line.startswith("from nova import db"):
             yield (0, "N307: nova.db import not allowed in nova/virt/*")


-def in_docstring_position(previous_logical):
-    return (previous_logical.startswith("def ") or
-            previous_logical.startswith("class "))
+def is_docstring(physical_line, previous_logical):
+    """Return True if found docstring
+    'A docstring is a string literal that occurs as the first statement in a
+    module, function, class,'
+    http://www.python.org/dev/peps/pep-0257/#what-is-a-docstring
+    """
+    line = physical_line.lstrip()
+    start = max([line.find(i) for i in START_DOCSTRING_TRIPLE])
+    end = max([line[-4:-1] == i for i in END_DOCSTRING_TRIPLE])
+    if (previous_logical.startswith("def ") or
+            previous_logical.startswith("class ")):
+        if start is 0:
+            return True
+        else:
+            # Handle multi line comments
+            return end and start in (-1, len(line) - 4)


 def nova_docstring_start_space(physical_line, previous_logical):
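As a rough illustration of what the new is_docstring() predicate accepts (hypothetical snippets, not part of the commit): the triple quote has to open the physical line right after a def or class header, so a typical assignment on that line is not counted as a docstring.

    def documented():
        """Counted: the triple quote starts the line right after the def."""
        return True


    def not_documented():
        text = """Not counted: the line after the def is an assignment."""
        return text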
@ -308,6 +340,8 @@ def nova_docstring_start_space(physical_line, previous_logical):
     Docstring should not start with space
     Okay: def foo():\n '''This is good.'''
+    Okay: def foo():\n a = ''' This is not a docstring.'''
+    Okay: def foo():\n pass\n ''' This is not.'''
     N401: def foo():\n ''' This is not.'''
     """
     # short circuit so that we don't fail on our own fail test
@ -318,30 +352,32 @@ def nova_docstring_start_space(physical_line, previous_logical):
     # it's important that we determine this is actually a docstring,
     # and not a doc block used somewhere after the first line of a
     # function def
-    if in_docstring_position(previous_logical):
+    if is_docstring(physical_line, previous_logical):
         pos = max([physical_line.find(i) for i in START_DOCSTRING_TRIPLE])
-        if pos != -1 and len(physical_line) > pos + 4:
-            if physical_line[pos + 3] == ' ':
-                return (pos, "N401: docstring should not start with"
-                        " a space")
+        if physical_line[pos + 3] == ' ':
+            return (pos, "N401: docstring should not start with"
+                    " a space")
-def nova_docstring_one_line(physical_line):
+def nova_docstring_one_line(physical_line, previous_logical):
     r"""Check one line docstring end.
     nova HACKING guide recommendation for one line docstring:
     A one line docstring looks like this and ends in punctuation.
-    Okay: '''This is good.'''
-    Okay: '''This is good too!'''
-    Okay: '''How about this?'''
-    N402: '''This is not'''
-    N402: '''Bad punctuation,'''
+    Okay: def foo():\n '''This is good.'''
+    Okay: def foo():\n '''This is good too!'''
+    Okay: def foo():\n '''How about this?'''
+    Okay: def foo():\n a = '''This is not a docstring'''
+    Okay: def foo():\n pass\n '''This is not a docstring'''
+    Okay: class Foo:\n pass\n '''This is not a docstring'''
+    N402: def foo():\n '''This is not'''
+    N402: def foo():\n '''Bad punctuation,'''
+    N402: class Foo:\n '''Bad punctuation,'''
     """
     #TODO(jogo) make this apply to multi line docstrings as well
     line = physical_line.lstrip()
-    if line.startswith('"') or line.startswith("'"):
+    if is_docstring(physical_line, previous_logical):
         pos = max([line.find(i) for i in START_DOCSTRING_TRIPLE])  # start
         end = max([line[-4:-1] == i for i in END_DOCSTRING_TRIPLE])  # end
@ -350,20 +386,27 @@ def nova_docstring_one_line(physical_line):
             return pos, "N402: one line docstring needs punctuation."
-def nova_docstring_multiline_end(physical_line, previous_logical):
+def nova_docstring_multiline_end(physical_line, previous_logical, tokens):
     r"""Check multi line docstring end.
     nova HACKING guide recommendation for docstring:
     Docstring should end on a new line
     Okay: '''foobar\nfoo\nbar\n'''
-    N403: def foo():\n'''foobar\nfoo\nbar\n d'''\n\n
+    Okay: def foo():\n '''foobar\nfoo\nbar\n'''
+    Okay: class Foo:\n '''foobar\nfoo\nbar\n'''
+    Okay: def foo():\n a = '''not\na\ndocstring'''
+    Okay: def foo():\n pass\n'''foobar\nfoo\nbar\n d'''
+    N403: def foo():\n '''foobar\nfoo\nbar\ndocstring'''
+    N403: class Foo:\n '''foobar\nfoo\nbar\ndocstring'''\n\n
     """
-    if in_docstring_position(previous_logical):
+    # if find OP tokens, not a docstring
+    ops = [t for t, _, _, _, _ in tokens if t == tokenize.OP]
+    if (is_docstring(physical_line, previous_logical) and len(tokens) > 0 and
+            len(ops) == 0):
         pos = max(physical_line.find(i) for i in END_DOCSTRING_TRIPLE)
-        if pos != -1 and len(physical_line) == pos + 4:
-            if physical_line.strip() not in START_DOCSTRING_TRIPLE:
-                return (pos, "N403: multi line docstring end on new line")
+        if physical_line.strip() not in START_DOCSTRING_TRIPLE:
+            return (pos, "N403: multi line docstring end on new line")


 def nova_docstring_multiline_start(physical_line, previous_logical, tokens):
@ -373,9 +416,10 @@ def nova_docstring_multiline_start(physical_line, previous_logical, tokens):
     Docstring should start with A multi line docstring has a one-line summary
     Okay: '''foobar\nfoo\nbar\n'''
-    N404: def foo():\n'''\nfoo\nbar\n''' \n\n
+    Okay: def foo():\n a = '''\nnot\na docstring\n'''
+    N404: def foo():\n'''\nfoo\nbar\n'''\n\n
     """
-    if in_docstring_position(previous_logical):
+    if is_docstring(physical_line, previous_logical):
         pos = max([physical_line.find(i) for i in START_DOCSTRING_TRIPLE])
         # start of docstring when len(tokens)==0
         if len(tokens) == 0 and pos != -1 and len(physical_line) == pos + 4:
@ -385,7 +429,7 @@ def nova_docstring_multiline_start(physical_line, previous_logical, tokens):
 def nova_no_cr(physical_line):
-    r"""Check that we only use newlines not cariage returns.
+    r"""Check that we only use newlines not carriage returns.
     Okay: import os\nimport sys
     # pep8 doesn't yet replace \r in strings, will work on an
@ -493,6 +537,37 @@ def nova_localization_strings(logical_line, tokens):
     #TODO(jogo) Dict and list objects


+def nova_is_not(logical_line):
+    r"""Check localization in line.
+
+    Okay: if x is not y
+    N901: if not X is Y
+    N901: if not X.B is Y
+    """
+    split_line = logical_line.split()
+    if (len(split_line) == 5 and split_line[0] == 'if' and
+            split_line[1] == 'not' and split_line[3] == 'is'):
+        yield (logical_line.find('not'), "N901: Use the 'is not' "
+               "operator for when testing for unequal identities")
+
+
+def nova_not_in(logical_line):
+    r"""Check localization in line.
+
+    Okay: if x not in y
+    Okay: if not (X in Y or X is Z)
+    Okay: if not (X in Y)
+    N902: if not X in Y
+    N902: if not X.B in Y
+    """
+    split_line = logical_line.split()
+    if (len(split_line) == 5 and split_line[0] == 'if' and
+            split_line[1] == 'not' and split_line[3] == 'in' and not
+            split_line[2].startswith('(')):
+        yield (logical_line.find('not'), "N902: Use the 'not in' "
+               "operator for collection membership evaluation")


 current_file = ""
@ -513,7 +588,7 @@ def add_nova():
         if not inspect.isfunction(function):
             continue
         args = inspect.getargspec(function)[0]
-        if args and name.startswith("nova"):
+        if args and name.startswith("ceilometer"):
             exec("pep8.%s = %s" % (name, name))
@ -523,7 +598,7 @@ def once_git_check_commit_title():
     nova HACKING recommends not referencing a bug or blueprint in first line,
     it should provide an accurate description of the change
     N801
-    N802 Title limited to 50 chars
+    N802 Title limited to 72 chars
     """
     #Get title of most recent commit
@ -548,6 +623,8 @@ def once_git_check_commit_title():
                "description of the change, not just a reference to a bug "
                "or blueprint" % title.strip())
         error = True
+    # HACKING.rst recommends commit titles 50 chars or less, but enforces
+    # a 72 character limit
     if len(title.decode('utf-8')) > 72:
         print ("N802: git commit title ('%s') should be under 50 chars"
                % title.strip())

tox.ini (11 lines changed)
View File

@ -1,5 +1,5 @@
 [tox]
-envlist = py26,py27,py26-folsom,py27-folsom,pep8
+envlist = py26,py27,py26-folsom,py27-folsom,pep8,hacking

 [testenv]
 deps = -r{toxinidir}/tools/test-requires
@ -22,18 +22,13 @@ setenv=CEILOMETER_TEST_LIVE=1
 commands = nosetests --no-path-adjustment --with-coverage --cover-erase --cover-package=ceilometer --cover-inclusive []

 [testenv:pep8]
-deps = -r{toxinidir}/tools/test-requires
-       -r{toxinidir}/tools/pip-requires
-       pep8==1.3.3
+deps = pep8==1.3.3
 commands =
     pep8 --repeat --ignore=E125 --show-source ceilometer setup.py bin/ceilometer-agent-central bin/ceilometer-agent-compute bin/ceilometer-collector bin/ceilometer-api tests

 [testenv:hacking]
-deps = -r{toxinidir}/tools/test-requires
-       -r{toxinidir}/tools/pip-requires
-       pep8==1.3.3
+deps = pep8==1.3.3
 commands =
+    python tools/hacking.py --doctest
     python tools/hacking.py --ignore=E12,E711,E721,E712,N403,N404 --show-source \
         --exclude=.venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg .
     python tools/hacking.py --ignore=E12,E711,E721,E712,N403,N404 --show-source \