Make HACKING compliant
Make all the source and tests HACKING compliant and enable tox -e hacking by default. Relative directory checks are not enabled (yet).

Change-Id: I8803f67c49b4d16caebe76ae690092ae5c9a6dd3
parent b345cef2f3
commit 6a7633c62a
@@ -90,12 +90,12 @@ pipeline_manager = pipeline.setup_pipeline(publish_manager)

     with pipeline_manager.publisher(context.get_admin_context(),
                                     cfg.CONF.counter_source) as p:
         p([counter.Counter(name=cfg.CONF.counter_name,
                            type=cfg.CONF.counter_type,
                            unit=cfg.CONF.counter_unit,
                            volume=cfg.CONF.counter_volume,
                            user_id=cfg.CONF.counter_user,
                            project_id=cfg.CONF.counter_project,
                            resource_id=cfg.CONF.counter_resource,
                            timestamp=cfg.CONF.counter_timestamp,
                            resource_metadata=cfg.CONF.counter_metadata
                            and eval(cfg.CONF.counter_metadata))])
@@ -21,6 +21,7 @@ import itertools

 from oslo.config import cfg
 from stevedore import dispatch

 from ceilometer.openstack.common import context
 from ceilometer.openstack.common import log
 from ceilometer import pipeline

@@ -65,7 +66,7 @@ class AgentManager(object):

     @abc.abstractmethod
     def create_polling_task(self):
-        """Create an empty polling task"""
+        """Create an empty polling task."""

     def setup_polling_tasks(self):
         polling_tasks = {}
@@ -15,15 +15,17 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations
 # under the License.
-"""Set up the ACL to acces the API server."""
-
-import keystoneclient.middleware.auth_token as auth_token
+"""Access Control Lists (ACL's) control access the API server."""
+
+from keystoneclient.middleware import auth_token
 from oslo.config import cfg
 from pecan import hooks
 from webob import exc

 from ceilometer import policy


 OPT_GROUP_NAME = 'keystone_authtoken'
@@ -17,11 +17,10 @@
 # under the License.

 from oslo.config import cfg
-from pecan import make_app
-from pecan import configuration
+import pecan

-from ceilometer.api import config as api_config
 from ceilometer.api import acl
+from ceilometer.api import config as api_config
 from ceilometer.api import hooks
 from ceilometer.api import middleware

@@ -29,7 +28,7 @@ from ceilometer.api import middleware
 def get_pecan_config():
     # Set up the pecan configuration
     filename = api_config.__file__.replace('.pyc', '.py')
-    return configuration.conf_from_file(filename)
+    return pecan.configuration.conf_from_file(filename)


 def setup_app(pecan_config=None, extra_hooks=None):

@@ -45,9 +44,9 @@ def setup_app(pecan_config=None, extra_hooks=None):
     if pecan_config.app.enable_acl:
         app_hooks.append(acl.AdminAuthHook())

-    configuration.set_config(dict(pecan_config), overwrite=True)
+    pecan.configuration.set_config(dict(pecan_config), overwrite=True)

-    app = make_app(
+    app = pecan.make_app(
         pecan_config.app.root,
         static_root=pecan_config.app.static_root,
         template_path=pecan_config.app.template_path,
@@ -16,16 +16,16 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-from pecan import expose
+import pecan

-from . import v2
+from ceilometer.api.controllers import v2


 class RootController(object):

     v2 = v2.V2Controller()

-    @expose(generic=True, template='index.html')
+    @pecan.expose(generic=True, template='index.html')
     def index(self):
         # FIXME: Return version information
         return dict()
@@ -32,25 +32,24 @@
 import datetime
 import inspect
 import pecan
-from pecan import request
-from pecan.rest import RestController
+from pecan import rest

 import wsme
 import wsmeext.pecan as wsme_pecan
-from wsme.types import Base, text, Enum
+from wsme import types as wtypes

-from ceilometer.openstack.common import log as logging
+from ceilometer.openstack.common import log
 from ceilometer.openstack.common import timeutils
 from ceilometer import storage


-LOG = logging.getLogger(__name__)
+LOG = log.getLogger(__name__)


-operation_kind = Enum(str, 'lt', 'le', 'eq', 'ne', 'ge', 'gt')
+operation_kind = wtypes.Enum(str, 'lt', 'le', 'eq', 'ne', 'ge', 'gt')


-class Query(Base):
+class Query(wtypes.Base):
     """Query filter.
     """
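The Base, text and Enum rewrites above apply the same module-only rule to the WSME types: everything is now referenced through the wtypes module. A small hypothetical sketch of a type declared in that style, mirroring the Sample class shown further down in this diff and assuming the wsme library used here is installed:

    from wsme import types as wtypes

    class ExampleSample(wtypes.Base):
        """A hypothetical WSME type written in the module-qualified style."""

        counter_name = wtypes.text
        "The name of the meter"

        counter_volume = float
        "The actual measured value"

        resource_metadata = {wtypes.text: wtypes.text}
        "Arbitrary metadata associated with the resource"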
@ -62,7 +61,7 @@ class Query(Base):
|
||||
def set_op(self, value):
|
||||
self._op = value
|
||||
|
||||
field = text
|
||||
field = wtypes.text
|
||||
"The name of the field to test"
|
||||
|
||||
#op = wsme.wsattr(operation_kind, default='eq')
|
||||
@ -70,7 +69,7 @@ class Query(Base):
|
||||
op = wsme.wsproperty(operation_kind, get_op, set_op)
|
||||
"The comparison operator. Defaults to 'eq'."
|
||||
|
||||
value = text
|
||||
value = wtypes.text
|
||||
"The value to compare against the stored data"
|
||||
|
||||
def __repr__(self):
|
||||
@ -200,44 +199,44 @@ def _flatten_metadata(metadata):
|
||||
if type(v) not in set([list, dict, set]))
|
||||
|
||||
|
||||
class Sample(Base):
|
||||
class Sample(wtypes.Base):
|
||||
"""A single measurement for a given meter and resource.
|
||||
"""
|
||||
|
||||
source = text
|
||||
source = wtypes.text
|
||||
"An identity source ID"
|
||||
|
||||
counter_name = text
|
||||
counter_name = wtypes.text
|
||||
"The name of the meter"
|
||||
# FIXME(dhellmann): Make this meter_name?
|
||||
|
||||
counter_type = text
|
||||
counter_type = wtypes.text
|
||||
"The type of the meter (see :ref:`measurements`)"
|
||||
# FIXME(dhellmann): Make this meter_type?
|
||||
|
||||
counter_unit = text
|
||||
counter_unit = wtypes.text
|
||||
"The unit of measure for the value in counter_volume"
|
||||
# FIXME(dhellmann): Make this meter_unit?
|
||||
|
||||
counter_volume = float
|
||||
"The actual measured value"
|
||||
|
||||
user_id = text
|
||||
user_id = wtypes.text
|
||||
"The ID of the user who last triggered an update to the resource"
|
||||
|
||||
project_id = text
|
||||
project_id = wtypes.text
|
||||
"The ID of the project or tenant that owns the resource"
|
||||
|
||||
resource_id = text
|
||||
resource_id = wtypes.text
|
||||
"The ID of the :class:`Resource` for which the measurements are taken"
|
||||
|
||||
timestamp = datetime.datetime
|
||||
"UTC date and time when the measurement was made"
|
||||
|
||||
resource_metadata = {text: text}
|
||||
resource_metadata = {wtypes.text: wtypes.text}
|
||||
"Arbitrary metadata associated with the resource"
|
||||
|
||||
message_id = text
|
||||
message_id = wtypes.text
|
||||
"A unique identifier for the sample"
|
||||
|
||||
def __init__(self, counter_volume=None, resource_metadata={}, **kwds):
|
||||
@ -265,7 +264,7 @@ class Sample(Base):
|
||||
)
|
||||
|
||||
|
||||
class Statistics(Base):
|
||||
class Statistics(wtypes.Base):
|
||||
"""Computed statistics for a query.
|
||||
"""
|
||||
|
||||
@ -353,7 +352,7 @@ class Statistics(Base):
|
||||
)
|
||||
|
||||
|
||||
class MeterController(RestController):
|
||||
class MeterController(rest.RestController):
|
||||
"""Manages operations on a single meter.
|
||||
"""
|
||||
_custom_actions = {
|
||||
@ -361,7 +360,7 @@ class MeterController(RestController):
|
||||
}
|
||||
|
||||
def __init__(self, meter_id):
|
||||
request.context['meter_id'] = meter_id
|
||||
pecan.request.context['meter_id'] = meter_id
|
||||
self._id = meter_id
|
||||
|
||||
@wsme_pecan.wsexpose([Sample], [Query])
|
||||
@ -374,7 +373,7 @@ class MeterController(RestController):
|
||||
kwargs['meter'] = self._id
|
||||
f = storage.EventFilter(**kwargs)
|
||||
return [Sample(**e)
|
||||
for e in request.storage_conn.get_raw_events(f)
|
||||
for e in pecan.request.storage_conn.get_raw_events(f)
|
||||
]
|
||||
|
||||
@wsme_pecan.wsexpose([Statistics], [Query], int)
|
||||
@ -389,7 +388,7 @@ class MeterController(RestController):
|
||||
kwargs = _query_to_kwargs(q, storage.EventFilter.__init__)
|
||||
kwargs['meter'] = self._id
|
||||
f = storage.EventFilter(**kwargs)
|
||||
computed = request.storage_conn.get_meter_statistics(f, period)
|
||||
computed = pecan.request.storage_conn.get_meter_statistics(f, period)
|
||||
# Find the original timestamp in the query to use for clamping
|
||||
# the duration returned in the statistics.
|
||||
start = end = None
|
||||
@ -405,27 +404,27 @@ class MeterController(RestController):
|
||||
for c in computed]
|
||||
|
||||
|
||||
class Meter(Base):
|
||||
class Meter(wtypes.Base):
|
||||
"""One category of measurements.
|
||||
"""
|
||||
|
||||
name = text
|
||||
name = wtypes.text
|
||||
"The unique name for the meter"
|
||||
|
||||
# FIXME(dhellmann): Make this an enum?
|
||||
type = text
|
||||
type = wtypes.text
|
||||
"The meter type (see :ref:`measurements`)"
|
||||
|
||||
unit = text
|
||||
unit = wtypes.text
|
||||
"The unit of measure"
|
||||
|
||||
resource_id = text
|
||||
resource_id = wtypes.text
|
||||
"The ID of the :class:`Resource` for which the measurements are taken"
|
||||
|
||||
project_id = text
|
||||
project_id = wtypes.text
|
||||
"The ID of the project or tenant that owns the resource"
|
||||
|
||||
user_id = text
|
||||
user_id = wtypes.text
|
||||
"The ID of the user who last triggered an update to the resource"
|
||||
|
||||
@classmethod
|
||||
@ -439,7 +438,7 @@ class Meter(Base):
|
||||
)
|
||||
|
||||
|
||||
class MetersController(RestController):
|
||||
class MetersController(rest.RestController):
|
||||
"""Works on meters."""
|
||||
|
||||
@pecan.expose()
|
||||
@ -452,28 +451,28 @@ class MetersController(RestController):
|
||||
|
||||
:param q: Filter rules for the meters to be returned.
|
||||
"""
|
||||
kwargs = _query_to_kwargs(q, request.storage_conn.get_meters)
|
||||
kwargs = _query_to_kwargs(q, pecan.request.storage_conn.get_meters)
|
||||
return [Meter(**m)
|
||||
for m in request.storage_conn.get_meters(**kwargs)]
|
||||
for m in pecan.request.storage_conn.get_meters(**kwargs)]
|
||||
|
||||
|
||||
class Resource(Base):
|
||||
class Resource(wtypes.Base):
|
||||
"""An externally defined object for which samples have been received.
|
||||
"""
|
||||
|
||||
resource_id = text
|
||||
resource_id = wtypes.text
|
||||
"The unique identifier for the resource"
|
||||
|
||||
project_id = text
|
||||
project_id = wtypes.text
|
||||
"The ID of the owning project or tenant"
|
||||
|
||||
user_id = text
|
||||
user_id = wtypes.text
|
||||
"The ID of the user who created the resource or updated it last"
|
||||
|
||||
timestamp = datetime.datetime
|
||||
"UTC date and time of the last update to any meter for the resource"
|
||||
|
||||
metadata = {text: text}
|
||||
metadata = {wtypes.text: wtypes.text}
|
||||
"Arbitrary metadata associated with the resource"
|
||||
|
||||
def __init__(self, metadata={}, **kwds):
|
||||
@ -491,7 +490,7 @@ class Resource(Base):
|
||||
)
|
||||
|
||||
|
||||
class ResourcesController(RestController):
|
||||
class ResourcesController(rest.RestController):
|
||||
"""Works on resources."""
|
||||
|
||||
@wsme_pecan.wsexpose(Resource, unicode)
|
||||
@ -500,7 +499,8 @@ class ResourcesController(RestController):
|
||||
|
||||
:param resource_id: The UUID of the resource.
|
||||
"""
|
||||
r = list(request.storage_conn.get_resources(resource=resource_id))[0]
|
||||
r = list(pecan.request.storage_conn.get_resources(
|
||||
resource=resource_id))[0]
|
||||
return Resource(**r)
|
||||
|
||||
@wsme_pecan.wsexpose([Resource], [Query])
|
||||
@ -509,10 +509,10 @@ class ResourcesController(RestController):
|
||||
|
||||
:param q: Filter rules for the resources to be returned.
|
||||
"""
|
||||
kwargs = _query_to_kwargs(q, request.storage_conn.get_resources)
|
||||
kwargs = _query_to_kwargs(q, pecan.request.storage_conn.get_resources)
|
||||
resources = [
|
||||
Resource(**r)
|
||||
for r in request.storage_conn.get_resources(**kwargs)]
|
||||
for r in pecan.request.storage_conn.get_resources(**kwargs)]
|
||||
return resources
|
||||
|
||||
|
||||
|
@ -15,16 +15,15 @@
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
"""Set up the API server application instance
|
||||
"""
|
||||
"""Set up the API server application instance."""
|
||||
|
||||
import flask
|
||||
from oslo.config import cfg
|
||||
|
||||
from ceilometer.api import acl
|
||||
from ceilometer.api.v1 import blueprint as v1_blueprint
|
||||
from ceilometer.openstack.common import jsonutils
|
||||
from ceilometer import storage
|
||||
from ceilometer.api.v1 import blueprint as v1_blueprint
|
||||
from ceilometer.api import acl
|
||||
|
||||
|
||||
storage.register_opts(cfg.CONF)
|
||||
|
@ -21,12 +21,8 @@ from stevedore import dispatch
|
||||
|
||||
from ceilometer.collector import meter as meter_api
|
||||
from ceilometer import extension_manager
|
||||
from ceilometer import pipeline
|
||||
from ceilometer import service
|
||||
from ceilometer import storage
|
||||
from ceilometer.openstack.common import context
|
||||
from ceilometer.openstack.common import log
|
||||
from ceilometer.openstack.common import timeutils
|
||||
from ceilometer.openstack.common.rpc import dispatcher as rpc_dispatcher
|
||||
|
||||
# Import rpc_notifier to register `notification_topics` flag so that
|
||||
@ -34,6 +30,11 @@ from ceilometer.openstack.common.rpc import dispatcher as rpc_dispatcher
|
||||
# FIXME(dhellmann): Use option importing feature of oslo.config instead.
|
||||
import ceilometer.openstack.common.notifier.rpc_notifier
|
||||
|
||||
from ceilometer.openstack.common import timeutils
|
||||
from ceilometer import pipeline
|
||||
from ceilometer import service
|
||||
from ceilometer import storage
|
||||
|
||||
OPTS = [
|
||||
cfg.ListOpt('disabled_notification_listeners',
|
||||
default=[],
|
||||
@ -43,7 +44,6 @@ OPTS = [
|
||||
|
||||
cfg.CONF.register_opts(OPTS)
|
||||
|
||||
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
|
||||
|
@ -19,9 +19,9 @@
|
||||
from oslo.config import cfg
|
||||
|
||||
from ceilometer import agent
|
||||
from ceilometer.compute.virt import inspector as virt_inspector
|
||||
from ceilometer import extension_manager
|
||||
from ceilometer import nova_client
|
||||
from ceilometer.compute.virt import inspector as virt_inspector
|
||||
from ceilometer.openstack.common import log
|
||||
|
||||
|
||||
@ -67,7 +67,7 @@ class AgentManager(agent.AgentManager):
|
||||
return PollingTask(self)
|
||||
|
||||
def setup_notifier_task(self):
|
||||
"""For nova notifier usage"""
|
||||
"""For nova notifier usage."""
|
||||
task = PollingTask(self)
|
||||
for pollster in self.pollster_manager.extensions:
|
||||
task.add(
|
||||
|
@ -20,9 +20,9 @@
|
||||
|
||||
from oslo.config import cfg
|
||||
|
||||
from ceilometer.compute import instance
|
||||
from ceilometer import counter
|
||||
from ceilometer import plugin
|
||||
from ceilometer.compute import instance
|
||||
|
||||
|
||||
OPTS = [
|
||||
|
@ -21,14 +21,12 @@ __all__ = [
|
||||
'initialize_manager',
|
||||
]
|
||||
|
||||
from nova import db as instance_info_source
|
||||
from oslo.config import cfg
|
||||
|
||||
from ceilometer.compute import manager as compute_manager
|
||||
from ceilometer.openstack.common import log as logging
|
||||
|
||||
from ceilometer.compute.manager import AgentManager
|
||||
|
||||
from nova import db as instance_info_source
|
||||
|
||||
# This module runs inside the nova compute
|
||||
# agent, which only configures the "nova" logger.
|
||||
# We use a fake logger name in that namespace
|
||||
@ -44,7 +42,7 @@ def initialize_manager(agent_manager=None):
|
||||
if not agent_manager:
|
||||
cfg.CONF(args=[], project='ceilometer', prog='ceilometer-agent')
|
||||
# Instantiate a manager
|
||||
_agent_manager = AgentManager()
|
||||
_agent_manager = compute_manager.AgentManager()
|
||||
else:
|
||||
_agent_manager = agent_manager
|
||||
_agent_manager.setup_notifier_task()
|
||||
|
@ -21,9 +21,9 @@
|
||||
import copy
|
||||
import datetime
|
||||
|
||||
from ceilometer import counter
|
||||
from ceilometer.compute import plugin
|
||||
from ceilometer.compute import instance as compute_instance
|
||||
from ceilometer.compute import plugin
|
||||
from ceilometer import counter
|
||||
from ceilometer.openstack.common import log
|
||||
from ceilometer.openstack.common import timeutils
|
||||
|
||||
@ -31,7 +31,7 @@ LOG = log.getLogger(__name__)
|
||||
|
||||
|
||||
def _instance_name(instance):
|
||||
"""Shortcut to get instance name"""
|
||||
"""Shortcut to get instance name."""
|
||||
return getattr(instance, 'OS-EXT-SRV-ATTR:instance_name', None)
|
||||
|
||||
|
||||
|
@ -16,7 +16,7 @@
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
"""Inspector abstraction for read-only access to hypervisors"""
|
||||
"""Inspector abstraction for read-only access to hypervisors."""
|
||||
|
||||
import collections
|
||||
|
||||
|
@ -15,7 +15,7 @@
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
"""Implementation of Inspector abstraction for libvirt"""
|
||||
"""Implementation of Inspector abstraction for libvirt."""
|
||||
|
||||
from lxml import etree
|
||||
from oslo.config import cfg
|
||||
|
@ -26,6 +26,7 @@ import collections
|
||||
|
||||
from oslo.config import cfg
|
||||
|
||||
|
||||
OPTS = [
|
||||
cfg.StrOpt('counter_source',
|
||||
default='openstack',
|
||||
|
@ -16,14 +16,13 @@
|
||||
|
||||
import datetime
|
||||
|
||||
from keystoneclient import exceptions
|
||||
import requests
|
||||
|
||||
from ceilometer import counter
|
||||
from ceilometer.central import plugin
|
||||
from ceilometer import counter
|
||||
from ceilometer.openstack.common import log
|
||||
|
||||
from keystoneclient import exceptions
|
||||
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
|
||||
|
@ -21,12 +21,11 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import itertools
|
||||
|
||||
import glanceclient
|
||||
|
||||
from ceilometer import plugin
|
||||
from ceilometer import counter
|
||||
from ceilometer.openstack.common import timeutils
|
||||
from ceilometer import plugin
|
||||
|
||||
|
||||
class _Base(plugin.PollsterBase):
|
||||
|
@ -139,7 +139,7 @@ class ImageSize(ImageCRUDBase):
|
||||
|
||||
|
||||
class ImageDownload(ImageBase):
|
||||
""" Emit image_download counter when an image is downloaded. """
|
||||
"""Emit image_download counter when an image is downloaded."""
|
||||
|
||||
metadata_keys = ['destination_ip', 'owner_id']
|
||||
|
||||
@ -167,7 +167,7 @@ class ImageDownload(ImageBase):
|
||||
|
||||
|
||||
class ImageServe(ImageBase):
|
||||
""" Emit image_serve counter when an image is served out. """
|
||||
"""Emit image_serve counter when an image is served out."""
|
||||
|
||||
metadata_keys = ['destination_ip', 'receiver_user_id',
|
||||
'receiver_tenant_id']
|
||||
|
@ -19,9 +19,9 @@
|
||||
from ceilometer.openstack.common import log
|
||||
from ceilometer.openstack.common import timeutils
|
||||
|
||||
from ceilometer.central import plugin
|
||||
from ceilometer import counter
|
||||
from ceilometer import nova_client
|
||||
from ceilometer.central import plugin
|
||||
|
||||
|
||||
class FloatingIPPollster(plugin.CentralPollster):
|
||||
|
@ -23,9 +23,8 @@
|
||||
from oslo.config import cfg
|
||||
|
||||
from ceilometer import counter
|
||||
from ceilometer.openstack.common import log
|
||||
from ceilometer import plugin
|
||||
from ceilometer.openstack.common import log as logging
|
||||
|
||||
|
||||
OPTS = [
|
||||
cfg.StrOpt('quantum_control_exchange',
|
||||
@ -33,10 +32,9 @@ OPTS = [
|
||||
help="Exchange name for Quantum notifications"),
|
||||
]
|
||||
|
||||
|
||||
cfg.CONF.register_opts(OPTS)
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
|
||||
class NetworkNotificationBase(plugin.NotificationBase):
|
||||
|
@ -14,7 +14,7 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from functools import wraps
|
||||
import functools
|
||||
|
||||
from novaclient.v1_1 import client as nova_client
|
||||
from oslo.config import cfg
|
||||
@ -27,7 +27,7 @@ LOG = log.getLogger(__name__)
|
||||
|
||||
def logged(func):
|
||||
|
||||
@wraps(func)
|
||||
@functools.wraps(func)
|
||||
def with_logging(*args, **kwargs):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
@ -41,7 +41,7 @@ def logged(func):
|
||||
class Client(object):
|
||||
|
||||
def __init__(self):
|
||||
"""Returns nova client"""
|
||||
"""Returns a nova Client object."""
|
||||
conf = cfg.CONF
|
||||
tenant = conf.os_tenant_id and conf.os_tenant_id or conf.os_tenant_name
|
||||
self.nova_client = nova_client.Client(username=cfg.CONF.os_username,
|
||||
@ -62,7 +62,7 @@ class Client(object):
|
||||
|
||||
@logged
|
||||
def instance_get_all_by_host(self, hostname):
|
||||
"""Returns list of instances on particular host"""
|
||||
"""Returns list of instances on particular host."""
|
||||
search_opts = {'host': hostname, 'all_tenants': True}
|
||||
return self._with_flavor(self.nova_client.servers.list(
|
||||
detailed=True,
|
||||
@ -70,5 +70,5 @@ class Client(object):
|
||||
|
||||
@logged
|
||||
def floating_ip_get_all(self):
|
||||
"""Returns all floating ips"""
|
||||
"""Returns all floating ips."""
|
||||
return self.nova_client.floating_ips.list()
|
||||
|
@ -25,10 +25,11 @@ import abc
|
||||
from oslo.config import cfg
|
||||
from swiftclient import client as swift
|
||||
|
||||
from ceilometer import plugin
|
||||
from ceilometer import counter
|
||||
from ceilometer.openstack.common import timeutils
|
||||
from ceilometer.openstack.common import log
|
||||
from ceilometer.openstack.common import timeutils
|
||||
from ceilometer import plugin
|
||||
|
||||
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
|
@@ -22,6 +22,23 @@ from __future__ import absolute_import

 from oslo.config import cfg
 from stevedore import dispatch
+from swift.common.utils import split_path
+import webob
+
+REQUEST = webob
+try:
+    # Swift >= 1.7.5
+    import swift.common.swob
+    REQUEST = swift.common.swob
+except ImportError:
+    pass
+
+try:
+    # Swift > 1.7.5 ... module exists but doesn't contain class.
+    from swift.common.utils import InputProxy
+except ImportError:
+    # Swift <= 1.7.5 ... module exists and has class.
+    from swift.common.middleware.proxy_logging import InputProxy

 from ceilometer import counter
 from ceilometer.openstack.common import context

@@ -29,21 +46,6 @@ from ceilometer.openstack.common import timeutils
 from ceilometer import pipeline
 from ceilometer import service

-from swift.common.utils import split_path
-
-try:
-    # Swift >= 1.7.5
-    from swift.common.swob import Request
-except ImportError:
-    from webob import Request
-
-try:
-    # Swift > 1.7.5
-    from swift.common.utils import InputProxy
-except ImportError:
-    # Swift <= 1.7.5
-    from swift.common.middleware.proxy_logging import InputProxy


 class CeilometerMiddleware(object):
     """

@@ -92,7 +94,7 @@ class CeilometerMiddleware(object):
         return iter_response(iterable)

     def publish_counter(self, env, bytes_received, bytes_sent):
-        req = Request(env)
+        req = REQUEST.Request(env)
         version, account, container, obj = split_path(req.path, 1, 4, True)
         now = timeutils.utcnow().isoformat()
         with pipeline.PublishContext(
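The rewritten import block keeps the middleware working across Swift releases by probing for the newer module at import time and binding a module-level name (REQUEST) that the rest of the file uses. A generic, runnable sketch of the same fallback pattern, using unrelated libraries purely for illustration:

    try:
        import simplejson as json   # optional faster drop-in, if installed
    except ImportError:
        import json                 # standard-library fallback

    data = json.dumps({'counter': 'storage.objects'})  # callers only see 'json'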
@ -307,7 +307,7 @@ class PipelineManager(object):
|
||||
"""
|
||||
|
||||
def __init__(self, cfg, publisher_manager):
|
||||
"""Create the pipeline manager"""
|
||||
"""Create the pipeline manager."""
|
||||
self._setup_pipelines(cfg, publisher_manager)
|
||||
|
||||
def _setup_pipelines(self, cfg, publisher_manager):
|
||||
|
@ -19,10 +19,11 @@
|
||||
"""
|
||||
|
||||
import abc
|
||||
from collections import namedtuple
|
||||
import collections
|
||||
|
||||
|
||||
ExchangeTopics = namedtuple('ExchangeTopics', ['exchange', 'topics'])
|
||||
ExchangeTopics = collections.namedtuple('ExchangeTopics',
|
||||
['exchange', 'topics'])
|
||||
|
||||
|
||||
class PluginBase(object):
|
||||
|
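The plugin.py hunk above replaces from collections import namedtuple with import collections, so ExchangeTopics is built through the module name. A tiny runnable sketch of that declaration; the exchange and topic values are illustrative, not taken from the commit:

    import collections

    ExchangeTopics = collections.namedtuple('ExchangeTopics',
                                            ['exchange', 'topics'])

    et = ExchangeTopics(exchange='glance', topics=['notifications.info'])
    print(et.exchange, et.topics)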
@ -15,14 +15,14 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Policy Engine For Ceilometer"""
|
||||
"""Policy Engine For Ceilometer."""
|
||||
|
||||
import os
|
||||
|
||||
from oslo.config import cfg
|
||||
|
||||
from ceilometer import utils
|
||||
from ceilometer.openstack.common import policy
|
||||
from ceilometer import utils
|
||||
|
||||
|
||||
OPTS = [
|
||||
|
@ -22,9 +22,9 @@ import socket
|
||||
|
||||
from oslo.config import cfg
|
||||
|
||||
from ceilometer.openstack.common import rpc
|
||||
from ceilometer.openstack.common import context
|
||||
from ceilometer.openstack.common import log
|
||||
from ceilometer.openstack.common import rpc
|
||||
from ceilometer.openstack.common.rpc import service as rpc_service
|
||||
|
||||
|
||||
|
@@ -18,8 +18,9 @@
 """Storage backend management
 """

-from datetime import datetime
-from urlparse import urlparse
+import datetime
+import urlparse

 from oslo.config import cfg
 from stevedore import driver

@@ -27,6 +28,7 @@ from stevedore import driver
 from ceilometer.openstack.common import log
 from ceilometer.openstack.common import timeutils


 LOG = log.getLogger(__name__)

 STORAGE_ENGINE_NAMESPACE = 'ceilometer.storage'

@@ -43,16 +45,14 @@ cfg.CONF.register_opts(STORAGE_OPTS)


 def register_opts(conf):
-    """Register any options for the storage system.
-    """
+    """Register any options for the storage system."""
     p = get_engine(conf)
     p.register_opts(conf)


 def get_engine(conf):
-    """Load the configured engine and return an instance.
-    """
-    engine_name = urlparse(conf.database_connection).scheme
+    """Load the configured engine and return an instance."""
+    engine_name = urlparse.urlparse(conf.database_connection).scheme
     LOG.debug('looking for %r driver in %r',
               engine_name, STORAGE_ENGINE_NAMESPACE)
     mgr = driver.DriverManager(STORAGE_ENGINE_NAMESPACE,

@@ -62,8 +62,7 @@ def get_engine(conf):


 def get_connection(conf):
-    """Return an open connection to the database.
-    """
+    """Return an open connection to the database."""
     engine = get_engine(conf)
     engine.register_opts(conf)
     db = engine.get_connection(conf)

@@ -94,9 +93,9 @@ class EventFilter(object):
         self.metaquery = metaquery

     def _sanitize_timestamp(self, timestamp):
-        """Return a naive utc datetime object"""
+        """Return a naive utc datetime object."""
         if not timestamp:
             return timestamp
-        if not isinstance(timestamp, datetime):
+        if not isinstance(timestamp, datetime.datetime):
             timestamp = timeutils.parse_isotime(timestamp)
         return timeutils.normalize_time(timestamp)
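The switch from "from urlparse import urlparse" to "import urlparse" is the same module-only rule; the database URL scheme is then used to pick the storage driver. A small standalone sketch of that dispatch step, using the Python 2 urlparse module that the code above relies on and an illustrative connection string:

    import urlparse

    # Hypothetical connection string; the scheme selects the storage engine.
    database_connection = 'mongodb://localhost:27017/ceilometer'
    engine_name = urlparse.urlparse(database_connection).scheme
    print(engine_name)   # -> 'mongodb', used to look up a stevedore driver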
@ -26,31 +26,27 @@ LOG = log.getLogger(__name__)
|
||||
|
||||
|
||||
class StorageEngine(object):
|
||||
"""Base class for storage engines.
|
||||
"""
|
||||
"""Base class for storage engines."""
|
||||
|
||||
__metaclass__ = abc.ABCMeta
|
||||
|
||||
@abc.abstractmethod
|
||||
def register_opts(self, conf):
|
||||
"""Register any configuration options used by this engine.
|
||||
"""
|
||||
"""Register any configuration options used by this engine."""
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_connection(self, conf):
|
||||
"""Return a Connection instance based on the configuration settings.
|
||||
"""
|
||||
"""Return a Connection instance based on the configuration settings."""
|
||||
|
||||
|
||||
class Connection(object):
|
||||
"""Base class for storage system connections.
|
||||
"""
|
||||
"""Base class for storage system connections."""
|
||||
|
||||
__metaclass__ = abc.ABCMeta
|
||||
|
||||
@abc.abstractmethod
|
||||
def __init__(self, conf):
|
||||
"""Constructor"""
|
||||
"""Constructor."""
|
||||
|
||||
@abc.abstractmethod
|
||||
def upgrade(self, version=None):
|
||||
|
@ -20,15 +20,15 @@
|
||||
|
||||
import copy
|
||||
import datetime
|
||||
import re
|
||||
import urlparse
|
||||
|
||||
import bson.code
|
||||
import pymongo
|
||||
|
||||
from ceilometer.openstack.common import log
|
||||
from ceilometer.storage import base
|
||||
|
||||
import bson.code
|
||||
import pymongo
|
||||
import re
|
||||
|
||||
from urlparse import urlparse
|
||||
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
@ -283,7 +283,7 @@ class Connection(base.Connection):
|
||||
|
||||
def _parse_connection_url(self, url):
|
||||
opts = {}
|
||||
result = urlparse(url)
|
||||
result = urlparse.urlparse(url)
|
||||
opts['dbtype'] = result.scheme
|
||||
opts['dbname'] = result.path.replace('/', '')
|
||||
netloc_match = re.match(r'(?:(\w+:\w+)@)?(.*)', result.netloc)
|
||||
|
@ -13,29 +13,30 @@
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
"""SQLAlchemy storage backend
|
||||
"""
|
||||
|
||||
"""SQLAlchemy storage backend."""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import copy
|
||||
import datetime
|
||||
import math
|
||||
|
||||
from sqlalchemy import func
|
||||
|
||||
from ceilometer.openstack.common import log
|
||||
from ceilometer.openstack.common import timeutils
|
||||
from ceilometer.storage import base
|
||||
from ceilometer.storage.sqlalchemy import migration
|
||||
from ceilometer.storage.sqlalchemy.models import Meter, Project, Resource
|
||||
from ceilometer.storage.sqlalchemy.models import Source, User, Base
|
||||
import ceilometer.storage.sqlalchemy.session as sqlalchemy_session
|
||||
from ceilometer.storage.sqlalchemy import migration
|
||||
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
|
||||
class SQLAlchemyStorage(base.StorageEngine):
|
||||
"""Put the data into a SQLAlchemy database
|
||||
"""Put the data into a SQLAlchemy database.
|
||||
|
||||
Tables::
|
||||
|
||||
@ -82,8 +83,7 @@ class SQLAlchemyStorage(base.StorageEngine):
|
||||
OPTIONS = []
|
||||
|
||||
def register_opts(self, conf):
|
||||
"""Register any configuration options used by this engine.
|
||||
"""
|
||||
"""Register any configuration options used by this engine."""
|
||||
conf.register_opts(self.OPTIONS)
|
||||
|
||||
@staticmethod
|
||||
@ -127,8 +127,7 @@ def make_query_from_filter(query, event_filter, require_meter=True):
|
||||
|
||||
|
||||
class Connection(base.Connection):
|
||||
"""SqlAlchemy connection.
|
||||
"""
|
||||
"""SqlAlchemy connection."""
|
||||
|
||||
def __init__(self, conf):
|
||||
LOG.info('connecting to %s', conf.database_connection)
|
||||
@ -144,8 +143,7 @@ class Connection(base.Connection):
|
||||
engine.execute(table.delete())
|
||||
|
||||
def _get_connection(self, conf):
|
||||
"""Return a connection to the database.
|
||||
"""
|
||||
"""Return a connection to the database."""
|
||||
return sqlalchemy_session.get_session()
|
||||
|
||||
def record_metering_data(self, data):
|
||||
@ -353,7 +351,7 @@ class Connection(base.Connection):
|
||||
yield e
|
||||
|
||||
def _make_volume_query(self, event_filter, counter_volume_func):
|
||||
"""Returns complex Meter counter_volume query for max and sum"""
|
||||
"""Returns complex Meter counter_volume query for max and sum."""
|
||||
subq = model_query(Meter.id, session=self.session)
|
||||
subq = make_query_from_filter(subq, event_filter, require_meter=False)
|
||||
subq = subq.subquery()
|
||||
@ -464,7 +462,7 @@ class Connection(base.Connection):
|
||||
|
||||
|
||||
def model_query(*args, **kwargs):
|
||||
"""Query helper
|
||||
"""Query helper.
|
||||
|
||||
:param session: if present, the session to use
|
||||
"""
|
||||
|
@ -17,16 +17,16 @@
|
||||
import distutils.version as dist_version
|
||||
import os
|
||||
|
||||
from ceilometer.storage.sqlalchemy.session import get_engine
|
||||
from ceilometer.openstack.common import log as logging
|
||||
|
||||
|
||||
import migrate
|
||||
from migrate.versioning import util as migrate_util
|
||||
import sqlalchemy
|
||||
|
||||
from ceilometer.openstack.common import log
|
||||
from ceilometer.storage.sqlalchemy import session
|
||||
|
||||
|
||||
INIT_VERSION = 1
|
||||
LOG = logging.getLogger(__name__)
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
|
||||
@migrate_util.decorator
|
||||
@ -78,20 +78,20 @@ def db_sync(engine, version=None):
|
||||
def db_version():
|
||||
repository = _find_migrate_repo()
|
||||
try:
|
||||
return versioning_api.db_version(get_engine(), repository)
|
||||
return versioning_api.db_version(session.get_engine(), repository)
|
||||
except versioning_exceptions.DatabaseNotControlledError:
|
||||
meta = sqlalchemy.MetaData()
|
||||
engine = get_engine()
|
||||
engine = session.get_engine()
|
||||
meta.reflect(bind=engine)
|
||||
tables = meta.tables
|
||||
if len(tables) == 0:
|
||||
db_version_control(0)
|
||||
return versioning_api.db_version(get_engine(), repository)
|
||||
return versioning_api.db_version(session.get_engine(), repository)
|
||||
|
||||
|
||||
def db_version_control(version=None):
|
||||
repository = _find_migrate_repo()
|
||||
versioning_api.version_control(get_engine(), repository, version)
|
||||
versioning_api.version_control(session.get_engine(), repository, version)
|
||||
return version
|
||||
|
||||
|
||||
|
@ -15,16 +15,15 @@
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
SQLAlchemy models for nova data.
|
||||
SQLAlchemy models for Ceilometer data.
|
||||
"""
|
||||
|
||||
import json
|
||||
from urlparse import urlparse
|
||||
import urlparse
|
||||
|
||||
from oslo.config import cfg
|
||||
from sqlalchemy import Column, Integer, String, Table
|
||||
from sqlalchemy import Column, Integer, String, Table, ForeignKey, DateTime
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy import ForeignKey, DateTime
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.types import TypeDecorator, VARCHAR
|
||||
|
||||
@ -40,7 +39,7 @@ cfg.CONF.register_opts(sql_opts)
|
||||
|
||||
|
||||
def table_args():
|
||||
engine_name = urlparse(cfg.CONF.database_connection).scheme
|
||||
engine_name = urlparse.urlparse(cfg.CONF.database_connection).scheme
|
||||
if engine_name == 'mysql':
|
||||
return {'mysql_engine': cfg.CONF.mysql_engine,
|
||||
'mysql_charset': "utf8"}
|
||||
@ -97,7 +96,7 @@ class Source(Base):
|
||||
|
||||
|
||||
class Meter(Base):
|
||||
"""Metering data"""
|
||||
"""Metering data."""
|
||||
|
||||
__tablename__ = 'meter'
|
||||
id = Column(Integer, primary_key=True)
|
||||
|
@ -23,13 +23,14 @@ import time
|
||||
|
||||
from oslo.config import cfg
|
||||
import sqlalchemy
|
||||
from sqlalchemy.exc import DisconnectionError, OperationalError
|
||||
import sqlalchemy.exc as exc
|
||||
import sqlalchemy.orm
|
||||
from sqlalchemy.pool import NullPool, StaticPool
|
||||
import sqlalchemy.pool as pool
|
||||
|
||||
import ceilometer.openstack.common.log as logging
|
||||
from ceilometer.openstack.common import log
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
_MAKER = None
|
||||
_ENGINE = None
|
||||
@ -73,7 +74,7 @@ def get_session(autocommit=True, expire_on_commit=False, autoflush=True):
|
||||
|
||||
|
||||
def synchronous_switch_listener(dbapi_conn, connection_rec):
|
||||
"""Switch sqlite connections to non-synchronous mode"""
|
||||
"""Switch sqlite connections to non-synchronous mode."""
|
||||
dbapi_conn.execute("PRAGMA synchronous = OFF")
|
||||
|
||||
|
||||
@ -99,7 +100,7 @@ def ping_listener(dbapi_conn, connection_rec, connection_proxy):
|
||||
except dbapi_conn.OperationalError, ex:
|
||||
if ex.args[0] in (2006, 2013, 2014, 2045, 2055):
|
||||
LOG.warn('Got mysql server has gone away: %s', ex)
|
||||
raise DisconnectionError("Database server went away")
|
||||
raise exc.DisconnectionError("Database server went away")
|
||||
else:
|
||||
raise
|
||||
|
||||
@ -135,10 +136,10 @@ def get_engine():
|
||||
engine_args['echo'] = True
|
||||
|
||||
if "sqlite" in connection_dict.drivername:
|
||||
engine_args["poolclass"] = NullPool
|
||||
engine_args["poolclass"] = pool.NullPool
|
||||
|
||||
if cfg.CONF.database_connection == "sqlite://":
|
||||
engine_args["poolclass"] = StaticPool
|
||||
engine_args["poolclass"] = pool.StaticPool
|
||||
engine_args["connect_args"] = {'check_same_thread': False}
|
||||
|
||||
_ENGINE = sqlalchemy.create_engine(cfg.CONF.database_connection,
|
||||
@ -160,7 +161,7 @@ def get_engine():
|
||||
|
||||
try:
|
||||
_ENGINE.connect()
|
||||
except OperationalError, e:
|
||||
except exc.OperationalError, e:
|
||||
if not is_db_connection_error(e.args[0]):
|
||||
raise
|
||||
|
||||
@ -176,7 +177,7 @@ def get_engine():
|
||||
try:
|
||||
_ENGINE.connect()
|
||||
break
|
||||
except OperationalError, e:
|
||||
except exc.OperationalError, e:
|
||||
if (remaining != 'infinite' and remaining == 0) \
|
||||
or not is_db_connection_error(e.args[0]):
|
||||
raise
|
||||
|
@ -24,14 +24,14 @@ import urllib
|
||||
|
||||
import flask
|
||||
from oslo.config import cfg
|
||||
from pecan import set_config
|
||||
from pecan.testing import load_test_app
|
||||
import pecan
|
||||
import pecan.testing
|
||||
|
||||
from ceilometer import storage
|
||||
from ceilometer.api.v1 import app as v1_app
|
||||
from ceilometer.api.v1 import blueprint as v1_blueprint
|
||||
from ceilometer.tests import db as db_test_base
|
||||
from ceilometer import storage
|
||||
from ceilometer.tests import base
|
||||
from ceilometer.tests import db as db_test_base
|
||||
|
||||
|
||||
class TestBase(db_test_base.TestBase):
|
||||
@ -123,11 +123,11 @@ class FunctionalTest(db_test_base.TestBase):
|
||||
},
|
||||
}
|
||||
|
||||
return load_test_app(self.config)
|
||||
return pecan.testing.load_test_app(self.config)
|
||||
|
||||
def tearDown(self):
|
||||
super(FunctionalTest, self).tearDown()
|
||||
set_config({}, overwrite=True)
|
||||
pecan.set_config({}, overwrite=True)
|
||||
|
||||
def get_json(self, path, expect_errors=False, headers=None,
|
||||
q=[], **params):
|
||||
|
@ -17,8 +17,8 @@
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
"""Base classes for API tests.
|
||||
"""
|
||||
|
||||
"""Base classes for API tests."""
|
||||
|
||||
from ming import mim
|
||||
from nose.plugins import skip
|
||||
@ -28,6 +28,10 @@ from ceilometer import storage
|
||||
from ceilometer.tests import base as test_base
|
||||
|
||||
|
||||
class BaseException(Exception):
|
||||
"""A base exception for avoiding false positives."""
|
||||
|
||||
|
||||
class TestBase(test_base.TestCase):
|
||||
|
||||
# Default tests use test:// (MIM)
|
||||
@ -49,6 +53,6 @@ def require_map_reduce(conn):
|
||||
# skip these tests unless we aren't using mim.
|
||||
try:
|
||||
import spidermonkey
|
||||
except:
|
||||
except BaseException:
|
||||
if isinstance(conn.conn, mim.Connection):
|
||||
raise skip.SkipTest('requires spidermonkey')
|
||||
|
setup.py
@ -17,9 +17,9 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import textwrap
|
||||
import os
|
||||
import setuptools
|
||||
import textwrap
|
||||
|
||||
from ceilometer.openstack.common import setup as common_setup
|
||||
|
||||
|
@ -17,11 +17,11 @@
|
||||
# under the License.
|
||||
"""Test API against SQLAlchemy.
|
||||
"""
|
||||
from . import compute_duration_by_resource as cdbr
|
||||
from . import list_events
|
||||
from . import list_meters
|
||||
from . import list_projects
|
||||
from . import list_users
|
||||
import compute_duration_by_resource as cdbr
|
||||
import list_events
|
||||
import list_meters
|
||||
import list_projects
|
||||
import list_users
|
||||
|
||||
|
||||
class TestListEvents(list_events.TestListEvents):
|
||||
|
@ -18,6 +18,7 @@
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from ceilometer.central import manager
|
||||
from ceilometer.objectstore import swift
|
||||
from ceilometer.tests import base
|
||||
|
tools/hacking.py
@ -16,11 +16,12 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""nova HACKING file compliance testing
|
||||
"""Ceilometer HACKING file compliance testing
|
||||
|
||||
Built on top of pep8.py
|
||||
"""
|
||||
|
||||
import imp
|
||||
import inspect
|
||||
import logging
|
||||
import os
|
||||
@ -28,7 +29,7 @@ import re
|
||||
import subprocess
|
||||
import sys
|
||||
import tokenize
|
||||
import warnings
|
||||
import traceback
|
||||
|
||||
import pep8
|
||||
|
||||
@ -43,8 +44,12 @@ logging.disable('LOG')
|
||||
#N6xx calling methods
|
||||
#N7xx localization
|
||||
#N8xx git commit messages
|
||||
#N9xx other
|
||||
|
||||
IMPORT_EXCEPTIONS = ['sqlalchemy', 'migrate', 'nova.db.sqlalchemy.session']
|
||||
IMPORT_EXCEPTIONS = ['sqlalchemy', 'migrate',
|
||||
'ceilometer.storage.sqlalchemy.session',
|
||||
'ceilometer.storage.sqlalchemy.models']
|
||||
# Paste is missing a __init__ in top level directory
|
||||
START_DOCSTRING_TRIPLE = ['u"""', 'r"""', '"""', "u'''", "r'''", "'''"]
|
||||
END_DOCSTRING_TRIPLE = ['"""', "'''"]
|
||||
VERBOSE_MISSING_IMPORT = os.getenv('HACKING_VERBOSE_MISSING_IMPORT', 'False')
|
||||
@ -149,111 +154,124 @@ def nova_except_format_assert(logical_line):
|
||||
yield 1, "N202: assertRaises Exception too broad"
|
||||
|
||||
|
||||
def nova_one_import_per_line(logical_line):
|
||||
r"""Check for import format.
|
||||
modules_cache = dict((mod, True) for mod in tuple(sys.modules.keys())
|
||||
+ sys.builtin_module_names)
|
||||
|
||||
RE_RELATIVE_IMPORT = re.compile('^from\s*[.]')
|
||||
|
||||
|
||||
def nova_import_rules(logical_line):
|
||||
r"""Check for imports.
|
||||
|
||||
nova HACKING guide recommends one import per line:
|
||||
Do not import more than one module per line
|
||||
|
||||
Examples:
|
||||
Okay: from nova.rpc.common import RemoteError
|
||||
N301: from nova.rpc.common import RemoteError, LOG
|
||||
"""
|
||||
pos = logical_line.find(',')
|
||||
parts = logical_line.split()
|
||||
if (pos > -1 and (parts[0] == "import" or
|
||||
parts[0] == "from" and parts[2] == "import") and
|
||||
not is_import_exception(parts[1])):
|
||||
yield pos, "N301: one import per line"
|
||||
Okay: from nova.compute import api
|
||||
N301: from nova.compute import api, utils
|
||||
|
||||
|
||||
def nova_import_module_only(logical_line):
|
||||
r"""Check for import module only.
|
||||
Imports should usually be on separate lines.
|
||||
|
||||
nova HACKING guide recommends importing only modules:
|
||||
Do not import objects, only modules
|
||||
|
||||
Examples:
|
||||
Okay: from os import path
|
||||
Okay: from os import path as p
|
||||
Okay: from os import (path as p)
|
||||
Okay: import os.path
|
||||
Okay: from nova.compute import rpcapi
|
||||
N302: from os.path import dirname as dirname2
|
||||
N303 from os.path import *
|
||||
N304 import flakes
|
||||
N302: from os.path import (dirname as dirname2)
|
||||
N303: from os.path import *
|
||||
N304: from .compute import rpcapi
|
||||
"""
|
||||
# N302 import only modules
|
||||
# N303 Invalid Import
|
||||
# N304 Relative Import
|
||||
#NOTE(afazekas): An old style relative import example will not be able to
|
||||
# pass the doctest, since the relativity depends on the file's locality
|
||||
|
||||
# TODO(sdague) actually get these tests working
|
||||
# TODO(jogo) simplify this code
|
||||
def import_module_check(mod, parent=None, added=False):
|
||||
"""Checks for relative, modules and invalid imports.
|
||||
|
||||
If can't find module on first try, recursively check for relative
|
||||
imports.
|
||||
When parsing 'from x import y,' x is the parent.
|
||||
"""
|
||||
current_path = os.path.dirname(pep8.current_file)
|
||||
def is_module_for_sure(mod, search_path=sys.path):
|
||||
mod = mod.replace('(', '') # Ignore parentheses
|
||||
try:
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter('ignore', DeprecationWarning)
|
||||
valid = True
|
||||
if parent:
|
||||
parent_mod = __import__(parent, globals(), locals(),
|
||||
[mod], -1)
|
||||
valid = inspect.ismodule(getattr(parent_mod, mod))
|
||||
else:
|
||||
__import__(mod, globals(), locals(), [], -1)
|
||||
valid = inspect.ismodule(sys.modules[mod])
|
||||
if not valid:
|
||||
if added:
|
||||
sys.path.pop()
|
||||
added = False
|
||||
return logical_line.find(mod), ("N304: No "
|
||||
"relative imports. '%s' is a relative import"
|
||||
% logical_line)
|
||||
return logical_line.find(mod), ("N302: import only "
|
||||
"modules. '%s' does not import a module"
|
||||
% logical_line)
|
||||
mod_name = mod
|
||||
while '.' in mod_name:
|
||||
pack_name, _sep, mod_name = mod.partition('.')
|
||||
f, p, d = imp.find_module(pack_name, search_path)
|
||||
search_path = [p]
|
||||
imp.find_module(mod_name, search_path)
|
||||
except ImportError:
|
||||
try:
|
||||
# NOTE(vish): handle namespace modules
|
||||
module = __import__(mod)
|
||||
except ImportError, exc:
|
||||
# NOTE(vish): the import error might be due
|
||||
# to a missing dependency
|
||||
missing = str(exc).split()[-1]
|
||||
if (missing != mod.split('.')[-1] or
|
||||
"cannot import" in str(exc)):
|
||||
_missingImport.add(missing)
|
||||
return True
|
||||
return False
|
||||
except Exception, exc:
|
||||
# NOTE(jogo) don't stack trace if unexpected import error,
|
||||
# log and continue.
|
||||
traceback.print_exc()
|
||||
return False
|
||||
return True
|
||||
|
||||
except (ImportError, NameError) as exc:
|
||||
if not added:
|
||||
added = True
|
||||
sys.path.append(current_path)
|
||||
return import_module_check(mod, parent, added)
|
||||
else:
|
||||
name = logical_line.split()[1]
|
||||
if name not in _missingImport:
|
||||
if VERBOSE_MISSING_IMPORT != 'False':
|
||||
print >> sys.stderr, ("ERROR: import '%s' in %s "
|
||||
"failed: %s" %
|
||||
(name, pep8.current_file, exc))
|
||||
_missingImport.add(name)
|
||||
added = False
|
||||
sys.path.pop()
|
||||
return
|
||||
def is_module(mod):
|
||||
"""Checks for non module imports."""
|
||||
if mod in modules_cache:
|
||||
return modules_cache[mod]
|
||||
res = is_module_for_sure(mod)
|
||||
modules_cache[mod] = res
|
||||
return res
|
||||
|
||||
except AttributeError:
|
||||
# Invalid import
|
||||
if "import *" in logical_line:
|
||||
# TODO(jogo): handle "from x import *, by checking all
|
||||
# "objects in x"
|
||||
return
|
||||
return logical_line.find(mod), ("N303: Invalid import, "
|
||||
"%s" % mod)
|
||||
current_path = os.path.dirname(pep8.current_file)
|
||||
current_mod = os.path.basename(pep8.current_file)
|
||||
if current_mod[-3:] == ".py":
|
||||
current_mod = current_mod[:-3]
|
||||
|
||||
split_line = logical_line.split()
|
||||
if (", " not in logical_line and
|
||||
split_line[0] in ('import', 'from') and
|
||||
(len(split_line) in (2, 4, 6)) and
|
||||
split_line[1] != "__future__"):
|
||||
if is_import_exception(split_line[1]):
|
||||
split_line_len = len(split_line)
|
||||
if (split_line[0] in ('import', 'from') and split_line_len > 1 and
|
||||
not is_import_exception(split_line[1])):
|
||||
pos = logical_line.find(',')
|
||||
if pos != -1:
|
||||
if split_line[0] == 'from':
|
||||
yield pos, "N301: one import per line"
|
||||
return # ',' is not supported by the N302 checker yet
|
||||
pos = logical_line.find('*')
|
||||
if pos != -1:
|
||||
yield pos, "N303: No wildcard (*) import."
|
||||
return
|
||||
if "from" == split_line[0]:
|
||||
rval = import_module_check(split_line[3], parent=split_line[1])
|
||||
else:
|
||||
rval = import_module_check(split_line[1])
|
||||
if rval is not None:
|
||||
yield rval
|
||||
|
||||
if split_line_len in (2, 4, 6) and split_line[1] != "__future__":
|
||||
if 'from' == split_line[0] and split_line_len > 3:
|
||||
mod = '.'.join((split_line[1], split_line[3]))
|
||||
if is_import_exception(mod):
|
||||
return
|
||||
if RE_RELATIVE_IMPORT.search(logical_line):
|
||||
yield logical_line.find('.'), ("N304: No "
|
||||
"relative imports. '%s' is a relative import"
|
||||
% logical_line)
|
||||
return
|
||||
|
||||
if not is_module(mod):
|
||||
yield 0, ("N302: import only modules."
|
||||
"'%s' does not import a module" % logical_line)
|
||||
return
|
||||
|
||||
#NOTE(afazekas): import searches first in the package
|
||||
# The import keyword just imports modules
|
||||
# The guestfs module now imports guestfs
|
||||
mod = split_line[1]
|
||||
if (current_mod != mod and
|
||||
not is_module(mod) and
|
||||
is_module_for_sure(mod, [current_path])):
|
||||
yield 0, ("N304: No relative imports."
|
||||
" '%s' is a relative import"
|
||||
% logical_line)
|
||||
|
||||
|
||||
#TODO(jogo): import template: N305
|
||||
@ -293,12 +311,26 @@ def nova_import_no_db_in_virt(logical_line, filename):
|
||||
"""
|
||||
if "nova/virt" in filename and not filename.endswith("fake.py"):
|
||||
if logical_line.startswith("from nova import db"):
|
||||
|
||||
yield (0, "N307: nova.db import not allowed in nova/virt/*")
|
||||
|
||||
|
||||
def in_docstring_position(previous_logical):
|
||||
return (previous_logical.startswith("def ") or
|
||||
previous_logical.startswith("class "))
|
||||
def is_docstring(physical_line, previous_logical):
|
||||
"""Return True if found docstring
|
||||
'A docstring is a string literal that occurs as the first statement in a
|
||||
module, function, class,'
|
||||
http://www.python.org/dev/peps/pep-0257/#what-is-a-docstring
|
||||
"""
|
||||
line = physical_line.lstrip()
|
||||
start = max([line.find(i) for i in START_DOCSTRING_TRIPLE])
|
||||
end = max([line[-4:-1] == i for i in END_DOCSTRING_TRIPLE])
|
||||
if (previous_logical.startswith("def ") or
|
||||
previous_logical.startswith("class ")):
|
||||
if start is 0:
|
||||
return True
|
||||
else:
|
||||
# Handle multi line comments
|
||||
return end and start in (-1, len(line) - 4)
|
||||
|
||||
|
||||
def nova_docstring_start_space(physical_line, previous_logical):
|
||||
@ -308,6 +340,8 @@ def nova_docstring_start_space(physical_line, previous_logical):
|
||||
Docstring should not start with space
|
||||
|
||||
Okay: def foo():\n '''This is good.'''
|
||||
Okay: def foo():\n a = ''' This is not a docstring.'''
|
||||
Okay: def foo():\n pass\n ''' This is not.'''
|
||||
N401: def foo():\n ''' This is not.'''
|
||||
"""
|
||||
# short circuit so that we don't fail on our own fail test
|
||||
@ -318,30 +352,32 @@ def nova_docstring_start_space(physical_line, previous_logical):
|
||||
# it's important that we determine this is actually a docstring,
|
||||
# and not a doc block used somewhere after the first line of a
|
||||
# function def
|
||||
if in_docstring_position(previous_logical):
|
||||
if is_docstring(physical_line, previous_logical):
|
||||
pos = max([physical_line.find(i) for i in START_DOCSTRING_TRIPLE])
|
||||
if pos != -1 and len(physical_line) > pos + 4:
|
||||
if physical_line[pos + 3] == ' ':
|
||||
return (pos, "N401: docstring should not start with"
|
||||
" a space")
|
||||
if physical_line[pos + 3] == ' ':
|
||||
return (pos, "N401: docstring should not start with"
|
||||
" a space")
|
||||
|
||||
|
||||
def nova_docstring_one_line(physical_line):
|
||||
def nova_docstring_one_line(physical_line, previous_logical):
|
||||
r"""Check one line docstring end.
|
||||
|
||||
nova HACKING guide recommendation for one line docstring:
|
||||
A one line docstring looks like this and ends in punctuation.
|
||||
|
||||
Okay: '''This is good.'''
|
||||
Okay: '''This is good too!'''
|
||||
Okay: '''How about this?'''
|
||||
N402: '''This is not'''
|
||||
N402: '''Bad punctuation,'''
|
||||
Okay: def foo():\n '''This is good.'''
|
||||
Okay: def foo():\n '''This is good too!'''
|
||||
Okay: def foo():\n '''How about this?'''
|
||||
Okay: def foo():\n a = '''This is not a docstring'''
|
||||
Okay: def foo():\n pass\n '''This is not a docstring'''
|
||||
Okay: class Foo:\n pass\n '''This is not a docstring'''
|
||||
N402: def foo():\n '''This is not'''
|
||||
N402: def foo():\n '''Bad punctuation,'''
|
||||
N402: class Foo:\n '''Bad punctuation,'''
|
||||
"""
|
||||
#TODO(jogo) make this apply to multi line docstrings as well
|
||||
line = physical_line.lstrip()
|
||||
|
||||
if line.startswith('"') or line.startswith("'"):
|
||||
if is_docstring(physical_line, previous_logical):
|
||||
pos = max([line.find(i) for i in START_DOCSTRING_TRIPLE]) # start
|
||||
end = max([line[-4:-1] == i for i in END_DOCSTRING_TRIPLE]) # end
|
||||
|
||||
@ -350,20 +386,27 @@ def nova_docstring_one_line(physical_line):
|
||||
return pos, "N402: one line docstring needs punctuation."
|
||||
|
||||
|
||||
def nova_docstring_multiline_end(physical_line, previous_logical):
|
||||
def nova_docstring_multiline_end(physical_line, previous_logical, tokens):
|
||||
r"""Check multi line docstring end.
|
||||
|
||||
nova HACKING guide recommendation for docstring:
|
||||
Docstring should end on a new line
|
||||
|
||||
Okay: '''foobar\nfoo\nbar\n'''
|
||||
N403: def foo():\n'''foobar\nfoo\nbar\n d'''\n\n
|
||||
Okay: def foo():\n '''foobar\nfoo\nbar\n'''
|
||||
Okay: class Foo:\n '''foobar\nfoo\nbar\n'''
|
||||
Okay: def foo():\n a = '''not\na\ndocstring'''
|
||||
Okay: def foo():\n pass\n'''foobar\nfoo\nbar\n d'''
|
||||
N403: def foo():\n '''foobar\nfoo\nbar\ndocstring'''
|
||||
N403: class Foo:\n '''foobar\nfoo\nbar\ndocstring'''\n\n
|
||||
"""
|
||||
if in_docstring_position(previous_logical):
|
||||
# if find OP tokens, not a docstring
|
||||
ops = [t for t, _, _, _, _ in tokens if t == tokenize.OP]
|
||||
if (is_docstring(physical_line, previous_logical) and len(tokens) > 0 and
|
||||
len(ops) == 0):
|
||||
pos = max(physical_line.find(i) for i in END_DOCSTRING_TRIPLE)
|
||||
if pos != -1 and len(physical_line) == pos + 4:
|
||||
if physical_line.strip() not in START_DOCSTRING_TRIPLE:
|
||||
return (pos, "N403: multi line docstring end on new line")
|
||||
if physical_line.strip() not in START_DOCSTRING_TRIPLE:
|
||||
return (pos, "N403: multi line docstring end on new line")
|
||||
|
||||
|
||||
def nova_docstring_multiline_start(physical_line, previous_logical, tokens):
|
||||
@ -373,9 +416,10 @@ def nova_docstring_multiline_start(physical_line, previous_logical, tokens):
|
||||
Docstring should start with A multi line docstring has a one-line summary
|
||||
|
||||
Okay: '''foobar\nfoo\nbar\n'''
|
||||
N404: def foo():\n'''\nfoo\nbar\n''' \n\n
|
||||
Okay: def foo():\n a = '''\nnot\na docstring\n'''
|
||||
N404: def foo():\n'''\nfoo\nbar\n'''\n\n
|
||||
"""
|
||||
if in_docstring_position(previous_logical):
|
||||
if is_docstring(physical_line, previous_logical):
|
||||
pos = max([physical_line.find(i) for i in START_DOCSTRING_TRIPLE])
|
||||
# start of docstring when len(tokens)==0
|
||||
if len(tokens) == 0 and pos != -1 and len(physical_line) == pos + 4:
|
||||
@ -385,7 +429,7 @@ def nova_docstring_multiline_start(physical_line, previous_logical, tokens):
|
||||
|
||||
|
||||
def nova_no_cr(physical_line):
|
||||
r"""Check that we only use newlines not cariage returns.
|
||||
r"""Check that we only use newlines not carriage returns.
|
||||
|
||||
Okay: import os\nimport sys
|
||||
# pep8 doesn't yet replace \r in strings, will work on an
|
||||
@ -493,6 +537,37 @@ def nova_localization_strings(logical_line, tokens):
|
||||
|
||||
#TODO(jogo) Dict and list objects
|
||||
|
||||
|
||||
def nova_is_not(logical_line):
|
||||
r"""Check localization in line.
|
||||
|
||||
Okay: if x is not y
|
||||
N901: if not X is Y
|
||||
N901: if not X.B is Y
|
||||
"""
|
||||
split_line = logical_line.split()
|
||||
if (len(split_line) == 5 and split_line[0] == 'if' and
|
||||
split_line[1] == 'not' and split_line[3] == 'is'):
|
||||
yield (logical_line.find('not'), "N901: Use the 'is not' "
|
||||
"operator for when testing for unequal identities")
|
||||
|
||||
|
||||
def nova_not_in(logical_line):
|
||||
r"""Check localization in line.
|
||||
|
||||
Okay: if x not in y
|
||||
Okay: if not (X in Y or X is Z)
|
||||
Okay: if not (X in Y)
|
||||
N902: if not X in Y
|
||||
N902: if not X.B in Y
|
||||
"""
|
||||
split_line = logical_line.split()
|
||||
if (len(split_line) == 5 and split_line[0] == 'if' and
|
||||
split_line[1] == 'not' and split_line[3] == 'in' and not
|
||||
split_line[2].startswith('(')):
|
||||
yield (logical_line.find('not'), "N902: Use the 'not in' "
|
||||
"operator for collection membership evaluation")
|
||||
|
||||
current_file = ""
|
||||
|
||||
|
||||
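Checks like nova_is_not and nova_not_in follow the pep8 plugin convention used throughout tools/hacking.py: each function receives a logical or physical line and reports (offset, message) pairs for every violation. A tiny standalone sketch of driving such a check directly, for illustration only (this is not how the tool is normally invoked):

    def nova_is_not(logical_line):
        """Same shape as the checks in tools/hacking.py."""
        split_line = logical_line.split()
        if (len(split_line) == 5 and split_line[0] == 'if' and
                split_line[1] == 'not' and split_line[3] == 'is'):
            yield (logical_line.find('not'), "N901: Use the 'is not' operator")

    # Feeding a line to the generator yields the offenses it found.
    for offset, message in nova_is_not('if not X is Y'):
        print(offset, message)   # -> 3 N901: Use the 'is not' operator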
@@ -513,7 +588,7 @@ def add_nova():
         if not inspect.isfunction(function):
             continue
         args = inspect.getargspec(function)[0]
-        if args and name.startswith("nova"):
+        if args and name.startswith("ceilometer"):
             exec("pep8.%s = %s" % (name, name))

@@ -523,7 +598,7 @@ def once_git_check_commit_title():
     nova HACKING recommends not referencing a bug or blueprint in first line,
     it should provide an accurate description of the change
     N801
-    N802 Title limited to 50 chars
+    N802 Title limited to 72 chars
     """
     #Get title of most recent commit

@@ -548,6 +623,8 @@ def once_git_check_commit_title():
                    "description of the change, not just a reference to a bug "
                    "or blueprint" % title.strip())
             error = True
+        # HACKING.rst recommends commit titles 50 chars or less, but enforces
+        # a 72 character limit
         if len(title.decode('utf-8')) > 72:
             print ("N802: git commit title ('%s') should be under 50 chars"
                    % title.strip())
tox.ini
@@ -1,5 +1,5 @@
 [tox]
-envlist = py26,py27,py26-folsom,py27-folsom,pep8
+envlist = py26,py27,py26-folsom,py27-folsom,pep8,hacking

 [testenv]
 deps = -r{toxinidir}/tools/test-requires

@@ -22,18 +22,13 @@ setenv=CEILOMETER_TEST_LIVE=1
 commands = nosetests --no-path-adjustment --with-coverage --cover-erase --cover-package=ceilometer --cover-inclusive []

 [testenv:pep8]
-deps = -r{toxinidir}/tools/test-requires
-       -r{toxinidir}/tools/pip-requires
-       pep8==1.3.3
+deps = pep8==1.3.3
 commands =
     pep8 --repeat --ignore=E125 --show-source ceilometer setup.py bin/ceilometer-agent-central bin/ceilometer-agent-compute bin/ceilometer-collector bin/ceilometer-api tests

 [testenv:hacking]
-deps = -r{toxinidir}/tools/test-requires
-       -r{toxinidir}/tools/pip-requires
-       pep8==1.3.3
+deps = pep8==1.3.3
 commands =
     python tools/hacking.py --doctest
     python tools/hacking.py --ignore=E12,E711,E721,E712,N403,N404 --show-source \
         --exclude=.venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg .