Fix H405 violations and re-enable gating

H405 is a new rule in hacking 0.9, so fix new violations and
re-enable gating.
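
For reference, H405 flags a multi-line docstring whose summary line is
not separated from the rest by an empty line. The fix pattern applied
throughout is a one-line summary, a blank line, then any further
detail. A minimal before/after sketch (hypothetical method, not one of
the files below):

    # Violates H405: the summary runs onto a second line with no break.
    def collect(self):
        """Collect samples from the agents and inject
        them into the pipeline.
        """

    # Satisfies H405: one-line summary, blank line, then detail.
    def collect(self):
        """Collect samples from the agents.

        Samples are injected into the pipeline.
        """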

Change-Id: I61541fa0a9dc18ad938df54d56c65c972b151622
commit f67bce40fb
parent 92a08daa31
Author: Igor Degtiarov
Date:   2014-07-01 13:41:27 +03:00

55 changed files with 410 additions and 445 deletions
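
Re-enabling gating means the pep8 job fails again on H405 violations,
which amounts to dropping the rule from the flake8 ignore list. A
hypothetical tox.ini fragment (contents assumed, not taken from this
commit):

    [flake8]
    # before: ignore = H405  (rule disabled while violations piled up)
    # after: H405 removed from the ignore list, so the gate enforces it
    ignore =
    show-source = True

The usual lint environment then enforces the rule:

    $ tox -e pep8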


@ -51,6 +51,7 @@ class Resources(object):
class PollingTask(object):
"""Polling task for polling samples and inject into pipeline.
A polling task can be invoked periodically or only once.
"""


@ -86,8 +86,9 @@ class Evaluator(object):
@classmethod
def within_time_constraint(cls, alarm):
"""Check whether the alarm is within at least one of its time
constraints. If there are none, then the answer is yes.
"""Check whether the alarm is within at least one of its time limits.
If there are none, then the answer is yes.
"""
if not alarm.time_constraints:
return True
@ -107,7 +108,9 @@ class Evaluator(object):
@staticmethod
def _is_exact_match(cron, ts):
"""Handle edge case where if the timestamp is the same as the
"""Handle edge in case when both parameters are equal.
Handle edge case where if the timestamp is the same as the
cron point in time to the minute, croniter returns the previous
start, not the current. We can check this by first going one
step back and then one step forward and check if we are
@ -119,8 +122,8 @@ class Evaluator(object):
@abc.abstractmethod
def evaluate(self, alarm):
'''interface definition
"""Interface definition.
evaluate an alarm
alarm Alarm: an instance of the Alarm
'''
"""


@ -38,7 +38,9 @@ class CombinationEvaluator(evaluator.Evaluator):
return alarm.state
def _sufficient_states(self, alarm, states):
"""Ensure there is sufficient data for evaluation,
"""Check for the sufficiency of the data for evaluation.
Ensure that there is sufficient data for evaluation,
transitioning to unknown otherwise.
"""
# note(sileht): alarm can be evaluated only with
@ -56,8 +58,7 @@ class CombinationEvaluator(evaluator.Evaluator):
@staticmethod
def _reason_data(alarm_ids):
"""Create a reason data dictionary for this evaluator type.
"""
"""Create a reason data dictionary for this evaluator type."""
return {'type': 'combination', 'alarm_ids': alarm_ids}
@classmethod
@ -80,8 +81,7 @@ class CombinationEvaluator(evaluator.Evaluator):
'alarm_ids': ",".join(alarms_to_report)}), reason_data
def _transition(self, alarm, underlying_states):
"""Transition alarm state if necessary.
"""
"""Transition alarm state if necessary."""
op = alarm.rule['operator']
if COMPARATORS[op](s == evaluator.ALARM
for __, s in underlying_states):


@ -68,8 +68,7 @@ class ThresholdEvaluator(evaluator.Evaluator):
@staticmethod
def _sanitize(alarm, statistics):
"""Sanitize statistics.
"""
"""Sanitize statistics."""
LOG.debug(_('sanitize stats %s') % statistics)
if alarm.rule.get('exclude_outliers'):
key = operator.attrgetter('count')
@ -103,8 +102,10 @@ class ThresholdEvaluator(evaluator.Evaluator):
return []
def _sufficient(self, alarm, statistics):
"""Ensure there is sufficient data for evaluation,
transitioning to unknown otherwise.
"""Check for the sufficiency of the data for evaluation.
Ensure there is sufficient data for evaluation, transitioning to
unknown otherwise.
"""
sufficient = len(statistics) >= self.quorum
if not sufficient and alarm.state != evaluator.UNKNOWN:
@ -118,8 +119,7 @@ class ThresholdEvaluator(evaluator.Evaluator):
@staticmethod
def _reason_data(disposition, count, most_recent):
"""Create a reason data dictionary for this evaluator type.
"""
"""Create a reason data dictionary for this evaluator type."""
return {'type': 'threshold', 'disposition': disposition,
'count': count, 'most_recent': most_recent}


@ -19,8 +19,7 @@ import math
def mean(s, key=lambda x: x):
"""Calculate the mean of a numeric list.
"""
"""Calculate the mean of a numeric list."""
count = float(len(s))
if count:
return math.fsum(map(key, s)) / count
@ -28,34 +27,29 @@ def mean(s, key=lambda x: x):
def deltas(s, key, m=None):
"""Calculate the squared distances from mean for a numeric list.
"""
"""Calculate the squared distances from mean for a numeric list."""
m = m or mean(s, key)
return [(key(i) - m) ** 2 for i in s]
def variance(s, key, m=None):
"""Calculate the variance of a numeric list.
"""
"""Calculate the variance of a numeric list."""
return mean(deltas(s, key, m))
def stddev(s, key, m=None):
"""Calculate the standard deviation of a numeric list.
"""
"""Calculate the standard deviation of a numeric list."""
return math.sqrt(variance(s, key, m))
def outside(s, key, lower=0.0, upper=0.0):
"""Determine if value falls outside upper and lower bounds.
"""
"""Determine if value falls outside upper and lower bounds."""
v = key(s)
return v < lower or v > upper
def anomalies(s, key, lower=0.0, upper=0.0):
"""Separate anomalous data points from the in-liers.
"""
"""Separate anomalous data points from the in-liers."""
inliers = []
outliers = []
for i in s:


@ -89,8 +89,7 @@ class EntityNotFound(ClientSideError):
class AdvEnum(wtypes.wsproperty):
"""Handle default and mandatory for wtypes.Enum
"""
"""Handle default and mandatory for wtypes.Enum."""
def __init__(self, name, *args, **kwargs):
self._name = '_advenum_%s' % name
self._default = kwargs.pop('default', None)
@ -146,8 +145,7 @@ class _Base(wtypes.Base):
class Link(_Base):
"""A link representation
"""
"""A link representation."""
href = wtypes.text
"The url of a link"
@ -165,8 +163,7 @@ class Link(_Base):
class Query(_Base):
"""Query filter.
"""
"""Query filter."""
# The data types supported by the query.
_supported_types = ['integer', 'float', 'string', 'boolean']
@ -299,11 +296,12 @@ def _get_auth_project(on_behalf_of=None):
def _sanitize_query(query, db_func, on_behalf_of=None):
'''Check the query to see if:
"""Check the query.
See if:
1) the request is coming from admin - then allow full visibility
2) non-admin - make sure that the query includes the requester's
project.
'''
2) non-admin - make sure that the query includes the requester's project.
"""
q = copy.copy(query)
auth_project = _get_auth_project(on_behalf_of)
@ -322,7 +320,7 @@ def _sanitize_query(query, db_func, on_behalf_of=None):
def _verify_query_segregation(query, auth_project=None):
'''Ensure non-admin queries are not constrained to another project.'''
"""Ensure non-admin queries are not constrained to another project."""
auth_project = (auth_project or
acl.get_limited_to_project(pecan.request.headers))
@ -336,9 +334,10 @@ def _verify_query_segregation(query, auth_project=None):
def _validate_query(query, db_func, internal_keys=None,
allow_timestamps=True):
"""Validates the syntax of the query and verifies that the query
request is authorized for the included project.
"""Validates the syntax of the query and verifies the query.
Verification checks whether the query request is authorized for the
included project.
:param query: Query expression that should be validated
:param db_func: the function on the storage level, of which arguments
will form the valid_keys list, which defines the valid fields for a
@ -355,7 +354,6 @@ def _validate_query(query, db_func, internal_keys=None,
search_offset was included without timestamp constraint
:raises: UnknownArgument: if a field name is not a timestamp field, nor
in the list of valid keys
"""
internal_keys = internal_keys or []
@ -408,8 +406,7 @@ def _validate_query(query, db_func, internal_keys=None,
def _validate_timestamp_fields(query, field_name, operator_list,
allow_timestamps):
"""Validates the timestamp related constraints in a query expression, if
there are any.
"""Validates the timestamp related constraints in a query if there are any.
:param query: query expression that may contain the timestamp fields
:param field_name: timestamp name, which should be checked (timestamp,
@ -429,7 +426,6 @@ def _validate_timestamp_fields(query, field_name, operator_list,
field
:raises UnknownArgument: if the timestamp constraint is not allowed in
the query
"""
for item in query:
@ -504,9 +500,9 @@ def _query_to_kwargs(query, db_func, internal_keys=None,
def _validate_groupby_fields(groupby_fields):
"""Checks that the list of groupby fields from request is valid and
if all fields are valid, returns fields with duplicates removed
"""Checks that the list of groupby fields from request is valid.
If all fields are valid, returns fields with duplicates removed.
"""
# NOTE(terriyu): Currently, metadata fields are not supported in our
# group by statistics implementation
@ -574,9 +570,10 @@ def _get_query_timestamps(args=None):
def _flatten_metadata(metadata):
"""Return flattened resource metadata with flattened nested
structures (except nested sets) and with all values converted
to unicode strings.
"""Return flattened resource metadata.
Metadata is returned with flattened nested structures (except nested sets)
and with all values converted to unicode strings.
"""
if metadata:
# After changing recursive_keypairs` output we need to keep
@ -693,8 +690,7 @@ class OldSample(_Base):
class Statistics(_Base):
"""Computed statistics for a query.
"""
"""Computed statistics for a query."""
groupby = {wtypes.text: wtypes.text}
"Dictionary of field names for group, if groupby statistics are requested"
@ -812,8 +808,7 @@ class Aggregate(_Base):
class MeterController(rest.RestController):
"""Manages operations on a single meter.
"""
"""Manages operations on a single meter."""
_custom_actions = {
'statistics': ['GET'],
}
@ -944,8 +939,7 @@ class MeterController(rest.RestController):
class Meter(_Base):
"""One category of measurements.
"""
"""One category of measurements."""
name = wtypes.text
"The unique name for the meter"
@ -1271,8 +1265,7 @@ class ValidatedComplexQuery(object):
self.original_query = query
def validate(self, visibility_field):
"""Validates the query content and does the necessary transformations.
"""
"""Validates the query content and does the necessary conversions."""
if self.original_query.filter is wtypes.Unset:
self.filter_expr = None
else:
@ -1322,8 +1315,7 @@ class ValidatedComplexQuery(object):
def _check_cross_project_references(self, own_project_id,
visibility_field):
"""Do not allow other than own_project_id
"""
"""Do not allow other than own_project_id."""
def check_project_id(subfilter):
op = subfilter.keys()[0]
if (op.lower() not in self.complex_operators
@ -1334,8 +1326,10 @@ class ValidatedComplexQuery(object):
self._traverse_postorder(self.filter_expr, check_project_id)
def _force_visibility(self, visibility_field):
"""If the tenant is not admin insert an extra
"and <visibility_field>=<tenant's project_id>" clause to the query
"""Force visibility field.
If the tenant is not admin, insert an extra
"and <visibility_field>=<tenant's project_id>" clause to the query.
"""
authorized_project = acl.get_limited_to_project(pecan.request.headers)
is_admin = authorized_project is None
@ -1402,8 +1396,7 @@ class ValidatedComplexQuery(object):
class Resource(_Base):
"""An externally defined object for which samples have been received.
"""
"""An externally defined object for which samples have been received."""
resource_id = wtypes.text
"The unique identifier for the resource"
@ -1828,8 +1821,7 @@ class Alarm(_Base):
class AlarmChange(_Base):
"""Representation of an event in an alarm's history
"""
"""Representation of an event in an alarm's history."""
event_id = wtypes.text
"The UUID of the change event"
@ -1872,8 +1864,7 @@ class AlarmChange(_Base):
class AlarmController(rest.RestController):
"""Manages operations on a single alarm.
"""
"""Manages operations on a single alarm."""
_custom_actions = {
'history': ['GET'],
@ -1922,8 +1913,7 @@ class AlarmController(rest.RestController):
@wsme_pecan.wsexpose(Alarm)
def get(self):
"""Return this alarm.
"""
"""Return this alarm."""
return Alarm.from_db_model(self._alarm())
@wsme_pecan.wsexpose(Alarm, body=Alarm)
@ -1987,8 +1977,7 @@ class AlarmController(rest.RestController):
@wsme_pecan.wsexpose(None, status_code=204)
def delete(self):
"""Delete this alarm.
"""
"""Delete this alarm."""
# ensure alarm exists before deleting
alarm = self._alarm()
self.conn.delete_alarm(alarm.alarm_id)
@ -2040,15 +2029,13 @@ class AlarmController(rest.RestController):
@wsme_pecan.wsexpose(state_kind_enum)
def get_state(self):
"""Get the state of this alarm.
"""
"""Get the state of this alarm."""
alarm = self._alarm()
return alarm.state
class AlarmsController(rest.RestController):
"""Manages operations on the alarms collection.
"""
"""Manages operations on the alarms collection."""
@pecan.expose()
def _lookup(self, alarm_id, *remainder):
@ -2192,8 +2179,9 @@ class Trait(_Base):
@staticmethod
def _convert_storage_trait(trait):
"""Helper method to convert a storage model into an API trait
instance. If an API trait instance is passed in, just return it.
"""Helper method to convert a storage model into an API trait instance.
If an API trait instance is passed in, just return it.
"""
if isinstance(trait, Trait):
return trait
@ -2328,8 +2316,7 @@ class EventTypesController(rest.RestController):
@requires_admin
@wsme_pecan.wsexpose([unicode])
def get_all(self):
"""Get all event types.
"""
"""Get all event types."""
return list(pecan.request.storage_conn.get_event_types())
@ -2377,8 +2364,7 @@ class EventsController(rest.RestController):
class QuerySamplesController(rest.RestController):
"""Provides complex query possibilities for samples
"""
"""Provides complex query possibilities for samples."""
@wsme_pecan.wsexpose([Sample], body=ComplexQuery)
def post(self, body):
@ -2405,8 +2391,7 @@ class QuerySamplesController(rest.RestController):
class QueryAlarmHistoryController(rest.RestController):
"""Provides complex query possibilites for alarm history
"""
"""Provides complex query possibilites for alarm history."""
@wsme_pecan.wsexpose([AlarmChange], body=ComplexQuery)
def post(self, body):
"""Define query for retrieving AlarmChange data.
@ -2424,8 +2409,7 @@ class QueryAlarmHistoryController(rest.RestController):
class QueryAlarmsController(rest.RestController):
"""Provides complex query possibilities for alarms
"""
"""Provides complex query possibilities for alarms."""
history = QueryAlarmHistoryController()
@wsme_pecan.wsexpose([Alarm], body=ComplexQuery)
@ -2455,8 +2439,9 @@ def _flatten_capabilities(capabilities):
class Capabilities(_Base):
"""A representation of the API and storage capabilities, usually
constrained by restrictions imposed by the storage driver.
"""A representation of the API and storage capabilities.
Usually constrained by restrictions imposed by the storage driver.
"""
api = {wtypes.text: bool}
@ -2507,13 +2492,13 @@ class Capabilities(_Base):
class CapabilitiesController(rest.RestController):
"""Manages capabilities queries.
"""
"""Manages capabilities queries."""
@wsme_pecan.wsexpose(Capabilities)
def get(self):
"""Returns a flattened dictionary of API capabilities supported
by the currently configured storage driver.
"""Returns a flattened dictionary of API capabilities.
Capabilities supported by the currently configured storage driver.
"""
# variation in API capabilities is effectively determined by
# the lack of strict feature parity across storage drivers


@ -25,8 +25,9 @@ from ceilometer import pipeline
class ConfigHook(hooks.PecanHook):
"""Attach the configuration object to the request
so controllers can get to it.
"""Attach the configuration object to the request.
This allows controllers to access it.
"""
def before(self, state):
@ -43,9 +44,10 @@ class DBHook(hooks.PecanHook):
class PipelineHook(hooks.PecanHook):
'''Create and attach a pipeline to the request so that
new samples can be posted via the /v2/meters/ API.
'''
"""Create and attach a pipeline to the request.
This allows new samples to be posted via the /v2/meters/ API.
"""
def __init__(self):
# this is done here as the cfg options are not available


@ -35,13 +35,11 @@ LOG = log.getLogger(__name__)
class ParsableErrorMiddleware(object):
"""Replace error body with something the client can parse.
"""
"""Replace error body with something the client can parse."""
@staticmethod
def best_match_language(accept_language):
"""Determines best available locale from the Accept-Language
header.
"""Determines best available locale from the Accept-Language header.
:returns: the best language match or None if the 'Accept-Language'
header was not available in the request.
@ -60,8 +58,7 @@ class ParsableErrorMiddleware(object):
state = {}
def replacement_start_response(status, headers, exc_info=None):
"""Overrides the default response to make errors parsable.
"""
"""Overrides the default response to make errors parsable."""
try:
status_code = int(status.split(' ')[0])
state['status_code'] = status_code


@ -27,8 +27,7 @@ class InstanceDiscovery(plugin.DiscoveryBase):
self.nova_cli = nova_client.Client()
def discover(self, param=None):
"""Discover resources to monitor.
"""
"""Discover resources to monitor."""
instances = self.nova_cli.instance_get_all_by_host(cfg.CONF.host)
return [i for i in instances
if getattr(i, 'OS-EXT-STS:vm_state', None) != 'error']


@ -34,8 +34,10 @@ cfg.CONF.register_opts(OPTS)
class ComputeNotificationBase(plugin.NotificationBase):
@staticmethod
def get_targets(conf):
"""Return a sequence of oslo.messaging.Target defining the exchange and
topics to be connected for this plugin.
"""Return a sequence of oslo.messaging.Target
This sequence is defining the exchange and topics to be connected for
this plugin.
"""
return [oslo.messaging.Target(topic=topic,
exchange=conf.nova_control_exchange)


@ -28,8 +28,7 @@ LOG = log.getLogger(__name__)
class ComputeMetricsNotificationBase(notifications.ComputeNotificationBase):
"""Convert compute.metrics.update notifications into Samples
"""
"""Convert compute.metrics.update notifications into Samples."""
event_types = ['compute.metrics.update']
metric = None
sample_type = None


@ -30,8 +30,7 @@ from ceilometer import sample
@six.add_metaclass(abc.ABCMeta)
class UserMetadataAwareInstanceNotificationBase(
notifications.ComputeNotificationBase):
"""Consumes notifications containing instance user metadata.
"""
"""Consumes notifications containing instance user metadata."""
def process_notification(self, message):
instance_properties = self.get_instance_properties(message)
@ -72,8 +71,7 @@ class InstanceScheduled(UserMetadataAwareInstanceNotificationBase):
class ComputeInstanceNotificationBase(
UserMetadataAwareInstanceNotificationBase):
"""Convert compute.instance.* notifications into Samples
"""
"""Convert compute.instance.* notifications into Samples."""
event_types = ['compute.instance.*']
@ -158,8 +156,9 @@ class InstanceFlavor(ComputeInstanceNotificationBase):
class InstanceDelete(ComputeInstanceNotificationBase):
"""Handle the messages sent by the nova notifier plugin
when an instance is being deleted.
"""Handle the messages sent by the nova notifier plugin.
Messages are sent when an instance is being deleted.
"""
event_types = ['compute.instance.delete.samples']


@ -26,8 +26,10 @@ from ceilometer import plugin
@six.add_metaclass(abc.ABCMeta)
class ComputePollster(plugin.PollsterBase):
"""Base class for plugins that support the polling API on the compute node.
"""
"""Base class for plugins.
It supports the polling API on the compute node.
"""
@abc.abstractmethod
def get_samples(self, manager, cache, resources):


@ -35,8 +35,7 @@ INSTANCE_PROPERTIES = [
def _get_metadata_from_object(instance):
"""Return a metadata dictionary for the instance.
"""
"""Return a metadata dictionary for the instance."""
metadata = {
'display_name': instance.name,
'name': getattr(instance, 'OS-EXT-SRV-ATTR:instance_name', u''),


@ -32,8 +32,10 @@ VC_REAL_TIME_SAMPLING_INTERVAL = 20
class VsphereOperations(object):
"""Class to invoke vSphere APIs calls required by various
pollsters, collecting data from VMware infrastructure.
"""Class to invoke vSphere APIs calls.
vSphere APIs calls are required by various pollsters, collecting data from
VMware infrastructure.
"""
def __init__(self, api_session, max_objects):
self._api_session = api_session
@ -65,8 +67,7 @@ class VsphereOperations(object):
session.vim, result)
def get_vm_moid(self, vm_instance_id):
"""Method returns VC MOID of the VM by its NOVA instance ID.
"""
"""Method returns VC MOID of the VM by its NOVA instance ID."""
if vm_instance_id not in self._vm_moid_lookup_map:
self._init_vm_moid_lookup_map()


@ -212,8 +212,7 @@ class EventDefinition(object):
@staticmethod
def _extract_when(body):
"""Extract the generated datetime from the notification.
"""
"""Extract the generated datetime from the notification."""
# NOTE: I am keeping the logic the same as it was in the collector,
# However, *ALL* notifications should have a 'timestamp' field, it's
# part of the notification envelope spec. If this was put here because


@ -21,8 +21,9 @@ import six
@six.add_metaclass(abc.ABCMeta)
class TraitPluginBase(object):
"""Base class for plugins that convert notification fields to
Trait values.
"""Base class for plugins.
It converts notification fields to Trait values.
"""
def __init__(self, **kw):
@ -89,7 +90,6 @@ class TraitPluginBase(object):
if not match_list:
return None
return match_list[0][1]
"""


@ -39,8 +39,10 @@ class ImageBase(plugin.NotificationBase):
@staticmethod
def get_targets(conf):
"""Return a sequence of oslo.messaging.Target defining the exchange and
topics to be connected for this plugin.
"""Return a sequence of oslo.messaging.Target
This sequence is defining the exchange and topics to be connected for
this plugin.
"""
return [oslo.messaging.Target(topic=topic,
exchange=conf.glance_control_exchange)


@ -47,8 +47,10 @@ class HTTPRequest(plugin.NotificationBase):
@staticmethod
def get_targets(conf):
"""Return a sequence of oslo.messaging.Target defining the exchange and
topics to be connected for this plugin.
"""Return a sequence of oslo.messaging.Target
This sequence is defining the exchange and topics to be connected for
this plugin.
"""
return [oslo.messaging.Target(topic=topic, exchange=exchange)
for topic in conf.notification_topics


@ -65,8 +65,10 @@ class NetworkNotificationBase(plugin.NotificationBase):
@staticmethod
def get_targets(conf):
"""Return a sequence of oslo.messaging.Target defining the exchange and
topics to be connected for this plugin.
"""Return a sequence of oslo.messaging.Target
This sequence is defining the exchange and topics to be connected for
this plugin.
"""
return [oslo.messaging.Target(topic=topic,
exchange=conf.neutron_control_exchange)
@ -106,41 +108,41 @@ class NetworkNotificationBase(plugin.NotificationBase):
class Network(NetworkNotificationBase):
"""Listen for Neutron network notifications in order to mediate with the
metering framework.
"""Listen for Neutron network notifications.
Listen in order to mediate with the metering framework.
"""
resource_name = 'network'
class Subnet(NetworkNotificationBase):
"""Listen for Neutron notifications in order to mediate with the
metering framework.
"""Listen for Neutron notifications.
Listen in order to mediate with the metering framework.
"""
resource_name = 'subnet'
class Port(NetworkNotificationBase):
"""Listen for Neutron notifications in order to mediate with the
metering framework.
"""Listen for Neutron notifications.
Listen in order to mediate with the metering framework.
"""
resource_name = 'port'
class Router(NetworkNotificationBase):
"""Listen for Neutron notifications in order to mediate with the
metering framework.
"""Listen for Neutron notifications.
Listen in order to mediate with the metering framework.
"""
resource_name = 'router'
class FloatingIP(NetworkNotificationBase):
"""Listen for Neutron notifications in order to mediate with the
metering framework.
"""Listen for Neutron notifications.
Listen in order to mediate with the metering framework.
"""
resource_name = 'floatingip'
counter_name = 'ip.floating'
@ -148,9 +150,9 @@ class FloatingIP(NetworkNotificationBase):
class Bandwidth(NetworkNotificationBase):
"""Listen for Neutron notifications in order to mediate with the
metering framework.
"""Listen for Neutron notifications.
Listen in order to mediate with the metering framework.
"""
event_types = ['l3.meter']


@ -63,8 +63,7 @@ class _BasePollster(plugin.PollsterBase):
class LBPoolPollster(_BasePollster):
"""Pollster to capture Load Balancer pool status samples.
"""
"""Pollster to capture Load Balancer pool status samples."""
FIELDS = ['admin_state_up',
'description',
'lb_method',
@ -104,8 +103,7 @@ class LBPoolPollster(_BasePollster):
class LBVipPollster(_BasePollster):
"""Pollster to capture Load Balancer Vip status samples.
"""
"""Pollster to capture Load Balancer Vip status samples."""
FIELDS = ['admin_state_up',
'address',
'connection_limit',
@ -148,8 +146,7 @@ class LBVipPollster(_BasePollster):
class LBMemberPollster(_BasePollster):
"""Pollster to capture Load Balancer Member status samples.
"""
"""Pollster to capture Load Balancer Member status samples."""
FIELDS = ['admin_state_up',
'address',
'pool_id',
@ -184,8 +181,7 @@ class LBMemberPollster(_BasePollster):
class LBHealthMonitorPollster(_BasePollster):
"""Pollster to capture Load Balancer Health probes status samples.
"""
"""Pollster to capture Load Balancer Health probes status samples."""
FIELDS = ['admin_state_up',
'delay',
'max_retries',
@ -216,8 +212,10 @@ class LBHealthMonitorPollster(_BasePollster):
@six.add_metaclass(abc.ABCMeta)
class _LBStatsPollster(_BasePollster):
"""Base Statistics pollster capturing the statistics info
and yielding samples for connections and bandwidth.
"""Base Statistics pollster.
It captures the statistics info and yields samples for connections
and bandwidth.
"""
def _get_lb_pools(self):
@ -270,8 +268,7 @@ class _LBStatsPollster(_BasePollster):
class LBActiveConnectionsPollster(_LBStatsPollster):
"""Pollster to capture Active Load Balancer connections.
"""
"""Pollster to capture Active Load Balancer connections."""
@staticmethod
def _get_sample(pool, data):
@ -285,8 +282,7 @@ class LBActiveConnectionsPollster(_LBStatsPollster):
class LBTotalConnectionsPollster(_LBStatsPollster):
"""Pollster to capture Total Load Balancer connections
"""
"""Pollster to capture Total Load Balancer connections."""
@staticmethod
def _get_sample(pool, data):
@ -300,8 +296,7 @@ class LBTotalConnectionsPollster(_LBStatsPollster):
class LBBytesInPollster(_LBStatsPollster):
"""Pollster to capture incoming bytes.
"""
"""Pollster to capture incoming bytes."""
@staticmethod
def _get_sample(pool, data):
@ -315,8 +310,7 @@ class LBBytesInPollster(_LBStatsPollster):
class LBBytesOutPollster(_LBStatsPollster):
"""Pollster to capture outgoing bytes.
"""
"""Pollster to capture outgoing bytes."""
@staticmethod
def _get_sample(pool, data):


@ -77,15 +77,13 @@ class _Base(plugin.CentralPollster):
@staticmethod
def _neaten_url(endpoint, tenant_id):
"""Transform the registered url to standard and valid format.
"""
"""Transform the registered url to standard and valid format."""
return urlparse.urljoin(endpoint.split('/v1')[0].rstrip('/') + '/',
'v1/' + cfg.CONF.reseller_prefix + tenant_id)
class ObjectsPollster(_Base):
"""Iterate over all accounts, using keystone.
"""
"""Iterate over all accounts, using keystone."""
@plugin.check_keystone
def get_samples(self, manager, cache, resources=None):
for tenant, account in self._iter_accounts(manager.keystone, cache):
@ -103,8 +101,7 @@ class ObjectsPollster(_Base):
class ObjectsSizePollster(_Base):
"""Iterate over all accounts, using keystone.
"""
"""Iterate over all accounts, using keystone."""
@plugin.check_keystone
def get_samples(self, manager, cache, resources=None):
for tenant, account in self._iter_accounts(manager.keystone, cache):
@ -122,8 +119,7 @@ class ObjectsSizePollster(_Base):
class ObjectsContainersPollster(_Base):
"""Iterate over all accounts, using keystone.
"""
"""Iterate over all accounts, using keystone."""
@plugin.check_keystone
def get_samples(self, manager, cache, resources=None):
for tenant, account in self._iter_accounts(manager.keystone, cache):
@ -141,8 +137,7 @@ class ObjectsContainersPollster(_Base):
class ContainersObjectsPollster(_Base):
"""Get info about containers using Swift API
"""
"""Get info about containers using Swift API."""
METHOD = 'get'
@ -165,8 +160,7 @@ class ContainersObjectsPollster(_Base):
class ContainersSizePollster(_Base):
"""Get info about containers using Swift API
"""
"""Get info about containers using Swift API."""
METHOD = 'get'


@ -48,8 +48,9 @@ class StackCRUD(plugin.NotificationBase):
@staticmethod
def get_targets(conf):
"""Return a sequence of oslo.messaging.Target defining the exchange and
topics to be connected for this plugin.
"""Return a sequence of oslo.messaging.Target
It is defining the exchange and topics to be connected for this plugin.
"""
return [oslo.messaging.Target(topic=topic,
exchange=conf.heat_control_exchange)


@ -75,14 +75,12 @@ class PublishContext(object):
class Source(object):
"""Represents a source of samples, in effect a set of pollsters
and/or notification handlers emitting samples for a set of matching
meters.
Each source encapsulates meter name matching, polling interval
determination, optional resource enumeration or discovery, and
mapping to one or more sinks for publication.
"""Represents a source of samples.
In effect it is a set of pollsters and/or notification handlers emitting
samples for a set of matching meters. Each source encapsulates meter name
matching, polling interval determination, optional resource enumeration or
discovery, and mapping to one or more sinks for publication.
"""
def __init__(self, cfg):
@ -186,8 +184,9 @@ class Source(object):
class Sink(object):
"""Represents a sink for the transformation and publication of
samples emitted from a related source.
"""Represents a sink for the transformation and publication of samples.
Samples are emitted from a related source.
Each sink config is concerned *only* with the transformation rules
and publication conduits for samples.
@ -209,7 +208,6 @@ class Sink(object):
If no transformers are included in the chain, the publishers are
passed samples directly from the sink which are published unchanged.
"""
def __init__(self, cfg, transformer_manager):
@ -334,8 +332,7 @@ class Sink(object):
class Pipeline(object):
"""Represents a coupling between a sink and a corresponding source.
"""
"""Represents a coupling between a sink and a corresponding source."""
def __init__(self, source, sink):
self.source = source


@ -36,8 +36,7 @@ ExchangeTopics = collections.namedtuple('ExchangeTopics',
class PluginBase(object):
"""Base class for all plugins.
"""
"""Base class for all plugins."""
@six.add_metaclass(abc.ABCMeta)
@ -49,14 +48,16 @@ class NotificationBase(PluginBase):
@abc.abstractproperty
def event_types(self):
"""Return a sequence of strings defining the event types to be
given to this plugin.
"""Return a sequence of strings.
The strings define the event types to be given to this plugin.
"""
def get_targets(self, conf):
"""Return a sequence of oslo.messaging.Target defining the exchange and
topics to be connected for this plugin.
"""Return a sequence of oslo.messaging.Target.
The sequence defines the exchange and topics to be connected for this
plugin.
:param conf: Configuration.
"""
@ -81,9 +82,9 @@ class NotificationBase(PluginBase):
@staticmethod
def _handle_event_type(event_type, event_type_to_handle):
"""Check whether event_type should be handled according to
event_type_to_handle.
"""Check whether event_type should be handled.
The check is done according to event_type_to_handle.
"""
return any(map(lambda e: fnmatch.fnmatch(event_type, e),
event_type_to_handle))
@ -106,12 +107,11 @@ class NotificationBase(PluginBase):
self.to_samples_and_publish(context.get_admin_context(), notification)
def to_samples_and_publish(self, context, notification):
"""Return samples produced by *process_notification* for the given
notification.
"""Return samples produced by *process_notification*.
Samples are produced for the given notification.
:param context: Execution context from the service or RPC call
:param notification: The notification to process.
"""
# TODO(sileht): this will be moved into oslo.messaging
@ -149,5 +149,6 @@ class DiscoveryBase(object):
@abc.abstractmethod
def discover(self, param=None):
"""Discover resources to monitor.
:param param: an optional parameter to guide the discovery
"""


@ -24,9 +24,9 @@ class ProfilerNotifications(plugin.NotificationBase):
event_types = ["profiler.*"]
def get_targets(self, conf):
"""Return a sequence of oslo.messaging.Target defining the exchange and
topics to be connected for this plugin.
"""Return a sequence of oslo.messaging.Target
It is defining the exchange and topics to be connected for this plugin.
:param conf: Configuration.
"""
targets = []


@ -45,8 +45,7 @@ METER_PUBLISH_OPTS = [
def register_opts(config):
"""Register the options for publishing metering messages.
"""
"""Register the options for publishing metering messages."""
config.register_opts(METER_PUBLISH_OPTS, group="publisher_rpc")
@ -63,10 +62,10 @@ def oslo_messaging_is_rabbit():
def override_backend_retry_config(value):
"""Override the retry config option native to the configured
rpc backend (if such a native config option exists).
"""Override the retry config option native to the configured rpc backend.
:param value: the value to override
The override is applied only if such a native config option exists.
:param value: the value to override
"""
# TODO(sileht): ultimately we should add to olso a more generic concept
# of retry config (i.e. not specific to an individual AMQP provider)


@ -40,8 +40,7 @@ METER_PUBLISH_OPTS = [
def register_opts(config):
"""Register the options for publishing metering messages.
"""
"""Register the options for publishing metering messages."""
config.register_opts(METER_PUBLISH_OPTS, group="publisher")
@ -49,8 +48,7 @@ register_opts(cfg.CONF)
def compute_signature(message, secret):
"""Return the signature for a message dictionary.
"""
"""Return the signature for a message dictionary."""
digest_maker = hmac.new(secret, '', hashlib.sha256)
for name, value in utils.recursive_keypairs(message):
if name == 'message_signature':
@ -63,8 +61,10 @@ def compute_signature(message, secret):
def verify_signature(message, secret):
"""Check the signature in the message against the value computed
from the rest of the contents.
"""Check the signature in the message.
The signature is verified against the value computed from the rest of the
contents.
"""
old_sig = message.get('message_signature')
new_sig = compute_signature(message, secret)


@ -92,8 +92,7 @@ LOG = log.getLogger(__name__)
class WorkerException(Exception):
"""Exception for errors relating to service workers
"""
"""Exception for errors relating to service workers."""
def get_workers(name):


@ -27,14 +27,14 @@ from ceilometer.openstack.common import timeutils
def iter_period(start, end, period):
"""Split a time from start to end in periods of a number of seconds. This
function yield the (start, end) time for each period composing the time
passed as argument.
"""Split a time from start to end in periods of a number of seconds.
This function yields the (start, end) time for each period composing the
time passed as argument.
:param start: When the period set starts.
:param end: When the period set ends.
:param period: The duration of the period.
"""
period_start = start
increment = datetime.timedelta(seconds=period)
@ -104,8 +104,7 @@ class Pagination(object):
class Model(object):
"""Base class for storage API models.
"""
"""Base class for storage API models."""
def __init__(self, **kwds):
self.fields = list(kwds)
@ -198,11 +197,11 @@ class Connection(object):
@staticmethod
def clear_expired_metering_data(ttl):
"""Clear expired data from the backend storage system according to the
time-to-live.
"""Clear expired data from the backend storage system.
Clearing occurs according to the time-to-live.
:param ttl: Number of seconds to keep records for.
"""
raise NotImplementedError('Clearing samples not implemented')
@ -211,9 +210,9 @@ class Connection(object):
start_timestamp=None, start_timestamp_op=None,
end_timestamp=None, end_timestamp_op=None,
metaquery=None, resource=None, pagination=None):
"""Return an iterable of models.Resource instances containing
resource information.
"""Return an iterable of models.Resource instances.
Iterable items contain resource information.
:param user: Optional ID for user that owns the resource.
:param project: Optional ID for project that owns the resource.
:param source: Optional source filter.
@ -230,9 +229,9 @@ class Connection(object):
@staticmethod
def get_meters(user=None, project=None, resource=None, source=None,
metaquery=None, pagination=None):
"""Return an iterable of model.Meter instances containing meter
information.
"""Return an iterable of model.Meter instances.
Iterable items contain meter information.
:param user: Optional ID for user that owns the resource.
:param project: Optional ID for project that owns the resource.
:param resource: Optional resource filter.
@ -333,31 +332,29 @@ class Connection(object):
@staticmethod
def get_events(event_filter):
"""Return an iterable of model.Event objects.
"""
"""Return an iterable of model.Event objects."""
raise NotImplementedError('Events not implemented.')
@staticmethod
def get_event_types():
"""Return all event types as an iterable of strings.
"""
"""Return all event types as an iterable of strings."""
raise NotImplementedError('Events not implemented.')
@staticmethod
def get_trait_types(event_type):
"""Return a dictionary containing the name and data type of
the trait type. Only trait types for the provided event_type are
returned.
"""Return a dictionary containing the name and data type of the trait.
Only trait types for the provided event_type are returned.
:param event_type: the type of the Event
"""
raise NotImplementedError('Events not implemented.')
@staticmethod
def get_traits(event_type, trait_type=None):
"""Return all trait instances associated with an event_type. If
trait_type is specified, only return instances of that trait type.
"""Return all trait instances associated with an event_type.
If trait_type is specified, only return instances of that trait type.
:param event_type: the type of the Event to filter by
:param trait_type: the name of the Trait to filter by
"""
@ -402,8 +399,7 @@ class Connection(object):
@classmethod
def get_capabilities(cls):
"""Return an dictionary representing the capabilities of each driver.
"""
"""Return an dictionary with the capabilities of each driver."""
return cls.CAPABILITIES
@classmethod


@ -25,8 +25,7 @@ LOG = log.getLogger(__name__)
class MTable(object):
"""HappyBase.Table mock
"""
"""HappyBase.Table mock."""
def __init__(self, name, families):
self.name = name
self.families = families
@ -119,7 +118,9 @@ class MTable(object):
@staticmethod
def SingleColumnValueFilter(args, rows):
"""This method is called from scan() when 'SingleColumnValueFilter'
"""This is filter for testing "in-memory HBase".
This method is called from scan() when 'SingleColumnValueFilter'
is found in the 'filter' argument.
"""
op = args[2]
@ -200,8 +201,10 @@ class MTable(object):
@staticmethod
def QualifierFilter(args, rows):
"""This method is called from scan() when 'QualifierFilter'
is found in the 'filter' argument
"""This is filter for testing "in-memory HBase".
This method is called from scan() when 'QualifierFilter' is found in
the 'filter' argument
"""
op = args[0]
value = args[1]
@ -235,8 +238,7 @@ class MConnectionPool(object):
class MConnection(object):
"""HappyBase.Connection mock
"""
"""HappyBase.Connection mock."""
def __init__(self):
self.tables = {}


@ -51,9 +51,9 @@ def timestamp(dt, reverse=True):
def make_events_query_from_filter(event_filter):
"""Return start and stop row for filtering and a query which based on the
selected parameter.
"""Return start and stop row for filtering and a query.
The query is based on the selected parameter.
:param event_filter: storage.EventFilter object.
"""
q = []
@ -84,9 +84,9 @@ def make_events_query_from_filter(event_filter):
def make_timestamp_query(func, start=None, start_op=None, end=None,
end_op=None, bounds_only=False, **kwargs):
"""Return a filter start and stop row for filtering and a query
which based on the fact that CF-name is 'rts'.
"""Return a filter start and stop row for filtering and a query.
The query is based on the fact that CF-name is 'rts'.
:param start: Optional start timestamp
:param start_op: Optional start timestamp operator, like gt, ge
:param end: Optional end timestamp
@ -245,8 +245,8 @@ def make_meter_query_for_resource(start_timestamp, start_timestamp_op,
end_timestamp, end_timestamp_op, source,
query=None):
"""This method is used when Resource table should be filtered by meters.
In this method we are looking into all qualifiers with m_ prefix.
In this method we are looking into all qualifiers with m_ prefix.
:param start_timestamp: meter's timestamp start range.
:param start_timestamp_op: meter's start time operator, like ge, gt.
:param end_timestamp: meter's timestamp end range.
@ -279,8 +279,9 @@ def make_meter_query_for_resource(start_timestamp, start_timestamp_op,
def make_general_rowkey_scan(rts_start=None, rts_end=None, some_id=None):
"""If it's filter on some_id without start and end,
start_row = some_id while end_row = some_id + MAX_BYTE
"""If it's filter on some_id without start and end.
start_row = some_id while end_row = some_id + MAX_BYTE.
"""
if some_id is None:
return None, None
@ -293,33 +294,32 @@ def make_general_rowkey_scan(rts_start=None, rts_end=None, some_id=None):
def format_meter_reference(c_name, c_type, c_unit, rts, source):
"""Format reference to meter data.
"""
"""Format reference to meter data."""
return "%s+%s+%s!%s!%s" % (rts, source, c_name, c_type, c_unit)
def timestamp_from_record_tuple(record):
"""Extract timestamp from HBase tuple record
"""
"""Extract timestamp from HBase tuple record."""
return record[0]['timestamp']
def resource_id_from_record_tuple(record):
"""Extract resource_id from HBase tuple record
"""
"""Extract resource_id from HBase tuple record."""
return record[0]['resource_id']
def deserialize_entry(entry, get_raw_meta=True):
"""Return a list of flatten_result, sources, meters and metadata
flatten_result contains a dict of simple structures such as 'resource_id':1
"""Return a list of flatten_result, sources, meters and metadata.
Flatten_result contains a dict of simple structures such as 'resource_id':1
sources/meters are the lists of sources and meters correspondingly.
metadata is metadata dict. This dict may be returned as flattened if
get_raw_meta is False.
:param entry: entry from HBase, without row name and timestamp
:param get_raw_meta: If true then raw metadata will be returned,
if False metadata will be constructed from 'f:r_metadata.' fields
if False metadata will be constructed from
'f:r_metadata.' fields
"""
flatten_result = {}
sources = []


@ -304,10 +304,10 @@ class Connection(pymongo_base.Connection):
def get_meter_statistics(self, sample_filter, period=None, groupby=None,
aggregate=None):
"""Return an iterable of models.Statistics instance containing meter
statistics described by the query parameters.
"""Return an iterable of models.Statistics instance.
The filter must have a meter value set.
Items contain meter statistics described by the query
parameters. The filter must have a meter value set.
"""
if (groupby and
set(groupby) - set(['user_id', 'project_id',


@ -306,8 +306,7 @@ class Connection(base.Connection):
yield alarm_models.AlarmChange(**stored_entry)
def record_alarm_change(self, alarm_change):
"""Record alarm change event.
"""
"""Record alarm change event."""
alarm_change_dict = hbase_utils.serialize_entry(alarm_change)
ts = alarm_change.get('timestamp') or datetime.datetime.now()
rts = hbase_utils.timestamp(ts)
@ -513,17 +512,16 @@ class Connection(base.Connection):
def get_meter_statistics(self, sample_filter, period=None, groupby=None,
aggregate=None):
"""Return an iterable of models.Statistics instances containing meter
statistics described by the query parameters.
"""Return an iterable of models.Statistics instances.
The filter must have a meter value set.
Items contain meter statistics described by the query
parameters. The filter must have a meter value set.
.. note::
Due to HBase limitations the aggregations are implemented
in the driver itself, therefore this method will be quite slow
because of all the Thrift traffic it is going to create.
"""
if groupby:
raise NotImplementedError("Group by not implemented.")
@ -714,9 +712,9 @@ class Connection(base.Connection):
yield {'name': name, 'data_type': data_type}
def get_traits(self, event_type, trait_type=None):
"""Return all trait instances associated with an event_type. If
trait_type is specified, only return instances of that trait type.
"""Return all trait instances associated with an event_type.
If trait_type is specified, only return instances of that trait type.
:param event_type: the type of the Event to filter by
:param trait_type: the name of the Trait to filter by
"""


@ -25,8 +25,7 @@ LOG = log.getLogger(__name__)
class Connection(base.Connection):
"""Log the data.
"""
"""Log the data."""
def upgrade(self):
pass
@ -38,7 +37,7 @@ class Connection(base.Connection):
"""Write the data to the backend storage system.
:param data: a dictionary such as returned by
ceilometer.meter.meter_message_from_counter
ceilometer.meter.meter_message_from_counter.
"""
LOG.info(_('metering data %(counter_name)s for %(resource_id)s: '
'%(counter_volume)s')
@ -47,11 +46,10 @@ class Connection(base.Connection):
'counter_volume': data['counter_volume']}))
def clear_expired_metering_data(self, ttl):
"""Clear expired data from the backend storage system according to the
time-to-live.
"""Clear expired data from the backend storage system.
Clearing occurs according to the time-to-live.
:param ttl: Number of seconds to keep records for.
"""
LOG.info(_("Dropping data with TTL %d"), ttl)
@ -104,7 +102,9 @@ class Connection(base.Connection):
return []
def get_samples(self, sample_filter):
"""Return an iterable of samples as created by
"""Return an iterable of samples.
Items are created by
:func:`ceilometer.meter.meter_message_from_counter`.
"""
return []
@ -112,8 +112,8 @@ class Connection(base.Connection):
def get_meter_statistics(self, sample_filter, period=None, groupby=None,
aggregate=None):
"""Return a dictionary containing meter statistics.
described by the query parameters.
Meter statistics are described by the query parameters.
The filter must have a meter value set.
{ 'min':
@ -128,24 +128,20 @@ class Connection(base.Connection):
'duration_start':
'duration_end':
}
"""
return []
def get_alarms(self, name=None, user=None, state=None, meter=None,
project=None, enabled=None, alarm_id=None, pagination=None):
"""Yields a lists of alarms that match filters
"""
"""Yields a lists of alarms that match filters."""
return []
def create_alarm(self, alarm):
"""Create alarm.
"""
"""Create alarm."""
return alarm
def update_alarm(self, alarm):
"""update alarm
"""
"""Update alarm."""
return alarm
def delete_alarm(self, alarm_id):


@ -534,11 +534,10 @@ class Connection(pymongo_base.Connection):
self.db.meter.insert(record)
def clear_expired_metering_data(self, ttl):
"""Clear expired data from the backend storage system according to the
time-to-live.
"""Clear expired data from the backend storage system.
Clearing occurs according to the time-to-live.
:param ttl: Number of seconds to keep records for.
"""
results = self.db.meter.group(
key={},
@ -683,9 +682,9 @@ class Connection(pymongo_base.Connection):
start_timestamp, start_timestamp_op,
end_timestamp, end_timestamp_op,
metaquery, resource):
"""Return an iterable of models.Resource instances constrained
by sample timestamp.
"""Return an iterable of models.Resource instances
Items are constrained by sample timestamp.
:param query: project/user/source query
:param start_timestamp: modified timestamp start range.
:param start_timestamp_op: start time operator, like gt, ge.
@ -743,9 +742,9 @@ class Connection(pymongo_base.Connection):
self.db[out].drop()
def _get_floating_resources(self, query, metaquery, resource):
"""Return an iterable of models.Resource instances unconstrained
by timestamp.
"""Return an iterable of models.Resource instances
Items are unconstrained by timestamp.
:param query: project/user/source query
:param metaquery: dict with metadata to match on.
:param resource: resource filter.
@ -844,11 +843,10 @@ class Connection(pymongo_base.Connection):
def get_meter_statistics(self, sample_filter, period=None, groupby=None,
aggregate=None):
"""Return an iterable of models.Statistics instance containing meter
statistics described by the query parameters.
The filter must have a meter value set.
"""Return an iterable of models.Statistics instance.
Items are containing meter statistics described by the query
parameters. The filter must have a meter value set.
"""
if (groupby and
set(groupby) - set(['user_id', 'project_id',


@ -304,11 +304,10 @@ class Connection(base.Connection):
value=v))
def clear_expired_metering_data(self, ttl):
"""Clear expired data from the backend storage system according to the
time-to-live.
"""Clear expired data from the backend storage system.
Clearing occurs according to the time-to-live.
:param ttl: Number of seconds to keep records for.
"""
session = self._engine_facade.get_session()
@ -609,11 +608,10 @@ class Connection(base.Connection):
def get_meter_statistics(self, sample_filter, period=None, groupby=None,
aggregate=None):
"""Return an iterable of api_models.Statistics instances containing
meter statistics described by the query parameters.
The filter must have a meter value set.
"""Return an iterable of api_models.Statistics instances.
Items contain meter statistics described by the query
parameters. The filter must have a meter value set.
"""
if groupby:
for group in groupby:
@ -691,6 +689,7 @@ class Connection(base.Connection):
def get_alarms(self, name=None, user=None, state=None, meter=None,
project=None, enabled=None, alarm_id=None, pagination=None):
"""Yields a lists of alarms that match filters
:param user: Optional ID for user that owns the resource.
:param state: Optional string for alarm state.
:param meter: Optional string for alarms associated with meter.
@ -775,16 +774,14 @@ class Connection(base.Connection):
timestamp=row.timestamp)
def query_alarms(self, filter_expr=None, orderby=None, limit=None):
"""Yields a lists of alarms that match filter
"""
"""Yields a lists of alarms that match filter."""
return self._retrieve_data(filter_expr, orderby, limit, models.Alarm)
def _retrieve_alarm_history(self, query):
return (self._row_to_alarm_change_model(x) for x in query.all())
def query_alarm_history(self, filter_expr=None, orderby=None, limit=None):
"""Return an iterable of model.AlarmChange objects.
"""
"""Return an iterable of model.AlarmChange objects."""
return self._retrieve_data(filter_expr,
orderby,
limit,
@ -849,8 +846,7 @@ class Connection(base.Connection):
return self._retrieve_alarm_history(query)
def record_alarm_change(self, alarm_change):
"""Record alarm change event.
"""
"""Record alarm change event."""
session = self._engine_facade.get_session()
with session.begin():
alarm_change_row = models.AlarmChange(
@ -859,8 +855,9 @@ class Connection(base.Connection):
session.add(alarm_change_row)
def _get_or_create_trait_type(self, trait_type, data_type, session=None):
"""Find if this trait already exists in the database, and
if it does not, create a new entry in the trait type table.
"""Find if this trait already exists in the database.
If it does not, create a new entry in the trait type table.
"""
if session is None:
session = self._engine_facade.get_session()
@ -889,10 +886,9 @@ class Connection(base.Connection):
return models.Trait(trait_type, event, **values)
def _get_or_create_event_type(self, event_type, session=None):
"""Here, we check to see if an event type with the supplied
name already exists. If not, we create it and return the record.
"""Check if an event type with the supplied name is already exists.
This may result in a flush.
If not, we create it and return the record. This may result in a flush.
"""
if session is None:
session = self._engine_facade.get_session()
@ -905,8 +901,7 @@ class Connection(base.Connection):
return et
def _record_event(self, session, event_model):
"""Store a single Event, including related Traits.
"""
"""Store a single Event, including related Traits."""
with session.begin(subtransactions=True):
event_type = self._get_or_create_event_type(event_model.event_type,
session=session)
@ -1063,8 +1058,7 @@ class Connection(base.Connection):
return sorted(event_models, key=operator.attrgetter('generated'))
def get_event_types(self):
"""Return all event types as an iterable of strings.
"""
"""Return all event types as an iterable of strings."""
session = self._engine_facade.get_session()
with session.begin():
@ -1075,10 +1069,9 @@ class Connection(base.Connection):
yield name[0]
def get_trait_types(self, event_type):
"""Return a dictionary containing the name and data type of
the trait type. Only trait types for the provided event_type are
returned.
"""Return a dictionary containing the name and data type of the trait.
Only trait types for the provided event_type are returned.
:param event_type: the type of the Event
"""
session = self._engine_facade.get_session()
@ -1106,9 +1099,9 @@ class Connection(base.Connection):
yield {'name': desc_, 'data_type': dtype}
def get_traits(self, event_type, trait_type=None):
"""Return all trait instances associated with an event_type. If
trait_type is specified, only return instances of that trait type.
"""Return all trait instances associated with an event_type.
If trait_type is specified, only return instances of that trait type.
:param event_type: the type of the Event to filter by
:param trait_type: the name of the Trait to filter by
"""


@ -57,8 +57,9 @@ class Event(base.Model):
class Trait(base.Model):
"""A Trait is a key/value pair of data on an Event. The value is variant
record of basic data types (int, date, float, etc).
"""A Trait is a key/value pair of data on an Event.
The value is a variant record of basic data types (int, date, float, etc).
"""
NONE_TYPE = 0
@ -110,8 +111,7 @@ class Trait(base.Model):
class Resource(base.Model):
"""Something for which sample data has been collected.
"""
"""Something for which sample data has been collected."""
def __init__(self, resource_id, project_id,
first_sample_timestamp,
@ -139,8 +139,7 @@ class Resource(base.Model):
class Meter(base.Model):
"""Definition of a meter for which sample data has been collected.
"""
"""Definition of a meter for which sample data has been collected."""
def __init__(self, name, type, unit, resource_id, project_id, source,
user_id):
@ -166,8 +165,7 @@ class Meter(base.Model):
class Sample(base.Model):
"""One collected data point.
"""
"""One collected data point."""
def __init__(self,
source,
counter_name, counter_type, counter_unit, counter_volume,
@ -211,8 +209,7 @@ class Sample(base.Model):
class Statistics(base.Model):
"""Computed statistics based on a set of sample data.
"""
"""Computed statistics based on a set of sample data."""
def __init__(self, unit,
period, period_start, period_end,
duration, duration_start, duration_end,


@ -40,10 +40,10 @@ cfg.CONF.import_opt('retry_interval', 'ceilometer.openstack.common.db.options',
def make_timestamp_range(start, end,
start_timestamp_op=None, end_timestamp_op=None):
"""Given two possible datetimes and their operations, create the query
document to find timestamps within that range.
By default, using $gte for the lower bound and $lt for the
upper bound.
"""Create the query document to find timestamps within that range.
This is done by given two possible datetimes and their operations.
By default, using $gte for the lower bound and $lt for the upper bound.
"""
ts_range = {}
@ -66,7 +66,7 @@ def make_timestamp_range(start, end,
def make_query_from_filter(sample_filter, require_meter=True):
"""Return a query dictionary based on the settings in the filter.
:param filter: SampleFilter instance
:param sample_filter: SampleFilter instance
:param require_meter: If true and the filter does not have a meter,
raise an error.
"""
@ -181,9 +181,10 @@ class QueryTransformer(object):
@staticmethod
def _move_negation_to_leaf(condition):
"""Moves every not operator to the leafs by
applying the De Morgan rules and anihilating
double negations
"""Moves every not operator to the leafs.
Moving is going by applying the De Morgan rules and anihilating
double negations.
"""
def _apply_de_morgan(tree, negated_subtree, negated_op):
if negated_op == "and":
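A self-contained sketch of the rewrite described above, on a toy dict-based tree (the representation is assumed for illustration):

def move_negation_to_leaf(tree):
    # Push "not" down through "and"/"or" with De Morgan's laws and
    # cancel double negations on the way.
    if not isinstance(tree, dict):
        return tree
    op, children = next(iter(tree.items()))
    if op == "not" and isinstance(children, dict):
        inner_op, inner = next(iter(children.items()))
        if inner_op == "not":                      # not(not(x)) == x
            return move_negation_to_leaf(inner)
        if inner_op in ("and", "or"):              # De Morgan
            flipped = "or" if inner_op == "and" else "and"
            return {flipped: [move_negation_to_leaf({"not": c})
                              for c in inner]}
        return tree                                # negation already at a leaf
    if op in ("and", "or"):
        return {op: [move_negation_to_leaf(c) for c in children]}
    return tree

# move_negation_to_leaf({"not": {"and": [{"=": 1}, {"=": 2}]}})
# => {"or": [{"not": {"=": 1}}, {"not": {"=": 2}}]}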

View File

@ -50,8 +50,7 @@ AVAILABLE_STORAGE_CAPABILITIES = {
class Connection(base.Connection):
"""Base Connection class for MongoDB and DB2 drivers.
"""
"""Base Connection class for MongoDB and DB2 drivers."""
CAPABILITIES = utils.update_nested(base.Connection.CAPABILITIES,
COMMON_AVAILABLE_CAPABILITIES)
@ -103,8 +102,7 @@ class Connection(base.Connection):
)
def update_alarm(self, alarm):
"""update alarm
"""
"""Update alarm."""
data = alarm.as_dict()
self.db.alarm.update(
@ -121,13 +119,11 @@ class Connection(base.Connection):
create_alarm = update_alarm
def delete_alarm(self, alarm_id):
"""Delete an alarm
"""
"""Delete an alarm."""
self.db.alarm.remove({'alarm_id': alarm_id})
def record_alarm_change(self, alarm_change):
"""Record alarm change event.
"""
"""Record alarm change event."""
self.db.alarm_history.insert(alarm_change.copy())
def get_samples(self, sample_filter, limit=None):
@ -148,6 +144,7 @@ class Connection(base.Connection):
def get_alarms(self, name=None, user=None, state=None, meter=None,
project=None, enabled=None, alarm_id=None, pagination=None):
"""Yields a lists of alarms that match filters
:param name: The Alarm name.
:param user: Optional ID for user that owns the resource.
:param state: Optional string for alarm state.
@ -231,14 +228,12 @@ class Connection(base.Connection):
return self._retrieve_data(filter_expr, orderby, limit, models.Meter)
def query_alarms(self, filter_expr=None, orderby=None, limit=None):
"""Return an iterable of model.Alarm objects.
"""
"""Return an iterable of model.Alarm objects."""
return self._retrieve_data(filter_expr, orderby, limit,
alarm_models.Alarm)
def query_alarm_history(self, filter_expr=None, orderby=None, limit=None):
"""Return an iterable of model.AlarmChange objects.
"""
"""Return an iterable of model.AlarmChange objects."""
return self._retrieve_data(filter_expr,
orderby,
limit,
@ -313,8 +308,9 @@ class Connection(base.Connection):
@classmethod
def _ensure_encapsulated_rule_format(cls, alarm):
"""This ensure the alarm returned by the storage have the correct
format. The previous format looks like:
"""Ensure the alarm returned by the storage have the correct format.
The previous format looks like:
{
'alarm_id': '0ld-4l3rt',
'enabled': True,

View File

@ -35,7 +35,7 @@ from ceilometer import utils
class JSONEncodedDict(TypeDecorator):
"Represents an immutable structure as a json-encoded string."
"""Represents an immutable structure as a json-encoded string."""
impl = String
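For context, this follows the standard SQLAlchemy TypeDecorator pattern, roughly as below (a sketch, not the module's full definition):

import json

from sqlalchemy.types import String, TypeDecorator


class JSONEncodedDict(TypeDecorator):
    """Represents an immutable structure as a json-encoded string."""
    impl = String

    def process_bind_param(self, value, dialect):
        # json-encode on the way into the database
        return json.dumps(value) if value is not None else None

    def process_result_value(self, value, dialect):
        # decode back to a dict on the way out
        return json.loads(value) if value is not None else None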
@ -194,8 +194,10 @@ class Sample(Base):
class MeterSample(Base):
"""Helper model as many of the filters work against Sample data
joined with Meter data.
"""Helper model.
It's needed as many of the filters work against Sample data joined with
Meter data.
"""
meter = Meter.__table__
sample = Sample.__table__
@ -296,12 +298,12 @@ class Event(Base):
class TraitType(Base):
"""Types of event traits. A trait type includes a description
and a data type. Uniqueness is enforced compositely on the
data_type and desc fields. This is to accommodate cases, such as
'generated', which, depending on the corresponding event,
could be a date, a boolean, or a float.
"""Types of event traits.
A trait type includes a description and a data type. Uniqueness is
enforced compositely on the data_type and desc fields. This is to
accommodate cases, such as 'generated', which, depending on the
corresponding event, could be a date, a boolean, or a float.
"""
__tablename__ = 'trait_type'
__table_args__ = (
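The composite uniqueness described above is the usual UniqueConstraint pattern in __table_args__; a minimal sketch with guessed column names:

from sqlalchemy import Column, Integer, String, UniqueConstraint
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class TraitTypeSketch(Base):
    __tablename__ = 'trait_type_sketch'
    # One (desc, data_type) pair per row, so a description such as
    # 'generated' may exist several times with different data types.
    __table_args__ = (UniqueConstraint('desc', 'data_type'),)
    id = Column(Integer, primary_key=True)
    desc = Column(String(255))
    data_type = Column(Integer)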

View File

@ -30,9 +30,10 @@ cfg.CONF.import_opt("policy_file", "ceilometer.openstack.common.policy")
class FunctionalTest(db_test_base.TestBase):
"""Used for functional tests of Pecan controllers where you need to
test your literal application and its integration with the
framework.
"""Used for functional tests of Pecan controllers.
Use this when you need to test your literal application and its
integration with the framework.
"""
PATH_PREFIX = ''

View File

@ -855,19 +855,23 @@ class TestAlarms(v2.FunctionalTest,
self.fail("Alarm not found")
def test_post_alarm_as_admin_explicit_project_constraint(self):
"""Test the creation of an alarm as admin for another project,
with an explicit query constraint on the owner's project ID.
"""Test the creation of an alarm as admin for another project.
The request includes an explicit query constraint on the owner's
project ID.
"""
self._do_test_post_alarm_as_admin(True)
def test_post_alarm_as_admin_implicit_project_constraint(self):
"""Test the creation of an alarm as admin for another project,
without an explicit query constraint on the owner's project ID.
"""Test the creation of an alarm as admin for another project.
The request omits an explicit query constraint on the owner's project ID.
"""
self._do_test_post_alarm_as_admin(False)
def test_post_alarm_as_admin_no_user(self):
"""Test the creation of an alarm as admin for another project but
"""Test the creation of an alarm.
Test the creation of an alarm as admin for another project but
forgetting to set the user ID.
"""
json = {
@ -903,7 +907,9 @@ class TestAlarms(v2.FunctionalTest,
self._verify_alarm(json, alarms[0], 'added_alarm')
def test_post_alarm_as_admin_no_project(self):
"""Test the creation of an alarm as admin for another project but
"""Test the creation of an alarm.
Test the creation of an alarm as admin for another project but
forgetting to set the project ID.
"""
json = {
@ -965,7 +971,9 @@ class TestAlarms(v2.FunctionalTest,
def _do_test_post_alarm_as_nonadmin_on_behalf_of_another(self,
identifiers):
"""Test that posting an alarm as non-admin on behalf of another
"""Test posting an alarm.
Test that posting an alarm as non-admin on behalf of another
user/project fails with an explicit 401 instead of reverting
to the requestor's identity.
"""
@ -995,7 +1003,9 @@ class TestAlarms(v2.FunctionalTest,
self._do_test_post_alarm_as_nonadmin_on_behalf_of_another(identifiers)
def _do_test_post_alarm_as_nonadmin_on_behalf_of_self(self, identifiers):
"""Test posting an alarm as non-admin on behalf of own user/project
"""Test posting an alarm.
Test that posting an alarm as non-admin on behalf of one's own
user/project creates an alarm associated with the requestor's identity.
"""
json = self._alarm_representation_owned_by(identifiers)
@ -1054,7 +1064,9 @@ class TestAlarms(v2.FunctionalTest,
self.fail("Alarm not found")
def test_post_combination_alarm_as_user_with_unauthorized_alarm(self):
"""Test that post a combination alarm as normal user/project
"""Test posting a combination alarm.
Test that posting a combination alarm as a normal user/project
with an alarm_id unauthorized for this project/user fails.
"""
json = {
@ -1081,7 +1093,9 @@ class TestAlarms(v2.FunctionalTest,
['faultstring'])
def test_post_combination_alarm_as_admin_on_behalf_of_an_other_user(self):
"""Test that post a combination alarm as admin on behalf of an other
"""Test posting a combination alarm.
Test that posting a combination alarm as admin on behalf of another
user/project with an alarm_id unauthorized for this project/user fails.
"""
json = {
@ -1112,7 +1126,9 @@ class TestAlarms(v2.FunctionalTest,
['faultstring'])
def test_post_combination_alarm_with_reasonable_description(self):
"""Test that post a combination alarm with two blanks around the
"""Test posting a combination alarm.
Test posting a combination alarm with two blanks around the
operator in the alarm description.
"""
json = {
@ -1144,8 +1160,7 @@ class TestAlarms(v2.FunctionalTest,
self._do_post_combination_alarm_as_admin_success(True)
def test_post_combination_alarm_with_threshold_rule(self):
"""Test the creation of an combination alarm with threshold rule.
"""
"""Test the creation of an combination alarm with threshold rule."""
json = {
'enabled': False,
'name': 'added_alarm',
@ -1176,8 +1191,7 @@ class TestAlarms(v2.FunctionalTest,
resp.json['error_message']['faultstring'])
def test_post_threshold_alarm_with_combination_rule(self):
"""Test the creation of an threshold alarm with combination rule.
"""
"""Test the creation of an threshold alarm with combination rule."""
json = {
'enabled': False,
'name': 'added_alarm',
@ -1201,7 +1215,9 @@ class TestAlarms(v2.FunctionalTest,
resp.json['error_message']['faultstring'])
def _do_post_combination_alarm_as_admin_success(self, owner_is_set):
"""Test that post a combination alarm as admin on behalf of nobody
"""Test posting a combination alarm.
Test that posting a combination alarm as admin on behalf of nobody,
with an alarm_id of someone else and with owner set or not, succeeds.
"""
json = {
@ -1240,8 +1256,7 @@ class TestAlarms(v2.FunctionalTest,
self.fail("Alarm not found")
def test_post_invalid_alarm_combination(self):
"""Test that post a combination alarm with a not existing alarm id
"""
"""Test that post a combination alarm with a not existing alarm id."""
json = {
'enabled': False,
'name': 'added_alarm',
@ -1431,8 +1446,7 @@ class TestAlarms(v2.FunctionalTest,
self.assertEqual(200, resp.status_code)
def test_put_alarm_with_existing_name(self):
"""Test that update a threshold alarm with an existing name.
"""
"""Test that update a threshold alarm with an existing name."""
json = {
'enabled': False,
'name': 'name1',

View File

@ -166,6 +166,7 @@ class TestPostSamples(v2.FunctionalTest,
def test_multiple_samples(self):
"""Send multiple samples.
The use case here is to reduce the chatter and send the counters
at a slower cadence.
"""
@ -269,8 +270,10 @@ class TestPostSamples(v2.FunctionalTest,
self.assertEqual(s, self.published[0][x])
def test_multiple_samples_multiple_sources(self):
"""Do accept a single post with some multiples sources
with some of them null
"""Test posting with special conditions.
Do accept a single post with some multiples sources with some of them
null.
"""
s1 = [{'counter_name': 'my_counter_name',
'counter_type': 'gauge',
@ -317,8 +320,7 @@ class TestPostSamples(v2.FunctionalTest,
self.assertEqual(s, self.published[0][x])
def test_missing_project_user_id(self):
"""Ensure missing project & user IDs are defaulted appropriately.
"""
"""Ensure missing project & user IDs are defaulted appropriately."""
s1 = [{'counter_name': 'my_counter_name',
'counter_type': 'gauge',
'counter_unit': 'instance',

View File

@ -91,8 +91,7 @@ class TestRunTasks(agentbase.BaseAgentManagerTestCase):
self.assertTrue(self.Pollster.resources)
def test_when_keystone_fail(self):
"""Test for bug 1316532.
"""
"""Test for bug 1316532."""
self.useFixture(mockpatch.Patch(
'keystoneclient.v2_0.client.Client',
side_effect=Exception))

View File

@ -142,7 +142,8 @@ class TestBase(testscenarios.testcase.WithScenarios, test_base.BaseTestCase):
def run_with(*drivers):
Used to mark tests that are only applicable to a certain db driver.
Skips test if driver is not available
Skips test if driver is not available.
"""
def decorator(test):
if isinstance(test, type) and issubclass(test, TestBase):
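A hedged sketch of what such a marker decorator can look like (the attribute name _supported_drivers is hypothetical, not the module's actual one):

def run_with(*drivers):
    # Record the supported drivers on the test so that setUp can skip
    # it when the active driver is not among them.
    def decorator(test):
        test._supported_drivers = drivers
        return test
    return decorator

# @run_with('mongodb', 'db2')
# class TestEvents(TestBase):
#     ...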

View File

@ -19,9 +19,7 @@ from ceilometer.hardware.inspector import base
class InspectorBaseTest(object):
"""Subclass must set self.inspector and self.host in
self.setUp()
"""
"""Subclass must set self.inspector and self.host in self.setUp()."""
cpu = [base.CPUStats(cpu_1_min=0.1,
cpu_5_min=0.2,

View File

@ -39,11 +39,12 @@ from ceilometer.transformer import conversions
@six.add_metaclass(abc.ABCMeta)
class BasePipelineTestCase(test.BaseTestCase):
def fake_tem_init(self):
"""Fake a transformerManager for pipeline
The faked entry point setting is below:
update: TransformerClass
except: TransformerClassException
drop: TransformerClassDrop
"""Fake a transformerManager for pipeline.
The faked entry point setting is below:
update: TransformerClass
except: TransformerClassException
drop: TransformerClassDrop
"""
pass

View File

@ -167,9 +167,7 @@ class TestPublish(tests_base.BaseTestCase):
self.assertEqual(expected, prepare.mock_calls)
def test_published_concurrency(self):
"""This test the concurrent access to the local queue
of the rpc publisher
"""
"""Test concurrent access to the local queue of the rpc publisher."""
publisher = rpc.RPCPublisher(network_utils.urlsplit('rpc://'))
cast_context = mock.MagicMock()

View File

@ -2705,8 +2705,9 @@ class ComplexAlarmHistoryQueryTest(AlarmTestBase,
class EventTestBase(tests_db.TestBase,
tests_db.MixinTestsWithBackendScenarios):
"""Separate test base class because we don't want to
inherit all the Meter stuff.
"""Separate test base class.
We don't want to inherit all the Meter stuff.
"""
def setUp(self):

View File

@ -19,9 +19,9 @@ from ceilometer import transformer
class TransformerAccumulator(transformer.TransformerBase):
"""Transformer that accumulates sample until a threshold, and then flush
them out in the wild.
"""Transformer that accumulates sample until a threshold.
And then flush them out in the wild.
"""
def __init__(self, size=1, **kwargs):

View File

@ -28,10 +28,11 @@ LOG = log.getLogger(__name__)
class Namespace(object):
"""Encapsulates the namespace wrapping the evaluation of the
configured scale factor. This allows nested dicts to be
accessed in the attribute style, and missing attributes
to yield false when used in a boolean expression.
"""Encapsulates the namespace.
Encapsulation is going by wrapping the evaluation of the configured scale
factor. This allows nested dicts to be accessed in the attribute style,
and missing attributes to yield false when used in a boolean expression.
"""
def __init__(self, seed):
self.__dict__ = collections.defaultdict(lambda: Namespace({}))
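Filled out, the behaviour the docstring describes looks roughly like this (a sketch; the real class carries a few more helpers):

import collections


class Namespace(object):
    """Attribute-style access to a nested dict; missing names are falsy."""

    def __init__(self, seed):
        self.__dict__ = collections.defaultdict(lambda: Namespace({}))
        self.__dict__.update(seed)
        for name, value in list(self.__dict__.items()):
            if isinstance(value, dict):
                self.__dict__[name] = Namespace(value)

    def __getattr__(self, name):
        # Only called when normal lookup fails; the defaultdict then
        # hands back an empty (and therefore falsy) Namespace.
        return self.__dict__[name]

    def __bool__(self):
        return bool(self.__dict__)
    __nonzero__ = __bool__  # Python 2 spelling, matching the code base's era

# ns = Namespace({'flavor': {'ram': 512}})
# ns.flavor.ram   -> 512
# bool(ns.nope)   -> False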
@ -51,8 +52,7 @@ class Namespace(object):
class ScalingTransformer(transformer.TransformerBase):
"""Transformer to apply a scaling conversion.
"""
"""Transformer to apply a scaling conversion."""
def __init__(self, source=None, target=None, **kwargs):
"""Initialize transformer with configured parameters.
@ -74,8 +74,9 @@ class ScalingTransformer(transformer.TransformerBase):
super(ScalingTransformer, self).__init__(**kwargs)
def _scale(self, s):
"""Apply the scaling factor (either a straight multiplicative
factor or else a string to be eval'd).
"""Apply the scaling factor.
Either a straight multiplicative factor or else a string to be eval'd.
"""
ns = Namespace(s.as_dict())
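The two accepted forms of scale can be handled along these lines (a sketch with a deliberately tiny eval namespace; the real code evaluates against the full sample):

def scale_volume(volume, scale):
    # scale is either a plain multiplier (e.g. 1024) or a string
    # expression eval'd against the sample's fields.
    if isinstance(scale, str):
        return eval(scale, {}, {'volume': volume})
    return volume * scale if scale else volume

# scale_volume(3, 1024)                      -> 3072
# scale_volume(3, 'volume * 1.0 / (10**6)')  -> 3e-06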
@ -84,8 +85,7 @@ class ScalingTransformer(transformer.TransformerBase):
else s.volume * scale) if scale else s.volume)
def _map(self, s, attr):
"""Apply the name or unit mapping if configured.
"""
"""Apply the name or unit mapping if configured."""
mapped = None
from_ = self.source.get('map_from')
to_ = self.target.get('map_to')
@ -98,8 +98,7 @@ class ScalingTransformer(transformer.TransformerBase):
return mapped or self.target.get(attr, getattr(s, attr))
def _convert(self, s, growth=1):
"""Transform the appropriate sample fields.
"""
"""Transform the appropriate sample fields."""
return sample.Sample(
name=self._map(s, 'name'),
unit=self._map(s, 'unit'),
@ -122,15 +121,14 @@ class ScalingTransformer(transformer.TransformerBase):
class RateOfChangeTransformer(ScalingTransformer):
"""Transformer based on the rate of change of a sample volume,
for example taking the current and previous volumes of a
cumulative sample and producing a gauge value based on the
proportion of some maximum used.
"""Transformer based on the rate of change of a sample volume.
For example, it takes the current and previous volumes of a cumulative
sample and produces a gauge value based on the proportion of some
maximum used.
"""
def __init__(self, **kwargs):
"""Initialize transformer with configured parameters.
"""
"""Initialize transformer with configured parameters."""
super(RateOfChangeTransformer, self).__init__(**kwargs)
self.cache = {}
self.scale = self.scale or '1'
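The conversion described boils down to a delta-over-delta computation; a minimal sketch (field handling simplified):

def rate_of_change(prev_volume, prev_timestamp, volume, timestamp):
    # Turn two cumulative readings into a gauge value: growth per second.
    time_delta = (timestamp - prev_timestamp).total_seconds()
    volume_delta = volume - prev_volume
    return volume_delta / time_delta if time_delta else 0.0

# import datetime
# t0 = datetime.datetime(2014, 7, 1, 12, 0, 0)
# t1 = t0 + datetime.timedelta(seconds=60)
# rate_of_change(100.0, t0, 160.0, t1)   -> 1.0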
@ -167,8 +165,10 @@ class RateOfChangeTransformer(ScalingTransformer):
class AggregatorTransformer(ScalingTransformer):
"""Transformer that aggregate sample until a threshold or/and a
retention_time, and then flush them out in the wild.
"""Transformer that aggregate sample.
Aggregation goes until a threshold or/and a retention_time, and then flush
them out in the wild.
Example:
To aggregate sample by resource_metadata and keep the
@ -181,7 +181,6 @@ class AggregatorTransformer(ScalingTransformer):
AggregatorTransformer(size=15, user_id='first',
resource_metadata='drop')
"""
def __init__(self, size=1, retention_time=None,
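A compact sketch of the size/retention_time flushing policy (names and structure are illustrative, not the transformer's exact code):

import time


class SampleAggregator(object):
    """Buffer samples; flush on a size threshold and/or retention_time."""

    def __init__(self, size=1, retention_time=None):
        self.size = size
        self.retention_time = retention_time
        self.samples = []
        self.started_at = None

    def handle_sample(self, sample):
        if self.started_at is None:
            self.started_at = time.time()
        self.samples.append(sample)
        expired = (self.retention_time is not None
                   and time.time() - self.started_at >= self.retention_time)
        if len(self.samples) >= self.size or expired:
            flushed, self.samples, self.started_at = self.samples, [], None
            return flushed          # the batch goes out into the wild
        return []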

View File

@ -31,8 +31,7 @@ from ceilometer.openstack.common import units
def recursive_keypairs(d, separator=':'):
"""Generator that produces sequence of keypairs for nested dictionaries.
"""
"""Generator that produces sequence of keypairs for nested dictionaries."""
for name, value in sorted(d.iteritems()):
if isinstance(value, dict):
for subname, subvalue in recursive_keypairs(value, separator):
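A simplified, runnable version of the dict case (the real helper also special-cases non-dict containers):

def recursive_keypairs(d, separator=':'):
    # Walk sorted keys; recurse into sub-dicts, joining names with separator.
    for name, value in sorted(d.items()):
        if isinstance(value, dict):
            for subname, subvalue in recursive_keypairs(value, separator):
                yield '%s%s%s' % (name, separator, subname), subvalue
        else:
            yield name, value

# list(recursive_keypairs({'a': {'b': 1}, 'c': 2}))  ->  [('a:b', 1), ('c', 2)]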
@ -57,8 +56,7 @@ def recursive_keypairs(d, separator=':'):
def restore_nesting(d, separator=':'):
"""Unwinds a flattened dict to restore nesting.
"""
"""Unwinds a flattened dict to restore nesting."""
d = copy.copy(d) if any([separator in k for k in d.keys()]) else d
for k, v in d.items():
if separator in k:
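The inverse operation can be sketched as follows (illustrative, not the module's exact code):

def restore_nesting(d, separator=':'):
    # Rebuild sub-dicts from flattened keys: {'a:b': 1} -> {'a': {'b': 1}}.
    result = {}
    for key, value in d.items():
        target = result
        parts = key.split(separator)
        for part in parts[:-1]:
            target = target.setdefault(part, {})
        target[parts[-1]] = value
    return result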
@ -86,8 +84,7 @@ def dt_to_decimal(utc):
def decimal_to_dt(dec):
"""Return a datetime from Decimal unixtime format.
"""
"""Return a datetime from Decimal unixtime format."""
if dec is None:
return None
@ -153,8 +150,9 @@ def lowercase_values(mapping):
def update_nested(original_dict, updates):
"""Updates the leaf nodes in a nest dict, without replacing
entire sub-dicts.
"""Updates the leaf nodes in a nest dict.
Updates occur without replacing entire sub-dicts.
"""
dict_to_update = copy.deepcopy(original_dict)
for key, value in updates.iteritems():
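A recursive sketch of the merge (illustrative; the real helper iterates py2-style with iteritems):

import copy


def update_nested(original_dict, updates):
    # Merge leaf values without clobbering whole sub-dicts.
    dict_to_update = copy.deepcopy(original_dict)
    for key, value in updates.items():
        if isinstance(value, dict) and isinstance(dict_to_update.get(key),
                                                  dict):
            dict_to_update[key] = update_nested(dict_to_update[key], value)
        else:
            dict_to_update[key] = value
    return dict_to_update

# update_nested({'a': {'x': 1, 'y': 2}}, {'a': {'y': 3}})
# -> {'a': {'x': 1, 'y': 3}}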

View File

@ -40,8 +40,10 @@ class _Base(plugin.NotificationBase):
@staticmethod
def get_targets(conf):
"""Return a sequence of oslo.messaging.Target defining the exchange and
topics to be connected for this plugin.
"""Return a sequence of oslo.messaging.Target
Sequence defining the exchange and topics to be connected for this
plugin.
"""
return [oslo.messaging.Target(topic=topic,
exchange=conf.cinder_control_exchange)

View File

@ -38,8 +38,7 @@ commands = {posargs}
[flake8]
# H305 imports not grouped correctly
# H405 multi line docstring summary not separated with an empty line
ignore = H305,H405
ignore = H305
builtins = _
exclude=.venv,.git,.tox,dist,doc,./ceilometer/openstack/common,*lib/python*,*egg,tools,nova_tests,build
show-source = True
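For reference, the docstring shape H405 enforces, on a constructed example:

# H405 violation: the multi-line summary runs straight into the body.
def bad():
    """Return a dictionary containing the name and data type of
    the trait type.
    """

# Compliant: one-line summary, a blank line, then the details.
def good():
    """Return a dictionary containing the name and data type of the trait.

    Only trait types for the provided event_type are returned.
    """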