Merge "Fix H904 violations and re-enable gating"

commit 50c0fd1879
Jenkins, 2014-07-08 10:18:29 +00:00 (committed by Gerrit Code Review)
48 changed files with 284 additions and 288 deletions
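
Every hunk below applies the same mechanical fix mandated by the H904 hacking
check ("Wrap long lines in parentheses instead of a backslash"; see the
tox.ini change at the end of this diff): statements that were continued with
a trailing backslash are wrapped in parentheses instead, letting Python's
implicit line joining take over. A minimal before/after sketch of the
pattern, using illustrative names rather than code from any one hunk:

    # Before: H904 violation, explicit backslash continuation
    msg = _('Unable to convert the value %(value)s'
            ' to the expected data type %(type)s.') % \
        {'value': value, 'type': data_type}

    # After: the whole expression sits inside parentheses, so no
    # continuation character is needed
    msg = (_('Unable to convert the value %(value)s'
             ' to the expected data type %(type)s.') %
           {'value': value, 'type': data_type})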

View File

@@ -47,9 +47,9 @@ class CombinationEvaluator(evaluator.Evaluator):
                                  if not state or state == evaluator.UNKNOWN]
         sufficient = len(alarms_missing_states) == 0
         if not sufficient and alarm.state != evaluator.UNKNOWN:
-            reason = _('Alarms %(alarm_ids)s'
-                       ' are in unknown state') % \
-                {'alarm_ids': ",".join(alarms_missing_states)}
+            reason = (_('Alarms %(alarm_ids)s'
+                        ' are in unknown state') %
+                      {'alarm_ids': ",".join(alarms_missing_states)})
             reason_data = self._reason_data(alarms_missing_states)
             self._refresh(alarm, evaluator.UNKNOWN, reason, reason_data)
         return sufficient

View File

@@ -254,19 +254,19 @@ class Query(_Base):
                 raise TypeError()
             converted_value = self._type_converters[type](self.value)
         except ValueError:
-            msg = _('Unable to convert the value %(value)s'
-                    ' to the expected data type %(type)s.') % \
-                {'value': self.value, 'type': type}
+            msg = (_('Unable to convert the value %(value)s'
+                     ' to the expected data type %(type)s.') %
+                   {'value': self.value, 'type': type})
             raise ClientSideError(msg)
         except TypeError:
-            msg = _('The data type %(type)s is not supported. The supported'
-                    ' data type list is: %(supported)s') % \
-                {'type': type, 'supported': self._supported_types}
+            msg = (_('The data type %(type)s is not supported. The supported'
+                     ' data type list is: %(supported)s') %
+                   {'type': type, 'supported': self._supported_types})
             raise ClientSideError(msg)
         except Exception:
-            msg = _('Unexpected exception converting %(value)s to'
-                    ' the expected data type %(type)s.') % \
-                {'value': self.value, 'type': type}
+            msg = (_('Unexpected exception converting %(value)s to'
+                     ' the expected data type %(type)s.') %
+                   {'value': self.value, 'type': type})
             raise ClientSideError(msg)
         return converted_value
@@ -1553,14 +1553,13 @@ class AlarmThresholdRule(_Base):
     @property
     def default_description(self):
-        return _(
-            'Alarm when %(meter_name)s is %(comparison_operator)s a '
-            '%(statistic)s of %(threshold)s over %(period)s seconds') % \
-            dict(comparison_operator=self.comparison_operator,
-                 statistic=self.statistic,
-                 threshold=self.threshold,
-                 meter_name=self.meter_name,
-                 period=self.period)
+        return (_('Alarm when %(meter_name)s is %(comparison_operator)s a '
+                  '%(statistic)s of %(threshold)s over %(period)s seconds') %
+                dict(comparison_operator=self.comparison_operator,
+                     statistic=self.statistic,
+                     threshold=self.threshold,
+                     meter_name=self.meter_name,
+                     period=self.period))

     def as_dict(self):
         rule = self.as_dict_from_keys(['period', 'comparison_operator',
@@ -1624,8 +1623,8 @@ class AlarmTimeConstraint(_Base):
     def get_description(self):
         if not self._description:
-            return 'Time constraint at %s lasting for %s seconds' \
-                   % (self.start, self.duration)
+            return ('Time constraint at %s lasting for %s seconds'
+                    % (self.start, self.duration))
         return self._description

     def set_description(self, value):

View File

@@ -40,8 +40,8 @@ class _Base(plugin.ComputePollster):
         metadata = copy.copy(vnic_data)
         resource_metadata = dict(zip(metadata._fields, metadata))
         resource_metadata['instance_id'] = instance.id
-        resource_metadata['instance_type'] = \
-            instance.flavor['id'] if instance.flavor else None
+        resource_metadata['instance_type'] = (instance.flavor['id'] if
+                                              instance.flavor else None)

         if vnic_data.fref is not None:
             rid = vnic_data.fref

View File

@@ -69,8 +69,8 @@ def _get_metadata_from_object(instance):
         metadata['memory_mb'] = instance.flavor['ram']
         metadata['disk_gb'] = instance.flavor['disk']
         metadata['ephemeral_gb'] = instance.flavor['ephemeral']
-        metadata['root_gb'] = int(metadata['disk_gb']) - \
-            int(metadata['ephemeral_gb'])
+        metadata['root_gb'] = (int(metadata['disk_gb']) -
+                               int(metadata['ephemeral_gb']))

     return compute_util.add_reserved_user_metadata(instance.metadata, metadata)

View File

@@ -108,14 +108,14 @@ class SNMPInspector(base.Inspector):
     def inspect_cpu(self, host):
         # get 1 minute load
-        cpu_1_min_load = \
-            str(self._get_value_from_oid(self._cpu_1_min_load_oid, host))
+        cpu_1_min_load = (
+            str(self._get_value_from_oid(self._cpu_1_min_load_oid, host)))
         # get 5 minute load
-        cpu_5_min_load = \
-            str(self._get_value_from_oid(self._cpu_5_min_load_oid, host))
+        cpu_5_min_load = (
+            str(self._get_value_from_oid(self._cpu_5_min_load_oid, host)))
         # get 15 minute load
-        cpu_15_min_load = \
-            str(self._get_value_from_oid(self._cpu_15_min_load_oid, host))
+        cpu_15_min_load = (
+            str(self._get_value_from_oid(self._cpu_15_min_load_oid, host)))

         yield base.CPUStats(cpu_1_min=float(cpu_1_min_load),
                             cpu_5_min=float(cpu_5_min_load),

View File

@@ -157,10 +157,10 @@ class OpenDayLightDriver(driver.Driver):
                 user_links.append(user_link)

             # get link status to hosts
-            container_data['active_hosts'] = cs.host_tracker.\
-                get_active_hosts(container_name)
-            container_data['inactive_hosts'] = cs.host_tracker.\
-                get_inactive_hosts(container_name)
+            container_data['active_hosts'] = (
+                cs.host_tracker.get_active_hosts(container_name))
+            container_data['inactive_hosts'] = (
+                cs.host_tracker.get_inactive_hosts(container_name))

             container_data['timestamp'] = timeutils.isotime()

View File

@@ -108,8 +108,8 @@ class Client(object):
         attr_defaults = [('kernel_id', None),
                          ('ramdisk_id', None)]

-        instance.image['name'] = \
-            getattr(image, 'name') if image else 'unknown-id-%s' % iid
+        instance.image['name'] = (
+            getattr(image, 'name') if image else 'unknown-id-%s' % iid)
         image_metadata = getattr(image, 'metadata', None)

         for attr, default in attr_defaults:

View File

@@ -56,15 +56,14 @@ class StackCRUD(plugin.NotificationBase):
                 for topic in conf.notification_topics]

     def process_notification(self, message):
-        name = message['event_type'] \
-            .replace(self.resource_name, 'stack') \
-            .replace('.end', '')
+        name = (message['event_type'].replace(self.resource_name, 'stack')
                .replace('.end', ''))

         project_id = message['payload']['tenant_id']

         # Trying to use the trustor_id if trusts is used by Heat,
-        user_id = message.get('_context_trustor_user_id') or \
-            message['_context_user_id']
+        user_id = (message.get('_context_trustor_user_id') or
+                   message['_context_user_id'])

         yield sample.Sample.from_notification(
             name=name,

View File

@@ -130,8 +130,8 @@ class Source(object):
         if not meters:
             raise PipelineException("No meter specified", self.cfg)

-        if [x for x in meters if x[0] not in '!*'] and \
-           [x for x in meters if x[0] == '!']:
+        if ([x for x in meters if x[0] not in '!*'] and
+                [x for x in meters if x[0] == '!']):
             raise PipelineException(
                 "Both included and excluded meters specified",
                 cfg)

View File

@@ -148,8 +148,8 @@ class RPCPublisher(publisher.PublisherBase):
             # something in the self.local_queue
             queue = self.local_queue
             self.local_queue = []
-            self.local_queue = self._process_queue(queue, self.policy) + \
-                self.local_queue
+            self.local_queue = (self._process_queue(queue, self.policy) +
+                                self.local_queue)
             if self.policy == 'queue':
                 self._check_queue_length()

View File

@@ -213,9 +213,9 @@ class MTable(object):
                 data = rows[row]
                 r_data = {}
                 for key in data:
-                    if (op == '=' and key.startswith(column)) or \
-                            (op == '>=' and key >= column) or \
-                            (op == '<=' and key <= column):
+                    if ((op == '=' and key.startswith(column)) or
+                            (op == '>=' and key >= column) or
+                            (op == '<=' and key <= column)):
                         r_data[key] = data[key]
             else:
                 raise NotImplementedError("In-memory QualifierFilter "

View File

@@ -382,10 +382,12 @@ class Connection(pymongo_base.Connection):
             if period:
                 stat.period = period
                 periods = key.get('timestamp')
-                stat.period_start = period_start + \
-                    datetime.timedelta(**(_to_offset(periods)))
-                stat.period_end = period_start + \
-                    datetime.timedelta(**(_to_offset(periods + 1)))
+                stat.period_start = (period_start +
+                                     datetime.
+                                     timedelta(**(_to_offset(periods))))
+                stat.period_end = (period_start +
+                                   datetime.
+                                   timedelta(**(_to_offset(periods + 1))))
             else:
                 stat.period_start = stat.duration_start
                 stat.period_end = stat.duration_end

View File

@@ -176,8 +176,8 @@ class Connection(base.Connection):
             if Connection._memory_instance is None:
                 LOG.debug(_('Creating a new in-memory HBase '
                             'Connection object'))
-                Connection._memory_instance = \
-                    hbase_inmemory.MConnectionPool()
+                Connection._memory_instance = (hbase_inmemory.
+                                               MConnectionPool())
             self.conn_pool = Connection._memory_instance
         else:
             self.conn_pool = self._get_connection_pool(opts)
@@ -477,9 +477,9 @@ class Connection(base.Connection):
             return
         with self.conn_pool.connection() as conn:
             meter_table = conn.table(self.METER_TABLE)
-            q, start, stop, columns = \
-                hbase_utils.make_sample_query_from_filter(
-                    sample_filter, require_meter=False)
+            q, start, stop, columns = (hbase_utils.
+                                       make_sample_query_from_filter
+                                       (sample_filter, require_meter=False))
             LOG.debug(_("Query Meter Table: %s") % q)
             gen = meter_table.scan(filter=q, row_start=start, row_stop=stop,
                                    limit=limit)
@@ -508,9 +508,8 @@ class Connection(base.Connection):
         stat.avg = (stat.sum / float(stat.count))
         stat.duration_start = min(ts, stat.duration_start or ts)
         stat.duration_end = max(ts, stat.duration_end or ts)
-        stat.duration = \
-            timeutils.delta_seconds(stat.duration_start,
-                                    stat.duration_end)
+        stat.duration = (timeutils.delta_seconds(stat.duration_start,
+                                                 stat.duration_end))

     def get_meter_statistics(self, sample_filter, period=None, groupby=None,
                              aggregate=None):
@@ -534,8 +533,9 @@ class Connection(base.Connection):
         with self.conn_pool.connection() as conn:
             meter_table = conn.table(self.METER_TABLE)
-            q, start, stop, columns = \
-                hbase_utils.make_sample_query_from_filter(sample_filter)
+            q, start, stop, columns = (hbase_utils.
+                                       make_sample_query_from_filter
+                                       (sample_filter))
             # These fields are used in statistics' calculating
             columns.extend(['f:timestamp', 'f:counter_volume',
                             'f:counter_unit'])
@@ -575,8 +575,7 @@ class Connection(base.Connection):
                                 start_time, ts) / period) * period
                 period_start = start_time + datetime.timedelta(0, offset)

-                if not results or not results[-1].period_start == \
-                        period_start:
+                if not results or not results[-1].period_start == period_start:
                     if period:
                         period_end = period_start + datetime.timedelta(
                             0, period)

View File

@@ -250,10 +250,10 @@ class Connection(base.Connection):
         nested = session.connection().dialect.name != 'sqlite'
         with session.begin(nested=nested,
                            subtransactions=not nested):
-            obj = session.query(models.Meter)\
-                .filter(models.Meter.name == name)\
-                .filter(models.Meter.type == type)\
-                .filter(models.Meter.unit == unit).first()
+            obj = (session.query(models.Meter)
+                   .filter(models.Meter.name == name)
+                   .filter(models.Meter.type == type)
+                   .filter(models.Meter.unit == unit).first())
             if obj is None:
                 obj = models.Meter(name=name, type=type, unit=unit)
                 session.add(obj)
@@ -314,8 +314,8 @@ class Connection(base.Connection):
         session = self._engine_facade.get_session()
         with session.begin():
             end = timeutils.utcnow() - datetime.timedelta(seconds=ttl)
-            sample_query = session.query(models.Sample)\
-                .filter(models.Sample.timestamp < end)
+            sample_query = (session.query(models.Sample)
+                            .filter(models.Sample.timestamp < end))
             for sample_obj in sample_query.all():
                 session.delete(sample_obj)
@@ -374,15 +374,15 @@ class Connection(base.Connection):

         for res_id in res_q.all():
             # get latest Sample
-            max_q = session.query(models.Sample)\
-                .filter(models.Sample.resource_id == res_id[0])
+            max_q = (session.query(models.Sample)
+                     .filter(models.Sample.resource_id == res_id[0]))
             max_q = _apply_filters(max_q)
             max_q = max_q.order_by(models.Sample.timestamp.desc(),
                                    models.Sample.id.desc()).limit(1)

             # get the min timestamp value.
-            min_q = session.query(models.Sample.timestamp)\
-                .filter(models.Sample.resource_id == res_id[0])
+            min_q = (session.query(models.Sample.timestamp)
+                     .filter(models.Sample.resource_id == res_id[0]))
             min_q = _apply_filters(min_q)
             min_q = min_q.order_by(models.Sample.timestamp.asc()).limit(1)
@@ -433,17 +433,19 @@ class Connection(base.Connection):
         # by selecting a record for each (resource_id, meter_id).
         # max() is used to choice a sample record, so the latest record
         # is selected for each (resource_id, meter_id).
-        sample_subq = session.query(
-            func.max(models.Sample.id).label('id'))\
-            .group_by(models.Sample.meter_id, models.Sample.resource_id)
+        sample_subq = (session.query(
+            func.max(models.Sample.id).label('id'))
+            .group_by(models.Sample.meter_id,
+                      models.Sample.resource_id))
         sample_subq = sample_subq.subquery()

         # SELECT sample.* FROM sample INNER JOIN
         # (SELECT max(sample.id) AS id FROM sample
         #  GROUP BY sample.resource_id, sample.meter_id) AS anon_2
         # ON sample.id = anon_2.id
-        query_sample = session.query(models.MeterSample).\
-            join(sample_subq, models.MeterSample.id == sample_subq.c.id)
+        query_sample = (session.query(models.MeterSample).
+                        join(sample_subq, models.MeterSample.id ==
+                             sample_subq.c.id))
         query_sample = _apply_filters(query_sample)

         for sample in query_sample.all():
@@ -563,9 +565,9 @@ class Connection(base.Connection):
             group_attributes = [getattr(models.Sample, g) for g in groupby]
             select.extend(group_attributes)

-        query = session.query(*select).filter(
-            models.Meter.id == models.Sample.meter_id)\
-            .group_by(models.Meter.unit)
+        query = (session.query(*select).filter(
+            models.Meter.id == models.Sample.meter_id).
+            group_by(models.Meter.unit))

         if groupby:
             query = query.group_by(*group_attributes)
@@ -972,8 +974,8 @@ class Connection(base.Connection):
                                  models.Event.event_type_id]
         if event_filter.event_type:
-            event_join_conditions\
-                .append(models.EventType.desc == event_filter.event_type)
+            event_join_conditions.append(models.EventType.desc ==
+                                         event_filter.event_type)

         event_query = event_query.join(models.EventType,
                                        and_(*event_join_conditions))
@@ -981,16 +983,16 @@ class Connection(base.Connection):
         # Build up the where conditions
         event_filter_conditions = []
         if event_filter.message_id:
-            event_filter_conditions\
-                .append(models.Event.message_id == event_filter.message_id)
+            event_filter_conditions.append(models.Event.message_id ==
+                                           event_filter.message_id)
         if start:
             event_filter_conditions.append(models.Event.generated >= start)
         if end:
             event_filter_conditions.append(models.Event.generated <= end)
         if event_filter_conditions:
-            event_query = event_query\
-                .filter(and_(*event_filter_conditions))
+            event_query = (event_query.
+                           filter(and_(*event_filter_conditions)))

         event_models_dict = {}
         if event_filter.traits_filter:
@@ -1013,20 +1015,20 @@ class Connection(base.Connection):
                     elif key == 'float':
                         conditions.append(models.Trait.t_float == value)

-                trait_query = session.query(models.Trait.event_id)\
-                    .join(models.TraitType, and_(*conditions)).subquery()
+                trait_query = (session.query(models.Trait.event_id).
+                               join(models.TraitType,
+                                    and_(*conditions)).subquery())

-                event_query = event_query\
-                    .join(trait_query,
-                          models.Event.id == trait_query.c.event_id)
+                event_query = (event_query.
+                               join(trait_query, models.Event.id ==
+                                    trait_query.c.event_id))
         else:
             # If there are no trait filters, grab the events from the db
-            query = session.query(models.Event.id,
-                                  models.Event.generated,
-                                  models.Event.message_id,
-                                  models.EventType.desc)\
-                .join(models.EventType,
-                      and_(*event_join_conditions))
+            query = (session.query(models.Event.id,
+                                   models.Event.generated,
+                                   models.Event.message_id,
+                                   models.EventType.desc).
+                     join(models.EventType, and_(*event_join_conditions)))
             if event_filter_conditions:
                 query = query.filter(and_(*event_filter_conditions))
             for (id_, generated, message_id, desc_) in query.all():
@@ -1037,10 +1039,11 @@ class Connection(base.Connection):

         # Build event models for the events
         event_query = event_query.subquery()
-        query = session.query(models.Trait)\
-            .join(models.TraitType,
-                  models.Trait.trait_type_id == models.TraitType.id)\
-            .join(event_query, models.Trait.event_id == event_query.c.id)
+        query = (session.query(models.Trait).
+                 join(models.TraitType, models.Trait.trait_type_id ==
+                      models.TraitType.id).
+                 join(event_query, models.Trait.event_id ==
+                      event_query.c.id))

         # Now convert the sqlalchemy objects back into Models ...
         for trait in query.all():
@@ -1065,8 +1068,8 @@ class Connection(base.Connection):
         session = self._engine_facade.get_session()
         with session.begin():
-            query = session.query(models.EventType.desc)\
-                .order_by(models.EventType.desc)
+            query = (session.query(models.EventType.desc).
+                     order_by(models.EventType.desc))
             for name in query.all():
                 # The query returns a tuple with one element.
                 yield name[0]

View File

@@ -51,9 +51,9 @@ class Event(base.Model):
         trait_list = []
         if self.traits:
             trait_list = [str(trait) for trait in self.traits]
-        return "<Event: %s, %s, %s, %s>" % \
-            (self.message_id, self.event_type, self.generated,
-             " ".join(trait_list))
+        return ("<Event: %s, %s, %s, %s>" %
+                (self.message_id, self.event_type, self.generated,
+                 " ".join(trait_list)))


 class Trait(base.Model):

View File

@@ -23,8 +23,8 @@ def upgrade(migrate_engine):
     for table in tables:
         sql += "ALTER TABLE %s CONVERT TO CHARACTER SET utf8;" % table
     sql += "SET foreign_key_checks = 1;"
-    sql += "ALTER DATABASE %s DEFAULT CHARACTER SET utf8;" \
-        % migrate_engine.url.database
+    sql += ("ALTER DATABASE %s DEFAULT CHARACTER SET utf8;" %
+            migrate_engine.url.database)
     migrate_engine.execute(sql)
@@ -38,6 +38,6 @@ def downgrade(migrate_engine):
     for table in tables:
         sql += "ALTER TABLE %s CONVERT TO CHARACTER SET latin1;" % table
     sql += "SET foreign_key_checks = 1;"
-    sql += "ALTER DATABASE %s DEFAULT CHARACTER SET latin1;" \
-        % migrate_engine.url.database
+    sql += ("ALTER DATABASE %s DEFAULT CHARACTER SET latin1;" %
+            migrate_engine.url.database)
     migrate_engine.execute(sql)

View File

@@ -40,10 +40,8 @@ def upgrade(migrate_engine):
                          from_obj=join)
     for event_id, value in traits.execute():
-        event.update().\
-            where(event.c.id == event_id).\
-            values(message_id=value).\
-            execute()
+        (event.update().where(event.c.id == event_id).values(message_id=value).
+         execute())

     # Leave the Trait, makes the rollback easier and won't really hurt anyone.

View File

@@ -38,8 +38,8 @@ def _convert_data_type(table, col, from_t, to_t, pk_attr='id', index=False):
     query = sa.select([key_attr, orig_col])
     for key, value in migration.paged(query):
-        table.update().where(key_attr == key)\
-            .values({temp_col_n: value}).execute()
+        (table.update().where(key_attr == key).values({temp_col_n: value}).
+         execute())

     orig_col.drop()
     new_col.alter(name=col)

View File

@@ -63,10 +63,9 @@ def upgrade(migrate_engine):
     # and delete the entry from the unique_name table
     query = select([event.c.id, event.c.unique_name_id])
     for key, value in migration.paged(query):
-        event.update().where(event.c.id == key)\
-            .values({"event_type_id": value}).execute()
-        unique_name.delete()\
-            .where(unique_name.c.id == key).execute()
+        (event.update().where(event.c.id == key).
+         values({"event_type_id": value}).execute())
+        unique_name.delete().where(unique_name.c.id == key).execute()

     params = {'columns': [event.c.event_type_id],
               'refcolumns': [event_type.c.id]}
@@ -108,8 +107,8 @@ def downgrade(migrate_engine):
     # Move data from event_type_id column to unique_name_id column
     query = select([event.c.id, event.c.event_type_id])
     for key, value in migration.paged(query):
-        event.update().where(event.c.id == key)\
-            .values({"unique_name_id": value}).execute()
+        (event.update().where(event.c.id == key).
+         values({"unique_name_id": value}).execute())

     event.c.event_type_id.drop()
     params = {'columns': [event.c.unique_name_id],

View File

@@ -65,8 +65,8 @@ def upgrade(migrate_engine):
     # Move data from name_id column into trait_type_id column
     query = select([trait.c.id, trait.c.name_id])
     for key, value in migration.paged(query):
-        trait.update().where(trait.c.id == key)\
-            .values({"trait_type_id": value}).execute()
+        (trait.update().where(trait.c.id == key).
+         values({"trait_type_id": value}).execute())

     trait.c.name_id.drop()
@@ -128,14 +128,14 @@ def downgrade(migrate_engine):
     # copy data from trait_type.data_type into trait.t_type
     query = select([trait_type.c.id, trait_type.c.data_type])
     for key, value in migration.paged(query):
-        trait.update().where(trait.c.trait_type_id == key)\
-            .values({"t_type": value}).execute()
+        (trait.update().where(trait.c.trait_type_id == key).
+         values({"t_type": value}).execute())

     # Move data from name_id column into trait_type_id column
     query = select([trait.c.id, trait.c.trait_type_id])
     for key, value in migration.paged(query):
-        trait.update().where(trait.c.id == key)\
-            .values({"name_id": value}).execute()
+        (trait.update().where(trait.c.id == key).
+         values({"name_id": value}).execute())

     # Add a foreign key to the unique_name table
     params = {'columns': [trait.c.name_id],

View File

@@ -36,8 +36,8 @@ def _convert_data_type(table, col, from_t, to_t, pk_attr='id', index=False):
     query = sa.select([key_attr, orig_col])
     for key, value in migration.paged(query):
-        table.update().where(key_attr == key)\
-            .values({temp_col_n: value}).execute()
+        (table.update().where(key_attr == key).values({temp_col_n: value}).
+         execute())

     orig_col.drop()
     new_col.alter(name=col)

View File

@@ -36,8 +36,8 @@ def _convert_data_type(table, col, from_t, to_t, pk_attr='id'):
     query = sa.select([key_attr, orig_col])
     for key, value in migration.paged(query):
-        table.update().where(key_attr == key)\
-            .values({temp_col_n: value}).execute()
+        (table.update().where(key_attr == key).values({temp_col_n: value}).
+         execute())

     orig_col.drop()
     new_col.alter(name=col)

View File

@@ -73,11 +73,11 @@ def upgrade(migrate_engine):
         index.create(bind=migrate_engine)

     for row in sa.select([meter]).execute():
-        sample.update()\
-            .where(sa.and_(sample.c.counter_name == row['name'],
-                           sample.c.counter_type == row['type'],
-                           sample.c.counter_unit == row['unit']))\
-            .values({sample.c.meter_id: row['id']}).execute()
+        (sample.update().
+         where(sa.and_(sample.c.counter_name == row['name'],
+                       sample.c.counter_type == row['type'],
+                       sample.c.counter_unit == row['unit'])).
+         values({sample.c.meter_id: row['id']}).execute())

     handle_rid_index(meta)
@@ -96,11 +96,11 @@ def downgrade(migrate_engine):
     sa.Column('counter_unit', sa.String(255)).create(sample)

     meter = sa.Table('meter', meta, autoload=True)
     for row in sa.select([meter]).execute():
-        sample.update()\
-            .where(sample.c.meter_id == row['id'])\
-            .values({sample.c.counter_name: row['name'],
-                     sample.c.counter_type: row['type'],
-                     sample.c.counter_unit: row['unit']}).execute()
+        (sample.update().
+         where(sample.c.meter_id == row['id']).
+         values({sample.c.counter_name: row['name'],
+                 sample.c.counter_type: row['type'],
+                 sample.c.counter_unit: row['unit']}).execute())

     params = {'columns': [sample.c.meter_id],
               'refcolumns': [meter.c.id]}

View File

@@ -21,14 +21,14 @@ def upgrade(migrate_engine):
     meta = sa.MetaData(bind=migrate_engine)
     for table_name in TABLES_027:
         try:
-            sa.Table('dump027_' + table_name, meta, autoload=True)\
-                .drop(checkfirst=True)
+            (sa.Table('dump027_' + table_name, meta, autoload=True).
+             drop(checkfirst=True))
         except sa.exc.NoSuchTableError:
             pass
     for table_name in TABLES_012:
         try:
-            sa.Table('dump_' + table_name, meta, autoload=True)\
-                .drop(checkfirst=True)
+            (sa.Table('dump_' + table_name, meta, autoload=True).
+             drop(checkfirst=True))
         except sa.exc.NoSuchTableError:
             pass

View File

@@ -46,11 +46,11 @@ def upgrade(migrate_engine):
         params = {'columns': [table.c[column]],
                   'refcolumns': [ref_table.c[ref_column_name]]}

-        if migrate_engine.name == "mysql" and \
-                table_name != 'alarm_history':
+        if (migrate_engine.name == "mysql" and
+                table_name != 'alarm_history'):
             params['name'] = "_".join(('fk', table_name, column))
-        elif migrate_engine.name == "postgresql" and \
-                table_name == "sample":
+        elif (migrate_engine.name == "postgresql" and
+                table_name == "sample"):
             # The fk contains the old table name
             params['name'] = "_".join(('meter', column, 'fkey'))
@@ -163,11 +163,11 @@ def downgrade(migrate_engine):
         params = {'columns': [table.c[column]],
                   'refcolumns': [ref_table.c[ref_column_name]]}

-        if migrate_engine.name == "mysql" and \
-                table_name != 'alarm_history':
+        if (migrate_engine.name == "mysql" and
+                table_name != 'alarm_history'):
             params['name'] = "_".join(('fk', table_name, column))
-        elif migrate_engine.name == "postgresql" and \
-                table_name == "sample":
+        elif (migrate_engine.name == "postgresql" and
+                table_name == "sample"):
             # The fk contains the old table name
             params['name'] = "_".join(('meter', column, 'fkey'))

View File

@@ -58,11 +58,11 @@ def upgrade(migrate_engine):

     # move source values to samples
     sourceassoc = load_tables['sourceassoc']
-    query = sa.select([sourceassoc.c.sample_id, sourceassoc.c.source_id])\
-        .where(sourceassoc.c.sample_id.isnot(None))
+    query = (sa.select([sourceassoc.c.sample_id, sourceassoc.c.source_id]).
+             where(sourceassoc.c.sample_id.isnot(None)))
     for sample_id, source_id in migration.paged(query):
-        sample.update().where(sample_id == sample.c.id)\
-            .values({'source_id': source_id}).execute()
+        (sample.update().where(sample_id == sample.c.id).
+         values({'source_id': source_id}).execute())

     # drop tables
     for table_name in DROP_TABLES:

View File

@@ -19,8 +19,8 @@ SQLAlchemy models for Ceilometer data.

 import json

-from sqlalchemy import Column, Integer, String, ForeignKey, \
-    Index, UniqueConstraint, BigInteger, join
+from sqlalchemy import (Column, Integer, String, ForeignKey, Index,
+                        UniqueConstraint, BigInteger, join)
 from sqlalchemy import Float, Boolean, Text, DateTime
 from sqlalchemy.dialects.mysql import DECIMAL
 from sqlalchemy.ext.declarative import declarative_base
@@ -375,8 +375,8 @@ class Trait(Base):
     def __repr__(self):
         name = self.trait_type.name if self.trait_type else None
-        data_type = self.trait_type.data_type if self.trait_type\
-            else api_models.Trait.NONE_TYPE
+        data_type = (self.trait_type.data_type if self.trait_type else
+                     api_models.Trait.NONE_TYPE)
         return "<Trait(%s) %d=%s/%s/%s/%s on %s>" % (name,
                                                      data_type,

View File

@@ -233,8 +233,8 @@ class TestEvaluate(base.TestEvaluatorBase):
             self._assert_all_alarms('ok')
             self.assertEqual([],
                              self.api_client.alarms.set_state.call_args_list)
-            reason = 'Remaining as ok due to 4 samples inside' \
-                     ' threshold, most recent: 8.0'
+            reason = ('Remaining as ok due to 4 samples inside'
+                      ' threshold, most recent: 8.0')
             reason_datas = self._reason_data('inside', 4, 8.0)
             expected = [mock.call(self.alarms[1], 'ok', reason, reason_datas)]
             self.assertEqual(expected, self.notifier.notify.call_args_list)
@@ -255,8 +255,8 @@ class TestEvaluate(base.TestEvaluatorBase):
             self._assert_all_alarms('alarm')
             self.assertEqual([],
                              self.api_client.alarms.set_state.call_args_list)
-            reason = 'Remaining as alarm due to 4 samples outside' \
-                     ' threshold, most recent: 7.0'
+            reason = ('Remaining as alarm due to 4 samples outside'
+                      ' threshold, most recent: 7.0')
             reason_datas = self._reason_data('outside', 4, 7.0)
             expected = [mock.call(self.alarms[1], 'alarm',
                                   reason, reason_datas)]

View File

@@ -1939,12 +1939,12 @@ class TestAlarms(v2.FunctionalTest,

         class PayloadMatcher(object):
             def __eq__(self, payload):
-                return payload['detail']['name'] == 'sent_notification' and \
-                    payload['type'] == 'creation' and \
-                    payload['detail']['rule']['meter_name'] == 'ameter' and \
-                    set(['alarm_id', 'detail', 'event_id', 'on_behalf_of',
-                         'project_id', 'timestamp',
-                         'user_id']).issubset(payload.keys())
+                return (payload['detail']['name'] == 'sent_notification' and
+                        payload['type'] == 'creation' and
+                        payload['detail']['rule']['meter_name'] == 'ameter' and
+                        set(['alarm_id', 'detail', 'event_id', 'on_behalf_of',
+                             'project_id', 'timestamp',
+                             'user_id']).issubset(payload.keys()))

         endpoint.info.assert_called_once_with(
             {'instance_uuid': None,

View File

@@ -57,15 +57,15 @@ class TestApp(base.BaseTestCase):
     def test_keystone_middleware_parse_conffile(self):
         pipeline_conf = self.path_get("etc/ceilometer/pipeline.yaml")
         api_conf = self.path_get('etc/ceilometer/api_paste.ini')
-        content = "[DEFAULT]\n"\
-                  "rpc_backend = fake\n"\
-                  "pipeline_cfg_file = {0}\n"\
-                  "api_paste_config = {1}\n"\
-                  "[{2}]\n"\
-                  "auth_protocol = file\n"\
-                  "auth_version = v2.0\n".format(pipeline_conf,
-                                                 api_conf,
-                                                 acl.OPT_GROUP_NAME)
+        content = ("[DEFAULT]\n"
+                   "rpc_backend = fake\n"
+                   "pipeline_cfg_file = {0}\n"
+                   "api_paste_config = {1}\n"
+                   "[{2}]\n"
+                   "auth_protocol = file\n"
+                   "auth_version = v2.0\n".format(pipeline_conf,
+                                                  api_conf,
+                                                  acl.OPT_GROUP_NAME))

         tmpfile = fileutils.write_to_tempfile(content=content,
                                               prefix='ceilometer',
@@ -217,8 +217,8 @@ class TestApiMiddleware(v2.FunctionalTest):
                          json.loads(resp.body)['error_message']
                          ['faultstring'])

-        with mock.patch('ceilometer.api.controllers.v2.EntityNotFound') \
-                as CustomErrorClass:
+        with mock.patch('ceilometer.api.controllers.'
+                        'v2.EntityNotFound') as CustomErrorClass:
             CustomErrorClass.return_value = wsme.exc.ClientSideError(
                 "untranslated_error", status_code=404)
             resp = self.get_json('/alarms/alarm-id-5', expect_errors=True)

View File

@@ -36,8 +36,7 @@ class EventTestBase(v2.FunctionalTest,
         base = 0
         self.trait_time = datetime.datetime(2013, 12, 31, 5, 0)
         for event_type in ['Foo', 'Bar', 'Zoo']:
-            trait_models = \
-                [models.Trait(name, type, value)
-                 for name, type, value in [
-                     ('trait_A', models.Trait.TEXT_TYPE,
-                      "my_%s_text" % event_type),
+            trait_models = [models.Trait(name, type, value)
+                            for name, type, value in [
+                                ('trait_A', models.Trait.TEXT_TYPE,
+                                 "my_%s_text" % event_type),

View File

@@ -94,8 +94,8 @@ class TestCPUUtilPollster(base.TestPollsterBase):
         def inspect_cpu_util(name, duration):
             return six.next(next_value)

-        self.inspector.inspect_cpu_util = \
-            mock.Mock(side_effect=inspect_cpu_util)
+        self.inspector.inspect_cpu_util = (mock.
+                                           Mock(side_effect=inspect_cpu_util))

         mgr = manager.AgentManager()
         pollster = cpu.CPUUtilPollster()

View File

@@ -85,8 +85,7 @@ class TestDiskRatePollsters(base.TestPollsterBase):
     def setUp(self):
         super(TestDiskRatePollsters, self).setUp()
-        self.inspector.inspect_disk_rates = \
-            mock.Mock(return_value=self.DISKS)
+        self.inspector.inspect_disk_rates = mock.Mock(return_value=self.DISKS)

     @mock.patch('ceilometer.pipeline.setup_pipeline', mock.MagicMock())
     def _check_get_samples(self, factory, sample_name, expected_volume):

View File

@@ -37,8 +37,8 @@ class TestMemoryPollster(base.TestPollsterBase):
         def inspect_memory_usage(instance, duration):
             return six.next(next_value)

-        self.inspector.inspect_memory_usage = \
-            mock.Mock(side_effect=inspect_memory_usage)
+        (self.inspector.
+         inspect_memory_usage) = mock.Mock(side_effect=inspect_memory_usage)

         mgr = manager.AgentManager()
         pollster = memory.MemoryUsagePollster()

View File

@@ -51,10 +51,10 @@ class TestVsphereInspection(test.BaseTestCase):
         fake_instance = construct_mock_instance_object(fake_instance_id)
         self._inspector._ops.get_vm_moid.return_value = fake_instance_moid
-        self._inspector._ops.get_perf_counter_id.return_value = \
-            fake_perf_counter_id
-        self._inspector._ops.query_vm_aggregate_stats.return_value = \
-            fake_memory_value
+        (self._inspector._ops.
+         get_perf_counter_id.return_value) = fake_perf_counter_id
+        (self._inspector._ops.query_vm_aggregate_stats.
+         return_value) = fake_memory_value
         memory_stat = self._inspector.inspect_memory_usage(fake_instance)

         self.assertEqual(fake_stat, memory_stat)
@@ -72,10 +72,10 @@ class TestVsphereInspection(test.BaseTestCase):
         fake_instance = construct_mock_instance_object(fake_instance_id)
         self._inspector._ops.get_vm_moid.return_value = fake_instance_moid
-        self._inspector._ops.get_perf_counter_id.return_value = \
-            fake_perf_counter_id
-        self._inspector._ops.query_vm_aggregate_stats.return_value = \
-            fake_cpu_util_value * 100
+        (self._inspector._ops.get_perf_counter_id.
+         return_value) = fake_perf_counter_id
+        (self._inspector._ops.query_vm_aggregate_stats.
+         return_value) = fake_cpu_util_value * 100
         cpu_util_stat = self._inspector.inspect_cpu_util(fake_instance)
         self.assertEqual(fake_stat, cpu_util_stat)
@@ -107,8 +107,7 @@ class TestVsphereInspection(test.BaseTestCase):
         ops_mock = self._inspector._ops
         ops_mock.get_vm_moid.return_value = test_vm_moid
         ops_mock.get_perf_counter_id.side_effect = get_counter_id_side_effect
-        ops_mock.query_vm_device_stats.side_effect = \
-            query_stat_side_effect
+        ops_mock.query_vm_device_stats.side_effect = query_stat_side_effect
         result = self._inspector.inspect_vnic_rates(mock.MagicMock())

         # validate result

View File

@@ -111,8 +111,8 @@ class VsphereOperationsTest(test.BaseTestCase):
             counter_info1 = construct_mock_counter_info("a", "b", "c", 1)
             counter_info2 = construct_mock_counter_info("x", "y", "z", 2)
             result = mock.MagicMock()
-            result.objects[0].propSet[0].val.PerfCounterInfo.__iter__. \
-                return_value = [counter_info1, counter_info2]
+            (result.objects[0].propSet[0].val.PerfCounterInfo.__iter__.
+             return_value) = [counter_info1, counter_info2]
             return result

         vim_mock = self._vsphere_ops._api_session._vim

View File

@@ -94,10 +94,14 @@ class TestEventEndpoint(tests_base.BaseTestCase):
         self.mock_dispatcher = mock.MagicMock()
         self.endpoint = event_endpoint.EventsNotificationEndpoint()
-        self.endpoint.dispatcher_manager = \
-            extension.ExtensionManager.make_test_instance([
-                extension.Extension('test', None, None, self.mock_dispatcher)
-            ])
+        (self.endpoint.
+         dispatcher_manager) = (extension.ExtensionManager.
+                                make_test_instance([extension.
+                                                    Extension('test', None,
+                                                              None,
+                                                              self.
+                                                              mock_dispatcher)
+                                                    ]))
         self.endpoint.event_converter = mock.MagicMock()
         self.endpoint.event_converter.to_event.return_value = mock.MagicMock(
             event_type='test.test')

View File

@@ -17,7 +17,7 @@ import abc
 import mock
 import six
 from six import moves
-from six.moves.urllib import parse as urlparse
+from six.moves.urllib import parse as url_parse

 from ceilometer.network.statistics.opendaylight import driver
 from ceilometer.openstack.common import test
@@ -58,20 +58,20 @@ class _Base(test.BaseTestCase):
     def inactive_hosts_data(self):
         pass

-    fake_odl_url = urlparse.ParseResult('opendaylight',
-                                        'localhost:8080',
-                                        'controller/nb/v2',
-                                        None,
-                                        None,
-                                        None)
+    fake_odl_url = url_parse.ParseResult('opendaylight',
+                                         'localhost:8080',
+                                         'controller/nb/v2',
+                                         None,
+                                         None,
+                                         None)

-    fake_params = urlparse.parse_qs('user=admin&password=admin&scheme=http&'
-                                    'container_name=default&auth=basic')
+    fake_params = url_parse.parse_qs('user=admin&password=admin&scheme=http&'
+                                     'container_name=default&auth=basic')

-    fake_params_multi_container = \
-        urlparse.parse_qs('user=admin&password=admin&scheme=http&'
-                          'container_name=first&container_name=second&'
-                          'auth=basic')
+    fake_params_multi_container = (
+        url_parse.parse_qs('user=admin&password=admin&scheme=http&'
+                           'container_name=first&container_name=second&'
+                           'auth=basic'))

     def setUp(self):
         super(_Base, self).setUp()

View File

@@ -866,9 +866,9 @@ class BasePipelineTestCase(test.BaseTestCase):

     def _do_test_rate_of_change_conversion(self, prev, curr, type, expected,
                                            offset=1, weight=None):
-        s = "(resource_metadata.user_metadata.autoscaling_weight or 1.0)" \
-            "* (resource_metadata.non.existent or 1.0)" \
-            "* (100.0 / (10**9 * (resource_metadata.cpu_number or 1)))"
+        s = ("(resource_metadata.user_metadata.autoscaling_weight or 1.0)"
+             "* (resource_metadata.non.existent or 1.0)"
+             "* (100.0 / (10**9 * (resource_metadata.cpu_number or 1)))")
         transformer_cfg = [
             {
                 'name': 'rate_of_change',

View File

@@ -105,8 +105,8 @@ class TestPublish(tests_base.BaseTestCase):
         endpoint = mock.MagicMock(['record_metering_data'])
         collector = messaging.get_rpc_server(
             self.transport, self.CONF.publisher_rpc.metering_topic, endpoint)
-        endpoint.record_metering_data.side_effect = \
-            lambda *args, **kwds: collector.stop()
+        endpoint.record_metering_data.side_effect = (lambda *args, **kwds:
+                                                     collector.stop())

         collector.start()
         eventlet.sleep()

View File

@@ -188,6 +188,6 @@ class CapabilitiesTest(test_base.BaseTestCase):
         expected_capabilities = {
             'storage': {'production_ready': True},
         }
-        actual_capabilities = impl_mongodb.Connection.\
-            get_storage_capabilities()
+        actual_capabilities = (impl_mongodb.Connection.
+                               get_storage_capabilities())
         self.assertEqual(expected_capabilities, actual_capabilities)

View File

@@ -240,6 +240,6 @@ class CapabilitiesTest(test_base.BaseTestCase):
         expected_capabilities = {
             'storage': {'production_ready': True},
         }
-        actual_capabilities = impl_sqlalchemy.Connection.\
-            get_storage_capabilities()
+        actual_capabilities = (impl_sqlalchemy.
+                               Connection.get_storage_capabilities())
        self.assertEqual(expected_capabilities, actual_capabilities)

View File

@@ -2737,8 +2737,7 @@ class GetEventTest(EventTestBase):
         self.start = datetime.datetime(2013, 12, 31, 5, 0)
         now = self.start
         for event_type in ['Foo', 'Bar', 'Zoo', 'Foo', 'Bar', 'Zoo']:
-            trait_models = \
-                [models.Trait(name, dtype, value)
-                 for name, dtype, value in [
-                     ('trait_A', models.Trait.TEXT_TYPE,
-                      "my_%s_text" % event_type),
+            trait_models = [models.Trait(name, dtype, value)
+                            for name, dtype, value in [
+                                ('trait_A', models.Trait.TEXT_TYPE,
+                                 "my_%s_text" % event_type),
@@ -2888,8 +2887,7 @@ class GetEventTest(EventTestBase):
                          trait_dict["trait_D"])

     def test_get_all_traits(self):
-        traits = self.conn.\
-            get_traits("Foo")
+        traits = self.conn.get_traits("Foo")
         traits = [t for t in traits]
         self.assertEqual(8, len(traits))

View File

@@ -80,9 +80,9 @@ class BinSendSampleTestCase(base.BaseTestCase):
     def setUp(self):
         super(BinSendSampleTestCase, self).setUp()
         pipeline_cfg_file = self.path_get('etc/ceilometer/pipeline.yaml')
-        content = "[DEFAULT]\n"\
-                  "rpc_backend=fake\n"\
-                  "pipeline_cfg_file={0}\n".format(pipeline_cfg_file)
+        content = ("[DEFAULT]\n"
+                   "rpc_backend=fake\n"
+                   "pipeline_cfg_file={0}\n".format(pipeline_cfg_file))

         self.tempfile = fileutils.write_to_tempfile(content=content,
                                                     prefix='ceilometer',
@@ -105,11 +105,10 @@ class BinApiTestCase(base.BaseTestCase):
     def setUp(self):
         super(BinApiTestCase, self).setUp()
         # create api_paste.ini file without authentication
-        content = \
-            "[pipeline:main]\n"\
-            "pipeline = api-server\n"\
-            "[app:api-server]\n"\
-            "paste.app_factory = ceilometer.api.app:app_factory\n"
+        content = ("[pipeline:main]\n"
+                   "pipeline = api-server\n"
+                   "[app:api-server]\n"
+                   "paste.app_factory = ceilometer.api.app:app_factory\n")
         self.paste = fileutils.write_to_tempfile(content=content,
                                                  prefix='api_paste',
                                                  suffix='.ini')
@@ -119,20 +118,20 @@ class BinApiTestCase(base.BaseTestCase):
         self.http = httplib2.Http()
         pipeline_cfg_file = self.path_get('etc/ceilometer/pipeline.yaml')
         policy_file = self.path_get('etc/ceilometer/policy.json')
-        content = "[DEFAULT]\n"\
-                  "rpc_backend=fake\n"\
-                  "auth_strategy=noauth\n"\
-                  "debug=true\n"\
-                  "pipeline_cfg_file={0}\n"\
-                  "policy_file={1}\n"\
-                  "api_paste_config={2}\n"\
-                  "[api]\n"\
-                  "port={3}\n"\
-                  "[database]\n"\
-                  "connection=log://localhost\n".format(pipeline_cfg_file,
-                                                        policy_file,
-                                                        self.paste,
-                                                        self.api_port)
+        content = ("[DEFAULT]\n"
+                   "rpc_backend=fake\n"
+                   "auth_strategy=noauth\n"
+                   "debug=true\n"
+                   "pipeline_cfg_file={0}\n"
+                   "policy_file={1}\n"
+                   "api_paste_config={2}\n"
+                   "[api]\n"
+                   "port={3}\n"
+                   "[database]\n"
+                   "connection=log://localhost\n".format(pipeline_cfg_file,
+                                                         policy_file,
+                                                         self.paste,
+                                                         self.api_port))

         self.tempfile = fileutils.write_to_tempfile(content=content,
                                                     prefix='ceilometer',

View File

@@ -112,8 +112,8 @@ class TestNotification(tests_base.BaseTestCase):
     @mock.patch('ceilometer.event.endpoint.EventsNotificationEndpoint')
     def _do_process_notification_manager_start(self,
                                                fake_event_endpoint_class):
-        with mock.patch.object(self.srv, '_get_notifications_manager') \
-                as get_nm:
+        with mock.patch.object(self.srv,
+                               '_get_notifications_manager') as get_nm:
             get_nm.side_effect = self.fake_get_notifications_manager
             self.srv.start()
         self.fake_event_endpoint = fake_event_endpoint_class.return_value
@@ -155,8 +155,8 @@ class TestNotification(tests_base.BaseTestCase):
                        mock.MagicMock())
     def test_event_dispatcher_loaded(self):
         self.CONF.set_override("store_events", True, group="notification")
-        with mock.patch.object(self.srv, '_get_notifications_manager') \
-                as get_nm:
+        with mock.patch.object(self.srv,
+                               '_get_notifications_manager') as get_nm:
             get_nm.side_effect = self.fake_get_notifications_manager
             self.srv.start()
         self.assertEqual(2, len(self.srv.listeners[0].dispatcher.endpoints))

View File

@@ -239,9 +239,9 @@ class AggregatorTransformer(ScalingTransformer):
                     getattr(sample, field))

     def flush(self, context):
-        expired = self.retention_time and \
-            timeutils.is_older_than(self.initial_timestamp,
-                                    self.retention_time)
+        expired = (self.retention_time and
+                   timeutils.is_older_than(self.initial_timestamp,
+                                           self.retention_time))
         full = self.aggregated_samples >= self.size
         if full or expired:
             x = self.samples.values()

View File

@@ -80,9 +80,9 @@ def dt_to_decimal(utc):
         return None

     decimal.getcontext().prec = 30
-    return decimal.Decimal(str(calendar.timegm(utc.utctimetuple()))) + \
-        (decimal.Decimal(str(utc.microsecond)) /
-         decimal.Decimal("1000000.0"))
+    return (decimal.Decimal(str(calendar.timegm(utc.utctimetuple()))) +
+            (decimal.Decimal(str(utc.microsecond)) /
+             decimal.Decimal("1000000.0")))


 def decimal_to_dt(dec):

View File

@@ -39,8 +39,7 @@ commands = {posargs}

 [flake8]
 # H305  imports not grouped correctly
 # H405  multi line docstring summary not separated with an empty line
-# H904  Wrap long lines in parentheses instead of a backslash
-ignore = H305,H405,H904
+ignore = H305,H405
 builtins = _
 exclude=.venv,.git,.tox,dist,doc,./ceilometer/openstack/common,*lib/python*,*egg,tools,nova_tests,build
 show-source = True
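
With H904 gone from the flake8 ignore list, the style gate (typically run as
"tox -e pep8" in OpenStack projects, though the exact env name here is an
assumption) once again fails on any newly introduced backslash continuation,
which is the "re-enable gating" half of the commit title.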