Python 3: generalize the usage of the six module

* Replace itertools.ifilter() with six.moves.filter()
* Replace itertools.imap() with six.moves.map()
* Replace map(_compare, statistics) with
  [_compare(statistic) for statistic in statistics]
* Replace obj.iterkeys() with six.iterkeys(obj)
* Replace obj.iteritems() with six.iteritems(obj)
* Replace xrange() with six.moves.xrange(), or with range() for small ranges
* Replace the repr module with six.moves.reprlib

Change-Id: Iaaa328cc15355182bde444a1aeaa4385691c8f90
commit de9c4891e7 (parent 253a6288e8)
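
The substitutions listed above are the standard six compatibility idioms. A minimal sketch of how they behave the same on Python 2 and 3 follows; the dictionary and values are illustrative and not taken from the ceilometer tree:

    import six
    from six import moves

    data = {'a': 1, 'b': 2}                          # illustrative dict

    # Lazy iterators with one spelling on both Python 2 and 3
    # (itertools.ifilter/imap on 2, the filter/map builtins on 3):
    evens = moves.filter(lambda x: x % 2 == 0, moves.xrange(10))
    squares = moves.map(lambda x: x * x, moves.xrange(5))

    # Dict iteration without the Python 2-only .iterkeys()/.iteritems():
    keys = list(six.iterkeys(data))                  # ['a', 'b'] in some order
    pairs = list(six.iteritems(data))                # [('a', 1), ('b', 2)] in some order

    # For a small, fixed range the plain builtin is enough on either version:
    for i in range(3):
        pass
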
@@ -26,6 +26,7 @@ import random
 from oslo_config import cfg
 from oslo_context import context
 import six
+from six import moves
 from six.moves.urllib import parse as urlparse
 from stevedore import extension
 
@@ -214,7 +215,7 @@ class AgentManager(os_service.Service):
         extensions = (self._extensions('poll', namespace).extensions
                       for namespace in namespaces)
         if pollster_list:
-            extensions = (itertools.ifilter(_match, exts)
+            extensions = (moves.filter(_match, exts)
                           for exts in extensions)
 
         self.extensions = list(itertools.chain(*list(extensions)))

@@ -16,7 +16,7 @@
 # under the License.
 
 
-import itertools
+from six import moves
 
 from ceilometer.alarm import evaluator
 from ceilometer.i18n import _
@@ -106,8 +106,7 @@ class CombinationEvaluator(evaluator.Evaluator):
             return
 
         states = zip(alarm.rule['alarm_ids'],
-                     itertools.imap(self._get_alarm_state,
-                                    alarm.rule['alarm_ids']))
+                     moves.map(self._get_alarm_state, alarm.rule['alarm_ids']))
 
         if self._sufficient_states(alarm, states):
             self._transition(alarm, states)

@@ -201,4 +201,4 @@ class ThresholdEvaluator(evaluator.Evaluator):
 
             self._transition(alarm,
                              statistics,
-                             map(_compare, statistics))
+                             [_compare(statistic) for statistic in statistics])

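The hunk above drops map() in favour of a list comprehension because on Python 3 map() returns a lazy iterator rather than a list, while the evaluator needs an actual list of comparison results. A minimal sketch of the difference, with an illustrative stand-in for _compare and made-up values:

    def _compare(statistic):
        # illustrative stand-in for the evaluator's comparison
        return statistic > 2

    statistics = [1, 2, 3]                       # illustrative values

    lazy = map(_compare, statistics)             # a map object on Python 3, a list on Python 2
    eager = [_compare(s) for s in statistics]    # always a list: [False, False, True]
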
@@ -18,6 +18,7 @@
 from oslo_config import cfg
 from oslo_utils import units
 from oslo_vmware import api
+import six
 
 from ceilometer.compute.virt import inspector as virt_inspector
 from ceilometer.compute.virt.vmware import vsphere_operations
@@ -129,7 +130,7 @@ class VsphereInspector(virt_inspector.Inspector):
             vnic_id_to_stats_map = self._ops.query_vm_device_stats(
                 vm_moid, net_counter_id, duration)
             vnic_stats[net_counter] = vnic_id_to_stats_map
-            vnic_ids.update(vnic_id_to_stats_map.iterkeys())
+            vnic_ids.update(six.iterkeys(vnic_id_to_stats_map))
 
         # Stats provided from vSphere are in KB/s, converting it to B/s.
         for vnic_id in vnic_ids:
@@ -180,7 +181,7 @@ class VsphereInspector(virt_inspector.Inspector):
             disk_id_to_stat_map = self._ops.query_vm_device_stats(
                 vm_moid, disk_counter_id, duration)
             disk_stats[disk_counter] = disk_id_to_stat_map
-            disk_ids.update(disk_id_to_stat_map.iterkeys())
+            disk_ids.update(six.iterkeys(disk_id_to_stat_map))
 
         for disk_id in disk_ids:
 

@@ -21,6 +21,8 @@
 
 from pysnmp.entity.rfc3413.oneliner import cmdgen
 
+import six
+
 from ceilometer.hardware.inspector import base
 
 
@@ -347,7 +349,7 @@ class SNMPInspector(base.Inspector):
     @classmethod
     def construct_metadata(cls, oid_cache, meta_defs, suffix=''):
         metadata = {}
-        for key, oid_def in meta_defs.iteritems():
+        for key, oid_def in six.iteritems(meta_defs):
             metadata[key] = cls.get_oid_value(oid_cache, oid_def, suffix)
         return metadata
 
@@ -423,7 +425,7 @@ class SNMPInspector(base.Inspector):
         # populate the oid into cache
         self._query_oids(host, [self._interface_ip_oid], cache, True)
         ip_addr = ''
-        for k, v in oid_cache.iteritems():
+        for k, v in six.iteritems(oid_cache):
             if k.startswith(self._interface_ip_oid) and v == int(suffix[1:]):
                 ip_addr = k.replace(self._interface_ip_oid + ".", "")
         metadata.update(ip=ip_addr)

@@ -184,7 +184,7 @@ def improve_keys(data, metaquery=False):
        return data
 
    if metaquery:
-        for key in data.iterkeys():
+        for key in six.iterkeys(data):
            if '.$' in key:
                key_list = []
                for k in quote_key(key):

@@ -403,12 +403,12 @@ class TestEvaluate(base.TestEvaluatorBase):
         avgs = [self._get_stat('avg',
                                threshold + (v if v < 10 else -v),
                                count=20 if v < 10 else 1)
-                for v in xrange(1, 11)]
+                for v in moves.xrange(1, 11)]
         threshold = self.alarms[1].rule['threshold']
         maxs = [self._get_stat('max',
                                threshold - (v if v < 7 else -v),
                                count=20 if v < 7 else 1)
-                for v in xrange(8)]
+                for v in moves.xrange(8)]
         self.api_client.statistics.list.side_effect = [avgs, maxs]
         self._evaluate_all_alarms()
         self._assert_all_alarms('alarm' if exclude_outliers else 'ok')
@@ -445,12 +445,12 @@ class TestEvaluate(base.TestEvaluatorBase):
         avgs = [self._get_stat('avg',
                                threshold - (v if v < 9 else -v),
                                count=20 if v < 9 else 1)
-                for v in xrange(10)]
+                for v in moves.xrange(10)]
         threshold = self.alarms[1].rule['threshold']
         maxs = [self._get_stat('max',
                                threshold + (v if v < 8 else -v),
                                count=20 if v < 8 else 1)
-                for v in xrange(1, 9)]
+                for v in moves.xrange(1, 9)]
         self.api_client.statistics.list.side_effect = [avgs, maxs]
         self._evaluate_all_alarms()
         self._assert_all_alarms('ok' if exclude_outliers else 'alarm')

@@ -1460,7 +1460,7 @@ class TestSelectableAggregates(v2.FunctionalTest,
         # add a large number of datapoints that won't impact on cardinality
         # if the computation logic is tolerant of different DB behavior on
         # larger numbers of samples per-period
-        for i in xrange(200):
+        for i in range(200):
            s = sample.Sample(
                'instance',
                sample.TYPE_GAUGE,

@@ -57,7 +57,7 @@ class FakeRequest(object):
        if 'wsgi.input' not in environ:
            environ['wsgi.input'] = six.moves.cStringIO('')
 
-        for header, value in headers.iteritems():
+        for header, value in six.iteritems(headers):
            environ['HTTP_%s' % header.upper()] = value
        self.environ = environ
 

@@ -19,7 +19,7 @@
 """
 
 import datetime
-import repr
+from six.moves import reprlib
 
 import mock
 from oslo_utils import timeutils
@@ -62,7 +62,7 @@ class EventTypeTest(tests_db.TestBase):
         self.assertNotEqual(et1.id, et2.id)
         self.assertNotEqual(et1.desc, et2.desc)
         # Test the method __repr__ returns a string
-        self.assertTrue(repr.repr(et2))
+        self.assertTrue(reprlib.repr(et2))
 
 
 @tests_db.run_with('sqlite', 'mysql', 'pgsql')
@@ -100,7 +100,7 @@ class EventTest(tests_db.TestBase):
     def test_event_repr(self):
         ev = sql_models.Event('msg_id', None, False, {})
         ev.id = 100
-        self.assertTrue(repr.repr(ev))
+        self.assertTrue(reprlib.repr(ev))
 
 
 @tests_db.run_with('sqlite', 'mysql', 'pgsql')

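Python 3 renamed the standard-library repr module to reprlib, and six.moves resolves the right module on either interpreter; reprlib.repr() returns a size-limited representation string, which is all the tests above assert on. A minimal sketch with an illustrative object:

    from six.moves import reprlib

    long_list = list(range(100))                 # illustrative object
    summary = reprlib.repr(long_list)            # truncated, e.g. '[0, 1, 2, 3, 4, 5, ...]'
    assert isinstance(summary, str)
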
@@ -27,6 +27,7 @@ import uuid
 
 import make_test_data
 from oslo_context import context
+from six import moves
 
 from ceilometer import messaging
 from ceilometer import service
@@ -53,7 +54,7 @@ def generate_data(rpc_client, make_data_args, samples_count,
 
    make_data_args.resource_id = None
    resources_list = [str(uuid.uuid4())
-                      for _ in xrange(resources_count)]
+                      for _ in moves.xrange(resources_count)]
    resource_samples = {resource: 0 for resource in resources_list}
    batch = []
    count = 0