Replace dict.iteritems() with six.iteritems(dict)
This change is needed to make the Ceilometer code compatible with both Python 2.x and 3.x in the future. Oslo modules are left unchanged here; they will be updated directly in the oslo-incubator code. Partially-Implements: blueprint ceilometer-py33-support Change-Id: Ic7fe5819f85524bb36f04dec6cad10d4cf2b935d
This commit is contained in:
parent
0d8cb5e751
commit
d65dd17016
@ -20,6 +20,7 @@
|
||||
import collections
|
||||
import itertools
|
||||
|
||||
import six
|
||||
from six.moves.urllib import parse as urlparse
|
||||
from stevedore import extension
|
||||
|
||||
@ -137,7 +138,7 @@ class AgentManager(os_service.Service):
|
||||
def start(self):
|
||||
self.pipeline_manager = pipeline.setup_pipeline()
|
||||
|
||||
for interval, task in self.setup_polling_tasks().iteritems():
|
||||
for interval, task in six.iteritems(self.setup_polling_tasks()):
|
||||
self.tg.add_timer(interval,
|
||||
self.interval_task,
|
||||
task=task)
|
||||
|
@ -23,6 +23,7 @@ from nova import notifications
|
||||
from nova.openstack.common import log as logging
|
||||
from nova.openstack.common.notifier import api as notifier_api
|
||||
from nova import utils
|
||||
import six
|
||||
from stevedore import extension
|
||||
|
||||
# HACK(dhellmann): Insert the nova version of openstack.common into
|
||||
@ -96,7 +97,7 @@ class Instance(object):
|
||||
can pass it to the pollsters.
|
||||
"""
|
||||
def __init__(self, context, info):
|
||||
for k, v in info.iteritems():
|
||||
for k, v in six.iteritems(info):
|
||||
if k == 'name':
|
||||
setattr(self, 'OS-EXT-SRV-ATTR:instance_name', v)
|
||||
elif k == 'metadata':
|
||||
|
@ -19,6 +19,7 @@ import datetime
|
||||
from keystoneclient import exceptions
|
||||
from oslo.config import cfg
|
||||
import requests
|
||||
import six
|
||||
|
||||
from ceilometer.central import plugin
|
||||
from ceilometer.openstack.common.gettextutils import _
|
||||
@ -45,7 +46,7 @@ class KwapiClient(object):
|
||||
request = requests.get(probes_url, headers=headers)
|
||||
message = request.json()
|
||||
probes = message['probes']
|
||||
for key, value in probes.iteritems():
|
||||
for key, value in six.iteritems(probes):
|
||||
probe_dict = value
|
||||
probe_dict['id'] = key
|
||||
yield probe_dict
|
||||
|
@ -21,6 +21,7 @@ import datetime
|
||||
import inspect
|
||||
import math
|
||||
|
||||
import six
|
||||
from six import moves
|
||||
|
||||
from ceilometer.openstack.common import timeutils
|
||||
@ -108,7 +109,7 @@ class Model(object):
|
||||
|
||||
def __init__(self, **kwds):
|
||||
self.fields = list(kwds)
|
||||
for k, v in kwds.iteritems():
|
||||
for k, v in six.iteritems(kwds):
|
||||
setattr(self, k, v)
|
||||
|
||||
def as_dict(self):
|
||||
|
@ -29,6 +29,7 @@ import sys
|
||||
import bson.code
|
||||
import bson.objectid
|
||||
import pymongo
|
||||
import six
|
||||
|
||||
from ceilometer.openstack.common import log
|
||||
from ceilometer.openstack.common import timeutils
|
||||
@ -266,7 +267,7 @@ class Connection(pymongo_base.Connection):
|
||||
q['resource_id'] = resource
|
||||
# Add resource_ prefix so it matches the field in the db
|
||||
q.update(dict(('resource_' + k, v)
|
||||
for (k, v) in metaquery.iteritems()))
|
||||
for (k, v) in six.iteritems(metaquery)))
|
||||
|
||||
if start_timestamp or end_timestamp:
|
||||
# Look for resources matching the above criteria and with
|
||||
|
@ -31,6 +31,7 @@ import bson.code
|
||||
import bson.objectid
|
||||
from oslo.config import cfg
|
||||
import pymongo
|
||||
import six
|
||||
|
||||
from ceilometer.openstack.common import log
|
||||
from ceilometer.openstack.common import timeutils
|
||||
@ -698,7 +699,7 @@ class Connection(pymongo_base.Connection):
|
||||
|
||||
# Add resource_ prefix so it matches the field in the db
|
||||
query.update(dict(('resource_' + k, v)
|
||||
for (k, v) in metaquery.iteritems()))
|
||||
for (k, v) in six.iteritems(metaquery)))
|
||||
|
||||
# FIXME(dhellmann): This may not perform very well,
|
||||
# but doing any better will require changing the database
|
||||
@ -753,7 +754,7 @@ class Connection(pymongo_base.Connection):
|
||||
query['_id'] = resource
|
||||
|
||||
query.update(dict((k, v)
|
||||
for (k, v) in metaquery.iteritems()))
|
||||
for (k, v) in six.iteritems(metaquery)))
|
||||
|
||||
keys = base._handle_sort_key('resource')
|
||||
sort_keys = ['last_sample_timestamp' if i == 'timestamp' else i
|
||||
|
@ -23,6 +23,7 @@ import os
|
||||
import types
|
||||
|
||||
from oslo.config import cfg
|
||||
import six
|
||||
from sqlalchemy import and_
|
||||
from sqlalchemy import asc
|
||||
from sqlalchemy import desc
|
||||
@ -123,7 +124,7 @@ def apply_metaquery_filter(session, query, metaquery):
|
||||
:param query: Query instance
|
||||
:param metaquery: dict with metadata to match on.
|
||||
"""
|
||||
for k, value in metaquery.iteritems():
|
||||
for k, value in six.iteritems(metaquery):
|
||||
key = k[9:] # strip out 'metadata.' prefix
|
||||
try:
|
||||
_model = META_TYPE_MAP[type(value)]
|
||||
@ -1016,7 +1017,7 @@ class Connection(base.Connection):
|
||||
models.TraitType.id,
|
||||
models.TraitType.desc == trait_name]
|
||||
|
||||
for key, value in trait_filter.iteritems():
|
||||
for key, value in six.iteritems(trait_filter):
|
||||
if key == 'string':
|
||||
conditions.append(models.Trait.t_string == value)
|
||||
elif key == 'integer':
|
||||
|
@ -23,6 +23,7 @@ import time
|
||||
|
||||
from oslo.config import cfg
|
||||
import pymongo
|
||||
import six
|
||||
import weakref
|
||||
|
||||
from ceilometer.openstack.common.gettextutils import _
|
||||
@ -89,7 +90,7 @@ def make_events_query_from_filter(event_filter):
|
||||
for trait_filter in event_filter.traits_filter:
|
||||
op = trait_filter.pop('op', 'eq')
|
||||
dict_query = {}
|
||||
for k, v in trait_filter.iteritems():
|
||||
for k, v in six.iteritems(trait_filter):
|
||||
if v is not None:
|
||||
# All parameters in EventFilter['traits'] are optional, so
|
||||
# we need to check if they are in the query or no.
|
||||
@ -149,7 +150,7 @@ def make_query_from_filter(sample_filter, require_meter=True):
|
||||
# so the samples call metadata resource_metadata, so we convert
|
||||
# to that.
|
||||
q.update(dict(('resource_%s' % k, v)
|
||||
for (k, v) in sample_filter.metaquery.iteritems()))
|
||||
for (k, v) in six.iteritems(sample_filter.metaquery)))
|
||||
return q
|
||||
|
||||
|
||||
|
@ -19,6 +19,7 @@ SQLAlchemy models for Ceilometer data.
|
||||
|
||||
import json
|
||||
|
||||
import six
|
||||
from sqlalchemy import (Column, Integer, String, ForeignKey, Index,
|
||||
UniqueConstraint, BigInteger, join)
|
||||
from sqlalchemy import Float, Boolean, Text, DateTime
|
||||
@ -91,7 +92,7 @@ class CeilometerBase(object):
|
||||
|
||||
def update(self, values):
|
||||
"""Make the model object behave like a dict."""
|
||||
for k, v in values.iteritems():
|
||||
for k, v in six.iteritems(values):
|
||||
setattr(self, k, v)
|
||||
|
||||
|
||||
|
@ -25,6 +25,7 @@ import uuid
|
||||
|
||||
import mock
|
||||
import oslo.messaging.conffixture
|
||||
import six
|
||||
from six import moves
|
||||
|
||||
from ceilometer.alarm.storage import models
|
||||
@ -386,7 +387,7 @@ class TestAlarms(v2.FunctionalTest,
|
||||
'combination_rule': {}
|
||||
}
|
||||
}
|
||||
for field, json in jsons.iteritems():
|
||||
for field, json in six.iteritems(jsons):
|
||||
resp = self.post_json('/alarms', params=json, expect_errors=True,
|
||||
status=400, headers=self.auth_headers)
|
||||
self.assertEqual("Invalid input for field/attribute %s."
|
||||
@ -965,7 +966,7 @@ class TestAlarms(v2.FunctionalTest,
|
||||
'period': 180,
|
||||
}
|
||||
}
|
||||
for aspect, id in identifiers.iteritems():
|
||||
for aspect, id in six.iteritems(identifiers):
|
||||
json['%s_id' % aspect] = id
|
||||
return json
|
||||
|
||||
@ -1640,13 +1641,13 @@ class TestAlarms(v2.FunctionalTest,
|
||||
status=204)
|
||||
|
||||
def _assert_is_subset(self, expected, actual):
|
||||
for k, v in expected.iteritems():
|
||||
for k, v in six.iteritems(expected):
|
||||
self.assertEqual(v, actual.get(k), 'mismatched field: %s' % k)
|
||||
self.assertIsNotNone(actual['event_id'])
|
||||
|
||||
def _assert_in_json(self, expected, actual):
|
||||
actual = jsonutils.dumps(jsonutils.loads(actual), sort_keys=True)
|
||||
for k, v in expected.iteritems():
|
||||
for k, v in six.iteritems(expected):
|
||||
fragment = jsonutils.dumps({k: v}, sort_keys=True)[1:-1]
|
||||
self.assertTrue(fragment in actual,
|
||||
'%s not in %s' % (fragment, actual))
|
||||
|
@ -20,6 +20,7 @@
|
||||
import datetime
|
||||
|
||||
import mock
|
||||
import six
|
||||
import webtest.app
|
||||
|
||||
from ceilometer.openstack.common import timeutils
|
||||
@ -172,4 +173,4 @@ class TestListEvents(v2.FunctionalTest,
|
||||
('not_ignored_list', "['returned']"),
|
||||
('tag', 'self.sample'),
|
||||
],
|
||||
list(sorted(sample['resource_metadata'].iteritems())))
|
||||
list(sorted(six.iteritems(sample['resource_metadata']))))
|
||||
|
@ -476,7 +476,7 @@ class TestListResources(v2.FunctionalTest,
|
||||
(u'display_name', u'test-server'),
|
||||
(u'not_ignored_list', u"['returned']"),
|
||||
(u'tag', u'self.sample')],
|
||||
list(sorted(metadata.iteritems())))
|
||||
list(sorted(six.iteritems(metadata))))
|
||||
|
||||
def test_resource_meter_links(self):
|
||||
sample1 = sample.Sample(
|
||||
|
@ -20,6 +20,7 @@
|
||||
"""
|
||||
|
||||
import mock
|
||||
import six
|
||||
|
||||
from ceilometer.compute import manager
|
||||
from ceilometer.compute.pollsters import util
|
||||
@ -81,7 +82,7 @@ class TestLocationMetadata(test.BaseTestCase):
|
||||
|
||||
def test_metadata(self):
|
||||
md = util._get_metadata_from_object(self.instance)
|
||||
for prop, value in self.INSTANCE_PROPERTIES.iteritems():
|
||||
for prop, value in six.iteritems(self.INSTANCE_PROPERTIES):
|
||||
if prop not in ("metadata"):
|
||||
# Special cases
|
||||
if prop == 'name':
|
||||
|
@ -18,6 +18,7 @@ import datetime
|
||||
|
||||
from keystoneclient import exceptions
|
||||
import mock
|
||||
import six
|
||||
|
||||
from ceilometer.central import manager
|
||||
from ceilometer.energy import kwapi
|
||||
@ -88,7 +89,7 @@ class TestEnergyPollster(test.BaseTestCase):
|
||||
@staticmethod
|
||||
def fake_iter_probes(ksclient, cache):
|
||||
probes = PROBE_DICT['probes']
|
||||
for key, value in probes.iteritems():
|
||||
for key, value in six.iteritems(probes):
|
||||
probe_dict = value
|
||||
probe_dict['id'] = key
|
||||
yield probe_dict
|
||||
@ -148,7 +149,7 @@ class TestPowerPollster(test.BaseTestCase):
|
||||
@staticmethod
|
||||
def fake_iter_probes(ksclient, cache):
|
||||
probes = PROBE_DICT['probes']
|
||||
for key, value in probes.iteritems():
|
||||
for key, value in six.iteritems(probes):
|
||||
probe_dict = value
|
||||
probe_dict['id'] = key
|
||||
yield probe_dict
|
||||
|
@ -79,7 +79,7 @@ class Namespace(object):
|
||||
def __init__(self, seed):
|
||||
self.__dict__ = collections.defaultdict(lambda: Namespace({}))
|
||||
self.__dict__.update(seed)
|
||||
for k, v in self.__dict__.iteritems():
|
||||
for k, v in six.iteritems(self.__dict__):
|
||||
if isinstance(v, dict):
|
||||
self.__dict__[k] = Namespace(v)
|
||||
|
||||
|
@ -20,6 +20,8 @@ import keyword
|
||||
import math
|
||||
import re
|
||||
|
||||
import six
|
||||
|
||||
from ceilometer.openstack.common.gettextutils import _
|
||||
from ceilometer.openstack.common import log
|
||||
from ceilometer import sample
|
||||
@ -69,7 +71,7 @@ class ArithmeticTransformer(transformer.TransformerBase):
|
||||
def _calculate(self, resource_id):
|
||||
"""Evaluate the expression and return a new sample if successful."""
|
||||
ns_dict = dict((m, s.as_dict()) for m, s
|
||||
in self.cache[resource_id].iteritems())
|
||||
in six.iteritems(self.cache[resource_id]))
|
||||
ns = transformer.Namespace(ns_dict)
|
||||
try:
|
||||
new_volume = eval(self.expr_escaped, {}, ns)
|
||||
|
@ -32,7 +32,7 @@ from ceilometer.openstack.common import units
|
||||
|
||||
def recursive_keypairs(d, separator=':'):
|
||||
"""Generator that produces sequence of keypairs for nested dictionaries."""
|
||||
for name, value in sorted(d.iteritems()):
|
||||
for name, value in sorted(six.iteritems(d)):
|
||||
if isinstance(value, dict):
|
||||
for subname, subvalue in recursive_keypairs(value, separator):
|
||||
yield ('%s%s%s' % (name, separator, subname), subvalue)
|
||||
@ -107,7 +107,7 @@ def stringify_timestamps(data):
|
||||
"""Stringify any datetimes in given dict."""
|
||||
isa_timestamp = lambda v: isinstance(v, datetime.datetime)
|
||||
return dict((k, v.isoformat() if isa_timestamp(v) else v)
|
||||
for (k, v) in data.iteritems())
|
||||
for (k, v) in six.iteritems(data))
|
||||
|
||||
|
||||
def dict_to_keyval(value, key_base=None):
|
||||
@ -118,7 +118,7 @@ def dict_to_keyval(value, key_base=None):
|
||||
"""
|
||||
val_iter, key_func = None, None
|
||||
if isinstance(value, dict):
|
||||
val_iter = value.iteritems()
|
||||
val_iter = six.iteritems(value)
|
||||
key_func = lambda k: key_base + '.' + k if key_base else k
|
||||
elif isinstance(value, (tuple, list)):
|
||||
val_iter = enumerate(value)
|
||||
@ -155,7 +155,7 @@ def update_nested(original_dict, updates):
|
||||
Updates occur without replacing entire sub-dicts.
|
||||
"""
|
||||
dict_to_update = copy.deepcopy(original_dict)
|
||||
for key, value in updates.iteritems():
|
||||
for key, value in six.iteritems(updates):
|
||||
if isinstance(value, dict):
|
||||
sub_dict = update_nested(dict_to_update.get(key, {}), value)
|
||||
dict_to_update[key] = sub_dict
|
||||
|
@ -20,6 +20,7 @@ from __future__ import print_function
|
||||
import sys
|
||||
|
||||
from oslo.config import cfg
|
||||
import six
|
||||
|
||||
from ceilometer import storage
|
||||
|
||||
@ -38,7 +39,7 @@ def show_resources(db, args):
|
||||
print(u)
|
||||
for resource in db.get_resources(user=u):
|
||||
print(' %(resource_id)s %(timestamp)s' % resource)
|
||||
for k, v in sorted(resource['metadata'].iteritems()):
|
||||
for k, v in sorted(six.iteritems(resource['metadata'])):
|
||||
print(' %-10s : %s' % (k, v))
|
||||
for meter in resource['meter']:
|
||||
totals = db.get_statistics(storage.SampleFilter(
|
||||
|
Loading…
x
Reference in New Issue
Block a user