Merge "Fix usage of dictionary methods on Python 3"

This commit is contained in:
Jenkins 2015-05-28 09:29:03 +00:00 committed by Gerrit Code Review
commit b02d6835a6
7 changed files with 36 additions and 35 deletions

View File

@@ -97,7 +97,7 @@ class ValidatedComplexQuery(object):
"project": "project_id"}
self.name_mapping.update(additional_name_mapping)
valid_keys = db_model.get_field_names()
valid_keys = list(valid_keys) + self.name_mapping.keys()
valid_keys = list(valid_keys) + list(self.name_mapping.keys())
valid_fields = _list_to_regexp(valid_keys)
if metadata_allowed:
@@ -239,7 +239,7 @@ class ValidatedComplexQuery(object):
self._replace_field_names(orderby_field)
def _traverse_postorder(self, tree, visitor):
op = tree.keys()[0]
op = list(tree.keys())[0]
if op.lower() in self.complex_operators:
for i, operand in enumerate(tree[op]):
self._traverse_postorder(operand, visitor)
@@ -252,12 +252,11 @@ class ValidatedComplexQuery(object):
visibility_field):
"""Do not allow other than own_project_id."""
def check_project_id(subfilter):
op = subfilter.keys()[0]
op, value = list(subfilter.items())[0]
if (op.lower() not in self.complex_operators
and subfilter[op].keys()[0] == visibility_field
and subfilter[op][visibility_field] != own_project_id):
raise base.ProjectNotAuthorized(
subfilter[op][visibility_field])
and list(value.keys())[0] == visibility_field
and value[visibility_field] != own_project_id):
raise base.ProjectNotAuthorized(value[visibility_field])
self._traverse_postorder(self.filter_expr, check_project_id)
@@ -283,26 +282,25 @@ class ValidatedComplexQuery(object):
def _replace_isotime_with_datetime(self, filter_expr):
def replace_isotime(subfilter):
op = subfilter.keys()[0]
if (op.lower() not in self.complex_operators
and subfilter[op].keys()[0] in self.timestamp_fields):
field = subfilter[op].keys()[0]
date_time = self._convert_to_datetime(subfilter[op][field])
subfilter[op][field] = date_time
op, value = list(subfilter.items())[0]
if op.lower() not in self.complex_operators:
field = list(value.keys())[0]
if field in self.timestamp_fields:
date_time = self._convert_to_datetime(subfilter[op][field])
subfilter[op][field] = date_time
self._traverse_postorder(filter_expr, replace_isotime)
def _normalize_field_names_for_db_model(self, filter_expr):
def _normalize_field_names(subfilter):
op = subfilter.keys()[0]
op, value = list(subfilter.items())[0]
if op.lower() not in self.complex_operators:
self._replace_field_names(subfilter.values()[0])
self._replace_field_names(value)
self._traverse_postorder(filter_expr,
_normalize_field_names)
def _replace_field_names(self, subfilter):
field = subfilter.keys()[0]
value = subfilter[field]
field, value = list(subfilter.items())[0]
if field in self.name_mapping:
del subfilter[field]
subfilter[self.name_mapping[field]] = value

View File

@@ -247,15 +247,15 @@ class Connection(base.Connection):
trait_filter = filters.pop()
key = trait_filter.pop('key')
op = trait_filter.pop('op', 'eq')
trait_subq, t_model = _build_trait_query(
session, trait_filter.keys()[0], key,
trait_filter.values()[0], op)
trait_type, value = list(trait_filter.items())[0]
trait_subq, t_model = _build_trait_query(session, trait_type,
key, value, op)
for trait_filter in filters:
key = trait_filter.pop('key')
op = trait_filter.pop('op', 'eq')
q, model = _build_trait_query(
session, trait_filter.keys()[0], key,
trait_filter.values()[0], op)
trait_type, value = list(trait_filter.items())[0]
q, model = _build_trait_query(session, trait_type,
key, value, op)
trait_subq = trait_subq.filter(
q.filter(model.event_id == t_model.event_id).exists())
trait_subq = trait_subq.subquery()

View File

@@ -79,8 +79,7 @@ class QueryTransformer(object):
def _handle_simple_op(self, simple_op, nodes):
op = self._get_operator(simple_op)
field_name = nodes.keys()[0]
value = nodes.values()[0]
field_name, value = list(nodes.items())[0]
if field_name.startswith('resource_metadata.'):
return self._handle_metadata(op, field_name, value)
else:
@@ -103,8 +102,7 @@ class QueryTransformer(object):
return op(meta_alias.value, value)
def _transform(self, sub_tree):
operator = sub_tree.keys()[0]
nodes = sub_tree.values()[0]
operator, nodes = list(sub_tree.items())[0]
if operator in self.complex_operators:
return self._handle_complex_op(operator, nodes)
else:
@@ -122,9 +120,10 @@ class QueryTransformer(object):
def _apply_order_by(self, orderby):
if orderby is not None:
for field in orderby:
ordering_function = self.ordering_functions[field.values()[0]]
attr, order = list(field.items())[0]
ordering_function = self.ordering_functions[order]
self.query = self.query.order_by(ordering_function(
getattr(self.table, field.keys()[0])))
getattr(self.table, attr)))
else:
self.query = self.query.order_by(desc(self.table.timestamp))

View File

@@ -311,7 +311,8 @@ class BaseAgentManagerTestCase(base.BaseTestCase):
self.assertEqual(1, len(per_task_resources))
self.assertEqual(set(self.pipeline_cfg[0]['resources']),
set(per_task_resources['test_pipeline-test'].get({})))
self.mgr.interval_task(polling_tasks.values()[0])
task = list(polling_tasks.values())[0]
self.mgr.interval_task(task)
pub = self.mgr.pipeline_manager.pipelines[0].publishers[0]
del pub.samples[0].resource_metadata['resources']
self.assertEqual(self.Pollster.test_data, pub.samples[0])
@@ -716,7 +717,7 @@ class BaseAgentManagerTestCase(base.BaseTestCase):
self, get_samples, LOG):
self.pipeline_cfg[0]['resources'] = []
self.setup_pipeline()
polling_task = self.mgr.setup_polling_tasks().values()[0]
polling_task = list(self.mgr.setup_polling_tasks().values())[0]
pollster = list(polling_task.pollster_matches['test_pipeline'])[0]
polling_task.poll_and_publish()

View File

@@ -193,7 +193,8 @@ class TestRunTasks(agentbase.BaseAgentManagerTestCase):
def test_get_sample_resources(self):
polling_tasks = self.mgr.setup_polling_tasks()
self.mgr.interval_task(polling_tasks.values()[0])
task = list(polling_tasks.values())[0]
self.mgr.interval_task(task)
self.assertTrue(self.Pollster.resources)
def test_when_keystone_fail(self):
@@ -215,7 +216,8 @@ class TestRunTasks(agentbase.BaseAgentManagerTestCase):
self.pipeline_cfg,
self.transformer_manager)
polling_tasks = self.mgr.setup_polling_tasks()
self.mgr.interval_task(polling_tasks.values()[0])
task = list(polling_tasks.values())[0]
self.mgr.interval_task(task)
self.assertFalse(self.PollsterKeystone.samples)
def test_interval_exception_isolation(self):
@@ -239,7 +241,7 @@ class TestRunTasks(agentbase.BaseAgentManagerTestCase):
self.mgr.pipeline_manager = pipeline.PipelineManager(
self.pipeline_cfg,
self.transformer_manager)
polling_task = self.mgr.setup_polling_tasks().values()[0]
polling_task = list(self.mgr.setup_polling_tasks().values())[0]
pollster = list(polling_task.pollster_matches[source_name])[0]
# 2 samples after 4 pollings, as pollster got disabled upon exception

View File

@@ -43,7 +43,7 @@ class ArithmeticTransformer(transformer.TransformerBase):
self.target = target
self.expr = target.get('expr', '')
self.expr_escaped, self.escaped_names = self.parse_expr(self.expr)
self.required_meters = self.escaped_names.values()
self.required_meters = list(self.escaped_names.values())
self.misconfigured = len(self.required_meters) == 0
if not self.misconfigured:
self.reference_meter = self.required_meters[0]

View File

@@ -65,6 +65,7 @@ commands = python -m testtools.run \
ceilometer.tests.publisher.test_kafka_broker_publisher \
ceilometer.tests.publisher.test_messaging_publisher \
ceilometer.tests.publisher.test_utils \
ceilometer.tests.storage.test_impl_sqlalchemy \
ceilometer.tests.test_bin \
ceilometer.tests.test_collector \
ceilometer.tests.test_coordination \