Merge "Fix order of arguments in assertEqual"
commit d9fd4940c9
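The change applies the usual OpenStack argument-order convention: these test base classes are typically built on testtools.TestCase, whose assertEqual is documented as assertEqual(expected, observed), so passing the observed value first makes failure output present the two sides swapped. A minimal sketch of the convention in plain unittest follows; TestArgumentOrder and compute_answer are hypothetical names for illustration, not part of this change.

import unittest


def compute_answer():
    # Hypothetical stand-in for the value produced by the code under test.
    return 42


class TestArgumentOrder(unittest.TestCase):
    def test_expected_value_comes_first(self):
        observed = compute_answer()
        # Discouraged order (the pattern this commit removes): on failure the
        # report would present the observed value as if it were the expected one.
        #     self.assertEqual(observed, 42)
        # Preferred order: expected value first, observed value second.
        self.assertEqual(42, observed)


if __name__ == '__main__':
    unittest.main()

Every hunk below is the same mechanical swap: no assertion semantics change, only the order of the two arguments.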
@@ -138,7 +138,7 @@ class TestEvaluate(base.TestEvaluatorBase):
         expected = [mock.call(alarm.alarm_id, state='insufficient data')
                     for alarm in self.alarms]
         update_calls = self.api_client.alarms.set_state.call_args_list
-        self.assertEqual(update_calls, expected)
+        self.assertEqual(expected, update_calls)
         expected = [mock.call(
             alarm,
             'ok',
@@ -622,7 +622,7 @@ class TestNotifications(base.BaseTestCase):
     def _verify_user_metadata(self, metadata):
         self.assertIn('user_metadata', metadata)
         user_meta = metadata['user_metadata']
-        self.assertEqual(user_meta.get('server_group'), 'Group_A')
+        self.assertEqual('Group_A', user_meta.get('server_group'))
         self.assertNotIn('AutoScalingGroupName', user_meta)
         self.assertIn('foo_bar', user_meta)
         self.assertNotIn('foo.bar', user_meta)
@@ -197,7 +197,7 @@ class TestLibvirtInspection(base.BaseTestCase):
                                               return_value=(5L, 0L, 0L,
                                                             2L, 999999L))):
             interfaces = list(self.inspector.inspect_vnics(self.instance_name))
-            self.assertEqual(interfaces, [])
+            self.assertEqual([], interfaces)

     def test_inspect_disks(self):
         dom_xml = """
@@ -244,7 +244,7 @@ class TestLibvirtInspection(base.BaseTestCase):
                                               return_value=(5L, 0L, 0L,
                                                             2L, 999999L))):
             disks = list(self.inspector.inspect_disks(self.instance_name))
-            self.assertEqual(disks, [])
+            self.assertEqual([], disks)

     def test_inspect_memory_usage(self):
         fake_memory_stats = {'available': 51200L, 'unused': 25600L}
@@ -94,7 +94,7 @@ class TestNotification(base.BaseTestCase):

     def _verify_common_counter(self, c, name, volume):
         self.assertIsNotNone(c)
-        self.assertEqual(c.name, name)
+        self.assertEqual(name, c.name)
         self.assertEqual(fake_uuid('c'), c.resource_id)
         self.assertEqual(NOW, c.timestamp)
         self.assertEqual(volume, c.volume)
@@ -159,14 +159,14 @@ class TestOpenDayLightDriverSpecial(_Base):
                                     self.fake_odl_url,
                                     self.fake_params,
                                     cache)
-        self.assertEqual(self.get_flow_statistics.call_count, 1)
+        self.assertEqual(1, self.get_flow_statistics.call_count)

         cache = {}
         self.driver.get_sample_data('switch',
                                     self.fake_odl_url,
                                     self.fake_params,
                                     cache)
-        self.assertEqual(self.get_flow_statistics.call_count, 2)
+        self.assertEqual(2, self.get_flow_statistics.call_count)

     def test_multi_container(self):
         cache = {}
@@ -174,7 +174,7 @@ class TestOpenDayLightDriverSpecial(_Base):
                                     self.fake_odl_url,
                                     self.fake_params_multi_container,
                                     cache)
-        self.assertEqual(self.get_flow_statistics.call_count, 2)
+        self.assertEqual(2, self.get_flow_statistics.call_count)

         self.assertIn('network.statistics.opendaylight', cache)

@@ -104,15 +104,15 @@ class TestBaseGetSamples(base.BaseTestCase):

     def _assert_sample(self, s, volume, resource_id, resource_metadata,
                        timestamp):
-        self.assertEqual(s.name, 'foo')
-        self.assertEqual(s.type, sample.TYPE_CUMULATIVE)
-        self.assertEqual(s.unit, 'bar')
-        self.assertEqual(s.volume, volume)
+        self.assertEqual('foo', s.name)
+        self.assertEqual(sample.TYPE_CUMULATIVE, s.type)
+        self.assertEqual('bar', s.unit)
+        self.assertEqual(volume, s.volume)
         self.assertIsNone(s.user_id)
         self.assertIsNone(s.project_id)
-        self.assertEqual(s.resource_id, resource_id)
-        self.assertEqual(s.timestamp, timestamp)
-        self.assertEqual(s.resource_metadata, resource_metadata)
+        self.assertEqual(resource_id, s.resource_id)
+        self.assertEqual(timestamp, s.timestamp)
+        self.assertEqual(resource_metadata, s.resource_metadata)

     def test_get_samples_one_driver_one_resource(self):
         times = self._make_timestamps(2)
@@ -124,7 +124,7 @@ class TestBaseGetSamples(base.BaseTestCase):

         samples = self._get_samples('http://foo')

-        self.assertEqual(len(samples), 1)
+        self.assertEqual(1, len(samples))
         self._assert_sample(samples[0], 1, 'a', {'spam': 'egg'}, times[0])

     def test_get_samples_one_driver_two_resource(self):
@@ -138,7 +138,7 @@ class TestBaseGetSamples(base.BaseTestCase):

         samples = self._get_samples('http://foo', 'http://bar')

-        self.assertEqual(len(samples), 2)
+        self.assertEqual(2, len(samples))
         self._assert_sample(samples[0], 1, 'a', {'spam': 'egg'}, times[0])
         self._assert_sample(samples[1], 2, 'b', None, times[1])

@@ -155,7 +155,7 @@ class TestBaseGetSamples(base.BaseTestCase):

         samples = self._get_samples('http://foo')

-        self.assertEqual(len(samples), 1)
+        self.assertEqual(1, len(samples))
         self._assert_sample(samples[0], 1, 'a', {'spam': 'egg'}, times[0])

     def test_get_samples_multi_samples(self):
@@ -168,7 +168,7 @@ class TestBaseGetSamples(base.BaseTestCase):

         samples = self._get_samples('http://foo')

-        self.assertEqual(len(samples), 2)
+        self.assertEqual(2, len(samples))
         self._assert_sample(samples[0], 1, 'a', {'spam': 'egg'}, times[0])
         self._assert_sample(samples[1], 2, 'b', None, times[1])

@@ -179,7 +179,7 @@ class TestBaseGetSamples(base.BaseTestCase):

         samples = self._get_samples('http://foo')

-        self.assertEqual(len(samples), 0)
+        self.assertEqual(0, len(samples))

     def test_get_samples_return_no_generator(self):
         class NoneFakeDriver(driver.Driver):
@@ -92,24 +92,24 @@ class TestNotification(base.BaseTestCase):

     def _verify_common_sample(self, s, name, volume):
         self.assertIsNotNone(s)
-        self.assertEqual(s.name, 'stack.%s' % name)
-        self.assertEqual(s.timestamp, NOW)
-        self.assertEqual(s.type, sample.TYPE_DELTA)
-        self.assertEqual(s.project_id, TENANT_ID)
-        self.assertEqual(s.resource_id, STACK_ARN)
+        self.assertEqual('stack.%s' % name, s.name)
+        self.assertEqual(NOW, s.timestamp)
+        self.assertEqual(sample.TYPE_DELTA, s.type)
+        self.assertEqual(TENANT_ID, s.project_id)
+        self.assertEqual(STACK_ARN, s.resource_id)
         metadata = s.resource_metadata
-        self.assertEqual(metadata.get('host'),
-                         u'orchestration.node-n5x66lxdy67d')
+        self.assertEqual(u'orchestration.node-n5x66lxdy67d',
+                         metadata.get('host'))

     def _test_operation(self, operation, trust=None):
         notif = stack_notification_for(operation, trust)
         handler = notifications.StackCRUD(mock.Mock())
         data = list(handler.process_notification(notif))
-        self.assertEqual(len(data), 1)
+        self.assertEqual(1, len(data))
         if trust:
-            self.assertEqual(data[0].user_id, TRUSTOR_ID)
+            self.assertEqual(TRUSTOR_ID, data[0].user_id)
         else:
-            self.assertEqual(data[0].user_id, USER_ID)
+            self.assertEqual(USER_ID, data[0].user_id)
         self._verify_common_sample(data[0], operation, 1)

     def test_create(self):
@@ -58,7 +58,7 @@ class ConnectionRetryTest(base.BaseTestCase):
                 storage.get_connection_from_config(self.CONF)
             except RuntimeError as err:
                 self.assertIn('no-such-engine', six.text_type(err))
-                self.assertEqual(retry_sleep.call_count, 9)
+                self.assertEqual(9, retry_sleep.call_count)
                 retry_sleep.assert_called_with(10.0)


File diff suppressed because it is too large
@@ -162,17 +162,17 @@ class TestPartitioning(base.BaseTestCase):
                   dict(agent_id='agent2', group_id='group')]
         self._usage_simulation(*agents)

-        self.assertEqual(sorted(self.shared_storage.keys()), ['group'])
-        self.assertEqual(sorted(self.shared_storage['group'].keys()),
-                         ['agent1', 'agent2'])
+        self.assertEqual(['group'], sorted(self.shared_storage.keys()))
+        self.assertEqual(['agent1', 'agent2'],
+                         sorted(self.shared_storage['group'].keys()))

     def test_multiple_groups(self):
         agents = [dict(agent_id='agent1', group_id='group1'),
                   dict(agent_id='agent2', group_id='group2')]
         self._usage_simulation(*agents)

-        self.assertEqual(sorted(self.shared_storage.keys()), ['group1',
-                                                              'group2'])
+        self.assertEqual(['group1', 'group2'],
+                         sorted(self.shared_storage.keys()))

     def test_partitioning(self):
         all_resources = ['resource_%s' % i for i in range(1000)]
@@ -146,19 +146,19 @@ class TestDecoupledPipeline(pipeline_base.BasePipelineTestCase):
         with pipeline_manager.publisher(None) as p:
             p([self.test_counter])

-        self.assertEqual(len(pipeline_manager.pipelines), 2)
-        self.assertEqual(str(pipeline_manager.pipelines[0]),
-                         'test_source:test_sink')
-        self.assertEqual(str(pipeline_manager.pipelines[1]),
-                         'test_source:second_sink')
+        self.assertEqual(2, len(pipeline_manager.pipelines))
+        self.assertEqual('test_source:test_sink',
+                         str(pipeline_manager.pipelines[0]))
+        self.assertEqual('test_source:second_sink',
+                         str(pipeline_manager.pipelines[1]))
         test_publisher = pipeline_manager.pipelines[0].publishers[0]
         new_publisher = pipeline_manager.pipelines[1].publishers[0]
         for publisher, sfx in [(test_publisher, '_update'),
                                (new_publisher, '_new')]:
-            self.assertEqual(len(publisher.samples), 2)
-            self.assertEqual(publisher.calls, 2)
-            self.assertEqual(getattr(publisher.samples[0], "name"), 'a' + sfx)
-            self.assertEqual(getattr(publisher.samples[1], "name"), 'b' + sfx)
+            self.assertEqual(2, len(publisher.samples))
+            self.assertEqual(2, publisher.calls)
+            self.assertEqual('a' + sfx, getattr(publisher.samples[0], "name"))
+            self.assertEqual('b' + sfx, getattr(publisher.samples[1], "name"))

     def test_multiple_sources_with_single_sink(self):
         self.pipeline_cfg['sources'].append({
@@ -189,23 +189,23 @@ class TestDecoupledPipeline(pipeline_base.BasePipelineTestCase):
         with pipeline_manager.publisher(None) as p:
             p([self.test_counter])

-        self.assertEqual(len(pipeline_manager.pipelines), 2)
-        self.assertEqual(str(pipeline_manager.pipelines[0]),
-                         'test_source:test_sink')
-        self.assertEqual(str(pipeline_manager.pipelines[1]),
-                         'second_source:test_sink')
+        self.assertEqual(2, len(pipeline_manager.pipelines))
+        self.assertEqual('test_source:test_sink',
+                         str(pipeline_manager.pipelines[0]))
+        self.assertEqual('second_source:test_sink',
+                         str(pipeline_manager.pipelines[1]))
         test_publisher = pipeline_manager.pipelines[0].publishers[0]
         another_publisher = pipeline_manager.pipelines[1].publishers[0]
         for publisher in [test_publisher, another_publisher]:
-            self.assertEqual(len(publisher.samples), 2)
-            self.assertEqual(publisher.calls, 2)
-            self.assertEqual(getattr(publisher.samples[0], "name"), 'a_update')
-            self.assertEqual(getattr(publisher.samples[1], "name"), 'b_update')
+            self.assertEqual(2, len(publisher.samples))
+            self.assertEqual(2, publisher.calls)
+            self.assertEqual('a_update', getattr(publisher.samples[0], "name"))
+            self.assertEqual('b_update', getattr(publisher.samples[1], "name"))

         transformed_samples = self.TransformerClass.samples
-        self.assertEqual(len(transformed_samples), 2)
-        self.assertEqual([getattr(s, 'name') for s in transformed_samples],
-                         ['a', 'b'])
+        self.assertEqual(2, len(transformed_samples))
+        self.assertEqual(['a', 'b'],
+                         [getattr(s, 'name') for s in transformed_samples])

     def _do_test_rate_of_change_in_boilerplate_pipeline_cfg(self, index,
                                                             meters, units):