aodh/ceilometer/tests/publisher/test_kafka_broker_publisher.py
Komei Shimamura 129929c346 Fix an issue for kafka-publisher and refactor the test code
Fix an issue which causes an error when the default policy is selected.
Add test code to avoid the same mistakes again.
Fix a bug in the test code:
* KafkaBrokerPublisher tries to connect to localhost when __init__
  is called, because _get_client is not mocked before
  KafkaBrokerPublisher is initialized (see the sketch below).

Closes-Bug: 1441175
Closes-Bug: 1441258
Change-Id: I306db443a866860ee45b2362b8a0cd2a59d8c3a2
2015-04-09 00:55:58 -07:00
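
A minimal sketch (not part of the committed file; it only reuses identifiers that appear in the tests below) of the pattern this change enforces: patch KafkaBrokerPublisher._get_client before constructing the publisher, so __init__ never tries to open a real connection to localhost.

import mock
from oslo_utils import netutils

from ceilometer.publisher.kafka_broker import KafkaBrokerPublisher

# _get_client is patched before instantiation, so no real Kafka connection
# is opened; the tests below use the decorator form of mock.patch.object
# to achieve the same thing for each test method.
with mock.patch.object(KafkaBrokerPublisher, '_get_client'):
    publisher = KafkaBrokerPublisher(
        netutils.urlsplit('kafka://127.0.0.1:9092?topic=ceilometer'))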


#
# Copyright 2015 Cisco Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for ceilometer/publisher/kafka_broker.py
"""
import datetime
import uuid

import mock
from oslo_utils import netutils

from ceilometer.event.storage import models as event
from ceilometer.publisher.kafka_broker import KafkaBrokerPublisher
from ceilometer import sample
from ceilometer.tests import base as tests_base


class TestKafkaPublisher(tests_base.BaseTestCase):
    test_event_data = [
        event.Event(message_id=uuid.uuid4(),
                    event_type='event_%d' % i,
                    generated=datetime.datetime.utcnow(),
                    traits=[], raw={})
        for i in range(0, 5)
    ]

    test_data = [
        sample.Sample(
            name='test',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test3',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
    ]

    def setUp(self):
        super(TestKafkaPublisher, self).setUp()

    @mock.patch.object(KafkaBrokerPublisher, '_get_client')
    def test_publish(self, mock_method):
        publisher = KafkaBrokerPublisher(netutils.urlsplit(
            'kafka://127.0.0.1:9092?topic=ceilometer'))

        with mock.patch.object(publisher, '_send') as fake_send:
            fake_send.side_effect = mock.Mock()
            publisher.publish_samples(mock.MagicMock(), self.test_data)
            self.assertEqual(1, len(fake_send.mock_calls))
            self.assertEqual(0, len(publisher.local_queue))

    @mock.patch.object(KafkaBrokerPublisher, '_get_client')
    def test_publish_without_options(self, mock_method):
        publisher = KafkaBrokerPublisher(
            netutils.urlsplit('kafka://127.0.0.1:9092'))

        with mock.patch.object(publisher, '_send') as fake_send:
            fake_send.side_effect = mock.Mock()
            publisher.publish_samples(mock.MagicMock(), self.test_data)
            self.assertEqual(1, len(fake_send.mock_calls))
            self.assertEqual(0, len(publisher.local_queue))

    @mock.patch.object(KafkaBrokerPublisher, '_get_client')
    def test_publish_to_host_without_policy(self, mock_method):
        publisher = KafkaBrokerPublisher(netutils.urlsplit(
            'kafka://127.0.0.1:9092?topic=ceilometer'))
        self.assertEqual('default', publisher.policy)

        publisher = KafkaBrokerPublisher(netutils.urlsplit(
            'kafka://127.0.0.1:9092?topic=ceilometer&policy=test'))
        self.assertEqual('default', publisher.policy)

    @mock.patch.object(KafkaBrokerPublisher, '_get_client')
    def test_publish_to_host_with_default_policy(self, mock_method):
        publisher = KafkaBrokerPublisher(netutils.urlsplit(
            'kafka://127.0.0.1:9092?topic=ceilometer&policy=default'))

        with mock.patch.object(publisher, '_send') as fake_send:
            fake_send.side_effect = TypeError
            self.assertRaises(TypeError, publisher.publish_samples,
                              mock.MagicMock(), self.test_data)
            self.assertEqual(100, len(fake_send.mock_calls))
            self.assertEqual(0, len(publisher.local_queue))

    @mock.patch.object(KafkaBrokerPublisher, '_get_client')
    def test_publish_to_host_with_drop_policy(self, mock_method):
        publisher = KafkaBrokerPublisher(netutils.urlsplit(
            'kafka://127.0.0.1:9092?topic=ceilometer&policy=drop'))

        with mock.patch.object(publisher, '_send') as fake_send:
            fake_send.side_effect = Exception("test")
            publisher.publish_samples(mock.MagicMock(), self.test_data)
            self.assertEqual(1, len(fake_send.mock_calls))
            self.assertEqual(0, len(publisher.local_queue))

    @mock.patch.object(KafkaBrokerPublisher, '_get_client')
    def test_publish_to_host_with_queue_policy(self, mock_method):
        publisher = KafkaBrokerPublisher(netutils.urlsplit(
            'kafka://127.0.0.1:9092?topic=ceilometer&policy=queue'))

        with mock.patch.object(publisher, '_send') as fake_send:
            fake_send.side_effect = Exception("test")
            publisher.publish_samples(mock.MagicMock(), self.test_data)
            self.assertEqual(1, len(fake_send.mock_calls))
            self.assertEqual(1, len(publisher.local_queue))

    @mock.patch.object(KafkaBrokerPublisher, '_get_client')
    def test_publish_to_down_host_with_default_queue_size(self, mock_method):
        publisher = KafkaBrokerPublisher(netutils.urlsplit(
            'kafka://127.0.0.1:9092?topic=ceilometer&policy=queue'))

        for i in range(0, 2000):
            for s in self.test_data:
                s.name = 'test-%d' % i
            publisher.publish_samples(mock.MagicMock(),
                                      self.test_data)

        self.assertEqual(1024, len(publisher.local_queue))
        self.assertEqual(
            'test-976',
            publisher.local_queue[0][0]['counter_name']
        )
        self.assertEqual(
            'test-1999',
            publisher.local_queue[1023][0]['counter_name']
        )

    @mock.patch.object(KafkaBrokerPublisher, '_get_client')
    def test_publish_to_host_from_down_to_up_with_queue(self, mock_method):
        publisher = KafkaBrokerPublisher(netutils.urlsplit(
            'kafka://127.0.0.1:9092?topic=ceilometer&policy=queue'))

        for i in range(0, 16):
            for s in self.test_data:
                s.name = 'test-%d' % i
            publisher.publish_samples(mock.MagicMock(), self.test_data)

        self.assertEqual(16, len(publisher.local_queue))

        with mock.patch.object(publisher, '_send') as fake_send:
            fake_send.return_value = mock.Mock()
            for s in self.test_data:
                s.name = 'test-%d' % 16
            publisher.publish_samples(mock.MagicMock(), self.test_data)
            self.assertEqual(0, len(publisher.local_queue))

    @mock.patch.object(KafkaBrokerPublisher, '_get_client')
    def test_publish_event_with_default_policy(self, mock_method):
        publisher = KafkaBrokerPublisher(
            netutils.urlsplit('kafka://127.0.0.1:9092?topic=ceilometer'))

        with mock.patch.object(KafkaBrokerPublisher, '_send') as fake_send:
            publisher.publish_events(mock.MagicMock(), self.test_event_data)
            self.assertEqual(1, len(fake_send.mock_calls))

        with mock.patch.object(KafkaBrokerPublisher, '_send') as fake_send:
            fake_send.side_effect = TypeError
            self.assertRaises(TypeError, publisher.publish_events,
                              mock.MagicMock(), self.test_event_data)
            self.assertEqual(100, len(fake_send.mock_calls))
            self.assertEqual(0, len(publisher.local_queue))