Fix the configuration for the nova notifier
The global config object inside the ceilometer copy of openstack.common.cfg was not being configured, which meant the RPC code in openstack.common.rpc was not set up. That led to an infinite loop when the notifier was invoked, since it tried to connect to the message bus over and over and over. This change forces the configuration object to be loaded from the ceilometer agent configuration file, assumed to be in /etc/ceilometer/ceilometer-agent.conf. It also changes the notifier to do that initialization the first time it is used, rather than when the module is imported. This prevents the tests from loading the module and initializing the config object, causing exceptions in any subsequent code that tries to add new configuration options. Also folded into this changeset is a fix to use the right db method to load the instance. Notification messages contain the UUID of the object, not the numerical key from the id column of the table. Therefore, we need to use db.instance_get_by_uuid() instead of db.instance_get(). Change-Id: I45719ab879ae6163329b6a886dee3e82ed4bc260 Signed-off-by: Doug Hellmann <doug.hellmann@dreamhost.com>
This commit is contained in:
parent
3bce0da4cd
commit
1b5eb0bad4
@ -16,22 +16,51 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from ceilometer.openstack.common import cfg
|
||||
from ceilometer.openstack.common import log as logging
|
||||
|
||||
from nova import db
|
||||
from ceilometer.compute.manager import AgentManager
|
||||
|
||||
# This module runs inside the nova compute
|
||||
# agent, which only configures the "nova" logger.
|
||||
# We use a fake logger name in that namespace
|
||||
# so that messages from this module appear
|
||||
# in the log file.
|
||||
LOG = logging.getLogger('nova.ceilometer.notifier')
|
||||
|
||||
class CeilometerNovaNotifier(object):
|
||||
"""Special notifier for Nova, doing special jobs for Ceilometer."""
|
||||
# NOTE(dhellmann): The _initialize_config_options is set by the tests
|
||||
# to disable the cfg.CONF() call in notify(), since initializing the
|
||||
# object in one tests breaks other tests unpredictably when new
|
||||
# modules are imported and new options registered.
|
||||
#
|
||||
# GLOBAL STATE IS A BAD IDEA BUT IMPORT SIDE-EFFECTS ARE WORSE!
|
||||
_initialize_config_options = True
|
||||
_agent_manager = None
|
||||
|
||||
def __init__(self):
|
||||
self.manager = AgentManager()
|
||||
self.manager.init_host()
|
||||
|
||||
def __call__(self, context, message):
|
||||
if message['event_type'] == 'compute.instance.delete.start':
|
||||
instance_id = message['payload']['instance_id']
|
||||
self.manager.poll_instance(context,
|
||||
db.instance_get(context,
|
||||
instance_id))
|
||||
def initialize_manager():
    """Create and initialize the module-level AgentManager singleton.

    Unless the tests have disabled it by clearing
    ``_initialize_config_options``, this also loads the global
    ceilometer configuration object (``cfg.CONF``) as the
    ``ceilometer-agent`` program would, so that the ceilometer copy of
    the rpc modules is configured and can reach the message bus.
    """
    global _agent_manager
    # NOTE(dhellmann): See note above.
    if _initialize_config_options:
        # Load options as though we were the ceilometer agent process
        # (reads the ceilometer-agent configuration file).
        cfg.CONF(args=[], project='ceilometer', prog='ceilometer-agent')
    # Instantiate a manager
    _agent_manager = AgentManager()
    _agent_manager.init_host()
|
||||
|
||||
notify = CeilometerNovaNotifier()
|
||||
|
||||
def notify(context, message):
    """Handle a nova notification message.

    Lazily initializes the module-level agent manager on first use
    (instead of at import time, which broke the tests), then, when an
    instance is about to be deleted, polls its final statistics.

    :param context: request context for the notification
    :param message: notification dict; ``payload.instance_id`` carries
                    the instance UUID, so the lookup must use
                    ``db.instance_get_by_uuid`` rather than
                    ``db.instance_get``.
    """
    global _agent_manager
    # Initialize the global config object as though it was in the
    # compute agent process so that the ceilometer copy of the rpc
    # modules will know how to communicate.
    if _agent_manager is None:
        initialize_manager()

    if message['event_type'] == 'compute.instance.delete.start':
        instance_id = message['payload']['instance_id']
        LOG.debug('polling final stats for %r', instance_id)
        _agent_manager.poll_instance(
            context,
            db.instance_get_by_uuid(context, instance_id))
    return
|
||||
|
@ -38,6 +38,7 @@ from ceilometer import counter
|
||||
from ceilometer.tests import base
|
||||
from ceilometer.tests import skip
|
||||
from ceilometer.compute import nova_notifier
|
||||
from ceilometer.openstack.common import cfg
|
||||
from ceilometer.openstack.common import importutils
|
||||
|
||||
|
||||
@ -110,7 +111,7 @@ class TestNovaNotifier(base.TestCase):
|
||||
"metadata": {},
|
||||
"uuid": "144e08f4-00cb-11e2-888e-5453ed1bbb5f"}
|
||||
|
||||
self.stubs.Set(db, 'instance_get', self.fake_db_instance_get)
|
||||
self.stubs.Set(db, 'instance_get_by_uuid', self.fake_db_instance_get)
|
||||
self.stubs.Set(db, 'instance_info_cache_delete', self.do_nothing)
|
||||
self.stubs.Set(db, 'instance_destroy', self.do_nothing)
|
||||
self.stubs.Set(db, 'instance_system_metadata_get',
|
||||
@ -122,16 +123,20 @@ class TestNovaNotifier(base.TestCase):
|
||||
self.instance))
|
||||
|
||||
self.stubs.Set(publish, 'publish_counter', self.do_nothing)
|
||||
nova_notifier.notify.manager.pollsters = [('test', self.Pollster())]
|
||||
nova_notifier._initialize_config_options = False
|
||||
nova_notifier.initialize_manager()
|
||||
nova_notifier._agent_manager.pollsters = [('test', self.Pollster())]
|
||||
|
||||
def tearDown(self):
|
||||
super(TestNovaNotifier, self).tearDown()
|
||||
self.Pollster.counters = []
|
||||
super(TestNovaNotifier, self).tearDown()
|
||||
|
||||
def test_notifications(self):
|
||||
self.compute.terminate_instance(self.context, instance=self.instance)
|
||||
self.assertTrue(self.Pollster.counters)
|
||||
self.assertTrue(self.Pollster.counters[0])
|
||||
self.assertEqual(self.Pollster.counters[0][0],
|
||||
nova_notifier.notify.manager)
|
||||
nova_notifier._agent_manager)
|
||||
self.assertEqual(self.Pollster.counters[0][1].id,
|
||||
self.instance['uuid'])
|
||||
self.assertEqual(len(self.Pollster.counters), 1)
|
||||
|
Loading…
x
Reference in New Issue
Block a user