Multiple publisher pipeline framework

With multiple publisher support, meter data can be sent to multiple
conduits, such as the message bus, the CW API, etc.

The pipeline framework transfers the data from the pollsters and
notification handlers to the publishers through multiple pipelines.
Each pipeline is composed of zero or more transformers and one or more
publishers. A transformer transforms the data, e.g. summarizing,
dropping, or computing deltas; a sample pipeline definition is sketched
below.
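
For illustration, a minimal pipeline definition along these lines could be
placed in pipeline.yaml (the counter, transformer, and publisher names here
are hypothetical); the snippet below parses it the same way setup_pipeline()
does:

import yaml

PIPELINE_YAML = """
-
    name: cpu_pipeline
    interval: 600
    counters:
        - cpu
    transformers:
        - name: accumulator
          parameters:
              size: 10
    publishers:
        - meter_publisher
"""

pipeline_cfg = yaml.safe_load(PIPELINE_YAML)
# Yields a list with a single pipeline dictionary:
# [{'name': 'cpu_pipeline', 'interval': 600, 'counters': ['cpu'],
#   'transformers': [{'name': 'accumulator', 'parameters': {'size': 10}}],
#   'publishers': ['meter_publisher']}]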

For bp: multi-publisher

Change-Id: Iff8b85d724eb6358d147b8a8431837934d913c88
Signed-off-by: Yunhong, Jiang <yunhong.jiang@intel.com>
Yunhong, Jiang 2012-12-13 17:13:34 +08:00
parent f85bbcedfa
commit a004624dcc
4 changed files with 1071 additions and 0 deletions

ceilometer/pipeline.py (new file, 357 lines)

@@ -0,0 +1,357 @@
# -*- encoding: utf-8 -*-
#
# Copyright © 2013 Intel Corp.
#
# Author: Yunhong Jiang <yunhong.jiang@intel.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from stevedore import extension
import yaml
from ceilometer import extension_manager
from ceilometer.openstack.common import cfg
from ceilometer.openstack.common import log
OPTS = [
cfg.StrOpt('pipeline_cfg_file',
default="pipeline.yaml",
help="Configuration file for pipeline definition"
),
]
cfg.CONF.register_opts(OPTS)
LOG = log.getLogger(__name__)
PUBLISHER_NAMESPACE = 'ceilometer.publisher'
TRANSFORMER_NAMESPACE = 'ceilometer.transformer'
class PipelineException(Exception):
def __init__(self, message, pipeline_cfg):
self.msg = message
self.pipeline_cfg = pipeline_cfg
def __str__(self):
return 'Pipeline %s: %s' % (self.pipeline_cfg, self.msg)
class TransformerExtensionManager(extension.ExtensionManager):
def __init__(self):
super(TransformerExtensionManager, self).__init__(
namespace=TRANSFORMER_NAMESPACE,
invoke_on_load=False,
invoke_args=(),
invoke_kwds={}
)
self.by_name = dict((e.name, e) for e in self.extensions)
def get_ext(self, name):
return self.by_name[name]
class Pipeline(object):
"""Sample handling pipeline
A pipeline describes a chain of handlers. The chain starts with zero or
more transformers and ends with one or more publishers.
The first transformer in the chain gets a counter from the data
collector, i.e. a pollster or notification handler, takes some action
such as dropping, aggregating, or changing a field, and then passes the
updated counter to the next step.
The subsequent transformers, if any, handle the data similarly.
At the end of the chain, publishers publish the data. The exact
publishing method depends on the publisher type, for example, pushing
into data storage through the message bus, or sending to external CW
software through a CW API call.
If no transformer is included in the chain, the publishers get counters
from the data collector and publish them directly.
"""
def __init__(self, cfg, publisher_manager, transformer_manager):
self.cfg = cfg
try:
self.name = cfg['name']
try:
self.interval = int(cfg['interval'])
except ValueError:
raise PipelineException("Invalid interval value", cfg)
self.counters = cfg['counters']
self.publishers = cfg['publishers']
# It's legal to have no transformer specified
self.transformer_cfg = cfg['transformers'] or []
self.publisher_manager = publisher_manager
except KeyError as err:
raise PipelineException(
"Required field %s not specified" % err.args[0], cfg)
if self.interval <= 0:
raise PipelineException("Interval value should > 0", cfg)
self._check_counters()
self._check_publishers(cfg, publisher_manager)
self.transformers = self._setup_transformers(cfg, transformer_manager)
def __str__(self):
return self.name
def _check_counters(self):
"""Counter rules checking
At least one meaningful counter must exist.
Included and excluded counters cannot co-exist in the same pipeline.
Included counters and the wildcard cannot co-exist in the same pipeline.
"""
counters = self.counters
if not counters:
raise PipelineException("No counter specified", self.cfg)
if [x for x in counters if x[0] not in '!*'] and \
[x for x in counters if x[0] == '!']:
raise PipelineException(
"Both included and excluded counters specified",
self.cfg)
if '*' in counters and [x for x in counters if x[0] not in '!*']:
raise PipelineException(
"Included counters specified with wildcard",
self.cfg)
def _check_publishers(self, cfg, publisher_manager):
if not self.publishers:
raise PipelineException(
"No publisher specified", cfg)
if not set(self.publishers).issubset(set(publisher_manager.names())):
raise PipelineException(
"Publishers %s invalid" %
set(self.publishers).difference(
set(self.publisher_manager.names())), cfg)
@staticmethod
def _setup_transformers(cfg, transformer_manager):
transformer_cfg = cfg['transformers'] or []
transformers = []
for transformer in transformer_cfg:
parameter = transformer['parameters'] or {}
try:
ext = transformer_manager.get_ext(transformer['name'])
except KeyError:
raise PipelineException(
"No transformer named %s loaded" % transformer['name'],
cfg)
transformers.append(ext.plugin(**parameter))
LOG.info("Pipeline %s: Setup transformer instance %s "
"with parameter %s",
transformer['name'],
parameter)
return transformers
def _publish_counter_to_one_publisher(self, ext, ctxt, counter, source):
try:
ext.obj.publish_counter(ctxt, counter, source)
except Exception as err:
LOG.warning("Pipeline %s: Continue after error "
"from publisher %s for %s",
self, ext.name, counter)
LOG.exception(err)
def _transform_counter(self, start, ctxt, counter, source):
try:
for transformer in self.transformers[start:]:
counter = transformer.handle_sample(ctxt, counter, source)
if not counter:
LOG.debug("Pipeline %s: Counter dropped by transformer %s",
self, transformer)
return
return counter
except Exception as err:
LOG.warning("Pipeline %s: Exit after error from transformer"
"%s for %s",
self, transformer, counter)
LOG.exception(err)
def _publish_counter(self, start, ctxt, counter, source):
"""Push counter into pipeline for publishing.
param start: The first transformer that the counter will be injected
into. This is mainly for flush() invocations, since a
transformer may emit counters of its own.
param ctxt: Execution context from the manager or service.
param counter: The counter to publish.
param source: The counter source.
"""
LOG.audit("Pipeline %s: Transform counter %s from %s transformer",
self, counter, start)
counter = self._transform_counter(start, ctxt, counter, source)
if not counter:
return
LOG.audit("Pipeline %s: Publish counter %s", self, counter)
self.publisher_manager.map(self.publishers,
self._publish_counter_to_one_publisher,
ctxt=ctxt,
counter=counter,
source=source,
)
LOG.audit("Pipeline %s: Published counter %s", self, counter)
def publish_counter(self, ctxt, counter, source):
if self.support_counter(counter.name):
self._publish_counter(0, ctxt, counter, source)
# NOTE(yjiang5): To support counters like instance:m1.tiny, which
# include a variable part at the end starting with ':'.
# Hopefully we will not add such counters in the future.
def _variable_counter_name(self, name):
m = name.partition(':')
if m[1] == ':':
return m[1].join((m[0], '*'))
else:
return name
def support_counter(self, counter_name):
counter_name = self._variable_counter_name(counter_name)
if ('!' + counter_name) in self.counters:
return False
if '*' in self.counters:
return True
elif self.counters[0][0] == '!':
return not ('!' + counter_name) in self.counters
else:
return counter_name in self.counters
def flush(self, ctxt, source):
"""Flush data after all counter have been injected to pipeline."""
LOG.audit("Flush pipeline %s", self)
for (i, transformer) in enumerate(self.transformers):
for c in transformer.flush(ctxt, source):
self._publish_counter(i + 1, ctxt, c, source)
def get_interval(self):
return self.interval
class PipelineManager(object):
"""Pipeline Manager
The pipeline manager sets up pipelines according to the config file.
Usually only one pipeline manager exists in the system.
"""
def __init__(self, cfg, publisher_manager):
"""Create the pipeline manager"""
self._setup_pipelines(cfg, publisher_manager)
def _setup_pipelines(self, cfg, publisher_manager):
"""Setup the pipelines according to config.
The top of the cfg is a list of pipeline definitions.
A pipeline definition is a dictionary specifying the target counters,
the transformers involved, and the target publishers:
{
"name": pipeline_name
"interval": interval_time
"counters" : ["counter_1", "counter_2"],
"tranformers":[
{"name": "Transformer_1",
"parameters": {"p1": "value"}},
{"name": "Transformer_2",
"parameters": {"p1": "value"}},
]
"publishers": ["publisher_1", "publisher_2"]
}
Interval is the period, in seconds, at which counters are injected
into the pipeline.
A valid counter format is '*', '!counter_name', or 'counter_name'.
'*' is a wildcard symbol meaning any counter; '!counter_name' means
"counter_name" will be excluded; 'counter_name' means 'counter_name'
will be included.
The 'counter_name' is the Counter namedtuple's name field. For counter
names with a variable part, like "instance:m1.tiny", it is
"instance:*", as returned by get_counter_list().
A valid counters definition is all "included counter names", all
"excluded counter names", the wildcard plus "excluded counter names",
or only the wildcard.
A transformer's name is its plugin name in setup.py.
A publisher's name is its plugin name in setup.py.
"""
transformer_manager = TransformerExtensionManager()
self.pipelines = [Pipeline(pipedef, publisher_manager,
transformer_manager)
for pipedef in cfg]
def pipelines_for_counter(self, counter_name):
"""Get all pipelines that support counter"""
return [p for p in self.pipelines if p.support_counter(counter_name)]
def publish_counter(self, ctxt, counter, source):
"""Publish counter through pipelines
This is helpful to the notification mechanism, so that it does not
need to maintain a private mapping cache from counters to pipelines.
Polling-based data collectors may still need to keep such a private
mapping cache to support different intervals.
"""
# TODO(yjiang5) Utilize a cache
for p in self.pipelines:
if p.support_counter(counter.name):
p.publish_counter(ctxt, counter, source)
def setup_pipeline(publisher_manager):
"""Setup pipeline manager according to yaml config file."""
cfg_file = cfg.CONF.pipeline_cfg_file
if not os.path.exists(cfg_file):
cfg_file = cfg.CONF.find_file(cfg_file)
LOG.debug("Pipeline config file: %s", cfg_file)
with open(cfg_file) as fap:
data = fap.read()
pipeline_cfg = yaml.safe_load(data)
LOG.info("Pipeline config: %s", pipeline_cfg)
return PipelineManager(pipeline_cfg,
publisher_manager)
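
As a usage sketch of the module above (the publisher manager construction,
the 'cpu' sample, and the 600-second interval are illustrative assumptions,
not part of this change), a data collector could drive the pipelines like
this:

from stevedore import dispatch

from ceilometer import counter
from ceilometer import pipeline
from ceilometer.openstack.common import timeutils

# Load every publisher plugin registered under 'ceilometer.publisher'.
publisher_manager = dispatch.NameDispatchExtensionManager(
    pipeline.PUBLISHER_NAMESPACE,
    lambda ext: True,
    invoke_on_load=True,
)

# Build the pipelines from pipeline.yaml.
pipeline_manager = pipeline.setup_pipeline(publisher_manager)

sample = counter.Counter(
    name='cpu',
    type='cumulative',
    volume=120,
    unit='ns',
    user_id='some_user',
    project_id='some_project',
    resource_id='some_instance',
    timestamp=timeutils.utcnow().isoformat(),
    resource_metadata={},
)

# Notification handlers can fan out directly: every pipeline whose counter
# rules match 'cpu' transforms the sample and hands it to its publishers.
pipeline_manager.publish_counter(None, sample, source='openstack')

# Polling-based collectors may instead group the matching pipelines by
# interval and publish on each polling cycle.
for pipe in pipeline_manager.pipelines_for_counter('cpu'):
    if pipe.get_interval() == 600:
        pipe.publish_counter(None, sample, source='openstack')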


@@ -81,3 +81,48 @@ class PollsterBase(PluginBase):
def get_counters(self, manager, instance):
"""Return a sequence of Counter instances from polling the
resources."""
class PublisherBase(PluginBase):
"""Base class for plugins that publish the sampler."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def publish_counter(self, context, counter, source):
"publish counters into final conduit"
class TransformerBase(PluginBase):
"""Base class for plugins that transform the counter."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def handle_sample(self, context, counter, source):
"""Transform a counter
parameter:
context: Passed from the data collector.
counter: A counter.
source: Passed from the data collector.
"""
def flush(self, context, source):
"""Flush counters cached previously"""
return []
def __init__(self, **parameter):
"""Setup transformer
Each time a transformer is involved in a pipeline, a new transformer
instance is created and chained into the pipeline, i.e. a transformer
instance is per pipeline. This helps if a transformer needs to keep
some cache or per-pipeline information.
parameter:
kwds: the parameters defined in the pipeline config file
"""
super(TransformerBase, self).__init__()
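
For illustration, a minimal transformer plugin built on TransformerBase
might look like the sketch below; the class, its 'scale' parameter, and the
setup.py entry point name are hypothetical, not part of this change:

from ceilometer import plugin


class ScalingTransformer(plugin.TransformerBase):
    """Multiply each counter volume by a configurable factor."""

    def __init__(self, scale=1.0, **parameter):
        super(ScalingTransformer, self).__init__(**parameter)
        self.scale = scale

    def handle_sample(self, context, counter, source):
        # Counter is a namedtuple, so _replace() returns an updated copy.
        return counter._replace(volume=counter.volume * self.scale)


# It would be registered under the ceilometer.transformer namespace in
# setup.py, e.g.:
#     scaling = ceilometer.transformer.sample:ScalingTransformer
# and referenced by name ("scaling") in a pipeline definition.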

tests/test_pipeline.py (new file, 668 lines)

@@ -0,0 +1,668 @@
# -*- encoding: utf-8 -*-
#
# Copyright © 2013 Intel Corp.
#
# Author: Yunhong Jiang <yunhong.jiang@intel.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from stevedore import dispatch
from stevedore import extension
from ceilometer import counter
from ceilometer import extension_manager as act_extension
from ceilometer.openstack.common import timeutils
from ceilometer import pipeline
from ceilometer.tests import base
class TestPipeline(base.TestCase):
def fake_tem_init(self):
"""Fake a transformerManager for pipeline
The faked entry point setting is below:
update: TransformerClass
except: TransformerClassException
drop: TransformerClassDrop
"""
pass
def fake_tem_get_ext(self, name):
class_name_ext = {
'update': self.TransformerClass,
'except': self.TransformerClassException,
'drop': self.TransformerClassDrop,
'cache': self.TransformerClassCache}
if name in class_name_ext:
return extension.Extension(name, None,
class_name_ext[name],
None,
)
raise KeyError(name)
class PublisherClass():
def __init__(self):
self.counters = []
def publish_counter(self, ctxt, counter, source):
self.counters.append(counter)
class PublisherClassException():
def publish_counter(self, ctxt, counter, source):
raise Exception()
class TransformerClass(object):
samples = []
def __init__(self, append_name='_update'):
self.__class__.samples = []
self.append_name = append_name
def flush(self, ctxt, source):
return []
def handle_sample(self, ctxt, counter, source):
self.__class__.samples.append(counter)
newname = getattr(counter, 'name') + self.append_name
return counter._replace(name=newname)
class TransformerClassDrop(object):
samples = []
def __init__(self):
self.__class__.samples = []
def handle_sample(self, ctxt, counter, source):
self.__class__.samples.append(counter)
class TransformerClassException(object):
def handle_sample(self, ctxt, counter, source):
raise Exception()
class TransformerClassCache(object):
samples = []
caches = []
def __init__(self, drop=True):
self.__class__.samples = []
self.__class__.caches = []
def handle_sample(self, ctxt, counter, source):
self.__class__.samples.append(counter)
self.__class__.caches.append(counter)
def flush(self, ctxt, source):
return self.__class__.caches
def _create_publisher_manager(self, ext_name='test'):
self.publisher_manager = dispatch.NameDispatchExtensionManager(
'fake',
[],
invoke_on_load=False,
)
self.publisher = self.PublisherClass()
self.new_publisher = self.PublisherClass()
self.publisher_exception = self.PublisherClassException()
self.publisher_manager.extensions = [
extension.Extension(
'test',
None,
None,
self.publisher,
),
extension.Extension(
'new',
None,
None,
self.new_publisher,
),
extension.Extension(
'except',
None,
None,
self.publisher_exception,
),
]
self.publisher_manager.by_name = dict(
(e.name, e)
for e
in self.publisher_manager.extensions)
def setUp(self):
super(TestPipeline, self).setUp()
self.test_counter = counter.Counter(
name='a',
type='test_type',
volume=1,
unit='B',
user_id="test_user",
project_id="test_proj",
resource_id="test_resource",
timestamp=timeutils.utcnow().isoformat(),
resource_metadata={}
)
self.stubs.Set(pipeline.TransformerExtensionManager,
"__init__",
self.fake_tem_init)
self.stubs.Set(pipeline.TransformerExtensionManager,
"get_ext",
self.fake_tem_get_ext)
self._create_publisher_manager()
self.pipeline_cfg = [{
'name': "test_pipeline",
'interval': 5,
'counters': ['a'],
'transformers': [
{'name': "update",
'parameters': {}}
],
'publishers': ["test"],
}, ]
def _exception_create_pipelinemanager(self):
self.assertRaises(pipeline.PipelineException,
pipeline.PipelineManager,
self.pipeline_cfg,
self.publisher_manager)
def test_no_counters(self):
del self.pipeline_cfg[0]['counters']
self._exception_create_pipelinemanager()
def test_no_transformers(self):
del self.pipeline_cfg[0]['transformers']
self._exception_create_pipelinemanager()
def test_no_name(self):
del self.pipeline_cfg[0]['name']
self._exception_create_pipelinemanager()
def test_no_interval(self):
del self.pipeline_cfg[0]['interval']
self._exception_create_pipelinemanager()
def test_no_publishers(self):
del self.pipeline_cfg[0]['publishers']
self._exception_create_pipelinemanager()
def test_check_counters_include_exclude_same(self):
counter_cfg = ['a', '!a']
self.pipeline_cfg[0]['counters'] = counter_cfg
self._exception_create_pipelinemanager()
def test_check_counters_include_exclude(self):
counter_cfg = ['a', '!b']
self.pipeline_cfg[0]['counters'] = counter_cfg
self._exception_create_pipelinemanager()
def test_check_counters_wildcard_included(self):
counter_cfg = ['a', '*']
self.pipeline_cfg[0]['counters'] = counter_cfg
self._exception_create_pipelinemanager()
def test_check_publishers_invalid_publisher(self):
publisher_cfg = ['test_invalid']
self.pipeline_cfg[0]['publishers'] = publisher_cfg
self._exception_create_pipelinemanager()
def test_invalid_string_interval(self):
self.pipeline_cfg[0]['interval'] = 'string'
self._exception_create_pipelinemanager()
def test_check_transformer_invalid_transformer(self):
transformer_cfg = [
{'name': "test_invalid",
'parameters': {}}
]
self.pipeline_cfg[0]['transformers'] = transformer_cfg
self._exception_create_pipelinemanager()
def test_pipelines_for_counter(self):
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
self.assertTrue(len(pipeline_manager.pipelines_for_counter('a'))
== 1)
self.assertTrue(len(pipeline_manager.pipelines_for_counter('b'))
== 0)
def test_get_interval(self):
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')[0]
self.assertTrue(pipe.get_interval() == 5)
def test_publisher_transformer_invoked(self):
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')[0]
pipe.publish_counter(None, self.test_counter, None)
self.assertTrue(len(self.publisher.counters) == 1)
self.assertTrue(len(self.TransformerClass.samples) == 1)
self.assertTrue(getattr(self.publisher.counters[0], "name")
== 'a_update')
self.assertTrue(getattr(self.TransformerClass.samples[0], "name")
== 'a')
def test_multiple_included_counters(self):
counter_cfg = ['a', 'b']
self.pipeline_cfg[0]['counters'] = counter_cfg
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')[0]
pipe.publish_counter(None, self.test_counter, None)
self.assertTrue(len(self.publisher.counters) == 1)
pipe = pipeline_manager.pipelines_for_counter('b')[0]
self.test_counter = self.test_counter._replace(name='b')
pipe.publish_counter(None, self.test_counter, None)
self.assertTrue(len(self.publisher.counters) == 2)
self.assertTrue(len(self.TransformerClass.samples) == 2)
self.assertTrue(getattr(self.publisher.counters[0], "name")
== 'a_update')
self.assertTrue(getattr(self.publisher.counters[1], "name")
== 'b_update')
def test_wildcard_counter(self):
counter_cfg = ['*']
self.pipeline_cfg[0]['counters'] = counter_cfg
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')[0]
pipe.publish_counter(None, self.test_counter, None)
self.assertTrue(len(self.publisher.counters) == 1)
self.assertTrue(len(self.TransformerClass.samples) == 1)
self.assertTrue(getattr(self.publisher.counters[0], "name")
== 'a_update')
def test_wildcard_excluded_counters(self):
counter_cfg = ['*', '!a']
self.pipeline_cfg[0]['counters'] = counter_cfg
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')
self.assertTrue(len(pipe) == 0)
def test_wildcard_excluded_counters_not_excluded(self):
counter_cfg = ['*', '!b']
self.pipeline_cfg[0]['counters'] = counter_cfg
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')[0]
pipe.publish_counter(None, self.test_counter, None)
self.assertTrue(len(self.publisher.counters) == 1)
self.assertTrue(len(self.TransformerClass.samples) == 1)
self.assertTrue(getattr(self.publisher.counters[0], "name")
== 'a_update')
def test_all_excluded_counters_not_excluded(self):
counter_cfg = ['!b', '!c']
self.pipeline_cfg[0]['counters'] = counter_cfg
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')[0]
pipe.publish_counter(None, self.test_counter, None)
self.assertTrue(len(self.publisher.counters) == 1)
self.assertTrue(len(self.TransformerClass.samples) == 1)
self.assertTrue(getattr(self.publisher.counters[0], "name")
== 'a_update')
self.assertTrue(getattr(self.TransformerClass.samples[0], "name")
== 'a')
def test_all_excluded_counters_is_excluded(self):
counter_cfg = ['!a', '!c']
self.pipeline_cfg[0]['counters'] = counter_cfg
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')
self.assertTrue(len(pipe) == 0)
pipe_1 = pipeline_manager.pipelines_for_counter('c')
self.assertTrue(len(pipe_1) == 0)
def test_multiple_pipeline(self):
self.pipeline_cfg.append({
'name': 'second_pipeline',
'interval': 5,
'counters': ['b'],
'transformers': [{
'name': 'update',
'parameters':
{
"append_name": "_new",
}
}],
'publishers': ['new'],
})
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')[0]
pipe_1 = pipeline_manager.pipelines_for_counter('b')[0]
pipe.publish_counter(None, self.test_counter, None)
self.test_counter = self.test_counter._replace(name='b')
pipe_1.publish_counter(None, self.test_counter, None)
self.assertTrue(len(self.publisher.counters) == 1)
self.assertTrue(getattr(self.publisher.counters[0], "name")
== 'a_update')
self.assertTrue(len(self.new_publisher.counters) == 1)
self.assertTrue(getattr(self.new_publisher.counters[0], "name")
== 'b_new')
self.assertTrue(getattr(self.TransformerClass.samples[0], "name")
== 'a')
self.assertTrue(len(self.TransformerClass.samples) == 2)
self.assertTrue(getattr(self.TransformerClass.samples[0], "name")
== 'a')
self.assertTrue(getattr(self.TransformerClass.samples[1], "name")
== 'b')
def test_multiple_pipeline_exception(self):
self.pipeline_cfg.append({
'name': "second_pipeline",
"interval": 5,
'counters': ['b'],
'transformers': [{
'name': 'update',
'parameters':
{
"append_name": "_new",
}
}],
'publishers': ['except'],
})
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')[0]
pipe_1 = pipeline_manager.pipelines_for_counter('b')[0]
pipe.publish_counter(None, self.test_counter, None)
self.test_counter = self.test_counter._replace(name='b')
pipe_1.publish_counter(None, self.test_counter, None)
self.assertTrue(len(self.publisher.counters) == 1)
self.assertTrue(getattr(self.publisher.counters[0], "name")
== 'a_update')
self.assertTrue(len(self.new_publisher.counters) == 0)
self.assertTrue(len(self.TransformerClass.samples) == 2)
self.assertTrue(getattr(self.TransformerClass.samples[0], "name")
== 'a')
self.assertTrue(getattr(self.TransformerClass.samples[1], "name")
== 'b')
def test_none_transformer_pipeline(self):
self.pipeline_cfg[0]['transformers'] = None
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')[0]
pipe.publish_counter(None, self.test_counter, None)
self.assertTrue(len(self.publisher.counters) == 1)
self.assertTrue(getattr(self.publisher.counters[0], 'name') == 'a')
def test_empty_transformer_pipeline(self):
self.pipeline_cfg[0]['transformers'] = []
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')[0]
pipe.publish_counter(None, self.test_counter, None)
self.assertTrue(len(self.publisher.counters) == 1)
self.assertTrue(getattr(self.publisher.counters[0], 'name') == 'a')
def test_multiple_transformer_same_class(self):
self.pipeline_cfg[0]['transformers'] = [
{
'name': 'update',
'parameters': {}
},
{
'name': 'update',
'parameters': {}
},
]
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')[0]
pipe.publish_counter(None, self.test_counter, None)
self.assertTrue(len(self.publisher.counters) == 1)
self.assertTrue(getattr(self.publisher.counters[0], 'name')
== 'a_update_update')
self.assertTrue(len(self.TransformerClass.samples) == 2)
self.assertTrue(getattr(self.TransformerClass.samples[0], 'name')
== 'a')
self.assertTrue(getattr(self.TransformerClass.samples[1], 'name')
== 'a_update')
def test_multiple_transformer_same_class_different_parameter(self):
self.pipeline_cfg[0]['transformers'] = [
{
'name': 'update',
'parameters':
{
"append_name": "_update",
}
},
{
'name': 'update',
'parameters':
{
"append_name": "_new",
}
},
]
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')[0]
pipe.publish_counter(None, self.test_counter, None)
self.assertTrue(len(self.TransformerClass.samples) == 2)
self.assertTrue(getattr(self.TransformerClass.samples[0], 'name')
== 'a')
self.assertTrue(getattr(self.TransformerClass.samples[1], 'name')
== 'a_update')
self.assertTrue(len(self.publisher.counters) == 1)
self.assertTrue(getattr(self.publisher.counters[0], 'name')
== 'a_update_new')
def test_multiple_transformer_drop_transformer(self):
self.pipeline_cfg[0]['transformers'] = [
{
'name': 'update',
'parameters':
{
"append_name": "_update",
}
},
{
'name': 'drop',
'parameters': {}
},
{
'name': 'update',
'parameters':
{
"append_name": "_new",
}
},
]
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')[0]
pipe.publish_counter(None, self.test_counter, None)
self.assertTrue(len(self.publisher.counters) == 0)
self.assertTrue(len(self.TransformerClass.samples) == 1)
self.assertTrue(getattr(self.TransformerClass.samples[0], 'name')
== 'a')
self.assertTrue(len(self.TransformerClassDrop.samples) == 1)
self.assertTrue(getattr(self.TransformerClassDrop.samples[0], 'name')
== 'a_update')
def test_multiple_publisher(self):
self.pipeline_cfg[0]['publishers'] = ['test', 'new']
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')[0]
pipe.publish_counter(None, self.test_counter, None)
self.assertTrue(len(self.publisher.counters) == 1)
self.assertTrue(len(self.new_publisher.counters) == 1)
self.assertTrue(getattr(self.new_publisher.counters[0], 'name')
== 'a_update')
self.assertTrue(getattr(self.publisher.counters[0], 'name')
== 'a_update')
def test_multiple_publisher_isolation(self):
self.pipeline_cfg[0]['publishers'] = ['except', 'new']
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')[0]
pipe.publish_counter(None, self.test_counter, None)
self.assertTrue(len(self.new_publisher.counters) == 1)
self.assertTrue(getattr(self.new_publisher.counters[0], 'name')
== 'a_update')
def test_multiple_counter_pipeline(self):
self.pipeline_cfg[0]['counters'] = ['a', 'b']
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')[0]
pipe_1 = pipeline_manager.pipelines_for_counter('b')[0]
self.assertTrue(pipe is pipe_1)
pipe.publish_counter(None, self.test_counter, None)
self.test_counter = self.test_counter._replace(name='b')
pipe_1.publish_counter(None, self.test_counter, None)
self.assertTrue(len(self.publisher.counters) == 2)
self.assertTrue(getattr(self.publisher.counters[0], 'name')
== 'a_update')
self.assertTrue(getattr(self.publisher.counters[1], 'name')
== 'b_update')
def test_flush_pipeline_cache(self):
self.pipeline_cfg[0]['transformers'].extend([
{
'name': 'cache',
'parameters': {}
},
{
'name': 'update',
'parameters':
{
'append_name': '_new'
}
}, ]
)
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')[0]
pipe.publish_counter(None, self.test_counter, None)
self.assertTrue(len(self.TransformerClassCache.caches) == 1)
self.assertTrue(len(self.TransformerClass.samples) == 1)
self.assertTrue(len(self.publisher.counters) == 0)
pipe.flush(None, None)
self.assertTrue(len(self.publisher.counters) == 1)
self.assertTrue(len(self.TransformerClass.samples) == 2)
self.assertTrue(len(self.TransformerClassCache.caches) == 1)
self.assertTrue(getattr(self.publisher.counters[0], 'name')
== 'a_update_new')
def test_flush_pipeline_cache_multiple_counter(self):
self.pipeline_cfg[0]['transformers'].extend([
{
'name': 'cache',
'parameters': {}
},
{
'name': 'update',
'parameters':
{
'append_name': '_new'
}
}, ]
)
self.pipeline_cfg[0]['counters'] = ['a', 'b']
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')[0]
pipe.publish_counter(None, self.test_counter, None)
self.test_counter = self.test_counter._replace(name='b')
pipe.publish_counter(None, self.test_counter, None)
self.assertTrue(len(self.TransformerClassCache.caches) == 2)
self.assertTrue(len(self.TransformerClass.samples) == 2)
self.assertTrue(len(self.publisher.counters) == 0)
pipe.flush(None, None)
self.assertTrue(len(self.publisher.counters) == 2)
self.assertTrue(len(self.TransformerClassCache.caches) == 2)
self.assertTrue(len(self.TransformerClass.samples) == 4)
self.assertTrue(getattr(self.publisher.counters[0], 'name')
== 'a_update_new')
self.assertTrue(getattr(self.publisher.counters[1], 'name')
== 'b_update_new')
def test_flush_pipeline_cache_before_publisher(self):
self.pipeline_cfg[0]['transformers'].append({
'name': 'cache',
'parameters': {}
})
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a')[0]
pipe.publish_counter(None, self.test_counter, None)
self.assertTrue(len(self.TransformerClassCache.caches) == 1)
self.assertTrue(len(self.publisher.counters) == 0)
pipe.flush(None, None)
self.assertTrue(len(self.publisher.counters) == 1)
self.assertTrue(getattr(self.publisher.counters[0], 'name')
== 'a_update')
def test_variable_counter(self):
self.pipeline_cfg = [{
'name': "test_pipeline",
'interval': 5,
'counters': ['a:*'],
'transformers': [
{'name': "update",
'parameters': {}}
],
'publishers': ["test"],
}, ]
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.publisher_manager)
pipe = pipeline_manager.pipelines_for_counter('a:*')[0]
self.test_counter = self.test_counter._replace(name='a:b')
pipe.publish_counter(None, self.test_counter, None)
self.assertTrue(len(self.publisher.counters) == 1)
self.assertTrue(len(self.TransformerClass.samples) == 1)
self.assertTrue(getattr(self.publisher.counters[0], "name")
== 'a:b_update')
self.assertTrue(getattr(self.TransformerClass.samples[0], "name")
== 'a:b')


@@ -20,3 +20,4 @@ lxml
requests<1.0
extras
wsme>=0.5b1
pyyaml