def setup_pipeline(transformer_manager=None):
    """Setup pipeline manager according to yaml config file.

    :param transformer_manager: optional pre-built transformer extension
        manager; when omitted (or falsy) one is created from the
        'ceilometer.transformer' namespace.
    :returns: a PipelineManager built from the parsed YAML configuration.
    """
    config_path = cfg.CONF.pipeline_cfg_file
    # Fall back to oslo.config's search path when the file is not found
    # at the configured location.
    if not os.path.exists(config_path):
        config_path = cfg.CONF.find_file(config_path)
    LOG.debug(_("Pipeline config file: %s"), config_path)

    with open(config_path) as config_file:
        pipeline_cfg = yaml.safe_load(config_file.read())
    LOG.info(_("Pipeline config: %s"), pipeline_cfg)

    if not transformer_manager:
        transformer_manager = xformer.TransformerExtensionManager(
            'ceilometer.transformer',
        )
    return PipelineManager(pipeline_cfg, transformer_manager)
def test_process_notification(self):
    """Notifying with MESSAGE should publish samples through the pipeline."""
    xform_manager = transformer.TransformerExtensionManager(
        'ceilometer.transformer',
    )
    test_pipeline_cfg = [{
        'name': "test_pipeline",
        'interval': 60,
        'counters': ['*'],
        'transformers': [],
        'publishers': ["test"],
    }]
    notifier._pipeline_manager = pipeline.PipelineManager(
        test_pipeline_cfg, xform_manager)

    test_publisher = notifier._pipeline_manager.pipelines[0].publishers[0]
    # Nothing published yet.
    self.assertEqual(len(test_publisher.samples), 0)

    notifier.notify(None, MESSAGE)

    # The notification must have produced at least one sample, including
    # the ephemeral disk size meter.
    self.assertTrue(len(test_publisher.samples) > 0)
    self.assertIn('disk.ephemeral.size',
                  [c.name for c in test_publisher.samples])
def initialize_service_hook(self, service):
    '''Consumers must be declared before consume_thread start.'''
    LOG.debug('initialize_service_hooks')
    self.pipeline_manager = pipeline.setup_pipeline(
        transformer.TransformerExtensionManager(
            'ceilometer.transformer',
        ),
    )

    LOG.debug('loading notification handlers from %s',
              self.COLLECTOR_NAMESPACE)
    self.notification_manager = \
        extension.ExtensionManager(
            namespace=self.COLLECTOR_NAMESPACE,
            invoke_on_load=True,
        )

    if not list(self.notification_manager):
        LOG.warning('Failed to load any notification handlers for %s',
                    self.COLLECTOR_NAMESPACE)
    self.notification_manager.map(self._setup_subscription)

    # Load all configured dispatchers
    self.dispatchers = []
    for dispatcher in named.NamedExtensionManager(
            namespace=self.DISPATCHER_NAMESPACE,
            names=cfg.CONF.collector.dispatcher,
            invoke_on_load=True,
            invoke_args=[cfg.CONF]):
        if dispatcher.obj:
            self.dispatchers.append(dispatcher.obj)
    # Pass the dispatcher list as a lazy %-argument instead of formatting
    # eagerly with '%' — the string is only built if INFO is enabled,
    # matching the logging style used elsewhere in this method.
    LOG.info('dispatchers loaded %s', self.dispatchers)

    # Set ourselves up as a separate worker for the metering data,
    # since the default for service is to use create_consumer().
    self.conn.create_worker(
        cfg.CONF.publisher_rpc.metering_topic,
        rpc_dispatcher.RpcDispatcher([self]),
        'ceilometer.collector.' + cfg.CONF.publisher_rpc.metering_topic,
    )
def initialize_service_hook(self, service):
    '''Consumers must be declared before consume_thread start.'''
    xform_manager = transformer.TransformerExtensionManager(
        'ceilometer.transformer',
    )
    self.pipeline_manager = pipeline.setup_pipeline(xform_manager)

    # Build the event converter from the trait plugin extensions.
    LOG.debug(_('Loading event definitions'))
    trait_plugin_manager = extension.ExtensionManager(
        namespace='ceilometer.event.trait_plugin')
    self.event_converter = event_converter.setup_events(
        trait_plugin_manager)

    self.notification_manager = extension.ExtensionManager(
        namespace=self.NOTIFICATION_NAMESPACE,
        invoke_on_load=True,
    )
    if not list(self.notification_manager):
        LOG.warning(_('Failed to load any notification handlers for %s'),
                    self.NOTIFICATION_NAMESPACE)
    self.notification_manager.map(self._setup_subscription)
def setUp(self):
    """Build a gauge sample, stub plugin loading, and prime pipeline cfg."""
    super(TestPipeline, self).setUp()

    self.test_counter = sample.Sample(
        name='a',
        type=sample.TYPE_GAUGE,
        volume=1,
        unit='B',
        user_id="test_user",
        project_id="test_proj",
        resource_id="test_resource",
        timestamp=timeutils.utcnow().isoformat(),
        resource_metadata={},
    )

    # Patch the extension machinery so no real plugins are loaded.
    patch_specs = [
        (transformer.TransformerExtensionManager, "__init__",
         self.fake_tem_init),
        (transformer.TransformerExtensionManager, "get_ext",
         self.fake_tem_get_ext),
        (publisher, 'get_publisher', self.get_publisher),
    ]
    for target, attribute, fake in patch_specs:
        self.useFixture(mockpatch.PatchObject(
            target, attribute, side_effect=fake))

    self.transformer_manager = transformer.TransformerExtensionManager()

    self.pipeline_cfg = [{
        'name': "test_pipeline",
        'interval': 5,
        'counters': ['a'],
        'transformers': [{'name': "update", 'parameters': {}}],
        'publishers': ["test://"],
    }]
def _load_pipeline_manager():
    """Initialize the module-level pipeline manager singleton."""
    global _pipeline_manager

    xform_manager = transformer.TransformerExtensionManager(
        'ceilometer.transformer',
    )
    _pipeline_manager = pipeline.setup_pipeline(xform_manager)
def send_sample():
    """Command-line entry point: publish a single sample via the pipeline.

    Registers CLI options describing the sample, prepares the service,
    wires console logging, then pushes one sample through the configured
    pipeline publishers.
    """
    import ast  # local import: only needed for metadata parsing below

    cfg.CONF.register_cli_opts([
        cfg.StrOpt('sample-name',
                   short='n',
                   help='Meter name.',
                   required=True),
        cfg.StrOpt('sample-type',
                   short='y',
                   help='Meter type (gauge, delta, cumulative).',
                   default='gauge',
                   required=True),
        cfg.StrOpt('sample-unit',
                   short='U',
                   help='Meter unit.'),
        cfg.IntOpt('sample-volume',
                   short='l',
                   help='Meter volume value.',
                   default=1),
        cfg.StrOpt('sample-resource',
                   short='r',
                   help='Meter resource id.',
                   required=True),
        cfg.StrOpt('sample-user',
                   short='u',
                   help='Meter user id.'),
        cfg.StrOpt('sample-project',
                   short='p',
                   help='Meter project id.'),
        cfg.StrOpt('sample-timestamp',
                   short='i',
                   help='Meter timestamp.',
                   default=timeutils.utcnow().isoformat()),
        cfg.StrOpt('sample-metadata',
                   short='m',
                   help='Meter metadata.'),
    ])

    service.prepare_service()

    # Set up logging to use the console
    console = logging.StreamHandler(sys.stderr)
    console.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(message)s')
    console.setFormatter(formatter)
    root_logger = logging.getLogger('')
    root_logger.addHandler(console)
    root_logger.setLevel(logging.DEBUG)

    pipeline_manager = pipeline.setup_pipeline(
        transformer.TransformerExtensionManager(
            'ceilometer.transformer',
        ),
    )

    with pipeline_manager.publisher(context.get_admin_context()) as p:
        p([sample.Sample(
            name=cfg.CONF.sample_name,
            type=cfg.CONF.sample_type,
            unit=cfg.CONF.sample_unit,
            volume=cfg.CONF.sample_volume,
            user_id=cfg.CONF.sample_user,
            project_id=cfg.CONF.sample_project,
            resource_id=cfg.CONF.sample_resource,
            timestamp=cfg.CONF.sample_timestamp,
            # SECURITY: the metadata option is user-controlled text, so it
            # must never reach eval(); ast.literal_eval only accepts
            # Python literals (dicts, lists, strings, numbers), which is
            # all this option is meant to carry.
            resource_metadata=(cfg.CONF.sample_metadata and
                               ast.literal_eval(
                                   cfg.CONF.sample_metadata)))])
def setup_pipeline(self):
    """Attach a PipelineManager built from self.pipeline_cfg to the mgr."""
    xform_manager = transformer.TransformerExtensionManager(
        'ceilometer.transformer',
    )
    self.transformer_manager = xform_manager
    self.mgr.pipeline_manager = pipeline.PipelineManager(
        self.pipeline_cfg, xform_manager)
def setUp(self):
    """Build a gauge counter, stub the transformer manager, and register
    fake publisher extensions plus a baseline pipeline configuration."""
    super(TestPipeline, self).setUp()

    self.test_counter = counter.Counter(
        name='a',
        type=counter.TYPE_GAUGE,
        volume=1,
        unit='B',
        user_id="test_user",
        project_id="test_proj",
        resource_id="test_resource",
        timestamp=timeutils.utcnow().isoformat(),
        resource_metadata={},
    )

    # Stub out transformer extension loading so no real plugins load.
    self.stubs.Set(transformer.TransformerExtensionManager,
                   "__init__", self.fake_tem_init)
    self.stubs.Set(transformer.TransformerExtensionManager,
                   "get_ext", self.fake_tem_get_ext)

    self.publisher_manager = publisher.PublisherExtensionManager('fake')
    self.transformer_manager = transformer.TransformerExtensionManager()

    # Three fake publishers: a normal one, a second 'new' one, and one
    # that raises, so error paths can be exercised.
    self.publisher = self.PublisherClass()
    self.new_publisher = self.PublisherClass()
    self.publisher_exception = self.PublisherClassException()
    self.publisher_manager.extensions = [
        extension.Extension('test', None, None, self.publisher),
        extension.Extension('new', None, None, self.new_publisher),
        extension.Extension('except', None, None,
                            self.publisher_exception),
    ]
    self.publisher_manager.by_name = dict(
        (e.name, e) for e in self.publisher_manager.extensions)

    self.pipeline_cfg = [{
        'name': "test_pipeline",
        'interval': 5,
        'counters': ['a'],
        'transformers': [{'name': "update", 'parameters': {}}],
        'publishers': ["test"],
    }]