def test_multiple_pipeline_exception(self):
    """Publish two samples while one pipeline config is broken.

    With exception re-raising disabled, the intact pipeline must still
    receive exactly the one sample ('a') that matches its meter filter;
    the sample renamed to 'b' goes only to the broken pipeline.
    """
    self._reraise_exception = False
    self._break_pipeline_cfg()
    self._build_and_set_new_pipeline()
    mgr = pipeline.SamplePipelineManager(self.CONF)

    # First publish: the original counter (named 'a').
    with mgr.publisher() as publish:
        publish([self.test_counter])

    # Clone the counter under the name 'b' and publish again.
    src = self.test_counter
    self.test_counter = sample.Sample(
        name='b',
        type=src.type,
        volume=src.volume,
        unit=src.unit,
        user_id=src.user_id,
        project_id=src.project_id,
        resource_id=src.resource_id,
        timestamp=src.timestamp,
        resource_metadata=src.resource_metadata,
    )
    with mgr.publisher() as publish:
        publish([self.test_counter])

    # Only the healthy pipeline's publisher saw anything: one call,
    # one sample, and it is the original 'a'.
    pub = mgr.pipelines[0].publishers[0]
    self.assertEqual(1, pub.calls)
    self.assertEqual(1, len(pub.samples))
    self.assertEqual('a', pub.samples[0].name)
def test_wildcard_excluded_counters(self):
    """An explicit '!a' exclusion overrides the '*' wildcard for 'a'."""
    self._set_pipeline_cfg('meters', ['*', '!a'])
    self._build_and_set_new_pipeline()
    mgr = pipeline.SamplePipelineManager(self.CONF)
    self.assertFalse(mgr.pipelines[0].source.support_meter('a'))
def test_fake_volume_counter(self, LOG):
    """A sample whose volume is not a number is dropped with a warning.

    ``LOG`` is the mocked logger injected by the test decorator; the
    pipeline must warn exactly once and publish nothing.
    """
    self._set_pipeline_cfg('meters', ['fake_volume'])
    self._build_and_set_new_pipeline()
    mgr = pipeline.SamplePipelineManager(self.CONF)
    pub = mgr.pipelines[0].publishers[0]

    # Build a sample with a non-numeric volume, copying the remaining
    # fields from the fixture counter.
    src = self.test_counter
    bad = sample.Sample(
        name='fake_volume',
        type=src.type,
        volume='fake_value',
        unit=src.unit,
        user_id=src.user_id,
        project_id=src.project_id,
        resource_id=src.resource_id,
        timestamp=src.timestamp,
        resource_metadata=src.resource_metadata,
    )
    with mgr.publisher() as publish:
        publish([bad])

    # The warning text must match the pipeline's message verbatim.
    expected = (
        'metering data %(counter_name)s for %(resource_id)s '
        '@ %(timestamp)s has volume which is not a number '
        '(volume: %(counter_volume)s), the sample will be dropped'
        % {'counter_name': bad.name,
           'resource_id': bad.resource_id,
           'timestamp': bad.timestamp,
           'counter_volume': bad.volume})
    LOG.warning.assert_called_once_with(expected)
    self.assertEqual(0, len(pub.samples))
def test_multiple_included_counters(self):
    """Both meters listed in the config ('a' and 'b') pass through."""
    self._set_pipeline_cfg('meters', ['a', 'b'])
    self._build_and_set_new_pipeline()
    mgr = pipeline.SamplePipelineManager(self.CONF)

    with mgr.publisher() as publish:
        publish([self.test_counter])

    pub = mgr.pipelines[0].publishers[0]
    self.assertEqual(1, len(pub.samples))

    # Republish the fixture counter under the second configured name.
    src = self.test_counter
    self.test_counter = sample.Sample(
        name='b',
        type=src.type,
        volume=src.volume,
        unit=src.unit,
        user_id=src.user_id,
        project_id=src.project_id,
        resource_id=src.resource_id,
        timestamp=src.timestamp,
        resource_metadata=src.resource_metadata,
    )
    with mgr.publisher() as publish:
        publish([self.test_counter])

    self.assertEqual(2, len(pub.samples))
    self.assertEqual('a', pub.samples[0].name)
    self.assertEqual('b', pub.samples[1].name)
def test_excluded_counter_and_excluded_wildcard_counters(self):
    """Mixing an exact and a wildcard exclusion blocks both patterns.

    Everything not matching '!cpu' or '!disk.*' remains supported.
    """
    self._set_pipeline_cfg('meters', ['!cpu', '!disk.*'])
    self._build_and_set_new_pipeline()
    mgr = pipeline.SamplePipelineManager(self.CONF)
    source = mgr.pipelines[0].source
    self.assertFalse(source.support_meter('disk.read.bytes'))
    self.assertFalse(source.support_meter('cpu'))
    self.assertTrue(source.support_meter('instance'))
def test_all_excluded_counters_is_excluded(self):
    """An all-exclusion list rejects the named meters, accepts the rest."""
    self._set_pipeline_cfg('meters', ['!a', '!c'])
    self._build_and_set_new_pipeline()
    mgr = pipeline.SamplePipelineManager(self.CONF)
    source = mgr.pipelines[0].source
    # 'a' and 'c' are explicitly excluded; 'b' falls through as allowed.
    self.assertFalse(source.support_meter('a'))
    self.assertTrue(source.support_meter('b'))
    self.assertFalse(source.support_meter('c'))
def test_wildcard_excluded_counters_not_excluded(self):
    """With ['*', '!b'], the sample named 'a' is still published."""
    self._set_pipeline_cfg('meters', ['*', '!b'])
    self._build_and_set_new_pipeline()
    mgr = pipeline.SamplePipelineManager(self.CONF)

    with mgr.publisher() as publish:
        publish([self.test_counter])

    pub = mgr.pipelines[0].publishers[0]
    self.assertEqual(1, len(pub.samples))
    self.assertEqual('a', pub.samples[0].name)
def send_sample():
    """CLI entry point: build one Sample from command-line options and
    push it through the configured sample pipeline.
    """
    conf = cfg.ConfigOpts()
    # NOTE(review): the timestamp default is computed when this function
    # runs (not per --help render), so all samples from one invocation
    # share the same default timestamp.
    cli_opts = [
        cfg.StrOpt('sample-name', short='n',
                   help='Meter name.', required=True),
        cfg.StrOpt('sample-type', short='y',
                   help='Meter type (gauge, delta, cumulative).',
                   default='gauge', required=True),
        cfg.StrOpt('sample-unit', short='U', help='Meter unit.'),
        cfg.IntOpt('sample-volume', short='l',
                   help='Meter volume value.', default=1),
        cfg.StrOpt('sample-resource', short='r',
                   help='Meter resource id.', required=True),
        cfg.StrOpt('sample-user', short='u', help='Meter user id.'),
        cfg.StrOpt('sample-project', short='p', help='Meter project id.'),
        cfg.StrOpt('sample-timestamp', short='i',
                   help='Meter timestamp.',
                   default=timeutils.utcnow().isoformat()),
        cfg.StrOpt('sample-metadata', short='m', help='Meter metadata.'),
    ]
    conf.register_cli_opts(cli_opts)
    service.prepare_service(conf=conf)

    # Mirror everything to stderr at DEBUG so the operator sees the
    # pipeline's log output on the console.
    handler = logging.StreamHandler(sys.stderr)
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter('%(message)s'))
    root = logging.getLogger('')
    root.addHandler(handler)
    root.setLevel(logging.DEBUG)

    mgr = sample_pipe.SamplePipelineManager(conf)
    # NOTE(review): eval() of --sample-metadata executes arbitrary Python
    # supplied on the command line; consider ast.literal_eval instead.
    metadata = conf.sample_metadata and eval(conf.sample_metadata)
    with mgr.publisher() as publish:
        publish([
            sample.Sample(
                name=conf.sample_name,
                type=conf.sample_type,
                unit=conf.sample_unit,
                volume=conf.sample_volume,
                user_id=conf.sample_user,
                project_id=conf.sample_project,
                resource_id=conf.sample_resource,
                timestamp=conf.sample_timestamp,
                resource_metadata=metadata)
        ])
def test_multiple_publisher_isolation(self):
    """A failing publisher must not block its siblings in the same sink.

    'except://' raises on publish; 'new://' must still receive the sample.
    """
    self._reraise_exception = False
    self._set_pipeline_cfg('publishers', ['except://', 'new://'])
    self._build_and_set_new_pipeline()
    mgr = pipeline.SamplePipelineManager(self.CONF)

    with mgr.publisher() as publish:
        publish([self.test_counter])

    survivor = mgr.pipelines[0].publishers[1]
    self.assertEqual(1, len(survivor.samples))
    self.assertEqual('a', survivor.samples[0].name)
def test_counter_dont_match(self):
    """A sample whose name matches no configured meter is never published."""
    self._set_pipeline_cfg('meters', ['nomatch'])
    self._build_and_set_new_pipeline()
    mgr = pipeline.SamplePipelineManager(self.CONF)

    with mgr.publisher() as publish:
        publish([self.test_counter])

    pub = mgr.pipelines[0].publishers[0]
    # The publisher was neither invoked nor given any samples.
    self.assertEqual(0, len(pub.samples))
    self.assertEqual(0, pub.calls)
def _do_test_rate_of_change_in_boilerplate_pipeline_cfg(
        self, index, meters, units):
    """Verify rate-of-change mapping for one sink of the shipped config.

    Loads the boilerplate pipeline.yaml, redirects every sink to the
    'test://' publisher, then checks the pipeline at ``index`` against
    the expected ``meters``/``units`` mapping.
    """
    with open('ceilometer/pipeline/data/pipeline.yaml') as fap:
        cfg = yaml.safe_load(fap.read())
    for sink in cfg['sinks']:
        sink['publishers'] = ['test://']
    self.CONF.set_override('pipeline_cfg_file', self.cfg2file(cfg))
    mgr = pipeline.SamplePipelineManager(self.CONF)
    self._do_test_rate_of_change_mapping(
        mgr.pipelines[index], meters, units)
def test_source_with_multiple_sinks(self):
    """One source fanned out to two sinks yields one pipeline per sink.

    Each sink applies its own transformer suffix ('_update' vs '_new')
    and each publisher sees both samples.
    """
    self._set_pipeline_cfg('meters', ['a', 'b'])
    second_sink = {
        'name': 'second_sink',
        'transformers': [{
            'name': 'update',
            'parameters': {
                'append_name': '_new',
            }
        }],
        'publishers': ['new'],
    }
    self.pipeline_cfg['sinks'].append(second_sink)
    self.pipeline_cfg['sources'][0]['sinks'].append('second_sink')
    self._build_and_set_new_pipeline()
    mgr = pipeline.SamplePipelineManager(self.CONF)

    with mgr.publisher() as publish:
        publish([self.test_counter])

    # Republish under the second configured meter name.
    src = self.test_counter
    self.test_counter = sample.Sample(
        name='b',
        type=src.type,
        volume=src.volume,
        unit=src.unit,
        user_id=src.user_id,
        project_id=src.project_id,
        resource_id=src.resource_id,
        timestamp=src.timestamp,
        resource_metadata=src.resource_metadata,
    )
    with mgr.publisher() as publish:
        publish([self.test_counter])

    self.assertEqual(2, len(mgr.pipelines))
    self.assertEqual('test_source:test_sink', str(mgr.pipelines[0]))
    self.assertEqual('test_source:second_sink', str(mgr.pipelines[1]))

    checks = [
        (mgr.pipelines[0].publishers[0], '_update'),
        (mgr.pipelines[1].publishers[0], '_new'),
    ]
    for pub, suffix in checks:
        self.assertEqual(2, len(pub.samples))
        self.assertEqual(2, pub.calls)
        self.assertEqual('a' + suffix, pub.samples[0].name)
        self.assertEqual('b' + suffix, pub.samples[1].name)
def test_multiple_sources_with_single_sink(self):
    """Two sources sharing one sink produce two independent pipelines.

    Both pipelines publish through the shared sink, so each publisher
    receives both (transformed) samples, and the transformer records
    the untransformed originals.
    """
    self.pipeline_cfg['sources'].append({
        'name': 'second_source',
        'meters': ['b'],
        'sinks': ['test_sink']
    })
    self._build_and_set_new_pipeline()
    mgr = pipeline.SamplePipelineManager(self.CONF)

    with mgr.publisher() as publish:
        publish([self.test_counter])

    # Republish the fixture counter under the second source's meter name.
    src = self.test_counter
    self.test_counter = sample.Sample(
        name='b',
        type=src.type,
        volume=src.volume,
        unit=src.unit,
        user_id=src.user_id,
        project_id=src.project_id,
        resource_id=src.resource_id,
        timestamp=src.timestamp,
        resource_metadata=src.resource_metadata,
    )
    with mgr.publisher() as publish:
        publish([self.test_counter])

    self.assertEqual(2, len(mgr.pipelines))
    self.assertEqual('test_source:test_sink', str(mgr.pipelines[0]))
    self.assertEqual('second_source:test_sink', str(mgr.pipelines[1]))

    for pub in (mgr.pipelines[0].publishers[0],
                mgr.pipelines[1].publishers[0]):
        self.assertEqual(2, len(pub.samples))
        self.assertEqual(2, pub.calls)
        self.assertEqual('a_update', pub.samples[0].name)
        self.assertEqual('b_update', pub.samples[1].name)

    transformed = self.TransformerClass.samples
    self.assertEqual(2, len(transformed))
    self.assertEqual(['a', 'b'], [s.name for s in transformed])