def test_create_event_workflow(self, fakeclient_cls):
    url = netutils.urlsplit("gnocchi://")
    self.publisher = gnocchi.GnocchiPublisher(self.conf.conf, url)

    fakeclient = fakeclient_cls.return_value

    now = timeutils.utcnow()
    self.useFixture(utils_fixture.TimeFixture(now))

    expected_calls = [
        mock.call.resource.create(
            'instance',
            {'id': '9f9d01b9-4a58-4271-9e27-398b21ab20d1',
             'user_id': '1e3ce043029547f1a61c1996d1a531a2',
             'project_id': '7c150a59fe714e6f9263774af9688f0e',
             'availability_zone': 'zone1',
             'flavor_name': 'm1.tiny',
             'flavor_id': '2',
             'host': 'vagrant-precise'}),
    ]

    self.publisher.publish_events([INSTANCE_CREATE_END])
    self.assertEqual(1, len(fakeclient.mock_calls))
    for call in expected_calls:
        self.assertIn(call, fakeclient.mock_calls)
def test_broken_config_load(self, mylog):
    contents = [("---\n"
                 "resources:\n"
                 "  - resource_type: foobar\n"),
                ("---\n"
                 "resources:\n"
                 "  - resource_type: 0\n"),
                ("---\n"
                 "resources:\n"
                 "  - sample_types: ['foo', 'bar']\n"),
                ("---\n"
                 "resources:\n"
                 "  - sample_types: foobar\n"
                 "  - resource_type: foobar\n"),
                ]

    for content in contents:
        if six.PY3:
            content = content.encode('utf-8')

        temp = fileutils.write_to_tempfile(content=content,
                                           prefix='gnocchi_resources',
                                           suffix='.yaml')
        self.addCleanup(os.remove, temp)
        url = netutils.urlsplit(
            "gnocchi://?resources_definition_file=" + temp)
        d = gnocchi.GnocchiPublisher(self.conf.conf, url)
        self.assertTrue(mylog.error.called)
        self.assertEqual(0, len(d.resources_definition))
def test_event_workflow(self, fakeclient_cls):
    url = netutils.urlsplit("gnocchi://")
    self.publisher = gnocchi.GnocchiPublisher(self.conf.conf, url)

    fakeclient = fakeclient_cls.return_value

    fakeclient.resource.search.side_effect = [
        [{"id": "b26268d6-8bb5-11e6-baff-00224d8226cd",
          "type": "instance_disk",
          "instance_id": "9f9d01b9-4a58-4271-9e27-398b21ab20d1"}],
        [{"id": "b1c7544a-8bb5-11e6-850e-00224d8226cd",
          "type": "instance_network_interface",
          "instance_id": "9f9d01b9-4a58-4271-9e27-398b21ab20d1"}],
    ]

    search_params = {
        '=': {'instance_id': '9f9d01b9-4a58-4271-9e27-398b21ab20d1'}
    }

    now = timeutils.utcnow()
    self.useFixture(utils_fixture.TimeFixture(now))

    expected_calls = [
        mock.call.resource.search('instance_disk', search_params),
        mock.call.resource.search('instance_network_interface',
                                  search_params),
        mock.call.resource.update(
            'instance', '9f9d01b9-4a58-4271-9e27-398b21ab20d1',
            {'ended_at': now.isoformat()}),
        mock.call.resource.update(
            'instance_disk', 'b26268d6-8bb5-11e6-baff-00224d8226cd',
            {'ended_at': now.isoformat()}),
        mock.call.resource.update(
            'instance_network_interface',
            'b1c7544a-8bb5-11e6-850e-00224d8226cd',
            {'ended_at': now.isoformat()}),
        mock.call.resource.update(
            'image', 'dc337359-de70-4044-8e2c-80573ba6e577',
            {'ended_at': now.isoformat()}),
        mock.call.resource.update(
            'volume', '6cc6e7dd-d17d-460f-ae79-7e08a216ce96',
            {'ended_at': now.isoformat()}),
        mock.call.resource.update(
            'network', '705e2c08-08e8-45cb-8673-5c5be955569b',
            {'ended_at': now.isoformat()})
    ]

    self.publisher.publish_events([INSTANCE_DELETE_START,
                                   IMAGE_DELETE_START,
                                   VOLUME_DELETE_START,
                                   FLOATINGIP_DELETE_END])
    self.assertEqual(8, len(fakeclient.mock_calls))
    for call in expected_calls:
        self.assertIn(call, fakeclient.mock_calls)
def _do_test_activity_filter(self, expected_measures, fake_batch):
    url = netutils.urlsplit("gnocchi://")
    d = gnocchi.GnocchiPublisher(self.conf.conf, url)
    d.publish_samples(self.samples)
    self.assertEqual(1, len(fake_batch.mock_calls))
    measures = fake_batch.mock_calls[0][1][0]
    self.assertEqual(
        expected_measures,
        sum(len(m) for rid in measures for m in measures[rid].values()))
def _do_test_activity_filter(self, expected_measures, fake_batch, __):
    url = netutils.urlsplit("gnocchi://")
    d = gnocchi.GnocchiPublisher(self.conf.conf, url)
    d.publish_samples(self.samples)
    fake_batch.assert_called_with(
        mock.ANY, mock.ANY,
        {'metrics': 1, 'resources': 1, 'measures': expected_measures})
def test_unhandled_meter(self, fake_batch):
    samples = [sample.Sample(
        name='unknown.meter',
        unit='GB',
        type=sample.TYPE_GAUGE,
        volume=2,
        user_id='test_user',
        project_id='test_project',
        source='openstack',
        timestamp='2014-05-08 20:23:48.028195',
        resource_id='randomid',
        resource_metadata={})
    ]
    url = netutils.urlsplit("gnocchi://")
    d = gnocchi.GnocchiPublisher(self.conf.conf, url)
    d.publish_samples(samples)
    self.assertEqual(0, len(fake_batch.call_args[0][1]))
def test_update_event_workflow(self, fakeclient_cls):
    url = netutils.urlsplit("gnocchi://")
    self.publisher = gnocchi.GnocchiPublisher(self.conf.conf, url)

    fakeclient = fakeclient_cls.return_value

    now = timeutils.utcnow()
    self.useFixture(utils_fixture.TimeFixture(now))

    expected_calls = [
        mock.call.resource.update(
            'volume', '156b8d3f-ad99-429b-b84c-3f263fb2a801',
            {'project_id': '85bc015f7a2342348593077a927c4aaa'}),
    ]

    self.publisher.publish_events([VOLUME_TRANSFER_ACCEPT_END])
    self.assertEqual(1, len(fakeclient.mock_calls))
    for call in expected_calls:
        self.assertIn(call, fakeclient.mock_calls)
def main():
    args = get_parser().parse_args()

    storage_conn = get_native_storage_conn(args.native_metering_connection)
    total_amount = count_samples(storage_conn, args.start_timestamp,
                                 args.end_timestamp)
    print('%s samples will be migrated to Gnocchi.' % total_amount)

    # NOTE: we need service credentials to init gnocchiclient
    config_file = ([args.ceilometer_config_file]
                   if args.ceilometer_config_file else None)
    gnocchi_conf = service.prepare_service([], config_file)
    logger = log.getLogger()
    log_conf = cfg.ConfigOpts()
    log.register_options(log_conf)
    log_conf.set_override('log_file', args.log_file)
    log_conf.set_override('debug', True)
    log.setup(log_conf, 'ceilometer_migration')

    time_filters = []
    if args.start_timestamp:
        time_filters.append({">=": {'timestamp': args.start_timestamp}})
    if args.end_timestamp:
        time_filters.append({"<": {'timestamp': args.end_timestamp}})

    gnocchi_publisher = gnocchi.GnocchiPublisher(gnocchi_conf, "gnocchi://")

    batch_size = args.batch_migration_size
    if total_amount == 'Unknown':
        total_amount = None
    orderby = [{"message_id": "asc"}]
    last_message_id = None
    migrated_amount = 0
    if progress_bar:
        pbar = progress_bar(total=total_amount, ncols=100, unit='samples')
    else:
        pbar = None
    # When the backend cannot report a total, keep paginating until an
    # empty batch is returned; otherwise stop once every counted sample
    # has been migrated.
    while total_amount is None or migrated_amount < total_amount:
        if time_filters and last_message_id:
            filter_expr = {
                'and': time_filters +
                [{">": {"message_id": last_message_id}}]}
        elif time_filters and not last_message_id:
            if len(time_filters) == 1:
                filter_expr = time_filters[0]
            else:
                filter_expr = {'and': time_filters}
        elif not time_filters and last_message_id:
            filter_expr = {">": {"message_id": last_message_id}}
        else:
            filter_expr = None
        samples = storage_conn.query_samples(
            filter_expr=filter_expr, orderby=orderby, limit=batch_size)
        samples = list(samples)
        if not samples:
            break
        last_message_id = samples[-1].message_id
        for sample in samples:
            logger.info('Migrating sample with message_id: %s, meter: %s, '
                        'resource_id: %s' % (sample.message_id,
                                             sample.counter_name,
                                             sample.resource_id))
        samples_dict = [sample.as_dict() for sample in samples]
        gnocchi_publisher.publish_samples(samples_dict)
        length = len(samples)
        migrated_amount += length
        if pbar:
            pbar.update(length)
    logger.info("=========== %s metrics data migration done ============" %
                total_amount)
def test_workflow(self, fakeclient_cls, logger):
    url = netutils.urlsplit("gnocchi://")
    self.publisher = gnocchi.GnocchiPublisher(self.conf.conf, url)

    fakeclient = fakeclient_cls.return_value

    resource_id = self.sample.resource_id.replace("/", "_")
    metric_name = self.sample.name
    gnocchi_id = uuid.uuid4()

    expected_calls = [
        mock.call.metric.batch_resources_metrics_measures(
            {resource_id: {metric_name: self.measures_attributes}},
            create_metrics=True)
    ]
    expected_debug = [
        mock.call('filtered project found: %s',
                  'a2d42c23-d518-46b6-96ab-3fba2e146859'),
    ]

    measures_posted = False
    batch_side_effect = []
    if self.post_measure_fail:
        batch_side_effect += [Exception('boom!')]
    elif not self.resource_exists:
        batch_side_effect += [
            gnocchi_exc.BadRequest(
                400, {"cause": "Unknown resources",
                      'detail': [{
                          'resource_id': gnocchi_id,
                          'original_resource_id': resource_id}]})]

        attributes = self.postable_attributes.copy()
        attributes.update(self.patchable_attributes)
        attributes['id'] = self.sample.resource_id
        attributes['metrics'] = dict(
            (metric_name, {}) for metric_name in self.metric_names)
        for k, v in six.iteritems(attributes['metrics']):
            if k == 'disk.root.size':
                v['unit'] = 'GB'
                continue
            if k == 'hardware.ipmi.node.power':
                v['unit'] = 'W'
                continue
        expected_calls.append(mock.call.resource.create(
            self.resource_type, attributes))

        if self.create_resource_fail:
            fakeclient.resource.create.side_effect = [Exception('boom!')]
        elif self.create_resource_race:
            fakeclient.resource.create.side_effect = [
                gnocchi_exc.ResourceAlreadyExists(409)]
        else:  # not resource_exists
            expected_debug.append(mock.call(
                'Resource %s created', self.sample.resource_id))

        if not self.create_resource_fail:
            expected_calls.append(
                mock.call.metric.batch_resources_metrics_measures(
                    {resource_id: {metric_name: self.measures_attributes}},
                    create_metrics=True)
            )

            if self.retry_post_measures_fail:
                batch_side_effect += [Exception('boom!')]
            else:
                measures_posted = True

    else:
        measures_posted = True

    if measures_posted:
        batch_side_effect += [None]
        expected_debug.append(
            mock.call("%d measures posted against %d metrics through %d "
                      "resources", len(self.measures_attributes), 1, 1))

    if self.patchable_attributes:
        expected_calls.append(mock.call.resource.update(
            self.resource_type, resource_id,
            self.patchable_attributes))
        if self.update_resource_fail:
            fakeclient.resource.update.side_effect = [Exception('boom!')]
        else:
            expected_debug.append(mock.call(
                'Resource %s updated', self.sample.resource_id))

    batch = fakeclient.metric.batch_resources_metrics_measures
    batch.side_effect = batch_side_effect

    self.publisher.publish_samples([self.sample])

    # Check that the last log message is the expected one
    if (self.post_measure_fail
            or self.create_resource_fail
            or self.retry_post_measures_fail
            or (self.update_resource_fail and self.patchable_attributes)):
        logger.error.assert_called_with('boom!', exc_info=True)
    else:
        self.assertEqual(0, logger.error.call_count)

    self.assertEqual(expected_calls, fakeclient.mock_calls)
    self.assertEqual(expected_debug, logger.debug.mock_calls)
def test_metric_match(self):
    pub = gnocchi.GnocchiPublisher(self.conf.conf,
                                   netutils.urlsplit("gnocchi://"))
    self.assertIn('image.size', pub.metric_map['image.size'].metrics)
def test_config_load(self):
    url = netutils.urlsplit("gnocchi://")
    d = gnocchi.GnocchiPublisher(self.conf.conf, url)
    names = [rd.cfg['resource_type'] for rd in d.resources_definition]
    self.assertIn('instance', names)
    self.assertIn('volume', names)
def test_workflow(self, fakeclient_cls, logger):
    fakeclient = fakeclient_cls.return_value

    resource_id = self.sample.resource_id.replace("/", "_")
    metric_name = self.sample.name
    gnocchi_id = uuid.uuid4()

    expected_calls = [
        mock.call.archive_policy.create({"name": "ceilometer-low",
                                         "back_window": 0,
                                         "aggregation_methods": ["mean"],
                                         "definition": mock.ANY}),
        mock.call.archive_policy.create({"name": "ceilometer-low-rate",
                                         "back_window": 0,
                                         "aggregation_methods": [
                                             "mean", "rate:mean"],
                                         "definition": mock.ANY}),
        mock.call.archive_policy.create({"name": "ceilometer-high",
                                         "back_window": 0,
                                         "aggregation_methods": ["mean"],
                                         "definition": mock.ANY}),
        mock.call.archive_policy.create({"name": "ceilometer-high-rate",
                                         "back_window": 0,
                                         "aggregation_methods": [
                                             "mean", "rate:mean"],
                                         "definition": mock.ANY}),
        mock.call.metric.batch_resources_metrics_measures(
            {resource_id: {metric_name: self.metric_attributes}},
            create_metrics=True)
    ]
    expected_debug = [
        mock.call('filtered project found: %s',
                  'a2d42c23-d518-46b6-96ab-3fba2e146859'),
        mock.call('Processing sample [%s] for resource ID [%s].',
                  self.sample, resource_id),
    ]

    measures_posted = False
    batch_side_effect = []
    if self.post_measure_fail:
        batch_side_effect += [Exception('boom!')]
    elif not self.resource_exists:
        batch_side_effect += [
            gnocchi_exc.BadRequest(
                400, {"cause": "Unknown resources",
                      'detail': [{
                          'resource_id': gnocchi_id,
                          'original_resource_id': resource_id}]})]

        attributes = self.postable_attributes.copy()
        attributes.update(self.patchable_attributes)
        attributes['id'] = self.sample.resource_id
        expected_calls.append(mock.call.resource.create(
            self.resource_type, attributes))

        if self.create_resource_fail:
            fakeclient.resource.create.side_effect = [Exception('boom!')]
        elif self.create_resource_race:
            fakeclient.resource.create.side_effect = [
                gnocchi_exc.ResourceAlreadyExists(409)]
        else:  # not resource_exists
            expected_debug.append(mock.call(
                'Resource %s created', self.sample.resource_id))

        if not self.create_resource_fail:
            expected_calls.append(
                mock.call.metric.batch_resources_metrics_measures(
                    {resource_id: {metric_name: self.metric_attributes}},
                    create_metrics=True)
            )

            if self.retry_post_measures_fail:
                batch_side_effect += [Exception('boom!')]
            else:
                measures_posted = True

    else:
        measures_posted = True

    if measures_posted:
        batch_side_effect += [None]
        expected_debug.append(
            mock.call("%d measures posted against %d metrics through %d "
                      "resources",
                      len(self.metric_attributes["measures"]), 1, 1))

    if self.patchable_attributes:
        expected_calls.append(mock.call.resource.update(
            self.resource_type, resource_id,
            self.patchable_attributes))
        if self.update_resource_fail:
            fakeclient.resource.update.side_effect = [Exception('boom!')]
        else:
            expected_debug.append(mock.call(
                'Resource %s updated', self.sample.resource_id))

    batch = fakeclient.metric.batch_resources_metrics_measures
    batch.side_effect = batch_side_effect

    url = netutils.urlsplit("gnocchi://")
    publisher = gnocchi.GnocchiPublisher(self.conf.conf, url)
    publisher.publish_samples([self.sample])

    # Check that the last log message is the expected one
    if (self.post_measure_fail
            or self.create_resource_fail
            or self.retry_post_measures_fail
            or (self.update_resource_fail and self.patchable_attributes)):
        logger.error.assert_called_with('boom!', exc_info=True)
    else:
        self.assertEqual(0, logger.error.call_count)

    self.assertEqual(expected_calls, fakeclient.mock_calls)
    self.assertEqual(expected_debug, logger.debug.mock_calls)