def create(self, metric, refetch_metric=True):
    """Create a metric.

    :param metric: The metric attributes; if ``resource_id`` is set,
                   ``name`` must be set as well.
    :type metric: dict
    :param refetch_metric: Re-fetch the metric after creating it, so the
                           caller gets the same payload a GET would return.
    :type refetch_metric: bool
    """
    resource_id = metric.get('resource_id')
    if resource_id is None:
        # Standalone metric: POST directly to the metric endpoint.
        metric = self._post(self.metric_url, headers={
            'Content-Type': "application/json"
        }, data=jsonutils.dumps(metric)).json()
        # FIXME(sileht): create and get have a
        # different output: LP#1497171
        if refetch_metric:
            return self.get(metric["id"])
        return metric

    metric_name = metric.get('name')
    if metric_name is None:
        raise TypeError("metric_name is required if resource_id is set")

    # The resource id is carried by the URL, so it must not stay in the
    # request body; the body maps metric name -> attributes.
    del metric['resource_id']
    resource_id = utils.encode_resource_id(resource_id)
    metric = {metric_name: metric}
    metric = self._post(self.resource_url % resource_id,
                        headers={'Content-Type': "application/json"},
                        data=jsonutils.dumps(metric))
    return self.get(metric_name, resource_id)
def create(self, metric, refetch_metric=True):
    """Create a metric.

    :param metric: The metric
    :type metric: dict
    """
    json_headers = {'Content-Type': "application/json"}
    resource_id = metric.get('resource_id')

    if resource_id is None:
        # Standalone metric: POST straight to the metric endpoint.
        created = self._post(
            self.metric_url,
            headers=json_headers,
            data=jsonutils.dumps(metric)).json()
        # FIXME(sileht): create and get have a
        # different output: LP#1497171
        return self.get(created["id"]) if refetch_metric else created

    metric_name = metric.get('name')
    if metric_name is None:
        raise TypeError("metric_name is required if resource_id is set")

    # Named metric on a resource: the id travels in the URL and the body
    # maps the metric name to its attributes.
    del metric['resource_id']
    resource_id = utils.encode_resource_id(resource_id)
    self._post(
        self.resource_url % resource_id,
        headers=json_headers,
        data=jsonutils.dumps({metric_name: metric}))
    return self.get(metric_name, resource_id)
def history(self, resource_type, resource_id, details=False,
            limit=None, marker=None, sorts=None):
    """Get the history of a resource.

    :param resource_type: Type of the resource
    :type resource_type: str
    :param resource_id: ID of the resource
    :type resource_id: str
    :param details: Show all attributes of resources
    :type details: bool
    :param limit: maximum number of resources to return
    :type limit: int
    :param marker: the last item of the previous page; we return the
                   next results after this value.
    :type marker: str
    :param sorts: list of resource attributes to order by. (example
                  ["user_id:desc-nullslast", "project_id:asc"])
    :type sorts: list of str
    """
    # history=False here: the pagination helper's second flag is not the
    # history flag of this endpoint (the URL already targets /history).
    qs = _get_pagination_options(details, False, limit, marker, sorts)
    resource_id = utils.encode_resource_id(resource_id)
    url = "%s%s/%s/history?%s" % (self.url, resource_type,
                                  resource_id, qs)
    return self._get(url).json()
def delete(self, resource_id):
    """Delete a resource.

    :param resource_id: ID of the resource
    :type resource_id: str
    """
    encoded_id = utils.encode_resource_id(resource_id)
    self._delete("%sgeneric/%s" % (self.url, encoded_id))
def get_measures(self, metric, start=None, stop=None, aggregation=None,
                 granularity=None, resource_id=None, refresh=False,
                 resample=None, **kwargs):
    """Get measurements of a metric.

    :param metric: ID or Name of the metric
    :type metric: str
    :param start: beginning of the period
    :type start: timestamp
    :param stop: end of the period
    :type stop: timestamp
    :param aggregation: aggregation to retrieve
    :type aggregation: str
    :param granularity: granularity to retrieve (in seconds)
    :type granularity: int
    :param resource_id: ID of the resource (required to get a metric
                        by name)
    :type resource_id: str
    :param refresh: force aggregation of all known measures
    :type refresh: bool
    :param resample: resample measures to new granularity
    :type resample: float

    All other arguments are dedicated to the custom aggregation method
    and passed as-is to Gnocchi.
    """
    # datetime bounds are serialized to ISO 8601 for the query string.
    if isinstance(start, datetime.datetime):
        start = start.isoformat()
    if isinstance(stop, datetime.datetime):
        stop = stop.isoformat()

    params = dict(start=start, stop=stop, aggregation=aggregation,
                  granularity=granularity, refresh=refresh,
                  resample=resample)
    params.update(kwargs)
    if resource_id is None:
        # A bare metric must be addressed by UUID.
        self._ensure_metric_is_uuid(metric)
        url = self.metric_url + metric + "/measures"
    else:
        resource_id = utils.encode_resource_id(resource_id)
        url = self.resource_url % resource_id + metric + "/measures"
    return self._get(url, params=params).json()
def get(self, resource_type, resource_id, history=False):
    """Get a resource.

    :param resource_type: Type of the resource
    :type resource_type: str
    :param resource_id: ID of the resource
    :type resource_id: str
    :param history: Show the history of the resource
    :type history: bool
    """
    # Append "/history" only when the caller asked for it.
    suffix = "/history" if history else ""
    encoded_id = utils.encode_resource_id(resource_id)
    url = "%s%s/%s%s" % (self.url, resource_type, encoded_id, suffix)
    return self._get(url).json()
def delete(self, metric, resource_id=None):
    """Delete a metric.

    :param metric: ID or Name of the metric
    :type metric: str
    :param resource_id: ID of the resource (required to get a metric
                        by name)
    :type resource_id: str
    """
    if resource_id is not None:
        # Named metric: address it through its owning resource.
        encoded_id = utils.encode_resource_id(resource_id)
        target = self.resource_url % encoded_id + metric
    else:
        self._ensure_metric_is_uuid(metric)
        target = self.metric_url + metric
    self._delete(target)
def get(self, metric, resource_id=None):
    """Get a metric.

    :param metric: ID or Name of the metric
    :type metric: str
    :param resource_id: ID of the resource (required to get a metric
                        by name)
    :type resource_id: str
    """
    if resource_id is not None:
        # Named metric: address it through its owning resource.
        encoded_id = utils.encode_resource_id(resource_id)
        target = (self.resource_url % encoded_id) + metric
    else:
        self._ensure_metric_is_uuid(metric)
        target = self.metric_url + metric
    return self._get(target).json()
def update(self, resource_type, resource_id, resource):
    """Update a resource.

    :param resource_type: Type of the resource
    :type resource_type: str
    :param resource_id: ID of the resource
    :type resource_id: str
    :param resource: Attribute of the resource
    :type resource: dict
    """
    encoded_id = utils.encode_resource_id(resource_id)
    url = "%s%s/%s" % (self.url, resource_type, encoded_id)
    response = self._patch(
        url,
        headers={'Content-Type': "application/json"},
        data=jsonutils.dumps(resource))
    return response.json()
def update(self, resource_type, resource_id, resource):
    """Update a resource.

    :param resource_type: Type of the resource
    :type resource_type: str
    :param resource_id: ID of the resource
    :type resource_id: str
    :param resource: Attribute of the resource
    :type resource: dict
    """
    resource_id = utils.encode_resource_id(resource_id)
    return self._patch(self.url + resource_type + "/" + resource_id,
                       headers={
                           'Content-Type': "application/json"
                       },
                       data=jsonutils.dumps(resource)).json()
def get_measures(self, metric, start=None, stop=None, aggregation=None,
                 granularity=None, resource_id=None, **kwargs):
    """Get measurements of a metric.

    :param metric: ID or Name of the metric
    :type metric: str
    :param start: beginning of the period
    :type start: timestamp
    :param stop: end of the period
    :type stop: timestamp
    :param aggregation: aggregation to retrieve
    :type aggregation: str
    :param granularity: granularity to retrieve (in seconds)
    :type granularity: int
    :param resource_id: ID of the resource (required to get a metric
                        by name)
    :type resource_id: str

    All other arguments are dedicated to the custom aggregation method
    and passed as-is to Gnocchi.
    """
    # datetime bounds are serialized to ISO 8601 for the query string.
    if isinstance(start, datetime.datetime):
        start = start.isoformat()
    if isinstance(stop, datetime.datetime):
        stop = stop.isoformat()

    params = dict(start=start, stop=stop, aggregation=aggregation,
                  granularity=granularity)
    params.update(kwargs)
    if resource_id is None:
        # A bare metric must be addressed by UUID.
        self._ensure_metric_is_uuid(metric)
        url = self.metric_url + metric + "/measures"
    else:
        resource_id = utils.encode_resource_id(resource_id)
        url = self.resource_url % resource_id + metric + "/measures"
    return self._get(url, params=params).json()
def add_measures(self, metric, measures, resource_id=None):
    """Add measurements to a metric.

    :param metric: ID or Name of the metric
    :type metric: str
    :param resource_id: ID of the resource (required to get a metric
                        by name)
    :type resource_id: str
    :param measures: measurements
    :type measures: list of dict(timestamp=timestamp, value=float)
    """
    if resource_id is not None:
        # Named metric: address it through its owning resource.
        encoded_id = utils.encode_resource_id(resource_id)
        target = self.resource_url % encoded_id + metric + "/measures"
    else:
        self._ensure_metric_is_uuid(metric)
        target = self.metric_url + metric + "/measures"
    return self._post(target,
                      headers={"Content-Type": "application/json"},
                      data=jsonutils.dumps(measures))
def add_measures(self, metric, measures, resource_id=None):
    """Add measurements to a metric.

    :param metric: ID or Name of the metric
    :type metric: str
    :param resource_id: ID of the resource (required to get a metric
                        by name)
    :type resource_id: str
    :param measures: measurements
    :type measures: list of dict(timestamp=timestamp, value=float)
    """
    if resource_id is None:
        # A bare metric must be addressed by UUID.
        self._ensure_metric_is_uuid(metric)
        url = self.metric_url + metric + "/measures"
    else:
        resource_id = utils.encode_resource_id(resource_id)
        url = self.resource_url % resource_id + metric + "/measures"
    return self._post(url,
                      headers={'Content-Type': "application/json"},
                      data=jsonutils.dumps(measures))
def record_metering_data(self, data):
    """Dispatch ceilometer samples to Gnocchi as batched measures.

    Samples are grouped by (resource_id, counter_name); for each group
    the measures are accumulated and the owning resource's attributes
    are collected, then everything is pushed in one batch call.
    """
    # We may have receive only one counter on the wire
    if not isinstance(data, list):
        data = [data]
    # NOTE(sileht): skip sample generated by gnocchi itself
    data = [s for s in data if not self._is_gnocchi_activity(s)]

    # FIXME(sileht): This method bulk the processing of samples
    # grouped by resource_id and metric_name but this is not
    # efficient yet because the data received here doesn't often
    # contains a lot of different kind of samples
    # So perhaps the next step will be to pool the received data from
    # message bus.

    # groupby requires its input pre-sorted on the same key.
    data.sort(key=lambda s: (s['resource_id'], s['counter_name']))
    resource_grouped_samples = itertools.groupby(
        data, key=operator.itemgetter('resource_id'))

    gnocchi_data = {}
    measures = {}
    stats = dict(measures=0, resources=0, metrics=0)
    for resource_id, samples_of_resource in resource_grouped_samples:
        stats['resources'] += 1
        metric_grouped_samples = itertools.groupby(
            list(samples_of_resource),
            key=operator.itemgetter('counter_name'))

        # NOTE(sileht): We convert resource id to Gnocchi format
        # because batch_resources_metrics_measures exception
        # returns this id and not the ceilometer one
        gnocchi_id = gnocchi_utils.encode_resource_id(resource_id)
        res_info = gnocchi_data[gnocchi_id] = {}
        for metric_name, samples in metric_grouped_samples:
            stats['metrics'] += 1
            samples = list(samples)
            rd = self._get_resource_definition(metric_name)
            if rd is None:
                LOG.warning(_LW("metric %s is not handled by Gnocchi") %
                            metric_name)
                continue
            if rd.cfg.get("ignore"):
                continue

            res_info['resource_type'] = rd.cfg['resource_type']
            res_info.setdefault("resource", {}).update({
                "id": resource_id,
                "user_id": samples[0]['user_id'],
                "project_id": samples[0]['project_id'],
                "metrics": rd.metrics,
            })
            for sample in samples:
                res_info.setdefault("resource_extra", {}).update(
                    rd.attributes(sample))
                m = measures.setdefault(gnocchi_id, {}).setdefault(
                    metric_name, [])
                m.append({'timestamp': sample['timestamp'],
                          'value': sample['counter_volume']})

            # Only reached for handled metrics (the continues above skip
            # this), so measures[gnocchi_id][metric_name] exists here.
            stats['measures'] += len(measures[gnocchi_id][metric_name])
            res_info["resource"].update(res_info["resource_extra"])

    try:
        self.batch_measures(measures, gnocchi_data, stats)
    except gnocchi_exc.ClientException as e:
        LOG.error(six.text_type(e))
    except Exception as e:
        LOG.error(six.text_type(e), exc_info=True)

    # Push resource attribute updates separately from the measures.
    for gnocchi_id, info in gnocchi_data.items():
        resource = info["resource"]
        resource_type = info["resource_type"]
        resource_extra = info["resource_extra"]
        if not resource_extra:
            continue
        try:
            self._if_not_cached("update", resource_type, resource,
                                self._update_resource, resource_extra)
        except gnocchi_exc.ClientException as e:
            LOG.error(six.text_type(e))
        except Exception as e:
            LOG.error(six.text_type(e), exc_info=True)
def test_workflow(self, fakeclient_cls, logger):
    """Exercise the dispatcher workflow against a mocked gnocchi client.

    The scenario flags (post_measure_fail, resource_exists, ...) drive
    which client calls and debug/error logs are expected.
    """
    self.dispatcher = gnocchi.GnocchiDispatcher(self.conf.conf)
    fakeclient = fakeclient_cls.return_value

    # FIXME(sileht): we don't use urlparse.quote here
    # to ensure / is converted in %2F
    # temporary disabled until we find a solution
    # on gnocchi side. Current gnocchiclient doesn't
    # encode the resource_id
    resource_id = self.sample['resource_id']  # .replace("/", "%2F"),
    metric_name = self.sample['counter_name']

    gnocchi_id = gnocchi_utils.encode_resource_id(resource_id)

    # The first batch post is always attempted.
    expected_calls = [
        mock.call.capabilities.list(),
        mock.call.metric.batch_resources_metrics_measures(
            {gnocchi_id: {metric_name: self.measures_attributes}})
    ]
    expected_debug = [
        mock.call('gnocchi project found: %s',
                  'a2d42c23-d518-46b6-96ab-3fba2e146859'),
    ]

    measures_posted = False
    batch_side_effect = []
    if self.post_measure_fail:
        batch_side_effect += [Exception('boom!')]
    elif not self.resource_exists or not self.metric_exists:
        # Unknown metric/resource: the dispatcher is expected to create
        # them and retry the batch.
        batch_side_effect += [
            gnocchi_exc.BadRequest(
                400, "Unknown metrics: %s/%s" % (gnocchi_id,
                                                 metric_name))]

        attributes = self.postable_attributes.copy()
        attributes.update(self.patchable_attributes)
        attributes['id'] = self.sample['resource_id']
        attributes['metrics'] = dict((metric_name, {})
                                     for metric_name in self.metric_names)

        # Some metrics carry an explicit unit in the create payload.
        for k, v in six.iteritems(attributes['metrics']):
            if k == 'disk.root.size':
                v['unit'] = 'GB'
                continue
            if k == 'hardware.ipmi.node.power':
                v['unit'] = 'W'
                continue

        expected_calls.append(mock.call.resource.create(
            self.resource_type, attributes))

        if self.create_resource_fail:
            fakeclient.resource.create.side_effect = [Exception('boom!')]
        elif self.resource_exists:
            fakeclient.resource.create.side_effect = [
                gnocchi_exc.ResourceAlreadyExists(409)]

            # Resource already there: only the metric gets created.
            expected_calls.append(mock.call.metric.create({
                'name': self.sample['counter_name'],
                'unit': self.sample['counter_unit'],
                'resource_id': resource_id}))
            if self.create_metric_fail:
                fakeclient.metric.create.side_effect = [
                    Exception('boom!')]
            elif self.metric_exists:
                fakeclient.metric.create.side_effect = [
                    gnocchi_exc.NamedMetricAreadyExists(409)]
            else:
                fakeclient.metric.create.side_effect = [None]

        else:  # not resource_exists
            expected_debug.append(mock.call(
                'Resource %s created', self.sample['resource_id']))

        if not self.create_resource_fail and not self.create_metric_fail:
            # Retry of the batch after resource/metric creation.
            expected_calls.append(
                mock.call.metric.batch_resources_metrics_measures(
                    {gnocchi_id: {metric_name: self.measures_attributes}})
            )
            if self.retry_post_measures_fail:
                batch_side_effect += [Exception('boom!')]
            else:
                measures_posted = True
    else:
        measures_posted = True

    if measures_posted:
        batch_side_effect += [None]
        expected_debug.append(
            mock.call("%(measures)d measures posted against %(metrics)d "
                      "metrics through %(resources)d resources", dict(
                          measures=len(self.measures_attributes),
                          metrics=1, resources=1))
        )

    if self.patchable_attributes:
        expected_calls.append(mock.call.resource.update(
            self.resource_type, resource_id,
            self.patchable_attributes))
        if self.update_resource_fail:
            fakeclient.resource.update.side_effect = [Exception('boom!')]
        else:
            expected_debug.append(mock.call(
                'Resource %s updated', self.sample['resource_id']))

    batch = fakeclient.metric.batch_resources_metrics_measures
    batch.side_effect = batch_side_effect

    self.dispatcher.verify_and_record_metering_data([self.sample])

    # Check that the last log message is the expected one
    if (self.post_measure_fail
            or self.create_metric_fail
            or self.create_resource_fail
            or self.retry_post_measures_fail
            or (self.update_resource_fail and self.patchable_attributes)):
        logger.error.assert_called_with('boom!', exc_info=True)
    else:
        self.assertEqual(0, logger.error.call_count)

    self.assertEqual(expected_calls, fakeclient.mock_calls)
    self.assertEqual(expected_debug, logger.debug.mock_calls)
def test_workflow(self, fakeclient_cls, logger):
    """Exercise the dispatcher workflow against a mocked gnocchi client.

    The scenario flags (post_measure_fail, resource_exists, ...) drive
    which client calls and debug/error logs are expected.
    """
    self.dispatcher = gnocchi.GnocchiDispatcher(self.conf.conf)
    fakeclient = fakeclient_cls.return_value

    # FIXME(sileht): we don't use urlparse.quote here
    # to ensure / is converted in %2F
    # temporary disabled until we find a solution
    # on gnocchi side. Current gnocchiclient doesn't
    # encode the resource_id
    resource_id = self.sample['resource_id']  # .replace("/", "%2F"),
    metric_name = self.sample['counter_name']

    gnocchi_id = gnocchi_utils.encode_resource_id(resource_id)

    # The first batch post is always attempted.
    expected_calls = [
        mock.call.capabilities.list(),
        mock.call.metric.batch_resources_metrics_measures(
            {gnocchi_id: {
                metric_name: self.measures_attributes
            }})
    ]
    expected_debug = [
        mock.call('gnocchi project found: %s',
                  'a2d42c23-d518-46b6-96ab-3fba2e146859'),
    ]

    measures_posted = False
    batch_side_effect = []
    if self.post_measure_fail:
        batch_side_effect += [Exception('boom!')]
    elif not self.resource_exists or not self.metric_exists:
        # Unknown metric/resource: the dispatcher is expected to create
        # them and retry the batch.
        batch_side_effect += [
            gnocchi_exc.BadRequest(
                400, "Unknown metrics: %s/%s" % (gnocchi_id, metric_name))
        ]

        attributes = self.postable_attributes.copy()
        attributes.update(self.patchable_attributes)
        attributes['id'] = self.sample['resource_id']
        attributes['metrics'] = dict(
            (metric_name, {}) for metric_name in self.metric_names)
        expected_calls.append(
            mock.call.resource.create(self.resource_type, attributes))

        if self.create_resource_fail:
            fakeclient.resource.create.side_effect = [Exception('boom!')]
        elif self.resource_exists:
            fakeclient.resource.create.side_effect = [
                gnocchi_exc.ResourceAlreadyExists(409)
            ]

            # Resource already there: only the metric gets created.
            expected_calls.append(
                mock.call.metric.create({
                    'name': self.sample['counter_name'],
                    'resource_id': resource_id
                }))
            if self.create_metric_fail:
                fakeclient.metric.create.side_effect = [
                    Exception('boom!')]
            elif self.metric_exists:
                fakeclient.metric.create.side_effect = [
                    gnocchi_exc.NamedMetricAreadyExists(409)
                ]
            else:
                fakeclient.metric.create.side_effect = [None]

        else:  # not resource_exists
            expected_debug.append(
                mock.call('Resource %s created',
                          self.sample['resource_id']))

        if not self.create_resource_fail and not self.create_metric_fail:
            # Retry of the batch after resource/metric creation.
            expected_calls.append(
                mock.call.metric.batch_resources_metrics_measures(
                    {gnocchi_id: {
                        metric_name: self.measures_attributes
                    }}))
            if self.retry_post_measures_fail:
                batch_side_effect += [Exception('boom!')]
            else:
                measures_posted = True
    else:
        measures_posted = True

    if measures_posted:
        batch_side_effect += [None]
        expected_debug.append(
            mock.call(
                "%(measures)d measures posted against %(metrics)d "
                "metrics through %(resources)d resources",
                dict(measures=len(self.measures_attributes),
                     metrics=1,
                     resources=1)))

    if self.patchable_attributes:
        expected_calls.append(
            mock.call.resource.update(self.resource_type, resource_id,
                                      self.patchable_attributes))
        if self.update_resource_fail:
            fakeclient.resource.update.side_effect = [Exception('boom!')]
        else:
            expected_debug.append(
                mock.call('Resource %s updated',
                          self.sample['resource_id']))

    batch = fakeclient.metric.batch_resources_metrics_measures
    batch.side_effect = batch_side_effect

    self.dispatcher.record_metering_data([self.sample])

    # Check that the last log message is the expected one
    if (self.post_measure_fail or self.create_metric_fail
            or self.create_resource_fail or self.retry_post_measures_fail
            or (self.update_resource_fail and self.patchable_attributes)):
        logger.error.assert_called_with('boom!', exc_info=True)
    else:
        self.assertEqual(0, logger.error.call_count)

    self.assertEqual(expected_calls, fakeclient.mock_calls)
    self.assertEqual(expected_debug, logger.debug.mock_calls)
def record_metering_data(self, data):
    """Dispatch ceilometer samples to Gnocchi as batched measures.

    Samples are grouped by (resource_id, counter_name); for each group
    the measures are accumulated and the owning resource's attributes
    (including per-metric units) are collected, then pushed in one
    batch call.
    """
    # We may have receive only one counter on the wire
    if not isinstance(data, list):
        data = [data]
    # NOTE(sileht): skip sample generated by gnocchi itself
    data = [s for s in data if not self._is_gnocchi_activity(s)]

    # FIXME(sileht): This method bulk the processing of samples
    # grouped by resource_id and metric_name but this is not
    # efficient yet because the data received here doesn't often
    # contains a lot of different kind of samples
    # So perhaps the next step will be to pool the received data from
    # message bus.

    # groupby requires its input pre-sorted on the same key.
    data.sort(key=lambda s: (s['resource_id'], s['counter_name']))
    resource_grouped_samples = itertools.groupby(
        data, key=operator.itemgetter('resource_id'))

    gnocchi_data = {}
    measures = {}
    stats = dict(measures=0, resources=0, metrics=0)
    for resource_id, samples_of_resource in resource_grouped_samples:
        stats['resources'] += 1
        metric_grouped_samples = itertools.groupby(
            list(samples_of_resource),
            key=operator.itemgetter('counter_name'))

        # NOTE(sileht): We convert resource id to Gnocchi format
        # because batch_resources_metrics_measures exception
        # returns this id and not the ceilometer one
        gnocchi_id = gnocchi_utils.encode_resource_id(resource_id)
        res_info = {}
        for metric_name, samples in metric_grouped_samples:
            stats['metrics'] += 1
            samples = list(samples)
            rd = self._get_resource_definition_from_metric(metric_name)
            if rd is None:
                LOG.warning(
                    _LW("metric %s is not handled by Gnocchi") %
                    metric_name)
                continue
            if rd.cfg.get("ignore"):
                continue

            res_info['resource_type'] = rd.cfg['resource_type']
            res_info.setdefault("resource", {}).update({
                "id": resource_id,
                "user_id": samples[0]['user_id'],
                "project_id": samples[0]['project_id'],
                "metrics": rd.metrics,
            })
            for sample in samples:
                res_info.setdefault("resource_extra", {}).update(
                    rd.sample_attributes(sample))
                m = measures.setdefault(gnocchi_id,
                                        {}).setdefault(metric_name, [])
                m.append({
                    'timestamp': sample['timestamp'],
                    'value': sample['counter_volume']
                })
                # Propagate the sample's unit onto the metric definition
                # posted with the resource.
                unit = sample['counter_unit']
                metric = sample['counter_name']
                res_info['resource']['metrics'][metric]['unit'] = unit

            # Only reached for handled metrics (the continues above skip
            # this), so measures[gnocchi_id][metric_name] exists here.
            stats['measures'] += len(measures[gnocchi_id][metric_name])
            res_info["resource"].update(res_info["resource_extra"])

        # Only keep resources for which at least one metric was handled.
        if res_info:
            gnocchi_data[gnocchi_id] = res_info

    try:
        self.batch_measures(measures, gnocchi_data, stats)
    except (gnocchi_exc.ClientException,
            ka_exceptions.ConnectFailure) as e:
        LOG.error(six.text_type(e))
    except Exception as e:
        LOG.error(six.text_type(e), exc_info=True)

    # Push resource attribute updates separately from the measures.
    for gnocchi_id, info in gnocchi_data.items():
        resource = info["resource"]
        resource_type = info["resource_type"]
        resource_extra = info["resource_extra"]
        if not resource_extra:
            continue
        try:
            self._if_not_cached("update", resource_type, resource,
                                self._update_resource, resource_extra)
        except gnocchi_exc.ClientException as e:
            LOG.error(six.text_type(e))
        except Exception as e:
            LOG.error(six.text_type(e), exc_info=True)
class ResourceClientTest(base.ClientTestBase):
    """Functional tests for the `gnocchi resource` CLI commands."""

    # Plain UUID resource and a raw id containing "/" to exercise
    # resource-id encoding.
    RESOURCE_ID = str(uuid.uuid4())
    RAW_RESOURCE_ID2 = str(uuid.uuid4()) + "/foo"
    RESOURCE_ID2 = utils.encode_resource_id(RAW_RESOURCE_ID2)
    PROJECT_ID = str(uuid.uuid4())

    def test_help(self):
        """The resource subcommands all expose help text."""
        self.gnocchi("help", params="resource list")
        self.gnocchi("help", params="resource history")
        self.gnocchi("help", params="resource search")

    def test_resource_scenario(self):
        """Full create/update/show/history/search/delete CLI round-trip."""
        apname = str(uuid.uuid4())
        # Create an archive policy
        self.gnocchi(
            u'archive-policy', params=u"create %s"
            u" -d granularity:1s,points:86400" % apname)

        # CREATE
        result = self.gnocchi(
            u'resource', params=u"create %s --type generic" %
            self.RESOURCE_ID)
        resource = self.details_multiple(result)[0]
        self.assertEqual(self.RESOURCE_ID, resource["id"])
        self.assertEqual('None', resource["project_id"])
        self.assertNotEqual('None', resource["started_at"])

        # CREATE FAIL
        result = self.gnocchi('resource',
                              params="create generic -a id:%s" %
                              self.RESOURCE_ID,
                              fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Resource %s already exists (HTTP 409)" % self.RESOURCE_ID)

        # UPDATE
        result = self.gnocchi(
            'resource', params=("update -t generic %s -a project_id:%s "
                                "-n temperature:%s" % (self.RESOURCE_ID,
                                                       self.PROJECT_ID,
                                                       apname)))
        resource_updated = self.details_multiple(result)[0]
        self.assertEqual(self.RESOURCE_ID, resource_updated["id"])
        self.assertEqual(self.PROJECT_ID, resource_updated["project_id"])
        self.assertEqual(resource["started_at"],
                         resource_updated["started_at"])
        self.assertIn("temperature", resource_updated["metrics"])

        # GET
        result = self.gnocchi(
            'resource', params="show -t generic %s" % self.RESOURCE_ID)
        resource_got = self.details_multiple(result)[0]
        self.assertEqual(self.RESOURCE_ID, resource_got["id"])
        self.assertEqual(self.PROJECT_ID, resource_got["project_id"])
        self.assertEqual(resource["started_at"],
                         resource_got["started_at"])
        self.assertIn("temperature", resource_updated["metrics"])

        # HISTORY: the earlier update created a second revision.
        result = self.gnocchi(
            'resource', params="history --type generic %s" %
            self.RESOURCE_ID)
        resource_history = self.parser.listing(result)
        self.assertEqual(2, len(resource_history))
        self.assertEqual(self.RESOURCE_ID, resource_history[0]["id"])
        self.assertEqual(self.RESOURCE_ID, resource_history[1]["id"])
        self.assertEqual("None", resource_history[0]["project_id"])
        self.assertEqual(self.PROJECT_ID,
                         resource_history[1]["project_id"])

        # LIST
        result = self.gnocchi('resource', params="list -t generic")
        self.assertIn(self.RESOURCE_ID,
                      [r['id'] for r in self.parser.listing(result)])
        resource_list = [r for r in self.parser.listing(result)
                         if r['id'] == self.RESOURCE_ID][0]
        self.assertEqual(self.RESOURCE_ID, resource_list["id"])
        self.assertEqual(self.PROJECT_ID, resource_list["project_id"])
        self.assertEqual(resource["started_at"],
                         resource_list["started_at"])

        # Search
        result = self.gnocchi('resource',
                              params=("search --type generic "
                                      "'project_id=%s'"
                                      ) % self.PROJECT_ID)
        resource_list = self.parser.listing(result)[0]
        self.assertEqual(self.RESOURCE_ID, resource_list["id"])
        self.assertEqual(self.PROJECT_ID, resource_list["project_id"])
        self.assertEqual(resource["started_at"],
                         resource_list["started_at"])

        # UPDATE with Delete metric
        result = self.gnocchi(
            'resource', params=("update -t generic %s -a project_id:%s "
                                "-d temperature" % (self.RESOURCE_ID,
                                                    self.PROJECT_ID)))
        resource_updated = self.details_multiple(result)[0]
        self.assertNotIn("temperature", resource_updated["metrics"])

        # CREATE 2: raw id with "/" must be stored encoded, with the
        # original id preserved separately.
        result = self.gnocchi(
            'resource', params=("create %s -t generic "
                                "-a project_id:%s"
                                ) % (self.RAW_RESOURCE_ID2,
                                     self.PROJECT_ID))
        resource2 = self.details_multiple(result)[0]
        self.assertEqual(self.RESOURCE_ID2, resource2["id"])
        self.assertEqual(self.RAW_RESOURCE_ID2,
                         resource2["original_resource_id"])
        self.assertEqual(self.PROJECT_ID, resource2["project_id"])
        self.assertNotEqual('None', resource2["started_at"])

        # Search + limit + short
        result = self.gnocchi('resource',
                              params=("search "
                                      "-t generic "
                                      "'project_id=%s' "
                                      "--sort started_at:asc "
                                      "--marker %s "
                                      "--limit 1"
                                      ) % (self.PROJECT_ID,
                                           self.RESOURCE_ID))
        resource_limit = self.parser.listing(result)[0]
        self.assertEqual(self.RESOURCE_ID2, resource_limit["id"])
        self.assertEqual(self.PROJECT_ID, resource_limit["project_id"])
        self.assertEqual(resource2["started_at"],
                         resource_limit["started_at"])

        # DELETE
        result = self.gnocchi('resource',
                              params="delete %s" % self.RESOURCE_ID)
        self.assertEqual("", result)
        result = self.gnocchi('resource',
                              params="delete %s" % self.RESOURCE_ID2)
        self.assertEqual("", result)

        # GET FAIL
        result = self.gnocchi('resource',
                              params="show --type generic %s" %
                              self.RESOURCE_ID,
                              fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Resource %s does not exist (HTTP 404)" % self.RESOURCE_ID)

        # DELETE FAIL
        result = self.gnocchi('resource',
                              params="delete %s" % self.RESOURCE_ID,
                              fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(
            result.split('\n'),
            "Resource %s does not exist (HTTP 404)" % self.RESOURCE_ID)

        # LIST EMPTY
        result = self.gnocchi('resource', params="list -t generic")
        resource_ids = [r['id'] for r in self.parser.listing(result)]
        self.assertNotIn(self.RESOURCE_ID, resource_ids)
        self.assertNotIn(self.RESOURCE_ID2, resource_ids)

        # LIST THE RESOURCE TYPES
        resource_type = ('instance', 'generic', 'volume', 'instance_disk',
                         'stack', 'identity')
        result = self.gnocchi(
            'resource', params="list-types")
        result_list = self.parser.listing(result)
        type_from_list = [t['resource_type'] for t in result_list]
        for one_type in resource_type:
            self.assertIn(one_type, type_from_list)