def _get_resources(self, host_ids):
    # Look up every host in a single indexer round-trip, then create only
    # the resources that are still missing.
    resource_ids = set(utils.ResourceUUID(host_id, self.conf.amqp1d.creator)
                       for host_id in host_ids)
    resources = self.indexer.list_resources(
        resource_type=self.conf.amqp1d.resource_type,
        attribute_filter={"in": {"id": resource_ids}})
    resources_by_host_id = {r.original_resource_id: r for r in resources}

    missing_host_ids = set(host_ids) - set(resources_by_host_id.keys())

    for host_id in missing_host_ids:
        resource_id = utils.ResourceUUID(host_id, self.conf.amqp1d.creator)
        try:
            r = self.indexer.create_resource(
                self.conf.amqp1d.resource_type,
                resource_id,
                self.conf.amqp1d.creator,
                original_resource_id=host_id,
                host=self._hosts[host_id])
        except indexer.ResourceAlreadyExists:
            # Another worker created it in the meantime; fetch it instead.
            r = self.indexer.get_resource(self.conf.amqp1d.resource_type,
                                          resource_id)
        resources_by_host_id[host_id] = r

    return resources_by_host_id
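
# Design note: resource ids are derived deterministically from the host id
# and the configured creator (utils.ResourceUUID), so the bulk "in" filter
# above resolves the common case in one indexer query, and the
# ResourceAlreadyExists handler covers the race where another worker creates
# the same resource concurrently. The returned mapping is keyed by the
# original host id, not by the derived UUID.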
def test_conversion(self):
    self.assertEqual(
        uuid.UUID('ba571521-1de6-5aff-b183-1535fd6eb5d0'),
        utils.ResourceUUID(
            uuid.UUID('ba571521-1de6-5aff-b183-1535fd6eb5d0'),
            "bar"))
    self.assertEqual(uuid.UUID('ba571521-1de6-5aff-b183-1535fd6eb5d0'),
                     utils.ResourceUUID("foo", "bar"))
    self.assertEqual(uuid.UUID('4efb21f6-3d19-5fe3-910b-be8f0f727846'),
                     utils.ResourceUUID("foo", None))
    self.assertEqual(uuid.UUID('853e5c64-f45e-58b2-999c-96df856fbe3d'),
                     utils.ResourceUUID("foo", ""))
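
# A minimal sketch of the behaviour the test above pins down, assuming the
# gnocchi.utils.ResourceUUID contract: a uuid.UUID is returned unchanged, a
# string that already parses as a UUID is converted directly, and any other
# string is mapped to a deterministic name-based (version 5) UUID that also
# folds in the creator, so "foo" owned by "bar", by None and by "" all yield
# different ids. The namespace and separator below are placeholders rather
# than the real constants from gnocchi.utils, so this sketch does not
# reproduce the exact UUIDs asserted in test_conversion.
import uuid

_SKETCH_NAMESPACE = uuid.UUID('00000000-0000-0000-0000-000000000000')  # hypothetical


def resource_uuid_sketch(value, creator):
    if isinstance(value, uuid.UUID):
        return value
    try:
        return uuid.UUID(value)
    except ValueError:
        if creator is None:
            # Keep a missing creator distinct from an empty one.
            creator = "\x00"
        return uuid.uuid5(_SKETCH_NAMESPACE, value + "\x00" + creator)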
def _ensure_resource_exists(self, host_id, host):
    try:
        self.indexer.create_resource(
            self.conf.amqp1d.resource_name,
            utils.ResourceUUID(host_id, self.conf.amqp1d.creator),
            self.conf.amqp1d.creator,
            original_resource_id=host_id,
            host=host)
    except indexer.ResourceAlreadyExists:
        pass  # LOG.debug("Resource %s already exists", host_id)
    else:
        LOG.info("Created resource for %s", host)
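
# Design note: the resource is created unconditionally and a possible
# ResourceAlreadyExists is swallowed, rather than checking for the resource
# first. That keeps the operation a single indexer call, avoids the
# check-then-create race, and logs only when a resource was actually created.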
def post_write(self, db="influxdb"): creator = pecan.request.auth_helper.get_current_user(pecan.request) tag_to_rid = pecan.request.headers.get( "X-Gnocchi-InfluxDB-Tag-Resource-ID", self.DEFAULT_TAG_RESOURCE_ID) while True: encoding, chunk = self._write_get_lines() # If chunk is empty then this is over. if not chunk: break # Compute now on a per-chunk basis now = numpy.datetime64(int(time.time() * 10e8), 'ns') # resources = { resource_id: { # metric_name: [ incoming.Measure(t, v), …], … # }, … # } resources = collections.defaultdict( lambda: collections.defaultdict(list)) for line_number, line in enumerate(chunk.split(b"\n")): # Ignore empty lines if not line: continue try: measurement, tags, fields, timestamp = ( line_protocol.parseString(line.decode())) except (UnicodeDecodeError, SyntaxError, pyparsing.ParseException): api.abort( 400, { "cause": "Value error", "detail": "line", "reason": "Unable to parse line %d" % (line_number + 1), }) if timestamp is None: timestamp = now try: resource_id = tags.pop(tag_to_rid) except KeyError: api.abort( 400, { "cause": "Value error", "detail": "key", "reason": "Unable to find key `%s' in tags" % (tag_to_rid), }) tags_str = (("@" if tags else "") + ",".join( ("%s=%s" % (k, tags[k])) for k in sorted(tags))) for field_name, field_value in six.iteritems(fields): if isinstance(field_value, str): # We do not support field value that are not numerical continue # Metric name is the: # <measurement>.<field_key>@<tag_key>=<tag_value>,… # with tag ordered # Replace "/" with "_" because Gnocchi does not support / # in metric names metric_name = (measurement + "." + field_name + tags_str).replace("/", "_") resources[resource_id][metric_name].append( incoming.Measure(timestamp, field_value)) measures_to_batch = {} for resource_name, metrics_and_measures in six.iteritems( resources): resource_name = resource_name resource_id = utils.ResourceUUID(resource_name, creator=creator) LOG.debug("Getting metrics from resource `%s'", resource_name) timeout = pecan.request.conf.api.operation_timeout metrics = (api.get_or_create_resource_and_metrics.retry_with( stop=tenacity.stop_after_delay(timeout))( creator, resource_id, resource_name, metrics_and_measures.keys(), {}, db)) for metric in metrics: api.enforce("post measures", metric) measures_to_batch.update( dict((metric.id, metrics_and_measures[metric.name]) for metric in metrics if metric.name in metrics_and_measures)) LOG.debug("Add measures batch for %d metrics", len(measures_to_batch)) pecan.request.incoming.add_measures_batch(measures_to_batch) pecan.response.status = 204 if encoding != "chunked": return
def upgrade():
    connection = op.get_bind()

    resource_type_tables = {}
    resource_type_tablenames = dict(
        (rt.name, rt.tablename)
        for rt in connection.execute(resource_type_table.select())
        if rt.tablename != "generic"
    )

    op.drop_constraint("fk_metric_resource_id_resource_id", "metric",
                       type_="foreignkey")
    for name, table in resource_type_tablenames.items():
        op.drop_constraint("fk_%s_id_resource_id" % table, table,
                           type_="foreignkey")

        resource_type_tables[name] = sa.Table(
            table,
            sa.MetaData(),
            sa.Column('id',
                      sqlalchemy_utils.types.uuid.UUIDType(),
                      nullable=False),
        )

    for resource in connection.execute(resource_table.select()):

        if resource.original_resource_id is None:
            # statsd resource has no original_resource_id and is NULL
            continue

        try:
            orig_as_uuid = uuid.UUID(str(resource.original_resource_id))
        except ValueError:
            pass
        else:
            if orig_as_uuid == resource.id:
                continue

        new_original_resource_id = resource.original_resource_id.replace(
            '/', '_')
        if six.PY2:
            new_original_resource_id = new_original_resource_id.encode(
                'utf-8')

        new_id = sa.literal(uuidtype.process_bind_param(
            str(utils.ResourceUUID(
                new_original_resource_id, resource.creator)),
            connection.dialect))

        # resource table
        connection.execute(
            resource_table.update().where(
                resource_table.c.id == resource.id
            ).values(
                id=new_id,
                original_resource_id=new_original_resource_id
            )
        )

        # resource history table
        connection.execute(
            resourcehistory_table.update().where(
                resourcehistory_table.c.id == resource.id
            ).values(
                id=new_id,
                original_resource_id=new_original_resource_id
            )
        )

        if resource.type != "generic":
            rtable = resource_type_tables[resource.type]
            # resource table (type)
            connection.execute(
                rtable.update().where(
                    rtable.c.id == resource.id
                ).values(id=new_id)
            )

        # Metric
        connection.execute(
            metric_table.update().where(
                metric_table.c.resource_id == resource.id
            ).values(
                resource_id=new_id
            )
        )

    for (name, table) in resource_type_tablenames.items():
        op.create_foreign_key("fk_%s_id_resource_id" % table,
                              table, "resource",
                              ("id",), ("id",),
                              ondelete="CASCADE")

    op.create_foreign_key("fk_metric_resource_id_resource_id",
                          "metric", "resource",
                          ("resource_id",), ("id",),
                          ondelete="SET NULL")

    for metric in connection.execute(metric_table.select().where(
            metric_table.c.name.like("%/%"))):
        connection.execute(
            metric_table.update().where(
                metric_table.c.id == metric.id
            ).values(
                name=metric.name.replace('/', '_'),
            )
        )
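
# The migration above relies on a handful of module-level handles that are not
# shown here. A minimal sketch of what it assumes follows; column sets and
# types are trimmed to the attributes upgrade() actually reads or writes, and
# the resource_history table name is assumed, so the real migration module may
# declare more (or slightly different) columns.
import sqlalchemy as sa
import sqlalchemy_utils

uuidtype = sqlalchemy_utils.types.uuid.UUIDType()

resource_type_table = sa.Table(
    'resource_type', sa.MetaData(),
    sa.Column('name', sa.String(255), nullable=False),
    sa.Column('tablename', sa.String(35), nullable=False),
)

resource_table = sa.Table(
    'resource', sa.MetaData(),
    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(), nullable=False),
    sa.Column('original_resource_id', sa.String(255)),
    sa.Column('type', sa.String(255)),
    sa.Column('creator', sa.String(255)),
)

resourcehistory_table = sa.Table(
    'resource_history', sa.MetaData(),  # table name assumed
    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(), nullable=False),
    sa.Column('original_resource_id', sa.String(255)),
)

metric_table = sa.Table(
    'metric', sa.MetaData(),
    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(), nullable=False),
    sa.Column('name', sa.String(255)),
    sa.Column('resource_id', sqlalchemy_utils.types.uuid.UUIDType()),
)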