def entity_create(filename, project):
    """ Create or update an entity """
    # Parse every entity spec found in the YAML file into Entity objects,
    # then apply them all to the given project in a single client call.
    parsed_entities = []
    for entity_dict in yaml_loader(filename):
        parsed_entities.append(Entity.from_dict(entity_dict))
    feast_client = Client()  # type: Client
    feast_client.apply(parsed_entities, project)
def _mock_feature_table(self, labels: dict = None, add_stream_source: bool = False) -> None:
    """
    Configure ``self.extractor._client`` mocks with a canned feature table.

    Builds a ``driver_trips`` feature-table spec (with an optional Kafka
    stream source) and wires it into the mocked Feast client so that
    ``list_feature_tables`` and ``get_entity`` return deterministic
    fixtures for the tests.

    :param labels: optional labels to attach to the feature-table spec.
        Defaults to an empty dict. (Was a mutable default argument
        ``labels: dict = {}`` — replaced with the ``None`` sentinel so no
        dict instance is shared across calls.)
    :param add_stream_source: when True, also attach a Kafka stream source
        with an Avro message schema to the table spec.
    """
    # None-sentinel idiom: create a fresh dict per call instead of sharing
    # one mutable default across all invocations.
    if labels is None:
        labels = {}

    table_spec = {
        "name": "driver_trips",
        "entities": ["driver_id"],
        "features": [{"name": "trips_today", "valueType": "INT32"}],
        "labels": labels,
        "batchSource": {
            "type": "BATCH_FILE",
            "fileOptions": {
                "fileFormat": {"parquetFormat": {}},
                "fileUrl": "file:///some/location",
            },
        },
    }

    if add_stream_source:
        # Avro schema describing the Kafka message payload for the stream.
        avro_schema_json = json.dumps({
            "type": "record",
            "name": "DriverTrips",
            "fields": [
                {"name": "driver_id", "type": "long"},
                {"name": "trips_today", "type": "int"},
                {
                    "name": "datetime",
                    "type": {"type": "long", "logicalType": "timestamp-micros"},
                },
            ],
        })
        table_spec["streamSource"] = {
            "type": "STREAM_KAFKA",
            "eventTimestampColumn": "datetime",
            "createdTimestampColumn": "datetime",
            "kafkaOptions": {
                "bootstrapServers": "broker1",
                "topic": "driver_trips",
                "messageFormat": {
                    "avroFormat": {
                        "schemaJson": avro_schema_json,
                    }
                },
            },
        }

    # Wire the fixtures into the mocked Feast client.
    self.extractor._client.list_feature_tables.return_value = [
        FeatureTable.from_dict({
            "spec": table_spec,
            "meta": {"createdTimestamp": "2020-01-01T00:00:00Z"},
        })
    ]
    self.extractor._client.get_entity.return_value = Entity.from_dict({
        "spec": {
            "name": "driver_id",
            "valueType": "INT64",
            "description": "Internal identifier of the driver",
        }
    })