Example #1
def _ingest_test_getfeaturetable_mocked_resp(file_url: str,
                                             date_partition_col: str = ""):
    return GetFeatureTableResponse(table=FeatureTableProto(
        spec=FeatureTableSpecProto(
            name="ingest_featuretable",
            max_age=Duration(seconds=3600),
            features=[
                FeatureSpecProto(
                    name="dev_feature_float",
                    value_type=ValueProto.ValueType.FLOAT,
                ),
                FeatureSpecProto(
                    name="dev_feature_string",
                    value_type=ValueProto.ValueType.STRING,
                ),
            ],
            entities=["dev_entity"],
            batch_source=DataSourceProto(
                file_options=DataSourceProto.FileOptions(
                    file_format=ParquetFormat().to_proto(), file_url=file_url),
                event_timestamp_column="datetime",
                created_timestamp_column="timestamp",
                date_partition_column=date_partition_col,
            ),
        ),
        meta=FeatureTableMetaProto(),
    ))
Example #2
    def to_proto(self) -> DataSourceProto:
        schema_pb = {}
        for key, value in self._schema.items():
            schema_pb[key] = value.value
        options = DataSourceProto.RequestDataOptions(name=self._name,
                                                     schema=schema_pb)
        data_source_proto = DataSourceProto(
            type=DataSourceProto.REQUEST_SOURCE, request_data_options=options)

        return data_source_proto
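
The method above returns an ordinary protobuf message, so the usual protobuf round-trip applies to its result. A minimal sketch, assuming the generated protos are importable under the path these examples use (aliasing feast.protos.feast.core.DataSource_pb2.DataSource as DataSourceProto):

from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto

# Build a bare REQUEST_SOURCE proto, serialize it, and parse it back.
proto = DataSourceProto(type=DataSourceProto.REQUEST_SOURCE)
payload = proto.SerializeToString()              # wire-format bytes
restored = DataSourceProto.FromString(payload)   # parse back into a message
assert restored.type == DataSourceProto.REQUEST_SOURCE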
Example #3
    def to_proto(self) -> DataSourceProto:
        data_source_proto = DataSourceProto(
            type=DataSourceProto.STREAM_KINESIS,
            field_mapping=self.field_mapping,
            kinesis_options=self.kinesis_options.to_proto(),
        )

        data_source_proto.event_timestamp_column = self.event_timestamp_column
        data_source_proto.created_timestamp_column = self.created_timestamp_column
        data_source_proto.date_partition_column = self.date_partition_column

        return data_source_proto
Example #4
    def to_proto(self) -> DataSourceProto:
        data_source_proto = DataSourceProto(
            type=DataSourceProto.BATCH_BIGQUERY,
            field_mapping=self.field_mapping,
            bigquery_options=self.bigquery_options.to_proto(),
        )

        data_source_proto.event_timestamp_column = self.event_timestamp_column
        data_source_proto.created_timestamp_column = self.created_timestamp_column
        data_source_proto.date_partition_column = self.date_partition_column

        return data_source_proto
Example #5
    def to_proto(self) -> DataSourceProto:
        data_source_proto = DataSourceProto(
            type=DataSourceProto.BATCH_REDSHIFT,
            field_mapping=self.field_mapping,
            redshift_options=self.redshift_options.to_proto(),
        )

        data_source_proto.event_timestamp_column = self.event_timestamp_column
        data_source_proto.created_timestamp_column = self.created_timestamp_column
        data_source_proto.date_partition_column = self.date_partition_column

        return data_source_proto
Example #6
    def to_proto(self) -> DataSourceProto:
        data_source_proto = DataSourceProto(
            type=DataSourceProto.CUSTOM_SOURCE,
            data_source_class_type=
            "feast.infra.offline_stores.contrib.postgres_offline_store.postgres_source.PostgreSQLSource",
            field_mapping=self.field_mapping,
            custom_options=self._postgres_options.to_proto(),
        )

        data_source_proto.timestamp_field = self.timestamp_field
        data_source_proto.created_timestamp_column = self.created_timestamp_column
        data_source_proto.date_partition_column = self.date_partition_column

        return data_source_proto
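
Here data_source_class_type stores the fully qualified class path of the custom source, which lets a consumer of the serialized proto import the right Python class again. A hedged illustration of that mechanism (a hypothetical helper, not Feast's actual from_proto logic):

import importlib

def class_from_path(dotted_path: str):
    # Hypothetical helper: split "package.module.ClassName" and import it dynamically.
    module_name, class_name = dotted_path.rsplit(".", 1)
    return getattr(importlib.import_module(module_name), class_name)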
Example #7
    def to_proto(self) -> DataSourceProto:
        batch_source_proto = None
        if self.batch_source:
            batch_source_proto = self.batch_source.to_proto()

        data_source_proto = DataSourceProto(
            name=self.name,
            type=DataSourceProto.PUSH_SOURCE,
            description=self.description,
            tags=self.tags,
            owner=self.owner,
            batch_source=batch_source_proto,
        )

        return data_source_proto
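
Because batch_source_proto stays None when no batch source is configured, the embedded batch_source field is only populated for push sources that wrap one. A usage sketch, hedged because constructor signatures differ between Feast releases (the keyword arguments below assume a version where BigQuerySource takes name, table and timestamp_field):

from feast import BigQuerySource, PushSource   # import paths assumed for recent Feast releases
from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto

batch = BigQuerySource(
    name="driver_stats_bq",                    # hypothetical source names and table
    table="project.dataset.driver_stats",
    timestamp_field="event_timestamp",
)
push = PushSource(name="driver_stats_push", batch_source=batch)

proto = push.to_proto()
assert proto.type == DataSourceProto.PUSH_SOURCE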
Example #8
    def to_proto(self) -> DataSourceProto:
        data_source_proto = DataSourceProto(
            name=self.name,
            type=DataSourceProto.BATCH_BIGQUERY,
            field_mapping=self.field_mapping,
            bigquery_options=self.bigquery_options.to_proto(),
            description=self.description,
            tags=self.tags,
            owner=self.owner,
        )

        data_source_proto.timestamp_field = self.timestamp_field
        data_source_proto.created_timestamp_column = self.created_timestamp_column

        return data_source_proto
Example #9
    def to_proto(self) -> DataSourceProto:
        data_source_proto = DataSourceProto(
            name=self.name,
            type=DataSourceProto.BATCH_TRINO,
            field_mapping=self.field_mapping,
            trino_options=self.trino_options.to_proto(),
            description=self.description,
            tags=self.tags,
            owner=self.owner,
        )

        data_source_proto.timestamp_field = self.timestamp_field
        data_source_proto.created_timestamp_column = self.created_timestamp_column
        data_source_proto.date_partition_column = self.date_partition_column

        return data_source_proto
Example #10
    def to_proto(self) -> DataSourceProto:
        data_source_proto = DataSourceProto(
            name=self.name,
            type=DataSourceProto.BATCH_SPARK,
            data_source_class_type=
            "feast.infra.offline_stores.contrib.spark_offline_store.spark_source.SparkSource",
            field_mapping=self.field_mapping,
            spark_options=self.spark_options.to_proto(),
            description=self.description,
            tags=self.tags,
            owner=self.owner,
        )

        data_source_proto.timestamp_field = self.timestamp_field
        data_source_proto.created_timestamp_column = self.created_timestamp_column

        return data_source_proto
Example #11
    def to_proto(self) -> DataSourceProto:
        """
        Converts a RedshiftSource object to its protobuf representation.

        Returns:
            A DataSourceProto object.
        """
        data_source_proto = DataSourceProto(
            type=DataSourceProto.BATCH_REDSHIFT,
            field_mapping=self.field_mapping,
            redshift_options=self.redshift_options.to_proto(),
        )

        data_source_proto.event_timestamp_column = self.event_timestamp_column
        data_source_proto.created_timestamp_column = self.created_timestamp_column
        data_source_proto.date_partition_column = self.date_partition_column

        return data_source_proto
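
This older RedshiftSource.to_proto still populates event_timestamp_column, whereas Example #13 writes timestamp_field for the same source type; the difference most likely reflects the rename of event_timestamp_column to timestamp_field in later Feast releases rather than two code paths in one version.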
Example #12
    def to_proto(self) -> DataSourceProto:
        data_source_proto = DataSourceProto(
            name=self.name,
            type=DataSourceProto.STREAM_KAFKA,
            field_mapping=self.field_mapping,
            kafka_options=self.kafka_options.to_proto(),
            description=self.description,
            tags=self.tags,
            owner=self.owner,
        )

        data_source_proto.timestamp_field = self.timestamp_field
        data_source_proto.created_timestamp_column = self.created_timestamp_column
        data_source_proto.date_partition_column = self.date_partition_column
        if self.batch_source:
            data_source_proto.batch_source.MergeFrom(
                self.batch_source.to_proto())
        return data_source_proto
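
The MergeFrom call is needed because protobuf does not allow assigning an embedded message field directly; it has to be merged or copied into. A minimal sketch of that behavior, using the same generated DataSource message:

from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto

stream_proto = DataSourceProto(type=DataSourceProto.STREAM_KAFKA)
batch_proto = DataSourceProto(type=DataSourceProto.BATCH_BIGQUERY)

# stream_proto.batch_source = batch_proto          # would raise AttributeError (composite field)
stream_proto.batch_source.MergeFrom(batch_proto)   # copies the fields into the embedded message
assert stream_proto.batch_source.type == DataSourceProto.BATCH_BIGQUERY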
Example #13
    def to_proto(self) -> DataSourceProto:
        """
        Converts a RedshiftSource object to its protobuf representation.

        Returns:
            A DataSourceProto object.
        """
        data_source_proto = DataSourceProto(
            type=DataSourceProto.BATCH_REDSHIFT,
            field_mapping=self.field_mapping,
            redshift_options=self.redshift_options.to_proto(),
            description=self.description,
            tags=self.tags,
            owner=self.owner,
        )

        data_source_proto.timestamp_field = self.timestamp_field
        data_source_proto.created_timestamp_column = self.created_timestamp_column

        return data_source_proto
Example #14
    def to_proto(self) -> DataSourceProto:
        """
        Converts a SnowflakeSource object to its protobuf representation.

        Returns:
            A DataSourceProto object.
        """
        data_source_proto = DataSourceProto(
            type=DataSourceProto.BATCH_SNOWFLAKE,
            field_mapping=self.field_mapping,
            snowflake_options=self.snowflake_options.to_proto(),
            description=self.description,
            tags=self.tags,
            owner=self.owner,
        )

        data_source_proto.timestamp_field = self.timestamp_field
        data_source_proto.created_timestamp_column = self.created_timestamp_column

        return data_source_proto
Example #15
    def to_proto(self) -> DataSourceProto:

        schema_pb = []

        if isinstance(self.schema, Dict):
            for key, value in self.schema.items():
                schema_pb.append(
                    Field(name=key,
                          dtype=VALUE_TYPES_TO_FEAST_TYPES[
                              value.value]).to_proto())
        else:
            for field in self.schema:
                schema_pb.append(field.to_proto())
        data_source_proto = DataSourceProto(
            name=self.name,
            type=DataSourceProto.REQUEST_SOURCE,
            description=self.description,
            tags=self.tags,
            owner=self.owner,
        )
        data_source_proto.request_data_options.schema.extend(schema_pb)

        return data_source_proto
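
The branch above accepts either a dict of value types or a list of Field objects as the schema. A hedged usage sketch of the list form, assuming a Feast version that exposes RequestSource, Field and feast.types under these import paths (names and dtypes below are hypothetical):

from feast import Field, RequestSource
from feast.types import Float32, Int64

request_source = RequestSource(
    name="vals_to_add",                        # hypothetical request source
    schema=[
        Field(name="val_to_add", dtype=Int64),
        Field(name="multiplier", dtype=Float32),
    ],
)

proto = request_source.to_proto()
assert proto.type == proto.REQUEST_SOURCE
assert [f.name for f in proto.request_data_options.schema] == ["val_to_add", "multiplier"]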