def __init__(self, feature_store_id, features=None, training_dataset_version=None):
    """Set up serving-vector state and the API/engine clients for one feature store.

    # Arguments
        feature_store_id: Identifier of the feature store the API clients and
            engines are scoped to.
        features: Optional list of features used for serving. Defaults to an
            empty list.
        training_dataset_version: Optional version of the backing training
            dataset; stored as-is.
    """
    self._training_dataset_version = training_dataset_version
    # Fix: the original default `features=[]` is a single mutable list shared
    # by every instance created without the argument; use a None sentinel and
    # allocate a fresh list per instance instead.
    self._features = features if features is not None else []
    # Lazily-populated serving state; all filled in later (not in SOURCE view).
    self._prepared_statement_engine = None
    self._prepared_statements = None
    self._serving_keys = None
    self._pkname_by_serving_index = None
    self._prefix_by_serving_index = None
    self._external = True
    self._feature_store_id = feature_store_id
    # REST API clients scoped to the feature store.
    self._training_dataset_api = training_dataset_api.TrainingDatasetApi(
        feature_store_id
    )
    self._feature_view_api = feature_view_api.FeatureViewApi(feature_store_id)
    self._storage_connector_api = storage_connector_api.StorageConnectorApi(
        feature_store_id
    )
    # Engines for transformation functions and feature views.
    self._transformation_function_engine = (
        transformation_function_engine.TransformationFunctionEngine(
            feature_store_id
        )
    )
    self._feature_view_engine = feature_view_engine.FeatureViewEngine(
        feature_store_id
    )
def __init__(self, feature_store_id):
    """Bind this engine to a feature store and build its API clients.

    # Arguments
        feature_store_id: Identifier of the feature store all clients
            created here are scoped to.
    """
    self._feature_store_id = feature_store_id

    # REST API clients, all scoped to the same feature store.
    self._training_dataset_api = training_dataset_api.TrainingDatasetApi(
        feature_store_id
    )
    self._tags_api = tags_api.TagsApi(feature_store_id, self.ENTITY_TYPE)
    self._storage_connector_api = storage_connector_api.StorageConnectorApi(
        feature_store_id
    )

    # Engine that handles attached transformation functions.
    self._transformation_function_engine = (
        transformation_function_engine.TransformationFunctionEngine(
            feature_store_id
        )
    )
def __init__(
    self,
    featurestore_id,
    featurestore_name,
    created,
    hdfs_store_path,
    project_name,
    project_id,
    featurestore_description,
    inode_id,
    offline_featurestore_name,
    hive_endpoint,
    online_enabled,
    num_feature_groups=None,
    num_training_datasets=None,
    num_storage_connectors=None,
    online_featurestore_name=None,
    mysql_server_endpoint=None,
    online_featurestore_size=None,
):
    """Feature store metadata object, deserialized from a backend response.

    Stores the raw fields on private attributes and builds the API clients
    and engines that operate against this store.
    """
    # --- identity and project context -------------------------------------
    self._id = featurestore_id
    self._name = featurestore_name
    self._created = created
    self._hdfs_store_path = hdfs_store_path
    self._project_name = project_name
    self._project_id = project_id
    self._description = featurestore_description
    self._inode_id = inode_id

    # --- online / offline store endpoints ---------------------------------
    self._online_feature_store_name = online_featurestore_name
    self._online_feature_store_size = online_featurestore_size
    self._offline_feature_store_name = offline_featurestore_name
    self._hive_endpoint = hive_endpoint
    self._mysql_server_endpoint = mysql_server_endpoint
    self._online_enabled = online_enabled

    # --- optional usage counters (None when backend omits them) -----------
    self._num_feature_groups = num_feature_groups
    self._num_training_datasets = num_training_datasets
    self._num_storage_connectors = num_storage_connectors

    # --- API clients and engines, all scoped to this store's id -----------
    store_id = self._id
    self._feature_group_api = feature_group_api.FeatureGroupApi(store_id)
    self._storage_connector_api = storage_connector_api.StorageConnectorApi(
        store_id
    )
    self._training_dataset_api = training_dataset_api.TrainingDatasetApi(store_id)
    self._expectations_api = expectations_api.ExpectationsApi(store_id)
    self._feature_group_engine = feature_group_engine.FeatureGroupEngine(store_id)
    self._transformation_function_engine = (
        transformation_function_engine.TransformationFunctionEngine(store_id)
    )
    self._feature_view_engine = feature_view_engine.FeatureViewEngine(store_id)
def __init__(
    self,
    featurestore_id,
    transformation_fn=None,
    version=None,
    name=None,
    source_code_content=None,
    builtin_source_code=None,
    output_type=None,
    id=None,
    type=None,
    items=None,
    count=None,
    href=None,
):
    """Transformation function metadata object.

    Reached via three distinct paths, distinguished below: a user-supplied
    callable (`transformation_fn`), registration of a built-in function
    (`builtin_source_code`), or deserialization of a backend response
    (neither given — `source_code_content` carries the stored source).
    """
    self._id = id
    self._featurestore_id = featurestore_id
    self._version = version
    self._name = name
    self._transformation_fn = transformation_fn
    self._source_code_content = source_code_content
    self._transformation_function_engine = (
        transformation_function_engine.TransformationFunctionEngine(
            self._featurestore_id
        )
    )

    # Set up depending on whether the caller is a user or a backend response.
    if self._transformation_fn is not None:
        # User init: pull the source out of the callable, infer the Spark
        # output type from the user-provided hint.
        self._transformer_code = None
        self._extract_source_code()
        self._output_type = self._transformation_function_engine.infer_spark_type(
            output_type
        )
    elif builtin_source_code is not None:
        # User asked to register a built-in transformation function; wrap the
        # provided source in the stored JSON envelope.
        self._output_type = self._transformation_function_engine.infer_spark_type(
            output_type
        )
        self._source_code_content = json.dumps(
            {"module_imports": "", "transformer_code": builtin_source_code}
        )
    else:
        # Backend response: keep the reported output type and reload the
        # original source code from the stored content.
        self._output_type = output_type
        self._load_source_code(self._source_code_content)

    # Back-references populated later, when attached to a feature group.
    self._feature_group_feature_name = None
    self._feature_group_id = None
def __init__(self, feature_store_id):
    """Bind this feature-view engine to one feature store.

    Builds the REST API clients and the sibling engines (code, statistics,
    training dataset) this engine delegates to.
    """
    self._feature_store_id = feature_store_id

    # REST API clients.
    self._feature_view_api = feature_view_api.FeatureViewApi(feature_store_id)
    self._tags_api = tags_api.TagsApi(feature_store_id, self.ENTITY_TYPE)
    self._storage_connector_api = storage_connector_api.StorageConnectorApi(
        feature_store_id
    )
    # QueryConstructorApi takes no store id — queries are constructed globally.
    self._query_constructor_api = query_constructor_api.QueryConstructorApi()

    # Delegated engines; code and statistics are scoped to the training-data
    # API path of this entity.
    self._transformation_function_engine = (
        transformation_function_engine.TransformationFunctionEngine(
            feature_store_id
        )
    )
    self._td_code_engine = code_engine.CodeEngine(
        feature_store_id, self._TRAINING_DATA_API_PATH
    )
    self._statistics_engine = statistics_engine.StatisticsEngine(
        feature_store_id, self._TRAINING_DATA_API_PATH
    )
    self._training_dataset_engine = training_dataset_engine.TrainingDatasetEngine(
        feature_store_id
    )
def __init__(
    self,
    name,
    version,
    data_format,
    featurestore_id,
    location="",
    event_start_time=None,
    event_end_time=None,
    coalesce=False,
    description=None,
    storage_connector=None,
    splits=None,
    validation_size=None,
    test_size=None,
    train_start=None,
    train_end=None,
    validation_start=None,
    validation_end=None,
    test_start=None,
    test_end=None,
    seed=None,
    created=None,
    creator=None,
    features=None,
    statistics_config=None,
    featurestore_name=None,
    id=None,
    inode_id=None,
    training_dataset_type=None,
    from_query=None,
    querydto=None,
    label=None,
    transformation_functions=None,
    train_split=None,
):
    """Training dataset metadata object.

    Constructed either directly by a user (``created`` is None) or by
    deserializing a backend REST response (``created`` is set); the tail of
    this constructor branches on that distinction.
    """
    self._id = id
    self._name = name
    self._version = version
    self._description = description
    self._data_format = data_format
    # Event-time bounds are normalized to timestamps immediately.
    self._start_time = self._convert_event_time_to_timestamp(event_start_time)
    self._end_time = self._convert_event_time_to_timestamp(event_end_time)
    # Split configuration: either random fractions (validation/test size)
    # or explicit time ranges (train/validation/test start+end).
    self._validation_size = validation_size
    self._test_size = test_size
    self._train_start = train_start
    self._train_end = train_end
    self._validation_start = validation_start
    self._validation_end = validation_end
    self._test_start = test_start
    self._test_end = test_end
    self._coalesce = coalesce
    self._seed = seed
    self._location = location
    self._from_query = from_query
    self._querydto = querydto
    self._feature_store_id = featurestore_id
    self._transformation_functions = transformation_functions
    self._train_split = train_split
    # API clients and engines, all scoped to this feature store.
    self._training_dataset_api = training_dataset_api.TrainingDatasetApi(
        featurestore_id
    )
    self._training_dataset_engine = training_dataset_engine.TrainingDatasetEngine(
        featurestore_id
    )
    self._statistics_engine = statistics_engine.StatisticsEngine(
        featurestore_id, self.ENTITY_TYPE
    )
    self._code_engine = code_engine.CodeEngine(featurestore_id, self.ENTITY_TYPE)
    self._transformation_function_engine = (
        transformation_function_engine.TransformationFunctionEngine(
            featurestore_id
        )
    )
    if training_dataset_type:
        # NOTE(review): assigns the public attribute (no leading underscore) —
        # presumably a property setter that validates/normalizes the type;
        # confirm on the class definition (not visible here).
        self.training_dataset_type = training_dataset_type
    else:
        self._training_dataset_type = None
    # set up depending on user initialized or coming from backend response
    if created is None:
        # no type -> user init
        self._features = features
        # Public attributes here (storage_connector, splits, statistics_config)
        # look like property setters with normalization side effects — TODO
        # confirm against the class body.
        self.storage_connector = storage_connector
        self.splits = splits
        self.statistics_config = statistics_config
        self._label = label
        if validation_size or test_size:
            # Random split requested: whatever fraction is not validation or
            # test goes to TRAIN.
            self._train_split = TrainingDatasetSplit.TRAIN
            self.splits = {
                TrainingDatasetSplit.TRAIN: 1
                - (validation_size or 0)
                - (test_size or 0),
                TrainingDatasetSplit.VALIDATION: validation_size,
                TrainingDatasetSplit.TEST: test_size,
            }
        self._set_time_splits(
            train_start,
            train_end,
            validation_start,
            validation_end,
            test_start,
            test_end,
        )
    else:
        # type available -> init from backend response
        # make rest call to get all connector information, description etc.
        self._storage_connector = StorageConnector.from_response_json(
            storage_connector
        )
        if features is None:
            features = []
        self._features = [
            training_dataset_feature.TrainingDatasetFeature.from_response_json(
                feat
            )
            for feat in features
        ]
        self._splits = [
            TrainingDatasetSplit.from_response_json(split) for split in splits
        ]
        self._statistics_config = StatisticsConfig.from_response_json(
            statistics_config
        )
        # Label column names are stored lower-cased.
        self._label = [feat.name.lower() for feat in self._features if feat.label]
    # Serving-vector helper, built from whichever feature list was set above.
    self._vector_server = vector_server.VectorServer(
        featurestore_id, features=self._features
    )