def start_offline_to_online_ingestion(
    self,
    feature_table: FeatureTable,
    start: datetime,
    end: datetime,
) -> SparkJob:
    """
    Launch Ingestion Job from Batch Source to Online Store for given featureTable

    :param feature_table: FeatureTable which will be ingested
    :param start: lower datetime boundary
    :param end: upper datetime boundary
    :return: Spark Job Proxy object
    """
    if not self._use_job_service:
        # Launch the Spark ingestion job directly from the SDK.
        return start_offline_to_online_ingestion(
            client=self,
            project=self.project,
            feature_table=feature_table,
            start=start,
            end=end,
        )
    else:
        # Delegate the launch to the job service over gRPC and wrap the
        # returned job id in a remote job proxy.
        request = StartOfflineToOnlineIngestionJobRequest(
            project=self.project,
            table_name=feature_table.name,
        )
        request.start_date.FromDatetime(start)
        request.end_date.FromDatetime(end)
        response = self._job_service.StartOfflineToOnlineIngestionJob(request)
        return RemoteBatchIngestionJob(
            self._job_service,
            self._extra_grpc_params,
            response.id,
        )
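For context, here is a minimal usage sketch of the client-side method above. The Client constructor arguments and the "driver_statistics" table name are assumptions for illustration; only start_offline_to_online_ingestion, get_feature_table, and get_id appear in the excerpts in this section.

from datetime import datetime

from feast import Client

# Assumed setup: a client pointed at an existing Feast deployment and a
# feature table that has already been registered (names are hypothetical).
client = Client(core_url="localhost:6565")
feature_table = client.get_feature_table("driver_statistics")

# Launch ingestion of one day of batch data into the online store.
job = client.start_offline_to_online_ingestion(
    feature_table,
    datetime(2021, 6, 1),
    datetime(2021, 6, 2),
)

# The returned SparkJob proxy looks the same whether the job was launched
# directly by the SDK or through the job service.
print(job.get_id())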
def StartOfflineToOnlineIngestionJob(
    self, request: StartOfflineToOnlineIngestionJobRequest, context
):
    """Start job to ingest data from offline store into online store"""
    feature_table = self.client.get_feature_table(
        request.table_name, request.project
    )
    job = start_offline_to_online_ingestion(
        client=self.client,
        project=request.project,
        feature_table=feature_table,
        start=request.start_date.ToDatetime(),
        end=request.end_date.ToDatetime(),
    )
    return StartOfflineToOnlineIngestionJobResponse(id=job.get_id())
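The start and end boundaries travel across the gRPC boundary as protobuf Timestamp fields: the SDK serializes them with FromDatetime, and the handler above recovers them with ToDatetime. A standalone round-trip illustration of those two protobuf calls (not Feast-specific):

from datetime import datetime

from google.protobuf.timestamp_pb2 import Timestamp

start = datetime(2021, 6, 1, 12, 30)

# Client side: write the naive (UTC) datetime into the Timestamp field.
ts = Timestamp()
ts.FromDatetime(start)

# Job service side: convert back to a naive UTC datetime.
assert ts.ToDatetime() == start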
def start_offline_to_online_ingestion(
    self,
    feature_table: FeatureTable,
    start: datetime,
    end: datetime,
) -> SparkJob:
    """
    Launch Ingestion Job from Batch Source to Online Store for given featureTable

    :param feature_table: FeatureTable which will be ingested
    :param start: lower datetime boundary
    :param end: upper datetime boundary
    :return: Spark Job Proxy object
    """
    return start_offline_to_online_ingestion(feature_table, start, end, self)
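Whichever variant is used, the caller ends up with a SparkJob proxy. Below is a sketch of waiting for the ingestion to finish, assuming the proxy also exposes a get_status() method with COMPLETED/FAILED terminal states; that accessor does not appear in the excerpts above, so treat it as an assumption.

import time


def wait_for_ingestion(job, timeout_s: int = 600, poll_s: int = 5):
    """Poll a SparkJob proxy until it reaches a terminal state or times out.

    get_status() is an assumed accessor; only get_id() is shown in the
    excerpts above.
    """
    deadline = time.time() + timeout_s
    while time.time() < deadline:
        status = job.get_status()
        if status.name in ("COMPLETED", "FAILED"):
            return status
        time.sleep(poll_s)
    raise TimeoutError(
        f"Ingestion job {job.get_id()} did not finish within {timeout_s}s"
    )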