def execute(self, context: "Context"):
    """Create the Vertex AI dataset and return it as a dict.

    Pushes the new dataset's id to XCom under the ``dataset_id`` key and
    persists the console link for the UI.
    """
    hook = DatasetHook(
        gcp_conn_id=self.gcp_conn_id,
        delegate_to=self.delegate_to,
        impersonation_chain=self.impersonation_chain,
    )

    self.log.info("Creating dataset")
    create_op = hook.create_dataset(
        project_id=self.project_id,
        region=self.region,
        dataset=self.dataset,
        retry=self.retry,
        timeout=self.timeout,
        metadata=self.metadata,
    )
    # create_dataset is a long-running operation; block until it completes.
    dataset_proto = hook.wait_for_operation(timeout=self.timeout, operation=create_op)

    dataset_dict = Dataset.to_dict(dataset_proto)
    new_dataset_id = hook.extract_dataset_id(dataset_dict)
    self.log.info("Dataset was created. Dataset id: %s", new_dataset_id)

    # Expose the id to downstream tasks and record the console link.
    self.xcom_push(context, key="dataset_id", value=new_dataset_id)
    VertexAIDatasetLink.persist(context=context, task_instance=self, dataset_id=new_dataset_id)
    return dataset_dict
def execute(self, context: "Context"):
    """Fetch the Vertex AI dataset with ``self.dataset_id`` and return it as a dict.

    If the dataset does not exist, logs the fact and returns ``None``
    instead of failing the task.
    """
    hook = DatasetHook(
        gcp_conn_id=self.gcp_conn_id,
        delegate_to=self.delegate_to,
        impersonation_chain=self.impersonation_chain,
    )

    try:
        self.log.info("Get dataset: %s", self.dataset_id)
        fetched = hook.get_dataset(
            project_id=self.project_id,
            region=self.region,
            dataset=self.dataset_id,
            read_mask=self.read_mask,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        # Record the console link for the UI before returning.
        VertexAIDatasetLink.persist(context=context, task_instance=self, dataset_id=self.dataset_id)
        self.log.info("Dataset was gotten.")
        return Dataset.to_dict(fetched)
    except NotFound:
        # Missing dataset is treated as a soft outcome: log and fall through (returns None).
        self.log.info("The Dataset ID %s does not exist.", self.dataset_id)
class CreateDatasetOperator(BaseOperator):
    """
    Creates a Dataset in Google Vertex AI.

    :param project_id: Required. The ID of the Google Cloud project that the dataset belongs to.
    :param region: Required. The ID of the Google Cloud region that the service belongs to.
    :param dataset: Required. The Dataset to create. This corresponds to the ``dataset`` field on the
        ``request`` instance; if ``request`` is provided, this should not be set.
    :param retry: Designation of what errors, if any, should be retried.
    :param timeout: The timeout for this request.
    :param metadata: Strings which should be sent along with the request as metadata.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority, if any.
        For this to work, the service account making the request must have domain-wide delegation
        enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token of the last
        account in the list, which will be impersonated in the request. If set as a string, the
        account must grant the originating account the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant Service Account Token Creator
        IAM role to the directly preceding identity, with first account from the list granting
        this role to the originating account (templated).
    """

    template_fields = ("region", "project_id", "impersonation_chain")
    operator_extra_links = (VertexAIDatasetLink(),)

    def __init__(
        self,
        *,
        region: str,
        project_id: str,
        dataset: Union[Dataset, Dict],
        retry: Optional[Retry] = None,
        timeout: Optional[float] = None,
        metadata: Sequence[Tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: Optional[str] = None,
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.region = region
        self.project_id = project_id
        self.dataset = dataset
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def execute(self, context: "Context"):
        """Create the dataset, push its id to XCom, and return it as a dict."""
        hook = DatasetHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )
        self.log.info("Creating dataset")
        operation = hook.create_dataset(
            project_id=self.project_id,
            region=self.region,
            dataset=self.dataset,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        # create_dataset is a long-running operation; wait for it to finish.
        result = hook.wait_for_operation(timeout=self.timeout, operation=operation)

        dataset = Dataset.to_dict(result)
        dataset_id = hook.extract_dataset_id(dataset)
        self.log.info("Dataset was created. Dataset id: %s", dataset_id)

        # Make the id available to downstream tasks and record the console link.
        self.xcom_push(context, key="dataset_id", value=dataset_id)
        VertexAIDatasetLink.persist(context=context, task_instance=self, dataset_id=dataset_id)
        return dataset
class GetDatasetOperator(BaseOperator):
    """
    Get a Dataset from Google Vertex AI.

    :param project_id: Required. The ID of the Google Cloud project that the dataset belongs to.
    :param region: Required. The ID of the Google Cloud region that the service belongs to.
    :param dataset_id: Required. The ID of the Dataset to get.
    :param read_mask: Optional. Mask specifying which fields of the Dataset to read.
    :param retry: Designation of what errors, if any, should be retried.
    :param timeout: The timeout for this request.
    :param metadata: Strings which should be sent along with the request as metadata.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param delegate_to: The account to impersonate using domain-wide delegation of authority, if any.
        For this to work, the service account making the request must have domain-wide delegation
        enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token of the last
        account in the list, which will be impersonated in the request. If set as a string, the
        account must grant the originating account the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant Service Account Token Creator
        IAM role to the directly preceding identity, with first account from the list granting
        this role to the originating account (templated).
    """

    template_fields = ("region", "dataset_id", "project_id", "impersonation_chain")
    operator_extra_links = (VertexAIDatasetLink(),)

    def __init__(
        self,
        *,
        region: str,
        project_id: str,
        dataset_id: str,
        read_mask: Optional[str] = None,
        retry: Optional[Retry] = None,
        timeout: Optional[float] = None,
        metadata: Sequence[Tuple[str, str]] = (),
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: Optional[str] = None,
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.region = region
        self.project_id = project_id
        self.dataset_id = dataset_id
        self.read_mask = read_mask
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain

    def execute(self, context: "Context"):
        """Fetch the dataset and return it as a dict; return None if it does not exist."""
        hook = DatasetHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            impersonation_chain=self.impersonation_chain,
        )
        try:
            self.log.info("Get dataset: %s", self.dataset_id)
            dataset_obj = hook.get_dataset(
                project_id=self.project_id,
                region=self.region,
                dataset=self.dataset_id,
                read_mask=self.read_mask,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
            VertexAIDatasetLink.persist(context=context, task_instance=self, dataset_id=self.dataset_id)
            self.log.info("Dataset was gotten.")
            return Dataset.to_dict(dataset_obj)
        except NotFound:
            # A missing dataset is not treated as a task failure: log and return None.
            self.log.info("The Dataset ID %s does not exist.", self.dataset_id)