def execute(self, context: 'Context') -> None:
    """Run the BigQuery copy job for this operator.

    Copies ``self.source_project_dataset_tables`` into
    ``self.destination_project_dataset_table`` via ``BigQueryHook.run_copy``.

    :param context: Airflow task execution context (unused here beyond the
        standard operator contract).
    """
    self.log.info(
        'Executing copy of %s into: %s',
        self.source_project_dataset_tables,
        self.destination_project_dataset_table,
    )
    hook = BigQueryHook(
        # Fixed: the original passed this as `bigquery_conn_id=`, a
        # deprecated alias of `gcp_conn_id` (the sibling version of this
        # method in this file already uses `gcp_conn_id=`).
        gcp_conn_id=self.gcp_conn_id,
        delegate_to=self.delegate_to,
        location=self.location,
        impersonation_chain=self.impersonation_chain,
    )
    # run_copy itself emits a DeprecationWarning; silence it while this
    # operator still relies on that API.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", DeprecationWarning)
        hook.run_copy(
            source_project_dataset_tables=self.source_project_dataset_tables,
            destination_project_dataset_table=self.destination_project_dataset_table,
            write_disposition=self.write_disposition,
            create_disposition=self.create_disposition,
            labels=self.labels,
            encryption_configuration=self.encryption_configuration,
        )
def execute(self, context: 'Context') -> None:
    """Copy the source tables into the destination table.

    Runs the copy through ``BigQueryHook.run_copy``, then looks the job up
    again to discover the concrete destination table and registers a
    ``BigQueryTableLink`` for it in the task UI.

    :param context: Airflow task execution context, forwarded to the link
        persistence helper.
    """
    self.log.info(
        'Executing copy of %s into: %s',
        self.source_project_dataset_tables,
        self.destination_project_dataset_table,
    )
    hook = BigQueryHook(
        gcp_conn_id=self.gcp_conn_id,
        delegate_to=self.delegate_to,
        location=self.location,
        impersonation_chain=self.impersonation_chain,
    )

    # run_copy emits a DeprecationWarning of its own; suppress it while
    # this operator still depends on that API.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", DeprecationWarning)
        copy_job_id = hook.run_copy(
            source_project_dataset_tables=self.source_project_dataset_tables,
            destination_project_dataset_table=self.destination_project_dataset_table,
            write_disposition=self.write_disposition,
            create_disposition=self.create_disposition,
            labels=self.labels,
            encryption_configuration=self.encryption_configuration,
        )

    # Resolve the destination table from the finished job's API
    # representation so the persisted link points at the real target.
    job_repr = hook.get_job(job_id=copy_job_id).to_api_repr()
    dest_table = job_repr["configuration"]["copy"]["destinationTable"]
    BigQueryTableLink.persist(
        context=context,
        task_instance=self,
        dataset_id=dest_table["datasetId"],
        project_id=dest_table["projectId"],
        table_id=dest_table["tableId"],
    )