Example #1
0
    def execute(self, context: 'Context'):
        """Trigger a manual BigQuery DTS transfer run and return the response as a dict.

        Also persists a console link for the transfer config and pushes the first
        submitted run's id to XCom under ``run_id``.
        """
        dts_hook = BiqQueryDataTransferServiceHook(
            gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain, location=self.location
        )
        self.log.info('Submitting manual transfer for %s', self.transfer_config_id)
        runs_response = dts_hook.start_manual_transfer_runs(
            transfer_config_id=self.transfer_config_id,
            requested_time_range=self.requested_time_range,
            requested_run_time=self.requested_run_time,
            project_id=self.project_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )

        # The first run's resource name encodes project/region/config id; reuse it
        # to build the console link.
        config_details = _get_transfer_config_details(runs_response.runs[0].name)
        BigQueryDataTransferConfigLink.persist(
            context=context,
            task_instance=self,
            region=config_details["region"],
            config_id=config_details["config_id"],
            project_id=config_details["project_id"],
        )

        serialized = StartManualTransferRunsResponse.to_dict(runs_response)
        run_id = get_object_id(serialized['runs'][0])
        self.xcom_push(context, key="run_id", value=run_id)
        self.log.info('Transfer run %s submitted successfully.', run_id)
        return serialized
Example #2
0
 def setUp(self) -> None:
     """Build a hook instance with the GCP base-hook init and credentials mocked out."""
     base_hook_init = "airflow.providers.google.cloud.hooks.bigquery_dts.GoogleBaseHook.__init__"
     # Patching __init__ avoids any real credential/project lookup during construction.
     with mock.patch(base_hook_init, new=mock_base_gcp_hook_no_default_project_id):
         self.hook = BiqQueryDataTransferServiceHook()
         self.hook._get_credentials = mock.MagicMock(return_value=CREDENTIALS)  # type: ignore
Example #3
0
    def execute(self, context: 'Context'):
        """Create a BigQuery DTS transfer config and return it as a dict.

        Also persists a console link for the new config and pushes its object id
        to XCom under ``transfer_config_id``.
        """
        dts_hook = BiqQueryDataTransferServiceHook(
            gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain, location=self.location
        )
        self.log.info("Creating DTS transfer config")
        created = dts_hook.create_transfer_config(
            project_id=self.project_id,
            transfer_config=self.transfer_config,
            authorization_code=self.authorization_code,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )

        # The resource name encodes project/region/config id; reuse it for the link.
        config_details = _get_transfer_config_details(created.name)
        BigQueryDataTransferConfigLink.persist(
            context=context,
            task_instance=self,
            region=config_details["region"],
            config_id=config_details["config_id"],
            project_id=config_details["project_id"],
        )

        result = TransferConfig.to_dict(created)
        config_id = get_object_id(result)
        self.log.info("Created DTS transfer config %s", config_id)
        self.xcom_push(context, key="transfer_config_id", value=config_id)
        return result
Example #4
0
 def execute(self, context):
     """Delete the DTS transfer config identified by ``transfer_config_id``."""
     dts_hook = BiqQueryDataTransferServiceHook(gcp_conn_id=self.gcp_conn_id)
     # Fire-and-forget: the hook raises on failure, nothing is returned.
     dts_hook.delete_transfer_config(
         transfer_config_id=self.transfer_config_id,
         project_id=self.project_id,
         retry=self.retry,
         timeout=self.timeout,
         metadata=self.metadata,
     )
Example #5
0
 def execute(self, context) -> None:
     """Delete the DTS transfer config, optionally via an impersonation chain."""
     dts_hook = BiqQueryDataTransferServiceHook(
         gcp_conn_id=self.gcp_conn_id,
         impersonation_chain=self.impersonation_chain)
     # The hook raises on failure; a successful delete returns nothing.
     dts_hook.delete_transfer_config(
         transfer_config_id=self.transfer_config_id,
         project_id=self.project_id,
         retry=self.retry,
         timeout=self.timeout,
         metadata=self.metadata,
     )
Example #6
0
 def poke(self, context: 'Context') -> bool:
     """Return True once the monitored transfer run reaches an expected status."""
     dts_hook = BiqQueryDataTransferServiceHook(
         gcp_conn_id=self.gcp_cloud_conn_id,
         impersonation_chain=self.impersonation_chain,
     )
     transfer_run = dts_hook.get_transfer_run(
         run_id=self.run_id,
         transfer_config_id=self.transfer_config_id,
         project_id=self.project_id,
         retry=self.retry,
         timeout=self.request_timeout,
         metadata=self.metadata,
     )
     self.log.info("Status of %s run: %s", self.run_id, str(transfer_run.state))
     return transfer_run.state in self.expected_statuses
Example #7
0
    def poke(self, context):
        """Return True when the transfer run's state is among the expected statuses."""
        dts_hook = BiqQueryDataTransferServiceHook(
            gcp_conn_id=self.gcp_cloud_conn_id)
        transfer_run = dts_hook.get_transfer_run(
            run_id=self.run_id,
            transfer_config_id=self.transfer_config_id,
            project_id=self.project_id,
            retry=self.retry,
            timeout=self.request_timeout,
            metadata=self.metadata,
        )
        # Protobuf message -> plain dict so the state is a comparable string.
        state = MessageToDict(transfer_run)["state"]
        self.log.info("Status of %s run: %s", self.run_id, state)

        return state in self.expected_statuses
Example #8
0
 def execute(self, context):
     """Create a DTS transfer config and return it as a dict.

     Pushes the created config's object id to XCom under ``transfer_config_id``.
     """
     dts_hook = BiqQueryDataTransferServiceHook(gcp_conn_id=self.gcp_conn_id)
     self.log.info("Creating DTS transfer config")
     created = dts_hook.create_transfer_config(
         project_id=self.project_id,
         transfer_config=self.transfer_config,
         authorization_code=self.authorization_code,
         retry=self.retry,
         timeout=self.timeout,
         metadata=self.metadata,
     )
     result = MessageToDict(created)
     config_id = get_object_id(result)
     self.log.info("Created DTS transfer config %s", config_id)
     self.xcom_push(context, key="transfer_config_id", value=config_id)
     return result
Example #9
0
    def poke(self, context: 'Context') -> bool:
        """Check the transfer run's state, failing fast on terminal failure states.

        :return: True when the state is among the expected statuses.
        :raises AirflowException: if the run ended FAILED or CANCELLED.
        """
        dts_hook = BiqQueryDataTransferServiceHook(
            gcp_conn_id=self.gcp_cloud_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        transfer_run = dts_hook.get_transfer_run(
            run_id=self.run_id,
            transfer_config_id=self.transfer_config_id,
            project_id=self.project_id,
            retry=self.retry,
            timeout=self.request_timeout,
            metadata=self.metadata,
        )
        self.log.info("Status of %s run: %s", self.run_id, str(transfer_run.state))

        # A failed/cancelled run can never reach an expected status; abort instead
        # of poking forever.
        if transfer_run.state in (TransferState.FAILED, TransferState.CANCELLED):
            raise AirflowException(f"Transfer {self.run_id} did not succeed")
        return transfer_run.state in self.expected_statuses
Example #10
0
 def execute(self, context):
     """Submit a manual DTS transfer run and return the response as a dict.

     Pushes the first submitted run's id to XCom under ``run_id``.
     """
     dts_hook = BiqQueryDataTransferServiceHook(
         gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain
     )
     self.log.info('Submitting manual transfer for %s', self.transfer_config_id)
     runs_response = dts_hook.start_manual_transfer_runs(
         transfer_config_id=self.transfer_config_id,
         requested_time_range=self.requested_time_range,
         requested_run_time=self.requested_run_time,
         project_id=self.project_id,
         retry=self.retry,
         timeout=self.timeout,
         metadata=self.metadata,
     )
     serialized = StartManualTransferRunsResponse.to_dict(runs_response)
     run_id = get_object_id(serialized['runs'][0])
     self.xcom_push(context, key="run_id", value=run_id)
     self.log.info('Transfer run %s submitted successfully.', run_id)
     return serialized
Example #11
0
class BigQueryDataTransferHookTestCase(unittest.TestCase):
    """Unit tests for BiqQueryDataTransferServiceHook with the GCP client mocked out."""

    def setUp(self) -> None:
        # Bypass the base hook __init__ so no real credentials/project are needed.
        with mock.patch(
            "airflow.providers.google.cloud.hooks.bigquery_dts.CloudBaseHook.__init__",
            new=mock_base_gcp_hook_no_default_project_id,
        ):
            self.hook = BiqQueryDataTransferServiceHook()
            self.hook._get_credentials = mock.MagicMock(  # type: ignore
                return_value=CREDENTIALS
            )

    def test_version_information(self):
        # The hook should advertise the Airflow version to the GCP client library.
        self.assertEqual(
            "airflow_v" + version, self.hook.client_info.client_library_version
        )

    def test_disable_auto_scheduling(self):
        want = deepcopy(TRANSFER_CONFIG)
        want.schedule_options.disable_auto_scheduling = True
        self.assertEqual(want, self.hook._disable_auto_scheduling(TRANSFER_CONFIG))

    @mock.patch(
        "airflow.providers.google.cloud.hooks.bigquery_dts."
        "DataTransferServiceClient.create_transfer_config"
    )
    def test_create_transfer_config(self, create_mock):
        self.hook.create_transfer_config(
            transfer_config=TRANSFER_CONFIG, project_id=PROJECT_ID
        )

        # The hook must deep-copy the config and force auto-scheduling off.
        expected_parent = DataTransferServiceClient.project_path(PROJECT_ID)
        expected_config = deepcopy(TRANSFER_CONFIG)
        expected_config.schedule_options.disable_auto_scheduling = True
        create_mock.assert_called_once_with(
            parent=expected_parent,
            transfer_config=expected_config,
            authorization_code=None,
            metadata=None,
            retry=None,
            timeout=None,
        )

    @mock.patch(
        "airflow.providers.google.cloud.hooks.bigquery_dts."
        "DataTransferServiceClient.delete_transfer_config"
    )
    def test_delete_transfer_config(self, delete_mock):
        self.hook.delete_transfer_config(
            transfer_config_id=TRANSFER_CONFIG_ID, project_id=PROJECT_ID
        )

        expected_name = DataTransferServiceClient.project_transfer_config_path(
            PROJECT_ID, TRANSFER_CONFIG_ID
        )
        delete_mock.assert_called_once_with(
            name=expected_name, metadata=None, retry=None, timeout=None
        )

    @mock.patch(
        "airflow.providers.google.cloud.hooks.bigquery_dts."
        "DataTransferServiceClient.start_manual_transfer_runs"
    )
    def test_start_manual_transfer_runs(self, start_mock):
        self.hook.start_manual_transfer_runs(
            transfer_config_id=TRANSFER_CONFIG_ID, project_id=PROJECT_ID
        )

        expected_parent = DataTransferServiceClient.project_transfer_config_path(
            PROJECT_ID, TRANSFER_CONFIG_ID
        )
        start_mock.assert_called_once_with(
            parent=expected_parent,
            requested_time_range=None,
            requested_run_time=None,
            metadata=None,
            retry=None,
            timeout=None,
        )