Example #1
0
    def execute(self, context: 'Context'):
        """Create a BigQuery DTS transfer config, expose a console link, and return it as a dict.

        The created config's id is also pushed to XCom under ``transfer_config_id``.
        """
        hook = BiqQueryDataTransferServiceHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
            location=self.location,
        )
        self.log.info("Creating DTS transfer config")
        response = hook.create_transfer_config(
            transfer_config=self.transfer_config,
            project_id=self.project_id,
            authorization_code=self.authorization_code,
            metadata=self.metadata,
            retry=self.retry,
            timeout=self.timeout,
        )

        # The resource name encodes region/config id/project id; parse it for the link.
        details = _get_transfer_config_details(response.name)
        BigQueryDataTransferConfigLink.persist(
            context=context,
            task_instance=self,
            region=details["region"],
            config_id=details["config_id"],
            project_id=details["project_id"],
        )

        result = TransferConfig.to_dict(response)
        config_id = get_object_id(result)
        self.log.info("Created DTS transfer config %s", config_id)
        self.xcom_push(context, key="transfer_config_id", value=config_id)
        return result
Example #2
0
 def execute(self, context):
     """Create a BigQuery DTS transfer config and return it as a dict.

     The created config's id is pushed to XCom under ``transfer_config_id``.
     """
     hook = BiqQueryDataTransferServiceHook(gcp_conn_id=self.gcp_conn_id)
     self.log.info("Creating DTS transfer config")
     response = hook.create_transfer_config(
         transfer_config=self.transfer_config,
         project_id=self.project_id,
         authorization_code=self.authorization_code,
         metadata=self.metadata,
         retry=self.retry,
         timeout=self.timeout,
     )
     result = MessageToDict(response)
     config_id = get_object_id(result)
     self.log.info("Created DTS transfer config %s", config_id)
     self.xcom_push(context, key="transfer_config_id", value=config_id)
     return result
Example #3
0
class BigQueryDataTransferHookTestCase(unittest.TestCase):
    """Unit tests for ``BiqQueryDataTransferServiceHook`` with GCP auth mocked out."""

    def setUp(self) -> None:
        # Patch the base hook's __init__ so no Airflow connection / project id is required,
        # and stub credential resolution so no real GCP auth happens.
        with mock.patch(
            "airflow.providers.google.cloud.hooks.bigquery_dts.CloudBaseHook.__init__",
            new=mock_base_gcp_hook_no_default_project_id,
        ):
            self.hook = BiqQueryDataTransferServiceHook()
            self.hook._get_credentials = mock.MagicMock(return_value=CREDENTIALS)  # type: ignore

    def test_version_information(self):
        # The client info must advertise the Airflow version to the API.
        self.assertEqual(
            "airflow_v" + version, self.hook.client_info.client_library_version
        )

    def test_disable_auto_scheduling(self):
        # _disable_auto_scheduling must return a copy with auto scheduling turned off.
        config = deepcopy(TRANSFER_CONFIG)
        config.schedule_options.disable_auto_scheduling = True
        self.assertEqual(config, self.hook._disable_auto_scheduling(TRANSFER_CONFIG))

    @mock.patch(
        "airflow.providers.google.cloud.hooks.bigquery_dts."
        "DataTransferServiceClient.create_transfer_config"
    )
    def test_create_transfer_config(self, mock_create):
        self.hook.create_transfer_config(
            transfer_config=TRANSFER_CONFIG, project_id=PROJECT_ID
        )

        # The hook must disable auto scheduling before handing the config to the client.
        expected = deepcopy(TRANSFER_CONFIG)
        expected.schedule_options.disable_auto_scheduling = True
        mock_create.assert_called_once_with(
            parent=DataTransferServiceClient.project_path(PROJECT_ID),
            transfer_config=expected,
            authorization_code=None,
            metadata=None,
            retry=None,
            timeout=None,
        )

    @mock.patch(
        "airflow.providers.google.cloud.hooks.bigquery_dts."
        "DataTransferServiceClient.delete_transfer_config"
    )
    def test_delete_transfer_config(self, mock_delete):
        self.hook.delete_transfer_config(
            transfer_config_id=TRANSFER_CONFIG_ID, project_id=PROJECT_ID
        )

        expected_name = DataTransferServiceClient.project_transfer_config_path(
            PROJECT_ID, TRANSFER_CONFIG_ID
        )
        mock_delete.assert_called_once_with(
            name=expected_name, metadata=None, retry=None, timeout=None
        )

    @mock.patch(
        "airflow.providers.google.cloud.hooks.bigquery_dts."
        "DataTransferServiceClient.start_manual_transfer_runs"
    )
    def test_start_manual_transfer_runs(self, mock_start):
        self.hook.start_manual_transfer_runs(
            transfer_config_id=TRANSFER_CONFIG_ID, project_id=PROJECT_ID
        )

        expected_parent = DataTransferServiceClient.project_transfer_config_path(
            PROJECT_ID, TRANSFER_CONFIG_ID
        )
        mock_start.assert_called_once_with(
            parent=expected_parent,
            requested_time_range=None,
            requested_run_time=None,
            metadata=None,
            retry=None,
            timeout=None,
        )