def execute(self, context: 'Context'):
    """Submit a manual transfer run for the configured transfer config.

    Builds the DTS hook, triggers the manual run, persists a console link
    for the parent transfer config, pushes the new run id to XCom, and
    returns the API response serialized as a dict.
    """
    dts_hook = BiqQueryDataTransferServiceHook(
        gcp_conn_id=self.gcp_conn_id,
        impersonation_chain=self.impersonation_chain,
        location=self.location,
    )
    self.log.info('Submitting manual transfer for %s', self.transfer_config_id)
    transfer_response = dts_hook.start_manual_transfer_runs(
        transfer_config_id=self.transfer_config_id,
        requested_time_range=self.requested_time_range,
        requested_run_time=self.requested_run_time,
        project_id=self.project_id,
        retry=self.retry,
        timeout=self.timeout,
        metadata=self.metadata,
    )
    # The fully-qualified run name encodes project/region/config id; parse it
    # so the persisted link points at the correct transfer config.
    config_details = _get_transfer_config_details(transfer_response.runs[0].name)
    BigQueryDataTransferConfigLink.persist(
        context=context,
        task_instance=self,
        region=config_details["region"],
        config_id=config_details["config_id"],
        project_id=config_details["project_id"],
    )
    serialized_response = StartManualTransferRunsResponse.to_dict(transfer_response)
    submitted_run_id = get_object_id(serialized_response['runs'][0])
    self.xcom_push(context, key="run_id", value=submitted_run_id)
    self.log.info('Transfer run %s submitted successfully.', submitted_run_id)
    return serialized_response
def execute(self, context):
    """Trigger a manual transfer run and return the response as a dict.

    Also pushes the submitted run's id to XCom under the ``run_id`` key so
    downstream tasks (e.g. a run sensor) can pick it up.
    """
    dts_hook = BiqQueryDataTransferServiceHook(
        gcp_conn_id=self.gcp_conn_id,
        impersonation_chain=self.impersonation_chain,
    )
    self.log.info('Submitting manual transfer for %s', self.transfer_config_id)
    transfer_response = dts_hook.start_manual_transfer_runs(
        transfer_config_id=self.transfer_config_id,
        requested_time_range=self.requested_time_range,
        requested_run_time=self.requested_run_time,
        project_id=self.project_id,
        retry=self.retry,
        timeout=self.timeout,
        metadata=self.metadata,
    )
    serialized_response = StartManualTransferRunsResponse.to_dict(transfer_response)
    submitted_run_id = get_object_id(serialized_response['runs'][0])
    self.xcom_push(context, key="run_id", value=submitted_run_id)
    self.log.info('Transfer run %s submitted successfully.', submitted_run_id)
    return serialized_response
class BigQueryDataTransferHookTestCase(unittest.TestCase):
    """Unit tests for ``BiqQueryDataTransferServiceHook``.

    Each test patches the underlying ``DataTransferServiceClient`` method and
    verifies the hook forwards the expected arguments.
    """

    def setUp(self) -> None:
        # Replace the GCP base-hook __init__ so the hook can be constructed
        # without a real connection, and stub out credential resolution.
        with mock.patch(
            "airflow.providers.google.cloud.hooks.bigquery_dts.CloudBaseHook.__init__",
            new=mock_base_gcp_hook_no_default_project_id,
        ):
            self.hook = BiqQueryDataTransferServiceHook()
            self.hook._get_credentials = mock.MagicMock(  # type: ignore
                return_value=CREDENTIALS
            )

    def test_version_information(self):
        # The client info advertised to Google APIs must carry the Airflow version.
        self.assertEqual(
            "airflow_v" + version,
            self.hook.client_info.client_library_version,
        )

    def test_disable_auto_scheduling(self):
        # The hook must return a copy with auto-scheduling switched off.
        manual_config = deepcopy(TRANSFER_CONFIG)
        manual_config.schedule_options.disable_auto_scheduling = True
        self.assertEqual(
            manual_config,
            self.hook._disable_auto_scheduling(TRANSFER_CONFIG),
        )

    @mock.patch(
        "airflow.providers.google.cloud.hooks.bigquery_dts.DataTransferServiceClient.create_transfer_config"
    )
    def test_create_transfer_config(self, mocked_create):
        self.hook.create_transfer_config(
            transfer_config=TRANSFER_CONFIG, project_id=PROJECT_ID
        )
        # The hook always disables auto-scheduling before creating the config.
        expected_config = deepcopy(TRANSFER_CONFIG)
        expected_config.schedule_options.disable_auto_scheduling = True
        mocked_create.assert_called_once_with(
            parent=DataTransferServiceClient.project_path(PROJECT_ID),
            transfer_config=expected_config,
            authorization_code=None,
            metadata=None,
            retry=None,
            timeout=None,
        )

    @mock.patch(
        "airflow.providers.google.cloud.hooks.bigquery_dts.DataTransferServiceClient.delete_transfer_config"
    )
    def test_delete_transfer_config(self, mocked_delete):
        self.hook.delete_transfer_config(
            transfer_config_id=TRANSFER_CONFIG_ID, project_id=PROJECT_ID
        )
        expected_name = DataTransferServiceClient.project_transfer_config_path(
            PROJECT_ID, TRANSFER_CONFIG_ID
        )
        mocked_delete.assert_called_once_with(
            name=expected_name, metadata=None, retry=None, timeout=None
        )

    @mock.patch(
        "airflow.providers.google.cloud.hooks.bigquery_dts.DataTransferServiceClient.start_manual_transfer_runs"
    )
    def test_start_manual_transfer_runs(self, mocked_start):
        self.hook.start_manual_transfer_runs(
            transfer_config_id=TRANSFER_CONFIG_ID, project_id=PROJECT_ID
        )
        expected_parent = DataTransferServiceClient.project_transfer_config_path(
            PROJECT_ID, TRANSFER_CONFIG_ID
        )
        mocked_start.assert_called_once_with(
            parent=expected_parent,
            requested_time_range=None,
            requested_run_time=None,
            metadata=None,
            retry=None,
            timeout=None,
        )