Example #1
    # Imports required to run this excerpt on its own.
    from copy import copy
    from typing import Union

    from google.cloud.bigquery_datatransfer_v1 import TransferConfig

    def _disable_auto_scheduling(
            config: Union[dict, TransferConfig]) -> TransferConfig:
        """
        In the case of Airflow, the customer needs to create a transfer config
        with the automatic scheduling disabled (UI, CLI or an Airflow operator) and
        then trigger a transfer run using a specialized Airflow operator that will
        call start_manual_transfer_runs.

        :param config: Data transfer configuration to create.
        :type config: Union[dict, google.cloud.bigquery_datatransfer_v1.types.TransferConfig]
        """
        config = TransferConfig.to_dict(config) if isinstance(
            config, TransferConfig) else config
        new_config = copy(config)
        schedule_options = new_config.get("schedule_options")
        if schedule_options:
            disable_auto_scheduling = schedule_options.get(
                "disable_auto_scheduling", None)
            if disable_auto_scheduling is None:
                schedule_options["disable_auto_scheduling"] = True
        else:
            new_config["schedule_options"] = {"disable_auto_scheduling": True}
        # HACK: TransferConfig.to_dict returns invalid representation
        # See: https://github.com/googleapis/python-bigquery-datatransfer/issues/90
        if isinstance(new_config.get('user_id'), str):
            new_config['user_id'] = int(new_config['user_id'])
        return TransferConfig(**new_config)
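
A minimal usage sketch of the helper above, assuming the imports shown and a proto-plus TransferConfig; the field values are placeholders:

    config = {
        "destination_dataset_id": "my_dataset",
        "display_name": "GCS Transfer",
        "data_source_id": "google_cloud_storage",
    }

    result = _disable_auto_scheduling(config)

    # schedule_options was absent, so the helper injects it with
    # automatic scheduling turned off.
    assert result.schedule_options.disable_auto_scheduling is True

    # The caller's dict is copied first, so it is left untouched.
    assert "schedule_options" not in config
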
Example #2
    # Imports required to run this excerpt on its own.
    from copy import copy
    from typing import Union

    from google.cloud.bigquery_datatransfer_v1 import TransferConfig

    def _disable_auto_scheduling(
            config: Union[dict, TransferConfig]) -> TransferConfig:
        """
        In the case of Airflow, the customer needs to create a transfer config
        with the automatic scheduling disabled (UI, CLI or an Airflow operator) and
        then trigger a transfer run using a specialized Airflow operator that will
        call start_manual_transfer_runs.

        :param config: Data transfer configuration to create.
        """
        config = TransferConfig.to_dict(config) if isinstance(
            config, TransferConfig) else config
        new_config = copy(config)
        schedule_options = new_config.get("schedule_options")
        if schedule_options:
            disable_auto_scheduling = schedule_options.get(
                "disable_auto_scheduling", None)
            if disable_auto_scheduling is None:
                schedule_options["disable_auto_scheduling"] = True
        else:
            new_config["schedule_options"] = {"disable_auto_scheduling": True}

        return TransferConfig(**new_config)
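
One behavioral detail of this variant, shown as a hedged sketch under the same assumptions as above: the helper only fills in disable_auto_scheduling when the key is missing, so an explicit False from the caller survives:

    config = {
        "display_name": "GCS Transfer",
        "data_source_id": "google_cloud_storage",
        "schedule_options": {"disable_auto_scheduling": False},
    }

    result = _disable_auto_scheduling(config)

    # The existing (non-None) value is respected rather than overridden.
    assert result.schedule_options.disable_auto_scheduling is False
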
Example #3
    "field_delimiter": ",",
    "max_bad_records": "0",
    "skip_leading_rows": "1",
    "data_path_template": "bucket",
    "destination_table_name_template": "name",
    "file_format": "CSV",
}

TRANSFER_CONFIG = ParseDict(
    {
        "destination_dataset_id": "dataset",
        "display_name": "GCS Test Config",
        "data_source_id": "google_cloud_storage",
        "params": PARAMS,
    },
    TransferConfig(),
)

TRANSFER_CONFIG_ID = "id1234"


class BigQueryDataTransferHookTestCase(unittest.TestCase):
    def setUp(self) -> None:
        with mock.patch(
            "airflow.providers.google.cloud.hooks.bigquery_dts.CloudBaseHook.__init__",
            new=mock_base_gcp_hook_no_default_project_id,
        ):
            self.hook = BiqQueryDataTransferServiceHook()
            self.hook._get_credentials = mock.MagicMock(  # type: ignore
                return_value=CREDENTIALS
            )
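
For context, ParseDict above comes from google.protobuf.json_format: it populates the supplied protobuf message from a plain dict and returns that same message. A short sketch, assuming the protobuf-based TransferConfig used by this test module:

    from google.protobuf.json_format import ParseDict

    msg = ParseDict(
        {"display_name": "GCS Test Config",
         "data_source_id": "google_cloud_storage"},
        TransferConfig(),
    )
    # ParseDict returns the same message instance it populated.
    assert msg.display_name == "GCS Test Config"
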
Example #4
CREDENTIALS = "test-creds"
PROJECT_ID = "id"

PARAMS = {
    "field_delimiter": ",",
    "max_bad_records": 0,
    "skip_leading_rows": 1,
    "data_path_template": "bucket",
    "destination_table_name_template": "name",
    "file_format": "CSV",
}

TRANSFER_CONFIG = TransferConfig(
    destination_dataset_id="dataset",
    display_name="GCS Test Config",
    data_source_id="google_cloud_storage",
    params=PARAMS,
)

TRANSFER_CONFIG_ID = "id1234"


class BigQueryDataTransferHookTestCase(unittest.TestCase):
    def setUp(self) -> None:
        with mock.patch(
                "airflow.providers.google.cloud.hooks.bigquery_dts.GoogleBaseHook.__init__",
                new=mock_base_gcp_hook_no_default_project_id,
        ):
            self.hook = BiqQueryDataTransferServiceHook()
            self.hook._get_credentials = mock.MagicMock(
                return_value=CREDENTIALS)  # type: ignore
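
The setUp above patches the base hook's __init__ before constructing the hook, so the test never touches real GCP credentials. A self-contained sketch of the same idea with a hypothetical Hook class (illustrative names, not the Airflow API):

    from unittest import mock

    class Hook:
        def __init__(self):
            raise RuntimeError("would contact GCP here")

    def _noop_init(self, *args, **kwargs):
        # Stand-in for mock_base_gcp_hook_no_default_project_id:
        # skip the real base-class initialisation entirely.
        pass

    with mock.patch.object(Hook, "__init__", new=_noop_init):
        hook = Hook()  # constructed without the real __init__ running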