Code example #1
    # `mock_hook` is the mocked BigQueryHook injected by a `mock.patch`
    # decorator on this method (see the sketch after this example).
    def test_execute(self, mock_hook):
        operator = BigQueryCreateEmptyDatasetOperator(
            task_id=TASK_ID,
            dataset_id=TEST_DATASET,
            project_id=TEST_GCP_PROJECT_ID,
            location=TEST_DATASET_LOCATION,
        )

        operator.execute(None)
        # The operator should delegate to the hook exactly once, passing its
        # arguments through along with an empty dataset reference.
        mock_hook.return_value.create_empty_dataset.assert_called_once_with(
            dataset_id=TEST_DATASET,
            project_id=TEST_GCP_PROJECT_ID,
            location=TEST_DATASET_LOCATION,
            dataset_reference={},
        )
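
This snippet comes from Airflow's unit tests and omits its surrounding scaffolding: the imports, the test constants, and the `mock.patch` decorator that supplies `mock_hook`. Below is a minimal sketch of that context; the constant values and the patch target are assumptions for illustration, and the exact module paths vary across Airflow versions.

from unittest import mock

# Import path assumed for recent Airflow releases; older versions expose the
# operator under airflow.contrib.operators.bigquery_operator instead.
from airflow.providers.google.cloud.operators.bigquery import (
    BigQueryCreateEmptyDatasetOperator,
)

# Illustrative test constants; the real values are defined elsewhere in the test module.
TASK_ID = "test-bq-create-empty-dataset-operator"
TEST_DATASET = "test-dataset"
TEST_GCP_PROJECT_ID = "test-project"
TEST_DATASET_LOCATION = "EU"


class TestBigQueryCreateEmptyDatasetOperator:
    # Patch target assumed: it must point at BigQueryHook as imported by the
    # operator module, so that `mock_hook` replaces the hook used in execute().
    @mock.patch("airflow.providers.google.cloud.operators.bigquery.BigQueryHook")
    def test_execute(self, mock_hook):
        ...  # body as shown in the example above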
Code example #2
File: example_bigquery.py  Project: momer/airflow
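
The snippet below is taken from the middle of the example DAG: it opens partway through the argument list of a BigQuery-to-BigQuery copy task, and the names it relies on (DATASET_NAME, BQ_LOCATION, LOCATION_DATASET_NAME, DATA_EXPORT_BUCKET_NAME) are defined earlier in the file, inside a DAG context. A minimal sketch of that assumed context follows; the values and import paths are illustrative, not taken from the source.

import os

from airflow import models
from airflow.utils.dates import days_ago

# Import paths assumed for the Google provider layout; they differ between
# Airflow releases, so adjust them to the version in use.
from airflow.providers.google.cloud.operators.bigquery import (
    BigQueryCreateEmptyDatasetOperator,
    BigQueryCreateEmptyTableOperator,
)
from airflow.providers.google.cloud.operators.bigquery_to_gcs import BigQueryToGCSOperator

# Illustrative values; the real example derives them from environment variables.
DATASET_NAME = os.environ.get("GCP_BIGQUERY_DATASET_NAME", "test_dataset")
LOCATION_DATASET_NAME = "{}_location".format(DATASET_NAME)
BQ_LOCATION = "europe-north1"
DATA_EXPORT_BUCKET_NAME = os.environ.get("GCP_BIGQUERY_EXPORT_BUCKET_NAME", "test-bucket")

with models.DAG(
    "example_bigquery",
    start_date=days_ago(1),
    schedule_interval=None,
    tags=["example"],
) as dag:
    ...  # the operators shown below are defined inside this DAG block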
        source_project_dataset_tables="{}.selected_data_from_external_table".format(DATASET_NAME),
        destination_project_dataset_table="{}.copy_of_selected_data_from_external_table".format(DATASET_NAME),
    )

    bigquery_to_gcs = BigQueryToGCSOperator(
        task_id="bigquery_to_gcs",
        source_project_dataset_table="{}.selected_data_from_external_table".format(DATASET_NAME),
        destination_cloud_storage_uris=[
            "gs://{}/export-bigquery.csv".format(DATA_EXPORT_BUCKET_NAME)
        ],
    )

    create_dataset = BigQueryCreateEmptyDatasetOperator(
        task_id="create-dataset", dataset_id=DATASET_NAME)

    create_dataset_with_location = BigQueryCreateEmptyDatasetOperator(
        task_id="create_dataset_with_location",
        dataset_id=LOCATION_DATASET_NAME,
        location=BQ_LOCATION)

    create_table = BigQueryCreateEmptyTableOperator(
        task_id="create_table",
        dataset_id=DATASET_NAME,
        table_id="test_table",
        schema_fields=[
            {
                "name": "emp_name",
                "type": "STRING",
                "mode": "REQUIRED"