コード例 #1
0
    def test_execute(self, mock_hook):
        """Operator should delegate to the hook, masking only the supplied fields.

        ``mock_hook`` is injected by the ``mock.patch`` decorator on this test
        (outside this view) and stands in for the BigQuery hook class.
        """
        resource = {"friendlyName": 'Test DS'}

        op = BigQueryUpdateDatasetOperator(
            task_id=TASK_ID,
            dataset_id=TEST_DATASET,
            project_id=TEST_GCP_PROJECT_ID,
            dataset_resource=resource,
        )
        op.execute(None)

        # The update field mask must cover exactly the keys present in the resource.
        mock_hook.return_value.update_dataset.assert_called_once_with(
            dataset_resource=resource,
            dataset_id=TEST_DATASET,
            project_id=TEST_GCP_PROJECT_ID,
            fields=list(resource.keys()),
        )
コード例 #2
0
ファイル: example_bigquery.py プロジェクト: stu12345/airflow
        task_id="get_dataset_result",
        bash_command=
        "echo \"{{ task_instance.xcom_pull('get-dataset')['id'] }}\"",
    )

    # Patch: only the fields listed in the resource are modified on the dataset.
    patch_dataset = BigQueryPatchDatasetOperator(
        dataset_id=DATASET_NAME,
        dataset_resource={"friendlyName": "Patched Dataset", "description": "Patched dataset"},
        task_id="patch_dataset",
    )

    # Update: replaces the dataset fields named in the resource.
    update_dataset = BigQueryUpdateDatasetOperator(
        dataset_id=DATASET_NAME,
        dataset_resource={"description": "Updated dataset"},
        task_id="update_dataset",
    )

    # Tear-down: drop the dataset together with any tables it still contains.
    delete_dataset = BigQueryDeleteDatasetOperator(
        dataset_id=DATASET_NAME,
        delete_contents=True,
        task_id="delete_dataset",
    )

    # Same deletion flow for the location-pinned dataset.
    delete_dataset_with_location = BigQueryDeleteDatasetOperator(
        dataset_id=LOCATION_DATASET_NAME,
        delete_contents=True,
        task_id="delete_dataset_with_location",
    )

    # Task ordering: every branch fans out from create_dataset and converges
    # on delete_dataset. Each statement below adds exactly one dependency edge;
    # the resulting edge set is identical to chaining the branches in one line.
    create_dataset >> execute_query_save
    create_dataset >> get_empty_dataset_tables
    create_dataset >> get_dataset
    create_dataset >> patch_dataset
    get_empty_dataset_tables >> create_table
    create_table >> get_dataset_tables
    patch_dataset >> update_dataset
    execute_query_save >> delete_dataset
    get_dataset_tables >> delete_dataset
    get_dataset >> delete_dataset
    update_dataset >> delete_dataset