def test_execute(self, mock_hook):
    """Executing the operator with only the required args should create an
    empty table through the hook's cursor, with every optional setting left
    at its default (no schema, no labels, empty partitioning, no CMEK)."""
    operator = BigQueryCreateEmptyTableOperator(
        task_id=TASK_ID,
        dataset_id=TEST_DATASET,
        project_id=TEST_GCP_PROJECT_ID,
        table_id=TEST_TABLE_ID,
    )
    operator.execute(None)
    # The operator reaches the API via hook -> connection -> cursor.
    cursor = mock_hook.return_value.get_conn.return_value.cursor.return_value
    cursor.create_empty_table.assert_called_once_with(
        dataset_id=TEST_DATASET,
        project_id=TEST_GCP_PROJECT_ID,
        table_id=TEST_TABLE_ID,
        schema_fields=None,
        time_partitioning={},
        labels=None,
        encryption_configuration=None,
    )
def test_create_clustered_empty_table(self, mock_hook):
    """A schema plus day-partitioning and clustering options should be
    forwarded verbatim to the hook's ``create_empty_table`` call."""
    fields = [
        {"name": "emp_name", "type": "STRING", "mode": "REQUIRED"},
        {"name": "date_hired", "type": "DATE", "mode": "REQUIRED"},
        {"name": "date_birth", "type": "DATE", "mode": "NULLABLE"},
    ]
    partitioning = {"type": "DAY", "field": "date_hired"}
    clustering = ["date_birth"]

    operator = BigQueryCreateEmptyTableOperator(
        task_id=TASK_ID,
        dataset_id=TEST_DATASET,
        project_id=TEST_GCP_PROJECT_ID,
        table_id=TEST_TABLE_ID,
        schema_fields=fields,
        time_partitioning=partitioning,
        cluster_fields=clustering,
    )
    operator.execute(None)

    mock_hook.return_value.create_empty_table.assert_called_once_with(
        dataset_id=TEST_DATASET,
        project_id=TEST_GCP_PROJECT_ID,
        table_id=TEST_TABLE_ID,
        schema_fields=fields,
        time_partitioning=partitioning,
        cluster_fields=clustering,
        labels=None,
        view=None,
        encryption_configuration=None,
    )
task_id="create-dataset", dataset_id=DATASET_NAME) create_dataset_with_location = BigQueryCreateEmptyDatasetOperator( task_id="create_dataset_with_location", dataset_id=LOCATION_DATASET_NAME, location=BQ_LOCATION) create_table = BigQueryCreateEmptyTableOperator( task_id="create_table", dataset_id=DATASET_NAME, table_id="test_table", schema_fields=[ { "name": "emp_name", "type": "STRING", "mode": "REQUIRED" }, { "name": "salary", "type": "INTEGER", "mode": "NULLABLE" }, ], ) create_table_with_location = BigQueryCreateEmptyTableOperator( task_id="create_table_with_location", dataset_id=LOCATION_DATASET_NAME, table_id="test_table", schema_fields=[ {
destination_cloud_storage_uris=["gs://{}/export-bigquery.csv".format(DATA_EXPORT_BUCKET_NAME)], ) create_dataset = BigQueryCreateEmptyDatasetOperator(task_id="create-dataset", dataset_id=DATASET_NAME) create_dataset_with_location = BigQueryCreateEmptyDatasetOperator( task_id="create_dataset_with_location", dataset_id=LOCATION_DATASET_NAME, location=BQ_LOCATION ) create_table = BigQueryCreateEmptyTableOperator( task_id="create-table", dataset_id=DATASET_NAME, table_id="test_table", schema_fields=[ {"name": "emp_name", "type": "STRING", "mode": "REQUIRED"}, {"name": "salary", "type": "INTEGER", "mode": "NULLABLE"}, ], ) create_table_with_location = BigQueryCreateEmptyTableOperator( task_id="create_table_with_location", dataset_id=LOCATION_DATASET_NAME, table_id="test_table", schema_fields=[ {"name": "emp_name", "type": "STRING", "mode": "REQUIRED"}, {"name": "salary", "type": "INTEGER", "mode": "NULLABLE"}, ], )