def test_create_clustered_empty_table(self, mock_hook):
    """Operator with partitioning + clustering forwards both to the hook.

    Builds a BigQueryCreateEmptyTableOperator with an explicit schema,
    day-partitioning on ``date_hired`` and clustering on ``date_birth``,
    executes it, and verifies the hook's ``create_empty_table`` received
    exactly those settings (with all optional args left at their defaults).
    """
    fields = [
        {"name": "emp_name", "type": "STRING", "mode": "REQUIRED"},
        {"name": "date_hired", "type": "DATE", "mode": "REQUIRED"},
        {"name": "date_birth", "type": "DATE", "mode": "NULLABLE"},
    ]
    partitioning = {"type": "DAY", "field": "date_hired"}
    clustering = ["date_birth"]

    op = BigQueryCreateEmptyTableOperator(
        task_id=TASK_ID,
        dataset_id=TEST_DATASET,
        project_id=TEST_GCP_PROJECT_ID,
        table_id=TEST_TABLE_ID,
        schema_fields=fields,
        time_partitioning=partitioning,
        cluster_fields=clustering,
    )
    op.execute(None)

    # The mocked hook must have been asked to create the table exactly once,
    # with the partitioning/clustering passed through unchanged.
    mock_hook.return_value.create_empty_table.assert_called_once_with(
        dataset_id=TEST_DATASET,
        project_id=TEST_GCP_PROJECT_ID,
        table_id=TEST_TABLE_ID,
        schema_fields=fields,
        time_partitioning=partitioning,
        cluster_fields=clustering,
        labels=None,
        view=None,
        encryption_configuration=None,
        table_resource=None,
        exists_ok=False,
    )
def bq_create(tablename, bqdataset, **kwargs):
    """Create an empty BigQuery table from a schema stored in XCom.

    Args:
        tablename: Dotted name ``"<schema>.<table>"``; only the second
            component is used as the BigQuery table id.
        bqdataset: Target BigQuery dataset id.
        **kwargs: Airflow context; must contain ``ti`` (the task instance)
            so the schema can be pulled from the ``getschema_<tablename>``
            task's XCom.

    Note:
        Relies on module-level ``gcp_project`` and ``bq_conn`` — confirm
        these are defined where this helper is deployed.
    """
    # Split once; the original computed split('.') twice and kept an
    # unused "table_schema" local.
    table_name = tablename.split('.')[1]
    ti = kwargs['ti']
    # Schema was pushed as a string by the upstream getschema_* task.
    schema = json.loads(
        str(ti.xcom_pull(task_ids='getschema_{}'.format(tablename))))
    create = BigQueryCreateEmptyTableOperator(
        task_id='create_bq_{}'.format(tablename),
        project_id=gcp_project,
        dataset_id=bqdataset,
        table_id=table_name,
        schema_fields=schema,
        bigquery_conn_id=bq_conn)
    print(schema)
    create.execute(context=kwargs)
def test_execute(self, mock_hook):
    """Bare operator execution passes defaults through to the hook.

    Constructs the operator with only the required identifiers, runs it,
    and checks that ``create_empty_table`` was called once with every
    optional argument at its documented default.
    """
    op = BigQueryCreateEmptyTableOperator(
        task_id=TASK_ID,
        dataset_id=TEST_DATASET,
        project_id=TEST_GCP_PROJECT_ID,
        table_id=TEST_TABLE_ID,
    )
    op.execute(None)

    mock_hook.return_value.create_empty_table.assert_called_once_with(
        dataset_id=TEST_DATASET,
        project_id=TEST_GCP_PROJECT_ID,
        table_id=TEST_TABLE_ID,
        schema_fields=None,
        time_partitioning={},
        cluster_fields=None,
        labels=None,
        view=None,
        encryption_configuration=None,
        table_resource=None,
        exists_ok=False,
    )