def setUp(self):
    """Create GCS/BigQuery clients, seed a GCS input fixture, and reset the
    per-test datasets (one in the default location, one in the EU)."""
    self.bq_client = bigquery.BigQueryClient(CREDENTIALS)
    self.gcs_client = gcs.GCSClient(CREDENTIALS)

    # Ensure the EU-located test bucket exists; HTTP 409 means it already does.
    try:
        self.gcs_client.client.buckets().insert(
            project=PROJECT_ID,
            body={'name': BUCKET_NAME, 'location': EU_LOCATION}).execute()
    except googleapiclient.errors.HttpError as ex:
        # todo verify that existing dataset is not US
        if ex.resp.status != 409:  # bucket already exists
            raise

    # Start from an empty bucket, then upload two newline-delimited JSON
    # records as this test's input file.
    self.gcs_client.remove(bucket_url(''), recursive=True)
    self.gcs_client.mkdir(bucket_url(''))
    records = [
        {'field1': 'hi', 'field2': 1},
        {'field1': 'bye', 'field2': 2},
    ]
    fixture_text = '\n'.join(map(json.dumps, records))
    self.gcs_file = bucket_url(self.id())
    self.gcs_client.put_string(fixture_text, self.gcs_file)

    # One table per test in each of the default and EU datasets; table ids are
    # derived from the test id so tests do not collide.
    test_name = self.id().split('.')[-1]
    self.table = bigquery.BQTable(project_id=PROJECT_ID,
                                  dataset_id=DATASET_ID,
                                  table_id=test_name,
                                  location=None)
    self.table_eu = bigquery.BQTable(project_id=PROJECT_ID,
                                     dataset_id=EU_DATASET_ID,
                                     table_id=test_name + '_eu',
                                     location=EU_LOCATION)

    # Register teardown first, then recreate both datasets fresh for the test.
    self.addCleanup(self.gcs_client.remove, bucket_url(''), recursive=True)
    self.addCleanup(self.bq_client.delete_dataset, self.table.dataset)
    self.addCleanup(self.bq_client.delete_dataset, self.table_eu.dataset)

    self.bq_client.delete_dataset(self.table.dataset)
    self.bq_client.delete_dataset(self.table_eu.dataset)
    self.bq_client.make_dataset(self.table.dataset, body={})
    self.bq_client.make_dataset(self.table_eu.dataset, body={})
def setUp(self):
    """Create a BigQuery client and a per-test table reference, registering
    deletion of that table as cleanup."""
    super(BigqueryTest, self).setUp()
    self.bq_client = bigquery.BigqueryClient(gcs_test.CREDENTIALS)
    # Derive the table id from the test id so each test gets its own table.
    table_name = self.id().split('.')[-1]
    self.table = bigquery.BQTable(project_id=PROJECT_ID,
                                  dataset_id=DATASET_ID,
                                  table_id=table_name)
    self.addCleanup(self.bq_client.delete_table, self.table)
def source_table(self):
    """Return a BQTable reference describing this test's source table."""
    # NOTE(review): self.table is passed as the table id here — confirm it is
    # a plain string in this class (in other fixtures self.table is itself a
    # BQTable).
    return bigquery.BQTable(project_id=PROJECT_ID,
                            dataset_id=DATASET_ID,
                            table_id=self.table)