def setUp(self):
    """Per-test fixture: build fresh dataset/table helpers before every test."""
    project = PROJECT_ID
    self.dataset = gbq._Dataset(project)
    self.table = gbq._Table(project, DATASET_ID + "1")
def create_table_data_dataset_does_not_exist(self):
    """Creating a table in a not-yet-existing dataset should succeed.

    Points a ``gbq._Table`` at a dataset id that has not been created,
    creates a table there from a generated schema, then verifies that
    both the dataset and the table now exist.
    """
    dataset_id = DATASET_ID + "6"
    table_id = TABLE_ID + "1"
    table_with_new_dataset = gbq._Table(PROJECT_ID, dataset_id)
    df = make_mixed_dataframe_v2(10)
    # Creating the table implicitly creates the enclosing dataset.
    table_with_new_dataset.create(table_id, gbq._generate_bq_schema(df))
    self.assertTrue(self.dataset.exists(dataset_id),
                    'Expected dataset to exist')
    # Fixed copy-paste bug: this assertion checks the *table*, but the
    # failure message previously read 'Expected dataset to exist'.
    self.assertTrue(table_with_new_dataset.exists(table_id),
                    'Expected table to exist')
def setUp(self):
    """Per-test fixture: dataset/table helpers authenticated via key file."""
    project = _get_project_id()
    key = _get_private_key_path()
    self.dataset = gbq._Dataset(project, private_key=key)
    self.table = gbq._Table(project, DATASET_ID + "1", private_key=key)
def clean_gbq_environment(private_key=None):
    """Remove leftover test datasets (DATASET_ID 1-9) and their tables (TABLE_ID 1-19).

    Parameters
    ----------
    private_key : optional
        Service-account key passed through to the gbq connectors.
    """
    dataset = gbq._Dataset(PROJECT_ID, private_key=private_key)
    for i in range(1, 10):
        dataset_id = DATASET_ID + str(i)
        # Skip suffixes that were never created by a previous run.
        if dataset_id not in dataset.datasets():
            continue
        table = gbq._Table(PROJECT_ID, dataset_id, private_key=private_key)
        for j in range(1, 20):
            table_id = TABLE_ID + str(j)
            if table_id in dataset.tables(dataset_id):
                table.delete(table_id)
        dataset.delete(dataset_id)
def setUp(self):
    """Executed before every test: attach dataset and table accessors."""
    first_dataset = DATASET_ID + "1"
    self.dataset = gbq._Dataset(PROJECT_ID)
    self.table = gbq._Table(PROJECT_ID, first_dataset)