def test_table_exists_false(mock_bigquery_client):
    """exists() reports False when the backend raises NotFound for the table."""
    table = gbq._Table("my-project", "my_dataset")
    table.client = mock_bigquery_client
    # Simulate a missing table: get_table raises 404.
    mock_bigquery_client.get_table.side_effect = google.api_core.exceptions.NotFound(
        "nope"
    )
    assert not table.exists("not_exists")
def test_table_exists_translates_exception(mock_bigquery_client):
    """A non-NotFound API error from get_table surfaces as GenericGBQException."""
    table = gbq._Table("my-project", "my_dataset")
    table.client = mock_bigquery_client
    # A 500 from the backend must not be swallowed like a 404 would be.
    server_error = google.api_core.exceptions.InternalServerError(
        "something went wrong"
    )
    mock_bigquery_client.get_table.side_effect = server_error
    with pytest.raises(gbq.GenericGBQException):
        table.exists("not_gonna_work")
def test_table_delete_notfound_ok(mock_bigquery_client):
    """Deleting an already-absent table is a no-op, not an error."""
    table = gbq._Table("my-project", "my_dataset")
    table.client = mock_bigquery_client
    # delete_table raising NotFound should be tolerated silently.
    mock_bigquery_client.delete_table.side_effect = google.api_core.exceptions.NotFound(
        "nope"
    )
    table.delete("not_exists")
    mock_bigquery_client.delete_table.assert_called_once()
def test_table_create_already_exists(mock_bigquery_client):
    """create() refuses to overwrite a table that already exists."""
    table = gbq._Table("my-project", "my_dataset")
    table.client = mock_bigquery_client
    # Any non-exception return from get_table means the table exists.
    mock_bigquery_client.get_table.return_value = object()
    schema = {"fields": [{"name": "f", "type": "STRING"}]}
    with pytest.raises(gbq.TableCreationError):
        table.create("already_exists", schema)
def setup(self, project, credentials, random_dataset_id):
    # - PER-TEST FIXTURES -
    # Everything here runs *before every* test: build a fresh connector,
    # BigQuery client, table helper, and destination-table id.
    self.credentials = credentials
    self.gbq_connector = gbq.GbqConnector(project, credentials=credentials)
    self.bqclient = self.gbq_connector.client
    self.table = gbq._Table(
        project, random_dataset_id, credentials=credentials
    )
    self.destination_table = f"{random_dataset_id}.{TABLE_ID}"
def test_create_table_data_dataset_does_not_exist(
    project, credentials, gbq_dataset, random_dataset_id
):
    """Creating a table in a not-yet-existing dataset creates both."""
    table_id = "test_create_table_data_dataset_does_not_exist"
    new_table = gbq._Table(
        project, random_dataset_id, credentials=credentials
    )
    frame = make_mixed_dataframe_v2(10)
    new_table.create(table_id, gbq._generate_bq_schema(frame))
    # Both the dataset and the table must now exist.
    assert gbq_dataset.exists(random_dataset_id)
    assert new_table.exists(table_id)
def test_table_create_translates_exception(mock_bigquery_client):
    """A server error during create_table surfaces as GenericGBQException."""
    table = gbq._Table("my-project", "my_dataset")
    table.client = mock_bigquery_client
    # get_table must 404 first so create() proceeds to create_table.
    mock_bigquery_client.get_table.side_effect = google.api_core.exceptions.NotFound(
        "nope"
    )
    mock_bigquery_client.create_table.side_effect = (
        google.api_core.exceptions.InternalServerError("something went wrong")
    )
    schema = {"fields": [{"name": "f", "type": "STRING"}]}
    with pytest.raises(gbq.GenericGBQException):
        table.create("not_gonna_work", schema)
def test_table_exists_true(mock_bigquery_client):
    """exists() reports True when get_table succeeds."""
    table = gbq._Table("my-project", "my_dataset")
    table.client = mock_bigquery_client
    # Any successful (non-raising) get_table result counts as existing.
    mock_bigquery_client.get_table.return_value = object()
    assert table.exists("yes_exists")
def gbq_table(project, credentials, random_dataset_id):
    """Build a _Table helper bound to the per-test random dataset."""
    from pandas_gbq import gbq

    table = gbq._Table(project, random_dataset_id, credentials=credentials)
    return table