def test_bq_list_datasets_1():
    """Smoke test: listing BigQuery datasets for the project must not raise.

    NOTE(review): a later definition with this exact name exists in the file;
    it shadows this one, so pytest collects only the last — confirm which
    copy is intended and remove the other.
    """
    token = googlecloud.gcloud_connect(
        GC_SERVICE_ACCOUNT_EMAIL,
        GC_SECRET_KEY_PATH,
        "https://www.googleapis.com/auth/bigquery",
    )
    service = build("bigquery", "v2", http=token)
    googlecloud.list_datasets(service, BQ_PROJECT)
    assert True
def test_bq_query_1():
    """Row-count query against the test table should find at least one row.

    Does nothing unless ``run_bq_test`` is enabled.

    NOTE(review): a later definition with this exact name exists in the file
    and shadows this one — only the last copy is collected by pytest.
    """
    if not run_bq_test:
        return
    token = googlecloud.gcloud_connect(
        GC_SERVICE_ACCOUNT_EMAIL,
        GC_SECRET_KEY_PATH,
        "https://www.googleapis.com/auth/bigquery",
    )
    service = build("bigquery", "v2", http=token)
    row_count = bq_query(service, BQ_PROJECT, BQ_DATASET_PROD, "test_table")
    assert row_count > 0
def test_bq_export_table_1():
    """Smoke test: export the test table to Cloud Storage as uncompressed CSV.

    NOTE(review): a later definition with this exact name exists in the file
    and shadows this one — only the last copy is collected by pytest.
    """
    token = googlecloud.gcloud_connect(
        GC_SERVICE_ACCOUNT_EMAIL,
        GC_SECRET_KEY_PATH,
        "https://www.googleapis.com/auth/bigquery",
    )
    service = build("bigquery", "v2", http=token)
    export_targets = ["gs://" + GS_BUCKET + "/test_table_export.csv"]
    googlecloud.export_table(
        service,
        BQ_PROJECT,
        BQ_DATASET_PROD,
        "test_table",
        export_targets,
        compress=False,
        delimiter=",",
        print_header=True,
    )
    assert True
def test_bq_load_table_from_json_1():
    """Smoke test: load JSON fixture rows into the test table (no overwrite).

    NOTE(review): a later definition with this exact name exists in the file
    and shadows this one — only the last copy is collected by pytest.
    """
    token = googlecloud.gcloud_connect(
        GC_SERVICE_ACCOUNT_EMAIL,
        GC_SECRET_KEY_PATH,
        "https://www.googleapis.com/auth/bigquery",
    )
    service = build("bigquery", "v2", http=token)
    schema = [
        {"type": "STRING", "name": "User"},
        {"type": "INTEGER", "name": "Score"},
    ]
    googlecloud.load_table_from_json(
        service,
        BQ_PROJECT,
        BQ_DATASET_PROD,
        "test_table",
        BQ_TEST_DATA_PATH + "test_table_data.json",
        field_list=schema,
        overwrite=False,
    )
    assert True
def test_gc_upload_2():
    """Smoke test: upload the JSON test fixture to Cloud Storage.

    Does nothing unless ``run_bq_test`` is enabled.

    NOTE(review): a later definition with this exact name exists in the file
    and shadows this one — only the last copy is collected by pytest.
    """
    if run_bq_test:
        gs_token = googlecloud.gcloud_connect(
            GC_SERVICE_ACCOUNT_EMAIL,
            GC_SECRET_KEY_PATH,
            "https://www.googleapis.com/auth/devstorage.read_write",
        )
        gsservice = build("storage", "v1", http=gs_token)
        googlecloud.cloudstorage_upload(
            gsservice,
            BQ_PROJECT,
            GS_BUCKET,
            LOCAL_TEST_DATA_PATH + "test_table_data.json",
            GS_PATH + "test_table_data.json",
            show_status_messages=False,
        )
    # Fix: every sibling smoke test ends with an explicit assertion; this one
    # had none, so it passed silently even when skipped. Make that explicit.
    assert True
def test_bq_query_1():
    """Count rows in the BigQuery test table; the table must be non-empty.

    Skipped (silently) when ``run_bq_test`` is disabled.
    """
    if run_bq_test:
        credentials = googlecloud.gcloud_connect(
            GC_SERVICE_ACCOUNT_EMAIL,
            GC_SECRET_KEY_PATH,
            "https://www.googleapis.com/auth/bigquery",
        )
        bq = build("bigquery", "v2", http=credentials)
        result = bq_query(bq, BQ_PROJECT, BQ_DATASET_PROD, "test_table")
        assert result > 0
def test_gc_upload_2():
    """Upload the local JSON fixture to the test GCS bucket (smoke test).

    Skipped (silently) when ``run_bq_test`` is disabled.
    """
    if run_bq_test:
        credentials = googlecloud.gcloud_connect(
            GC_SERVICE_ACCOUNT_EMAIL,
            GC_SECRET_KEY_PATH,
            "https://www.googleapis.com/auth/devstorage.read_write",
        )
        storage = build("storage", "v1", http=credentials)
        googlecloud.cloudstorage_upload(
            storage,
            BQ_PROJECT,
            GS_BUCKET,
            LOCAL_TEST_DATA_PATH + "test_table_data.json",
            GS_PATH + "test_table_data.json",
            show_status_messages=False,
        )
    # Fix: sibling smoke tests all end with an explicit assertion; this one
    # had none, so it passed silently even when skipped. Make that explicit.
    assert True
def test_bq_load_table_from_json_1():
    """Append rows from the JSON fixture into the BigQuery test table."""
    credentials = googlecloud.gcloud_connect(
        GC_SERVICE_ACCOUNT_EMAIL,
        GC_SECRET_KEY_PATH,
        "https://www.googleapis.com/auth/bigquery",
    )
    bq = build("bigquery", "v2", http=credentials)
    # Two-column schema matching the fixture file.
    columns = [
        {"type": "STRING", "name": "User"},
        {"type": "INTEGER", "name": "Score"},
    ]
    googlecloud.load_table_from_json(
        bq,
        BQ_PROJECT,
        BQ_DATASET_PROD,
        "test_table",
        BQ_TEST_DATA_PATH + "test_table_data.json",
        field_list=columns,
        overwrite=False,
    )
    assert True
def test_bq_export_table_1():
    """Export the BigQuery test table as a comma-delimited CSV with header."""
    credentials = googlecloud.gcloud_connect(
        GC_SERVICE_ACCOUNT_EMAIL,
        GC_SECRET_KEY_PATH,
        "https://www.googleapis.com/auth/bigquery",
    )
    bq = build("bigquery", "v2", http=credentials)
    uris = ["gs://" + GS_BUCKET + "/test_table_export.csv"]
    googlecloud.export_table(
        bq,
        BQ_PROJECT,
        BQ_DATASET_PROD,
        "test_table",
        uris,
        compress=False,
        delimiter=",",
        print_header=True,
    )
    assert True
def test_bq_list_datasets_1():
    """Enumerate the project's BigQuery datasets; any exception fails the test."""
    credentials = googlecloud.gcloud_connect(
        GC_SERVICE_ACCOUNT_EMAIL,
        GC_SECRET_KEY_PATH,
        "https://www.googleapis.com/auth/bigquery",
    )
    bq = build("bigquery", "v2", http=credentials)
    googlecloud.list_datasets(bq, BQ_PROJECT)
    assert True