def main():
    """Prompt for a project ID and a SQL query, run the query
    synchronously, and print every page of results.

    Relies on the module-level helpers: `auth.get_service`,
    `sync_query`, `query.query_polling`, and `query.query_paging`.
    """
    bigquery = auth.get_service()
    project = raw_input("Choose your project ID: ")
    sql = raw_input("Enter your Bigquery SQL Query: ")

    # Kick off the query, wait for it to finish, then walk the pages.
    job = sync_query(bigquery, project, sql)
    response = query.query_polling(bigquery, job)
    for results_page in query.query_paging(bigquery, response):
        print(results_page)
def main(): project_id = raw_input("Choose your project ID: ") dataset_id = raw_input("Choose a dataset ID: ") table_id = raw_input("Choose a table name to copy: ") gcs_path = raw_input("Enter a Google Cloud Storage URI: ") bigquery = auth.get_service() resource = export_table(bigquery, project_id, dataset_id, table_id, gcs_path) poll_job.poll_job(bigquery, resource) print 'Done exporting!'
def main():
    """Interactively stream rows into a BigQuery table.

    Each input line is parsed as a Python literal via `ast.literal_eval`
    and handed to `stream_row_to_bigquery`; an empty line ends the loop.
    Relies on the module-level helpers `auth.get_service` and
    `stream_row_to_bigquery`, and on the `ast` import.
    """
    bigquery = auth.get_service()
    project = raw_input("Choose your project ID: ")
    dataset = raw_input("Choose a dataset ID: ")
    table = raw_input("Choose a table ID : ")

    row_text = raw_input("Stream a line into your bigquery table:")
    while row_text:
        # literal_eval safely parses the typed row (e.g. a dict literal).
        row = ast.literal_eval(row_text)
        result = stream_row_to_bigquery(bigquery, project, dataset, table, row)
        print(result)
        row_text = raw_input("Stream another line into your bigquery table \n"
                             "[hit enter to quit]:")
def main():
    """Prompt for a project ID and a SQL query, run it asynchronously
    (optionally as a batch-priority job), then fetch and print every
    page of results.

    Relies on the module-level helpers: `auth.get_service`,
    `async_batch_query`, `async_query`, `query.polling`, and
    `query.query_paging`.
    """
    service = auth.get_service()
    project_id = raw_input("Choose your project ID: ")
    query_string = raw_input("Enter your Bigquery SQL Query: ")
    # Bug fix: the original `set('True', 'true', 'y', 'Y', 'yes', 'Yes')`
    # raises TypeError — built-in set() accepts a single iterable, not
    # multiple arguments. A set literal expresses the intended membership
    # test.
    batch = raw_input("Run query as batch?: ") in {
        'True', 'true', 'y', 'Y', 'yes', 'Yes'}
    if batch:
        query_job = async_batch_query(service, project_id, query_string)
    else:
        query_job = async_query(service, project_id, query_string)
    query_response = service.jobs().getQueryResults(
        projectId=project_id,
        jobId=query_job['jobReference']['jobId']).execute()
    # NOTE(review): the synchronous sample calls query.query_polling;
    # confirm that query.polling is the intended helper name here.
    for page in query.query_paging(service,
                                   query.polling(service, query_response)):
        print(page)
def setUp(self):
    """Build a fresh service client via the auth helper for each test."""
    service = auth.get_service()
    self.service = service
def test_get_service(self):
    """auth.get_service() should return a non-None service object."""
    self.assertIsNotNone(auth.get_service())