def __create_qps_table(self):
    """Create the BigQuery table that stores QPS samples for this run.

    Schema: one row per (run, pod, timestamp) with the measured QPS.

    Returns:
        The result of bq_utils.create_table (truthy on success).
    """
    qps_table_schema = [
        ('run_id', 'STRING', 'Test run id'),
        ('pod_name', 'STRING', 'GKE pod hosting this image'),
        ('recorded_at', 'STRING', 'Metrics recorded at time'),
        ('qps', 'INTEGER', 'Queries per second'),
    ]
    # Fixed typo in the table description: 'cointains' -> 'contains'.
    desc = 'The table that contains the qps recorded at various intervals'
    return bq_utils.create_table(self.bq, self.project_id, self.dataset_id,
                                 self.qps_table_id, qps_table_schema, desc)
def __create_summary_table(self):
    """Create the BigQuery table that stores lifecycle events for this run.

    Schema: one row per STARTED/SUCCESS/FAILURE event emitted by a stress
    test client or server pod.

    Returns:
        The result of bq_utils.create_table (truthy on success).
    """
    summary_table_schema = [
        ('run_id', 'STRING', 'Test run id'),
        ('image_type', 'STRING', 'Client or Server?'),
        ('pod_name', 'STRING', 'GKE pod hosting this image'),
        ('event_date', 'STRING', 'The date of this event'),
        ('event_type', 'STRING', 'STARTED/SUCCESS/FAILURE'),
        ('details', 'STRING', 'Any other relevant details'),
    ]
    # Fixed the doubled space produced by the original literal
    # concatenation ('... for ' + ' the ...').
    desc = ('The table that contains START/SUCCESS/FAILURE events for '
            'the stress test clients and servers')
    return bq_utils.create_table(self.bq, self.project_id, self.dataset_id,
                                 self.summary_table_id, summary_table_schema,
                                 desc)
def upload_results_to_bq(resultset, bq_table, args, platform):
    """Upload test results to a BQ table.

    Args:
      resultset: dictionary generated by jobset.run
      bq_table: string name of table to create/upload results to in BQ
      args: args in run_tests.py, generated by argparse
      platform: string name of platform tests were run on
    """
    bq = big_query_utils.create_big_query()
    # Idempotent: (re)create the destination table before inserting rows.
    big_query_utils.create_table(bq, _PROJECT_ID, _DATASET_ID, bq_table,
                                 _RESULTS_SCHEMA, _DESCRIPTION)
    for shortname, results in six.iteritems(resultset):
        for result in results:
            row_data = {}
            # Build metadata first so the explicit fields below take
            # precedence on any key collision (same order as before).
            _get_build_metadata(row_data)
            # args.language is a list, but will always have one element in
            # the contexts this function is used.
            row_data.update({
                'compiler': args.compiler,
                'config': args.config,
                'cpu_estimated': result.cpu_estimated,
                'cpu_measured': result.cpu_measured,
                'elapsed_time': '%.2f' % result.elapsed_time,
                'iomgr_platform': args.iomgr_platform,
                'language': args.language[0],
                'platform': platform,
                'result': result.state,
                'test_name': shortname,
                'timestamp': time.strftime('%Y-%m-%d %H:%M:%S'),
            })
            row = big_query_utils.make_row(str(uuid.uuid4()), row_data)
            inserted = big_query_utils.insert_rows(
                bq, _PROJECT_ID, _DATASET_ID, bq_table, [row])
            if not inserted:
                print('Error uploading result to bigquery.')
                sys.exit(1)