import sys
import time
import uuid

import six

# The big_query_utils helper module is used throughout this file; its exact
# import path is assumed here.
import big_query_utils


def _upload_results_to_bq(rows):
    """Upload test results to a BQ table.

    Args:
        rows: A list of dictionaries containing data for each row to insert
    """
    bq = big_query_utils.create_big_query()
    big_query_utils.create_partitioned_table(bq,
                                             _PROJECT_ID,
                                             _DATASET_ID,
                                             _TABLE_ID,
                                             _RESULTS_SCHEMA,
                                             _DESCRIPTION,
                                             partition_type=_PARTITION_TYPE,
                                             expiration_ms=_EXPIRATION_MS)

    max_retries = 3
    for attempt in range(max_retries):
        if big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID, _TABLE_ID,
                                       rows):
            break
        else:
            if attempt < max_retries - 1:
                print('Error uploading result to bigquery, will retry.')
            else:
                print('Error uploading result to bigquery, all attempts failed.')
                sys.exit(1)
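# The functions in this file reference module-level constants that are not
# part of this excerpt. A minimal sketch follows so the snippets are
# self-contained; every value below is a placeholder assumption — only the
# names come from the code itself.
_PROJECT_ID = 'example-gcp-project'  # assumed: GCP project hosting the dataset
_DATASET_ID = 'example_test_results'  # assumed: BigQuery dataset name
_TABLE_ID = 'example_results'  # assumed: table used by _upload_results_to_bq
_DESCRIPTION = 'Test results uploaded by the run_tests scripts'  # assumed
_PARTITION_TYPE = 'DAY'  # day-partitioned tables
_EXPIRATION_MS = 90 * 24 * 60 * 60 * 1000  # assumed: 90-day partition expiry
_RESULTS_SCHEMA = []  # schema definitions are not part of this excerpt
_INTEROP_RESULTS_SCHEMA = []  # schema definitions are not part of this excerpt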
def upload_interop_results_to_bq(resultset, bq_table, args):
    """Upload interop test results to a BQ table.

    Args:
        resultset: dictionary generated by jobset.run
        bq_table: string name of table to create/upload results to in BQ
        args: args in run_interop_tests.py, generated by argparse
    """
    bq = big_query_utils.create_big_query()
    big_query_utils.create_partitioned_table(bq,
                                             _PROJECT_ID,
                                             _DATASET_ID,
                                             bq_table,
                                             _INTEROP_RESULTS_SCHEMA,
                                             _DESCRIPTION,
                                             partition_type=_PARTITION_TYPE,
                                             expiration_ms=_EXPIRATION_MS)

    bq_rows = []
    for shortname, results in six.iteritems(resultset):
        for result in results:
            test_results = {}
            _get_build_metadata(test_results)
            test_results['elapsed_time'] = '%.2f' % result.elapsed_time
            test_results['result'] = result.state
            test_results['test_name'] = shortname
            test_results['suite'] = shortname.split(':')[0]
            test_results['client'] = shortname.split(':')[1]
            test_results['server'] = shortname.split(':')[2]
            test_results['test_case'] = shortname.split(':')[3]
            test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')
            row = big_query_utils.make_row(str(uuid.uuid4()), test_results)
            bq_rows.append(row)
    _insert_rows_with_retries(bq, bq_table, bq_rows)
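# _insert_rows_with_retries is called above but is not defined in this
# excerpt. A minimal sketch, assuming it mirrors the inline retry/batching
# logic that appears elsewhere in this file (1,000-row batches, three attempts
# per batch); the batch size and retry count are assumptions, not confirmed
# constants.
def _insert_rows_with_retries(bq, bq_table, bq_rows):
    """Insert rows into a BQ table, batching uploads and retrying on error."""
    # BigQuery sometimes fails with large uploads, so batch 1,000 rows at a
    # time. Integer division keeps this correct under both Python 2 and 3.
    for i in range((len(bq_rows) // 1000) + 1):
        max_retries = 3
        for attempt in range(max_retries):
            if big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID,
                                           bq_table,
                                           bq_rows[i * 1000:(i + 1) * 1000]):
                break
            else:
                if attempt < max_retries - 1:
                    print('Error uploading result to bigquery, will retry.')
                else:
                    print('Error uploading result to bigquery, all attempts failed.')
                    sys.exit(1)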
def upload_results_to_bq(resultset, bq_table, extra_fields):
    """Upload test results to a BQ table.

    Args:
        resultset: dictionary generated by jobset.run
        bq_table: string name of table to create/upload results to in BQ
        extra_fields: dict with extra values that will be uploaded along
            with the results
    """
    bq = big_query_utils.create_big_query()
    big_query_utils.create_partitioned_table(bq,
                                             _PROJECT_ID,
                                             _DATASET_ID,
                                             bq_table,
                                             _RESULTS_SCHEMA,
                                             _DESCRIPTION,
                                             partition_type=_PARTITION_TYPE,
                                             expiration_ms=_EXPIRATION_MS)

    bq_rows = []
    for shortname, results in six.iteritems(resultset):
        for result in results:
            test_results = {}
            _get_build_metadata(test_results)
            test_results['cpu_estimated'] = result.cpu_estimated
            test_results['cpu_measured'] = result.cpu_measured
            test_results['elapsed_time'] = '%.2f' % result.elapsed_time
            test_results['result'] = result.state
            test_results['return_code'] = result.returncode
            test_results['test_name'] = shortname
            test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')
            for field_name, field_value in six.iteritems(extra_fields):
                test_results[field_name] = field_value
            row = big_query_utils.make_row(str(uuid.uuid4()), test_results)
            bq_rows.append(row)
    _insert_rows_with_retries(bq, bq_table, bq_rows)
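# _get_build_metadata is called by every uploader in this file but is not part
# of this excerpt. A minimal sketch, assuming it copies CI build metadata from
# environment variables into the row dict; the variable names and field names
# below are assumptions, not the real implementation.
import os


def _get_build_metadata(test_results):
    """Populate test_results with metadata about the CI build (sketch)."""
    build_id = os.getenv('BUILD_ID')  # assumed environment variable
    build_url = os.getenv('BUILD_URL')  # assumed environment variable
    job_name = os.getenv('JOB_NAME')  # assumed environment variable

    if build_id:
        test_results['build_id'] = build_id
    if build_url:
        test_results['build_url'] = build_url
    if job_name:
        test_results['job_name'] = job_name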
def upload_results_to_bq(resultset, bq_table, args, platform):
    """Upload test results to a BQ table.

    Args:
        resultset: dictionary generated by jobset.run
        bq_table: string name of table to create/upload results to in BQ
        args: args in run_tests.py, generated by argparse
        platform: string name of platform tests were run on
    """
    bq = big_query_utils.create_big_query()
    big_query_utils.create_partitioned_table(bq,
                                             _PROJECT_ID,
                                             _DATASET_ID,
                                             bq_table,
                                             _RESULTS_SCHEMA,
                                             _DESCRIPTION,
                                             partition_type=_PARTITION_TYPE,
                                             expiration_ms=_EXPIRATION_MS)

    for shortname, results in six.iteritems(resultset):
        for result in results:
            test_results = {}
            _get_build_metadata(test_results)
            test_results['compiler'] = args.compiler
            test_results['config'] = args.config
            test_results['cpu_estimated'] = result.cpu_estimated
            test_results['cpu_measured'] = result.cpu_measured
            test_results['elapsed_time'] = '%.2f' % result.elapsed_time
            test_results['iomgr_platform'] = args.iomgr_platform
            # args.language is a list, but will always have one element in the
            # contexts this function is used.
            test_results['language'] = args.language[0]
            test_results['platform'] = platform
            test_results['result'] = result.state
            test_results['test_name'] = shortname
            test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')
            row = big_query_utils.make_row(str(uuid.uuid4()), test_results)
            if not big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID,
                                               bq_table, [row]):
                print('Error uploading result to bigquery.')
                sys.exit(1)
def upload_results_to_bq(resultset, bq_table, args, platform):
    """Upload test results to a BQ table.

    Args:
        resultset: dictionary generated by jobset.run
        bq_table: string name of table to create/upload results to in BQ
        args: args in run_tests.py, generated by argparse
        platform: string name of platform tests were run on
    """
    bq = big_query_utils.create_big_query()
    big_query_utils.create_partitioned_table(bq,
                                             _PROJECT_ID,
                                             _DATASET_ID,
                                             bq_table,
                                             _RESULTS_SCHEMA,
                                             _DESCRIPTION,
                                             partition_type=_PARTITION_TYPE,
                                             expiration_ms=_EXPIRATION_MS)

    for shortname, results in six.iteritems(resultset):
        for result in results:
            test_results = {}
            _get_build_metadata(test_results)
            test_results['compiler'] = args.compiler
            test_results['config'] = args.config
            test_results['cpu_estimated'] = result.cpu_estimated
            test_results['cpu_measured'] = result.cpu_measured
            test_results['elapsed_time'] = '%.2f' % result.elapsed_time
            test_results['iomgr_platform'] = args.iomgr_platform
            # args.language is a list, but will always have one element in the
            # contexts this function is used.
            test_results['language'] = args.language[0]
            test_results['platform'] = platform
            test_results['result'] = result.state
            test_results['return_code'] = result.returncode
            test_results['test_name'] = shortname
            test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')
            row = big_query_utils.make_row(str(uuid.uuid4()), test_results)

            # TODO(jtattermusch): rows are inserted one by one, very inefficient
            max_retries = 3
            for attempt in range(max_retries):
                if big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID,
                                               bq_table, [row]):
                    break
                else:
                    if attempt < max_retries - 1:
                        print('Error uploading result to bigquery, will retry.')
                    else:
                        print('Error uploading result to bigquery, all attempts failed.')
                        sys.exit(1)
def upload_interop_results_to_bq(resultset, bq_table, args):
    """Upload interop test results to a BQ table.

    Args:
        resultset: dictionary generated by jobset.run
        bq_table: string name of table to create/upload results to in BQ
        args: args in run_interop_tests.py, generated by argparse
    """
    bq = big_query_utils.create_big_query()
    big_query_utils.create_partitioned_table(bq,
                                             _PROJECT_ID,
                                             _DATASET_ID,
                                             bq_table,
                                             _INTEROP_RESULTS_SCHEMA,
                                             _DESCRIPTION,
                                             partition_type=_PARTITION_TYPE,
                                             expiration_ms=_EXPIRATION_MS)

    for shortname, results in six.iteritems(resultset):
        bq_rows = []
        for result in results:
            test_results = {}
            _get_build_metadata(test_results)
            test_results['elapsed_time'] = '%.2f' % result.elapsed_time
            test_results['result'] = result.state
            test_results['test_name'] = shortname
            test_results['suite'] = shortname.split(':')[0]
            test_results['client'] = shortname.split(':')[1]
            test_results['server'] = shortname.split(':')[2]
            test_results['test_case'] = shortname.split(':')[3]
            test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')
            row = big_query_utils.make_row(str(uuid.uuid4()), test_results)
            bq_rows.append(row)

        # BigQuery sometimes fails with large uploads, so batch 1,000 rows at
        # a time. Use integer division so the batch count is an int under
        # Python 3 as well.
        for i in range((len(bq_rows) // 1000) + 1):
            max_retries = 3
            for attempt in range(max_retries):
                if big_query_utils.insert_rows(
                        bq, _PROJECT_ID, _DATASET_ID, bq_table,
                        bq_rows[i * 1000:(i + 1) * 1000]):
                    break
                else:
                    if attempt < max_retries - 1:
                        print('Error uploading result to bigquery, will retry.')
                    else:
                        print('Error uploading result to bigquery, all attempts failed.')
                        sys.exit(1)
def upload_interop_results_to_bq(resultset, bq_table, args):
    """Upload interop test results to a BQ table.

    Args:
        resultset: dictionary generated by jobset.run
        bq_table: string name of table to create/upload results to in BQ
        args: args in run_interop_tests.py, generated by argparse
    """
    bq = big_query_utils.create_big_query()
    big_query_utils.create_partitioned_table(bq,
                                             _PROJECT_ID,
                                             _DATASET_ID,
                                             bq_table,
                                             _INTEROP_RESULTS_SCHEMA,
                                             _DESCRIPTION,
                                             partition_type=_PARTITION_TYPE,
                                             expiration_ms=_EXPIRATION_MS)

    for shortname, results in six.iteritems(resultset):
        for result in results:
            test_results = {}
            _get_build_metadata(test_results)
            test_results['elapsed_time'] = '%.2f' % result.elapsed_time
            test_results['result'] = result.state
            test_results['test_name'] = shortname
            test_results['suite'] = shortname.split(':')[0]
            test_results['client'] = shortname.split(':')[1]
            test_results['server'] = shortname.split(':')[2]
            test_results['test_case'] = shortname.split(':')[3]
            test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')
            row = big_query_utils.make_row(str(uuid.uuid4()), test_results)

            # TODO(jtattermusch): rows are inserted one by one, very inefficient
            max_retries = 3
            for attempt in range(max_retries):
                if big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID,
                                               bq_table, [row]):
                    break
                else:
                    if attempt < max_retries - 1:
                        print('Error uploading result to bigquery, will retry.')
                    else:
                        print('Error uploading result to bigquery, all attempts failed.')
                        sys.exit(1)
def upload_results_to_bq(resultset, bq_table, args, platform):
    """Upload test results to a BQ table.

    Args:
        resultset: dictionary generated by jobset.run
        bq_table: string name of table to create/upload results to in BQ
        args: args in run_tests.py, generated by argparse
        platform: string name of platform tests were run on
    """
    bq = big_query_utils.create_big_query()
    big_query_utils.create_partitioned_table(bq,
                                             _PROJECT_ID,
                                             _DATASET_ID,
                                             bq_table,
                                             _RESULTS_SCHEMA,
                                             _DESCRIPTION,
                                             partition_type=_PARTITION_TYPE,
                                             expiration_ms=_EXPIRATION_MS)

    bq_rows = []
    for shortname, results in six.iteritems(resultset):
        for result in results:
            test_results = {}
            _get_build_metadata(test_results)
            test_results['compiler'] = args.compiler
            test_results['config'] = args.config
            test_results['cpu_estimated'] = result.cpu_estimated
            test_results['cpu_measured'] = result.cpu_measured
            test_results['elapsed_time'] = '%.2f' % result.elapsed_time
            test_results['iomgr_platform'] = args.iomgr_platform
            # args.language is a list, but will always have one element in the
            # contexts this function is used.
            test_results['language'] = args.language[0]
            test_results['platform'] = platform
            test_results['result'] = result.state
            test_results['return_code'] = result.returncode
            test_results['test_name'] = shortname
            test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')
            row = big_query_utils.make_row(str(uuid.uuid4()), test_results)
            bq_rows.append(row)
    _insert_rows_with_retries(bq, bq_table, bq_rows)
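# Hypothetical usage, for illustration only: the table names, platform string,
# and extra fields below are made up, and a real call needs BigQuery
# credentials plus a resultset produced by jobset.run as the docstrings above
# describe.
#
#   resultset = jobset.run(...)
#   upload_results_to_bq(resultset, 'example_results_table', args, 'linux')
#   upload_interop_results_to_bq(resultset, 'example_interop_table', args)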