Example 1
def _upload_results_to_bq(rows):
    """Upload test results to a BQ table.

    Args:
        rows: A list of dictionaries containing data for each row to insert
    """
    bq = big_query_utils.create_big_query()
    big_query_utils.create_partitioned_table(
        bq,
        _PROJECT_ID,
        _DATASET_ID,
        _TABLE_ID,
        _RESULTS_SCHEMA,
        _DESCRIPTION,
        partition_type=_PARTITION_TYPE,
        expiration_ms=_EXPIRATION_MS)

    max_retries = 3
    for attempt in range(max_retries):
        if big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID, _TABLE_ID,
                                       rows):
            break
        else:
            if attempt < max_retries - 1:
                print('Error uploading result to bigquery, will retry.')
            else:
                print(
                    'Error uploading result to bigquery, all attempts failed.')
                sys.exit(1)
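
A minimal usage sketch for the helper above, assuming the rows are built with big_query_utils.make_row as in the later examples; the field names shown are placeholders, not the real _RESULTS_SCHEMA columns:

# Hypothetical caller (sketch only): build insertAll-style rows, then upload them.
results = [{'test_name': 'example_test', 'result': 'PASSED'}]  # placeholder fields
rows = [big_query_utils.make_row(str(uuid.uuid4()), r) for r in results]
_upload_results_to_bq(rows)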
Example 2
def _insert_result(bq, dataset_id, table_id, scenario_result, flatten=True):
    """Insert a single scenario result as one row into the given BQ table."""
    if flatten:
        _flatten_result_inplace(scenario_result)
    _populate_metadata_inplace(scenario_result)
    row = big_query_utils.make_row(str(uuid.uuid4()), scenario_result)
    return big_query_utils.insert_rows(bq, _PROJECT_ID, dataset_id, table_id,
                                       [row])
Example 3
def upload_results_to_bq(resultset, bq_table, args, platform):
  """Upload test results to a BQ table.

  Args:
      resultset: dictionary generated by jobset.run
      bq_table: string name of table to create/upload results to in BQ
      args: args in run_tests.py, generated by argparse
      platform: string name of platform tests were run on
  """
  bq = big_query_utils.create_big_query()
  big_query_utils.create_partitioned_table(bq, _PROJECT_ID, _DATASET_ID,
                                           bq_table, _RESULTS_SCHEMA,
                                           _DESCRIPTION,
                                           partition_type=_PARTITION_TYPE,
                                           expiration_ms=_EXPIRATION_MS)

  for shortname, results in six.iteritems(resultset):
    for result in results:
      test_results = {}
      _get_build_metadata(test_results)
      test_results['compiler'] = args.compiler
      test_results['config'] = args.config
      test_results['cpu_estimated'] = result.cpu_estimated
      test_results['cpu_measured'] = result.cpu_measured
      test_results['elapsed_time'] = '%.2f' % result.elapsed_time
      test_results['iomgr_platform'] = args.iomgr_platform
      # args.language is a list, but it always has exactly one element in the
      # contexts where this function is used.
      test_results['language'] = args.language[0]
      test_results['platform'] = platform
      test_results['result'] = result.state
      test_results['test_name'] = shortname
      test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')

      row = big_query_utils.make_row(str(uuid.uuid4()), test_results)
      if not big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID, bq_table, [row]):
        print('Error uploading result to bigquery.')
        sys.exit(1)
Example 4
def _upload_results_to_bq(rows):
    """Upload test results to a BQ table.

    Args:
        rows: A list of dictionaries containing data for each row to insert
    """
    bq = big_query_utils.create_big_query()
    big_query_utils.create_partitioned_table(bq,
                                             _PROJECT_ID,
                                             _DATASET_ID,
                                             _TABLE_ID,
                                             _RESULTS_SCHEMA,
                                             _DESCRIPTION,
                                             partition_type=_PARTITION_TYPE,
                                             expiration_ms=_EXPIRATION_MS)

    max_retries = 3
    for attempt in range(max_retries):
        if big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID, _TABLE_ID,
                                       rows):
            break
        else:
            if attempt < max_retries - 1:
                print('Error uploading result to bigquery, will retry.')
            else:
                print(
                    'Error uploading result to bigquery, all attempts failed.')
                sys.exit(1)
Example 5
def upload_result(result_list, metadata):
    for result in result_list:
        new_result = copy.deepcopy(result)
        new_result['metadata'] = metadata
        bq = big_query_utils.create_big_query()
        row = big_query_utils.make_row(str(uuid.uuid4()), new_result)
        if not big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET,
                                           _TABLE + "$" + _NOW, [row]):
            print('Error when uploading result', new_result)
Example 6
def upload_result(result_list, metadata):
  for result in result_list:
    new_result = copy.deepcopy(result)
    new_result['metadata'] = metadata
    bq = big_query_utils.create_big_query()
    row = big_query_utils.make_row(str(uuid.uuid4()), new_result)
    if not big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET,
                                       _TABLE + "$" + _NOW,
                                       [row]):
      print('Error when uploading result', new_result)
Example 7
def upload_results_to_bq(resultset, bq_table, args, platform):
    """Upload test results to a BQ table.

    Args:
        resultset: dictionary generated by jobset.run
        bq_table: string name of table to create/upload results to in BQ
        args: args in run_tests.py, generated by argparse
        platform: string name of platform tests were run on
    """
    bq = big_query_utils.create_big_query()
    big_query_utils.create_partitioned_table(bq,
                                             _PROJECT_ID,
                                             _DATASET_ID,
                                             bq_table,
                                             _RESULTS_SCHEMA,
                                             _DESCRIPTION,
                                             partition_type=_PARTITION_TYPE,
                                             expiration_ms=_EXPIRATION_MS)

    for shortname, results in six.iteritems(resultset):
        for result in results:
            test_results = {}
            _get_build_metadata(test_results)
            test_results['compiler'] = args.compiler
            test_results['config'] = args.config
            test_results['cpu_estimated'] = result.cpu_estimated
            test_results['cpu_measured'] = result.cpu_measured
            test_results['elapsed_time'] = '%.2f' % result.elapsed_time
            test_results['iomgr_platform'] = args.iomgr_platform
            # args.language is a list, but it always has exactly one element
            # in the contexts where this function is used.
            test_results['language'] = args.language[0]
            test_results['platform'] = platform
            test_results['result'] = result.state
            test_results['return_code'] = result.returncode
            test_results['test_name'] = shortname
            test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')

            row = big_query_utils.make_row(str(uuid.uuid4()), test_results)

            # TODO(jtattermusch): rows are inserted one by one, very inefficient
            max_retries = 3
            for attempt in range(max_retries):
                if big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID,
                                               bq_table, [row]):
                    break
                else:
                    if attempt < max_retries - 1:
                        print(
                            'Error uploading result to bigquery, will retry.')
                    else:
                        print(
                            'Error uploading result to bigquery, all attempts failed.'
                        )
                        sys.exit(1)
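
The TODO above points out that rows are inserted one by one. A hedged sketch of how the same loop could collect the rows and issue a single insert per resultset instead, mirroring the batching used by the interop uploader in the next example (_build_test_results is a hypothetical helper standing in for the dictionary-building code above):

# Sketch only, not the original implementation.
bq_rows = []
for shortname, results in six.iteritems(resultset):
    for result in results:
        test_results = _build_test_results(result, shortname, args, platform)  # hypothetical helper
        bq_rows.append(big_query_utils.make_row(str(uuid.uuid4()), test_results))
if not big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID, bq_table, bq_rows):
    print('Error uploading results to bigquery.')
    sys.exit(1)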
Example 8
def upload_interop_results_to_bq(resultset, bq_table, args):
    """Upload interop test results to a BQ table.

    Args:
        resultset: dictionary generated by jobset.run
        bq_table: string name of table to create/upload results to in BQ
        args: args in run_interop_tests.py, generated by argparse
    """
    bq = big_query_utils.create_big_query()
    big_query_utils.create_partitioned_table(bq,
                                             _PROJECT_ID,
                                             _DATASET_ID,
                                             bq_table,
                                             _INTEROP_RESULTS_SCHEMA,
                                             _DESCRIPTION,
                                             partition_type=_PARTITION_TYPE,
                                             expiration_ms=_EXPIRATION_MS)

    for shortname, results in six.iteritems(resultset):
        bq_rows = []
        for result in results:
            test_results = {}
            _get_build_metadata(test_results)
            test_results['elapsed_time'] = '%.2f' % result.elapsed_time
            test_results['result'] = result.state
            test_results['test_name'] = shortname
            test_results['suite'] = shortname.split(':')[0]
            test_results['client'] = shortname.split(':')[1]
            test_results['server'] = shortname.split(':')[2]
            test_results['test_case'] = shortname.split(':')[3]
            test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')
            row = big_query_utils.make_row(str(uuid.uuid4()), test_results)
            bq_rows.append(row)

        # BigQuery sometimes fails with large uploads, so batch 1,000 rows at a time.
        for i in range((len(bq_rows) // 1000) + 1):
            max_retries = 3
            for attempt in range(max_retries):
                if big_query_utils.insert_rows(
                        bq, _PROJECT_ID, _DATASET_ID, bq_table,
                        bq_rows[i * 1000:(i + 1) * 1000]):
                    break
                else:
                    if attempt < max_retries - 1:
                        print(
                            'Error uploading result to bigquery, will retry.')
                    else:
                        print(
                            'Error uploading result to bigquery, all attempts failed.'
                        )
                        sys.exit(1)
Example 9
    def insert_qps_row(self, qps, recorded_at):
        row_values_dict = {
            'run_id': self.run_id,
            'pod_name': self.pod_name,
            'recorded_at': recorded_at,
            'qps': qps
        }

        # row_unique_id is something that uniquely identifies the row (BigQuery uses
        # it for duplicate detection).
        row_unique_id = '%s_%s_%s' % (self.run_id, self.pod_name, recorded_at)
        row = bq_utils.make_row(row_unique_id, row_values_dict)
        return bq_utils.insert_rows(self.bq, self.project_id, self.dataset_id,
                                    self.qps_table_id, [row])
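
The comment above about duplicate detection refers to the insertId field of BigQuery's streaming tabledata().insertAll API. A rough sketch of the row shape a make_row-style helper could produce under that assumption (this is an assumption about bq_utils, not its documented behaviour):

# Assumed row shape for streaming inserts: payload under 'json', dedup key under 'insertId'.
def make_row_sketch(unique_row_id, row_values_dict):
    return {'insertId': unique_row_id, 'json': row_values_dict}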
Example 10
  def insert_qps_row(self, qps, recorded_at):
    row_values_dict = {
        'run_id': self.run_id,
        'pod_name': self.pod_name,
        'recorded_at': recorded_at,
        'qps': qps
    }

    # row_unique_id is something that uniquely identifies the row (BigQuery uses
    # it for duplicate detection).
    row_unique_id = '%s_%s_%s' % (self.run_id, self.pod_name, recorded_at)
    row = bq_utils.make_row(row_unique_id, row_values_dict)
    return bq_utils.insert_rows(self.bq, self.project_id, self.dataset_id,
                                self.qps_table_id, [row])
Example 11
    def insert_summary_row(self, event_type, details):
        row_values_dict = {
            'run_id': self.run_id,
            'image_type': self.image_type,
            'pod_name': self.pod_name,
            'event_date': datetime.datetime.now().isoformat(),
            'event_type': event_type,
            'details': details
        }
        # row_unique_id is something that uniquely identifies the row (BigQuery uses
        # it for duplicate detection).
        row_unique_id = '%s_%s_%s' % (self.run_id, self.pod_name, event_type)
        row = bq_utils.make_row(row_unique_id, row_values_dict)
        return bq_utils.insert_rows(self.bq, self.project_id, self.dataset_id,
                                    self.summary_table_id, [row])
Example 12
  def insert_summary_row(self, event_type, details):
    row_values_dict = {
        'run_id': self.run_id,
        'image_type': self.image_type,
        'pod_name': self.pod_name,
        'event_date': datetime.datetime.now().isoformat(),
        'event_type': event_type,
        'details': details
    }
    # row_unique_id is something that uniquely identifies the row (BigQuery uses
    # it for duplicate detection).
    row_unique_id = '%s_%s_%s' % (self.run_id, self.pod_name, event_type)
    row = bq_utils.make_row(row_unique_id, row_values_dict)
    return bq_utils.insert_rows(self.bq, self.project_id, self.dataset_id,
                                self.summary_table_id, [row])
Example 13
def _insert_scenario_result(bq,
                            dataset_id,
                            table_id,
                            scenario_result,
                            test_metadata_file,
                            node_info_file,
                            prometheus_query_results_file,
                            flatten=True):
    """Insert one scenario result, with file-based metadata, as a BQ row."""
    if flatten:
        _flatten_result_inplace(scenario_result)
    _populate_metadata_from_file(scenario_result, test_metadata_file)
    _populate_node_metadata_from_file(scenario_result, node_info_file)
    _populate_prometheus_query_results_from_file(scenario_result,
                                                 prometheus_query_results_file)
    row = big_query_utils.make_row(str(uuid.uuid4()), scenario_result)
    return big_query_utils.insert_rows(bq, _PROJECT_ID, dataset_id, table_id,
                                       [row])
Example 14
def upload_interop_results_to_bq(resultset, bq_table, args):
    """Upload interop test results to a BQ table.

    Args:
        resultset: dictionary generated by jobset.run
        bq_table: string name of table to create/upload results to in BQ
        args: args in run_interop_tests.py, generated by argparse
    """
    bq = big_query_utils.create_big_query()
    big_query_utils.create_partitioned_table(
        bq,
        _PROJECT_ID,
        _DATASET_ID,
        bq_table,
        _INTEROP_RESULTS_SCHEMA,
        _DESCRIPTION,
        partition_type=_PARTITION_TYPE,
        expiration_ms=_EXPIRATION_MS)

    for shortname, results in six.iteritems(resultset):
        for result in results:
            test_results = {}
            _get_build_metadata(test_results)
            test_results['elapsed_time'] = '%.2f' % result.elapsed_time
            test_results['result'] = result.state
            test_results['test_name'] = shortname
            test_results['suite'] = shortname.split(':')[0]
            test_results['client'] = shortname.split(':')[1]
            test_results['server'] = shortname.split(':')[2]
            test_results['test_case'] = shortname.split(':')[3]
            test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')
            row = big_query_utils.make_row(str(uuid.uuid4()), test_results)
            # TODO(jtattermusch): rows are inserted one by one, very inefficient
            max_retries = 3
            for attempt in range(max_retries):
                if big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID,
                                               bq_table, [row]):
                    break
                else:
                    if attempt < max_retries - 1:
                        print('Error uploading result to bigquery, will retry.')
                    else:
                        print(
                            'Error uploading result to bigquery, all attempts failed.'
                        )
                        sys.exit(1)
Example 15
def _insert_rows_with_retries(bq, bq_table, bq_rows):
    """Insert rows to bq table. Retry on error."""
    # BigQuery sometimes fails with large uploads, so batch 1,000 rows at a time.
    for i in range((len(bq_rows) // 1000) + 1):
        max_retries = 3
        for attempt in range(max_retries):
            if big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID,
                                           bq_table,
                                           bq_rows[i * 1000:(i + 1) * 1000]):
                break
            else:
                if attempt < max_retries - 1:
                    print('Error uploading result to bigquery, will retry.')
                else:
                    print(
                        'Error uploading result to bigquery, all attempts failed.'
                    )
                    sys.exit(1)
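
For reference, a small worked sketch of how the 1,000-row batching above slices the list, assuming Python 3 integer division (note that the final slice is empty when the row count is an exact multiple of 1,000):

# Illustration only, not part of the original module.
bq_rows = ['row%d' % n for n in range(2500)]
batches = [bq_rows[i * 1000:(i + 1) * 1000]
           for i in range((len(bq_rows) // 1000) + 1)]
print([len(b) for b in batches])  # [1000, 1000, 500]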
Example 16
def upload_results_to_bq(resultset, bq_table, args, platform):
    """Upload test results to a BQ table.

    Args:
        resultset: dictionary generated by jobset.run
        bq_table: string name of table to create/upload results to in BQ
        args: args in run_tests.py, generated by argparse
        platform: string name of platform tests were run on
    """
    bq = big_query_utils.create_big_query()
    big_query_utils.create_table(bq, _PROJECT_ID, _DATASET_ID, bq_table,
                                 _RESULTS_SCHEMA, _DESCRIPTION)

    for shortname, results in six.iteritems(resultset):
        for result in results:
            test_results = {}
            _get_build_metadata(test_results)
            test_results['compiler'] = args.compiler
            test_results['config'] = args.config
            test_results['cpu_estimated'] = result.cpu_estimated
            test_results['cpu_measured'] = result.cpu_measured
            test_results['elapsed_time'] = '%.2f' % result.elapsed_time
            test_results['iomgr_platform'] = args.iomgr_platform
            # args.language is a list, but it always has exactly one element
            # in the contexts where this function is used.
            test_results['language'] = args.language[0]
            test_results['platform'] = platform
            test_results['result'] = result.state
            test_results['test_name'] = shortname
            test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')

            row = big_query_utils.make_row(str(uuid.uuid4()), test_results)
            if not big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID,
                                               bq_table, [row]):
                print('Error uploading result to bigquery.')
                sys.exit(1)
Example 17
  last_complete_build_number = job.get_last_completed_buildnumber()
  # To avoid processing all builds for a project never looked at. In this case,
  # only examine 10 latest builds.
  starting_build_number = max(last_processed_build_number + 1,
                              last_complete_build_number - 9)
  for build_number in range(starting_build_number,
                            last_complete_build_number + 1):
    print('====> Processing %s build %d.' % (build_name, build_number))
    build = None
    try:
      build = job.get_build_metadata(build_number)
    except KeyError:
      print('====> Build %s is missing. Skip.' % build_number)
      continue
    build_result = {'build_number': build_number, 
                    'timestamp': str(build.get_timestamp())}
    url_base = json_url = '%s/%s/%d' % (_URL_BASE, build_name, build_number)
    if _BUILDS[build_name]:  # The build has matrix, such as gRPC_master.
      build_result['matrix'] = _process_matrix(build, url_base)
    else:
      json_url = '%s/testReport/api/json' % url_base
      console_url = '%s/consoleFull' % url_base
      build_result['duration'] = build.get_duration().total_seconds()
      build_result.update(_process_build(json_url, console_url))
    rows = [big_query_utils.make_row(build_number, build_result)]
    if not big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID, build_name, 
                                       rows):
      print('====> Error uploading result to bigquery.')
      sys.exit(1)