def write_results_to_datastore(grouped):
  """ Saves results to a database """

  from perf_result_datastore import PerfResultDataStore
  print 'Saving perf results to database'
  current_date = datetime.now()
  data_store = PerfResultDataStore(host=options.db_host, username=options.db_username,
      password=options.db_password, database_name=options.db_name)

  run_info_id = data_store.insert_run_info(options.lab_run_info)
  for results in all_query_results(grouped):
    first_query_result = results[RESULT_LIST][0]
    executor_name = first_query_result[EXECUTOR_NAME]
    workload = first_query_result[QUERY][WORKLOAD_NAME]
    scale_factor = first_query_result[QUERY][SCALE_FACTOR]
    query_name = first_query_result[QUERY][NAME]
    query = first_query_result[QUERY][QUERY_STR]
    file_format = first_query_result[QUERY][TEST_VECTOR][FILE_FORMAT]
    compression_codec = first_query_result[QUERY][TEST_VECTOR][COMPRESSION_CODEC]
    compression_type = first_query_result[QUERY][TEST_VECTOR][COMPRESSION_TYPE]
    avg_time = results[AVG]
    stddev = results[STDDEV]
    num_clients = results[NUM_CLIENTS]
    num_iterations = results[ITERATIONS]
    runtime_profile = first_query_result[RUNTIME_PROFILE]

    file_type_id = data_store.get_file_format_id(
        file_format, compression_codec, compression_type)
    if file_type_id is None:
      print 'Skipping unkown file type: %s / %s' % (file_format, compression)
      continue

    workload_id = data_store.get_workload_id(workload, scale_factor)
    if workload_id is None:
      workload_id = data_store.insert_workload_info(workload, scale_factor)

    query_id = data_store.get_query_id(query_name, query)
    if query_id is None:
      query_id = data_store.insert_query_info(query_name, query)

    data_store.insert_execution_result(
        query_id = query_id,
        workload_id = workload_id,
        file_type_id = file_type_id,
        num_clients = num_clients,
        cluster_name = options.cluster_name,
        executor_name = executor_name,
        avg_time = avg_time,
        stddev = stddev,
        run_date = current_date,
        version = options.build_version,
        notes = options.report_description,
        run_info_id = run_info_id,
        num_iterations = num_iterations,
        runtime_profile = runtime_profile,
        is_official = options.is_official)
# Example 2
def write_results_to_datastore(grouped):
  """ Saves results to a database """

  from perf_result_datastore import PerfResultDataStore
  print 'Saving perf results to database'
  current_date = datetime.now()
  data_store = PerfResultDataStore(host=options.db_host, username=options.db_username,
      password=options.db_password, database_name=options.db_name)

  run_info_id = data_store.insert_run_info(options.lab_run_info)
  for results in all_query_results(grouped):
    first_query_result = results[RESULT_LIST][0]
    executor_name = first_query_result[EXECUTOR_NAME]
    workload = first_query_result[QUERY][WORKLOAD_NAME]
    scale_factor = first_query_result[QUERY][SCALE_FACTOR]
    query_name = first_query_result[QUERY][NAME]
    query = first_query_result[QUERY][QUERY_STR]
    file_format = first_query_result[QUERY][TEST_VECTOR][FILE_FORMAT]
    compression_codec = first_query_result[QUERY][TEST_VECTOR][COMPRESSION_CODEC]
    compression_type = first_query_result[QUERY][TEST_VECTOR][COMPRESSION_TYPE]
    avg_time = results[AVG]
    stddev = results[STDDEV]
    num_clients = results[NUM_CLIENTS]
    num_iterations = results[ITERATIONS]
    runtime_profile = first_query_result[RUNTIME_PROFILE]

    file_type_id = data_store.get_file_format_id(
        file_format, compression_codec, compression_type)
    if file_type_id is None:
      print 'Skipping unkown file type: %s / %s' % (file_format, compression)
      continue

    workload_id = data_store.get_workload_id(workload, scale_factor)
    if workload_id is None:
      workload_id = data_store.insert_workload_info(workload, scale_factor)

    query_id = data_store.get_query_id(query_name, query)
    if query_id is None:
      query_id = data_store.insert_query_info(query_name, query)

    data_store.insert_execution_result(
        query_id = query_id,
        workload_id = workload_id,
        file_type_id = file_type_id,
        num_clients = num_clients,
        cluster_name = options.cluster_name,
        executor_name = executor_name,
        avg_time = avg_time,
        stddev = stddev,
        run_date = current_date,
        version = options.build_version,
        notes = options.report_description,
        run_info_id = run_info_id,
        num_iterations = num_iterations,
        runtime_profile = runtime_profile,
        is_official = options.is_official)
def write_results_to_datastore(results):
    """ Saves results to a database """
    current_date = datetime.now()
    data_store = PerfResultDataStore(host=options.db_host,
                                     username=options.db_username,
                                     password=options.db_password,
                                     database_name=options.db_name)

    run_info_id = data_store.insert_run_info(options.lab_run_info)
    for row in results:
        # We ignore everything after the stddev column
        executor, workload, scale_factor, query_name, query, file_format,\
            compression, avg_time, stddev = row[0:STDDEV_IDX + 1]

        # Instead of storing 'N/A' in the database we want to store NULL
        avg_time = avg_time if avg_time and avg_time != 'N/A' else 'NULL'
        stddev = stddev if stddev and stddev != 'N/A' else 'NULL'

        file_type_id = data_store.get_file_format_id(file_format, compression)
        if file_type_id is None:
            print 'Skipping unkown file type: %s / %s' % (file_format,
                                                          compression)
            continue

        workload_id = data_store.get_workload_id(workload, scale_factor)
        if workload_id is None:
            workload_id = data_store.insert_workload_info(
                workload, scale_factor)

        query_id = data_store.get_query_id(query_name, query)
        if query_id is None:
            query_id = data_store.insert_query_info(query_name, query)

        data_store.insert_execution_result(
            query_id=query_id,
            workload_id=workload_id,
            file_type_id=file_type_id,
            num_clients=int(row[NUM_CLIENTS_IDX]),
            cluster_name=options.cluster_name,
            executor_name=executor,
            avg_time=avg_time,
            stddev=stddev,
            run_date=current_date,
            version=options.build_version,
            notes=options.report_description,
            run_info_id=run_info_id,
            num_iterations=int(row[NUM_ITERS_IDX]),
            runtime_profile=row[RUNTIME_PROFILE_IDX],
            is_official=options.is_official)
# Example 4
def write_results_to_datastore(grouped):
  """Persists every individual query result in `grouped` to the perf database.

  Opens a PerfResultDataStore connection (closed automatically via the
  context manager) and inserts one execution-result row per query run.
  Hive runs have no runtime profile, so 'N/A' is stored for them.
  """
  from perf_result_datastore import PerfResultDataStore

  LOG.info('Saving perf results to database')
  run_date = str(datetime.now())

  store = PerfResultDataStore(
      host=options.db_host,
      port=options.db_port,
      database_name=options.db_name,
      use_secure_connection=options.use_secure_connection)

  with store as data_store:
    for result_group in all_query_results(grouped):
      for single_result in result_group[RESULT_LIST]:
        query_info = single_result[QUERY]
        vector = query_info[TEST_VECTOR]
        # Hive results carry no Impala runtime profile.
        if options.hive_results:
          profile = 'N/A'
        else:
          profile = single_result[RUNTIME_PROFILE]

        data_store.insert_execution_result(
             query_name=query_info[NAME],
             query_string=query_info[QUERY_STR],
             workload_name=query_info[WORKLOAD_NAME],
             scale_factor=query_info[SCALE_FACTOR],
             file_format=vector[FILE_FORMAT],
             compression_codec=vector[COMPRESSION_CODEC],
             compression_type=vector[COMPRESSION_TYPE],
             num_clients=result_group[NUM_CLIENTS],
             num_iterations=result_group[ITERATIONS],
             cluster_name=options.cluster_name,
             executor_name=single_result[EXECUTOR_NAME],
             exec_time=single_result[TIME_TAKEN],
             run_date=run_date,
             version=options.build_version,
             run_info=options.lab_run_info,
             user_name=options.run_user_name,
             runtime_profile=profile)
def write_results_to_datastore(results):
  """ Saves results to a database """
  current_date = datetime.now()
  data_store = PerfResultDataStore(host=options.db_host, username=options.db_username,
      password=options.db_password, database_name=options.db_name)

  run_info_id = data_store.insert_run_info(options.lab_run_info)
  for row in results:
    # We ignore everything after the stddev column
    executor, workload, scale_factor, query_name, query, file_format,\
        compression, avg_time, stddev = row[0:STDDEV_IDX + 1]

    # Instead of storing 'N/A' in the database we want to store NULL
    avg_time = avg_time if avg_time and avg_time != 'N/A' else 'NULL'
    stddev = stddev if stddev and stddev != 'N/A' else 'NULL'

    file_type_id = data_store.get_file_format_id(file_format, compression)
    if file_type_id is None:
      print 'Skipping unkown file type: %s / %s' % (file_format, compression)
      continue

    workload_id = data_store.get_workload_id(workload, scale_factor)
    if workload_id is None:
      workload_id = data_store.insert_workload_info(workload, scale_factor)

    query_id = data_store.get_query_id(query_name, query)
    if query_id is None:
      query_id = data_store.insert_query_info(query_name, query)

    data_store.insert_execution_result(
        query_id=query_id, workload_id=workload_id, file_type_id=file_type_id,
        num_clients=int(row[NUM_CLIENTS_IDX]), cluster_name=options.cluster_name,
        executor_name=executor, avg_time=avg_time, stddev=stddev,
        run_date=current_date, version=options.build_version,
        notes=options.report_description, run_info_id=run_info_id,
        num_iterations=int(row[NUM_ITERS_IDX]), runtime_profile=row[RUNTIME_PROFILE_IDX],
        is_official=options.is_official)