def RunWithExec(benchmark_spec, exec_path, remote_job_file_path,
                job_file_contents):
    """Spawn fio on the benchmark's first VM and gather the results.

    Args:
      benchmark_spec: The benchmark specification. Contains all data that is
          required to run the benchmark.
      exec_path: string path to the fio executable.
      remote_job_file_path: path, on the vm, to the location of the job file.
      job_file_contents: string contents of the fio job file.

    Returns:
      A list of sample.Sample objects.
    """
    vm = benchmark_spec.vms[0]
    logging.info('FIO running on %s', vm)

    disk = vm.scratch_disks[0]
    mount_point = disk.mount_point

    job_file_string = GetOrGenerateJobFileString(
        FLAGS.fio_jobfile, FLAGS.fio_generate_scenarios, AgainstDevice(), disk,
        FLAGS.fio_io_depths, FLAGS.fio_num_jobs, FLAGS.fio_working_set_size,
        FLAGS.fio_blocksize, FLAGS.fio_runtime, FLAGS.fio_parameters,
        job_file_contents)
    job_file_path = vm_util.PrependTempDir(vm.name + LOCAL_JOB_FILE_SUFFIX)
    with open(job_file_path, 'w') as job_file:
        job_file.write(job_file_string)
        logging.info('Wrote fio job file at %s', job_file_path)
        # Pass the job file as a lazy %-argument: fio job files routinely
        # contain '%' characters, which would be misinterpreted as logging
        # format directives if passed as the message itself.
        logging.info('%s', job_file_string)

    vm.PushFile(job_file_path, remote_job_file_path)

    # When running against the raw device, point fio at the device path;
    # otherwise run against files under the scratch disk's mount point.
    if AgainstDevice():
        fio_command = '%s --output-format=json --filename=%s %s' % (
            exec_path, disk.GetDevicePath(), remote_job_file_path)
    else:
        fio_command = '%s --output-format=json --directory=%s %s' % (
            exec_path, mount_point, remote_job_file_path)

    collect_logs = any([
        FLAGS.fio_lat_log, FLAGS.fio_bw_log, FLAGS.fio_iops_log,
        FLAGS.fio_hist_log
    ])

    log_file_base = ''
    if collect_logs:
        # Timestamp makes the log file names unique across runs.
        log_file_base = '%s_%s' % (PKB_FIO_LOG_FILE_NAME, time.time())
        fio_command = ' '.join([fio_command, GetLogFlags(log_file_base)])

    # TODO(user): This only gives results at the end of a job run
    #      so the program pauses here with no feedback to the user.
    #      This is a pretty lousy experience.
    logging.info('FIO Results:')

    stdout, _ = vm.RobustRemoteCommand(fio_command, should_log=True)
    bin_vals = []
    if collect_logs:
        vm.PullFile(vm_util.GetTempDir(), '%s*.log' % log_file_base)
        if FLAGS.fio_hist_log:
            # One clat histogram log per fio job; count them on the VM so we
            # know how many to parse.
            num_logs = int(
                vm.RemoteCommand('ls %s_clat_hist.*.log | wc -l' %
                                 log_file_base)[0])
            # fio numbers histogram logs starting at 1, hence idx + 1.
            bin_vals += [
                fio.ComputeHistogramBinVals(
                    vm, '%s_clat_hist.%s.log' % (log_file_base, idx + 1))
                for idx in range(num_logs)
            ]
    samples = fio.ParseResults(job_file_string,
                               json.loads(stdout),
                               log_file_base=log_file_base,
                               bin_vals=bin_vals)

    return samples
# ---------------------------------------------------------------------------
# Example 2: alternate RunWithExec that takes the target VM directly.
# ---------------------------------------------------------------------------
def RunWithExec(vm, exec_path, remote_job_file_path, job_file_contents):
    """Spawn fio on the given VM and gather the results.

    Args:
      vm: vm to run the benchmark on.
      exec_path: string path to the fio executable.
      remote_job_file_path: path, on the vm, to the location of the job file.
      job_file_contents: string contents of the fio job file.

    Returns:
      A list of sample.Sample objects.
    """
    logging.info('FIO running on %s', vm)

    disk = vm.scratch_disks[0]
    mount_point = disk.mount_point
    if FLAGS.fio_write_against_multiple_clients:
        # Give each client VM its own subdirectory so concurrent writers do
        # not clobber each other's files.
        mount_point = '%s/%s' % (disk.mount_point, vm.name)
        logging.info('FIO mount point changed to %s', mount_point)

    job_file_string = GetOrGenerateJobFileString(
        FLAGS.fio_jobfile, FLAGS.fio_generate_scenarios, AgainstDevice(), disk,
        FLAGS.fio_io_depths, FLAGS.fio_num_jobs, FLAGS.fio_working_set_size,
        FLAGS.fio_blocksize, FLAGS.fio_runtime, _DIRECT_IO.value,
        FLAGS.fio_parameters, job_file_contents)
    job_file_path = vm_util.PrependTempDir(vm.name + LOCAL_JOB_FILE_SUFFIX)
    with open(job_file_path, 'w') as job_file:
        job_file.write(job_file_string)
        logging.info('Wrote fio job file at %s', job_file_path)
        # Pass the job file as a lazy %-argument: fio job files routinely
        # contain '%' characters, which would be misinterpreted as logging
        # format directives if passed as the message itself.
        logging.info('%s', job_file_string)

    vm.PushFile(job_file_path, remote_job_file_path)

    # When running against the raw device, point fio at the device path;
    # otherwise run against files under the scratch disk's mount point.
    if AgainstDevice():
        fio_command = (
            f'{exec_path} --output-format=json '
            f'--random_generator={FLAGS.fio_rng} '
            f'--filename={disk.GetDevicePath()} {remote_job_file_path}')
    else:
        fio_command = (f'{exec_path} --output-format=json '
                       f'--random_generator={FLAGS.fio_rng} '
                       f'--directory={mount_point} {remote_job_file_path}')

    collect_logs = any([
        FLAGS.fio_lat_log, FLAGS.fio_bw_log, FLAGS.fio_iops_log,
        FLAGS.fio_hist_log
    ])

    log_file_base = ''
    if collect_logs:
        # Timestamp makes the log file names unique across runs.
        log_file_base = '%s_%s' % (PKB_FIO_LOG_FILE_NAME, time.time())
        fio_command = ' '.join([fio_command, GetLogFlags(log_file_base)])

    # TODO(user): This only gives results at the end of a job run
    #      so the program pauses here with no feedback to the user.
    #      This is a pretty lousy experience.
    logging.info('FIO Results:')

    start_time = time.time()
    stdout, _ = vm.RobustRemoteCommand(fio_command,
                                       should_log=True,
                                       timeout=FLAGS.fio_command_timeout_sec)
    end_time = time.time()
    bin_vals = []
    if collect_logs:
        vm.PullFile(vm_util.GetTempDir(), '%s*.log' % log_file_base)
        if FLAGS.fio_hist_log:
            # One clat histogram log per fio job; count them on the VM so we
            # know how many to parse.
            num_logs = int(
                vm.RemoteCommand('ls %s_clat_hist.*.log | wc -l' %
                                 log_file_base)[0])
            # fio numbers histogram logs starting at 1, hence idx + 1.
            bin_vals += [
                fio.ComputeHistogramBinVals(
                    vm, '%s_clat_hist.%s.log' % (log_file_base, idx + 1))
                for idx in range(num_logs)
            ]
    samples = fio.ParseResults(job_file_string,
                               json.loads(stdout),
                               log_file_base=log_file_base,
                               bin_vals=bin_vals)

    # Guard against an empty parse result: indexing samples[0] would raise
    # IndexError before this change. Only attach timing samples when there
    # is metadata to share.
    if samples:
        metadata = samples[0].metadata
        samples.append(
            sample.Sample('start_time', start_time, 'sec', metadata))
        samples.append(
            sample.Sample('end_time', end_time, 'sec', metadata))

    return samples