def testFioCommandToJob(self):
  fio_parameters = (
      '--filesize=10g --directory=/scratch0 --ioengine=libaio '
      '--filename=fio_test_file --invalidate=1 --randrepeat=0 '
      '--direct=0 --size=3790088k --iodepth=8 '
      '--name=sequential_write --overwrite=0 --rw=write --end_fsync=1 '
      '--name=random_read --size=379008k --stonewall --rw=randread '
      '--name=sequential_read --stonewall --rw=read ')
  expected_result = ('[global]\n'
                     'filesize=10g\n'
                     'directory=/scratch0\n'
                     'ioengine=libaio\n'
                     'filename=fio_test_file\n'
                     'invalidate=1\n'
                     'randrepeat=0\n'
                     'direct=0\n'
                     'size=3790088k\n'
                     'iodepth=8\n'
                     '[sequential_write]\n'
                     'overwrite=0\n'
                     'rw=write\n'
                     'end_fsync=1\n'
                     '[random_read]\n'
                     'size=379008k\n'
                     'stonewall\n'
                     'rw=randread\n'
                     '[sequential_read]\n'
                     'stonewall\n'
                     'rw=read\n')
  result = fio.FioParametersToJob(fio_parameters)
  self.assertEqual(expected_result, result)
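# A minimal sketch of the conversion the test above pins down, assuming the
# real fio.FioParametersToJob behaves like this simplified stand-in: options
# seen before the first --name form the [global] section, and each
# --name=<job> opens a new job section. Grounded only in the test's
# expected_result; the real implementation may differ.
def _ParametersToJobSketch(fio_parameters):
  lines = ['[global]']
  for param in fio_parameters.split():
    option = param.lstrip('-')
    if option.startswith('name='):
      lines.append('[%s]' % option[len('name='):])  # Start a new job section.
    else:
      lines.append(option)  # key=value option or a bare flag like stonewall.
  return '\n'.join(lines) + '\n'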
def _ParseFioJson(fio_json):
  """Parse fio json output.

  Args:
    fio_json: string. Json output from the fio command.

  Returns:
    A list of sample.Sample objects.
  """
  samples = []
  for job in json.loads(fio_json)['jobs']:
    cmd = job['fio_command']
    # Get rid of the leading ./fio.
    cmd = ' '.join(cmd.split()[1:])
    additional_metadata = {'cmd': cmd}
    # Remove the ssmw suffix from the job name.
    try:
      job['jobname'] = regex_util.Substitute(
          STEADY_STATE_MEASUREMENT_WINDOW, '', job['jobname'])
      additional_metadata['steady_state'] = True
    except regex_util.NoMatchError:
      additional_metadata['steady_state'] = False
    # Wrap the single job in a mock fio_json so the fio parser can be reused.
    mock_json = {'jobs': [job]}
    new_samples = fio.ParseResults(
        str(fio.FioParametersToJob(cmd)), mock_json)
    for s in new_samples:
      s.metadata.update(additional_metadata)
    samples += new_samples
  return samples
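# Hedged illustration of the input shape _ParseFioJson consumes. The 'jobs',
# 'jobname', and 'fio_command' keys are taken from the parser above; the
# '.ssmw' suffix is an assumption about what STEADY_STATE_MEASUREMENT_WINDOW
# matches, and the command string itself is made up for illustration.
_EXAMPLE_FIO_JSON = json.dumps({
    'jobs': [{
        'jobname': 'random_read.ssmw',
        'fio_command': './fio --output-format=json --name=random_read',
    }],
})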
def RunSimulatedDatabase(vm):
  """Spawn fio to simulate database and gather the results.

  Args:
    vm: The vm that synthetic_storage_workloads_benchmark will be run upon.

  Returns:
    A list of sample.Sample objects.
  """
  test_size = min(vm.total_memory_kb / 10, 1000000)
  iodepth_list = FLAGS.iodepth_list or DEFAULT_DATABASE_SIMULATION_IODEPTH_LIST
  results = []
  for depth in iodepth_list:
    cmd = ('--filesize=10g '
           '--directory=%s '
           '--ioengine=libaio '
           '--filename=fio_test_file '
           '--overwrite=1 '
           '--invalidate=0 '
           '--direct=1 '
           '--randrepeat=0 '
           '--iodepth=%s '
           '--size=%dk '
           '--blocksize=4k ') % (vm.GetScratchDir(), depth, test_size)
    if FLAGS.maxjobs:
      cmd += '--max-jobs=%s ' % FLAGS.maxjobs
    cmd += ('--name=random_write '
            '--rw=randwrite '
            '--end_fsync=1 '
            '--name=random_read '
            '--stonewall '
            '--rw=randread '
            '--name=mixed_randrw '
            '--stonewall '
            '--rw=randrw '
            '--rwmixread=90 '
            '--rwmixwrite=10 '
            '--end_fsync=1 ')
    logging.info('FIO Results for simulated %s, iodepth %s', DATABASE, depth)
    res, _ = vm.RemoteCommand('%s %s' % (fio.FIO_CMD_PREFIX, cmd),
                              should_log=True)
    instance, _, _ = vm.RemoteHostCommandWithReturnCode('cat /etc/kubenode')
    instance = instance.strip()
    results.extend(
        fio.ParseResults(fio.FioParametersToJob(cmd), json.loads(res),
                         instance=instance))
  UpdateWorkloadMetadata(results)
  return results
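# Hedged arithmetic check of the sizing above: the test file is a tenth of VM
# memory, capped at 1000000 kB (~1 GB). The values below are an invented
# example for a 30 GB VM; // mirrors the Python 2 integer division used above.
total_memory_kb = 30 * 1024 * 1024             # 31457280 kB of RAM.
test_size = min(total_memory_kb // 10, 1000000)
assert test_size == 1000000                    # The ~1 GB cap is in effect.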
def _RunFio(vm, fio_params, metadata):
  """Run fio.

  Args:
    vm: Virtual machine to run fio on.
    fio_params: fio parameters used to create the fio command to run.
    metadata: Metadata to add to the results.

  Returns:
    A list of sample.Sample objects.
  """
  stdout, _ = vm.RemoteCommand('sudo {0} {1}'.format(fio.GetFioExec(),
                                                     fio_params))
  job_file_contents = fio.FioParametersToJob(fio_params)
  samples = fio.ParseResults(job_file_contents,
                             json.loads(stdout),
                             base_metadata=metadata,
                             skip_latency_individual_stats=True)
  return samples
def RunSimulatedStreaming(vm):
  """Spawn fio to simulate streaming and gather the results.

  Args:
    vm: The vm that synthetic_storage_workloads_benchmark will be run upon.

  Returns:
    A list of sample.Sample objects.
  """
  test_size = min(vm.total_memory_kb / 10, 1000000)
  iodepth_list = (FLAGS.iodepth_list or
                  DEFAULT_STREAMING_SIMULATION_IODEPTH_LIST)
  results = []
  for depth in iodepth_list:
    cmd = ('--filesize=10g '
           '--directory=%s '
           '--ioengine=libaio '
           '--overwrite=0 '
           '--invalidate=1 '
           '--direct=1 '
           '--randrepeat=0 '
           '--iodepth=%s '
           '--blocksize=1m '
           '--size=%dk '
           '--filename=fio_test_file ') % (vm.GetScratchDir(), depth,
                                           test_size)
    if FLAGS.maxjobs:
      cmd += '--max-jobs=%s ' % FLAGS.maxjobs
    cmd += ('--name=sequential_write '
            '--rw=write '
            '--end_fsync=1 '
            '--name=sequential_read '
            '--stonewall '
            '--rw=read ')
    logging.info('FIO Results for simulated %s', STREAMING)
    res, _ = vm.RemoteCommand('%s %s' % (fio.FIO_CMD_PREFIX, cmd),
                              should_log=True)
    results.extend(
        fio.ParseResults(fio.FioParametersToJob(cmd), json.loads(res)))
  UpdateWorkloadMetadata(results)
  return results
def RunSimulatedLogging(vm):
  """Spawn fio to simulate logging and gather the results.

  Args:
    vm: The vm that synthetic_storage_workloads_benchmark will be run upon.

  Returns:
    A list of sample.Sample objects.
  """
  test_size = vm.total_memory_kb
  cmd = ('--filesize=10g '
         '--directory=%s '
         '--ioengine=libaio '
         '--filename=fio_test_file '
         '--invalidate=1 '
         '--randrepeat=0 '
         '--direct=0 '
         '--size=%dk '
         '--iodepth=%d ') % (vm.GetScratchDir(), test_size, DEFAULT_IODEPTH)
  if FLAGS.maxjobs:
    cmd += '--max-jobs=%s ' % FLAGS.maxjobs
  cmd += ('--name=sequential_write '
          '--overwrite=0 '
          '--rw=write '
          '--end_fsync=1 '
          '--name=random_read '
          '--size=%dk '
          '--stonewall '
          '--rw=randread '
          '--name=sequential_read '
          '--stonewall '
          '--rw=read ') % (test_size / 10)
  logging.info('FIO Results for simulated %s', LOGGING)
  res, _ = vm.RemoteCommand('%s %s' % (fio.FIO_CMD_PREFIX, cmd),
                            should_log=True)
  instance, _, _ = vm.RemoteHostCommandWithReturnCode('cat /etc/kubenode')
  instance = instance.strip()
  results = fio.ParseResults(fio.FioParametersToJob(cmd), json.loads(res),
                             instance=instance)
  UpdateWorkloadMetadata(results)
  return results
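# Hedged usage sketch tying this function back to testFioCommandToJob at the
# top of this section: with a scratch directory of /scratch0, a test_size of
# 3790088 kB, and an iodepth of 8 (values lifted from that test, so they are
# assumptions about DEFAULT_IODEPTH and the VM's memory), the command built
# above is exactly the test's input, and FioParametersToJob yields its
# expected_result job file.
scratch_dir, test_size_kb, iodepth = '/scratch0', 3790088, 8
cmd = ('--filesize=10g --directory=%s --ioengine=libaio '
       '--filename=fio_test_file --invalidate=1 --randrepeat=0 '
       '--direct=0 --size=%dk --iodepth=%d '
       '--name=sequential_write --overwrite=0 --rw=write --end_fsync=1 '
       '--name=random_read --size=%dk --stonewall --rw=randread '
       '--name=sequential_read --stonewall --rw=read ') % (
           scratch_dir, test_size_kb, iodepth, test_size_kb // 10)
print(fio.FioParametersToJob(cmd))  # Prints the job file shown in the test.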