def test_rpma_fio_bench(tool_mode, readwrite, mode, monkeypatch):
    """test all arguments variants of rpma_fio_bench.sh"""
    def run_mock(args, env):
        assert len(args) == 5
        # XXX is it always correct to assume the tool is in the current
        # working directory?
        assert args[0] == './' + TOOL_RPMA_FIO_BENCH
        assert args[1] == IP_DUMMY
        assert args[2] == tool_mode
        assert args[3] == readwrite
        assert args[4] == mode
        assert env['OUTPUT_FILE'] == output_file(ID_DUMMY, RESULT_DIR)
        return ProcessMock()
    monkeypatch.setattr(subprocess, 'run', run_mock)
    oneseries = {**ONESERIES_DUMMY,
                 'tool': TOOL_RPMA_FIO_BENCH, 'mode': mode,
                 'tool_mode': tool_mode, 'rw': readwrite,
                 'busy_wait_polling': True}
    benchmark = Benchmark(oneseries)
    benchmark.run(CONFIG_DUMMY, RESULT_DIR)
    assert benchmark.is_done()
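# run_mock() in the tests of this collection returns ProcessMock(), which is
# not defined in this excerpt. Below is a minimal sketch of what such a stub
# could look like; the class name is taken from the tests, but its attributes
# merely mimic subprocess.CompletedProcess and the zero return code is an
# assumption made for illustration.
class ProcessMock:
    """a simple stand-in for subprocess.CompletedProcess"""

    returncode = 0

    def check_returncode(self):
        # a zero return code never raises, i.e. the mocked run always succeeds
        pass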
def tearDown(self):
    command = 'deletetable %s -f\n' % self.tablename
    log.debug("Running Command %r", command)
    code, out, err = cloudshell.run(self.username, self.password, command)
    self.assertEqual(code, 0, "Could not delete table")
    log.debug("Process finished")
    Benchmark.tearDown(self)
def setUp(self):
    random.jumpahead(int(time.time()))
    num = random.randint(1, 100000)
    self.input_table = self.input_table + "_" + str(num)
    self.output_table = self.output_table + "_" + str(num)
    #if (not os.getenv("HADOOP_CLASSPATH")):
    #    os.putenv("HADOOP_CLASSPATH", self.getjars(":"))
    dir = os.path.dirname(os.path.realpath(__file__))
    file = os.path.join(dir, 'splits')
    # code, out, err = cloudshell.run(self.username, self.password, 'table RowHashTestInput\n')
    # if out.find('no such table') == -1:
    #     code, out, err = cloudshell.run(self.username, self.password, 'deletetable RowHashTestInput\n')
    #     self.sleep(15)
    code, out, err = cloudshell.run(self.username, self.password,
                                    "createtable %s -sf %s\n" % (self.input_table, file))
    #code, out, err = cloudshell.run('table RowHashTest\n')
    #if out.find('no such table') == -1:
    #    code, out, err = cloudshell.run('user root\nsecret\ndeletetable RowHashTest\n')
    #    self.sleep(15)
    code, out, err = cloudshell.run(self.username, self.password,
                                    "createtable %s -sf %s\n" % (self.output_table, file))
    command = self.buildcommand('org.apache.accumulo.examples.simple.mapreduce.TeraSortIngest',
                                self.numrows(), self.keysizemin(), self.keysizemax(),
                                self.minvaluesize(), self.maxvaluesize(), self.input_table,
                                self.getInstance(), self.getZookeepers(),
                                self.getUsername(), self.getPassword(), self.maxmaps)
    handle = runner.start(command, stdin=subprocess.PIPE)
    log.debug("Running: %r", command)
    out, err = handle.communicate("")
    Benchmark.setUp(self)
def setUp(self):
    random.jumpahead(int(time.time()))
    num = random.randint(1, 100000)
    self.input_table = self.input_table + "_" + str(num)
    self.output_table = self.output_table + "_" + str(num)
    #if (not os.getenv("HADOOP_CLASSPATH")):
    #    os.putenv("HADOOP_CLASSPATH", self.getjars(":"))
    dir = os.path.dirname(os.path.realpath(__file__))
    file = os.path.join(dir, 'splits')
    # code, out, err = cloudshell.run(self.username, self.password, 'table RowHashTestInput\n')
    # if out.find('no such table') == -1:
    #     code, out, err = cloudshell.run(self.username, self.password, 'deletetable RowHashTestInput\n')
    #     self.sleep(15)
    code, out, err = cloudshell.run(self.username, self.password,
                                    "createtable %s -sf %s\n" % (self.input_table, file))
    #code, out, err = cloudshell.run('table RowHashTest\n')
    #if out.find('no such table') == -1:
    #    code, out, err = cloudshell.run('user root\nsecret\ndeletetable RowHashTest\n')
    #    self.sleep(15)
    code, out, err = cloudshell.run(self.username, self.password,
                                    "createtable %s -sf %s\n" % (self.output_table, file))
    command = self.buildcommand('org.apache.accumulo.examples.simple.mapreduce.TeraSortIngest',
                                self.numrows(), self.keysizemin(), self.keysizemax(),
                                self.minvaluesize(), self.maxvaluesize(), self.input_table,
                                self.getInstance(), self.getZookeepers(),
                                self.getUsername(), self.getPassword(), self.maxmaps)
    handle = runner.start(command, stdin=subprocess.PIPE)
    log.debug("Running: %r", command)
    out, err = handle.communicate("")
    Benchmark.setUp(self)
def setUp(self):
    dir = os.path.dirname(os.path.realpath(__file__))
    file = os.path.join(dir, 'splits')
    code, out, err = cloudshell.run(self.username, self.password,
                                    'table %s\n' % self.input_table)
    if out.find('does not exist') == -1:
        code, out, err = cloudshell.run(self.username, self.password,
                                        'deletetable -f %s\n' % self.input_table)
        self.sleep(15)
    code, out, err = cloudshell.run(self.username, self.password,
                                    "createtable %s -sf %s\n" % (self.input_table, file))
    code, out, err = cloudshell.run(self.username, self.password,
                                    'table %s\n' % self.output_table)
    if out.find('does not exist') == -1:
        code, out, err = cloudshell.run(self.username, self.password,
                                        'deletetable -f %s\n' % self.output_table)
        self.sleep(15)
    code, out, err = cloudshell.run(self.username, self.password,
                                    "createtable %s -sf %s\n" % (self.output_table, file))
    command = self.buildcommand('org.apache.accumulo.examples.simple.mapreduce.TeraSortIngest',
                                '--count', self.numrows(),
                                '-nk', self.keysizemin(),
                                '-xk', self.keysizemax(),
                                '-nv', self.minvaluesize(),
                                '-xv', self.maxvaluesize(),
                                '--table', self.input_table,
                                '-i', self.getInstance(),
                                '-z', self.getZookeepers(),
                                '-u', self.getUsername(),
                                '-p', self.getPassword(),
                                '--splits', self.maxmaps)
    handle = runner.start(command, stdin=subprocess.PIPE)
    log.debug("Running: %r", command)
    out, err = handle.communicate("")
    Benchmark.setUp(self)
def setUp(self):
    # Need to generate a splits file for each speed
    code, out, err = cloudshell.run(self.username, self.password,
                                    'table %s\n' % self.tablename)
    if out.find('does not exist') == -1:
        log.debug('Deleting table %s' % self.tablename)
        code, out, err = cloudshell.run(self.username, self.password,
                                        'deletetable %s -f\n' % self.tablename)
        self.assertEqual(code, 0, "Could not delete table")
    Benchmark.setUp(self)
def setUp(self):
    code, out, err = cloudshell.run(self.username, self.password, 'table test_ingest\n')
    if out.find('does not exist') == -1:
        log.debug("Deleting table test_ingest")
        code, out, err = cloudshell.run(self.username, self.password,
                                        'deletetable test_ingest -f\n')
        self.assertEqual(code, 0, "Could not delete the table 'test_ingest'")
    code, out, err = cloudshell.run(self.username, self.password, 'createtable test_ingest\n')
    self.assertEqual(code, 0, "Could not create the table 'test_ingest'")
    Benchmark.setUp(self)
def setUp(self):
    code, out, err = cloudshell.run(self.username, self.password,
                                    'table %s\n' % self.tablename)
    if out.find('does not exist') == -1:
        log.debug('Deleting table %s' % self.tablename)
        code, out, err = cloudshell.run(self.username, self.password,
                                        'deletetable -f %s\n' % self.tablename)
    Benchmark.setUp(self)
def setUp(self):
    code, out, err = cloudshell.run(self.username, self.password, 'table test_ingest\n')
    # drop the stale table only if it already exists
    if out.find('no such table') == -1:
        log.debug("Deleting table test_ingest")
        code, out, err = cloudshell.run(self.username, self.password,
                                        'deletetable test_ingest\n')
        self.sleep(10)
    code, out, err = cloudshell.run(self.username, self.password, 'createtable test_ingest\n')
    self.assertEqual(code, 0)
    Benchmark.setUp(self)
def tearDown(self): code, out, err = cloudshell.run(self.username, self.password, "deletetable %s\n" % self.input_table) self.assertEqual(code, 0, 'Could not delete %s, %s' % (self.input_table, out)) code, out, err = cloudshell.run(self.username, self.password, "deletetable %s\n" % self.output_table) self.assertEqual(code, 0, 'Could not delete %s, %s' % (self.output_table, out)) Benchmark.tearDown(self)
def setUp(self):
    for x in range(1, self.tables):
        currentTable = 'test_ingest%d' % (x)
        log.debug("Checking for table existence: %s" % currentTable)
        code, out, err = cloudshell.run(self.username, self.password,
                                        'table %s\n' % currentTable)
        if out.find('does not exist') == -1:
            command = 'deletetable -f %s\n' % (currentTable)
            log.debug("Running Command %r", command)
            code, out, err = cloudshell.run(self.username, self.password, command)
            self.assertEqual(code, 0, 'Did not successfully delete table: %s' % currentTable)
    Benchmark.setUp(self)
def setUp(self):
    random.jumpahead(int(time.time()))
    num = random.randint(1, 100000)
    #self.tablename = self.tablename + "-" + str(num)
    # Find which hadoop version
    # code, out, err = cloudshell.run(self.username, self.password, 'table %s\n' % self.tablename)
    #if out.find('no such table') == -1:
    #    log.debug('Deleting table %s' % self.tablename)
    #    code, out, err = cloudshell.run(self.username, self.password, 'deletetable %s\n' % self.tablename)
    #    self.sleep(10)
    Benchmark.setUp(self)
def test_incomplete_benchmark(key, monkeypatch):
    """an incomplete benchmark definition"""
    def run_mock(_args, **_):
        assert False, "subprocess.run() should not be called"
    monkeypatch.setattr(subprocess, 'run', run_mock)
    oneseries = {**ONESERIES_DUMMY}
    oneseries.pop(key, None)
    benchmark = Benchmark(oneseries)
    with pytest.raises(ValueError):
        benchmark.run(CONFIG_DUMMY, RESULT_DIR)
    assert not benchmark.is_done()
def setUp(self):
    random.jumpahead(int(time.time()))
    num = random.randint(1, 100000)
    self.tablename = self.tablename + "_" + str(num)
    # Need to generate a splits file for each speed
    #code, out, err = cloudshell.run(self.username, self.password, 'table %s\n' % self.tablename)
    #if out.find('no such table') == -1:
    #    log.debug('Deleting table %s' % self.tablename)
    #    code, out, err = cloudshell.run('user %s\n%s\ndeletetable %s\n' % (self.user,
    #                                                                       self.password,
    #                                                                       self.tablename))
    #    self.sleep(5)
    Benchmark.setUp(self)
def test_busy_wait_polling(config_busy_wait_polling, busy_wait_polling,
                           expected_busy_wait_polling, monkeypatch):
    """busy_wait_polling to BUSY_WAIT_POLLING mapping"""
    def run_mock(_args, env):
        assert env['BUSY_WAIT_POLLING'] == expected_busy_wait_polling
        return ProcessMock()
    monkeypatch.setattr(subprocess, 'run', run_mock)
    oneseries = {**ONESERIES_DUMMY, 'busy_wait_polling': busy_wait_polling}
    benchmark = Benchmark(oneseries)
    config = {**CONFIG_BIG, 'BUSY_WAIT_POLLING': config_busy_wait_polling}
    benchmark.run(config, RESULT_DIR)
    assert benchmark.is_done()
def test_filetype_pmem_no_mem_path(config_remote_job_mem_path, monkeypatch):
    """filetype=pmem when no REMOTE_JOB_MEM_PATH provided"""
    def run_mock(_args, **_):
        assert False, "subprocess.run() should not be called"
    monkeypatch.setattr(subprocess, 'run', run_mock)
    oneseries = {**ONESERIES_DUMMY, 'filetype': 'pmem'}
    benchmark = Benchmark(oneseries)
    if config_remote_job_mem_path is None:
        CONFIG_BIG.pop('REMOTE_JOB_MEM_PATH', None)
    else:
        CONFIG_BIG['REMOTE_JOB_MEM_PATH'] = config_remote_job_mem_path
    with pytest.raises(ValueError):
        benchmark.run(CONFIG_BIG, RESULT_DIR)
    assert not benchmark.is_done()
def setUp(self):
    for x in range(1, self.tables):
        currentTable = 'test_ingest%d' % (x)
        log.debug("Checking for table existence: %s" % currentTable)
        code, out, err = cloudshell.run(self.username, self.password,
                                        'table %s\n' % currentTable)
        if out.find('does not exist') == -1:
            command = 'deletetable -f %s\n' % (currentTable)
            log.debug("Running Command %r", command)
            code, out, err = cloudshell.run(self.username, self.password, command)
            self.assertEqual(code, 0, 'Did not successfully delete table: %s' % currentTable)
    Benchmark.setUp(self)
def test_gpspm_no_busy_wait_polling(readwrite, mode, monkeypatch):
    """tool_mode=gpspm when no busy_wait_polling provided"""
    def run_mock(_args, **_):
        assert False, "subprocess.run() should not be called"
    monkeypatch.setattr(subprocess, 'run', run_mock)
    oneseries = {**ONESERIES_DUMMY,
                 'tool': TOOL_RPMA_FIO_BENCH, 'mode': mode,
                 'tool_mode': 'gpspm', 'rw': readwrite}
    benchmark = Benchmark(oneseries)
    with pytest.raises(ValueError):
        benchmark.run(CONFIG_BIG, RESULT_DIR)
    assert not benchmark.is_done()
def test_ib_read(mode, monkeypatch):
    """test all arguments variants of ib_read.sh"""
    def run_mock(args, env):
        assert len(args) == 3
        # XXX is it always correct to assume the tool is in the current
        # working directory?
        assert args[0] == './' + TOOL_IB_READ
        assert args[1] == IP_DUMMY
        assert args[2] == mode
        assert env['OUTPUT_FILE'] == output_file(ID_DUMMY, RESULT_DIR)
        return ProcessMock()
    monkeypatch.setattr(subprocess, 'run', run_mock)
    oneseries = {**ONESERIES_DUMMY, 'tool': TOOL_IB_READ, 'mode': mode}
    benchmark = Benchmark(oneseries)
    benchmark.run(CONFIG_DUMMY, RESULT_DIR)
    assert benchmark.is_done()
def test_filetype(filetype, monkeypatch):
    """filetype to REMOTE_JOB_MEM_PATH mapping"""
    # sanity check
    assert CONFIG_BIG['REMOTE_JOB_MEM_PATH'] != 'malloc'

    def run_mock(_args, env):
        if filetype == 'malloc':
            assert env['REMOTE_JOB_MEM_PATH'] == 'malloc'
        else:
            assert env['REMOTE_JOB_MEM_PATH'] == \
                CONFIG_BIG['REMOTE_JOB_MEM_PATH']
        return ProcessMock()
    monkeypatch.setattr(subprocess, 'run', run_mock)
    oneseries = {**ONESERIES_DUMMY, 'filetype': filetype}
    benchmark = Benchmark(oneseries)
    benchmark.run(CONFIG_BIG, RESULT_DIR)
    assert benchmark.is_done()
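# A hedged sketch of how the 'filetype' argument used above might be supplied;
# the parametrized fixture below and its concrete values ('malloc', 'pmem') are
# assumptions made for illustration, not taken from this excerpt.
@pytest.fixture(params=['malloc', 'pmem'])
def filetype(request):
    """provide a filetype value for test_filetype"""
    return request.param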
def setUp(self):
    Benchmark.setUp(self)
def tearDown(self):
    Benchmark.tearDown(self)
def tearDown(self):
    command = 'deletetable test_ingest -f\n'
    log.debug("Running Command %r", command)
    code, out, err = cloudshell.run(self.username, self.password, command)
    self.assertEqual(code, 0, "Could not delete the table 'test_ingest'")
    Benchmark.tearDown(self)
def tearDown(self): code, out, err = cloudshell.run(self.username, self.password, "deletetable -f %s\n" % self.tablename) self.assertEqual(code, 0, "Could not delete %s, %s" % (self.tablename, out)) Benchmark.tearDown(self)
def setUp(self):
    code, out, err = cloudshell.run(self.username, self.password,
                                    'table %s\n' % self.tablename)
    if out.find('does not exist') == -1:
        log.debug('Deleting table %s' % self.tablename)
        code, out, err = cloudshell.run(self.username, self.password,
                                        'deletetable -f %s\n' % self.tablename)
    Benchmark.setUp(self)
def fixture_benchmark_dummy():
    """create a very simple Benchmark instance"""
    return Benchmark({**ONESERIES_DUMMY})
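# A minimal usage sketch, assuming the helper above is registered as a pytest
# fixture elsewhere (e.g. with @pytest.fixture(name='benchmark_dummy')); the
# fixture name and the test below are hypothetical and only illustrate how it
# would be consumed:
#
#     def test_benchmark_dummy_is_not_done(benchmark_dummy):
#         assert not benchmark_dummy.is_done()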
def setUp(self): code, out, err = cloudshell.run(self.username, self.password, "table %s\n" % self.tablename) if out.find("does not exist") == -1: log.debug("Deleting table %s" % self.tablename) code, out, err = cloudshell.run(self.username, self.password, "deletetable -f %s\n" % self.tablename) Benchmark.setUp(self)