def prepare_wiki_query(self, conf):
    """Download the wiki query log when the Wiki query maker is in use.

    Reads from conf:
        'query_maker': only acts when it names Lucene's WikiQueryMaker.
        'wiki_query_log_path': expected local path of the query log; the
            log is fetched into its parent directory when it is missing.
    """
    wiki_maker = "org.apache.lucene.benchmark.byTask.feeds.WikiQueryMaker"
    # was `os.path.exists(...) is False` -- use the idiomatic `not`
    if conf['query_maker'].strip() == wiki_maker and \
            not os.path.exists(conf['wiki_query_log_path']):
        # we need to download wiki_log
        dir_path = os.path.dirname(conf['wiki_query_log_path'])
        with helpers.cd(dir_path):
            helpers.shcmd("wget http://pages.cs.wisc.edu/~kanwu/querylog/wiki_QueryLog")
def test(self):
    """Re-run the sqlitewal-update simulation against a fresh copy of the
    canned test data under /tmp/results."""
    stale_dir = "/tmp/results/sqlitewal-update"
    # start from a clean slate: drop results left behind by a prior run
    if os.path.exists(stale_dir):
        shutil.rmtree(stale_dir)
    # copy the data to
    shcmd("cp -r ./tests/testdata/sqlitewal-update /tmp/results/")
    # run the simulation once per generated parameter set
    parameters = rule_parameter.ParaDict("testexpname", ['sqlitewal-update'], "grouping")
    for parameter in parameters:
        experiment.execute_simulation(parameter)
def download(self, conf):
    """Fetch the benchmark corpus into DOWNLOAD_DIR and decompress it.

    Reads from conf: 'download_url', 'origin_doc_name', 'decompress_cmd'.
    Side effect: sets self.origin_doc_path to the expected path of the
    decompressed document.
    """
    with helpers.cd(self.DOWNLOAD_DIR):
        # archive name is the last path component of the URL
        filename = conf['download_url'].split("/")[-1]
        print "------------", filename
        # download only when neither the archive nor the extracted
        # document is present; `wget -nc` also refuses to clobber
        if not os.path.exists(filename) or not os.path.exists(conf['origin_doc_name']):
            helpers.shcmd("wget -nc {}".format(conf['download_url']))
        self.origin_doc_path = os.path.join(self.DOWNLOAD_DIR, conf['origin_doc_name'])
        # decompress only when the extracted document is still missing
        # NOTE(review): assumes decompress_cmd produces origin_doc_name
        # inside DOWNLOAD_DIR -- confirm against the configs that use it
        if not os.path.exists(conf['origin_doc_name']):
            helpers.shcmd(conf['decompress_cmd'])
def treatment(self, conf):
    """Run the search benchmark against the cluster and write per-query
    results to out.csv inside this sub-experiment's directory."""
    out_path = os.path.join(self._subexpdir, "out.csv")
    cluster_hosts = hosts_string(conf['host'], conf['n_hosts'], conf['start_port'])
    cmd = ("{bench_exe} -engine {engine} -shards {n_shards} "
           "-hosts \"{hosts}\" "
           "-benchmark search -queries \"{query}\" -c {n_clients} "
           "-o {outpath}").format(
        bench_exe=BENCH_EXE,
        engine=conf['engine'],
        n_shards=conf['n_shards'],
        hosts=cluster_hosts,
        query=conf['query'],
        n_clients=conf['n_clients'],
        outpath=out_path,
    )
    helpers.shcmd(cmd)
def create_index(self, conf):
    """Build the index from the line-doc file, unless one already exists.

    Reads from conf:
        'work_dir': directory holding (or that will hold) the 'index' dir.
        'force_indexing': when true, any existing index is removed first.
        'line_doc_path', 'index_doc_count': forwarded to INDEX_LINE_DOC.
    """
    index_path = os.path.join(conf['work_dir'], 'index')
    # was `if conf['force_indexing'] is True`; a plain truthiness test
    # also honors truthy values such as 1 coming from parsed configs
    if conf['force_indexing']:
        helpers.shcmd("rm -rf {}".format(index_path))
    # skip if exists
    if os.path.exists(index_path):
        return
    benchrun = BenchRun(algs.INDEX_LINE_DOC(
        docs_file=conf['line_doc_path'],
        work_dir=conf['work_dir'],
        index_doc_count=conf['index_doc_count']
    ))
    benchrun.run()
def trace_cmd(cmd, tracer, ffilter):
    """Run *cmd* under Linux ftrace and return the captured trace text.

    tracer: function or function_graph
    ffilter: filter value handed to the Ftrace wrapper (presumably a
        function-name pattern -- confirm against the Ftrace class)
    """
    ftr = Ftrace()
    ftr.clean_trace()        # drop any leftover trace data from earlier runs
    ftr.set_tracer(tracer)
    ftr.start_tracing()
    # NOTE(review): the filter is applied *after* tracing starts, so a few
    # unfiltered events may be captured first -- confirm this is intended
    ftr.set_filter(ffilter)
    shcmd(cmd)               # the traced workload
    ftr.stop_tracing()
    text = ftr.get_trace()
    return text
def afterEach(self, conf): self.endtime = datetime.datetime.now() duration = (self.endtime - self.starttime).total_seconds() print "Duration:", duration query_per_sec = conf['query_count'] / duration print "Query per second:", query_per_sec d = { "duration": duration, "query_per_sec": query_per_sec, } d.update(conf) perf_path = os.path.join(self._subexpdir, "perf.txt") print 'writing to', perf_path helpers.table_to_file([d], perf_path, width=0) config_path = os.path.join(self._subexpdir, "config.json") helpers.shcmd("touch " + config_path)
def create_fs(dev, mntpoint, fstype):
    """Make *mntpoint* world-writable, format *dev* as *fstype*, and
    mount it there.

    Raises NotImplementedError for filesystems other than ext3/ext4.
    """
    shcmd("sudo chmod 777 -R {}".format(mntpoint))
    # dispatch table instead of an if/elif chain
    mkfs_by_type = {
        'ext4': "sudo mkfs.ext4 {}",
        'ext3': "sudo mkfs.ext3 {}",
    }
    try:
        mkfs_template = mkfs_by_type[fstype]
    except KeyError:
        raise NotImplementedError('{} not supported yet'.format(fstype))
    shcmd(mkfs_template.format(dev))
    shcmd("sudo mount {dev} {mnt}".format(dev=dev, mnt=mntpoint))
def build_index(n_shards, n_hosts, engine, start_port, host):
    """Feed the Wikipedia abstract file into the engine, sharded across
    the given hosts."""
    cluster_hosts = hosts_string(host, n_hosts, start_port)
    index_cmd = ("{bench_exe} -engine {engine} -shards {n_shards} "
                 "-hosts \"{hosts}\" -file {filepath}").format(
        bench_exe=BENCH_EXE,
        engine=engine,
        n_shards=n_shards,
        hosts=cluster_hosts,
        filepath=WIKI_ABSTRACT)
    helpers.shcmd(index_cmd)
from pyreuse.helpers import cd, shcmd

# classpath: the freshly built Lucene 7.0.1 snapshot jars
_JARS = [
    "build/core/lucene-core-7.0.1-SNAPSHOT.jar",
    "build/queryparser/lucene-queryparser-7.0.1-SNAPSHOT.jar",
    "build/analysis/common/lucene-analyzers-common-7.0.1-SNAPSHOT.jar",
    "build/demo/lucene-demo-7.0.1-SNAPSHOT.jar",
]
CP = ":".join(_JARS)

with cd("../../lucene-7.0.1"):
    # build the jars, then index the current tree and run the demo search
    shcmd("ant compile")
    # ant dist?
    shcmd("java -cp {} org.apache.lucene.demo.IndexFiles -docs .".format(CP))
    shcmd("java -cp {} org.apache.lucene.demo.SearchFiles ".format(CP))
def copy_trace(self, target_path):
    """Copy the 'trace' file from this run's root directory to
    *target_path*."""
    copy_cmd = "cp trace {}".format(target_path)
    with cd(self.rootdir):
        shcmd(copy_cmd)
def beforeEach(self, conf):
    """Per-run setup: build/refresh the index, then flush dirty pages so
    each run starts from a known cache state."""
    self.prepare_index(conf)
    # 'sync' flushes dirty pages; 'dropcache' is presumably a helper
    # script that clears the page cache -- TODO confirm it is on PATH
    helpers.shcmd("sync && dropcache")