def populate():
    """Register every available CLI operation exactly once.

    Mutates the module-level ``operations`` registry via ``add_operation``.
    The emptiness guard makes repeated calls idempotent: only the first
    call that sees an empty registry populates it.
    """
    # Idiomatic emptiness test (was `len(operations) == 0`); the two
    # trailing no-op `pass` statements were removed.
    if not operations:
        add_operation(query.operation())
        add_operation(show_tables.operation())
        add_operation(delete_tag.operation())
        add_operation(report.operation())
        add_operation(run.operation())
        add_operation(show_db_config.operation())
        add_operation(graph_create.operation())
        add_operation(graph_v_ingest.operation())
        add_operation(graph_v_search.operation())
        add_operation(make_graph500.operation())
        add_operation(generate_elist.operation())
        add_operation(graph_e_ingest.operation())
        add_operation(graph_e_standard_ingest.operation())
        add_operation(graph_e_ring_ingest.operation())
        add_operation(graph_navigate_ring.operation())
        add_operation(graph_navigate_dense.operation())
def populate():
    """Fill the module-level ``operations`` registry if it is still empty.

    Each known operation module contributes one entry via ``add_operation``;
    a non-empty registry is left untouched, so calling this repeatedly is
    harmless.

    NOTE(review): this is a byte-identical re-definition of the ``populate``
    defined earlier in this file and shadows it at import time.
    """
    if len(operations) == 0:
        # Registration order matters to callers that list operations,
        # so the modules are enumerated in the original order.
        for op_module in (query, show_tables, delete_tag, report, run,
                          show_db_config, graph_create, graph_v_ingest,
                          graph_v_search, make_graph500, generate_elist,
                          graph_e_ingest, graph_e_standard_ingest,
                          graph_e_ring_ingest, graph_navigate_ring,
                          graph_navigate_dense):
            add_operation(op_module.operation())
def run_operation(self): for engine in self.engine_objects: for _page_size in self.page_size: for _v_scale in self.scale: for _e_factor in self.factor: for _threads in self.threads: for _txsize in self.txsize: for _cache in self.cache: _index = "gr" _v_size = pow(2, _v_scale) self.initialize_property(engine.name) self.propertyFile.properties[ "IG.Placement.Distributed.Pipelining.Groups"] = "ConnectorGroup" self.propertyFile.properties[ "IG.Placement.Distributed.Pipelining.PipelinesPerStorageUnit"] = 4 self.propertyFile.properties[ "IG.Placement.Distributed.Pipelining.EnablePipelining"] = "true" self.propertyFile.setInitCache(_cache[0]) self.propertyFile.setMaxCache(_cache[1]) self.propertyFile.properties[ "IG.PageSize"] = _page_size self.ig_setup_placement( engine.name, self.propertyFile) _s_ = "\tgraph(%s) create index:%s page_size:%d" % ( engine.name, _index, _page_size) print self.output_string( _s_, base.Colors.Blue, True), sys.stdout.flush() start = time.clock() self.ig_run(engine.name, self.propertyFile, "create", _index) elapsed = (time.clock() - start) print self.output_string( str(elapsed), base.Colors.Red, False) self.ig_setup_Location( engine.name, self.propertyFile) _s_ = "\tgraph(%s) ingest edges index:%s page_size:%d tx_size:%d size:%d diskmap:%s" % ( engine.name, _index, _page_size, _txsize, _v_size, str(self.diskmap)) print self.output_string( _s_, base.Colors.Blue, True) vprofileName = "v_ingest.profile" eprofileName = "e_ingest.profile" elist_name = "g500.elist" now_string = self.db.now_string( True).replace(" ", "_") if self.tag_object: vprofileName = "v_ingest." + now_string + ".profile" eprofileName = "e_ingest." 
+ now_string + ".profile" vprofileName = vprofileName.replace( " ", "_") eprofileName = eprofileName.replace( " ", "_") elist_name = "g500.%s.elist" % ( now_string) pass self.ig_v_ingest(engine.name, self.propertyFile, _index, _v_size, 0, _threads, _txsize, vprofileName, True) generator = generate_elist.operation() generator.run(_v_scale, _e_factor, self.a, self.b, self.c, self.d, elist_name) self.ig_e_pipeline_ingest( engine.name, self.propertyFile, _v_scale, _threads, _txsize, eprofileName, elist_name) os.remove(elist_name) if self.case_object: f = file(eprofileName, "r") line = f.readline() data = eval(line) platform_object = self.db.create_unique_object( db_objects.model.platform, "name", data["os"]) index_object = self.db.create_unique_object( db_objects.model.index_type, "name", _index) case_data_object = self.db.create_object( db_objects.model.case_data, timestamp=self.db.now_string(True), case_id=self.case_object.id, tag_id=self.tag_object.id, engine_id=engine.id, size=data["size"], time=data["time"], memory_init=data["mem_init"], memory_used=data["mem_used"], memory_committed=data[ "mem_committed"], memory_max=data["mem_max"], op_size=data["opsize"], rate=data["rate"], page_size=_page_size, cache_init=self.propertyFile. getInitCache(), cache_max=self.propertyFile. getMaxCache(), tx_size=_txsize, platform_id=platform_object.id, threads=_threads, index_id=index_object.id, status=1) if self.diskmap: case_data_object.setDataValue( "diskmap", self.diskmap) pass case_data_object.setDataValue( "edge_factor", _e_factor) self.db.update(case_data_object) case_data_key = case_data_object.generateKey( ) case_data_stat_object = self.db.fetch_using_generic( db_objects.model.case_data_stat, key=case_data_key, case_id=self.case_object.id) if (len(case_data_stat_object) == 0): case_data_stat_object = self.db.create_unique_object( db_objects.model. 
case_data_stat, "key", case_data_key, case_id=self.case_object.id) else: case_data_stat_object = case_data_stat_object[ 0] pass case_data_stat_object.addCounter() case_data_stat_object.setRateStat( data["rate"]) case_data_stat_object.setTimeStat( data["time"]) case_data_stat_object.setMemInitStat( data["mem_init"]) case_data_stat_object.setMemUsedStat( data["mem_used"]) case_data_stat_object.setMemCommittedStat( data["mem_committed"]) case_data_stat_object.setMemMaxStat( data["mem_max"]) self.db.update(case_data_stat_object) f.close() os.remove(eprofileName) os.remove(vprofileName) pass pass pass pass pass pass pass pass pass
def run_operation(self): for engine in self.engine_objects: for _page_size in self.page_size: for _v_scale in self.scale: for _e_factor in self.factor: for _threads in self.threads: for _txsize in self.txsize: for _cache in self.cache: _index = "gr" _v_size = pow(2,_v_scale) self.initialize_property(engine.name) self.propertyFile.properties["IG.Placement.Distributed.Pipelining.Groups"] = "ConnectorGroup" self.propertyFile.properties["IG.Placement.Distributed.Pipelining.PipelinesPerStorageUnit"]=4 self.propertyFile.properties["IG.Placement.Distributed.Pipelining.EnablePipelining"]="true" self.propertyFile.setInitCache(_cache[0]) self.propertyFile.setMaxCache(_cache[1]) self.propertyFile.properties["IG.PageSize"] = _page_size self.ig_setup_placement(engine.name,self.propertyFile) _s_ = "\tgraph(%s) create index:%s page_size:%d"%(engine.name,_index,_page_size) print self.output_string(_s_,base.Colors.Blue,True), sys.stdout.flush() start = time.clock() self.ig_run(engine.name,self.propertyFile,"create",_index) elapsed = (time.clock() - start) print self.output_string(str(elapsed),base.Colors.Red,False) self.ig_setup_Location(engine.name,self.propertyFile) _s_ = "\tgraph(%s) ingest edges index:%s page_size:%d tx_size:%d size:%d diskmap:%s"%(engine.name,_index,_page_size,_txsize,_v_size,str(self.diskmap)) print self.output_string(_s_,base.Colors.Blue,True) vprofileName = "v_ingest.profile" eprofileName = "e_ingest.profile" elist_name = "g500.elist" now_string = self.db.now_string(True).replace(" ","_") if self.tag_object: vprofileName = "v_ingest."+ now_string + ".profile" eprofileName = "e_ingest."+ now_string + ".profile" vprofileName = vprofileName.replace(" ","_") eprofileName = eprofileName.replace(" ","_") elist_name = "g500.%s.elist"%(now_string) pass self.ig_v_ingest(engine.name,self.propertyFile,_index,_v_size,0,_threads,_txsize,vprofileName,True) generator = generate_elist.operation() generator.run(_v_scale,_e_factor,self.a,self.b,self.c,self.d,elist_name) 
self.ig_e_pipeline_ingest(engine.name,self.propertyFile,_v_scale,_threads,_txsize,eprofileName,elist_name) os.remove(elist_name) if self.case_object: f = file(eprofileName,"r") line = f.readline() data = eval(line) platform_object = self.db.create_unique_object(db_objects.model.platform,"name",data["os"]) index_object = self.db.create_unique_object(db_objects.model.index_type,"name",_index) case_data_object = self.db.create_object(db_objects.model.case_data, timestamp=self.db.now_string(True), case_id=self.case_object.id, tag_id=self.tag_object.id, engine_id=engine.id, size=data["size"], time=data["time"], memory_init=data["mem_init"], memory_used=data["mem_used"], memory_committed=data["mem_committed"], memory_max=data["mem_max"], op_size=data["opsize"], rate=data["rate"], page_size=_page_size, cache_init=self.propertyFile.getInitCache(), cache_max=self.propertyFile.getMaxCache(), tx_size=_txsize, platform_id=platform_object.id, threads=_threads, index_id=index_object.id, status=1 ) if self.diskmap: case_data_object.setDataValue("diskmap",self.diskmap) pass case_data_object.setDataValue("edge_factor",_e_factor) self.db.update(case_data_object) case_data_key = case_data_object.generateKey() case_data_stat_object = self.db.fetch_using_generic(db_objects.model.case_data_stat, key=case_data_key, case_id=self.case_object.id ) if (len(case_data_stat_object) == 0): case_data_stat_object = self.db.create_unique_object(db_objects.model.case_data_stat, "key",case_data_key, case_id=self.case_object.id ) else: case_data_stat_object = case_data_stat_object[0] pass case_data_stat_object.addCounter() case_data_stat_object.setRateStat(data["rate"]) case_data_stat_object.setTimeStat(data["time"]) case_data_stat_object.setMemInitStat(data["mem_init"]) case_data_stat_object.setMemUsedStat(data["mem_used"]) case_data_stat_object.setMemCommittedStat(data["mem_committed"]) case_data_stat_object.setMemMaxStat(data["mem_max"]) self.db.update(case_data_stat_object) f.close() 
os.remove(eprofileName) os.remove(vprofileName) pass pass pass pass pass pass pass pass pass