def submit_single_graph(self, graph_id, algo="sarkar", deploy=False):
    """
    Unroll the logical graph at index `graph_id`, partition it across the
    data island's nodes with MySarkar, and create (optionally deploy) a
    session for the resulting physical-graph spec.

    graph_id -- index into the module-level `lgnames` list
    algo     -- partition algorithm name (only "sarkar" is used here)
    deploy   -- when True, also deploy the session

    Returns the deploy result when `deploy` is True, otherwise None.
    """
    lgn = lgnames[graph_id]
    fp = pkg_resources.resource_filename("dfms.dropmake", "web/{0}".format(lgn))
    lg = LG(fp)
    drop_list = lg.unroll_to_tpl()
    # node_list = self.get_avail_hosts()
    node_list = self._dc.nodes()
    pgtp = MySarkarPGTP(drop_list, len(node_list), merge_parts=True)
    # NOTE(review): bare property access kept for its (presumed) side effect
    # of building the internal JSON representation -- confirm before removing.
    pgtp.json
    pg_spec = pgtp.to_pg_spec(node_list, ret_str=False)
    # Optionally dump the spec for inspection before submitting it.
    if self._output:
        with open(self._output, "w") as f:
            json.dump(pg_spec, f, indent=2)
    completed_uids = [x["oid"] for x in droputils.get_roots(pg_spec)]
    ssid = "{0}-{1}".format(lgn.split(".")[0], lg._session_id)
    self._dc.create_session(ssid)
    # Bug fix: the original used Python-2-only `print` statements, which are
    # syntax errors under Python 3; the parenthesized form behaves
    # identically on both.
    print("session created")
    self._dc.append_graph(ssid, pg_spec)
    print("graph appended")
    if deploy:
        ret = self._dc.deploy_session(ssid, completed_uids=completed_uids)
        print("session deployed")
        return ret
def test_mysarkar_pgtp_gen_pg(self):
    """Unroll each sample logical graph, partition it with MySarkar into
    3 parts, and map the result onto a fixed 3-node list."""
    lgnames = ['lofar_std.json', 'chiles_two.json', 'test_grpby_gather.json',
               'chiles_two_dev1.json', 'chiles_simple.json']
    tgt_partnum = [15, 15, 10, 10, 5]
    node_list = ['10.128.0.11', '10.128.0.12', '10.128.0.13']
    for i, lgn in enumerate(lgnames):
        graph_path = pkg_resources.resource_filename(
            'dfms.dropmake', 'web/{0}'.format(lgn))
        logical_graph = LG(graph_path)
        drops = logical_graph.unroll_to_tpl()
        partitioner = MySarkarPGTP(drops, 3, merge_parts=True)
        partitioner.json
        pg_spec = partitioner.to_pg_spec(node_list)
def produce_physical_graphs(self, graph_id, algo="sarkar", tgt="/tmp"):
    """
    Unroll the logical graph at index `graph_id`, partition it across the
    data island's nodes with MySarkar, and write the physical-graph spec
    to a JSON file under directory `tgt`.

    graph_id -- index into the module-level `lgnames` list
    algo     -- partition algorithm name (only "sarkar" is used here)
    tgt      -- output directory for the generated spec file
    """
    lgn = lgnames[graph_id]
    fp = pkg_resources.resource_filename("dfms.dropmake", "web/{0}".format(lgn))
    lg = LG(fp)
    drop_list = lg.unroll_to_tpl()
    node_list = self._dc.nodes()
    # node_list = ['10.128.0.11', '10.128.0.14', '10.128.0.15', '10.128.0.16']
    pgtp = MySarkarPGTP(drop_list, len(node_list), merge_parts=True)
    # NOTE(review): bare property access kept for its (presumed) side effect
    # of building the internal JSON representation -- confirm before removing.
    pgtp.json
    pg_spec = pgtp.to_pg_spec(node_list)
    # Bug fix: the original template was "/{1}/..." with a hard-coded leading
    # slash, which produced "//tmp/..." for the default `tgt` and broke
    # relative target directories; join without the extra slash instead.
    out_path = "{1}/sar_{0}_pgspec.json".format(lgn.split(".")[0], tgt)
    with open(out_path, "w") as f:
        f.write(pg_spec)
def test_mysarkar_pgtp_gen_pg(self):
    """Partition a trio of sample logical graphs with MySarkar (3 parts)
    and generate a physical-graph spec for a fixed 3-node list."""
    lgnames = [
        'lofar_std.json', 'test_grpby_gather.json', 'chiles_simple.json'
    ]
    tgt_partnum = [15, 15, 10, 10, 5]
    node_list = ['10.128.0.11', '10.128.0.12', '10.128.0.13']
    for i, lgn in enumerate(lgnames):
        graph_file = get_lg_fname(lgn)
        logical_graph = LG(graph_file)
        drops = logical_graph.unroll_to_tpl()
        partitioner = MySarkarPGTP(drops, 3, merge_parts=True)
        #pgtp.json
        partitioner.to_gojs_json(visual=False)
        pg_spec = partitioner.to_pg_spec(node_list)
def test_mysarkar_pgtp_gen_pg(self):
    """For every bundled sample LG: unroll, partition into 3 parts with
    MySarkar, and produce a physical-graph spec on a 3-node list."""
    lgnames = [
        'lofar_std.json', 'chiles_two.json', 'test_grpby_gather.json',
        'chiles_two_dev1.json', 'chiles_simple.json'
    ]
    tgt_partnum = [15, 15, 10, 10, 5]
    node_list = ['10.128.0.11', '10.128.0.12', '10.128.0.13']
    for i, lgn in enumerate(lgnames):
        lg_file = pkg_resources.resource_filename(
            'dfms.dropmake', 'web/{0}'.format(lgn))
        unrolled = LG(lg_file).unroll_to_tpl()
        pg_template = MySarkarPGTP(unrolled, 3, merge_parts=True)
        pg_template.json
        pg_spec = pg_template.to_pg_spec(node_list)
def test_mysarkar_pgtp(self):
    """Smoke-test MySarkar partitioning (default arguments) over a trio of
    sample logical graphs."""
    lgnames = [
        'lofar_std.json', 'test_grpby_gather.json', 'chiles_simple.json'
    ]
    tgt_partnum = [15, 15, 10, 10, 5]
    for i, lgn in enumerate(lgnames):
        graph_file = get_lg_fname(lgn)
        drops = LG(graph_file).unroll_to_tpl()
        partitioner = MySarkarPGTP(drops)
        partitioner.json
def partition(pgt, pip_name, num_partitions, num_islands, algo='metis'):
    '''
    Partitions the Physical Graph Template `pgt` with the algorithm `algo`
    using `num_partitions` partitions.
    '''
    from dfms.dropmake.pg_generator import MySarkarPGTP, MetisPGTP

    logger.info("Initialising PGTP %s", algo)
    # Select the partitioner class up front; both take the same arguments.
    partitioner_cls = MySarkarPGTP if algo == 'sarkar' else MetisPGTP
    pgtp = partitioner_cls(pgt, num_partitions, merge_parts=True)
    # The unrolled template can be large; release our reference early.
    del pgt
    logger.info("PGTP initialised %s", algo)

    logger.info("Starting to partition %s", pip_name)
    pgtp.to_gojs_json(string_rep=False, visual=True)
    pgt = pgtp.to_pg_spec(
        [], ret_str=False, num_islands=num_islands,
        tpl_nodes_len=num_partitions + num_islands)
    logger.info("Partitioning completed for %s", pip_name)
    return pgt
def test_mysarkar_pgtp(self):
    """Smoke-test MySarkar partitioning (default arguments) over every
    bundled sample logical graph."""
    lgnames = [
        'lofar_std.json', 'chiles_two.json', 'test_grpby_gather.json',
        'chiles_two_dev1.json', 'chiles_simple.json'
    ]
    tgt_partnum = [15, 15, 10, 10, 5]
    for i, lgn in enumerate(lgnames):
        lg_file = pkg_resources.resource_filename(
            'dfms.dropmake', 'web/{0}'.format(lgn))
        unrolled = LG(lg_file).unroll_to_tpl()
        pg_template = MySarkarPGTP(unrolled)
        pg_template.json
def test_mysarkar_pgtp_gen_pg_island(self):
    """Partition sample LGs with MySarkar, then merge partitions down to
    fit a 6-node list with 2 of the nodes reserved as data islands."""
    lgnames = [
        'lofar_std.json', 'test_grpby_gather.json', 'chiles_simple.json'
    ]
    node_list = [
        '10.128.0.11', '10.128.0.12', '10.128.0.13', '10.128.0.14',
        '10.128.0.15', '10.128.0.16'
    ]
    nb_islands = 2
    for i, lgn in enumerate(lgnames):
        drops = LG(get_lg_fname(lgn)).unroll_to_tpl()
        partitioner = MySarkarPGTP(drops, None, merge_parts=True)
        partitioner.to_gojs_json(visual=False)
        #print(lgn)
        try:
            partitioner.merge_partitions(
                len(node_list) - nb_islands, form_island=False)
        except GPGTNoNeedMergeException:
            # Graph already has few enough partitions; nothing to merge.
            continue
        pg_spec = partitioner.to_pg_spec(node_list, num_islands=nb_islands)
        partitioner.result()
def gen_mysarkar_pgtp(lgfname, pgt_dir, num_islands=2, cores_per_node=2, print_result=False):
    """
    Generate a Physical Graph Template (Partition) using MySarkar -- a
    "somewhat greedy" scheduling algorithm. No real resource mapping is
    involved.
    """
    par_label = 'p'
    t_start = time.time()
    drop_list = LG(lgfname).unroll_to_tpl()
    t_unrolled = time.time()
    merge = num_islands > 0
    pgt = MySarkarPGTP(drop_list, 1, par_label, cores_per_node,
                       merge_parts=merge)
    if merge:
        pgt.to_gojs_json(string_rep=False, visual=False)
        pgt.merge_partitions(num_islands, form_island=True, island_type=1,
                             visual=False)
    t_scheduled = time.time()
    re_dict = pgt.result()
    # Record wall-clock timings for the two phases (seconds, 3 dp).
    re_dict['unroll_time'] = '%.3f' % (t_unrolled - t_start)
    re_dict['schedule_time'] = '%.3f' % (t_scheduled - t_unrolled)
    if print_result:
        part_info = ' - '.join(
            ['{0}:{1}'.format(k, v) for k, v in re_dict.items()])
        print(part_info)
    return re_dict
def gen_pgt():
    """
    RESTful interface for translating Logical Graphs to Physical Graphs
    """
    # Bottle handler: every input arrives as a string via the query string.
    lg_name = request.query.get('lg_name')
    if (lg_exists(lg_name)):
        try:
            lg = LG(lg_path(lg_name))
            drop_list = lg.unroll_to_tpl()
            part = request.query.get('num_par')
            try:
                #print('num_islands', request.query.get('num_islands'))
                num_islands = int(request.query.get('num_islands'))
            except:
                # NOTE(review): bare except deliberately treats a missing or
                # non-numeric 'num_islands' as "no islands".
                num_islands = 0
            mpp = num_islands > 0
            if (part is None):
                # No partition count requested: plain (unpartitioned) PGT.
                is_part = ''
                pgt = PGT(drop_list)
            else:
                is_part = 'Partition'
                par_label = request.query.get('par_label')
                algo = request.query.get('algo')
                # Dispatch on the requested partitioning algorithm; each
                # branch reads its own algorithm-specific query parameters.
                if ('metis' == algo):
                    min_goal = int(request.query.get('min_goal'))
                    ptype = int(request.query.get('ptype'))
                    # METIS ufactor grows as allowed load imbalance shrinks;
                    # clamp to at least 1.
                    ufactor = 100 - int(request.query.get('max_load_imb')) + 1
                    if (ufactor <= 0):
                        ufactor = 1
                    pgt = MetisPGTP(drop_list, int(part), min_goal, par_label,
                                    ptype, ufactor, merge_parts=mpp)
                elif ('mysarkar' == algo):
                    pgt = MySarkarPGTP(drop_list, int(part), par_label,
                                       int(request.query.get('max_dop')),
                                       merge_parts=mpp)
                elif ('min_num_parts' == algo):
                    time_greedy = 1 - float(request.query.get(
                        'time_greedy')) / 100.0  # assuming between 1 to 100
                    pgt = MinNumPartsPGTP(drop_list,
                                          int(request.query.get('deadline')),
                                          int(part), par_label,
                                          int(request.query.get('max_dop')),
                                          merge_parts=mpp,
                                          optimistic_factor=time_greedy)
                elif ('pso' == algo):
                    # Optional PSO parameters with defaults; a bad/missing
                    # value keeps the default for that parameter only.
                    params = ['deadline', 'topk', 'swarm_size']
                    pars = [None, 30, 40]
                    for i, para in enumerate(params):
                        try:
                            pars[i] = int(request.query.get(para))
                        except:
                            continue
                    pgt = PSOPGTP(drop_list, par_label,
                                  int(request.query.get('max_dop')),
                                  deadline=pars[0], topk=pars[1],
                                  swarm_size=pars[2], merge_parts=mpp)
                else:
                    raise GraphException(
                        "Unknown partition algorithm: {0}".format(algo))
            # Register the PGT with the manager and obtain its viewer id.
            if (mpp):
                pgt_id = pg_mgr.add_pgt(pgt, lg_name, num_islands=num_islands)
                """
                if ('mysarkar' == algo):
                    pgt_id = pg_mgr.add_pgt(pgt, lg_name, num_islands=int(part))
                elif ('metis' == algo):
                """
            else:
                pgt_id = pg_mgr.add_pgt(pgt, lg_name)
            # Human-readable summary of the partitioning result for the page.
            part_info = ' - '.join(
                ['{0}:{1}'.format(k, v) for k, v in pgt.result().items()])
            tpl = file_as_string('pg_viewer.html')
            return template(tpl, pgt_view_json_name=pgt_id,
                            partition_info=part_info,
                            is_partition_page=is_part)
        except GraphException as ge:
            response.status = 500
            return "Invalid Logical Graph {1}: {0}".format(str(ge), lg_name)
        except SchedulerException as se:
            response.status = 500
            return "Graph scheduling exception {1}: {0}".format(
                str(se), lg_name)
        except Exception as exp:
            # Catch-all boundary: report the full traceback in the response.
            response.status = 500
            trace_msg = traceback.format_exc()
            return "Graph partition exception {1}: {0}".format(
                trace_msg, lg_name)
    else:
        response.status = 404
        return "{0}: logical graph {1} not found\n".format(err_prefix, lg_name)