def unroll(lg_path, oid_prefix, zerorun=False, app=None):
    """
    Unroll the Logical Graph at `lg_path` into a Physical Graph Template
    (a list of Drop specs) and return it.

    `oid_prefix` is passed as the session id, prepending all generated
    Drop OIDs. When `zerorun` is True, every 'sleepTime' entry is forced
    to 0; when `app` is given, every 'app' entry is replaced with it.
    """
    from dfms.dropmake.pg_generator import LG
    logical_graph = LG(_open_i(lg_path), ssid=oid_prefix)
    logger.info("Start to unroll %s", lg_path)
    specs = logical_graph.unroll_to_tpl()
    logger.info("Unroll completed for %s with # of Drops: %d", lg_path, len(specs))
    # Optional post-processing of the generated Drop specs
    if zerorun:
        for spec in specs:
            if 'sleepTime' in spec:
                spec['sleepTime'] = 0
    if app:
        for spec in specs:
            if 'app' in spec:
                spec['app'] = app
    return specs
def test_mysarkar_scheduler(self):
    """Partition a set of LGs with MySarkarScheduler, then merge partitions."""
    cases = [('cont_img.json', 20), ('lofar_std.json', 15),
             ('chiles_two.json', 15), ('test_grpby_gather.json', 10),
             ('chiles_two_dev1.json', 10), ('chiles_simple.json', 5)]
    mdp = 8
    show_matrices = True
    for lgn, tgt_parts in cases:
        fp = pkg_resources.resource_filename('dfms.dropmake', 'web/{0}'.format(lgn))
        drop_list = LG(fp).unroll_to_tpl()
        sched = MySarkarScheduler(drop_list, max_dop=mdp)
        num_parts_done, lpl, ptime, parts = sched.partition_dag()
        if show_matrices:
            # exercise schedule/gantt matrix generation for non-empty partitions
            for part in parts:
                if part.cardinality > 0:
                    part.schedule.schedule_matrix
                    DAGUtil.ganttchart_matrix(part.schedule._dag, part.schedule._topo_sort)
        sched.merge_partitions(tgt_parts)
def test_pg_generator(self):
    """The LOFAR standard pipeline LG unrolls and has the expected size."""
    lg_file = pkg_resources.resource_filename('dfms.dropmake', 'web/lofar_std.json')
    lg = LG(lg_file)
    # the LG is expected to contain exactly 36 parsed constructs
    self.assertEqual(len(lg._done_dict), 36)
    lg.unroll_to_tpl()
def submit_single_graph(self, graph_id, algo="sarkar", deploy=False):
    """
    Unroll and partition the logical graph indexed by `graph_id` in
    `lgnames`, submit the resulting physical graph spec to the Drop
    manager, and optionally deploy the session.

    Returns the deploy_session() result when `deploy` is True, else None.
    """
    lgn = lgnames[graph_id]
    fp = pkg_resources.resource_filename("dfms.dropmake", "web/{0}".format(lgn))
    lg = LG(fp)
    drop_list = lg.unroll_to_tpl()
    # node_list = self.get_avail_hosts()
    node_list = self._dc.nodes()
    pgtp = MySarkarPGTP(drop_list, len(node_list), merge_parts=True)
    pgtp.json  # trigger (lazy) JSON generation before mapping to nodes
    pg_spec = pgtp.to_pg_spec(node_list, ret_str=False)
    if self._output:
        with open(self._output, "w") as f:
            json.dump(pg_spec, f, indent=2)
    completed_uids = [x["oid"] for x in droputils.get_roots(pg_spec)]
    ssid = "{0}-{1}".format(lgn.split(".")[0], lg._session_id)
    self._dc.create_session(ssid)
    # print() calls (not Py2-only print statements) — consistent with the
    # rest of the module and identical output for a single argument
    print("session created")
    self._dc.append_graph(ssid, pg_spec)
    print("graph appended")
    if deploy:
        ret = self._dc.deploy_session(ssid, completed_uids=completed_uids)
        print("session deployed")
        return ret
def test_mysarkar_pgtp(self):
    """Building a MySarkarPGTP from each unrolled LG must not raise."""
    graphs = ['lofar_std.json', 'chiles_two.json', 'test_grpby_gather.json',
              'chiles_two_dev1.json', 'chiles_simple.json']
    for lgn in graphs:
        path = pkg_resources.resource_filename('dfms.dropmake', 'web/{0}'.format(lgn))
        specs = LG(path).unroll_to_tpl()
        # touching .json exercises the (lazy) JSON generation
        MySarkarPGTP(specs).json
def test_mcts_scheduler(self):
    """Run the MCTS scheduler over a small set of graphs."""
    # the deadline values are currently unused by this test
    for lgn, _deadline in {'lofar_std.json': 450}.items():
        drops = LG(get_lg_fname(lgn)).unroll_to_tpl()
        scheduler = MCTSScheduler(drops, max_dop=4, max_calc_time=0.25)
        scheduler.partition_dag()
def gen_pgt():
    """
    RESTful interface for translating Logical Graphs to Physical Graphs.

    Reads the partitioning algorithm and its parameters from the request
    query string, builds the corresponding PGT(P), registers it with the
    graph manager and renders the viewer template. Returns an HTTP 500
    body on graph/scheduling errors.
    """
    lg_name = request.query.get('lg_name')
    if (lg_exists(lg_name)):
        try:
            lg = LG(lg_name)
            drop_list = lg.unroll_to_tpl()
            part = request.query.get('num_par')
            if (part is None):
                # no partitioning requested: plain PGT
                is_part = ''
                pgt = PGT(drop_list)
            else:
                is_part = 'Partition'
                par_label = request.query.get('par_label')
                algo = request.query.get('algo')
                if ('metis' == algo):
                    min_goal = int(request.query.get('min_goal'))
                    ptype = int(request.query.get('ptype'))
                    # METIS ufactor: larger tolerated imbalance -> smaller ufactor
                    ufactor = 100 - int(request.query.get('max_load_imb')) + 1
                    if (ufactor <= 0):
                        ufactor = 1
                    pgt = MetisPGTP(drop_list, int(part), min_goal, par_label,
                                    ptype, ufactor)
                elif ('mysarkar' == algo):
                    mp = request.query.get('merge_par')
                    mpp = True if '1' == mp else False
                    pgt = MySarkarPGTP(drop_list, int(part), par_label,
                                       int(request.query.get('max_dop')),
                                       merge_parts=mpp)
                elif ('min_num_parts' == algo):
                    time_greedy = 1 - float(request.query.get('time_greedy')) / 100.0  # assuming between 1 to 100
                    pgt = MinNumPartsPGTP(drop_list,
                                          int(request.query.get('deadline')),
                                          int(part), par_label,
                                          int(request.query.get('max_dop')),
                                          merge_parts=False,
                                          optimistic_factor=time_greedy)
                elif ('pso' == algo):
                    # optional PSO parameters, falling back to defaults when
                    # missing or non-numeric
                    params = ['deadline', 'topk', 'swarm_size']
                    pars = [None, 30, 40]
                    for i, para in enumerate(params):
                        try:
                            pars[i] = int(request.query.get(para))
                        except (TypeError, ValueError):
                            continue
                    pgt = PSOPGTP(drop_list, par_label,
                                  int(request.query.get('max_dop')),
                                  deadline=pars[0], topk=pars[1],
                                  swarm_size=pars[2])
                elif ('pyrros' == algo):
                    pgt = PyrrosPGTP(drop_list, int(part))
                else:
                    raise GraphException("Unknown partition algorithm: {0}".format(algo))
            pgt_id = pg_mgr.add_pgt(pgt, lg_name)
            part_info = pgt.get_partition_info()
            return template('pg_viewer.html', pgt_view_json_name=pgt_id,
                            partition_info=part_info, is_partition_page=is_part)
        # 'except X as e' replaces the removed-in-Py3 'except X, e' form,
        # consistent with the other gen_pgt variant in this file
        except GraphException as ge:
            response.status = 500
            return "Invalid Logical Graph {1}: {0}".format(str(ge), lg_name)
        except SchedulerException as se:
            response.status = 500
            return "Graph scheduling exception {1}: {0}".format(str(se), lg_name)
def test_minnumparts_pgtp(self):
    """Building a MinNumPartsPGTP per graph/deadline pair must not raise."""
    graphs = ['lofar_std.json', 'chiles_two.json', 'test_grpby_gather.json',
              'chiles_two_dev1.json', 'chiles_simple.json']
    deadlines = [200, 300, 90, 80, 160]
    for lgn, deadline in zip(graphs, deadlines):
        fp = pkg_resources.resource_filename('dfms.dropmake', 'web/{0}'.format(lgn))
        specs = LG(fp).unroll_to_tpl()
        # touching .json exercises the (lazy) JSON generation
        MinNumPartsPGTP(specs, deadline).json
def test_mcts_scheduler(self):
    """Run the MCTS scheduler (fixed calc-time budget) on each graph."""
    for lgn, _deadline in zip(['lofar_std.json'], [450]):
        fp = pkg_resources.resource_filename('dfms.dropmake', 'web/{0}'.format(lgn))
        drops = LG(fp).unroll_to_tpl()
        sched = MCTSScheduler(drops, max_dop=4, max_calc_time=0.25)
        num_parts_done, lpl, ptime, parts = sched.partition_dag()
def test_mysarkar_pgtp_gen_pg(self):
    """Partition with MySarkar (3 parts, merged) and map onto 3 nodes."""
    graphs = ['lofar_std.json', 'chiles_two.json', 'test_grpby_gather.json',
              'chiles_two_dev1.json', 'chiles_simple.json']
    node_list = ['10.128.0.11', '10.128.0.12', '10.128.0.13']
    for lgn in graphs:
        fp = pkg_resources.resource_filename('dfms.dropmake', 'web/{0}'.format(lgn))
        specs = LG(fp).unroll_to_tpl()
        pgtp = MySarkarPGTP(specs, 3, merge_parts=True)
        pgtp.json
        pgtp.to_pg_spec(node_list)
def test_sa_scheduler(self):
    """Run the simulated-annealing scheduler with and without a deadline."""
    mdp = 4
    for lgn, deadline in {'lofar_std.json': 450}.items():
        drops = LG(get_lg_fname(lgn)).unroll_to_tpl()
        SAScheduler(drops, max_dop=mdp).partition_dag()
        SAScheduler(drops, max_dop=mdp, deadline=deadline).partition_dag()
def test_metis_pgtp(self):
    """Building a MetisPGTP from each unrolled LG must not raise."""
    for lgn in ('lofar_std.json', 'test_grpby_gather.json', 'chiles_simple.json'):
        drops = LG(get_lg_fname(lgn)).unroll_to_tpl()
        # touching .json exercises the (lazy) JSON generation
        MetisPGTP(drops).json
def test_mcts_scheduler(self):
    """Run the MCTS scheduler (no deadline) and report the partitioning."""
    lgnames = ['lofar_std.json']
    tgt_deadline = [450]
    mdp = 4
    for j, lgn in enumerate(lgnames):
        fp = pkg_resources.resource_filename('dfms.dropmake', 'web/{0}'.format(lgn))
        lg = LG(fp)
        drop_list = lg.unroll_to_tpl()
        pssa01 = MCTSScheduler(drop_list, max_dop=mdp, max_calc_time=0.25)
        num_parts_done, lpl, ptime, parts = pssa01.partition_dag()
        # print() call instead of the Py2-only print statement; identical
        # output for a single argument
        print("MCTS (no deadline): {3} partitioned: parts = {0}, lpl = {1}, ptime = {2:.2f}".format(num_parts_done, lpl, ptime, lgn))
def produce_physical_graphs(self, graph_id, algo="sarkar", tgt="/tmp"):
    """
    Unroll and partition the logical graph indexed by `graph_id`, then
    write the resulting physical graph spec as JSON under `tgt`.
    """
    lgn = lgnames[graph_id]
    fp = pkg_resources.resource_filename("dfms.dropmake", "web/{0}".format(lgn))
    drop_list = LG(fp).unroll_to_tpl()
    node_list = self._dc.nodes()
    pgtp = MySarkarPGTP(drop_list, len(node_list), merge_parts=True)
    pgtp.json  # trigger (lazy) JSON generation
    pg_spec = pgtp.to_pg_spec(node_list)
    # NOTE(review): the leading '/' combined with an absolute `tgt` yields
    # a '//tmp/...' path — harmless on POSIX, but likely unintended
    out_path = "/{1}/sar_{0}_pgspec.json".format(lgn.split(".")[0], tgt)
    with open(out_path, "w") as f:
        f.write(pg_spec)
def test_minnumparts_pgtp(self):
    """Building a MinNumPartsPGTP per graph/deadline pair must not raise."""
    cases = zip(['lofar_std.json', 'test_grpby_gather.json', 'chiles_simple.json'],
                [200, 300, 90, 80, 160])
    for lgn, deadline in cases:
        drops = LG(get_lg_fname(lgn)).unroll_to_tpl()
        # touching .json exercises the (lazy) JSON generation
        MinNumPartsPGTP(drops, deadline).json
def test_mysarkar_pgtp(self):
    """Building a MySarkarPGTP from each unrolled LG must not raise."""
    for lgn in ('lofar_std.json', 'chiles_two.json', 'test_grpby_gather.json',
                'chiles_two_dev1.json', 'chiles_simple.json'):
        fp = pkg_resources.resource_filename('dfms.dropmake', 'web/{0}'.format(lgn))
        drops = LG(fp).unroll_to_tpl()
        # touching .json exercises the (lazy) JSON generation
        MySarkarPGTP(drops).json
def test_metis_pgtp_gen_pg(self):
    """METIS partitioning (merged) followed by mapping onto 3 nodes."""
    node_list = ['10.128.0.11', '10.128.0.12', '10.128.0.13']
    for lgn in ('lofar_std.json', 'test_grpby_gather.json', 'chiles_simple.json'):
        drops = LG(get_lg_fname(lgn)).unroll_to_tpl()
        pgtp = MetisPGTP(drops, 3, merge_parts=True)
        pgtp.to_gojs_json(visual=False)
        pgtp.to_pg_spec(node_list)
def test_minnumparts_scheduler(self):
    """Run MinNumPartsScheduler over each graph with its deadline."""
    graphs = ['cont_img.json', 'lofar_std.json', 'chiles_two.json',
              'test_grpby_gather.json', 'chiles_two_dev1.json', 'chiles_simple.json']
    deadlines = [500, 200, 300, 90, 80, 160]
    mdp = 8
    ofa = 0.5
    for lgn, deadline in zip(graphs, deadlines):
        fp = pkg_resources.resource_filename('dfms.dropmake', 'web/{0}'.format(lgn))
        drops = LG(fp).unroll_to_tpl()
        sched = MinNumPartsScheduler(drops, deadline, max_dop=mdp, optimistic_factor=ofa)
        num_parts_done, lpl, ptime, parts = sched.partition_dag()
def test_sa_scheduler(self):
    """Run the simulated-annealing scheduler with and without a deadline."""
    mdp = 4
    for lgn, deadline in zip(['lofar_std.json'], [450]):
        fp = pkg_resources.resource_filename('dfms.dropmake', 'web/{0}'.format(lgn))
        drops = LG(fp).unroll_to_tpl()
        # no-deadline run, then a deadline-constrained run
        num_parts_done, lpl, ptime, parts = SAScheduler(drops, max_dop=mdp).partition_dag()
        num_parts_done, lpl, ptime, parts = SAScheduler(drops, max_dop=mdp, deadline=deadline).partition_dag()
def test_minnumparts_pgtp(self):
    """Building a MinNumPartsPGTP per graph/deadline pair must not raise."""
    graphs = ['lofar_std.json', 'chiles_two.json', 'test_grpby_gather.json',
              'chiles_two_dev1.json', 'chiles_simple.json']
    deadlines = [200, 300, 90, 80, 160]
    for lgn, deadline in zip(graphs, deadlines):
        fp = pkg_resources.resource_filename('dfms.dropmake', 'web/{0}'.format(lgn))
        drops = LG(fp).unroll_to_tpl()
        # touching .json exercises the (lazy) JSON generation
        MinNumPartsPGTP(drops, deadline).json
def test_metis_pgtp_gen_pg(self):
    """METIS partitioning (3 parts, unmerged) mapped onto 3 nodes."""
    graphs = ['lofar_std.json', 'chiles_two.json', 'test_grpby_gather.json',
              'chiles_two_dev1.json', 'chiles_simple.json']
    node_list = ['10.128.0.11', '10.128.0.12', '10.128.0.13']
    for lgn in graphs:
        fp = pkg_resources.resource_filename('dfms.dropmake', 'web/{0}'.format(lgn))
        drops = LG(fp).unroll_to_tpl()
        pgtp = MetisPGTP(drops, 3)
        pgtp.json
        pgtp.to_pg_spec(node_list)
def test_large_graph_pgtp_gen_pg(self):
    """Map a large LG onto 500 synthetic nodes via METIS partitioning."""
    num_nodes = 500
    node_list = ['10.128.0.{0}'.format(j) for j in range(num_nodes)]
    for lgn in ['lofar_std_large.json']:
        fp = pkg_resources.resource_filename('dfms.dropmake', 'web/{0}'.format(lgn))
        drops = LG(fp).unroll_to_tpl()
        pgtp = MetisPGTP(drops, num_nodes)
        pgtp.json
        pgtp.to_pg_spec(node_list)
def test_pso_scheduler(self):
    """Run the PSO scheduler with and without a deadline per graph."""
    lgs = {'cont_img.json': 540, 'lofar_std.json': 450,
           'test_grpby_gather.json': 70, 'chiles_simple.json': 160}
    mdp = 2
    for lgn, deadline in lgs.items():
        drops = LG(get_lg_fname(lgn)).unroll_to_tpl()
        PSOScheduler(drops, max_dop=mdp).partition_dag()
        PSOScheduler(drops, max_dop=mdp, deadline=deadline).partition_dag()
def test_pso_scheduler(self):
    """Run the PSO scheduler with and without a deadline per graph."""
    graphs = ['cont_img.json', 'lofar_std.json', 'chiles_two.json',
              'test_grpby_gather.json', 'chiles_two_dev1.json',
              'chiles_simple.json', 'test_seq_gather.json']
    deadlines = [540, 450, 60, 70, 60, 160, 150]
    mdp = 2
    for lgn, deadline in zip(graphs, deadlines):
        fp = pkg_resources.resource_filename('dfms.dropmake', 'web/{0}'.format(lgn))
        drops = LG(fp).unroll_to_tpl()
        # no-deadline run, then a deadline-constrained run
        num_parts_done, lpl, ptime, parts = PSOScheduler(drops, max_dop=mdp).partition_dag()
        num_parts_done, lpl, ptime, parts = PSOScheduler(drops, max_dop=mdp, deadline=deadline).partition_dag()
def test_mysarkar_scheduler(self): lgs = { 'cont_img.json': 20, 'lofar_std.json': 15, 'test_grpby_gather.json': 10, 'chiles_simple.json': 5 } mdp = 8 for lgn, numparts in lgs.items(): fp = get_lg_fname(lgn) lg = LG(fp) drop_list = lg.unroll_to_tpl() mys = MySarkarScheduler(drop_list, max_dop=mdp) _, _, _, parts = mys.partition_dag() for part in parts: pass """
def test_minnumparts_scheduler(self):
    """Run MinNumPartsScheduler over each graph with its deadline."""
    lgs = {'cont_img.json': 500, 'lofar_std.json': 200,
           'test_grpby_gather.json': 90, 'chiles_simple.json': 160}
    mdp = 8
    ofa = 0.5
    for lgn, deadline in lgs.items():
        drops = LG(get_lg_fname(lgn)).unroll_to_tpl()
        MinNumPartsScheduler(drops, deadline, max_dop=mdp, optimistic_factor=ofa).partition_dag()
def test_metis_pgtp_gen_pg_island(self):
    """METIS partitioning with data islands: map onto nodes + islands."""
    node_list = ['10.128.0.11', '10.128.0.12', '10.128.0.13',
                 '10.128.0.14', '10.128.0.15', '10.128.0.16']
    nb_islands = 2
    # islands are carved out of the node list
    nb_nodes = len(node_list) - nb_islands
    for lgn in ('lofar_std.json', 'test_grpby_gather.json', 'chiles_simple.json'):
        drops = LG(get_lg_fname(lgn)).unroll_to_tpl()
        pgtp = MetisPGTP(drops, nb_nodes, merge_parts=True)
        pgtp.to_gojs_json(visual=False)
        pgtp.to_pg_spec(node_list, num_islands=nb_islands)
        pgtp.result(lazy=False)
def test_mysarkar_pgtp_gen_pg_island(self):
    """MySarkar partitioning with data islands: merge then map onto nodes."""
    node_list = ['10.128.0.11', '10.128.0.12', '10.128.0.13',
                 '10.128.0.14', '10.128.0.15', '10.128.0.16']
    nb_islands = 2
    for lgn in ('lofar_std.json', 'test_grpby_gather.json', 'chiles_simple.json'):
        drops = LG(get_lg_fname(lgn)).unroll_to_tpl()
        pgtp = MySarkarPGTP(drops, None, merge_parts=True)
        pgtp.to_gojs_json(visual=False)
        try:
            pgtp.merge_partitions(len(node_list) - nb_islands, form_island=False)
        except GPGTNoNeedMergeException:
            # already few enough partitions — nothing left to verify
            continue
        pgtp.to_pg_spec(node_list, num_islands=nb_islands)
        pgtp.result()
def gen_mysarkar_pgtp(lgfname, pgt_dir, num_islands=2, cores_per_node=2, print_result=False):
    """
    Generate a Physical Graph Template (Partition) using MySarkar —
    a "somewhat greedy" scheduling algorithm. No real resource mapping
    is involved. Unroll and schedule wall-times are attached to the
    returned result dict.
    """
    t0 = time.time()
    drop_list = LG(lgfname).unroll_to_tpl()
    t_unrolled = time.time()
    do_merge = num_islands > 0
    pgt = MySarkarPGTP(drop_list, 1, 'p', cores_per_node, merge_parts=do_merge)
    if do_merge:
        pgt.to_gojs_json(string_rep=False, visual=False)
        pgt.merge_partitions(num_islands, form_island=True, island_type=1, visual=False)
    t_scheduled = time.time()
    re_dict = pgt.result()
    re_dict['unroll_time'] = '%.3f' % (t_unrolled - t0)
    re_dict['schedule_time'] = '%.3f' % (t_scheduled - t_unrolled)
    if print_result:
        print(' - '.join(['{0}:{1}'.format(k, v) for k, v in re_dict.items()]))
    return re_dict
def test_pg_generator(self):
    """The LOFAR standard pipeline LG unrolls and has the expected size."""
    fp = pkg_resources.resource_filename('dfms.dropmake', 'web/lofar_std.json')
    #fp = '/Users/Chen/proj/dfms/dfms/lg/web/lofar_std.json'
    lg = LG(fp)
    # assertEquals is a deprecated unittest alias; use assertEqual,
    # consistent with the other test_pg_generator variants in this file
    self.assertEqual(len(lg._done_dict.keys()), 36)
    drop_list = lg.unroll_to_tpl()
def gen_pgt():
    """ RESTful interface for translating Logical Graphs to Physical Graphs """
    lg_name = request.query.get('lg_name')
    if (lg_exists(lg_name)):
        try:
            # unroll the logical graph into a flat list of Drop specs
            lg = LG(lg_path(lg_name))
            drop_list = lg.unroll_to_tpl()
            part = request.query.get('num_par')
            try:
                #print('num_islands', request.query.get('num_islands'))
                num_islands = int(request.query.get('num_islands'))
            except:
                # missing or non-numeric 'num_islands' disables island merging
                num_islands = 0
            mpp = num_islands > 0
            if (part is None):
                # no partitioning requested: build a plain PGT
                is_part = ''
                pgt = PGT(drop_list)
            else:
                is_part = 'Partition'
                par_label = request.query.get('par_label')
                algo = request.query.get('algo')
                if ('metis' == algo):
                    min_goal = int(request.query.get('min_goal'))
                    ptype = int(request.query.get('ptype'))
                    # METIS 'ufactor': larger tolerated load imbalance maps to
                    # a smaller ufactor value, clamped to at least 1
                    ufactor = 100 - int(request.query.get('max_load_imb')) + 1
                    if (ufactor <= 0):
                        ufactor = 1
                    pgt = MetisPGTP(drop_list, int(part), min_goal, par_label,
                                    ptype, ufactor, merge_parts=mpp)
                elif ('mysarkar' == algo):
                    pgt = MySarkarPGTP(drop_list, int(part), par_label,
                                       int(request.query.get('max_dop')),
                                       merge_parts=mpp)
                elif ('min_num_parts' == algo):
                    time_greedy = 1 - float(request.query.get(
                        'time_greedy')) / 100.0  # assuming between 1 to 100
                    pgt = MinNumPartsPGTP(drop_list,
                                          int(request.query.get('deadline')),
                                          int(part), par_label,
                                          int(request.query.get('max_dop')),
                                          merge_parts=mpp,
                                          optimistic_factor=time_greedy)
                elif ('pso' == algo):
                    # optional PSO parameters; fall back to the defaults in
                    # `pars` when a query value is missing or non-numeric
                    params = ['deadline', 'topk', 'swarm_size']
                    pars = [None, 30, 40]
                    for i, para in enumerate(params):
                        try:
                            pars[i] = int(request.query.get(para))
                        except:
                            continue
                    pgt = PSOPGTP(drop_list, par_label,
                                  int(request.query.get('max_dop')),
                                  deadline=pars[0], topk=pars[1],
                                  swarm_size=pars[2], merge_parts=mpp)
                else:
                    raise GraphException(
                        "Unknown partition algorithm: {0}".format(algo))
            # register the PGT with the manager; pass the island count only
            # when island merging is enabled
            if (mpp):
                pgt_id = pg_mgr.add_pgt(pgt, lg_name, num_islands=num_islands)
                """
                if ('mysarkar' == algo):
                    pgt_id = pg_mgr.add_pgt(pgt, lg_name, num_islands=int(part))
                elif ('metis' == algo):
                """
            else:
                pgt_id = pg_mgr.add_pgt(pgt, lg_name)
            part_info = ' - '.join(
                ['{0}:{1}'.format(k, v) for k, v in pgt.result().items()])
            tpl = file_as_string('pg_viewer.html')
            return template(tpl, pgt_view_json_name=pgt_id,
                            partition_info=part_info,
                            is_partition_page=is_part)
        except GraphException as ge:
            response.status = 500
            return "Invalid Logical Graph {1}: {0}".format(str(ge), lg_name)
        except SchedulerException as se:
            response.status = 500
            return "Graph scheduling exception {1}: {0}".format(
                str(se), lg_name)
        except Exception as exp:
            # catch-all boundary: report the traceback in the 500 body
            response.status = 500
            trace_msg = traceback.format_exc()
            return "Graph partition exception {1}: {0}".format(
                trace_msg, lg_name)
    else:
        response.status = 404
        return "{0}: logical graph {1} not found\n".format(err_prefix, lg_name)
def test_basic_scheduler(self):
    """Constructing a Scheduler over an unrolled LG must not raise."""
    drops = LG(get_lg_fname('lofar_std.json')).unroll_to_tpl()
    Scheduler(drops)
def test_pg_test(self):
    """Unrolling the group-by/gather test LG must complete without error."""
    path = pkg_resources.resource_filename('dfms.dropmake', 'web/test_grpby_gather.json')
    LG(path).unroll_to_tpl()
def test_pg_test(self):
    """Unrolling the group-by/gather test LG must complete without error."""
    LG(get_lg_fname('test_grpby_gather.json')).unroll_to_tpl()
def test_basic_scheduler(self):
    """Constructing a Scheduler over an unrolled LG must not raise."""
    path = pkg_resources.resource_filename('dfms.dropmake', 'web/lofar_std.json')
    drops = LG(path).unroll_to_tpl()
    Scheduler(drops)
def test_pg_generator(self):
    """The LOFAR standard pipeline LG unrolls and has the expected size."""
    lg = LG(get_lg_fname('lofar_std.json'))
    # the LG is expected to contain exactly 36 parsed constructs
    self.assertEqual(len(lg._done_dict), 36)
    lg.unroll_to_tpl()
def test_pgt_to_json(self):
    """Constructing a PGT from an unrolled LG must not raise."""
    path = pkg_resources.resource_filename('dfms.dropmake', 'web/lofar_std.json')
    drops = LG(path).unroll_to_tpl()
    PGT(drops)
def test_pgt_to_json(self):
    """Constructing a PGT from an unrolled LG must not raise."""
    drops = LG(get_lg_fname('lofar_std.json')).unroll_to_tpl()
    PGT(drops)