def test_mysarkar_pgtp_gen_pg_island(self):
    lgnames = [
        "testLoop.graph",
        "cont_img.graph",
        "test_grpby_gather.graph",
        "chiles_simple.graph",
    ]
    node_list = [
        "10.128.0.11",
        "10.128.0.12",
        "10.128.0.13",
        "10.128.0.14",
        "10.128.0.15",
        "10.128.0.16",
    ]
    for i, lgn in enumerate(lgnames):
        fp = get_lg_fname(lgn)
        lg = LG(fp)
        drop_list = lg.unroll_to_tpl()
        pgtp = MySarkarPGTP(drop_list, None, merge_parts=True)
        pgtp.to_gojs_json(visual=False)
        nb_islands = 2
        # print(lgn)
        try:
            pgtp.merge_partitions(len(node_list) - nb_islands, form_island=False)
        except GPGTNoNeedMergeException as ge:
            continue
        pg_spec = pgtp.to_pg_spec(node_list, num_islands=nb_islands)
        pgtp.result()
def test_metis_pgtp_gen_pg_island(self):
    lgnames = [
        "testLoop.graph",
        "cont_img.graph",
        "test_grpby_gather.graph",
        "chiles_simple.graph",
    ]
    tgt_partnum = [15, 15, 10, 10, 5]
    node_list = [
        "10.128.0.11",
        "10.128.0.12",
        "10.128.0.13",
        "10.128.0.14",
        "10.128.0.15",
        "10.128.0.16",
    ]
    nb_islands = 2
    nb_nodes = len(node_list) - nb_islands
    for i, lgn in enumerate(lgnames):
        fp = get_lg_fname(lgn)
        lg = LG(fp)
        drop_list = lg.unroll_to_tpl()
        pgtp = MetisPGTP(drop_list, nb_nodes, merge_parts=True)
        pgtp.to_gojs_json(visual=False)
        pg_spec = pgtp.to_pg_spec(node_list, num_islands=nb_islands)
        pgtp.result(lazy=False)
def test_plasma_graph(self):
    # Test loading of a Plasma graph
    lgs = ["Plasma_test.graph"]
    for lg in lgs:
        fp = get_lg_fname(lg)
        lg = LG(fp)
        lg.unroll_to_tpl()
def test_pgt_to_json(self):
    fp = get_lg_fname("HelloWorld_simple.graph")
    lg = LG(fp)
    drop_list = lg.unroll_to_tpl()
    pgt = PGT(drop_list)
    pg_json = pgt.to_gojs_json()
    _dum = pg_json
def test_shmem_graph(self):
    # Test loading of a shared memory graph
    lgs = ["SharedMemoryTest.graph"]
    for lg in lgs:
        fp = get_lg_fname(lg)
        lg = LG(fp)
        out = lg.unroll_to_tpl()
        for drop in out:
            if drop["type"] == "plain":
                self.assertEqual("SharedMemory", drop["storage"])
def test_pg_eagle(self):
    lgs = [
        "eagle_gather_simple.graph",
        "eagle_gather_empty.graph",
        "eagle_gather.graph",
    ]
    for lg in lgs:
        fp = get_lg_fname(lg)
        lg = LG(fp)
        lg.unroll_to_tpl()
def test_mysarkar_pgtp(self):
    lgnames = [
        "testLoop.graph",
        "cont_img.graph",
        "test_grpby_gather.graph",
        "chiles_simple.graph",
    ]
    tgt_partnum = [15, 15, 10, 10, 5]
    for i, lgn in enumerate(lgnames):
        fp = get_lg_fname(lgn)
        lg = LG(fp)
        drop_list = lg.unroll_to_tpl()
        pgtp = MySarkarPGTP(drop_list)
        # Accessing the json property is enough to exercise the JSON generation path
        pgtp.json
def test_minnumparts_pgtp(self):
    lgnames = [
        "testLoop.graph",
        "cont_img.graph",
        "test_grpby_gather.graph",
        "chiles_simple.graph",
    ]
    # tgt_partnum = [15, 15, 10, 10, 5]
    tgt_deadline = [200, 300, 90, 80, 160]
    for i, lgn in enumerate(lgnames):
        fp = get_lg_fname(lgn)
        lg = LG(fp)
        drop_list = lg.unroll_to_tpl()
        pgtp = MinNumPartsPGTP(drop_list, tgt_deadline[i])
        # Accessing the json property is enough to exercise the JSON generation path
        pgtp.json
def test_pso_scheduler(self):
    # NOTE: "cont_img.graph" appears twice; dict keys are de-duplicated, so only
    # the second deadline (450) is actually exercised for that graph.
    lgs = {
        "cont_img.graph": 540,
        "cont_img.graph": 450,
        "test_grpby_gather.graph": 70,
        "chiles_simple.graph": 160,
    }
    mdp = 2
    for lgn, deadline in lgs.items():
        fp = get_lg_fname(lgn)
        lg = LG(fp)
        drop_list = lg.unroll_to_tpl()
        psps01 = PSOScheduler(drop_list, max_dop=mdp)
        psps01.partition_dag()
        psps02 = PSOScheduler(drop_list, max_dop=mdp, deadline=deadline)
        psps02.partition_dag()
def test_mysarkar_scheduler(self):
    # NOTE: "cont_img.graph" appears twice; dict keys are de-duplicated, so only
    # the second value (15) is actually exercised for that graph.
    lgs = {
        "cont_img.graph": 20,
        "cont_img.graph": 15,
        "test_grpby_gather.graph": 10,
        "chiles_simple.graph": 5,
    }
    mdp = 8
    for lgn, numparts in lgs.items():
        fp = get_lg_fname(lgn)
        lg = LG(fp)
        drop_list = lg.unroll_to_tpl()
        mys = MySarkarScheduler(drop_list, max_dop=mdp)
        _, _, _, parts = mys.partition_dag()
        for part in parts:
            pass
def test_metis_pgtp_gen_pg(self):
    lgnames = [
        "HelloWorld_simple.graph",
        "testLoop.graph",
        "cont_img.graph",
        "test_grpby_gather.graph",
        "chiles_simple.graph",
    ]
    tgt_partnum = [15, 15, 10, 10, 5]
    node_list = ["10.128.0.11", "10.128.0.12", "10.128.0.13"]
    for i, lgn in enumerate(lgnames):
        fp = get_lg_fname(lgn)
        lg = LG(fp)
        drop_list = lg.unroll_to_tpl()
        pgtp = MetisPGTP(drop_list, 3, merge_parts=True)
        # pgtp.json
        pgtp.to_gojs_json(visual=False)
        pg_spec = pgtp.to_pg_spec(node_list)
def test_minnumparts_scheduler(self):
    # NOTE: "cont_img.graph" appears twice; dict keys are de-duplicated, so only
    # the second deadline (200) is actually exercised for that graph.
    lgs = {
        "cont_img.graph": 500,
        "cont_img.graph": 200,
        "test_grpby_gather.graph": 90,
        "chiles_simple.graph": 160,
    }
    mdp = 8
    ofa = 0.5
    for lgn, deadline in lgs.items():
        fp = get_lg_fname(lgn)
        lg = LG(fp)
        drop_list = lg.unroll_to_tpl()
        mps = MinNumPartsScheduler(
            drop_list, deadline, max_dop=mdp, optimistic_factor=ofa
        )
        mps.partition_dag()
def test_mysarkar_pgtp_gen_pg(self):
    # TODO: cont_img.graph causes random failures in this test.
    # ERROR: dlg.dropmake.scheduler.SchedulerException: Cannot find a idle PID, max_dop provided: 8
    # lgnames = ['testLoop.graph', 'cont_img.graph', 'test_grpby_gather.graph', 'chiles_simple.graph']
    lgnames = [
        "testLoop.graph",
        "test_grpby_gather.graph",
        "chiles_simple.graph",
    ]
    tgt_partnum = [15, 15, 10, 10, 5]
    node_list = ["10.128.0.11", "10.128.0.12", "10.128.0.13"]
    for i, lgn in enumerate(lgnames):
        fp = get_lg_fname(lgn)
        lg = LG(fp)
        drop_list = lg.unroll_to_tpl()
        pgtp = MySarkarPGTP(drop_list, 3, merge_parts=True)
        # pgtp.json
        pgtp.to_gojs_json(visual=False)
        pg_spec = pgtp.to_pg_spec(node_list)
def unroll(lg, oid_prefix=None, zerorun=False, app=None):
    """Unrolls a logical graph"""
    start = time.time()
    lg = LG(lg, ssid=oid_prefix)
    drop_list = lg.unroll_to_tpl()
    logger.info(
        "Logical Graph unroll completed in %.3f [s]. # of Drops: %d",
        (time.time() - start),
        len(drop_list),
    )
    # Optionally set sleepTimes to 0 and apps to a specific type
    if zerorun:
        for dropspec in drop_list:
            if "sleepTime" in dropspec:
                dropspec["sleepTime"] = 0
    if app:
        for dropspec in drop_list:
            if "app" in dropspec:
                dropspec["app"] = app
    drop_list.append(lg.reprodata)
    return drop_list
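# A minimal usage sketch for unroll(), reusing the get_lg_fname() helper and the
# HelloWorld_simple.graph fixture seen in the tests above. The oid_prefix value
# and the app string are illustrative placeholders, not values required by the API.
def _example_unroll_usage():
    fp = get_lg_fname("HelloWorld_simple.graph")
    # zerorun=True zeroes any sleepTime fields; app (if given) overrides the
    # application type recorded in each drop spec.
    drop_list = unroll(fp, oid_prefix="example-session", zerorun=True)
    # unroll() appends the reproducibility metadata as the last list element.
    drops, reprodata = drop_list[:-1], drop_list[-1]
    return drops, reprodata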
def test_pg_test(self):
    fp = get_lg_fname("test_grpby_gather.graph")
    lg = LG(fp)
    lg.unroll_to_tpl()
def test_pg_generator(self):
    fp = get_lg_fname("cont_img.graph")
    # fp = get_lg_fname('testScatter.graph')
    lg = LG(fp)
    self.assertEqual(len(lg._done_dict.keys()), 46)
    drop_list = lg.unroll_to_tpl()
def test_basic_scheduler(self):
    fp = get_lg_fname("cont_img.graph")
    lg = LG(fp)
    drop_list = lg.unroll_to_tpl()
    Scheduler(drop_list)