join(info_file, f) for f in listdir(info_file) ] #list of json files - each json file corresponds to a single DAG gantt_label = [(info_file + f) for f in listdir(info_file)] gantt = 0 # count = 0 # count1 = 0 # task_dag_id = 0 frontier_Q = fw.frontier_Q ex_stats = "logs/transformer_profiling_128_128_128.json" for i, dag_json_file in enumerate(all_dags_jsons): if dag_json_file.endswith('json'): logging.debug("main : Reading json file " + dag_json_file) with open(dag_json_file, "r") as f: info = json.loads(f.read()) logging.debug("main : prepraing task dag number " + str(i)) all_dags.append(fw.TaskDAG(info,dag_number = i ,dataset = 1024,map_all=args.task,all_map_to_gpu=args.all_gpu,\ gpus=gpus,cpus=cpus,ctxs=ctxs,cmd_qs=cmd_qs,ex_stats_file=ex_stats)) logging.debug("main : prepared task dag number " + str(i) + "\n\n") fw.frontier_Q_lock.acquire() frontier_Q.extend(all_dags[-1].free_tasks) for task in all_dags[-1].free_tasks: task.has_enqueued = True fw.frontier_Q_lock.release() dag = all_dags[0] dag.print_task_information() tasks = dag.G.nodes() for i in range(len(tasks)): print i, tasks[i].get_kernel_ids(), dag.get_task_children_kernel_ids( tasks[i]) print dag.print_kernel_information()
# Build one TaskDAG per JSON description file found in `info_file` (a directory
# path), seed the shared frontier queue with each DAG's initially-free tasks,
# then compute b-level ranks for every DAG.
all_dags_jsons = [join(info_file, f) for f in listdir(info_file)]  # one json file per DAG
gantt_label = [(info_file + f) for f in listdir(info_file)]
gantt = 0
# count = 0
# count1 = 0
# task_dag_id = 0
frontier_Q = fw.frontier_Q

# Profiling statistics file used by TaskDAG to estimate kernel execution
# times; taken from the command line rather than hard-coded.
# ex_stats = "logs/transformer_profiling_data.json"
ex_stats = args.ex_stats_file

for i, dag_json_file in enumerate(all_dags_jsons):
    if dag_json_file.endswith('json'):
        logging.debug("main : Reading json file " + dag_json_file)
        with open(dag_json_file, "r") as f:
            info = json.load(f)  # parse directly from the file object
        logging.debug("main : prepraing task dag number " + str(i))
        # create dag for info file (ex :- dag_test1/t1.json)
        all_dags.append(fw.TaskDAG(info, dag_number=i, dataset=1024,
                                   map_all=args.task, all_map_to_gpu=args.all_gpu,
                                   gpus=gpus, cpus=cpus, ctxs=ctxs, cmd_qs=cmd_qs,
                                   ex_stats_file=ex_stats))
        logging.debug("main : prepared task dag number " + str(i) + "\n\n")
        # Guard the shared frontier queue with its lock; 'with' guarantees the
        # lock is released even if enqueueing raises (the original bare
        # acquire()/release() pair would leak the lock on an exception).
        with fw.frontier_Q_lock:
            frontier_Q.extend(all_dags[-1].free_tasks)
            for task in all_dags[-1].free_tasks:
                task.has_enqueued = True

# Rank every task by bottom-level (b-level) for priority scheduling.
for dag in all_dags:
    dag.compute_blevel_ranks()

# print "Printing initial frontier_Q tasks\n\n"
# for i,task in enumerate(fw.frontier_Q):
#     print "task number "+str(i+1)+ " rank value: "+ str(task.rank)
#     logging.debug("it's free kernels "+str([k.id for k in task.free_kernels]))
#     logging.debug("it's all kernels "+str([k.id for k in task.kernels]))
# Build one TaskDAG per JSON description file found in `info_file` (a directory
# path) using the mapping predefined in each JSON file, and seed the shared
# frontier queue with each DAG's initially-free tasks.
#
# NOTE(review): this chunk arrived truncated mid-statement; the first
# assignment below is reconstructed from the identical statement in a sibling
# copy of this script — confirm against the original file.
all_dags_jsons = [join(info_file, f) for f in listdir(info_file)]  # one json file per DAG
gantt_label = [(info_file + f) for f in listdir(info_file)]
gantt = 0
# count = 0
# count1 = 0
# task_dag_id = 0
frontier_Q = fw.frontier_Q

for i, dag_json_file in enumerate(all_dags_jsons):
    if dag_json_file.endswith('json'):
        logging.debug("main : Reading json file " + dag_json_file)
        with open(dag_json_file, "r") as f:
            info = json.load(f)  # parse directly from the file object
        logging.debug("main : prepraing task dag number " + str(i))
        # create dag for info file (ex :- dag_test1/t1.json); this variant
        # honors the device mapping predefined in the JSON description.
        all_dags.append(fw.TaskDAG(info, dag_number=i, dataset=1024,
                                   map_all=args.task, all_map_to_gpu=args.all_gpu,
                                   gpus=gpus, cpus=cpus, ctxs=ctxs, cmd_qs=cmd_qs,
                                   use_predefined_mapping=True))
        logging.debug("main : prepared task dag number " + str(i) + "\n\n")
        # Guard the shared frontier queue with its lock; 'with' guarantees the
        # lock is released even if enqueueing raises (the original bare
        # acquire()/release() pair would leak the lock on an exception).
        with fw.frontier_Q_lock:
            frontier_Q.extend(all_dags[-1].free_tasks)
            for task in all_dags[-1].free_tasks:
                task.has_enqueued = True

logging.debug("printing initial frontier_Q tasks\n\n")
# for i,task in enumerate(frontier_Q):
#     logging.debug("task number "+str(i+1)+ " "+ task.id)
#     logging.debug("it's free kernels "+str([k.id for k in task.free_kernels]))
#     logging.debug("it's all kernels "+str([k.id for k in task.kernels]))
#     logging.debug("it's dag id "+str(task.task_dag_object.id))
#     logging.debug("it's optm device is "+str(task.optm_device))