def multiple_workflows_c2(self):
    """Schedule all workflows level-by-level with HEFT.

    Connects every workflow under one "BIG" entry node and one "BIG"
    exit node, groups the combined task graph by level, then runs HEFT
    once per non-empty level, in level order.
    """
    # 1. Connect all workflows with one "BIG" entry node and one "BIG" exit node.
    all_tasks = Workflow.connect_wfs(self.workflows, self.machines)
    # 2. Get the level for each task.
    levels = Workflow.level_order(all_tasks)
    # 3. Schedule in level order — HEFT runs once for each level's tasks.
    #    (The level key itself is unused, so iterate values only.)
    for tasks in levels.values():
        if tasks:  # skip empty levels
            Scheduler.heft(tasks, self.machines)
    self.set_wfs_scheduled()
def re_create_example(workflows, machines, reset_task_status=False):
    """Re-create workflows and machines from a blueprint snapshot.

    Args:
        workflows: the workflows to snapshot via ExampleGen.blueprint_example.
        machines: the machines to snapshot alongside the workflows.
        reset_task_status: when True, each blueprint task's status is reset —
            READY for entry tasks (no parents), UNSCHEDULED otherwise —
            before conversion back to Task objects.

    Returns:
        Tuple (new_workflows, new_machines) rebuilt from the blueprints.
    """
    blp_tasks, blp_machines = ExampleGen.blueprint_example(workflows, machines)
    blp_tasks = sum(blp_tasks, [])  # flatten list-of-lists into one list
    new_machines = [
        Machine.blueprint_to_machine(blp_m) for blp_m in blp_machines
    ]
    new_workflows = []
    for wf_id in range(len(workflows)):
        # BUG FIX: `tasks` must start empty for every workflow. Previously it
        # was initialized once before the loop, so the reset_task_status=True
        # branch appended across iterations and workflow k received the tasks
        # of workflows 0..k.
        tasks = []
        if reset_task_status is False:
            tasks = [
                Task.blueprint_to_task(blp_t) for blp_t in blp_tasks
                if blp_t.wf_id == wf_id
            ]
        else:
            for blp_t in blp_tasks:
                if blp_t.wf_id == wf_id:
                    # Entry tasks (no parents) become READY, the rest UNSCHEDULED.
                    blp_t.status = (TaskStatus.READY
                                    if not blp_t.parents_names
                                    else TaskStatus.UNSCHEDULED)
                    tasks.append(Task.blueprint_to_task(blp_t))
        new_workflows.append(
            Workflow.blueprint_to_workflow(wf_id, tasks, new_machines))
    return new_workflows, new_machines
def load_all_types(m_info, n_tasks, n_times=1):
    """Create static machines and ``n_times`` batches of all workflow types.

    Returns a (machines, workflows) tuple; the per-batch workflow lists
    are flattened into a single list.
    """
    machines = Machine.load_n_static_machines(m_info[0], m_info[1])
    workflows = []
    for batch in range(n_times):
        workflows.extend(
            Workflow.load_all_types_wfs(machines, n=batch, n_tasks=n_tasks))
    return machines, workflows
def load_small_from_data(best_of='workflows'):
    """Load the hard-coded SMALL_EXAMPLE machines and workflows.

    Args:
        best_of: key into SMALL_EXAMPLE selecting which set of workflow
            blueprints to load.

    Returns:
        Tuple (machines, workflows) built from the stored blueprints.
    """
    machines = []
    for m_info in SMALL_EXAMPLE['machines']:
        blueprint = MachineBlueprint(m_info[0], m_info[1], m_info[2],
                                     CORE_SPEED, m_info[3])
        machines.append(Machine.blueprint_to_machine(blueprint))
    workflows = []
    for wf_id, blp_tasks in enumerate(SMALL_EXAMPLE[best_of]):
        tasks = [Task.blueprint_to_task(blp_t) for blp_t in blp_tasks]
        workflows.append(
            Workflow.blueprint_to_workflow(id_=wf_id,
                                           tasks=tasks,
                                           machines=machines))
    return machines, workflows
def create_random_small():
    """Build N_WORKFLOWS random 3-task workflows on two random machines.

    The first task of each workflow is marked as the READY entry task and
    the last as the exit task; edges are wired in by ExampleGen.

    Returns:
        Tuple (workflows, machines).
    """
    machines = [Machine.create_random_machine(m_id) for m_id in range(2)]
    workflows = []
    for wf_id in range(N_WORKFLOWS):
        tasks = [Task.create_random_task(t_id, wf_id) for t_id in range(3)]
        entry, _, exit_ = tasks
        entry.is_entry = True
        entry.status = TaskStatus.READY
        exit_.is_exit = True
        # Task objects are mutated in place, so no return value is needed.
        ExampleGen.create_edges_for_example(tasks)
        workflows.append(
            Workflow(id_=wf_id,
                     machines=machines,
                     tasks=tasks,
                     wf_type="Random",
                     add_dummies=False))
    return workflows, machines
def multiple_workflows_c3(self):
    """Schedule the merged workflow graph with round-robin HEFT.

    All workflows are first joined under one "BIG" entry node and one
    "BIG" exit node, then HEFT runs in round-robin fashion across them.
    """
    merged_tasks = Workflow.connect_wfs(self.workflows, self.machines)
    Scheduler.round_robin_heft(merged_tasks, self.machines,
                               len(self.workflows))
    self.set_wfs_scheduled()
def multiple_workflows_c1(self):
    """Schedule all workflows as one merged graph with plain HEFT.

    Workflows are connected under a shared entry and exit node, then a
    single HEFT pass schedules the whole graph.
    """
    merged_tasks = Workflow.connect_wfs(self.workflows, self.machines)
    Scheduler.heft(merged_tasks, self.machines)
    self.set_wfs_scheduled()
def load_fixed_small_random():
    """Load SMALL_EXAMPLE machines plus two fixed-shape, random-weight workflows.

    Workflow 0 is a diamond DAG (T-A -> {T-B, T-C, T-D} -> T-E -> {T-F, T-G}
    -> T-H) and workflow 1 is a chain (T-A -> T-B -> T-C -> T-D).  Only the
    graph shapes are fixed; every task runtime and edge weight is drawn
    uniformly from [1, 100] on each call.

    Returns:
        Tuple (machines, workflows).
    """

    def _build_wf(wf_id, names, edges):
        """Create TaskBlueprints for one workflow from a directed edge list.

        Each edge gets a single random weight shared by the child entry of
        the source and the parent entry of the destination (mirroring the
        original hand-written blueprints).  Entry tasks (no parents) start
        READY; all others start UNSCHEDULED.
        """
        weights = {edge: randint(1, 100) for edge in edges}
        children = {name: [] for name in names}
        parents = {name: [] for name in names}
        for (src, dst), w in weights.items():
            children[src].append({"w": w, "n": dst})
            parents[dst].append({"w": w, "n": src})
        blueprints = []
        for t_id, name in enumerate(names):
            is_entry = not parents[name]
            is_exit = not children[name]
            blueprints.append(
                TaskBlueprint(t_id, wf_id, name, randint(1, 100),
                              children[name], parents[name],
                              TaskStatus.READY if is_entry
                              else TaskStatus.UNSCHEDULED,
                              is_entry, is_exit))
        return blueprints

    machines = [
        Machine.blueprint_to_machine(
            MachineBlueprint(m_info[0], m_info[1], m_info[2],
                             CORE_SPEED, m_info[3]))
        for m_info in SMALL_EXAMPLE['machines']
    ]
    # Fixed typo in the original local name ("blp_worklows"); the huge
    # hand-written blueprint lists are replaced by edge-list descriptions.
    blp_workflows = [
        _build_wf(0,
                  ['T-A', 'T-B', 'T-C', 'T-D', 'T-E', 'T-F', 'T-G', 'T-H'],
                  [('T-A', 'T-B'), ('T-A', 'T-C'), ('T-A', 'T-D'),
                   ('T-B', 'T-E'), ('T-C', 'T-E'), ('T-D', 'T-E'),
                   ('T-E', 'T-F'), ('T-E', 'T-G'),
                   ('T-F', 'T-H'), ('T-G', 'T-H')]),
        _build_wf(1,
                  ['T-A', 'T-B', 'T-C', 'T-D'],
                  [('T-A', 'T-B'), ('T-B', 'T-C'), ('T-C', 'T-D')]),
    ]
    return machines, [
        Workflow.blueprint_to_workflow(
            id_=i,
            tasks=[Task.blueprint_to_task(blp_t) for blp_t in blp_tasks],
            machines=machines)
        for i, blp_tasks in enumerate(blp_workflows)
    ]
def load_n(m_info, n_wfs):
    """Create static machines and ``n_wfs`` random workflows on them.

    Returns:
        Tuple (machines, workflows).
    """
    machines = Machine.load_n_static_machines(m_info[0], m_info[1])
    return machines, Workflow.load_random_workflows(machines, n=n_wfs)