def gaheft_reschedule(wf_added_time):
    """Re-plan with GA+HEFT after a second workflow arrives mid-run.

    The baseline GA schedule is cloned, tasks already finished by the
    arrival moment are marked done, the added workflow is placed by
    dynamic HEFT and then refined by a fresh GA pass.  The combined
    schedule is validated before its makespan is returned.

    wf_added_time -- arrival moment of the added workflow, expressed as a
                     fraction of the initial schedule's makespan.
    """
    # Shallow-copy the mapping so the pristine GA schedule stays untouched.
    frozen_mapping = {node: list(items)
                      for node, items in ga_initial_schedule.mapping.items()}
    snapshot = Schedule(frozen_mapping)
    arrival_moment = all_initial_wf_time * wf_added_time
    mark_finished(snapshot)

    # Dynamic HEFT inserts the added workflow from the arrival moment on.
    dynamic_planner = DynamicHeft(added_wf, resource_manager, estimator)
    dynamic_planner.current_time = arrival_moment
    combined = dynamic_planner.run(snapshot)

    # A fresh GA refines the combined plan; index [2] holds the schedule.
    refiner = GAComputationManager(15, added_wf, resource_manager, estimator)
    combined = refiner.run(combined, arrival_moment, False)[2]
    mark_finished(combined)

    # Validate lazily so a failing check raises before later ones execute.
    for check, label in (
            (lambda: Utility.validateNodesSeq(combined),
             "nodes_seq_validaty"),
            (lambda: Utility.validateParentsAndChildren(combined, initial_wf),
             "initial_wf_validaty"),
            (lambda: Utility.validateParentsAndChildren(combined, added_wf),
             "added_wf_validaty")):
        if check() is not True:
            raise Exception("Check for " + label + " didn't pass")

    return Utility.makespan(combined)
def gaheft_reschedule(wf_added_time):
    """Clone the GA baseline schedule, inject the added workflow at
    ``all_initial_wf_time * wf_added_time`` via dynamic HEFT, polish the
    combined plan with a GA pass, validate it and return its makespan.
    """
    mapping_copy = {n: seq[:] for n, seq in ga_initial_schedule.mapping.items()}
    working_schedule = Schedule(mapping_copy)

    moment = all_initial_wf_time * wf_added_time
    # Freeze everything that has already completed by the arrival moment.
    mark_finished(working_schedule)

    heft_pass = DynamicHeft(added_wf, resource_manager, estimator)
    heft_pass.current_time = moment
    merged = heft_pass.run(working_schedule)

    ga_pass = GAComputationManager(15, added_wf, resource_manager, estimator)
    merged = ga_pass.run(merged, moment, False)[2]  # [2] -> the schedule itself
    mark_finished(merged)

    # Fail fast on the first invariant that does not hold.
    if Utility.validateNodesSeq(merged) is not True:
        raise Exception("Check for nodes_seq_validaty didn't pass")
    if Utility.validateParentsAndChildren(merged, initial_wf) is not True:
        raise Exception("Check for initial_wf_validaty didn't pass")
    if Utility.validateParentsAndChildren(merged, added_wf) is not True:
        raise Exception("Check for added_wf_validaty didn't pass")

    return Utility.makespan(merged)
def do_exp():
    """Run one CGA experiment (500 interacting individuals, 1000
    generations): an evolving mapping specie plus a fixed ordering specie,
    scored with the overhead-aware fitness.  Returns whatever
    ``do_experiment`` reports.
    """
    def mapping_stat(pop):
        # Per-generation diversity statistics for the mapping population.
        return {
            "hamming_distances": hamming_distances([to_seq(p) for p in pop],
                                                   to_seq(ms_ideal_ind)),
            "unique_inds_count": unique_individuals(pop),
            "pcm": pcm(pop),
            "gdm": gdm(pop),
        }

    def solution_stat(sols):
        # "best" is the negated makespan of the highest-fitness solution.
        leader = max(sols, key=lambda x: x.fitness)
        return {
            "best_components": hamming_for_best_components(sols, ms_ideal_ind,
                                                           os_ideal_ind),
            "best_components_itself": best_components_itself(sols),
            "best": -1 * Utility.makespan(
                build_schedule(_wf, estimator, rm, leader)),
        }

    mapping_specie = Specie(
        name=MAPPING_SPECIE,
        pop_size=500,
        cxb=0.9,
        mb=0.9,
        mate=lambda env, child1, child2: tools.cxOnePoint(child1, child2),
        mutate=mapping_all_mutate,
        select=selector,
        initialize=mapping_default_initialize,
        stat=mapping_stat,
    )
    # The ordering specie is frozen to a single representative individual.
    ordering_specie = Specie(
        name=ORDERING_SPECIE,
        fixed=True,
        representative_individual=ListBasedIndividual(os_representative),
    )
    # NOTE(review): the project also ships alternative mutate/initialize/
    # fitness/credit operators; this experiment pins this combination.
    config = {
        "interact_individuals_count": 500,
        "generations": 1000,
        "env": Env(_wf, rm, estimator),
        "species": [mapping_specie, ordering_specie],
        "solstat": solution_stat,
        "operators": {
            "build_solutions": default_build_solutions,
            "fitness": overhead_fitness_mapping_and_ordering,
            "assign_credits": assign_from_transfer_overhead,
        },
    }
    return do_experiment(saver, config, _wf, rm, estimator)
# --- experiment setup (module-level script code) ---
# Load the two workflows; "00" / "10" look like run identifiers - TODO confirm.
initial_wf = ExecutorRunner.get_wf(initial_wf_name, "00")
added_wf = ExecutorRunner.get_wf(added_wf_name, "10")
bundle = Utility.get_default_bundle()
(estimator, resource_manager, initial_schedule) = ExecutorRunner.get_infrastructure(bundle, 1.0, False)

## planning for initial wf
heft = DynamicHeft(initial_wf, resource_manager, estimator)
empty_schedule = Schedule({node:[] for node in heft.get_nodes()})
# GA builds the baseline schedule; element [2] of its result is the schedule.
ga = GAComputationManager(15, initial_wf, resource_manager, estimator)
ga_initial_schedule = ga._get_ga_alg()(empty_schedule, None)[2]
all_initial_wf_time = Utility.makespan(ga_initial_schedule)
print("Initial time: " + str(all_initial_wf_time))

# presumably the number of repetitions per experiment point - TODO confirm.
n = 5

## planning for added wf
def gaheft_reschedule(wf_added_time):
    # NOTE(review): this chunk is truncated here - the function body
    # continues beyond the visible source.
    copy_gaheft_schedule = Schedule({node:[item for item in items] for (node, items) in ga_initial_schedule.mapping.items()})
    added_time = all_initial_wf_time * wf_added_time
    mark_finished(copy_gaheft_schedule)
    gaheft_added = DynamicHeft(added_wf, resource_manager, estimator)
    gaheft_added.current_time = added_time
    gaheft_added_schedule = gaheft_added.run(copy_gaheft_schedule)
# NOTE(review): fragment - these lines are the tail of a config dict whose
# opening lives outside the visible source.
        stat=lambda pop: {
            # Diversity statistics for the ordering population.
            "hamming_distances": hamming_distances(pop, os_ideal_ind),
            "unique_inds_count": unique_individuals(pop),
            "pcm": pcm(pop),
            "gdm": gdm(pop)
        })
    ],
    "solstat": lambda sols: {
        "best_components": hamming_for_best_components(sols, ms_ideal_ind, os_ideal_ind),
        "best_components_itself": best_components_itself(sols),
        # Negated makespan of the highest-fitness composite solution.
        "best": -1 * Utility.makespan(
            build_schedule(_wf, estimator, rm, max(sols, key=lambda x: x.fitness)))
    },
    "operators": {
        # "choose": default_choose,
        "build_solutions": default_build_solutions,
        # "fitness": fitness_mapping_and_ordering,
        "fitness": overhead_fitness_mapping_and_ordering,
        # "assign_credits": default_assign_credits
        # "assign_credits": max_assign_credits
        "assign_credits": assign_from_transfer_overhead
    }
}

# Saver writes experiment results under a unique name in this directory.
saver = UniqueNameSaver("../../temp/cga_heft_mixin")
# NOTE(review): fragment - continues a Specie list / config dict opened
# outside the visible source.
    ),
    # Evolving ordering specie (unlike the fixed-representative variant).
    Specie(name=ORDERING_SPECIE, pop_size=50, cxb=0.8, mb=0.5,
           mate=ordering_default_crossover,
           mutate=ordering_default_mutate,
           select=ordering_selector,
           initialize=ordering_default_initialize,
           stat=lambda pop: {
               "hamming_distances": hamming_distances(pop, os_ideal_ind),
               "unique_inds_count": unique_individuals(pop),
               "pcm": pcm(pop),
               "gdm": gdm(pop)}
           )
    ],
    "solstat": lambda sols: {
        "best_components": hamming_for_best_components(sols, ms_ideal_ind, os_ideal_ind),
        "best_components_itself": best_components_itself(sols),
        # Negated makespan of the highest-fitness composite solution.
        "best": -1* Utility.makespan(build_schedule(_wf, estimator, rm, max(sols, key=lambda x: x.fitness)))
    },
    "operators": {
        # "choose": default_choose,
        "build_solutions": default_build_solutions,
        # "fitness": fitness_mapping_and_ordering,
        "fitness": overhead_fitness_mapping_and_ordering,
        # "assign_credits": default_assign_credits
        # "assign_credits": max_assign_credits
        "assign_credits": assign_from_transfer_overhead
    }
}

saver = UniqueNameSaver("../../temp/cga_heft_mixin")

def do_exp():
    # NOTE(review): truncated here - the body continues beyond the visible source.
# --- experiment setup (module-level script code) ---
added_wf_name = "Montage_25"
# "00" / "10" look like run identifiers - TODO confirm.
initial_wf = ExecutorRunner.get_wf(initial_wf_name, "00")
added_wf = ExecutorRunner.get_wf(added_wf_name, "10")
bundle = Utility.get_default_bundle()
(estimator, resource_manager, initial_schedule) = ExecutorRunner.get_infrastructure(bundle, 1.0, False)

## planning for initial wf
heft = DynamicHeft(initial_wf, resource_manager, estimator)
empty_schedule = Schedule({node: [] for node in heft.get_nodes()})
ga = GAComputationManager(15, initial_wf, resource_manager, estimator)
# Element [2] of the GA result is the schedule itself.
ga_initial_schedule = ga._get_ga_alg()(empty_schedule, None)[2]
all_initial_wf_time = Utility.makespan(ga_initial_schedule)
print("Initial time: " + str(all_initial_wf_time))

n = 5  # presumably repetitions per experiment point - TODO confirm

## planning for added wf
def gaheft_reschedule(wf_added_time):
    # NOTE(review): truncated chunk - the function body continues beyond
    # the visible source.
    copy_gaheft_schedule = Schedule({
        node: [item for item in items]
        for (node, items) in ga_initial_schedule.mapping.items()
    })
    added_time = all_initial_wf_time * wf_added_time
def do_exp():
    """Run one CGA experiment (100 interacting individuals, 300
    generations) with a HEFT-seeded mapping population and a fixed
    ordering specie.  Returns whatever ``do_experiment`` reports.
    """
    def mapping_stat(pop):
        # Per-generation diversity statistics for the mapping population.
        return {
            "hamming_distances": hamming_distances([to_seq(p) for p in pop],
                                                   to_seq(ms_ideal_ind)),
            "unique_inds_count": unique_individuals(pop),
            "pcm": pcm(pop),
            "gdm": gdm(pop),
        }

    def solution_stat(sols):
        # "best" is the negated makespan of the highest-fitness solution.
        leader = max(sols, key=lambda x: x.fitness)
        return {
            "best_components": hamming_for_best_components(sols, ms_ideal_ind,
                                                           os_ideal_ind),
            "best_components_itself": best_components_itself(sols),
            "best": -1 * Utility.makespan(
                build_schedule(_wf, estimator, rm, leader)),
        }

    mapping_specie = Specie(
        name=MAPPING_SPECIE,
        pop_size=50,
        cxb=0.9,
        mb=0.9,
        mate=lambda env, child1, child2: tools.cxOnePoint(child1, child2),
        mutate=mapping_all_mutate,
        select=selector,
        # Seed the mapping population from a HEFT mapping (k = 3).
        initialize=lambda ctx, pop: mapping_heft_based_initialize(
            ctx, pop, heft_mapping, 3),
        stat=mapping_stat,
    )
    # The ordering specie is frozen to a single representative individual.
    ordering_specie = Specie(
        name=ORDERING_SPECIE,
        fixed=True,
        representative_individual=ListBasedIndividual(os_representative),
    )
    # NOTE(review): alternative mutate/fitness/credit operators exist in the
    # project; this experiment pins the plain mapping-and-ordering fitness.
    config = {
        "interact_individuals_count": 100,
        "generations": 300,
        "env": Env(_wf, rm, estimator),
        "species": [mapping_specie, ordering_specie],
        "solstat": solution_stat,
        "operators": {
            "build_solutions": default_build_solutions,
            "fitness": fitness_mapping_and_ordering,
            "assign_credits": assign_from_transfer_overhead,
        },
    }
    return do_experiment(saver, config, _wf, rm, estimator)
#wf_added_times = [0.1] initial_wf_name = "Montage_30" added_wf_name = "Montage_25" initial_wf = ExecutorRunner.get_wf(initial_wf_name, "00") added_wf = ExecutorRunner.get_wf(added_wf_name, "10") bundle = Utility.get_default_bundle() (estimator, resource_manager, initial_schedule) = ExecutorRunner.get_infrastructure(bundle, 1.0, False) ## planning for initial wf heft = DynamicHeft(initial_wf, resource_manager, estimator) empty_schedule = Schedule({node:[] for node in heft.get_nodes()}) heft_schedule = heft.run(empty_schedule) all_initial_wf_time = Utility.makespan(heft_schedule) print("Initial time: " + str(all_initial_wf_time)) n = 1 ## planning for added wf def heft_reschedule(wf_added_time): copy_heft_schedule = Schedule({node:[item for item in items] for (node, items) in heft_schedule.mapping.items()}) added_time = all_initial_wf_time * wf_added_time heft_added = DynamicHeft(added_wf, resource_manager, estimator) heft_added.current_time = added_time heft_added_schedule = heft_added.run(copy_heft_schedule) mark_finished(heft_added_schedule)