Code example #1
def gaheft_reschedule(wf_added_time):

    # Copy the per-node task lists so the original GA schedule stays untouched
    copy_gaheft_schedule = Schedule({node: [item for item in items]
                                     for (node, items) in ga_initial_schedule.mapping.items()})

    added_time = all_initial_wf_time * wf_added_time

    mark_finished(copy_gaheft_schedule)
    gaheft_added = DynamicHeft(added_wf, resource_manager, estimator)
    gaheft_added.current_time = added_time
    gaheft_added_schedule = gaheft_added.run(copy_gaheft_schedule)
    new_ga = GAComputationManager(15, added_wf, resource_manager, estimator)

    gaheft_added_schedule = new_ga.run(gaheft_added_schedule, added_time, False)[2]

    mark_finished(gaheft_added_schedule)

    nodes_seq_validity = Utility.validateNodesSeq(gaheft_added_schedule)
    if nodes_seq_validity is not True:
        raise Exception("nodes_seq_validity check didn't pass")
    initial_wf_validity = Utility.validateParentsAndChildren(gaheft_added_schedule, initial_wf)
    if initial_wf_validity is not True:
        raise Exception("initial_wf_validity check didn't pass")
    added_wf_validity = Utility.validateParentsAndChildren(gaheft_added_schedule, added_wf)
    if added_wf_validity is not True:
        raise Exception("added_wf_validity check didn't pass")
    #print("All Ok!")
    result = Utility.makespan(gaheft_added_schedule)
    return result
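
A minimal sketch of how this function might be driven, assuming the module-level setup shown in code example #7 (wf_added_times, the GA initial schedule, and so on) is in scope; the loop itself is hypothetical and not part of the project:

for wf_added_time in wf_added_times:
    # gaheft_reschedule returns the makespan of the combined schedule
    makespan = gaheft_reschedule(wf_added_time)
    print("added at " + str(wf_added_time) + " of initial makespan: " + str(makespan))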
Code example #2
File: VersusFunctors.py Project: fonhorst/heft
    def __call__(self, wf_name):
        dax2 = '..\\..\\resources\\' + wf_name + '.xml'
        ## dedicated resources are the same for all bundles
        path = '..\\..\\resources\\saved_schedules\\' + wf_name + '_bundle' + '.json'
        bundle = Utility.load_schedule(path, Utility.readWorkflow(dax2, wf_name))

        mainCloudHEFTwithGA = partial(self.mainCloudHeft, with_ga_initial=True, the_bundle=bundle)
        mainHEFTwithGA = partial(self.mainHeft, with_ga_initial=True, the_bundle=bundle)
        mainGAwithBundle = partial(self.mainGA, the_bundle=bundle)

        resGA = run("GA", mainGAwithBundle, wf_name, self.reliability, self.n)
        resHeft = run("Heft + GA", mainHEFTwithGA, wf_name, self.reliability, self.n)
        resCloudHeft = run("HeftREx + GA", mainCloudHEFTwithGA, wf_name, self.reliability, self.n)

        # Relative improvement, in percent, of the first algorithm over the second
        pc_hg = (1 - resHeft[2]/resGA[2])*100
        pc_chg = (1 - resCloudHeft[2]/resGA[2])*100
        pc_chh = (1 - resCloudHeft[2]/resHeft[2])*100

        result = dict()
        result['wf_name'] = wf_name
        result['algorithms'] = {
            self.HEFT_REX_GA: ComparisonUtility.get_dict(resCloudHeft),
            self.GA_HEFT: ComparisonUtility.get_dict(resHeft),
            self.GA: ComparisonUtility.get_dict(resGA)
        }
        result['profit_by_avr'] = {
            "ga_heft vs ga": pc_hg,
            "ga_heft_ReX vs ga": pc_chg,
            "ga_heft_ReX vs ga_heft": pc_chh
        }
        return result
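
For illustration with hypothetical numbers (not taken from the project): if resGA[2] were 100.0, resHeft[2] were 90.0 and resCloudHeft[2] were 85.0, then pc_hg = (1 - 90/100)*100 = 10.0, pc_chg = (1 - 85/100)*100 = 15.0 and pc_chh = (1 - 85/90)*100 ≈ 5.6; each value is the percentage by which the first algorithm's averaged result improves on the second's.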
Code example #3
def gaheft_reschedule(wf_added_time):

    copy_gaheft_schedule = Schedule({
        node: [item for item in items]
        for (node, items) in ga_initial_schedule.mapping.items()
    })

    added_time = all_initial_wf_time * wf_added_time

    mark_finished(copy_gaheft_schedule)
    gaheft_added = DynamicHeft(added_wf, resource_manager, estimator)
    gaheft_added.current_time = added_time
    gaheft_added_schedule = gaheft_added.run(copy_gaheft_schedule)
    new_ga = GAComputationManager(15, added_wf, resource_manager, estimator)

    gaheft_added_schedule = new_ga.run(gaheft_added_schedule, added_time,
                                       False)[2]

    mark_finished(gaheft_added_schedule)

    nodes_seq_validity = Utility.validateNodesSeq(gaheft_added_schedule)
    if nodes_seq_validity is not True:
        raise Exception("nodes_seq_validity check didn't pass")
    initial_wf_validity = Utility.validateParentsAndChildren(
        gaheft_added_schedule, initial_wf)
    if initial_wf_validity is not True:
        raise Exception("initial_wf_validity check didn't pass")
    added_wf_validity = Utility.validateParentsAndChildren(
        gaheft_added_schedule, added_wf)
    if added_wf_validity is not True:
        raise Exception("added_wf_validity check didn't pass")
    #print("All Ok!")
    result = Utility.makespan(gaheft_added_schedule)
    return result
Code example #4
    def main(self,
             reliability,
             is_silent,
             wf_name,
             logger=None,
             task_id_to_fail=None,
             failure_coeff=0.2):

        wf = self.get_wf(wf_name)
        bundle = self.get_bundle(None)
        (estimator, resource_manager,
         initial_schedule) = self.get_infrastructure(bundle, reliability,
                                                     False)

        ## TODO: note that the tasks of wf != the tasks of initial_schedule
        dynamic_heft = DynamicHeft(wf, resource_manager, estimator)
        heft_machine = SingleFailHeftExecutor(
            heft_planner=dynamic_heft,
            base_fail_duration=40,
            base_fail_dispersion=1,
            #initial_schedule=None)
            initial_schedule=initial_schedule,
            logger=logger,
            task_id_to_fail=task_id_to_fail,
            failure_coeff=failure_coeff)
        heft_machine.init()
        heft_machine.run()

        ## TODO: remove it later.
        if logger is not None:
            logger.flush()

        seq_time_validity = Utility.validateNodesSeq(
            heft_machine.current_schedule)
        dependency_validity = Utility.validateParentsAndChildren(
            heft_machine.current_schedule, wf)
        transfer_dependency_validity = Utility.static_validateParentsAndChildren_transfer(
            heft_machine.current_schedule, wf, estimator)

        if seq_time_validity is not True:
            raise Exception("seq_time_validity failed. taskid=" +
                            str(task_id_to_fail))
        if dependency_validity is not True:
            raise Exception("dependency_validity failed. taskid=" +
                            str(task_id_to_fail))
        if transfer_dependency_validity is not True:
            raise Exception("transfer_dependency_validity failed. taskid=" +
                            str(task_id_to_fail))

        (makespan, vl1,
         vl2) = self.extract_result(heft_machine.current_schedule, is_silent,
                                    wf)
        return makespan
Code example #5
File: FailExperiment.py Project: fonhorst/heft
    def main(self, reliability, is_silent, wf_name, logger=None, task_id_to_fail=None, failure_coeff=0.2):

        wf = self.get_wf(wf_name)
        bundle = self.get_bundle(None)
        (estimator, resource_manager, initial_schedule) = self.get_infrastructure(bundle, reliability, False)

        ## TODO: note that the tasks of wf != the tasks of initial_schedule
        dynamic_heft = DynamicHeft(wf, resource_manager, estimator)
        heft_machine = SingleFailHeftExecutor(heft_planner=dynamic_heft,
                                    base_fail_duration=40,
                                    base_fail_dispersion=1,
                                    #initial_schedule=None)
                                    initial_schedule=initial_schedule,
                                    logger=logger,
                                    task_id_to_fail=task_id_to_fail,
                                    failure_coeff=failure_coeff)
        heft_machine.init()
        heft_machine.run()

        ## TODO: remove it later.
        if logger is not None:
            logger.flush()

        seq_time_validity = Utility.validateNodesSeq(heft_machine.current_schedule)
        dependency_validity = Utility.validateParentsAndChildren(heft_machine.current_schedule, wf)
        transfer_dependency_validity = Utility.static_validateParentsAndChildren_transfer(heft_machine.current_schedule, wf, estimator)

        if seq_time_validity is not True:
            raise Exception("seq_time_validity failed. taskid=" + str(task_id_to_fail))
        if dependency_validity is not True:
            raise Exception("dependency_validity failed. taskid=" + str(task_id_to_fail))
        if transfer_dependency_validity is not True:
            raise Exception("transfer_dependency_validity failed. taskid=" + str(task_id_to_fail))


        (makespan, vl1, vl2) = self.extract_result(heft_machine.current_schedule, is_silent, wf)
        return makespan
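
A minimal usage sketch; FailRunner stands in for whatever class in FailExperiment.py defines this method, and the argument values (including the task id) are purely illustrative:

runner = FailRunner()  # hypothetical class name and constructor
makespan = runner.main(reliability=0.95,
                       is_silent=True,
                       wf_name="Montage_25",
                       task_id_to_fail="ID00005",  # illustrative task id
                       failure_coeff=0.2)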
Code example #6
def do_exp():
    config = {
        "interact_individuals_count": 500,
        "generations": 1000,
        "env": Env(_wf, rm, estimator),
        "species": [Specie(name=MAPPING_SPECIE, pop_size=500,
                           cxb=0.9, mb=0.9,
                           mate=lambda env, child1, child2: tools.cxOnePoint(child1, child2),
                           # mutate=mapping_default_mutate,
                           # mutate=lambda ctx, mutant: mapping_k_mutate(ctx, 3, mutant)
                           mutate=mapping_all_mutate,
                           # mutate=mapping_improving_mutation,
                           select=selector,
                           initialize=mapping_default_initialize,
                           # initialize=lambda ctx, pop: mapping_heft_based_initialize(ctx, pop, heft_mapping, 3),
                           stat=lambda pop: {"hamming_distances": hamming_distances([to_seq(p) for p in pop], to_seq(ms_ideal_ind)),
                                             "unique_inds_count": unique_individuals(pop),
                                             "pcm": pcm(pop),
                                             "gdm": gdm(pop)}

        ),
                    Specie(name=ORDERING_SPECIE, fixed=True,
                           representative_individual=ListBasedIndividual(os_representative))
        ],

        "solstat": lambda sols: {"best_components": hamming_for_best_components(sols, ms_ideal_ind, os_ideal_ind),
                                 "best_components_itself": best_components_itself(sols),
                                 "best": -1*Utility.makespan(build_schedule(_wf, estimator, rm, max(sols, key=lambda x: x.fitness)))
                                 },

        "operators": {
            # "choose": default_choose,
            "build_solutions": default_build_solutions,
            # "fitness": fitness_mapping_and_ordering,
            "fitness": overhead_fitness_mapping_and_ordering,
            # "assign_credits": default_assign_credits
            # "assign_credits": max_assign_credits
            "assign_credits": assign_from_transfer_overhead
        }
    }
    return do_experiment(saver, config, _wf, rm, estimator)
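
Note the sign convention in the "solstat" entry above: makespan is a cost to be minimized, while solutions are selected with max(sols, key=lambda x: x.fitness), so "best" reports -1 * makespan; a schedule with a makespan of 120.0, for example, shows up as -120.0 in the statistics.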
Code example #7
from heft.algs.heft.DSimpleHeft import DynamicHeft
## reliability doesn't matter here
from heft.core.environment import Utility
from heft.experiments.comparison_experiments.common import ExecutorRunner
from heft.experiments.comparison_experiments.executors.GaHeftExecutor import GAComputationManager
from heft.core.environment.ResourceManager import Schedule

wf_added_times = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
#wf_added_times = [0.1]

initial_wf_name = "Montage_30"
added_wf_name = "Montage_25"

initial_wf = ExecutorRunner.get_wf(initial_wf_name, "00")
added_wf = ExecutorRunner.get_wf(added_wf_name, "10")
bundle = Utility.get_default_bundle()
(estimator, resource_manager, initial_schedule) = ExecutorRunner.get_infrastructure(bundle, 1.0, False)

## planning for initial wf
heft = DynamicHeft(initial_wf, resource_manager, estimator)
empty_schedule = Schedule({node: [] for node in heft.get_nodes()})
ga = GAComputationManager(15,
                          initial_wf,
                          resource_manager,
                          estimator)

## _get_ga_alg() returns the GA as a callable; index 2 of the returned tuple is
## assumed to hold the resulting schedule (same indexing as new_ga.run(...)[2] in code example #1)
ga_initial_schedule = ga._get_ga_alg()(empty_schedule, None)[2]

all_initial_wf_time = Utility.makespan(ga_initial_schedule)

print("Initial time: " + str(all_initial_wf_time))
Code example #8
File: cga_heft_mixin.py Project: fonhorst/heft
               stat=lambda pop: {
                   "hamming_distances": hamming_distances(pop, os_ideal_ind),
                   "unique_inds_count": unique_individuals(pop),
                   "pcm": pcm(pop),
                   "gdm": gdm(pop)
               })
    ],
    "solstat":
    lambda sols: {
        "best_components":
        hamming_for_best_components(sols, ms_ideal_ind, os_ideal_ind),
        "best_components_itself":
        best_components_itself(sols),
        "best":
        -1 * Utility.makespan(
            build_schedule(_wf, estimator, rm,
                           max(sols, key=lambda x: x.fitness)))
    },
    "operators": {
        # "choose": default_choose,
        "build_solutions": default_build_solutions,
        # "fitness": fitness_mapping_and_ordering,
        "fitness": overhead_fitness_mapping_and_ordering,
        # "assign_credits": default_assign_credits
        # "assign_credits": max_assign_credits
        "assign_credits": assign_from_transfer_overhead
    }
}

saver = UniqueNameSaver("../../temp/cga_heft_mixin")
Code example #9
File: cga_heft_mixin.py Project: fonhorst/heft
                    ),
                    Specie(name=ORDERING_SPECIE, pop_size=50,
                           cxb=0.8, mb=0.5,
                           mate=ordering_default_crossover,
                           mutate=ordering_default_mutate,
                           select=ordering_selector,
                           initialize=ordering_default_initialize,
                           stat=lambda pop: {"hamming_distances": hamming_distances(pop, os_ideal_ind),
                                             "unique_inds_count": unique_individuals(pop),
                                             "pcm": pcm(pop),
                                             "gdm": gdm(pop)}
                    )
        ],
        "solstat": lambda sols: {"best_components": hamming_for_best_components(sols, ms_ideal_ind, os_ideal_ind),
                                 "best_components_itself": best_components_itself(sols),
                                 "best": -1* Utility.makespan(build_schedule(_wf, estimator, rm, max(sols, key=lambda x: x.fitness)))
                                 },
        "operators": {
            # "choose": default_choose,
            "build_solutions": default_build_solutions,
             # "fitness": fitness_mapping_and_ordering,
            "fitness": overhead_fitness_mapping_and_ordering,
            # "assign_credits": default_assign_credits
            # "assign_credits": max_assign_credits
            "assign_credits": assign_from_transfer_overhead
        }
    }

saver = UniqueNameSaver("../../temp/cga_heft_mixin")

def do_exp():
Code example #10
from heft.algs.heft.DSimpleHeft import DynamicHeft
## reliability doesn't matter here
from heft.core.environment import Utility
from heft.experiments.comparison_experiments.common import ExecutorRunner
from heft.experiments.comparison_experiments.executors.GaHeftExecutor import GAComputationManager
from heft.core.environment.ResourceManager import Schedule

wf_added_times = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
#wf_added_times = [0.1]

initial_wf_name = "Montage_30"
added_wf_name = "Montage_25"

initial_wf = ExecutorRunner.get_wf(initial_wf_name, "00")
added_wf = ExecutorRunner.get_wf(added_wf_name, "10")
bundle = Utility.get_default_bundle()
(estimator, resource_manager,
 initial_schedule) = ExecutorRunner.get_infrastructure(bundle, 1.0, False)

## planning for initial wf
heft = DynamicHeft(initial_wf, resource_manager, estimator)
empty_schedule = Schedule({node: [] for node in heft.get_nodes()})
ga = GAComputationManager(15, initial_wf, resource_manager, estimator)

ga_initial_schedule = ga._get_ga_alg()(empty_schedule, None)[2]

all_initial_wf_time = Utility.makespan(ga_initial_schedule)

print("Initial time: " + str(all_initial_wf_time))

n = 5
Code example #11
File: cga_fixed_ordering.py Project: fonhorst/heft
def do_exp():
    config = {
        "interact_individuals_count":
        100,
        "generations":
        300,
        "env":
        Env(_wf, rm, estimator),
        "species": [
            Specie(
                name=MAPPING_SPECIE,
                pop_size=50,
                cxb=0.9,
                mb=0.9,
                mate=lambda env, child1, child2: tools.cxOnePoint(
                    child1, child2),
                # mutate=mapping_default_mutate,
                # mutate=lambda ctx, mutant: mapping_k_mutate(ctx, 3, mutant)
                mutate=mapping_all_mutate,
                # mutate=OnlyUniqueMutant()(mapping_all_mutate),
                select=selector,
                # initialize=mapping_default_initialize,
                initialize=lambda ctx, pop: mapping_heft_based_initialize(
                    ctx, pop, heft_mapping, 3),
                stat=lambda pop: {
                    "hamming_distances": hamming_distances(
                        [to_seq(p) for p in pop], to_seq(ms_ideal_ind)),
                    "unique_inds_count": unique_individuals(pop),
                    "pcm": pcm(pop),
                    "gdm": gdm(pop)
                }),
            Specie(name=ORDERING_SPECIE,
                   fixed=True,
                   representative_individual=ListBasedIndividual(
                       os_representative))
        ],
        "solstat":
        lambda sols: {
            "best_components":
            hamming_for_best_components(sols, ms_ideal_ind, os_ideal_ind),
            "best_components_itself":
            best_components_itself(sols),
            "best":
            -1 * Utility.makespan(
                build_schedule(_wf, estimator, rm,
                               max(sols, key=lambda x: x.fitness)))
        },
        "operators": {
            # "choose": default_choose,
            "build_solutions": default_build_solutions,
            "fitness": fitness_mapping_and_ordering,
            # "fitness": overhead_fitness_mapping_and_ordering,
            # "assign_credits": default_assign_credits
            # "assign_credits": max_assign_credits
            "assign_credits": assign_from_transfer_overhead
        }
    }
    return do_experiment(saver, config, _wf, rm, estimator)
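
Compared with the configuration in code example #6, this experiment seeds the mapping population from a precomputed HEFT mapping (mapping_heft_based_initialize(ctx, pop, heft_mapping, 3)) instead of using the default random initialization, runs with a smaller population (50 vs 500) and fewer interacting individuals (100 vs 500) over fewer generations (300 vs 1000), and scores solutions with fitness_mapping_and_ordering rather than the overhead-aware variant.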