Example #1
def gaheft_reschedule(wf_added_time):

    copy_gaheft_schedule = Schedule({
        node: [item for item in items]
        for (node, items) in ga_initial_schedule.mapping.items()
    })

    added_time = all_initial_wf_time * wf_added_time

    mark_finished(copy_gaheft_schedule)
    gaheft_added = DynamicHeft(added_wf, resource_manager, estimator)
    gaheft_added.current_time = added_time
    gaheft_added_schedule = gaheft_added.run(copy_gaheft_schedule)
    new_ga = GAComputationManager(15, added_wf, resource_manager, estimator)

    gaheft_added_schedule = new_ga.run(gaheft_added_schedule, added_time,
                                       False)[2]

    mark_finished(gaheft_added_schedule)

    nodes_seq_validaty = Utility.validateNodesSeq(gaheft_added_schedule)
    if nodes_seq_validaty is not True:
        raise Exception("Check for nodes_seq_validaty didn't pass")
    initial_wf_validaty = Utility.validateParentsAndChildren(
        gaheft_added_schedule, initial_wf)
    if initial_wf_validaty is not True:
        raise Exception("Check for initial_wf_validaty didn't pass")
    added_wf_validaty = Utility.validateParentsAndChildren(
        gaheft_added_schedule, added_wf)
    if added_wf_validaty is not True:
        raise Exception("Check for added_wf_validaty didn't pass")
    #print("All Ok!")
    result = Utility.makespan(gaheft_added_schedule)
    return result
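A minimal driver sketch for the function above, assuming the surrounding script (see Example #15) already defines wf_added_times, n, and the globals that gaheft_reschedule reads; the loop itself is illustrative and not part of the original examples.

# Hypothetical driver loop (not from the original source).
for wf_added_time in wf_added_times:
    # the GA stage is stochastic, so each injection point is repeated n times
    makespans = [gaheft_reschedule(wf_added_time) for _ in range(n)]
    print("added at {0}: makespans {1}".format(wf_added_time, makespans))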
Example #2
def gaheft_reschedule(wf_added_time):

    copy_gaheft_schedule = Schedule({node:[item for item in items] for (node, items) in ga_initial_schedule.mapping.items()})

    added_time = all_initial_wf_time * wf_added_time

    mark_finished(copy_gaheft_schedule)
    gaheft_added = DynamicHeft(added_wf, resource_manager, estimator)
    gaheft_added.current_time = added_time
    gaheft_added_schedule = gaheft_added.run(copy_gaheft_schedule)
    new_ga = GAComputationManager(15,
                                  added_wf,
                                  resource_manager,
                                  estimator)

    gaheft_added_schedule = new_ga.run(gaheft_added_schedule, added_time, False)[2]

    mark_finished(gaheft_added_schedule)

    nodes_seq_validaty = Utility.validateNodesSeq(gaheft_added_schedule)
    if nodes_seq_validaty is not True:
        raise Exception("Check for nodes_seq_validaty didn't pass")
    initial_wf_validaty = Utility.validateParentsAndChildren(gaheft_added_schedule, initial_wf)
    if initial_wf_validaty is not True:
        raise Exception("Check for initial_wf_validaty didn't pass")
    added_wf_validaty = Utility.validateParentsAndChildren(gaheft_added_schedule, added_wf)
    if added_wf_validaty is not True:
        raise Exception("Check for added_wf_validaty didn't pass")
    #print("All Ok!")
    result = Utility.makespan(gaheft_added_schedule)
    return result
Example #3
def fitness_ordering_resourceconf(workflow, estimator, solution):
    os = solution[ORDERING_SPECIE]
    rcs = solution[RESOURCE_CONFIG_SPECIE]
    ## TODO: refactor this
    flops_set = [conf.flops for conf in rcs if conf is not None]
    resources = ResourceGenerator.r(flops_set)
    resource_manager = ExperimentResourceManager(resources)
    heft = DynamicHeft(workflow, resource_manager, estimator, os)
    schedule = heft.run({n: [] for n in resource_manager.get_nodes()})
    result = Utility.makespan(schedule)
    return 1 / result
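A hedged usage sketch for fitness_ordering_resourceconf: the key names ORDERING_SPECIE and RESOURCE_CONFIG_SPECIE come from the code above, but the concrete shape of the two species (task ids for the ordering, objects with a flops field for the resource configuration) is an assumption made only for illustration, and workflow and estimator are assumed to already be in scope.

# Hypothetical invocation; FlopsConf and the task ids are placeholders.
from collections import namedtuple

FlopsConf = namedtuple("FlopsConf", ["flops"])

candidate = {
    ORDERING_SPECIE: ["ID000", "ID001", "ID002"],                  # assumed: a task ordering
    RESOURCE_CONFIG_SPECIE: [FlopsConf(10), FlopsConf(20), None],  # None entries are skipped above
}
fitness = fitness_ordering_resourceconf(workflow, estimator, candidate)  # returns 1 / makespan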
Example #4
def fitness_ordering_resourceconf(workflow,
                                  estimator,
                                  solution):
    os = solution[ORDERING_SPECIE]
    rcs = solution[RESOURCE_CONFIG_SPECIE]
    ## TODO: refactor this
    flops_set = [conf.flops for conf in rcs if conf is not None]
    resources = ResourceGenerator.r(flops_set)
    resource_manager = ExperimentResourceManager(resources)
    heft = DynamicHeft(workflow, resource_manager, estimator, os)
    schedule = heft.run({n: [] for n in resource_manager.get_nodes()})
    result = Utility.makespan(schedule)
    return 1/result
Example #5
        def _run_heft():
            dynamic_planner = DynamicHeft(wf, resource_manager, estimator)
            nodes = HeftHelper.to_nodes(resource_manager.resources)
            current_cleaned_schedule = Schedule({node: [] for node in nodes})
            schedule_dynamic_heft = dynamic_planner.run(current_cleaned_schedule)

            self._validate(wf, estimator, schedule_dynamic_heft)

            if is_visualized:
                viz.visualize_task_node_mapping(wf, schedule_dynamic_heft)
                # Utility.create_jedule_visualization(schedule_dynamic_heft, wf_name+'_heft')
                pass
            return schedule_dynamic_heft
Example #6
        def _run_heft():
            dynamic_planner = DynamicHeft(wf, resource_manager, estimator)
            nodes = HeftHelper.to_nodes(resource_manager.resources)
            current_cleaned_schedule = Schedule({node: [] for node in nodes})
            schedule_dynamic_heft = dynamic_planner.run(
                current_cleaned_schedule)

            self._validate(wf, estimator, schedule_dynamic_heft)

            if is_visualized:
                viz.visualize_task_node_mapping(wf, schedule_dynamic_heft)
                # Utility.create_jedule_visualization(schedule_dynamic_heft, wf_name+'_heft')
                pass
            return schedule_dynamic_heft
Example #7
def heft_exp(saver, wf_name, **params):
    _wf = wf(wf_name)
    rm = ExperimentResourceManager(rg.r(params["resource_set"]["nodes_conf"]))
    estimator = SimpleTimeCostEstimator(**params["estimator_settings"])

    dynamic_heft = DynamicHeft(_wf, rm, estimator)
    heft_machine = HeftExecutor(rm,
                                heft_planner=dynamic_heft,
                                **params["executor_params"])
    heft_machine.init()
    heft_machine.run()
    resulted_schedule = heft_machine.current_schedule

    Utility.validate_dynamic_schedule(_wf, resulted_schedule)

    data = {
        "wf_name": wf_name,
        "params": params,
        "result": {
            "makespan":
            Utility.makespan(resulted_schedule),
            ## TODO: this function should be remade to adapt under conditions of dynamic env
            #"overall_transfer_time": Utility.overall_transfer_time(resulted_schedule, _wf, estimator),
            "overall_execution_time":
            Utility.overall_execution_time(resulted_schedule),
            "overall_failed_tasks_count":
            Utility.overall_failed_tasks_count(resulted_schedule)
        }
    }

    if saver is not None:
        saver(data)

    return data
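A hypothetical call sketch for heft_exp: only the key names are taken from how the function reads **params; the values are placeholders that would have to match the real constructors (rg.r, SimpleTimeCostEstimator, HeftExecutor).

# Illustrative only; fill in real settings before running.
exp_params = {
    "resource_set": {"nodes_conf": [10, 15, 25, 30]},  # assumed: per-node flops list for rg.r
    "estimator_settings": {},  # kwargs forwarded to SimpleTimeCostEstimator
    "executor_params": {},     # kwargs forwarded to HeftExecutor
}

data = heft_exp(saver=None, wf_name="Montage_25", **exp_params)
print(data["result"]["makespan"])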
Example #8
    def run_gaheftoldpop_executor(self, *args, **kwargs):
        dynamic_heft = DynamicHeft(kwargs["wf"], kwargs["resource_manager"],
                                   kwargs["estimator"])
        # stat_saver = ResultSaver(self.DEFAULT_SAVE_PATH.format(kwargs["key_for_save"], ComparisonUtility.cur_time(), ComparisonUtility.uuid()))
        stat_saver = self.build_saver(*args, **kwargs)
        chromosome_cleaner = GaChromosomeCleaner(kwargs["wf"],
                                                 kwargs["resource_manager"],
                                                 kwargs["estimator"])
        kwargs["silent"] = kwargs.get("silent", True)
        ga_machine = GaHeftOldPopExecutor(
            heft_planner=dynamic_heft,
            wf=kwargs["wf"],
            resource_manager=kwargs["resource_manager"],
            estimator=kwargs["estimator"],
            base_fail_duration=40,
            base_fail_dispersion=1,
            fixed_interval_for_ga=kwargs["fixed_interval_for_ga"],
            task_id_to_fail=kwargs["task_id_to_fail"],
            ga_builder=partial(GAFactory.default().create_ga, **kwargs),
            stat_saver=kwargs.get("stat_saver", stat_saver),
            chromosome_cleaner=kwargs.get("chromosome_cleaner",
                                          chromosome_cleaner))

        ga_machine.init()
        ga_machine.run()

        resulted_schedule = ga_machine.current_schedule
        return resulted_schedule
Example #9
    def run_cloudheft_executor(self, *args, **kwargs):
        rgen = ResourceGenerator(min_res_count=1,
                                 max_res_count=1,
                                 min_node_count=4,
                                 max_node_count=4)
        ##min_flops=20,
        ## max_flops=20)

        (public_resources, reliability_map_cloud,
         probability_estimator) = rgen.generate_public_resources()
        public_resource_manager = PublicResourceManager(
            public_resources, reliability_map_cloud, probability_estimator)

        dynamic_heft = DynamicHeft(kwargs["wf"], kwargs["resource_manager"],
                                   kwargs["estimator"])
        cloud_heft_machine = CloudHeftExecutor(
            heft_planner=dynamic_heft,
            base_fail_duration=40,
            base_fail_dispersion=1,
            desired_reliability=0.98,
            public_resource_manager=public_resource_manager,
            #initial_schedule=None)
            initial_schedule=kwargs["initial_schedule"])
        cloud_heft_machine.init()
        cloud_heft_machine.run()

        resulted_schedule = cloud_heft_machine.current_schedule
        return resulted_schedule
Example #10
def do_triple_island_exp(saver, alg_builder, chromosome_cleaner_builder,
                         schedule_to_chromosome_converter_builder, wf_name,
                         **params):
    _wf = wf(wf_name)
    rm = ExperimentResourceManager(rg.r(params["resource_set"]["nodes_conf"]))
    estimator = SimpleTimeCostEstimator(**params["estimator_settings"])
    chromosome_cleaner = chromosome_cleaner_builder(_wf, rm, estimator)
    dynamic_heft = DynamicHeft(_wf, rm, estimator)

    mpga = alg_builder(_wf,
                       rm,
                       estimator,
                       params["init_sched_percent"],
                       log_book=None,
                       stats=None,
                       alg_params=params["alg_params"])

    machine = MIGaHeftExecutor(heft_planner=dynamic_heft,
                               wf=_wf,
                               resource_manager=rm,
                               estimator=estimator,
                               ga_builder=lambda: mpga,
                               chromosome_cleaner=chromosome_cleaner,
                               schedule_to_chromosome_converter=
                               schedule_to_chromosome_converter_builder(
                                   _wf, rm, estimator),
                               **params["executor_params"])

    machine.init()
    machine.run()
    resulted_schedule = machine.current_schedule

    Utility.validate_dynamic_schedule(_wf, resulted_schedule)

    data = {
        "wf_name": wf_name,
        "params": params,
        "result": {
            "makespan":
            Utility.makespan(resulted_schedule),
            ## TODO: this function should be remade to adapt under conditions of dynamic env
            #"overall_transfer_time": Utility.overall_transfer_time(resulted_schedule, _wf, estimator),
            "overall_execution_time":
            Utility.overall_execution_time(resulted_schedule),
            "overall_failed_tasks_count":
            Utility.overall_failed_tasks_count(resulted_schedule)
        }
    }

    if saver is not None:
        saver(data)

    return data
Example #11
    def main(self,
             reliability,
             is_silent,
             wf_name,
             logger=None,
             task_id_to_fail=None,
             failure_coeff=0.2):

        wf = self.get_wf(wf_name)
        bundle = self.get_bundle(None)
        (estimator, resource_manager,
         initial_schedule) = self.get_infrastructure(bundle, reliability,
                                                     False)

        ##TODO: look here ! I'm an idiot tasks of wf != tasks of initial_schedule
        dynamic_heft = DynamicHeft(wf, resource_manager, estimator)
        heft_machine = SingleFailHeftExecutor(
            heft_planner=dynamic_heft,
            base_fail_duration=40,
            base_fail_dispersion=1,
            #initial_schedule=None)
            initial_schedule=initial_schedule,
            logger=logger,
            task_id_to_fail=task_id_to_fail,
            failure_coeff=failure_coeff)
        heft_machine.init()
        heft_machine.run()

        ## TODO: remove it later.
        if logger is not None:
            logger.flush()

        seq_time_validaty = Utility.validateNodesSeq(
            heft_machine.current_schedule)
        dependency_validaty = Utility.validateParentsAndChildren(
            heft_machine.current_schedule, wf)
        transfer_dependency_validaty = Utility.static_validateParentsAndChildren_transfer(
            heft_machine.current_schedule, wf, estimator)

        if seq_time_validaty is not True:
            raise Exception("seq_time_validaty failed. taskid=" +
                            str(task_id_to_fail))
        if dependency_validaty is not True:
            raise Exception("dependency_validaty failed. taskid=" +
                            str(task_id_to_fail))
        if transfer_dependency_validaty is not True:
            raise Exception("transfer_dependency_validaty failed. taskid=" +
                            str(task_id_to_fail))

        (makespan, vl1,
         vl2) = self.extract_result(heft_machine.current_schedule, is_silent,
                                    wf)
        return makespan
Example #12
    def run_heft_executor(self, *args, **kwargs):
        ##TODO: look here ! I'm an idiot tasks of wf != tasks of initial_schedule
        dynamic_heft = DynamicHeft(kwargs["wf"], kwargs["resource_manager"],
                                   kwargs["estimator"])
        heft_machine = HeftExecutor(
            heft_planner=dynamic_heft,
            base_fail_duration=40,
            base_fail_dispersion=1,
            #initial_schedule=None)
            initial_schedule=kwargs["initial_schedule"],
            logger=kwargs["logger"])
        heft_machine.init()
        heft_machine.run()
        resulted_schedule = heft_machine.current_schedule
        return resulted_schedule
Example #13
    def run_mpgaheftoldpop_executor(self, *args, **kwargs):
        dynamic_heft = DynamicHeft(kwargs["wf"], kwargs["resource_manager"],
                                   kwargs["estimator"])
        stat_saver = self.build_saver(*args, **kwargs)

        # emigrant_selection = lambda pop, k: selRoulette(pop, k)
        # emigrant_selection = lambda pop, k: [pop[i] for i in range(k)]
        def emigrant_selection(pop, k):
            size = len(pop)
            if k > size:
                raise Exception(
                    "Count of emigrants is greater than population: {0}>{1}".
                    format(k, size))
            res = []
            for i in range(k):
                r = random.randint(0, size - 1)
                while r in res:
                    r = random.randint(0, size - 1)
                res.append(r)
            return [pop[r] for r in res]

        kwargs["silent"] = kwargs.get("silent", True)
        kwargs["heft_planner"] = dynamic_heft
        kwargs["base_fail_duration"] = 40
        kwargs["base_fail_dispersion"] = 1
        kwargs["emigrant_selection"] = emigrant_selection
        kwargs["mixed_init_pop"] = kwargs.get("mixed_init_pop", False)
        kwargs["mpnewVSmpoldmode"] = kwargs.get("mpnewVSmpoldmode", False)
        kwargs["ga_params"] = kwargs.get("ga_params", None)
        kwargs["logger"] = kwargs.get("logger", None)
        kwargs["stat_saver"] = kwargs.get("stat_saver", stat_saver)
        kwargs["ga_builder"] = partial(GAFactory.default().create_ga, **kwargs)
        kwargs["mpga_builder"] = partial(create_mpga, **kwargs)
        kwargs["merged_pop_iters"] = kwargs.get("merged_pop_iters", 0)
        kwargs["check_evolution_for_stopping"] = kwargs.get(
            "check_evolution_for_stopping", True)
        kwargs["schedule_to_chromosome_converter"] = kwargs.get(
            "schedule_to_chromosome_converter",
            GAFunctions2.schedule_to_chromosome)

        ga_machine = MPGaHeftOldPopExecutor(**kwargs)

        ga_machine.init()
        ga_machine.run()

        resulted_schedule = ga_machine.current_schedule
        return resulted_schedule
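The emigrant_selection helper above draws k distinct individuals by rejection sampling over indices. A shorter, equivalent sketch using the standard library (random.sample also picks without replacement, but raises ValueError instead of the custom Exception when k exceeds the population size):

import random

def emigrant_selection_sample(pop, k):
    # k distinct individuals, chosen uniformly without replacement
    return random.sample(pop, k)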
Example #14
    def run_gaheft_executor(self, *args, **kwargs):
        dynamic_heft = DynamicHeft(kwargs["wf"], kwargs["resource_manager"],
                                   kwargs["estimator"])
        kwargs["silent"] = kwargs.get("silent", True)
        ga_heft_machine = GaHeftExecutor(
            heft_planner=dynamic_heft,
            wf=kwargs["wf"],
            resource_manager=kwargs["resource_manager"],
            base_fail_duration=40,
            base_fail_dispersion=1,
            fixed_interval_for_ga=kwargs["fixed_interval_for_ga"],
            ga_builder=partial(GAFactory.default().create_ga, **kwargs))

        ga_heft_machine.init()
        ga_heft_machine.run()

        resulted_schedule = ga_heft_machine.current_schedule
        return resulted_schedule
Example #15
from heft.core.environment.ResourceManager import Schedule

wf_added_times = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
#wf_added_times = [0.1]

initial_wf_name = "Montage_30"
added_wf_name = "Montage_25"

initial_wf = ExecutorRunner.get_wf(initial_wf_name, "00")
added_wf = ExecutorRunner.get_wf(added_wf_name, "10")
bundle = Utility.get_default_bundle()
(estimator, resource_manager,
 initial_schedule) = ExecutorRunner.get_infrastructure(bundle, 1.0, False)

## planning for initial wf
heft = DynamicHeft(initial_wf, resource_manager, estimator)
empty_schedule = Schedule({node: [] for node in heft.get_nodes()})
ga = GAComputationManager(15, initial_wf, resource_manager, estimator)

ga_initial_schedule = ga._get_ga_alg()(empty_schedule, None)[2]

all_initial_wf_time = Utility.makespan(ga_initial_schedule)

print("Initial time: " + str(all_initial_wf_time))

n = 5


## planning for added wf
def gaheft_reschedule(wf_added_time):
Example #16
from heft.experiments.comparison_experiments.executors.GaHeftExecutor import GAComputationManager
from heft.core.environment.ResourceManager import Schedule

wf_added_times = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
#wf_added_times = [0.1]

initial_wf_name = "Montage_30"
added_wf_name = "Montage_25"

initial_wf = ExecutorRunner.get_wf(initial_wf_name, "00")
added_wf = ExecutorRunner.get_wf(added_wf_name, "10")
bundle = Utility.get_default_bundle()
(estimator, resource_manager, initial_schedule) = ExecutorRunner.get_infrastructure(bundle, 1.0, False)

## planning for initial wf
heft = DynamicHeft(initial_wf, resource_manager, estimator)
empty_schedule = Schedule({node:[] for node in heft.get_nodes()})
ga = GAComputationManager(15,
                          initial_wf,
                          resource_manager,
                          estimator)

ga_initial_schedule = ga._get_ga_alg()(empty_schedule, None)[2]

all_initial_wf_time = Utility.makespan(ga_initial_schedule)

print("Initial time: " + str(all_initial_wf_time))

n = 5
## planning for added wf
def gaheft_reschedule(wf_added_time):
Example #17
def do_island_inherited_pop_exp(alg_builder, mp_alg_builder, algorithm_builder,
                                chromosome_cleaner_builder,
                                schedule_to_chromosome_converter_builder,
                                wf_name, **params):
    _wf = wf(wf_name)
    rm = ExperimentResourceManager(rg.r(params["resource_set"]["nodes_conf"]))
    estimator = SimpleTimeCostEstimator(**params["estimator_settings"])
    chromosome_cleaner = chromosome_cleaner_builder(_wf, rm, estimator)
    dynamic_heft = DynamicHeft(_wf, rm, estimator)
    ga = alg_builder(_wf,
                     rm,
                     estimator,
                     params["init_sched_percent"],
                     log_book=None,
                     stats=None,
                     alg_params=params["alg_params"])

    ## TODO: remake this part later.
    # def reverse_interface_adapter(func):
    #     def wrap(*args, **kwargs):
    #         (best, pop, resulted_schedule, _), logbook = func(*args, **kwargs)
    #         return pop, logbook, best
    #     return wrap

    mpga = mp_alg_builder(
        _wf,
        rm,
        estimator,
        params["init_sched_percent"],
        algorithm=partial(algorithm_builder, **params["alg_params"]),
        #algorithm=reverse_interface_adapter(ga),
        log_book=None,
        stats=None,
        alg_params=params["alg_params"])

    kwargs = dict(params["executor_params"])
    kwargs.update(params["alg_params"])
    kwargs["ga_params"] = {"population": params["alg_params"]["n"]}

    machine = MPGaHeftOldPopExecutor(heft_planner=dynamic_heft,
                                     wf=_wf,
                                     resource_manager=rm,
                                     estimator=estimator,
                                     stat_saver=None,
                                     ga_builder=lambda: ga,
                                     mpga_builder=lambda: mpga,
                                     chromosome_cleaner=chromosome_cleaner,
                                     mpnewVSmpoldmode=False,
                                     mixed_init_pop=False,
                                     emigrant_selection=None,
                                     check_evolution_for_stopping=False,
                                     schedule_to_chromosome_converter=
                                     schedule_to_chromosome_converter_builder(
                                         _wf, rm, estimator),
                                     **kwargs)

    machine.init()
    machine.run()
    resulted_schedule = machine.current_schedule
    stat_data = machine.executor_stat_data

    Utility.validate_dynamic_schedule(_wf, resulted_schedule)

    data = {
        "wf_name": wf_name,
        "params": params,
        "result": {
            "makespan":
            Utility.makespan(resulted_schedule),
            ## TODO: this function should be remade to adapt under conditions of dynamic env
            #"overall_transfer_time": Utility.overall_transfer_time(resulted_schedule, _wf, estimator),
            "overall_execution_time":
            Utility.overall_execution_time(resulted_schedule),
            "overall_failed_tasks_count":
            Utility.overall_failed_tasks_count(resulted_schedule)
        }
    }

    return data
Example #18
from heft.core.environment.ResourceManager import Schedule
from heft.experiments.comparison_experiments.common import ExecutorRunner

wf_added_times = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
#wf_added_times = [0.1]

initial_wf_name = "Montage_30"
added_wf_name = "Montage_25"

initial_wf = ExecutorRunner.get_wf(initial_wf_name, "00")
added_wf = ExecutorRunner.get_wf(added_wf_name, "10")
bundle = Utility.get_default_bundle()
(estimator, resource_manager, initial_schedule) = ExecutorRunner.get_infrastructure(bundle, 1.0, False)

## planning for initial wf
heft = DynamicHeft(initial_wf, resource_manager, estimator)
empty_schedule = Schedule({node: [] for node in heft.get_nodes()})
heft_schedule = heft.run(empty_schedule)

all_initial_wf_time = Utility.makespan(heft_schedule)
print("Initial time: " + str(all_initial_wf_time))

n = 1

## planning for added wf
def heft_reschedule(wf_added_time):

    copy_heft_schedule = Schedule({node:[item for item in items] for (node, items) in heft_schedule.mapping.items()})

    added_time = all_initial_wf_time * wf_added_time
    heft_added = DynamicHeft(added_wf, resource_manager, estimator)
Example #19
def do_inherited_pop_exp(saver, alg_builder, chromosome_cleaner_builder,
                         wf_name, **params):
    _wf = wf(wf_name)
    rm = ExperimentResourceManager(rg.r(params["resource_set"]["nodes_conf"]))
    estimator = SimpleTimeCostEstimator(**params["estimator_settings"])
    chromosome_cleaner = chromosome_cleaner_builder(_wf, rm, estimator)
    dynamic_heft = DynamicHeft(_wf, rm, estimator)

    logbook = Logbook()

    stats = tools.Statistics(lambda ind: ind.fitness.values[0])
    stats.register("avg", numpy.mean)
    stats.register("std", numpy.std)
    stats.register("min", numpy.min)
    stats.register("max", numpy.max)

    ga = alg_builder(_wf,
                     rm,
                     estimator,
                     params["init_sched_percent"],
                     log_book=logbook,
                     stats=stats,
                     alg_params=params["alg_params"])

    machine = GaHeftOldPopExecutor(heft_planner=dynamic_heft,
                                   wf=_wf,
                                   resource_manager=rm,
                                   estimator=estimator,
                                   stat_saver=None,
                                   ga_builder=lambda: ga,
                                   chromosome_cleaner=chromosome_cleaner,
                                   **params["executor_params"])

    machine.init()
    print("Executor start")
    machine.run()
    print("Executor stop")

    resulted_schedule = machine.current_schedule
    stat_data = machine.executor_stat_data

    Utility.validate_dynamic_schedule(_wf, resulted_schedule)

    data = {
        "wf_name": wf_name,
        "params": params,
        "result": {
            "random_init_logbook":
            stat_data["random_init_logbook"],
            "inherited_init_logbook":
            stat_data["inherited_init_logbook"],
            "makespan":
            Utility.makespan(resulted_schedule),
            ## TODO: this function should be remade to adapt under conditions of dynamic env
            #"overall_transfer_time": Utility.overall_transfer_time(resulted_schedule, _wf, estimator),
            "overall_execution_time":
            Utility.overall_execution_time(resulted_schedule),
            "overall_failed_tasks_count":
            Utility.overall_failed_tasks_count(resulted_schedule)
        }
    }

    if saver is not None:
        saver(data)

    return data
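For reference, a minimal, self-contained sketch of the DEAP statistics setup used in the example above, shown with the imports it assumes (deap and numpy); the commented record/logbook calls are illustrative and not taken from the original code.

import numpy
from deap import tools

logbook = tools.Logbook()
stats = tools.Statistics(lambda ind: ind.fitness.values[0])
stats.register("avg", numpy.mean)
stats.register("std", numpy.std)
stats.register("min", numpy.min)
stats.register("max", numpy.max)

# inside the GA loop one would typically do:
# record = stats.compile(population)
# logbook.record(gen=gen_number, **record)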