Example #1
    def compute_probability(self, number_of_executions, sum_execution_time_sq, sum_execution_times):

        n = TypeConversion.get_float(number_of_executions)
        sum_xi = TypeConversion.get_float(sum_execution_times)
        sum_xi_2 = TypeConversion.get_float(sum_execution_time_sq)
        self.number_of_executions = n

        if n is None:
            Logger.error("Cannot compute std deviation! Malformed input data!")
            self.mean = None
            self.deviation = None
            return

        if n == 0:
            self.mean = None
            self.deviation = None
            return

        if n == 1 and sum_xi is not None:
            self.mean = sum_xi
            self.deviation = 0
            return

        if n > 0 and (sum_xi is None or sum_xi_2 is None):
            Logger.error("Cannot compute standard deviation! Malformed input data!")
            self.mean = None
            self.deviation = None
            return

        #calculation of standard deviation
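        # uses the shortcut identity: s^2 = (sum(x_i^2) - (sum(x_i))^2 / n) / (n - 1)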
        tmp = sum_xi_2 - (1/n) * sum_xi ** 2
        s = math.sqrt((1/(n - 1)) * tmp)

        self.deviation = s
        #calculation of mean
        mean = (1/n) * sum_xi
        self.mean = mean
Example #2
    def _reschedule(self):
        new_tasks = []
        tasks_to_remove = []
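        # First pass: start every task that mfss_allows_execution permits and that can be allocated resources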
        for task_id in self.order_list:
            if self.mfss_allows_execution(task_id):
                new_task = self.allocate_resources(self.tasks_to_do[task_id])
                if new_task is not None:
                    new_tasks.append(new_task)
                    del self.tasks_to_do[task_id]
                    tasks_to_remove.append(task_id)
                    self.already_executed.append(task_id)
        for task in tasks_to_remove:
            self.order_list.remove(task)

        if len(new_tasks) == 0 and len(self.currently_assigned_resoruces) == 0 and len(self.tasks_to_do) > 0:
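            # No task could be started and nothing is running, yet work remains, so this schedule is infeasible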
            if not self.ignore_infeasible_schedules:
                raise UnfeasibleScheduleException()
            else:
                Logger.warning("Unfeasible schedule encountered. Ignoring mfss")
                for task_id in self.order_list:
                    new_task = self.allocate_resources(self.tasks_to_do[task_id])
                    if new_task is not None:
                        new_tasks.append(new_task)
                        del self.tasks_to_do[task_id]
                        tasks_to_remove.append(task_id)
                        self.already_executed.append(task_id)
            for task in tasks_to_remove:
                self.order_list.remove(task)
        return new_tasks
Example #3
    def do_it(self, ngen, cxpb, mutpb):
        pop = self.toolbox.population(n=self.no_list)

        #initial calculation of fitness for base population. TODO: Optimization. InitialFitness can be taken from the Job Object itself.
        fitness = self.toolbox.map(self.toolbox.evaluate, pop)
        for ind, fit in zip(pop, fitness):
            ind.fitness.values = fit

        best = [copy.deepcopy(ind) for ind in pop]
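        # Evolve the population: select the best, copy them, apply crossover and mutation, then re-evaluate fitness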
        for g in range(ngen):
            Logger.info("ListGA Generation: %s" % (g))

            best = select(pop, self.no_list)
            record = self.stats.compile(best)
            self.logbook.record(**record)
            pop = [copy.deepcopy(ind) for ind in best]

            for child1, child2 in zip(pop[::2], pop[1::2]):
                if random.random() < cxpb:
                    crossover(child1, child2)
                    del child1.fitness.values, child2.fitness.values
            for mutant in pop:
                mutate(mutant, mutpb)

            #TODO: Perform double justification here
            for ind in best:
                if ind not in pop:
                    pop.append(ind)
            #invalids = [ind for ind in pop if not ind.fitness.valid]
            fitnesses = self.toolbox.map(self.toolbox.evaluate, pop)
            for ind, fit in zip(pop, fitnesses):
                if fit is not None:
                    ind.fitness.values = fit

        return select(pop, n=1)
Example #4
    def _reschedule(self):
        new_tasks = []
        tasks_to_remove = []
        self.eligble_to_run = self.get_tasks_eligible_to_run()

        for task_id in self.order_list:
            if self.bottleneck_allows_execution(task_id, self.eligble_to_run):
                new_task = self.allocate_resources(self.tasks_to_do[task_id])
                self.eligble_to_run = self.get_tasks_eligible_to_run()
                if new_task is not None:
                    new_tasks.append(new_task)
                    del self.tasks_to_do[task_id]
                    tasks_to_remove.append(task_id)
                    self.already_executed.append(task_id)
        for task in tasks_to_remove:
            self.order_list.remove(task)

        if len(new_tasks) == 0 and len(self.currently_assigned_resoruces) == 0 and len(self.tasks_to_do) > 0:
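            # No task could be started and nothing is running, yet work remains, so this schedule is infeasible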
            if not self.ignore_infeasible_schedules:
                raise UnfeasibleScheduleException()
            else:
                Logger.warning("Unfeasible schedule encountered. Ignoring mfss")
                for task_id in self.order_list:
                    new_task = self.allocate_resources(self.tasks_to_do[task_id])
                    if new_task is not None:
                        new_tasks.append(new_task)
                        del self.tasks_to_do[task_id]
                        tasks_to_remove.append(task_id)
                        self.already_executed.append(task_id)
            for task in tasks_to_remove:
                self.order_list.remove(task)
        return new_tasks
Example #5
    def _reschedule(self):
        new_tasks = []

        for task_id in self.order_list:
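            # Tasks are tried in priority order; the first failed allocation ends this pass (break below)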
            if self.mfss_allows_execution(task_id) and task_id in self.tasks_to_do:
                new_task = self.allocate_resources(self.tasks_to_do[task_id])
                if new_task is not None:
                    new_tasks.append(new_task)
                    del self.tasks_to_do[task_id]
                else:
                    break

        if len(new_tasks) == 0 and len(self.currently_assigned_resoruces) == 0 and len(self.tasks_to_do) != 0:
            if not self.ignore_infeasible_schedules:
                raise UnfeasibleScheduleException()
            else:
                Logger.warning("unfeasible schedule encountered. Ignoring mfss")
                for task_id in self.tasks_to_do.keys():
                    new_task = self.allocate_resources(self.tasks_to_do[task_id])
                    if new_task is not None:
                        new_tasks.append(new_task)
                        del self.tasks_to_do[task_id]
                        break

        return new_tasks
Example #6
def visualize(simulation_result):
    resource_frequency = {}
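    # Count how often each resource with a non-zero max_share_count was used by tasks that actually took time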
    for task in simulation_result.execution_history:
        if task.finished - task.started == 0:
            continue
        for resource in task.usedResources:
            if resource.max_share_count != 0:
                if resource not in resource_frequency:
                    resource_frequency[resource] = 0
                resource_frequency[resource] += 1
    top_resources = []
    for resource, frequency in resource_frequency.items():
        top_resources.append((resource, frequency))

    top_resources.sort(key=lambda tup: tup[1], reverse=True)
    Logger.info("Top required Resources:")

    for resource in top_resources[:10]:
        Logger.info("name: '%s', is testbed: %s, frequency: %s" % (resource[0].name, resource[0].is_testbed, resource[1]))

    fig = plt.figure(figsize=(30, 25))

    ax1 = fig.add_subplot(211)
    plot_gantt(simulation_result, top_resources[:7], ax1)

    ax2 = fig.add_subplot(212)
    plot_gantt(simulation_result, top_resources[:7], ax2)
    ax2.set_xscale('log')

    return ax1, ax2
Example #7
    def initialize(self):
        super(PPPolicies, self).initialize()
        Logger.info("Generating initial Population with RBRS")
        initial_pop = self._generate_RBRS(self.job, self.param["listNoList"])
        Logger.info("Applying ListGA to initial population")
        listGA = ListGA(self.job, initial_pop)
        task_list = listGA.do_it(self.param["listGAGen"],
                                 self.param["listGACXp"],
                                 self.param["listGAMUTp"])[0]
        self.listGALog = listGA.get_logbook()
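        # Run the ArcGA stage only when at least one generation is requested; otherwise schedule with an empty arc list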
        if self.param["arcGAGen"] > 0:
            arcGA = ArcGA(self.job, task_list)
            arc_list = arcGA.do_it(self.param["arcGAGen"],
                                   self.param["arcGACXp"],
                                   self.param["arcGAMUTp"])[0]  #2, 0.5, 0.1
            #Logger.warning("len arc list: %s" % (len(arc_list)))
            self.arcGALog = arcGA.get_logbook()

            self.scheduler = MfssRB(self.job,
                                    task_list,
                                    arc_list,
                                    ignore_infeasible_schedules=True)
        else:
            self.arcGALog = []
            self.arcGALog.append({"min": 0, "max": 0})

            self.scheduler = MfssRB(self.job,
                                    task_list, [],
                                    ignore_infeasible_schedules=True)
Example #8
def visualize(simulation_result):
    resource_frequency = {}
    for task in simulation_result.execution_history:
        if task.finished - task.started == 0:
            continue
        for resource in task.usedResources:
            if resource.max_share_count != 0:
                if resource not in resource_frequency:
                    resource_frequency[resource] = 0
                resource_frequency[resource] += 1
    top_resources = []
    for resource, frequency in resource_frequency.items():
        top_resources.append((resource, frequency))

    top_resources.sort(key=lambda tup: tup[1], reverse=True)
    Logger.info("Top required Resources:")

    for resource in top_resources[:10]:
        Logger.info("name: '%s', is testbed: %s, frequency: %s" %
                    (resource[0].name, resource[0].is_testbed, resource[1]))

    fig = plt.figure(figsize=(30, 25))

    ax1 = fig.add_subplot(211)
    plot_gantt(simulation_result, top_resources[:7], ax1)

    ax2 = fig.add_subplot(212)
    plot_gantt(simulation_result, top_resources[:7], ax2)
    ax2.set_xscale('log')

    return ax1, ax2
Example #9
    def initialize(self):
        super(JFPol, self).initialize()
        Logger.info("Generating initial Population with RBRS")
        initial_pop = self._generate_RBRS(self.job, 10)
        Logger.info("Applying ListGA to initial population")
        listGA = ListGA(self.job, initial_pop)
        task_list = listGA.do_it(150, 0.8, 0.2)[0]
        self.listGALog = listGA.get_logbook()

        self.scheduler = DomainRB(self.job, task_list, ignore_infeasible_schedules=True)
Example #10
def run_optimization_process(args, parameters):
    ts = time.time()
    st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d-%H:%M:%S')
    try:
        job = deserialize(args.file)
    except IOError:
        Logger.error("The file specified was not found.")
        sys.exit(127)

    results, extremes = process_job_parallel("PPPolicies", job, TypeConversion.get_int(args.cores), TypeConversion.get_int(args.iter), parameters=parameters)
    parameters["results"] = results
    pickle.dump(parameters, open(args.out_folder + st + ".pickle", "wb"))
Example #11
def schedulerForName(name):
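    # Map scheduler class names to their classes; fall back to ReferenceScheduler when no name is given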
    schedulers = {ReferenceScheduler.__name__ : ReferenceScheduler, OptimizedDependencyScheduler.__name__ : OptimizedDependencyScheduler,
                  PPPolicies.__name__ : PPPolicies,
                  JFPol.__name__ : JFPol}
    if name is None:
        scheduler = ReferenceScheduler
    else:
        try:
            scheduler = schedulers[name]
        except KeyError:
            Logger.error("The scheduler specified does not exist.")
            sys.exit(127)
    return scheduler
Example #12
def deserialize(file_path):
    Logger.info("loading data from file: %s" % file_path)
    job = cPickle.load(open(file_path, "rb"))
    Logger.info("loaded %s tasks" % (len(job.tasks.values())))
    Logger.info("loaded %s resources" % (len(job.resources.values())))
    Logger.info("loaded %s capabilities"  % (len(job.capabilities.values())))

    return job
Example #13
def deserialize(file_path):
    Logger.info("loading data from file: %s" % file_path)
    job = cPickle.load(open(file_path, "rb"))
    Logger.info("loaded %s tasks" % (len(job.tasks.values())))
    Logger.info("loaded %s resources" % (len(job.resources.values())))
    Logger.info("loaded %s capabilities" % (len(job.capabilities.values())))

    return job
Example #14
def run_optimization_process(args, parameters):
    ts = time.time()
    st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d-%H:%M:%S')
    try:
        job = deserialize(args.file)
    except IOError:
        Logger.error("The file specified was not found.")
        sys.exit(127)

    results, extremes = process_job_parallel("PPPolicies",
                                             job,
                                             TypeConversion.get_int(
                                                 args.cores),
                                             TypeConversion.get_int(args.iter),
                                             parameters=parameters)
    parameters["results"] = results
    pickle.dump(parameters, open(args.out_folder + st + ".pickle", "wb"))
Example #15
def main():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("file", help="The pickle file containing the simulation result")
    arg_parser.add_argument("--pdf", help="specifies that the visualization should be written to a pdf file. Followed \
                            by a path")
    args = arg_parser.parse_args()
    try:
        simulation_result = load_simulation_result(args.file)
    except IOError:
        Logger.error("The file specified was not found.")
        sys.exit(127)
    visualize(simulation_result)

    if args.pdf is not None:
        plt.savefig(args.pdf)
    else:
        plt.show()
Example #16
    def initialize(self):
        super(PPPolicies, self).initialize()
        Logger.info("Generating initial Population with RBRS")
        initial_pop = self._generate_RBRS(self.job, self.param["listNoList"])
        Logger.info("Applying ListGA to initial population")
        listGA = ListGA(self.job, initial_pop)
        task_list = listGA.do_it(self.param["listGAGen"], self.param["listGACXp"], self.param["listGAMUTp"])[0]
        self.listGALog = listGA.get_logbook()
        if self.param["arcGAGen"] > 0:
            arcGA = ArcGA(self.job, task_list)
            arc_list = arcGA.do_it(self.param["arcGAGen"], self.param["arcGACXp"], self.param["arcGAMUTp"])[0] #2, 0.5, 0.1
            #Logger.warning("len arc list: %s" % (len(arc_list)))
            self.arcGALog = arcGA.get_logbook()

            self.scheduler = MfssRB(self.job, task_list, arc_list, ignore_infeasible_schedules=True)
        else:
            self.arcGALog = []
            self.arcGALog.append({"min":0, "max":0})

            self.scheduler = MfssRB(self.job, task_list, [], ignore_infeasible_schedules=True)
Example #17
def main():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        "file", help="The pickle file containing the simulation result")
    arg_parser.add_argument(
        "--pdf",
        help="specifies that the visualization should be written to a pdf file. "
             "Followed by a path")
    args = arg_parser.parse_args()
    try:
        simulation_result = load_simulation_result(args.file)
    except IOError:
        Logger.error("The file specified was not found.")
        sys.exit(127)
    visualize(simulation_result)

    if args.pdf is not None:
        plt.savefig(args.pdf)
    else:
        plt.show()
Example #18
def process_job_parallel(scheduler, job, nr_cores, nr_iter, parameters=None):
    Logger.log_level = 2
    processes = []
    manager = Manager()
    return_values = manager.dict()
    extremes = manager.dict()
    start_time = datetime.datetime.now()
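    # Fork one worker process per core; workers report back through the shared Manager dicts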
    for i in range(nr_cores):
        p = Process(target=worker, args=(i, nr_cores, scheduler, job, nr_iter, return_values, extremes, parameters,))
        processes.append(p)
        p.start()

    for process in processes:
        process.join()

    #reduce
    results = []
    for value in return_values.values():
        for entry in value:
            results.append(entry)

    min_result = None
    max_result = None

    for extreme in extremes.values():
        if min_result is None or extreme[0].total_time < min_result.total_time:
            min_result = extreme[0]
        if max_result is None or extreme[1].total_time > max_result.total_time:
            max_result = extreme[1]
    Logger.warning("Min: %s" % min_result.total_time)
    Logger.warning("Max: %s" % max_result.total_time)

    duration = datetime.datetime.now() - start_time
    Logger.warning("Simulation  complete. Duration: %s" % (duration))
    return results, (min,max)
Example #19
def main():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("file", help="The pickle file containing the simulation data")
    arg_parser.add_argument("--cores", help="The number of cores to be used.")
    arg_parser.add_argument("--iter", help="The number of iterations per core.")
    arg_parser.add_argument("--out_extremes", help="The file the extreme values should be written to.")
    arg_parser.add_argument("--out_results", help="The file all the lambdas should be written to.")
    arg_parser.add_argument("--scheduler", help="specifies the scheduler to be used. Default is referenceScheduler. \
                                                Other possible values: optimizedDependencyScheduler")
    args = arg_parser.parse_args()
    try:
        job = deserialize(args.file)
    except IOError:
        Logger.error("The file specified was not found.")
        sys.exit(127)
    results, extremes = process_job_parallel(args.scheduler, job, TypeConversion.get_int(args.cores), TypeConversion.get_int(args.iter))

    if args.out_extremes is not None:
        pickle.dump(extremes, open(args.out_extremes, "wb"))

    if args.out_results is not None:
        pickle.dump(results, open(args.out_results, "wb"))
Example #20
    def do_it(self, ngen, cxpb, mutpb):
        pop = self.toolbox.population(n=self.no_list)

        #initial calculation of fitness for base population. TODO: Optimization. InitialFitness can be taken from the Job Object itself.
        fitness = self.toolbox.map(self.toolbox.evaluate, pop)
        for ind, fit in zip(pop, fitness):
            ind.fitness.values = fit

        best = [copy.deepcopy(ind) for ind in pop]
        for g in range(ngen):
            Logger.info("ListGA Generation: %s" % (g))

            best = select(pop, self.no_list)
            record = self.stats.compile(best)
            self.logbook.record(**record)
            pop = [copy.deepcopy(ind) for ind in best]

            for child1, child2 in zip(pop[::2], pop[1::2]):
                if random.random() < cxpb:
                    crossover(child1, child2)
                    del child1.fitness.values, child2.fitness.values
            for mutant in pop:
                mutate(mutant, mutpb)

            #TODO: Perform double justification here
            for ind in best:
                if ind not in pop:
                    pop.append(ind)
            #invalids = [ind for ind in pop if not ind.fitness.valid]
            fitnesses = self.toolbox.map(self.toolbox.evaluate, pop)
            for ind, fit in zip(pop, fitnesses):
                if fit is not None:
                    ind.fitness.values = fit

        return select(pop, n=1)
Example #21
    def compute_probability(self, number_of_executions, sum_execution_time_sq,
                            sum_execution_times):

        n = TypeConversion.get_float(number_of_executions)
        sum_xi = TypeConversion.get_float(sum_execution_times)
        sum_xi_2 = TypeConversion.get_float(sum_execution_time_sq)
        self.number_of_executions = n

        if n is None:
            Logger.error("Cannot compute std deviation! Malformed input data!")
            self.mean = None
            self.deviation = None
            return

        if n == 0:
            self.mean = None
            self.deviation = None
            return

        if n == 1 and sum_xi is not None:
            self.mean = sum_xi
            self.deviation = 0
            return

        if n > 0 and (sum_xi is None or sum_xi_2 is None):
            Logger.error(
                "Cannot compute standard deviation! Malformed input data!")
            self.mean = None
            self.deviation = None
            return

        #calculation of standard deviation
        tmp = sum_xi_2 - (1 / n) * sum_xi**2
        s = math.sqrt((1 / (n - 1)) * tmp)

        self.deviation = s
        #calculation of mean
        mean = (1 / n) * sum_xi
        self.mean = mean
Example #22
def worker(id, nr_cores, scheduler, job, n, return_values, extremes, parameters=None):
    os.system("taskset -p 0xFFFFFFFF %d" % os.getpid())
    pbar = ProgressBar(maxval=n).start()
    Logger.info("spawning worker id %s" % id)
    numpy.random.seed(id + int(time.time()))
    random.seed()
    #Logger.log_level = 2
    results = []
    min = None
    max = None
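    # Give each worker its own disjoint slice of the pre-computed execution times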
    if job.already_initialized:
        for task in job.tasks.values():
            size = len(task.pre_computed_execution_times) // nr_cores
            task.pre_computed_execution_times = task.pre_computed_execution_times[id * size:(id + 1) * size]
    Scheduler = schedulerForName(scheduler) #gives us the correct class to use

    for i in range(0, n):
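        # Each iteration builds a fresh scheduler and simulates the schedule once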
        if id == 0:
            pbar.update(i)

        scheduler = Scheduler(job, parameters)
        if not job.already_initialized:
            job.initialize()
            scheduler.initialize()

        result = simulate_schedule(scheduler)
        results.append(result.total_time)
        if min is None or result.total_time < min.total_time:
            min = result
        if max is None or result.total_time > max.total_time:
            max = result

    return_values[id] = results
    extremes[id] = (min, max)
    if id == 0:
        pbar.finish()
Example #23
    def do_it(self, ngen, cxpb, mutpb):
        Logger.info("ArcGA: Creating Initial Population")
        pop = self.toolbox.population(n=self.no_p)
        Logger.info("ArcGA: Calculating base fitness")
        #inital calculation of fitness for base population.
        fitness = self.toolbox.map(self.toolbox.evaluate, pop)
        for ind, fit in zip(pop, fitness):
            if fit is not None:
                ind.fitness.values = fit

        best = [copy.deepcopy(ind) for ind in pop]
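        # Evolve the arc population: select the best, copy them, apply crossover and mutation, then re-evaluate fitness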
        for g in range(ngen):

            Logger.info("ArcGA Generation: %s" % (g))
            best = self.select(pop, self.no_p)
            record = self.stats.compile(best)
            self.logbook.record(**record)
            pop = [copy.deepcopy(ind) for ind in best]

            if random.random() < cxpb:
                self.crossover(pop, cxpb)

            for mutant in pop:
                if random.random() < mutpb:
                    self.mutate(mutant, 0.5)

            #invalids = [ind for ind in pop if not ind.fitness.valid]
            for ind in best:
                if ind not in pop:
                    pop.append(ind)

            fitnesses = self.toolbox.map(self.toolbox.evaluate, pop)
            for ind, fit in zip(pop, fitnesses):
                if fit is not None:
                    ind.fitness.values = fit

        best = self.select(pop, n=1)
        return best
Example #24
    def do_it(self, ngen, cxpb, mutpb):
        Logger.info("ArcGA: Creating Initial Population")
        pop = self.toolbox.population(n=self.no_p)
        Logger.info("ArcGA: Calculating base fitness")
        #inital calculation of fitness for base population.
        fitness = self.toolbox.map(self.toolbox.evaluate, pop)
        for ind, fit in zip(pop, fitness):
            if fit is not None:
                ind.fitness.values = fit

        best = [copy.deepcopy(ind) for ind in pop]
        for g in range(ngen):

            Logger.info("ArcGA Generation: %s" % (g))
            best = self.select(pop, self.no_p)
            record = self.stats.compile(best)
            self.logbook.record(**record)
            pop = [copy.deepcopy(ind) for ind in best]

            if random.random() < cxpb:
                self.crossover(pop, cxpb)

            for mutant in pop:
                if random.random() < mutpb:
                    self.mutate(mutant, 0.5)

            #invalids = [ind for ind in pop if not ind.fitness.valid]
            for ind in best:
                if ind not in pop:
                    pop.append(ind)

            fitnesses = self.toolbox.map(self.toolbox.evaluate, pop)
            for ind, fit in zip(pop, fitnesses):
                if fit is not None:
                    ind.fitness.values = fit

        best = self.select(pop, n=1)
        return best
Example #25
def save_simulation_result(result, output_file):
    Logger.info("writing simulation result to file: %s" % output_file)
    cPickle.dump(result, open(output_file, "wb"))
Example #26
def load_simulation_result(file_path):
    Logger.info("loading simulation result from file: %s " % file_path )
    return cPickle.load(open(file_path, "rb"))
Example #27
def load_simulation_result(file_path):
    Logger.info("loading simulation result from file: %s " % file_path)
    return cPickle.load(open(file_path, "rb"))
Example #28
def save_simulation_result(result, output_file):
    Logger.info("writing simulation result to file: %s" % output_file)
    cPickle.dump(result, open(output_file, "wb"))
Example #29
def run_multiple_opt(args, parameter_sets):
    for idx, param in enumerate(parameter_sets):
        Logger.warning("running parameter set %s of %s" %
                       (idx, len(parameter_sets)))
        run_optimization_process(args, param)
Example #30
def main():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("file", help="The pickle file containing the simulation data")
    arg_parser.add_argument("--cores", help="The number of cores to be used.")
    arg_parser.add_argument("--iter", help="The number of iterations per core.")
    arg_parser.add_argument("--out_folder", help="The folder to which result files are written")

    args = arg_parser.parse_args()
    try:
        job = deserialize(args.file)
    except IOError:
        Logger.error("The file specified was not found.")
        sys.exit(127)

    params = [
        {
            "listGAGen": 250,
            "listGACXp": 1.0,
            "listGAMUTp": 0.5,
            "listNoList": 10,
            "arcGAGen": 100,
            "arcGACXp": 0.5,
            "arcGAMUTp": 0.01,
            "arcGAn_pairs": 7,
            "arcGAno_p": 10,
            "name": "ArcGA mutp"
        },
        {
            "listGAGen": 250,
            "listGACXp": 1.0,
            "listGAMUTp": 0.5,
            "listNoList": 10,
            "arcGAGen": 100,
            "arcGACXp": 0.5,
            "arcGAMUTp": 0.1,
            "arcGAn_pairs": 7,
            "arcGAno_p": 10,
            "name": "ArcGA mutp"
        },
        {
            "listGAGen": 250,
            "listGACXp": 1.0,
            "listGAMUTp": 0.5,
            "listNoList": 10,
            "arcGAGen": 100,
            "arcGACXp": 0.5,
            "arcGAMUTp": 0.3,
            "arcGAn_pairs": 7,
            "arcGAno_p": 10,
            "name": "ArcGA mutp"
        },
        {
            "listGAGen": 250,
            "listGACXp": 1.0,
            "listGAMUTp": 0.5,
            "listNoList": 10,
            "arcGAGen": 100,
            "arcGACXp": 0.5,
            "arcGAMUTp": 0.5,
            "arcGAn_pairs": 7,
            "arcGAno_p": 10,
            "name": "ArcGA mutp"
        },
    ]
    run_multiple_opt(args, params)
Example #31
def run_multiple_opt(args, parameter_sets):
    for idx, param in enumerate(parameter_sets):
        Logger.warning("running parameter set %s of %s" % (idx, len(parameter_sets)))
        run_optimization_process(args, param)
Example #32
def main():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        "file", help="The pickle file containing the simulation data")
    arg_parser.add_argument("--cores", help="The number of cores to be used.")
    arg_parser.add_argument("--iter",
                            help="The number of iterations per core.")
    arg_parser.add_argument(
        "--out_folder", help="The folder to which result files are written")

    args = arg_parser.parse_args()
    try:
        job = deserialize(args.file)
    except IOError:
        Logger.error("The file specified was not found.")
        sys.exit(127)

    params = [
        {
            "listGAGen": 250,
            "listGACXp": 1.0,
            "listGAMUTp": 0.5,
            "listNoList": 10,
            "arcGAGen": 100,
            "arcGACXp": 0.5,
            "arcGAMUTp": 0.01,
            "arcGAn_pairs": 7,
            "arcGAno_p": 10,
            "name": "ArcGA mutp"
        },
        {
            "listGAGen": 250,
            "listGACXp": 1.0,
            "listGAMUTp": 0.5,
            "listNoList": 10,
            "arcGAGen": 100,
            "arcGACXp": 0.5,
            "arcGAMUTp": 0.1,
            "arcGAn_pairs": 7,
            "arcGAno_p": 10,
            "name": "ArcGA mutp"
        },
        {
            "listGAGen": 250,
            "listGACXp": 1.0,
            "listGAMUTp": 0.5,
            "listNoList": 10,
            "arcGAGen": 100,
            "arcGACXp": 0.5,
            "arcGAMUTp": 0.3,
            "arcGAn_pairs": 7,
            "arcGAno_p": 10,
            "name": "ArcGA mutp"
        },
        {
            "listGAGen": 250,
            "listGACXp": 1.0,
            "listGAMUTp": 0.5,
            "listNoList": 10,
            "arcGAGen": 100,
            "arcGACXp": 0.5,
            "arcGAMUTp": 0.5,
            "arcGAn_pairs": 7,
            "arcGAno_p": 10,
            "name": "ArcGA mutp"
        },
    ]
    run_multiple_opt(args, params)