Example #1
    def main(self):
        timer = util.DelayTimer(max_minutes=None, period=SERVICE_PERIOD)
        chkpoint_counter = 0
        num_evals = 0

        logger.info(f"Generating {self.num_workers} initial points...")
        XX = self.optimizer.ask_initial(n_points=self.num_workers)
        self.evaluator.add_eval_batch(XX)

        # MAIN LOOP
        for elapsed_str in timer:
            logger.info(f"Elapsed time: {elapsed_str}")
            results = list(self.evaluator.get_finished_evals())
            num_evals += len(results)
            chkpoint_counter += len(results)
            if EXIT_FLAG or num_evals >= self.max_evals:
                break
            if results:
                logger.info(
                    f"Refitting model with batch of {len(results)} evals")
                self.optimizer.tell(results)
                logger.info(
                    f"Drawing {len(results)} points with strategy {self.optimizer.strategy}")
                # ! 'ask' is written as a generator because requesting one large
                # ! batch is slow; we get better performance when the request is
                # ! split into smaller batches. The RF surrogate is constantly
                # ! re-fit during the call to ask, so a single large request
                # ! becomes slow when there are a large number of workers.
                for batch in self.optimizer.ask(n_points=len(results)):
                    self.evaluator.add_eval_batch(batch)
            if chkpoint_counter >= CHECKPOINT_INTERVAL:
                self.evaluator.dump_evals()
                chkpoint_counter = 0

        logger.info('Hyperopt driver finishing')
        self.evaluator.dump_evals()
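The generator-style `ask` referenced by the comment above is not shown on this page. Below is a minimal sketch of that pattern, assuming a scikit-optimize-style backend held in a hypothetical `self._optimizer` attribute with its own `ask(n_points=...)` method; the `max_batch_size` parameter is likewise an assumption, not part of the original API.

    # Hypothetical sketch; not the optimizer's actual implementation.
    def ask(self, n_points, max_batch_size=8):
        """Yield n_points suggested configurations in small batches so the
        evaluator can start on early points while later ones are computed."""
        remaining = n_points
        while remaining > 0:
            batch_size = min(max_batch_size, remaining)
            # self._optimizer is assumed to be a skopt-style backend with
            # its own ask(n_points=...) method.
            batch = self._optimizer.ask(n_points=batch_size)
            remaining -= batch_size
            yield batch

Yielding batch by batch lets the driver loop above hand each batch to `self.evaluator.add_eval_batch` as soon as it is ready, instead of waiting for the full set of points.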
Example #2
    def main(self):
        timer = util.DelayTimer(max_minutes=None, period=SERVICE_PERIOD)
        chkpoint_counter = 0
        num_evals = 0

        logger.info(f"Generating {self.num_workers} initial points...")
        XX = self.optimizer.ask_initial(n_points=self.num_workers)
        self.evaluator.add_eval_batch(XX)

        # MAIN LOOP
        for elapsed_str in timer:
            logger.info(f"Elapsed time: {elapsed_str}")
            results = list(self.evaluator.get_finished_evals())
            num_evals += len(results)
            chkpoint_counter += len(results)
            if EXIT_FLAG or num_evals >= self.args.max_evals:
                break
            if results:
                logger.info(
                    f"Refitting model with batch of {len(results)} evals")
                self.optimizer.tell(results)
                logger.info(
                    f"Drawing {len(results)} points with strategy {self.optimizer.strategy}"
                )
                for batch in self.optimizer.ask(n_points=len(results)):
                    self.evaluator.add_eval_batch(batch)
            if chkpoint_counter >= CHECKPOINT_INTERVAL:
                self.evaluator.dump_evals()
                chkpoint_counter = 0

        logger.info('Hyperopt driver finishing')
        self.evaluator.dump_evals()
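All of these drivers pace their main loop with `util.DelayTimer`, whose source is not included here. The sketch below is a minimal stand-in consistent with the usage above: it is iterable, yields a human-readable elapsed-time string on each tick, sleeps `period` seconds between ticks, and stops after `max_minutes` unless that is None. The exact format of the elapsed string is an assumption.

import time


class DelayTimer:
    """Minimal sketch of util.DelayTimer: an iterable that yields a
    human-readable elapsed-time string, sleeps `period` seconds between
    ticks, and stops after `max_minutes` (runs forever when None)."""

    def __init__(self, max_minutes=None, period=60):
        self.max_seconds = None if max_minutes is None else max_minutes * 60
        self.period = period

    def __iter__(self):
        start = time.time()
        while True:
            elapsed = time.time() - start
            if self.max_seconds is not None and elapsed >= self.max_seconds:
                return
            minutes, seconds = divmod(int(elapsed), 60)
            yield f"{minutes:02d}:{seconds:02d}"  # format is an assumption
            time.sleep(self.period)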
Example #3
    def main(self):
        # opt = GAOptimizer(cfg)
        # evaluator = evaluate.create_evaluator(cfg)
        logger.info(f"Starting new run")

        timer = util.DelayTimer(max_minutes=None, period=SERVICE_PERIOD)
        timer = iter(timer)
        elapsed_str = next(timer)

        logger.info("Hyperopt GA driver starting")
        logger.info(f"Elapsed time: {elapsed_str}")

        if self.optimizer.pop is None:
            logger.info("Generating initial population")
            logger.info(f"{self.optimizer.INIT_POP_SIZE} individuals")
            self.optimizer.pop = self.optimizer.toolbox.population(
                n=self.optimizer.INIT_POP_SIZE)
            individuals = self.optimizer.pop
            self.evaluate_fitnesses(
                individuals, self.optimizer, self.evaluator, self.args.eval_timeout_minutes)
            self.optimizer.record_generation(num_evals=len(self.optimizer.pop))

            with open('ga_logbook.log', 'w') as fp:
                fp.write(str(self.optimizer.logbook))
            print("best:", self.optimizer.halloffame[0])

        while self.optimizer.current_gen < self.optimizer.NGEN:
            self.optimizer.current_gen += 1
            logger.info(
                f"Generation {self.optimizer.current_gen} out of {self.optimizer.NGEN}")
            logger.info(f"Elapsed time: {elapsed_str}")

            # Select the next generation individuals
            offspring = self.optimizer.toolbox.select(
                self.optimizer.pop, len(self.optimizer.pop))
            # Clone the selected individuals
            offspring = list(map(self.optimizer.toolbox.clone, offspring))

            # Apply crossover and mutation on the offspring
            for child1, child2 in zip(offspring[::2], offspring[1::2]):
                if random.random() < self.optimizer.CXPB:
                    self.optimizer.toolbox.mate(child1, child2)
                    del child1.fitness.values
                    del child2.fitness.values

            for mutant in offspring:
                if random.random() < self.optimizer.MUTPB:
                    self.optimizer.toolbox.mutate(mutant)
                    del mutant.fitness.values

            # Evaluate the individuals with an invalid fitness
            invalid_ind = [ind for ind in offspring if not ind.fitness.valid]
            logger.info(f"Evaluating {len(invalid_ind)} invalid individuals")
            self.evaluate_fitnesses(invalid_ind, self.optimizer, self.evaluator,
                                    self.args.eval_timeout_minutes)

            # The population is entirely replaced by the offspring
            self.optimizer.pop[:] = offspring

            self.optimizer.record_generation(num_evals=len(invalid_ind))

            with open('ga_logbook.log', 'w') as fp:
                fp.write(str(self.optimizer.logbook))
            print("best:", self.optimizer.halloffame[0])