Example #1
0
    def _evaluate(self, x, out, *args, **kwargs):
        """Train and score every encoded architecture in the population.

        Decodes each row of ``x`` into macro/micro genomes according to
        ``self._search_space``, trains the architecture via
        ``train_search.main``, and writes the minimization objectives
        (error = 100 - valid_acc, and FLOPs) into ``out["F"]``.

        Args:
            x: population matrix of shape [pop_size, n_var]; each row is
                one encoded architecture.
            out: pymoo output container; ``out["F"]`` receives the
                [pop_size, n_obj] objective matrix.

        Raises:
            ValueError: if ``self._search_space`` is not a recognized space.
        """
        objs = np.full((x.shape[0], self.n_obj), np.nan)

        for i in range(x.shape[0]):
            # Unique id assigned to this architecture across the whole run.
            arch_id = self._n_evaluated + 1
            print('\n')
            logging.info('Network id = {}'.format(arch_id))

            # Decode the encoding vector into genomes for the chosen space.
            if self._search_space == 'micro':
                micro_genome = micro_encoding.convert(x[i, :])
                macro_genome = None
            elif self._search_space == 'micro_garbage':
                # NOTE(review): first 21 genes appear to be padding here —
                # confirm against the encoding layout.
                micro_genome = micro_encoding.convert(x[i, 21:])
                macro_genome = None
            elif self._search_space == 'macro':
                macro_genome = macro_encoding.convert(x[i, :])
                micro_genome = None
            elif self._search_space == 'macro_garbage':
                macro_genome = macro_encoding.convert(x[i, :21])
                micro_genome = None
            elif self._search_space == 'micromacro':
                macro_genome = macro_encoding.convert(x[i, :21])
                micro_genome = micro_encoding.convert(x[i, 21:])
            else:
                # Previously an unknown space fell through and crashed with a
                # NameError at the call below; fail fast with a clear message.
                raise ValueError(
                    'unknown search space: {}'.format(self._search_space))

            # call back-propagation training
            performance = train_search.main(macro_genome=macro_genome,
                                            micro_genome=micro_genome,
                                            search_space=self._search_space,
                                            init_channels=self._init_channels,
                                            layers=self._layers, cutout=False,
                                            epochs=self._epochs,
                                            save='arch_{}'.format(arch_id),
                                            expr_root=self._save_dir,
                                            batch_size=self.batch_size)

            # all objectives assume to be MINIMIZED !!!!!
            objs[i, 0] = 100 - performance['valid_acc']
            print(f'valid acc - {performance["valid_acc"]}')
            objs[i, 1] = performance['flops']
            ex.log_scalar(f"arch_valid_{config_dict()['performance_measure']}", performance['valid_acc'], arch_id)
            ex.log_scalar("arch_flops", performance['flops'], arch_id)
            self._n_evaluated += 1

        out["F"] = objs
Example #2
0
    def _evaluate(self, x, out, *args, **kwargs):
        """Train and score every encoded architecture in the population.

        Decodes each row of ``x`` into a genome for the configured search
        space, trains it via ``train_search.main``, and writes the
        minimization objectives (error = 100 - valid_acc, and FLOPs) into
        ``out["F"]``.

        Args:
            x: population matrix of shape [pop_size, n_var].
            out: pymoo output container; ``out["F"]`` receives the
                [pop_size, n_obj] objective matrix.

        Raises:
            ValueError: if ``self._search_space`` is neither 'micro' nor
                'macro'.
        """
        objs = np.full((x.shape[0], self.n_obj), np.nan)

        for i in range(x.shape[0]):
            # Unique id assigned to this architecture across the whole run.
            arch_id = self._n_evaluated + 1
            print('\n')
            logging.info('Network id = {}'.format(arch_id))

            # call back-propagation training
            if self._search_space == 'micro':
                genome = micro_encoding.convert(x[i, :])
            elif self._search_space == 'macro':
                genome = macro_encoding.convert(x[i, :])
            else:
                # Previously an unknown space left `genome` unbound and
                # crashed with a NameError below; fail fast instead.
                raise ValueError(
                    'unknown search space: {}'.format(self._search_space))
            performance = train_search.main(genome=genome,
                                            search_space=self._search_space,
                                            init_channels=self._init_channels,
                                            layers=self._layers,
                                            cutout=False,
                                            epochs=self._epochs,
                                            save='arch_{}'.format(arch_id),
                                            expr_root=self._save_dir,
                                            data_path=self.data_path,
                                            dataset=self.dataset)

            # all objectives assume to be MINIMIZED !!!!!
            objs[i, 0] = 100 - performance['valid_acc']
            objs[i, 1] = performance['flops']

            self._n_evaluated += 1

        out["F"] = objs
Example #3
0
def decode_individual(individual) -> tuple:
    """Decode an individual's numpy-array genome into a human-friendly genome.

    Args:
        individual: object whose ``X`` attribute holds the encoded genome
            as a numpy array.

    Returns:
        The decoded genome produced by ``micro_encoding.decode``.
    """
    encoded = micro_encoding.convert(individual.X)
    return micro_encoding.decode(encoded)
Example #4
0
    def _evaluate(self, x, out, *args, **kwargs):
        """Main evaluation hook of the pymoo library; `out` acts as the return.

        Each architecture gets a unique id by incrementing
        ``self._n_evaluated`` once per individual.

        Args:
            x: encoding vectors, shape [pop_size, n_var]; ``x.shape[0]`` is
                the number of vectors.
            out: container dict; the ``F`` key receives the objective matrix
                (a ``G`` key would hold constraints, unused in this problem).
            *args: unnamed extra parameters (tuple).
            **kwargs: named extra parameters (dict).

        Raises:
            ValueError: if ``self._search_space`` is neither 'micro' nor
                'macro'.
        """
        objs = np.full((x.shape[0], self.n_obj), np.nan)

        # Traverse every encoding vector in x.
        for i in range(x.shape[0]):
            arch_id = self._n_evaluated + 1
            print('\n')
            logging.info('Network id = {}'.format(arch_id))

            # call back-propagation training
            if self._search_space == 'micro':
                genome = micro_encoding.convert(x[i, :])
            elif self._search_space == 'macro':
                genome = macro_encoding.convert(x[i, :])
            else:
                # Previously an unknown space left `genome` unbound and
                # crashed with a NameError below; fail fast instead.
                raise ValueError(
                    'unknown search space: {}'.format(self._search_space))

            # genome: a list per phase, gathered into one structure (examples
            # are available in the decoder files). `train_search.main` trains
            # and evaluates this specific architecture.
            performance = train_search.main(genome=genome,
                                            search_space=self._search_space,
                                            init_channels=self._init_channels,
                                            layers=self._layers,
                                            cutout=False,
                                            epochs=self._epochs,
                                            save='arch_{}'.format(arch_id),
                                            expr_root=self._save_dir)

            # all objectives assume to be MINIMIZED !!!!!
            objs[i, 0] = 100 - performance['valid_acc']
            objs[i, 1] = performance['flops']

            self._n_evaluated += 1

        out["F"] = objs
Example #5
0
def inherit_one_model(individual,
                      expr_root: str,
                      model=None,
                      args=None) -> nn.Module:
    """
    Very complicated function.
        Handles inheritance of the common components not defined in the genome of the individual.
        Also calls the function to inherit weights for the cells, weight are defined by the individual.

    TODO: Maybe document this better
    TODO: Improve Error Logging

    Args:
        individual: the individual to inherit weights
        expr_root: path as defined in args.save
        model: not used except in testing
        args: not used except in testing

    Returns:
        model
    """

    # Defined before the try-block so the error handler below can log safely
    # even when the failure happens before the parents are read.
    parents = None

    try:

        # Interpolation coefficient used when blending parent weights.
        r = np.random.uniform(-0.5, 1.5)

        parent1 = projectcode.weightmanagement.common.read_parent_by_id(
            individual.parents[0], expr_root, args)
        parent2 = projectcode.weightmanagement.common.read_parent_by_id(
            individual.parents[1], expr_root, args)

        parents = common.determine_more_fit_parent(parent1, parent2)

        genotype = micro_encoding.decode(micro_encoding.convert(individual.X))
        if model is None:
            CIFAR_CLASSES = 10
            auxiliary = False
            model = Network(args.init_channels, CIFAR_CLASSES, args.layers,
                            auxiliary, genotype)
            model = common.initialize_zero(model)

        wcom = WeightComputer(parents)

        # weight merge
        model.stem[0].weight = wcom.compute_child_weight(
            "stem.0.weight",
            r,
            inherit_rules="both",
            weight_tensor=model.stem[0].weight)

        previous_reduction = False
        pp_reduction = False
        for cell_number, cell in enumerate(model.cells):
            try:

                # Reduction cells sit at 1/3 and 2/3 of the network depth.
                reduction = cell_number in [
                    len(model.cells) // 3,
                    2 * len(model.cells) // 3,
                ]
                model = inherit_one_cell(
                    cell_number,
                    individual,
                    model,
                    parents,
                    reduce=reduction,
                    previous_reduce=previous_reduction,
                    weight_computer=wcom,
                    pp_reduce=pp_reduction,
                    r=r,
                )
                pp_reduction = previous_reduction
                previous_reduction = reduction

            except Exception:
                # Narrowed from a bare `except:`; still re-raised after
                # recording which cell failed.
                logger.warning("error in cell %i" % cell_number)
                raise

        # The classifier is inherited from whichever parent's concat layout
        # matches the child; "both" when the parents agree.
        inherit = None
        child_genome = common.decode_individual(individual)
        if parents[0]["genome"].normal_concat == parents[1][
                "genome"].normal_concat:
            inherit = "both"
        elif parents[0]["genome"].normal_concat == child_genome.normal_concat:
            inherit = "first"
        elif parents[1]["genome"].normal_concat == child_genome.normal_concat:
            inherit = "second"
        else:
            inherit = "concat_mismatch"
        assert inherit is not None, "could not determine classifier inheritance"

        key = "classifier.weight"
        model.classifier.weight = wcom.compute_child_weight(
            key, r, inherit, model.classifier.weight)
        key = "classifier.bias"
        model.classifier.bias = wcom.compute_child_weight(
            key, r, inherit, model.classifier.bias)

        common.assert_non_null_weights(model.state_dict())
    except Exception:

        # Log as much context as possible, then re-raise the original error.
        logger.warning(
            projectcode.weightmanagement.common.decode_individual(individual))
        # BUG FIX: `parents` was referenced unconditionally here; when the
        # failure happened before it was assigned, the handler itself raised
        # NameError and masked the real exception.
        if parents is not None:
            logger.warning(parents[0]["genome"])
            logger.warning(parents[1]["genome"])
        logger.warning(individual.parents)
        raise

    return model
Example #6
0
    def _evaluate(self, x, out, individuals, algorithm, *args, **kwargs):
        """Evaluate the population by pickling training specs and collecting results.

        For each individual, decodes its genome, dumps a training
        specification pickle under ``<args.save>/train_spec``, then gathers
        all results via ``self.get_evaluation`` and writes the minimization
        objectives (error = 100 - valid_acc, FLOPs) into ``out["F"]``.
        In debug mode, fixed dummy performances are used instead of training.

        Args:
            x: population matrix, shape [pop_size, n_var].
            out: pymoo output container; ``out["F"]`` receives the
                [pop_size, n_obj] objective matrix.
            individuals: individual objects parallel to the rows of ``x``;
                each gets its unique ``id`` assigned here.
            algorithm: the running pymoo algorithm (provides ``n_gen``/``pop``).

        Raises:
            KeyError: if ``self._search_space`` is not 'micro'.
        """
        self.generation = algorithm.n_gen

        if algorithm.pop is None:
            logger.info("Initializing population")
            first_gen = True
        else:
            logger.info("Evaluating New Members")
            first_gen = False

        objs = np.full((x.shape[0], self.n_obj), np.nan)

        pkl_paths = []
        debug_results = []

        logger.info("Maybe Creating Directory for Saving Training Pickles")
        Path(os.path.join(self.args.save, "train_spec")).mkdir(parents=True,
                                                               exist_ok=True)

        for i in range(x.shape[0]):

            # unique id assigned to the individual
            arch_id = self._n_evaluated + 1
            # BUG FIX: the counter was only incremented in the non-debug
            # branch, so in debug mode every individual got the same id.
            self._n_evaluated += 1

            individuals[i].id = arch_id
            individual = individuals[i]
            print("\n")
            logger.info("Network id = {}".format(arch_id))

            # call back-propagation training
            if self._search_space == "micro":
                genome = micro_encoding.convert(x[i, :])
            else:
                raise KeyError("unknown search space")

            if self.debug:
                # BUG FIX: the dummy performance dict was built and then
                # discarded, leaving `objs` all-NaN in debug mode; keep it
                # so it reaches the assignment loop below.
                debug_results.append({
                    "valid_acc": 100,
                    "params": 100,
                    "flops": 100,
                })
            else:
                logger.info("pickling training specification")
                save = "arch_{}".format(arch_id)

                spec = {
                    "genome": genome,
                    "individual": individual,
                    "save": save,
                    "args": self.args,
                    "first_gen": first_gen,
                }

                pkl_path = os.path.join(self.args.save, "train_spec",
                                        f"individual_{arch_id:05d}.pkl")
                pkl_paths.append(pkl_path)

                dump(spec, pkl_path)

        # In debug mode nothing was pickled, so use the dummy results.
        if self.debug:
            results = debug_results
        else:
            results = self.get_evaluation(pkl_paths)

        logger.info("Evaluation Complete, Assigning Results to Population")

        for i, performance in enumerate(results):

            # all objectives assume to be MINIMIZED !!!!!
            objs[i, 0] = 100 - performance["valid_acc"]
            objs[i, 1] = performance["flops"]

        out["F"] = objs