Example #1
    def _initialize(self):
        """
        Randomly initialize all parameters of the module based on the range of parameters allowed by the config_params
        variable.
        """
        # Uniform randomly set module parameters
        self.merge_method = random.choice(self.config_params['merge_method'])
        self.merge_method['config']['dtype'] = self.dtype
        random_filters = random.randint(self.config_params['filters']['min'],
                                        self.config_params['filters']['max'])
        self.filters = round_with_step(random_filters,
                                       self.config_params['filters']['min'],
                                       self.config_params['filters']['max'],
                                       self.config_params['filters']['step'])
        self.kernel_size = random.choice(self.config_params['kernel_size'])
        self.strides = random.choice(self.config_params['strides'])
        self.padding = random.choice(self.config_params['padding'])
        self.activation = random.choice(self.config_params['activation'])
        self.kernel_init = random.choice(self.config_params['kernel_init'])
        self.bias_init = random.choice(self.config_params['bias_init'])
        self.max_pool_flag = random.random() < self.config_params['max_pool_flag']
        self.max_pool_size = random.choice(self.config_params['max_pool_size'])
        self.dropout_flag = random.random() < self.config_params['dropout_flag']
        random_dropout_rate = random.uniform(
            self.config_params['dropout_rate']['min'],
            self.config_params['dropout_rate']['max'])
        self.dropout_rate = round_with_step(
            random_dropout_rate, self.config_params['dropout_rate']['min'],
            self.config_params['dropout_rate']['max'],
            self.config_params['dropout_rate']['step'])
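
Both this and the following examples rely on a round_with_step helper that is not part of the listing. A minimal
sketch of its assumed behaviour (snapping a value to the closest multiple of the configured step and clamping it to
the [minimum, maximum] range) could look like this:

def round_with_step(value, minimum, maximum, step):
    """Assumed helper: snap value to the closest step increment and clamp it to [minimum, maximum]."""
    # Round the offset from the minimum to the nearest whole number of steps
    step_count = round((value - minimum) / step)
    stepped_value = minimum + step_count * step
    # Clamp the stepped value into the allowed range
    return max(minimum, min(maximum, stepped_value))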
Example #2
    def create_crossover(
            self, offspring_id, less_fit_module,
            max_degree_of_mutation) -> CoDeepNEATModuleConv2DMaxPool2DDropout:
        """
        Create a crossed-over Conv2DMaxPool2DDropout module and return it. Carry over the parameters of the fitter
        parent for categorical parameters and average the parameters of both modules for sortable parameters.
        @param offspring_id: int of unique module ID of the offspring
        @param less_fit_module: second Conv2DMaxPool2DDropout module with lower fitness
        @param max_degree_of_mutation: float between 0 and 1 specifying the maximum degree of mutation
        @return: instantiated Conv2DMaxPool2DDropout module with crossed over parameters
        """
        # Create offspring parameters by carrying over parameters of fitter parent for categorical parameters and
        # calculating parameter average between both modules for sortable parameters
        offspring_params = dict()

        # Create the dict that keeps track of the mutations occurring for the offspring
        parent_mutation = {
            'parent_id': (self.module_id, less_fit_module.get_id()),
            'mutation': 'crossover'
        }

        offspring_params['merge_method'] = self.merge_method
        offspring_params['filters'] = round_with_step(
            int((self.filters + less_fit_module.filters) / 2),
            self.config_params['filters']['min'],
            self.config_params['filters']['max'],
            self.config_params['filters']['step'])
        offspring_params['kernel_size'] = self.kernel_size
        offspring_params['strides'] = self.strides
        offspring_params['padding'] = self.padding
        offspring_params['activation'] = self.activation
        offspring_params['kernel_init'] = self.kernel_init
        offspring_params['bias_init'] = self.bias_init
        offspring_params['max_pool_flag'] = self.max_pool_flag
        offspring_params['max_pool_size'] = self.max_pool_size
        offspring_params['dropout_flag'] = self.dropout_flag
        crossed_over_dropout_rate = round_with_step(
            ((self.dropout_rate + less_fit_module.dropout_rate) / 2),
            self.config_params['dropout_rate']['min'],
            self.config_params['dropout_rate']['max'],
            self.config_params['dropout_rate']['step'])
        offspring_params['dropout_rate'] = crossed_over_dropout_rate

        return CoDeepNEATModuleConv2DMaxPool2DDropout(
            config_params=self.config_params,
            module_id=offspring_id,
            parent_mutation=parent_mutation,
            dtype=self.dtype,
            **offspring_params)
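
As a concrete illustration of the crossover of a sortable parameter (hypothetical parent values, using the
round_with_step sketch above): with parent filter counts of 48 and 80 and a filter step of 16, the offspring
receives the averaged and step-rounded value 64.

# Hypothetical filter configuration and parent values, for illustration only
filters_config = {'min': 16, 'max': 256, 'step': 16}
fitter_filters, less_fit_filters = 48, 80
offspring_filters = round_with_step(int((fitter_filters + less_fit_filters) / 2),
                                    filters_config['min'],
                                    filters_config['max'],
                                    filters_config['step'])
print(offspring_filters)  # 64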
Example #3
    def create_mutation(self,
                        offspring_id,
                        max_degree_of_mutation) -> CoDeepNEATModuleConv2D:
        """
        Create a mutated Conv2D module and return it. Categorical parameters are chosen randomly from all available
        values. Sortable parameters are perturbed through a random normal distribution with the current value as the
        mean and the config-specified stddev.
        @param offspring_id: int of unique module ID of the offspring
        @param max_degree_of_mutation: float between 0 and 1 specifying the maximum degree of mutation
        @return: instantiated Conv2D module with mutated parameters
        """
        # Copy the parameters of this parent module for the parameters of the offspring
        offspring_params = {'merge_method': self.merge_method,
                            'filters': self.filters,
                            'kernel_size': self.kernel_size,
                            'strides': self.strides,
                            'padding': self.padding,
                            'activation': self.activation,
                            'kernel_init': self.kernel_init,
                            'bias_init': self.bias_init}

        # Create the dict that keeps track of the mutations occurring for the offspring
        parent_mutation = {'parent_id': self.module_id,
                           'mutation': 'mutation',
                           'mutated_params': dict()}

        # Determine the exact integer number of parameters to be mutated, though the minimum is 1
        param_mutation_count = math.ceil(max_degree_of_mutation * 8)

        # Uniform randomly choose the parameters to be mutated
        parameters_to_mutate = random.sample(range(8), k=param_mutation_count)

        # Mutate offspring parameters. Categorical parameters are chosen randomly from all available values. Sortable
        # parameters are perturbed through a random normal distribution with the current value as mean and the config
        # specified stddev
        for param_to_mutate in parameters_to_mutate:
            if param_to_mutate == 0:
                offspring_params['merge_method'] = random.choice(self.config_params['merge_method'])
                parent_mutation['mutated_params']['merge_method'] = self.merge_method
            elif param_to_mutate == 1:
                perturbed_filters = int(np.random.normal(loc=self.filters,
                                                         scale=self.config_params['filters']['stddev']))
                offspring_params['filters'] = round_with_step(perturbed_filters,
                                                              self.config_params['filters']['min'],
                                                              self.config_params['filters']['max'],
                                                              self.config_params['filters']['step'])
                parent_mutation['mutated_params']['filters'] = self.filters
            elif param_to_mutate == 2:
                offspring_params['kernel_size'] = random.choice(self.config_params['kernel_size'])
                parent_mutation['mutated_params']['kernel_size'] = self.kernel_size
            elif param_to_mutate == 3:
                offspring_params['strides'] = random.choice(self.config_params['strides'])
                parent_mutation['mutated_params']['strides'] = self.strides
            elif param_to_mutate == 4:
                offspring_params['padding'] = random.choice(self.config_params['padding'])
                parent_mutation['mutated_params']['padding'] = self.padding
            elif param_to_mutate == 5:
                offspring_params['activation'] = random.choice(self.config_params['activation'])
                parent_mutation['mutated_params']['activation'] = self.activation
            elif param_to_mutate == 6:
                offspring_params['kernel_init'] = random.choice(self.config_params['kernel_init'])
                parent_mutation['mutated_params']['kernel_init'] = self.kernel_init
            elif param_to_mutate == 7:
                offspring_params['bias_init'] = random.choice(self.config_params['bias_init'])
                parent_mutation['mutated_params']['bias_init'] = self.bias_init

        return CoDeepNEATModuleConv2D(config_params=self.config_params,
                                      module_id=offspring_id,
                                      parent_mutation=parent_mutation,
                                      dtype=self.dtype,
                                      **offspring_params)
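
The number of parameters mutated by create_mutation scales with max_degree_of_mutation. For example (hypothetical
degree value), a degree of 0.3 applied to the 8 evolvable Conv2D parameters mutates math.ceil(0.3 * 8) = 3 uniformly
chosen parameters:

import math
import random

max_degree_of_mutation = 0.3  # hypothetical degree of mutation between 0 and 1
param_mutation_count = math.ceil(max_degree_of_mutation * 8)  # -> 3
parameters_to_mutate = random.sample(range(8), k=param_mutation_count)
print(param_mutation_count, parameters_to_mutate)  # e.g. 3 [5, 0, 2]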
Example #4
    def _create_initial_blueprint(
            self, initial_node_species) -> (int, CoDeepNEATBlueprint):
        """
        Create a minimal initial blueprint consisting of an input node, a single node of the supplied initial node
        species, a connection between the two and a randomly parametrized optimizer.
        @param initial_node_species: module species assigned to the only non-input node of the blueprint graph
        @return: tuple of the blueprint ID and the newly created CoDeepNEATBlueprint
        """
        # Create the dict that keeps track of the way a blueprint has been mutated or created
        parent_mutation = {'parent_id': None, 'mutation': 'init'}

        # Create a minimal blueprint graph with node 1 being the input node (having no species) and node 2 being the
        # random initial node species
        blueprint_graph = dict()
        gene_id, gene = self.enc.create_blueprint_node(node=1, species=None)
        blueprint_graph[gene_id] = gene
        gene_id, gene = self.enc.create_blueprint_node(
            node=2, species=initial_node_species)
        blueprint_graph[gene_id] = gene
        gene_id, gene = self.enc.create_blueprint_conn(conn_start=1,
                                                       conn_end=2)
        blueprint_graph[gene_id] = gene

        # Randomly choose an optimizer from the available optimizers and create the parameter config dict of it
        chosen_optimizer = random.choice(self.available_optimizers)
        available_optimizer_params = self.available_opt_params[chosen_optimizer]

        # Declare container collecting the specific parameters of the optimizer to be created, setting the just chosen
        # optimizer class
        chosen_optimizer_params = {
            'class_name': chosen_optimizer,
            'config': dict()
        }

        # Traverse each possible parameter option and determine a uniformly random value depending on whether it is a
        # categorical, sortable or boolean value
        for opt_param, opt_param_val_range in available_optimizer_params.items():
            # If the optimizer parameter is a categorical value choose randomly from the list
            if isinstance(opt_param_val_range, list):
                chosen_optimizer_params['config'][opt_param] = random.choice(
                    opt_param_val_range)
            # If the optimizer parameter is sortable, create a random value between the min and max values adhering
            # to the configured step
            elif isinstance(opt_param_val_range, dict):
                if isinstance(opt_param_val_range['min'], int) and isinstance(opt_param_val_range['max'], int) \
                        and isinstance(opt_param_val_range['step'], int):
                    opt_param_random = random.randint(
                        opt_param_val_range['min'], opt_param_val_range['max'])
                    chosen_opt_param = round_with_step(
                        opt_param_random, opt_param_val_range['min'],
                        opt_param_val_range['max'],
                        opt_param_val_range['step'])
                elif isinstance(opt_param_val_range['min'], float) and isinstance(opt_param_val_range['max'], float) \
                        and isinstance(opt_param_val_range['step'], float):
                    opt_param_random = random.uniform(
                        opt_param_val_range['min'], opt_param_val_range['max'])
                    chosen_opt_param = round_with_step(
                        opt_param_random, opt_param_val_range['min'],
                        opt_param_val_range['max'],
                        opt_param_val_range['step'])
                else:
                    raise NotImplementedError(
                        f"Config parameter '{opt_param}' of the {chosen_optimizer} optimizer "
                        f"section is of type dict though the dict values are not of type int or "
                        f"float")
                chosen_optimizer_params['config'][opt_param] = chosen_opt_param
            # If the optimizer parameter is a binary value it is specified as a float with the probability of that
            # parameter being set to True
            elif isinstance(opt_param_val_range, float):
                chosen_optimizer_params['config'][opt_param] = random.random() < opt_param_val_range
            else:
                raise NotImplementedError(
                    f"Config parameter '{opt_param}' of the {chosen_optimizer} optimizer section "
                    f"is not one of the valid types of list, dict or float")

        # Create new optimizer through encoding
        optimizer_factory = self.enc.create_optimizer_factory(
            optimizer_parameters=chosen_optimizer_params)

        # Create just defined initial blueprint through encoding
        return self.enc.create_blueprint(blueprint_graph=blueprint_graph,
                                         optimizer_factory=optimizer_factory,
                                         parent_mutation=parent_mutation)
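
The structure of available_opt_params is not shown in the listing; the branches above imply that categorical options
are given as lists, sortable ranges as dicts with min/max/step keys (plus a stddev used for perturbation in Example
#5), and boolean flags as a float probability. A hypothetical config illustrating that assumed structure:

# Hypothetical optimizer parameter ranges, for illustration only
available_opt_params = {
    'SGD': {
        'learning_rate': {'min': 0.0001, 'max': 0.1, 'step': 0.0001, 'stddev': 0.01},
        'momentum': {'min': 0.3, 'max': 0.99, 'step': 0.01, 'stddev': 0.05},
        'nesterov': 0.5,  # probability of this flag being set to True
    },
    'Adam': {
        'learning_rate': {'min': 0.0001, 'max': 0.1, 'step': 0.0001, 'stddev': 0.01},
        'beta_1': {'min': 0.6, 'max': 1.0, 'step': 0.05, 'stddev': 0.1},
    },
}
available_optimizers = list(available_opt_params.keys())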
Example #5
    def _create_mutated_blueprint_optimizer(self, parent_blueprint):
        """
        Create a mutated offspring blueprint by copying the parent blueprint graph and either perturbing the parent
        optimizer's parameters or replacing it with a randomly parametrized optimizer of a different type.
        @param parent_blueprint: CoDeepNEATBlueprint serving as the parent whose optimizer is mutated
        @return: offspring blueprint with an identical blueprint graph and a modified optimizer_factory
        """
        # Copy the parameters of the parent blueprint for the offspring
        blueprint_graph, optimizer_factory = parent_blueprint.copy_parameters()
        parent_opt_params = optimizer_factory.get_parameters()

        # Create the dict that keeps track of the way a blueprint has been mutated
        parent_mutation = {
            'parent_id': parent_blueprint.get_id(),
            'mutation': 'optimizer',
            'mutated_params': parent_opt_params
        }

        # Randomly choose type of offspring optimizer and declare container collecting the specific parameters of
        # the offspring optimizer, setting only the chosen optimizer class
        offspring_optimizer_type = random.choice(self.available_optimizers)
        available_opt_params = self.available_opt_params[offspring_optimizer_type]
        offspring_opt_params = {
            'class_name': offspring_optimizer_type,
            'config': dict()
        }

        if offspring_optimizer_type == parent_opt_params['class_name']:
            ## Mutation of the existing optimizer's parameters ##
            # Traverse each possible parameter option and determine a uniformly random value if it is a categorical
            # parameter, or perturb the parent parameter if it is sortable.
            for opt_param, opt_param_val_range in available_opt_params.items():
                # If the optimizer parameter is a categorical value choose randomly from the list
                if isinstance(opt_param_val_range, list):
                    offspring_opt_params['config'][opt_param] = random.choice(
                        opt_param_val_range)
                # If the optimizer parameter is sortable, create a random value between the min and max values adhering
                # to the configured step
                elif isinstance(opt_param_val_range, dict):
                    if isinstance(opt_param_val_range['min'], int) \
                            and isinstance(opt_param_val_range['max'], int) \
                            and isinstance(opt_param_val_range['step'], int):
                        perturbed_param = int(
                            np.random.normal(
                                loc=parent_opt_params['config'][opt_param],
                                scale=opt_param_val_range['stddev']))
                        chosen_opt_param = round_with_step(
                            perturbed_param, opt_param_val_range['min'],
                            opt_param_val_range['max'],
                            opt_param_val_range['step'])
                    elif isinstance(opt_param_val_range['min'], float) \
                            and isinstance(opt_param_val_range['max'], float) \
                            and isinstance(opt_param_val_range['step'], float):
                        perturbed_param = np.random.normal(
                            loc=parent_opt_params['config'][opt_param],
                            scale=opt_param_val_range['stddev'])
                        chosen_opt_param = round_with_step(
                            perturbed_param, opt_param_val_range['min'],
                            opt_param_val_range['max'],
                            opt_param_val_range['step'])
                    else:
                        raise NotImplementedError(
                            f"Config parameter '{opt_param}' of the {offspring_optimizer_type} "
                            f"optimizer section is of type dict though the dict values are not "
                            f"of type int or float")
                    offspring_opt_params['config'][opt_param] = chosen_opt_param
                # If the optimizer parameter is a binary value it is specified as a float with the probability of that
                # parameter being set to True
                elif isinstance(opt_param_val_range, float):
                    offspring_opt_params['config'][opt_param] = random.random() < opt_param_val_range
                else:
                    raise NotImplementedError(
                        f"Config parameter '{opt_param}' of the {offspring_optimizer_type} "
                        f"optimizer section is not one of the valid types of list, dict or float"
                    )

        else:
            ## Creation of a new optimizer with random parameters ##
            # Traverse each possible parameter option and determine a uniformly random value depending on whether it
            # is a categorical, sortable or boolean value
            for opt_param, opt_param_val_range in available_opt_params.items():
                # If the optimizer parameter is a categorical value choose randomly from the list
                if isinstance(opt_param_val_range, list):
                    offspring_opt_params['config'][opt_param] = random.choice(
                        opt_param_val_range)
                # If the optimizer parameter is sortable, create a random value between the min and max values adhering
                # to the configured step
                elif isinstance(opt_param_val_range, dict):
                    if isinstance(opt_param_val_range['min'], int) \
                            and isinstance(opt_param_val_range['max'], int) \
                            and isinstance(opt_param_val_range['step'], int):
                        opt_param_random = random.randint(
                            opt_param_val_range['min'],
                            opt_param_val_range['max'])
                        chosen_opt_param = round_with_step(
                            opt_param_random, opt_param_val_range['min'],
                            opt_param_val_range['max'],
                            opt_param_val_range['step'])
                    elif isinstance(opt_param_val_range['min'], float) \
                            and isinstance(opt_param_val_range['max'], float) \
                            and isinstance(opt_param_val_range['step'], float):
                        opt_param_random = random.uniform(
                            opt_param_val_range['min'],
                            opt_param_val_range['max'])
                        chosen_opt_param = round_with_step(
                            opt_param_random, opt_param_val_range['min'],
                            opt_param_val_range['max'],
                            opt_param_val_range['step'])
                    else:
                        raise NotImplementedError(
                            f"Config parameter '{opt_param}' of the {offspring_optimizer_type} "
                            f"optimizer section is of type dict though the dict values are not "
                            f"of type int or float")
                    offspring_opt_params['config'][opt_param] = chosen_opt_param
                # If the optimizer parameter is a binary value it is specified as a float with the probability of that
                # parameter being set to True
                elif isinstance(opt_param_val_range, float):
                    offspring_opt_params['config'][opt_param] = random.random() < opt_param_val_range
                else:
                    raise NotImplementedError(
                        f"Config parameter '{opt_param}' of the {offspring_optimizer_type} "
                        f"optimizer section is not one of the valid types of list, dict or float"
                    )

        # Create new optimizer through encoding, using either the perturbed parent parameters or newly created random
        # parameters
        optimizer_factory = self.enc.create_optimizer_factory(
            optimizer_parameters=offspring_opt_params)

        # Create and return the offspring blueprint with identical blueprint graph and modified optimizer_factory
        return self.enc.create_blueprint(blueprint_graph=blueprint_graph,
                                         optimizer_factory=optimizer_factory,
                                         parent_mutation=parent_mutation)
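
The essential difference between the two branches of Example #5 is how sortable parameters are produced: with the
same optimizer class as the parent they are perturbed around the parent value, otherwise they are drawn uniformly
from the configured range. A condensed sketch with hypothetical values, reusing the round_with_step sketch from above:

import random
import numpy as np

parent_lr = 0.01
lr_range = {'min': 0.0001, 'max': 0.1, 'step': 0.0001, 'stddev': 0.01}

# Same optimizer class as the parent: perturb the parent value with the configured stddev
perturbed_lr = round_with_step(np.random.normal(loc=parent_lr, scale=lr_range['stddev']),
                               lr_range['min'], lr_range['max'], lr_range['step'])

# Different optimizer class: draw a fresh uniform value from the configured range
fresh_lr = round_with_step(random.uniform(lr_range['min'], lr_range['max']),
                           lr_range['min'], lr_range['max'], lr_range['step'])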