def _make_population(self, pop_size: int) -> List[Any]:
    """Generate a population of chains that satisfy the constraint function.

    Repeatedly calls ``self.chain_generation_function`` and keeps only
    chains accepted by ``constraint_function`` until ``pop_size`` chains
    are collected or the attempt budget is exhausted.

    :param pop_size: required number of chains in the population
    :return: list of valid chains (may be shorter than ``pop_size`` when
        too many generation attempts fail the constraint check)
    """
    # Bounded attempt budget: the previous implementation looped forever
    # when the constraint was never (or almost never) satisfiable.
    max_attempts = max(pop_size * 100, 1000)
    model_chains = []
    attempts = 0
    while len(model_chains) < pop_size and attempts < max_attempts:
        attempts += 1
        chain = self.chain_generation_function()
        if constraint_function(chain):
            model_chains.append(chain)
    return model_chains
def mutation(types: List[MutationTypesEnum], chain_generation_params, chain: Chain, requirements,
             max_depth: int = None) -> Any:
    """Apply a randomly chosen mutation operator to ``chain``.

    :param types: mutation types to choose from
    :param chain_generation_params: factories/parameters for node and chain creation
    :param chain: source chain (never modified in place - copies are mutated)
    :param requirements: composer requirements (mutation probability, depth, ...)
    :param max_depth: depth limit for the mutated chain; defaults to
        ``requirements.max_depth``
    :return: mutated chain, or a copy of the original when mutation is
        skipped or no constraint-satisfying mutant could be produced
    :raises ValueError: if the chosen mutation type is unknown
    """
    max_depth = max_depth if max_depth else requirements.max_depth
    mutation_prob = requirements.mutation_prob
    # Skip mutation entirely with probability (1 - mutation_prob).
    if mutation_prob and random() > mutation_prob:
        return deepcopy(chain)

    mutation_type = choice(types)  # renamed from `type` to avoid shadowing the builtin
    if mutation_type == MutationTypesEnum.none:
        return deepcopy(chain)
    if mutation_type not in mutation_by_type:
        raise ValueError(f'Required mutation type is not found: {mutation_type}')

    # Growth mutations additionally take the depth limit.
    extra_kwargs = {}
    if mutation_type in (MutationTypesEnum.growth, MutationTypesEnum.local_growth):
        extra_kwargs['max_depth'] = max_depth

    # Bounded retry loop: the previous implementation looped forever when
    # no constraint-satisfying mutant existed for this chain.
    max_attempts = 100
    for _ in range(max_attempts):
        new_chain = mutation_by_type[mutation_type](
            chain=deepcopy(chain), requirements=requirements,
            chain_generation_params=chain_generation_params, **extra_kwargs)
        if constraint_function(new_chain):
            return new_chain
    # No valid mutant found within the attempt budget - keep the original.
    return deepcopy(chain)
def random_chain(chain_generation_params, requirements, max_depth=None) -> Any:
    """Grow a random chain that satisfies the constraint function.

    :param chain_generation_params: factories for chain and node objects
    :param requirements: composer requirements (arity bounds, depth,
        primary/secondary operation sets)
    :param max_depth: depth limit; defaults to ``requirements.max_depth``
    :return: a randomly grown chain; if no valid chain is produced within
        the iteration budget, the last attempt is returned with a warning
    """
    secondary_node_func = chain_generation_params.secondary_node_func
    primary_node_func = chain_generation_params.primary_node_func
    chain_class = chain_generation_params.chain_class
    max_depth = max_depth if max_depth else requirements.max_depth

    def chain_growth(chain: Any, node_parent: Any):
        # Recursively attach a random number of children to `node_parent`.
        offspring_size = randint(requirements.min_arity, requirements.max_arity)
        for _ in range(offspring_size):
            height = chain.operator.distance_to_root_level(node_parent)
            is_max_depth_exceeded = height >= max_depth - 1
            # Below the depth limit a primary (leaf) node is still picked with 50% chance.
            is_primary_node_selected = height < max_depth - 1 and randint(0, 1)
            if is_max_depth_exceeded or is_primary_node_selected:
                primary_node = primary_node_func(operation_type=choice(requirements.primary))
                node_parent.nodes_from.append(primary_node)
                chain.add_node(primary_node)
            else:
                secondary_node = secondary_node_func(operation_type=choice(requirements.secondary))
                chain.add_node(secondary_node)
                node_parent.nodes_from.append(secondary_node)
                chain_growth(chain, secondary_node)

    # Bounded generation loop: the previous implementation retried forever
    # when the constraint could never be satisfied.
    max_iters = 1000
    chain = None
    for _ in range(max_iters):
        chain = chain_class()
        chain_root = secondary_node_func(operation_type=choice(requirements.secondary))
        chain.add_node(chain_root)
        chain_growth(chain, chain_root)
        if constraint_function(chain):
            return chain
    import warnings
    warnings.warn(f'Random chain generation failed for {max_iters} iterations.')
    return chain
def mutation(types: List[MutationTypesEnum], chain_generation_params, chain: Chain, requirements,
             log: Log, max_depth: int = None) -> Any:
    """Apply a randomly chosen mutation operator to ``chain``.

    :param types: mutation types to choose from
    :param chain_generation_params: factories/parameters for node and chain creation
    :param chain: source chain (never modified in place - copies are mutated)
    :param requirements: composer requirements (mutation probability, depth, ...)
    :param log: logger used to report an exhausted attempt budget
    :param max_depth: depth limit; defaults to ``requirements.max_depth``
    :return: mutated chain, or a copy of the original when mutation is
        skipped or no valid mutant is found within ``MAX_NUM_OF_ATTEMPTS``
    :raises ValueError: if the chosen mutation type is unknown
    """
    max_depth = max_depth if max_depth else requirements.max_depth
    mutation_prob = requirements.mutation_prob
    mutation_type = choice(types)
    if will_mutation_be_applied(mutation_prob, mutation_type):
        if mutation_type in mutation_by_type:
            for _ in range(MAX_NUM_OF_ATTEMPTS):
                new_chain = mutation_by_type[mutation_type](
                    chain=deepcopy(chain), requirements=requirements,
                    chain_generation_params=chain_generation_params,
                    max_depth=max_depth)
                if constraint_function(new_chain):
                    return new_chain
            # Log only when attempts were actually made and all failed.
            # (Previously this message also fired when mutation was simply
            # skipped or the `none` type was drawn.)
            log.debug('Number of mutation attempts exceeded. '
                      'Please check composer requirements for correctness.')
        elif mutation_type != MutationTypesEnum.none:
            raise ValueError(f'Required mutation type is not found: {mutation_type}')
    return deepcopy(chain)
def crossover(types: List[CrossoverTypesEnum], chain_first: Any, chain_second: Any, max_depth: int,
              log: Log, crossover_prob: float = 0.8) -> Any:
    """Apply a randomly chosen crossover operator to a pair of chains.

    :param types: crossover types to choose from
    :param chain_first: first parent chain
    :param chain_second: second parent chain
    :param max_depth: depth limit for offspring chains
    :param log: logger for attempt-budget and error reporting
    :param crossover_prob: probability that crossover is applied at all
    :return: list of offspring chains, or copies of the parents when
        crossover is skipped, fails, or produces no valid offspring
    """
    crossover_type = choice(types)
    try:
        if will_crossover_be_applied(chain_first, chain_second, crossover_prob, crossover_type):
            # Note: an unknown type raises inside the `try`, so it is caught
            # below, logged, and the parents are returned (original behavior).
            if crossover_type not in crossover_by_type:
                raise ValueError(f'Required crossover type not found: {crossover_type}')
            for _ in range(MAX_NUM_OF_ATTEMPTS):
                new_chains = crossover_by_type[crossover_type](
                    deepcopy(chain_first), deepcopy(chain_second), max_depth)
                if all(constraint_function(new_chain) for new_chain in new_chains):
                    return new_chains
            # Log only when attempts were actually made and all failed.
            # (Previously this message also fired when crossover was skipped.)
            log.debug('Number of crossover attempts exceeded. '
                      'Please check composer requirements for correctness.')
    except Exception as ex:
        log.error(f'Crossover ex: {ex}')
    chain_first_copy = deepcopy(chain_first)
    chain_second_copy = deepcopy(chain_second)
    return chain_first_copy, chain_second_copy
def random_graph(params, requirements, max_depth=None) -> Any:
    """Grow a random graph that satisfies the constraint function.

    :param params: graph generation parameters passed to ``constraint_function``
    :param requirements: composer requirements (arity bounds, depth,
        primary/secondary operation sets)
    :param max_depth: depth limit; defaults to ``requirements.max_depth``
    :return: a randomly grown graph; if no valid graph is produced within
        ``max_iters`` iterations, the last attempt is returned with a warning
    """
    max_depth = max_depth if max_depth else requirements.max_depth

    def graph_growth(graph: Any, node_parent: Any):
        # Recursively attach a random number of children to `node_parent`.
        offspring_size = randint(requirements.min_arity, requirements.max_arity)
        for _ in range(offspring_size):
            height = graph.operator.distance_to_root_level(node_parent)
            is_max_depth_exceeded = height >= max_depth - 1
            # Below the depth limit a primary (leaf) node is still picked with 50% chance.
            is_primary_node_selected = height < max_depth - 1 and randint(0, 1)
            if is_max_depth_exceeded or is_primary_node_selected:
                primary_node = OptNode(nodes_from=None, content=choice(requirements.primary))
                node_parent.nodes_from.append(primary_node)
                graph.add_node(primary_node)
            else:
                secondary_node = OptNode(nodes_from=[], content=choice(requirements.secondary))
                graph.add_node(secondary_node)
                node_parent.nodes_from.append(secondary_node)
                graph_growth(graph, secondary_node)

    is_correct_graph = False
    graph = None
    n_iters = 0
    # BUG FIX: the previous condition `not is_correct_graph or n_iters > max_iters`
    # kept looping forever once the iteration budget was exceeded, even after
    # a correct graph had been generated.
    while not is_correct_graph and n_iters < max_iters:
        graph = OptGraph()
        graph_root = OptNode(nodes_from=[], content=choice(requirements.secondary))
        graph.add_node(graph_root)
        graph_growth(graph, graph_root)
        is_correct_graph = constraint_function(graph, params)
        n_iters += 1
    if not is_correct_graph:
        warnings.warn(f'Random_graph generation failed for {n_iters} iterations.')
    return graph
def decremental_regularization(population: List[Individual], objective_function: Callable,
                               params: 'GraphGenerationParams', size: Optional[int] = None,
                               timer=None) -> List[Any]:
    """Build extra candidates from fitted subtrees of the population.

    Every non-root node of each individual whose subtree passes
    ``is_fitted_subtree`` (and whose root id was not seen before) is
    extracted into a standalone graph, filtered by the structural
    constraint, evaluated, and at most ``size`` best candidates returned.

    :param population: individuals to harvest subtrees from (must be non-empty)
    :param objective_function: fitness function used for evaluation
    :param params: graph generation parameters (adapter, constraints)
    :param size: cap on the number of returned candidates; defaults to
        ``len(population)``
    :param timer: optional timer forwarded to ``evaluate_individuals``
    :return: evaluated additional candidates, sorted-best-first when capped
    """
    size = size if size else len(population)
    additional_inds = []
    prev_nodes_ids = []
    for ind in population:
        ind_subtrees = [node for node in ind.graph.nodes if node != ind.graph.root_node]
        subtrees = [OptGraph(deepcopy(node.ordered_subnodes_hierarchy()))
                    for node in ind_subtrees if is_fitted_subtree(node, prev_nodes_ids)]
        # BUG FIX: record the parent operator while `ind` is the real source
        # individual. Previously this was done in a separate loop after the
        # iteration finished, so every subtree was tagged with the *last*
        # individual of the population instead of its actual parent.
        for subtree in subtrees:
            subtree.parent_operators.append(
                ParentOperator(operator_type='regularization',
                               operator_name='decremental_regularization',
                               parent_objects=[params.adapter.restore_as_template(ind.graph)]))
        additional_inds += subtrees
        prev_nodes_ids += [subtree.root_node.descriptive_id for subtree in subtrees]

    additional_inds = [ind for ind in additional_inds if constraint_function(ind, params)]
    # BUG FIX: `fitness is MultiObjFitness` compared an instance to the class
    # with identity (always False); isinstance is the intended check.
    is_multi_obj = isinstance(population[0].fitness, MultiObjFitness)
    if additional_inds:
        evaluate_individuals(additional_inds, objective_function, params, is_multi_obj, timer=timer)

    if additional_inds and len(additional_inds) > size:
        additional_inds = sorted(additional_inds, key=lambda ind: ind.fitness)[:size]

    return additional_inds
def decremental_regularization(population: List[Any], objective_function: Callable,
                               chain_class: Any, size: Optional[int] = None) -> List[Any]:
    """Build extra candidate chains from fitted subtrees of the population.

    Every non-root node of each chain whose subtree passes
    ``is_fitted_subtree`` (against root ids seen in *previous* chains) is
    turned into a standalone chain. Candidates failing the structural
    constraint are dropped, the rest are evaluated with
    ``objective_function``, and at most ``size`` best ones are returned.

    :param population: chains to harvest subtrees from
    :param objective_function: fitness function used for evaluation
    :param chain_class: factory producing a chain from a node hierarchy
    :param size: cap on the number of returned candidates; defaults to
        ``len(population)``
    :return: evaluated additional candidate chains
    """
    if not size:
        size = len(population)

    seen_root_ids = []
    candidates = []
    for individual in population:
        non_root_nodes = [node for node in individual.nodes if node != individual.root_node]
        # Duplicate filtering uses ids collected from previous individuals only,
        # so subtrees within one individual are not filtered against each other.
        fitted_nodes = [node for node in non_root_nodes if is_fitted_subtree(node, seen_root_ids)]
        new_chains = [chain_class(deepcopy(node.ordered_subnodes_hierarchy)) for node in fitted_nodes]
        candidates.extend(new_chains)
        seen_root_ids.extend(new_chain.root_node.descriptive_id for new_chain in new_chains)

    valid_candidates = [candidate for candidate in candidates if constraint_function(candidate)]
    for candidate in valid_candidates:
        candidate.fitness = objective_function(candidate)

    if valid_candidates and len(valid_candidates) > size:
        valid_candidates = sorted(valid_candidates, key=lambda candidate: candidate.fitness)[:size]

    return valid_candidates
def crossover(types: List[Union[CrossoverTypesEnum, Callable]],
              ind_first: Individual, ind_second: Individual,
              max_depth: int, log: Log,
              crossover_prob: float = 0.8,
              params: 'GraphGenerationParams' = None) -> Any:
    """Apply a (possibly custom) crossover operator to two individuals.

    :param types: crossover types or custom crossover callables to choose from
    :param ind_first: first parent individual
    :param ind_second: second parent individual
    :param max_depth: depth limit for offspring graphs
    :param log: logger for attempt-budget and error reporting
    :param crossover_prob: probability that crossover is applied at all
    :param params: graph generation parameters (adapter, constraints)
    :return: list of offspring individuals, or copies of the parents when
        crossover is skipped, fails, or produces no valid offspring
    """
    crossover_type = choice(types)
    is_custom_crossover = isinstance(crossover_type, Callable)
    try:
        if will_crossover_be_applied(ind_first.graph, ind_second.graph, crossover_prob, crossover_type):
            # An unknown type raises inside the `try`, so it is caught below,
            # logged, and the parents are returned (original behavior).
            if crossover_type not in crossover_by_type and not is_custom_crossover:
                raise ValueError(f'Required crossover type not found: {crossover_type}')
            # The operator callable does not change between attempts.
            crossover_func = crossover_type if is_custom_crossover \
                else crossover_by_type[crossover_type]
            for _ in range(MAX_NUM_OF_ATTEMPTS):
                is_custom_operator = isinstance(ind_first, OptGraph)
                input_obj_first = deepcopy(ind_first.graph)
                # BUG FIX: the second input was previously copied from
                # `ind_first.graph`, so crossover always combined two copies
                # of the same parent and never mixed genetic material.
                input_obj_second = deepcopy(ind_second.graph)
                if is_custom_operator:
                    input_obj_first = params.adapter.restore(input_obj_first)
                    input_obj_second = params.adapter.restore(input_obj_second)

                new_graphs = crossover_func(input_obj_first, input_obj_second, max_depth)
                if is_custom_operator:
                    for graph_id, graph in enumerate(new_graphs):
                        new_graphs[graph_id] = params.adapter.adapt(graph)

                are_correct = all(constraint_function(new_graph, params)
                                  for new_graph in new_graphs)
                if are_correct:
                    new_inds = []
                    for graph in new_graphs:
                        new_ind = Individual(graph)
                        new_ind.parent_operators.append(
                            ParentOperator(operator_type='crossover',
                                           operator_name=str(crossover_type),
                                           parent_objects=[
                                               params.adapter.restore_as_template(ind_first.graph),
                                               params.adapter.restore_as_template(ind_second.graph)
                                           ]))
                        new_inds.append(new_ind)
                    return new_inds
            log.debug('Number of crossover attempts exceeded. '
                      'Please check composer requirements for correctness.')
    except Exception as ex:
        log.error(f'Crossover ex: {ex}')
    graph_first_copy = deepcopy(ind_first)
    graph_second_copy = deepcopy(ind_second)
    return graph_first_copy, graph_second_copy
def _make_population(self, pop_size: int) -> List[Any]:
    """Generate up to ``pop_size`` chains that satisfy the constraint function.

    Stops early (with a debug message) once more than
    ``MAX_NUM_OF_GENERATED_INDS`` generation attempts have been made.

    :param pop_size: required number of chains in the population
    :return: list of valid chains (may be shorter than ``pop_size`` when
        the attempt budget is exhausted)
    """
    operation_chains = []
    iter_number = 0
    while len(operation_chains) < pop_size:
        iter_number += 1
        chain = self.chain_generation_function()
        if constraint_function(chain):
            operation_chains.append(chain)

        if iter_number > MAX_NUM_OF_GENERATED_INDS:
            # Typo fix in the log message ("In" -> "in"), matching the
            # graph-based implementation of this method.
            self.log.debug(
                f'More than {MAX_NUM_OF_GENERATED_INDS} generated in population making function. '
                f'Process is stopped')
            break

    return operation_chains
def _make_population(self, pop_size: int) -> List[Any]:
    """Generate up to ``pop_size`` individuals with constraint-valid graphs.

    Keeps calling ``self.graph_generation_function`` and wrapping accepted
    graphs in ``Individual``; stops early (with a debug message) once more
    than ``MAX_NUM_OF_GENERATED_INDS`` attempts have been made.

    :param pop_size: required population size
    :return: list of individuals (may be shorter than ``pop_size``)
    """
    population = []
    attempts = 0
    while len(population) < pop_size:
        attempts += 1
        candidate_graph = self.graph_generation_function()
        if constraint_function(candidate_graph, self.graph_generation_params):
            population.append(Individual(candidate_graph))
        # Attempt budget guard: the check runs after a possible append,
        # so a graph produced on the last allowed attempt is kept.
        if attempts > MAX_NUM_OF_GENERATED_INDS:
            self.log.debug(
                f'More than {MAX_NUM_OF_GENERATED_INDS} generated in population making function. '
                f'Process is stopped')
            break
    return population
def test_mutation():
    """Mutation must return an equal chain when the `none` type is used,
    when the mutation probability is zero, and when the source chain
    violates the structural constraints."""
    source_chain = chain_first()
    log = default_log(__name__)
    generation_params = ChainGenerationParams()
    task = Task(TaskTypesEnum.classification)
    primary_types, _ = OperationTypesRepository().suitable_operation(task_type=task.task_type)
    secondary_types = ['xgboost', 'knn', 'lda', 'qda']

    # Case 1: `none` mutation with probability 1 keeps the chain intact.
    requirements = GPComposerRequirements(primary=primary_types,
                                          secondary=secondary_types,
                                          mutation_prob=1)
    mutated = mutation([MutationTypesEnum.none], generation_params, source_chain,
                       requirements, log=log, max_depth=3)
    assert mutated == source_chain

    # Case 2: zero mutation probability keeps the chain intact too.
    requirements = GPComposerRequirements(primary=primary_types,
                                          secondary=secondary_types,
                                          mutation_prob=0)
    mutated = mutation([MutationTypesEnum.growth], generation_params, source_chain,
                       requirements, log=log, max_depth=3)
    assert mutated == source_chain

    # Case 3: a constraint-violating chain is returned unchanged.
    source_chain = chain_fifth()
    assert not constraint_function(source_chain)
    mutated = mutation([MutationTypesEnum.growth], generation_params, source_chain,
                       requirements, log=log, max_depth=3)
    assert mutated == source_chain
def crossover(types: List[CrossoverTypesEnum], chain_first: Any, chain_second: Any, max_depth: int,
              crossover_prob: float = 0.8) -> Any:
    """Apply a randomly chosen crossover operator to a pair of chains.

    :param types: crossover types to choose from
    :param chain_first: first parent chain
    :param chain_second: second parent chain
    :param max_depth: depth limit for offspring chains
    :param crossover_prob: probability that crossover is applied at all
    :return: offspring chains, or copies of the parents when crossover is
        skipped, fails, or produces no valid offspring within the budget
    """
    crossover_type = choice(types)  # renamed from `type` to avoid shadowing the builtin
    chain_first_copy = deepcopy(chain_first)
    chain_second_copy = deepcopy(chain_second)
    try:
        # Skip crossover for identical parents, with probability
        # (1 - crossover_prob), or when the `none` type was drawn.
        if chain_first is chain_second or random() > crossover_prob \
                or crossover_type == CrossoverTypesEnum.none:
            return [chain_first_copy, chain_second_copy]
        # An unknown type raises inside the `try`, so it is caught below,
        # printed, and the parent copies are returned (original behavior).
        if crossover_type not in crossover_by_type:
            raise ValueError(f'Required crossover not found: {crossover_type}')
        # Bounded retry loop: the previous implementation looped forever
        # when no constraint-satisfying offspring existed.
        max_attempts = 100
        for _ in range(max_attempts):
            new_chains = crossover_by_type[crossover_type](chain_first_copy, chain_second_copy,
                                                           max_depth)
            if all(constraint_function(new_chain) for new_chain in new_chains):
                return new_chains
    except Exception as ex:
        print(f'Crossover ex: {ex}')
    return chain_first_copy, chain_second_copy