Example #1
    def __init__(self,
                 dataset,
                 batch_size,
                 n_samples,
                 lr,
                 weight_decay,
                 n_epochs,
                 n_neurons_per_layer,
                 n_hidden_layers,
                 is_cuda,
                 beta,
                 n_repetitions=1,
                 backprop_report=Mock()):
        self.config = get_configuration()
        self.is_cuda = is_cuda
        self.dataset = dataset
        self.batch_size = batch_size
        self.n_samples = n_samples
        self.lr = lr
        self.weight_decay = weight_decay
        self.n_epochs = n_epochs
        self.n_neurons_per_layer = n_neurons_per_layer
        self.n_hidden_layers = n_hidden_layers
        self.beta = beta
        self.n_repetitions = n_repetitions
        self.backprop_report = backprop_report

        self.last_update = 0
        self.best_loss_val = 100000
        self.best_loss_val_rep = None
        self.best_network_rep = None
        self.best_network = None
Example #2
    def __init__(self):
        self.config = get_configuration()
        self.compatibility_weight_coefficient = self.config.compatibility_weight_coefficient
        self.compatibility_disjoint_coefficient = self.config.compatibility_disjoint_coefficient
        self.distances = {}
        self.hits = 0
        self.misses = 0
Example #3
    def __init__(self):
        self.config = get_configuration()

        self.species_fitness_function = self._get_species_fitness_function(
            function_name=self.config.species_fitness_function)
        self.species_elitism = self.config.species_elitism
        self.max_stagnation = self.config.max_stagnation
Example #4
    def __init__(self, key, type):
        self.key = key
        self.type = type
        self.parameter_name = PARAMETERS_NAMES[type]
        self.crossover_attributes = copy.deepcopy(CROSSOVER_ATTRIBUTES)
        self.mutation_attributes = copy.deepcopy(MUTATION_ATTRIBUTES)

        self.config = get_configuration()
        self.is_discrete = self.config.is_discrete
        self.fix_std = self.config.fix_std
        if not self.fix_std:
            self.mutation_attributes.append('std')

        self.single_structural_mutation = self.config.single_structural_mutation
        self.mutate_rate = self.config.mutate_rate
        self.mutate_power = self.config.mutate_power
        self.replace_rate = self.config.replace_rate

        self.mean_name = f'_{self.parameter_name}_mean'
        self.var_name = f'_{self.parameter_name}_var'
        self.std_name = f'_{self.parameter_name}_std'
        self.log_var_name = f'_{self.parameter_name}_log_var'

        setattr(self, self.mean_name, None)
        setattr(self, self.var_name, None)
        setattr(self, self.log_var_name, None)
        setattr(self, self.std_name, None)
Example #5
    def __init__(self):
        self.config = get_configuration()
        self.indexer = count(1)
        self.species = {}
        self.genome_to_species = {}

        self.gdmean = None
        self.gdstdev = None
Example #6
    def __init__(self, key):
        super().__init__(key=key, type=NODE_TYPE)
        self.key = key
        self.config = get_configuration()

        self.activation = self.config.node_activation
        self.aggregation = self.config.node_aggregation

        self.bias_configuration = BiasConfig()
Example #7
def get_data_loader(dataset: Dataset, batch_size=None):
    config = get_configuration()
    parallel_evaluation = config.parallel_evaluation
    if not parallel_evaluation:
        return DataLoader(dataset,
                          batch_size=batch_size,
                          shuffle=True,
                          num_workers=1)
    else:
        return CustomDataLoader(dataset, shuffle=True)
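A minimal usage sketch (assumption: `train_dataset` is a placeholder for any torch Dataset instance, and the batch size is illustrative). When `parallel_evaluation` is disabled in the configuration, the call falls through to a standard shuffled DataLoader.

# Hedged sketch, not taken from the examples above.
data_loader = get_data_loader(dataset=train_dataset, batch_size=64)
for x_batch, y_batch in data_loader:
    pass  # forward pass / evaluation would go here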
Example #8
    def __init__(self):
        self.config = get_configuration()
        self.indexer = count(1)
        self.n_species = self.config.n_species
        self.species = {}
        self.genome_to_species = {}

        self.compatibility_threshold = self.config.compatibility_threshold

        self.gdmean = None
        self.gdstdev = None
Example #9
    def __init__(self):
        self.config = get_configuration()

        self.fix_architecture = self.config.fix_architecture
        self.single_structural_mutation = self.config.single_structural_mutation

        self.architecture_mutation_power = self.config.architecture_mutation_power
        self.node_add_prob = self.config.node_add_prob
        self.node_delete_prob = self.config.node_delete_prob
        self.connection_add_prob = self.config.connection_add_prob
        self.connection_delete_prob = self.config.connection_delete_prob
Example #10
    def __init__(self, key):
        '''
        key: must be a tuple of node keys (origin_node_key, destination_node_key)
        '''
        if not isinstance(key, tuple):
            raise ValueError('Key needs to be a tuple')
        super().__init__(key=key, type=CONNECTION_TYPE)
        self.key = key
        self.enabled = True
        self.config = get_configuration()
        self.weight_configuration = WeightConfig()
Example #11
    def show_metrics_best(self):
        # only meaningful for classification
        config = get_configuration()
        if config.problem_type == 'regression':
            # loss = nn.MSELoss()
            pass
        elif config.problem_type == 'classification':
            self._show_classification_metrics(config)
        else:
            raise ConfigError(
                f'Problem Type is incorrect: {config.problem_type}')
Example #12
def get_beta(beta_type, m, batch_idx, epoch, n_epochs):
    if beta_type == 'Blundell':
        beta = 2**(m - (batch_idx + 1)) / (2**m - 1)
    elif beta_type == 'Soenderby':
        beta = min(epoch / (n_epochs // 4), 1)
    elif beta_type == 'Standard':
        beta = 1 / m
    else:
        beta = get_configuration().beta

    return beta
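A hedged sketch of how the returned beta is typically used: it scales the KL (complexity) term of a per-minibatch loss, in the spirit of Blundell et al.'s minibatch weighting. The data loader, loss terms, and loop variables below are illustrative placeholders, not taken from the examples above.

# Hedged sketch: beta-weighted complexity cost per minibatch.
n_batches = len(data_loader)
for batch_idx, (x_batch, y_batch) in enumerate(data_loader):
    beta = get_beta(beta_type='Blundell', m=n_batches,
                    batch_idx=batch_idx, epoch=epoch, n_epochs=n_epochs)
    loss = nll + beta * kl_qw_pw  # data-fit term plus beta-weighted KL term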
Example #13
def get_neat_logger(path=None):
    config = get_configuration()
    levels = {level: bool(getattr(config, f'log_{level}', False))
              for level in NEAT_LEVELS}

    logger = get_logger(path=path, levels=levels)
    return logger
Example #14
    def __init__(self):
        config = get_configuration()
        self.weight_mean_init_mean = config.weight_mean_init_mean
        self.weight_mean_init_std = config.weight_mean_init_std
        self.weight_mean_max_value = config.weight_mean_max_value
        self.weight_mean_min_value = config.weight_mean_min_value

        self.weight_std_init_mean = config.weight_std_init_mean
        self.weight_std_init_std = config.weight_std_init_std
        self.weight_std_max_value = config.weight_std_max_value
        self.weight_std_min_value = config.weight_std_min_value

        # Note: these mutation settings are hard-coded rather than read from the configuration.
        self.weight_mutate_power = 0.5
        self.weight_mutate_rate = 0.8
        self.weight_replace_rate = 0.1
Example #15
    def __init__(self, stagnation_engine: Stagnation):
        self.stagnation_engine = stagnation_engine
        self.crossover = Crossover()
        self.architecture_mutation = ArchitectureMutation()

        self.config = get_configuration()
        self.mutation_type = self.config.mutation_type
        self.params_mutation = self._initialize_params_mutation(
            mutation_type=self.mutation_type, config=self.config)
        self.pop_size = self.config.pop_size
        self.min_species_size = self.config.min_species_size
        self.elitism = self.config.elitism
        self.survival_threshold = self.config.survival_threshold

        self.genome_indexer = count(1)
        self.ancestors = {}
Example #16
    def __init__(self, testing=False, batch_size=None):
        self.config = get_configuration()
        self.testing = testing
        self.batch_size = batch_size if batch_size is not None else self.config.batch_size
        self.parallel_evaluation = self.config.parallel_evaluation
        self.is_gpu = self.config.is_gpu

        self.dataset = None
        self.data_loader = None
        self.loss = None

        if self.parallel_evaluation:
            self.n_processes = self._get_n_processes()
            self.pool = Pool(processes=self.n_processes,
                             initializer=process_initialization,
                             initargs=(self.config.dataset, True))
Example #17
    def __init__(self):
        config = get_configuration()
        self.bias_mean_init_mean = config.bias_mean_init_mean
        self.bias_mean_init_std = config.bias_mean_init_std

        self.bias_std_init_mean = config.bias_std_init_mean
        self.bias_std_init_std = config.bias_std_init_std

        self.bias_mean_max_value = config.bias_mean_max_value
        self.bias_mean_min_value = config.bias_mean_min_value

        self.bias_std_max_value = config.bias_std_max_value
        self.bias_std_min_value = config.bias_std_min_value

        self.bias_mutate_power = config.bias_mutate_power
        self.bias_mutate_rate = config.bias_mutate_rate
        self.bias_replace_rate = config.bias_replace_rate
Example #18
def compute_kl_qw_pw_by_sum(genome: Genome):
    # get prior configuration
    config = get_configuration()
    kl_qw_pw = 0.0

    for key, node in genome.node_genes.items():
        pb = Normal(loc=config.bias_mean_prior, scale=config.bias_std_prior)
        qb = Normal(loc=node.get_mean(), scale=np.exp(1.0 + node.get_std()))
        kl_qw_pw += kl_divergence(qb, pb)

    for key, connection in genome.connection_genes.items():
        pw = Normal(loc=config.weight_mean_prior,
                    scale=config.weight_std_prior)
        qw = Normal(loc=connection.get_mean(),
                    scale=np.exp(1.0 + connection.get_std()))
        kl_qw_pw += kl_divergence(qw, pw)
    return kl_qw_pw
Example #19
    def __init__(self,
                 report: EvolutionReport,
                 notifier: Notifier,
                 is_cuda: bool = False):
        self.report = report
        self.notifier = notifier
        self.is_cuda = is_cuda

        self.population_engine = PopulationEngine(
            stagnation_engine=Stagnation())
        # self.speciation_engine = SpeciationEngine()
        self.speciation_engine = FixSpeciationEngine()
        self.evaluation_engine = EvaluationStochasticEngine()
        self.evolution_configuration = get_configuration()

        self.n_generations = self.evolution_configuration.n_generations

        self.population = None

        self.start_time = time.perf_counter()
Example #20
    def __init__(self, key, id=None, genome_config=None):
        self.describe_with_parameters = False
        self.key = key
        self.id = str(uuid.uuid4()) if id is None else id

        self.genome_config = (get_configuration()
                              if genome_config is None else genome_config)
        self.n_input = self.genome_config.n_input
        self.n_output = self.genome_config.n_output
        self.initial_nodes_sample = self.genome_config.initial_nodes_sample

        self.output_nodes_keys = self.get_output_nodes_keys()
        self.input_nodes_keys = self.get_input_nodes_keys()

        self.connection_genes = {}
        self.node_genes = {}

        self.n_weight_parameters = None
        self.n_bias_parameters = None

        self.node_counter = None

        self.fitness = None
Example #21
    @staticmethod
    def create_from_julia_dict(genome_dict: dict):
        config = get_configuration()
        genome = Genome(key=genome_dict["key"], id=None, genome_config=config)

        # reconstruct nodes and connections
        connection_genes_dict = genome_dict['connections']
        for key_str, connection_gene_dict in connection_genes_dict.items():
            connection_key = Genome._get_connection_key_from_key_str(key_str)
            connection_gene = ConnectionGene(key=connection_key)
            connection_gene.set_mean(connection_gene_dict['mean_weight'])
            connection_gene.set_std(connection_gene_dict['std_weight'])
            genome.connection_genes[connection_gene.key] = connection_gene

        node_genes_dict = genome_dict['nodes']
        for key_str, node_gene_dict in node_genes_dict.items():
            node_key = int(key_str)
            node_gene = NodeGene(key=node_key)
            node_gene.set_mean(node_gene_dict['mean_bias'])
            node_gene.set_std(node_gene_dict['std_bias'])
            genome.node_genes[node_gene.key] = node_gene

        genome.calculate_number_of_parameters()
        return genome
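A hedged sketch of the dictionary layout this constructor expects, inferred from the loops above; the connection key string format '(-1, 0)' is an assumption about what Genome._get_connection_key_from_key_str parses.

# Hypothetical input dictionary (field names taken from the code above).
genome_dict = {
    'key': 1,
    'nodes': {'0': {'mean_bias': 0.0, 'std_bias': 1.0}},
    'connections': {'(-1, 0)': {'mean_weight': 0.5, 'std_weight': 1.0}},
}
genome = Genome.create_from_julia_dict(genome_dict)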
Example #22
    def generate_data(self):
        for output in range(get_configuration().n_output):
            mask_i = self.targets == output
            if output == 0:
                mask_or = mask_i
            else:
                mask_or += mask_i

        self.data = self.data[mask_or]
        self.targets = self.targets[mask_or]

        def _data_generator(x_data: torch.Tensor):
            for i in range(len(x_data)):
                img = Image.fromarray(x_data[i].numpy(), mode='L')
                img_trans = self.transform(img)
                yield img_trans

        self.data = torch.cat(tuple(_data_generator(x_data=self.data)), 0)
        self.targets = self.targets.long()

        self.x = self.data
        self.y = self.targets

        self._generate_train_test_sets()
Example #23
    def __init__(self, report: EvolutionReportJupyNeat, notifier: Notifier,
                 evaluation_engine: EvaluationStochasticEngine):
        self.report = report
        self.notifier = notifier
        self.evaluation_engine = evaluation_engine
        self.configuration = get_configuration()
Example #24
    def setUp(self) -> None:
        path = get_config_files_path()
        filename = ''.join([path, '/regression-miso.json'])
        config = get_configuration(filename=filename)
        self.genome = generate_genome_with_hidden_units(
            n_input=config.n_input, n_output=config.n_output)
Example #25
    def __init__(self):
        self.config = get_configuration()
Example #26
import os

import torch
from PIL import Image
from torchvision.datasets import MNIST
from torchvision.transforms import transforms

from neat.configuration import get_configuration
from neat.dataset.abstract import NeatTestingDataset

n_output = get_configuration().n_output


class MNISTBinaryDataset(NeatTestingDataset, MNIST):
    '''
    MNIST dataset restricted to two classes: the digits 1 and 2.
    '''
    def __init__(self, train_percentage, dataset_type='train', random_state=42, noise=0.0, label_noise=0.0):
        self.x = None
        self.y = None
        self.train = False
        if dataset_type == 'train':
            self.train = True

        # TODO: REMOVE THIS SET
        self.train = False

        self.transform = transforms.Compose([transforms.ToTensor(),
                                             transforms.Normalize((0.1307,), (0.3081,))])
        path = ''.join([os.path.dirname(os.path.realpath(__file__)), '/data/mnist'])
        MNIST.__init__(self, root=path, train=False, download=True, transform=self.transform)
Example #27
def create_configuration(filename):
    path = get_config_files_path()
    filename = ''.join([path, filename])
    return get_configuration(filename=filename)
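A hedged usage sketch, reusing the config file name from Example #24 (the printed attributes are ones read elsewhere in these examples).

# Filename is resolved relative to the config files path, as in Example #24.
config = create_configuration('/regression-miso.json')
print(config.n_input, config.n_output)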