Example #1
 def test_linear_at_end(self):
     self.genome.linear_at_end = False
     self.genome.add(Linear(10))
     self.genome.add(Linear(activation_type="Sigmoid"))
     self.assertEqual([Linear, Linear],
                      [gene.__class__ for gene in self.genome.genes])
     self.evaluate_model([5, 1, 12, 12])
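These test snippets all rely on a shared fixture that provides self.genome, self.phenotype, and helpers such as evaluate_model and train_step. The base class is not part of this listing; the sketch below is a minimal reconstruction, and every name and helper body in it is an assumption inferred from how the tests use it (Genome and Phenotype come from the project under test).

 import torch
 import unittest

 class PhenotypeTestCase(unittest.TestCase):  # hypothetical base class name
     def setUp(self):
         # each example starts from a fresh genome wrapped in a phenotype;
         # Phenotype(output_size, genome) matches its use in Examples #2 and #18
         self.genome = Genome()
         self.phenotype = Phenotype(1, self.genome)

     def evaluate_model(self, input_shape):
         # assumption: build a model from the genotype and run one forward pass
         x = torch.randn(*input_shape)
         model = self.phenotype.transform_genotype(x)
         return model(x)

     def train_step(self, phenotype, x):
         # assumption: a single optimizer step, so that Adam state such as
         # 'exp_avg' exists when the tests inspect it (Examples #2 and #22)
         phenotype.optimizer.zero_grad()
         phenotype.model(x).sum().backward()
         phenotype.optimizer.step()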
Example #2
 def test_crossover_phenotype(self):
     # create and train the first genotype
     x = Variable(torch.randn(5, 32 * 32)).view(5, 1, 32, 32)
     self.genome.crossover_rate = 1
     self.genome.add(Conv2d(16))
     self.genome.add(Linear(32))
     self.genome.output_genes.append(Linear(activation_type='Sigmoid'))
     self.phenotype.create_model(x)
     self.train_step(self.phenotype, x)
     # create the mate genotype
     mate = Genome()
     mate.add(Conv2d(8))
     mate.add(Linear(16))
     phenotype_mate = Phenotype(1, mate)
     mate.output_genes.append(Linear(activation_type='Sigmoid'))
     phenotype_mate.create_model(x)
     self.train_step(phenotype_mate, x)
     # breed with crossover
     child = self.phenotype.breed(skip_mutation=True, mate=phenotype_mate)
     # verify that the weights were copied
     self.assertTrue(mate.genes[0].module.weight.equal(
         child.genome.genes[0].module.weight))
     old_state = phenotype_mate.optimizer.state[
         phenotype_mate.optimizer.param_groups[0]['params'][0]]
     new_state = child.optimizer.state[child.optimizer.param_groups[0]
                                       ['params'][0]]
     self.assertTrue(old_state['exp_avg'].equal(new_state['exp_avg']))
Example #3
 def test_different_genome_distance(self):
     self.genome.add(Linear(32))
     self.genome.add(Linear(16))
     other = copy.deepcopy(self.genome)
     other.add(Linear(8))
     other.add(Linear(1))
     self.assertEqual(2, self.genome.distance(other, mode="num_genes"))
Example #4
 def test_complex_graph(self):
     self.genome.add(Linear(32))
     self.genome.add(Linear(activation_type="ReLU"))
     self.genome.add(Conv2d(3))
     self.genome.add(Dropout())
     self.assertEqual([Conv2d, Linear, Linear, Dropout], [gene.__class__ for gene in self.genome.genes])
     self.evaluate_model([5, 3, 5, 5])
Example #5
    def __init__(self, output_size=1, genome=None, input_shape=None, optimizer_conf=None):
        super().__init__(output_size=output_size, genome=genome, input_shape=input_shape)
        self.output_size = output_size
        self.optimizer_conf = optimizer_conf or config.gan.discriminator.optimizer

        if genome is None:
            if config.gan.discriminator.fixed:
                self.genome = Genome(random=False, add_layer_prob=0, rm_layer_prob=0, gene_mutation_prob=0,
                                     mutate_gan_type_prob=0)
                self.genome.add(Conv2d(256, stride=2, activation_type="LeakyReLU", activation_params={"negative_slope": 0.2}))
                self.genome.add(Conv2d(128, stride=2, activation_type="LeakyReLU", activation_params={"negative_slope": 0.2}))
                self.genome.add(Conv2d(64, stride=2, activation_type="LeakyReLU", activation_params={"negative_slope": 0.2}))
                # self.genome.add(SelfAttention())
            else:
                self.genome = Genome(random=not config.evolution.sequential_layers)
                self.genome.possible_genes = [(getattr(evolution, l), {}) for l in config.gan.discriminator.possible_layers]

            if config.gan.discriminator.fixed:
                self.genome.input_genes = []
                self.genome.output_genes = [Linear(1, activation_type=None, normalize="none", bias=False)]
            else:
                self.genome.input_genes = [Conv2d(stride=1, normalize="spectral")]
                self.genome.output_genes = [Linear(1, activation_type=None, normalize="spectral", bias=False)]
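This appears to be the discriminator phenotype's constructor. A minimal usage sketch follows, assuming the enclosing class is named Discriminator and that config.gan.discriminator.fixed is enabled (the listing only shows __init__):

 # minimal sketch; Discriminator is an assumed class name
 d = Discriminator()              # builds the fixed three-Conv2d genome above
 x = torch.randn(4, 1, 28, 28)    # hypothetical batch of 28x28 grayscale images
 d.create_model(x)                # inherited from Phenotype (see Example #2)
 scores = d.model(x)              # one unbounded logit per image: the output
                                  # gene is Linear(1, activation_type=None)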
Example #6
    def __init__(self,
                 output_size=(1, 28, 28),
                 genome=None,
                 input_shape=(1, 1, 10, 10),
                 optimizer_conf=config.gan.generator.optimizer):
        super().__init__(output_size=output_size,
                         genome=genome,
                         input_shape=input_shape)
        self.noise_size = int(np.prod(self.input_shape[1:]))
        self.inception_score_mean = 0
        self.fid_score = None
        self.rmse_score = None
        self.optimizer_conf = optimizer_conf

        if genome is None:
            if config.gan.generator.fixed:
                self.genome = Genome(
                    random=False,
                    add_layer_prob=0,
                    rm_layer_prob=0,
                    gene_mutation_prob=0,
                    simple_layers=config.gan.generator.simple_layers,
                    linear_at_end=False)
                self.genome.add(
                    Linear(4 * int(np.prod(output_size)),
                           activation_type="LeakyReLU",
                           activation_params={"negative_slope": 0.2}))
                if not config.gan.generator.simple_layers:
                    self.genome.add(
                        Deconv2d(128,
                                 activation_type="LeakyReLU",
                                 activation_params={"negative_slope": 0.2}))
                    self.genome.add(
                        Deconv2d(64,
                                 activation_type="LeakyReLU",
                                 activation_params={"negative_slope": 0.2}))
            else:
                self.genome = Genome(
                    random=not config.evolution.sequential_layers,
                    linear_at_end=False)
                self.genome.possible_genes = [
                    (getattr(evolution, l), {})
                    for l in config.gan.generator.possible_layers
                ]
                self.genome.add(Linear(4096))
            if config.gan.generator.simple_layers:
                # self.genome.output_genes = [Deconv2d(output_size[0], activation_type="Tanh")]
                self.genome.output_genes = [
                    Linear(int(np.prod(output_size)),
                           activation_type="Tanh",
                           normalize=False)
                ]
            else:
                self.genome.output_genes = [
                    Deconv2d(output_size[0],
                             activation_type="Tanh",
                             normalize=False)
                ]
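Likewise, a minimal usage sketch for this generator constructor, assuming the enclosing class is named Generator:

 # minimal sketch; Generator is an assumed class name
 g = Generator()                         # input_shape=(1, 1, 10, 10) by default
 z = torch.randn(4, *g.input_shape[1:])  # noise_size = prod(input_shape[1:]) = 100
 g.create_model(z)                       # inherited from Phenotype
 fake = g.model(z)                       # Tanh output shaped like output_size=(1, 28, 28)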
Example #7
 def test_valid_phenotype_activation_leakyrelu(self):
     self.genome.add(Linear(32))
     self.genome.add(Linear(activation_type="LeakyReLU", activation_params={"negative_slope": 0.2, "inplace": True}))
     self.genome.add(Linear(16))
     x = Variable(torch.randn(5, 64))  # create some input data
     self.phenotype.output_size = 16
     model = self.phenotype.transform_genotype(x)
     out = model(x)  # run the PyTorch module to check that everything is OK
     self.assertEqual((5, 16), out.shape)
Example #8
    def __init__(self, output_size=1, genome=None, input_shape=None):
        super().__init__(output_size=output_size,
                         genome=genome,
                         input_shape=input_shape)
        self.output_size = output_size

        if genome is None:
            if config.gan.discriminator.fixed:
                self.genome = Genome(
                    random=False,
                    add_layer_prob=0,
                    rm_layer_prob=0,
                    gene_mutation_prob=0,
                    simple_layers=config.gan.discriminator.simple_layers)
                if not config.gan.discriminator.simple_layers:
                    self.genome.add(
                        Conv2d(8,
                               stride=2,
                               activation_type="LeakyReLU",
                               activation_params={"negative_slope": 0.2}))
                    self.genome.add(
                        Conv2d(16,
                               stride=2,
                               activation_type="LeakyReLU",
                               activation_params={"negative_slope": 0.2}))
                    self.genome.add(
                        Conv2d(32,
                               stride=2,
                               activation_type="LeakyReLU",
                               activation_params={"negative_slope": 0.2}))
                    self.genome.add(
                        Conv2d(64,
                               stride=2,
                               activation_type="LeakyReLU",
                               activation_params={"negative_slope": 0.2}))
                    self.genome.add(
                        Conv2d(128,
                               stride=2,
                               activation_type="LeakyReLU",
                               activation_params={"negative_slope": 0.2}))
                # self.genome.add(Linear(1024, activation_type="ELU"))
            else:
                self.genome = Genome(
                    random=not config.evolution.sequential_layers)
                self.genome.possible_genes = [
                    g for g in self.genome.possible_genes if g[0] != Deconv2d
                ]
                self.genome.add_random_gene()

            if config.gan.type == "gan":
                self.genome.output_genes = [
                    Linear(1, activation_type="Sigmoid", normalize=False)
                ]
            else:
                self.genome.output_genes = [
                    Linear(1, activation_type=None, normalize=False)
                ]
Example #9
 def test_breed_copy_skill_rating(self):
     x = Variable(torch.randn(5, 64)).view(5, 1, 8, 8)
     self.genome.add(Conv2d(2))
     self.genome.add(Linear(16))
     self.genome.output_genes.append(Linear(activation_type='Sigmoid'))
     self.phenotype.create_model(x)
     self.phenotype.skill_rating.mu = 2000
     phenotype2 = self.phenotype.breed()
     self.assertEqual(self.phenotype.skill_rating.mu, phenotype2.skill_rating.mu)
Example #10
 def test_valid_phenotype_activation(self):
     self.genome.add(Linear(32))
     self.genome.add(Linear())
     self.genome.add(Linear(16))
     x = Variable(torch.randn(5, 64))  # create some input data
     self.phenotype.output_size = 16
     model = self.phenotype.transform_genotype(x)
     out = model(x)  # run the PyTorch module to check that everything is OK
     self.assertEqual((5, 16), out.shape)
Example #11
 def test_convert_phenotype_to_json(self):
     self.genome.add(Conv2d(4, activation_type="ReLU"))
     self.genome.add(Linear(32))
     self.genome.add(Linear(16))
     self.evaluate_model([5, 1, 8, 8])
     layers = json.loads(self.phenotype.to_json())
     self.assertEqual("ReLU", layers[0]["activation_type"])
     self.assertEqual(3, len(layers))
     self.assertEqual(["Conv2d", "Linear", "Linear"], [l["type"] for l in layers])
Example #12
 def test_equal_genome_distance_after_breed(self):
     self.genome.add(Linear(32))
     self.genome.add(Linear(16))
     self.genome.output_genes.append(Linear(activation_type='Sigmoid'))
     x = Variable(torch.randn(5, 64))
     self.phenotype.create_model(x)
     self.train_step(self.phenotype, x)
     phenotype2 = self.phenotype.breed(skip_mutation=True)
     self.assertEqual(0, self.genome.distance(phenotype2.genome))
Example #13
 def test_adjust_last_linear(self):
     self.genome.linear_at_end = False
     self.genome.add(Linear(16))
     x = Variable(torch.randn(5, 64))  # create some input data
     model = self.phenotype.transform_genotype(x)
     out = model(x)  # run the PyTorch module to check that everything is OK
     self.assertEqual((5, 1), out.shape)
     self.genome.add(Linear(4))
     self.phenotype.transform_genotype(x)
     self.assertEqual(16, self.genome.genes[0].out_features)
Example #14
    def __init__(self,
                 output_size=(1, 28, 28),
                 genome=None,
                 input_shape=(1, 1, 10, 10)):
        super().__init__(output_size=output_size,
                         genome=genome,
                         input_shape=input_shape)
        self.noise_size = int(np.prod(self.input_shape[1:]))
        self.inception_score_mean = 0
        self.fid_score = None
        self.rmse_score = None

        if genome is None:
            if config.gan.generator.fixed:
                self.genome = Genome(
                    random=False,
                    add_layer_prob=0,
                    rm_layer_prob=0,
                    gene_mutation_prob=0,
                    simple_layers=config.gan.generator.simple_layers,
                    linear_at_end=False)
                self.genome.add(
                    Linear(4 * int(np.prod(output_size)),
                           activation_type="ReLU"))
                # self.genome.add(Linear(4*int(np.prod(output_size)), activation_type="LeakyReLU"))
                if not config.gan.generator.simple_layers:
                    self.genome.add(Deconv2d(128, activation_type="ReLU"))
                    self.genome.add(Deconv2d(64, activation_type="ReLU"))
                    self.genome.add(Deconv2d(32, activation_type="ReLU"))
                    self.genome.add(Deconv2d(16, activation_type="ReLU"))
                    # self.genome.add(Deconv2d(8, activation_type="ReLU"))
            else:
                self.genome = Genome(
                    random=not config.evolution.sequential_layers,
                    linear_at_end=False)
                self.genome.possible_genes = [
                    g for g in self.genome.possible_genes if g[0] != Conv2d
                ]
                # IMPORTANT: performance without a linear layer is pretty bad
                self.genome.add(Linear(512))
                # self.genome.add_random_gene()
            if config.gan.generator.simple_layers:
                # self.genome.output_genes = [Deconv2d(output_size[0], activation_type="Tanh")]
                self.genome.output_genes = [
                    Linear(int(np.prod(output_size)),
                           activation_type="Tanh",
                           normalize=False)
                ]
            else:
                self.genome.output_genes = [
                    Deconv2d(output_size[0],
                             activation_type="Tanh",
                             normalize=False)
                ]
Example #15
 def test_random_genome(self):
     self.genome.random = True
     self.genome.add(Linear(16))
     self.genome.add(Linear(32))
     self.genome.add(Linear(1), force_sequence=True)
     self.genome.add(Conv2d(3, kernel_size=3))
     self.genome.add(Conv2d(6, kernel_size=3))
     self.genome.add(Conv2d(9, kernel_size=3), force_sequence=True)
     self.assertEqual([Conv2d, Conv2d, Conv2d, Linear, Linear, Linear], [gene.__class__ for gene in self.genome.genes])
     self.assertSetEqual(set([3, 6, 9]), set([self.genome.genes[i].out_channels for i in range(3)]))
     self.assertSetEqual(set([16, 32, 1]), set([self.genome.genes[i].out_features for i in range(3, 6)]))
     self.evaluate_model([5, 3, 5, 5])
Example #16
 def test_2d_after_linear(self):
     self.phenotype.output_size = (1, 32, 32)
     self.genome.linear_at_end = False
     self.genome.add(Linear(32*32))
     self.genome.add(Deconv2d(1))
     self.genome.add(Linear(32*32*3))
     self.genome.add(Deconv2d(4))
     self.assertEqual([Linear, Linear, Deconv2d, Deconv2d], [gene.__class__ for gene in self.genome.genes])
     self.evaluate_model([8, 1, 32, 32])
     x = Variable(torch.randn(8, 32*32))
     self.phenotype.create_model(x)
     self.train_step(self.phenotype, x)
Example #17
 def test_reset_module_when_changed(self):
     x = Variable(torch.randn(5, 64)).view(5, 1, 8, 8)
     self.genome.add(Conv2d(2))
     self.genome.add(Linear(16))
     self.phenotype.transform_genotype(x)
     genome = copy.deepcopy(self.genome)
     genome.genes.insert(0, Conv2d(3))
     genome.add(Linear(32))
     self.phenotype.genome = genome
     self.phenotype.transform_genotype(x)
     self.assertFalse(self.genome.genes[0].module.weight.equal(genome.genes[0].module.weight))
     self.assertFalse(self.genome.genes[1].module.weight.equal(genome.genes[2].module.weight))
Example #18
 def test_different_genome_setup(self):
     self.genome.add(Conv2d(32, activation_type="LeakyReLU"))
     self.genome.add(Conv2d(32))
     self.genome.add(Linear(16))
     other = Genome()
     other.add(Conv2d(32, activation_type="ReLU"))
     other.add(Conv2d(64))
     other.add(Linear(16))
     x = Variable(torch.randn(5, 32*32)).view(5, 1, 32, 32)
     self.phenotype.create_model(x)
     other_phenotype = Phenotype(1, other)
     other_phenotype.optimizer_conf = self.phenotype.optimizer_conf
     other_phenotype.create_model(x)
     self.assertEqual(0.4, self.genome.distance(other, mode="num_genes"))
Example #19
 def test_not_share_reused_weights(self):
     x = Variable(torch.randn(5, 64)).view(5, 1, 8, 8)
     self.genome.add(Conv2d(2))
     self.genome.add(Linear(16))
     self.genome.output_genes.append(Linear(activation_type='Sigmoid'))
     self.phenotype.create_model(x)
     self.train_step(self.phenotype, x)
     self.genome.add_layer_prob = 0
     self.genome.gene_mutation_prob = 0
     phenotype2 = self.phenotype.breed(skip_mutation=True)
     self.train_step(phenotype2, x)
     self.assertIsNot(self.genome.genes[0].module, phenotype2.genome.genes[0].module)
     self.assertFalse(self.genome.genes[0].module.weight.equal(phenotype2.genome.genes[0].module.weight))
     self.assertFalse(self.genome.genes[1].module.weight.equal(phenotype2.genome.genes[1].module.weight))
Example #20
 def test_crossover_empty(self):
     # create the first genotype
     x = Variable(torch.randn(5, 32*32)).view(5, 1, 32, 32)
     self.genome.add(Conv2d(8))
     self.genome.output_genes.append(Linear(activation_type='Sigmoid'))
     # create the mate genotype
     mate = Genome(crossover_rate=1)
     mate.add(Linear(16))
     # apply crossover
     mate.crossover(self.genome)
     # check that the layers match the expected order
     self.assertEqual([Conv2d, Linear], [gene.__class__ for gene in mate.genes])
     # evaluate the created model
     self.evaluate_model([5, 1, 32, 32])
     self.phenotype.create_model(x)
     self.train_step(self.phenotype, x)
Example #21
 def test_linear_after_conv2d(self):
     self.genome.add(Conv2d(1))
     self.genome.add(Linear(32))
     self.genome.add(Conv2d(3))
     self.assertEqual([Conv2d, Conv2d, Linear],
                      [gene.__class__ for gene in self.genome.genes])
     self.evaluate_model([5, 1, 12, 12])
Example #22
 def test_breed_phenotype_with_new_layer(self):
     x = Variable(torch.randn(5, 64)).view(5, 1, 8, 8)
     self.genome.add(Conv2d(2))
     self.genome.add(Linear(16))
     self.genome.output_genes.append(Linear(activation_type='Sigmoid'))
     self.phenotype.create_model(x)
     self.train_step(self.phenotype, x)
     self.genome.add_layer_prob = 1
     self.genome.rm_layer_prob = 0
     self.genome.gene_mutation_prob = 0
     phenotype2 = self.phenotype.breed()
     old_state = self.phenotype.optimizer.state[self.phenotype.optimizer.param_groups[0]['params'][0]]
     new_state = phenotype2.optimizer.state[phenotype2.optimizer.param_groups[0]['params'][0]]
     self.assertTrue(old_state['exp_avg'].equal(new_state['exp_avg']))
     self.assertIsNot(old_state['exp_avg'], new_state['exp_avg'])
     self.train_step(phenotype2, x)
Example #23
 def test_not_copy_optimizer_new_module(self):
     x = Variable(torch.randn(5, 64)).view(5, 1, 8, 8)
     self.genome.add(Conv2d(2))
     self.genome.add(Linear(16))
     self.genome.output_genes.append(Linear(activation_type='Sigmoid'))
     self.phenotype.create_model(x)
     self.train_step(self.phenotype, x)
     self.genome.add_layer_prob = 0
     conv2d_module = self.genome.genes[0].module
     self.genome.genes[0].module = None
     phenotype2 = self.phenotype.breed()
     old_state = self.phenotype.optimizer.state[self.phenotype.optimizer.param_groups[0]['params'][0]]
     new_state = phenotype2.optimizer.state[phenotype2.optimizer.param_groups[0]['params'][0]]
     self.assertEqual(0, phenotype2.genome.genes[0].used)
     self.assertFalse(conv2d_module.weight.equal(phenotype2.genome.genes[0].module.weight))
     self.assertIn('exp_avg', old_state)
     self.assertNotIn('exp_avg', new_state)
Example #24
 def test_conv2d_phenotype(self):
     self.genome.add(Conv2d(3))
     self.genome.add(Conv2d(6))
     self.genome.add(Linear(1))
     x = Variable(torch.randn(5, 144))  # create some input data
     x = x.view(5, 1, 12, 12)  # convert to 4d (batch_size, channels, w, h)
     model = self.phenotype.transform_genotype(x)
     out = model(x)  # run the PyTorch module to check that everything is OK
     self.assertEqual((5, 1), out.shape)
Example #25
 def test_linear_at_end_false(self):
     self.genome.linear_at_end = False
     self.genome.random = False
     self.genome.add(Conv2d(1))
     self.genome.add(Conv2d(3))
     self.genome.add(Linear(activation_type="Sigmoid"))
     self.genome.add(Conv2d(6))
     self.assertEqual([Linear, Conv2d, Conv2d, Conv2d], [gene.__class__ for gene in self.genome.genes])
     self.assertEqual([1, 3, 6], [self.genome.genes[i].out_channels for i in range(1, 4)])
Example #26
 def test_add_random_gene(self):
     for i in range(10):
         self.genome.add_random_gene()
     self.genome.add(Linear(1))
     x = Variable(torch.randn(5, 1*32*32))  # create some input data
     x = x.view(5, 1, 32, 32)
     model = self.phenotype.transform_genotype(x)
     out = model(x)  # run the PyTorch module to check that everything is OK
     self.assertEqual((5, 1), out.shape)
Example #27
 def test_reuse_weights(self):
     x = Variable(torch.randn(5, 64))
     self.genome.add(Conv2d(2))
     self.genome.add(Linear(16))
     x = x.view(5, 1, 8, 8)
     self.phenotype.create_model(x)
     self.genome.rm_layer_prob = 0
     self.genome.add_layer_prob = 0
     self.genome.gene_mutation_prob = 0
     phenotype2 = self.phenotype.breed()
     self.assertTrue(self.genome.genes[0].module.weight.equal(phenotype2.genome.genes[0].module.weight))
     self.assertTrue(self.genome.genes[1].module.weight.equal(phenotype2.genome.genes[1].module.weight))
Example #28
 def test_invalid_graph(self):
     self.phenotype.output_size = (1, 28, 28)
     self.genome.linear_at_end = False
     self.genome.add(Linear(1568))
     self.genome.add(Deconv2d(32))
     self.genome.add(Deconv2d(32))
     self.genome.add(Deconv2d(32))
     self.genome.add(Deconv2d(32))
     self.genome.add(Deconv2d(32))
     self.genome.output_genes.append(Deconv2d(1))
     x = Variable(torch.randn(5, 100)).view(5, 1, 10, 10)
     self.assertRaises(Exception, self.phenotype.create_model, x)
Example #29
    def __init__(self,
                 output_size=(1, 28, 28),
                 genome=None,
                 input_shape=(1, config.gan.latent_dim),
                 optimizer_conf=None):
        super().__init__(output_size=output_size,
                         genome=genome,
                         input_shape=input_shape)
        self.noise_size = int(np.prod(self.input_shape[1:]))
        self.inception_score_mean = 0
        self.fid_score = None
        self.rmse_score = None
        self.optimizer_conf = optimizer_conf or config.gan.generator.optimizer
        deconv2d_class = Deconv2dUpsample if config.layer.deconv2d.use_upsample else Deconv2d

        if genome is None:
            if config.gan.generator.fixed:
                self.genome = Genome(random=False,
                                     add_layer_prob=0,
                                     rm_layer_prob=0,
                                     gene_mutation_prob=0,
                                     mutate_gan_type_prob=0,
                                     linear_at_end=False)
                self.genome.add(
                    deconv2d_class(128,
                                   stride=1,
                                   activation_type="LeakyReLU",
                                   activation_params={"negative_slope": 0.2}))
                self.genome.add(
                    deconv2d_class(64,
                                   stride=1,
                                   activation_type="LeakyReLU",
                                   activation_params={"negative_slope": 0.2}))
            else:
                self.genome = Genome(
                    random=not config.evolution.sequential_layers,
                    linear_at_end=False)
                self.genome.possible_genes = [
                    (getattr(evolution, l), {})
                    for l in config.gan.generator.possible_layers
                ]
            self.genome.input_genes = [
                Linear(8 * int(np.prod(output_size)),
                       activation_type=None,
                       normalize=False)
            ]
            deconv_out = Deconv2d if config.gan.generator.fixed else Deconv2dUpsample
            self.genome.output_genes = [
                deconv_out(output_size[0],
                           size=output_size[-2:],
                           activation_type="Tanh",
                           normalize=False)
            ]
Example #30
 def test_multiple_deconv2d(self):
     self.phenotype.output_size = (1, 28, 28)
     self.genome.linear_at_end = False
     self.genome.add(Linear(1568))
     self.genome.add(Deconv2d(32, kernel_size=3))
     self.genome.add(Deconv2d(32, kernel_size=3))
     self.genome.add(Deconv2d(32, kernel_size=3))
     self.genome.output_genes.append(Deconv2d(1))
     x = Variable(torch.randn(5, 100)).view(5, 1, 10, 10)
     self.phenotype.create_model(x)
     out = self.phenotype.model(x)
     self.assertEqual([28, 28], list(out.size()[2:]))