Example no. 1
    def __init__(self,
                 output_size=(1, 28, 28),
                 genome=None,
                 input_shape=(1, 1, 10, 10),
                 optimizer_conf=config.gan.generator.optimizer):
        super().__init__(output_size=output_size,
                         genome=genome,
                         input_shape=input_shape)
        self.noise_size = int(np.prod(self.input_shape[1:]))
        self.inception_score_mean = 0
        self.fid_score = None
        self.rmse_score = None
        self.optimizer_conf = optimizer_conf

        if genome is None:
            if config.gan.generator.fixed:
                self.genome = Genome(
                    random=False,
                    add_layer_prob=0,
                    rm_layer_prob=0,
                    gene_mutation_prob=0,
                    simple_layers=config.gan.generator.simple_layers,
                    linear_at_end=False)
                self.genome.add(
                    Linear(4 * int(np.prod(output_size)),
                           activation_type="LeakyReLU",
                           activation_params={"negative_slope": 0.2}))
                if not config.gan.generator.simple_layers:
                    self.genome.add(
                        Deconv2d(128,
                                 activation_type="LeakyReLU",
                                 activation_params={"negative_slope": 0.2}))
                    self.genome.add(
                        Deconv2d(64,
                                 activation_type="LeakyReLU",
                                 activation_params={"negative_slope": 0.2}))
            else:
                self.genome = Genome(
                    random=not config.evolution.sequential_layers,
                    linear_at_end=False)
                self.genome.possible_genes = [
                    (getattr(evolution, l), {})
                    for l in config.gan.generator.possible_layers
                ]
                self.genome.add(Linear(4096))
            if config.gan.generator.simple_layers:
                # self.genome.output_genes = [Deconv2d(output_size[0], activation_type="Tanh")]
                self.genome.output_genes = [
                    Linear(int(np.prod(output_size)),
                           activation_type="Tanh",
                           normalize=False)
                ]
            else:
                self.genome.output_genes = [
                    Deconv2d(output_size[0],
                             activation_type="Tanh",
                             normalize=False)
                ]
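Note: below is a hedged sketch, in plain PyTorch, of what the fixed non-simple genome above roughly describes: the Linear gene, the Deconv2d(128) and Deconv2d(64) genes with LeakyReLU(0.2), and the Tanh output gene. Every kernel size, stride, reshape and the absence of normalization are illustrative assumptions only; the real hyperparameters are chosen by the phenotype when the model is built, and the names noise_size and sketch are introduced here just for the illustration.

import torch
import torch.nn as nn

# Hedged approximation of the fixed genome:
# Linear(4 * prod(output_size)) -> Deconv2d(128) -> Deconv2d(64) -> Deconv2d(1, Tanh).
# Kernel sizes, strides and the reshape are assumptions, not values from the phenotype.
noise_size = 100                      # int(np.prod(input_shape[1:])) = 1 * 10 * 10
out_c, out_h, out_w = 1, 28, 28       # output_size = (1, 28, 28)

sketch = nn.Sequential(
    nn.Linear(noise_size, 4 * out_c * out_h * out_w),          # Linear gene
    nn.LeakyReLU(0.2),
    nn.Unflatten(1, (4, out_h, out_w)),                        # assumed reshape to a feature map
    nn.ConvTranspose2d(4, 128, kernel_size=3, padding=1),      # Deconv2d(128) gene
    nn.LeakyReLU(0.2),
    nn.ConvTranspose2d(128, 64, kernel_size=3, padding=1),     # Deconv2d(64) gene
    nn.LeakyReLU(0.2),
    nn.ConvTranspose2d(64, out_c, kernel_size=3, padding=1),   # output gene, normalize=False
    nn.Tanh(),
)

fake = sketch(torch.randn(5, noise_size))   # -> torch.Size([5, 1, 28, 28])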
Example no. 2
 def test_simple_deconv2d_64(self):
     self.phenotype.output_size = (3, 64, 64)
     self.genome.linear_at_end = False
     self.genome.add(Deconv2d(32))
     self.genome.output_genes.append(Deconv2d(3))
     x = Variable(torch.randn(5, 100)).view(5, 1, 10, 10)
     self.phenotype.create_model(x)
     out = self.phenotype.model(x)
     self.assertEqual([64, 64], list(out.size()[2:]))
Example no. 3
    def __init__(self,
                 output_size=(1, 28, 28),
                 genome=None,
                 input_shape=(1, 1, 10, 10)):
        super().__init__(output_size=output_size,
                         genome=genome,
                         input_shape=input_shape)
        self.noise_size = int(np.prod(self.input_shape[1:]))
        self.inception_score_mean = 0
        self.fid_score = None
        self.rmse_score = None

        if genome is None:
            if config.gan.generator.fixed:
                self.genome = Genome(
                    random=False,
                    add_layer_prob=0,
                    rm_layer_prob=0,
                    gene_mutation_prob=0,
                    simple_layers=config.gan.generator.simple_layers,
                    linear_at_end=False)
                self.genome.add(
                    Linear(4 * int(np.prod(output_size)),
                           activation_type="ReLU"))
                # self.genome.add(Linear(4*int(np.prod(output_size)), activation_type="LeakyReLU"))
                if not config.gan.generator.simple_layers:
                    self.genome.add(Deconv2d(128, activation_type="ReLU"))
                    self.genome.add(Deconv2d(64, activation_type="ReLU"))
                    self.genome.add(Deconv2d(32, activation_type="ReLU"))
                    self.genome.add(Deconv2d(16, activation_type="ReLU"))
                    # self.genome.add(Deconv2d(8, activation_type="ReLU"))
            else:
                self.genome = Genome(
                    random=not config.evolution.sequential_layers,
                    linear_at_end=False)
                self.genome.possible_genes = [
                    g for g in self.genome.possible_genes if g[0] != Conv2d
                ]
                # IMPORTANT: the performance without a linear layer is pretty bad
                self.genome.add(Linear(512))
                # self.genome.add_random_gene()
            if config.gan.generator.simple_layers:
                # self.genome.output_genes = [Deconv2d(output_size[0], activation_type="Tanh")]
                self.genome.output_genes = [
                    Linear(int(np.prod(output_size)),
                           activation_type="Tanh",
                           normalize=False)
                ]
            else:
                self.genome.output_genes = [
                    Deconv2d(output_size[0],
                             activation_type="Tanh",
                             normalize=False)
                ]
Example no. 4
 def test_multiple_deconv2d(self):
     self.phenotype.output_size = (1, 28, 28)
     self.genome.linear_at_end = False
     self.genome.add(Linear(1568))
     self.genome.add(Deconv2d(32, kernel_size=3))
     self.genome.add(Deconv2d(32, kernel_size=3))
     self.genome.add(Deconv2d(32, kernel_size=3))
     self.genome.output_genes.append(Deconv2d(1))
     x = Variable(torch.randn(5, 100)).view(5, 1, 10, 10)
     self.phenotype.create_model(x)
     out = self.phenotype.model(x)
     self.assertEqual([28, 28], list(out.size()[2:]))
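Note: the [28, 28] assertion follows the standard ConvTranspose2d size arithmetic. The sketch below shows one hypothetical way the numbers can work out (Linear(1568) reshaped to a 32x7x7 map, then two stride-2 upsampling steps); the helper deconv_out and the stride/padding values are assumptions for illustration, since the parameters actually chosen by the phenotype are not visible in the test.

def deconv_out(in_size, kernel_size, stride=1, padding=0, output_padding=0, dilation=1):
    # ConvTranspose2d output size per spatial dimension (PyTorch convention).
    return (in_size - 1) * stride - 2 * padding + dilation * (kernel_size - 1) + output_padding + 1

# Hypothetical reading of the test above: 1568 = 32 * 7 * 7, so the Linear gene can be
# reshaped to a (32, 7, 7) feature map and doubled twice to reach 28x28.
size = 7
for _ in range(2):
    size = deconv_out(size, kernel_size=3, stride=2, padding=1, output_padding=1)
print(size)  # 28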
Example no. 5
 def test_2d_after_linear(self):
     self.phenotype.output_size = (1, 32, 32)
     self.genome.linear_at_end = False
     self.genome.add(Linear(32*32))
     self.genome.add(Deconv2d(1))
     self.genome.add(Linear(32*32*3))
     self.genome.add(Deconv2d(4))
     self.assertEqual([Linear, Linear, Deconv2d, Deconv2d], [gene.__class__ for gene in self.genome.genes])
     self.evaluate_model([8, 1, 32, 32])
     x = Variable(torch.randn(8, 32*32))
     self.phenotype.create_model(x)
     self.train_step(self.phenotype, x)
Example no. 6
 def test_deconv_output_channels(self):
     self.phenotype.output_size = (1, 28, 28)
     self.genome.linear_at_end = False
     self.genome.add(Linear())
     self.genome.add(Deconv2d(32))
     self.genome.add(Deconv2d(16))
     self.genome.add(Deconv2d(8))
     self.genome.add(Deconv2d(4))
     x = Variable(torch.randn(1, 100))
     model = self.phenotype.transform_genotype(x)
     print(model)
     out = model(x)
     self.assertEqual([1, 1, 28, 28], list(out.size()))
Example no. 7
 def test_multiple_deconv2d_outchannels(self):
     self.phenotype.output_size = (1, 28, 28)
     self.genome.linear_at_end = False
     self.genome.add(Linear(576))
     self.genome.add(Deconv2d(64, kernel_size=3))
     self.genome.add(Deconv2d(32, kernel_size=3))
     self.genome.output_genes.append(Deconv2d(1))
     x = Variable(torch.randn(5, 100)).view(5, 1, 10, 10)
     self.phenotype.create_model(x)
     self.genome.add(Deconv2d())
     config.layer.conv2d.random_out_channels = False
     model = self.phenotype.transform_genotype(x)
     out = model(x)
     self.assertEqual(self.genome.genes[-2].out_channels//2, self.genome.genes[-1].out_channels)
     self.assertEqual([28, 28], list(out.size()[2:]))
Example no. 8
 def test_add_deconv_not_random_out(self):
     config.layer.conv2d.random_out_channels = False
     self.phenotype.output_size = (1, 28, 28)
     self.genome.linear_at_end = False
     self.genome.add(Linear())
     self.genome.add(Deconv2d())
     x = Variable(torch.randn(8, 100))
     model = self.phenotype.transform_genotype(x)
     print(model)
     out = model(x)
     self.assertEqual([8, 1, 28, 28], list(out.size()))
     self.genome.add(Deconv2d())
     model = self.phenotype.transform_genotype(x)
     print(model)
     out = model(x)
     self.assertEqual([8, 1, 28, 28], list(out.size()))
Example no. 9
 def test_invalid_graph(self):
     self.phenotype.output_size = (1, 28, 28)
     self.genome.linear_at_end = False
     self.genome.add(Linear(1568))
     self.genome.add(Deconv2d(32))
     self.genome.add(Deconv2d(32))
     self.genome.add(Deconv2d(32))
     self.genome.add(Deconv2d(32))
     self.genome.add(Deconv2d(32))
     self.genome.output_genes.append(Deconv2d(1))
     x = Variable(torch.randn(5, 100)).view(5, 1, 10, 10)
     self.assertRaises(Exception, self.phenotype.create_model, x)