Example #1
    def __init__(self,
                 gan_type='gan',
                 net_type='cnn',
                 optim_type='adam',
                 batch_size=64,
                 noise_dim=50,
                 learning_rate=2e-4,
                 optim_num=0.5,
                 clip_num=0.03,
                 critic_iter=5,
                 plot_iter=5,
                 verbose=True):

        self.noise_dim = noise_dim
        self.class_num = 10
        self.clip_num = None if clip_num == 0 else clip_num
        self.lr = learning_rate
        self.optim_num = optim_num
        self.critic_iter = critic_iter
        self.plot_iter = plot_iter
        self.verbose = verbose
        super(GAN, self).__init__(batch_size, gan_type + "_" + net_type)

        self.gan_type = gan_type
        self.optim_type = optim_type

        self.data = datamanager_mnist(train_ratio=1.0,
                                      fold_k=None,
                                      norm=True,
                                      expand_dim=True,
                                      seed=23333)
        sample_data = datamanager_mnist(train_ratio=1.0,
                                        fold_k=None,
                                        norm=True,
                                        expand_dim=True,
                                        seed=23333)
        self.sample_data = sample_data(self.batch_size,
                                       var_list=["data", "labels"])

        if net_type == 'cnn':
            self.generator = Generator_CNN('cnn_generator')
            self.discriminator = Discriminator_CNN(class_num=self.class_num,
                                                   name='cnn_discriminator')
        elif net_type == 'mlp':
            self.generator = Generator_MLP('mlp_generator')
            self.discriminator = Discriminator_MLP(class_num=self.class_num,
                                                   name='mlp_discriminator')

        self.build_placeholder()
        self.build_gan()
        self.build_optimizer(optim_type)
        self.build_summary()

        self.build_sess()
        self.build_dirs()
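
A minimal usage sketch for the constructor above (not part of the source listing): it instantiates the GAN class with the defaults shown, and assumes a train(epochs) method analogous to the CycleGAN call in Example #5.

# Hypothetical usage; train(100) is an assumption by analogy with Example #5.
gan = GAN(gan_type='gan',        # default adversarial loss
          net_type='cnn',        # 'cnn' or 'mlp', per the branches above
          batch_size=64,
          noise_dim=50,
          learning_rate=2e-4,
          clip_num=0.03,         # 0 would disable clipping (clip_num -> None)
          critic_iter=5)
gan.train(100)
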
Example #2
    def __init__(self, 
                optim_type='adam',
                net_type='cnn',
                batch_size=64, 
                noise_dim=50,
                learning_rate=2e-4,
                optim_num=0.5,
                critic_iter=1,
                plot_iter=5,
                verbose=True):

        self.noise_dim = noise_dim
        self.class_num = 10
        self.lr = learning_rate
        self.optim_num = optim_num
        self.critic_iter = critic_iter
        self.plot_iter = plot_iter
        self.verbose = verbose
        super(InfoGAN, self).__init__(batch_size, "infogan_"+net_type)

        # code
        self.len_discrete_code = self.class_num  # categorical distribution (i.e. label)
        self.len_continuous_code = 2  # gaussian distribution (e.g. rotation, thickness)

        self.optim_type = optim_type
        self.SUPERVISED = True

        self.data = datamanager_mnist(train_ratio=1.0, fold_k=None, norm=True, expand_dim=True, seed=23333)
        sample_data = datamanager_mnist(train_ratio=1.0, fold_k=None, norm=True, expand_dim=True, seed=23333)
        self.sample_data = sample_data(self.batch_size, var_list=["data", "labels"])

        if net_type == 'cnn':
            self.generator = Generator_CNN(name='cnn_generator')
            self.discriminator = Discriminator_CNN(name='cnn_discriminator')
        elif net_type == 'mlp':
            self.generator = Generator_MLP(name='mlp_generator')
            self.discriminator = Discriminator_CNN(name='discriminator')
        self.classifier = Classifier_MLP(self.len_discrete_code + self.len_continuous_code, name='classifier')

        self.build_placeholder()
        self.build_gan()
        self.build_optimizer(optim_type)
        self.build_summary()

        self.build_sess()
        self.build_dirs()
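
A usage sketch for the InfoGAN constructor above (assumed, not from the source): the keyword values mirror the defaults, and the training call follows the same pattern as the CycleGAN example at the end of this page.

# Hypothetical usage; the train(100) call is assumed.
infogan = InfoGAN(optim_type='adam',
                  net_type='cnn',      # 'cnn' or 'mlp'
                  batch_size=64,
                  noise_dim=50,
                  critic_iter=1)
infogan.train(100)
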
Example #3
    def __init__(self, batch_size, version='daegan'):
        super(DAEGAN, self).__init__(batch_size, version)

        self.critic_iter = 3

        self.data_A = datamanager_mnist(datapath=mnist_path, train_ratio=0.8, fold_k=None, expand_dim=True, norm=True, seed=0)
        self.data_B = datamanager_mnist(datapath=colorbackground_mnist_path, train_ratio=0.8, fold_k=None, expand_dim=False, norm=True, seed=1)

        self.sample_A = self.data_A(self.batch_size, phase='test', var_list=['data', 'labels'])
        self.sample_B = self.data_B(self.batch_size, phase='test', var_list=['data', 'labels'])

        self.autoencoder_A = CNN_Generator(output_dim=1, name='ae_A')
        self.autoencoder_B = CNN_Generator(output_dim=3, name='ae_B')

        self.Latent_classifier = MLP_Classifier(output_dim=10, layers=[128]*2, name='latent_c')
        self.Latent_discriminator = CNN_Latent_discriminator(name='latent_d')

        self.build_placeholder()
        self.build_network()
        self.build_optimizer()
        self.build_summary()

        self.build_sess()
        self.build_dirs()
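
A usage sketch for the DAEGAN constructor above (assumed): only batch_size and version are exposed as arguments, and the train() call is an assumption by analogy with Example #5.

# Hypothetical usage; train(100) is assumed.
daegan = DAEGAN(batch_size=64, version='daegan')
daegan.train(100)
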
Example #4
    def __init__(self, batch_size, version="AE"):
        super(AutoEncoder, self).__init__(batch_size, version)

        self.data = datamanager_mnist(train_ratio=0.8,
                                      fold_k=None,
                                      expand_dim=True,
                                      norm=True)
        self.sample_data = self.data(self.batch_size,
                                     phase='test',
                                     var_list=["data", "labels"])

        self.emb_dim = 10

        self.encoder = CNN_Encoder(output_dim=self.emb_dim, sn=False)
        self.decoder = CNN_Decoder(sn=False)

        self.build_placeholder()
        self.build_network()
        self.build_optimizer()

        self.build_sess()
        self.build_dirs()
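
A usage sketch for the AutoEncoder constructor above (assumed): it builds the model with the default version string and assumes a train(epochs) method like the other classes on this page.

# Hypothetical usage; train(100) is assumed.
ae = AutoEncoder(batch_size=64, version="AE")
ae.train(100)
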
Example #5
                        ],
                        feed_dict=feed_dict)
                    print(self.version +
                          " Epoch [%3d/%3d] Iter [%3d/%3d] Da=%.3f Db=%.3f Ga=%.3f Gb=%.3f R=%.3f" %
                          (epoch, epoches, idx, batches_per_epoch, da, db, ga, gb, r))
                    self.writer.add_summary(sum_str, cnt)
            if epoch % 20 == 0:
                self.sample(epoch)
        self.sample(epoch)
        self.saver.save(self.sess,
                        os.path.join(self.model_dir, 'model.ckpt'),
                        global_step=cnt)


data_A = datamanager_mnist(datapath=mnist_path,
                           train_ratio=0.8,
                           fold_k=None,
                           expand_dim=True,
                           norm=True,
                           seed=0)
data_B = datamanager_mnist(datapath=colorbackground_mnist_path,
                           train_ratio=0.8,
                           fold_k=None,
                           expand_dim=False,
                           norm=True,
                           seed=1)
cyclegan = CycleGAN(data_A, data_B, 64, gan_type='', version='cyclegan')
cyclegan.train(100)

cyclegan.plot_loss()