Example #1
    def test_conditionalwgan(self):

        bn = False
        ns = 16

        # Parameters for the generator
        params_generator = dict()
        params_generator['latent_dim'] = 126
        params_generator['stride'] = [1, 2, 1, 1]
        params_generator['nfilter'] = [32, 64, 32, 1]
        params_generator['shape'] = [[5, 5], [5, 5], [5, 5], [5, 5]]
        params_generator['batch_norm'] = [bn, bn, bn]
        params_generator['full'] = [8 * 8 * 32]
        params_generator['non_lin'] = tf.nn.relu
        params_generator['in_conv_shape'] = [8, 8]
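        # Note (inferred, not part of the original test): the dense 8*8*32 output is
        # presumably reshaped to in_conv_shape (8, 8) with 32 channels, and the single
        # stride-2 layer then upsamples 8 -> 16 to match the image size ns.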

        # Parameters for the discriminator
        params_discriminator = dict()
        params_discriminator['stride'] = [2, 2, 1]
        params_discriminator['nfilter'] = [16, 64, 32]
        params_discriminator['shape'] = [[5, 5], [5, 5], [3, 3]]
        params_discriminator['batch_norm'] = [bn, bn, bn]
        params_discriminator['full'] = [512, 128, 32]
        params_discriminator['minibatch_reg'] = False

        # Optimization parameters
        d_opt = dict()
        d_opt['optimizer'] = "rmsprop"
        d_opt['learning_rate'] = 3e-5
        params_optimization = dict()
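        # n_critic: number of discriminator (critic) updates per generator update,
        # as in standard WGAN training.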
        params_optimization['n_critic'] = 5
        params_optimization['batch_size'] = 8
        params_optimization['epoch'] = 1

        # all parameters
        params = dict()
        params['net'] = dict()  # All the parameters for the model
        params['net']['generator'] = params_generator
        params['net']['discriminator'] = params_discriminator
        params['net']['shape'] = [ns, ns, 1]  # Shape of the image
        params['net']['gamma_gp'] = 10  # Gradient penalty

        # Conditional params
        params['net']['prior_normalization'] = False
        params['net']['cond_params'] = 2
        params['net']['init_range'] = [[0, 1], [0, 1]]
        params['net']['prior_distribution'] = "gaussian_length"
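        # final_range scales the conditional range by sqrt(latent_dim); the length of a
        # d-dimensional standard Gaussian concentrates around sqrt(d), which presumably
        # motivates this scaling for the "gaussian_length" prior above.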
        params['net']['final_range'] = [
            0.1 * np.sqrt(params_generator['latent_dim']),
            1 * np.sqrt(params_generator['latent_dim'])
        ]

        params['optimization'] = params_optimization
        params['summary_every'] = 4
        params['save_every'] = 5
        params['print_every'] = 3

        X = np.random.rand(101, ns, ns)
        parameters = np.random.rand(101, 2)
        dataset = Dataset_parameters(X, parameters)
        wgan = GANsystem(ConditionalParamWGAN, params)
        wgan.train(dataset)
        img = wgan.generate(N=16,
                            z=wgan.net.sample_latent(bs=16,
                                                     params=np.random.rand(16, 2)))
        assert (img.shape[0] == 16 and img.shape[1] == ns
                and img.shape[2] == ns and img.shape[3] == 1)
Example #2
    def test_default_params_lapgan(self):
        obj = GANsystem(LapWGAN)
Example #3
    def test_default_params_patchgan(self):
        obj = GANsystem(UpscalePatchWGAN)
Example #4
        print("consistency_contribution", consistency_contribution)
        self._R_Con = self.consistency((self.X_real[:, :, :, 0] - 1) * 5)
        self._F_Con = self.consistency((self.X_fake[:, :, :, 0] - 1) * 5)
        self._mean_R_Con, self._std_R_Con = tf.nn.moments(self._R_Con,
                                                          axes=[0])
        self._mean_F_Con, self._std_F_Con = tf.nn.moments(self._F_Con,
                                                          axes=[0])

        self._G_Reg = tf.abs(self._mean_R_Con - self._mean_F_Con)
        self._G_loss += consistency_contribution * self._G_Reg

    def _build_image_summary(self):
        vmin = tf.reduce_min(self.X_real)
        vmax = tf.reduce_max(self.X_real)
        X_real = self.X_real[:, :, :, 0]
        X_fake = self.X_fake[:, :, :, 0]

        tf.summary.image("images/Real_Image",
                         colorize(X_real, vmin, vmax),
                         max_outputs=4,
                         collections=['model'])
        tf.summary.image("images/Fake_Image",
                         colorize(X_fake, vmin, vmax),
                         max_outputs=4,
                         collections=['model'])


wgan = GANsystem(ModSpectrogramGAN, params)

wgan.train(dataset, resume=resume)
Example #5
    def test_default_params_wgan(self):
        obj = GANsystem(WGAN)
Example #6
        params['net']['loss_type'] = 'wasserstein'

        params['optimization'] = params_optimization
        params['summary_every'] = 100  # Tensorboard summaries every ** iterations
        params['print_every'] = 50  # Console summaries every ** iterations
        params['save_every'] = 1000  # Save the model every ** iterations
        params['summary_dir'] = os.path.join(global_path, name + '_summary/')
        params['save_dir'] = os.path.join(global_path, name + '_checkpoints/')
        params['Nstats'] = 0

        resume, params = utils.test_resume(True, params)

        # Build the model
        print('Load the model')
        wgan = GANsystem(InpaintingGAN, params)

        # Generate new samples
        print('Generate new samples')
        real_signals = dataset.get_samples(N=N_f)
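        # For the 'extend' model, build the conditioning borders from the real signals:
        # signal_split appears to hold the lengths of consecutive signal segments, and
        # the two border pairs are stacked along a new last axis for the generator.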
        if model == 'extend':
            border1 = real_signals[:, signal_split[0]:(signal_split[0] +
                                                       signal_split[1])]
            border2 = real_signals[:, -(signal_split[3] +
                                        signal_split[4]):-signal_split[4]]
            border3 = real_signals[:, :(signal_split[0] + signal_split[1])]
            border4 = real_signals[:, -(signal_split[3] + signal_split[4]):]
            borders1 = np.stack([border1, border2], axis=2)
            borders2 = np.stack([border3, border4], axis=2)
            fake_signals = np.squeeze(wgan.generate(N=N_f,
                                                    borders1=borders1,
                                                    borders2=borders2))
Example #7
params['net']['loss_type'] = 'wasserstein'

params['optimization'] = params_optimization
params['summary_every'] = 100  # Tensorboard summaries every ** iterations
params['print_every'] = 50  # Console summaries every ** iterations
params['save_every'] = 1000  # Save the model every ** iterations
params['summary_dir'] = os.path.join(global_path, name + '_summary/')
params['save_dir'] = os.path.join(global_path, name + '_checkpoints/')
params['Nstats'] = 0

resume, params = utils.test_resume(False, params)

#%%
# # Build the model

wgan = GANsystem(InpaintingGAN, params)

# # Train the model

wgan.train(dataset, resume=resume)

end = time.time()
print('Elapsed time: {} minutes'.format((end - start) / 60))

# =============================================================================
# #%%
# # # Generate new samples
# # To have meaningful statistics, be sure to generate enough samples
# # * 2000 : 32 x 32
# # * 500 : 64 x 64
# # * 200 : 128 x 128
Example #8
params['optimization'] = params_optimization
params['summary_every'] = 100  # Tensorboard summaries every ** iterations
params['print_every'] = 50  # Console summaries every ** iterations
params['save_every'] = 1000  # Save the model every ** iterations
params['summary_dir'] = os.path.join(global_path, name + '_summary/')
params['save_dir'] = os.path.join(global_path, name + '_checkpoints/')
params['Nstats'] = 500

resume, params = utils.test_resume(True, params)
params['optimization']['epoch'] = 10000
params['summary_every'] = 250
params['print_every'] = 250
params['save_dir'] = os.path.join(global_path, name + '_checkpoints/')

wgan = GANsystem(SpectrogramGAN, params)

nsamples = 256
nlatent = 100


def clip_dist2(nsamples, nlatent, m=2.5):
    shape = [nsamples, nlatent]
    z = np.random.randn(*shape)
    support = np.logical_or(z < -m, z > m)
    while np.sum(support):
        z[support] = np.random.randn(*shape)[support]
        support = np.logical_or(z < -m, z > m)
    return z
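
# A minimal sanity check for clip_dist2 (assumes only numpy, already imported as np):
# every entry of the resampled latent matrix must lie inside [-m, m].
z = clip_dist2(nsamples, nlatent, m=2.5)
assert z.shape == (nsamples, nlatent)
assert np.all(np.abs(z) <= 2.5)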

Example #9
    def test_default_params(self):
        wgan = GANsystem(WGAN)
        wgan = DualGANsystem(WGAN)
Example #10
params['net']['gamma_gp'] = 10  # Gradient penalty

params['optimization'] = params_optimization
params['summary_every'] = 500  # Tensorboard summaries every ** iterations
params['print_every'] = 50  # Console summaries every ** iterations
params['save_every'] = 1000  # Save the model every ** iterations
params['summary_dir'] = os.path.join(global_path, name + '_summary/')
params['save_dir'] = os.path.join(global_path, name + '_checkpoints/')
params['Nstats'] = 100
params['Nstats_cubes'] = 10

resume, params = utils.test_resume(try_resume, params)


# Combine the two model mixins: UpscalePatchWGAN (the patch-based upscaling model used
# in Example #3) and CosmoWGAN (which carries the cosmology parameters, see Example #12).
# The empty class body is intentional; all behaviour comes from the parent classes.
class CosmoUpscalePatchWGAN(UpscalePatchWGAN, CosmoWGAN):
    pass


wgan = GANsystem(CosmoUpscalePatchWGAN, params)

dataset = load.load_nbody_dataset(spix=ns,
                                  scaling=1,
                                  resolution=256,
                                  Mpch=350,
                                  patch=True,
                                  augmentation=True,
                                  forward_map=forward,
                                  is_3d=True)

wgan.train(dataset, resume=resume)
Example #11
    def test_default_params(self):
        wgan = GANsystem(WGAN)
        wgan = PaulinaGANsystem(WGAN)
Example #12
params_cosmology['forward_map'] = forward
params_cosmology['backward_map'] = backward

# all parameters
params = dict()
params['net'] = dict()  # All the parameters for the model
params['net']['generator'] = params_generator
params['net']['discriminator'] = params_discriminator
params['net']['cosmology'] = params_cosmology  # Parameters for the cosmological summaries
params['net']['prior_distribution'] = 'gaussian'
params['net']['shape'] = [ns, ns, 1]  # Shape of the image
params['net']['loss_type'] = 'wasserstein'  # loss ('hinge' or 'wasserstein')
params['net']['gamma_gp'] = 10  # Gradient penalty

params['optimization'] = params_optimization
params['summary_every'] = 500  # Tensorboard summaries every ** iterations
params['print_every'] = 50  # Console summaries every ** iterations
params['save_every'] = 2000  # Save the model every ** iterations
params['summary_dir'] = os.path.join(global_path, name + '_summary/')
params['save_dir'] = os.path.join(global_path, name + '_checkpoints/')
params['Nstats'] = (64 * 32 * 32) // ns

resume, params = utils.test_resume(try_resume, params)
# If a model is reloaded and some parameters have to be changed, it should be done here.
# For example, setting the number of epochs to 5 would be:
# params['optimization']['epoch'] = 5

wgan = GANsystem(CosmoWGAN, params)

wgan.train(dataset, resume=resume)
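
# A hedged follow-up sketch (not part of the original script): once training has finished,
# draw samples from the trained generator. wgan.generate(N=...) is the call used in
# Example #1; it is assumed here that the latent vectors are sampled automatically when
# z is not supplied and that the output has shape (N, ns, ns, 1).
gen_images = wgan.generate(N=16)
print('Generated images with shape {}'.format(gen_images.shape))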