def test_mapos(self):
    """Check that every registered map/inverse-map pair round-trips the data.

    For each (name, forward, backward) triple, applying forward after
    backward must reproduce the mapped values (up to integer rounding of
    the recovered particle counts).
    """
    dataset = load.load_nbody_dataset(
        ncubes=1, spix=32, resolution=256, Mpch=350)
    pairs = self.list_map_to_test()
    raw = dataset.get_all_data().flatten()
    for name, forward, backward in pairs:
        print('Test map: {}'.format(name))
        mapped = forward(raw)
        # Report the raw round-trip error for debugging.
        print(np.sum(np.abs(forward(backward(mapped)) - mapped)))
        # Rounding the recovered values must leave the mapped data
        # essentially unchanged (total absolute error below 1).
        rounded_roundtrip = forward(np.round(backward(mapped)))
        assert (np.sum(np.abs(rounded_roundtrip - mapped)) < 1)
# --- Bookkeeping / logging parameters for the upscaling WGAN run ---
params['optimization'] = params_optimization
params['summary_every'] = 500  # Tensorboard summaries every ** iterations
params['print_every'] = 50  # Console summaries every ** iterations
params['save_every'] = 1000  # Save the model every ** iterations
params['summary_dir'] = os.path.join(global_path, name + '_summary/')
params['save_dir'] = os.path.join(global_path, name + '_checkpoints/')

# Possibly resume a previous run; test_resume may update params from the
# checkpoint directory when a saved state is found.
resume, params = utils.test_resume(try_resume, params)
params['Nstats'] = 30
params['Nstats_cubes'] = 10


class CosmoUpscalePatchWGAN(UpscalePatchWGAN, CosmoWGAN):
    """Patch-based upscaling WGAN combined with the cosmology WGAN mixin."""
    pass


wgan = UpscaleGANsystem(CosmoUpscalePatchWGAN, params)

# 3D patched n-body dataset with augmentation, mapped through `forward`.
dataset = load.load_nbody_dataset(
    spix=ns,
    scaling=1,
    resolution=256,
    Mpch=350,
    patch=True,
    augmentation=True,
    forward_map=forward,
    is_3d=True)

wgan.train(dataset, resume=resume)
def test_cosmo(self):
    """Exercise load_nbody_dataset across 2D/3D, patch, and scaling setups.

    Each configuration is loaded, one or two batches are drawn, and the
    batch shape is checked; the final checks verify that loading at a
    finer scaling and downsampling matches loading at the coarser scaling.
    """
    forward = fmap.forward

    # 2D patched dataset: patches carry 4 channels per sample.
    dataset = load.load_nbody_dataset(
        ncubes=None, spix=32, Mpch=350, forward_map=forward, patch=True)
    batches = dataset.iter(10)
    print(next(batches).shape)
    assert (next(batches).shape == (10, 32, 32, 4))
    del batches, dataset

    # 3D patched dataset: patches carry 8 channels per sample.
    dataset = load.load_nbody_dataset(
        ncubes=None, spix=32, Mpch=350, forward_map=forward,
        patch=True, is_3d=True)
    batches = dataset.iter(4)
    print(next(batches).shape)
    assert (next(batches).shape == (4, 32, 32, 32, 8))
    del batches, dataset

    # Plain 2D dataset: no forward map, no patching, single channel.
    dataset = load.load_nbody_dataset(
        ncubes=None, spix=32, Mpch=70, forward_map=None, patch=False)
    batches = dataset.iter(10)
    print(next(batches).shape)
    assert (next(batches).shape == (10, 32, 32, 1))
    del batches, dataset

    # Two full cubes at spix=256 yield 2 * 256 slices in total.
    dataset = load.load_nbody_dataset(
        ncubes=2, spix=256, Mpch=70, forward_map=forward, patch=False)
    assert (dataset.get_all_data().shape[0] == 256 * 2)
    del dataset

    dataset = load.load_nbody_dataset(
        ncubes=2, spix=128, Mpch=350, forward_map=forward, patch=False)
    batches = dataset.iter(10)
    print(next(batches).shape)
    assert (next(batches).shape == (10, 128, 128, 1))
    del batches, dataset

    # 3D scaling consistency: scaling=2 downsampled by 4 == scaling=8.
    dataset1 = load.load_nbody_dataset(
        ncubes=4, spix=128, Mpch=350, forward_map=forward, patch=False,
        shuffle=False, augmentation=False, scaling=2, is_3d=True)
    fine_iter = dataset1.iter(3)
    fine = next(fine_iter)
    del fine_iter, dataset1
    dataset2 = load.load_nbody_dataset(
        ncubes=4, spix=32, Mpch=350, forward_map=forward, patch=False,
        shuffle=False, augmentation=False, scaling=8, is_3d=True)
    coarse_iter = dataset2.iter(3)
    coarse = next(coarse_iter)
    del coarse_iter, dataset2
    np.testing.assert_allclose(np_downsample_3d(fine, 4), coarse)

    # Same scaling consistency check in 2D.
    dataset1 = load.load_nbody_dataset(
        ncubes=2, spix=128, Mpch=350, forward_map=forward, patch=False,
        shuffle=False, augmentation=False, scaling=2)
    fine_iter = dataset1.iter(10)
    fine = next(fine_iter)
    del fine_iter, dataset1
    dataset2 = load.load_nbody_dataset(
        ncubes=2, spix=32, Mpch=350, forward_map=forward, patch=False,
        shuffle=False, augmentation=False, scaling=8)
    coarse_iter = dataset2.iter(10)
    coarse = next(coarse_iter)
    del coarse_iter, dataset2
    np.testing.assert_allclose(np_downsample_2d(fine, 4), coarse)
try_resume = True # Try to resume previous simulation Mpch = 350 # Type of dataset (select 70 or 350) forward = fmap.stat_forward backward = fmap.stat_backward def non_lin(x): return tf.nn.relu(x) global_path = '../saved_results/nbody-2d/' dataset = load.load_nbody_dataset(ncubes=30, spix=ns, Mpch=Mpch, forward_map=forward) name = 'WGAN{}'.format(ns) + 'test_full_' + '2D' bn = False md = 32 params_discriminator = dict() params_discriminator['stride'] = [1, 2, 2, 2, 1] params_discriminator['nfilter'] = [md, 2 * md, 4 * md, 2 * md, md] params_discriminator['shape'] = [[4, 4], [4, 4], [4, 4], [4, 4], [4, 4]] params_discriminator['batch_norm'] = [bn, bn, bn, bn, bn] params_discriminator['full'] = [] params_discriminator['minibatch_reg'] = False