    'weight_decay': 0,
    'amsgrad': False}

loss_params = {'n_critic': 5,
               'grad_lambda': 10,
               'l1_lambda': (1e4) / 0.05}

paper_opts = adam_opts
paper_opts['betas'] = (0.5, 0.999)
paper_opts['lr'] = 0.0002

range_compress_transform, range_compress_inv_transform = \
    create_range_compress_transforms(k_values={"dm": 4, "pressure": 4},
                                     modes={'dm': 'x/(1+x)', 'pressure': 'log'})

transform = chain_transformations([range_compress_transform, atleast_3d])
inv_transform = chain_transformations([squeeze, range_compress_inv_transform])


def Schedule(name, transform=transform, inv_transform=inv_transform,
             loss_params=loss_params, paper_opts=paper_opts,
             epoch_end=100, n_test=64):
    schedule = {
        'type': 'translator',
        'transform': transform,
        'inv_transform': inv_transform,
import os

import numpy as np

folder = os.path.basename(os.path.dirname(__file__))
subfolder = os.path.splitext(os.path.basename(__file__))[0]
name = '/' + folder + '/' + subfolder + '/'

from src.configs.schedules.round_16.stock import Schedule
from src.configs.resnet.dim256x1 import g_structure
from src.configs.patchgan.dim256x2_70_nobn_nosig import d_structure

fc_transform, fc_transform_inv = create_fcs(k_values={'dm': 2,
                                                      'pressure': 4},
                                            scale=1.75, shift=-1)
transform = chain_transformations([fc_transform, atleast_3d])
inv_transform = chain_transformations([squeeze, fc_transform_inv])

schedule = Schedule(name)
schedule['sample_interval'] = 100
schedule['batch_size'] = 4
schedule['decay_iter'] = 10
schedule['g_optim_opts']['lr'] = 0.0002
schedule['d_optim_opts']['lr'] = 0.0002
schedule['save_summary']['iters'] = [1] + np.arange(0, 10000, 50).tolist()
schedule['transform'] = transform
schedule['inv_transform'] = inv_transform
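# Illustrative example (hypothetical path, not taken from the repository): with the
# path derivation above, a config file located at
#     .../src/configs/round_16/example_config.py
# gives folder == 'round_16', subfolder == 'example_config', and therefore
# name == '/round_16/example_config/', which is the identifier passed to Schedule(name).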
# "pressure" : (1,0)}, # modes={"dm":"x/(1+x)", # "pressure" : "x/(1+x)"}, eps=1e-4) # range_compress_transform, range_compress_inv_transform = data_transforms.create_range_compress_transforms( # k_values={"dm" : 1.5, # "pressure" : 1}, # modes={"dm":"x/(1+x)", # "pressure" : "1/x"}) with open(os.path.join(data_path, "train_files_info.pickle"), "rb") as f: training_files_info = pickle.load(f) transform = data_transforms.chain_transformations([ range_compress_transform, data_transforms.atleast_3d, ]) inv_transform = data_transforms.chain_transformations([ data_transforms.squeeze, range_compress_inv_transform, ]) training_dataset = datasets.BAHAMASDataset(files=training_files_info, root_path=data_path, redshifts=redshifts, label_fields=label_fields, n_stack=n_training_stack, transform=transform, inverse_transform=inv_transform, n_feature_per_field=n_scale,
split_scale_transform, inv_split_scale_transform = \
    data_transforms.create_split_scale_transform(n_scale=2, step_size=8,
                                                 include_original=False,
                                                 truncate=2.0)
fc_transform, fc_transform_inv = create_fcs(k_values={'dm': 2,
                                                      'pressure': 4},
                                            scale=1.75, shift=-1)

transform = data_transforms.chain_transformations([
    split_scale_transform,
    fc_transform,
    data_transforms.atleast_3d,
])
inv_transform = data_transforms.chain_transformations([
    data_transforms.squeeze,
    fc_transform_inv,
    inv_split_scale_transform,
])

schedule = Schedule(name)
schedule['sample_interval'] = 100
schedule['batch_size'] = 4
schedule['decay_iter'] = 10
schedule['g_optim_opts']['lr'] = 0.0002
schedule['d_optim_opts']['lr'] = 0.0002
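# Minimal sketch (an assumption, not the project's implementation): the forward and
# inverse chains above only undo each other if chain_transformations composes its
# callables left to right, which is consistent with the inverse chain listing its
# steps in the reverse order of the forward chain. The helper name is hypothetical.
def _chain_sketch(transformations):
    def chained(x):
        for t in transformations:  # apply each step in the order it was listed
            x = t(x)
        return x
    return chained

# Under this assumption, _chain_sketch([split_scale_transform, fc_transform,
# data_transforms.atleast_3d]) behaves like the forward `transform` above, and the
# inverse chain unwinds it step by step in reverse.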