Example #1
0
                                        target_transform=transform,
                                        patch_size=args.imsize,
                                        test_fov=args.test_group)

# Test set mixing several noise levels, built with the same transform and
# patch size as training. NOTE(review): load_denoising_test_mix is defined
# elsewhere -- assumed to return a DataLoader whose dataset yields
# (noisy, clean) pairs, as the [0]/[1] indexing further below suggests.
test_loader = load_denoising_test_mix(args.data_root,
                                      batch_size=args.test_batch_size,
                                      noise_levels=args.noise_levels_test,
                                      transform=transform,
                                      patch_size=args.imsize)

# Adam with L2 weight decay; beta2=0.99 is slightly lower than the
# (0.9, 0.999) default, shortening the second-moment averaging window.
optimizer = torch.optim.Adam(model.parameters(),
                             lr=args.lr,
                             weight_decay=args.wd,
                             betas=[0.9, 0.99])
# scheduler = ReduceLROnPlateau(optimizer, mode='min', factor=0.2, patience=10)
# One-cycle LR policy: start at lr_max/div_factor, warm up over the first
# 30% of training (pct_start), then anneal. NOTE(review): OneCycleScheduler
# is project-local -- semantics inferred from its fastai-style arguments;
# confirm against its definition.
scheduler = OneCycleScheduler(lr_max=args.lr, div_factor=10, pct_start=0.3)

# 'four_crop' presumably yields 4 patches per underlying sample, so sample
# counts are scaled by 4 in that case -- confirm against the transform's
# definition. Pixel totals below look intended for per-pixel loss
# normalization; verify against the training loop.
multiplier = 4 if args.transform == 'four_crop' else 1
n_train_samples = len(train_loader.dataset) * multiplier
n_test_samples = len(test_loader.dataset) * multiplier
# numel() of the first sample's input tensor; assumes all samples share
# one patch size (consistent with the fixed patch_size passed above).
pixels_per_sample = train_loader.dataset[0][0].numel()
n_train_pixels = n_train_samples * pixels_per_sample
n_test_pixels = n_test_samples * pixels_per_sample

# Seed the global NumPy RNG so the same 8 test samples are picked on every
# run (reproducible snapshots across restarts). Changing the seed or the
# call order would change which samples are selected.
np.random.seed(113)
fixed_idx = np.random.permutation(len(test_loader.dataset))[:8]
print(f'fixed test index: {fixed_idx}')

# Stack the noisy inputs (element [0] of each dataset item) of the fixed
# samples into a single batch tensor.
fixed_test_noisy = torch.stack([(test_loader.dataset[i][0])
                                for i in fixed_idx])
fixed_test_clean = torch.stack([(test_loader.dataset[i][1])
Example #2
0
# G, B0, dNdx, dNdy, D, penal = NonLinearConstant(device)

# Wrap the raw data as a float32 tensor on the target device; `ref` is
# passed through as-is. NOTE(review): TensorDataset requires every element
# to be a tensor, so `ref` must already be one -- confirm upstream.
data_tuple = (torch.FloatTensor(data).to(device), ref)
# drop_last=True discards the final partial batch, so every batch has
# exactly batch_size samples (simplifies per-batch bookkeeping).
train_loader = DataLoader(TensorDataset(*data_tuple),
                          batch_size=args.batch_size,
                          shuffle=True,
                          drop_last=True)

# SGD with momentum (SGDM); no weight decay here, unlike the commented-out
# Adam alternative below.
optimizer = torch.optim.SGD(model.parameters(), lr=args.lr, momentum=0.9)

# optimizer = optim.Adam(model.parameters(), lr=args.lr,
#                     weight_decay=args.weight_decay)
# One-cycle LR policy with warm-up fraction and initial-LR divisor taken
# from the CLI. NOTE(review): OneCycleScheduler is project-local -- confirm
# its argument semantics against its definition.
scheduler = OneCycleScheduler(lr_max=args.lr,
                              div_factor=args.lr_div,
                              pct_start=args.lr_pct)

# Training history: one empty list per tracked metric, appended to during
# training (total loss, PDE residuals, boundary loss, and relative L2
# errors of the displacement/stress fields).
logger = {
    metric: []
    for metric in (
        'loss_train',
        'loss_pde1',
        'loss_pde2',
        'loss_b',
        'u_l2loss',
        'ux_l2loss',
        'uy_l2loss',
        's_l2loss',
    )
}

print('Start training...................................................')
# Resume at the epoch after the loaded checkpoint, or at epoch 1 when
# training from scratch (no --ckpt_epoch given).
start_epoch = 1 if args.ckpt_epoch is None else args.ckpt_epoch + 1
# Wall-clock start time, used to report total training duration.
tic = time.time()