# ----- load test data --------
# Validation split with a fixed undersampling mask; the transform crops/pads
# to size `n`, flattens the leading dims, and mean-normalizes against the
# target. From each of the first 30 volumes only the central slice is kept,
# and index 2 of each sample (presumably the ground-truth image — confirm
# against the dataset's __getitem__) is stacked into the batch X_0.
test_data_params = {
    "mask_func": mask_func,
    "seed": 1,
    "filter": [filter_acquisition_no_fs],
    "num_sym_slices": 0,
    "multi_slice_gt": False,
    "keep_mask_as_func": True,
    "transform": torchvision.transforms.Compose(
        [
            CropOrPadAndResimulate(n),
            Flatten(0, -3),
            Normalize(reduction="mean", use_target=True),
        ],
    ),
}
test_data = AlmostFixedMaskDataset("val", **test_data_params)

vols = range(30)
slices_in_vols = [test_data.get_slices_in_volume(v) for v in vols]

# One-element range per volume, centered on the middle slice.
slices_selected = []
for lo, hi in slices_in_vols:
    mid = (lo + hi) // 2
    slices_selected.append(range(mid, mid + 1))
samples = np.concatenate(slices_selected)

X_0 = torch.stack([test_data[s][2] for s in samples])
# NOTE(review): fragment of a larger training-config dict — the dict's opening
# (and the key preceding `torch.optim.Adam`, presumably "optimizer") lies
# before this view, and the dict continues past the dangling
# `"val_loader_params": {`. Not valid standalone Python; code left untouched.
# Visible settings: Adam (lr 5e-5), constant LR (StepLR gamma 1.0), no grad
# accumulation, 320x320 crop/resimulate, train-time Jitter, 8 loader workers.
torch.optim.Adam, "optimizer_params": [{ "lr": 5e-5, "eps": 1e-4, "weight_decay": 1e-6 }], "scheduler": torch.optim.lr_scheduler.StepLR, "scheduler_params": { "step_size": 1, "gamma": 1.0 }, "acc_steps": [1], "train_transform": torchvision.transforms.Compose([ CropOrPadAndResimulate((320, 320)), Flatten(0, -3), Normalize(reduction="mean", use_target=True), Jitter(1.5e2, 0.0, 1.0), ]), "val_transform": torchvision.transforms.Compose([ CropOrPadAndResimulate((320, 320)), Flatten(0, -3), Normalize(reduction="mean", use_target=True), ], ), "train_loader_params": { "shuffle": True, "num_workers": 8 }, "val_loader_params": {
# ----- load test data --------
# Same pipeline as the other test-data loader, but the mask function and
# target size come from the `cfg_rob` config object. From each of the first
# 30 volumes only the central slice is kept, and index 2 of each sample
# (presumably the ground-truth image — confirm against the dataset's
# __getitem__) is stacked into the batch X_0.
test_data_params = {
    "mask_func": cfg_rob.mask_func,
    "seed": 1,
    "filter": [filter_acquisition_no_fs],
    "num_sym_slices": 0,
    "multi_slice_gt": False,
    "keep_mask_as_func": True,
    "transform": torchvision.transforms.Compose(
        [
            CropOrPadAndResimulate(cfg_rob.n),
            Flatten(0, -3),
            Normalize(reduction="mean", use_target=True),
        ],
    ),
}
test_data = AlmostFixedMaskDataset("val", **test_data_params)

vols = range(30)
slices_in_vols = [test_data.get_slices_in_volume(v) for v in vols]

# One-element range per volume, centered on the middle slice.
slices_selected = []
for lo, hi in slices_in_vols:
    mid = (lo + hi) // 2
    slices_selected.append(range(mid, mid + 1))
samples = np.concatenate(slices_selected)

X_0 = torch.stack([test_data[s][2] for s in samples])
# NOTE(review): fragment of a larger training-config dict — the dict's opening
# (and the key preceding `torch.optim.Adam`, presumably "optimizer") lies
# before this view, and the dict continues past the dangling
# `"val_loader_params": {`. Not valid standalone Python; code left untouched.
# Visible settings: Adam (lr 1e-4), constant LR (StepLR gamma 1.0), no grad
# accumulation, 368x368 crop/resimulate, train-time Jitter, use_target=False
# normalization, 8 loader workers.
torch.optim.Adam, "optimizer_params": [{ "lr": 1e-4, "eps": 1e-4, "weight_decay": 1e-5 }], "scheduler": torch.optim.lr_scheduler.StepLR, "scheduler_params": { "step_size": 1, "gamma": 1.0 }, "acc_steps": [1], "train_transform": torchvision.transforms.Compose([ CropOrPadAndResimulate((368, 368)), Flatten(0, -3), Normalize(reduction="mean", use_target=False), Jitter(1e1, 0.0, 1.0), ]), "val_transform": torchvision.transforms.Compose([ CropOrPadAndResimulate((368, 368)), Flatten(0, -3), Normalize(reduction="mean", use_target=False), ], ), "train_loader_params": { "shuffle": True, "num_workers": 8 }, "val_loader_params": {