def build_args():
    """Build training arguments for the multicoil brain VarNet leaderboard run."""
    # ------------------------
    # TRAINING ARGUMENTS
    # ------------------------
    # Resolve data/log locations from the optional directory-config YAML.
    path_config = pathlib.Path.cwd() / ".." / ".." / "fastmri_dirs.yaml"
    brain_path = fetch_dir("brain_path", path_config)
    logdir = fetch_dir("log_path", path_config) / "varnet" / "varnet_leaderboard"

    base_parser = ArgumentParser(add_help=False)
    parser = VarNetModule.add_model_specific_args(base_parser)
    parser = Trainer.add_argparse_args(parser)

    backend = "ddp"
    num_gpus = 32  # this was the number of GPUs for training
    batch_size = 1

    # module defaults
    parser.set_defaults(
        num_cascades=12,
        pools=4,
        chans=18,
        sens_pools=4,
        sens_chans=8,
        mask_type="equispaced",
        center_fractions=[0.08, 0.04],  # note: paper used fixed number of lines
        accelerations=[4, 8],  # note: paper trained 4x and 8x separately
        lr=0.0003,
        lr_step_size=40,
        lr_gamma=0.1,
        weight_decay=0.0,
        data_path=brain_path,
        challenge="multicoil",
        exp_dir=logdir,
        exp_name="varnet_leaderboard",
        test_split="test",
        batch_size=batch_size,
    )

    # trainer defaults
    parser.set_defaults(
        gpus=num_gpus,
        default_root_dir=logdir,
        replace_sampler_ddp=(backend != "ddp"),  # custom sampler is used under ddp
        distributed_backend=backend,
        max_epochs=50,
        seed=42,
        deterministic=True,
    )

    parser.add_argument("--mode", default="train", type=str)

    return parser.parse_args()
def build_args():
    """Build training arguments for the single-coil knee U-Net leaderboard run."""
    # ------------------------
    # TRAINING ARGUMENTS
    # ------------------------
    # Resolve data/log locations from the optional directory-config YAML.
    path_config = pathlib.Path.cwd() / ".." / ".." / "fastmri_dirs.yaml"
    knee_path = fetch_dir("knee_path", path_config)
    logdir = fetch_dir("log_path", path_config) / "unet" / "unet_knee_sc_leaderboard"

    base_parser = ArgumentParser(add_help=False)
    parser = UnetModule.add_model_specific_args(base_parser)
    parser = Trainer.add_argparse_args(parser)

    num_gpus = 32  # this was the number of GPUs for training
    backend = "ddp"
    # ddp shards the batch across processes, so each process sees batch_size=1
    batch_size = 1 if backend == "ddp" else num_gpus

    # module defaults
    parser.set_defaults(
        in_chans=1,
        out_chans=1,
        chans=256,
        num_pool_layers=4,
        drop_prob=0.0,
        mask_type="random",
        center_fractions=[0.08, 0.04],
        accelerations=[4, 8],
        lr=0.001,
        lr_step_size=40,
        lr_gamma=0.1,
        weight_decay=0.0,
        data_path=knee_path,
        challenge="singlecoil",
        exp_dir=logdir,
        exp_name="unet_leaderboard",
        test_split="test",
        batch_size=batch_size,
    )

    # trainer defaults
    parser.set_defaults(
        default_root_dir=logdir,
        replace_sampler_ddp=(backend != "ddp"),
        distributed_backend=backend,
        gpus=num_gpus,
        max_epochs=50,
        seed=42,
        deterministic=True,
    )

    parser.add_argument("--mode", default="train", type=str)

    return parser.parse_args()
def build_args():
    """Build training arguments for the multicoil knee VarNet demo run."""
    # ------------------------
    # TRAINING ARGUMENTS
    # ------------------------
    # Resolve data/log locations from the optional directory-config YAML.
    path_config = pathlib.Path.cwd() / ".." / ".." / "fastmri_dirs.yaml"
    knee_path = fetch_dir("knee_path", path_config)
    logdir = fetch_dir("log_path", path_config) / "varnet" / "varnet_demo"

    base_parser = ArgumentParser(add_help=False)
    parser = VarNetModule.add_model_specific_args(base_parser)
    parser = Trainer.add_argparse_args(parser)

    backend = "ddp"
    num_gpus = 2 if backend == "ddp" else 1
    batch_size = 1

    # module defaults
    parser.set_defaults(
        num_cascades=8,
        pools=4,
        chans=18,
        sens_pools=4,
        sens_chans=8,
        mask_type="equispaced",
        center_fractions=[0.08],
        accelerations=[4],
        lr=0.001,
        lr_step_size=40,
        lr_gamma=0.1,
        weight_decay=0.0,
        data_path=knee_path,
        challenge="multicoil",
        exp_dir=logdir,
        exp_name="varnet_demo",
        test_split="test",
        batch_size=batch_size,
    )

    # trainer defaults
    parser.set_defaults(
        gpus=num_gpus,
        default_root_dir=logdir,
        replace_sampler_ddp=(backend != "ddp"),
        distributed_backend=backend,
        seed=42,
        deterministic=True,
    )

    parser.add_argument("--mode", default="train", type=str)

    return parser.parse_args()
def build_args():
    """Build training arguments for the multicoil brain U-Net overfit-on-batch run.

    Runs with ``fast_dev_run=True``, i.e. a single batch through train/val
    for smoke-testing the pipeline.
    """
    # ------------------------
    # TRAINING ARGUMENTS
    # ------------------------
    path_config = pathlib.Path.cwd() / ".." / ".." / "fastmri_dirs.yaml"
    # NOTE(review): the original also fetched "knee_path" here but never used
    # it (training reads the brain data below) — removed as dead code.
    brain_path = fetch_dir("brain_path", path_config)
    logdir = fetch_dir("log_path", path_config) / "unet" / "overfit-on-batch"

    parent_parser = ArgumentParser(add_help=False)
    parser = UnetModule.add_model_specific_args(parent_parser)
    parser = Trainer.add_argparse_args(parser)

    num_gpus = 1
    backend = "ddp"
    # ddp shards the batch across processes, so each process sees batch_size=1
    batch_size = 1 if backend == "ddp" else num_gpus

    # module config
    config = dict(
        in_chans=4,
        out_chans=4,
        chans=32,
        num_pool_layers=4,
        drop_prob=0.0,
        mask_type="equispaced",
        center_fractions=[0.08, 0.04],
        accelerations=[4, 8],
        lr=0.001,
        lr_step_size=40,
        lr_gamma=0.1,
        weight_decay=0.0,
        data_path=brain_path,
        challenge="multicoil",
        exp_dir=logdir,
        exp_name="unet_demo",  # NOTE(review): differs from the logdir name — confirm intended
        test_split="test",
        batch_size=batch_size,
    )
    parser.set_defaults(**config)

    # trainer config
    parser.set_defaults(
        gpus=num_gpus,
        default_root_dir=logdir,
        replace_sampler_ddp=(backend != "ddp"),
        distributed_backend=backend,
        seed=42,
        deterministic=True,
        fast_dev_run=True,  # single-batch smoke test
    )

    parser.add_argument("--mode", default="train", type=str)
    args = parser.parse_args()

    return args
def build_varnet_args():
    """Return canned VarNet arguments for tests (parses an empty argv)."""
    knee_path = fetch_dir("knee_path")
    logdir = fetch_dir("log_path") / "test_dir"

    base_parser = ArgumentParser(add_help=False)
    parser = VarNetModule.add_model_specific_args(base_parser)
    parser = Trainer.add_argparse_args(parser)

    backend = "dp"
    num_gpus = 2 if backend == "ddp" else 1
    batch_size = 1

    # module defaults
    parser.set_defaults(
        num_cascades=8,
        pools=4,
        chans=18,
        sens_pools=4,
        sens_chans=8,
        mask_type="equispaced",
        center_fractions=[0.08],
        accelerations=[4],
        resolution=384,
        lr=0.001,
        lr_step_size=40,
        lr_gamma=0.1,
        weight_decay=0.0,
        data_path=knee_path,
        challenge="multicoil",
        exp_dir=logdir,
        exp_name="varnet_demo",
        test_split="test",
        batch_size=batch_size,
    )

    # trainer defaults
    parser.set_defaults(
        gpus=num_gpus,
        default_root_dir=logdir,
        replace_sampler_ddp=(backend != "ddp"),
        distributed_backend=backend,
    )

    parser.add_argument("--mode", default="train", type=str)

    # empty argv: tests must not be affected by the real command line
    return parser.parse_args([])
def build_unet_args():
    """Return canned U-Net arguments for tests (parses an empty argv)."""
    knee_path = fetch_dir("knee_path")
    logdir = fetch_dir("log_path") / "test_dir"

    base_parser = ArgumentParser(add_help=False)
    parser = UnetModule.add_model_specific_args(base_parser)
    parser = Trainer.add_argparse_args(parser)

    num_gpus = 1
    backend = "dp"
    batch_size = 1 if backend == "ddp" else num_gpus

    # module defaults
    parser.set_defaults(
        in_chans=1,
        out_chans=1,
        chans=32,
        num_pool_layers=4,
        drop_prob=0.0,
        mask_type="random",
        center_fractions=[0.08],
        accelerations=[4],
        resolution=384,
        lr=0.001,
        lr_step_size=40,
        lr_gamma=0.1,
        weight_decay=0.0,
        data_path=knee_path,
        challenge="singlecoil",
        exp_dir=logdir,
        exp_name="unet_demo",
        test_split="test",
        batch_size=batch_size,
    )

    # trainer defaults
    parser.set_defaults(
        gpus=num_gpus,
        default_root_dir=logdir,
        replace_sampler_ddp=(backend != "ddp"),
        distributed_backend=backend,
    )

    parser.add_argument("--mode", default="train", type=str)

    # empty argv: tests must not be affected by the real command line
    return parser.parse_args([])
def test_combined_dataset_lengths(knee_split_lens, brain_split_lens, skip_data_test):
    """A combined knee+brain dataset must be as long as the sum of its parts."""
    if skip_data_test:
        pytest.skip("config set to skip")

    knee_path = fetch_dir("knee_path")
    brain_path = fetch_dir("brain_path")

    # check every pairing of knee split with brain split
    for knee_split, expected_knee in knee_split_lens.items():
        for brain_split, expected_brain in brain_split_lens.items():
            combined = CombinedSliceDataset(
                [knee_path / knee_split, brain_path / brain_split],
                transforms=[None, None],
                challenges=["multicoil", "multicoil"],
            )

            assert len(combined) == expected_knee + expected_brain
def test_brain_dataset_lengths(brain_split_lens, skip_data_test):
    """Each brain split must yield the expected number of slices."""
    if skip_data_test:
        pytest.skip("config set to skip")

    brain_path = fetch_dir("brain_path")

    for split, expected_len in brain_split_lens.items():
        dataset = SliceDataset(
            brain_path / split, transform=None, challenge="multicoil"
        )

        assert len(dataset) == expected_len
def test_knee_dataset_lengths(knee_split_lens, skip_data_test):
    """Each knee split must yield the expected number of slices."""
    if skip_data_test:
        pytest.skip("config set to skip")

    knee_path = fetch_dir("knee_path")

    for split, expected_len in knee_split_lens.items():
        # the split directory name encodes whether it is multicoil data
        if "multicoil" in split:
            challenge = "multicoil"
        else:
            challenge = "singlecoil"

        dataset = SliceDataset(knee_path / split, transform=None, challenge=challenge)

        assert len(dataset) == expected_len
def build_args():
    """Build training arguments for the VarNet demo (DataModule-based script).

    Also configures checkpointing under ``<default_root_dir>/checkpoints`` and,
    when no explicit resume checkpoint is given, resumes from the most recent
    one found there.
    """
    parser = ArgumentParser()

    # basic args
    path_config = pathlib.Path("../../fastmri_dirs.yaml")
    backend = "ddp"
    num_gpus = 2 if backend == "ddp" else 1
    batch_size = 1

    # set defaults based on optional directory config
    data_path = fetch_dir("knee_path", path_config)
    default_root_dir = fetch_dir("log_path", path_config) / "varnet" / "varnet_demo"

    # client arguments
    parser.add_argument(
        "--mode",
        default="train",
        choices=("train", "test"),
        type=str,
        help="Operation mode",
    )

    # data transform params
    parser.add_argument(
        "--mask_type",
        choices=("random", "equispaced"),
        default="equispaced",
        type=str,
        help="Type of k-space mask",
    )
    parser.add_argument(
        "--center_fractions",
        nargs="+",
        default=[0.08],
        type=float,
        help="Number of center lines to use in mask",
    )
    parser.add_argument(
        "--accelerations",
        nargs="+",
        default=[4],
        type=int,
        help="Acceleration rates to use for masks",
    )

    # data config
    parser = FastMriDataModule.add_data_specific_args(parser)
    parser.set_defaults(
        data_path=data_path,  # path to fastMRI data
        mask_type="equispaced",  # VarNet uses equispaced mask
        challenge="multicoil",  # only multicoil implemented for VarNet
        batch_size=batch_size,  # number of samples per batch
        test_path=None,  # path for test split, overwrites data_path
    )

    # module config
    parser = VarNetModule.add_model_specific_args(parser)
    parser.set_defaults(
        num_cascades=8,  # number of unrolled iterations
        pools=4,  # number of pooling layers for U-Net
        chans=18,  # number of top-level channels for U-Net
        sens_pools=4,  # number of pooling layers for sense est. U-Net
        sens_chans=8,  # number of top-level channels for sense est. U-Net
        lr=0.001,  # Adam learning rate
        lr_step_size=40,  # epoch at which to decrease learning rate
        lr_gamma=0.1,  # extent to which to decrease learning rate
        weight_decay=0.0,  # weight regularization strength
    )

    # trainer config
    parser = pl.Trainer.add_argparse_args(parser)
    parser.set_defaults(
        gpus=num_gpus,  # number of gpus to use
        replace_sampler_ddp=False,  # this is necessary for volume dispatch during val
        accelerator=backend,  # what distributed version to use
        seed=42,  # random seed
        deterministic=True,  # makes things slower, but deterministic
        default_root_dir=default_root_dir,  # directory for logs and checkpoints
        max_epochs=50,  # max number of epochs
    )

    args = parser.parse_args()

    # configure checkpointing in checkpoint_dir
    checkpoint_dir = args.default_root_dir / "checkpoints"
    if not checkpoint_dir.exists():
        checkpoint_dir.mkdir(parents=True)

    # BUG FIX: save_top_k expects an int; the original passed True and relied
    # on bool->int coercion. Stated explicitly as 1 (same effective behavior).
    # NOTE(review): the sibling U-Net script uses dirpath= instead of
    # filepath= — these target different Lightning versions; confirm which
    # version this script runs against.
    args.checkpoint_callback = pl.callbacks.ModelCheckpoint(
        filepath=args.default_root_dir / "checkpoints",
        save_top_k=1,
        verbose=True,
        monitor="validation_loss",
        mode="min",
        prefix="",
    )

    # set default checkpoint if one exists in our checkpoint directory
    if args.resume_from_checkpoint is None:
        ckpt_list = sorted(checkpoint_dir.glob("*.ckpt"), key=os.path.getmtime)
        if ckpt_list:
            args.resume_from_checkpoint = str(ckpt_list[-1])

    return args
def build_args():
    """Build training arguments for the U-Net demo (DataModule-based script).

    Also configures checkpointing under ``<default_root_dir>/checkpoints`` and,
    when no explicit resume checkpoint is given, resumes from the most recent
    one found there.
    """
    parser = ArgumentParser()

    # basic args
    path_config = pathlib.Path("../../fastmri_dirs.yaml")
    num_gpus = 0
    backend = "ddp_cpu"
    batch_size = 1 if backend == "ddp_cpu" else num_gpus

    # set defaults based on optional directory config
    data_path = fetch_dir("knee_path", path_config)
    default_root_dir = fetch_dir("log_path", path_config) / "unet" / "unet_demo"

    # client arguments
    parser.add_argument(
        "--mode",
        default="train",
        choices=("train", "test"),
        type=str,
        help="Operation mode",
    )

    # data transform params
    parser.add_argument(
        "--mask_type",
        choices=("random", "equispaced"),
        default="random",
        type=str,
        help="Type of k-space mask",
    )
    parser.add_argument(
        "--center_fractions",
        nargs="+",
        default=[0.08],
        type=float,
        help="Number of center lines to use in mask",
    )
    parser.add_argument(
        "--proportion",
        default=0.1,
        type=float,
        help="Proportion of label data",
    )
    parser.add_argument(
        "--accelerations",
        nargs="+",
        default=[4],
        type=int,
        help="Acceleration rates to use for masks",
    )

    # data config with path to fastMRI data and batch size
    parser = FastMriDataModule.add_data_specific_args(parser)
    parser.set_defaults(data_path=data_path, batch_size=batch_size, test_path=None)

    # module config
    parser = UnetModule.add_model_specific_args(parser)
    parser.set_defaults(
        in_chans=1,  # number of input channels to U-Net
        out_chans=1,  # number of output channels to U-Net
        chans=32,  # number of top-level U-Net channels
        num_pool_layers=4,  # number of U-Net pooling layers
        drop_prob=0.0,  # dropout probability
        lr=0.001,  # RMSProp learning rate
        lr_step_size=40,  # epoch at which to decrease learning rate
        lr_gamma=0.1,  # extent to which to decrease learning rate
        weight_decay=0.0,  # weight decay regularization strength
    )

    # trainer config
    parser = pl.Trainer.add_argparse_args(parser)
    parser.set_defaults(
        gpus=num_gpus,  # number of gpus to use
        replace_sampler_ddp=False,  # this is necessary for volume dispatch during val
        accelerator=backend,  # what distributed version to use
        seed=42,  # random seed
        deterministic=True,  # makes things slower, but deterministic
        default_root_dir=default_root_dir,  # directory for logs and checkpoints
        max_epochs=50,  # max number of epochs
    )

    args = parser.parse_args()

    # configure checkpointing in checkpoint_dir
    checkpoint_dir = args.default_root_dir / "checkpoints"
    if not checkpoint_dir.exists():
        checkpoint_dir.mkdir(parents=True)

    # BUG FIX: save_top_k expects an int; the original passed True and relied
    # on bool->int coercion. Stated explicitly as 1 (same effective behavior).
    args.checkpoint_callback = pl.callbacks.ModelCheckpoint(
        dirpath=args.default_root_dir / "checkpoints",
        save_top_k=1,
        verbose=True,
        monitor="validation_loss",
        mode="min",
        prefix="",
    )

    # set default checkpoint if one exists in our checkpoint directory
    if args.resume_from_checkpoint is None:
        ckpt_list = sorted(checkpoint_dir.glob("*.ckpt"), key=os.path.getmtime)
        if ckpt_list:
            args.resume_from_checkpoint = str(ckpt_list[-1])

    return args
def build_args():
    """Build training arguments for the NNRET demo (DataModule-based script)."""
    parser = ArgumentParser()

    # basic args
    path_config = pathlib.Path("../../fastmri_dirs.yaml")
    # BUG FIX: `backend` and `num_gpus` were referenced below (batch_size and
    # the trainer defaults) without ever being assigned, raising NameError at
    # runtime. Defaults mirror the sibling demo scripts — TODO confirm the
    # intended values for this script.
    backend = "ddp"
    num_gpus = 2 if backend == "ddp" else 1
    batch_size = 1 if backend == "ddp" else num_gpus

    # set defaults based on optional directory config
    data_path = fetch_dir("knee_path", path_config)
    default_root_dir = fetch_dir("log_path", path_config) / "nnret" / "nnret_demo"

    # client arguments
    parser.add_argument(
        "--mode",
        default="train",
        choices=("train", "test"),
        type=str,
        help="Operation mode",
    )

    # data transform params
    parser.add_argument(
        "--mask_type",
        choices=("random", "equispaced"),
        default="random",
        type=str,
        help="Type of k-space mask",
    )
    parser.add_argument(
        "--center_fractions",
        nargs="+",
        default=[0.08],
        type=float,
        help="Number of center lines to use in mask",
    )
    parser.add_argument(
        "--accelerations",
        nargs="+",
        default=[4],
        type=int,
        help="Acceleration rates to use for masks",
    )

    # data config with path to fastMRI data and batch size
    parser = FastMriDataModule.add_data_specific_args(parser)
    parser.set_defaults(data_path=data_path, batch_size=batch_size, test_path=None)

    # module config
    parser = NnRetModule.add_model_specific_args(parser)
    parser.set_defaults(
        in_chans=1,  # number of input channels to NNRET
        out_chans=1,  # number of output channels to NNRET
        chans=32,  # number of top-level NNRET channels
        num_pool_layers=4,  # number of NNRET pooling layers
        drop_prob=0.0,  # dropout probability
        lr=0.001,  # RMSProp learning rate
        lr_step_size=40,  # epoch at which to decrease learning rate
        lr_gamma=0.1,  # extent to which to decrease learning rate
        weight_decay=0.0,  # weight decay regularization strength
    )

    # trainer config
    parser = pl.Trainer.add_argparse_args(parser)
    parser.set_defaults(
        gpus=num_gpus,  # number of gpus to use
        replace_sampler_ddp=False,  # this is necessary for volume dispatch during val
        accelerator=backend,  # what distributed version to use
        seed=42,  # random seed
        deterministic=True,  # makes things slower, but deterministic
        default_root_dir=default_root_dir,  # directory for logs and checkpoints
        max_epochs=1,  # max number of epochs
    )

    args = parser.parse_args()

    return args
def build_args():
    """Build training arguments for the 2x super-resolution MINet run."""
    # ------------------------
    # TRAINING ARGUMENTS
    # ------------------------
    # Resolve data/log locations from the directory-config YAML.
    path_config = pathlib.Path.cwd() / "mriSR_dirs.yaml"
    knee_path = fetch_dir("knee_path", path_config)
    logdir = fetch_dir("log_path", path_config) / "minet" / "2x_SR"

    base_parser = ArgumentParser(add_help=False)
    parser = SRModule.add_model_specific_args(base_parser)
    parser = Trainer.add_argparse_args(parser)

    num_gpus = 1
    backend = "ddp"
    batch_size = 4 if backend == "ddp" else num_gpus

    # module defaults
    parser.set_defaults(
        in_chans=1,
        out_chans=1,
        chans=32,
        num_pool_layers=4,
        drop_prob=0.0,
        mask_type="random",
        center_fractions=[0.08],
        accelerations=[4],
        n_channels_in=1,
        n_channels_out=1,
        n_resgroups=6,
        n_resblocks=6,
        n_feats=64,
        lr=0.00001,
        lr_step_size=40,
        lr_gamma=0.1,
        weight_decay=0.0,
        data_path=knee_path,
        challenge="singlecoil",
        exp_dir=logdir,
        exp_name="unet_demo",  # NOTE(review): name looks copied from the U-Net script — confirm
        test_split="test",
        batch_size=batch_size,
    )

    # trainer defaults
    parser.set_defaults(
        gpus=num_gpus,
        max_epochs=35,
        default_root_dir=logdir,
        replace_sampler_ddp=(backend != "ddp"),
        distributed_backend=backend,
        seed=42,
        deterministic=True,
        # resume_from_checkpoint = '/checkpoints/epoch=34.ckpt'
    )

    parser.add_argument("--mode", default="train", type=str)

    return parser.parse_args()