def init_experiment(experiment='2D'):
    """
    Parse command-line arguments, load the experiment configuration and
    set up the snapshot directory for a training run.

    :param experiment: Unused placeholder (kept for backward compatibility
                       with existing callers).
    :return: Tuple of (args, config, device, snapshots_dir, snapshot_name).
    """

    def _str2bool(value):
        # BUG FIX: argparse's ``type=bool`` treats every non-empty string
        # (including the literal 'False') as True; parse the text instead.
        return str(value).lower() in ('true', '1', 'yes', 'y')

    # Input arguments
    # TODO check paths and input variables
    parser = argparse.ArgumentParser()
    parser.add_argument('--data_location', type=pathlib.Path,
                        default='/data/Repositories/HMDS_orientation/Data/train_rotated/')
    parser.add_argument('--workdir', type=pathlib.Path,
                        default='/data/Repositories/HMDS_collagen/workdir/')
    parser.add_argument('--experiment',
                        default='/data/Repositories/HMDS_collagen/experiments/experiment_config_HMDS.yml')
    parser.add_argument('--data_dir',
                        default="/data/Repositories/HMDS_orientation/Data/train_rotated/old_working_file/")
    parser.add_argument('--seed', type=int, default=42)
    parser.add_argument('--model_unet', type=_str2bool, default=False)
    parser.add_argument('--num_threads', type=int, default=25)  # parallel processing
    parser.add_argument('--bs', type=int, default=60)  # images per batch (batch size)
    parser.add_argument('--n_epochs', type=int, default=10)  # iteration for training
    args = parser.parse_args()

    # Open configuration file
    with open(args.experiment, 'r') as f:
        config = yaml.load(f, Loader=yaml.FullLoader)

    # Seeding for reproducible experiments
    torch.manual_seed(args.seed)
    torch.cuda.manual_seed(args.seed)
    np.random.seed(args.seed)

    # Initialize working directories
    snapshots_dir = pathlib.Path(args.workdir) / 'snapshots'
    snapshots_dir.mkdir(exist_ok=True)
    device = auto_detect_device()

    # Unique snapshot name: hostname + timestamp
    snapshot_name = time.strftime(f'{socket.gethostname()}_%Y_%m_%d_%H_%M_%S')
    (snapshots_dir / snapshot_name).mkdir(exist_ok=True, parents=True)

    # Save the experiment parameters
    with open(snapshots_dir / snapshot_name / 'config.yml', 'w') as f:
        yaml.dump(config, f, Dumper=yaml.Dumper, default_flow_style=False)

    # Save args (dill serializes argparse.Namespace incl. pathlib paths)
    with open(snapshots_dir / snapshot_name / 'args.dill', 'wb') as f:
        dill.dump(args, f)

    return args, config, device, snapshots_dir, snapshot_name
# Fragment of an out-of-fold (OOF) inference routine: the enclosing
# function's header is outside this chunk; `args` and `snap` (a snapshot
# directory path) come from that surrounding scope.
start = time()  # wall-clock start; NOTE(review): L1 uses time.strftime, so `time` is presumably imported as `from time import time` here — confirm
save_dir = args.save_dir / str(snap.stem + '_oof')
save_dir.mkdir(exist_ok=True)
# Load snapshot configuration
with open(snap / 'config.yml', 'r') as f:
    config = yaml.load(f, Loader=yaml.Loader)
with open(snap / 'args.dill', 'rb') as f:
    args_experiment = dill.load(f)
with open(snap / 'split_config.dill', 'rb') as f:
    split_config = dill.load(f)
device = auto_detect_device()
# Load models: prefer the decoder type recorded in the snapshot config;
# fall back to the flag saved in the experiment args if the key is
# missing or the weights fail to load with that architecture.
try:
    unet = config['model']['decoder'].lower() == 'unet'
    model_list = load_models(str(snap), config, unet=unet, n_gpus=args_experiment.gpus)
except (KeyError, RuntimeError):
    model_list = load_models(str(snap), config, unet=args_experiment.model_unet, n_gpus=args_experiment.gpus)
# µCT snapshots switch the experiment to volumetric (3D) mode.
if config['training']['uCT']:
    config['training']['experiment'] = '3D'
# NOTE(review): the mangled source does not show whether this batch-size
# override was inside the `if` above — placed outside as the likelier
# intent (an unconditional inference-time override); confirm upstream.
config['training']['bs'] = args.bs
print(f'Found {len(model_list)} models.')
threshold = args.threshold
def init_experiment():
    """
    Setup the model training experiments.

    Lists all configuration files in the args.experiment directory and
    prepares a snapshot directory for each experiment found.

    :return: General arguments, list of experiment parameters,
             computation device (CPU/GPU)
    """
    # Input arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('--data_location', type=pathlib.Path, default='../../../Data',
                        help='Path to input and target images')
    parser.add_argument('--workdir', type=pathlib.Path, default='../../../workdir/',
                        help='Path for saving the experiment logs and segmentation models')
    parser.add_argument('--experiment', type=pathlib.Path, default='../experiments/run',
                        help='Path to experiment files for training (all experiments are conducted)')
    parser.add_argument('--ID_char', type=str, default='_',
                        help='Separator for the subject ID and image name')
    parser.add_argument('--ID_split', type=int, default=4,
                        help='Count of the ID_char to split the subject ID')
    parser.add_argument('--seed', type=int, default=42,
                        help='Random seed to allow consistent experiments (e.g. for random augmentations)')
    parser.add_argument('--num_threads', type=int, default=16,
                        help='Number of CPUs for parallel processing')
    parser.add_argument('--gpus', type=int, default=2,
                        help='Number of GPUs for model training')
    args = parser.parse_args()

    # Initialize working directories
    args.snapshots_dir = args.workdir / 'snapshots'
    args.snapshots_dir.mkdir(exist_ok=True)

    # List configuration files in deterministic (sorted) order
    config_paths = sorted(os.listdir(str(args.experiment)))

    # Open each configuration file and set up its snapshot directory.
    # BUG FIX: the snapshot creation and config/args saving previously ran
    # once AFTER this loop, so only the last configuration received a
    # snapshot directory and a config['training']['snapshot'] entry even
    # though all experiments are conducted. It now runs per configuration.
    config_list = []
    for config_path in config_paths:
        if not config_path.endswith('.yml'):
            continue
        with open(args.experiment / config_path, 'r') as f:
            config = yaml.load(f, Loader=yaml.FullLoader)

        # Snapshot directory: hostname, timestamp and model description
        encoder = config['model']['backbone']
        decoder = config['model']['decoder']
        experiment = config['training']['experiment']
        snapshot_name = time.strftime(
            f'{socket.gethostname()}_%Y_%m_%d_%H_%M_%S_{experiment}_{encoder}_{decoder}')
        (args.snapshots_dir / snapshot_name).mkdir(exist_ok=True, parents=True)
        config['training']['snapshot'] = snapshot_name

        # Save the experiment parameters
        with open(args.snapshots_dir / snapshot_name / 'config.yml', 'w') as f:
            yaml.dump(config, f, Dumper=yaml.Dumper, default_flow_style=False)
        # Save args
        with open(args.snapshots_dir / snapshot_name / 'args.dill', 'wb') as f:
            dill.dump(args, f)

        config_list.append(config)

    # Seeding for reproducible experiments
    torch.manual_seed(args.seed)
    torch.cuda.manual_seed(args.seed)
    np.random.seed(args.seed)

    # Calculation resource
    device = auto_detect_device()

    return args, config_list, device