def training():
    """Run one UNet training experiment on a Medical Decathlon dataset.

    Loads the experiment config, preprocesses the raw dataset (once; skipped
    if a ``preprocessed`` folder already exists), then trains and tests a
    :class:`UNetExperiment`. All paths and hyperparameters come from the
    config returned by ``get_config()``.
    """
    c = get_config()
    dataset_name = 'Task04_Hippocampus'
    # Alternative dataset — swap in to train on the heart task instead:
    # dataset_name = 'Task02_Heart'
    # One-time dataset download (enable on first run):
    # download_dataset(dest_path=c.data_root_dir, dataset=dataset_name, id=c.google_drive_id)
    # Resume-from-checkpoint toggles:
    # c.do_load_checkpoint = True
    # c.checkpoint_dir = c.base_dir + '/20190801-_unet_experiment' + '/checkpoint/checkpoint_current'
    # c.checkpoint_file = "checkpoint_last.pth.tar"

    # Compute the dataset directory once; os.path.join takes any number of
    # components, so no nested join calls are needed.
    dataset_dir = os.path.join(c.data_root_dir, dataset_name)
    if not exists(os.path.join(dataset_dir, 'preprocessed')):
        print('Preprocessing data. [STARTED]')
        preprocess_data(root_dir=dataset_dir)
        create_splits(output_dir=c.split_dir, image_dir=c.data_dir)
        print('Preprocessing data. [DONE]')
    else:
        print('The data has already been preprocessed. It will not be preprocessed again. Delete the folder to enforce it.')

    exp = UNetExperiment(config=c, name='unet_experiment', n_epochs=c.n_epochs,
                         seed=42, append_rnd_to_name=c.append_rnd_string)
    # visdomlogger_kwargs={"auto_start": c.start_visdom}
    exp.run()
    # Evaluate on the test split reusing the already-initialized experiment.
    exp.run_test(setup=False)
# Script entry: configure, (optionally) preprocess, then train and test a
# UNet experiment. Dataset name, experiment name and all hyperparameters are
# read from the config object; executes at import/run time by design.
c = get_config()
# One-time dataset download (enable on first run):
# download_dataset(dest_path=c.data_root_dir, dataset=c.dataset_name, id=c.google_drive_id)

# Compute the dataset directory once; a single os.path.join call with three
# components replaces the redundant nested joins.
dataset_dir = os.path.join(c.data_root_dir, c.dataset_name)
if not exists(os.path.join(dataset_dir, 'preprocessed')):
    print('Preprocessing data. [STARTED]')
    # Preprocessing steps are currently disabled; re-enable to regenerate
    # the preprocessed data and the train/val/test splits.
    # preprocess_data(root_dir=dataset_dir, y_shape=c.patch_size, z_shape=c.patch_size)
    # create_splits(output_dir=c.split_dir, image_dir=c.data_dir)
    print('Preprocessing data. [DONE]')
else:
    print(
        'The data has already been preprocessed. It will not be preprocessed again. Delete the folder to enforce it.'
    )

exp = UNetExperiment(
    config=c,
    name=c.name,
    n_epochs=c.n_epochs,
    seed=42,  # fixed seed for reproducible runs
    append_rnd_to_name=c.append_rnd_string,
    globs=globals(),
    # visdomlogger_kwargs={"auto_start": c.start_visdom},
    # Visdom logger started automatically when c.start_visdom is set.
    loggers={"visdom": ("visdom", {
        "auto_start": c.start_visdom
    })})
exp.run()
# Evaluate on the test split reusing the already-initialized experiment.
exp.run_test(setup=False)