def get_default_configuration(network, task, network_trainer, plans_identifier=default_plans_identifier,
                              search_in=(nnunet.__path__[0], "training", "network_training"),
                              base_module='nnunet.training.network_training'):
    """Resolve everything needed to start training a given nnU-Net configuration.

    :param network: one of '2d', '3d_lowres', '3d_fullres', '3d_cascade_fullres'
    :param task: task/dataset name (e.g. 'Task001_BrainTumour'); used as a subfolder name
    :param network_trainer: class name of the trainer to look up (e.g. 'nnUNetTrainerV2')
    :param plans_identifier: prefix of the plans pickle file to load
    :param search_in: path components of the package folder searched for the trainer class
    :param base_module: module path used when importing the trainer class
    :return: tuple (plans_file, output_folder_name, dataset_directory, batch_dice, stage, trainer_class)
    :raises RuntimeError: if a cascade/lowres network is requested but the plans contain only one stage
    """
    # NOTE: message previously listed '3d' (not a valid option) instead of '2d' — fixed.
    assert network in ['2d', '3d_lowres', '3d_fullres', '3d_cascade_fullres'], \
        "network can only be one of the following: \'2d\', \'3d_lowres\', \'3d_fullres\', \'3d_cascade_fullres\'"

    dataset_directory = join(preprocessing_output_dir, task)
    os.makedirs(dataset_directory, exist_ok=True)

    # 2d has its own plans file; all 3d variants share the 3D plans
    if network == '2d':
        plans_file = join(preprocessing_output_dir, task, plans_identifier + "_plans_2D.pkl")
    else:
        plans_file = join(preprocessing_output_dir, task, plans_identifier + "_plans_3D.pkl")

    plans = load_pickle(plans_file)
    possible_stages = list(plans['plans_per_stage'].keys())

    # the cascade (and its lowres first stage) only exists when the planner created >1 stage
    if (network == '3d_cascade_fullres' or network == "3d_lowres") and len(possible_stages) == 1:
        raise RuntimeError("3d_lowres/3d_cascade_fullres only applies if there is more than one stage. This task does "
                           "not require the cascade. Run 3d_fullres instead")

    # 2d and 3d_lowres train on the first (lowest-resolution) stage; everything else on the last
    if network == '2d' or network == "3d_lowres":
        stage = 0
    else:
        stage = possible_stages[-1]

    trainer_class = recursive_find_python_class([join(*search_in)], network_trainer,
                                                current_module=base_module)

    output_folder_name = join(network_training_output_dir, network, task, network_trainer + "__" + plans_identifier)

    print("###############################################")
    print("I am running the following nnUNet: %s" % network)
    print("My trainer class is: ", trainer_class)
    print("For that I will be using the following configuration:")
    summarize_plans(plans_file)
    print("I am using stage %d from these plans" % stage)

    # batch dice is used for 2d and for fullres stages of multi-stage (cascade) plans;
    # sample dice is used for 3d_lowres and single-stage 3d_fullres
    if (network == '2d' or len(possible_stages) > 1) and not network == '3d_lowres':
        batch_dice = True
        print("I am using batch dice + CE loss")
    else:
        batch_dice = False
        print("I am using sample dice + CE loss")

    print("\nI am using data from this folder: ", join(dataset_directory, plans['data_identifier']))
    print("###############################################")
    return plans_file, output_folder_name, dataset_directory, batch_dice, stage, trainer_class
def get_default_configuration(network, task, network_trainer, plans_identifier=default_plans_identifier,
                              search_in=(nnunet.__path__[0], "training", "network_training"),
                              base_module='nnunet.training.network_training'):
    """Resolve everything needed to start training a given nnU-Net configuration.

    :param network: one of '2d', '3d_lowres', '3d_fullres', '3d_cascade_fullres'
    :param task: task/dataset name (e.g. 'Task001_BrainTumour'); used as a subfolder name
    :param network_trainer: class name of the trainer to look up (e.g. 'nnUNetTrainerV2')
    :param plans_identifier: prefix of the plans pickle file to load
    :param search_in: path components of the package folder searched for the trainer class
    :param base_module: module path used when importing the trainer class
    :return: tuple (plans_file, output_folder_name, dataset_directory, batch_dice, stage, trainer_class)
    :raises RuntimeError: if a cascade/lowres network is requested but the plans contain only one stage
    """
    # NOTE: message previously listed '3d' (not a valid option) instead of '2d' — fixed.
    assert network in ['2d', '3d_lowres', '3d_fullres', '3d_cascade_fullres'], \
        "network can only be one of the following: \'2d\', \'3d_lowres\', \'3d_fullres\', \'3d_cascade_fullres\'"

    dataset_directory = join(preprocessing_output_dir, task)

    # 2d has its own plans file; all 3d variants share the 3D plans.
    # (join already inserts separators — the previous manual `+ '/'` concatenations were redundant)
    if network == '2d':
        plans_file = join(preprocessing_output_dir, task, plans_identifier + "_plans_2D.pkl")
    else:
        plans_file = join(preprocessing_output_dir, task, plans_identifier + "_plans_3D.pkl")

    plans = load_pickle(plans_file)
    # plans_file content (observed):
    # dict_keys(['preprocessed_data_folder', 'min_size_per_class', 'num_classes',
    #            'keep_only_largest_region', 'original_sizes', 'original_spacings', 'all_classes',
    #            'modalities', 'base_num_features', 'min_region_size_per_class', 'data_identifier',
    #            'use_mask_for_norm', 'num_modalities', 'normalization_schemes', 'plans_per_stage',
    #            'num_stages', 'dataset_properties', 'list_of_npz_files'])
    possible_stages = list(plans['plans_per_stage'].keys())
    # Example per-stage entries (observed):
    # plans['plans_per_stage'][0] = {
    #   'pool_op_kernel_sizes': [[2, 2, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2], [2, 2, 2]],
    #   'conv_kernel_sizes': [[3, 3, 3]] * 6, 'num_pool_per_axis': [5, 5, 4], 'batch_size': 2,
    #   'original_spacing': array([0.78162497, 0.78162497, 3.]),
    #   'current_spacing': array([1.5530063, 1.5530063, 3.090903]),
    #   'do_dummy_2D_data_aug': False, 'patch_size': array([160, 160, 80]),
    #   'median_patient_size_in_voxels': array([263, 263, 113])}
    # plans['plans_per_stage'][1] = {
    #   'pool_op_kernel_sizes': [[2, 2, 1], [2, 2, 1], [2, 2, 2], [2, 2, 2], [2, 2, 2]],
    #   'conv_kernel_sizes': [[3, 3, 1]] + [[3, 3, 3]] * 5, 'num_pool_per_axis': [5, 5, 3],
    #   'batch_size': 2, 'original_spacing': array([0.78162497, 0.78162497, 3.]),
    #   'current_spacing': array([0.78162497, 0.78162497, 3.]),
    #   'do_dummy_2D_data_aug': False, 'patch_size': array([192, 192, 48]),
    #   'median_patient_size_in_voxels': array([523, 523, 116])}

    # the cascade (and its lowres first stage) only exists when the planner created >1 stage
    if (network == '3d_cascade_fullres' or network == "3d_lowres") and len(possible_stages) == 1:
        raise RuntimeError("3d_lowres/3d_cascade_fullres only applies if there is more than one stage. This task does "
                           "not require the cascade. Run 3d_fullres instead")

    # 2d and 3d_lowres train on the first (lowest-resolution) stage; everything else on the last
    if network == '2d' or network == "3d_lowres":
        stage = 0
    else:
        stage = possible_stages[-1]

    trainer_class = recursive_find_trainer([join(*search_in)], network_trainer,
                                           current_module=base_module)

    # consistency fix: build the path with join alone (no manual '/' suffixes, no trailing slash),
    # matching the other implementation of this function
    output_folder_name = join(network_training_output_dir, network, task,
                              network_trainer + "__" + plans_identifier)

    print("###############################################")
    print("I am running the following nnUNet: %s" % network)
    print("My trainer class is: ", trainer_class)
    print("For that I will be using the following configuration:")
    summarize_plans(plans_file)
    print("I am using stage %d from these plans" % stage)

    # batch dice is used for 2d and for fullres stages of multi-stage (cascade) plans;
    # sample dice is used for 3d_lowres and single-stage 3d_fullres
    if (network == '2d' or len(possible_stages) > 1) and not network == '3d_lowres':
        batch_dice = True
        print("I am using batch dice + CE loss")
    else:
        batch_dice = False
        print("I am using sample dice + CE loss")

    print("\nI am using data from this folder: ", join(dataset_directory, plans['data_identifier']))
    print("###############################################")
    return plans_file, output_folder_name, dataset_directory, batch_dice, stage, trainer_class