def ensemble(
    experiments=(
        'nnUNetTrainerNewCandidate23_FabiansPreActResNet__nnUNetPlans',
        'nnUNetTrainerNewCandidate23_FabiansResNet__nnUNetPlans'),
    out_dir="/media/fabian/Results/nnUNet/3d_lowres/Task48_KiTS_clean/ensemble_preactres_and_res"
):
    """Merge the cross-validation softmax predictions of several experiments.

    Each experiment identifier names a trainer__plans folder under the
    3d_lowres Task48_KiTS_clean results; its 'crossval_npz' subfolder holds
    the softmax predictions that get ensembled.

    :param experiments: iterable of trainer__plans identifiers to combine
    :param out_dir: destination folder for the merged ensemble predictions
    """
    from nnunet.inference.ensemble_predictions import merge

    crossval_dirs = []
    for experiment in experiments:
        crossval_dirs.append(
            join(network_training_output_dir, "3d_lowres/Task48_KiTS_clean",
                 experiment, 'crossval_npz'))
    # 8 = number of threads used for the merge
    merge(crossval_dirs, out_dir, 8)
# Example #2
    def run_inference_and_store_results(self, output_file_tag=''):
        """Run every configured model on each case, ensemble the results and
        store the final thresholded segmentation per patient.

        For each case yielded by ``self.data.inference_loader``:
          1. run ``predict_cases`` once per (model, folds) pair from
             ``self.model_list`` / ``self.folds_list``, writing softmax .npz
             files into a per-model intermediate folder,
          2. ``merge`` those per-model predictions into the patient's final
             output folder,
          3. apply the BraTS threshold to the merged file and convert/save it.

        The per-model intermediate folders are removed after all cases are
        processed.

        :param output_file_tag: optional tag prefixed to the fixed filename
            suffix '_nnunet_seg.nii.gz'
        """
        output_file_base_name = output_file_tag + "_nnunet_seg.nii.gz"

        # initialized here so the cleanup below is safe even when the
        # inference loader yields no cases
        intermediate_output_folders = []

        # passing only lists of length one to predict_cases
        for inner_list in self.data.inference_loader:
            list_of_lists = [inner_list]

            # output filenames (list of one) include information about patient folder name
            # inferring patient folder name from all file paths for a sanity check
            # (should give the same answer)
            folder_names = [fpath.split('/')[-2] for fpath in inner_list]
            if set(folder_names) != set(folder_names[:1]):
                raise RuntimeError('Patient file paths: {} were found to come from different folders against expectation.'.format(inner_list))
            patient_folder_name = folder_names[0]
            output_filename = patient_folder_name + output_file_base_name

            final_out_folder = join(self.intermediate_out_folder, patient_folder_name)

            # per-model folders are derived from model names only, so the same
            # set is rebuilt on every iteration of the case loop
            intermediate_output_folders = []

            for model_name, folds in zip(self.model_list, self.folds_list):
                output_model = join(self.intermediate_out_folder, model_name)
                intermediate_output_folders.append(output_model)
                intermediate_output_filepaths = [join(output_model, output_filename)]
                maybe_mkdir_p(output_model)
                params_folder_model = join(self.params_folder, model_name)

                predict_cases(model=params_folder_model,
                              list_of_lists=list_of_lists,
                              output_filenames=intermediate_output_filepaths,
                              folds=folds,
                              save_npz=True,
                              num_threads_preprocessing=1,
                              num_threads_nifti_save=1,
                              segs_from_prev_stage=None,
                              do_tta=True,
                              mixed_precision=True,
                              overwrite_existing=True,
                              all_in_gpu=False,
                              step_size=0.5)

            merge(folders=intermediate_output_folders,
                  output_folder=final_out_folder,
                  threads=1,
                  override=True,
                  postprocessing_file=None,
                  store_npz=False)

            f = join(final_out_folder, output_filename)
            apply_brats_threshold(f, f, self.threshold, self.replace_with)
            load_convert_save(f)

        # Remove the per-model intermediate folders. The original line was
        # dedented to class level (where `intermediate_output_folders` is
        # undefined) and used a side-effect list comprehension; a plain loop
        # inside the method is the correct form.
        for intermediate_folder in intermediate_output_folders:
            shutil.rmtree(intermediate_folder)
# Example #3
def _write_chaos_pngs(postprocessed_folder, output_dir, modality_subfolder,
                      prefix, patient_names, converter):
    """Export one postprocessed nifti per patient to CHAOS-style PNGs.

    Writes into <output_dir>/<patient>/<modality_subfolder>/Results using the
    given label-to-intensity converter.
    """
    for patient in patient_names:
        output_folder_here = join(output_dir, patient, modality_subfolder, "Results")
        nifti_file = join(postprocessed_folder, "%s_%s.nii.gz" % (prefix, patient))
        write_pngs_from_nifti(nifti_file,
                              output_folder_here,
                              converter=converter)


def convert_variant2_predicted_test_to_submission_format(
    folder_with_predictions,
    output_folder="/home/fabian/drives/datasets/results/nnUNet/test_sets/Task038_CHAOS_Task_3_5_Variant2/ready_to_submit",
    postprocessing_file="/media/fabian/Results/nnUNet/3d_fullres/Task039_CHAOS_Task_3_5_Variant2_highres/"
                        "nnUNetTrainerV2__nnUNetPlansfixed/postprocessing.json"
):
    """Ensemble variant-2 test predictions and export them in CHAOS format.

    Variant 2 treats the T1 in-phase and out-phase of each patient as two
    separate examples, so their predictions are ensembled back together here,
    postprocessing is applied, and the results are written as PNGs into the
    CHAOS submission template (Task3 and Task5).

    output_folder is where the extracted template is
    :param folder_with_predictions: folder containing the raw predicted
        .nii.gz / .npz / .pkl test files
    :param output_folder: extracted CHAOS submission template folder
    :param postprocessing_file: postprocessing.json to apply after ensembling.
        (Previously this parameter was unconditionally overwritten by a
        hard-coded path, making it dead; that path is now the default and a
        caller-supplied value is honored.)
    :return:
    """
    # variant 2 treats in and out phase as two training examples, so we need to ensemble these two again
    final_predictions_folder = join(output_folder, "final")
    maybe_mkdir_p(final_predictions_folder)
    t1_patient_names = [
        i.split("_")[-1][:-7] for i in subfiles(
            folder_with_predictions, prefix="T1", suffix=".nii.gz", join=False)
    ]
    folder_for_ensembing0 = join(output_folder, "ens0")
    folder_for_ensembing1 = join(output_folder, "ens1")
    maybe_mkdir_p(folder_for_ensembing0)
    maybe_mkdir_p(folder_for_ensembing1)
    # now copy all t1 out phases in ens0 and all in phases in ens1. Name them the same.
    for t1 in t1_patient_names:
        for phase, ens_folder in (("in", folder_for_ensembing1),
                                  ("out", folder_for_ensembing0)):
            for ext in ("npz", "pkl"):
                shutil.copy(join(folder_with_predictions,
                                 "T1_%s_%s.%s" % (phase, t1, ext)),
                            join(ens_folder, "T1_%s.%s" % (t1, ext)))
    shutil.copy(join(folder_with_predictions, "plans.pkl"),
                join(folder_for_ensembing0, "plans.pkl"))
    shutil.copy(join(folder_with_predictions, "plans.pkl"),
                join(folder_for_ensembing1, "plans.pkl"))

    # there is a problem with T1_35 that I need to correct manually (different crop size, will not negatively impact results)
    ens1_softmax = np.load(join(folder_for_ensembing1, "T1_35.npz"))['softmax']
    # drop the last slice along axis 2 so both ensemble members have the same shape
    ens1_softmax = ens1_softmax[:, :, :-1, :]
    np.savez_compressed(join(folder_for_ensembing1, "T1_35.npz"),
                        softmax=ens1_softmax)
    shutil.copy(join(folder_for_ensembing0, "T1_35.pkl"),
                join(folder_for_ensembing1, "T1_35.pkl"))

    # now call my ensemble function (8 threads, override enabled)
    merge((folder_for_ensembing0, folder_for_ensembing1),
          final_predictions_folder,
          8,
          True,
          postprocessing_file=postprocessing_file)
    # copy t2 files to final_predictions_folder as well
    t2_files = subfiles(folder_with_predictions,
                        prefix="T2",
                        suffix=".nii.gz",
                        join=False)
    for t2 in t2_files:
        shutil.copy(join(folder_with_predictions, t2),
                    join(final_predictions_folder, t2))

    # apply postprocessing
    from nnunet.postprocessing.connected_components import apply_postprocessing_to_folder, load_postprocessing
    postprocessed_folder = join(output_folder, "final_postprocessed")
    for_which_classes, min_valid_obj_size = load_postprocessing(
        postprocessing_file)
    apply_postprocessing_to_folder(final_predictions_folder,
                                   postprocessed_folder, for_which_classes,
                                   min_valid_obj_size, 8)

    # now export the niftis in the weird png format, once per CHAOS task
    t2_patient_names = [t2.split("_")[-1][:-7] for t2 in t2_files]
    for task, converter in (("Task3", convert_seg_to_intensity_task3),
                            ("Task5", convert_seg_to_intensity_task5)):
        output_dir = join(output_folder, "CHAOS_submission_template_new", task,
                          "MR")
        _write_chaos_pngs(postprocessed_folder, output_dir, "T1DUAL", "T1",
                          t1_patient_names, converter)
        _write_chaos_pngs(postprocessed_folder, output_dir, "T2SPIR", "T2",
                          t2_patient_names, converter)