Example #1

import shutil
from copy import deepcopy
from multiprocessing import Pool

import numpy as np
from batchgenerators.utilities.file_and_folder_operations import (
    join, isfile, subfiles, maybe_mkdir_p, load_pickle)
from nnunet.postprocessing.connected_components import (
    apply_postprocessing_to_folder, load_postprocessing)

# merge_files is the per-case worker that averages the softmax predictions;
# it is assumed to be defined alongside this function in the same module.
def merge(folders,
          output_folder,
          threads,
          override=True,
          postprocessing_file=None,
          store_npz=False):
    maybe_mkdir_p(output_folder)

    if postprocessing_file is not None:
        # Raw ensembled segmentations go to a 'not_postprocessed' subfolder;
        # the postprocessed results will be written to the original folder.
        output_folder_orig = deepcopy(output_folder)
        output_folder = join(output_folder, 'not_postprocessed')
        maybe_mkdir_p(output_folder)
    else:
        output_folder_orig = None

    # Collect the unique case identifiers (.npz filenames without the
    # extension) across all input folders.
    patient_ids = [subfiles(i, suffix=".npz", join=False) for i in folders]
    patient_ids = [i for j in patient_ids for i in j]
    patient_ids = [i[:-4] for i in patient_ids]
    patient_ids = np.unique(patient_ids)

    # Every case must have its .npz and .pkl in every folder, otherwise the
    # ensemble would be incomplete.
    for f in folders:
        assert all([isfile(join(f, i + ".npz")) for i in patient_ids]), "Not all patient npz are available in " \
                                                                        "all folders"
        assert all([isfile(join(f, i + ".pkl")) for i in patient_ids]), "Not all patient pkl are available in " \
                                                                        "all folders"

    # Per-case argument lists: the softmax files from all folders, the
    # properties from the first folder and the target nifti path.
    files = []
    property_files = []
    out_files = []
    for p in patient_ids:
        files.append([join(f, p + ".npz") for f in folders])
        property_files.append(join(folders[0], p + ".pkl"))
        out_files.append(join(output_folder, p + ".nii.gz"))

    # All folders are assumed to share the same plans.pkl; it provides the
    # built-in postprocessing settings handed to merge_files.
    plans = load_pickle(join(folders[0], "plans.pkl"))

    only_keep_largest_connected_component, min_region_size_per_class = plans['keep_only_largest_region'], \
                                                                       plans['min_region_size_per_class']
    # Pool.map passes each zipped tuple to merge_files as a single argument,
    # so this version of merge_files is expected to unpack it itself.
    p = Pool(threads)
    p.map(
        merge_files,
        zip(files, property_files, out_files,
            [only_keep_largest_connected_component] * len(out_files),
            [min_region_size_per_class] * len(out_files),
            [override] * len(out_files), [store_npz] * len(out_files)))
    p.close()
    p.join()

    if postprocessing_file is not None:
        for_which_classes, min_valid_obj_size = load_postprocessing(
            postprocessing_file)
        print('Postprocessing...')
        apply_postprocessing_to_folder(output_folder, output_folder_orig,
                                       for_which_classes, min_valid_obj_size,
                                       threads)
        # Keep a copy of the postprocessing settings next to the results.
        shutil.copy(postprocessing_file, output_folder_orig)
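
A minimal usage sketch for this version of merge. The folder names are hypothetical; each folder is assumed to contain per-case .npz softmax predictions with matching .pkl property files plus a shared plans.pkl, as produced by nnU-Net inference with npz saving enabled:

if __name__ == '__main__':
    # Hypothetical prediction folders from two models/folds.
    folds = ['/data/predictions/fold_0', '/data/predictions/fold_1']
    merge(folds,
          output_folder='/data/predictions/ensembled',
          threads=8,
          override=True,
          postprocessing_file=None,  # or a path to a postprocessing.json
          store_npz=False)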
Example #2

import os
import shutil
from copy import deepcopy
from multiprocessing import Pool

import numpy as np
from batchgenerators.utilities.file_and_folder_operations import (
    join, isfile, subfiles)
from nnunet.postprocessing.connected_components import (
    apply_postprocessing_to_folder, load_postprocessing)

# A variant of the same function; see the notes after this example for how
# it differs from Example #1. merge_files is again assumed to be defined in
# the same module.
def merge(folders,
          output_folder,
          threads,
          override=True,
          postprocessing_file=None,
          store_npz=False):
    os.makedirs(output_folder, exist_ok=True)

    if postprocessing_file is not None:
        output_folder_orig = deepcopy(output_folder)
        output_folder = join(output_folder, 'not_postprocessed')
        os.makedirs(output_folder, exist_ok=True)
    else:
        output_folder_orig = None

    patient_ids = [subfiles(i, suffix=".npz", join=False) for i in folders]
    patient_ids = [i for j in patient_ids for i in j]
    patient_ids = [i[:-4] for i in patient_ids]
    patient_ids = np.unique(patient_ids)

    for f in folders:
        assert all([isfile(join(f, i + ".npz")) for i in patient_ids]), "Not all patient npz are available in " \
                                                                        "all folders"
        assert all([isfile(join(f, i + ".pkl")) for i in patient_ids]), "Not all patient pkl are available in " \
                                                                        "all folders"

    files = []
    property_files = []
    out_files = []
    for p in patient_ids:
        files.append([join(f, p + ".npz") for f in folders])
        # Unlike Example #1, the property files are collected from every
        # folder, not just the first one, and no plans.pkl is loaded.
        property_files.append([join(f, p + ".pkl") for f in folders])
        out_files.append(join(output_folder, p + ".nii.gz"))

    # Pool.starmap unpacks each zipped tuple into separate arguments, so
    # merge_files must accept five parameters here (see the stub below).
    p = Pool(threads)
    p.starmap(
        merge_files,
        zip(files, property_files, out_files, [override] * len(out_files),
            [store_npz] * len(out_files)))
    p.close()
    p.join()

    if postprocessing_file is not None:
        for_which_classes, min_valid_obj_size = load_postprocessing(
            postprocessing_file)
        print('Postprocessing...')
        apply_postprocessing_to_folder(output_folder, output_folder_orig,
                                       for_which_classes, min_valid_obj_size,
                                       threads)
        shutil.copy(postprocessing_file, output_folder_orig)
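
The two examples disagree on how merge_files is invoked: Example #1 passes each zipped tuple as one argument through Pool.map, while this version unpacks it through Pool.starmap. merge_files itself is not shown in either listing; a hypothetical stub matching the starmap call above (parameter names assumed, not taken from the source) would look like:

def merge_files(files, property_files, out_file, override, store_npz):
    """Hypothetical signature implied by the starmap call above."""
    if not override and isfile(out_file):
        return  # keep an existing result when override is False
    # ... average the softmax arrays loaded from `files`, convert the mean
    # to a segmentation and write it to `out_file` (optionally saving the
    # averaged softmax as .npz when store_npz is True) ...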
Example #3

import shutil

import numpy as np
from batchgenerators.utilities.file_and_folder_operations import (
    join, subfiles, maybe_mkdir_p)

# merge (see Examples #1/#2), write_pngs_from_nifti,
# convert_seg_to_intensity_task3 and convert_seg_to_intensity_task5 are
# assumed to be defined alongside this function in the same module.
def convert_variant2_predicted_test_to_submission_format(
    folder_with_predictions,
    output_folder="/home/fabian/drives/datasets/results/nnUNet/test_sets/Task038_CHAOS_Task_3_5_Variant2/ready_to_submit",
    postprocessing_file="/home/fabian/drives/datasets/results/nnUNet/ensembles/Task038_CHAOS_Task_3_5_Variant2/ensemble_2d__nnUNetTrainerV2__nnUNetPlansv2.1--3d_fullres__nnUNetTrainerV2__nnUNetPlansv2.1/postprocessing.json"
):
    """
    output_folder is where the extracted template is
    :param folder_with_predictions:
    :param output_folder:
    :return:
    """
    # NOTE: this hard-coded path silently overrides the postprocessing_file
    # argument passed in by the caller.
    postprocessing_file = "/media/fabian/Results/nnUNet/3d_fullres/Task039_CHAOS_Task_3_5_Variant2_highres/" \
                          "nnUNetTrainerV2__nnUNetPlansfixed/postprocessing.json"

    # Variant 2 treats the in and out phases as two separate training
    # examples, so the two predictions have to be ensembled again.
    final_predictions_folder = join(output_folder, "final")
    maybe_mkdir_p(final_predictions_folder)
    t1_patient_names = [
        i.split("_")[-1][:-7] for i in subfiles(
            folder_with_predictions, prefix="T1", suffix=".nii.gz", join=False)
    ]
    folder_for_ensembing0 = join(output_folder, "ens0")
    folder_for_ensembing1 = join(output_folder, "ens1")
    maybe_mkdir_p(folder_for_ensembing0)
    maybe_mkdir_p(folder_for_ensembing1)
    # Copy all T1 out-phase predictions into ens0 and all in-phase
    # predictions into ens1, giving both the same case names so they can be
    # ensembled against each other.
    for t1 in t1_patient_names:
        shutil.copy(join(folder_with_predictions, "T1_in_%s.npz" % t1),
                    join(folder_for_ensembing1, "T1_%s.npz" % t1))
        shutil.copy(join(folder_with_predictions, "T1_in_%s.pkl" % t1),
                    join(folder_for_ensembing1, "T1_%s.pkl" % t1))
        shutil.copy(join(folder_with_predictions, "T1_out_%s.npz" % t1),
                    join(folder_for_ensembing0, "T1_%s.npz" % t1))
        shutil.copy(join(folder_with_predictions, "T1_out_%s.pkl" % t1),
                    join(folder_for_ensembing0, "T1_%s.pkl" % t1))
    shutil.copy(join(folder_with_predictions, "plans.pkl"),
                join(folder_for_ensembing0, "plans.pkl"))
    shutil.copy(join(folder_with_predictions, "plans.pkl"),
                join(folder_for_ensembing1, "plans.pkl"))

    # there is a problem with T1_35 that I need to correct manually (different crop size, will not negatively impact results)
    #ens0_softmax = np.load(join(folder_for_ensembing0, "T1_35.npz"))['softmax']
    ens1_softmax = np.load(join(folder_for_ensembing1, "T1_35.npz"))['softmax']
    #ens0_props = load_pickle(join(folder_for_ensembing0, "T1_35.pkl"))
    #ens1_props = load_pickle(join(folder_for_ensembing1, "T1_35.pkl"))
    # Trim the in-phase softmax so its shape matches the out-phase prediction.
    ens1_softmax = ens1_softmax[:, :, :-1, :]
    np.savez_compressed(join(folder_for_ensembing1, "T1_35.npz"),
                        softmax=ens1_softmax)
    shutil.copy(join(folder_for_ensembing0, "T1_35.pkl"),
                join(folder_for_ensembing1, "T1_35.pkl"))

    # now call my ensemble function
    merge((folder_for_ensembing0, folder_for_ensembing1),
          final_predictions_folder,
          8,
          True,
          postprocessing_file=postprocessing_file)
    # copy t2 files to final_predictions_folder as well
    t2_files = subfiles(folder_with_predictions,
                        prefix="T2",
                        suffix=".nii.gz",
                        join=False)
    for t2 in t2_files:
        shutil.copy(join(folder_with_predictions, t2),
                    join(final_predictions_folder, t2))

    # apply postprocessing
    from nnunet.postprocessing.connected_components import apply_postprocessing_to_folder, load_postprocessing
    postprocessed_folder = join(output_folder, "final_postprocessed")
    for_which_classes, min_valid_obj_size = load_postprocessing(
        postprocessing_file)
    apply_postprocessing_to_folder(final_predictions_folder,
                                   postprocessed_folder, for_which_classes,
                                   min_valid_obj_size, 8)

    # now export the niftis in the png format required by the CHAOS submission
    # task 3
    output_dir = join(output_folder, "CHAOS_submission_template_new", "Task3",
                      "MR")
    for t1 in t1_patient_names:
        output_folder_here = join(output_dir, t1, "T1DUAL", "Results")
        nifti_file = join(postprocessed_folder, "T1_%s.nii.gz" % t1)
        write_pngs_from_nifti(nifti_file,
                              output_folder_here,
                              converter=convert_seg_to_intensity_task3)
    for t2 in t2_files:
        patname = t2.split("_")[-1][:-7]
        output_folder_here = join(output_dir, patname, "T2SPIR", "Results")
        nifti_file = join(postprocessed_folder, "T2_%s.nii.gz" % patname)
        write_pngs_from_nifti(nifti_file,
                              output_folder_here,
                              converter=convert_seg_to_intensity_task3)

    # task 5
    output_dir = join(output_folder, "CHAOS_submission_template_new", "Task5",
                      "MR")
    for t1 in t1_patient_names:
        output_folder_here = join(output_dir, t1, "T1DUAL", "Results")
        nifti_file = join(postprocessed_folder, "T1_%s.nii.gz" % t1)
        write_pngs_from_nifti(nifti_file,
                              output_folder_here,
                              converter=convert_seg_to_intensity_task5)
    for t2 in t2_files:
        patname = t2.split("_")[-1][:-7]
        output_folder_here = join(output_dir, patname, "T2SPIR", "Results")
        nifti_file = join(postprocessed_folder, "T2_%s.nii.gz" % patname)
        write_pngs_from_nifti(nifti_file,
                              output_folder_here,
                              converter=convert_seg_to_intensity_task5)
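
A minimal invocation sketch (paths hypothetical). Note that, as written, the hard-coded reassignment at the top of the function means the postprocessing_file argument is effectively ignored:

if __name__ == '__main__':
    convert_variant2_predicted_test_to_submission_format(
        folder_with_predictions='/data/Task038_CHAOS/predicted_test',
        output_folder='/data/Task038_CHAOS/ready_to_submit')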