def load_mri_volumes(self, normalize: bool) -> np.ndarray:
        patient_path = os.path.join(self.data_path, self.patch_name)

        flair = load_nifi_volume(os.path.join(patient_path, self.flair),
                                 normalize)
        t1 = load_nifi_volume(os.path.join(patient_path, self.t1), normalize)
        t2 = load_nifi_volume(os.path.join(patient_path, self.t2), normalize)
        t1_ce = load_nifi_volume(os.path.join(patient_path, self.t1ce),
                                 normalize)
        modalities = np.stack((flair, t1, t2, t1_ce))

        return modalities
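
A minimal sketch of what the stacked result looks like, using dummy arrays in place of the volumes returned by load_nifi_volume (the 240x240x155 shape is the usual BraTS volume size, assumed here only for illustration):

import numpy as np

# Four single-modality volumes of identical shape (dummy data standing in
# for the arrays returned by load_nifi_volume).
flair = np.random.rand(240, 240, 155)
t1 = np.random.rand(240, 240, 155)
t2 = np.random.rand(240, 240, 155)
t1_ce = np.random.rand(240, 240, 155)

modalities = np.stack((flair, t1, t2, t1_ce))
print(modalities.shape)  # (4, 240, 240, 155): channels-first modality stack
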
Example #2
def load_patient() -> Tuple[np.ndarray, np.ndarray]:
    flair = load_nifi_volume(
        os.path.join(dataset_path, patient, f"{patient}_flair.nii.gz"))
    t1 = load_nifi_volume(
        os.path.join(dataset_path, patient, f"{patient}_t1.nii.gz"))
    t1ce = load_nifi_volume(
        os.path.join(dataset_path, patient, f"{patient}_t1ce.nii.gz"))
    t2 = load_nifi_volume(
        os.path.join(dataset_path, patient, f"{patient}_t2.nii.gz"))
    masks = load_nifi_volume(
        os.path.join(dataset_path, patient, f"{patient}_seg.nii.gz"))

    modalities = np.asarray([t1, t1ce, t2, flair])

    return modalities, masks
Example #3
def get_brain_mask():
    data = load_nifi_volume(os.path.join(dataset_path, patient,
                                         f"{patient}_flair.nii.gz"),
                            normalize=False)
    brain_mask = np.zeros(data.shape, float)
    brain_mask[data > 0] = 1
    return brain_mask

    def get_brain_mask(self):
        patient_path = os.path.join(self.data_path, self.patch_name)
        data = load_nifi_volume(os.path.join(patient_path, self.flair), False)

        brain_mask = np.zeros(data.shape, float)
        brain_mask[data > 0] = 1
        return brain_mask
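
Both get_brain_mask variants derive the mask by thresholding the FLAIR volume at zero; a self-contained sketch of that step on synthetic data (the shape and intensity values are illustrative):

import numpy as np

# Synthetic "FLAIR" volume: zero background, positive intensities inside the brain.
data = np.zeros((4, 4, 4))
data[1:3, 1:3, 1:3] = 100.0

brain_mask = np.zeros(data.shape, float)
brain_mask[data > 0] = 1  # 1 inside the brain, 0 in the background

assert brain_mask.sum() == (data > 0).sum()
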
def read_preds_from_models(model_list: list, patient_name: str) -> np.ndarray:
    seg_maps = [
        nifi_volume.load_nifi_volume(os.path.join(model_path, patient_name),
                                     normalize=False)
        for model_path in model_list
    ]

    return np.stack(seg_maps)
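
The stacked output has shape (n_models, H, W, D), one segmentation map per model. A hedged sketch of a per-voxel majority vote over such a stack (the label set and the ensembling step are assumptions for illustration, not part of the snippet above):

import numpy as np

# Dummy stack of label maps from three models (labels 0, 1, 2, 4 as in BraTS).
seg_maps = np.random.choice([0, 1, 2, 4], size=(3, 8, 8, 8))

labels = np.array([0, 1, 2, 4])
# Count votes per label at every voxel, then pick the label with most votes.
votes = np.stack([(seg_maps == lbl).sum(axis=0) for lbl in labels])
ensemble = labels[np.argmax(votes, axis=0)]
print(ensemble.shape)  # (8, 8, 8): one consensus label per voxel
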
def compute_normalization(input_dir, output_dir, ground_truth_path):

    file_list = sorted(
        [file for file in os.listdir(input_dir)
         if "unc" in file and "nii.gz" in file])
    file_list_all = sorted(
        [file for file in os.listdir(input_dir) if "nii.gz" in file])

    max_uncertainty = 0
    min_uncertainty = 10000

    for uncertainty_map in tqdm(file_list,
                                total=len(file_list),
                                desc="Getting min and max"):

        # Load Uncertainty maps
        patient_name = uncertainty_map.split(".")[0].split("_unc")[0]
        path_gt = os.path.join(ground_truth_path, patient_name,
                               f"{patient_name}_flair.nii.gz")
        flair = nifi_volume.load_nifi_volume(path_gt, normalize=False)
        brain_mask = np.zeros(flair.shape, float)
        brain_mask[flair > 0] = 1

        path = os.path.join(input_dir, uncertainty_map)
        unc_map, _ = nifi_volume.load_nifi_volume_return_nib(path,
                                                             normalize=False)

        tmp_max = np.max(unc_map[brain_mask == 1])
        tmp_min = np.min(unc_map[brain_mask == 1])

        if tmp_max > max_uncertainty:
            max_uncertainty = tmp_max

        if tmp_min < min_uncertainty:
            min_uncertainty = tmp_min

    for uncertainty_map_path in tqdm(file_list_all,
                                     total=len(file_list_all),
                                     desc="Normalizing.."):

        path = os.path.join(input_dir, uncertainty_map_path)
        output_path = os.path.join(output_dir, uncertainty_map_path)

        unc_map, nib_data = nifi_volume.load_nifi_volume_return_nib(
            path, normalize=False)

        if "unc" in uncertainty_map_path:
            uncertainty_map_normalized = brats_normalize(
                unc_map, max_unc=max_uncertainty, min_unc=min_uncertainty)
            print(f"Saving to: {output_path}")
            nifi_volume.save_segmask_as_nifi_volume(uncertainty_map_normalized,
                                                    nib_data.affine,
                                                    output_path)
        else:
            nifi_volume.save_segmask_as_nifi_volume(unc_map, nib_data.affine,
                                                    output_path)

            post_result = {"prediction": segmentation_post}
            predict.save_predictions(data[idx], post_result, model_path,
                                     f"{task}_post_processed")

        results["prediction"] = prediction_map
        predict.save_predictions(data[idx], results, model_path, task)

        if compute_metrics:
            patient_path = os.path.join(data[idx].data_path,
                                        data[idx].patch_name, data[idx].seg)
            data_path = os.path.join(data[idx].data_path, data[idx].patch_name,
                                     data[idx].flair)

            if os.path.exists(patient_path):
                volume_gt = data[idx].load_gt_mask()
                volume = nifi_volume.load_nifi_volume(data_path)
                metrics = compute_wt_tc_et(prediction_map, volume_gt, volume)
                logger.info(f"{data[idx].patient} | {metrics}")

    print("Normalize uncertainty for brats!")
    if uncertainty_flag:
        input_dir = os.path.join(model_path, task)
        output_dir = os.path.join(model_path, task, "normalized")
        gt_path = data[0].data_path
        compute_normalization(input_dir=input_dir,
                              output_dir=output_dir,
                              ground_truth_path=gt_path)

    print("All done!!!! Be happy!")

    def load_gt_mask(self) -> np.ndarray:
        patient_path = os.path.join(self.data_path, self.patch_name)
        volume = load_nifi_volume(os.path.join(patient_path, self.seg),
                                  normalize=False)
        return volume
def volume():
    patient = "BraTS20_Training_001_p0_64x64x64"
    gen_path = "/Users/lauramora/Documents/MASTER/TFM/Data/2020/train/random_tumor_distribution/"
    volume_path = os.path.join(gen_path, patient, f"{patient}_seg.nii.gz")
    return nifi_volume.load_nifi_volume(volume_path, normalize=False)
Example #10
            "subject_ID", "Grade", "Center", "Size", "Dice WT", "HD WT",
            "Recall WT", "Precision WT", "F1 WT", "Dice TC", "HD TC",
            "Recall TC", "Precision TC", "F1 TC", "Dice ET", "HD ET",
            "Recall ET", "Precision ET", "F1 ET"
        ])

        for patient in tqdm(data, total=len(data)):

            patient_data = []
            gt_path = os.path.join(patient.data_path, patient.patient,
                                   f"{patient.seg}")
            data_path = os.path.join(patient.data_path, patient.patient,
                                     f"{patient.flair}")
            prediction_path = os.path.join(
                patient.data_path, patient.patient,
                f"{patient.patient}_prediction.nii.gz")
            if not os.path.exists(prediction_path):
                print(f"{prediction_path} not found")
                continue

            patient_data.extend(
                [patient.patient, patient.grade, patient.center, patient.size])

            volume_gt_all, _ = nifi_utils.load_nifi_volume(gt_path)
            volume_pred_all, _ = nifi_utils.load_nifi_volume(prediction_path)
            volume, _ = nifi_utils.load_nifi_volume(data_path)

            metrics = compute_wt_tc_et(volume_pred_all, volume_gt_all, volume)
            patient_data.extend(metrics)
            writer.writerow(patient_data)
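
The loop above writes one row per patient with writer.writerow, but the snippet is truncated before the writer is created. A minimal sketch of the setup it assumes (the file name and open mode are illustrative, not from the source):

import csv

# Hypothetical setup for the metrics table above: a csv.writer over an
# output file; the header row is written once before iterating patients.
with open("metrics.csv", "w", newline="") as csv_file:
    writer = csv.writer(csv_file)
    writer.writerow(["subject_ID", "Grade", "Center", "Size"])  # plus the metric columns shown above
    # per-patient rows are then appended with writer.writerow(patient_data)
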
def volume():
    patient = "BraTS20_Training_001"
    gen_path = "/Users/lauramora/Documents/MASTER/TFM/Data/2020/train/no_patch/"
    volume_path = os.path.join(gen_path, patient, f"{patient}_seg.nii.gz")
    volume = nifi_utils.load_nifi_volume(volume_path)
    return volume
    output_dir = os.path.join(model_path, f"{task}/{setx}/normalized")
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    file_list = sorted([file for file in os.listdir(input_dir) if "unc" in file and "nii.gz" in file])
    file_list_all = sorted([file for file in os.listdir(input_dir) if "nii.gz" in file])

    max_uncertainty = 0
    min_uncertainty = 10000

    for uncertainty_map in tqdm(file_list, total=len(file_list), desc="Getting min and max"):

        # Load Uncertainty maps
        patient_name = uncertainty_map.split(".")[0].split("_unc")[0]
        path_gt = os.path.join(ground_truth_path, patient_name, f"{patient_name}_flair.nii.gz")
        flair = load_nifi_volume(path_gt, normalize=False)
        brain_mask = np.zeros(flair.shape, float)
        brain_mask[flair > 0] = 1

        path = os.path.join(input_dir, uncertainty_map)
        unc_map, _ = load_nifi_volume_return_nib(path, normalize=False)

        tmp_max = np.max(unc_map[brain_mask == 1])
        tmp_min = np.min(unc_map[brain_mask == 1])

        if tmp_max > max_uncertainty:
            max_uncertainty = tmp_max

        if tmp_min < min_uncertainty:
            min_uncertainty = tmp_min