Example #1
    # Assumed imports (this excerpt depends on project helpers):
    #   import numpy as np; import nibabel as nib; from os.path import join
    #   plus ExpUtils, ImgUtils, DataManagerTrainingNiftiImgs and the config object C
    def precompute_batches(custom_type=None):
        '''
        ~9000 slices per epoch -> ~200 batches (batch size 44) per epoch
        => 200-1000 precomputed batches are needed

        Precomputed dataset variants (DAug = data augmentation, Norm = normalization):
        270g_125mm_bundle_peaks_Y:   no DAug, no Norm, Y labels only
        All_sizes_DAug_XYZ:          12g, 90g and 270g; DAug (no rotation, no elastic deform); Norm; XYZ
        270g_125mm_bundle_peaks_XYZ: no DAug; Norm; XYZ
        '''

        class HP:  # experiment configuration
            NORMALIZE_DATA = True
            DATA_AUGMENTATION = False
            CV_FOLD = 0
            INPUT_DIM = (144, 144)
            BATCH_SIZE = 44
            DATASET_FOLDER = "HCP"
            TYPE = "single_direction"
            EXP_PATH = "~"
            LABELS_FILENAME = "bundle_peaks"
            FEATURES_FILENAME = "270g_125mm_peaks"
            DATASET = "HCP"
            RESOLUTION = "1.25mm"
            LABELS_TYPE = np.float32

        HP.TRAIN_SUBJECTS, HP.VALIDATE_SUBJECTS, HP.TEST_SUBJECTS = ExpUtils.get_cv_fold(HP.CV_FOLD)

        num_batches_base = 5000  # validate/test each get a third of the training amount
        num_batches = {
            "train": num_batches_base,
            "validate": int(num_batches_base / 3.),
            "test": int(num_batches_base / 3.),
        }

        if custom_type is None:
            types = ["train", "validate", "test"]
        else:
            types = [custom_type]

        for split in types:  # "train", "validate" or "test"
            dataManager = DataManagerTrainingNiftiImgs(HP)
            batch_gen = dataManager.get_batches(batch_size=HP.BATCH_SIZE, type=split,
                                                subjects=getattr(HP, split.upper() + "_SUBJECTS"),
                                                num_batches=num_batches[split])

            for idx, batch in enumerate(batch_gen):
                print("Processing: {}".format(idx))

                # DATASET_DIR = "HCP_batches/270g_125mm_bundle_peaks_Y"
                # DATASET_DIR = "HCP_batches/All_sizes_DAug_XYZ"
                DATASET_DIR = "HCP_batches/270g_125mm_bundle_peaks_XYZ"
                ExpUtils.make_dir(join(C.HOME, DATASET_DIR, split))

                # Save the batch data and its segmentation labels as NIfTI files
                data = nib.Nifti1Image(batch["data"], ImgUtils.get_dwi_affine(HP.DATASET, HP.RESOLUTION))
                nib.save(data, join(C.HOME, DATASET_DIR, split, "batch_" + str(idx) + "_data.nii.gz"))

                seg = nib.Nifti1Image(batch["seg"], ImgUtils.get_dwi_affine(HP.DATASET, HP.RESOLUTION))
                nib.save(seg, join(C.HOME, DATASET_DIR, split, "batch_" + str(idx) + "_seg.nii.gz"))
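The core of the loop above is writing each batch out as a NIfTI file. A minimal, self-contained sketch of that save step, assuming only numpy and nibabel (the batch shape and the identity affine are placeholder assumptions; the original derives the affine from the dataset and resolution):

    import os
    from os.path import join

    import nibabel as nib
    import numpy as np

    # Hypothetical stand-ins: a random batch and an identity affine
    batch_data = np.random.rand(144, 144, 44, 9).astype(np.float32)
    affine = np.eye(4)

    out_dir = join("HCP_batches_demo", "train")
    os.makedirs(out_dir, exist_ok=True)

    # Wrap the array with its affine and write a compressed NIfTI file
    img = nib.Nifti1Image(batch_data, affine)
    nib.save(img, join(out_dir, "batch_0_data.nii.gz"))
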
Example #2
    # Assumed imports, as in the previous example, plus:
    #   from sklearn.utils import shuffle as sk_shuffle
    #   and the project helpers ImgUtils, DatasetUtils
    def _create_prob_slices_file(HP, subjects, filename, bundle, shuffle=True):

        mask_dir = join(C.HOME, HP.DATASET_FOLDER)  # unused in this excerpt

        input_dir = HP.MULTI_PARENT_PATH

        combined_slices = []
        mask_slices = []

        for s in subjects:
            print("processing subject {}".format(s))

            # Load the probability maps predicted by the three per-axis U-Nets
            probs_x = nib.load(join(input_dir, "UNet_x_" + str(HP.CV_FOLD), "probmaps", s + "_probmap.nii.gz")).get_data()
            probs_y = nib.load(join(input_dir, "UNet_y_" + str(HP.CV_FOLD), "probmaps", s + "_probmap.nii.gz")).get_data()
            probs_z = nib.load(join(input_dir, "UNet_z_" + str(HP.CV_FOLD), "probmaps", s + "_probmap.nii.gz")).get_data()
            # probs_x = DatasetUtils.scale_input_to_unet_shape(probs_x, HP.DATASET, HP.RESOLUTION)
            # probs_y = DatasetUtils.scale_input_to_unet_shape(probs_y, HP.DATASET, HP.RESOLUTION)
            # probs_z = DatasetUtils.scale_input_to_unet_shape(probs_z, HP.DATASET, HP.RESOLUTION)
            # Stack the three maps -> (73, 87, 73, 18, 3). That is one dimension
            # too many for the U-Net, so flatten the last two axes into one.
            combined = np.stack((probs_x, probs_y, probs_z), axis=4)
            combined = np.reshape(combined, (combined.shape[0], combined.shape[1], combined.shape[2],
                                             combined.shape[3] * combined.shape[4]))  # (73, 87, 73, 3*18)

            # print("combined shape after", combined.shape)

            mask_data = ImgUtils.create_multilabel_mask(HP, s, labels_type=HP.LABELS_TYPE)
            if HP.DATASET in ("HCP_2mm", "HCP_2.5mm"):
                # Use "HCP" here because the mask needs downscaling
                mask_data = DatasetUtils.scale_input_to_unet_shape(mask_data, "HCP", HP.RESOLUTION)
            else:
                # Mask has the same resolution as the probmaps -> same resizing works
                mask_data = DatasetUtils.scale_input_to_unet_shape(mask_data, HP.DATASET, HP.RESOLUTION)

            # Save the combined probability map as a NIfTI image
            img = nib.Nifti1Image(combined, ImgUtils.get_dwi_affine(HP.DATASET, HP.RESOLUTION))
            nib.save(img, join(HP.EXP_PATH, "combined", s + "_combined_probmap.nii.gz"))


            combined = DatasetUtils.scale_input_to_unet_shape(combined, HP.DATASET, HP.RESOLUTION)
            assert combined.shape[2] == mask_data.shape[2]

            # Save as slices: one (x, y, channels) array per z position
            for z in range(combined.shape[2]):
                combined_slices.append(combined[:, :, z, :])
                mask_slices.append(mask_data[:, :, z, :])

        if shuffle:
            # sk_shuffle keeps data/label pairs aligned; the fixed seed makes it reproducible
            combined_slices, mask_slices = sk_shuffle(combined_slices, mask_slices, random_state=9)

        if HP.TRAIN:
            np.save(filename + "_data.npy", combined_slices)
            np.save(filename + "_seg.npy", mask_slices)
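The crux of the function above is the stack-then-reshape step: three per-axis probability maps with 18 channels each become a single 54-channel volume. A minimal numpy sketch of just that transformation, using random stand-in arrays with the shapes noted in the comments:

    import numpy as np

    # Hypothetical per-axis probability maps, shape (73, 87, 73, 18) each
    probs_x = np.random.rand(73, 87, 73, 18)
    probs_y = np.random.rand(73, 87, 73, 18)
    probs_z = np.random.rand(73, 87, 73, 18)

    # New last axis for the three models -> (73, 87, 73, 18, 3),
    # then flatten the last two axes into one channel dimension
    combined = np.stack((probs_x, probs_y, probs_z), axis=4)
    combined = combined.reshape(combined.shape[:3] + (-1,))

    assert combined.shape == (73, 87, 73, 54)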