Example no. 1
 def test_IOI_NIFTI_loading(self):
     interface = NIFTI_interface(pattern="nifti")
     sample_list = interface.initialize(self.tmp_data.name)
     img, extended = interface.load_image(sample_list[0])
     # Create a Sample object
     sample = MIScnn_sample.Sample(sample_list[0], img, interface.channels, interface.classes, extended)
     seg = interface.load_segmentation(sample_list[0])
     
     self.assertTrue(np.array_equal(img, self.img))
     self.assertTrue(np.array_equal(seg, self.seg))
Example no. 2
 def test_IOI_NIFTI_predictionhandling(self):
     interface = NIFTI_interface(pattern="nifti")
     sample_list = interface.initialize(self.tmp_data.name)
     sample = MIScnn_sample.Sample("pred.nifti", np.asarray([0]), interface.channels, interface.classes)
     sample.add_prediction(self.seg)
     interface.save_prediction(sample, self.tmp_data.name)
     pred = interface.load_prediction("pred.nifti", self.tmp_data.name)
     self.assertTrue(np.array_equal(pred.reshape(self.seg.shape), self.seg))
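The round trip this test delegates to the NIfTI interface (save a prediction, reload it, compare it to the original segmentation) can be reproduced standalone with nibabel. The sketch below only illustrates that pattern under arbitrary names and shapes; it is not MIScnn code.

import os
import tempfile
import numpy as np
import nibabel as nib

# Build a small integer segmentation, write it as NIfTI, reload it and compare.
seg = np.random.randint(0, 3, size=(16, 16, 16)).astype(np.uint8)
with tempfile.TemporaryDirectory() as tmp:
    path = os.path.join(tmp, "pred.nii.gz")
    nib.save(nib.Nifti1Image(seg, affine=np.eye(4)), path)
    pred = np.asarray(nib.load(path).dataobj)
    assert np.array_equal(pred.reshape(seg.shape), seg)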
Example no. 3
 def test_IOI_NIFTI_loading(self):
     interface = NIFTI_interface(pattern="nifti")
     sample_list = interface.initialize(self.tmp_data.name)
     img = interface.load_image(sample_list[0])
     seg = interface.load_segmentation(sample_list[0])
     details = interface.load_details(sample_list[0])
     self.assertTrue(np.array_equal(img, self.img))
     self.assertTrue(np.array_equal(seg, self.seg))
Example no. 4
    @classmethod
    def setUpClass(self):
        # Create imaging and segmentation data set
        np.random.seed(1234)
        self.dataset = dict()
        for i in range(0, 10):
            img = np.random.rand(16, 16, 16) * 256
            self.img = img.astype(int)
            seg = np.random.rand(16, 16, 16) * 3
            self.seg = seg.astype(int)
            sample = (self.img, self.seg)
            self.dataset["TEST.sample_" + str(i)] = sample
        # Initialize temporary directory
        self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
        self.tmp_data = os.path.join(self.tmp_dir.name, "data")
        os.mkdir(self.tmp_data)

        for key, value in self.dataset.items():
            write_sample(value, self.tmp_data, key)

        self.dataio = Data_IO(NIFTI_interface(), self.tmp_data)
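setUpClass relies on a write_sample helper that is not shown in these excerpts. Below is a minimal sketch of what such a helper could look like, assuming each sample is stored as a folder containing imaging.nii.gz and segmentation.nii.gz (the layout the MIScnn NIfTI interface reads); the signature matches the call above, but the body is an assumption, not the original implementation.

import os
import numpy as np
import nibabel as nib

def write_sample(sample, data_path, key):
    # One folder per sample; file names follow the assumed NIfTI interface layout.
    img, seg = sample
    sample_dir = os.path.join(data_path, key)
    os.makedirs(sample_dir, exist_ok=True)
    affine = np.eye(4)
    nib.save(nib.Nifti1Image(img.astype(np.int16), affine),
             os.path.join(sample_dir, "imaging.nii.gz"))
    nib.save(nib.Nifti1Image(seg.astype(np.uint8), affine),
             os.path.join(sample_dir, "segmentation.nii.gz"))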
Example no. 5
import os
import numpy as np
from miscnn.data_loading.interfaces import NIFTI_interface
from miscnn import Data_IO, Data_Augmentation
from miscnn.processing.subfunctions import Clipping
from miscnn.evaluation.cross_validation import run_fold, load_csv2fold

#-----------------------------------------------------#
#      Tensorflow Configuration for GPU Cluster       #
#-----------------------------------------------------#
# physical_devices = tf.config.list_physical_devices('GPU')
# tf.config.experimental.set_memory_growth(physical_devices[0], True)

#-----------------------------------------------------#
#               Setup of MIScnn Pipeline              #
#-----------------------------------------------------#
# Initialize Data IO Interface for NIfTI data
## We are using 4 classes: background, lung_left, lung_right, covid-19
interface = NIFTI_interface(channels=1, classes=4)

# Create Data IO object to load and write samples in the file structure
data_io = Data_IO(interface, input_path="data", delete_batchDir=False)

# Access all available samples in our file structure
sample_list = data_io.get_indiceslist()
sample_list.sort()

# Create and configure the Data Augmentation class
data_aug = Data_Augmentation(cycles=1, scaling=True, rotations=True,
                             elastic_deform=True, mirror=True,
                             brightness=True, contrast=True, gamma=True,
                             gaussian_noise=True)
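# Note: this Data_Augmentation instance is typically handed to the Preprocessor
# when it is constructed; the data_aug keyword below is an assumption based on
# MIScnn's Preprocessor (compare the Preprocessor call in Example no. 6), e.g.:
#   pp = Preprocessor(data_io, data_aug=data_aug, batch_size=2,
#                     analysis="patchwise-crop", patch_shape=(160, 160, 80))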

# Create a clipping Subfunction to the lung window of CTs (-1250 and 250)
sf_clipping = Clipping(min=-1250, max=250)
Example no. 6
    def run(self):
        # Create sample list for miscnn
        util.create_sample_list(self.input_dir)

        # Initialize Data IO Interface for NIfTI data
        interface = NIFTI_interface(channels=1, classes=2)

        # Create Data IO object to load and write samples in the file structure
        data_io = Data_IO(interface,
                          input_path=self.input_dir,
                          delete_batchDir=False)

        # Access all available samples in our file structure
        sample_list = data_io.get_indiceslist()
        sample_list.sort()

        # Create a resampling Subfunction to voxel spacing 1.58 x 1.58 x 2.70
        sf_resample = Resampling((1.58, 1.58, 2.70))

        # Create a pixel value normalization Subfunction for z-score scaling
        sf_zscore = Normalization(mode="z-score")

        # Create a pixel value normalization Subfunction to scale between 0-255
        sf_normalize = Normalization(mode="grayscale")

        # Assemble Subfunction classes into a list
        sf = [sf_normalize, sf_resample, sf_zscore]

        # Create and configure the Preprocessor class
        pp = Preprocessor(data_io,
                          batch_size=2,
                          subfunctions=sf,
                          prepare_subfunctions=True,
                          prepare_batches=False,
                          analysis="patchwise-crop",
                          patch_shape=(160, 160, 80))

        # Adjust the patch overlap for predictions
        pp.patchwise_overlap = (80, 80, 30)

        # Initialize the Architecture
        unet_standard = Architecture(depth=4,
                                     activation="softmax",
                                     batch_normalization=True)

        # Create the Neural Network model
        model = Neural_Network(
            preprocessor=pp,
            architecture=unet_standard,
            loss=tversky_crossentropy,
            metrics=[tversky_loss, dice_soft, dice_crossentropy],
            batch_queue_size=3,
            workers=1,
            learninig_rate=0.001)

        # Load best model weights during fitting
        model.load(f'{self.model_dir}{self.model_name}.hdf5')

        # Obtain training and validation data set ----- CHANGE BASED ON PRED/TRAIN
        images, _ = load_disk2fold(f'{self.input_dir}sample_list.json')

        print('\n\nRunning automatic segmentation on samples...\n')
        print(f'Segmenting images: {images}')

        # Compute predictions
        self.predictions = model.predict(images)

        # Delete folder created by miscnn
        shutil.rmtree('batches/')
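The run() method above depends on attributes (input_dir, model_dir, model_name, predictions) and on util.create_sample_list, which belong to its surrounding class and are not shown in this excerpt. Below is a minimal sketch of how such a wrapper class might be shaped and invoked; the class name, constructor and paths are illustrative assumptions, not part of the source.

class CovidSegmenter:
    # Hypothetical wrapper class; run(self) from the snippet above would be
    # attached to it as a method.
    def __init__(self, input_dir, model_dir, model_name):
        self.input_dir = input_dir    # directory with the NIfTI samples (trailing slash expected)
        self.model_dir = model_dir    # directory with the trained weights (trailing slash expected)
        self.model_name = model_name  # weight file name without the .hdf5 extension
        self.predictions = None       # populated by run() via model.predict(...)

segmenter = CovidSegmenter(input_dir="data/", model_dir="models/", model_name="model.best")
# segmenter.run()  # would run the MIScnn inference pipeline and fill segmenter.predictions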
Example no. 7
 def test_IOI_NIFTI_initialize(self):
     interface = NIFTI_interface(pattern="nifti")
     sample_list = interface.initialize(self.tmp_data.name)
     self.assertEqual(len(sample_list), 1)
     self.assertEqual(sample_list[0], "nifti")
Example no. 8
 def test_IOI_NIFTI_creation(self):
     interface = NIFTI_interface()
Example no. 9
 def test_IOI_NIFTI_predictionhandling(self):
     interface = NIFTI_interface(pattern="nifti")
     sample_list = interface.initialize(self.tmp_data.name)
     interface.save_prediction(self.seg, "pred.nifti", self.tmp_data.name)
     pred = interface.load_prediction("pred.nifti", self.tmp_data.name)
     self.assertTrue(np.array_equal(pred, self.seg))