Example #1
 def test_DATAGENERATOR_runTraining(self):
     pp_fi = Preprocessor(self.data_io,
                          batch_size=4,
                          data_aug=self.data_aug,
                          prepare_subfunctions=False,
                          prepare_batches=False,
                          analysis="fullimage")
     data_gen = DataGenerator(self.sample_list,
                              pp_fi,
                              training=True,
                              shuffle=False,
                              iterations=None)
     self.assertEqual(len(data_gen), 3)
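     # Every training batch is an (image, one-hot segmentation) tuple with batch size 4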
     for batch in data_gen:
         self.assertIsInstance(batch, tuple)
         self.assertEqual(batch[0].shape, (4, 16, 16, 16, 1))
         self.assertEqual(batch[1].shape, (4, 16, 16, 16, 3))
     pp_pc = Preprocessor(self.data_io,
                          batch_size=3,
                          data_aug=self.data_aug,
                          prepare_subfunctions=False,
                          prepare_batches=False,
                          patch_shape=(5, 5, 5),
                          analysis="patchwise-crop")
     data_gen = DataGenerator(self.sample_list,
                              pp_pc,
                              training=True,
                              shuffle=False,
                              iterations=None)
     self.assertEqual(len(data_gen), 4)
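     # Patchwise-crop analysis: each batch holds 3 patches of shape 5x5x5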
     for batch in data_gen:
         self.assertIsInstance(batch, tuple)
         self.assertEqual(batch[0].shape, (3, 5, 5, 5, 1))
         self.assertEqual(batch[1].shape, (3, 5, 5, 5, 3))
Example #2
 def evaluate(self,
              training_samples,
              validation_samples,
              epochs=20,
              iterations=None,
              callbacks=[]):
     # Initialize a Keras Data Generator for generating Training data
     dataGen_training = DataGenerator(training_samples,
                                      self.preprocessor,
                                      training=True,
                                      validation=False,
                                      shuffle=self.shuffle_batches,
                                      iterations=iterations)
     # Initialize a Keras Data Generator for generating Validation data
     dataGen_validation = DataGenerator(validation_samples,
                                        self.preprocessor,
                                        training=True,
                                        validation=True,
                                        shuffle=self.shuffle_batches)
     # Run training & validation process with the Keras fit function
     history = self.model.fit(dataGen_training,
                              validation_data=dataGen_validation,
                              callbacks=callbacks,
                              epochs=epochs,
                              workers=self.workers,
                              max_queue_size=self.batch_queue_size)
     # Clean up temporary files if necessary
     if self.preprocessor.prepare_batches or self.preprocessor.prepare_subfunctions:
         self.preprocessor.data_io.batch_cleanup()
     # Return the training & validation history
     return history
Example #3
 def test_DATAGENERATOR_iterations(self):
     pp_fi = Preprocessor(self.data_io,
                          batch_size=1,
                          data_aug=None,
                          prepare_subfunctions=False,
                          prepare_batches=False,
                          analysis="fullimage")
     data_gen = DataGenerator(self.sample_list,
                              pp_fi,
                              training=True,
                              shuffle=False,
                              iterations=None)
     self.assertEqual(10, len(data_gen))
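     # An explicit iteration count overrides the default epoch length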
     data_gen = DataGenerator(self.sample_list,
                              pp_fi,
                              training=True,
                              shuffle=False,
                              iterations=5)
     self.assertEqual(5, len(data_gen))
     data_gen = DataGenerator(self.sample_list,
                              pp_fi,
                              training=True,
                              shuffle=False,
                              iterations=50)
     self.assertEqual(50, len(data_gen))
     data_gen = DataGenerator(self.sample_list,
                              pp_fi,
                              training=True,
                              shuffle=False,
                              iterations=100)
     self.assertEqual(100, len(data_gen))
Example #4
 def predict(self,
             sample_list,
             return_output=False,
             activation_output=False):
     # Initialize result array for direct output
     if return_output: results = []
     # Iterate over each sample
     for sample in sample_list:
         # Initialize Keras Data Generator for generating batches
         dataGen = DataGenerator([sample],
                                 self.preprocessor,
                                 training=False,
                                 validation=False,
                                 shuffle=False,
                                 iterations=None)
         # Run prediction process with Keras predict
         pred_list = []
         for batch in dataGen:
             pred_batch = self.model.predict_on_batch(batch)
             pred_list.append(pred_batch)
         pred_seg = np.concatenate(pred_list, axis=0)
         # Postprocess prediction
         pred_seg = self.preprocessor.postprocessing(
             sample, pred_seg, activation_output)
         # Backup predicted segmentation
         if return_output: results.append(pred_seg)
         else: self.preprocessor.data_io.save_prediction(pred_seg, sample)
         # Clean up temporary files if necessary
         if self.preprocessor.prepare_batches or self.preprocessor.prepare_subfunctions:
             self.preprocessor.data_io.batch_cleanup()
     # Output prediction results if direct output mode is active
     if return_output: return results
Example #5
 def test_DATAGENERATOR_create(self):
     pp_fi = Preprocessor(self.data_io,
                          batch_size=4,
                          data_aug=self.data_aug,
                          prepare_subfunctions=False,
                          prepare_batches=False,
                          analysis="fullimage")
     data_gen = DataGenerator(self.sample_list,
                              pp_fi,
                              training=False,
                              validation=False,
                              shuffle=False,
                              iterations=None)
     self.assertIsInstance(data_gen, DataGenerator)
Example #6
 def test_DATAGENERATOR_augcyling(self):
     data_aug = Data_Augmentation(cycles=20)
     pp_fi = Preprocessor(self.data_io,
                          batch_size=4,
                          data_aug=data_aug,
                          prepare_subfunctions=False,
                          prepare_batches=False,
                          analysis="fullimage")
     data_gen = DataGenerator(self.sample_list,
                              pp_fi,
                              training=True,
                              shuffle=False,
                              iterations=None)
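     # 10 samples x 20 augmentation cycles = 200 images -> 50 batches of size 4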
     self.assertEqual(50, len(data_gen))
Example #7
 def train(self, sample_list, epochs=20, iterations=None, callbacks=[]):
     # Initialize Keras Data Generator for generating batches
     dataGen = DataGenerator(sample_list, self.preprocessor, training=True,
                             validation=False, shuffle=self.shuffle_batches,
                             iterations=iterations)
     # Run training process with Keras fit_generator
     self.model.fit_generator(generator=dataGen,
                              epochs=epochs,
                              callbacks=callbacks,
                              workers=self.workers,
                              max_queue_size=self.batch_queue_size)
     # Clean up temporary files if necessary
     if self.preprocessor.prepare_batches or self.preprocessor.prepare_subfunctions:
         self.preprocessor.data_io.batch_cleanup()
Example #8
 def test_DATAGENERATOR_prepareData(self):
     pp_fi = Preprocessor(self.data_io,
                          batch_size=4,
                          data_aug=None,
                          prepare_subfunctions=True,
                          prepare_batches=True,
                          analysis="fullimage")
     data_gen = DataGenerator(self.sample_list,
                              pp_fi,
                              training=True,
                              shuffle=True,
                              iterations=None)
     self.assertEqual(len(data_gen), 3)
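     # With prepared batches, every batch holds either 4 samples or the 2-sample remainder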
     for batch in data_gen:
         self.assertIsInstance(batch, tuple)
         self.assertEqual(batch[0].shape[1:], (16, 16, 16, 1))
         self.assertEqual(batch[1].shape[1:], (16, 16, 16, 3))
         self.assertIn(batch[0].shape[0], [2, 4])
Example #9
    def predict_augmentated(self, sample):
        if self.preprocessor.data_augmentation is None:
            raise ValueError("Inference Augmentation requires a " + \
                             "Data Augmentation class instance!")
        else:
            data_aug = self.preprocessor.data_augmentation
        # Initialize result array for the augmented predictions
        results = []
        # Activate inference augmentation
        data_aug.infaug = True
        if self.three_dim: flip_list = data_aug.infaug_flip_list
        else: flip_list = data_aug.infaug_flip_list[:-1]
        # Compute inference for each flip augmentation / for each axis
        for flip_axis in flip_list:
            # Update flip axis
            data_aug.infaug_flip_current = flip_axis
            # Initialize Keras Data Generator for generating batches
            dataGen = DataGenerator([sample],
                                    self.preprocessor,
                                    training=False,
                                    validation=False,
                                    shuffle=False,
                                    iterations=None)

            sampleObj = self.preprocessor.data_io.sample_loader(sample,
                                                                load_seg=False)
            #TODO optimize
            # Run prediction process with Keras predict
            pred_list = []
            for batch in dataGen:
                pred_batch = self.model.predict_on_batch(batch)
                pred_list.append(pred_batch)
            pred_seg = np.concatenate(pred_list, axis=0)
            # Postprocess prediction
            pred_seg = self.preprocessor.postprocessing(sampleObj,
                                                        pred_seg,
                                                        activation_output=True)
            # Backup predicted segmentation for current augmentation
            results.append(pred_seg)
        # Reset inference augmentation mode
        data_aug.infaug = False
        data_aug.infaug_flip_current = None
        # Return result array
        return results
Example #10
 def test_DATAGENERATOR_shuffle(self):
     pp_fi = Preprocessor(self.data_io,
                          batch_size=1,
                          data_aug=None,
                          prepare_subfunctions=False,
                          prepare_batches=False,
                          analysis="fullimage")
     data_gen = DataGenerator(self.sample_list,
                              pp_fi,
                              training=True,
                              shuffle=False,
                              iterations=None)
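     # Collect two consecutive epochs from the unshuffled generator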
     list_ordered = []
     for batch in data_gen:
         list_ordered.append(batch)
     for batch in data_gen:
         list_ordered.append(batch)
     data_gen = DataGenerator(self.sample_list,
                              pp_fi,
                              training=True,
                              shuffle=True,
                              iterations=None)
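     # Collect two epochs from the shuffled generator (reshuffled via on_epoch_end)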
     list_shuffled = []
     for batch in data_gen:
         list_shuffled.append(batch)
     data_gen.on_epoch_end()
     for batch in data_gen:
         list_shuffled.append(batch)
     size = len(data_gen)
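     # Ordered epochs must be identical; shuffled epochs should differ for at least half of the batches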
     o_counter = 0
     s_counter = 0
     for i in range(0, size):
         oa_img = list_ordered[i][0]
         oa_seg = list_ordered[i][1]
         ob_img = list_ordered[i + size][0]
         ob_seg = list_ordered[i + size][1]
         sa_img = list_shuffled[i][0]
         sa_seg = list_shuffled[i][1]
         sb_img = list_shuffled[i + size][0]
         sb_seg = list_shuffled[i + size][1]
         if np.array_equal(oa_img, ob_img) and \
             np.array_equal(oa_seg, ob_seg):
             o_counter += 1
         if not np.array_equal(sa_img, sb_img) and \
             not np.array_equal(sa_seg, sb_seg):
             s_counter += 1
     o_ratio = o_counter / size
     self.assertTrue(o_ratio == 1.0)
     s_ratio = s_counter / size
     self.assertTrue(1.0 >= s_ratio and s_ratio >= 0.5)
Example #11
 def test_DATAGENERATOR_consistency(self):
     pp_fi = Preprocessor(self.data_io,
                          batch_size=1,
                          data_aug=None,
                          prepare_subfunctions=False,
                          prepare_batches=False,
                          analysis="fullimage")
     data_gen = DataGenerator(self.sample_list,
                              pp_fi,
                              training=True,
                              shuffle=False,
                              iterations=None)
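     # Each generated batch must match the corresponding sample loaded directly from the data I/O interface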
     i = 0
     for batch in data_gen:
         sample = self.data_io.sample_loader(self.sample_list[i],
                                             load_seg=True)
         self.assertTrue(np.array_equal(batch[0][0], sample.img_data))
         seg = to_categorical(sample.seg_data, num_classes=3)
         self.assertTrue(np.array_equal(batch[1][0], seg))
         i += 1
Example #12
 def test_DATAGENERATOR_inferenceAug(self):
     data_aug = Data_Augmentation()
     pp_fi = Preprocessor(self.data_io,
                          batch_size=4,
                          data_aug=data_aug,
                          prepare_subfunctions=False,
                          prepare_batches=False,
                          analysis="fullimage")
     data_gen = DataGenerator([self.sample_list[0]],
                              pp_fi,
                              training=False,
                              shuffle=False,
                              iterations=None)
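     # Collect reference batches with inference augmentation disabled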
     pred_list_inactive = []
     for batch in data_gen:
         pred_list_inactive.append(batch)
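     # Enable inference augmentation and collect the batches again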
     data_aug.infaug = True
     pred_list_active = []
     for batch in data_gen:
         pred_list_active.append(batch)
     for i in range(0, len(pred_list_active)):
         ba = pred_list_active[i]
         bi = pred_list_inactive[i]
         self.assertFalse(np.array_equal(ba, bi))
Example #13
# NumPy is needed for the shape statistics at the end of this snippet
import numpy as np

# Create a pixel value normalization Subfunction for z-score scaling
sf_zscore = Normalization(mode="z-score")

# Assemble Subfunction classes into a list
sf = [sf_clipping, sf_normalize, sf_resample, sf_zscore]

# Create and configure the Preprocessor class
pp = Preprocessor(data_io, data_aug=data_aug, batch_size=2, subfunctions=sf,
                  prepare_subfunctions=True, prepare_batches=False,
                  analysis="fullimage", patch_shape=(160, 160, 80))
# Adjust the patch overlap for predictions
pp.patchwise_overlap = (80, 80, 40)


# Initialize Keras Data Generator for generating batches
from miscnn.neural_network.data_generator import DataGenerator
dataGen = DataGenerator(sample_list, pp, training=False, validation=False, shuffle=False)

x = []
y = []
z = []
for batch in dataGen:
    print("Batch:", batch.shape)
    x.append(batch.shape[1])
    y.append(batch.shape[2])
    z.append(batch.shape[3])

print("Mean:")
print(np.mean(x), np.mean(y), np.mean(z))
print(np.median(x), np.median(y), np.median(z))