def test_SUBFUNCTIONS_preprocessing(self):
     ds = dict()
     for i in range(0, 10):
         img = np.random.rand(16, 16, 16) * 255
         img = img.astype(int)
         seg = np.random.rand(16, 16, 16) * 3
         seg = seg.astype(int)
         sample = (img, seg)
         ds["TEST.sample_" + str(i)] = sample
     io_interface = Dictionary_interface(ds, classes=3, three_dim=True)
     self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
     tmp_batches = os.path.join(self.tmp_dir.name, "batches")
     dataio = Data_IO(io_interface,
                      input_path="",
                      output_path="",
                      batch_path=tmp_batches,
                      delete_batchDir=False)
     sf = [Resize((8, 8, 8)), Normalization(), Clipping(min=-1.0, max=0.0)]
     pp = Preprocessor(dataio,
                       batch_size=1,
                       prepare_subfunctions=False,
                       analysis="fullimage",
                       subfunctions=sf)
     sample_list = dataio.get_indiceslist()
     batches = pp.run(sample_list, training=True, validation=False)
     for i in range(0, 10):
         img = batches[i][0]
         seg = batches[i][1]
         self.assertEqual(img.shape, (1, 8, 8, 8, 1))
         self.assertEqual(seg.shape, (1, 8, 8, 8, 3))
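         # Clipping bounded the values to [-1, 0]; the looser [-1.75, 0.75]
         # check below presumably leaves headroom for the Preprocessor's
         # default data augmentation applied during training (an assumption,
         # not asserted elsewhere in the suite).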
         self.assertTrue(np.min(img) >= -1.75 and np.max(img) <= 0.75)
     self.tmp_dir.cleanup()
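
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original suite): applying a subfunction
# chain manually to a single sample, built only from calls exercised in the
# tests above (sample_loader, Subfunction.preprocessing). Imports are assumed
# to match this test module's own; `dataio` and `sf` are the objects built in
# test_SUBFUNCTIONS_preprocessing.
def _sketch_manual_subfunctions(dataio, sf):
    sample = dataio.sample_loader("TEST.sample_0", load_seg=True)
    for func in sf:                   # Resize -> Normalization -> Clipping
        func.preprocessing(sample, training=True)
    # After Resize((8, 8, 8)) the image should be shaped (8, 8, 8, 1)
    return sample.img_data.shape
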
 def setUpClass(self):
     np.random.seed(1234)
     # Create imaging and segmentation data set
     self.dataset = dict()
     for i in range(0, 10):
         img = np.random.rand(16, 16, 16) * 255
         self.img = img.astype(int)
         seg = np.random.rand(16, 16, 16) * 3
         self.seg = seg.astype(int)
         sample = (self.img, self.seg)
         self.dataset["TEST.sample_" + str(i)] = sample
     # Initialize Dictionary IO Interface
     io_interface = Dictionary_interface(self.dataset,
                                         classes=3,
                                         three_dim=True)
     # Initialize temporary directory
     self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
     tmp_batches = os.path.join(self.tmp_dir.name, "batches")
     # Initialize Data IO
     self.data_io = Data_IO(io_interface,
                            input_path="",
                            output_path="",
                            batch_path=tmp_batches,
                            delete_batchDir=False)
     # Initialize Data Augmentation
     self.data_aug = Data_Augmentation()
     # Get sample list
     self.sample_list = self.data_io.get_indiceslist()
 def setUpClass(self):
     np.random.seed(1234)
     # Create 2D imaging and segmentation data set
     self.dataset = dict()
     for i in range(0, 2):
         img = np.random.rand(16, 16) * 255
         self.img = img.astype(int)
         seg = np.random.rand(16, 16) * 2
         self.seg = seg.astype(int)
         self.dataset["TEST.sample_" + str(i)] = (self.img, self.seg)
     # Initialize Dictionary IO Interface
     io_interface = Dictionary_interface(self.dataset, classes=3,
                                           three_dim=False)
     # Initialize temporary directory
     self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
     tmp_batches = os.path.join(self.tmp_dir.name, "batches")
     # Initialize Data IO
     self.data_io = Data_IO(io_interface,
                            input_path=os.path.join(self.tmp_dir.name),
                            output_path=os.path.join(self.tmp_dir.name),
                            batch_path=tmp_batches, delete_batchDir=False)
     # Initialize Preprocessor
     self.pp = Preprocessor(self.data_io, batch_size=1,
                            data_aug=None, analysis="fullimage")
     # Initialize Neural Network
     self.model = Neural_Network(self.pp)
     # Get sample list
     self.sample_list = self.data_io.get_indiceslist()
 def setUpClass(self):
     np.random.seed(1234)
     # Create 2D imaging and segmentation data set
     self.dataset2D = dict()
     for i in range(0, 6):
         img = np.random.rand(16, 16) * 255
         self.img = img.astype(int)
         seg = np.random.rand(16, 16) * 3
         self.seg = seg.astype(int)
         self.dataset2D["TEST.sample_" + str(i)] = (self.img, self.seg)
     # Initialize Dictionary IO Interface
     io_interface2D = Dictionary_interface(self.dataset2D,
                                           classes=3,
                                           three_dim=False)
     # Initialize temporary directory
     self.tmp_dir2D = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
     tmp_batches = os.path.join(self.tmp_dir2D.name, "batches")
     # Initialize Data IO
     self.data_io2D = Data_IO(io_interface2D,
                              input_path=os.path.join(self.tmp_dir2D.name),
                              output_path=os.path.join(self.tmp_dir2D.name),
                              batch_path=tmp_batches,
                              delete_batchDir=False)
     # Initialize Preprocessor
     self.pp2D = Preprocessor(self.data_io2D,
                              batch_size=2,
                              data_aug=None,
                              analysis="fullimage")
     # Get sample list
     self.sample_list2D = self.data_io2D.get_indiceslist()
     # Create 3D imaging and segmentation data set
     self.dataset3D = dict()
     for i in range(0, 6):
         img = np.random.rand(16, 16, 16) * 255
         self.img = img.astype(int)
         seg = np.random.rand(16, 16, 16) * 3
         self.seg = seg.astype(int)
         self.dataset3D["TEST.sample_" + str(i)] = (self.img, self.seg)
     # Initialize Dictionary IO Interface
     io_interface3D = Dictionary_interface(self.dataset3D,
                                           classes=3,
                                           three_dim=True)
     # Initialize temporary directory
     self.tmp_dir3D = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
     tmp_batches = os.path.join(self.tmp_dir3D.name, "batches")
     # Initialize Data IO
     self.data_io3D = Data_IO(io_interface3D,
                              input_path=os.path.join(self.tmp_dir3D.name),
                              output_path=os.path.join(self.tmp_dir3D.name),
                              batch_path=tmp_batches,
                              delete_batchDir=False)
     # Initialize Preprocessor
     self.pp3D = Preprocessor(self.data_io3D,
                              batch_size=2,
                              data_aug=None,
                              analysis="fullimage")
     # Get sample list
     self.sample_list3D = self.data_io3D.get_indiceslist()
 def test_DATAIO_BASE_getSampleList(self):
     data_io = Data_IO(self.io_interface,
                       input_path="",
                       output_path="",
                       batch_path=self.tmp_batches,
                       delete_batchDir=False)
     sample_list = data_io.get_indiceslist()
     self.assertEqual(len(sample_list), 10)
     self.assertIn("TEST.sample_0", sample_list)
 def test_DATAIO_SampleLoader_Imaging(self):
     data_io = Data_IO(self.io_interface,
                       input_path="",
                       output_path="",
                       batch_path=self.tmp_batches,
                       delete_batchDir=False)
     sample = data_io.sample_loader("TEST.sample_0",
                                    backup=False,
                                    load_seg=False,
                                    load_pred=False)
     self.assertTrue(
         np.array_equal(np.reshape(sample.img_data, (16, 16, 16)),
                        self.dataset["TEST.sample_0"][0]))
     self.assertEqual(sample.img_data.shape, (16, 16, 16, 1))
 def test_DATAIO_SampleLoader_Combined(self):
     data_io = Data_IO(self.io_interface,
                       input_path="",
                       output_path="",
                       batch_path=self.tmp_batches,
                       delete_batchDir=False)
     sample = data_io.sample_loader("TEST.sample_3",
                                    backup=False,
                                    load_seg=True,
                                    load_pred=True)
     self.assertIsNotNone(sample.img_data)
     self.assertIsNotNone(sample.seg_data)
     self.assertIsNotNone(sample.pred_data)
     self.assertEqual(sample.img_data.shape, sample.seg_data.shape)
     self.assertEqual(sample.seg_data.shape, sample.pred_data.shape)
 def setUpClass(self):
     np.random.seed(1234)
     # Create 2D imaging and segmentation data set
     self.dataset2D = dict()
     for i in range(0, 10):
         img = np.random.rand(16, 16) * 255
         self.img = img.astype(int)
         seg = np.random.rand(16, 16) * 2
         self.seg = seg.astype(int)
         self.dataset2D["TEST.sample_" + str(i)] = (self.img, self.seg)
     # Initialize Dictionary IO Interface
     io_interface2D = Dictionary_interface(self.dataset2D,
                                           classes=3,
                                           three_dim=False)
     # Initialize temporary directory
     self.tmp_dir2D = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
     tmp_batches = os.path.join(self.tmp_dir2D.name, "batches")
     # Initialize Data IO
     self.data_io2D = Data_IO(io_interface2D,
                              input_path="",
                              output_path="",
                              batch_path=tmp_batches,
                              delete_batchDir=False)
     # Create 3D imaging and segmentation data set
     self.dataset3D = dict()
     for i in range(0, 10):
         img = np.random.rand(16, 16, 16) * 255
         self.img = img.astype(int)
         seg = np.random.rand(16, 16, 16) * 3
         self.seg = seg.astype(int)
         if i in range(8, 10):
             sample = (self.img, None)
         else:
             sample = (self.img, self.seg)
         self.dataset3D["TEST.sample_" + str(i)] = sample
     # Initialize Dictionary IO Interface
     io_interface3D = Dictionary_interface(self.dataset3D,
                                           classes=3,
                                           three_dim=True)
     # Initialize temporary directory
     self.tmp_dir3D = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
     tmp_batches = os.path.join(self.tmp_dir3D.name, "batches")
     # Initialize Data IO
     self.data_io3D = Data_IO(io_interface3D,
                              input_path="",
                              output_path="",
                              batch_path=tmp_batches,
                              delete_batchDir=False)
 def test_SUBFUNCTIONS_postprocessing(self):
     ds = dict()
     for i in range(0, 10):
         img = np.random.rand(16, 16, 16) * 255
         img = img.astype(int)
         seg = np.random.rand(16, 16, 16) * 3
         seg = seg.astype(int)
         sample = (img, seg)
         ds["TEST.sample_" + str(i)] = sample
     io_interface = Dictionary_interface(ds, classes=3, three_dim=True)
     self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
     tmp_batches = os.path.join(self.tmp_dir.name, "batches")
     dataio = Data_IO(io_interface,
                      input_path="",
                      output_path="",
                      batch_path=tmp_batches,
                      delete_batchDir=False)
     sf = [Resize((9, 9, 9)), Normalization(), Clipping(min=-1.0, max=0.0)]
     pp = Preprocessor(dataio,
                       batch_size=1,
                       prepare_subfunctions=False,
                       analysis="patchwise-grid",
                       subfunctions=sf,
                       patch_shape=(4, 4, 4))
     sample_list = dataio.get_indiceslist()
     for index in sample_list:
         sample = dataio.sample_loader(index)
         for func in pp.subfunctions:
             func.preprocessing(sample, training=False)
         pp.cache["shape_" + str(index)] = sample.img_data.shape
         sample.seg_data = np.random.rand(9, 9, 9) * 3
         sample.seg_data = sample.seg_data.astype(int)
         sample.seg_data = to_categorical(sample.seg_data, num_classes=3)
         data_patches = pp.analysis_patchwise_grid(sample,
                                                   training=True,
                                                   data_aug=False)
         seg_list = []
         for i in range(0, len(data_patches)):
             seg_list.append(data_patches[i][1])
         seg = np.stack(seg_list, axis=0)
         self.assertEqual(seg.shape, (27, 4, 4, 4, 3))
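         # 27 patches: the resized 9x9x9 volume with (4, 4, 4) patches gives
         # ceil(9 / 4) = 3 grid positions per axis, i.e. 3**3 = 27 (assuming
         # the default grid overlap is zero).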
         pred = pp.postprocessing(index, seg)
         self.assertEqual(pred.shape, (16, 16, 16))
     self.tmp_dir.cleanup()
 def test_DATAIO_BATCHES_loading(self):
     data_io = Data_IO(self.io_interface,
                       input_path="",
                       output_path="",
                       batch_path=self.tmp_batches,
                       delete_batchDir=False)
     sample = data_io.sample_loader("TEST.sample_0",
                                    backup=False,
                                    load_seg=True,
                                    load_pred=False)
     data_io.backup_batches(sample.img_data, sample.seg_data, "abc")
     img = data_io.batch_load(pointer="abc", img=True)
     self.assertTrue(np.array_equal(sample.img_data, img))
     seg = data_io.batch_load(pointer="abc", img=False)
     self.assertTrue(np.array_equal(sample.seg_data, seg))
     data_io.batch_cleanup()
    def test_EVALUATION_leaveOneOut(self):
        # Create 3D imaging and segmentation data set
        self.dataset3D = dict()
        for i in range(0, 6):
            img = np.random.rand(16, 16, 16) * 255
            self.img = img.astype(int)
            seg = np.random.rand(16, 16, 16) * 3
            self.seg = seg.astype(int)
            self.dataset3D["TEST.sample_" + str(i)] = (self.img, self.seg)
        # Initialize Dictionary IO Interface
        io_interface3D = Dictionary_interface(self.dataset3D,
                                              classes=3,
                                              three_dim=True)
        # Initialize temporary directory
        self.tmp_dir3D = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
        tmp_batches = os.path.join(self.tmp_dir3D.name, "batches")
        # Initialize Data IO
        self.data_io3D = Data_IO(io_interface3D,
                                 input_path=os.path.join(self.tmp_dir3D.name),
                                 output_path=os.path.join(self.tmp_dir3D.name),
                                 batch_path=tmp_batches,
                                 delete_batchDir=False)
        # Initialize Preprocessor
        self.pp3D = Preprocessor(self.data_io3D,
                                 batch_size=2,
                                 data_aug=None,
                                 analysis="fullimage")
        # Initialize Neural Network
        model = Neural_Network(self.pp3D)
        # Get sample list
        self.sample_list3D = self.data_io3D.get_indiceslist()

        eval_path = os.path.join(self.tmp_dir3D.name, "evaluation")
        leave_one_out(self.sample_list3D,
                      model,
                      epochs=3,
                      iterations=None,
                      evaluation_path=eval_path,
                      callbacks=[])
        self.assertTrue(os.path.exists(eval_path))
        # Clean up temporary files
        self.tmp_dir3D.cleanup()
 def test_DATAIO_SampleLoader_Prediction(self):
     data_io = Data_IO(self.io_interface,
                       input_path="",
                       output_path="",
                       batch_path=self.tmp_batches,
                       delete_batchDir=False)
     sample = data_io.sample_loader("TEST.sample_5",
                                    backup=False,
                                    load_seg=False,
                                    load_pred=True)
     self.assertTrue(
         np.array_equal(np.reshape(sample.pred_data, (16, 16, 16)),
                        self.dataset["TEST.sample_5"][2]))
     self.assertEqual(sample.pred_data.shape, (16, 16, 16, 1))
     self.assertIsNotNone(sample.img_data)
     self.assertIsNone(sample.seg_data)
     with self.assertRaises(Exception):
         sample = data_io.sample_loader("TEST.sample_2",
                                        backup=False,
                                        load_seg=False,
                                        load_pred=True)
 def test_DATAIO_BATCHES_sampleLoading(self):
     data_io = Data_IO(self.io_interface,
                       input_path="",
                       output_path="",
                       batch_path=self.tmp_batches,
                       delete_batchDir=False)
     sample = data_io.sample_loader("TEST.sample_0",
                                    backup=False,
                                    load_seg=True,
                                    load_pred=False)
     data_io.backup_sample(sample)
     sample_new = data_io.load_sample_pickle(sample.index)
     data_io.batch_cleanup()
     self.assertTrue(np.array_equal(sample_new.img_data, sample.img_data))
     self.assertTrue(np.array_equal(sample_new.seg_data, sample.seg_data))
 def test_DATAIO_BATCHES_sampleStorage(self):
     data_io = Data_IO(self.io_interface,
                       input_path="",
                       output_path="",
                       batch_path=self.tmp_batches,
                       delete_batchDir=False)
     sample = data_io.sample_loader("TEST.sample_0",
                                    backup=False,
                                    load_seg=True,
                                    load_pred=False)
     data_io.backup_sample(sample)
     self.assertEqual(len(os.listdir(self.tmp_batches)), 1)
     data_io.batch_cleanup()
 def test_DATAIO_BASE_savePrediction(self):
     data_io = Data_IO(self.io_interface,
                       input_path="",
                       output_path=os.path.join(self.tmp_dir.name, "pred"),
                       batch_path=self.tmp_batches,
                       delete_batchDir=False)
     sample = data_io.sample_loader("TEST.sample_0",
                                    backup=False,
                                    load_seg=True,
                                    load_pred=False)
     self.assertIsNone(sample.pred_data)
     data_io.save_prediction(sample.seg_data, sample.index)
     self.assertTrue(os.path.exists(os.path.join(self.tmp_dir.name,
                                                 "pred")))
     sample = data_io.sample_loader("TEST.sample_0",
                                    backup=False,
                                    load_seg=True,
                                    load_pred=True)
     self.assertTrue(np.array_equal(sample.seg_data, sample.pred_data))
class DataGeneratorTEST(unittest.TestCase):
    # Create random imaging and segmentation data
    @classmethod
    def setUpClass(self):
        np.random.seed(1234)
        # Create imaging and segmentation data set
        self.dataset = dict()
        for i in range(0, 10):
            img = np.random.rand(16, 16, 16) * 255
            self.img = img.astype(int)
            seg = np.random.rand(16, 16, 16) * 3
            self.seg = seg.astype(int)
            sample = (self.img, self.seg)
            self.dataset["TEST.sample_" + str(i)] = sample
        # Initialize Dictionary IO Interface
        io_interface = Dictionary_interface(self.dataset,
                                            classes=3,
                                            three_dim=True)
        # Initialize temporary directory
        self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
        tmp_batches = os.path.join(self.tmp_dir.name, "batches")
        # Initialize Data IO
        self.data_io = Data_IO(io_interface,
                               input_path="",
                               output_path="",
                               batch_path=tmp_batches,
                               delete_batchDir=False)
        # Initialize Data Augmentation
        self.data_aug = Data_Augmentation()
        # Get sample list
        self.sample_list = self.data_io.get_indiceslist()

    # Delete all temporary files
    @classmethod
    def tearDownClass(self):
        self.tmp_dir.cleanup()

    #-------------------------------------------------#
    #                Base Functionality               #
    #-------------------------------------------------#
    # Class Creation
    def test_DATAGENERATOR_create(self):
        pp_fi = Preprocessor(self.data_io,
                             batch_size=4,
                             data_aug=self.data_aug,
                             prepare_subfunctions=False,
                             prepare_batches=False,
                             analysis="fullimage")
        data_gen = DataGenerator(self.sample_list,
                                 pp_fi,
                                 training=False,
                                 validation=False,
                                 shuffle=False,
                                 iterations=None)
        self.assertIsInstance(data_gen, DataGenerator)

    # Run data generation for training
    def test_DATAGENERATOR_runTraining(self):
        pp_fi = Preprocessor(self.data_io,
                             batch_size=4,
                             data_aug=self.data_aug,
                             prepare_subfunctions=False,
                             prepare_batches=False,
                             analysis="fullimage")
        data_gen = DataGenerator(self.sample_list,
                                 pp_fi,
                                 training=True,
                                 shuffle=False,
                                 iterations=None)
        self.assertEqual(len(data_gen), 3)
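        # 3 batches: ceil(10 samples / batch_size 4)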
        for batch in data_gen:
            self.assertIsInstance(batch, tuple)
            self.assertEqual(batch[0].shape, (4, 16, 16, 16, 1))
            self.assertEqual(batch[1].shape, (4, 16, 16, 16, 3))
        pp_pc = Preprocessor(self.data_io,
                             batch_size=3,
                             data_aug=self.data_aug,
                             prepare_subfunctions=False,
                             prepare_batches=False,
                             patch_shape=(5, 5, 5),
                             analysis="patchwise-crop")
        data_gen = DataGenerator(self.sample_list,
                                 pp_pc,
                                 training=True,
                                 shuffle=False,
                                 iterations=None)
        self.assertEqual(len(data_gen), 4)
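        # 4 batches: during training patchwise-crop draws one random patch
        # per sample (as the Preprocessor patchwise-crop tests assert),
        # so ceil(10 / 3) = 4.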
        for batch in data_gen:
            self.assertIsInstance(batch, tuple)
            self.assertEqual(batch[0].shape, (3, 5, 5, 5, 1))
            self.assertEqual(batch[1].shape, (3, 5, 5, 5, 3))

    # Run data generation for prediction
    def test_DATAGENERATOR_runPrediction(self):
        pp_fi = Preprocessor(self.data_io,
                             batch_size=4,
                             data_aug=self.data_aug,
                             prepare_subfunctions=False,
                             prepare_batches=False,
                             analysis="fullimage")
        data_gen = DataGenerator(self.sample_list,
                                 pp_fi,
                                 training=False,
                                 shuffle=False,
                                 iterations=None)
        self.assertEqual(len(data_gen), 10)
        for batch in data_gen:
            self.assertNotIsInstance(batch, tuple)
            self.assertEqual(batch.shape, (1, 16, 16, 16, 1))
        pp_pc = Preprocessor(self.data_io,
                             batch_size=3,
                             data_aug=self.data_aug,
                             prepare_subfunctions=False,
                             prepare_batches=False,
                             patch_shape=(5, 5, 5),
                             analysis="patchwise-crop")
        data_gen = DataGenerator(self.sample_list,
                                 pp_pc,
                                 training=False,
                                 shuffle=False,
                                 iterations=None)
        self.assertEqual(len(data_gen), 220)
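        # 220 batches: at inference each 16**3 sample is split into a full
        # patch grid of ceil(16 / 5)**3 = 64 patches (an inference from the
        # counts); with batch_size 3 that is 21 full batches + 1 remainder
        # = 22 per sample, times 10 samples.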
        for batch in data_gen:
            self.assertNotIsInstance(batch, tuple)
            self.assertIn(batch.shape, [(3, 5, 5, 5, 1), (1, 5, 5, 5, 1)])

    # Check if full images without data augmentation are consistent
    def test_DATAGENERATOR_consistency(self):
        pp_fi = Preprocessor(self.data_io,
                             batch_size=1,
                             data_aug=None,
                             prepare_subfunctions=False,
                             prepare_batches=False,
                             analysis="fullimage")
        data_gen = DataGenerator(self.sample_list,
                                 pp_fi,
                                 training=True,
                                 shuffle=False,
                                 iterations=None)
        i = 0
        for batch in data_gen:
            sample = self.data_io.sample_loader(self.sample_list[i],
                                                load_seg=True)
            self.assertTrue(np.array_equal(batch[0][0], sample.img_data))
            seg = to_categorical(sample.seg_data, num_classes=3)
            self.assertTrue(np.array_equal(batch[1][0], seg))
            i += 1

    # Iteration fixation test
    def test_DATAGENERATOR_iterations(self):
        pp_fi = Preprocessor(self.data_io,
                             batch_size=1,
                             data_aug=None,
                             prepare_subfunctions=False,
                             prepare_batches=False,
                             analysis="fullimage")
        data_gen = DataGenerator(self.sample_list,
                                 pp_fi,
                                 training=True,
                                 shuffle=False,
                                 iterations=None)
        self.assertEqual(10, len(data_gen))
        data_gen = DataGenerator(self.sample_list,
                                 pp_fi,
                                 training=True,
                                 shuffle=False,
                                 iterations=5)
        self.assertEqual(5, len(data_gen))
        data_gen = DataGenerator(self.sample_list,
                                 pp_fi,
                                 training=True,
                                 shuffle=False,
                                 iterations=50)
        self.assertEqual(50, len(data_gen))
        data_gen = DataGenerator(self.sample_list,
                                 pp_fi,
                                 training=True,
                                 shuffle=False,
                                 iterations=100)
        self.assertEqual(100, len(data_gen))

    # Augmentation cycling test
    def test_DATAGENERATOR_augcycling(self):
        data_aug = Data_Augmentation(cycles=20)
        pp_fi = Preprocessor(self.data_io,
                             batch_size=4,
                             data_aug=data_aug,
                             prepare_subfunctions=False,
                             prepare_batches=False,
                             analysis="fullimage")
        data_gen = DataGenerator(self.sample_list,
                                 pp_fi,
                                 training=True,
                                 shuffle=False,
                                 iterations=None)
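        # 20 augmentation cycles turn 10 samples into 200 images, so
        # batch_size 4 yields the 50 batches asserted below.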
        self.assertEqual(50, len(data_gen))

    # Check if shuffling is functional
    def test_DATAGENERATOR_shuffle(self):
        pp_fi = Preprocessor(self.data_io,
                             batch_size=1,
                             data_aug=None,
                             prepare_subfunctions=False,
                             prepare_batches=False,
                             analysis="fullimage")
        data_gen = DataGenerator(self.sample_list,
                                 pp_fi,
                                 training=True,
                                 shuffle=False,
                                 iterations=None)
        list_ordered = []
        for batch in data_gen:
            list_ordered.append(batch)
        for batch in data_gen:
            list_ordered.append(batch)
        data_gen = DataGenerator(self.sample_list,
                                 pp_fi,
                                 training=True,
                                 shuffle=True,
                                 iterations=None)
        list_shuffled = []
        for batch in data_gen:
            list_shuffled.append(batch)
        data_gen.on_epoch_end()
        for batch in data_gen:
            list_shuffled.append(batch)
        size = len(data_gen)
        o_counter = 0
        s_counter = 0
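        # Compare epoch one against epoch two at each position: without
        # shuffling every pair must match; with shuffling at least half
        # should differ (a random permutation may fix some positions by
        # chance, hence the 0.5 lower bound below).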
        for i in range(0, size):
            oa_img = list_ordered[i][0]
            oa_seg = list_ordered[i][1]
            ob_img = list_ordered[i + size][0]
            ob_seg = list_ordered[i + size][1]
            sa_img = list_shuffled[i][0]
            sa_seg = list_shuffled[i][1]
            sb_img = list_shuffled[i + size][0]
            sb_seg = list_shuffled[i + size][1]
            if np.array_equal(oa_img, ob_img) and \
                np.array_equal(oa_seg, ob_seg):
                o_counter += 1
            if not np.array_equal(sa_img, sb_img) and \
                not np.array_equal(sa_seg, sb_seg):
                s_counter += 1
        o_ratio = o_counter / size
        self.assertTrue(o_ratio == 1.0)
        s_ratio = s_counter / size
        self.assertTrue(1.0 >= s_ratio and s_ratio >= 0.5)

    # Run data generation with preparation of subfunctions and batches
    def test_DATAGENERATOR_prepareData(self):
        pp_fi = Preprocessor(self.data_io,
                             batch_size=4,
                             data_aug=None,
                             prepare_subfunctions=True,
                             prepare_batches=True,
                             analysis="fullimage")
        data_gen = DataGenerator(self.sample_list,
                                 pp_fi,
                                 training=True,
                                 shuffle=True,
                                 iterations=None)
        self.assertEqual(len(data_gen), 3)
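        # 3 batches: ceil(10 samples / batch_size 4); the last prepared batch
        # holds the 2 leftover samples (10 % 4), hence the [2, 4] membership
        # check below.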
        for batch in data_gen:
            self.assertIsInstance(batch, tuple)
            self.assertEqual(batch[0].shape[1:], (16, 16, 16, 1))
            self.assertEqual(batch[1].shape[1:], (16, 16, 16, 3))
            self.assertIn(batch[0].shape[0], [2, 4])
class PatchOperationsTEST(unittest.TestCase):
    # Create random imaging and segmentation data
    @classmethod
    def setUpClass(self):
        np.random.seed(1234)
        # Create 2D imaging and segmentation data set
        self.dataset2D = dict()
        for i in range(0, 10):
            img = np.random.rand(16, 16) * 255
            self.img = img.astype(int)
            seg = np.random.rand(16, 16) * 2
            self.seg = seg.astype(int)
            self.dataset2D["TEST.sample_" + str(i)] = (self.img, self.seg)
        # Initialize Dictionary IO Interface
        io_interface2D = Dictionary_interface(self.dataset2D,
                                              classes=3,
                                              three_dim=False)
        # Initialize temporary directory
        self.tmp_dir2D = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
        tmp_batches = os.path.join(self.tmp_dir2D.name, "batches")
        # Initialize Data IO
        self.data_io2D = Data_IO(io_interface2D,
                                 input_path="",
                                 output_path="",
                                 batch_path=tmp_batches,
                                 delete_batchDir=False)
        # Create 3D imaging and segmentation data set
        self.dataset3D = dict()
        for i in range(0, 10):
            img = np.random.rand(16, 16, 16) * 255
            self.img = img.astype(int)
            seg = np.random.rand(16, 16, 16) * 3
            self.seg = seg.astype(int)
            self.dataset3D["TEST.sample_" + str(i)] = (self.img, self.seg)
        # Initialize Dictionary IO Interface
        io_interface3D = Dictionary_interface(self.dataset3D,
                                              classes=3,
                                              three_dim=True)
        # Initialize temporary directory
        self.tmp_dir3D = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
        tmp_batches = os.path.join(self.tmp_dir3D.name, "batches")
        # Initialize Data IO
        self.data_io3D = Data_IO(io_interface3D,
                                 input_path="",
                                 output_path="",
                                 batch_path=tmp_batches,
                                 delete_batchDir=False)

    # Delete all temporary files
    @classmethod
    def tearDownClass(self):
        self.tmp_dir2D.cleanup()
        self.tmp_dir3D.cleanup()

    #-------------------------------------------------#
    #                   Slice Matrix                  #
    #-------------------------------------------------#
    def test_PATCHOPERATIONS_slicing(self):
        sample_list = self.data_io2D.get_indiceslist()
        for index in sample_list:
            sample = self.data_io2D.sample_loader(index)
            patches = slice_matrix(sample.img_data,
                                   window=(5, 5),
                                   overlap=(2, 2),
                                   three_dim=False)
            self.assertEqual(len(patches), 25)
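            # 25 = 5**2 patches: window 5 with overlap 2 gives stride 3, so
            # each 16-wide axis yields ceil((16 - 5) / 3) + 1 = 5 positions
            # (and 5**3 = 125 for the 3D case below).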
            self.assertEqual(patches[0].shape, (5, 5, 1))
        sample_list = self.data_io3D.get_indiceslist()
        for index in sample_list:
            sample = self.data_io3D.sample_loader(index)
            patches = slice_matrix(sample.img_data,
                                   window=(5, 5, 5),
                                   overlap=(2, 2, 2),
                                   three_dim=True)
            self.assertEqual(len(patches), 125)
            self.assertEqual(patches[0].shape, (5, 5, 5, 1))

    #-------------------------------------------------#
    #               Concatenate Matrices              #
    #-------------------------------------------------#
    def test_PATCHOPERATIONS_concatenate(self):
        sample_list = self.data_io2D.get_indiceslist()
        for index in sample_list:
            sample = self.data_io2D.sample_loader(index)
            patches = slice_matrix(sample.img_data,
                                   window=(5, 5),
                                   overlap=(2, 2),
                                   three_dim=False)
            concat = concat_matrices(patches=patches,
                                     image_size=(16, 16),
                                     window=(5, 5),
                                     overlap=(2, 2),
                                     three_dim=False)
            self.assertEqual(concat.shape, (16, 16, 1))
        sample_list = self.data_io3D.get_indiceslist()
        for index in sample_list:
            sample = self.data_io3D.sample_loader(index)
            patches = slice_matrix(sample.img_data,
                                   window=(5, 5, 5),
                                   overlap=(2, 2, 2),
                                   three_dim=True)
            concat = concat_matrices(patches=patches,
                                     image_size=(16, 16, 16),
                                     window=(5, 5, 5),
                                     overlap=(2, 2, 2),
                                     three_dim=True)
            self.assertEqual(concat.shape, (16, 16, 16, 1))

    #-------------------------------------------------#
    #                  Patch Padding                  #
    #-------------------------------------------------#
    def test_PATCHOPERATIONS_padding(self):
        sample_list = self.data_io2D.get_indiceslist()
        for index in sample_list:
            sample = self.data_io2D.sample_loader(index)
            img_padded = pad_patch(np.expand_dims(sample.img_data, axis=0),
                                   patch_shape=(8, 20),
                                   return_slicer=False)
            self.assertEqual(img_padded.shape, (1, 16, 20, 1))
        sample_list = self.data_io3D.get_indiceslist()
        for index in sample_list:
            sample = self.data_io3D.sample_loader(index)
            img_padded = pad_patch(np.expand_dims(sample.img_data, axis=0),
                                   patch_shape=(8, 16, 32),
                                   return_slicer=False)
            self.assertEqual(img_padded.shape, (1, 16, 16, 32, 1))

    #-------------------------------------------------#
    #                  Patch Cropping                 #
    #-------------------------------------------------#
    def test_PATCHOPERATIONS_cropping(self):
        sample_list = self.data_io2D.get_indiceslist()
        for index in sample_list:
            sample = self.data_io2D.sample_loader(index)
            img_padded, slicer = pad_patch(np.expand_dims(sample.img_data,
                                                          axis=0),
                                           patch_shape=(8, 20),
                                           return_slicer=True)
            img_processed = crop_patch(img_padded, slicer)
            self.assertEqual(img_processed.shape, (1, 16, 16, 1))
        sample_list = self.data_io3D.get_indiceslist()
        for index in sample_list:
            sample = self.data_io3D.sample_loader(index)
            img_padded, slicer = pad_patch(np.expand_dims(sample.img_data,
                                                          axis=0),
                                           patch_shape=(8, 16, 32),
                                           return_slicer=True)
            img_processed = crop_patch(img_padded, slicer)
            self.assertEqual(img_processed.shape, (1, 16, 16, 16, 1))
 def test_DATAIO_BASE_create(self):
     data_io = Data_IO(self.io_interface,
                       input_path="",
                       output_path="",
                       batch_path=self.tmp_batches,
                       delete_batchDir=False)
class NeuralNetworkTEST(unittest.TestCase):
    # Create random imaging and segmentation data
    @classmethod
    def setUpClass(self):
        np.random.seed(1234)
        # Create 2D imaging and segmentation data set
        self.dataset2D = dict()
        for i in range(0, 6):
            img = np.random.rand(16, 16) * 255
            self.img = img.astype(int)
            seg = np.random.rand(16, 16) * 3
            self.seg = seg.astype(int)
            self.dataset2D["TEST.sample_" + str(i)] = (self.img, self.seg)
        # Initialize Dictionary IO Interface
        io_interface2D = Dictionary_interface(self.dataset2D,
                                              classes=3,
                                              three_dim=False)
        # Initialize temporary directory
        self.tmp_dir2D = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
        tmp_batches = os.path.join(self.tmp_dir2D.name, "batches")
        # Initialize Data IO
        self.data_io2D = Data_IO(io_interface2D,
                                 input_path=os.path.join(self.tmp_dir2D.name),
                                 output_path=os.path.join(self.tmp_dir2D.name),
                                 batch_path=tmp_batches,
                                 delete_batchDir=False)
        # Initialize Preprocessor
        self.pp2D = Preprocessor(self.data_io2D,
                                 batch_size=2,
                                 data_aug=None,
                                 analysis="fullimage")
        # Get sample list
        self.sample_list2D = self.data_io2D.get_indiceslist()
        # Create 3D imaging and segmentation data set
        self.dataset3D = dict()
        for i in range(0, 6):
            img = np.random.rand(16, 16, 16) * 255
            self.img = img.astype(int)
            seg = np.random.rand(16, 16, 16) * 3
            self.seg = seg.astype(int)
            self.dataset3D["TEST.sample_" + str(i)] = (self.img, self.seg)
        # Initialize Dictionary IO Interface
        io_interface3D = Dictionary_interface(self.dataset3D,
                                              classes=3,
                                              three_dim=True)
        # Initialize temporary directory
        self.tmp_dir3D = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
        tmp_batches = os.path.join(self.tmp_dir3D.name, "batches")
        # Initialize Data IO
        self.data_io3D = Data_IO(io_interface3D,
                                 input_path=os.path.join(self.tmp_dir3D.name),
                                 output_path=os.path.join(self.tmp_dir3D.name),
                                 batch_path=tmp_batches,
                                 delete_batchDir=False)
        # Initialize Preprocessor
        self.pp3D = Preprocessor(self.data_io3D,
                                 batch_size=2,
                                 data_aug=None,
                                 analysis="fullimage")
        # Get sample list
        self.sample_list3D = self.data_io3D.get_indiceslist()

    # Delete all temporary files
    @classmethod
    def tearDownClass(self):
        self.tmp_dir2D.cleanup()
        self.tmp_dir3D.cleanup()

    #-------------------------------------------------#
    #                Base Functionality               #
    #-------------------------------------------------#
    # Class Creation
    def test_MODEL_create(self):
        nn2D = Neural_Network(preprocessor=self.pp2D)
        self.assertIsInstance(nn2D, Neural_Network)
        self.assertFalse(nn2D.three_dim)
        self.assertIsNotNone(nn2D.model)
        nn3D = Neural_Network(preprocessor=self.pp3D)
        self.assertIsInstance(nn3D, Neural_Network)
        self.assertTrue(nn3D.three_dim)
        self.assertIsNotNone(nn3D.model)

    # Model storage
    def test_MODEL_storage(self):
        nn = Neural_Network(preprocessor=self.pp3D)
        model_path = os.path.join(self.tmp_dir3D.name, "my_model.hdf5")
        nn.dump(model_path)
        self.assertTrue(os.path.exists(model_path))

    # Model loading
    def test_MODEL_loading(self):
        nn = Neural_Network(preprocessor=self.pp3D)
        model_path = os.path.join(self.tmp_dir3D.name, "my_model.hdf5")
        nn.dump(model_path)
        nn_new = Neural_Network(preprocessor=self.pp3D)
        nn_new.load(model_path)

    # Resetting weights
    def test_MODEL_resetWeights(self):
        nn = Neural_Network(preprocessor=self.pp3D)
        nn.reset_weights()

    #-------------------------------------------------#
    #                     Training                    #
    #-------------------------------------------------#
    def test_MODEL_training2D(self):
        nn = Neural_Network(preprocessor=self.pp2D)
        nn.train(self.sample_list2D, epochs=3)

    def test_MODEL_training3D(self):
        nn = Neural_Network(preprocessor=self.pp3D)
        nn.train(self.sample_list3D, epochs=3)

    #-------------------------------------------------#
    #                    Prediction                   #
    #-------------------------------------------------#
    def test_MODEL_prediction2D(self):
        nn = Neural_Network(preprocessor=self.pp2D)
        nn.predict(self.sample_list2D)
        for index in self.sample_list2D:
            sample = self.data_io2D.sample_loader(index,
                                                  load_seg=True,
                                                  load_pred=True)
            self.assertIsNotNone(sample.pred_data)

    def test_MODEL_prediction3D(self):
        nn = Neural_Network(preprocessor=self.pp3D)
        nn.predict(self.sample_list3D)
        for index in self.sample_list3D:
            sample = self.data_io3D.sample_loader(index,
                                                  load_seg=True,
                                                  load_pred=True)
            self.assertIsNotNone(sample.pred_data)

    def test_MODEL_prediction_returnOutput(self):
        nn = Neural_Network(preprocessor=self.pp2D)
        pred_list = nn.predict(self.sample_list2D, return_output=True)
        for pred in pred_list:
            self.assertIsNotNone(pred)
            self.assertEqual(pred.shape, (16, 16))

    def test_MODEL_prediction_activationOutput(self):
        nn = Neural_Network(preprocessor=self.pp2D)
        pred_list = nn.predict(self.sample_list2D,
                               return_output=True,
                               activation_output=True)
        for pred in pred_list:
            self.assertIsNotNone(pred)
            self.assertEqual(pred.shape, (16, 16, 3))

    #-------------------------------------------------#
    #                    Validation                   #
    #-------------------------------------------------#
    def test_MODEL_validation2D(self):
        nn = Neural_Network(preprocessor=self.pp2D)
        history = nn.evaluate(self.sample_list2D[0:4],
                              self.sample_list2D[4:6],
                              epochs=3)
        self.assertIsNotNone(history)

    def test_MODEL_validation3D(self):
        nn = Neural_Network(preprocessor=self.pp3D)
        history = nn.evaluate(self.sample_list3D[0:4],
                              self.sample_list3D[4:6],
                              epochs=3)
        self.assertIsNotNone(history)
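
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original suite): the end-to-end
# pipeline exercised by NeuralNetworkTEST, assembled only from calls that
# appear in this module. Imports (numpy as np, os, tempfile,
# Dictionary_interface, Data_IO, Preprocessor, Neural_Network) are assumed
# to match the test module's own.
def _sketch_pipeline():
    # Tiny random dataset in the same dictionary format the tests use
    ds = dict()
    for i in range(2):
        img = (np.random.rand(16, 16, 16) * 255).astype(int)
        seg = (np.random.rand(16, 16, 16) * 3).astype(int)
        ds["TEST.sample_" + str(i)] = (img, seg)
    io_interface = Dictionary_interface(ds, classes=3, three_dim=True)
    tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
    data_io = Data_IO(io_interface,
                      input_path=tmp_dir.name,
                      output_path=tmp_dir.name,
                      batch_path=os.path.join(tmp_dir.name, "batches"),
                      delete_batchDir=False)
    pp = Preprocessor(data_io, batch_size=1, data_aug=None,
                      analysis="fullimage")
    model = Neural_Network(preprocessor=pp)
    sample_list = data_io.get_indiceslist()
    model.train(sample_list, epochs=1)
    # One prediction per sample (cf. test_MODEL_prediction_returnOutput)
    preds = model.predict(sample_list, return_output=True)
    tmp_dir.cleanup()
    return preds
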
 def test_DATAIO_BATCHES_cleanup(self):
     data_io = Data_IO(self.io_interface,
                       input_path="",
                       output_path="",
                       batch_path=self.tmp_batches,
                       delete_batchDir=False)
     sample = data_io.sample_loader("TEST.sample_0",
                                    backup=False,
                                    load_seg=True,
                                    load_pred=False)
     data_io.backup_batches(sample.img_data, sample.seg_data, "abc")
     data_io.backup_batches(sample.img_data, sample.seg_data, "def")
     data_io.backup_batches(sample.img_data, None, pointer="ghi")
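      # Five files so far: "abc" and "def" each store an image and a
      # segmentation array, "ghi" stores only the image.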
     self.assertEqual(len(os.listdir(self.tmp_batches)), 5)
     data_io.batch_cleanup(pointer="def")
     self.assertEqual(len(os.listdir(self.tmp_batches)), 3)
     data_io.batch_cleanup()
     self.assertEqual(len(os.listdir(self.tmp_batches)), 0)
class PreprocessorTEST(unittest.TestCase):
    # Create random imaging and segmentation data
    @classmethod
    def setUpClass(self):
        np.random.seed(1234)
        # Create 2D imaging and segmentation data set
        self.dataset2D = dict()
        for i in range(0, 10):
            img = np.random.rand(16, 16) * 255
            self.img = img.astype(int)
            seg = np.random.rand(16, 16) * 2
            self.seg = seg.astype(int)
            self.dataset2D["TEST.sample_" + str(i)] = (self.img, self.seg)
        # Initialize Dictionary IO Interface
        io_interface2D = Dictionary_interface(self.dataset2D,
                                              classes=3,
                                              three_dim=False)
        # Initialize temporary directory
        self.tmp_dir2D = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
        tmp_batches = os.path.join(self.tmp_dir2D.name, "batches")
        # Initialize Data IO
        self.data_io2D = Data_IO(io_interface2D,
                                 input_path="",
                                 output_path="",
                                 batch_path=tmp_batches,
                                 delete_batchDir=False)
        # Create 3D imaging and segmentation data set
        self.dataset3D = dict()
        for i in range(0, 10):
            img = np.random.rand(16, 16, 16) * 255
            self.img = img.astype(int)
            seg = np.random.rand(16, 16, 16) * 3
            self.seg = seg.astype(int)
            if i in range(8, 10):
                sample = (self.img, None)
            else:
                sample = (self.img, self.seg)
            self.dataset3D["TEST.sample_" + str(i)] = sample
        # Initialize Dictionary IO Interface
        io_interface3D = Dictionary_interface(self.dataset3D,
                                              classes=3,
                                              three_dim=True)
        # Initialize temporary directory
        self.tmp_dir3D = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
        tmp_batches = os.path.join(self.tmp_dir3D.name, "batches")
        # Initialize Data IO
        self.data_io3D = Data_IO(io_interface3D,
                                 input_path="",
                                 output_path="",
                                 batch_path=tmp_batches,
                                 delete_batchDir=False)

    # Delete all temporary files
    @classmethod
    def tearDownClass(self):
        self.tmp_dir2D.cleanup()
        self.tmp_dir3D.cleanup()

    #-------------------------------------------------#
    #                Base Functionality               #
    #-------------------------------------------------#
    # Class Creation
    def test_PREPROCESSOR_BASE_create(self):
        with self.assertRaises(Exception):
            Preprocessor()
        Preprocessor(self.data_io3D, batch_size=1, analysis="fullimage")
        Preprocessor(self.data_io3D,
                     batch_size=1,
                     analysis="patchwise-crop",
                     patch_shape=(16, 16, 16))
        Preprocessor(self.data_io3D,
                     batch_size=1,
                     analysis="patchwise-grid",
                     patch_shape=(16, 16, 16),
                     data_aug=None)

    # Simple Preprocessor run
    def test_PREPROCESSOR_BASE_run(self):
        sample_list = self.data_io3D.get_indiceslist()
        pp = Preprocessor(self.data_io3D,
                          data_aug=None,
                          batch_size=1,
                          analysis="fullimage")
        batches = pp.run(sample_list[8:10], training=False, validation=False)
        self.assertEqual(len(batches), 2)
        self.assertEqual(batches[0][0].shape, (1, 16, 16, 16, 1))
        self.assertIsNone(batches[0][1])
        batches = pp.run(sample_list[0:8], training=True, validation=False)
        self.assertEqual(batches[0][0].shape, (1, 16, 16, 16, 1))
        self.assertEqual(batches[0][1].shape, (1, 16, 16, 16, 3))
        batches = pp.run(sample_list[0:8], training=True, validation=True)
        self.assertEqual(batches[0][0].shape, (1, 16, 16, 16, 1))
        self.assertEqual(batches[0][1].shape, (1, 16, 16, 16, 3))

    # Preprocessor run with data augmentation
    def test_PREPROCESSOR_BASE_dataaugmentation(self):
        sample_list = self.data_io3D.get_indiceslist()
        pp = Preprocessor(self.data_io3D, batch_size=1, analysis="fullimage")
        batches = pp.run(sample_list[8:10], training=False, validation=False)
        self.assertEqual(len(batches), 2)
        self.assertEqual(batches[0][0].shape, (1, 16, 16, 16, 1))
        self.assertIsNone(batches[0][1])
        sample = self.data_io3D.sample_loader(sample_list[8], load_seg=False)
        self.assertFalse(np.array_equal(batches[0][0], sample.img_data))

    # Runs with different batch sizes
    def test_PREPROCESSOR_BASE_batchsizes(self):
        sample_list = self.data_io3D.get_indiceslist()
        pp = Preprocessor(self.data_io3D, batch_size=1, analysis="fullimage")
        batches = pp.run(sample_list[0:8], training=True, validation=False)
        self.assertEqual(len(batches), 8)
        self.assertEqual(batches[0][0].shape, (1, 16, 16, 16, 1))
        pp = Preprocessor(self.data_io3D, batch_size=2, analysis="fullimage")
        batches = pp.run(sample_list[0:8], training=True, validation=False)
        self.assertEqual(len(batches), 4)
        self.assertEqual(batches[0][0].shape, (2, 16, 16, 16, 1))
        pp = Preprocessor(self.data_io3D, batch_size=3, analysis="fullimage")
        batches = pp.run(sample_list[0:8], training=True, validation=False)
        self.assertEqual(len(batches), 3)
        self.assertEqual(batches[0][0].shape, (3, 16, 16, 16, 1))
        self.assertEqual(batches[-1][0].shape, (2, 16, 16, 16, 1))
        pp = Preprocessor(self.data_io3D, batch_size=8, analysis="fullimage")
        batches = pp.run(sample_list[0:8], training=True, validation=False)
        self.assertEqual(len(batches), 1)
        self.assertEqual(batches[0][0].shape, (8, 16, 16, 16, 1))
        pp = Preprocessor(self.data_io3D, batch_size=100, analysis="fullimage")
        batches = pp.run(sample_list[0:8], training=True, validation=False)
        self.assertEqual(len(batches), 1)
        self.assertEqual(batches[0][0].shape, (8, 16, 16, 16, 1))

    # Batch preparation check
    def test_PREPROCESSOR_BASE_prepareBatches(self):
        sample_list = self.data_io3D.get_indiceslist()
        pp = Preprocessor(self.data_io3D,
                          batch_size=1,
                          analysis="fullimage",
                          prepare_batches=True)
        batch_pointer = pp.run(sample_list[0:8],
                               training=True,
                               validation=False)
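        # With prepare_batches=True, run() stores batches on disk and returns
        # the zero-based pointer of the last prepared batch: 8 samples at
        # batch_size 1 give batches 0..7 (an interpretation inferred from the
        # counts). Each batch is written as two files (image + segmentation),
        # hence the 16 files counted below.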
        self.assertEqual(batch_pointer, 7)
        tmp_batches = os.path.join(self.tmp_dir3D.name, "batches")
        batch_list = []
        for batch_file in os.listdir(tmp_batches):
            if batch_file.startswith(str(pp.data_io.seed)):
                batch_list.append(batch_file)
        self.assertEqual(len(batch_list), 16)

    #-------------------------------------------------#
    #                  Postprocessing                 #
    #-------------------------------------------------#
    def test_PREPROCESSOR_postprocessing(self):
        sample_list = self.data_io3D.get_indiceslist()
        pp = Preprocessor(self.data_io3D,
                          batch_size=1,
                          analysis="fullimage",
                          data_aug=None)
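        # With fullimage analysis, no augmentation and no subfunctions,
        # postprocessing should simply decode the one-hot batches back to the
        # original label volumes, so an exact equality check is possible.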
        batches = pp.run(sample_list[0:3], training=True, validation=False)
        for i in range(0, 3):
            pred_postprec = pp.postprocessing(sample_list[i], batches[i][1])
            self.assertEqual(pred_postprec.shape, (16, 16, 16))
            sam = self.data_io3D.sample_loader(sample_list[i], load_seg=True)
            self.assertTrue(
                np.array_equal(pred_postprec,
                               np.reshape(sam.seg_data, (16, 16, 16))))

    #-------------------------------------------------#
    #            Analysis: Patchwise-crop             #
    #-------------------------------------------------#
    def test_PREPROCESSOR_patchwisecrop_2D(self):
        sample_list = self.data_io2D.get_indiceslist()
        pp = Preprocessor(self.data_io2D,
                          data_aug=None,
                          batch_size=1,
                          analysis="patchwise-crop",
                          patch_shape=(4, 4))
        batches = pp.run(sample_list[0:3], training=True, validation=False)
        self.assertEqual(len(batches), 3)
        batches = pp.run(sample_list[0:1], training=False, validation=False)
        self.assertEqual(len(batches), 16)
        sample = self.data_io2D.sample_loader(sample_list[0], load_seg=True)
        sample.seg_data = to_categorical(sample.seg_data,
                                         num_classes=sample.classes)
        ready_data = pp.analysis_patchwise_crop(sample, data_aug=False)
        self.assertEqual(len(ready_data), 1)
        self.assertEqual(ready_data[0][0].shape, (4, 4, 1))
        self.assertEqual(ready_data[0][1].shape, (4, 4, 3))

    def test_PREPROCESSOR_patchwisecrop_3D(self):
        sample_list = self.data_io3D.get_indiceslist()
        pp = Preprocessor(self.data_io3D,
                          data_aug=None,
                          batch_size=1,
                          analysis="patchwise-crop",
                          patch_shape=(4, 4, 4))
        batches = pp.run(sample_list[0:3], training=True, validation=False)
        self.assertEqual(len(batches), 3)
        batches = pp.run(sample_list[0:1], training=False, validation=False)
        self.assertEqual(len(batches), 64)
        sample = self.data_io3D.sample_loader(sample_list[0], load_seg=True)
        sample.seg_data = to_categorical(sample.seg_data,
                                         num_classes=sample.classes)
        ready_data = pp.analysis_patchwise_crop(sample, data_aug=False)
        self.assertEqual(len(ready_data), 1)
        self.assertEqual(ready_data[0][0].shape, (4, 4, 4, 1))
        self.assertEqual(ready_data[0][1].shape, (4, 4, 4, 3))

    def test_PREPROCESSOR_patchwisecrop_skipBlanks(self):
        sample_list = self.data_io3D.get_indiceslist()
        pp = Preprocessor(self.data_io3D,
                          data_aug=None,
                          batch_size=1,
                          analysis="patchwise-crop",
                          patch_shape=(4, 4, 4))
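        # Skip patches which contain only background during training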
        pp.patchwise_skip_blanks = True
        batches = pp.run(sample_list[0:3], training=True, validation=False)
        sample = self.data_io3D.sample_loader(sample_list[0], load_seg=True)
        sample.seg_data = to_categorical(sample.seg_data,
                                         num_classes=sample.classes)
        ready_data = pp.analysis_patchwise_crop(sample, data_aug=False)
        self.assertEqual(len(ready_data), 1)
        self.assertEqual(ready_data[0][0].shape, (4, 4, 4, 1))
        self.assertEqual(ready_data[0][1].shape, (4, 4, 4, 3))

    #-------------------------------------------------#
    #            Analysis: Patchwise-grid             #
    #-------------------------------------------------#
    def test_PREPROCESSOR_patchwisegrid_2D(self):
        sample_list = self.data_io2D.get_indiceslist()
        pp = Preprocessor(self.data_io2D,
                          data_aug=None,
                          batch_size=1,
                          analysis="patchwise-grid",
                          patch_shape=(4, 4))
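        # A 16x16 image with 4x4 patches results in a 4x4 grid = 16 patches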
        batches = pp.run(sample_list[0:1], training=False, validation=False)
        self.assertEqual(len(batches), 16)
        sample = self.data_io2D.sample_loader(sample_list[0], load_seg=True)
        sample.seg_data = to_categorical(sample.seg_data,
                                         num_classes=sample.classes)
        pp = Preprocessor(self.data_io2D,
                          data_aug=None,
                          batch_size=1,
                          analysis="patchwise-grid",
                          patch_shape=(5, 5))
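        # With 5x5 patches, ceil(16/5) = 4 positions per axis are needed,
        # so the grid again contains 16 patches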
        ready_data = pp.analysis_patchwise_grid(sample,
                                                data_aug=False,
                                                training=True)
        self.assertEqual(len(ready_data), 16)
        self.assertEqual(ready_data[0][0].shape, (5, 5, 1))
        self.assertEqual(ready_data[0][1].shape, (5, 5, 3))

    def test_PREPROCESSOR_patchwisegrid_3D(self):
        sample_list = self.data_io3D.get_indiceslist()
        pp = Preprocessor(self.data_io3D,
                          data_aug=None,
                          batch_size=1,
                          analysis="patchwise-grid",
                          patch_shape=(4, 4, 4))
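        # A 16x16x16 volume with 4x4x4 patches results in 4^3 = 64 grid patches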
        batches = pp.run(sample_list[0:1], training=False, validation=False)
        self.assertEqual(len(batches), 64)
        sample = self.data_io3D.sample_loader(sample_list[0], load_seg=True)
        sample.seg_data = to_categorical(sample.seg_data,
                                         num_classes=sample.classes)
        pp = Preprocessor(self.data_io3D,
                          data_aug=None,
                          batch_size=1,
                          analysis="patchwise-grid",
                          patch_shape=(5, 5, 5))
        ready_data = pp.analysis_patchwise_grid(sample,
                                                data_aug=False,
                                                training=True)
        self.assertEqual(len(ready_data), 64)
        self.assertEqual(ready_data[0][0].shape, (5, 5, 5, 1))
        self.assertEqual(ready_data[0][1].shape, (5, 5, 5, 3))

    def test_PREPROCESSOR_patchwisegrid_skipBlanks(self):
        sample_list = self.data_io3D.get_indiceslist()
        pp = Preprocessor(self.data_io3D,
                          data_aug=None,
                          batch_size=1,
                          analysis="patchwise-grid",
                          patch_shape=(4, 4, 4))
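        # Enable blank-patch skipping; the random segmentation has foreground
        # in every patch, so the full grid of 64 patches is expected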
        pp.patchwise_skip_blanks = True
        batches = pp.run(sample_list[0:3], training=True, validation=False)
        sample = self.data_io3D.sample_loader(sample_list[0], load_seg=True)
        sample.seg_data = to_categorical(sample.seg_data,
                                         num_classes=sample.classes)
        ready_data = pp.analysis_patchwise_grid(sample,
                                                data_aug=False,
                                                training=True)
        self.assertEqual(len(ready_data), 64)
        self.assertEqual(ready_data[0][0].shape, (4, 4, 4, 1))
        self.assertEqual(ready_data[0][1].shape, (4, 4, 4, 3))

    #-------------------------------------------------#
    #               Analysis: Fullimage               #
    #-------------------------------------------------#
    def test_PREPROCESSOR_fullimage_2D(self):
        sample_list = self.data_io2D.get_indiceslist()
        pp = Preprocessor(self.data_io2D,
                          data_aug=None,
                          batch_size=2,
                          analysis="fullimage")
        batches = pp.run(sample_list[0:3], training=True, validation=False)
        self.assertEqual(len(batches), 2)
        batches = pp.run(sample_list[0:1], training=False, validation=False)
        self.assertEqual(len(batches), 1)
        sample = self.data_io2D.sample_loader(sample_list[0], load_seg=True)
        sample.seg_data = to_categorical(sample.seg_data,
                                         num_classes=sample.classes)
        ready_data = pp.analysis_fullimage(sample,
                                           data_aug=False,
                                           training=True)
        self.assertEqual(len(ready_data), 1)
        self.assertEqual(ready_data[0][0].shape, (16, 16, 1))
        self.assertEqual(ready_data[0][1].shape, (16, 16, 3))

    def test_PREPROCESSOR_fullimage_3D(self):
        sample_list = self.data_io3D.get_indiceslist()
        pp = Preprocessor(self.data_io3D,
                          data_aug=None,
                          batch_size=2,
                          analysis="fullimage")
        batches = pp.run(sample_list[0:3], training=True, validation=False)
        self.assertEqual(len(batches), 2)
        batches = pp.run(sample_list[0:1], training=False, validation=False)
        self.assertEqual(len(batches), 1)
        sample = self.data_io3D.sample_loader(sample_list[0], load_seg=True)
        sample.seg_data = to_categorical(sample.seg_data,
                                         num_classes=sample.classes)
        ready_data = pp.analysis_fullimage(sample,
                                           data_aug=False,
                                           training=True)
        self.assertEqual(len(ready_data), 1)
        self.assertEqual(ready_data[0][0].shape, (16, 16, 16, 1))
        self.assertEqual(ready_data[0][1].shape, (16, 16, 16, 3))
class metricTEST(unittest.TestCase):
    # Create random imaging and segmentation data
    @classmethod
    def setUpClass(self):
        np.random.seed(1234)
        # Create 2D imaging and segmentation data set
        self.dataset = dict()
        for i in range(0, 2):
            img = np.random.rand(16, 16) * 255
            self.img = img.astype(int)
            seg = np.random.rand(16, 16) * 2
            self.seg = seg.astype(int)
            self.dataset["TEST.sample_" + str(i)] = (self.img, self.seg)
        # Initialize Dictionary IO Interface
        io_interface = Dictionary_interface(self.dataset, classes=3,
                                            three_dim=False)
        # Initialize temporary directory
        self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
        tmp_batches = os.path.join(self.tmp_dir.name, "batches")
        # Initialize Data IO
        self.data_io = Data_IO(io_interface,
                               input_path=os.path.join(self.tmp_dir.name),
                               output_path=os.path.join(self.tmp_dir.name),
                               batch_path=tmp_batches, delete_batchDir=False)
        # Initialize Preprocessor
        self.pp = Preprocessor(self.data_io, batch_size=1,
                               data_aug=None, analysis="fullimage")
        # Initialize Neural Network
        self.model = Neural_Network(self.pp)
        # Get sample list
        self.sample_list = self.data_io.get_indiceslist()

    # Delete all temporary files
    @classmethod
    def tearDownClass(self):
        self.tmp_dir.cleanup()

    #-------------------------------------------------#
    #               Standard DSC Metric               #
    #-------------------------------------------------#
    def test_METRICS_DSC_standard(self):
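        # Smoke test: a single training epoch verifies that the loss/metric
        # compiles and runs (pattern repeated for all metrics below)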
        self.model.loss = dice_coefficient
        self.model.metrics = [dice_coefficient]
        self.model.train(self.sample_list, epochs=1)

    #-------------------------------------------------#
    #                Standard DSC Loss                #
    #-------------------------------------------------#
    def test_METRICS_DSC_standardLOSS(self):
        self.model.loss = dice_coefficient_loss
        self.model.metrics = [dice_coefficient_loss]
        self.model.train(self.sample_list, epochs=1)

    #-------------------------------------------------#
    #                 Soft DSC Metric                 #
    #-------------------------------------------------#
    def test_METRICS_DSC_soft(self):
        self.model.loss = dice_soft
        self.model.metrics = [dice_soft]
        self.model.train(self.sample_list, epochs=1)

    #-------------------------------------------------#
    #                  Soft DSC Loss                  #
    #-------------------------------------------------#
    def test_METRICS_DSC_softLOSS(self):
        self.model.loss = dice_soft_loss
        self.model.metrics = [dice_soft_loss]
        self.model.train(self.sample_list, epochs=1)

    #-------------------------------------------------#
    #                   Weighted DSC                  #
    #-------------------------------------------------#
    def test_METRICS_DSC_weighted(self):
        self.model.loss = dice_weighted([1, 1, 4])
        self.model.metrics = [dice_weighted([1, 1, 4])]
        self.model.train(self.sample_list, epochs=1)

    #-------------------------------------------------#
    #             Dice & Crossentropy loss            #
    #-------------------------------------------------#
    def test_METRICS_DSC_CrossEntropy(self):
        self.model.loss = dice_crossentropy
        self.model.metrics = [dice_crossentropy]
        self.model.train(self.sample_list, epochs=1)

    #-------------------------------------------------#
    #                   Tversky loss                  #
    #-------------------------------------------------#
    def test_METRICS_Tversky(self):
        self.model.loss = tversky_loss
        self.model.metrics = [tversky_loss]
        self.model.train(self.sample_list, epochs=1)

    #-------------------------------------------------#
    #           Tversky & Crossentropy loss           #
    #-------------------------------------------------#
    def test_METRICS_Tversky_CrossEntropy(self):
        self.model.loss = tversky_crossentropy
        self.model.metrics = [tversky_crossentropy]
        self.model.train(self.sample_list, epochs=1)
class architectureTEST(unittest.TestCase):
    # Create random imaging and segmentation data
    @classmethod
    def setUpClass(self):
        np.random.seed(1234)
        # Create 2D imaging and segmentation data set
        self.dataset2D = dict()
        for i in range(0, 1):
            img = np.random.rand(32, 32) * 255
            self.img = img.astype(int)
            seg = np.random.rand(32, 32) * 2
            self.seg = seg.astype(int)
            self.dataset2D["TEST.sample_" + str(i)] = (self.img, self.seg)
        # Initialize Dictionary IO Interface
        io_interface2D = Dictionary_interface(self.dataset2D, classes=3,
                                              three_dim=False)
        # Initialize temporary directory
        self.tmp_dir2D = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
        tmp_batches = os.path.join(self.tmp_dir2D.name, "batches")
        # Initialize Data IO
        self.data_io2D = Data_IO(io_interface2D,
                                 input_path=os.path.join(self.tmp_dir2D.name),
                                 output_path=os.path.join(self.tmp_dir2D.name),
                                 batch_path=tmp_batches, delete_batchDir=False)
        # Initialize Preprocessor
        self.pp2D = Preprocessor(self.data_io2D, batch_size=1,
                                 data_aug=None, analysis="fullimage")
        # Get sample list
        self.sample_list2D = self.data_io2D.get_indiceslist()
        # Create 3D imaging and segmentation data set
        self.dataset3D = dict()
        for i in range(0, 1):
            img = np.random.rand(32, 32, 32) * 255
            self.img = img.astype(int)
            seg = np.random.rand(32, 32, 32) * 3
            self.seg = seg.astype(int)
            self.dataset3D["TEST.sample_" + str(i)] = (self.img, self.seg)
        # Initialize Dictionary IO Interface
        io_interface3D = Dictionary_interface(self.dataset3D, classes=3,
                                              three_dim=True)
        # Initialize temporary directory
        self.tmp_dir3D = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
        tmp_batches = os.path.join(self.tmp_dir3D.name, "batches")
        # Initialize Data IO
        self.data_io3D = Data_IO(io_interface3D,
                                 input_path=os.path.join(self.tmp_dir3D.name),
                                 output_path=os.path.join(self.tmp_dir3D.name),
                                 batch_path=tmp_batches, delete_batchDir=False)
        # Initialize Preprocessor
        self.pp3D = Preprocessor(self.data_io3D, batch_size=1,
                                 data_aug=None, analysis="fullimage")
        # Get sample list
        self.sample_list3D = self.data_io3D.get_indiceslist()

    # Delete all temporary files
    @classmethod
    def tearDownClass(self):
        self.tmp_dir2D.cleanup()
        self.tmp_dir3D.cleanup()

    #-------------------------------------------------#
    #                  U-Net Standard                 #
    #-------------------------------------------------#
    def test_ARCHITECTURES_UNET_standard(self):
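        # Build each architecture for 2D and 3D data and run a forward
        # pass via predict (pattern repeated for all architectures below)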
        model2D = Neural_Network(self.pp2D, architecture=UNet_standard())
        model2D.predict(self.sample_list2D)
        model3D = Neural_Network(self.pp3D, architecture=UNet_standard())
        model3D.predict(self.sample_list3D)

    #-------------------------------------------------#
    #                   U-Net Plain                   #
    #-------------------------------------------------#
    def test_ARCHITECTURES_UNET_plain(self):
        model2D = Neural_Network(self.pp2D, architecture=UNet_plain())
        model2D.predict(self.sample_list2D)
        model3D = Neural_Network(self.pp3D, architecture=UNet_plain())
        model3D.predict(self.sample_list3D)

    #-------------------------------------------------#
    #                  U-Net Residual                 #
    #-------------------------------------------------#
    def test_ARCHITECTURES_UNET_residual(self):
        model2D = Neural_Network(self.pp2D, architecture=UNet_residual())
        model2D.predict(self.sample_list2D)
        model3D = Neural_Network(self.pp3D, architecture=UNet_residual())
        model3D.predict(self.sample_list3D)

    #-------------------------------------------------#
    #                  U-Net MultiRes                 #
    #-------------------------------------------------#
    def test_ARCHITECTURES_UNET_multires(self):
        model2D = Neural_Network(self.pp2D, architecture=UNet_multiRes())
        model2D.predict(self.sample_list2D)
        model3D = Neural_Network(self.pp3D, architecture=UNet_multiRes())
        model3D.predict(self.sample_list3D)

    #-------------------------------------------------#
    #                   U-Net Dense                   #
    #-------------------------------------------------#
    def test_ARCHITECTURES_UNET_dense(self):
        model2D = Neural_Network(self.pp2D, architecture=UNet_dense())
        model2D.predict(self.sample_list2D)
        model3D = Neural_Network(self.pp3D, architecture=UNet_dense())
        model3D.predict(self.sample_list3D)

    #-------------------------------------------------#
    #                  U-Net Compact                  #
    #-------------------------------------------------#
    def test_ARCHITECTURES_UNET_compact(self):
        model2D = Neural_Network(self.pp2D, architecture=UNet_compact())
        model2D.predict(self.sample_list2D)
        model3D = Neural_Network(self.pp3D, architecture=UNet_compact())
        model3D.predict(self.sample_list3D)
class evaluationTEST(unittest.TestCase):
    # Create random imaging and segmentation data
    @classmethod
    def setUpClass(self):
        np.random.seed(1234)
        # Create 2D imaging and segmentation data set
        self.dataset = dict()
        for i in range(0, 6):
            img = np.random.rand(16, 16) * 255
            self.img = img.astype(int)
            seg = np.random.rand(16, 16) * 2
            self.seg = seg.astype(int)
            self.dataset["TEST.sample_" + str(i)] = (self.img, self.seg)
        # Initialize Dictionary IO Interface
        io_interface = Dictionary_interface(self.dataset,
                                            classes=3,
                                            three_dim=False)
        # Initialize temporary directory
        self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
        tmp_batches = os.path.join(self.tmp_dir.name, "batches")
        # Initialize Data IO
        self.data_io = Data_IO(io_interface,
                               input_path=os.path.join(self.tmp_dir.name),
                               output_path=os.path.join(self.tmp_dir.name),
                               batch_path=tmp_batches,
                               delete_batchDir=False)
        # Initialize Preprocessor
        self.pp = Preprocessor(self.data_io,
                               batch_size=2,
                               data_aug=None,
                               analysis="fullimage")
        # Initialize Neural Network
        self.model = Neural_Network(self.pp)
        # Get sample list
        self.sample_list = self.data_io.get_indiceslist()

    # Delete all temporary files
    @classmethod
    def tearDownClass(self):
        self.tmp_dir.cleanup()

    #-------------------------------------------------#
    #                 Cross-Validation                #
    #-------------------------------------------------#
    def test_EVALUATION_crossValidation(self):
        eval_path = os.path.join(self.tmp_dir.name, "evaluation")
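        # 3-fold cross-validation on 6 samples: one subdirectory per fold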
        cross_validation(self.sample_list,
                         self.model,
                         k_fold=3,
                         epochs=3,
                         iterations=None,
                         evaluation_path=eval_path,
                         run_detailed_evaluation=False,
                         draw_figures=False,
                         callbacks=[],
                         save_models=False,
                         return_output=False)
        self.assertTrue(os.path.exists(eval_path))
        self.assertTrue(os.path.exists(os.path.join(eval_path, "fold_0")))
        self.assertTrue(os.path.exists(os.path.join(eval_path, "fold_1")))
        self.assertTrue(os.path.exists(os.path.join(eval_path, "fold_2")))

    def test_EVALUATION_crossValidation_splitRun(self):
        eval_path = os.path.join(self.tmp_dir.name, "evaluation")
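        # Create the fold splits on disk first, then train each fold separately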
        split_folds(self.sample_list, k_fold=3, evaluation_path=eval_path)
        self.assertTrue(os.path.exists(eval_path))
        self.assertTrue(os.path.exists(os.path.join(eval_path, "fold_0")))
        self.assertTrue(os.path.exists(os.path.join(eval_path, "fold_1")))
        self.assertTrue(os.path.exists(os.path.join(eval_path, "fold_2")))
        for fold in range(0, 3):
            run_fold(fold,
                     self.model,
                     epochs=1,
                     iterations=None,
                     evaluation_path=eval_path,
                     draw_figures=False,
                     callbacks=[],
                     save_models=True)
            fold_dir = os.path.join(eval_path, "fold_" + str(fold))
            self.assertTrue(
                os.path.exists(os.path.join(fold_dir, "history.tsv")))
            self.assertTrue(
                os.path.exists(os.path.join(fold_dir, "sample_list.csv")))
            self.assertTrue(
                os.path.exists(os.path.join(fold_dir, "model.hdf5")))

    #-------------------------------------------------#
    #                 Split Validation                #
    #-------------------------------------------------#
    def test_EVALUATION_splitValidation(self):
        eval_path = os.path.join(self.tmp_dir.name, "evaluation")
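        # Hold out 30% of the samples as validation split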
        split_validation(self.sample_list,
                         self.model,
                         percentage=0.3,
                         epochs=3,
                         iterations=None,
                         evaluation_path=eval_path,
                         run_detailed_evaluation=False,
                         draw_figures=False,
                         callbacks=[],
                         return_output=False)
        self.assertTrue(os.path.exists(eval_path))

    #-------------------------------------------------#
    #                  Leave One Out                  #
    #-------------------------------------------------#
    def test_EVALUATION_leaveOneOut(self):
        # Create 3D imaging and segmentation data set
        self.dataset3D = dict()
        for i in range(0, 6):
            img = np.random.rand(16, 16, 16) * 255
            self.img = img.astype(int)
            seg = np.random.rand(16, 16, 16) * 3
            self.seg = seg.astype(int)
            self.dataset3D["TEST.sample_" + str(i)] = (self.img, self.seg)
        # Initialize Dictionary IO Interface
        io_interface3D = Dictionary_interface(self.dataset3D,
                                              classes=3,
                                              three_dim=True)
        # Initialize temporary directory
        self.tmp_dir3D = tempfile.TemporaryDirectory(prefix="tmp.CESP.")
        tmp_batches = os.path.join(self.tmp_dir3D.name, "batches")
        # Initialize Data IO
        self.data_io3D = Data_IO(io_interface3D,
                                 input_path=os.path.join(self.tmp_dir3D.name),
                                 output_path=os.path.join(self.tmp_dir3D.name),
                                 batch_path=tmp_batches,
                                 delete_batchDir=False)
        # Initialize Preprocessor
        self.pp3D = Preprocessor(self.data_io3D,
                                 batch_size=2,
                                 data_aug=None,
                                 analysis="fullimage")
        # Initialize Neural Network
        model = Neural_Network(self.pp3D)
        # Get sample list
        self.sample_list3D = self.data_io3D.get_indiceslist()

        eval_path = os.path.join(self.tmp_dir3D.name, "evaluation")
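        # Leave-one-out: one model per sample, trained on the remaining five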
        leave_one_out(self.sample_list3D,
                      model,
                      epochs=3,
                      iterations=None,
                      evaluation_path=eval_path,
                      callbacks=[])
        self.assertTrue(os.path.exists(eval_path))
        # Cleanup stuff
        self.tmp_dir3D.cleanup()