Example 1
 def test_SUBFUNCTIONS_prepare_MULTIPROCESSING(self):
     ds = dict()
     for i in range(0, 5):
         img = np.random.rand(16, 16, 16) * 255
         img = img.astype(int)
         seg = np.random.rand(16, 16, 16) * 3
         seg = seg.astype(int)
         sample = (img, seg)
         ds["TEST.sample_" + str(i)] = sample
     io_interface = Dictionary_interface(ds, classes=3, three_dim=True)
     self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
     tmp_batches = os.path.join(self.tmp_dir.name, "batches")
     dataio = Data_IO(io_interface, input_path="", output_path="",
                      batch_path=tmp_batches, delete_batchDir=False)
     sf = [Resize((8,8,8)), Normalization(), Clipping(min=-1.0, max=0.0)]
     pp = Preprocessor(dataio, batch_size=1, prepare_subfunctions=True,
                       analysis="fullimage", subfunctions=sf,
                       use_multiprocessing=True)
     pp.mp_threads = 4
     sample_list = dataio.get_indiceslist()
     pp.run_subfunctions(sample_list, training=True)
     batches = pp.run(sample_list, training=True, validation=False)
     self.assertEqual(len(os.listdir(tmp_batches)), 5)
     for i in range(0, 5):
         file_prepared_subfunctions = os.path.join(tmp_batches,
                 str(pp.data_io.seed) + ".TEST.sample_" + str(i) + ".pickle")
         self.assertTrue(os.path.exists(file_prepared_subfunctions))
         img = batches[i][0]
         seg = batches[i][1]
         self.assertIsNotNone(img)
         self.assertIsNotNone(seg)
         self.assertEqual(img.shape, (1,8,8,8,1))
         self.assertEqual(seg.shape, (1,8,8,8,3))
Example 2
 def setUpClass(self):
     np.random.seed(1234)
     # Create imaging and segmentation data set
     self.dataset = dict()
     for i in range(0, 10):
         img = np.random.rand(16, 16, 16) * 255
         self.img = img.astype(int)
         seg = np.random.rand(16, 16, 16) * 3
         self.seg = seg.astype(int)
         sample = (self.img, self.seg)
         self.dataset["TEST.sample_" + str(i)] = sample
     # Initialize Dictionary IO Interface
     io_interface = Dictionary_interface(self.dataset,
                                         classes=3,
                                         three_dim=True)
     # Initialize temporary directory
     self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
     tmp_batches = os.path.join(self.tmp_dir.name, "batches")
     # Initialize Data IO
     self.data_io = Data_IO(io_interface,
                            input_path="",
                            output_path="",
                            batch_path=tmp_batches,
                            delete_batchDir=False)
     # Initialize Data Augmentation
     self.data_aug = Data_Augmentation()
     # Get sample list
     self.sample_list = self.data_io.get_indiceslist()
Example 3
 def test_SUBFUNCTIONS_preprocessing(self):
     ds = dict()
     for i in range(0, 10):
         img = np.random.rand(16, 16, 16) * 255
         img = img.astype(int)
         seg = np.random.rand(16, 16, 16) * 3
         seg = seg.astype(int)
         sample = (img, seg)
         ds["TEST.sample_" + str(i)] = sample
     io_interface = Dictionary_interface(ds, classes=3, three_dim=True)
     self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
     tmp_batches = os.path.join(self.tmp_dir.name, "batches")
     dataio = Data_IO(io_interface, input_path="", output_path="",
                      batch_path=tmp_batches, delete_batchDir=False)
     sf = [Resize((8,8,8)), Normalization(), Clipping(min=-1.0, max=0.0)]
     pp = Preprocessor(dataio, data_aug=None, batch_size=1,
                       prepare_subfunctions=False, analysis="fullimage",
                       subfunctions=sf)
     sample_list = dataio.get_indiceslist()
     batches = pp.run(sample_list, training=True, validation=False)
     for i in range(0, 10):
         img = batches[i][0]
         seg = batches[i][1]
         self.assertEqual(img.shape, (1,8,8,8,1))
         self.assertEqual(seg.shape, (1,8,8,8,3))
         self.assertTrue(np.min(img) >= -1.0 and np.max(img) <= 0.0)
     self.tmp_dir.cleanup()
Example 4
 def setUpClass(self):
     np.random.seed(1234)
     # Create 2D imaging and segmentation data set
     self.dataset = dict()
     for i in range(0, 6):
         img = np.random.rand(16, 16) * 255
         self.img = img.astype(int)
         seg = np.random.rand(16, 16) * 2
         self.seg = seg.astype(int)
         self.dataset["TEST.sample_" + str(i)] = (self.img, self.seg)
     # Initialize Dictionary IO Interface
     io_interface = Dictionary_interface(self.dataset,
                                         classes=3,
                                         three_dim=False)
     # Initialize temporary directory
     self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
     tmp_batches = os.path.join(self.tmp_dir.name, "batches")
     # Initialize Data IO
     self.data_io = Data_IO(io_interface,
                            input_path=os.path.join(self.tmp_dir.name),
                            output_path=os.path.join(self.tmp_dir.name),
                            batch_path=tmp_batches,
                            delete_batchDir=False)
     # Initialize Preprocessor
     self.pp = Preprocessor(self.data_io,
                            batch_size=2,
                            data_aug=None,
                            analysis="fullimage")
     # Initialize Neural Network
     self.model = Neural_Network(self.pp)
     # Get sample list
     self.sample_list = self.data_io.get_indiceslist()
Example 5
    def test_EVALUATION_leaveOneOut(self):
        # Create 3D imaging and segmentation data set
        self.dataset3D = dict()
        for i in range(0, 6):
            img = np.random.rand(16, 16, 16) * 255
            self.img = img.astype(int)
            seg = np.random.rand(16, 16, 16) * 3
            self.seg = seg.astype(int)
            self.dataset3D["TEST.sample_" + str(i)] = (self.img, self.seg)
        # Initialize Dictionary IO Interface
        io_interface3D = Dictionary_interface(self.dataset3D, classes=3,
                                              three_dim=True)
        # Initialize temporary directory
        self.tmp_dir3D = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
        tmp_batches = os.path.join(self.tmp_dir3D.name, "batches")
        # Initialize Data IO
        self.data_io3D = Data_IO(io_interface3D,
                                 input_path=os.path.join(self.tmp_dir3D.name),
                                 output_path=os.path.join(self.tmp_dir3D.name),
                                 batch_path=tmp_batches, delete_batchDir=False)
        # Initialize Preprocessor
        self.pp3D = Preprocessor(self.data_io3D, batch_size=2,
                                 data_aug=None, analysis="fullimage")
        # Initialize Neural Network
        model = Neural_Network(self.pp3D)
        # Get sample list
        self.sample_list3D = self.data_io3D.get_indiceslist()

        eval_path = os.path.join(self.tmp_dir3D.name, "evaluation")
        leave_one_out(self.sample_list3D, model, epochs=3, iterations=None,
                      evaluation_path=eval_path, callbacks=[])
        self.assertTrue(os.path.exists(eval_path))
        # Cleanup stuff
        self.tmp_dir3D.cleanup()
Example 6
 def test_DATAIO_SampleLoader_Imaging(self):
     data_io = Data_IO(self.io_interface, input_path="", output_path="",
                      batch_path=self.tmp_batches, delete_batchDir=False)
     sample = data_io.sample_loader("TEST.sample_0", backup=False,
                                    load_seg=False, load_pred=False)
     self.assertTrue(np.array_equal(np.reshape(sample.img_data, (16,16,16)),
                                    self.dataset["TEST.sample_0"][0]))
     self.assertEqual(sample.img_data.shape, (16, 16, 16, 1))
Example 7
 def test_DATAIO_BASE_getIndexList(self):
     data_io = Data_IO(self.io_interface,
                       input_path="",
                       output_path="",
                       batch_path=self.tmp_batches,
                       delete_batchDir=False)
     sample_list = data_io.get_indiceslist()
     self.assertEqual(len(sample_list), 10)
     self.assertIn("TEST.sample_0", sample_list)
Example 8
 def test_DATAIO_BASE_getSampleList(self):
     data_io = Data_IO(self.io_interface,
                       input_path="",
                       output_path="",
                       batch_path=self.tmp_batches,
                       delete_batchDir=False)
     sample_list = data_io.get_samples()
     self.assertEqual(len(sample_list), 10)
     for i, sample in enumerate(sample_list):
         self.assertEqual("TEST.sample_" + str(i), sample.index)
Example 9
 def test_DATAIO_SampleLoader_Combined(self):
     data_io = Data_IO(self.io_interface, input_path="", output_path="",
                      batch_path=self.tmp_batches, delete_batchDir=False)
     sample = data_io.sample_loader("TEST.sample_3", backup=False,
                                    load_seg=True, load_pred=True)
     self.assertIsNotNone(sample.img_data)
     self.assertIsNotNone(sample.seg_data)
     self.assertIsNotNone(sample.pred_data)
     self.assertEqual(sample.img_data.shape, sample.seg_data.shape)
     self.assertEqual(sample.seg_data.shape, sample.pred_data.shape)
Example 10
 def test_DATAIO_SampleLoader_Prediction(self):
     data_io = Data_IO(self.io_interface, input_path="", output_path="",
                      batch_path=self.tmp_batches, delete_batchDir=False)
     sample = data_io.sample_loader("TEST.sample_5", backup=False,
                                    load_seg=False, load_pred=True)
     self.assertTrue(np.array_equal(np.reshape(sample.pred_data, (16,16,16)),
                                    self.dataset["TEST.sample_5"][2]))
     self.assertEqual(sample.pred_data.shape, (16, 16, 16, 1))
     self.assertIsNotNone(sample.img_data)
     self.assertIsNone(sample.seg_data)
     with self.assertRaises(Exception):
         sample = data_io.sample_loader("TEST.sample_2", backup=False,
                                        load_seg=False, load_pred=True)
Example 11
 def setUpClass(self):
     np.random.seed(1234)
     # Create 2D imaging and segmentation data set
     self.dataset2D = dict()
     for i in range(0, 1):
         img = np.random.rand(32, 32) * 255
         self.img = img.astype(int)
         seg = np.random.rand(32, 32) * 2
         self.seg = seg.astype(int)
         self.dataset2D["TEST.sample_" + str(i)] = (self.img, self.seg)
     # Initialize Dictionary IO Interface
     io_interface2D = Dictionary_interface(self.dataset2D, classes=3,
                                           three_dim=False)
     # Initialize temporary directory
     self.tmp_dir2D = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
     tmp_batches = os.path.join(self.tmp_dir2D.name, "batches")
     # Initialize Data IO
     self.data_io2D = Data_IO(io_interface2D,
                              input_path=os.path.join(self.tmp_dir2D.name),
                              output_path=os.path.join(self.tmp_dir2D.name),
                              batch_path=tmp_batches, delete_batchDir=False)
     # Initialize Preprocessor
     self.pp2D = Preprocessor(self.data_io2D, batch_size=1,
                              data_aug=None, analysis="fullimage")
     # Get sample list
     self.sample_list2D = self.data_io2D.get_indiceslist()
     # Create 3D imaging and segmentation data set
     self.dataset3D = dict()
     for i in range(0, 1):
         img = np.random.rand(32, 32, 32) * 255
         self.img = img.astype(int)
         seg = np.random.rand(32, 32, 32) * 3
         self.seg = seg.astype(int)
         self.dataset3D["TEST.sample_" + str(i)] = (self.img, self.seg)
     # Initialize Dictionary IO Interface
     io_interface3D = Dictionary_interface(self.dataset3D, classes=3,
                                           three_dim=True)
     # Initialize temporary directory
     self.tmp_dir3D = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
     tmp_batches = os.path.join(self.tmp_dir3D.name, "batches")
     # Initialize Data IO
     self.data_io3D = Data_IO(io_interface3D,
                              input_path=os.path.join(self.tmp_dir3D.name),
                              output_path=os.path.join(self.tmp_dir3D.name),
                              batch_path=tmp_batches, delete_batchDir=False)
     # Initialize Preprocessor
     self.pp3D = Preprocessor(self.data_io3D, batch_size=1,
                              data_aug=None, analysis="fullimage")
     # Get sample list
     self.sample_list3D = self.data_io3D.get_indiceslist()
Example 12
 def test_DATAIO_BATCHES_loading(self):
     data_io = Data_IO(self.io_interface, input_path="", output_path="",
                      batch_path=self.tmp_batches, delete_batchDir=False)
     sample = data_io.sample_loader("TEST.sample_0", backup=False,
                                    load_seg=True, load_pred=False)
     data_io.backup_batches(sample.img_data, sample.seg_data, "abc")
     img = data_io.batch_load(pointer="abc", img=True)
     self.assertTrue(np.array_equal(sample.img_data, img))
     seg = data_io.batch_load(pointer="abc", img=False)
     self.assertTrue(np.array_equal(sample.seg_data, seg))
     data_io.batch_cleanup()
Example 13
def setup_execution(args):
    data_dir = str(args.data_dir)
    interface = None
    if args.imagetype in miscnn_data_interfaces:
        interface = miscnn_data_interfaces[args.imagetype]
    else:
        files = [
            f[f.find("."):] for dp, dn, filenames in os.walk(data_dir)
            for f in filenames if os.path.isfile(os.path.join(dp, f)) and (
                "imaging" in f or "segmentation" in f)
        ]
        unique = list(np.unique(np.asarray(files)))
        unique = [get_data_interface_from_file_term(u) for u in unique]
        if len(unique) != 1 or None in unique:
            raise RuntimeError("Failed to infer image type")
        interface = unique[0]()

    dataio = Data_IO(interface, args.data_dir)

    indices = dataio.get_indiceslist()
    cnt = len(indices)
    print("interface found " + str(cnt) + " indices in the data directory.")

    images = [
        index for index in indices
        if any(os.path.exists(os.path.join(data_dir, index, "imaging" + ext))
               for ext in (".nii.gz", ".dcm", ".png"))
    ]
    segmentations = [
        index for index in indices
        if any(os.path.exists(os.path.join(data_dir, index, "segmentation" + ext))
               for ext in (".nii.gz", ".dcm", ".png"))
    ]

    return {
        "dataio": dataio,
        "indices": indices,
        "cnt": cnt,
        "images": images,
        "segmentations": segmentations,
        "data_dir": data_dir
    }
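
# A hedged usage sketch (not from the original source): the argument values and
# the "nifti" key in miscnn_data_interfaces are assumptions for illustration.
if __name__ == "__main__":
    import argparse
    # Hypothetical arguments; a real script would come from an ArgumentParser
    args = argparse.Namespace(data_dir="data", imagetype="nifti")
    ctx = setup_execution(args)
    print(f"{ctx['cnt']} samples, {len(ctx['segmentations'])} with segmentations")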
Example 14
 def setUpClass(self):
     np.random.seed(1234)
     # Create 2D imaging and segmentation data set
     self.dataset2D = dict()
     for i in range(0, 10):
         img = np.random.rand(16, 16) * 255
         img = img.astype(int)
         seg = np.random.rand(16, 16) * 2
         seg = seg.astype(int)
         self.dataset2D["TEST.sample_" + str(i)] = (img, seg)
     # Initialize Dictionary IO Interface
     io_interface2D = Dictionary_interface(self.dataset2D,
                                           classes=3,
                                           three_dim=False)
     # Initialize temporary directory
     self.tmp_dir2D = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
     tmp_batches = os.path.join(self.tmp_dir2D.name, "batches")
     # Initialize Data IO
     self.data_io2D = Data_IO(io_interface2D,
                              input_path="",
                              output_path="",
                              batch_path=tmp_batches,
                              delete_batchDir=False)
     # Create 3D imaging and segmentation data set
     self.dataset3D = dict()
     for i in range(0, 10):
         img = np.random.rand(16, 16, 16) * 255
         img = img.astype(int)
         seg = np.random.rand(16, 16, 16) * 3
         seg = seg.astype(int)
         if i in range(8, 10): sample = (img, None)
         else: sample = (img, seg)
         self.dataset3D["TEST.sample_" + str(i)] = sample
     # Initialize Dictionary IO Interface
     io_interface3D = Dictionary_interface(self.dataset3D,
                                           classes=3,
                                           three_dim=True)
     # Initialize temporary directory
     self.tmp_dir3D = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
     tmp_batches = os.path.join(self.tmp_dir3D.name, "batches")
     # Initialize Data IO
     self.data_io3D = Data_IO(io_interface3D,
                              input_path="",
                              output_path="",
                              batch_path=tmp_batches,
                              delete_batchDir=False)
Example 15
 def test_DATAIO_BATCHES_sampleLoading(self):
     data_io = Data_IO(self.io_interface, input_path="", output_path="",
                      batch_path=self.tmp_batches, delete_batchDir=False)
     sample = data_io.sample_loader("TEST.sample_0", backup=False,
                                    load_seg=True, load_pred=False)
     data_io.backup_sample(sample)
     sample_new = data_io.load_sample_pickle(sample.index)
     data_io.batch_cleanup()
     self.assertTrue(np.array_equal(sample_new.img_data, sample.img_data))
     self.assertTrue(np.array_equal(sample_new.seg_data, sample.seg_data))
Example 16
 def test_SUBFUNCTIONS_postprocessing(self):
     ds = dict()
     for i in range(0, 10):
         img = np.random.rand(16, 16, 16) * 255
         img = img.astype(int)
         seg = np.random.rand(16, 16, 16) * 3
         seg = seg.astype(int)
         sample = (img, seg)
         ds["TEST.sample_" + str(i)] = sample
     io_interface = Dictionary_interface(ds, classes=3, three_dim=True)
     self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
     tmp_batches = os.path.join(self.tmp_dir.name, "batches")
     dataio = Data_IO(io_interface,
                      input_path="",
                      output_path="",
                      batch_path=tmp_batches,
                      delete_batchDir=False)
     sf = [Resize((9, 9, 9)), Normalization(), Clipping(min=-1.0, max=0.0)]
     pp = Preprocessor(dataio,
                       batch_size=1,
                       prepare_subfunctions=False,
                       analysis="patchwise-grid",
                       subfunctions=sf,
                       patch_shape=(4, 4, 4))
     sample_list = dataio.get_indiceslist()
     for index in sample_list:
         sample = dataio.sample_loader(index)
         for func in pp.subfunctions:
             func.preprocessing(sample, training=False)
         pp.cache["shape_" + str(index)] = sample.img_data.shape
         sample.seg_data = np.random.rand(9, 9, 9) * 3
         sample.seg_data = sample.seg_data.astype(int)
         sample.seg_data = to_categorical(sample.seg_data, num_classes=3)
         data_patches = pp.analysis_patchwise_grid(sample,
                                                   training=True,
                                                   data_aug=False)
         seg_list = []
         for i in range(0, len(data_patches)):
             seg_list.append(data_patches[i][1])
         seg = np.stack(seg_list, axis=0)
         self.assertEqual(seg.shape, (27, 4, 4, 4, 3))
         pred = pp.postprocessing(sample, seg)
         self.assertEqual(pred.shape, (16, 16, 16))
     self.tmp_dir.cleanup()
Example 17
 def test_DATAIO_BATCHES_sampleStorage(self):
     data_io = Data_IO(self.io_interface, input_path="", output_path="",
                      batch_path=self.tmp_batches, delete_batchDir=False)
     sample = data_io.sample_loader("TEST.sample_0", backup=False,
                                    load_seg=True, load_pred=False)
     data_io.backup_sample(sample)
     self.assertEqual(len(os.listdir(self.tmp_batches)), 1)
     data_io.batch_cleanup()
Example 18
 def test_SUBFUNCTIONS_fullrun(self):
     ds = dict()
     for i in range(0, 10):
         img = np.random.rand(16, 16, 16) * 255
         img = img.astype(int)
         seg = np.random.rand(16, 16, 16) * 3
         seg = seg.astype(int)
         sample = (img, seg)
         ds["TEST.sample_" + str(i)] = sample
     io_interface = Dictionary_interface(ds, classes=3, three_dim=True)
     self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
     tmp_batches = os.path.join(self.tmp_dir.name, "batches")
     dataio = Data_IO(io_interface, input_path="", output_path="",
                      batch_path=tmp_batches, delete_batchDir=False)
     sf = [Resize((16,16,16)), Normalization(), Clipping(min=-1.0, max=0.0)]
     pp = Preprocessor(dataio, batch_size=1, prepare_subfunctions=True,
                       analysis="fullimage", subfunctions=sf)
     nn = Neural_Network(preprocessor=pp)
     sample_list = dataio.get_indiceslist()
     nn.predict(sample_list, return_output=True)
Example 19
 def test_DATAIO_BASE_savePrediction(self):
     data_io = Data_IO(self.io_interface, input_path="",
                      output_path=os.path.join(self.tmp_dir.name, "pred"),
                      batch_path=self.tmp_batches, delete_batchDir=False)
     sample = data_io.sample_loader("TEST.sample_0", backup=False,
                                    load_seg=True, load_pred=False)
     self.assertIsNone(sample.pred_data)
     data_io.save_prediction(sample.seg_data, sample.index)
     self.assertTrue(os.path.exists(os.path.join(self.tmp_dir.name, "pred")))
     sample = data_io.sample_loader("TEST.sample_0", backup=False,
                                    load_seg=True, load_pred=True)
     self.assertTrue(np.array_equal(sample.seg_data, sample.pred_data))
Example 20
    def setUpClass(self):
        # Create imaging and segmentation data set
        np.random.seed(1234)
        self.dataset = dict()
        for i in range(0, 10):
            img = np.random.rand(16, 16, 16) * 256
            self.img = img.astype(int)
            seg = np.random.rand(16, 16, 16) * 3
            self.seg = seg.astype(int)
            sample = (self.img, self.seg)
            self.dataset["TEST.sample_" + str(i)] = sample
        # Initialize temporary directory
        self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
        self.tmp_data = os.path.join(self.tmp_dir.name, "data")
        os.mkdir(self.tmp_data)

        for key, value in self.dataset.items():
            write_sample(value, self.tmp_data, key)

        self.dataio = Data_IO(NIFTI_interface(), self.tmp_data)
Example 21
##
## Based on the KITS 19 data set (Kidney Tumor Segmentation Challenge 2019)
## Data Set: https://github.com/neheller/kits19

# Import all libraries we need
from miscnn import Data_IO, Preprocessor, Neural_Network
from miscnn.data_loading.interfaces import NIFTIslicer_interface
from miscnn.processing.subfunctions import Resize
import numpy as np

# Initialize the NIfTI interface IO slicer variant
interface = NIFTIslicer_interface(pattern="case_0000[0-3]", channels=1, classes=3)

# Initialize the Data IO class
data_path = "/home/mudomini/projects/KITS_challenge2019/kits19/data.interpolated/"
data_io = Data_IO(interface, data_path, delete_batchDir=False)

# Obtain the list of samples from our data set
## A sample is defined as a single slice (2D image)
samples_list = data_io.get_indiceslist()
samples_list.sort()

# Let's check that the NIfTI slicer interface works as expected
# and output the image and segmentation shape of a random slice
sample = data_io.sample_loader("case_00002:#:42", load_seg=True)
print(sample.img_data.shape, sample.seg_data.shape)

## As you may have noted, the index of a slice is defined as
## the volume file name and the slice number, separated by ":#:"

# Specify subfunctions for preprocessing
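## A minimal sketch, assuming the original continued here: resize each slice
## to a fixed 2D shape with the Resize subfunction imported above (the target
## shape (96, 96) is an assumed value, not from the original script)
sf = [Resize((96, 96))]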
Example 22
class evaluationTEST(unittest.TestCase):
    # Create random imaging and segmentation data
    @classmethod
    def setUpClass(self):
        np.random.seed(1234)
        # Create 2D imaging and segmentation data set
        self.dataset = dict()
        for i in range(0, 6):
            img = np.random.rand(16, 16) * 255
            self.img = img.astype(int)
            seg = np.random.rand(16, 16) * 2
            self.seg = seg.astype(int)
            self.dataset["TEST.sample_" + str(i)] = (self.img, self.seg)
        # Initialize Dictionary IO Interface
        io_interface = Dictionary_interface(self.dataset,
                                            classes=3,
                                            three_dim=False)
        # Initialize temporary directory
        self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
        tmp_batches = os.path.join(self.tmp_dir.name, "batches")
        # Initialize Data IO
        self.data_io = Data_IO(io_interface,
                               input_path=os.path.join(self.tmp_dir.name),
                               output_path=os.path.join(self.tmp_dir.name),
                               batch_path=tmp_batches,
                               delete_batchDir=False)
        # Initialize Preprocessor
        self.pp = Preprocessor(self.data_io,
                               batch_size=2,
                               data_aug=None,
                               analysis="fullimage")
        # Initialize Neural Network
        self.model = Neural_Network(self.pp)
        # Get sample list
        self.sample_list = self.data_io.get_indiceslist()

    # Delete all temporary files
    @classmethod
    def tearDownClass(self):
        self.tmp_dir.cleanup()

    #-------------------------------------------------#
    #                 Cross-Validation                #
    #-------------------------------------------------#
    def test_EVALUATION_crossValidation(self):
        eval_path = os.path.join(self.tmp_dir.name, "evaluation")
        cross_validation(self.sample_list,
                         self.model,
                         k_fold=3,
                         epochs=3,
                         iterations=None,
                         evaluation_path=eval_path,
                         run_detailed_evaluation=False,
                         draw_figures=False,
                         callbacks=[],
                         save_models=False,
                         return_output=False)
        self.assertTrue(os.path.exists(eval_path))
        self.assertTrue(os.path.exists(os.path.join(eval_path, "fold_0")))
        self.assertTrue(os.path.exists(os.path.join(eval_path, "fold_1")))
        self.assertTrue(os.path.exists(os.path.join(eval_path, "fold_2")))

    def test_EVALUATION_crossValidation_splitRun(self):
        eval_path = os.path.join(self.tmp_dir.name, "evaluation")
        split_folds(self.sample_list, k_fold=3, evaluation_path=eval_path)
        self.assertTrue(os.path.exists(eval_path))
        self.assertTrue(os.path.exists(os.path.join(eval_path, "fold_0")))
        self.assertTrue(os.path.exists(os.path.join(eval_path, "fold_1")))
        self.assertTrue(os.path.exists(os.path.join(eval_path, "fold_2")))
        for fold in range(0, 3):
            run_fold(fold,
                     self.model,
                     epochs=1,
                     iterations=None,
                     evaluation_path=eval_path,
                     draw_figures=False,
                     callbacks=[],
                     save_models=True)
            fold_dir = os.path.join(eval_path, "fold_0")
            self.assertTrue(
                os.path.exists(os.path.join(fold_dir, "history.tsv")))
            self.assertTrue(
                os.path.exists(os.path.join(fold_dir, "sample_list.csv")))
            self.assertTrue(
                os.path.exists(os.path.join(fold_dir, "model.hdf5")))

    #-------------------------------------------------#
    #                 Split Validation                #
    #-------------------------------------------------#
    def test_EVALUATION_splitValidation(self):
        eval_path = os.path.join(self.tmp_dir.name, "evaluation")
        split_validation(self.sample_list,
                         self.model,
                         percentage=0.3,
                         epochs=3,
                         iterations=None,
                         evaluation_path=eval_path,
                         run_detailed_evaluation=False,
                         draw_figures=False,
                         callbacks=[],
                         return_output=False)
        self.assertTrue(os.path.exists(eval_path))

    #-------------------------------------------------#
    #                  Leave One Out                  #
    #-------------------------------------------------#
    def test_EVALUATION_leaveOneOut(self):
        # Create 3D imaging and segmentation data set
        self.dataset3D = dict()
        for i in range(0, 6):
            img = np.random.rand(16, 16, 16) * 255
            self.img = img.astype(int)
            seg = np.random.rand(16, 16, 16) * 3
            self.seg = seg.astype(int)
            self.dataset3D["TEST.sample_" + str(i)] = (self.img, self.seg)
        # Initialize Dictionary IO Interface
        io_interface3D = Dictionary_interface(self.dataset3D,
                                              classes=3,
                                              three_dim=True)
        # Initialize temporary directory
        self.tmp_dir3D = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
        tmp_batches = os.path.join(self.tmp_dir3D.name, "batches")
        # Initialize Data IO
        self.data_io3D = Data_IO(io_interface3D,
                                 input_path=os.path.join(self.tmp_dir3D.name),
                                 output_path=os.path.join(self.tmp_dir3D.name),
                                 batch_path=tmp_batches,
                                 delete_batchDir=False)
        # Initialize Preprocessor
        self.pp3D = Preprocessor(self.data_io3D,
                                 batch_size=2,
                                 data_aug=None,
                                 analysis="fullimage")
        # Initialize Neural Network
        model = Neural_Network(self.pp3D)
        # Get sample list
        self.sample_list3D = self.data_io3D.get_indiceslist()

        eval_path = os.path.join(self.tmp_dir3D.name, "evaluation")
        leave_one_out(self.sample_list3D,
                      model,
                      epochs=3,
                      iterations=None,
                      evaluation_path=eval_path,
                      callbacks=[])
        self.assertTrue(os.path.exists(eval_path))
        # Cleanup stuff
        self.tmp_dir3D.cleanup()
Example 23
#-----------------------------------------------------#
#      Tensorflow Configuration for GPU Cluster       #
#-----------------------------------------------------#
# physical_devices = tf.config.list_physical_devices('GPU')
# tf.config.experimental.set_memory_growth(physical_devices[0], True)

#-----------------------------------------------------#
#               Setup of MIScnn Pipeline              #
#-----------------------------------------------------#
# Initialize Data IO Interface for NIfTI data
## We are using 4 classes: background, lung_left, lung_right, covid-19
interface = NIFTI_interface(channels=1, classes=4)

# Create Data IO object to load and write samples in the file structure
data_io = Data_IO(interface, input_path="data", delete_batchDir=False)

# Access all available samples in our file structure
sample_list = data_io.get_indiceslist()
sample_list.sort()

# Create and configure the Data Augmentation class
data_aug = Data_Augmentation(cycles=1, scaling=True, rotations=True,
                             elastic_deform=True, mirror=True,
                             brightness=True, contrast=True, gamma=True,
                             gaussian_noise=True)

# Create a clipping Subfunction for the CT lung window (-1250 to 250 HU)
sf_clipping = Clipping(min=-1250, max=250)
# Create a pixel value normalization Subfunction to scale values to the range 0-255
sf_normalize = Normalization(mode="grayscale")
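
# A hedged continuation sketch, mirroring the Preprocessor usage in the other
# examples of this collection (batch size and analysis mode are assumptions;
# Preprocessor is assumed to be imported from miscnn)
sf = [sf_clipping, sf_normalize]
pp = Preprocessor(data_io, data_aug=data_aug, batch_size=2,
                  subfunctions=sf, prepare_subfunctions=True,
                  analysis="fullimage")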
Example 24
 def test_DATAIO_BASE_create(self):
     data_io = Data_IO(self.io_interface,
                       input_path="",
                       output_path="",
                       batch_path=self.tmp_batches,
                       delete_batchDir=False)
Example 25
class PatchOperationsTEST(unittest.TestCase):
    # Create random imaging and segmentation data
    @classmethod
    def setUpClass(self):
        np.random.seed(1234)
        # Create 2D imaging and segmentation data set
        self.dataset2D = dict()
        for i in range(0, 10):
            img = np.random.rand(16, 16) * 255
            self.img = img.astype(int)
            seg = np.random.rand(16, 16) * 2
            self.seg = seg.astype(int)
            self.dataset2D["TEST.sample_" + str(i)] = (self.img, self.seg)
        # Initialize Dictionary IO Interface
        io_interface2D = Dictionary_interface(self.dataset2D,
                                              classes=3,
                                              three_dim=False)
        # Initialize temporary directory
        self.tmp_dir2D = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
        tmp_batches = os.path.join(self.tmp_dir2D.name, "batches")
        # Initialize Data IO
        self.data_io2D = Data_IO(io_interface2D,
                                 input_path="",
                                 output_path="",
                                 batch_path=tmp_batches,
                                 delete_batchDir=False)
        # Create 3D imaging and segmentation data set
        self.dataset3D = dict()
        for i in range(0, 10):
            img = np.random.rand(16, 16, 16) * 255
            self.img = img.astype(int)
            seg = np.random.rand(16, 16, 16) * 3
            self.seg = seg.astype(int)
            self.dataset3D["TEST.sample_" + str(i)] = (self.img, self.seg)
        # Initialize Dictionary IO Interface
        io_interface3D = Dictionary_interface(self.dataset3D,
                                              classes=3,
                                              three_dim=True)
        # Initialize temporary directory
        self.tmp_dir3D = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
        tmp_batches = os.path.join(self.tmp_dir3D.name, "batches")
        # Initialize Data IO
        self.data_io3D = Data_IO(io_interface3D,
                                 input_path="",
                                 output_path="",
                                 batch_path=tmp_batches,
                                 delete_batchDir=False)

    # Delete all temporary files
    @classmethod
    def tearDownClass(self):
        self.tmp_dir2D.cleanup()
        self.tmp_dir3D.cleanup()

    #-------------------------------------------------#
    #                   Slice Matrix                  #
    #-------------------------------------------------#
    def test_PATCHOPERATIONS_slicing(self):
        sample_list = self.data_io2D.get_indiceslist()
        for index in sample_list:
            sample = self.data_io2D.sample_loader(index)
            patches = slice_matrix(sample.img_data,
                                   window=(5, 5),
                                   overlap=(2, 2),
                                   three_dim=False)
            self.assertEqual(len(patches), 25)
            self.assertEqual(patches[0].shape, (5, 5, 1))
        sample_list = self.data_io3D.get_indiceslist()
        for index in sample_list:
            sample = self.data_io3D.sample_loader(index)
            patches = slice_matrix(sample.img_data,
                                   window=(5, 5, 5),
                                   overlap=(2, 2, 2),
                                   three_dim=True)
            self.assertEqual(len(patches), 125)
            self.assertEqual(patches[0].shape, (5, 5, 5, 1))

    #-------------------------------------------------#
    #               Concatenate Matrices              #
    #-------------------------------------------------#
    def test_PATCHOPERATIONS_concatenate(self):
        sample_list = self.data_io2D.get_indiceslist()
        for index in sample_list:
            sample = self.data_io2D.sample_loader(index)
            patches = slice_matrix(sample.img_data,
                                   window=(5, 5),
                                   overlap=(2, 2),
                                   three_dim=False)
            concat = concat_matrices(patches=patches,
                                     image_size=(16, 16),
                                     window=(5, 5),
                                     overlap=(2, 2),
                                     three_dim=False)
            self.assertEqual(concat.shape, (16, 16, 1))
        sample_list = self.data_io3D.get_indiceslist()
        for index in sample_list:
            sample = self.data_io3D.sample_loader(index)
            patches = slice_matrix(sample.img_data,
                                   window=(5, 5, 5),
                                   overlap=(2, 2, 2),
                                   three_dim=True)
            concat = concat_matrices(patches=patches,
                                     image_size=(16, 16, 16),
                                     window=(5, 5, 5),
                                     overlap=(2, 2, 2),
                                     three_dim=True)
            self.assertEqual(concat.shape, (16, 16, 16, 1))

    #-------------------------------------------------#
    #                  Patch Padding                  #
    #-------------------------------------------------#
    def test_PATCHOPERATIONS_padding(self):
        sample_list = self.data_io2D.get_indiceslist()
        for index in sample_list:
            sample = self.data_io2D.sample_loader(index)
            img_padded = pad_patch(np.expand_dims(sample.img_data, axis=0),
                                   patch_shape=(8, 20),
                                   return_slicer=False)
            self.assertEqual(img_padded.shape, (1, 16, 20, 1))
        sample_list = self.data_io3D.get_indiceslist()
        for index in sample_list:
            sample = self.data_io3D.sample_loader(index)
            img_padded = pad_patch(np.expand_dims(sample.img_data, axis=0),
                                   patch_shape=(8, 16, 32),
                                   return_slicer=False)
            self.assertEqual(img_padded.shape, (1, 16, 16, 32, 1))

    #-------------------------------------------------#
    #                  Patch Cropping                 #
    #-------------------------------------------------#
    def test_PATCHOPERATIONS_cropping(self):
        sample_list = self.data_io2D.get_indiceslist()
        for index in sample_list:
            sample = self.data_io2D.sample_loader(index)
            img_padded, slicer = pad_patch(np.expand_dims(sample.img_data,
                                                          axis=0),
                                           patch_shape=(8, 20),
                                           return_slicer=True)
            img_processed = crop_patch(img_padded, slicer)
            self.assertEqual(img_processed.shape, (1, 16, 16, 1))
        sample_list = self.data_io3D.get_indiceslist()
        for index in sample_list:
            sample = self.data_io3D.sample_loader(index)
            img_padded, slicer = pad_patch(np.expand_dims(sample.img_data,
                                                          axis=0),
                                           patch_shape=(8, 16, 32),
                                           return_slicer=True)
            img_processed = crop_patch(img_padded, slicer)
            self.assertEqual(img_processed.shape, (1, 16, 16, 16, 1))
Example 26
 def test_DATAIO_BATCHES_cleanup(self):
     data_io = Data_IO(self.io_interface,
                       input_path="",
                       output_path="",
                       batch_path=self.tmp_batches,
                       delete_batchDir=False)
     sample = data_io.sample_loader("TEST.sample_0",
                                    backup=False,
                                    load_seg=True,
                                    load_pred=False)
     data_io.backup_batches(sample.img_data, sample.seg_data, "abc")
     data_io.backup_batches(sample.img_data, sample.seg_data, "def")
     data_io.backup_batches(sample.img_data, None, pointer="ghi")
     self.assertEqual(len(os.listdir(self.tmp_batches)), 5)
     data_io.batch_cleanup(pointer="def")
     self.assertEqual(len(os.listdir(self.tmp_batches)), 3)
     data_io.batch_cleanup()
     self.assertEqual(len(os.listdir(self.tmp_batches)), 0)
Example 27
import pandas as pd

#-----------------------------------------------------#
#                    Configurations                   #
#-----------------------------------------------------#
# Data directory
path_data = "data"

#-----------------------------------------------------#
#                   Data Exploration                  #
#-----------------------------------------------------#
# Initialize Data IO Interface for NIfTI data
interface = NIFTI_interface(channels=1, classes=3)

# Create Data IO object to load and write samples in the file structure
data_io = Data_IO(interface, path_data, delete_batchDir=True)

# Access all available samples in our file structure
sample_list = data_io.get_indiceslist()
sample_list.sort()

# Print out the sample list
print("Sample list:", sample_list)

# Now let's load each sample and collect diverse information from them
sample_data = {}
for index in tqdm(sample_list):
    # Sample loading
    sample = data_io.sample_loader(index, load_seg=True)
    # Create an empty list for the current sample in our data dictionary
    sample_data[index] = []
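    # Hedged continuation sketch (the original loop body is truncated; which
    # statistics to collect is an assumption, and numpy is assumed to be
    # imported as np): record image shape and the ground-truth class labels
    sample_data[index].append(sample.img_data.shape)
    sample_data[index].append(np.unique(sample.seg_data))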
Example 28
class architectureTEST(unittest.TestCase):
    # Create random imaging and segmentation data
    @classmethod
    def setUpClass(self):
        np.random.seed(1234)
        # Create 2D imaging and segmentation data set
        self.dataset2D = dict()
        for i in range(0, 1):
            img = np.random.rand(32, 32) * 255
            self.img = img.astype(int)
            seg = np.random.rand(32, 32) * 2
            self.seg = seg.astype(int)
            self.dataset2D["TEST.sample_" + str(i)] = (self.img, self.seg)
        # Initialize Dictionary IO Interface
        io_interface2D = Dictionary_interface(self.dataset2D, classes=3,
                                              three_dim=False)
        # Initialize temporary directory
        self.tmp_dir2D = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
        tmp_batches = os.path.join(self.tmp_dir2D.name, "batches")
        # Initialize Data IO
        self.data_io2D = Data_IO(io_interface2D,
                                 input_path=os.path.join(self.tmp_dir2D.name),
                                 output_path=os.path.join(self.tmp_dir2D.name),
                                 batch_path=tmp_batches, delete_batchDir=False)
        # Initialize Preprocessor
        self.pp2D = Preprocessor(self.data_io2D, batch_size=1,
                                 data_aug=None, analysis="fullimage")
        # Get sample list
        self.sample_list2D = self.data_io2D.get_indiceslist()
        # Create 3D imaging and segmentation data set
        self.dataset3D = dict()
        for i in range(0, 1):
            img = np.random.rand(32, 32, 32) * 255
            self.img = img.astype(int)
            seg = np.random.rand(32, 32, 32) * 3
            self.seg = seg.astype(int)
            self.dataset3D["TEST.sample_" + str(i)] = (self.img, self.seg)
        # Initialize Dictionary IO Interface
        io_interface3D = Dictionary_interface(self.dataset3D, classes=3,
                                              three_dim=True)
        # Initialize temporary directory
        self.tmp_dir3D = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
        tmp_batches = os.path.join(self.tmp_dir3D.name, "batches")
        # Initialize Data IO
        self.data_io3D = Data_IO(io_interface3D,
                                 input_path=os.path.join(self.tmp_dir3D.name),
                                 output_path=os.path.join(self.tmp_dir3D.name),
                                 batch_path=tmp_batches, delete_batchDir=False)
        # Initialize Preprocessor
        self.pp3D = Preprocessor(self.data_io3D, batch_size=1,
                                 data_aug=None, analysis="fullimage")
        # Get sample list
        self.sample_list3D = self.data_io3D.get_indiceslist()

    # Delete all temporary files
    @classmethod
    def tearDownClass(self):
        self.tmp_dir2D.cleanup()
        self.tmp_dir3D.cleanup()

    #-------------------------------------------------#
    #                  U-Net Standard                 #
    #-------------------------------------------------#
    def test_ARCHITECTURES_UNET_standard(self):
        model2D = Neural_Network(self.pp2D, architecture=UNet_standard())
        model2D.predict(self.sample_list2D)
        model3D = Neural_Network(self.pp3D, architecture=UNet_standard())
        model3D.predict(self.sample_list3D)

    #-------------------------------------------------#
    #                   U-Net Plain                   #
    #-------------------------------------------------#
    def test_ARCHITECTURES_UNET_plain(self):
        model2D = Neural_Network(self.pp2D, architecture=UNet_plain())
        model2D.predict(self.sample_list2D)
        model3D = Neural_Network(self.pp3D, architecture=UNet_plain())
        model3D.predict(self.sample_list3D)

    #-------------------------------------------------#
    #                  U-Net Residual                 #
    #-------------------------------------------------#
    def test_ARCHITECTURES_UNET_residual(self):
        model2D = Neural_Network(self.pp2D, architecture=UNet_residual())
        model2D.predict(self.sample_list2D)
        model3D = Neural_Network(self.pp3D, architecture=UNet_residual())
        model3D.predict(self.sample_list3D)

    #-------------------------------------------------#
    #                  U-Net MultiRes                 #
    #-------------------------------------------------#
    def test_ARCHITECTURES_UNET_multires(self):
        model2D = Neural_Network(self.pp2D, architecture=UNet_multiRes())
        model2D.predict(self.sample_list2D)
        model3D = Neural_Network(self.pp3D, architecture=UNet_multiRes())
        model3D.predict(self.sample_list3D)

    #-------------------------------------------------#
    #                   U-Net Dense                   #
    #-------------------------------------------------#
    def test_ARCHITECTURES_UNET_dense(self):
        model2D = Neural_Network(self.pp2D, architecture=UNet_dense())
        model2D.predict(self.sample_list2D)
        model3D = Neural_Network(self.pp3D, architecture=UNet_dense())
        model3D.predict(self.sample_list3D)

    #-------------------------------------------------#
    #                  U-Net Compact                  #
    #-------------------------------------------------#
    def test_ARCHITECTURES_UNET_compact(self):
        model2D = Neural_Network(self.pp2D, architecture=UNet_compact())
        model2D.predict(self.sample_list2D)
        model3D = Neural_Network(self.pp3D, architecture=UNet_compact())
        model3D.predict(self.sample_list3D)
Example 29
    #               + ggtitle("Fitting Curve during Training")
    #               + xlab("Epoch")
    #               + ylab("Loss Function")
    #               + theme_bw())
    fig.save(filename="fitting_curve.png", path=eval_path,
             width=12, height=10, dpi=300)

#-----------------------------------------------------#
#                    Run Evaluation                   #
#-----------------------------------------------------#
# Initialize Data IO Interface for NIfTI data
## We are using 4 classes: background, lung_left, lung_right, covid-19
interface = NIFTI_interface(channels=1, classes=4)

# Create Data IO object to load and write samples in the file structure
data_io = Data_IO(interface, input_path="data", output_path=pred_path)

# Access all available samples in our file structure
sample_list = data_io.get_indiceslist()
sample_list.sort()

# Initialize dataframe
cols = ["index", "score", "background", "lung_L", "lung_R", "infection"]
df = pd.DataFrame(data=[], dtype=np.float64, columns=cols)

# Iterate over each sample
for index in tqdm(sample_list):
    # Load a sample including its image, ground truth and prediction
    sample = data_io.sample_loader(index, load_seg=True, load_pred=True)
    # Access image, ground truth and prediction data
    image = sample.img_data
    def run(self):
        # Create sample list for miscnn
        util.create_sample_list(self.input_dir)

        # Initialize Data IO Interface for NIfTI data
        interface = NIFTI_interface(channels=1, classes=2)

        # Create Data IO object to load and write samples in the file structure
        data_io = Data_IO(interface,
                          input_path=self.input_dir,
                          delete_batchDir=False)

        # Access all available samples in our file structure
        sample_list = data_io.get_indiceslist()
        sample_list.sort()

        # Create a resampling Subfunction to voxel spacing 1.58 x 1.58 x 2.70
        sf_resample = Resampling((1.58, 1.58, 2.70))

        # Create a pixel value normalization Subfunction for z-score scaling
        sf_zscore = Normalization(mode="z-score")

        # Create a pixel value normalization Subfunction to scale values to the range 0-255
        sf_normalize = Normalization(mode="grayscale")

        # Assemble Subfunction classes into a list
        sf = [sf_normalize, sf_resample, sf_zscore]

        # Create and configure the Preprocessor class
        pp = Preprocessor(data_io,
                          batch_size=2,
                          subfunctions=sf,
                          prepare_subfunctions=True,
                          prepare_batches=False,
                          analysis="patchwise-crop",
                          patch_shape=(160, 160, 80))

        # Adjust the patch overlap for predictions
        pp.patchwise_overlap = (80, 80, 30)

        # Initialize the Architecture
        unet_standard = Architecture(depth=4,
                                     activation="softmax",
                                     batch_normalization=True)

        # Create the Neural Network model
        model = Neural_Network(
            preprocessor=pp,
            architecture=unet_standard,
            loss=tversky_crossentropy,
            metrics=[tversky_loss, dice_soft, dice_crossentropy],
            batch_queue_size=3,
            workers=1,
            learninig_rate=0.001)  # (sic) MIScnn's API spells this parameter "learninig_rate"

        # Load best model weights during fitting
        model.load(f'{self.model_dir}{self.model_name}.hdf5')

        # Obtain training and validation data set ----- CHANGE BASED ON PRED/TRAIN
        images, _ = load_disk2fold(f'{self.input_dir}sample_list.json')

        print('\n\nRunning automatic segmentation on samples...\n')
        print(f'Segmenting images: {images}')

        # Compute predictions
        self.predictions = model.predict(images)

        # Delete folder created by miscnn
        shutil.rmtree('batches/')