def test_IOI_DICTIONARY_initialize(self):
    my_dict = dict()
    my_dict["dict_sample"] = (self.img, self.seg)
    interface = Dictionary_interface(my_dict)
    sample_list = interface.initialize("")
    self.assertEqual(len(sample_list), 1)
    self.assertEqual(sample_list[0], "dict_sample")
@classmethod
def setUpClass(self):
    np.random.seed(1234)
    # Create 2D imaging and segmentation data set
    self.dataset2D = dict()
    for i in range(0, 6):
        img = np.random.rand(16, 16) * 255
        self.img = img.astype(int)
        seg = np.random.rand(16, 16) * 3
        self.seg = seg.astype(int)
        self.dataset2D["TEST.sample_" + str(i)] = (self.img, self.seg)
    # Initialize Dictionary IO Interface
    io_interface2D = Dictionary_interface(self.dataset2D, classes=3,
                                          three_dim=False)
    # Initialize temporary directory
    self.tmp_dir2D = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
    tmp_batches = os.path.join(self.tmp_dir2D.name, "batches")
    # Initialize Data IO
    self.data_io2D = Data_IO(io_interface2D,
                             input_path=os.path.join(self.tmp_dir2D.name),
                             output_path=os.path.join(self.tmp_dir2D.name),
                             batch_path=tmp_batches, delete_batchDir=False)
    # Initialize Preprocessor
    self.pp2D = Preprocessor(self.data_io2D, batch_size=2, data_aug=None,
                             analysis="fullimage")
    # Get sample list
    self.sample_list2D = self.data_io2D.get_indiceslist()
    # Create 3D imaging and segmentation data set
    self.dataset3D = dict()
    for i in range(0, 6):
        img = np.random.rand(16, 16, 16) * 255
        self.img = img.astype(int)
        seg = np.random.rand(16, 16, 16) * 3
        self.seg = seg.astype(int)
        self.dataset3D["TEST.sample_" + str(i)] = (self.img, self.seg)
    # Initialize Dictionary IO Interface
    io_interface3D = Dictionary_interface(self.dataset3D, classes=3,
                                          three_dim=True)
    # Initialize temporary directory
    self.tmp_dir3D = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
    tmp_batches = os.path.join(self.tmp_dir3D.name, "batches")
    # Initialize Data IO
    self.data_io3D = Data_IO(io_interface3D,
                             input_path=os.path.join(self.tmp_dir3D.name),
                             output_path=os.path.join(self.tmp_dir3D.name),
                             batch_path=tmp_batches, delete_batchDir=False)
    # Initialize Preprocessor
    self.pp3D = Preprocessor(self.data_io3D, batch_size=2, data_aug=None,
                             analysis="fullimage")
    # Get sample list
    self.sample_list3D = self.data_io3D.get_indiceslist()
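# Hedged usage sketch (illustrative addition; the test name is hypothetical):
# samples registered through the Dictionary_interface can be loaded back via
# the Data_IO objects built in setUpClass, mirroring the sample_loader()
# usage in test_SUBFUNCTIONS_postprocessing elsewhere in this suite.
def test_EXAMPLE_sample_loading(self):
    # Load the first registered 2D sample through the Data IO layer
    sample = self.data_io2D.sample_loader(self.sample_list2D[0])
    self.assertIsNotNone(sample.img_data)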
def test_IOI_DICTIONARY_loading(self):
    my_dict = dict()
    my_dict["dict_sample"] = (self.img, self.seg)
    interface = Dictionary_interface(my_dict)
    sample_list = interface.initialize("")
    img = interface.load_image(sample_list[0])
    seg = interface.load_segmentation(sample_list[0])
    self.assertTrue(np.array_equal(img[0], self.img))
    self.assertTrue(np.array_equal(seg, self.seg))
def test_SUBFUNCTIONS_preprocessing(self):
    ds = dict()
    for i in range(0, 10):
        img = np.random.rand(16, 16, 16) * 255
        img = img.astype(int)
        seg = np.random.rand(16, 16, 16) * 3
        seg = seg.astype(int)
        sample = (img, seg)
        ds["TEST.sample_" + str(i)] = sample
    io_interface = Dictionary_interface(ds, classes=3, three_dim=True)
    self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
    tmp_batches = os.path.join(self.tmp_dir.name, "batches")
    dataio = Data_IO(io_interface, input_path="", output_path="",
                     batch_path=tmp_batches, delete_batchDir=False)
    sf = [Resize((8, 8, 8)), Normalization(), Clipping(min=-1.0, max=0.0)]
    pp = Preprocessor(dataio, data_aug=None, batch_size=1,
                      prepare_subfunctions=False, analysis="fullimage",
                      subfunctions=sf)
    sample_list = dataio.get_indiceslist()
    batches = pp.run(sample_list, training=True, validation=False)
    for i in range(0, 10):
        img = batches[i][0]
        seg = batches[i][1]
        self.assertEqual(img.shape, (1, 8, 8, 8, 1))
        self.assertEqual(seg.shape, (1, 8, 8, 8, 3))
        # Clipping subfunction should bound intensities to [-1.0, 0.0]
        self.assertTrue(np.min(img) >= -1.0 and np.max(img) <= 0.0)
    self.tmp_dir.cleanup()
def test_SUBFUNCTIONS_prepare_MULTIPROCESSING(self):
    ds = dict()
    for i in range(0, 5):
        img = np.random.rand(16, 16, 16) * 255
        img = img.astype(int)
        seg = np.random.rand(16, 16, 16) * 3
        seg = seg.astype(int)
        sample = (img, seg)
        ds["TEST.sample_" + str(i)] = sample
    io_interface = Dictionary_interface(ds, classes=3, three_dim=True)
    self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
    tmp_batches = os.path.join(self.tmp_dir.name, "batches")
    dataio = Data_IO(io_interface, input_path="", output_path="",
                     batch_path=tmp_batches, delete_batchDir=False)
    sf = [Resize((8, 8, 8)), Normalization(), Clipping(min=-1.0, max=0.0)]
    pp = Preprocessor(dataio, batch_size=1, prepare_subfunctions=True,
                      analysis="fullimage", subfunctions=sf,
                      use_multiprocessing=True)
    pp.mp_threads = 4
    sample_list = dataio.get_indiceslist()
    pp.run_subfunctions(sample_list, training=True)
    batches = pp.run(sample_list, training=True, validation=False)
    # One prepared pickle file per sample is expected in the batch directory
    self.assertEqual(len(os.listdir(tmp_batches)), 5)
    for i in range(0, 5):
        file_prepared_subfunctions = os.path.join(
            tmp_batches,
            str(pp.data_io.seed) + ".TEST.sample_" + str(i) + ".pickle")
        self.assertTrue(os.path.exists(file_prepared_subfunctions))
        img = batches[i][0]
        seg = batches[i][1]
        self.assertIsNotNone(img)
        self.assertIsNotNone(seg)
        self.assertEqual(img.shape, (1, 8, 8, 8, 1))
        self.assertEqual(seg.shape, (1, 8, 8, 8, 3))
    # Cleanup temporary directory
    self.tmp_dir.cleanup()
@classmethod
def setUpClass(self):
    np.random.seed(1234)
    # Create imaging and segmentation data set
    self.dataset = dict()
    for i in range(0, 10):
        img = np.random.rand(16, 16, 16) * 255
        self.img = img.astype(int)
        seg = np.random.rand(16, 16, 16) * 3
        self.seg = seg.astype(int)
        sample = (self.img, self.seg)
        self.dataset["TEST.sample_" + str(i)] = sample
    # Initialize Dictionary IO Interface
    io_interface = Dictionary_interface(self.dataset, classes=3,
                                        three_dim=True)
    # Initialize temporary directory
    self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
    tmp_batches = os.path.join(self.tmp_dir.name, "batches")
    # Initialize Data IO
    self.data_io = Data_IO(io_interface, input_path="", output_path="",
                           batch_path=tmp_batches, delete_batchDir=False)
    # Initialize Data Augmentation
    self.data_aug = Data_Augmentation()
    # Get sample list
    self.sample_list = self.data_io.get_indiceslist()
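# Hedged usage sketch (illustrative addition; the test name is hypothetical):
# the Data_Augmentation object from setUpClass is typically wired into a
# Preprocessor, which then applies augmentation while generating training
# batches via run(). With 2 samples and batch_size=2, one batch is expected.
def test_EXAMPLE_augmentation_pipeline(self):
    pp = Preprocessor(self.data_io, batch_size=2, data_aug=self.data_aug,
                      analysis="fullimage")
    batches = pp.run(self.sample_list[0:2], training=True, validation=False)
    self.assertEqual(len(batches), 1)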
@classmethod
def setUpClass(self):
    np.random.seed(1234)
    # Create 2D imaging and segmentation data set
    self.dataset = dict()
    for i in range(0, 2):
        img = np.random.rand(16, 16) * 255
        self.img = img.astype(int)
        seg = np.random.rand(16, 16) * 2
        self.seg = seg.astype(int)
        self.dataset["TEST.sample_" + str(i)] = (self.img, self.seg)
    # Initialize Dictionary IO Interface
    io_interface = Dictionary_interface(self.dataset, classes=3,
                                        three_dim=False)
    # Initialize temporary directory
    self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
    tmp_batches = os.path.join(self.tmp_dir.name, "batches")
    # Initialize Data IO
    self.data_io = Data_IO(io_interface,
                           input_path=os.path.join(self.tmp_dir.name),
                           output_path=os.path.join(self.tmp_dir.name),
                           batch_path=tmp_batches, delete_batchDir=False)
    # Initialize Preprocessor
    self.pp = Preprocessor(self.data_io, batch_size=1, data_aug=None,
                           analysis="fullimage")
    # Initialize Neural Network
    self.model = Neural_Network(self.pp)
    # Get sample list
    self.sample_list = self.data_io.get_indiceslist()
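# Hedged usage sketch (illustrative addition; the test name is hypothetical):
# mirroring the predict() call in test_SUBFUNCTIONS_fullrun, return_output=True
# should yield one prediction per sample from the model built in setUpClass.
def test_EXAMPLE_model_prediction(self):
    predictions = self.model.predict(self.sample_list, return_output=True)
    self.assertEqual(len(predictions), len(self.sample_list))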
def test_EVALUATION_leaveOneOut(self):
    # Create 3D imaging and segmentation data set
    self.dataset3D = dict()
    for i in range(0, 6):
        img = np.random.rand(16, 16, 16) * 255
        self.img = img.astype(int)
        seg = np.random.rand(16, 16, 16) * 3
        self.seg = seg.astype(int)
        self.dataset3D["TEST.sample_" + str(i)] = (self.img, self.seg)
    # Initialize Dictionary IO Interface
    io_interface3D = Dictionary_interface(self.dataset3D, classes=3,
                                          three_dim=True)
    # Initialize temporary directory
    self.tmp_dir3D = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
    tmp_batches = os.path.join(self.tmp_dir3D.name, "batches")
    # Initialize Data IO
    self.data_io3D = Data_IO(io_interface3D,
                             input_path=os.path.join(self.tmp_dir3D.name),
                             output_path=os.path.join(self.tmp_dir3D.name),
                             batch_path=tmp_batches, delete_batchDir=False)
    # Initialize Preprocessor
    self.pp3D = Preprocessor(self.data_io3D, batch_size=2, data_aug=None,
                             analysis="fullimage")
    # Initialize Neural Network
    model = Neural_Network(self.pp3D)
    # Get sample list
    self.sample_list3D = self.data_io3D.get_indiceslist()
    # Run Leave-One-Out cross-validation
    eval_path = os.path.join(self.tmp_dir3D.name, "evaluation")
    leave_one_out(self.sample_list3D, model, epochs=3, iterations=None,
                  evaluation_path=eval_path, callbacks=[])
    self.assertTrue(os.path.exists(eval_path))
    # Cleanup stuff
    self.tmp_dir3D.cleanup()
@classmethod
def setUpClass(self):
    np.random.seed(1234)
    # Create 2D imaging and segmentation data set
    self.dataset2D = dict()
    for i in range(0, 10):
        img = np.random.rand(16, 16) * 255
        img = img.astype(int)
        seg = np.random.rand(16, 16) * 2
        seg = seg.astype(int)
        self.dataset2D["TEST.sample_" + str(i)] = (img, seg)
    # Initialize Dictionary IO Interface
    io_interface2D = Dictionary_interface(self.dataset2D, classes=3,
                                          three_dim=False)
    # Initialize temporary directory
    self.tmp_dir2D = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
    tmp_batches = os.path.join(self.tmp_dir2D.name, "batches")
    # Initialize Data IO
    self.data_io2D = Data_IO(io_interface2D, input_path="", output_path="",
                             batch_path=tmp_batches, delete_batchDir=False)
    # Create 3D imaging and segmentation data set
    # (the last two samples deliberately lack a segmentation)
    self.dataset3D = dict()
    for i in range(0, 10):
        img = np.random.rand(16, 16, 16) * 255
        img = img.astype(int)
        seg = np.random.rand(16, 16, 16) * 3
        seg = seg.astype(int)
        if i in range(8, 10):
            sample = (img, None)
        else:
            sample = (img, seg)
        self.dataset3D["TEST.sample_" + str(i)] = sample
    # Initialize Dictionary IO Interface
    io_interface3D = Dictionary_interface(self.dataset3D, classes=3,
                                          three_dim=True)
    # Initialize temporary directory
    self.tmp_dir3D = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
    tmp_batches = os.path.join(self.tmp_dir3D.name, "batches")
    # Initialize Data IO
    self.data_io3D = Data_IO(io_interface3D, input_path="", output_path="",
                             batch_path=tmp_batches, delete_batchDir=False)
def test_IOI_DICTIONARY_predictionhandling(self):
    my_dict = dict()
    my_dict["dict_sample"] = (self.img, self.seg)
    interface = Dictionary_interface(my_dict)
    sample_list = interface.initialize("")
    interface.save_prediction(self.seg, "dict_sample", "")
    pred = interface.load_prediction("dict_sample", "")
    self.assertTrue(np.array_equal(pred, self.seg))
def test_IOI_DICTIONARY_predictionhandling(self):
    my_dict = dict()
    my_dict["dict_sample"] = (self.img, self.seg)
    interface = Dictionary_interface(my_dict)
    sample_list = interface.initialize("")
    sample = MIScnn_sample.Sample("dict_sample", np.asarray([0]), 1, 2)
    sample.add_prediction(self.seg)
    interface.save_prediction(sample, "")
    pred = interface.load_prediction("dict_sample", "")
    self.assertTrue(np.array_equal(pred.reshape(self.seg.shape), self.seg))
@classmethod
def setUpClass(self):
    # Create imaging and segmentation data set
    np.random.seed(1234)
    self.dataset = dict()
    for i in range(0, 10):
        img = np.random.rand(16, 16, 16) * 255
        self.img = img.astype(int)
        seg = np.random.rand(16, 16, 16) * 3
        self.seg = seg.astype(int)
        if i == 3:
            # Sample with image, segmentation, and prediction
            sample = (self.img, self.seg, self.seg)
        elif i == 5:
            # Sample with image and prediction, but no segmentation
            sample = (self.img, None, self.seg)
        else:
            sample = (self.img, self.seg)
        self.dataset["TEST.sample_" + str(i)] = sample
    # Initialize Dictionary IO Interface
    self.io_interface = Dictionary_interface(self.dataset)
    # Initialize temporary directory
    self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
    self.tmp_batches = os.path.join(self.tmp_dir.name, "batches")
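# Hedged usage sketch (illustrative addition; the test name is hypothetical):
# the interface and paths prepared in setUpClass can be wired into a Data_IO
# instance exactly like in the other fixtures, and all 10 dictionary samples
# should then appear in the index list.
def test_EXAMPLE_dataio_creation(self):
    data_io = Data_IO(self.io_interface, input_path="", output_path="",
                      batch_path=self.tmp_batches, delete_batchDir=False)
    self.assertEqual(len(data_io.get_indiceslist()), 10)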
def test_SUBFUNCTIONS_postprocessing(self):
    ds = dict()
    for i in range(0, 10):
        img = np.random.rand(16, 16, 16) * 255
        img = img.astype(int)
        seg = np.random.rand(16, 16, 16) * 3
        seg = seg.astype(int)
        sample = (img, seg)
        ds["TEST.sample_" + str(i)] = sample
    io_interface = Dictionary_interface(ds, classes=3, three_dim=True)
    self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
    tmp_batches = os.path.join(self.tmp_dir.name, "batches")
    dataio = Data_IO(io_interface, input_path="", output_path="",
                     batch_path=tmp_batches, delete_batchDir=False)
    sf = [Resize((9, 9, 9)), Normalization(), Clipping(min=-1.0, max=0.0)]
    pp = Preprocessor(dataio, batch_size=1, prepare_subfunctions=False,
                      analysis="patchwise-grid", subfunctions=sf,
                      patch_shape=(4, 4, 4))
    sample_list = dataio.get_indiceslist()
    for index in sample_list:
        sample = dataio.sample_loader(index)
        # Apply each subfunction (renamed loop variable to avoid
        # shadowing the sf list defined above)
        for func in pp.subfunctions:
            func.preprocessing(sample, training=False)
        pp.cache["shape_" + str(index)] = sample.img_data.shape
        sample.seg_data = np.random.rand(9, 9, 9) * 3
        sample.seg_data = sample.seg_data.astype(int)
        sample.seg_data = to_categorical(sample.seg_data, num_classes=3)
        data_patches = pp.analysis_patchwise_grid(sample, training=True,
                                                  data_aug=False)
        seg_list = []
        for i in range(0, len(data_patches)):
            seg_list.append(data_patches[i][1])
        seg = np.stack(seg_list, axis=0)
        # A 9x9x9 volume with 4x4x4 patches yields 3 patches per axis -> 27
        self.assertEqual(seg.shape, (27, 4, 4, 4, 3))
        # Postprocessing should restore the original 16x16x16 resolution
        pred = pp.postprocessing(sample, seg)
        self.assertEqual(pred.shape, (16, 16, 16))
    self.tmp_dir.cleanup()
def test_SUBFUNCTIONS_fullrun(self):
    ds = dict()
    for i in range(0, 10):
        img = np.random.rand(16, 16, 16) * 255
        img = img.astype(int)
        seg = np.random.rand(16, 16, 16) * 3
        seg = seg.astype(int)
        sample = (img, seg)
        ds["TEST.sample_" + str(i)] = sample
    io_interface = Dictionary_interface(ds, classes=3, three_dim=True)
    self.tmp_dir = tempfile.TemporaryDirectory(prefix="tmp.miscnn.")
    tmp_batches = os.path.join(self.tmp_dir.name, "batches")
    dataio = Data_IO(io_interface, input_path="", output_path="",
                     batch_path=tmp_batches, delete_batchDir=False)
    sf = [Resize((16, 16, 16)), Normalization(), Clipping(min=-1.0, max=0.0)]
    pp = Preprocessor(dataio, batch_size=1, prepare_subfunctions=True,
                      analysis="fullimage", subfunctions=sf)
    nn = Neural_Network(preprocessor=pp)
    sample_list = dataio.get_indiceslist()
    nn.predict(sample_list, return_output=True)
    # Cleanup temporary directory
    self.tmp_dir.cleanup()
def test_IOI_DICTIONARY_creation(self):
    my_dict = dict()
    my_dict["dict_sample"] = (self.img, self.seg)
    interface = Dictionary_interface(my_dict)