Example #1
def read(self, file_path):
    """
    Load the dataset from an HDF5 file.
    """
    h5file = open_h5file(file_path)
    # [()] reads the full dataset into memory; the old .value accessor
    # was removed in h5py 3.0.
    self.inputs = h5file["inputs"][()]
    self.is_perm = bool(h5file.attrs['is_perm'])
    self.outputs = h5file["outputs"][()]
    self.read_virtual(h5file)
    h5file.close()
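Every snippet on this page goes through open_h5file from spynet.utils.utilities, whose implementation is not shown here. A minimal sketch of what such a helper presumably wraps, assuming it is a thin read-mode h5py wrapper; the real spynet helper may do more:

import h5py

def open_h5file(file_path, mode="r"):
    # Assumed behavior only: open the file with h5py.
    return h5py.File(file_path, mode)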
Example #2
import imp
import pickle

from data_brain_parcellation import list_miccai_files
from spynet.utils.utilities import open_h5file

# NetworkUltimateConv's import is not shown in the scraped snippet.

if __name__ == '__main__':
    """
    Compute the segmentations of the testing brains with the trained networks (with approximation of the centroids)
    """

    experiment_path = "./experiments/test_iter_0/"
    data_path = "./datasets/test_iter/"
    cf_data = imp.load_source("cf_data",
                              data_path + "cfg_testing_data_creation.py")

    # Load the network
    net = NetworkUltimateConv()
    net.init(29, 29, 13, 134, 135)
    net.load_parameters(open_h5file(experiment_path + "net.net"))
    n_out = net.n_out

    # Load the scaler
    scaler = pickle.load(open(experiment_path + "s.scaler", "rb"))

    # Files on which to evaluate the network
    file_list = list_miccai_files(mode="folder", path="./datasets/miccai/2/")
    n_files = len(file_list)

    # Options for the generation of the dataset
    # The generation/evaluation of the dataset has to be split into batches as a whole brain does not fit into memory
    batch_size = 50000
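The comment above is the operative constraint: a whole brain produces far more voxel samples than fit in memory, so prediction has to run in chunks. A hedged sketch of such a loop; predict_in_batches is illustrative, and the net.predict(inputs, batch_size) signature is assumed from its use in Example #3 below:

import numpy as np

def predict_in_batches(net, inputs, batch_size=50000):
    # Slice the voxel-wise inputs into chunks so a full brain never has
    # to sit in memory at once, then stitch the predictions back together.
    outputs = []
    for start in range(0, inputs.shape[0], batch_size):
        chunk = inputs[start:start + batch_size]
        outputs.append(net.predict(chunk, chunk.shape[0]))
    return np.concatenate(outputs, axis=0)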
Example #3
import pickle

import numpy as np

from spynet.data.utils_3d.pick_target import *
from data_brain_parcellation import DatasetBrainParcellation, DataGeneratorBrain, list_miccai_files, RegionCentroids
from spynet.utils.utilities import open_h5file

if __name__ == '__main__':
    """
    Evaluate a trained network (without approximating the centroids)
    """

    experiment_path = "./experiments/paper_ultimate_conv/"
    data_path = "./datasets/paper_ultimate_conv/"

    # Load the network
    net = NetworkUltimateConv()
    net.init(33, 29, 5, 134, 135)
    net.load_parameters(open_h5file(experiment_path + "net.net"))
    n_out = net.n_out

    # Load the scaler
    scaler = pickle.load(open(experiment_path + "s.scaler", "rb"))

    testing_data_path = data_path + "test.h5"
    ds_testing = DatasetBrainParcellation()
    ds_testing.read(testing_data_path)
    scaler.scale(ds_testing.inputs)

    out_pred = net.predict(ds_testing.inputs, 1000)
    errors = np.argmax(out_pred, axis=1) != np.argmax(ds_testing.outputs,
                                                      axis=1)
    dice = compute_dice(np.argmax(out_pred, axis=1),
                        np.argmax(ds_testing.outputs, axis=1), 134)
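compute_dice itself is not shown on this page. A minimal per-class Dice sketch consistent with how it is called above (predicted labels, reference labels, number of classes); this is an assumed contract, not the project's implementation:

import numpy as np

def compute_dice(pred, ref, n_classes):
    # Dice coefficient per class: 2 * |P intersect R| / (|P| + |R|).
    dice = np.zeros(n_classes)
    for c in range(n_classes):
        p = pred == c
        r = ref == c
        denom = p.sum() + r.sum()
        dice[c] = 2.0 * np.logical_and(p, r).sum() / denom if denom else np.nan
    return dice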
Example #4
import imp
import pickle

from spynet.data.utils_3d.pick_target import *
from data_brain_parcellation import list_miccai_files
from spynet.utils.utilities import open_h5file

# NetworkUltimateConv's import is not shown in the scraped snippet;
# SelectWholeBrain, ExtractVoxelAll and PickVoxel are assumed to come
# from the pick_target module star-imported above.

if __name__ == '__main__':
    """
    Compute the segmentations of the testing brains with the trained networks (with approximation of the centroids)
    """

    experiment_path = "./experiments/test_iter_0/"
    data_path = "./datasets/test_iter/"
    cf_data = imp.load_source("cf_data", data_path + "cfg_testing_data_creation.py")

    # Load the network
    net = NetworkUltimateConv()
    net.init(29, 29, 13, 134, 135)
    net.load_parameters(open_h5file(experiment_path + "net.net"))
    n_out = net.n_out

    # Load the scaler
    scaler = pickle.load(open(experiment_path + "s.scaler", "rb"))

    # Files on which to evaluate the network
    file_list = list_miccai_files(mode="folder", path="./datasets/miccai/2/")
    n_files = len(file_list)

    # Options for the generation of the dataset
    # The generation/evaluation of the dataset has to be split into batches as a whole brain does not fit into memory
    batch_size = 50000
    select_region = SelectWholeBrain()
    extract_vx = ExtractVoxelAll(1)
    pick_vx = PickVoxel(select_region, extract_vx)
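net.load_parameters receives the raw handle returned by open_h5file. Assuming that handle is an h5py.File, the stored parameters could be collected like this; read_parameters is a hypothetical helper, not part of spynet:

import h5py

def read_parameters(h5file):
    # Walk the file and gather every dataset into a name -> array dict.
    params = {}
    def collect(name, obj):
        if isinstance(obj, h5py.Dataset):
            params[name] = obj[()]
    h5file.visititems(collect)
    return params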
Example #5

import PIL.Image

from spynet.utils.utilities import open_h5file

# Dataset, AutoEncoder, NeuronSigmoid, MSE and tile_raster_images come
# from project modules whose imports are not shown in this snippet.

if __name__ == '__main__':

    mode = "drop"

    experiment_path = "./experiments/mnist_example/"
    data_path = "./datasets/mnist/"

    testing_data_path = data_path + "test.h5"
    ds_testing = Dataset.create_and_read(testing_data_path)

    # Load the network
    net = AutoEncoder()
    net.init([28**2, 256, 28**2], dropout=True, dropout_p=[0.5], neuron_function=NeuronSigmoid())
    net.load_parameters(open_h5file(experiment_path + "netdrop.net"))

    i = ds_testing.inputs[0:10, :]
    e = net.predict(i, 10)

    print()
    print(MSE(e, i))

    image = PIL.Image.fromarray(tile_raster_images(
        X=net.ls_layers[0].ls_layer_blocks[0].w.get_value(borrow=True).T,
        img_shape=(28, 28), tile_shape=(16, 16),
        tile_spacing=(1, 1)))
    image.save(experiment_path + "filters" + mode + ".png")

    image = PIL.Image.fromarray(tile_raster_images(
        X=i,
        img_shape=(28, 28), tile_shape=(1, 10),
        tile_spacing=(1, 1)))
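MSE above is used as a plain reconstruction-error metric between the autoencoder output e and its input i. A minimal sketch matching that call, assuming mean squared error over all entries:

import numpy as np

def MSE(reconstruction, original):
    # Mean squared error between the autoencoder output and its input.
    return np.mean((reconstruction - original) ** 2)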