    # Paths to the trained experiment and to the dataset configuration
    experiment_path = "./experiments/test_iter_0/"
    data_path = "./datasets/test_iter/"
    cf_data = imp.load_source("cf_data", data_path + "cfg_testing_data_creation.py")

    # Load the network
    net = NetworkUltimateConv()
    net.init(29, 29, 13, 134, 135)
    net.load_parameters(open_h5file(experiment_path + "net.net"))
    n_out = net.n_out

    # Load the scaler
    with open(experiment_path + "s.scaler", "rb") as f:
        scaler = pickle.load(f)

    # Files on which to evaluate the network
    file_list = list_miccai_files(mode="folder", path="./datasets/miccai/2/")
    n_files = len(file_list)

    # Options for the generation of the dataset
    # The dataset has to be generated and evaluated in batches of voxels, because a
    # whole brain does not fit into memory (a possible batch loop is sketched after
    # this block)
    batch_size = 50000
    select_region = SelectWholeBrain()
    extract_vx = ExtractVoxelAll(1)
    pick_vx = PickVoxel(select_region, extract_vx)
    pick_patch = create_pick_features(cf_data)
    pick_tg = create_pick_target(cf_data)

    # Create the data generator
    data_gen = DataGeneratorBrain()
    data_gen.init_from(file_list, pick_vx, pick_patch, pick_tg)
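
    # ----------------------------------------------------------------------
    # Hedged sketch (not part of the original snippet): one way the batched
    # evaluation mentioned above could look. The generator method
    # data_gen.iter_batches() and the call net.predict() are hypothetical
    # names standing in for this codebase's real API; scaler is assumed to
    # follow the scikit-learn transform() convention.
    import numpy as np

    for idx_file in range(n_files):
        pred_chunks = []
        # Hypothetical: yield (voxel coords, patches, targets) for at most
        # batch_size voxels of file idx_file at a time, so that the full set
        # of patches for a brain is never held in memory at once
        for vx, patches, targets in data_gen.iter_batches(idx_file, batch_size):
            if scaler is not None:
                patches = scaler.transform(patches)
            pred_chunks.append(net.predict(patches))  # hypothetical call

        pred = np.concatenate(pred_chunks)  # (n_voxels, n_out) class scores
        labels = np.argmax(pred, axis=1)    # per-voxel predicted region label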
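
    # Second configuration: evaluate the "best_so_far" network on a fixed
    # subset of the MICCAI files instead of a whole folder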
    experiment_path = "./experiments/best_so_far/"
    data_path = "./datasets/test_iter/"
    cf_data = imp.load_source("cf_data", data_path + "cfg_testing_data_creation.py")

    # Load the network
    net = NetworkUltimateConv()
    net.init(29, 29, 13, 134, 135)
    net.load_parameters(open_h5file(experiment_path + "net.net"))
    n_out = net.n_out

    # Load the scaler
    with open(experiment_path + "s.scaler", "rb") as f:
        scaler = pickle.load(f)
    # scaler = None

    # Files on which to evaluate the network
    file_list = list_miccai_files(mode="idx_files",
                                  path="./datasets/miccai/2/",
                                  idx_files=range(20))
    n_files = len(file_list)

    # Options for the generation of the dataset
    # As above, generation and evaluation are split into batches of voxels because a
    # whole brain does not fit into memory (see the batch-loop sketch after the
    # first example)
    batch_size = 50000
    select_region = SelectWholeBrain()
    extract_vx = ExtractVoxelAll(1)
    pick_vx = PickVoxel(select_region, extract_vx)
    pick_patch = create_pick_features(cf_data)
    pick_tg = create_pick_target(cf_data)

    # Create the data generator
    data_gen = DataGeneratorBrain()
    data_gen.init_from(file_list, pick_vx, pick_patch, pick_tg)
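
    # ----------------------------------------------------------------------
    # Hedged note (not part of the original snippet): imp.load_source() is
    # deprecated since Python 3.4 and removed in Python 3.12. If this setup
    # has to run on a modern interpreter, the config module can be loaded
    # with importlib instead, keeping the same cf_data name:
    import importlib.util

    spec = importlib.util.spec_from_file_location(
        "cf_data", data_path + "cfg_testing_data_creation.py")
    cf_data = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(cf_data)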