Code Example #1
import time

# NOTE: the optimizer import and the model1d_multi path are assumed; the
# noddi_utils and utils imports mirror the other examples in this listing.
from keras.optimizers import Adam

from model_1d import model1d_multi
from noddi_utils import subsampling
from utils import readhdf5


def golkov_multi(data, n_directions, random_seed=400):
    """Predict NODDI parameter maps voxel-by-voxel with the 1D fully connected network."""

    # per-direction maxima (from training) used to normalize the input data
    max_path = (
        "/v/raid1b/egibbons/MRIdata/DTI/noddi/max_values_%i_directions_1d.h5" %
        n_directions)
    maxs = readhdf5.read_hdf5(max_path, "max_values")[None, None, None, :]

    # per-output-channel maxima used to undo the network's output scaling
    max_y_path = "/v/raid1b/egibbons/MRIdata/DTI/noddi/max_y_1d.h5"
    max_y = readhdf5.read_hdf5(max_y_path, "max_y")

    subsampling_pattern = subsampling.gensamples(n_directions,
                                                 random_seed=random_seed)

    image_size = (n_directions, )

    model = model1d_multi.fc_1d(image_size)
    model.compile(optimizer=Adam(lr=1e-3,
                                 beta_1=0.99,
                                 beta_2=0.995,
                                 epsilon=1e-08,
                                 decay=0.85),
                  loss="mean_absolute_error",
                  metrics=["accuracy"])
    model.load_weights("/v/raid1b/egibbons/models/noddi-%i_golkov_multi.h5" %
                       n_directions)
    print("golkov_multi model loaded")

    # keep only the sampled directions and normalize by the training maxima
    data_subsampled = data[:, :, :, subsampling_pattern]

    data_subsampled /= maxs

    dim0, dim1, n_slices, n_directions = data_subsampled.shape

    print("Predicting...")
    start = time.time()

    # flatten the volume so that every voxel becomes one independent 1D sample
    x = data_subsampled.reshape(n_slices * dim0 * dim1, -1)

    recon = model.predict(x, batch_size=10000)

    # reshape back to image space and undo the per-channel output scaling
    prediction = recon.reshape(dim0, dim1, n_slices, 4)

    for ii in range(4):
        prediction[:, :, :, ii] *= max_y[ii]

    print("Predictions completed...took: %f" % (time.time() - start))

    return prediction
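A minimal usage sketch for golkov_multi follows, assuming the NoddiData loader shown in the later examples; the case ID, the choice of accessor, and the output channel order are assumptions, not part of the original code.

import numpy as np

from noddi_utils import noddistudy

# Hypothetical call: load one study and predict from 24 of the acquired directions.
noddi_data = noddistudy.NoddiData("P032315")      # case ID taken from the test list in later examples
data = noddi_data.get_full().astype(np.float64)   # assumed accessor; (dim0, dim1, n_slices, n_total_directions)

maps = golkov_multi(data, n_directions=24)
print(maps.shape)  # (dim0, dim1, n_slices, 4); channel order ODI, fISO, fICVF, GFA is assumed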
Code Example #2
import time

# NOTE: the optimizer import is assumed; the remaining imports mirror the
# import block shown in the later examples of this listing.
from keras.optimizers import Adam

from model_2d import simple2d
from noddi_utils import subsampling
from utils import readhdf5


def model_raw(data, n_directions, random_seed=400, loss_type="l1"):
    """Predict parameter maps slice-by-slice from raw diffusion data with the 2D residual network."""

    image_size = (128, 128, n_directions)

    model = simple2d.res2d(image_size)
    # NOTE: the compiled loss is always mean absolute error; the loss_type
    # argument is only echoed in the printout below.
    model.compile(optimizer=Adam(lr=1e-3),
                  loss="mean_absolute_error",
                  metrics=["accuracy"])
    model.load_weights("/v/raid1b/egibbons/models/noddi-%i_raw.h5" %
                       (n_directions))
    print("2D dense model loaded for raw data.  Using %s loss" % loss_type)

    max_path = (
        "/v/raid1b/egibbons/MRIdata/DTI/noddi/max_values_%i_directions_raw.h5"
        % n_directions)
    maxs = readhdf5.read_hdf5(max_path, "max_values")[None, None, None, :]

    max_y_path = "/v/raid1b/egibbons/MRIdata/DTI/noddi/max_y_raw.h5"
    max_y = readhdf5.read_hdf5(max_y_path, "max_y")

    subsampling_pattern = subsampling.gensamples(n_directions,
                                                 random_seed=random_seed)
    # select the sampled directions, move slices to the batch axis,
    # and normalize by the per-direction training maxima
    x = data[:, :, :, subsampling_pattern]
    x = x.transpose(2, 0, 1, 3)

    x /= maxs

    print("Predicting 2D...")
    start = time.time()
    prediction = model.predict(x, batch_size=10).transpose(1, 2, 0, 3)
    print("Predictions completed...took: %f" % (time.time() - start))

    ### RESCALE OUTPUTS ###

    diffusivity_scaling = 1
    for ii in range(4):
        prediction[:, :, :, ii] *= max_y[ii]

    prediction[:, :, :, 3] /= diffusivity_scaling

    return prediction
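Both helpers follow the same normalization contract: inputs are divided by per-direction maxima taken from the training set, and the network outputs are multiplied back by per-channel maxima. A self-contained toy sketch of that contract, with made-up shapes and values:

import numpy as np

n_directions, n_outputs = 24, 4
x = np.random.rand(100, n_directions)         # 100 voxels worth of diffusion signal
maxs = np.random.rand(n_directions) + 1.0     # per-direction training maxima
max_y = np.random.rand(n_outputs) + 1.0       # per-output-channel maxima

x_norm = x / maxs                             # network inputs scaled into roughly [0, 1]
raw_out = np.random.rand(100, n_outputs)      # stand-in for model.predict(x_norm)
maps = raw_out * max_y                        # undo the output scaling, channel by channel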
Code Example #3
import json

from noddi_utils import noddistudy
from noddi_utils import subsampling

### SETUP ###
with open("noddi_metadata.json") as metadata:
    patient_database = json.load(metadata)
print("We have %i cases" % len(patient_database))

directions = [128, 64, 32, 24, 16, 8]

### MAIN LOOP ###

for num_directions in directions:
    print("Generating 1D data for %i directions" % num_directions)

    subsampling_indices = subsampling.gensamples(num_directions)

    num_cases = len(patient_database)

    print("\n")
    mm = 0
    for patient_number in sorted(patient_database.keys()):

        noddi_data = noddistudy.NoddiData(patient_number)

        if (noddi_data.get_type() == "test"
                or noddi_data.get_type() == "duplicate"):
            continue

        print("Currently reading: %s as %s data" %
              (patient_number, noddi_data.get_type()))
Code Example #4
    return data_patches


### SETUP ###
with open("noddi_metadata.json") as metadata:
    patient_database = json.load(metadata)
print("We have %i cases" % len(patient_database))

seeds = [100, 225, 300, 325, 400, 425, 500, 525, 600]

### MAIN LOOP ###
for random_seed in seeds:
    num_directions = 24
    print("Generating 2D data for %i directions" % num_directions)

    subsampling_indices = subsampling.gensamples(num_directions,
                                                 random_seed=random_seed)

    print("Processing %i directions" % len(subsampling_indices))

    ### DATA LOADING AND AUGMENTATION ###
    print("\n")
    ii = 0
    for patient_number in sorted(patient_database.keys()):

        noddi_data = noddistudy.NoddiData(patient_number)

        if (noddi_data.get_type() == "test"
                or noddi_data.get_type() == "duplicate"):
            continue

        print("Currently reading: %s as %s data" %
Code Example #5
import time

import numpy as np
# NOTE: the optimizer import and the paths of the separate NODDI and GFA
# model modules are assumed; the rest mirrors the other examples.
from keras.optimizers import Adam

from model_2d import simple2d_gfa
from model_2d import simple2d_noddi
from noddi_utils import subsampling
from utils import readhdf5


def separate_2d(data,
                n_directions,
                random_seed=400,
                loss_type="l1",
                scaling=True):

    """Predict NODDI and GFA maps with two separate 2D networks and stack the outputs."""

    # load the per-direction input maxima and per-channel output maxima
    max_path = (
        "/v/raid1b/egibbons/MRIdata/DTI/noddi/max_values_%i_directions_2d.h5" %
        n_directions)
    maxs = readhdf5.read_hdf5(max_path, "max_values")[None, None, None, :]

    max_y_path = "/v/raid1b/egibbons/MRIdata/DTI/noddi/max_y_2d.h5"
    max_y = readhdf5.read_hdf5(max_y_path, "max_y")

    subsampling_pattern = subsampling.gensamples(n_directions,
                                                 random_seed=random_seed)
    # select the sampled directions, move slices to the batch axis,
    # and normalize by the per-direction training maxima
    x = data[:, :, :, subsampling_pattern]
    x = x.transpose(2, 0, 1, 3)

    x /= maxs

    x_noddi = np.copy(x)
    x_gfa = np.copy(x)

    image_size = (128, 128, n_directions)

    # noddi model
    model_noddi = simple2d_noddi.res2d(image_size)
    model_noddi.compile(optimizer=Adam(lr=1e-3),
                        loss="mean_absolute_error",
                        metrics=["accuracy"])
    model_noddi.load_weights("/v/raid1b/egibbons/models/noddi-%i_2d_noddi.h5" %
                             (n_directions))

    model_gfa = simple2d_gfa.res2d(image_size)
    model_gfa.compile(optimizer=Adam(lr=1e-3),
                      loss="mean_absolute_error",
                      metrics=["accuracy"])

    if scaling:
        model_gfa.load_weights("/v/raid1b/egibbons/models/noddi-%i_2d_gfa.h5" %
                               (n_directions))
        scaling_factor = 5
    else:
        print("no scaling")
        model_gfa.load_weights(
            "/v/raid1b/egibbons/models/noddi-%i_2d_gfa_no_scale.h5" %
            (n_directions))
        scaling_factor = 1

    print("2D dense model loaded.  Using %s loss" % loss_type)

    print("Predicting 2D separate...")
    start = time.time()
    prediction_noddi = model_noddi.predict(x_noddi, batch_size=10).transpose(
        1, 2, 0, 3)
    prediction_gfa = model_gfa.predict(x_gfa,
                                       batch_size=10).transpose(1, 2, 0, 3)
    print("Predictions completed...took: %f" % (time.time() - start))

    prediction = np.concatenate(
        (prediction_noddi, prediction_gfa / scaling_factor), axis=3)

    ### RESCALE OUTPUTS ###
    for ii in range(4):
        prediction[:, :, :, ii] *= max_y[ii]

    return prediction
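A hedged usage sketch for separate_2d; the 128 x 128 in-plane size requirement follows from image_size above, while the case ID, accessor, and channel interpretation are assumptions.

import numpy as np

from noddi_utils import noddistudy

# Hypothetical call: scaling=True loads the GFA weights trained with the
# factor-of-5 target scaling; scaling=False loads the unscaled variant.
data = noddistudy.NoddiData("P032315").get_full().astype(np.float64)
maps = separate_2d(data, n_directions=24, scaling=True)
gfa_map = maps[:, :, :, 3]   # the GFA network output is concatenated last (see above)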
Code Example #6
from model_2d import dense2d
from model_2d import simple2d
from model_2d import unet2d
from noddi_utils import network_utils
from noddi_utils import noddistudy
from noddi_utils import predict
from noddi_utils import subsampling
from recon import imtools
from utils import readhdf5
from utils import display

test_cases = ["P032315", "P080715", "P061114", "N011118A", "N011118B"]

n_directions = 24

sampling_pattern = subsampling.gensamples(n_directions, random_seed=425)

### LOAD DATA ###
patient_number = test_cases[0]
noddi_data = noddistudy.NoddiData(patient_number)

# max_y_path = "/v/raid1b/egibbons/MRIdata/DTI/noddi/max_y_2d.h5"
# max_y = readhdf5.read_hdf5(max_y_path,"max_y")

data_full = noddi_data.get_full()
data_raw = noddi_data.get_raw()
data_odi = noddi_data.get_odi().transpose(1, 0, 2)[::-1, ::-1]
data_fiso = noddi_data.get_fiso().transpose(1, 0, 2)[::-1, ::-1]
data_ficvf = noddi_data.get_ficvf().transpose(1, 0, 2)[::-1, ::-1]
data_gfa = noddi_data.get_gfa().transpose(1, 0, 2)[::-1, ::-1]
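Examples #1, #2, and #5 define prediction helpers, and this script imports noddi_utils.predict, so a natural next step is a call like the sketch below; the module location of the helpers and the channel order are assumptions.

### PREDICT (sketch) ###
# Assumption: the helpers shown in Examples #1, #2 and #5 are exposed through
# the noddi_utils.predict module imported above.
prediction = predict.separate_2d(data_full, n_directions, random_seed=425)

odi_predicted = prediction[:, :, :, 0]   # assumed channel order: ODI, fISO, fICVF, GFA
print("Predicted ODI map shape: %s" % (odi_predicted.shape,))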
Code Example #7
import sys

from matplotlib import pyplot as plt

sys.path.append("/home/mirl/egibbons/noddi")

from model_2d import simple2d
from noddi_utils import noddistudy
from noddi_utils import subsampling
from recon import imtools
from utils import readhdf5

test_cases = ["P032315","P080715","P061114",
              "N011118A","N011118B"]

n_directions = 24

sampling_pattern = subsampling.gensamples(n_directions)

### LOAD DATA ###
patient_number = test_cases[0]
noddi_data = noddistudy.NoddiData(patient_number)

data_full = noddi_data.get_full()
data_odi = noddi_data.get_odi().transpose(1, 0, 2)[::-1, ::-1]
data_fiso = noddi_data.get_fiso().transpose(1, 0, 2)[::-1, ::-1]
data_ficvf = noddi_data.get_ficvf().transpose(1, 0, 2)[::-1, ::-1]
data_gfa = noddi_data.get_gfa().transpose(1, 0, 2)[::-1, ::-1]

max_y_path = "/v/raid1b/egibbons/MRIdata/DTI/noddi/max_y_2d.h5"
max_y = readhdf5.read_hdf5(max_y_path, "max_y")

### LOAD MODEL ###
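Example #7 is cut off right after the "### LOAD MODEL ###" marker; a hedged continuation following the pattern of Examples #2 and #5 is sketched below. The optimizer settings mirror those functions, and the weight filename is a placeholder, not the project's actual path.

from keras.optimizers import Adam  # not shown in the truncated import block above

image_size = (128, 128, n_directions)

model = simple2d.res2d(image_size)
model.compile(optimizer=Adam(lr=1e-3),
              loss="mean_absolute_error",
              metrics=["accuracy"])
# placeholder weight path for illustration; the real filename is not shown here
model.load_weights("/path/to/noddi_2d_weights.h5")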