Example #1
def getBatches(*args):
    # Producer for the training-data queue: args are the multiprocessing
    # queue, the process number (used as the random seed) and the config dict.
    q, p_number, config = args

    import random
    random.seed(p_number)
    atlas, itk_atlas = loadAtlas(config)

    data = loadOASISData()
    split_idx = int(len(data) * config['split'])
    train, test = data[:split_idx], data[split_idx:]

    volume_shape = config['resolution']

    # training volumes: everything after the validation share of the train split
    data_train = train[int(len(train) * config['validation']):]

    while True:
        # each sample stacks the atlas (channel 0) and a random volume (channel 1)
        minibatch = np.empty(shape=(config['batchsize'], *volume_shape, 2))

        for i in range(config['batchsize']):
            idx_volume = random.randrange(len(data_train))
            vol = readNormalizedVolumeByPath(data_train[idx_volume]['img'],
                                             itk_atlas)
            minibatch[i, :, :, :, 0] = atlas.reshape(volume_shape).astype("float32")
            minibatch[i, :, :, :, 1] = vol.reshape(volume_shape).astype("float32")

        q.put(minibatch)
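
getBatches is written as a long-running producer for a multiprocessing setup: it receives a queue, a per-process number (used as the random seed) and the config, then pushes two-channel minibatches forever. A minimal driver sketch, assuming getBatches is importable from this module; the config values shown here (including 'resolution') are illustrative placeholders, not the project's real settings:

import multiprocessing as mp

if __name__ == '__main__':
    config = {'batchsize': 4, 'split': 0.9, 'validation': 0.1,
              'resolution': (64, 64, 64), 'atlas': 'atlas.nii.gz'}

    q = mp.Queue(maxsize=8)   # bounded queue so producers don't run far ahead
    workers = [mp.Process(target=getBatches, args=(q, p, config), daemon=True)
               for p in range(4)]   # a distinct seed p per worker process
    for w in workers:
        w.start()

    # the training loop then just pulls ready-made minibatches:
    minibatch = q.get()       # shape: (batchsize, *resolution, 2)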
Example #2
def getTestData(config):
    atlas, itk_atlas = loadAtlas(config)
    data = loadOASISData()
    # test volumes: everything after the train/test split point
    data_test = data[int(len(data) * config['split']):]
    volume_shape = config['resolution']

    n = len(data_test)
    test = np.empty(shape=(n, *volume_shape, 2))

    for i in range(n):
        vol = readNormalizedVolumeByPath(data_test[i]['img'], itk_atlas)
        test[i, :, :, :, 0] = atlas.reshape(volume_shape).astype("float32")
        test[i, :, :, :, 1] = vol.reshape(volume_shape).astype("float32")

    return test
Example #3
def getValidationData(config):
    atlas, itk_atlas = loadAtlas(config)
    data = loadOASISData()
    split_idx = int(len(data) * config['split'])
    train, test = data[:split_idx], data[split_idx:]
    volume_shape = config['resolution']

    # validation volumes: the leading validation share of the train split
    data_val = train[:int(len(train) * config['validation'])]
    n = len(data_val)
    val = np.empty(shape=(n, *volume_shape, 2))

    for i in range(n):
        vol = readNormalizedVolumeByPath(data_val[i]['img'], itk_atlas)
        val[i, :, :, :, 0] = atlas.reshape(volume_shape).astype("float32")
        val[i, :, :, :, 1] = vol.reshape(volume_shape).astype("float32")

    return val
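
Both getTestData and getValidationData return a fixed array of shape (N, *resolution, 2), with the atlas in channel 0 and the subject volume in channel 1, so they can be built once and reused for evaluation. A short usage sketch with the same placeholder config values as above:

config = {'batchsize': 4, 'split': 0.9, 'validation': 0.1,
          'resolution': (64, 64, 64), 'atlas': 'atlas.nii.gz'}

val = getValidationData(config)    # shape (N_val,  64, 64, 64, 2)
test = getTestData(config)         # shape (N_test, 64, 64, 64, 2)
assert val.shape[1:] == (*config['resolution'], 2)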
Example #4
def generateAvgFromVolumes(vol_center, volumes, model):
    # Predict a velocity field for each (volume, center volume) pair, average
    # the fields, and warp the center volume with the averaged field.
    # Assumes module-level imports: numpy as np, tensorflow as tf (1.x),
    # the Keras backend as K, and SimpleITK as sitk.
    session = tf.Session()

    model_config = {
        'batchsize': 1,
        'split': 0.9,
        'validation': 0.1,
        'half_res': True,
        'epochs': 200,
        'groupnorm': True,
        'GN_groups': 32,
        'atlas': 'atlas.nii.gz',
        'model_output': 'model.pkl',
        'exponentialSteps': 7,
    }

    atlas, itk_atlas = DataGenerator.loadAtlas(model_config)

    m = DiffeomorphicRegistrationNet.create_model(model_config)
    m.load_weights(model)
    shapes = atlas.squeeze().shape

    print("First is : {}".format(vol_center))
    vol_first = vol_center
    np_vol_center = readNormalizedVolumeByPath(vol_first, itk_atlas).reshape(
        1, *shapes).astype(np.float32)

    velocities = []
    for vol in volumes:
        np_vol = readNormalizedVolumeByPath(vol, itk_atlas).reshape(
            1, *shapes).astype(np.float32)

        # stack the current volume (channel 0) and the center volume (channel 1),
        # the same two-channel layout the model was trained on
        np_stack = np.empty((1, *shapes, 2), dtype=np.float32)
        np_stack[:, :, :, :, 0] = np_vol
        np_stack[:, :, :, :, 1] = np_vol_center

        # the model's third output is the predicted (half-resolution) velocity field
        predictions = m.predict(np_stack)
        velocity = predictions[2][0, :, :, :, :]
        velocities.append(velocity)

    # average the predicted velocity fields (half resolution, 3 components each)
    avg_velocity = np.zeros((1, *[int(s / 2) for s in shapes], 3),
                            dtype=np.float32)
    for v in velocities:
        avg_velocity += v
    avg_velocity /= float(len(velocities))

    # apply squaring&scaling
    steps = model_config['exponentialSteps']
    tf_velo = tf.convert_to_tensor(
        avg_velocity.reshape(1, *[int(s / 2) for s in shapes], 3))
    tf_vol_center = tf.convert_to_tensor(np_vol_center.reshape(1, *shapes, 1))

    x, y, z = K.int_shape(tf_velo)[1:4]

    # clip too large values:
    v_max = 0.5 * (2**steps)
    v_min = -v_max
    velo = tf.clip_by_value(tf_velo, v_min, v_max)

    # 'ij' indexing keeps the meshgrid in (x, y, z) order instead of swapping to (y, x, z)
    grid = tf.expand_dims(
        tf.stack(
            tf.meshgrid(tf.linspace(0., x - 1., x),
                        tf.linspace(0., y - 1., y),
                        tf.linspace(0., z - 1., z),
                        indexing='ij'), -1), 0)

    # replicate along batch size
    stacked_grids = tf.tile(grid, (tf.shape(velo)[0], 1, 1, 1, 1))

    displacement = tfVectorFieldExpHalf(velo, stacked_grids, n_steps=steps)
    displacement_highres = toUpscaleResampled(displacement)
    # warp center volume
    new_warped = remap3d(tf_vol_center, displacement_highres)
    with session.as_default():
        new_volume = new_warped.eval(session=session).reshape(*shapes)

    vol_dirs = np.array(itk_atlas.GetDirection()).reshape(3, 3)
    # reapply directions
    warp_np = np.flip(new_volume,
                      [a for a in range(3) if vol_dirs[a, a] == -1.])
    # prepare axes swap from xyz to zyx
    warp_np = np.transpose(warp_np, (2, 1, 0))
    # write image
    warp_img = sitk.GetImageFromArray(warp_np)
    warp_img.SetOrigin(itk_atlas.GetOrigin())
    warp_img.SetDirection(itk_atlas.GetDirection())
    sitk.WriteImage(warp_img, "new_volume.nii.gz")
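
generateAvgFromVolumes takes the path of a center volume, the paths of the volumes to register it against, and the path of the trained weights (the config above names 'model.pkl' as model_output); it writes the warped result to new_volume.nii.gz in the working directory, oriented like the atlas image. A hedged usage sketch with placeholder file names:

center = 'OAS1_0001_MR1.nii.gz'
others = ['OAS1_0002_MR1.nii.gz', 'OAS1_0003_MR1.nii.gz']

generateAvgFromVolumes(center, others, 'model.pkl')
# -> writes "new_volume.nii.gz" next to the script

Note that the averaging happens on the half-resolution velocity fields; the averaged field is integrated by scaling and squaring, upsampled via toUpscaleResampled, and only then used to warp the full-resolution center volume.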