def intensity_based_registration_for_loop():
    """Batch intensity-based affine registration via mutual information.

    For every name in ``imagepaths``, reads the fixed image
    ``./data/image_data/{name}_t1.tif`` and the moving image
    ``./data/image_data/{name}_t1_d.tif``, runs ``num_iter`` gradient-ascent
    updates of the transformation parameters, live-plots the overlay and the
    learning curve, and saves the final figure to ``./data/image_results``.

    No parameters and no return value; edit the lists below (and the
    ``savename`` template at the bottom) to change what is processed.
    """

    # Images, learning rates and iteration counts to test.
    imagepaths = ['2_2', '2_3', '3_1', '3_2', '3_3']
    mu_s = [0.00018, 0.0001, 0.0002, 0.00013, 0.00015]
    iterationlist = [350, 350, 350, 350, 350]
    savenames = ['2_2', '2_3', '3_1', '3_2', '3_3']

    for i in range(len(imagepaths)):

        # read the fixed and moving images
        I = plt.imread('./data/image_data/{}_t1.tif'.format(imagepaths[i]))
        Im = plt.imread('./data/image_data/{}_t1_d.tif'.format(imagepaths[i]))

        # The similarity measure that drives the registration.
        # FIX: this was reg.affine_corr while the optimized function below was
        # hard-coded to reg.affine_mi; both now use the same callable, so the
        # parameter-vector initialization cannot go out of sync with it.
        similarity_measure = reg.affine_mi

        # Initial parameters: the identity transformation.
        if similarity_measure == reg.rigid_corr:
            # rigid: [rotation, tx, ty]
            x = np.array([0., 0., 0.])
        else:
            # affine: [rotation, scale_x, scale_y, shear_x, shear_y, tx, ty]
            # — scaling parameters start at 1, not 0
            x = np.array([0., 1., 1., 0., 0., 0., 0.])

        # Partially apply the similarity measure: fix the two images so the
        # only remaining input is the parameter vector x.
        fun = lambda x: similarity_measure(I, Im, x)

        # the learning rate for this image pair
        mu = mu_s[i]

        # number of gradient-ascent iterations for this image pair
        num_iter = iterationlist[i]

        iterations = np.arange(1, num_iter + 1)
        similarity = np.full((num_iter, 1), np.nan)

        fig = plt.figure(figsize=(14, 6))

        # left panel: fixed and moving image overlay, plus parameter readout
        ax1 = fig.add_subplot(121)

        # fixed image
        im1 = ax1.imshow(I)
        # moving image — FIX: display Im (not the fixed image I); it is
        # replaced by the transformed moving image every iteration anyway.
        im2 = ax1.imshow(Im, alpha=0.7)
        # parameter readout; rigid has 3 values (fits inside the axes),
        # affine has 7 (placed just above the axes so it does not overflow)
        if similarity_measure == reg.rigid_corr:
            txt = ax1.text(0.3,
                           0.95,
                           np.array2string(x, precision=5, floatmode='fixed'),
                           bbox={
                               'facecolor': 'white',
                               'alpha': 1,
                               'pad': 10
                           },
                           transform=ax1.transAxes)
        else:
            txt = ax1.text(-0.02,
                           1.02,
                           np.array2string(x, precision=5, floatmode='fixed'),
                           bbox={
                               'facecolor': 'white',
                               'alpha': 1,
                               'pad': 10
                           },
                           transform=ax1.transAxes)

        # right panel: the 'learning' curve (similarity per iteration)
        ax2 = fig.add_subplot(122, xlim=(0, num_iter), ylim=(0, 1))

        learning_curve, = ax2.plot(iterations, similarity, lw=2)
        ax2.set_title("mu =" + str(mu))
        ax2.set_xlabel('Iteration')
        ax2.set_ylabel('Similarity')
        ax2.grid()

        # perform 'num_iter' gradient ascent updates
        for k in np.arange(num_iter):

            # numerical gradient of the similarity w.r.t. x, then ascend
            g = reg.ngradient(fun, x)
            x += g * mu

            # re-evaluate for visualization (similarity + transformed image)
            S, Im_t, _ = fun(x)

            clear_output(wait=True)

            # update moving image and parameter readout
            im2.set_data(Im_t)
            txt.set_text(np.array2string(x, precision=5, floatmode='fixed'))

            # update 'learning' curve
            similarity[k] = S
            learning_curve.set_ydata(similarity)

            # display the figure
            display(fig)

        # save the final figure for this image pair
        savename = './data/image_results/{}_t1 + {}_t1_d affine_mi mu = {} integer = {}.png'.format(
            savenames[i], savenames[i], mu, num_iter)
        fig.savefig(savename)
        # FIX: close the figure so five full-size figures do not accumulate
        # in memory across loop iterations
        plt.close(fig)
# Example #2
def intensity_based_registration_demo(I,
                                      Im,
                                      mu=0.0005,
                                      num_iter=100,
                                      h=1e-3,
                                      x=None,
                                      type="affine",
                                      sim_meas="mi"):
    """Intensity-based registration of ``Im`` onto ``I`` by gradient ascent.

    Parameters:
        I: fixed image (2-D array as returned by ``plt.imread``).
        Im: moving image, same grid as ``I``.
        mu: learning rate for the gradient-ascent updates.
        num_iter: number of iterations.
        h: step size for the numerical gradient (``reg.ngradient``).
        x: initial transformation parameters; ``None`` means the identity
           affine transform ``[0, 1, 1, 0, 0, 0, 0]``. For ``type="rigid"``
           pass a 3-element vector explicitly.
        type: "affine" or "rigid" (case-insensitive). NOTE: shadows the
           builtin ``type``; kept for backward compatibility with callers.
        sim_meas: "mi" (mutual information) or "cc" (correlation),
           case-insensitive. "rigid" + "mi" is not implemented.

    Returns:
        List of parameter-vector copies, one per iteration (the optimization
        path).

    Raises:
        AssertionError: unknown ``type`` or ``sim_meas``.
        NotImplementedError: ``type="rigid"`` with ``sim_meas="mi"``.
    """
    # FIX: the default for x used to be a module-level np.array default
    # argument, which was mutated in place by `x += g * mu` below — every
    # call without an explicit x shared (and corrupted) the same start point.
    if x is None:
        x = np.array([0., 1., 1., 0., 0., 0., 0.])

    assert type.lower() in ["affine", "rigid"], "error: unknown type"
    assert sim_meas.lower() in ["mi",
                                "cc"], "error: unknown similarity measure"

    # FIX: the asserts accept any casing but the branching below compared the
    # raw strings, so e.g. type="Affine" passed validation yet silently took
    # the rigid branch. Normalize once, then branch.
    type = type.lower()
    sim_meas = sim_meas.lower()

    # Partially apply the chosen similarity measure: fix the two images so
    # the only remaining input is the parameter vector x.
    if type == "affine":
        if sim_meas == "mi":
            fun = lambda x: reg.affine_mi(I, Im, x)
        else:
            fun = lambda x: reg.affine_corr(I, Im, x)
    else:
        if sim_meas == "cc":
            fun = lambda x: reg.rigid_corr(I, Im, x)
        else:
            # FIX: the exception was constructed but never raised, so `fun`
            # stayed undefined and the loop died later with a NameError.
            raise NotImplementedError(
                "no functionality for type=rigid and sim_meas=mi")

    iterations = np.arange(1, num_iter + 1)
    similarity = np.full((num_iter, 1), np.nan)

    fig = plt.figure(figsize=(14, 6))

    # left panel: fixed and moving image overlay, plus parameter readout
    ax1 = fig.add_subplot(121)

    # fixed image
    im1 = ax1.imshow(I)
    # moving image (updated with the transformed image every iteration)
    im2 = ax1.imshow(Im, alpha=0.5)
    # parameter readout
    txt = ax1.text(0.3,
                   0.95,
                   np.array2string(x, precision=5, floatmode='fixed'),
                   bbox={
                       'facecolor': 'white',
                       'alpha': 1,
                       'pad': 10
                   },
                   transform=ax1.transAxes)

    # right panel: the 'learning' curve (similarity per iteration)
    ax2 = fig.add_subplot(122, xlim=(0, num_iter), ylim=(0, 2))

    learning_curve, = ax2.plot(iterations, similarity, lw=2)
    ax2.set_xlabel('Iteration')
    ax2.set_ylabel('Similarity')
    ax2.grid()

    path = []
    # perform 'num_iter' gradient ascent updates
    for k in np.arange(num_iter):
        # numerical gradient of the similarity w.r.t. x, then ascend
        g = reg.ngradient(fun, x, h=h)
        x += g * mu
        path.append(x.copy())

        # re-evaluate for visualization (similarity + transformed image)
        S, Im_t, _ = fun(x)

        clear_output(wait=True)

        # update moving image and parameter readout
        im2.set_data(Im_t)
        txt.set_text(np.array2string(x, precision=5, floatmode='fixed'))

        # update 'learning' curve
        similarity[k] = S
        learning_curve.set_ydata(similarity)

        # progress ping roughly every 10% of the run
        # FIX: int(num_iter / 10) is 0 for num_iter < 10 → ZeroDivisionError;
        # clamp the step to at least 1 (identical cadence for num_iter >= 10)
        if k % max(1, num_iter // 10) == 0:
            print("biep, {:4.0%}...".format(k / num_iter))

    print("helemaal klaar dr mee!")

    fig.show()
    return path
def intensity_based_registration_affine_mi(im1, im2):
    """Affine registration (mutual information) of two images given by path.

    Parameters:
        im1: file path of the fixed image (read with ``plt.imread``).
        im2: file path of the moving image.

    Runs 50 gradient-ascent iterations with a fixed learning rate of 0.00006,
    live-displaying the overlay, the parameter vector and the learning curve.
    No return value.
    """
    # read the fixed and moving images
    I = plt.imread(im1)
    Im = plt.imread(im2)

    # Initial parameters: the identity affine transformation
    # [rotation, scale_x, scale_y, shear_x, shear_y, tx, ty] —
    # scaling parameters start at 1, not 0.
    x = np.array([0., 1., 1., 0., 0., 0., 0.])

    # Partially apply the similarity measure: fix the two images so the only
    # remaining input is the parameter vector x.
    fun = lambda x: reg.affine_mi(I, Im, x)

    # the learning rate
    mu = 0.00006

    # number of gradient-ascent iterations
    num_iter = 50

    iterations = np.arange(1, num_iter + 1)
    similarity = np.full((num_iter, 1), np.nan)

    fig = plt.figure(figsize=(14, 6))

    # left panel: fixed and moving image overlay, plus parameter readout
    ax1 = fig.add_subplot(121)

    # FIX: the artist handles used to be assigned to `im1`/`im2`, silently
    # clobbering the path parameters of the same names; renamed.
    # fixed image
    fixed_artist = ax1.imshow(I)
    # moving-image placeholder (replaced by the transformed moving image
    # every iteration via set_data)
    moving_artist = ax1.imshow(I, alpha=0.7)
    # parameter readout
    txt = ax1.text(0.3,
                   0.95,
                   np.array2string(x, precision=5, floatmode='fixed'),
                   bbox={
                       'facecolor': 'white',
                       'alpha': 1,
                       'pad': 10
                   },
                   transform=ax1.transAxes)

    # right panel: the 'learning' curve (similarity per iteration)
    ax2 = fig.add_subplot(122, xlim=(0, num_iter), ylim=(0, 1))

    learning_curve, = ax2.plot(iterations, similarity, lw=2)
    ax2.set_xlabel('Iteration')
    ax2.set_ylabel('Similarity')
    ax2.grid()

    # perform 'num_iter' gradient ascent updates
    for k in np.arange(num_iter):

        # numerical gradient of the similarity w.r.t. x, then ascend
        g = reg.ngradient(fun, x)
        x += g * mu

        # re-evaluate for visualization (similarity + transformed image)
        S, Im_t, _ = fun(x)

        clear_output(wait=True)

        # update moving image and parameter readout
        moving_artist.set_data(Im_t)
        txt.set_text(np.array2string(x, precision=5, floatmode='fixed'))

        # update 'learning' curve
        similarity[k] = S
        learning_curve.set_ydata(similarity)

        display(fig)
# Example #4
def intensity_based_registration(I, Im, Affine=True, CC=True):
    """Register moving image ``Im`` onto fixed image ``I`` by gradient ascent.

    Parameters:
        I: fixed image (2-D array).
        Im: moving image.
        Affine: if True use an affine transform (7 parameters), otherwise a
            rigid transform (3 parameters).
        CC: if True use cross-correlation as the similarity measure; for the
            affine case False selects mutual information instead. (Rigid
            registration always uses cross-correlation.)

    Runs 200 iterations with learning rate 0.001, live-displaying the image
    overlay, the parameter vector and the similarity curve. No return value.
    """
    # Pick the initial (identity) parameter vector and the similarity
    # function; the images are captured in the closure so the optimizer only
    # sees the parameter vector.
    if not Affine:
        params = np.array([0., 0., 0.])
        objective = lambda p: reg.rigid_corr(I, Im, p)
    elif CC:
        params = np.array([0., 1., 1., 0., 0., 0., 0.])
        objective = lambda p: reg.affine_corr(I, Im, p)
    else:
        params = np.array([0., 1., 1., 0., 0., 0., 0.])
        objective = lambda p: reg.affine_mi(I, Im, p)

    # learning rate
    step_size = 0.001

    # iteration budget
    total_steps = 200

    step_axis = np.arange(1, total_steps + 1)
    scores = np.full((total_steps, 1), np.nan)

    figure = plt.figure(figsize=(14, 6))

    # left panel: image overlay and parameter readout
    overlay_ax = figure.add_subplot(121)

    # fixed image underneath
    fixed_img = overlay_ax.imshow(I)
    # semi-transparent layer on top; refreshed with the transformed moving
    # image on every iteration
    moving_img = overlay_ax.imshow(I, alpha=0.7)
    # textual readout of the current parameter vector
    param_text = overlay_ax.text(0.3,
                                 0.95,
                                 np.array2string(params,
                                                 precision=5,
                                                 floatmode='fixed'),
                                 bbox={
                                     'facecolor': 'white',
                                     'alpha': 1,
                                     'pad': 10
                                 },
                                 transform=overlay_ax.transAxes)

    # right panel: similarity per iteration ("learning curve")
    curve_ax = figure.add_subplot(122, xlim=(0, total_steps), ylim=(0, 1))

    curve_line, = curve_ax.plot(step_axis, scores, lw=2)
    curve_ax.set_xlabel('Iteration')
    curve_ax.set_ylabel('Similarity')
    curve_ax.grid()

    # gradient-ascent loop
    for step in range(total_steps):

        # numerical gradient, then one ascent step
        grad = reg.ngradient(objective, params)
        params += grad * step_size

        # re-evaluate to obtain the score and the transformed moving image
        score, warped, _ = objective(params)

        clear_output(wait=True)

        # refresh the overlay and the parameter readout
        moving_img.set_data(warped)
        param_text.set_text(
            np.array2string(params, precision=5, floatmode='fixed'))

        # extend the learning curve
        scores[step] = score
        curve_line.set_ydata(scores)

        display(figure)
# Example #5
def intensity_based_registration_affine_MI_adapted(I1_path,
                                                   Im1_path,
                                                   mu=0.001):
    """Affine registration via (adapted) mutual information, from file paths.

    Parameters:
        I1_path: file path of the fixed image (read with ``plt.imread``).
        Im1_path: file path of the moving image.
        mu: learning rate for the gradient-ascent updates.

    Returns:
        (similarity, fig): the (num_iter, 1) array of per-iteration
        similarity values and the matplotlib figure.
    """
    # ADAPTED (changes relative to the standard demo):
    # 1) A second closure `fun2` was added because three outputs
    #    (C, Im_t, Th) are needed for the visualization, while the adapted
    #    function `fun` is used for the gradient.
    #    NOTE(review): the original comment said fun2 wraps reg.affine_corr,
    #    but the code below wraps reg.affine_mi — confirm which was intended.
    # 2) The update is applied via a flattened temporary instead of a plain
    #    `x += g*mu`, to work around a shape/dimension error.
    # 3) x is flattened when passed to fun2, for the same reason.

    # read the fixed and moving images
    I = plt.imread(I1_path)
    Im = plt.imread(Im1_path)

    # Initial parameters: the identity affine transformation
    # [rotation, scale_x, scale_y, shear_x, shear_y, tx, ty] —
    # scaling parameters start at 1, not 0.
    x = np.array([0., 1., 1., 0., 0., 0., 0.])

    # Partially apply the similarity measures: fix the two images so the
    # only remaining input is the parameter vector x.
    # `fun` (adapted) drives the gradient; `fun2` produces the three outputs
    # used for visualization.
    fun = lambda x: reg_adapt.affine_mi_adapted(I, Im, x)
    fun2 = lambda x: reg.affine_mi(I, Im, x)
    # the learning rate is taken from the `mu` parameter

    # number of gradient-ascent iterations
    num_iter = 200

    iterations = np.arange(1, num_iter + 1)
    similarity = np.full((num_iter, 1), np.nan)

    fig = plt.figure(figsize=(16, 6))

    # left panel: fixed and moving image overlay
    ax1 = fig.add_subplot(121)

    # fixed image
    im1 = ax1.imshow(I)
    # moving-image placeholder (replaced by the transformed moving image
    # every iteration via set_data)
    im2 = ax1.imshow(I, alpha=0.7)
    # parameter readout as an in-axes text box was disabled in favor of the
    # axes title (see ax1.title.set_text in the loop below)
    #    txt = ax1.text(0.05, 0.95,
    #        np.array2string(x, precision=5, floatmode='fixed'),
    #        bbox={'facecolor': 'white', 'alpha': 1, 'pad': 10},
    #        transform=ax1.transAxes)
    #

    # right panel: the 'learning' curve (similarity per iteration)
    ax2 = fig.add_subplot(122, xlim=(0, num_iter), ylim=(0, 1))

    learning_curve, = ax2.plot(iterations, similarity, lw=2)
    ax2.set_xlabel('Iteration')
    ax2.set_ylabel('Similarity')
    ax2.title.set_text('\u03BC = ' + str(mu))
    ax2.grid()

    # perform 'num_iter' gradient ascent updates
    for k in np.arange(num_iter):

        # numerical gradient of the (adapted) similarity w.r.t. x
        g = reg.ngradient(fun, x)

        # ascend; flattening works around a shape/dimension mismatch in the
        # gradient returned by the adapted function
        temp = g * mu
        x += temp.flatten()

        # re-evaluate (original function) for visualization; x is flattened
        # for the same shape reason as above
        S, Im_t, _ = fun2(x.flatten())

        clear_output(wait=True)

        # update moving image; parameters are shown in the axes title
        im2.set_data(Im_t)
        #        txt.set_text(np.array2string(x, precision=5, floatmode='fixed'))
        ax1.title.set_text(np.array2string(x, precision=5, floatmode='fixed'))

        # update 'learning' curve
        similarity[k] = S
        learning_curve.set_ydata(similarity)

        display(fig)
    return similarity, fig