Example #1
def local_translation(r1, r2, x, y, w, h, m):
    """
    Estimates the optimal translation to minimise the relative pointing error
    on a given tile.

    Args:
        r1, r2: two instances of the rpc_model.RPCModel class
        x, y, w, h: region of interest in the reference image (r1)
        m: Nx4 numpy array containing a list of matches, one per line. Each
            match is given by (p1, p2, q1, q2) where (p1, p2) is a point of the
            reference view and (q1, q2) is the corresponding point in the
            secondary view.

    Returns:
        3x3 numpy array containing the homogeneous representation of the
        optimal planar translation, to be applied to the secondary image in
        order to correct the pointing error.
    """
    # estimate the affine fundamental matrix between the two views
    n = cfg['n_gcp_per_axis']
    rpc_matches = rpc_utils.matches_from_rpc(r1, r2, x, y, w, h, n)
    F = estimation.affine_fundamental_matrix(rpc_matches)

    # compute the error vectors
    e = error_vectors(m, F, 'sec')

    # compute the median: as the vectors are collinear (because F is affine)
    # computing the median of each component independently is correct
    N = len(e)
    out_x = np.sort(e[:, 0])[int(N / 2)]
    out_y = np.sort(e[:, 1])[int(N / 2)]

    # the correction to be applied to the second view is the opposite
    A = np.array([[1, 0, -out_x], [0, 1, -out_y], [0, 0, 1]])
    return A
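
A minimal usage sketch for local_translation; the file names, the ROI and the import line are placeholders/assumptions, not part of the original code:

from s2plib import rpc_model, sift, common  # hypothetical imports, matching the module names used above

r1 = rpc_model.RPCModel('rpc1.xml')   # placeholder paths
r2 = rpc_model.RPCModel('rpc2.xml')
x, y, w, h = 0, 0, 1000, 1000         # placeholder ROI in the reference image
m = sift.matches_on_rpc_roi('im1.tif', 'im2.tif', r1, r2, x, y, w, h)

A = local_translation(r1, r2, x, y, w, h, m)
# apply the correction to the points of the secondary view
q_corrected = common.points_apply_homography(A, m[:, 2:4])
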
Example #2
def matches_on_rpc_roi(im1, im2, rpc1, rpc2, x, y, w, h):
    """
    Compute a list of SIFT matches between two images on a given roi.

    The corresponding roi in the second image is determined using the rpc
    functions.

    Args:
        im1, im2: paths to two large tif images
        rpc1, rpc2: two instances of the rpc_model.RPCModel class
        x, y, w, h: four integers defining the rectangular ROI in the first
            image. (x, y) is the top-left corner, and (w, h) are the dimensions
            of the rectangle.

    Returns:
        matches: 2D numpy array containing a list of matches. Each line
            contains one pair of points, ordered as x1 y1 x2 y2.
            The coordinate system is that of the full images.
    """
    x2, y2, w2, h2 = rpc_utils.corresponding_roi(rpc1, rpc2, x, y, w, h)

    # estimate an approximate affine fundamental matrix from the rpcs
    rpc_matches = rpc_utils.matches_from_rpc(rpc1, rpc2, x, y, w, h, 5)
    F = estimation.affine_fundamental_matrix(rpc_matches)

    # if fewer than 10 matches are found, lower thresh_dog and retry; an alternative would be ASIFT
    thresh_dog = 0.0133
    for i in range(2):
        p1 = image_keypoints(im1,
                             x,
                             y,
                             w,
                             h,
                             extra_params='--thresh-dog %f' % thresh_dog)
        p2 = image_keypoints(im2,
                             x2,
                             y2,
                             w2,
                             h2,
                             extra_params='--thresh-dog %f' % thresh_dog)
        matches = keypoints_match(p1,
                                  p2,
                                  'relative',
                                  cfg['sift_match_thresh'],
                                  F,
                                  model='fundamental',
                                  epipolar_threshold=cfg['max_pointing_error'])
        if matches is not None and matches.ndim == 2 and matches.shape[0] > 10:
            break
        thresh_dog /= 2.0
    else:
        print("WARNING: sift.matches_on_rpc_roi: found no matches.")
        return None
    return matches
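
A hedged usage sketch; the paths are placeholders and the import is an assumption about the package layout. The for/else above retries once with a halved DoG threshold and returns None if both attempts fail:

from s2plib import rpc_model  # hypothetical import

r1 = rpc_model.RPCModel('rpc1.xml')   # placeholder paths
r2 = rpc_model.RPCModel('rpc2.xml')
matches = matches_on_rpc_roi('im1.tif', 'im2.tif', r1, r2, 0, 0, 1000, 1000)
if matches is None:
    raise RuntimeError('no SIFT matches found on this tile')
print(matches.shape)  # (N, 4): each row is x1 y1 x2 y2 in full-image coordinates
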
Example #3
def evaluation_from_estimated_F(im1,
                                im2,
                                rpc1,
                                rpc2,
                                x,
                                y,
                                w,
                                h,
                                A=None,
                                matches=None):
    """
    Measures the pointing error on a pair of Pleiades images, using an affine
    approximation of the cameras.

    Args:
        im1, im2: paths to the two Pleiades images (usually jp2 or tif)
        rpc1, rpc2: two instances of the rpc_model.RPCModel class
        x, y, w, h: four integers defining the rectangular ROI in the first image.
            (x, y) is the top-left corner, and (w, h) are the dimensions of the
            rectangle.
        A (optional): 3x3 numpy array containing the pointing error correction
            for im2.
        matches (optional): Nx4 numpy array containing a list of matches to use
            to compute the pointing error

    Returns:
        the mean pointing error, in the direction orthogonal to the epipolar
        lines. This error is measured in pixels, and computed from an
        approximated fundamental matrix.
    """
    if matches is None:
        matches = sift.matches_on_rpc_roi(im1, im2, rpc1, rpc2, x, y, w, h)
    p1 = matches[:, 0:2]
    p2 = matches[:, 2:4]
    print('%d sift matches' % len(matches))

    # apply pointing correction matrix, if available
    if A is not None:
        p2 = common.points_apply_homography(A, p2)

    # estimate the fundamental matrix between the two views
    rpc_matches = rpc_utils.matches_from_rpc(rpc1, rpc2, x, y, w, h, 5)
    F = estimation.affine_fundamental_matrix(rpc_matches)

    # compute the mean displacement from epipolar lines
    d_sum = 0
    for i in range(len(p1)):
        # homogeneous coordinates of the i-th match in each view
        x1 = np.array([p1[i, 0], p1[i, 1], 1])
        x2 = np.array([p2[i, 0], p2[i, 1], 1])
        ll = F.dot(x1)
        #d = np.sign(x2.dot(ll)) * evaluation.distance_point_to_line(x2, ll)
        d = evaluation.distance_point_to_line(x2, ll)
        d_sum += d
    return d_sum / len(p1)
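
A sketch comparing the mean epipolar error before and after a pointing correction, using local_translation from Example #1; the paths, the ROI and the import are placeholders/assumptions:

from s2plib import rpc_model, sift  # hypothetical imports

r1 = rpc_model.RPCModel('rpc1.xml')
r2 = rpc_model.RPCModel('rpc2.xml')
m = sift.matches_on_rpc_roi('im1.tif', 'im2.tif', r1, r2, 0, 0, 1000, 1000)

err_before = evaluation_from_estimated_F('im1.tif', 'im2.tif', r1, r2,
                                         0, 0, 1000, 1000, A=None, matches=m)
A = local_translation(r1, r2, 0, 0, 1000, 1000, m)
err_after = evaluation_from_estimated_F('im1.tif', 'im2.tif', r1, r2,
                                        0, 0, 1000, 1000, A=A, matches=m)
print(err_before, err_after)  # mean point-to-epipolar-line distance, in pixels
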
Example #4
def plot_pointing_error_tile(im1, im2, rpc1, rpc2, x, y, w, h,
        matches_sift=None, f=100, out_files_pattern=None):
    """
    Args:
        im1, im2: paths to the full images
        rpc1, rpc2: paths to the associated rpc xml files
        x, y, w, h: four integers defining the rectangular tile in the reference
            image. (x, y) is the top-left corner, and (w, h) are the dimensions
            of the tile.
        matches_sift (optional, default is None): Nx4 numpy array of precomputed
            SIFT matches; if None they are computed with sift.matches_on_rpc_roi
        f (optional, default is 100): exaggeration factor for the error vectors
        out_files_pattern (optional, default is None): pattern used to name the
            two output files (plots of the pointing error)

    Returns:
        nothing, but opens a display
    """
    # read rpcs
    r1 = rpc_model.RPCModel(rpc1)
    r2 = rpc_model.RPCModel(rpc2)

    # compute sift matches
    if matches_sift is None:
        matches_sift = sift.matches_on_rpc_roi(im1, im2, r1, r2, x, y, w, h)

    # compute rpc matches
    matches_rpc = rpc_utils.matches_from_rpc(r1, r2, x, y, w, h, 5)

    # estimate affine fundamental matrix
    F = estimation.affine_fundamental_matrix(matches_rpc)

    # compute error vectors
    e = s2plib.pointing_accuracy.error_vectors(matches_sift, F, 'ref')

    A = s2plib.pointing_accuracy.local_translation(r1, r2, x, y, w, h, matches_sift)
    p = matches_sift[:, 0:2]
    q = matches_sift[:, 2:4]
    qq = common.points_apply_homography(A, q)
    ee = s2plib.pointing_accuracy.error_vectors(np.hstack((p, qq)), F, 'ref')
    print(s2plib.pointing_accuracy.evaluation_from_estimated_F(im1, im2,
        r1, r2, x, y, w, h, None, matches_sift))
    print(s2plib.pointing_accuracy.evaluation_from_estimated_F(im1, im2,
        r1, r2, x, y, w, h, A, matches_sift))

    # plot the vectors: they go from the point x to the line (F.T)x'
    plot_vectors(p, -e, x, y, w, h, f, out_file='%s_before.png' % out_files_pattern)
    plot_vectors(p, -ee, x, y, w, h, f, out_file='%s_after.png' % out_files_pattern)
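
A hedged call sketch; unlike the previous examples, rpc1 and rpc2 are paths to the RPC XML files here. All paths and the tile coordinates are placeholders:

# writes <out_files_pattern>_before.png and <out_files_pattern>_after.png
plot_pointing_error_tile('im1.tif', 'im2.tif', 'rpc1.xml', 'rpc2.xml',
                         500, 500, 2000, 2000,
                         f=100, out_files_pattern='/tmp/pointing_error')
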
Example #6
def rectification_homographies(matches, x, y, w, h, hmargin=0, vmargin=0):
    """
    Computes rectifying homographies from point matches for a given ROI.

    The affine fundamental matrix F is estimated with the gold-standard
    algorithm, then two rectifying similarities (rotation, zoom, translation)
    are computed directly from F.

    Args:
        matches: numpy array of shape (n, 4) containing a list of 2D point
            correspondences between the two images.
        x, y, w, h: four integers defining the rectangular ROI in the first
            image. (x, y) is the top-left corner, and (w, h) are the dimensions
            of the rectangle.
        {h,v}margin: translations added to the rectifying similarities to extend the
            horizontal and vertical footprint of the rectified images

    Returns:
        S1, S2, F: three numpy arrays of shape (3, 3) representing the
        two rectifying similarities to be applied to the two images and the
        corresponding affine fundamental matrix.
    """
    # estimate the affine fundamental matrix with the Gold standard algorithm
    F = estimation.affine_fundamental_matrix(matches)

    # compute rectifying similarities
    S1, S2 = estimation.rectifying_similarities_from_affine_fundamental_matrix(
        F, cfg['debug'])

    if cfg['debug']:
        y1 = common.points_apply_homography(S1, matches[:, :2])[:, 1]
        y2 = common.points_apply_homography(S2, matches[:, 2:])[:, 1]
        err = np.abs(y1 - y2)
        print("max, min, mean rectification error on point matches: ", end=' ')
        print(np.max(err), np.min(err), np.mean(err))

    # pull back top-left corner of the ROI to the origin (plus margin)
    pts = common.points_apply_homography(
        S1, [[x, y], [x + w, y], [x + w, y + h], [x, y + h]])
    x0, y0 = common.bounding_box2D(pts)[:2]
    T = common.matrix_translation(-x0 + hmargin, -y0 + vmargin)
    return np.dot(T, S1), np.dot(T, S2), F
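
A usage sketch that mirrors the debug check inside the function: after applying S1 and S2, corresponding points should lie on (almost) the same row. It assumes r1, r2, the ROI and the cfg dictionary are set up as in the other examples; the margins and the imports are placeholders/assumptions:

import numpy as np
from s2plib import rpc_utils, common  # hypothetical imports

matches = rpc_utils.matches_from_rpc(r1, r2, x, y, w, h, 5)
S1, S2, F = rectification_homographies(matches, x, y, w, h, hmargin=50, vmargin=10)

y1 = common.points_apply_homography(S1, matches[:, :2])[:, 1]
y2 = common.points_apply_homography(S2, matches[:, 2:])[:, 1]
print(np.max(np.abs(y1 - y2)))  # should be small for an affine camera model
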
Example #8
def local_translation_rectified(r1, r2, x, y, w, h, m):
    """
    Estimates the optimal translation to minimise the relative pointing error
    on a given tile.

    Args:
        r1, r2: two instances of the rpc_model.RPCModel class
        x, y, w, h: region of interest in the reference image (r1)
        m: Nx4 numpy array containing a list of matches, one per line. Each
            match is given by (p1, p2, q1, q2) where (p1, p2) is a point of the
            reference view and (q1, q2) is the corresponding point in the
            secondary view.

    Returns:
        A, F: the 3x3 numpy array containing the homogeneous representation of
            the optimal planar translation, to be applied to the secondary
            image in order to correct the pointing error, and the affine
            fundamental matrix estimated from the rpc matches.
    """
    # estimate the affine fundamental matrix between the two views
    n = cfg['n_gcp_per_axis']
    rpc_matches = rpc_utils.matches_from_rpc(r1, r2, x, y, w, h, n)
    F = estimation.affine_fundamental_matrix(rpc_matches)

    # compute the rectifying similarities S1, S2 for the tile
    # (as in local_translation_rotation)
    hmargin = cfg['horizontal_margin']
    vmargin = cfg['vertical_margin']
    S1, S2, F_rect = rectification.rectification_homographies(rpc_matches, x, y,
                                                              w, h, hmargin,
                                                              vmargin)
    N = len(m)

    # Apply rectification to the points of image 1
    S1p1 = common.points_apply_homography(S1, m[:, 0:2])
    S1p1 = np.column_stack((S1p1, np.ones((N, 1))))
    print("Points 1 rectified")
    print(S1p1[:10])

    # Apply rectification to the points of image 2
    S2p2 = common.points_apply_homography(S2, m[:, 2:4])
    S2p2 = np.column_stack((S2p2, np.ones((N, 1))))
    print("Points 2 rectified")
    print(S2p2[:10])

    # Compute F in the rectified space
    rect_matches = np.column_stack((S1p1[:,0:2], S2p2[:, 0:2]))
    F_rect2 = estimation.affine_fundamental_matrix(rect_matches)

    # Compute epipolar lines
    FS1p1 = np.dot(F_rect2, S1p1.T).T
    print(FS1p1[:10])

    # Normalize epipolar lines
    c1 = -FS1p1[:, 1]
    FS1p1_norm = FS1p1/c1[:, np.newaxis]
    print(FS1p1_norm[:10])


    # signed vertical offset between the rectified matches
    b_ = S2p2[:, 1] - S1p1[:, 1]
    t_med = np.median(b_)
    print(t_med)

    t_med2 = np.sort(b_)[int(N/2)]
    print("t_med2", t_med2)

    # Compute epipolar lines without rectification
    p1 = np.column_stack((m[:, 0:2], np.ones((N, 1))))
    Fp1 = np.dot(F, p1.T).T

    # Compute the unit normal vector to the epipolar lines
    ab = np.array([Fp1[0][0], Fp1[0][1]])
    ab = ab / np.linalg.norm(ab)

    tx = t_med*ab[0]
    ty = t_med*ab[1]
    print(tx, ty)

    # Express the correction in the frame of the (non-rectified) secondary image
    T = common.matrix_translation(0, -t_med)
    T = np.linalg.inv(S2).dot(T).dot(S2)
    # T = np.dot(S2_inv, T)
    print(T)

    # the correction to be applied to the second view is the opposite of the
    # median offset, which is already encoded in T
    A = T
    return A, F
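
A toy numpy check of the frame change used above: a translation T expressed in the rectified frame maps back to the original frame as S2^{-1} T S2, so applying S2 and then T is the same as applying the mapped translation and then S2. The matrices below are arbitrary illustrative values:

import numpy as np

S2 = np.array([[0.8, 0.6, 10.0],
               [-0.6, 0.8, 20.0],
               [0.0, 0.0, 1.0]])       # some invertible rectifying similarity
T_rect = np.array([[1.0, 0.0, 0.0],
                   [0.0, 1.0, -3.5],
                   [0.0, 0.0, 1.0]])   # shift of -3.5 pixels along rectified y
T_orig = np.linalg.inv(S2).dot(T_rect).dot(S2)

p = np.array([100.0, 200.0, 1.0])      # an arbitrary point, homogeneous coords
print(S2.dot(T_orig.dot(p)) - T_rect.dot(S2.dot(p)))  # ~ [0, 0, 0]
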
Example #9
def local_translation_rotation(r1, r2, x, y, w, h, m):
    """
    Estimates the optimal translation to minimise the relative pointing error
    on a given tile.

    Args:
        r1, r2: two instances of the rpc_model.RPCModel class
        x, y, w, h: region of interest in the reference image (r1)
        m: Nx4 numpy array containing a list of matches, one per line. Each
            match is given by (p1, p2, q1, q2) where (p1, p2) is a point of the
            reference view and (q1, q2) is the corresponding point in the
            secondary view.

    Returns:
        A, F: the 3x3 numpy array containing the homogeneous representation of
            the optimal planar translation (along the normal to the epipolar
            lines), to be applied to the secondary image in order to correct
            the pointing error, and the affine fundamental matrix estimated
            from the rpc matches.
    """
    # estimate the affine fundamental matrix between the two views
    n = cfg['n_gcp_per_axis']
    rpc_matches = rpc_utils.matches_from_rpc(r1, r2, x, y, w, h, n)
    F = estimation.affine_fundamental_matrix(rpc_matches)

    # Compute rectification homographies
    # S1, S2 = estimation.rectifying_similarities_from_affine_fundamental_matrix(F, cfg['debug'])
    hmargin = cfg['horizontal_margin']
    vmargin = cfg['vertical_margin']
    S1, S2, F_rect = rectification.rectification_homographies(rpc_matches, x, y, w, h, hmargin, vmargin)

    N = len(m)
    # Apply rectification to the points of image 1
    S1p1 = common.points_apply_homography(S1, m[:, 0:2])
    S1p1 = np.column_stack((S1p1, np.ones((N, 1))))
    print("Points 1 rectified")
    print(S1p1[:10])

    # Apply rectification to the points of image 2
    S2p2 = common.points_apply_homography(S2, m[:, 2:4])
    S2p2 = np.column_stack((S2p2, np.ones((N, 1))))
    print("Points 2 rectified")
    print(S2p2[:10])

    # Compute F in the rectified space
    rect_matches = np.column_stack((S1p1[:,0:2], S2p2[:, 0:2]))
    F_rect2 = estimation.affine_fundamental_matrix(rect_matches)

    # Compute epipolar lines
    FS1p1 = np.dot(F_rect2, S1p1.T).T
    print(FS1p1[:10])

    # Normalize epipolar lines
    c1 = -FS1p1[:, 1]
    FS1p1_norm = FS1p1/c1[:, np.newaxis]
    print(FS1p1_norm[:10])


    # Variable of optimization problem
    A_ = np.ones((N,1))
    # A_ = np.column_stack((S2p2[:,0].reshape(N, 1),np.ones((N,1))))
    # b_ = S2p2[:, 1] - FS1p1_norm[:, 2]
    b_ = S2p2[:, 1] - S1p1[:, 1]
    t_med = np.median(b_)
    print(t_med)

    t_med2 = np.sort(b_)[int(N/2)]
    print("t_med2", t_med2)

    # min ||Ax + b||^2 => x = - (A^T A )^-1 A^T b
    # X_ = - np.dot(np.linalg.inv(np.dot(A_.T, A_)), np.dot(A_.T, b_))
    # [theta, t] = X_
    # print(t, theta)
    # t = X_[0]

    # Compute epipolar lines without rectification
    p1 = np.column_stack((m[:, 0:2], np.ones((N, 1))))
    Fp1 = np.dot(F, p1.T).T

    # Compute the unit normal vector to the epipolar lines
    ab = np.array([Fp1[0][0], Fp1[0][1]])
    ab = ab / np.linalg.norm(ab)

    tx = t_med*ab[0]
    ty = t_med*ab[1]
    print(tx, ty)

    # Get the translation expressed in the frame of the non-rectified image
    T = common.matrix_translation(0, -t_med)
    T = np.linalg.inv(S2).dot(T).dot(S2)
    # T = np.dot(S2_inv, T)
    print(T)

    theta = 0
    cos_theta = np.cos(theta)
    sin_theta = np.sin(theta)
    A = np.array([[cos_theta, -sin_theta, tx],
                  [sin_theta,  cos_theta, ty],
                  [0, 0, 1]])
    return A, F
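
A hedged usage sketch, assuming im1, im2, r1, r2, the ROI and the matches m are defined as in the other examples; since theta is fixed to 0 above, the returned A is a pure translation along the normal to the epipolar lines:

from s2plib import common  # hypothetical import

A, F = local_translation_rotation(r1, r2, x, y, w, h, m)
q_corrected = common.points_apply_homography(A, m[:, 2:4])
# re-measure the mean epipolar error with the correction applied
err = evaluation_from_estimated_F(im1, im2, r1, r2, x, y, w, h, A, m)
print(err)
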
Example #10
def local_transformation(r1, r2, x, y, w, h, m,
                         pointing_error_correction_method=2, apply_rectification=True):
    """
    Estimates the optimal rotation composed with a translation to minimise the
    relative pointing error on a given tile.

    Args:
        r1, r2: two instances of the rpc_model.RPCModel class
        x, y, w, h: region of interest in the reference image (r1)
        m: Nx4 numpy array containing a list of matches, one per line. Each
            match is given by (p1, p2, q1, q2) where (p1, p2) is a point of the
            reference view and (q1, q2) is the corresponding point in the
            secondary view.
        pointing_error_correction_method (optional, default is 2): number of
            parameters optimized in the rectified frame (1: translation along
            the normal to the epipolar lines, 2: translation and rotation);
            an extra parameter is added when apply_rectification is False
        apply_rectification (optional, default is True): if True, the
            translation is parametrized by a single offset along the normal to
            the epipolar lines; otherwise tx and ty are optimized directly

    Returns:
        A, F: the 3x3 numpy array containing the homogeneous representation of
            the optimal planar rotation composed with a translation, to be
            applied to the secondary image in order to correct the pointing
            error, and the affine fundamental matrix estimated from the rpc
            matches.
    """
    # estimate the affine fundamental matrix between the two views
    n = cfg['n_gcp_per_axis']
    rpc_matches = rpc_utils.matches_from_rpc(r1, r2, x, y, w, h, n)
    F = estimation.affine_fundamental_matrix(rpc_matches)

    args_opti = (F, m, apply_rectification)

    # Solve the resulting optimization problem
    from scipy.optimize import fmin_l_bfgs_b

    # number of variables of the optimization problem: in the rectified frame a
    # single offset along the normal to the epipolar lines describes the
    # translation, otherwise tx and ty are optimized separately
    number_variables = pointing_error_correction_method
    if not apply_rectification:
        number_variables += 1

    v0 = np.zeros(number_variables)
    v, min_val, debug = fmin_l_bfgs_b(
            cost_function,
            v0,
            args=args_opti,
            approx_grad=True,
            factr=1,
            #bounds=[(-150, 150), (-100, 100), (-100, 100)])
            #maxiter=50,
            callback=print_params)
            #iprint=0,
            #disp=0)


    # compute epipolar lines
    p1 = np.column_stack((m[:,0:2],np.ones((len(m),1))))
    Fp1 = np.dot(F, np.transpose(p1)).T


    if apply_rectification:
        # Compute the unit normal vector to the epipolar lines
        ab = np.array([Fp1[0][0], Fp1[0][1]])
        ab = ab / np.linalg.norm(ab)

        # Compute translation
        t = v[0]
        tx = t*ab[0]
        ty = t*ab[1]
        # theta
        if pointing_error_correction_method > 1:
            theta = v[1] / 10
        else:
            theta = 0
    else:
        ab = []
        tx = v[0]
        ty = v[1]
        # theta
        if pointing_error_correction_method > 1:
            theta = v[2] / 10
        else:
            theta = 0

    print('tx: %f' % tx)
    print('ty: %f' % ty)
    print('theta: %f' % theta)
    if len(v) > 4:
        print('cx: %f' % v[3])
        print('cy: %f' % v[4])
    print('min cost: %f' % min_val)
    print(debug)
    A = rigid_transform_matrix(v, ab)
    print('A:', A)
    return A, F
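
A hedged call sketch; cost_function, print_params and rigid_transform_matrix are assumed to be defined in the same module (they are not shown in these examples), and the inputs are assumed to be set up as in the other examples:

from s2plib import common  # hypothetical import

# optimize a translation (and a rotation, since the method parameter is 2) in
# the rectified frame, then apply the correction to the secondary points
A, F = local_transformation(r1, r2, x, y, w, h, m,
                            pointing_error_correction_method=2,
                            apply_rectification=True)
q_corrected = common.points_apply_homography(A, m[:, 2:4])
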