Ejemplo n.º 1
0
def matches_on_rpc_roi(im1, im2, rpc1, rpc2, x, y, w, h):
    """
    Compute a list of SIFT matches between two images on a given roi.

    The roi in the second image that corresponds to the input roi is derived
    from the rpc functions.

    Args:
        im1, im2: paths to two large tif images
        rpc1, rpc2: two instances of the rpc_model.RPCModel class
        x, y, w, h: four integers defining the rectangular ROI in the first
            image. (x, y) is the top-left corner and (w, h) are the dimensions
            of the rectangle.

    Returns:
        matches: 2D numpy array containing a list of matches. Each line
            contains one pair of points, ordered as x1 y1 x2 y2.
            The coordinate system is that of the full images.
    """
    # locate the matching roi in the second image through the rpcs
    x2, y2, w2, h2 = rpc_utils.corresponding_roi(rpc1, rpc2, x, y, w, h)

    # approximate affine fundamental matrix estimated from rpc-derived matches
    # TODO FIXME: do we need to center the points as we do in the rectification module?
    rpc_matches = rpc_utils.matches_from_rpc(rpc1, rpc2, x, y, w, h, 5)
    F = estimation.affine_fundamental_matrix(rpc_matches)

    # retry with a halved detection threshold while too few matches are found
    # (an alternative would be ASIFT)
    thresh_dog = 0.0133
    attempts = 0
    while attempts < 6:
        attempts += 1
        params = '--thresh-dog %f' % thresh_dog
        p1 = image_keypoints(im1, x, y, w, h, extra_params=params)
        p2 = image_keypoints(im2, x2, y2, w2, h2, extra_params=params)
        matches = keypoints_match(p1, p2, 'relative',
                                  cfg['sift_match_thresh'], F,
                                  model='fundamental')
        if matches.shape[0] > 10:
            break
        thresh_dog /= 2.0
    return matches
Ejemplo n.º 2
0
def crop_rpc_and_image(out_dir, img, rpc, rpc_ref, x, y, w, h):
    """
    Crops an image and its rpc. The ROI may be defined on another image.

    Args:
        out_dir: path to the output directory. The cropped image and rpc files
            will be written there.
        img: path to the input image
        rpc: path to the input rpc
        rpc_ref: path to the rpc file of the reference image
        x, y, w, h: 4 integers defining a rectangular ROI in the reference
            image

    Note:
        Output files are numbered with the function attribute
        crop_rpc_and_image.counter, which must be initialized elsewhere
        before the first call.
    """
    r = rpc_model.RPCModel(rpc)

    # recompute the roi if the input image is not the reference image
    # NOTE(review): 'is not' compares object identity, not string equality;
    # two equal but distinct path strings would still trigger this branch.
    if rpc_ref is not rpc:
        r_ref = rpc_model.RPCModel(rpc_ref)
        x, y, w, h = rpc_utils.corresponding_roi(r_ref, r, x, y, w, h)

    # output filenames
    crop_rpc_and_image.counter += 1
    s = "_%02d" % crop_rpc_and_image.counter
    out_img_file = os.path.join(out_dir, "img%s.tif" % s)
    out_rpc_file = os.path.join(out_dir, "rpc%s.xml" % s)
    out_prv_file = os.path.join(out_dir, "prv%s.png" % s)

    # do the crop
    # translate the rpc model so it stays valid on the cropped image
    out_r = rpc_apply_crop_to_rpc_model(r, x, y, w, h)
    out_r.write(out_rpc_file)
    common.run('gdal_translate -srcwin %d %d %d %d "%s" "%s"' %
               (x, y, w, h, img, out_img_file))

    # do the preview: it has to fit a 1366x768 rectangle
    w = float(w)
    h = float(h)
    if w > 1366 or h > 768:
        # f is the downscaling factor needed to fit the 1366x768 box
        if w / h > float(1366) / 768:
            f = w / 1366
        else:
            f = h / 768
        tmp = common.tmpfile('.tif')
        common.image_zoom_gdal(out_img_file, f, tmp, w, h)
        # NOTE(review): unlike the crop command above, the paths in the two
        # commands below are unquoted — paths with spaces would break them.
        common.run('gdal_translate -of png -ot Byte -scale %s %s' %
                   (tmp, out_prv_file))
    else:
        common.run('gdal_translate -of png -ot Byte -scale %s %s' %
                   (out_img_file, out_prv_file))
    # drop the statistics sidecar file gdal_translate leaves next to the png
    common.run('rm %s.aux.xml' % out_prv_file)
Ejemplo n.º 3
0
def matches_from_sift_rpc_roi(im1, im2, rpc1, rpc2, x, y, w, h):
    """
    Computes a list of sift matches between two Pleiades images.

    Args:
        im1, im2: paths to two large tif images
        rpc1, rpc2: two instances of the rpc_model.RPCModel class
        x, y, w, h: four integers defining the rectangular ROI in the first
            image. (x, y) is the top-left corner, and (w, h) are the dimensions
            of the rectangle.

    Returns:
        matches: 2D numpy array containing a list of matches. Each line
            contains one pair of points, ordered as x1 y1 x2 y2.
            The coordinate system is that of the big images.
    """
    x1, y1, w1, h1 = x, y, w, h
    # the corresponding roi in the second image is obtained through the rpcs
    x2, y2, w2, h2 = rpc_utils.corresponding_roi(rpc1, rpc2, x, y, w, h)

    p1 = common.image_sift_keypoints(im1, x1, y1, w1, h1, max_nb=2000)
    p2 = common.image_sift_keypoints(im2, x2, y2, w2, h2, max_nb=2000)
    matches = common.sift_keypoints_match(p1, p2, 'relative',
                                          cfg['sift_match_thresh'])

    # Below is an alternative to ASIFT: lower the thresh_dog for the sift calls.
    # Default value for thresh_dog is 0.0133
    thresh_dog = 0.0133
    nb_sift_tries = 1
    # retry with a halved detection threshold, at most 5 more times, while
    # fewer than 10 matches are found
    while matches.shape[0] < 10 and nb_sift_tries < 6:
        nb_sift_tries += 1
        thresh_dog /= 2.0
        p1 = common.image_sift_keypoints(im1, x1, y1, w1, h1, None,
                                         '-thresh_dog %f' % thresh_dog)
        p2 = common.image_sift_keypoints(im2, x2, y2, w2, h2, None,
                                         '-thresh_dog %f' % thresh_dog)
        matches = common.sift_keypoints_match(p1, p2, 'relative',
                                              cfg['sift_match_thresh'])

    return matches
Ejemplo n.º 4
0
def matches_from_sift_rpc_roi(im1, im2, rpc1, rpc2, x, y, w, h):
    """
    Computes a list of sift matches between two Pleiades images.

    Args:
        im1, im2: paths to two large tif images
        rpc1, rpc2: two instances of the rpc_model.RPCModel class
        x, y, w, h: four integers defining the rectangular ROI in the first
            image. (x, y) is the top-left corner, and (w, h) are the dimensions
            of the rectangle.

    Returns:
        matches: 2D numpy array containing a list of matches. Each line
            contains one pair of points, ordered as x1 y1 x2 y2.
            The coordinate system is that of the big images.
    """
    x1, y1, w1, h1 = x, y, w, h
    # roi in the second image corresponding to the input roi, via the rpcs
    x2, y2, w2, h2 = rpc_utils.corresponding_roi(rpc1, rpc2, x, y, w, h)

    p1 = common.image_sift_keypoints(im1, x1, y1, w1, h1, max_nb=2000)
    p2 = common.image_sift_keypoints(im2, x2, y2, w2, h2, max_nb=2000)
    matches = common.sift_keypoints_match(p1, p2, 'relative',
                                          cfg['sift_match_thresh'])

    # Below is an alternative to ASIFT: lower the thresh_dog for the sift calls.
    # Default value for thresh_dog is 0.0133
    thresh_dog = 0.0133
    nb_sift_tries = 1
    # keep halving the detection threshold (up to 5 retries) while fewer
    # than 10 matches are found
    while matches.shape[0] < 10 and nb_sift_tries < 6:
        nb_sift_tries += 1
        thresh_dog /= 2.0
        p1 = common.image_sift_keypoints(im1, x1, y1, w1, h1, None, '-thresh_dog %f' % thresh_dog)
        p2 = common.image_sift_keypoints(im2, x2, y2, w2, h2, None, '-thresh_dog %f' % thresh_dog)
        matches = common.sift_keypoints_match(p1, p2, 'relative',
                                              cfg['sift_match_thresh'])

    return matches
Ejemplo n.º 5
0
Archivo: sift.py Proyecto: jguinet/s2p
def matches_on_rpc_roi(im1, im2, rpc1, rpc2, x, y, w, h):
    """
    Compute a list of SIFT matches between two images on a given roi.

    The corresponding roi in the second image is determined using the rpc
    functions.

    Args:
        im1, im2: paths to two large tif images
        rpc1, rpc2: two instances of the rpc_model.RPCModel class
        x, y, w, h: four integers defining the rectangular ROI in the first
            image. (x, y) is the top-left corner, and (w, h) are the dimensions
            of the rectangle.

    Returns:
        matches: 2D numpy array containing a list of matches. Each line
            contains one pair of points, ordered as x1 y1 x2 y2.
            The coordinate system is that of the full images.
    """
    x2, y2, w2, h2 = rpc_utils.corresponding_roi(rpc1, rpc2, x, y, w, h)

    # estimate an approximate affine fundamental matrix from the rpcs
    rpc_matches = rpc_utils.matches_from_rpc(rpc1, rpc2, x, y, w, h, 5)
    # TODO FIXME: do we need to center the points as we do in the rectification module?
    F = estimation.affine_fundamental_matrix(rpc_matches)

    # if less than 10 matches, lower thresh_dog. An alternative would be ASIFT
    thresh_dog = 0.0133
    # at most 6 attempts; the detection threshold is halved after every
    # attempt that yields 10 matches or fewer (the break needs at least 11)
    for i in range(6):
        p1 = image_keypoints(im1, x, y, w, h, extra_params='--thresh-dog %f' % thresh_dog)
        p2 = image_keypoints(im2, x2, y2, w2, h2, extra_params='--thresh-dog %f' % thresh_dog)
        # matching is filtered with the affine fundamental matrix F
        matches = keypoints_match(p1, p2, 'relative', cfg['sift_match_thresh'],
                                  F, model='fundamental')
        if matches.shape[0] > 10:
            break
        else:
            thresh_dog /= 2.0
    return matches
Ejemplo n.º 6
0
def plot_matches_pleiades(im1, im2, rpc1, rpc2, matches, x=None, y=None,
        w=None, h=None, outfile=None):
    """
    Plot matches on Pleiades images

    Args:
        im1, im2: paths to full Pleiades images
        rpc1, rpc2: paths to xml files containing the rpc coefficients
        matches: 2D numpy array of size 4xN containing a list of matches (a
            list of pairs of points, each pair being represented by x1, y1, x2,
            y2). The coordinates are given in the frame of the full images.
        x, y, w, h (optional, default is None): ROI in the reference image
        outfile (optional, default is None): path to the output file. If None,
            the file image is displayed using the pvflip viewer

    Returns:
        None
    """
    # if no matches, no plot
    # NOTE: parenthesized single-argument print works identically under
    # Python 2 (as a statement) and Python 3 (as a function call)
    if not matches.size:
        print("visualisation.plot_matches_pleiades: nothing to plot")
        return

    # read rpcs
    r1 = rpc_model.RPCModel(rpc1)
    r2 = rpc_model.RPCModel(rpc2)

    # roi in the first image: the given value when provided, otherwise the
    # bounding box of the matches
    x1 = x if x is not None else np.min(matches[:, 0])
    y1 = y if y is not None else np.min(matches[:, 1])
    w1 = w if w is not None else np.max(matches[:, 0]) - x1
    h1 = h if h is not None else np.max(matches[:, 1]) - y1

    # corresponding roi in the second image, determined through the rpcs
    x2, y2, w2, h2 = rpc_utils.corresponding_roi(r1, r2, x1, y1, w1, h1)

    # do the crops
    crop1 = common.image_qauto(common.image_crop_TIFF(im1, x1, y1, w1, h1))
    crop2 = common.image_qauto(common.image_crop_TIFF(im2, x2, y2, w2, h2))

    # compute matches coordinates in the cropped images
    pts1 = matches[:, 0:2] - [x1, y1]
    pts2 = matches[:, 2:4] - [x2, y2]

    # plot the matches on the two crops
    to_display = plot_matches(crop1, crop2, np.hstack((pts1, pts2)))
    if outfile is None:
        # display with the pvflip viewer, in the background
        os.system('v %s &' % (to_display))
    else:
        common.run('cp %s %s' % (to_display, outfile))

    return