def test_deprecated_params_attributes():
    for t in ('projective', 'affine', 'similarity'):
        tform = estimate_transform(t, SRC, DST)
        assert_equal(tform._matrix, tform.params)

    tform = estimate_transform('polynomial', SRC, DST, order=3)
    assert_equal(tform._params, tform.params)
Example #3
def getLocalTransform(newOrigin, newDestiny, method='affine'):
    # FIRST NORMAL
    if (method == 'affine'):
        try:
            # if(self.matrixrank(self.coordRef[neighs])>0):
            #hn = af.Affine_Fit(newOrigin, newDestiny)
            if newOrigin.shape[0] < 5:
                transform = tf.estimate_transform('similarity', newOrigin,
                                                  newDestiny)
            else:
                transform = tf.estimate_transform('affine', newOrigin,
                                                  newDestiny)
            hn = transform.params
            # hn,mask = cv2.findHomography(newOrigin,newDestiny)
            message = "Using affine fit with " + str(
                newOrigin.shape) + " neighbors."
            return hn, message
        except np.linalg.LinAlgError as err:
            return [], str(err)
        except FloatingPointError as err:
            return [], str(err)

    else:
        if newOrigin.shape[0] < 10:
            transform = tf.estimate_transform('similarity', newOrigin,
                                              newDestiny)
            hn = transform.params
        else:
            hn = cv2.findHomography(newOrigin,
                                    newDestiny)  # global are maintained
    return hn
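
A minimal usage sketch (the point sets are made up; it assumes the snippet's own `numpy as np` and `skimage.transform as tf` imports):

pts_src = np.array([[0., 0.], [1., 0.], [0., 1.], [1., 1.]])
pts_dst = pts_src * 1.5 + 2.0
# fewer than 5 points, so the function falls back to a similarity fit
hn, message = getLocalTransform(pts_src, pts_dst, method='affine')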
Example #4
def test_estimate_transform():
    for tform in ('euclidean', 'similarity', 'affine', 'projective',
                  'polynomial'):
        estimate_transform(tform, SRC[:2, :], DST[:2, :])
    with pytest.raises(ValueError):
        estimate_transform('foobar',
                           SRC[:2, :], DST[:2, :])
Example #6
def test_deprecated_params_attributes():
    for t in ('projective', 'affine', 'similarity'):
        tform = estimate_transform(t, SRC, DST)
        with expected_warnings(['`_matrix`.*deprecated']):
            assert_equal(tform._matrix, tform.params)

    tform = estimate_transform('polynomial', SRC, DST, order=3)
    with expected_warnings(['`_params`.*deprecated']):
        assert_equal(tform._params, tform.params)
Example #8
def applyGeometricTransformation(startXs, startYs, newXs, newYs, bbox, frame=None):
	N, F = startXs.shape

	newXs_th, newYs_th = np.zeros((N, F)), np.zeros((N, F))

	newbbox = np.zeros((F, 4, 2), dtype=int)

	for f in range(F):
		startX, startY = startXs[:, f], startYs[:, f]

		newX, newY = newXs[:, f], newYs[:, f]
		valid_idx = np.logical_and(newX > -1, newY > -1)

		startX, startY = startX[valid_idx], startY[valid_idx]
		newX, newY = newX[valid_idx], newY[valid_idx]

		n = len(startX)

		src = np.hstack((startX.reshape((n,1)), startY.reshape((n,1))))
		dst = np.hstack((newX.reshape((n,1)), newY.reshape((n,1))))
		
		if COMBINE:
			if frame is not None and frame > 170 and frame < 208:
				sform = tf.estimate_transform(TRANSFORM_COMBINE, src, dst)
			else:
				sform = tf.estimate_transform(TRANSFORM, src, dst)
		else:
			sform = tf.estimate_transform(TRANSFORM, src, dst)

		newXY_pred = sform(src)
		error = np.sqrt(np.sum((newXY_pred - dst)**2, axis=1))
		idx = error < ERROR_TH
		num_inliers = np.sum(idx)

		if frame is not None:
			print("Num of inliers: ", num_inliers, "Frame", frame, "min error: ", np.min(error), "max error", np.max(error))
		else:
			print("Num of inliers:", num_inliers)

		if(num_inliers <= 2):
			return None, None, None
		
		newXY_af = dst.copy()
		newXY_af[np.logical_not(idx)] = [-1, -1]

		newX_th, newY_th = newXY_af[:, 0], newXY_af[:, 1]
		newX_th, newY_th = np.round(newX_th).astype(int), np.round(newY_th).astype(int)

		n_th = len(newX_th)
		newXs_th[:n_th, f], newYs_th[:n_th, f] = newX_th, newY_th
		newXs_th[n_th:, f], newYs_th[n_th:, f] = -1, -1

		b = bbox[f]
		new_b = sform(b)
		newbbox[f] = np.round(new_b).astype(int)

	return newXs_th, newYs_th, newbbox
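
The snippet above relies on module-level constants that are not shown; a plausible (hypothetical) setup:

COMBINE = False              # whether to switch transform types on some frames
TRANSFORM = 'similarity'     # default ttype passed to tf.estimate_transform
TRANSFORM_COMBINE = 'affine'
ERROR_TH = 4.0               # inlier threshold on re-projection error (pixels)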
Example #9
def test_projective_estimation():
    # exact solution
    tform = estimate_transform('projective', SRC[:4, :], DST[:4, :])
    assert_almost_equal(tform(SRC[:4, :]), DST[:4, :])

    # over-determined
    tform2 = estimate_transform('projective', SRC, DST)
    assert_almost_equal(tform2.inverse(tform2(SRC)), SRC)

    # via estimate method
    tform3 = ProjectiveTransform()
    tform3.estimate(SRC, DST)
    assert_almost_equal(tform3.params, tform2.params)
Example #10
def test_affine_estimation():
    # exact solution
    tform = estimate_transform('affine', SRC[:3, :], DST[:3, :])
    assert_array_almost_equal(tform(SRC[:3, :]), DST[:3, :])

    # over-determined
    tform2 = estimate_transform('affine', SRC, DST)
    assert_array_almost_equal(tform2.inverse(tform2(SRC)), SRC)

    # via estimate method
    tform3 = AffineTransform()
    tform3.estimate(SRC, DST)
    assert_array_almost_equal(tform3._matrix, tform2._matrix)
Example #12
def warp_2_ims(im_no_mu, im_mu):
    pts1 = get_lms_all(im_no_mu)
    pts2 = get_lms_all(im_mu)
    pts_av = (pts1 + pts2) / 2
    pts1 = add_boundaries(pts1, im_no_mu)
    pts2 = add_boundaries(pts2, im_mu)
    pts_av = add_boundaries(pts_av, im_mu)
    M1 = estimate_transform('piecewise-affine', pts2, pts_av)
    warped_im1 = warp(im_mu, M1.inverse, output_shape=im_size, mode='edge')
    M2 = estimate_transform('piecewise-affine', pts1, pts_av)
    warped_im2 = warp(im_no_mu, M2.inverse, output_shape=im_size, mode='edge')
    added_ims = np.array((warped_im1 / 2 + warped_im2 / 2) * 255,
                         dtype=np.uint8)
    all_ims = np.hstack((im_no_mu, im_mu, added_ims))
    return added_ims
Example #13
def get_transfrom_matrix(center_scale, output_size):
    """get transform matrix
    """
    center, scale = center_scale[0], center_scale[1]
    # todo: further add rot and shift here.
    src_w = scale[0]
    dst_w = output_size[0]
    dst_h = output_size[1]

    src_dir = np.array([0, src_w * -0.5])
    dst_dir = np.array([0, dst_w * -0.5])

    src = np.zeros((3, 2), dtype=np.float32)
    dst = np.zeros((3, 2), dtype=np.float32)
    src[0, :] = center
    src[1, :] = center + src_dir
    dst[0, :] = np.array([dst_w * 0.5, dst_h * 0.5])
    dst[1, :] = np.array([dst_w * 0.5, dst_h * 0.5]) + dst_dir

    src[2, :] = get_3rd_point(src[0, :], src[1, :])
    dst[2, :] = get_3rd_point(dst[0, :], dst[1, :])

    get_matrix = trans.estimate_transform("affine", src, dst)
    matrix = get_matrix.params

    return matrix.astype(np.float32)
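
`get_3rd_point` is not included in the snippet; a common definition (assumed here, matching the CenterNet-style cropping code this resembles) completes the three-point correspondence by rotating the first edge 90 degrees:

def get_3rd_point(point_a, point_b):
    # third corner so the three points are not collinear
    d = point_a - point_b
    return point_b + np.array([-d[1], d[0]], dtype=np.float32)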
Example #14
def alignment(fp_file, image_path, save_path, ref_points=[]):
    point_lines = read_file(fp_file)
    image_lines = read_file(image_path)

    if not point_lines or not image_lines:
        print('empty file')
        return

    if len(ref_points) == 0:
        ref_points = np.float32(point_lines[0].split()).reshape((5, 2))

    i = 0
    for (point_line, image_line) in zip(point_lines, image_lines):
        image_line = image_line.strip('\n')
        point_line = point_line.strip('\n')

        img_name = image_line.split()[0]
        dir_name = image_line.split('/')[0]
        exist_dirs = os.listdir(TARGET_ROOT)
        if dir_name not in exist_dirs:
            os.mkdir(TARGET_ROOT + dir_name)

        img = skimage.io.imread(DATA_ROOT + img_name)
        pts = np.float32(point_line.split()).reshape((5, 2))

        tfrom = tf.estimate_transform('similarity', ref_points, pts)
        warpimage = tf.warp(img, inverse_map=tfrom, output_shape=imgSize)
        skimage.io.imsave(save_path + img_name, warpimage)

        i = i + 1
        print('\riter %d' % (i))

    print('done')
Example #15
def transform_to_template(shape, template, transformation_type='similarity'):
    """Returns a transformed shape that is as close as possible to the given
    template under a certain type of transformation.

    Args:
        shape (ndarray): An n x 2 array where each row represents a vertex in
                         the shape. The first column is the x-coordinate and
                         the second column is the y-coordinate.

        template (ndarray): Another shape corresponding to the first input.
                            It must have the same array shape and type, and
                            corresponding rows must represent corresponding
                            vertices. For example, the vertex represented by
                            row **i** in the *input* will try to match as
                            closely as possible to the vertex represented by
                            row **i** in the *template*.

        transformation_type (str): The type of transformation to use when
                                   fitting the shape to the template. The
                                   string must be one of the ones specified by
                                   `skimage.transform.estimate_transform`_.

    Returns:
        ndarray: Transformed shape of the same type and array shape as the
        input shape.

    ..  _skimage.transform.estimate_transform: http://scikit-image.org/docs/dev/api/skimage.transform.html#skimage.transform.estimate_transform
    """
    transformation = estimate_transform(transformation_type, shape, template)
    return matrix_transform(shape, transformation.params)
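
A short usage sketch (the square and its scaled, shifted copy are made up):

import numpy as np
square = np.array([[0., 0.], [1., 0.], [1., 1.], [0., 1.]])
template = square * 2.0 + 3.0
fitted = transform_to_template(square, template, 'similarity')
# fitted now matches template up to floating-point error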
Example #16
    def __call__(self, sample):
        image, labels, index, landmarks = sample['image'], sample[
            'labels'], sample['index'], sample['landmarks']

        dst = np.array([[-0.25, -0.1], [0.25, -0.1], [0.0, 0.1], [-0.15, 0.4],
                        [0.15, 0.4]])
        tform = transform.estimate_transform('similarity', landmarks, dst)

        def map_func1(coords):
            tform2 = transform.SimilarityTransform(scale=1 / 32,
                                                   rotation=0,
                                                   translation=(-1.0, -1.0))
            return tform.inverse(tform2(coords))

        l, h, w = labels.shape
        image = np.uint8(
            transform.warp(
                image, inverse_map=map_func1, output_shape=[64, 64, 3]) * 255)
        labels = np.uint8(
            transform.warp(labels.transpose(1, 2, 0),
                           inverse_map=map_func1,
                           output_shape=[64, 64, l]).transpose(2, 0, 1) * 255)

        return {
            'image': image,
            'labels': labels,
            'index': index,
            'landmarks': landmarks
        }
    def align_to_template_similarity(self, image, gray, rect):
        template_landmarks = get_template_landmark()
        detected_landmarks = shape_to_np(self.predictor(gray, rect))

        tf = transform.estimate_transform('similarity', detected_landmarks,
                                          template_landmarks)
        result = img_as_ubyte(
            transform.warp(image,
                           inverse_map=tf.inverse,
                           output_shape=(self.desiredFaceWidth,
                                         self.desiredFaceWidth, 3)))
        # imshow(result)

        # overlay template landmarks on result -- successful
        # canvas = result
        # for p in template_landmarks:
        #     x, y = p
        #     result[y, x] = [0, 255, 0]  # OG
        #     surround
        #     result[y-1, x] = [0, 255, 0]
        #     result[y-1, x+1] = [0, 255, 0]
        #     result[y, x+1] = [0, 255, 0]
        #     result[y+1, x+1] = [0, 255, 0]
        #     result[y+1, x] = [0, 255, 0]
        #     result[y+1, x-1] = [0, 255, 0]
        #     result[y, x-1] = [0, 255, 0]
        #     result[y-1, x-1] = [0, 255, 0]
        #
        # imshow(canvas)
        # #
        # # save image as jpg -- successful
        # result = Image.fromarray(result, mode='RGB')
        # result.save('testing123_2.jpg')

        return result
Example #18
def get_cropping_transformation(image, face_detector, shape_predictor):
    detected_faces = face_detector(image, 1)
    if len(detected_faces) == 0:
        print('warning: no detected face')
        return None

    # only use the first detected face (assume each input image contains
    # a single face)
    d = detected_faces[0].rect
    left = d.left()
    right = d.right()
    top = d.top()
    bottom = d.bottom()
    old_size = (right - left + bottom - top) / 2
    center = np.array([
        right - (right - left) / 2.0,
        bottom - (bottom - top) / 2.0 + old_size * 0.14
    ])
    size = int(old_size * 1.58)

    shape = shape_predictor(image, d)
    coords = np.zeros((68, 2), dtype=int)
    for i in range(0, 68):
        coords[i] = (shape.part(i).x, shape.part(i).y)

    src_pts = np.array([[center[0] - size / 2, center[1] - size / 2],
                        [center[0] - size / 2, center[1] + size / 2],
                        [center[0] + size / 2, center[1] - size / 2]])
    DST_PTS = np.array([[0, 0], [0, 255], [255, 0]])
    tform = estimate_transform('similarity', src_pts, DST_PTS)
    return coords, tform
Example #19
    def __getitem__(self, index):
        imagepath = self.imagepath_list[index]
        imagename = imagepath.split('/')[-1].split('.')[0]

        image = imread(imagepath)[:,:,:3]

        h, w, _ = image.shape
        if self.iscrop:
            kptpath = os.path.join(self.kptfolder, imagename+'.npy')
            kpt = np.load(kptpath)
            left = np.min(kpt[:,0]); right = np.max(kpt[:,0])
            top = np.min(kpt[:,1]); bottom = np.max(kpt[:,1])
            old_size = (right - left + bottom - top)/2
            center = np.array([right - (right - left) / 2.0, bottom - (bottom - top) / 2.0 ])#+ old_size*0.1])
            size = int(old_size*self.scale)
            src_pts = np.array([[center[0]-size/2, center[1]-size/2], [center[0] - size/2, center[1]+size/2], [center[0]+size/2, center[1]-size/2]])
        else:
            src_pts = np.array([[0, 0], [0, h-1], [w-1, 0]])
        
        DST_PTS = np.array([[0,0], [0,self.resolution_inp - 1], [self.resolution_inp - 1, 0]])
        tform = estimate_transform('similarity', src_pts, DST_PTS)
        
        image = image/255.

        dst_image = warp(image, tform.inverse, output_shape=(self.resolution_inp, self.resolution_inp))
        dst_image = dst_image.transpose(2,0,1)
        return {'image': torch.tensor(dst_image).float(),
                'imagename': imagename,
                'tform': tform,
                'original_image': torch.tensor(image.transpose(2,0,1)).float(),
                }
Example #20
def map_func1(landmarks, coords):
  dst = np.array([[-0.25,-0.1], [0.25, -0.1], [0.0, 0.1], [-0.15, 0.4], [0.15, 0.4]])
  tform = transform.estimate_transform('similarity', np.array(landmarks, float), dst)
  tform2 = transform.SimilarityTransform(scale=1/32, rotation=0, translation=(-1.0, -1.0))
  ans = tform.inverse(tform2(coords))
  #print(coords, ans)
  return ans
Example #21
def gen_data(name):
    reftracker = scio.loadmat('data/images_tracker.00047.mat')['tracker']
    desttracker = scio.loadmat('data/images_tracker/'+name+'.mat')['tracker']
    refpos = np.floor(np.mean(reftracker, 0))
    xxc, yyc = np.meshgrid(np.arange(1, 1801, dtype=int), np.arange(1, 2001, dtype=int))
    #normalize x and y channels
    xxc = (xxc - 600 - refpos[0]) * 1.0 / 600
    yyc = (yyc - 600 - refpos[1]) * 1.0 / 600
    maskimg = Image.open('data/meanmask.png')
    maskc = np.array(maskimg, dtype=float)
    maskc = np.pad(maskc, (600, 600), 'minimum')
    # warp is an inverse transform, and so src and dst must be reversed here
    tform = transform.estimate_transform('affine', desttracker + 600, reftracker + 600)
    
    img_data = skio.imread('data/images_data/'+name+'.jpg')
    # save org mat
    warpedxx = transform.warp(xxc, tform, output_shape=xxc.shape)
    warpedyy = transform.warp(yyc, tform, output_shape=xxc.shape)
    warpedmask = transform.warp(maskc, tform, output_shape=xxc.shape)
    # the warped channels are 2-D; a third index would raise an IndexError
    warpedxx = warpedxx[600:1400, 600:1200]
    warpedyy = warpedyy[600:1400, 600:1200]
    warpedmask = warpedmask[600:1400, 600:1200]
    img_h, img_w, _ = img_data.shape
    mat = np.zeros((img_h, img_w, 6), dtype=float)
    mat[:, :, 0] = (img_data[:, :, 2] * 1.0 - 104.008) / 255
    mat[:, :, 1] = (img_data[:, :, 1] * 1.0 - 116.669) / 255
    mat[:, :, 2] = (img_data[:, :, 0] * 1.0 - 122.675) / 255
    scio.savemat('portraitFCN_data/' + name + '.mat', {'img':mat})
    mat_plus = np.zeros((img_h, img_w, 6), dtype=float)
    mat_plus[:, :, 0:3] = mat
    mat_plus[:, :, 3] = warpedxx
    mat_plus[:, :, 4] = warpedyy
    mat_plus[:, :, 5] = warpedmask
def crop_image(image, resolution=256):
    """
    Crops the image to a desired size (resolution) to a square with black edges (in case the image is not square)
    :param image: patch image
    :param resolution: desired size for the image
    :return: cropped image
    """
    bounding_box = BoundingBox((0, 0, image.shape[1], image.shape[0]))

    frame_image = skimage.img_as_float32(image)

    left = bounding_box.left
    right = bounding_box.right
    top = bounding_box.top
    bottom = bounding_box.bottom
    old_size = (right - left + bottom - top) / 2
    center = np.array([right - (right - left) / 2.0, bottom - (bottom - top) / 2.0])
    size = int(old_size * 1.)

    src_pts = np.array([[center[0] - size / 2, center[1] - size / 2], [center[0] - size / 2, center[1] + size / 2],
                        [center[0] + size / 2, center[1] - size / 2]])

    dst_pts = np.array([[0, 0], [0, resolution - 1], [resolution - 1, 0]])
    tform = estimate_transform('similarity', src_pts, dst_pts)

    cropped_image = warp(frame_image, tform.inverse, output_shape=(resolution, resolution))
    return cropped_image, tform
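
`BoundingBox` is not shown; a minimal stand-in consistent with the attribute accesses above (the (left, top, right, bottom) tuple order is inferred, not taken from the original source):

class BoundingBox:
    def __init__(self, box):
        self.left, self.top, self.right, self.bottom = box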
Example #23
    def test_find_transform_givensources(self):
        from skimage.transform import estimate_transform, matrix_transform

        source = np.array([
            [1.4, 2.2],
            [5.3, 1.0],
            [3.7, 1.5],
            [10.1, 9.6],
            [1.3, 10.2],
            [7.1, 2.0],
        ])
        nsrc = source.shape[0]
        scale = 1.5  # scaling parameter
        alpha = np.pi / 8.0  # rotation angle
        mm = scale * np.array([[np.cos(alpha), -np.sin(alpha)],
                               [np.sin(alpha), np.cos(alpha)]])
        tx, ty = 2.0, 1.0  # translation parameters
        transl = np.array([nsrc * [tx], nsrc * [ty]])
        dest = (mm.dot(source.T) + transl).T
        t_true = estimate_transform("similarity", source, dest)

        # disorder dest points so they don't match the order of source
        np.random.shuffle(dest)

        t, (src_pts, dst_pts) = aa.find_transform(source, dest)
        self.assertLess(t_true.scale - t.scale, 1e-10)
        self.assertLess(t_true.rotation - t.rotation, 1e-10)
        self.assertLess(np.linalg.norm(t_true.translation - t.translation),
                        1e-10)
        self.assertEqual(src_pts.shape[0], dst_pts.shape[0])
        self.assertEqual(src_pts.shape[1], 2)
        self.assertEqual(dst_pts.shape[1], 2)
        dst_pts_test = matrix_transform(src_pts, t.params)
        self.assertLess(np.linalg.norm(dst_pts_test - dst_pts), 1e-10)
Example #24
def extract_sift(image, lm=None, shape=[200,300], fix_points='outer', ttype='affine'):
    if lm is None: lm = landmarks(image)
    if np.any(np.isnan(lm)):
        # `out_shape` was undefined in the original; use the warp output
        # shape [shape[1], shape[1]] from below instead
        return np.nan*np.ones([shape[1], shape[1], image.shape[2]]).astype(np.float16), np.nan*np.zeros_like(lm)
    dst = mean_face[:,p[fix_points]]
    dst = dst-dst.mean(1)[:,None]
    dst = dst/np.abs(dst).max()
    dst *=shape[0]/2
    dst +=shape[1]/2
    print(dst.min())
    print(dst.max())
    src = lm[:,p[fix_points]]
    tform = transform.estimate_transform(ttype, src.T,dst.T)
    lm_reg = tform(lm.T).T
    image = transform.warp(image,inverse_map=tform.inverse,output_shape=[shape[1],shape[1]])
    image = exposure.equalize_hist(image,mask=image!=0)
    S = 12
    for l1,l2 in lm_reg.T:
        x = np.arange(l2-S,l2+S)
        y = np.arange(l1-S,l1+S)
        for x_ in x:
            for y_ in y:
                image[x_,y_,0]=255


    return image, lm_reg
Example #25
    def __init__(self, landmarks):
        self.landmarks = landmarks

        src = np.array(self.landmarks)
        dst = np.array([[-0.25, -0.1], [0.25, -0.1], [0.0, 0.1], [-0.15, 0.4],
                        [0.15, 0.4]])
        self.tform = transform.estimate_transform('similarity', src, dst)
Example #26
def regMRI(data, reg_df, reg_id=1):
    n_sample = data.shape[-1]
    if n_sample != reg_df.shape[0]:
        logging.error("Error, registration and data not match. Please check")
        sys.exit()
    n_landmark = int((reg_df.shape[1] - 1) / 2)
    # reg_target = data[..., 0, reg_id]
    _dst = reg_df.iloc[reg_id, 2:].values
    _dst = _dst.reshape((n_landmark, 2))
    max_dist = np.zeros(n_sample)
    for i in range(n_sample):
        if i == reg_id:
            continue
        else:
            # epts = reg_df.iloc[i, 1:].values
            _src = reg_df.iloc[i, 2:].values
            _src = _src.reshape((n_landmark, 2))
            idx_valid = np.isnan(_src[:, 0])
            tform = transform.estimate_transform(ttype="similarity",
                                                 src=_src[~idx_valid, :],
                                                 dst=_dst[~idx_valid, :])
            # forward transform used here, inverse transform used for warp
            src_tform = tform(_src[~idx_valid, :])
            dist = np.linalg.norm(src_tform - _dst[~idx_valid, :], axis=1)
            max_dist[i] = dist.max()

            for j in range(data[..., i].shape[-1]):
                src_img = data[..., j, i].copy()
                warped = transform.warp(src_img,
                                        inverse_map=tform.inverse,
                                        preserve_range=True)
                data[..., j, i] = warped

    return data, max_dist
def GetTarget(train_shapes, train_bboxes, gt_shapes, mean_rshape):
    targets = []
    for i in range(len(train_shapes)):
        # do similarity transformation to mean space
        sim_trans = transform.estimate_transform(
            'similarity',
            CenterShape(Shape2Relative(train_shapes[i], train_bboxes[i])),
            CenterShape(mean_rshape))
        targets.append(sim_trans(
            Shape2Relative(gt_shapes[i], train_bboxes[i])
            - Shape2Relative(train_shapes[i], train_bboxes[i])))
    return np.array(targets)
Example #28
    def calc_convergence(self, corr_points, em_points, min_points, refine_matrix):
        em_points = np.array(em_points)
        corr_points = np.array(corr_points)

        corr_points_refined = []
        if refine_matrix is None:
            corr_points_refined = corr_points
        else:
            for i in range(len(corr_points)):
                p = np.array([corr_points[i, 0], corr_points[i, 1], 1])
                p_new = refine_matrix @ p
                corr_points_refined.append(p_new[:2])
            # corr_points_refined = np.array(corr_points)
            corr_points_refined = np.array(corr_points_refined)

        num_sims = len(em_points) - min_points + 1
        num_iterations = 100
        precision_refined = []
        precision_free = []
        precision_all = []
        num_points = min_points
        for i in range(num_sims):
            precision_i_refined = []
            precision_i_free = []
            precision_i_all = []
            for k in range(num_iterations):
                indices = random.sample(range(len(em_points)), num_points)
                p_em = em_points[indices]
                p_corr = corr_points_refined[indices]

                refine_matrix = tf.estimate_transform('affine', p_corr, p_em).params
                calc_points = []
                for l in range(len(corr_points)):
                    p = np.array([corr_points_refined[l, 0], corr_points_refined[l, 1], 1])
                    p_refined = refine_matrix @ p
                    calc_points.append(p_refined[:2])

                diff_all = em_points - np.array(calc_points)
                diff_refined = diff_all[indices]
                diff_free = np.array([diff_all[i] for i in range(len(diff_all)) if i not in indices])

                rms_all = np.sqrt(1 / len(diff_all) * (diff_all ** 2).sum())
                rms_refined = np.sqrt(1 / len(diff_refined) * (diff_refined ** 2).sum())
                if len(diff_free) > 0:
                    rms_free = np.sqrt(1 / len(diff_free) * (diff_free ** 2).sum())
                else:
                    rms_free = 0
                precision_i_refined.append(rms_refined)
                precision_i_free.append(rms_free)
                precision_i_all.append(rms_all)
            precision_refined.append(np.mean(precision_i_refined))
            precision_free.append(np.mean(precision_i_free))
            precision_all.append(np.mean(precision_i_all))
            num_points += 1

        precision_refined = np.array(precision_refined) * self.pixel_size[0]
        precision_free = np.array(precision_free) * self.pixel_size[0]
        precision_all = np.array(precision_all) * self.pixel_size[0]
        self.print('RMS error: ', precision_all[-1])
        return [precision_refined, precision_free, precision_all]
def affine_fun(src, dst):
    tform = tf.estimate_transform('affine', src, dst)

    def affine_fun(x):
        return tform(x)[0]

    return affine_fun
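
A hedged usage sketch: the returned closure maps a single point through the fitted transform (the correspondences below are made up):

src = np.array([[0., 0.], [1., 0.], [0., 1.]])
dst = src * 2.0 + 1.0
f = affine_fun(src, dst)
f(np.array([[0.5, 0.5]]))  # -> array([2., 2.])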
Example #30
def estimate_coordinate_transform(source, target, method, **method_kwargs):
    """Calculates a transformation from a source list of coordinates to a
    target list of coordinates.

    Parameters
    ----------
    source : Nx2 array
        (x, y) coordinate pairs from source image.
    target : Nx2 array
        (x, y) coordinate pairs from target image. Must be same shape as
        'source'.
    method : string, optional
        Method to use for transform estimation.
    **method_kwargs : optional
        Additional arguments can be passed in specific to the particular
        method. For example, 'order' for a polynomial transform estimation.

    Returns
    -------
    transform : skimage.transform._geometric.GeometricTransform
        An skimage transform object.

    See Also
    --------
    skimage.transform.estimate_transform

    """

    return tf.estimate_transform(method, source, target, **method_kwargs)
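
A quick usage sketch under the docstring's contract (the coordinates are made up):

import numpy as np
src = np.array([[0., 0.], [1., 0.], [0., 1.], [1., 1.]])
dst = src * 2.0 + 1.0
tform = estimate_coordinate_transform(src, dst, 'affine')
tform(src)  # approximately equal to dst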
Example #31
    def __getitem__(self, index):
        imagepath = self.imagepath_list[index]
        imagename = imagepath.split('/')[-1].split('.')[0]
        image = imread(imagepath)[:,:,:3]
        kpt = scipy.io.loadmat(imagepath.replace('jpg', 'mat'))['pt3d_68'].T        
        left = np.min(kpt[:,0]); right = np.max(kpt[:,0]); 
        top = np.min(kpt[:,1]); bottom = np.max(kpt[:,1])

        h, w, _ = image.shape
        old_size = (right - left + bottom - top)/2
        center = np.array([right - (right - left) / 2.0, bottom - (bottom - top) / 2.0 ])#+ old_size*0.1])
        size = int(old_size*self.scale)

        # crop image
        src_pts = np.array([[center[0]-size/2, center[1]-size/2], [center[0] - size/2, center[1]+size/2], [center[0]+size/2, center[1]-size/2]])
        DST_PTS = np.array([[0,0], [0,self.resolution_inp - 1], [self.resolution_inp - 1, 0]])
        tform = estimate_transform('similarity', src_pts, DST_PTS)
        
        image = image/255.
        dst_image = warp(image, tform.inverse, output_shape=(self.resolution_inp, self.resolution_inp))
        dst_image = dst_image.transpose(2,0,1)
        return {'image': torch.tensor(dst_image).float(),
                'imagename': imagename,
                # 'tform': tform,
                # 'original_image': torch.tensor(image.transpose(2,0,1)).float(),
                }
Example #33
    def calculate_transform(frame):
        """
        Calculates affine transformation.

        Write result to frame.tform
        """
        if frame.isref:
            print("No need to calculate for reference frame.")
            return

        print("Starting affine transform for frame number " + frame.number)
        primary = []
        secondary = []
        m = 0

        for i in frame.pairs:
            if m > 11 or i[2] < 20:
                break
            #primary.append([i[0][0], i[0][1], 0])
            #secondary.append([i[1][0], i[1][1], 0])
            primary.append([i[0][0], i[0][1]])
            secondary.append([i[1][0], i[1][1]])
            m += 1
        primary = np.squeeze(np.array(primary))
        secondary = np.squeeze(np.array(secondary))

        try:
            frame.tform = tf.estimate_transform(ttype="affine", src=secondary, dst=primary)
        except IndexError:
            pass
    def Apply(self, batched_data_dict, batch_size, istub):
        if batch_size != len(next(iter(batched_data_dict.values()))):
            print("[Error] Apply() batch size not matched...")
            return None
        else:
            batched_result_dict = dict()

            tform = estimate_transform('similarity',
                                       batched_data_dict["src_pts"][0],
                                       PRNetImageCropper.DST_PTS)
            image = batched_data_dict["raw_image"][0] / 255.
            # print(image.shape)
            cropped_image = cv2.warpAffine(
                image,
                tform.params[:2],
                dsize=(PRNetImageCropper.resolution_inp,
                       PRNetImageCropper.resolution_inp))
            # print(cropped_image.shape)

            batched_result_dict["tform_params"] = [tform.params]
            batched_result_dict["cropped_image"] = [cropped_image]
            batched_result_dict["output_flag"] = batched_data_dict[
                "output_flag"]

            return batched_result_dict
Example #35
def main(base_dir):
    BASE_DIR = base_dir

    # Load the set of pictures
    ic = io.ImageCollection(BASE_DIR + '*.JPG')

    # Select points on the first picture
    f, ax = plt.subplots(1,1)
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
    ax.autoscale(enable=True, axis='both', tight=True);
    plt.tight_layout(pad=0.4, w_pad=0.0, h_pad=0.0)
    ax.imshow(ic[0])

    coords = [plt.ginput(8, timeout=0)]

    plt.close()

    # Load first picture side-by side with second, select points.
    # Scroll through images one-by-one
    for i, img in enumerate(ic[1:]):
        ax1 = plt.subplot2grid((6,10),(0,1), rowspan=6, colspan=9)
        ax0 = plt.subplot2grid((6,10),(0,0))
        
        for ax in [ax0, ax1]:
            ax.get_xaxis().set_visible(False)
            ax.get_yaxis().set_visible(False)
        
        plt.tight_layout(pad=0.4, w_pad=0.0, h_pad=0.0)
        
        #f, (ax0,ax1) = plt.subplots(1,2)
        ax0.imshow(ic[i])
        for coord in coords[i]:
            ax0.scatter(coord[0],coord[1])
        ax1.imshow(img)
        
        coords.append(plt.ginput(8, timeout=0))
        
        plt.close()

    # Use a similarity transformation to transform each one.

    if not os.path.exists(BASE_DIR + 'corrected'):
        os.mkdir(BASE_DIR + 'corrected')

    np.save(BASE_DIR + 'corrected/coords.npy', coords)

    io.imsave(BASE_DIR + 'corrected/0.jpg', ic[0])
    for i, img in enumerate(ic[1:]):
        tf = transform.estimate_transform('similarity', np.array(coords[0]), np.array(coords[i+1]))

    # Use a translation transformation to center both images for display purposes

        img_warped = transform.warp(img, inverse_map=tf,
                                     output_shape=(1728,3072))
        
        print(BASE_DIR + 'corrected/%d.jpg' % (i+1))
        print(img_warped)
        
        io.imsave(BASE_DIR + 'corrected/%d.jpg' %(i+1), img_warped)
Example #36
    def calc_affine_transform(self, my_points):
        my_points = self.calc_orientation(my_points)
        self.log('Input points:\n', my_points)
        side_list = np.linalg.norm(np.diff(my_points, axis=0), axis=1)
        side_list = np.append(side_list, np.linalg.norm(my_points[0] - my_points[-1]))
        self.side_length = np.mean(side_list)
        self.log('ROI side length:', self.side_length, '\xb1', side_list.std())

        cen = my_points.mean(0) - np.ones(2) * self.side_length / 2.
        points_tmp = np.zeros_like(my_points)
        points_tmp[0] = cen + (0, 0)
        points_tmp[1] = cen + (self.side_length, 0)
        points_tmp[2] = cen + (self.side_length, self.side_length)
        points_tmp[3] = cen + (0, self.side_length)

        self.tf_matrix = tf.estimate_transform('affine', my_points, points_tmp).params

        nx, ny = self.data.shape
        corners = np.array([[0, 0, 1], [nx, 0, 1], [nx, ny, 1], [0, ny, 1]]).T
        self.tf_corners = np.dot(self.tf_matrix, corners)
        self.tf_shape = tuple([int(i) for i in (self.tf_corners.max(1) - self.tf_corners.min(1))[:2]])
        self.tf_matrix[:2, 2] -= self.tf_corners.min(1)[:2]
        self.print('Transform matrix:\n', self.tf_matrix)
        self.print('Shift: ', -self.tf_corners.min(1)[:2])
        self.log('Assembled? ', self.assembled)
        if not self._transformed:
            if self.assembled:
                self._orig_points = np.copy(my_points)
                self.tf_matrix_orig = np.copy(self.tf_matrix)
            else:
                self._orig_points_region = np.copy(my_points)
                self.tf_matrix_orig_region = np.copy(self.tf_matrix)
        self.apply_transform(points_tmp)
Example #37
    def crop(self, image, kpt):
        left = np.min(kpt[:, 0])
        right = np.max(kpt[:, 0])
        top = np.min(kpt[:, 1])
        bottom = np.max(kpt[:, 1])

        h, w, _ = image.shape
        old_size = (right - left + bottom - top) / 2
        center = np.array(
            [right - (right - left) / 2.0,
             bottom - (bottom - top) / 2.0])  #+ old_size*0.1])
        # translate center
        trans_scale = (np.random.rand(2) * 2 - 1) * self.trans_scale
        center = center + trans_scale * old_size  # 0.5

        scale = np.random.rand() * (self.scale[1] -
                                    self.scale[0]) + self.scale[0]
        size = int(old_size * scale)

        # crop image
        src_pts = np.array([[center[0] - size / 2, center[1] - size / 2],
                            [center[0] - size / 2, center[1] + size / 2],
                            [center[0] + size / 2, center[1] - size / 2]])
        DST_PTS = np.array([[0, 0], [0, self.image_size - 1],
                            [self.image_size - 1, 0]])
        tform = estimate_transform('similarity', src_pts, DST_PTS)

        # cropped_image = warp(image, tform.inverse, output_shape=(self.image_size, self.image_size))
        # # change kpt accordingly
        # cropped_kpt = np.dot(tform.params, np.hstack([kpt, np.ones([kpt.shape[0],1])]).T).T # np.linalg.inv(tform.params)
        return tform
def similarity_fun(src, dst):
    tform = tf.estimate_transform('similarity', src, dst)

    def affine_fun(x):
        return tform(x)[0]

    return affine_fun
Example #39
def infer(edge_image, edge_lengths, mu, phi, sigma2,
          update_slice=slice(None),
          scale_estimate=None,
          rotation=0,
          translation=(0, 0)):
    # edge_points = np.array(np.where(edge_image)).T
    # edge_points[:, [0, 1]] = edge_points[:, [1, 0]]
    # edge_score = edge_image.shape[0] * np.exp(-edge_lengths[edge_image] / (0.25 * edge_image.shape[0])).reshape(-1, 1)
    # edge_points = np.concatenate((edge_points, edge_score), axis=1)
    #
    # edge_nn = NearestNeighbors(n_neighbors=1).fit(edge_points)

    edge_near = scipy.ndimage.distance_transform_edt(~edge_image)
    edge_near_blur = gaussian(edge_near, 2)
    Gy, Gx = np.gradient(edge_near_blur)
    mag = np.sqrt(np.power(Gy, 2) + np.power(Gx, 2))

    if scale_estimate is None:
        scale_estimate = min(edge_image.shape) * 4

    mu = (mu.reshape(-1, 2) - mu.reshape(-1, 2).mean(axis=0)).reshape(-1, 1)
    average_distance = np.sqrt(np.power(mu.reshape(-1, 2), 2).sum(axis=1)).mean()
    scale_estimate /= average_distance * np.sqrt(2)

    h = np.zeros((phi.shape[1], 1))

    psi = SimilarityTransform(scale=scale_estimate, rotation=rotation, translation=translation)

    while True:
        w = (mu + phi @ h).reshape(-1, 2)
        image_points = matrix_transform(w, psi.params)[update_slice, :]
        image_points = np.concatenate((image_points, np.zeros((image_points.shape[0], 1))), axis=1)

        # closest_edge_point_indices = edge_nn.kneighbors(image_points)[1].flatten()
        # closest_edge_points = edge_points[closest_edge_point_indices, :2]

        closest_edge_points = gradient_step(Gy, Gx, mag, image_points)

        w = mu.reshape(-1, 2)
        psi = estimate_transform('similarity', w[update_slice, :], closest_edge_points)

        image_points = matrix_transform(w, psi.params)[update_slice, :]
        image_points = np.concatenate((image_points, np.zeros((image_points.shape[0], 1))), axis=1)

        # closest_edge_point_indices = edge_nn.kneighbors(image_points)[1].flatten()
        # closest_edge_points = edge_points[closest_edge_point_indices, :2]

        closest_edge_points = gradient_step(Gy, Gx, mag, image_points)

        mu_slice = mu.reshape(-1, 2)[update_slice, :].reshape(-1, 1)
        K = phi.shape[-1]
        phi_full = phi.reshape(-1, 2, K)
        phi_slice = phi_full[update_slice, :].reshape(-1, K)
        h = update_h(sigma2, phi_slice, closest_edge_points, mu_slice, psi)

        w = (mu + phi @ h).reshape(-1, 2)
        image_points = matrix_transform(w, psi.params)

        update_slice = yield image_points, closest_edge_points
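
Because of the trailing `update_slice = yield ...`, `infer` behaves as a coroutine: prime it with `next()` and drive later refinement passes with `send()`. A hedged sketch (the inputs are assumed to exist):

gen = infer(edge_image, edge_lengths, mu, phi, sigma2)
image_points, closest = next(gen)              # first fitting pass
image_points, closest = gen.send(slice(None))  # refine using all points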
Example #40
def similarity(reference, points, bone, properties_to_transform):
    """
    Estimates a similarity transform
    """
    tform = tf.estimate_transform('similarity', points, reference)
    transformed = list(map(tform, [ bone[p] for p in properties_to_transform  ]))
    error = get_error(points, reference, tform)
    return transformed, error
def test_similarity_estimation():
    # exact solution
    tform = estimate_transform('similarity', SRC[:2, :], DST[:2, :])
    assert_array_almost_equal(tform(SRC[:2, :]), DST[:2, :])
    assert_equal(tform._matrix[0, 0], tform._matrix[1, 1])
    assert_equal(tform._matrix[0, 1], - tform._matrix[1, 0])

    # over-determined
    tform2 = estimate_transform('similarity', SRC, DST)
    assert_array_almost_equal(tform2.inverse(tform2(SRC)), SRC)
    assert_equal(tform2._matrix[0, 0], tform2._matrix[1, 1])
    assert_equal(tform2._matrix[0, 1], - tform2._matrix[1, 0])

    # via estimate method
    tform3 = SimilarityTransform()
    tform3.estimate(SRC, DST)
    assert_array_almost_equal(tform3._matrix, tform2._matrix)
Example #42
def test_euclidean_estimation():
    # exact solution
    tform = estimate_transform('euclidean', SRC[:2, :], SRC[:2, :] + 10)
    assert_almost_equal(tform(SRC[:2, :]), SRC[:2, :] + 10)
    assert_almost_equal(tform.params[0, 0], tform.params[1, 1])
    assert_almost_equal(tform.params[0, 1], - tform.params[1, 0])

    # over-determined
    tform2 = estimate_transform('euclidean', SRC, DST)
    assert_almost_equal(tform2.inverse(tform2(SRC)), SRC)
    assert_almost_equal(tform2.params[0, 0], tform2.params[1, 1])
    assert_almost_equal(tform2.params[0, 1], - tform2.params[1, 0])

    # via estimate method
    tform3 = EuclideanTransform()
    tform3.estimate(SRC, DST)
    assert_almost_equal(tform3.params, tform2.params)
Example #43
def projective(reference, points, bone, properties_to_transform):
    """
    Estimates a projective transform
    """
    tform = tf.estimate_transform('projective', points, reference)
    transformed = list(map(tform, [ bone[p] for p in properties_to_transform  ]))
    error = get_error(points, reference, tform)
    return transformed, error
def test_polynomial_estimation():
    # over-determined
    tform = estimate_transform('polynomial', SRC, DST, order=10)
    assert_array_almost_equal(tform(SRC), DST, 6)

    # via estimate method
    tform2 = PolynomialTransform()
    tform2.estimate(SRC, DST, order=10)
    assert_array_almost_equal(tform2._params, tform._params)
Example #45
def shape_features(image, fix_points='Stable', feature_points='inner',lm=None):
    if lm is None: lm = landmarks(image)

    mf = mean_face[:,p[fix_points]]
    lm_fix = lm[:,p[fix_points]]
    if np.any(np.isnan(lm_fix)): return None, None

    tform = transform.estimate_transform('affine', lm_fix.T,mf.T)
    lm_reg = tform(lm.T).T
    lm_reg = lm_reg[:,p[feature_points]]

    X = lm_reg.flatten()
    return X, lm
def test_essential_matrix_estimation():
    src = np.array(
        [
            1.839035,
            1.924743,
            0.543582,
            0.375221,
            0.473240,
            0.142522,
            0.964910,
            0.598376,
            0.102388,
            0.140092,
            15.994343,
            9.622164,
            0.285901,
            0.430055,
            0.091150,
            0.254594,
        ]
    ).reshape(-1, 2)
    dst = np.array(
        [
            1.002114,
            1.129644,
            1.521742,
            1.846002,
            1.084332,
            0.275134,
            0.293328,
            0.588992,
            0.839509,
            0.087290,
            1.779735,
            1.116857,
            0.878616,
            0.602447,
            0.642616,
            1.028681,
        ]
    ).reshape(-1, 2)

    tform = estimate_transform("essential", src, dst)

    # Reference values obtained using COLMAP SfM library.
    tform_ref = np.array(
        [[-0.0811666, 0.255449, -0.0478999], [-0.192392, -0.0531675, 0.119547], [0.177784, -0.22008, -0.015203]]
    )
    assert_almost_equal(tform.params, tform_ref, 6)
def test_fundamental_matrix_estimation():
    src = np.array(
        [
            1.839035,
            1.924743,
            0.543582,
            0.375221,
            0.473240,
            0.142522,
            0.964910,
            0.598376,
            0.102388,
            0.140092,
            15.994343,
            9.622164,
            0.285901,
            0.430055,
            0.091150,
            0.254594,
        ]
    ).reshape(-1, 2)
    dst = np.array(
        [
            1.002114,
            1.129644,
            1.521742,
            1.846002,
            1.084332,
            0.275134,
            0.293328,
            0.588992,
            0.839509,
            0.087290,
            1.779735,
            1.116857,
            0.878616,
            0.602447,
            0.642616,
            1.028681,
        ]
    ).reshape(-1, 2)

    tform = estimate_transform("fundamental", src, dst)

    # Reference values obtained using COLMAP SfM library.
    tform_ref = np.array(
        [[-0.217859, 0.419282, -0.0343075], [-0.0717941, 0.0451643, 0.0216073], [0.248062, -0.429478, 0.0221019]]
    )
    assert_almost_equal(tform.params, tform_ref, 6)
Example #48
def main():

#    image = data.coins() # or any NumPy array!
#    edges = filter.sobel(image)
#    io.imshow(edges)
#    io.show()

    image_file_name_0 = '/local/decarlo/projects/data/microCT/0_180/hdf4/tilt_020_020_0001.hdf'
    image_file_name_180 = '/local/decarlo/projects/data/microCT/0_180/hdf4/tilt_020_020_0002.hdf'
    image_file_name_white = '/local/decarlo/projects/data/microCT/0_180/hdf4/tilt_020_020_0003.hdf'

    image_0 = read_hdf4(image_file_name_0, 'data')
    image_180 = read_hdf4(image_file_name_180, 'data')
    image_white = read_hdf4(image_file_name_white, 'data')

    image_0 = normalize (image_0, image_white)
    image_180 = normalize (image_180, image_white)
    
    plt.imshow(image_0+image_180, cmap=plt.cm.hot)
    plt.colorbar()
    plt.show()

    image_180 = np.fliplr(image_180)

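    # NOTE: tf.estimate_transform expects matched (N, 2) point arrays;
    # passing whole images, as this original snippet does, will not work.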
    tform = tf.estimate_transform('similarity', image_0, image_180)

    a, grad = structural_similarity(image_0, image_180, gradient=True)
    print(a)
    print("grad shape", grad.shape)

#    print grad
    plt.imshow(grad, cmap=plt.cm.hot)
    plt.colorbar()
    plt.show()

    result = match_template(image_0, image_180)
    print(result.shape)
    ij = np.unravel_index(np.argmax(result), result.shape)
    x, y = ij[::-1]
    print(x, y)

    im2, scale, angle, t = similarity(image_0, image_180)
    print "Scale: ", scale, "Angle: ", angle, "Transforamtion Matrix: ", t

    rot_axis_shift_x = -t[0]/2.0
    rot_axis_tilt = -t[1]/1.0
    
    print "Rotation Axis Shift (x, y):", "(", rot_axis_shift_x, ",", rot_axis_tilt,")"
def estimate_transform(src, dst):
    """ Create source and destination feature match coordinates"""

    src = np.array([keypoints1[elem] for elem in matches12[:,0]])
    dst = np.array([keypoints2[elem] for elem in matches12[:,1]])

    """ Estimate transform
    Available transformations:
    ('similarity', 'affine', 'piecewise-affine', 'projective', 'polynomial')
    """

    tform = tf.estimate_transform('similarity', src, dst)

    """ Error check transform (should return True)"""
    assert  np.allclose(tform.inverse(tform(src)), src) == False

    return(tform)
Example #50
def cnn_input_features(image,lm=None, face_size = 64, shape=[128,96], fix_points='outer',ttype='similarity',normalization='hist'):
    if lm is None: lm = landmarks(image)
    if np.any(np.isnan(lm)):
        return np.nan*np.ones([shape[0],shape[1]]).astype(np.float16), np.nan*np.zeros_like(lm)
    dst = mean_face[:,p[fix_points]]
    dst = dst-dst.mean(1)[:,None]
    dst = dst/(np.abs(dst).max())
    dst = dst*face_size/2
    dst+=np.array([shape[1],shape[0]])[:,None]/2

    src = lm[:,p[fix_points]]

    tform = transform.estimate_transform(ttype, src.T,dst.T)
    lm_reg = tform(lm.T).T
    image = transform.warp(image,inverse_map=tform.inverse,
            output_shape=[shape[0],shape[1]]
            )

    return image, lm_reg
    def calcuate_transform_from_shape_to_mean(self, shape):
        #  calculate the transform between shapes (ground truth and mean)

        #  we convert the landmark format to numpy.array
        #  src store the landmark position information of shape in xml
        #  dst store the landmark position information of mean shape
        src = self.convert_landmarks_to_nparray(
            shape.get_normalized_landmarks(self.width, self.height))

        dst = self.convert_landmarks_to_nparray(
            self.mean_shape.get_normalized_landmarks(self.width, self.height))

        #  after format converted we can using numpy library find out
        #  transform matrix
        tform = tf.estimate_transform(
            'piecewise-affine', src, dst)

        #  print "\nthe tform form normalized shape to mean shape:\n"
        #  print tform._matrix
        return tform
Example #52
def main():
    # setup constants
    conf = set_conf()
    # generate random key points
    src_points = generate_points(conf)
    # generate random transformation
    tform = generate_transform(conf)
    # compute transformed key points with gaussian noise
    dst_points = similarity_transform(src_points, tform, conf)
    # estimate the transformation by skimage
    tform_estimated_skimage = tf.estimate_transform(
        'similarity', src_points, dst_points)
    # estimate the transformation by own implementation
    tform_estimated_own = estimate_transform_own(src_points, dst_points)
    # quantitatively compare two estimation methods
    evaluate(src_points, dst_points,
        tform, tform_estimated_skimage, tform_estimated_own)
    # qualitatively compare two estimation methods
    visualize(src_points, dst_points,
        tform, tform_estimated_skimage, tform_estimated_own)
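
`estimate_transform_own` is not shown; a minimal sketch of what it might look like, using the closed-form Umeyama (1991) least-squares similarity fit (the same method scikit-image uses internally):

import numpy as np

def estimate_transform_own(src, dst):
    # centre both point sets
    src_mean, dst_mean = src.mean(axis=0), dst.mean(axis=0)
    src_c, dst_c = src - src_mean, dst - dst_mean
    # SVD of the cross-covariance yields the optimal rotation
    cov = dst_c.T @ src_c / src.shape[0]
    U, S, Vt = np.linalg.svd(cov)
    d = np.sign(np.linalg.det(U) * np.linalg.det(Vt))
    D = np.diag([1.0, d])
    R = U @ D @ Vt
    var_src = (src_c ** 2).sum() / src.shape[0]
    scale = np.trace(np.diag(S) @ D) / var_src
    t = dst_mean - scale * R @ src_mean
    T = np.eye(3)                 # 3x3 homogeneous similarity matrix
    T[:2, :2] = scale * R
    T[:2, 2] = t
    return T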
Example #53
    def match(self, image):
        """Matches an image to the baseline image.

        Parameters
        ----------

        image: M,N,3 ndarray
            Image to be matched.

        Returns
        -------

        transformed_image: M,N,3 ndarray
            The input image transformed to match the baseline image at the
            selected points.

        """
        search_windows, search_coords = extract_patches(image,
                                                        self.windows,
                                                        pad=self.pad)

        shifts = []

        for window, template in zip(search_windows, self.templates):
            shifts.append(find_max_correlation(window, template))

        shifts = np.vstack(shifts)

        # Convert coordinates from padded windows to absolute position
        delta = search_coords[::2, [1, 0]] - self.windows[::2, [1, 0]]
        points1 = self.windows[::2, [1, 0]]
        points2 = points1 - shifts - delta

        if self.tform_type == 'translate':
            x, y = (-shifts - delta).ravel()
            match_tform = transform.SimilarityTransform(translation=(-x, -y))
        else:
            match_tform = transform.estimate_transform(self.tform_type,
                                                       points2,
                                                       points1)
        return (transform.warp(image, match_tform), match_tform)
def alignment(filePath, points, ref_points):
    '''
    @brief: align the face image according to the detected points
    '''
    assert(len(points) == len(ref_points))    
    num_point = len(ref_points) // 2
    # points in the reference image
    dst = np.empty((num_point, 2), dtype=int)
    k = 0
    for i in range(num_point):
        for j in range(2):
            dst[i][j] = ref_points[k]
            k = k+1
    # points in the image to be aligned
    src = np.empty((num_point, 2), dtype=int)
    k = 0
    for i in range(num_point):
        for j in range(2):
            src[i][j] = points[k]
            k = k+1
    # estimate the corresponding affine transform matrix from the detected points
    tfrom = tf.estimate_transform('affine', dst,src)
    # try OpenCV's version; it can only take three points to compute matrix M
#    pts1 = np.float32([[src[0][0],src[0][1]],[src[1][0],src[1][1]],[src[2][0],src[2][1]]])
#    pts2 = np.float32([[dst[0][0],dst[0][1]],[dst[1][0],dst[1][1]],[dst[2][0],dst[2][1]]])
#    M = cv2.getAffineTransform(pts2,pts1)
    # process with the least-squares method instead
    pts3 = np.float32([[src[0][0],src[0][1]],[src[1][0],src[1][1]],[src[2][0],src[2][1]],[src[3][0],src[3][1]],[src[4][0],src[4][1]]])
    pts4 = np.float32([[dst[0][0],dst[0][1]],[dst[1][0],dst[1][1]],[dst[2][0],dst[2][1]],[dst[3][0],dst[3][1]],[dst[4][0],dst[4][1]]])
    N = compute_affine_transform(pts4, pts3)
    #
    im = skimage.io.imread(filePath)
    
    if im.ndim == 3:
        rows, cols, ch = im.shape
    else:
        rows, cols = im.shape
    warpimage_cv2 = cv2.warpAffine(im, N, (cols, rows))
    warpimage = tf.warp(im, inverse_map = tfrom)
    
    return warpimage, warpimage_cv2
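
`compute_affine_transform` is not shown; a plausible least-squares version (hypothetical, with the argument order of the call above: destination points first):

def compute_affine_transform(dst_pts, src_pts):
    # solve dst ~= A @ [x, y, 1] for the 2x3 matrix A used by cv2.warpAffine
    src_h = np.hstack([src_pts, np.ones((len(src_pts), 1), np.float32)])
    sol, _, _, _ = np.linalg.lstsq(src_h, dst_pts, rcond=None)
    return sol.T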
def lfw_imgs(alignment):
    if alignment == 'landmarks':
        dataset = dp.dataset.LFW('original')
        imgs = dataset.imgs
        landmarks = dataset.landmarks('68')
        n_landmarks = 68
        landmarks_mean = np.mean(landmarks, axis=0)
        landmarks_mean = np.array([landmarks_mean[:n_landmarks],
                                   landmarks_mean[n_landmarks:]])
        aligned_imgs = []
        for img, points in zip(imgs, landmarks):
            points = np.array([points[:n_landmarks], points[n_landmarks:]])
            transf = transform.estimate_transform('similarity',
                                                  landmarks_mean.T, points.T)
            img = img / 255.
            img = transform.warp(img, transf, order=3)
            img = np.round(img*255).astype(np.uint8)
            aligned_imgs.append(img)
        imgs = np.array(aligned_imgs)
    else:
        dataset = dp.dataset.LFW(alignment)
        imgs = dataset.imgs
    return imgs
Example #56
def fetch_example_anno_indygo(recipe, global_spec):
    try:
        img = fetch_path_local(recipe.path)
        target_h = global_spec['target_h']
        target_w = global_spec['target_w']
        TARGETS = global_spec['TARGETS']
        buckets = global_spec['buckets']

        target_size = target_h
        indygo_equalize = global_spec['indygo_equalize']
        m = global_spec['margin']
        diag = global_spec['diag']
        if target_h != target_w:
            raise NotImplementedError()

        true_dist = np.zeros(shape=(WhaleTrainer.get_y_shape(TARGETS),), dtype=floatX)

        if 'class_idx' in recipe:
            inter = WhaleTrainer.get_interval('class', TARGETS)
            assert(inter is not None)
            assert(inter[0] == 0)
            #print 'inter0', inter
            true_dist[inter[0] + recipe.class_idx] = 1.0


        #img = resize_simple(img, target_h)
        crop_annos = recipe.annotations['auto_indygo']
        idx = random.randint(0, len(crop_annos) - 1)
        crop_anno = crop_annos[idx]
        (x1, y1) = crop_anno['coord1']
        (x2, y2) = crop_anno['coord2']

        if 'point1' in recipe.annotations:
            point_1 = recipe.annotations['point1'][0]
        else:
            point_1 = {'x': 0.0, 'y': 0.0}

        if 'point2' in recipe.annotations:
            point_2 = recipe.annotations['point2'][0]
        else:
            point_2 = {'x': 0.0, 'y': 0.0}

        inter = WhaleTrainer.get_interval('widacryj', TARGETS)

        if 'widacryj' in recipe.annotations and inter is not None:
            widacryj_target = recipe.annotations['widacryj']
            assert (widacryj_target >= 0 and widacryj_target < inter[1] - inter[0])
            true_dist[inter[0] + widacryj_target] = 1

        inter = WhaleTrainer.get_interval('new_conn', TARGETS)
        if 'new_conn' in recipe.annotations and inter is not None:
            new_conn_target_a = recipe.annotations['new_conn']
            if new_conn_target_a == 0:
                new_conn_target = 0
            elif new_conn_target_a == 2:
                new_conn_target = 1
            else:
                new_conn_target = -1

            if new_conn_target != -1:
                #print 'New_conn', new_conn_target
                assert (new_conn_target >= 0 and new_conn_target < inter[1] - inter[0])
                true_dist[inter[0] + new_conn_target] = 1

        inter = WhaleTrainer.get_interval('symetria', TARGETS)
        if 'symetria' in recipe.annotations and inter is not None:
            symetria_target = recipe.annotations['symetria']

            if symetria_target != -1:
                assert (symetria_target >= 0 and symetria_target < inter[1] - inter[0])
                true_dist[inter[0] + symetria_target] = 1

        if diag:
            dsts = [[m, m], [target_size - m, target_size - m]]
        else:
            dsts = [[target_size / 2, m], [target_size / 2, target_size - m]]

        srcs = [[x1, y1], [x2, y2]]

        def rot90(w):
            return np.array([-w[1], w[0]], dtype=w.dtype)

        if indygo_equalize:
            s1 = np.array(srcs[0])
            s2 = np.array(srcs[1])
            w = s2 - s1
            wp = rot90(w)

            d1 = np.array(dsts[0])
            d2 = np.array(dsts[1])
            v = d2 - d1
            vp = rot90(v)
            srcs.append(list(s1 + wp))
            dsts.append(list(d1 + vp))
            # print '-------------'
            # print 'srcs'
            # print srcs
            # print 'dsts'
            # print dsts
            # we want (wxp, wyp) transleted to

        src = np.array(srcs)
        dst = np.array(dsts)

        tform_res = estimate_transform('affine', src, dst)

        tform_center, tform_uncenter = build_center_uncenter_transforms2(target_w, target_h)
        tform_augment, r = unpack(random_perturbation_transform(rng=np.random, **global_spec['augmentation_params']),
                                  'tform', 'r')

        tform_res += tform_center + tform_augment + tform_uncenter
        img = fast_warp(img, AffineTransform(tform_res._inv_matrix), output_shape=(target_h, target_w))
        img = transformation(img, global_spec, perturb=False)

        res1 = tform_res((point_1['x'], point_1['y']))[0]
        res2 = tform_res((point_2['x'], point_2['y']))[0]


        bucket1_x = find_bucket(target_w, buckets, res1[0])
        bucket1_y = find_bucket(target_h, buckets, res1[1])
        bucket2_x = find_bucket(target_w, buckets, res2[0])
        bucket2_y = find_bucket(target_h, buckets, res2[1])

        inter = WhaleTrainer.get_interval('indygo_point1_x', TARGETS)
        if bucket1_x != -1 and bucket1_y != -1 and inter is not None:
            idx = inter[0] + bucket1_x
            if idx < inter[1]:
                true_dist[idx] = 1

        inter = WhaleTrainer.get_interval('indygo_point1_y', TARGETS)
        if bucket1_x != -1 and bucket1_y != -1 and inter is not None:
            idx = inter[0] + bucket1_y
            if idx < inter[1]:
                true_dist[idx] = 1

        inter = WhaleTrainer.get_interval('indygo_point2_x', TARGETS)
        if bucket2_x != -1 and bucket2_y != -1 and inter is not None:
            idx = inter[0] + bucket2_x
            if idx < inter[1]:
                true_dist[idx] = 1

        inter = WhaleTrainer.get_interval('indygo_point2_y', TARGETS)
        if bucket2_x != -1 and bucket2_y != -1 and inter is not None:
            idx = inter[0] + bucket2_y
            #print idx
            if idx < inter[1]:
                true_dist[idx] = 1

        info = {
                'perturb_params': r
                }
        #print 'buckets', res1, bucket1_x, bucket1_y


        inter = WhaleTrainer.get_interval('conn', TARGETS)
        if 'ryj_conn' in recipe.annotations and inter is not None:
            ryj_conn_anno = recipe.annotations['ryj_conn']
            class_idx = ryj_conn_anno['class']
            true_dist[inter[0] + class_idx] = 1

        def draw_point(img, x, y, color, w=7):
            img[0, y:y+w, x:x+w] = color[0]
            img[1, y:y+w, x:x+w] = color[1]
            img[2, y:y+w, x:x+w] = color[2]

        target_size = target_h
        x1 = (float(target_size) / buckets) * bucket1_x
        y1 = (float(target_size) / buckets) * bucket1_y
        x2 = (float(target_size) / buckets) * bucket2_x
        y2 = (float(target_size) / buckets) * bucket2_y
        w = int(floor(target_size / buckets))
        #draw_point(img, x1, y1, (0, 0, 2), w=w)
        #draw_point(img, x2, y2, (2, 0, 0), w=w)

        #draw_point(img, res1[0], res1[1], (0, 0, 2))
        #draw_point(img, res2[0], res2[1], (2, 0, 0))
        #print true_dist[447+2: 447+2+30]

        #print 'rest4 took', ml_utils.elapsed_time_ms(timer)
        return Bunch(x=img, y=true_dist, recipe=recipe, info=info)
    except Exception as e:
        print(traceback.format_exc())
        raise
def test_polynomial_default_order():
    tform = estimate_transform('polynomial', SRC, DST)
    tform2 = estimate_transform('polynomial', SRC, DST, order=2)
    assert_array_almost_equal(tform2._params, tform._params)
def test_polynomial_init():
    tform = estimate_transform('polynomial', SRC, DST, order=10)
    # init with transformation parameters
    tform2 = PolynomialTransform(tform._params)
    assert_array_almost_equal(tform2._params, tform._params)
def test_projective_init():
    tform = estimate_transform('projective', SRC, DST)
    # init with transformation matrix
    tform2 = ProjectiveTransform(tform._matrix)
    assert_array_almost_equal(tform2._matrix, tform._matrix)