Example #1
def generateElements():
    image = read_image_return_scikit()
    rows, cols = image.shape[0], image.shape[1]

    src_cols = np.linspace(0, cols, 20)
    src_rows = np.linspace(0, rows, 10)
    src_rows, src_cols = np.meshgrid(src_rows, src_cols)
    src = np.dstack([src_cols.flat, src_rows.flat])[0]

    # add sinusoidal oscillation to row coordinates
    dst_rows = src[:, 1] - np.sin(np.linspace(0, 3 * np.pi, src.shape[0])) * 50
    dst_cols = src[:, 0]
    dst_rows *= 1.5
    dst_rows -= 1.5 * 50
    dst = np.vstack([dst_cols, dst_rows]).T


    tform = transform.PiecewiseAffineTransform()
    tform.estimate(src, dst)

    out_rows = image.shape[0] - 1.5 * 50
    out_cols = cols
    out = transform.warp(image, tform, output_shape=(out_rows, out_cols))

    fig, ax = plt.subplots()
    ax.imshow(out)
    ax.plot(tform.inverse(src)[:, 0], tform.inverse(src)[:, 1], '.b')
    ax.axis((0, out_cols, out_rows, 0))
    plt.show()
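
A minimal sketch of the setup Example #1 assumes (its imports are not shown, and read_image_return_scikit is whatever loader the original project uses; here it is stubbed with a bundled sample image):

import numpy as np
import matplotlib.pyplot as plt
from skimage import data, transform

def read_image_return_scikit():
    # stand-in loader: any RGB or grayscale ndarray works
    return data.astronaut()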
Example #2
def RotateAndCrop(im, bbox, angle):

    pat = transform.PiecewiseAffineTransform()

    dstPts = [(bbox[0][0], bbox[1][0]), (bbox[0][1], bbox[1][0]),
              (bbox[0][1], bbox[1][1]), (bbox[0][0], bbox[1][1])]

    srcPts = [(0., 0.), (bbox[0][1] - bbox[0][0], 0.),
              (bbox[0][1] - bbox[0][0], bbox[1][1] - bbox[1][0]),
              (0., bbox[1][1] - bbox[1][0])]

    srcPts = np.array(srcPts)
    dstPts = np.array(dstPts)

    #Rotate points
    meanPos = dstPts.mean(axis=0)
    centrePts = dstPts - meanPos
    rotMatrix = [[math.cos(-angle), math.sin(-angle)],
                 [-math.sin(-angle), math.cos(-angle)]]
    rotPts = np.dot(rotMatrix, centrePts.transpose()).transpose()
    rotDstPts = rotPts + meanPos

    pat.estimate(srcPts, rotDstPts)
    outSize = bbox[1][1] - bbox[1][0], bbox[0][1] - bbox[0][0]
    outSize = tuple(int(round(v)) for v in outSize)
    return transform.warp(im, pat, output_shape=outSize)
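
A hedged usage sketch for Example #2; the file name, bounding box and angle are illustrative. bbox is ((x0, x1), (y0, y1)) in pixel coordinates and angle is in radians:

import math
import numpy as np
from skimage import io, transform

im = io.imread('photo.png')                    # hypothetical input image
bbox = ((100, 300), (50, 200))                 # ((x0, x1), (y0, y1))
crop = RotateAndCrop(im, bbox, math.radians(15))
io.imsave('crop.png', (crop * 255).astype(np.uint8))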
Example #3
def distort(image, from_points, to_points, roi_points):
    """
    Args:
        image: openCV image (np.ndarray)
        roi_points: roi points list
        from_points and to_points: distort image from_points to to_points
    """
    # Convert openCV array to PIL data
    image = Image.fromarray(image)
    image = image.convert('RGBA')

    from_points = np.concatenate((roi_points, from_points))
    to_points = np.concatenate((roi_points, to_points))

    affin = transform.PiecewiseAffineTransform()
    affin.estimate(to_points, from_points)
    image_array = transform.warp(image, affin)
    image_array = np.array(image_array * 255, dtype='uint8')


    if image_array.shape[2] == 1:
        image_array = image_array.reshape(
            (image_array.shape[0], image_array.shape[1]))
    warped_image = Image.fromarray(image_array, 'RGBA')
    image.paste(warped_image, (0, 0), warped_image)

    return np.asarray(image)
Example #4
    def linkEbsdMap(self, ebsdMap, transformType="affine", order=2):
        """Calculates the transformation required to align EBSD dataset to DIC

        Args:
            ebsdMap(ebsd.Map): EBSD map object to link
            transformType(string, optional): affine, piecewiseAffine or polynomial
            order(int, optional): Order of polynomial transform to apply
        """

        self.ebsdMap = ebsdMap
        if transformType == "piecewiseAffine":
            self.ebsdTransform = tf.PiecewiseAffineTransform()
            self.ebsdTransformInv = self.ebsdTransform.inverse
        elif transformType == "polynomial":
            self.ebsdTransform = tf.PolynomialTransform()
            # You can't calculate the inverse of a polynomial transform
            # so have to estimate by swapping source and destination
            # homog points
            self.ebsdTransformInv = tf.PolynomialTransform()
            self.ebsdTransformInv.estimate(np.array(self.ebsdMap.homogPoints),
                                           np.array(self.homogPoints),
                                           order=order)
            # calculate transform from EBSD to DIC frame
            self.ebsdTransform.estimate(np.array(self.homogPoints),
                                        np.array(self.ebsdMap.homogPoints),
                                        order=order)
            return
        else:
            self.ebsdTransform = tf.AffineTransform()
            self.ebsdTransformInv = self.ebsdTransform.inverse

        # calculate transform from EBSD to DIC frame
        self.ebsdTransform.estimate(np.array(self.homogPoints),
                                    np.array(self.ebsdMap.homogPoints))
Example #5
def affine_transform_by_arr(img,
                            arrx,
                            arry,
                            smoothx=False,
                            smoothy=False,
                            mvx=10,
                            mvy=10):
    # generate the source grid points before deformation
    [r, c, d] = img.shape
    src_cols = np.linspace(0, c, int(np.sqrt(len(arrx))))
    src_rows = np.linspace(0, r, int(np.sqrt(len(arry))))
    src_rows, src_cols = np.meshgrid(src_rows, src_cols)
    src = np.dstack([src_cols.flat, src_rows.flat])[0]
    # optionally smooth the displacement arrays with a moving average
    if smoothx:
        lx = len(arrx)
        arrx = np.convolve(arrx, np.ones(mvx) / mvx, mode='valid')
        arrx = skt.resize(arrx, (lx, 1), anti_aliasing=True, mode='reflect')[:, 0]
    if smoothy:
        ly = len(arry)
        arry = np.convolve(arry, np.ones(mvy) / mvy, mode='valid')
        arry = skt.resize(arry, (ly, 1), anti_aliasing=True, mode='reflect')[:, 0]
    # add the displacements to the grid points
    dst_rows = src[:, 1] + arrx
    dst_cols = src[:, 0] + arry
    dst = np.vstack([dst_cols, dst_rows]).T
    # apply the piecewise affine transform
    affin = skt.PiecewiseAffineTransform()
    affin.estimate(src, dst)
    return skt.warp(img, affin)
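
A usage sketch for Example #5, assuming skt is skimage.transform; the displacement arrays must have a square-number length so they fill the n x n control grid (here 100 values for a 10 x 10 grid, with made-up magnitudes):

import numpy as np
import skimage.transform as skt
from skimage import data

img = data.astronaut()
arrx = np.random.uniform(-15, 15, 100)   # displacements added to the row coordinates
arry = np.random.uniform(-15, 15, 100)   # displacements added to the column coordinates
warped = affine_transform_by_arr(img, arrx, arry, smoothx=True, smoothy=True)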
Example #6
def convert2shaclo(fname=None, stitch_correction=False):
    panties = os.listdir('./dream/')
    if fname is None:
        fname = input("Type pantie name: ./dream/")
    if fname in panties:
        pantie = io.imread('./dream/'+fname)
        [r,c,d] = pantie.shape
        # move from hip to front
        patch = np.copy(pantie[-170:,546:,:])
        pantie[-80:,546:,:] = 0
        patch = skt.resize(patch[::-1,::-1,:],(patch.shape[0],40),anti_aliasing=True,mode='reflect')
        [pr,pc,d] = patch.shape
        pantie[157:157+pr,:pc,:] = np.uint8(patch*255)

        # cut intermediate pantie (for accurate stitching on the hip)
        pantie[260:,546:,:]=0
        pantie = pantie[:365,:,:]
        pantie = pantie[:,:-10,:]
        
        if stitch_correction:
            pantie = np.delete(pantie,[np.arange(200,275)],1)
            pantie = np.pad(pantie[:,:,:],((0,0),(0,40),(0,0)),mode='reflect')
            pantie = np.pad(pantie[:,:,:],((0,0),(0,50),(0,0)),mode='reflect')

        # Affine transform matrix
        src_cols = np.linspace(0, c, 10)
        src_rows = np.linspace(0, r, 10)
        src_rows, src_cols = np.meshgrid(src_rows, src_cols)
        src = np.dstack([src_cols.flat, src_rows.flat])[0]
        shifter_row = np.zeros(src.shape[0])
        shifter_col = np.zeros(src.shape[0])
        shifter_row[10:50] = (np.sin(np.linspace(0, 1 * np.pi, src.shape[0]))[20:60]*40)
        shifter_row[0:20] = -(np.sin(np.linspace(0, 1 * np.pi, src.shape[0]))[50:70]*15)
        shifter_row[50:] = -(np.sin(np.linspace(0, 1 * np.pi, src.shape[0]))[0:50]*10)
        shifter_row = np.convolve(shifter_row,np.ones(30)/30,mode='same')
        dst_rows = src[:, 1] + shifter_row -50
        dst_cols = src[:, 0] + shifter_col
        dst = np.vstack([dst_cols, dst_rows]).T
        affin = skt.PiecewiseAffineTransform()
        affin.estimate(src,dst)
        pantie = np.uint8(skt.warp(pantie, affin)*255)

        # mirroring and finalize
        overlap = 6
        [r,c,d] = pantie.shape
        pantie_new = np.zeros((r,c*2-overlap*2,d),dtype=np.uint8)
        pantie_inv = pantie[:,::-1,:]
        pantie_new[:r,:c,:] = pantie_inv
        pantie_new[:r,c-overlap:c*2-overlap,:] = pantie[:,overlap:,:]
        if stitch_correction:
            mag_c = 1.7
        else:
            mag_c = 1.72
        mag_r = 1.8
        out = skt.resize(pantie_new, (int(pantie_new.shape[0] * mag_r), int(pantie_new.shape[1] * mag_c)), anti_aliasing=True, mode='reflect')
        io.imsave('shaclo_pantie.png',np.uint8(out*255))
    else:
        print("Cannot find it")
Example #7
def mesh_transform(img, arr):
    [r, c, d] = img.shape
    src_cols = np.linspace(0, c, int(np.sqrt(arr.shape[0])))
    src_rows = np.linspace(0, r, int(np.sqrt(arr.shape[0])))
    src_rows, src_cols = np.meshgrid(src_rows, src_cols)
    src = np.dstack([src_cols.flat, src_rows.flat])[0]
    affin = skt.PiecewiseAffineTransform()
    affin.estimate(arr, src)
    return skt.warp(img, affin)
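
A usage sketch for Example #7, assuming skt is skimage.transform; arr holds the n*n target control points laid out in the same order as the internal meshgrid, here a regular grid with random jitter (made-up values):

import numpy as np
import skimage.transform as skt
from skimage import data

img = data.astronaut()
r, c = img.shape[:2]
n = 10
grid_rows, grid_cols = np.meshgrid(np.linspace(0, r, n), np.linspace(0, c, n))
grid = np.dstack([grid_cols.flat, grid_rows.flat])[0]
arr = grid + np.random.uniform(-20, 20, grid.shape)   # jittered control points
warped = mesh_transform(img, arr)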
Example #8
    def convert(self, image):
        pantie = np.array(image)

        # Rear to front
        patch = np.copy(pantie[-110:-5, 548:, :])[::-1, ::-1, :]
        [pr, pc, d] = patch.shape
        pantie[105:105 + pr, :pc, :] = patch
        pantie = pantie[:-100, :, :]
        pantie = np.pad(pantie, [(100, 0), (0, 0), (0, 0)], mode='constant')
        pantie = perspective_transform(
            pantie, np.matrix('1, 0.01, 0; 0, 1, 0; -0.0008,0,1'))

        # Affine transform
        [r, c, d] = pantie.shape
        src_cols = np.linspace(0, c, 10)
        src_rows = np.linspace(0, r, 10)
        src_rows, src_cols = np.meshgrid(src_rows, src_cols)
        src = np.dstack([src_cols.flat, src_rows.flat])[0]
        shifter_row = np.zeros(src.shape[0])
        shifter_col = np.zeros(src.shape[0])
        shifter_row = (
            np.sin(np.linspace(0, 1 * np.pi, src.shape[0]) - np.pi / 4) * 40)
        shifter_col = -np.sin(
            np.linspace(0, 1 * np.pi, src.shape[0]) + np.pi / 8) * 20
        shifter_row[shifter_row < 0] = 0
        shifter_row = np.convolve(shifter_row, np.ones(10) / 10, mode='valid')
        shifter_row = skt.resize(shifter_row, (100, 1),
                                 anti_aliasing=True,
                                 mode='reflect')[:, 0]
        shifter_col = np.convolve(shifter_col, np.ones(10) / 10, mode='valid')
        shifter_col = skt.resize(shifter_col, (100, 1),
                                 anti_aliasing=True,
                                 mode='reflect')[:, 0]
        dst_rows = src[:, 1] + shifter_row
        dst_cols = src[:, 0] + shifter_col
        dst = np.vstack([dst_cols, dst_rows]).T
        affin = skt.PiecewiseAffineTransform()
        affin.estimate(src, dst)
        pantie = skt.warp(pantie, affin)

        # Mirroring
        pantie = pantie[25:290, 19:430, :]
        pantie = skt.resize(
            pantie,
            (int(pantie.shape[0] * 1.47), int(pantie.shape[1] * 1.49)),
            anti_aliasing=True,
            mode='reflect')
        pantie = np.bitwise_and(np.uint8(pantie[7:, :, :] * 255), self.mask)
        [r, c, d] = pantie.shape
        npantie = np.zeros((r, c * 2, d), dtype=np.uint8)
        npantie[:, c:, :] = pantie
        npantie[:, :c, :] = pantie[:, ::-1, :]

        return Image.fromarray(npantie)
Example #9
    def get_next(self, size):
        nx, ny = self.num_grid
        gy, gx = np.meshgrid(size[1] * np.linspace(-0.5, 0.5, ny),
                             size[0] * np.linspace(-0.5, 0.5, nx))
        g = np.stack([gx.flatten(), gy.flatten()], axis=-1).astype(np.float32)
        g1 = np.copy(g)
        for i in range(g.shape[0]):
            g1[i, 0] = g[i, 0] + size[0] / (nx - 1) * self.perturbation()
            g1[i, 1] = g[i, 1] + size[1] / (ny - 1) * self.perturbation()
        t = sktf.PiecewiseAffineTransform()
        t.estimate(g, g1)
        return t, lambda s: s
Example #10
def warp_image(image, src_points, dst_points):
    src_points = np.array([[0, 0], [0, image.shape[0]], [image.shape[0], 0],
                           list(image.shape[:2])] + src_points  # .tolist()
                          )
    dst_points = np.array([[0, 0], [0, image.shape[0]], [image.shape[0], 0],
                           list(image.shape[:2])] + dst_points  # .tolist()
                          )

    tform3 = tf.PiecewiseAffineTransform()
    tform3.estimate(dst_points, src_points)

    warped = tf.warp(image, tform3, output_shape=image.shape)
    return warped
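
A usage sketch for Example #10, assuming tf is skimage.transform and the point arguments are plain Python lists of coordinate pairs; note the corner anchors are built from image.shape[0] on both axes, so the snippet is most natural on square images (values below are illustrative):

import numpy as np
import skimage.transform as tf
from skimage import data

image = data.checkerboard()              # 200 x 200, square
src_points = [[100, 100], [140, 60]]     # points in the source image
dst_points = [[110, 105], [135, 70]]     # where that content should end up
warped = warp_image(image, src_points, dst_points)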
Example #11
def get_warped_pwa(img_file, pts_file, used_idx, text_pts):
    n = 68
    img = cv2.imread(img_file)
    img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    width, height, _ = img.shape

    # read img points
    with open(pts_file, 'r') as f:
        tmp = [float(r) for r in f.read().split()]
        img_pts = np.dstack((tmp[:n], tmp[n:]))[0]
        img_pts[:, 0] *= width
        img_pts[:, 1] *= height
        # move eyebrows a bit lower
        idx_eyebrows = list(range(17, 27))
        img_pts[idx_eyebrows, 1] += 5

    # add forehead points to end
    for i in [21, 22, 18, 25]:
        d = -166
        img_pts = np.append(img_pts, [img_pts[i, :] + [0, d]], axis=0)

    # add 4 pts to the mouth center line
    x = np.mean([img_pts[49], img_pts[59]], axis=0)[0]
    y = np.mean(img_pts[60:62], axis=0)[1]
    img_pts = np.append(img_pts, [[x, y]], axis=0)

    x = np.mean([img_pts[53], img_pts[55]], axis=0)[0]
    y = np.mean(img_pts[63:65], axis=0)[1]
    img_pts = np.append(img_pts, [[x, y]], axis=0)

    x = np.mean([img_pts[53], img_pts[55]], axis=0)[0]
    y = np.mean(img_pts[64:66], axis=0)[1]
    img_pts = np.append(img_pts, [[x, y]], axis=0)

    x = np.mean([img_pts[49], img_pts[59]], axis=0)[0]
    y = np.mean([img_pts[60], img_pts[67]], axis=0)[1]
    img_pts = np.append(img_pts, [[x, y]], axis=0)

    # select only used points
    img_pts = img_pts[used_idx, :]
    text_pts_cur = text_pts[used_idx, :]

    # find piecewise affine transform from img to texture
    tform = tf.PiecewiseAffineTransform()
    tform.estimate(img_pts, text_pts_cur)

    return tf.warp(img, tform.inverse, output_shape=(2048, 2048),
                   order=3), img_pts, img
Example #12
    def linkEbsdMap(self, ebsdMap, transformType="affine", order=2):
        """Calculates the transformation required to align EBSD dataset to DIC.

        Parameters
        ----------
        ebsdMap : defdap.ebsd.Map
            EBSD map object to link.
        transformType : str, optional
            affine, piecewiseAffine or polynomial.
        order : int, optional
            Order of polynomial transform to apply.

        """
        self.ebsdMap = ebsdMap
        if transformType.lower() == "piecewiseaffine":
            self.ebsdTransform = tf.PiecewiseAffineTransform()
            self.ebsdTransformInv = self.ebsdTransform.inverse
        elif transformType.lower() == "projective":
            self.ebsdTransform = tf.ProjectiveTransform()
            self.ebsdTransformInv = self.ebsdTransform.inverse
        elif transformType.lower() == "polynomial":
            self.ebsdTransform = tf.PolynomialTransform()
            # You can't calculate the inverse of a polynomial transform
            # so have to estimate by swapping source and destination
            # homog points
            self.ebsdTransformInv = tf.PolynomialTransform()
            self.ebsdTransformInv.estimate(np.array(self.ebsdMap.homogPoints),
                                           np.array(self.homogPoints),
                                           order=order)
            # calculate transform from EBSD to DIC frame
            self.ebsdTransform.estimate(np.array(self.homogPoints),
                                        np.array(self.ebsdMap.homogPoints),
                                        order=order)
            return
        else:
            # default to using affine
            self.ebsdTransform = tf.AffineTransform()
            self.ebsdTransformInv = self.ebsdTransform.inverse

        # calculate transform from EBSD to DIC frame
        self.ebsdTransform.estimate(np.array(self.homogPoints),
                                    np.array(self.ebsdMap.homogPoints))
Example #13
def affine_transform(img, mx, my, inv=False):
    [r, c, d] = img.shape
    src_cols = np.linspace(0, c, 10)
    src_rows = np.linspace(0, r, 10)
    src_rows, src_cols = np.meshgrid(src_rows, src_cols)
    src = np.dstack([src_cols.flat, src_rows.flat])[0]
    shifter_row = np.zeros(src.shape[0])
    shifter_col = np.zeros(src.shape[0])
    if inv:
        line = np.linspace(np.pi, np.pi / 2, src.shape[0])
    else:
        line = np.linspace(np.pi / 2, np.pi, src.shape[0])
    shifter_row = -(np.sin(line) * mx)
    shifter_col = -(np.sin(line) * my)
    dst_rows = src[:, 1] + shifter_row
    dst_cols = src[:, 0] + shifter_col
    dst = np.vstack([dst_cols, dst_rows]).T
    affin = skt.PiecewiseAffineTransform()
    affin.estimate(src, dst)
    return skt.warp(img, affin)
Example #14
    def convert(self, image):
        pantie = np.array(image)
        pantie = np.bitwise_and(pantie, self.mask)
        [r, c, d] = pantie.shape

        # move from hip to front
        patch = np.copy(pantie[-140:-5, 546:, :])
        pantie[-115:, 546:, :] = 0
        patch = skt.resize(patch[::-1, ::-1, :], (patch.shape[0], 63), anti_aliasing=True, mode='reflect')
        [pr, pc, d] = patch.shape
        pantie[127 - 5:127 - 5 + pr, :pc, :] = np.uint8(patch * 255)

        # Affine transform matrix
        src_cols = np.linspace(0, c, 10)
        src_rows = np.linspace(0, r, 10)
        src_rows, src_cols = np.meshgrid(src_rows, src_cols)
        src = np.dstack([src_cols.flat, src_rows.flat])[0]
        shifter_row = np.zeros(src.shape[0])
        shifter_col = np.zeros(src.shape[0])
        shifter_row[30:-30] = (np.sin(np.linspace(0, 1 * np.pi, src.shape[0]) - np.pi / 32) * 100)[30:-30]
        shifter_row[:30] = (np.sin(np.linspace(0, 1 * np.pi, src.shape[0]) + np.pi / 2) * 60)[:30]
        shifter_row[-30:] = (np.sin(np.linspace(0, 1 * np.pi, src.shape[0]) - np.pi / 2) * 80)[-30:]
        shifter_col[13:-30] = -(np.sin(np.linspace(0, 1 * np.pi, src.shape[0]) + np.pi / 8) * 22)[13:-30]

        shifter_row = np.convolve(shifter_row, np.ones(20) / 20, mode='valid')
        shifter_col = np.convolve(shifter_col, np.ones(10) / 10, mode='valid')
        shifter_row = skt.resize(shifter_row, (100, 1), anti_aliasing=True, mode='reflect')[:, 0]
        shifter_col = skt.resize(shifter_col, (100, 1), anti_aliasing=True, mode='reflect')[:, 0]

        dst_rows = src[:, 1] + shifter_row - 110
        dst_cols = src[:, 0] + shifter_col
        dst = np.vstack([dst_cols, dst_rows]).T
        affin = skt.PiecewiseAffineTransform()
        affin.estimate(src, dst)
        pantie = np.uint8(skt.warp(pantie, affin) * 255)[:310, :, :]

        # Finalize
        pantie_ = skt.resize(pantie, (int(pantie.shape[0] * 2.05), int(pantie.shape[1] * 2.05)), anti_aliasing=True, mode='reflect')
        pantie = np.uint8(pantie_ * 255)
        return Image.fromarray(pantie)
Example #15
def convert2anna(fname=None):
    panties = os.listdir('./dream/')
    if fname is None:
        fname = input("Type pantie name: ./dream/")
    if fname in panties:
        pantie = io.imread('./dream/'+fname)
        mask = io.imread('./mask/mask_anna.png')
        pantie = np.bitwise_and(pantie,mask)
        [r,c,d] = pantie.shape
        
        # move from hip to front
        patch = np.copy(pantie[-170:,546:,:])
        pantie[-100:,546:,:] = 0
        patch = skt.resize(patch[::-1,::-1,:],(patch.shape[0],40),anti_aliasing=True,mode='reflect')
        [pr,pc,d] = patch.shape
        pantie[157:157+pr,:pc,:] = np.uint8(patch*255)

        # Affine transform matrix
        src_cols = np.linspace(0, c, 10)
        src_rows = np.linspace(0, r, 10)
        src_rows, src_cols = np.meshgrid(src_rows, src_cols)
        src = np.dstack([src_cols.flat, src_rows.flat])[0]
        shifter_row = np.zeros(src.shape[0])
        shifter_col = np.zeros(src.shape[0])
        shifter_row = -(np.sin(np.linspace(0, 1 * np.pi, src.shape[0])-np.pi/8)*60)
        shifter_row[-30:] += (np.sin(np.linspace(0, 1 * np.pi, src.shape[0]))[-30:]*50)
        shifter_row = np.convolve(shifter_row,np.ones(30)/30,mode='same')
        dst_rows = src[:, 1] + shifter_row -20
        dst_cols = src[:, 0] + shifter_col
        dst = np.vstack([dst_cols, dst_rows]).T
        affin = skt.PiecewiseAffineTransform()
        affin.estimate(src,dst)
        pantie = np.uint8(skt.warp(pantie, affin)*255)
        pantie = pantie[:,6:-35,:]

        # Finalize
        pantie = skt.resize(pantie, (int(pantie.shape[0] * 1.25), int(pantie.shape[1] * 1.56)), anti_aliasing=True, mode='reflect')
        io.imsave('anna_pantie.png',np.uint8(pantie*255))
    else:
        print("Cannot find it")
Example #16
def view_morph(m1, p1, m2, p2):

    # performs a view morph of two input images and correspondences.
    #
    # inputs ....................................................................
    # m1                image 1. [y x {rgb}]
    # p1                points in image 1. [{x,y} points]
    # m2                image 2. [y x {rgb}]
    # p2                points in image 2. [{x,y} points]

    cc = np.random.permutation(np.arange(p1.shape[1]))
    pl.imshow(m1)
    pl.scatter(p1[0, :], p1[1, :], c=cc, marker='.')
    pl.show()
    pl.imshow(m2)
    pl.scatter(p2[0, :], p2[1, :], c=cc, marker='.')
    pl.show()

    # rectify images and correspondences
    m1r, m2r, H1, H2 = rectify_images(m1, p1, m2, p2)
    p1r = tform_pts(p1, H1)
    p2r = tform_pts(p2, H2)

    alpha = .3  # fraction the camera moves
    src, dst = np.array(p1r.T), np.array(p2r.T)

    # # ny,nx,_ = m1.shape
    # # src = np.array(np.append(src, [[0,0],[0,nx-1],[ny-1,0],[nx-1,ny-1]], axis=0))
    # # dst = np.array(np.append(dst, [[0,0],[0,nx-1],[ny-1,0],[nx-1,ny-1]], axis=0))
    #
    # # http://scikit-image.org/docs/dev/auto_examples/transform/plot_piecewise_affine.html

    tform = tf.PiecewiseAffineTransform()
    tform.estimate(src, dst)
    out = tf.warp(m1r, tform)
    pl.imshow(m1r)
    pl.show()
    pl.imshow(out)
    pl.show()
Example #17
    for xy in zip([0] * 2 + [WIDTH * 2 - 1] * 2, [HEIGHT * 2 - 1, 0] * 2)
])
MEAN_SHAPE = np.concatenate([MEAN_SHAPE, CORNERS])

WARPED_IMGS = []

for img, face, shape in FACE_LIST:
    #Affine transform to align with mean shape
    shape_points = np.array([[p.x, p.y] for p in shape.parts()])
    af_tform = tr.estimate_transform('affine', shape_points, MEAN_SHAPE[:-4])
    affine_img = tr.warp(img,
                         af_tform.inverse,
                         output_shape=(int(WIDTH * 2), int(HEIGHT * 2)))
    shape_points = np.array(af_tform(shape_points))
    #Perform a piecewise affine transform from the face shape to the mean face shape
    #i.e. warp the face image to match the average shape
    pw_af_tform = tr.PiecewiseAffineTransform()
    shape_points = np.concatenate([shape_points, CORNERS])
    pw_af_tform.estimate(shape_points, MEAN_SHAPE)
    warped = tr.warp(affine_img,
                     pw_af_tform.inverse,
                     output_shape=(int(WIDTH * 2), int(HEIGHT * 2)))

    WARPED_IMGS.append(warped)

#Compute the mean for each pixel, can replace with median etc.
MEAN_IMG = np.mean(WARPED_IMGS, axis=0)

plt.imshow(MEAN_IMG)
plt.show()
Example #18
def convert2mishe(fname=None):
    panties = os.listdir('./dream/')
    if fname is None:
        fname = input("Type pantie name: ./dream/")
    if fname in panties:
        pantie = io.imread('./dream/' + fname)
        mask = io.imread('./mask/mask_mishe.png')
        pantie = np.bitwise_and(pantie, mask)
        [r, c, d] = pantie.shape

        # move from hip to front
        patch = np.copy(pantie[-140:-5, 546:, :])
        pantie[-115:, 546:, :] = 0
        patch = skt.resize(patch[::-1, ::-1, :], (patch.shape[0], 63),
                           anti_aliasing=True,
                           mode='reflect')
        [pr, pc, d] = patch.shape
        pantie[127 - 5:127 - 5 + pr, :pc, :] = np.uint8(patch * 255)

        # Affine transform matrix
        src_cols = np.linspace(0, c, 10)
        src_rows = np.linspace(0, r, 10)
        src_rows, src_cols = np.meshgrid(src_rows, src_cols)
        src = np.dstack([src_cols.flat, src_rows.flat])[0]
        shifter_row = np.zeros(src.shape[0])
        shifter_col = np.zeros(src.shape[0])
        shifter_row[30:-30] = (
            np.sin(np.linspace(0, 1 * np.pi, src.shape[0]) - np.pi / 32) *
            100)[30:-30]
        shifter_row[:30] = (
            np.sin(np.linspace(0, 1 * np.pi, src.shape[0]) + np.pi / 2) *
            60)[:30]
        shifter_row[-30:] = (
            np.sin(np.linspace(0, 1 * np.pi, src.shape[0]) - np.pi / 2) *
            80)[-30:]
        shifter_col[13:-30] = -(
            np.sin(np.linspace(0, 1 * np.pi, src.shape[0]) + np.pi / 8) *
            22)[13:-30]

        shifter_row = np.convolve(shifter_row, np.ones(20) / 20, mode='valid')
        shifter_col = np.convolve(shifter_col, np.ones(10) / 10, mode='valid')
        shifter_row = skt.resize(shifter_row, (100, 1),
                                 anti_aliasing=True,
                                 mode='reflect')[:, 0]
        shifter_col = skt.resize(shifter_col, (100, 1),
                                 anti_aliasing=True,
                                 mode='reflect')[:, 0]

        dst_rows = src[:, 1] + shifter_row - 110
        dst_cols = src[:, 0] + shifter_col
        dst = np.vstack([dst_cols, dst_rows]).T
        affin = skt.PiecewiseAffineTransform()
        affin.estimate(src, dst)
        pantie = np.uint8(skt.warp(pantie, affin) * 255)[:310, :, :]

        # Finalize
        pantie_ = skt.resize(
            pantie,
            (int(pantie.shape[0] * 2.05), int(pantie.shape[1] * 2.05)),
            anti_aliasing=True,
            mode='reflect')
        io.imsave('mishe_pantie.png', np.uint8(pantie_ * 255))
    else:
        print("Cannot find it")
Example #19
def piecewise_transform(image,
                        numcols=5,
                        numrows=5,
                        warp_left_right=10,
                        warp_up_down=10,
                        order=1):
    """2D piecewise affine transformation.
        Control points are used to define the mapping. The transform is based on
        a Delaunay triangulation of the points to form a mesh. Each triangle is
        used to find a local affine transform.
        Parameters
        ----------
        img : ndarray
        numcols : int, optional (default: 5)
            numbers of the colums to transformation
        numrows : int, optional (default: 5)
            numbers of the rows to transformation
        warp_left_right: int, optional (default: 10)
            the pixels of transformation left and right
        warp_up_down: int, optional (default: 10)
            the pixels of transformation up and down
        Returns
        -------
        Transformed_image : ndarray
        Examples
        --------
            >>> Transformed_img = piecetransform(image,numcols=10, numrows=10, warp_left_right=5, warp_up_down=5)
        """

    rows, cols = image.shape[0], image.shape[1]

    src_cols = np.linspace(0, cols, numcols, dtype=int)
    src_rows = np.linspace(0, rows, numrows, dtype=int)
    src_rows, src_cols = np.meshgrid(src_rows, src_cols)
    src = np.dstack([src_cols.flat, src_rows.flat])[0]

    src_rows_new = np.ndarray.transpose(src_rows)
    src_cols_new = np.ndarray.transpose(src_cols)
    # src_new = np.dstack([src_cols_new.flat, src_rows_new.flat])[0]

    dst_cols = np.ndarray(src_cols.shape)
    dst_rows = np.ndarray(src_rows.shape)
    for i in range(0, numcols):
        for j in range(0, numrows):
            if src_cols[i, j] == 0 or src_cols[i, j] == cols:
                dst_cols[i, j] = src_cols[i, j]
            else:
                dst_cols[i, j] = src_cols[i, j] + np.random.uniform(
                    -1, 1) * warp_left_right

            if src_rows[i, j] == 0 or src_rows[i, j] == rows:
                dst_rows[i, j] = src_rows[i, j]
            else:
                dst_rows[i, j] = src_rows[i, j] + np.random.uniform(
                    -1, 1) * warp_up_down

    dst = np.dstack([dst_cols.flat, dst_rows.flat])[0]

    # dst_rows_new = np.ndarray.transpose(dst_rows)
    # dst_cols_new = np.ndarray.transpose(dst_cols)
    # dst_new = np.dstack([dst_cols_new.flat, dst_rows_new.flat])[0]

    tform = transform.PiecewiseAffineTransform()
    tform.estimate(src, dst)

    img_new = transform.warp(image,
                             tform,
                             output_shape=(rows, cols),
                             order=order,
                             preserve_range=True)
    img_new = img_new.astype(image.dtype)

    return img_new
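
A short usage sketch for Example #19 as a random-augmentation step (parameter values are illustrative):

import numpy as np
from skimage import data, transform

image = data.camera()
# a few independently distorted variants of the same image
augmented = [piecewise_transform(image, numcols=8, numrows=8,
                                 warp_left_right=6, warp_up_down=6)
             for _ in range(4)]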
Example #20
    def convert(self, image):
        pantie = np.array(image)
        pantie = np.bitwise_and(pantie, self.mask)

        # move from hip to front
        if self.is_frill:
            patch = np.copy(pantie[-230:-5, 485:, :])
            patch = np.pad(patch, [(0, 0), (100, 100), (0, 0)],
                           mode='constant')
            patch = skt.rotate(patch, 90)
        else:
            patch = np.copy(pantie[-212:-5, 485:, :])
            patch = np.pad(patch, [(0, 0), (100, 100), (0, 0)],
                           mode='constant')
            patch = skt.rotate(patch, 90)
        # Affine transform matrix for patch
        [r, c, d] = patch.shape
        src_cols = np.linspace(0, c, 10)
        src_rows = np.linspace(0, r, 10)
        src_rows, src_cols = np.meshgrid(src_rows, src_cols)
        src = np.dstack([src_cols.flat, src_rows.flat])[0]
        shifter_row = np.zeros(src.shape[0])
        shifter_col = np.zeros(src.shape[0])
        shifter_row = +(
            np.sin(np.linspace(0, 1 * np.pi, src.shape[0]) + np.pi / 8) * 60)
        dst_rows = src[:, 1] + shifter_row + 25
        dst_cols = src[:, 0] + shifter_col
        dst = np.vstack([dst_cols, dst_rows]).T
        affin = skt.PiecewiseAffineTransform()
        affin.estimate(src, dst)
        patch = np.uint8(skt.warp(patch, affin) * 255)
        pantie[-250:, 485:, :] = 0
        if self.is_frill:
            patch = skt.rotate(patch, 90)[:, 59:160]
            patch = skt.resize(patch[:, :, :], (215, 86),
                               anti_aliasing=True,
                               mode='reflect')
            pantie[119:119 + 215, :86, :] = np.uint8(patch * 255)
        else:
            patch = skt.rotate(patch, 90)[:, 68:155]
            patch = skt.resize(patch[:, :, :], (210, 90),
                               anti_aliasing=True,
                               mode='reflect')
            pantie[119:119 + 210, :90, :] = np.uint8(patch * 255)

        # Affine transform matrix for whole image
        pantie = np.pad(pantie, [(50, 0), (50, 0), (0, 0)], mode='constant')
        [r, c, d] = pantie.shape
        src_cols = np.linspace(0, c, 10)
        src_rows = np.linspace(0, r, 10)
        src_rows, src_cols = np.meshgrid(src_rows, src_cols)
        src = np.dstack([src_cols.flat, src_rows.flat])[0]
        shifter_row = np.zeros(src.shape[0])
        shifter_col = np.zeros(src.shape[0])
        shifter_row = (
            np.sin(np.linspace(0, 1 * np.pi, src.shape[0]) - np.pi / 2) * 80)
        shifter_row[30:60] += (
            np.sin(np.linspace(0, 1 * np.pi, src.shape[0]) - np.pi / 8) *
            40)[30:60]
        shifter_row[:30] += (
            np.sin(np.linspace(0, 1 * np.pi, src.shape[0]) + np.pi / 2) *
            60)[:30]

        shifter_col = np.sin(
            np.linspace(0, 1 * np.pi, src.shape[0]) + np.pi / 2) * 125
        shifter_col[-50:] -= (
            np.sin(np.linspace(0, 1 * np.pi, src.shape[0]) - np.pi / 3) *
            260)[-50:]
        shifter_col = abs(shifter_col)

        shifter_row = np.convolve(shifter_row, np.ones(30) / 30, mode='valid')
        shifter_col = np.convolve(shifter_col, np.ones(10) / 10, mode='valid')
        shifter_row = skt.resize(shifter_row, (100, 1),
                                 anti_aliasing=True,
                                 mode='reflect')[:, 0]
        shifter_col = skt.resize(shifter_col, (100, 1),
                                 anti_aliasing=True,
                                 mode='reflect')[:, 0]
        shifter_row[0:20] = -17

        dst_rows = src[:, 1] + shifter_row - 20
        dst_cols = src[:, 0] + shifter_col - 80
        dst = np.vstack([dst_cols, dst_rows]).T
        affin = skt.PiecewiseAffineTransform()
        affin.estimate(src, dst)
        pantie = np.uint8(skt.warp(pantie, affin) * 255)[20:-30, 19:-180, :]
        [r, c, d] = pantie.shape
        npantie = np.zeros((r, c * 2, d), dtype=np.uint8)
        npantie[:, c:, :] = pantie
        npantie[:, :c, :] = pantie[:, ::-1, :]

        # Finalize
        npantie = skt.resize(npantie,
                             (int(npantie.shape[0] * 2.51 * 1.04), int(npantie.shape[1] * 2.51)),
                             anti_aliasing=True,
                             mode='reflect')
        npantie = np.uint8(npantie * 255)
        return Image.fromarray(npantie)
Example #21
# Alternatively, use PiecewiseAffineTransform from scikit-image to transform the image
leftmost = tuple(cnt[cnt[:, :, 0].argmin()][0])
rightmost = tuple(cnt[cnt[:, :, 0].argmax()][0])
topmost = tuple(cnt[cnt[:, :, 1].argmin()][0])
bottommost = tuple(cnt[cnt[:, :, 1].argmax()][0])

dst = list()
src = list()
for y, x, z in np.transpose(np.nonzero(top_border)):
    dst.append([x, y])
    src.append([x, topmost[1]])
for y, x, z in np.transpose(np.nonzero(bottom_border)):
    dst.append([x, y])
    src.append([x, bottommost[1]])
for y, x, z in np.transpose(np.nonzero(left_border)):
    dst.append([x, y])
    src.append([leftmost[0], y])
for y, x, z in np.transpose(np.nonzero(right_border)):
    dst.append([x, y])
    src.append([rightmost[0], y])
src = np.array(src)
dst = np.array(dst)

tform3 = tf.PiecewiseAffineTransform()
tform3.estimate(src, dst)
warped = tf.warp(img, tform3, order=2)
warped = warped[85:170, 31:138]

cv2.imshow("warped", warped)
cv2.waitKey(0)
Example #22
    def convert(self, image):
        pantie = np.array(image)
        patch = np.copy(pantie[-180:-5, 546:, :])
        patch = skt.resize(patch[::-1, ::-1, :], (200, 65),
                           anti_aliasing=True,
                           mode='reflect')
        [pr, pc, d] = patch.shape
        pantie[127 - 5:127 - 5 + pr, :pc, :] = np.uint8(patch * 255)

        # Front affine transform
        front = pantie[:, :300]
        front = np.pad(front, [(100, 0), (100, 100), (0, 0)], mode='constant')
        [r, c, d] = front.shape
        src_cols = np.linspace(0, c, 10)
        src_rows = np.linspace(0, r, 10)
        src_rows, src_cols = np.meshgrid(src_rows, src_cols)
        src = np.dstack([src_cols.flat, src_rows.flat])[0]
        shifter_row = np.zeros(src.shape[0])
        shifter_col = np.zeros(src.shape[0])
        shifter_row = np.cos(
            np.linspace(0, 2 * np.pi, src.shape[0]) + np.pi / 8) * 10
        shifter_row[50:] = np.sin(np.linspace(0, 1 * np.pi, 50)) * 20
        shifter_row[70:] += np.sin(np.linspace(0, 1 * np.pi, 30)) * 20
        shifter_col = np.sin(
            np.linspace(0, 1 * np.pi, src.shape[0]) - np.pi / 4) * -50
        shifter_row = np.convolve(shifter_row, np.ones(20) / 20, mode='valid')
        shifter_row = skt.resize(shifter_row, (100, 1),
                                 anti_aliasing=True,
                                 mode='reflect')[:, 0]
        dst_rows = src[:, 1] + shifter_row
        dst_cols = src[:, 0] + shifter_col
        dst = np.vstack([dst_cols, dst_rows]).T
        affin = skt.PiecewiseAffineTransform()
        affin.estimate(src, dst)
        front = skt.warp(front, affin)
        front = skt.rotate(front, -15)
        front = front[60:-100, 60:-10, :]
        front = skt.resize(
            front, (int(front.shape[0] * 1.4), int(front.shape[1] * 1.4)),
            anti_aliasing=True,
            mode='reflect')

        # First back affine transform
        back = pantie[:, 300:]
        back = np.pad(back, [(100, 100), (100, 100), (0, 0)], mode='constant')
        [r, c, d] = back.shape
        src_cols = np.linspace(0, c, 10)
        src_rows = np.linspace(0, r, 10)
        src_rows, src_cols = np.meshgrid(src_rows, src_cols)
        src = np.dstack([src_cols.flat, src_rows.flat])[0]
        shifter_row = np.zeros(src.shape[0])
        shifter_col = np.zeros(src.shape[0])
        shifter_row = np.sin(
            np.linspace(0, 1 * np.pi, src.shape[0]) + np.pi / 2) * 120
        dst_rows = src[:, 1] + shifter_row
        dst_cols = src[:, 0] + shifter_col
        dst = np.vstack([dst_cols, dst_rows]).T
        affin.estimate(src, dst)
        back = skt.rotate(skt.warp(back, affin), 34, resize=True)

        # Second back affine transform
        [r, c, d] = back.shape
        src_cols = np.linspace(0, c, 10)
        src_rows = np.linspace(0, r, 10)
        src_rows, src_cols = np.meshgrid(src_rows, src_cols)
        src = np.dstack([src_cols.flat, src_rows.flat])[0]
        shifter_row = np.zeros(src.shape[0])
        shifter_col = np.zeros(src.shape[0])
        shifter_row = np.sin(
            np.linspace(0, 2 * np.pi, src.shape[0]) + np.pi / 10) * -40
        shifter_row[:25] += np.cos(np.linspace(0, 0.5 * np.pi,
                                               25)) * 88  # left up
        shifter_row[25:45] -= np.sin(np.linspace(0, 1 * np.pi,
                                                 20)) * 5  # center up
        shifter_row = np.convolve(shifter_row, np.ones(20) / 20, mode='valid')
        shifter_row = skt.resize(shifter_row, (100, 1),
                                 anti_aliasing=True,
                                 mode='reflect')[:, 0]
        dst_rows = src[:, 1] + shifter_row
        dst_cols = src[:, 0] + shifter_col
        dst = np.vstack([dst_cols, dst_rows]).T
        affin.estimate(src, dst)
        back = skt.warp(back, affin)
        back = back[180:-380, 80:-150]
        back = skt.resize(
            back, (int(back.shape[0] * 1.45), int(back.shape[1] * 1.43)),
            anti_aliasing=True,
            mode='reflect')

        front = front[:, :-8]
        back = back[:, 5:]
        [fr, fc, _] = front.shape
        [br, bc, _] = back.shape
        shiftc = 8
        pantie = np.zeros((np.max([fr, br]), fc + bc - shiftc, d))
        pantie[:br, -bc:] = back
        pantie[:fr, :fc] = front

        # Finalize
        pantie = np.uint8(pantie * 255)
        return Image.fromarray(pantie)
Example #23
def convert2fuzzy(fname=None, stitch_frill=False):
    panties = os.listdir('./dream/')
    if fname is None:
        fname = input("Type pantie name: ./dream/")
    if fname in panties:
        pantie = io.imread('./dream/' + fname)
        mask = io.imread('./mask/mask_fuzzy.png')
        pantie = np.bitwise_and(pantie, mask)
        # [r,c,d] = pantie.shape

        # move from hip to front
        if stitch_frill:
            patch = np.copy(pantie[-230:-5, 485:, :])
            patch = np.pad(patch, [(0, 0), (100, 100), (0, 0)],
                           mode='constant')
            patch = skt.rotate(patch, 90)
        else:
            patch = np.copy(pantie[-212:-5, 485:, :])
            patch = np.pad(patch, [(0, 0), (100, 100), (0, 0)],
                           mode='constant')
            patch = skt.rotate(patch, 90)
        # Affine transform matrix for patch
        [r, c, d] = patch.shape
        src_cols = np.linspace(0, c, 10)
        src_rows = np.linspace(0, r, 10)
        src_rows, src_cols = np.meshgrid(src_rows, src_cols)
        src = np.dstack([src_cols.flat, src_rows.flat])[0]
        shifter_row = np.zeros(src.shape[0])
        shifter_col = np.zeros(src.shape[0])
        shifter_row = +(
            np.sin(np.linspace(0, 1 * np.pi, src.shape[0]) + np.pi / 8) * 60)
        dst_rows = src[:, 1] + shifter_row + 25
        dst_cols = src[:, 0] + shifter_col
        dst = np.vstack([dst_cols, dst_rows]).T
        affin = skt.PiecewiseAffineTransform()
        affin.estimate(src, dst)
        patch = np.uint8(skt.warp(patch, affin) * 255)
        pantie[-250:, 485:, :] = 0
        if stitch_frill:
            patch = skt.rotate(patch, 90)[:, 59:160]
            patch = skt.resize(patch[:, :, :], (215, 86),
                               anti_aliasing=True,
                               mode='reflect')
            pantie[119:119 + 215, :86, :] = np.uint8(patch * 255)
        else:
            patch = skt.rotate(patch, 90)[:, 68:155]
            patch = skt.resize(patch[:, :, :], (210, 90),
                               anti_aliasing=True,
                               mode='reflect')
            pantie[119:119 + 210, :90, :] = np.uint8(patch * 255)

        # Affine transform matrix for whole image
        pantie = np.pad(pantie, [(50, 0), (50, 0), (0, 0)], mode='constant')
        [r, c, d] = pantie.shape
        src_cols = np.linspace(0, c, 10)
        src_rows = np.linspace(0, r, 10)
        src_rows, src_cols = np.meshgrid(src_rows, src_cols)
        src = np.dstack([src_cols.flat, src_rows.flat])[0]
        shifter_row = np.zeros(src.shape[0])
        shifter_col = np.zeros(src.shape[0])
        shifter_row = (
            np.sin(np.linspace(0, 1 * np.pi, src.shape[0]) - np.pi / 2) * 80)
        shifter_row[30:60] += (
            np.sin(np.linspace(0, 1 * np.pi, src.shape[0]) - np.pi / 8) *
            40)[30:60]
        shifter_row[:30] += (
            np.sin(np.linspace(0, 1 * np.pi, src.shape[0]) + np.pi / 2) *
            60)[:30]

        shifter_col = np.sin(
            np.linspace(0, 1 * np.pi, src.shape[0]) + np.pi / 2) * 125
        shifter_col[-50:] -= (
            np.sin(np.linspace(0, 1 * np.pi, src.shape[0]) - np.pi / 3) *
            260)[-50:]
        shifter_col = abs(shifter_col)

        shifter_row = np.convolve(shifter_row, np.ones(30) / 30, mode='valid')
        shifter_col = np.convolve(shifter_col, np.ones(10) / 10, mode='valid')
        shifter_row = skt.resize(shifter_row, (100, 1),
                                 anti_aliasing=True,
                                 mode='reflect')[:, 0]
        shifter_col = skt.resize(shifter_col, (100, 1),
                                 anti_aliasing=True,
                                 mode='reflect')[:, 0]
        shifter_row[0:20] = -17

        dst_rows = src[:, 1] + shifter_row - 20
        dst_cols = src[:, 0] + shifter_col - 80
        dst = np.vstack([dst_cols, dst_rows]).T
        affin = skt.PiecewiseAffineTransform()
        affin.estimate(src, dst)
        pantie = np.uint8(skt.warp(pantie, affin) * 255)[20:-30, 19:-180, :]
        [r, c, d] = pantie.shape
        npantie = np.zeros((r, c * 2, d), dtype=np.uint8)
        npantie[:, c:, :] = pantie
        npantie[:, :c, :] = pantie[:, ::-1, :]

        # Finalize
        npantie = skt.resize(npantie,
                             (int(npantie.shape[0] * 2.51 * 1.04), int(npantie.shape[1] * 2.51)),
                             anti_aliasing=True,
                             mode='reflect')
        io.imsave('fuzzy_pantie.png', np.uint8(npantie * 255))
    else:
        print("Cannot find it")