Example #1
def morph(src_img, src_points, dest_img, dest_points,
          video, width=500, height=600, num_frames=20, fps=10,
          out_frames=None, out_video=None, alpha=False, plot=False):
  """
  Create a morph sequence from source to destination image

  :param src_img: ndarray source image
  :param src_points: source image array of x,y face points
  :param dest_img: ndarray destination image
  :param dest_points: destination image array of x,y face points
  :param video: facemorpher.videoer.Video object
  """
  size = (height, width)
  stall_frames = np.clip(int(fps*0.15), 1, fps)  # Show first & last longer
  plt = plotter.Plotter(plot, num_images=num_frames, folder=out_frames)
  num_frames -= (stall_frames * 2)  # No need to process src and dest image

  plt.plot_one(src_img)
  video.write(src_img, stall_frames)

  # Produce morph frames!
  for percent in np.linspace(1, 0, num=num_frames):
    points = locator.weighted_average_points(src_points, dest_points, percent)
    src_face = warper.warp_image(src_img, src_points, points, size)
    end_face = warper.warp_image(dest_img, dest_points, points, size)
    average_face = blender.weighted_average(src_face, end_face, percent)
    # Uncomment to strip the background (apply an alpha mask) when alpha is True
    #average_face = alpha_image(average_face, points) if alpha else average_face
    plt.plot_one(average_face, 'save')
    video.write(average_face)

  plt.plot_one(dest_img)
  video.write(dest_img, stall_frames)

  plt.show()
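
A minimal driver sketch for the morph() above, not part of facemorpher itself: the images, landmark points, and the ListWriter stand-in for the video argument are all placeholders, and the helper modules used inside morph() (locator, warper, blender, plotter) are assumed to be importable.

import numpy as np

class ListWriter(object):
  """Stand-in for the video argument: collects frames in a list instead of encoding."""
  def __init__(self):
    self.frames = []

  def write(self, frame, num_times=1):
    self.frames.extend([frame] * num_times)

# Placeholder inputs; real landmark points would come from a face locator.
src_img = np.zeros((600, 500, 3), dtype=np.uint8)
dest_img = np.zeros((600, 500, 3), dtype=np.uint8)
src_points = np.array([[100, 100], [400, 100], [250, 500]])
dest_points = np.array([[120, 110], [380, 120], [260, 480]])

morph(src_img, src_points, dest_img, dest_points, ListWriter(),
      width=500, height=600, num_frames=20, fps=10)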
Example #2
def morph(src_img, src_points, dest_img, dest_points,
          video, width=500, height=600, num_frames=20, fps=10,
          out_frames=None, out_video=None, alpha=False, plot=False):
  """
  Create a morph sequence from source to destination image

  :param src_img: ndarray source image
  :param src_points: source image array of x,y face points
  :param dest_img: ndarray destination image
  :param dest_points: destination image array of x,y face points
  :param video: facemorpher.videoer.Video object
  """
  size = (height, width)
  stall_frames = np.clip(int(fps*0.15), 1, fps)  # Show first & last longer
  plt = plotter.Plotter(plot, num_images=num_frames, folder=out_frames)
  num_frames -= (stall_frames * 2)  # No need to process src and dest image

  plt.plot_one(src_img)
  video.write(src_img, stall_frames)

  # Produce morph frames!
  for percent in np.linspace(1, 0, num=num_frames):
    points = locator.weighted_average_points(src_points, dest_points, percent)
    src_face = warper.warp_image(src_img, src_points, points, size)
    end_face = warper.warp_image(dest_img, dest_points, points, size)
    average_face = blender.weighted_average(src_face, end_face, percent)
    average_face = alpha_image(average_face, points) if alpha else average_face
    plt.plot_one(average_face, 'save')
    video.write(average_face)

  plt.plot_one(dest_img)
  video.write(dest_img, stall_frames)

  plt.show()
Example #3
def morph(name, src_img, src_points, dest_img, dest_points, width=500, height=600, num_frames=20, fps=10,
          out_frames=None, plot=False, background='black'):
  """
  Blend a single morph frame between the source and destination images and save it

  :param src_img: ndarray source image
  :param src_points: source image array of x,y face points
  :param dest_img: ndarray destination image
  :param dest_points: destination image array of x,y face points
  :param name: filename used when saving the blended output frame
  :param background: 'black', 'transparent' or 'average'
  """
  size = (height, width)
  stall_frames = np.clip(int(fps*0.15), 1, fps)  # Show first & last longer
  plt = plotter.Plotter(plot, num_images=num_frames, out_folder=out_frames)
  num_frames -= (stall_frames * 2)  # No need to process src and dest image

  plt.plot_one(src_img)

  # Produce morph frames!
  percent = np.linspace(1, 0, num=num_frames)
  points = locator.weighted_average_points(src_points, dest_points, percent[8])
  src_face = warper.warp_image(src_img, src_points, points, size)
  end_face = warper.warp_image(dest_img, dest_points, points, size)
  average_face = blender.weighted_average(src_face, end_face, percent[8])
  if background in ('transparent', 'average'):
    mask = blender.mask_from_points(average_face.shape[:2], points)
    average_face = np.dstack((average_face, mask))

    if background == 'average':
      average_background = blender.weighted_average(src_img, dest_img, percent[8])
      average_face = blender.overlay_image(average_face, mask, average_background)

  #plt.plot_one(average_face)
  #name = src_img[-11:-4]+"_"+dest_img[-11:-4]+".png"
  plt.save(average_face, name)
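
A hedged sketch of calling this single-frame variant; the inputs below are synthetic placeholders, and the helper modules it relies on (locator, warper, blender, plotter) are assumed to be importable.

import numpy as np

# Placeholder inputs; real landmark points would come from a face locator.
src_img = np.zeros((600, 500, 3), dtype=np.uint8)
dest_img = np.zeros((600, 500, 3), dtype=np.uint8)
src_points = np.array([[100, 100], [400, 100], [250, 500]])
dest_points = np.array([[120, 110], [380, 120], [260, 480]])

# Saves one blended frame (index 8 of the percent schedule) as out.png.
morph('out.png', src_img, src_points, dest_img, dest_points,
      width=500, height=600, num_frames=20, fps=10,
      out_frames='frames', plot=False, background='average')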
Example #4
def test_local():
    from functools import partial
    import cv2
    import scipy.misc
    import locator
    import aligner
    from matplotlib import pyplot as plt

    # Load source image
    face_points_func = partial(locator.face_points, '../data')
    base_path = '../females/Screenshot 2015-03-04 17.11.12.png'
    src_path = '../females/BlDmB5QCYAAY8iw.jpg'
    src_img = cv2.imread(src_path)

    # Define control points for warps
    src_points = face_points_func(src_path)
    base_img = cv2.imread(base_path)
    base_points = face_points_func(base_path)

    size = (600, 500)
    src_img, src_points = aligner.resize_align(src_img, src_points, size)
    base_img, base_points = aligner.resize_align(base_img, base_points, size)
    result_points = locator.weighted_average_points(src_points, base_points,
                                                    0.2)

    # Perform transform
    dst_img1 = warp_image(src_img, src_points, result_points, size)
    dst_img2 = warp_image(base_img, base_points, result_points, size)

    import blender
    ave = blender.weighted_average(dst_img1, dst_img2, 0.6)
    mask = blender.mask_from_points(size, result_points)
    blended_img = blender.poisson_blend(dst_img1, dst_img2, mask)

    plt.subplot(2, 2, 1)
    plt.imshow(ave)
    plt.subplot(2, 2, 2)
    plt.imshow(dst_img1)
    plt.subplot(2, 2, 3)
    plt.imshow(dst_img2)
    plt.subplot(2, 2, 4)

    plt.imshow(blended_img)
    plt.show()
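
One caveat with this variant: cv2.imread returns images in BGR channel order, while matplotlib's imshow expects RGB, so the subplots above will show swapped colors. A small conversion at load time fixes that:

src_img = cv2.cvtColor(cv2.imread(src_path), cv2.COLOR_BGR2RGB)    # load as RGB
base_img = cv2.cvtColor(cv2.imread(base_path), cv2.COLOR_BGR2RGB)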
Example #5
def test_local():
  from functools import partial
  import scipy.ndimage
  import scipy.misc
  import locator
  import aligner
  from matplotlib import pyplot as plt

  # Load source image
  face_points_func = partial(locator.face_points, '../data')
  base_path = '../females/Screenshot 2015-03-04 17.11.12.png'
  src_path = '../females/BlDmB5QCYAAY8iw.jpg'
  src_img = scipy.ndimage.imread(src_path)[:, :, :3]

  # Define control points for warps
  src_points = face_points_func(src_path)
  base_img = scipy.ndimage.imread(base_path)[:, :, :3]
  base_points = face_points_func(base_path)

  size = (600, 500)
  src_img, src_points = aligner.resize_align(src_img, src_points, size)
  base_img, base_points = aligner.resize_align(base_img, base_points, size)
  result_points = locator.weighted_average_points(src_points, base_points, 0.2)

  # Perform transform
  dst_img1 = warp_image(src_img, src_points, result_points, size)
  dst_img2 = warp_image(base_img, base_points, result_points, size)

  print('blending')
  import blender
  ave = blender.weighted_average(dst_img1, dst_img2, 0.6)
  mask = blender.mask_from_points(size, result_points)
  blended_img = blender.poisson_blend(dst_img1, dst_img2, mask)

  plt.subplot(2, 2, 1)
  plt.imshow(ave)
  plt.subplot(2, 2, 2)
  plt.imshow(dst_img1)
  plt.subplot(2, 2, 3)
  plt.imshow(dst_img2)
  plt.subplot(2, 2, 4)

  plt.imshow(blended_img)
  plt.show()
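
Note that scipy.ndimage.imread, used for the two loads above, was deprecated and later removed from SciPy, so this variant fails on recent releases. A hedged replacement, assuming the imageio package is available:

import imageio
src_img = imageio.imread(src_path)[:, :, :3]     # RGB array, alpha channel dropped
base_img = imageio.imread(base_path)[:, :, :3]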