Code example #1
    # NOTE: class-method excerpt; assumes module-level imports of os, pickle, cv2,
    # numpy as np, PIL.Image and tqdm, plus the preprocess_frame / prepare_input helpers.
    def process(self, image_list):
        """
        Args:
            image_list: list of tuples, where first element is an image_name,
                second element is a dictionary with the hand bounding_boxes 
                as well as other information
        """
        hand_mesh_list = []
        print('Extracting hand pose and hand mesh...')
        for image_name, hand_info in tqdm(image_list):
            # cache filename: last three path components joined with '#', extension stripped
            cache_key = '#'.join(image_name.split('/')[-3:])[:-self.extension_length]
            save_name = os.path.join(self.cache_loc, cache_key + '.pkl')
            
            # reuse the cached result when it exists and overwriting is disabled
            if os.path.exists(save_name) and not self.overwrite:
                with open(save_name, 'rb') as f:
                    hand_mesh_list.append(pickle.load(f))
            else:
                hand_mesh = {}
                # crop the image around each detected hand and pass it through the model
                for which_hand in ['left', 'right']:
                    if which_hand in hand_info:
                        image_raw = cv2.imread(image_name)
                        # cropping the hand
                        crop = image_raw[int(hand_info[which_hand]['top_y']):int(hand_info[which_hand]['bottom_y'])+1, 
                                    int(hand_info[which_hand]['left_x']):int(hand_info[which_hand]['right_x'])+1, :]
                        frame = preprocess_frame(crop)
                        img = Image.fromarray(frame.copy())
                        hand_crop = cv2.resize(np.array(img), (256, 256)) 
                        
                        # the two hands get opposite flip settings before the forward pass
                        if which_hand == 'left':
                            hand_image = prepare_input(hand_crop, flip_left_right=False)
                        elif which_hand == 'right':
                            hand_image = prepare_input(hand_crop, flip_left_right=True)

                        output = self.forward_pass_3d(self.model, hand_image)
                        verts = output['verts'].cpu().detach().numpy()[0]
                        joints = output['joints'].cpu().detach().numpy()[0]
                        hand_mesh[which_hand] = {'verts': verts, 'joints': joints}
                
                hand_mesh_list.append(hand_mesh)
                # save into cache
                with open(save_name, 'wb') as f:
                    pickle.dump(hand_mesh, f)
        return hand_mesh_list                
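
To make the expected input concrete, here is a minimal, hypothetical example of the image_list structure described in the docstring above, using the bounding-box keys the method actually reads ('top_y', 'bottom_y', 'left_x', 'right_x'). The path and coordinates are made up, and 'extractor' stands in for an instance of the class this method belongs to.

    # Hypothetical example of the image_list structure that process() expects.
    # The path and pixel coordinates below are illustrative only.
    image_list = [
        ('data/videos/P01/frame_000001.jpg',           # image_name
         {'left':  {'top_y': 120, 'bottom_y': 260,     # pixel bounding box of the left hand
                    'left_x': 300, 'right_x': 440},
          'right': {'top_y': 130, 'bottom_y': 270,     # pixel bounding box of the right hand
                    'left_x': 620, 'right_x': 760}}),
    ]
    # hand_mesh_list = extractor.process(image_list)   # 'extractor' is an assumed instance
    #                                                  # of the class that owns process()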
Code example #2
    # NOTE: demo-script excerpt; assumes imports of cv2, pickle, numpy as np,
    # matplotlib.pyplot as plt and PIL.Image, plus the repo helpers
    # (reload_model, preprocess_frame, prepare_input, forward_pass_3d, displaymano).
    model = reload_model(args.resume, opts, no_beta=args.no_beta)

    model.eval()

    print("Input image is processed flipped and unflipped "
          "(as left and right hand), both outputs are displayed")

    # load the MANO right-hand model to get the mesh faces
    with open("misc/mano/MANO_RIGHT.pkl", "rb") as p_f:
        mano_right_data = pickle.load(p_f, encoding="latin1")
        faces = mano_right_data["f"]

    fig = plt.figure(figsize=(4, 4))
    fig.clf()
    frame = cv2.imread(args.image_path)
    frame = preprocess_frame(frame)
    input_image = prepare_input(frame)
    cv2.imshow("input", frame)
    img = Image.fromarray(frame.copy())
    hand_crop = cv2.resize(np.array(img), (256, 256))

    noflip_hand_image = prepare_input(hand_crop, flip_left_right=False)
    flip_hand_image = prepare_input(hand_crop, flip_left_right=True)
    noflip_output = forward_pass_3d(model, noflip_hand_image)
    flip_output = forward_pass_3d(model, flip_hand_image)
    flip_verts = flip_output["verts"].cpu().detach().numpy()[0]
    noflip_verts = noflip_output["verts"].cpu().detach().numpy()[0]
    ax = fig.add_subplot(1, 2, 1, projection="3d")
    ax.title.set_text("unflipped input")
    displaymano.add_mesh(ax, flip_verts, faces, flip_x=True)
    if "objpoints3d" in flip_output: