Example #1
0
    def __call__(self,
                 beta,
                 theta,
                 exp=None,
                 reg_type='legacy',
                 get_skin=False,
                 name=None):
        """
        Obtain the posed model from shape (beta) & pose (theta) inputs.
        Theta includes the global rotation.

        Args:
          beta: N x 30 shape coefficients.
          theta: N x 186 pose in 3-D axis-angle representation (the first
            3 values are the global rotation).
          exp: optional N x 200 expression coefficients; skipped when None.
          reg_type: 'legacy' or 'coco25' — selects the joint regressor.
          get_skin: when True, also return the skinned vertices.
          name: unused; kept for interface compatibility.

        Updates:
          self.J_transformed: N x nJoints x 3 joint locations after shaping
            & posing with beta and theta.

        Returns:
          - joints: N x K x 3 joint locations (K depends on the regressor).
          If get_skin is True, returns (verts, joints) where verts is
          N x V x 3.

        Raises:
          ValueError: if reg_type is not a known regressor type.
        """
        # Resolve the regressor up front so an unknown reg_type fails
        # before any heavy computation, and so the joint projection below
        # is written once instead of per-branch.
        if reg_type == 'legacy':
            regressor = self.J_regressor
        elif reg_type == 'coco25':
            regressor = self.J_regressor_coco25
        else:
            raise ValueError('Unknown regressor type')

        num_batch = beta.shape[0]

        # 1. Add shape blend shapes.
        # (N x 30) x (30 x V*3) = N x V x 3
        v_shaped = np.reshape(
            np.matmul(beta, self.shapedirs),
            [-1, self.size[0], self.size[1]]) + self.v_template

        # 2. Infer shape-dependent joint locations, one coordinate axis
        # at a time: N x nJoints x 3.
        J = np.stack(
            [np.matmul(v_shaped[:, :, i], self.J_regressor)
             for i in range(3)],
            axis=2)

        # 3. Convert axis-angle pose to rotation matrices:
        # N x nJoints x 3 x 3. (No posedirs in the Adam model, so no pose
        # blend shapes are added.)
        Rs = np.reshape(batch_rodrigues(np.reshape(theta, [-1, 3])),
                        [-1, self.nJoints, 3, 3])

        # Optional expression blend shapes on top of the shaped template.
        if exp is not None:
            v_faced = np.reshape(np.matmul(exp, self.facedirs),
                                 [-1, self.size[0], self.size[1]]) + v_shaped
        else:
            v_faced = v_shaped

        # 4. Get the global joint locations and per-joint transforms A.
        self.J_transformed, A = batch_global_rigid_transformation(
            Rs, J, self.parents)

        # 5. Linear blend skinning.
        # W is N x V x nJoints: per-vertex blend weights over the joints.
        W = np.reshape(np.tile(self.weights, [num_batch, 1]),
                       [num_batch, -1, self.nJoints])
        # (N x V x nJoints) x (N x nJoints x 16) -> N x V x 4 x 4
        T = np.reshape(
            np.matmul(W, np.reshape(A, [num_batch, self.nJoints, 16])),
            [num_batch, -1, 4, 4])

        # Homogeneous coordinates, then apply the per-vertex transforms.
        v_homo_in = np.concatenate(
            [v_faced, np.ones([num_batch, v_faced.shape[1], 1])], 2)
        v_homo = np.matmul(T, np.expand_dims(v_homo_in, -1))
        verts = v_homo[:, :, :3, 0]

        # Project the skinned vertices to the requested joint set.
        joints = np.stack(
            [np.matmul(verts[:, :, i], regressor) for i in range(3)],
            axis=2)

        if get_skin:
            return verts, joints
        return joints
Example #2
0
File: batch_smpl.py  Project: minar09/MGN
    def call(self, theta, beta, trans, v_personal):
        """
        Obtain SMPL vertices and joints from pose (theta), shape (beta),
        global translation (trans) and per-vertex offsets (v_personal).
        Theta includes the global rotation.
        Args:
          beta: N x 10 shape coefficients
          theta: N x 72 pose (3-D axis-angle rep) when
            self.theta_in_rodrigues; otherwise rotation matrices
          trans: N x 3 global translation added to joints and vertices
          v_personal: N x V x 3 per-vertex offsets (truncated to the
            first 6890 vertices when not high-res)

        Returns:
          - verts_t: posed and translated vertices,
            N x 6890 x 3 (low res) or N x 27554 x 3 (hres)
          - v_shaped_personal: unposed shaped vertices plus offsets
          - v_shaped: unposed naked shaped vertices
          - J_transformed: N x 24 x 3 posed, translated joint locations
        """

        # Cast all inputs to the model dtype so mixed-dtype callers work.
        theta, beta, trans, v_personal = tf.cast(
            theta, self.data_type), tf.cast(beta, self.data_type), tf.cast(
                trans, self.data_type), tf.cast(v_personal, self.data_type)

        # Low-res model uses only the first 6890 vertices of the offsets.
        if not self.isHres:
            v_personal = v_personal[:, :6890, :]

        # Static batch size. NOTE(review): int_shape returns None for a
        # dynamic batch dimension — assumes batch size is known at graph
        # build time; confirm against callers.
        # num_batch = int(tf.shape(beta)[0])
        num_batch = tf.keras.backend.int_shape(beta)[0]

        # 1. Add shape blend shapes
        # (N x 10) x (10 x 6890*3) -> N x 6890 x 3
        v_shaped_scaled = tf.reshape(
            tf.matmul(beta, self.shapedirs, name='shape_bs'),
            [-1, self.size[0], self.size[1]]) + self.v_template

        if self.scale:
            # Height proxy from landmark vertices: (2802, 6262) minus
            # (2237, 6728) on the y axis — presumably head/foot pairs;
            # TODO confirm against the mesh topology.
            body_height = (
                v_shaped_scaled[:, 2802, 1] + v_shaped_scaled[:, 6262, 1]) - (
                    v_shaped_scaled[:, 2237, 1] + v_shaped_scaled[:, 6728, 1])
            scale = tf.reshape(1.66 / body_height, (-1, 1, 1))
        else:
            # Unit scale with matching batch shape.
            scale = tf.reshape(tf.ones_like(v_shaped_scaled[:, 2802, 1]),
                               (-1, 1, 1))

        # Scale to 1.66m height
        v_shaped_scaled *= scale

        v_shaped = v_shaped_scaled

        # High-res model: map the low-res shaped mesh through self.fn
        # per sample (presumably an upsampling function — verify).
        if self.isHres:
            v_shaped = tf.map_fn(self.fn, v_shaped, dtype=self.data_type)

        # Add per-vertex personal offsets on top of the shaped template.
        v_shaped_personal = v_shaped + v_personal

        # 2. Infer shape-dependent joint locations.
        # Some gpu dont support float64 operations
        with tf.device('/cpu:0'):
            Jx = tf.transpose(
                tf.sparse_tensor_dense_matmul(
                    self.J_regressor, tf.transpose(v_shaped_scaled[:, :, 0])))
            Jy = tf.transpose(
                tf.sparse_tensor_dense_matmul(
                    self.J_regressor, tf.transpose(v_shaped_scaled[:, :, 1])))
            Jz = tf.transpose(
                tf.sparse_tensor_dense_matmul(
                    self.J_regressor, tf.transpose(v_shaped_scaled[:, :, 2])))
        J = tf.stack([Jx, Jy, Jz], axis=2)

        # 3. Add pose blend shapes
        # N x 24 x 3 x 3
        if self.theta_in_rodrigues:
            Rs = tf.reshape(batch_rodrigues(tf.reshape(theta, [-1, 3])),
                            [-1, 24, 3, 3])
        else:
            if self.theta_is_perfect_rotmtx:
                Rs = theta
            else:
                # Project near-rotations onto SO(3) via SVD: R = U V^T.
                s, u, v = tf.svd(theta)
                Rs = tf.matmul(u, tf.transpose(v, perm=[0, 1, 3, 2]))

        # Ignore global rotation (joint 0) in the pose feature:
        # 23 joints x 9 = 207.
        pose_feature = tf.reshape(Rs[:, 1:, :, :] - tf.eye(3, dtype=Rs.dtype),
                                  [-1, 207])

        # (N x 207) x (207, 20670) -> N x 6890 x 3
        v_posed = tf.reshape(tf.matmul(pose_feature, self.posedirs),
                             [-1, self.size[0], self.size[1]])

        if self.isHres:
            v_posed = tf.map_fn(self.fn, v_posed)
        v_posed += v_shaped_personal

        # 4. Get the global joint location
        J_transformed, A = batch_global_rigid_transformation(
            Rs, J, self.parents)
        J_transformed += tf.expand_dims(trans, axis=1)

        # 5. Do skinning:
        if self.isHres:
            W = tf.reshape(tf.tile(self.weights_hres, [num_batch, 1]),
                           [num_batch, -1, 24])
        else:
            # W is N x 6890 x 24
            W = tf.reshape(tf.tile(self.weights_, [num_batch, 1]),
                           [num_batch, -1, 24])
        # (N x 6890 x 24) x (N x 24 x 16)
        T = tf.reshape(tf.matmul(W, tf.reshape(A, [num_batch, 24, 16])),
                       [num_batch, -1, 4, 4])
        v_posed_homo = tf.concat([
            v_posed,
            tf.ones([num_batch, v_posed.shape[1], 1], dtype=v_posed.dtype)
        ], 2)

        v_homo = tf.matmul(T, tf.expand_dims(v_posed_homo, -1))

        # Homogeneous w stays 1 for rigid transforms, so no divide needed.
        verts = v_homo[:, :, :3, 0]  # / v_homo[:, :, 3, 0:1]

        # Apply the global translation to the skinned vertices.
        verts_t = verts + tf.expand_dims(trans, axis=1)

        # Return verts, unposed verts, unposed naked verts, joints
        return verts_t, v_shaped_personal, v_shaped, J_transformed
    def __call__(self, beta, theta, get_skin=False, name=None):
        """
        Run the SMPL forward pass for shape (beta) & pose (theta) inputs.
        Theta includes the global rotation.
        Args:
          beta: N x 10
          theta: N x 72 (with 3-D axis-angle rep)

        Updates:
        self.J_transformed: N x 24 x 3 joint location after shaping
                 & posing with beta and theta
        Returns:
          - joints: N x 19 or 14 x 3 joint locations depending on joint_type
        If get_skin is True, also returns
          - Verts: N x 6980 x 3 and the rotation matrices Rs
        """

        with tf.compat.v1.variable_scope(name, "smpl_main", [beta, theta]):
            batch_size = tf.shape(input=beta)[0]

            # 1. Shape blend shapes:
            # (N x 10) x (10 x 6890*3) -> N x 6890 x 3
            shaped_verts = tf.reshape(
                tf.matmul(beta, self.shapedirs, name='shape_bs'),
                [-1, self.size[0], self.size[1]]) + self.v_template

            # 2. Shape-dependent joint locations, regressed per axis.
            joint_locs = tf.stack(
                [tf.matmul(shaped_verts[:, :, axis], self.J_regressor)
                 for axis in range(3)],
                axis=2)

            # 3. Pose blend shapes: axis-angle -> N x 24 x 3 x 3 rotations.
            rot_mats = tf.reshape(
                batch_rodrigues(tf.reshape(theta, [-1, 3])), [-1, 24, 3, 3])
            with tf.compat.v1.variable_scope("lrotmin"):
                # The global rotation (joint 0) is excluded from the pose
                # feature; 23 joints x 9 entries = 207.
                pose_map = tf.reshape(rot_mats[:, 1:, :, :] - tf.eye(3),
                                      [-1, 207])

            # (N x 207) x (207 x 20670) -> N x 6890 x 3, added on top of
            # the shaped template.
            posed_verts = tf.reshape(
                tf.matmul(pose_map, self.posedirs),
                [-1, self.size[0], self.size[1]]) + shaped_verts

            # 4. Global joint locations plus per-joint transforms A.
            self.J_transformed, A = batch_global_rigid_transformation(
                rot_mats, joint_locs, self.parents)

            # 5. Linear blend skinning: each vertex blends the 24 joint
            # transforms with its weights (N x 6890 x 24).
            blend_weights = tf.reshape(
                tf.tile(self.weights1, [batch_size, 1]),
                [batch_size, -1, 24])
            # (N x 6890 x 24) x (N x 24 x 16) -> N x 6890 x 4 x 4
            vert_transforms = tf.reshape(
                tf.matmul(blend_weights,
                          tf.reshape(A, [batch_size, 24, 16])),
                [batch_size, -1, 4, 4])

            homo_verts = tf.concat(
                [posed_verts,
                 tf.ones([batch_size, posed_verts.shape[1], 1])], 2)
            skinned = tf.matmul(vert_transforms,
                                tf.expand_dims(homo_verts, -1))
            verts = skinned[:, :, :3, 0]

            # Regress the cocoplus/lsp joints from the skinned vertices.
            joints = tf.stack(
                [tf.matmul(verts[:, :, axis], self.joint_regressor)
                 for axis in range(3)],
                axis=2)

            if get_skin:
                return verts, joints, rot_mats
            return joints