def lookat_matrix(up, lookat_direction):
    """Construct a matrix that "looks at" a direction."""
    # up, lookat_direction: [Batch, 3]
    # return [Batch, 3, 3] column-major cam2world lookat matrix.
    # -z is the lookat (forward) direction; x is the right vector; y is the up vector.
    # Stack the x, y, z vectors by column to get the lookat matrix:
    # [[x.x y.x z.x]
    #  [x.y y.y z.y]
    #  [x.z y.z z.z]]
    z = tf.linalg.l2_normalize(-lookat_direction, axis=-1)
    x = tf.linalg.l2_normalize(tf.cross(up, z), axis=-1)
    y = tf.cross(z, x)
    lookat = tf.stack([x, y, z], axis=-1)
    return lookat
Example 2
def setup_v_rot_(links):
    # Per-segment radii and half-length vectors between consecutive points.
    r = links.thickness[:-1, newaxis]
    dl = (links.points[:-1] - links.points[1:]) / 2
    dlh = dl / (tf.norm(dl, axis=1)[:, newaxis] + 1e-10)
    # Use the coordinate axis with the smallest component of dlh as a
    # reference vector, then complete a frame with two cross products.
    idx = tf.argmin(dlh, 1)
    r0 = tf.one_hot(idx, 3)
    r1h = tf.cross(dlh, r0)
    r2h = tf.cross(dlh, r1h)
    r1 = r * r1h
    r2 = r * r2h
    # Stack the scaled frame (vi) and the direction frame (ri) column-wise;
    # v_rot = vi @ ri^T.
    vi = tf.concat(
        (dl[:, :, newaxis], r1[:, :, newaxis], r2[:, :, newaxis]), axis=2)
    ri = tf.concat(
        (dlh[:, :, newaxis], r1h[:, :, newaxis], r2h[:, :, newaxis]),
        axis=2)
    links.v_rot = tf.matmul(vi, ri, transpose_b=True)
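A minimal usage sketch for the example above. It assumes TensorFlow 1.x (where tf.cross is available; it is tf.linalg.cross in 2.x), that the bare newaxis comes from NumPy, and it uses a SimpleNamespace as a stand-in for whatever `links` object the original code expects:

import numpy as np
from numpy import newaxis
import tensorflow as tf
from types import SimpleNamespace

links = SimpleNamespace(
    points=tf.constant(np.random.rand(5, 3), dtype=tf.float32),
    thickness=tf.constant(np.random.rand(5), dtype=tf.float32),
)
setup_v_rot_(links)
with tf.Session() as sess:
    print(sess.run(links.v_rot).shape)  # one 3x3 matrix per segment: (4, 3, 3)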
Example 3
    def ref_point(self, v):
        # Express the twist at a new reference point v: the angular part is
        # unchanged, the linear part gains rot x v (with batch broadcasting).
        if v.shape.ndims > self.rot.shape.ndims:
            rot = self.rot[None] * (tf.zeros_like(v) + 1.)
            vel = self.vel + tf1.cross(rot, v)
        elif v.shape.ndims < self.rot.shape.ndims:
            n = self.rot.shape[0].value
            vel = self.vel + tf1.cross(self.rot, v[None] * tf.ones((n, 1)))
            rot = self.rot
        else:
            vel = self.vel + tf1.cross(self.rot, v)
            rot = self.rot

        if self.is_batch or v.shape.ndims == 2:
            return Twist(tf.concat([vel, rot], 1))
        else:
            return Twist(tf.concat([vel, rot], 0))
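For intuition, the relation computed above is the standard rigid-body one: shifting a twist's reference point leaves the angular velocity unchanged and adds omega x v to the linear velocity. A plain NumPy check of that relation, independent of the Twist class (which is not shown in this listing):

import numpy as np

omega = np.array([0.0, 0.0, 1.0])   # angular part (unchanged by the shift)
vel = np.array([1.0, 0.0, 0.0])     # linear part at the old reference point
v = np.array([0.0, 2.0, 0.0])       # offset to the new reference point

vel_new = vel + np.cross(omega, v)  # what ref_point computes per entry
print(vel_new)                      # [-1.  0.  0.]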
def lookat_matrix(up, lookat_direction):
    """Return rotation matrices given camera's lookat directions and up vectors.

  Using OpenGL's coordinate system: -Z is the camera's lookat and +Y is up.
  Args:
    up: [BATCH, 3] the up vectors.
    lookat_direction:  [BATCH, 3] the lookat directions.

  Returns:
    [BATCH, 3, 3] the rotation matrices (from camera to world frame).
  """
    z = tf.linalg.l2_normalize(-lookat_direction, axis=-1)
    x = tf.linalg.l2_normalize(tf.cross(up, z), axis=-1)
    y = tf.cross(z, x)
    # Stack x, y, z basis vectors by column.
    return tf.stack([x, y, z], axis=-1)
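A quick sanity check for lookat_matrix, assuming TensorFlow 1.x (tf.cross; use tf.linalg.cross in 2.x): the result should be orthonormal and, per the OpenGL convention in the docstring, its -Z column should point along the lookat direction.

import numpy as np
import tensorflow as tf

up = tf.constant([[0., 1., 0.]])
lookat_direction = tf.constant([[1., 0., -1.]])
R = lookat_matrix(up, lookat_direction)

with tf.Session() as sess:
    r = sess.run(R)[0]
    print(np.allclose(r.T @ r, np.eye(3), atol=1e-6))  # orthonormal
    d = np.array([1., 0., -1.]) / np.sqrt(2.)
    print(np.allclose(-r[:, 2], d, atol=1e-6))          # -Z is the lookat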
    def Compute_norm(self, face_shape, facemodel):
        shape = face_shape
        face_id = facemodel.face_buf
        point_id = facemodel.point_buf

        # face_id and point_id indices start from 1
        face_id = tf.cast(face_id - 1, tf.int32)
        point_id = tf.cast(point_id - 1, tf.int32)

        # compute a normal for each face
        v1 = tf.gather(shape, face_id[:, 0], axis=1)
        v2 = tf.gather(shape, face_id[:, 1], axis=1)
        v3 = tf.gather(shape, face_id[:, 2], axis=1)
        e1 = v1 - v2
        e2 = v2 - v3
        face_norm = tf.cross(e1, e2)

        face_norm = tf.nn.l2_normalize(face_norm,
                                       dim=2)  # normalize face normals first
        # Append a zero normal so padded indices in point_buf contribute nothing.
        face_norm = tf.concat(
            [face_norm, tf.zeros([tf.shape(face_shape)[0], 1, 3])], axis=1)

        # compute a normal for each vertex from its one-ring face neighborhood
        v_norm = tf.reduce_sum(tf.gather(face_norm, point_id, axis=1), axis=2)
        v_norm = tf.nn.l2_normalize(v_norm, dim=2)

        return v_norm
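A self-contained way to exercise Compute_norm on a toy mesh under TF 1.x. The facemodel here is a stand-in SimpleNamespace with 1-indexed face_buf/point_buf for a tetrahedron; point_buf is padded with index 5, which lands on the appended all-zero normal. The method never touches self, so it can be called with None once pasted as a free function:

import numpy as np
import tensorflow as tf
from types import SimpleNamespace

verts = np.array([[1, 1, 1], [1, -1, -1], [-1, 1, -1], [-1, -1, 1]], np.float32)
face_shape = tf.constant(verts[None])                    # [1, 4, 3]
facemodel = SimpleNamespace(
    face_buf=tf.constant([[1, 2, 3], [1, 3, 4], [1, 4, 2], [2, 4, 3]]),
    point_buf=tf.constant([[1, 2, 3, 5], [1, 3, 4, 5],
                           [1, 2, 4, 5], [2, 3, 4, 5]]),  # 5 -> zero normal
)
v_norm = Compute_norm(None, face_shape, facemodel)
with tf.Session() as sess:
    n = sess.run(v_norm)
    print(n.shape)                                        # (1, 4, 3)
    print(np.allclose(np.linalg.norm(n, axis=-1), 1.0))   # unit vertex normals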
def rotation_between_vectors(v1, v2):
    """Get the rotation matrix to align v1 with v2 (v2 = R * v1).

  The rotation is computed by Rodrigues' rotation formula given an axis-angle.
  The function returns an identity matrix when v1 = v2. When v1 = -v2, this
  gives an 180-degree rotation around any axis perpendicular to v1 or v2 . The
  function is discontinuous in this case.

  Args:
    v1: [BATCH, 3] 3d vectors.
    v2: [BATCH, 3] 3d vectors.

  Returns:
    [BATCH, 3, 3] rotation matrices.

  Raises:
    Input has the wrong dimensions.
  """
    with tf.name_scope(None, 'rotation_between_vectors', [v1, v2]):
        if v1.shape[-1] != 3 or v2.shape[-1] != 3:
            raise ValueError('Input has the wrong dimensions.')

        batch = v1.shape.as_list()[0]
        v1 = tf.linalg.l2_normalize(v1, -1)
        v2 = tf.linalg.l2_normalize(v2, -1)
        cross = tf.cross(v1, v2)
        cos_angle = tf.reduce_sum(v1 * v2, -1, keepdims=True)
        sin_angle = tf.norm(cross, axis=-1)
        x_axis = tf.tile(tf.constant([[1., 0., 0.]]), [batch, 1])
        y_axis = tf.tile(tf.constant([[0., 1., 0.]]), [batch, 1])
        z_axis = tf.tile(tf.constant([[0., 0., 1.]]), [batch, 1])
        identity = tf.eye(3, batch_shape=[batch])
        # When v1 and v2 are antiparallel (cos_angle == -1) the cross product
        # vanishes, so fall back to an axis perpendicular to v1.
        rotation_axis = tf.where(
            tf.abs(tf.tile(cos_angle, [1, 3]) - (-tf.ones([batch, 3]))) < 1e-6,
            tf.cross(v1, x_axis) + tf.cross(v1, y_axis) + tf.cross(v1, z_axis),
            cross)
        rotation_axis = tf.linalg.l2_normalize(rotation_axis, -1)
        ss = skew_symmetric(rotation_axis)
        sin_angle = sin_angle[:, tf.newaxis, tf.newaxis]
        cos_angle = cos_angle[:, tf.newaxis]
        rotation_matrix = identity + sin_angle * ss + (
            1 - cos_angle) * tf.matmul(ss, ss)
        return rotation_matrix
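Assuming the module's skew_symmetric helper is in scope (it is not shown in this listing) and TF 1.x, a quick check that the returned matrices rotate v1 onto the direction of v2:

import numpy as np
import tensorflow as tf

v1 = tf.constant([[1., 0., 0.], [0., 1., 0.]])
v2 = tf.constant([[0., 0., 1.], [1., 1., 0.]])
R = rotation_between_vectors(v1, v2)
rotated = tf.squeeze(tf.matmul(R, v1[..., tf.newaxis]), -1)

with tf.Session() as sess:
    out, target = sess.run([rotated, tf.linalg.l2_normalize(v2, -1)])
    print(np.allclose(out, target, atol=1e-5))  # R * v1 == v2 / |v2|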
Example 7
    def __mul__(self, other):
        if isinstance(other, Twist):
            # TODO check
            rot = matvecmul(self.m, other.rot)
            vel = matvecmul(self.m, other.vel) + tf1.cross(
                self.p, rot)  # should be cross product
            return Twist(tf.concat([vel, rot], 0))

        elif isinstance(other, tf.Tensor) or isinstance(other, tf.Variable):
            if other.shape[-1].value == 3:  # only position
                if self.is_batch:
                    # return tf.einsum('aij,bj->abi', self.m, other) + self.p[:, None]
                    return tf.linalg.LinearOperatorFullMatrix(
                        self.m[:, None]).matvec(other[None]) + self.p[None]
                else:
                    return tf.linalg.LinearOperatorFullMatrix(
                        self.m).matvec(other) + self.p[None]
            else:
                raise NotImplementedError('Only position supported yet')
        else:
            return Frame(p=matvecmul(self.m, other.p) + self.p,
                         m=matmatmul(self.m, other.m))
Example 8
def get_edge_cross_feature(point_cloud, nn_idx, k=20):
    """Construct edge feature for each point
  Args:
    point_cloud: (batch_size, num_points, 1, num_dims)
    nn_idx: (batch_size, num_points, k)
    k: int

  Returns:
    edge features: (batch_size, num_points, k, num_dims)
  """
    og_batch_size = point_cloud.get_shape().as_list()[0]
    point_cloud = tf.squeeze(point_cloud)
    if og_batch_size == 1:
        point_cloud = tf.expand_dims(point_cloud, 0)

    point_cloud_central = point_cloud

    point_cloud_shape = point_cloud.get_shape()
    batch_size = point_cloud_shape[0].value
    num_points = point_cloud_shape[1].value
    num_dims = point_cloud_shape[2].value

    idx_ = tf.range(batch_size) * num_points
    idx_ = tf.reshape(idx_, [batch_size, 1, 1])

    point_cloud_flat = tf.reshape(point_cloud, [-1, num_dims])
    point_cloud_neighbors = tf.gather(point_cloud_flat, nn_idx + idx_)
    point_cloud_central = tf.expand_dims(point_cloud_central, axis=-2)

    point_cloud_central = tf.tile(point_cloud_central, [1, 1, k, 1])

    edge_feature = tf.concat([
        point_cloud_central, point_cloud_neighbors - point_cloud_central,
        tf.cross(point_cloud_central,
                 point_cloud_neighbors - point_cloud_central)
    ],
                             axis=-1)
    return edge_feature
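A usage sketch under TF 1.x. The nn_idx tensor is built here with a brute-force k-nearest-neighbour search as a stand-in for whatever kNN helper the original repository pairs this function with:

import numpy as np
import tensorflow as tf

batch, n, k = 2, 128, 20
pts = tf.constant(np.random.rand(batch, n, 3), dtype=tf.float32)

# brute-force kNN indices over squared distances
d2 = tf.reduce_sum((pts[:, :, None, :] - pts[:, None, :, :]) ** 2, axis=-1)
_, nn_idx = tf.nn.top_k(-d2, k=k)                  # [batch, n, k]

feat = get_edge_cross_feature(pts[:, :, None, :], nn_idx, k=k)
with tf.Session() as sess:
    print(sess.run(feat).shape)                    # (2, 128, 20, 9) = 3 * num_dims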
Example 9
    return sampling_module.farthest_point_sample(inp, npoint)


ops.NoGradient('FarthestPointSample')

if __name__ == '__main__':
    import numpy as np
    np.random.seed(100)
    triangles = np.random.rand(1, 5, 3, 3).astype('float32')
    with tf.device('/gpu:1'):
        inp = tf.constant(triangles)
        tria = inp[:, :, 0, :]
        trib = inp[:, :, 1, :]
        tric = inp[:, :, 2, :]
        areas = tf.sqrt(
            tf.reduce_sum(tf.cross(trib - tria, tric - tria)**2, 2) + 1e-9)
        randomnumbers = tf.random_uniform((1, 8192))
        triids = prob_sample(areas, randomnumbers)
        tria_sample = gather_point(tria, triids)
        trib_sample = gather_point(trib, triids)
        tric_sample = gather_point(tric, triids)
        us = tf.random_uniform((1, 8192))
        vs = tf.random_uniform((1, 8192))
        uplusv = 1 - tf.abs(us + vs - 1)
        uminusv = us - vs
        us = (uplusv + uminusv) * 0.5
        vs = (uplusv - uminusv) * 0.5
        pt_sample = tria_sample + (trib_sample - tria_sample) * tf.expand_dims(
            us, -1) + (tric_sample - tria_sample) * tf.expand_dims(vs, -1)
        print('pt_sample: ', pt_sample)
        reduced_sample = gather_point(pt_sample,