def plot_stable_pose(mesh, stable_pose, T_table_world=stf.SimilarityTransform3D(from_frame='world', to_frame='table'), d=0.5, style='wireframe', color=(0.5,0.5,0.5)):
    """ Render a mesh resting on a table in the given stable pose.

    Params:
        mesh - mesh to render
        stable_pose - stable pose providing rotation matrix r
        T_table_world - similarity transform between world and table frames
        d - table extent passed to plot_table
        style - mesh rendering style ('wireframe' or 'surface')
        color - RGB tuple for the mesh
    Returns:
        similarity transform from the object frame to the world frame
    """
    # rotate into the stable-pose frame to find how far the mesh extends
    # below the table plane
    T_mesh_stp = stf.SimilarityTransform3D(pose=tfx.pose(stable_pose.r))
    lower, _ = mesh.transform(T_mesh_stp).bounding_box()
    # translate upward so the lowest vertex sits on the table (z = 0)
    t_table_obj = np.array([0, 0, -lower[2]])
    T_table_obj = stf.SimilarityTransform3D(pose=tfx.pose(stable_pose.r, t_table_obj),
                                            from_frame='obj', to_frame='table')
    T_world_obj = T_table_world.inverse().dot(T_table_obj)
    MayaviVisualizer.plot_mesh(mesh, T_world_obj.inverse(), style=style, color=color)
    MayaviVisualizer.plot_table(T_table_world, d=d)
    return T_world_obj.inverse()
    def __init__(self,
                 sdf,
                 mesh=None,
                 features=None,
                 tf=stf.SimilarityTransform3D(tfx.identity_tf(), 1.0),
                 key='',
                 model_name='',
                 category='',
                 mass=1.0):
        """ Base graspable object, backed by an sdf and an optional mesh.

        Params:
            sdf - signed distance field representation of the object
            mesh - optional mesh representation of the object
            features - optional feature representation (shot features)
            tf - stf.SimilarityTransform3D pose/scale of the object
            key - string identifier of the object (e.g. database key)
            model_name - mesh model file name (for OpenRave usage)
            category - string category label for the object
            mass - mass of the object (defaults to 1.0 when unknown)
        Raises:
            ValueError - if tf is not a SimilarityTransform3D
        """
        if not isinstance(tf, stf.SimilarityTransform3D):
            raise ValueError(
                'Must initialize graspable objects with 3D similarity transform'
            )
        self.sdf_ = sdf
        self.mesh_ = mesh
        self.tf_ = tf

        self.key_ = key
        self.model_name_ = model_name  # for OpenRave usage, gross!
        self.category_ = category
        self.mass_ = mass

        self.features_ = features  # shot features

        # make consistent poses, scales: the sdf (and mesh, when present)
        # share the object's transform
        self.sdf_.tf = self.tf_
        if self.mesh_ is not None:
            self.mesh_.tf = self.tf_
 def plot_gripper(grasp, T_obj_world=stf.SimilarityTransform3D(from_frame='world', to_frame='obj'), gripper=None, color=(0.5,0.5,0.5)):
     """ Render a gripper mesh at the pose implied by the given grasp.

     Params:
         grasp - grasp providing gripper_pose()
         T_obj_world - similarity transform between object and world frames
         gripper - gripper providing T_mesh_gripper and mesh
             (defaults to FANUC_GRIPPER)
         color - RGB tuple for the rendered mesh
     """
     if gripper is None:
         gripper = FANUC_GRIPPER
     # chain grasp -> gripper -> mesh frames to place the gripper mesh in world
     T_gripper_obj = grasp.gripper_pose(gripper).inverse()
     T_mesh_obj = gripper.T_mesh_gripper.dot(T_gripper_obj)
     T_mesh_world = T_mesh_obj.dot(T_obj_world)
     MayaviVisualizer.plot_mesh(gripper.mesh, T_mesh_world, style='surface', color=color)
Esempio n. 4
0
    def __init__(self, grasp_center, grasp_axis, grasp_width, jaw_width = 0, grasp_angle = 0,
                 tf = stf.SimilarityTransform3D(tfx.identity_tf(from_frame = 'world'), 1.0)):
        """
        Create a point grasp for parallel jaws with given center and width (relative to object)
        Params: (Note: all in meters!)
            grasp_center - numpy 3-array for center of grasp
            grasp_axis - numpy 3-array for grasp direction (should be normalized)
            grasp_width - how wide the jaws open in meters
            jaw_width - the width of the jaws tangent to the axis (0 means classical point grasp)
            grasp_angle - the angle of approach for parallel-jaw grippers (the 6th DOF for point grasps)
                wrt the orthogonal dir to the axis in the XY plane
            tf - the similarity tf of the grasp wrt the world
        Raises:
            ValueError - if center or axis is not a numpy 3-array, or if
                jaw_width is nonzero (nonzero jaw widths not yet supported)
        """
        if not isinstance(grasp_center, np.ndarray) or grasp_center.shape[0] != 3:
            raise ValueError('Center must be numpy ndarray of size 3')
        if not isinstance(grasp_axis, np.ndarray)  or grasp_axis.shape[0] != 3:
            raise ValueError('Axis must be numpy ndarray of size 3')
        if jaw_width != 0:
            raise ValueError('Nonzero jaw width not yet supported')

        self.center_ = grasp_center
        # normalize so the stored axis is always unit length
        self.axis_ = grasp_axis / np.linalg.norm(grasp_axis)
        self.grasp_width_ = grasp_width
        self.jaw_width_ = jaw_width
        self.approach_angle_ = grasp_angle
        self.tf_ = tf

        # no quality assigned yet; set externally after evaluation
        self.quality_ = None
 def __init__(self,
              sdf,
              tf=stf.SimilarityTransform3D(tfx.identity_tf(), 1.0)):
     """ 2D objects are initialized with sdfs only.

     Params:
         sdf - s.Sdf2D signed distance field for the object
         tf - similarity transform of the object wrt the world
     Raises:
         ValueError - if sdf is not an s.Sdf2D
     """
     if not isinstance(sdf, s.Sdf2D):
         raise ValueError('Must initialize graspable object 2D with 2D sdf')
     GraspableObject.__init__(self, sdf, tf=tf)
    def __init__(self,
                 sdf,
                 mesh=None,
                 features=None,
                 tf=stf.SimilarityTransform3D(tfx.identity_tf(), 1.0),
                 key='',
                 category='',
                 model_name='',
                 mass=1.0):
        """ 3D graspable object, backed by a 3D sdf and an optional 3D mesh.

        Params:
            sdf - s.Sdf3D signed distance field for the object
            mesh - optional m.Mesh3D or mx.Mesh representation
            features - optional feature representation
            tf - similarity transform of the object wrt the world
            key - string identifier of the object (e.g. database key)
            category - string category label
            model_name - mesh model file name (for OpenRave usage)
            mass - mass of the object (defaults to 1.0 when unknown)
        Raises:
            ValueError - if sdf is not a 3D sdf or mesh is not a supported
                3D mesh type
        """
        if not isinstance(sdf, s.Sdf3D):
            raise ValueError('Must initialize graspable object 3D with 3D sdf')
        # tuple isinstance replaces the two chained checks; error message
        # fixed — it previously repeated the sdf message for the mesh check
        if mesh is not None and not isinstance(mesh, (m.Mesh3D, mx.Mesh)):
            raise ValueError('Must initialize graspable object 3D with 3D mesh')

        self.center_of_mass_ = sdf.center_world()  # use SDF bb center for now
        GraspableObject.__init__(self,
                                 sdf,
                                 mesh=mesh,
                                 features=features,
                                 tf=tf,
                                 key=key,
                                 category=category,
                                 model_name=model_name,
                                 mass=mass)
Esempio n. 7
0
File: sdf.py Project: puneetp/GPIS
    def __init__(self,
                 sdf_data,
                 origin,
                 resolution,
                 tf=stf.SimilarityTransform3D(tfx.identity_tf(), scale=1.0),
                 frame=None,
                 use_abs=True):
        """ 3D signed distance field sampled on a regular grid.

        Params:
            sdf_data - 3D numpy array of signed distance values
            origin - coordinates of the grid origin
            resolution - side length of a single grid cell
            tf - similarity transform of the sdf wrt the world
            frame - optional name of the reference frame
            use_abs - if True, keep only absolute distances (useful for
                non-closed meshes whose sign is unreliable)
        """
        self.data_ = sdf_data
        self.origin_ = origin
        self.resolution_ = resolution
        self.dims_ = self.data_.shape

        # set up surface params
        self.surface_thresh_ = self.resolution_ * np.sqrt(
            2
        ) / 2  # resolution is max dist from surface when surf is orthogonal to diagonal grid cells
        self.center_ = np.array([(self.dims_[0] - 1) / 2,
                                 (self.dims_[1] - 1) / 2,
                                 (self.dims_[2] - 1) / 2])
        # np.int was a deprecated alias of the builtin int and has been
        # removed from NumPy; use int directly (identical dtype)
        self.points_buf_ = np.zeros([Sdf3D.num_interpolants, 3], dtype=int)
        self.coords_buf_ = np.zeros([
            3,
        ])

        # set up tf
        self.tf_ = tf

        # tranform sdf basis to grid (X and Z axes are flipped!)
        # NOTE(review): R_sdf_mesh is identity here despite the axis-flip
        # comment above — confirm whether a flip matrix was intended
        R_sdf_mesh = np.eye(3)
        self.tf_grid_sdf_ = stf.SimilarityTransform3D(
            tfx.canonical.CanonicalTransform(R_sdf_mesh,
                                             -R_sdf_mesh.T.dot(self.center_)),
            1.0 / self.resolution_)
        self.tf_sdf_grid_ = self.tf_grid_sdf_.inverse()

        # optionally use only the absolute values (useful for non-closed meshes in 3D)
        if use_abs:
            self.data_ = np.abs(self.data_)

        self._compute_flat_indices()
        self._compute_gradients()

        self.feature_vector_ = None  #Kmeans feature representation
Esempio n. 8
0
def makegrasp(originalgrasp, bmat=None, center=None, direction=None):
    """ Build a ParallelJawPtGrasp3D that copies widths and approach angle
    from an existing grasp.

    Params:
        originalgrasp - grasp providing grasp_width, jaw_width, approach_angle
        bmat - optional 4x4 basis matrix; when given, the grasp axis is read
            from column 1 and the center from column 3 (homogeneous part dropped)
        center - grasp center, used only when bmat is None
        direction - direction matrix whose column 1 is the grasp axis,
            used only when bmat is None
    Returns:
        a new ParallelJawPtGrasp3D with an identity world transform
    """
    if bmat is not None:
        axis = bmat[:, 1][:-1]
        center = bmat[:, 3][:-1]
    else:
        axis = direction[:, 1]
    # both branches previously duplicated this constructor call verbatim
    return ParallelJawPtGrasp3D(center,
                                axis,
                                originalgrasp.grasp_width,
                                originalgrasp.jaw_width,
                                originalgrasp.approach_angle,
                                tf=stf.SimilarityTransform3D(
                                    tfx.identity_tf(), 1.0))
def test_fanuc_gripper():
    """ Build the FANUC LEHF gripper mesh and visualize its reference frames.

    Paths are machine-specific; the transform/params save calls are left
    commented out.
    """
    mesh_filename = '/home/jmahler/jeff_working/GPIS/data/grippers/fanuc_lehf/gripper.obj'
    of = objf.ObjFile(mesh_filename)
    gripper_mesh = of.read()

    # center the mesh on its bounding box and write it back in place
    gripper_mesh.center_vertices_bb()
    oof = objf.ObjFile(mesh_filename)
    oof.write(gripper_mesh)
    
    T_mesh_world = stf.SimilarityTransform3D(pose=tfx.pose(np.eye(4)), from_frame='world', to_frame='mesh')
    # rotation conventions between the grasp, gripper, and mesh frames
    R_grasp_gripper = np.array([[0, 0, 1],
                                [0, -1, 0],
                                [1, 0, 0]])
    R_mesh_gripper = np.array([[0, 1, 0],
                               [-1, 0, 0],
                               [0, 0, 1]])
    t_mesh_gripper = np.array([0.0, 0.0, 0.065])
    T_mesh_gripper = stf.SimilarityTransform3D(pose=tfx.pose(R_mesh_gripper, t_mesh_gripper),
                                               from_frame='gripper', to_frame='mesh')
    T_gripper_world = T_mesh_gripper.inverse().dot(T_mesh_world)
    T_grasp_gripper = stf.SimilarityTransform3D(pose=tfx.pose(R_grasp_gripper), from_frame='gripper', to_frame='grasp')
    T_grasp_world = T_grasp_gripper.dot(T_gripper_world)

    #T_mesh_gripper.save('/home/jmahler/jeff_working/GPIS/data/grippers/fanuc_lehf/T_mesh_gripper.stf')
    #T_grasp_gripper.save('/home/jmahler/jeff_working/GPIS/data/grippers/fanuc_lehf/T_grasp_gripper.stf')

    # gripper jaw limits in meters
    gripper_params = {}
    gripper_params['min_width'] = 0.015
    gripper_params['max_width'] = 0.048
    #f = open('/home/jmahler/jeff_working/GPIS/data/grippers/fanuc_lehf/params.json', 'w')
    #json.dump(gripper_params, f)

    # render the mesh and the mesh/grasp frames for visual inspection
    MayaviVisualizer.plot_pose(T_mesh_world, alpha=0.05, tube_radius=0.0025, center_scale=0.005)
    #MayaviVisualizer.plot_pose(T_gripper_world, alpha=0.05, tube_radius=0.0025, center_scale=0.005)
    MayaviVisualizer.plot_pose(T_grasp_world, alpha=0.05, tube_radius=0.0025, center_scale=0.005)
    MayaviVisualizer.plot_mesh(gripper_mesh, T_mesh_world, style='surface', color=(1,1,1))
    mv.axes()
 def gripper_pose(self, gripper=None):
     """ Returns the pose from the object frame to the gripper frame as a similarity transform 3D object.

     Params:
         gripper - optional gripper; when given, its T_grasp_gripper rotation
             is composed into the returned pose
     """
     R_grip_p_grip = self.rotated_full_axis
     R_grip_robot = np.eye(3)
     if gripper is not None:
         R_grip_robot = gripper.T_grasp_gripper.rotation
     # compose the grasp-axis rotation with the gripper-specific rotation
     R = R_grip_p_grip.dot(R_grip_robot)
     t = self.center
     # USE_ALAN selects which transform type is returned (RigidTransform
     # vs the legacy SimilarityTransform3D)
     if USE_ALAN:
         return RigidTransform(R, t, from_frame='gripper', to_frame='obj')
     else:
         return stf.SimilarityTransform3D(tfx.pose(R, t),
                                          from_frame='gripper',
                                          to_frame='obj')
Esempio n. 11
0
    def write_mesh_stable_poses(self, mesh, filename, min_prob=0, vis=False):
        prob_mapping, cv_hull = st.compute_stable_poses(mesh), mesh.convex_hull()

        R_list = []
        for face, p in prob_mapping.items():
            if p >= min_prob:
                x0 = np.array(cv_hull.vertices()[face[0]])
                R_list.append([p, st.compute_basis([cv_hull.vertices()[i] for i in face], cv_hull), x0])

        if vis:
            print 'P', R_list[0][0]
            mv.figure()
            mesh.visualize()
            mv.axes()

            mv.figure()
            cv_hull_tf = cv_hull.transform(stf.SimilarityTransform3D(tfx.transform(R_list[0][1], np.zeros(3))))
            cv_hull_tf.visualize()
            mv.axes()
            mv.show()

        f = open(filename[:-4] + ".stp", "w")
        f.write("#############################################################\n")
        f.write("# STP file generated by UC Berkeley Automation Sciences Lab #\n")
        f.write("#                                                           #\n")
        f.write("# Num Poses: %d" %len(R_list))
        for _ in range(46 - len(str(len(R_list)))):
            f.write(" ")
        f.write(" #\n")
        f.write("# Min Probability: %s" %str(min_prob))
        for _ in range(40 - len(str(min_prob))):
            f.write(" ")
        f.write(" #\n")
        f.write("#                                                           #\n")
        f.write("#############################################################\n")
        f.write("\n")

        # adding R matrices to .stp file
        pose_index = 1
        for i in range(len(R_list)):
            f.write("p %f\n" %R_list[i][0])
            f.write("r %f %f %f\n" %(R_list[i][1][0][0], R_list[i][1][0][1], R_list[i][1][0][2]))
            f.write("  %f %f %f\n" %(R_list[i][1][1][0], R_list[i][1][1][1], R_list[i][1][1][2]))
            f.write("  %f %f %f\n" %(R_list[i][1][2][0], R_list[i][1][2][1], R_list[i][1][2][2]))
            f.write("x0 %f %f %f\n" %(R_list[i][2][0], R_list[i][2][1], R_list[i][2][2]))
        f.write("\n\n")
Esempio n. 12
0
def grasp_model_figure():
    """ Generate a grasp-model figure for a hard-coded flashlight object.

    Loads an sdf/mesh pair, rescales and re-writes the mesh, builds a
    GraspableObject3D, then prunes rotated grasps in collision using an
    OpenRave grasp checker.  Paths are machine-specific.
    """
    np.random.seed(100)  # deterministic behavior for reproducibility

    # NOTE(review): h/ax are created but never drawn to below — confirm
    # whether the matplotlib figure is still needed
    h = plt.figure()
    ax = h.add_subplot(111, projection='3d')

    sdf_3d_file_name = '/mnt/terastation/shape_data/MASTER_DB_v2/Cat50_ModelDatabase/5c7bf45b0f847489181be2d6e974dccd.sdf'
    sf = sdf_file.SdfFile(sdf_3d_file_name)
    sdf_3d = sf.read()

    mesh_name = '/mnt/terastation/shape_data/MASTER_DB_v2/Cat50_ModelDatabase/5c7bf45b0f847489181be2d6e974dccd.obj'
    of = obj_file.ObjFile(mesh_name)
    m = of.read()

    # rescale the mesh and write the cleaned version to disk
    clean_mesh_name = 'data/test/meshes/flashlight.obj'
    mc = mesh_cleaner.MeshCleaner(m)
    mc.rescale_vertices(0.07)
    oof = obj_file.ObjFile(clean_mesh_name)
    oof.write(mc.mesh())

    graspable = graspable_object.GraspableObject3D(
        sdf_3d,
        mesh=m,
        model_name=clean_mesh_name,
        tf=stf.SimilarityTransform3D(tfx.identity_tf(), 0.75))

    rave.raveSetDebugLevel(rave.DebugLevel.Error)
    grasp_checker = OpenRaveGraspChecker()

    # a single grasp along the x axis, rotated into 20 candidate poses
    center = np.array([0, 0.01, 0])
    axis = np.array([1, 0, 0])
    axis = axis / np.linalg.norm(axis)
    width = 0.1
    grasp = g.ParallelJawPtGrasp3D(center, axis, width)

    rotated_grasps = grasp.transform(graspable.tf, 2.0 * np.pi / 20.0)
    print len(rotated_grasps)
    grasp_checker.prune_grasps_in_collision(graspable,
                                            rotated_grasps,
                                            auto_step=False,
                                            close_fingers=True,
                                            delay=1)
Esempio n. 13
0
    def sdf_3d(data):
        """ Converts HDF5 data provided in dictionary |data| to an SDF object.

        Reads the raw grid, origin, and resolution, plus optional pose,
        scale, and frame attributes (defaults: identity pose, unit scale,
        no frame).
        """
        sdf_data = np.array(data[SDF_DATA_KEY])
        origin = np.array(data.attrs[SDF_ORIGIN_KEY])
        resolution = data.attrs[SDF_RES_KEY]

        # should never be used, really
        pose = tfx.identity_tf()
        scale = 1.0
        # membership tests on the attrs mapping directly; materializing
        # .keys() first is unnecessary
        if SDF_POSE_KEY in data.attrs:
            pose = data.attrs[SDF_POSE_KEY]
        if SDF_SCALE_KEY in data.attrs:
            scale = data.attrs[SDF_SCALE_KEY]
        tf = stf.SimilarityTransform3D(pose, scale)

        frame = None
        if SDF_FRAME_KEY in data.attrs:
            frame = data.attrs[SDF_FRAME_KEY]

        return sdf.Sdf3D(sdf_data, origin, resolution, tf, frame)
Esempio n. 14
0
File: pfc.py Project: puneetp/GPIS
    def sample(self, size=1):
        """ Sample |size| random variables from the model.

        Returns a single transformed object when size == 1, otherwise a
        list of transformed objects.
        """
        samples = []
        for i in range(size):
            # sample random pose: perturb the object rotation by the
            # exponential map of a random skew-symmetric matrix
            xi = self.r_xi_rv_.rvs(size=1)
            S_xi = skew(xi)
            R = scipy.linalg.expm(S_xi).dot(self.obj_.tf.rotation)
            s = self.s_rv_.rvs(size=1)[0]  # random scale
            t = self.t_rv_.rvs(size=1)     # random translation
            sample_tf = stf.SimilarityTransform3D(tfx.transform(R.T, t), s)

            # transform object by pose
            obj_sample = self.obj_.transform(sample_tf)
            samples.append(obj_sample)

        # not a list if only 1 sample
        if size == 1:
            return samples[0]
        return samples
Esempio n. 15
0
def test_3d_transform():
    """ Time and visualize fast vs. detailed 3D similarity transforms of an sdf. """
    np.random.seed(100)  # deterministic random transform
    sdf_3d_file_name = 'data/test/sdf/Co_clean_dim_25.sdf'
    s = sf.SdfFile(sdf_3d_file_name)
    sdf_3d = s.read()

    # transform
    # NOTE(review): pose_mat and the first tf are immediately overwritten by
    # tfx.random_tf() — confirm whether the fixed pose was meant to be used
    pose_mat = np.matrix([[0, 1, 0, 0], [-1, 0, 0, 0], [0, 0, 1, 0],
                          [0, 0, 0, 1]])
    tf = tfx.transform(pose_mat, from_frame='world')
    tf = tfx.random_tf()
    tf.position = 0.01 * np.random.rand(3)

    # time the default (fast) transform
    # NOTE(review): time.clock() was removed in Python 3.8; fine under Python 2
    start_t = time.clock()
    s_tf = stf.SimilarityTransform3D(tf, scale=1.2)
    sdf_tf = sdf_3d.transform(s_tf)
    end_t = time.clock()
    duration = end_t - start_t
    logging.info('3D Transform took %f sec' % (duration))
    logging.info('Transformed resolution %f' % (sdf_tf.resolution))

    # time the detailed transform of the same sdf for comparison
    start_t = time.clock()
    sdf_tf_d = sdf_3d.transform(s_tf, detailed=True)
    end_t = time.clock()
    duration = end_t - start_t
    logging.info('Detailed 3D Transform took %f sec' % (duration))
    logging.info('Transformed detailed resolution %f' % (sdf_tf_d.resolution))

    # display
    plt.figure()
    sdf_3d.scatter()
    plt.title('Original')

    plt.figure()
    sdf_tf.scatter()
    plt.title('Transformed')

    plt.figure()
    sdf_tf_d.scatter()
    plt.title('Detailed Transformed')
    plt.show()
    def plot_grasp(grasp, T_obj_world=stf.SimilarityTransform3D(from_frame='world', to_frame='obj'),
                   plot_approach=False, alpha=0.5, tube_radius=0.002, endpoint_color=(0,1,0),
                   endpoint_scale=0.004, grasp_axis_color=(0,1,0), palm_axis_color=(0,0,1),
                   stp=None):
        """ Plot a grasp axis (and optionally the palm approach axis) in the world frame.

        Params:
            grasp - grasp providing endpoints(), center, and gripper_pose()
            T_obj_world - similarity transform between object and world frames
            plot_approach - if True, also draw the palm approach axis
            alpha - length scale of the drawn approach axis
            tube_radius - radius of the plotted axis tubes
            endpoint_color, endpoint_scale - appearance of the grasp endpoints
            grasp_axis_color, palm_axis_color - axis colors
            stp - unused; kept for call compatibility
        """
        g1, g2 = grasp.endpoints()
        center = grasp.center
        # map endpoints and center from the object frame into the world frame
        g1_tf = T_obj_world.inverse().apply(g1)
        g2_tf = T_obj_world.inverse().apply(g2)
        center_tf = T_obj_world.inverse().apply(center)
        grasp_axis_tf = np.array([g1_tf, g2_tf])

        # the palm (approach) direction comes from the grasp's gripper pose
        T_gripper_obj = grasp.gripper_pose(gripper=ZEKE_GRIPPER).inverse()
        palm_axis = T_gripper_obj.inverse().rotation[:,1]

        # (removed unused axis_tf duplicate of grasp_axis_tf)
        palm_axis_tf = T_obj_world.inverse().apply(palm_axis, direction=True)
        palm_axis_tf = np.array([center_tf, center_tf + alpha * palm_axis_tf])

        mv.points3d(g1_tf[0], g1_tf[1], g1_tf[2], color=endpoint_color, scale_factor=endpoint_scale)
        mv.points3d(g2_tf[0], g2_tf[1], g2_tf[2], color=endpoint_color, scale_factor=endpoint_scale)

        mv.plot3d(grasp_axis_tf[:,0], grasp_axis_tf[:,1], grasp_axis_tf[:,2], color=grasp_axis_color, tube_radius=tube_radius)
        if plot_approach:
            mv.plot3d(palm_axis_tf[:,0], palm_axis_tf[:,1], palm_axis_tf[:,2], color=palm_axis_color, tube_radius=tube_radius)
Esempio n. 17
0
        stable_pose.r[1, :] = -stable_pose.r[1, :]

    R_stp_obj = stable_pose.r
    n = stable_pose.r[2, :]

    # read in mesh
    obj_filename = dataset.obj_mesh_filename(object_key)
    of = objf.ObjFile(obj_filename)
    mesh = of.read()
    gripper = gr.RobotGripper.load(config['gripper'])

    # form transformation matrices
    R_stp_cb = np.array([[0, -1, 0], [1, 0, 0], [0, 0, 1]])
    t_stp_cb = np.array([delta_x, delta_y, delta_z])
    T_stp_cb = stf.SimilarityTransform3D(pose=tfx.pose(R_stp_cb, t_stp_cb),
                                         from_frame='cb',
                                         to_frame='stp')
    T_obj_stp = stf.SimilarityTransform3D(pose=tfx.pose(
        R_stp_obj.T, np.zeros(3)),
                                          from_frame='stp',
                                          to_frame='obj')
    T_obj_cb = T_obj_stp.dot(T_stp_cb)
    T_cb_obj = T_obj_cb.inverse()

    R_cb_world = np.eye(3)
    t_cb_world = np.zeros(3)
    T_cb_world = stf.SimilarityTransform3D(pose=tfx.pose(
        R_cb_world, t_cb_world),
                                           to_frame='cb')

    # transformations from FANUC to Dex-Net conventions
 def plot_mesh(mesh, T_mesh_world=stf.SimilarityTransform3D(from_frame='world', to_frame='mesh'),
               style='wireframe', color=(0.5,0.5,0.5), opacity=1.0):
     """ Visualize a mesh, transformed into the world frame.

     Params:
         mesh - mesh to render
         T_mesh_world - similarity transform between world and mesh frames
         style - rendering style ('wireframe' or 'surface')
         color - RGB tuple for the mesh
         opacity - rendering opacity in [0, 1]
     """
     world_mesh = mesh.transform(T_mesh_world.inverse())
     world_mesh.visualize(style=style, color=color, opacity=opacity)
def test_yumi_gripper():
    """ Build a combined YuMi gripper mesh and visualize its reference frames.

    Merges the base and two finger meshes into a single gripper mesh;
    paths are machine-specific and the save calls are left commented out.
    """
    # build new mesh
    finger_filename = '/home/jmahler/jeff_working/GPIS/data/grippers/yumi/finger.obj'
    base_filename = '/home/jmahler/jeff_working/GPIS/data/grippers/yumi/base.obj'
    of = objf.ObjFile(finger_filename)
    finger1_mesh = of.read()
    finger2_mesh = copy.copy(finger1_mesh)
    of = objf.ObjFile(base_filename)
    base_mesh = of.read()

    # place the two fingers symmetrically (finger 2 rotated 180 deg about z)
    R_finger1_world = np.eye(3)
    t_finger1_world = np.array([-0.025, -0.005, 0.082])
    T_finger1_world = stf.SimilarityTransform3D(pose=tfx.pose(R_finger1_world,
                                                              t_finger1_world),
                                                from_frame='mesh', to_frame='world')
    R_finger2_world = np.array([[-1,0,0],
                                [0,-1,0],
                                [0,0,1]])
    t_finger2_world = np.array([0.025, 0.005, 0.082])
    T_finger2_world = stf.SimilarityTransform3D(pose=tfx.pose(R_finger2_world,
                                                              t_finger2_world),
                                                from_frame='mesh', to_frame='world')
    T_mesh_world = stf.SimilarityTransform3D(pose=tfx.pose(np.eye(4)), from_frame='world', to_frame='mesh')

    finger1_mesh = finger1_mesh.transform(T_finger1_world)
    finger2_mesh = finger2_mesh.transform(T_finger2_world)

    # merge the three meshes into one, offsetting triangle indices as we go
    offset = 0
    vertices = []
    triangles = []
    meshes = [base_mesh, finger1_mesh, finger2_mesh]
    for part in meshes:  # enumerate index was unused; iterate directly
        vertices.extend(part.vertices())
        offset_tris = [[t[0]+offset,t[1]+offset,t[2]+offset] for t in part.triangles()]
        triangles.extend(offset_tris)
        offset += len(part.vertices())
    gripper_mesh = m.Mesh3D(vertices, triangles)
    gripper_mesh.center_vertices_bb()

    of = objf.ObjFile('/home/jmahler/jeff_working/GPIS/data/grippers/yumi/gripper.obj')
    #of.write(gripper_mesh)

    # frames of reference
    R_grasp_gripper = np.array([[0, 0, -1],
                                [0, 1, 0],
                                [1, 0, 0]])
    R_mesh_gripper = np.array([[1, 0, 0],
                               [0, 1, 0],
                               [0, 0, 1]])
    t_mesh_gripper = np.array([0.0, -0.002, 0.058])
    T_mesh_gripper = stf.SimilarityTransform3D(pose=tfx.pose(R_mesh_gripper, t_mesh_gripper),
                                               from_frame='gripper', to_frame='mesh')
    T_gripper_world = T_mesh_gripper.inverse().dot(T_mesh_world)
    T_grasp_gripper = stf.SimilarityTransform3D(pose=tfx.pose(R_grasp_gripper), from_frame='gripper', to_frame='grasp')

    #T_mesh_gripper.save('/home/jmahler/jeff_working/GPIS/data/grippers/yumi/T_mesh_gripper.stf')
    #T_grasp_gripper.save('/home/jmahler/jeff_working/GPIS/data/grippers/yumi/T_grasp_gripper.stf')

    # gripper jaw limits in meters
    gripper_params = {}
    gripper_params['min_width'] = 0.0
    gripper_params['max_width'] = 0.05
    #f = open('/home/jmahler/jeff_working/GPIS/data/grippers/yumi/params.json', 'w')
    #json.dump(gripper_params, f)    

    MayaviVisualizer.plot_mesh(gripper_mesh, T_mesh_world, style='surface', color=(0,0,1))
    MayaviVisualizer.plot_pose(T_gripper_world, alpha=0.05, tube_radius=0.0025, center_scale=0.005)
    mv.axes()
    def plot_patch(window, T_patch_obj, window_dim_obj=(1.0,1.0), T_obj_world=stf.SimilarityTransform3D(from_frame='world', to_frame='obj'),
                   patch_color=(1,1,1), contact_color=(1,1,0), contact_scale=0.005,
                   delta_z=0.001, edge_len_thresh=3.0,
                   dist_thresh=0.05, grad_thresh=0.025):
        """ Plot a patch defined by a window and a contact.

        Params:
            window - surface window providing proj_win_2d, grad_x_2d, grad_y_2d
            T_patch_obj - transform between patch (contact) and object frames
            window_dim_obj - (height, width) of the window in object units
            T_obj_world - similarity transform between object and world frames
            patch_color / contact_color / contact_scale - rendering appearance
            delta_z - small offset of the rendered patch along -z
                (presumably to avoid z-fighting with the surface — confirm)
            edge_len_thresh - max pixel-space edge length kept after triangulation
            dist_thresh / grad_thresh - per-pixel inclusion thresholds on
                projection distance and gradients
        """
        # extract dimensions
        proj_window = window.proj_win_2d
        grad_x_window = window.grad_x_2d
        grad_y_window = window.grad_y_2d
        win_height, win_width = proj_window.shape
        res_x = window_dim_obj[1] / win_width
        res_y = window_dim_obj[0] / win_height
        offset = 0

        t_patch_render = np.array([0, 0, -delta_z])
        T_patch_render = stf.SimilarityTransform3D(pose=tfx.pose(np.eye(3), t_patch_render),
                                                   from_frame='render',
                                                   to_frame='contact')

        # x,y lists for triangulation
        x_list = []
        y_list = []

        # convert patch into 3d points
        points_3d = np.zeros([0,3])    
        for y in range(win_height):
            for x in range(win_width):
                # convert to 3d points (centered on the window)
                x_3d = (x -  win_width/2) * res_x
                y_3d = (y - win_height/2) * res_y

                # add the point if the distance and gradients are appropriate
                if np.abs(proj_window[y, x]-offset) < dist_thresh and \
                        np.abs(grad_x_window[y, x]) < grad_thresh and \
                        np.abs(grad_y_window[y, x]) < grad_thresh:
                    x_list.append(x)
                    y_list.append(y)
                    points_3d = np.r_[points_3d,
                                      np.array([[x_3d, y_3d, proj_window[y, x] - offset]])]

        # abort if too few points
        # NOTE(review): despite the comment above, this only warns and falls
        # through — mtri.Triangulation below will likely fail with too few
        # points. Confirm whether an early return was intended.
        if len(x_list) <= 3 and len(y_list) <= 3:
            logging.warning('Too few points for triangulation')

        # triangulate and prune large tris
        tri = mtri.Triangulation(x_list, y_list)
        points_2d = np.array([x_list, y_list]).T
        triangles = []
        for t in tri.triangles.tolist():
            v = points_2d[t,:]
            largest_dist = np.max(ssd.pdist(v))
            if largest_dist < edge_len_thresh:
                triangles.append(t)

        # transform into world reference frame
        points_3d_obj = T_patch_obj.inverse().dot(T_patch_render).apply(points_3d.T)
        points_3d_world = T_obj_world.inverse().apply(points_3d_obj).T
        contact_world = T_obj_world.inverse().dot(T_patch_obj.inverse()).dot(T_patch_render).apply(np.zeros(3))

        # plot: filled surface, wireframe overlay, and the contact point
        mesh_background = mlab.triangular_mesh(points_3d_world[:,0], points_3d_world[:,1], points_3d_world[:,2], triangles,
                                               representation='surface', color=patch_color)
        mesh_tris = mlab.triangular_mesh(points_3d_world[:,0], points_3d_world[:,1], points_3d_world[:,2], triangles,
                                         representation='wireframe', color=(0,0,0))
        points = mlab.points3d(contact_world[0], contact_world[1], contact_world[2],
                               color=contact_color, scale_factor=contact_scale)
      
Esempio n. 21
0
 def perform_stf(self, meshtotransform):
     """ Apply this registration's fitted similarity transform to a mesh.

     The transformed mesh is stored back into self.source_mesh.
     """
     self.source_mesh = meshtotransform
     # build a similarity transform from the pose fit on the source points
     fitted_pose = tfx.canonical.pose(self.transform(self.source_points))
     self.source_mesh = self.source_mesh.transform(stf.SimilarityTransform3D(fitted_pose))
Esempio n. 22
0
            if os.path.exists(grasp_ids_filename):
                logging.warning(
                    'Candidate grasps already exist for object %s in stable pose %s for gripper %s. Skipping...'
                    % (object_key, stp_id, gripper.name))
                continue

            # generate candidate grasps
            logging.info(
                'Computing candidate grasps for object {}'.format(object_key))
            candidate_grasp_set, candidate_grasp_ids, candidate_grasp_metrics = \
                generate_candidate_grasps(object_key, dataset, stable_pose,
                                          config['num_grasp_candidates'],
                                          gripper, config)

            # visualize grasp candidates
            T_obj_stp = stf.SimilarityTransform3D(pose=tfx.pose(stable_pose.r))
            T_table_world = stf.SimilarityTransform3D(from_frame='world',
                                                      to_frame='table')
            object_mesh = obj.mesh
            object_mesh_tf = object_mesh.transform(T_obj_stp)
            for i, grasp in enumerate(candidate_grasp_set):
                logging.info('')
                logging.info('Grasp %d (%d of %d)' %
                             (grasp.grasp_id, i, len(candidate_grasp_set)))

                metrics = config['candidate_grasp_metrics']
                for metric in metrics:
                    if metric == 'efc':
                        metric = db.generate_metric_tag('efcny_L1', config)
                    elif metric == 'pfc':
                        metric = db.generate_metric_tag('pfc', config)