Exemplo n.º 1
0
    def test_grasp_quality_functions(self):
        """Check that the wrapped quality function agrees with the raw
        force-closure metric on uniformly sampled grasps."""
        # Build the graspable object from its mesh and signed distance field.
        mesh = ObjFile(OBJ_FILENAME).read()
        sdf = SdfFile(SDF_FILENAME).read()
        graspable = GraspableObject3D(sdf, mesh)

        gripper = RobotGripper.load(GRIPPER_NAME)

        sampler = UniformGraspSampler(gripper, CONFIG)
        candidate_grasps = sampler.generate_grasps(
            graspable, target_num_grasps=NUM_TEST_CASES)

        # Wrap the force-closure metric in a quality function.
        quality_config = GraspQualityConfigFactory.create_config(
            CONFIG['metrics']['force_closure'])
        quality_fn = GraspQualityFunctionFactory.create_quality_function(
            graspable, quality_config)

        for grasp in candidate_grasps:
            closed, contacts = grasp.close_fingers(graspable)
            if not closed:
                continue
            contact1, contact2 = contacts
            wrapped_value = quality_fn(grasp).quality
            raw_value = PointGraspMetrics3D.force_closure(
                contact1, contact2, quality_config.friction_coef)
            self.assertEqual(wrapped_value, raw_value)
Exemplo n.º 2
0
    def test_contacts(self):
        """Check contact friction cones and contact reference frames for
        grasps sampled on the test object.

        For each friction cone edge f with unit contact normal n, the
        tangential component f - (f.n) n must have magnitude equal to the
        friction coefficient mu.
        """
        num_samples = 128
        mu = 0.5
        num_faces = 8
        num_grasps = NUM_TEST_CASES
        of = ObjFile(OBJ_FILENAME)
        sf = SdfFile(SDF_FILENAME)
        mesh = of.read()
        sdf = sf.read()
        obj = GraspableObject3D(sdf, mesh)

        gripper = RobotGripper.load(GRIPPER_NAME)

        ags = UniformGraspSampler(gripper, CONFIG)
        grasps = ags.generate_grasps(obj, target_num_grasps=num_grasps)

        for grasp in grasps:
            success, c = grasp.close_fingers(obj)
            if not success:
                continue
            c1, c2 = c

            # test friction cones at both contacts
            for contact in (c1, c2):
                fc_exists, fc, _ = contact.friction_cone(
                    num_cone_faces=num_faces, friction_coef=mu)
                if fc_exists:
                    self.assertEqual(fc.shape[1], num_faces)
                    # tangential components of the cone edges must have norm mu
                    alpha = np.tile(fc.T.dot(contact.normal), [3, 1]).T
                    # BUGFIX: tile count was hard-coded to 8; use num_faces so
                    # the test still holds if num_faces is changed
                    w = np.tile(contact.normal.reshape(3, 1),
                                [1, num_faces]).T
                    tan_vecs = fc.T - alpha * w
                    self.assertTrue(
                        np.allclose(np.linalg.norm(tan_vecs, axis=1), mu))

            # test reference frames at both contacts
            for contact in (c1, c2):
                T_contact_obj = contact.reference_frame(align_axes=True)
                self.assertTrue(
                    np.allclose(T_contact_obj.z_axis, contact.in_direction))
                self.assertTrue(
                    np.allclose(T_contact_obj.translation, contact.point))
                # with aligned axes, no sampled in-plane direction may have a
                # larger x component than the x axis itself
                for i in range(num_samples):
                    theta = 2 * i * np.pi / num_samples
                    v = (np.cos(theta) * T_contact_obj.x_axis +
                         np.sin(theta) * T_contact_obj.y_axis)
                    self.assertLessEqual(v[0], T_contact_obj.x_axis[0])
Exemplo n.º 3
0
def antipodal_grasp_sampler_for_storing(obj_graspable):
    """Sample up to 100 antipodal grasps on the given graspable object."""
    gripper_dir = '/home/silvia/dex-net/data/grippers'
    gripper = RobotGripper.load(GRIPPER_NAME, gripper_dir=gripper_dir)

    sampler = AntipodalGraspSampler(gripper, CONFIG)
    return sampler.generate_grasps(obj_graspable,
                                   target_num_grasps=100,
                                   max_iter=4)
Exemplo n.º 4
0
    def database_delete_grasps(self, object_name):
        """Delete all grasps stored for object_name (for the configured
        gripper) from the dataset."""
        gripper = RobotGripper.load(
            GRIPPER_NAME, gripper_dir='/home/silvia/dex-net/data/grippers')

        # Open the Dex-Net database/dataset, delete, then close the handle.
        handle = DexNet()
        handle.open_database(self.database_path)
        handle.open_dataset(self.dataset_name)
        handle.dataset.delete_grasps(object_name, gripper=gripper.name)
        handle.close_database()
Exemplo n.º 5
0
    def get_object_mesh(self, object_name):
        """Return the stored mesh for object_name from the dataset."""
        # load gripper (kept for parity with the other database helpers,
        # although the mesh lookup itself does not use it)
        gripper = RobotGripper.load(
            GRIPPER_NAME, gripper_dir='/home/silvia/dex-net/data/grippers')

        # open Dex-Net API
        dexnet_handle = DexNet()
        dexnet_handle.open_database(self.database_path)
        dexnet_handle.open_dataset(self.dataset_name)

        object_mesh = dexnet_handle.dataset.mesh(object_name)

        # BUGFIX: the handle was previously left open; close it like every
        # sibling helper does once the mesh has been read.
        dexnet_handle.close_database()

        return object_mesh
Exemplo n.º 6
0
    def get_stable_pose_transform(self, object_name, stable_pose_id):
        """Look up stable pose number stable_pose_id for object_name."""
        gripper = RobotGripper.load(
            GRIPPER_NAME, gripper_dir='/home/silvia/dex-net/data/grippers')

        # Open the Dex-Net database and dataset.
        handle = DexNet()
        handle.open_database(self.database_path)
        handle.open_dataset(self.dataset_name)

        pose_key = 'pose_' + str(stable_pose_id)
        stable_pose = handle.dataset.stable_pose(object_name,
                                                 stable_pose_id=pose_key)

        handle.close_database()

        return stable_pose
Exemplo n.º 7
0
    def database_save(self, object_name, filepath, stable_poses_n, force_overwrite=False):
        """Create (if needed) a graspable for object_name, sample grasps per
        stable pose, and store grasps + metrics in the database.

        Parameters
        ----------
        object_name : key of the object in the dataset
        filepath : mesh file used when the object is not yet in the dataset
        stable_poses_n : number of stable poses kept for a new object
        force_overwrite : overwrite existing grasps for a stable pose
        """
        # load gripper
        gripper = RobotGripper.load(GRIPPER_NAME, gripper_dir='/home/silvia/dex-net/data/grippers')

        # open Dex-Net API
        dexnet_handle = DexNet()
        dexnet_handle.open_database(self.database_path)
        dexnet_handle.open_dataset(self.dataset_name)

        mass = CONFIG['default_mass']

        if object_name in dexnet_handle.dataset.object_keys:
            # reuse the already-stored graspable
            graspable = dexnet_handle.dataset[object_name]
            mesh = graspable.mesh
            sdf = graspable.sdf
            stable_poses = dexnet_handle.dataset.stable_poses(object_name)
        else:
            # Create temp dir if cache dir is not provided
            mp_cache = CONFIG['cache_dir']
            del_cache = False
            if mp_cache is None:
                mp_cache = tempfile.mkdtemp()
                # NOTE(review): the temp dir is never removed afterwards --
                # TODO clean it up when del_cache is True
                del_cache = True

            # open mesh preprocessor
            mesh_processor = MeshProcessor(filepath, mp_cache)
            mesh_processor.generate_graspable(CONFIG)
            mesh = mesh_processor.mesh
            sdf = mesh_processor.sdf
            stable_poses = mesh_processor.stable_poses[:stable_poses_n]

            # write graspable to database
            dexnet_handle.dataset.create_graspable(object_name, mesh, sdf, stable_poses, mass=mass)

        # sample grasps per stable pose and write them to the database
        grasps, metrics = antipodal_grasp_sampler_for_storing(mesh, sdf, stable_poses)
        pose_id = 0  # renamed from `id` to avoid shadowing the builtin
        for grasps_for_pose, metrics_for_pose in zip(grasps, metrics):
            if grasps_for_pose is None:  # BUGFIX: identity check, not `== None`
                continue
            dexnet_handle.dataset.store_grasps_and_metrics(
                object_name, grasps_for_pose, metrics_for_pose,
                gripper=gripper.name, stable_pose_id=('pose_' + str(pose_id)),
                force_overwrite=force_overwrite)
            pose_id += 1

        dexnet_handle.close_database()
Exemplo n.º 8
0
    def get_stable_pose(self, object_name, stable_pose_id):
        """Return the 4x4 world-frame pose matrix for the given stable pose
        of object_name."""
        gripper = RobotGripper.load(GRIPPER_NAME, gripper_dir='/home/silvia/dex-net/data/grippers')

        # Open the Dex-Net database and dataset.
        handle = DexNet()
        handle.open_database(self.database_path)
        handle.open_dataset(self.dataset_name)

        # read the most robust grasp
        stable_pose = handle.dataset.stable_pose(
            object_name, stable_pose_id='pose_' + str(stable_pose_id))

        handle.close_database()

        # Assemble a homogeneous transform from rotation and translation.
        pose_matrix = np.eye(4, 4)
        pose_matrix[:3, :3] = stable_pose.T_obj_world.rotation
        pose_matrix[:3, 3] = stable_pose.T_obj_world.translation
        return pose_matrix
Exemplo n.º 9
0
    def test_antipodal_grasp_sampler(self):
        """Every antipodal grasp whose fingers close must be in force
        closure at the sampling friction coefficient."""
        mesh = ObjFile(OBJ_FILENAME).read()
        sdf = SdfFile(SDF_FILENAME).read()
        graspable = GraspableObject3D(sdf, mesh)

        gripper = RobotGripper.load(GRIPPER_NAME)

        sampler = AntipodalGraspSampler(gripper, CONFIG)
        grasps = sampler.generate_grasps(graspable,
                                         target_num_grasps=NUM_TEST_CASES)

        # check each sampled grasp with the raw force-closure metric
        friction = CONFIG['sampling_friction_coef']
        for grasp in grasps:
            closed, contacts = grasp.close_fingers(graspable)
            if not closed:
                continue
            contact1, contact2 = contacts
            self.assertTrue(
                PointGraspMetrics3D.force_closure(contact1, contact2,
                                                  friction))
Exemplo n.º 10
0
    def antipodal_grasp_sampler(self):
        """Sample antipodal grasps, score each for force closure, and render
        the object mesh.

        NOTE(review): fn_fc and true_fc are computed but never compared or
        returned, and the vis.grasp call is commented out, so the visual
        output is just the bare mesh.
        """
        # Load mesh + SDF and build the graspable object.
        of = ObjFile(OBJ_FILENAME)
        sf = SdfFile(SDF_FILENAME)
        mesh = of.read()
        sdf = sf.read()
        obj = GraspableObject3D(sdf, mesh)

        gripper = RobotGripper.load(GRIPPER_NAME)

        ags = AntipodalGraspSampler(gripper, CONFIG)
        grasps = ags.generate_grasps(obj, target_num_grasps=10)

        # Force-closure quality function for scoring each grasp.
        quality_config = GraspQualityConfigFactory.create_config(
            CONFIG['metrics']['force_closure'])
        quality_fn = GraspQualityFunctionFactory.create_quality_function(
            obj, quality_config)

        # NOTE(review): this lambda ignores its argument and always returns
        # the configured quality scale.
        q_to_c = lambda quality: CONFIG['quality_scale']

        i = 0
        vis.figure()
        vis.mesh(obj.mesh.trimesh, style='surface')
        for grasp in grasps:
            print(grasp.frame)
            success, c = grasp.close_fingers(obj)
            if success:
                c1, c2 = c
                fn_fc = quality_fn(grasp).quality
                true_fc = PointGraspMetrics3D.force_closure(
                    c1, c2, quality_config.friction_coef)
        #print(grasp)
            T_obj_world = RigidTransform(from_frame='obj', to_frame='world')
            color = plt.get_cmap('hsv')(q_to_c(CONFIG['metrics']))[:-1]
            T_obj_gripper = grasp.gripper_pose(gripper)
            #vis.grasp(grasp,grasp_axis_color=color,endpoint_color=color)
            i += 1
        #T_obj_world = RigidTransform(from_frame='obj',to_frame='world')
        vis.show(False)
Exemplo n.º 11
0
def antipodal_grasp_sampler_for_storing_stable_poses(mesh, sdf, stable_poses):
    """Sample antipodal grasps once, then bucket them per stable pose.

    For each stable pose, keeps only grasps whose axis is parallel to the
    table in that pose and scores them with the force-closure quality
    function.

    Returns
    -------
    grasps, metrics : lists indexed by stable-pose id; each entry is a list
        of deep-copied grasps / quality values.
    """
    CONFIG['obj_rescaling_type'] = RescalingType.RELATIVE
    obj = GraspableObject3D(sdf, mesh)

    gripper = RobotGripper.load(
        GRIPPER_NAME, gripper_dir='/home/silvia/dex-net/data/grippers')

    ags = AntipodalGraspSampler(gripper, CONFIG)

    quality_config = GraspQualityConfigFactory.create_config(
        CONFIG['metrics']['force_closure'])
    quality_function = GraspQualityFunctionFactory.create_quality_function(
        obj, quality_config)

    max_poses = len(stable_poses)
    grasps = [None] * max_poses
    metrics = [None] * max_poses
    # Sample one shared pool of grasps and filter it per pose below.
    all_grasps = ags.generate_grasps(obj, target_num_grasps=200, max_iter=4)

    # pose_id renamed from `id` to avoid shadowing the builtin
    for pose_id, stable_pose in enumerate(stable_poses):
        print('sampling for stable pose: ', pose_id)
        if pose_id == max_poses:  # defensive; enumerate cannot exceed this
            break
        # keep only grasps whose axis lies parallel to the table plane
        grasps_pose = [g for g in all_grasps
                       if grasp_is_parallel_to_table(g.axis, stable_pose.r)]
        grasps[pose_id] = []
        metrics[pose_id] = []
        for grasp in grasps_pose:
            quality = quality_function.quality(grasp)
            grasps[pose_id].append(copy.deepcopy(grasp))
            metrics[pose_id].append(copy.deepcopy(quality.quality))
    return grasps, metrics
Exemplo n.º 12
0
 def test_init_gripper(self):
     """Smoke test: loading the configured gripper must not raise."""
     gripper = RobotGripper.load(GRIPPER_NAME)
Exemplo n.º 13
0
# def camera_intrinsics_callback(data):
#     """ Callback for Camera Intrinsics """
#     global ir_intrinsics
#     ir_intrinsics = per.CameraIntrinsics('primesense_overhead', data.K[0], data.K[4], data.K[2], data.K[5], data.K[1], data.height, data.width)

if __name__ == '__main__':
    # Script entry point: bring up the YuMi control node, load the gripper
    # and the hand-eye calibration, then block waiting for GQCNN grasps.

    # initialize the ROS node
    rospy.init_node('Yumi_Control_Node')

    rospy.loginfo('Initializing YuMi')
    config = YamlConfig(
        '/home/autolab/Workspace/vishal_working/catkin_ws/src/gqcnn/cfg/ros_nodes/yumi_control_node.yaml'
    )
    robot, subscriber, arm, home_pose = init_robot(config)

    rospy.loginfo('Loading Gripper')
    gripper = RobotGripper.load('yumi_metal_spline')

    # Camera-to-world extrinsics from a stored calibration file.
    rospy.loginfo('Loading T_camera_world')
    T_camera_world = RigidTransform.load(
        '/home/autolab/Public/alan/calib/primesense_overhead/primesense_overhead_to_world.tf'
    )

    # create a subscriber to get GQCNN Grasp
    rospy.loginfo('Subscribing to GQCNN Grasp Topic')
    rospy.Subscriber('/gqcnn_grasp', GQCNNGrasp, grasp_callback)

    # Spin until shutdown; grasp_callback does the actual work.
    rospy.spin()
Exemplo n.º 14
0
 def _set_gripper(self):
     """Load and return the gripper named in the configuration."""
     gripper_name = self.config['gripper']
     return RobotGripper.load(gripper_name)
Exemplo n.º 15
0
    # fake robot-to-camera transform (random pose standing in for a real
    # extrinsic calibration)
    T_camera_robot = RigidTransform(
        rotation=RigidTransform.random_rotation(),
        translation=RigidTransform.random_translation(),
        from_frame='camera',
        to_frame='robot')

    # fake transformation from a known 3D object model frame of reference to the camera frame of reference
    # in practice this could be computed using point cloud registration
    T_obj_camera = RigidTransform(
        rotation=RigidTransform.random_rotation(),
        translation=RigidTransform.random_translation(),
        from_frame='obj',
        to_frame='camera')

    # load gripper
    gripper = RobotGripper.load(gripper_name)

    # open Dex-Net API
    # NOTE(review): the handle is not closed in this visible span -- confirm
    # it is closed later in the enclosing function
    dexnet_handle = DexNet()
    dexnet_handle.open_database(database_path)
    dexnet_handle.open_dataset(dataset_name)

    # read the most robust grasp (grasps sorted by metric, best first)
    sorted_grasps = dexnet_handle.dataset.sorted_grasps(object_name,
                                                        metric_name,
                                                        gripper=gripper_name)
    most_robust_grasp = sorted_grasps[0][
        0]  # Note: in general this step will require collision checking

    # transform into the robot reference frame for control
    T_gripper_obj = most_robust_grasp.gripper_pose(gripper)
Exemplo n.º 16
0
def worker(i, sample_nums, grasp_amount, good_grasp):  # grasp sampling and scoring    100  20
    """
    brief: Detect and score grasp poses on the specified mesh model using a
    random sampling algorithm.
    param [in]  i             index of the mesh model to process
    param [in]  sample_nums   target number of grasps sampled per round
    param [in]  grasp_amount  minimum number of good grasps required for
                              every friction coefficient
    param [out] good_grasp    shared list collecting
                              (grasp, friction_coef, canny_quality) tuples
    """

    # extract the target object name from its file path
    object_name = file_list_all[i][len(home_dir) + 35:]
    print('a worker of task {} start'.format(object_name))

    # read the initial configuration file (held as an in-memory copy)
    yaml_config = YamlConfig(home_dir + "/code/dex-net/test/config.yaml")
    # set the gripper name
    gripper_name = 'panda'
    # load the gripper configuration for that name
    gripper = RobotGripper.load(gripper_name,
                                home_dir + "/code/dex-net/data/grippers")
    # choose the grasp sampling method
    grasp_sample_method = "antipodal"
    if grasp_sample_method == "uniform":
        ags = UniformGraspSampler(gripper, yaml_config)
    elif grasp_sample_method == "gaussian":
        ags = GaussianGraspSampler(gripper, yaml_config)
    elif grasp_sample_method == "antipodal":
        # antipodal sampling, built from the gripper and the config
        ags = AntipodalGraspSampler(gripper, yaml_config)
    elif grasp_sample_method == "gpg":
        ags = GpgGraspSampler(gripper, yaml_config)
    elif grasp_sample_method == "point":
        ags = PointGraspSampler(gripper, yaml_config)
    else:
        raise NameError("Can't support this sampler")
    print("Log: do job", i)
    # paths of the obj model file and the sdf file
    if os.path.exists(str(file_list_all[i]) +
                      "/google_512k/nontextured.obj") and os.path.exists(
                          str(file_list_all[i]) +
                          "/google_512k/nontextured.sdf"):
        of = ObjFile(str(file_list_all[i]) + "/google_512k/nontextured.obj")
        sf = SdfFile(str(file_list_all[i]) + "/google_512k/nontextured.sdf")
    else:
        print("can't find any obj or sdf file!")
        raise NameError("can't find any obj or sdf file!")

    # read the mesh and the sdf from those paths
    mesh = of.read()
    sdf = sf.read()
    # build the graspable object model
    obj = GraspableObject3D(sdf, mesh)
    print("Log: opened object", i + 1, object_name)

    #########################################
    # metric configuration
    force_closure_quality_config = {}  # force-closure configs keyed by friction coefficient
    canny_quality_config = {}
    # decreasing sequence from 2.0 toward 0.75 (exclusive)
    # NOTE(review): the original comment said step -0.4 but the code uses -0.3
    fc_list_sub1 = np.arange(2.0, 0.75, -0.3)
    # decreasing sequence from 0.5 toward 0.36 (exclusive)
    # NOTE(review): the original comment said step -0.05 but the code uses -0.1
    fc_list_sub2 = np.arange(0.5, 0.36, -0.1)

    # concatenate both ranges; the different step sizes give more resolution
    # at the low-friction end
    fc_list = np.concatenate([fc_list_sub1, fc_list_sub2])
    print("判断摩擦系数")
    print(fc_list)
    for value_fc in fc_list:
        # round value_fc to 2 decimal places
        value_fc = round(value_fc, 2)
        # modify the friction coefficient only in the in-memory config;
        # the yaml file on disk is untouched
        yaml_config['metrics']['force_closure']['friction_coef'] = value_fc
        yaml_config['metrics']['robust_ferrari_canny'][
            'friction_coef'] = value_fc
        # use each friction value as a dictionary key
        force_closure_quality_config[
            value_fc] = GraspQualityConfigFactory.create_config(
                yaml_config['metrics']['force_closure'])
        canny_quality_config[
            value_fc] = GraspQualityConfigFactory.create_config(
                yaml_config['metrics']['robust_ferrari_canny'])

    ##################### start sampling ############################
    # one zero-initialized counter per friction coefficient
    good_count_perfect = np.zeros(len(fc_list))
    count = 0
    # minimum number of grasps required per friction value (grasp_amount)
    minimum_grasp_per_fc = grasp_amount
    # keep sampling until every friction coefficient has at least
    # minimum_grasp_per_fc valid (force-closure) grasps
    while np.sum(good_count_perfect < minimum_grasp_per_fc) != 0:
        # sample antipodal grasps that merely fit the gripper; force closure
        # is not checked yet. If the target count cannot be reached after
        # several retries the sampler returns whatever it found.
        grasps = ags.generate_grasps(obj,
                                     target_num_grasps=sample_nums,
                                     grasp_gen_mult=10,
                                     max_iter=10,
                                     vis=False,
                                     random_approach_angle=True)
        count += len(grasps)
        # examine every sampled grasp
        for j in grasps:
            tmp, is_force_closure = False, False
            # apply each friction coefficient and test for force closure
            for ind_, value_fc in enumerate(fc_list):
                value_fc = round(value_fc, 2)
                tmp = is_force_closure
                # is the grasp in force closure at this coefficient?
                is_force_closure = PointGraspMetrics3D.grasp_quality(
                    j, obj, force_closure_quality_config[value_fc], vis=False)
                # failed at this (smaller) coefficient but passed at the
                # previous (larger) one: the boundary coefficient is found
                if tmp and not is_force_closure:
                    # only record it if that bucket still needs grasps
                    if good_count_perfect[ind_ - 1] < minimum_grasp_per_fc:
                        # score with the canny metric at the boundary value
                        canny_quality = PointGraspMetrics3D.grasp_quality(
                            j,
                            obj,
                            canny_quality_config[round(fc_list[ind_ - 1], 2)],
                            vis=False)
                        good_grasp.append((j, round(fc_list[ind_ - 1],
                                                    2), canny_quality))
                        # bump the counter of the boundary coefficient
                        good_count_perfect[ind_ - 1] += 1
                    # boundary found for grasp j; move on to the next grasp
                    break
                # still in force closure at the last (smallest) coefficient
                # in the list: all coefficients satisfied
                elif is_force_closure and value_fc == fc_list[-1]:
                    if good_count_perfect[ind_] < minimum_grasp_per_fc:
                        # use the current coefficient as the boundary
                        canny_quality = PointGraspMetrics3D.grasp_quality(
                            j, obj, canny_quality_config[value_fc], vis=False)
                        good_grasp.append((j, value_fc, canny_quality))
                        good_count_perfect[ind_] += 1
                    # grasp j satisfied every coefficient; next grasp
                    break
        print('Object:{} GoodGrasp:{}'.format(object_name,
                                              good_count_perfect))  # progress

    object_name_len = len(object_name)
    # pad the name to 25 characters for aligned logging
    object_name_ = str(object_name) + " " * (25 - object_name_len)
    if count == 0:
        good_grasp_rate = 0
    else:
        good_grasp_rate = len(good_grasp) / count
    print('Gripper:{} Object:{} Rate:{:.4f} {}/{}'.format(
        gripper_name, object_name_, good_grasp_rate, len(good_grasp), count))
Exemplo n.º 17
0
def worker(i, sample_nums, grasp_amount, good_grasp):
    """Sample grasps on mesh model i, find each grasp's force-closure
    friction boundary, and append (grasp, friction_coef, canny_quality)
    tuples to the shared good_grasp list until every friction coefficient
    has at least grasp_amount good grasps."""
    object_name = file_list_all[i][len(home_dir) + 35:]
    print('a worker of task {} start'.format(object_name))

    yaml_config = YamlConfig(home_dir +
                             "/code/grasp-pointnet/dex-net/test/config.yaml")
    gripper_name = 'robotiq_85'
    gripper = RobotGripper.load(
        gripper_name, home_dir + "/code/grasp-pointnet/dex-net/data/grippers")
    # choose one of the available sampling strategies
    grasp_sample_method = "antipodal"
    if grasp_sample_method == "uniform":
        ags = UniformGraspSampler(gripper, yaml_config)
    elif grasp_sample_method == "gaussian":
        ags = GaussianGraspSampler(gripper, yaml_config)
    elif grasp_sample_method == "antipodal":
        ags = AntipodalGraspSampler(gripper, yaml_config)
    elif grasp_sample_method == "gpg":
        ags = GpgGraspSampler(gripper, yaml_config)
    elif grasp_sample_method == "point":
        ags = PointGraspSampler(gripper, yaml_config)
    else:
        raise NameError("Can't support this sampler")
    print("Log: do job", i)
    # NOTE(review): only the .obj path is checked; a missing .sdf would fail
    # later at sf.read()
    if os.path.exists(str(file_list_all[i]) + "/google_512k/nontextured.obj"):
        of = ObjFile(str(file_list_all[i]) + "/google_512k/nontextured.obj")
        sf = SdfFile(str(file_list_all[i]) + "/google_512k/nontextured.sdf")
    else:
        print("can't find any obj or sdf file!")
        raise NameError("can't find any obj or sdf file!")
    mesh = of.read()
    sdf = sf.read()
    obj = GraspableObject3D(sdf, mesh)
    print("Log: opened object", i + 1, object_name)

    # per-friction-coefficient quality configurations
    force_closure_quality_config = {}
    canny_quality_config = {}
    # coarse steps at high friction, finer steps at low friction
    fc_list_sub1 = np.arange(2.0, 0.75, -0.4)
    fc_list_sub2 = np.arange(0.5, 0.36, -0.05)
    fc_list = np.concatenate([fc_list_sub1, fc_list_sub2])
    for value_fc in fc_list:
        value_fc = round(value_fc, 2)
        # patch the friction coefficient in the in-memory config only
        yaml_config['metrics']['force_closure']['friction_coef'] = value_fc
        yaml_config['metrics']['robust_ferrari_canny'][
            'friction_coef'] = value_fc

        force_closure_quality_config[
            value_fc] = GraspQualityConfigFactory.create_config(
                yaml_config['metrics']['force_closure'])
        canny_quality_config[
            value_fc] = GraspQualityConfigFactory.create_config(
                yaml_config['metrics']['robust_ferrari_canny'])

    good_count_perfect = np.zeros(len(fc_list))
    count = 0
    minimum_grasp_per_fc = grasp_amount
    # resample until every friction bucket holds enough good grasps
    while np.sum(good_count_perfect < minimum_grasp_per_fc) != 0:
        grasps = ags.generate_grasps(obj,
                                     target_num_grasps=sample_nums,
                                     grasp_gen_mult=10,
                                     vis=False,
                                     random_approach_angle=True)
        count += len(grasps)
        for j in grasps:
            tmp, is_force_closure = False, False
            # walk the friction values from large to small until force
            # closure first fails; the previous value is the boundary
            for ind_, value_fc in enumerate(fc_list):
                value_fc = round(value_fc, 2)
                tmp = is_force_closure
                is_force_closure = PointGraspMetrics3D.grasp_quality(
                    j, obj, force_closure_quality_config[value_fc], vis=False)
                if tmp and not is_force_closure:
                    if good_count_perfect[ind_ - 1] < minimum_grasp_per_fc:
                        # score with the canny metric at the boundary value
                        canny_quality = PointGraspMetrics3D.grasp_quality(
                            j,
                            obj,
                            canny_quality_config[round(fc_list[ind_ - 1], 2)],
                            vis=False)
                        good_grasp.append((j, round(fc_list[ind_ - 1],
                                                    2), canny_quality))
                        good_count_perfect[ind_ - 1] += 1
                    break
                # still in force closure at the smallest coefficient
                elif is_force_closure and value_fc == fc_list[-1]:
                    if good_count_perfect[ind_] < minimum_grasp_per_fc:
                        canny_quality = PointGraspMetrics3D.grasp_quality(
                            j, obj, canny_quality_config[value_fc], vis=False)
                        good_grasp.append((j, value_fc, canny_quality))
                        good_count_perfect[ind_] += 1
                    break
        print('Object:{} GoodGrasp:{}'.format(object_name, good_count_perfect))

    object_name_len = len(object_name)
    # pad the name to 25 characters for aligned logging
    object_name_ = str(object_name) + " " * (25 - object_name_len)
    if count == 0:
        good_grasp_rate = 0
    else:
        good_grasp_rate = len(good_grasp) / count
    print('Gripper:{} Object:{} Rate:{:.4f} {}/{}'.format(
        gripper_name, object_name_, good_grasp_rate, len(good_grasp), count))
Exemplo n.º 18
0
from autolab_core import YamlConfig
try:
    from mayavi import mlab
except ImportError:
    print("can not import mayavi")
    mlab = None
from dexnet.grasping import GpgGraspSampler  # temporary way for show 3D gripper using mayavi
import pcl
import glob

# global configurations:
home_dir = os.environ['HOME']
yaml_config = YamlConfig(home_dir +
                         "/code/grasp-pointnet/dex-net/test/config.yaml")
gripper_name = 'robotiq_85'
gripper = RobotGripper.load(
    gripper_name, home_dir + "/code/grasp-pointnet/dex-net/data/grippers")
# GPG sampler instance, used for showing the 3D gripper with mayavi
ags = GpgGraspSampler(gripper, yaml_config)
save_fig = True  # save fig as png file
show_fig = False  # show the mayavi figure
generate_new_file = False  # whether generate new file for collision free grasps
check_pcd_grasp_points = False

# Render off-screen when figures are not shown interactively.
# NOTE(review): mlab may be None if the mayavi import failed above, in which
# case this line raises AttributeError -- confirm mayavi is available.
if not show_fig:
    mlab.options.offscreen = True


def get_file_name(file_dir_):
    file_list = []
    for root, dirs, files in os.walk(file_dir_):
        # print(root)  # current path
        if root.count('/') == file_dir_.count('/') + 1:
Exemplo n.º 19
0
from meshpy.obj_file import ObjFile
from meshpy.sdf_file import SdfFile
from dexnet.grasping import GraspableObject3D
import numpy as np
from dexnet.visualization.visualizer3d import DexNetVisualizer3D as Vis
from dexnet.grasping import RobotGripper
from autolab_core import YamlConfig
from mayavi import mlab
from dexnet.grasping import GpgGraspSampler  # temporary way for show 3D gripper using mayavi
import glob

# global configurations:
home_dir = os.environ["HOME"] + "/code/PointNetGPD"
yaml_config = YamlConfig(home_dir + "/dex-net/test/config.yaml")
gripper_name = "robotiq_85"
gripper = RobotGripper.load(gripper_name, home_dir + "/dex-net/data/grippers")
# GPG sampler instance, used for showing the 3D gripper with mayavi
ags = GpgGraspSampler(gripper, yaml_config)
save_fig = False  # save fig as png file
show_fig = True  # show the mayavi figure
generate_new_file = False  # whether generate new file for collision free grasps
check_pcd_grasp_points = False


def open_npy_and_obj(name_to_open_):
    npy_m_ = np.load(name_to_open_)
    file_dir = home_dir + "/PointNetGPD/data/ycb-tools/models/ycb/"
    object_name_ = name_to_open_.split("/")[-1][:-4]
    ply_name_ = file_dir + object_name_ + "/google_512k/nontextured.ply"
    if not check_pcd_grasp_points:
        of = ObjFile(file_dir + object_name_ + "/google_512k/nontextured.obj")
        sf = SdfFile(file_dir + object_name_ + "/google_512k/nontextured.sdf")
Exemplo n.º 20
0
    def read_grasps(self,
                    object_name,
                    stable_pose_id,
                    max_grasps=10,
                    visualize=False):
        """Read the grasps stored for object_name in the given stable pose,
        sorted by the force_closure metric.

        Returns
        -------
        contact_points, gripper_poses : contact-point results (as produced
            by get_contact_points) and 4x4 world-frame gripper pose matrices
            for up to max_grasps grasps; (None, None) when no grasps exist
            for this stable pose.
        """
        # load gripper
        gripper = RobotGripper.load(
            GRIPPER_NAME, gripper_dir='/home/silvia/dex-net/data/grippers')

        # open Dex-Net API
        dexnet_handle = DexNet()
        dexnet_handle.open_database(self.database_path)
        dexnet_handle.open_dataset(self.dataset_name)

        # read the most robust grasp
        sorted_grasps, metrics = dexnet_handle.dataset.sorted_grasps(
            object_name,
            stable_pose_id=('pose_' + str(stable_pose_id)),
            metric='force_closure',
            gripper=gripper.name)

        if (len(sorted_grasps)) == 0:
            print('no grasps for this stable pose')
            if visualize:
                # show the object resting in the requested stable pose
                stable_pose = dexnet_handle.dataset.stable_pose(
                    object_name,
                    stable_pose_id=('pose_' + str(stable_pose_id)))
                obj = dexnet_handle.dataset.graspable(object_name)
                vis.figure()
                T_table_world = RigidTransform(from_frame='table',
                                               to_frame='world')
                T_obj_world = Visualizer3D.mesh_stable_pose(
                    obj.mesh.trimesh,
                    stable_pose.T_obj_world,
                    T_table_world=T_table_world,
                    color=(0.5, 0.5, 0.5),
                    style='surface',
                    plot_table=True,
                    dim=0.15)
                vis.show(False)
            # NOTE(review): the database handle is not closed on this early
            # return -- confirm whether that is intentional
            return None, None

        # NOTE(review): under Python 3 this is a lazy map object, not a
        # list; the caller can only consume it once
        contact_points = map(get_contact_points, sorted_grasps)

        # ------------------------ VISUALIZATION CODE ------------------------
        if visualize:
            # map metric values to colors; constant color if all are equal
            low = np.min(metrics)
            high = np.max(metrics)
            if low == high:
                q_to_c = lambda quality: CONFIG['quality_scale']
            else:
                q_to_c = lambda quality: CONFIG['quality_scale'] * (
                    quality - low) / (high - low)

            stable_pose = dexnet_handle.dataset.stable_pose(
                object_name, stable_pose_id=('pose_' + str(stable_pose_id)))
            obj = dexnet_handle.dataset.graspable(object_name)
            vis.figure()

            T_table_world = RigidTransform(from_frame='table',
                                           to_frame='world')
            T_obj_world = Visualizer3D.mesh_stable_pose(
                obj.mesh.trimesh,
                stable_pose.T_obj_world,
                T_table_world=T_table_world,
                color=(0.5, 0.5, 0.5),
                style='surface',
                plot_table=True,
                dim=0.15)
            for grasp, metric in zip(sorted_grasps, metrics):
                color = plt.get_cmap('hsv')((q_to_c(metric)))[:-1]
                vis.grasp(grasp,
                          T_obj_world=stable_pose.T_obj_world,
                          grasp_axis_color=color,
                          endpoint_color=color)
            vis.show(False)
        # ------------------------ END VISUALIZATION CODE ---------------------------

        # ------------------------ START COLLISION CHECKING ---------------------------
        #stable_pose = dexnet_handle.dataset.stable_pose(object_name, stable_pose_id=('pose_'+str(stable_pose_id)))
        #graspable = dexnet_handle.dataset.graspable(object_name)
        #cc = GraspCollisionChecker(gripper).set_graspable_object(graspable, stable_pose.T_obj_world)
        stable_pose_matrix = self.get_stable_pose(object_name, stable_pose_id)
        # CLOSE DATABASE
        dexnet_handle.close_database()
        gripper_poses = []

        # build a planar (yaw-only) gripper pose for each of the best grasps
        for grasp in sorted_grasps[:max_grasps]:
            gripper_pose_matrix = np.eye(4, 4)
            # grasp center and axis expressed in the world frame
            center_world = np.matmul(
                stable_pose_matrix,
                [grasp.center[0], grasp.center[1], grasp.center[2], 1])
            axis_world = np.matmul(
                stable_pose_matrix,
                [grasp.axis_[0], grasp.axis_[1], grasp.axis_[2], 1])
            # yaw of the grasp axis projected onto the table plane
            gripper_angle = math.atan2(axis_world[1], axis_world[0])
            gripper_pose_matrix[:3, 3] = center_world[:3]
            gripper_pose_matrix[0, 0] = math.cos(gripper_angle)
            gripper_pose_matrix[0, 1] = -math.sin(gripper_angle)
            gripper_pose_matrix[1, 0] = math.sin(gripper_angle)
            gripper_pose_matrix[1, 1] = math.cos(gripper_angle)
            #if visualize:
            #    vis.figure()
            #    vis.gripper_on_object(gripper, grasp, obj, stable_pose=stable_pose.T_obj_world)
            #    vis.show(False)
            gripper_poses.append(gripper_pose_matrix)

        return contact_points, gripper_poses
Exemplo n.º 21
0
import numpy as np
import os

from autolab_core import Point, YamlConfig, RigidTransform
from dexnet.constants import READ_ONLY_ACCESS
from dexnet.grasping import RobotGripper
from dexnet.database import Hdf5Database

# Experiment configuration file and output directory for grasp-axis stats.
config_dir = 'cfg/tools/generate_oblique_xyzq.yaml'
save_dir = '/data/hdf5_stats/grasp_axes_2/'

# Load the YAML config and the gripper it names.
config = YamlConfig(config_dir)
gripper = RobotGripper.load(config['gripper'])
table_offset = config['collision_checking']['table_offset']

# Ensure the output directory exists (mkdir, not makedirs: parent must exist).
if not os.path.exists(save_dir):
    os.mkdir(save_dir)

# Open the Dex-Net HDF5 database read-only and resolve the target datasets.
database = Hdf5Database(config['database_name'], access_level=READ_ONLY_ACCESS)
target_object_keys = config['target_objects']
dataset_names = target_object_keys.keys()
datasets = [database.dataset(dn) for dn in dataset_names]

# Accumulators for per-grasp statistics (filled in the loop below).
labels = []
beta = []
orig_beta = []
alpha = []
cnt = 0
obj_cnt = 0
# NOTE(review): loop body appears truncated in this snippet — only the first
# statement of the per-dataset iteration is visible.
for dataset in datasets:
    object_keys = dataset.object_keys
Exemplo n.º 22
0
    def antipodal_grasp_sampler(self):
        """Sample antipodal grasps on the test object and visualize them.

        Loads the object mesh/SDF through ``MeshProcessor``, samples up to
        100 antipodal grasps, scores each grasp whose fingers close
        successfully with the robust Ferrari-Canny metric, and renders the
        grasps color-coded by quality normalized to ``CONFIG['quality_scale']``.
        """
        CONFIG['obj_rescaling_type'] = RescalingType.RELATIVE
        mesh_processor = MeshProcessor(OBJ_FILENAME, CONFIG['cache_dir'])
        mesh_processor.generate_graspable(CONFIG)
        obj = GraspableObject3D(mesh_processor.sdf, mesh_processor.mesh)

        gripper = RobotGripper.load(GRIPPER_NAME)

        ags = AntipodalGraspSampler(gripper, CONFIG)
        grasps = ags.sample_grasps(obj, num_grasps=100)
        print(len(grasps))

        quality_config = GraspQualityConfigFactory.create_config(
            CONFIG['metrics']['robust_ferrari_canny'])
        # Constructed for parity with the other quality-function tests; the
        # metric below is computed directly via PointGraspMetrics3D.
        quality_fn = GraspQualityFunctionFactory.create_quality_function(
            obj, quality_config)

        vis.figure()
        vis.mesh(obj.mesh.trimesh, style='surface')

        # Keep the successful grasps alongside their metrics so the
        # visualization loop pairs each grasp with ITS OWN quality value
        # (a plain zip over all sampled grasps would misalign them whenever
        # close_fingers fails for some grasp).
        successful_grasps = []
        metrics = []
        for grasp in grasps:
            success, _ = grasp.close_fingers(obj)
            if success:
                successful_grasps.append(grasp)
                metrics.append(
                    PointGraspMetrics3D.grasp_quality(grasp, obj,
                                                      quality_config))

        # Guard: np.min/np.max raise on an empty sequence.
        if not metrics:
            print('No grasp closed successfully; nothing to visualize')
            vis.show(False)
            return

        low = np.min(metrics)
        high = np.max(metrics)
        print(low)
        print(high)
        # Map quality into [0, quality_scale]; degenerate case when all
        # metrics are equal maps everything to quality_scale.
        if low == high:
            q_to_c = lambda quality: CONFIG['quality_scale']
        else:
            q_to_c = lambda quality: CONFIG['quality_scale'] * (
                quality - low) / (high - low)
        print(len(metrics))
        for grasp, metric in zip(successful_grasps, metrics):
            # Drop the alpha channel from the RGBA colormap sample.
            color = plt.get_cmap('hsv')(q_to_c(metric))[:-1]
            print(color)
            vis.grasp(grasp, grasp_axis_color=color, endpoint_color=color)

        vis.show(False)
Exemplo n.º 23
0
def generate_gqcnn_dataset(dataset_path, database, target_object_keys,
                           env_rv_params, gripper_name, config):
    """
    Generates a GQ-CNN TensorDataset for training models with new grippers, quality metrics, objects, and cameras.

    Parameters
    ----------
    dataset_path : str
        path to save the dataset to
    database : :obj:`Hdf5Database`
        Dex-Net database containing the 3D meshes, grasps, and grasp metrics
    target_object_keys : :obj:`OrderedDict`
        dictionary mapping dataset names to target objects (either 'all' or a list of specific object keys)
    env_rv_params : :obj:`OrderedDict`
        parameters of the camera and object random variables used in sampling (see meshpy_berkeley.UniformPlanarWorksurfaceImageRandomVariable for more info)
    gripper_name : str
        name of the gripper to use
    config : :obj:`autolab_core.YamlConfig`
        other parameters for dataset generation

    Notes
    -----
    Required parameters of config are specified in Other Parameters

    Other Parameters
    ----------------    
    images_per_stable_pose : int
        number of object and camera poses to sample for each stable pose
    stable_pose_min_p : float
        minimum probability of occurrence for a stable pose to be used in data generation (used to prune bad stable poses
    
    gqcnn/crop_width : int
        width, in pixels, of crop region around each grasp center, before resize (changes the size of the region seen by the GQ-CNN)
    gqcnn/crop_height : int
        height, in pixels,  of crop region around each grasp center, before resize (changes the size of the region seen by the GQ-CNN)
    gqcnn/final_width : int
        width, in pixels,  of final transformed grasp image for input to the GQ-CNN (defaults to 32)
    gqcnn/final_height : int
        height, in pixels,  of final transformed grasp image for input to the GQ-CNN (defaults to 32)

    table_alignment/max_approach_table_angle : float
        max angle between the grasp axis and the table normal when the grasp approach is maximally aligned with the table normal
    table_alignment/max_approach_offset : float
        max deviation from perpendicular approach direction to use in grasp collision checking
    table_alignment/num_approach_offset_samples : int
        number of approach samples to use in collision checking

    collision_checking/table_offset : float
        max allowable interpenetration between the gripper and table to be considered collision free
    collision_checking/table_mesh_filename : str
        path to a table mesh for collision checking (default data/meshes/table.obj)
    collision_checking/approach_dist : float
        distance, in meters, between the approach pose and final grasp pose along the grasp axis
    collision_checking/delta_approach : float
        amount, in meters, to discretize the straight-line path from the gripper approach pose to the final grasp pose

    tensors/datapoints_per_file : int
        number of datapoints to store in each unique tensor file on disk
    tensors/fields : :obj:`dict`
        dictionary mapping field names to dictionaries specifying the data type, height, width, and number of channels for each tensor

    debug : bool
        True (or 1) if the random seed should be set to enforce deterministic behavior, False (0) otherwise
    vis/candidate_grasps : bool
        True (or 1) if the collision free candidate grasps should be displayed in 3D (for debugging)
    vis/rendered_images : bool
        True (or 1) if the rendered images for each stable pose should be displayed (for debugging)
    vis/grasp_images : bool
        True (or 1) if the transformed grasp images should be displayed (for debugging)
    """
    # read data gen params
    output_dir = dataset_path
    gripper = RobotGripper.load(gripper_name)
    image_samples_per_stable_pose = config['images_per_stable_pose']
    stable_pose_min_p = config['stable_pose_min_p']

    # read gqcnn params
    gqcnn_params = config['gqcnn']
    im_crop_height = gqcnn_params['crop_height']
    im_crop_width = gqcnn_params['crop_width']
    im_final_height = gqcnn_params['final_height']
    im_final_width = gqcnn_params['final_width']
    cx_crop = float(im_crop_width) / 2
    cy_crop = float(im_crop_height) / 2

    # open database
    dataset_names = target_object_keys.keys()
    datasets = [database.dataset(dn) for dn in dataset_names]

    # set target objects
    for dataset in datasets:
        if target_object_keys[dataset.name] == 'all':
            target_object_keys[dataset.name] = dataset.object_keys

    # setup grasp params
    table_alignment_params = config['table_alignment']
    min_grasp_approach_offset = -np.deg2rad(
        table_alignment_params['max_approach_offset'])
    max_grasp_approach_offset = np.deg2rad(
        table_alignment_params['max_approach_offset'])
    max_grasp_approach_table_angle = np.deg2rad(
        table_alignment_params['max_approach_table_angle'])
    num_grasp_approach_samples = table_alignment_params[
        'num_approach_offset_samples']

    phi_offsets = []
    if max_grasp_approach_offset == min_grasp_approach_offset:
        phi_inc = 1
    elif num_grasp_approach_samples == 1:
        phi_inc = max_grasp_approach_offset - min_grasp_approach_offset + 1
    else:
        phi_inc = (max_grasp_approach_offset - min_grasp_approach_offset) / (
            num_grasp_approach_samples - 1)

    phi = min_grasp_approach_offset
    while phi <= max_grasp_approach_offset:
        phi_offsets.append(phi)
        phi += phi_inc

    # setup collision checking
    coll_check_params = config['collision_checking']
    approach_dist = coll_check_params['approach_dist']
    delta_approach = coll_check_params['delta_approach']
    table_offset = coll_check_params['table_offset']

    table_mesh_filename = coll_check_params['table_mesh_filename']
    if not os.path.isabs(table_mesh_filename):
        table_mesh_filename = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), '..',
            table_mesh_filename)
    table_mesh = ObjFile(table_mesh_filename).read()

    # set tensor dataset config
    tensor_config = config['tensors']
    tensor_config['fields']['depth_ims_tf_table']['height'] = im_final_height
    tensor_config['fields']['depth_ims_tf_table']['width'] = im_final_width
    tensor_config['fields']['obj_masks']['height'] = im_final_height
    tensor_config['fields']['obj_masks']['width'] = im_final_width

    # add available metrics (assuming same are computed for all objects)
    metric_names = []
    dataset = datasets[0]
    obj_keys = dataset.object_keys
    if len(obj_keys) == 0:
        raise ValueError('No valid objects in dataset %s' % (dataset.name))

    obj = dataset[obj_keys[0]]
    grasps = dataset.grasps(obj.key, gripper=gripper.name)
    grasp_metrics = dataset.grasp_metrics(obj.key,
                                          grasps,
                                          gripper=gripper.name)
    # next(iter(...)) instead of .keys()[0]: dict views are not indexable
    # under Python 3, and this form works on both Python 2 and 3
    metric_names = list(grasp_metrics[next(iter(grasp_metrics))].keys())
    for metric_name in metric_names:
        tensor_config['fields'][metric_name] = {}
        tensor_config['fields'][metric_name]['dtype'] = 'float32'

    # init tensor dataset
    tensor_dataset = TensorDataset(output_dir, tensor_config)
    tensor_datapoint = tensor_dataset.datapoint_template

    # setup log file
    experiment_log_filename = os.path.join(output_dir,
                                           'dataset_generation.log')
    formatter = logging.Formatter('%(asctime)s %(levelname)s: %(message)s')
    hdlr = logging.FileHandler(experiment_log_filename)
    hdlr.setFormatter(formatter)
    logging.getLogger().addHandler(hdlr)
    root_logger = logging.getLogger()

    # copy config
    out_config_filename = os.path.join(output_dir, 'dataset_generation.json')
    ordered_dict_config = collections.OrderedDict()
    for key in config.keys():
        ordered_dict_config[key] = config[key]
    with open(out_config_filename, 'w') as outfile:
        json.dump(ordered_dict_config, outfile)

    # 1. Precompute the set of valid grasps for each stable pose:
    #    i) Perpendicular to the table
    #   ii) Collision-free along the approach direction

    # load grasps if they already exist
    grasp_cache_filename = os.path.join(output_dir, CACHE_FILENAME)
    if os.path.exists(grasp_cache_filename):
        logging.info('Loading grasp candidates from file')
        # with-block closes the cache file (the bare open() leaked it)
        with open(grasp_cache_filename, 'rb') as cache_file:
            candidate_grasps_dict = pkl.load(cache_file)
    # otherwise re-compute by reading from the database and enforcing constraints
    else:
        # create grasps dict
        candidate_grasps_dict = {}

        # loop through datasets and objects
        for dataset in datasets:
            logging.info('Reading dataset %s' % (dataset.name))
            for obj in dataset:
                if obj.key not in target_object_keys[dataset.name]:
                    continue

                # init candidate grasp storage
                candidate_grasps_dict[obj.key] = {}

                # setup collision checker
                collision_checker = GraspCollisionChecker(gripper)
                collision_checker.set_graspable_object(obj)

                # read in the stable poses of the mesh
                stable_poses = dataset.stable_poses(obj.key)
                for i, stable_pose in enumerate(stable_poses):
                    # render images if stable pose is valid
                    if stable_pose.p > stable_pose_min_p:
                        candidate_grasps_dict[obj.key][stable_pose.id] = []

                        # setup table in collision checker
                        T_obj_stp = stable_pose.T_obj_table.as_frames(
                            'obj', 'stp')
                        T_obj_table = obj.mesh.get_T_surface_obj(
                            T_obj_stp,
                            delta=table_offset).as_frames('obj', 'table')
                        T_table_obj = T_obj_table.inverse()
                        collision_checker.set_table(table_mesh_filename,
                                                    T_table_obj)

                        # read grasp and metrics
                        grasps = dataset.grasps(obj.key, gripper=gripper.name)
                        logging.info(
                            'Aligning %d grasps for object %s in stable %s' %
                            (len(grasps), obj.key, stable_pose.id))

                        # align grasps with the table
                        aligned_grasps = [
                            grasp.perpendicular_table(stable_pose)
                            for grasp in grasps
                        ]

                        # check grasp validity
                        logging.info(
                            'Checking collisions for %d grasps for object %s in stable %s'
                            % (len(grasps), obj.key, stable_pose.id))
                        for aligned_grasp in aligned_grasps:
                            # check angle with table plane and skip unaligned grasps
                            _, grasp_approach_table_angle, _ = aligned_grasp.grasp_angles_from_stp_z(
                                stable_pose)
                            perpendicular_table = (
                                np.abs(grasp_approach_table_angle) <
                                max_grasp_approach_table_angle)
                            if not perpendicular_table:
                                continue

                            # check whether any valid approach directions are collision free
                            collision_free = False
                            for phi_offset in phi_offsets:
                                rotated_grasp = aligned_grasp.grasp_y_axis_offset(
                                    phi_offset)
                                collides = collision_checker.collides_along_approach(
                                    rotated_grasp, approach_dist,
                                    delta_approach)
                                if not collides:
                                    collision_free = True
                                    break

                            # store if aligned to table
                            candidate_grasps_dict[obj.key][
                                stable_pose.id].append(
                                    GraspInfo(aligned_grasp, collision_free))

                            # visualize if specified
                            if collision_free and config['vis'][
                                    'candidate_grasps']:
                                logging.info('Grasp %d' % (aligned_grasp.id))
                                vis.figure()
                                vis.gripper_on_object(gripper, aligned_grasp,
                                                      obj,
                                                      stable_pose.T_obj_world)
                                vis.show()

        # save to file
        logging.info('Saving to file')
        # with-block closes the cache file (the bare open() leaked it)
        with open(grasp_cache_filename, 'wb') as cache_file:
            pkl.dump(candidate_grasps_dict, cache_file)

    # 2. Render a dataset of images and associate the gripper pose with image coordinates for each grasp in the Dex-Net database

    # setup variables
    obj_category_map = {}
    pose_category_map = {}

    cur_pose_label = 0
    cur_obj_label = 0
    cur_image_label = 0

    # render images for each stable pose of each object in the dataset
    render_modes = [RenderMode.SEGMASK, RenderMode.DEPTH_SCENE]
    for dataset in datasets:
        logging.info('Generating data for dataset %s' % (dataset.name))

        # iterate through all object keys
        object_keys = dataset.object_keys
        for obj_key in object_keys:
            obj = dataset[obj_key]
            if obj.key not in target_object_keys[dataset.name]:
                continue

            # read in the stable poses of the mesh
            stable_poses = dataset.stable_poses(obj.key)
            for i, stable_pose in enumerate(stable_poses):

                # render images if stable pose is valid
                if stable_pose.p > stable_pose_min_p:
                    # log progress
                    logging.info('Rendering images for object %s in %s' %
                                 (obj.key, stable_pose.id))

                    # add to category maps
                    if obj.key not in obj_category_map.keys():
                        obj_category_map[obj.key] = cur_obj_label
                    pose_category_map['%s_%s' %
                                      (obj.key,
                                       stable_pose.id)] = cur_pose_label

                    # read in candidate grasps and metrics
                    candidate_grasp_info = candidate_grasps_dict[obj.key][
                        stable_pose.id]
                    candidate_grasps = [g.grasp for g in candidate_grasp_info]
                    grasp_metrics = dataset.grasp_metrics(obj.key,
                                                          candidate_grasps,
                                                          gripper=gripper.name)

                    # compute object pose relative to the table
                    T_obj_stp = stable_pose.T_obj_table.as_frames('obj', 'stp')
                    T_obj_stp = obj.mesh.get_T_surface_obj(T_obj_stp)

                    # sample images from random variable
                    T_table_obj = RigidTransform(from_frame='table',
                                                 to_frame='obj')
                    scene_objs = {
                        'table': SceneObject(table_mesh, T_table_obj)
                    }
                    urv = UniformPlanarWorksurfaceImageRandomVariable(
                        obj.mesh,
                        render_modes,
                        'camera',
                        env_rv_params,
                        stable_pose=stable_pose,
                        scene_objs=scene_objs)

                    render_start = time.time()
                    render_samples = urv.rvs(
                        size=image_samples_per_stable_pose)
                    render_stop = time.time()
                    logging.info('Rendering images took %.3f sec' %
                                 (render_stop - render_start))

                    # visualize
                    if config['vis']['rendered_images']:
                        d = int(np.ceil(
                            np.sqrt(image_samples_per_stable_pose)))

                        # binary
                        vis2d.figure()
                        for j, render_sample in enumerate(render_samples):
                            vis2d.subplot(d, d, j + 1)
                            vis2d.imshow(render_sample.renders[
                                RenderMode.SEGMASK].image)

                        # depth table
                        vis2d.figure()
                        for j, render_sample in enumerate(render_samples):
                            vis2d.subplot(d, d, j + 1)
                            vis2d.imshow(render_sample.renders[
                                RenderMode.DEPTH_SCENE].image)
                        vis2d.show()

                    # tally total amount of data
                    num_grasps = len(candidate_grasps)
                    num_images = image_samples_per_stable_pose
                    num_save = num_images * num_grasps
                    logging.info('Saving %d datapoints' % (num_save))

                    # for each candidate grasp on the object compute the projection
                    # of the grasp into image space
                    for render_sample in render_samples:
                        # read images
                        binary_im = render_sample.renders[
                            RenderMode.SEGMASK].image
                        depth_im_table = render_sample.renders[
                            RenderMode.DEPTH_SCENE].image
                        # read camera params
                        T_stp_camera = render_sample.camera.object_to_camera_pose
                        shifted_camera_intr = render_sample.camera.camera_intr

                        # read pixel offsets
                        cx = depth_im_table.center[1]
                        cy = depth_im_table.center[0]

                        # compute intrinsics for virtual camera of the final
                        # cropped and rescaled images
                        camera_intr_scale = float(im_final_height) / float(
                            im_crop_height)
                        cropped_camera_intr = shifted_camera_intr.crop(
                            im_crop_height, im_crop_width, cy, cx)
                        final_camera_intr = cropped_camera_intr.resize(
                            camera_intr_scale)

                        # create a thumbnail for each grasp
                        for grasp_info in candidate_grasp_info:
                            # read info
                            grasp = grasp_info.grasp
                            collision_free = grasp_info.collision_free

                            # get the gripper pose
                            T_obj_camera = T_stp_camera * T_obj_stp.as_frames(
                                'obj', T_stp_camera.from_frame)
                            grasp_2d = grasp.project_camera(
                                T_obj_camera, shifted_camera_intr)

                            # center images on the grasp, rotate to image x axis
                            dx = cx - grasp_2d.center.x
                            dy = cy - grasp_2d.center.y
                            translation = np.array([dy, dx])

                            binary_im_tf = binary_im.transform(
                                translation, grasp_2d.angle)
                            depth_im_tf_table = depth_im_table.transform(
                                translation, grasp_2d.angle)

                            # crop to image size
                            binary_im_tf = binary_im_tf.crop(
                                im_crop_height, im_crop_width)
                            depth_im_tf_table = depth_im_tf_table.crop(
                                im_crop_height, im_crop_width)

                            # resize to image size
                            binary_im_tf = binary_im_tf.resize(
                                (im_final_height, im_final_width),
                                interp='nearest')
                            depth_im_tf_table = depth_im_tf_table.resize(
                                (im_final_height, im_final_width))

                            # visualize the transformed images
                            if config['vis']['grasp_images']:
                                grasp_center = Point(
                                    depth_im_tf_table.center,
                                    frame=final_camera_intr.frame)
                                tf_grasp_2d = Grasp2D(
                                    grasp_center,
                                    0,
                                    grasp_2d.depth,
                                    width=gripper.max_width,
                                    camera_intr=final_camera_intr)

                                # plot 2D grasp image
                                vis2d.figure()
                                vis2d.subplot(2, 2, 1)
                                vis2d.imshow(binary_im)
                                vis2d.grasp(grasp_2d)
                                vis2d.subplot(2, 2, 2)
                                vis2d.imshow(depth_im_table)
                                vis2d.grasp(grasp_2d)
                                vis2d.subplot(2, 2, 3)
                                vis2d.imshow(binary_im_tf)
                                vis2d.grasp(tf_grasp_2d)
                                vis2d.subplot(2, 2, 4)
                                vis2d.imshow(depth_im_tf_table)
                                vis2d.grasp(tf_grasp_2d)
                                vis2d.title('Coll Free? %d' %
                                            (grasp_info.collision_free))
                                vis2d.show()

                                # plot 3D visualization
                                vis.figure()
                                T_obj_world = vis.mesh_stable_pose(
                                    obj.mesh,
                                    stable_pose.T_obj_world,
                                    style='surface',
                                    dim=0.5)
                                vis.gripper(gripper,
                                            grasp,
                                            T_obj_world,
                                            color=(0.3, 0.3, 0.3))
                                vis.show()

                            # form hand pose array
                            hand_pose = np.r_[grasp_2d.center.y,
                                              grasp_2d.center.x,
                                              grasp_2d.depth, grasp_2d.angle,
                                              grasp_2d.center.y -
                                              shifted_camera_intr.cy,
                                              grasp_2d.center.x -
                                              shifted_camera_intr.cx,
                                              grasp_2d.width_px]

                            # store to data buffers
                            tensor_datapoint[
                                'depth_ims_tf_table'] = depth_im_tf_table.raw_data
                            tensor_datapoint[
                                'obj_masks'] = binary_im_tf.raw_data
                            tensor_datapoint['hand_poses'] = hand_pose
                            tensor_datapoint['collision_free'] = collision_free
                            tensor_datapoint['obj_labels'] = cur_obj_label
                            tensor_datapoint['pose_labels'] = cur_pose_label
                            tensor_datapoint['image_labels'] = cur_image_label

                            # .items() instead of .iteritems(): works on
                            # both Python 2 and 3
                            for metric_name, metric_val in grasp_metrics[
                                    grasp.id].items():
                                coll_free_metric = (
                                    1 * collision_free) * metric_val
                                tensor_datapoint[
                                    metric_name] = coll_free_metric
                            tensor_dataset.add(tensor_datapoint)

                        # update image label
                        cur_image_label += 1

                    # update pose label
                    cur_pose_label += 1

                    # force clean up
                    gc.collect()

            # update object label
            cur_obj_label += 1

            # force clean up
            gc.collect()

    # save last file
    tensor_dataset.flush()

    # save category mappings (with-blocks close the files after dumping)
    obj_cat_filename = os.path.join(output_dir, 'object_category_map.json')
    with open(obj_cat_filename, 'w') as obj_cat_file:
        json.dump(obj_category_map, obj_cat_file)
    pose_cat_filename = os.path.join(output_dir, 'pose_category_map.json')
    with open(pose_cat_filename, 'w') as pose_cat_file:
        json.dump(pose_category_map, pose_cat_file)
Exemplo n.º 24
0
# Mayavi is optional: fall back to a None sentinel so the module still
# imports on headless machines without the 3D viewer.
try:
    from mayavi import mlab
except ImportError:
    print("can not import mayavi")
    mlab = None
from dexnet.grasping import GpgGraspSampler  # temporary way for show 3D gripper using mayavi
import pcl
import glob
import argparse

# global configurations:
# NOTE(review): `os`, `YamlConfig`, and `RobotGripper` are presumably
# imported earlier in the file — confirm before running standalone.
home_dir = os.environ['HOME']
dexnet_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
yaml_config = YamlConfig(dexnet_dir + "/test/config.yaml")
gripper_name = 'robotiq_85'
gripper = RobotGripper.load(gripper_name, dexnet_dir + "/data/grippers")
ags = GpgGraspSampler(gripper, yaml_config)
save_fig = False  # save fig as png file
show_fig = True  # show the mayavi figure
generate_new_file = False  # whether generate new file for collision free grasps
check_pcd_grasp_points = False


def get_file_name(file_dir_):
    """Collect the immediate subdirectories of *file_dir_*.

    Walks the tree and keeps only directories exactly one level below
    *file_dir_* (compared by counting path separators).

    NOTE(review): this snippet appears truncated — the visible body never
    returns ``file_list``; confirm the trailing return in the original file.
    """
    file_list = []
    for root, dirs, files in os.walk(file_dir_):
        # print(root)  # current path
        # keep only paths one directory level deeper than file_dir_
        if root.count('/') == file_dir_.count('/') + 1:
            file_list.append(root)
        # print(dirs)  # all the directories in current path
        # print(files)  # all the files in current path
Exemplo n.º 25
0
def worker(i, sample_nums, grasp_amount, good_grasp):
    """Sample grasps for object *i* and bucket them by minimum friction coefficient.

    Repeatedly samples grasp candidates for the i-th object in the global
    ``file_list_all`` and, for each grasp, finds the smallest friction
    coefficient under which it is still force-closure. Grasps are appended to
    *good_grasp* (shared output list, mutated in place) as tuples of
    (grasp, friction_coef, canny_quality) until every friction bucket holds at
    least *grasp_amount* entries.

    Args:
        i: index into the module-level ``file_list_all``.
        sample_nums: number of grasps requested per sampling round.
        grasp_amount: minimum number of good grasps per friction level.
        good_grasp: list collecting (grasp, friction, canny_quality) tuples.

    NOTE(review): depends on module-level globals ``file_list_all`` and
    ``home_dir`` being defined before this function runs.
    """
    # The object name is embedded in the dataset path; the slice bounds are
    # tuned to the YCB directory layout -- TODO confirm against file_list_all.
    object_name = file_list_all[i][len(home_dir) + 48:-12]
    print('a worker of task {} start'.format(object_name))

    yaml_config = YamlConfig(home_dir +
                             "/Projects/PointNetGPD/dex-net/test/config.yaml")
    gripper_name = 'robotiq_85'
    gripper = RobotGripper.load(
        gripper_name, home_dir + "/Projects/PointNetGPD/dex-net/data/grippers")
    # Select the grasp sampler implementation (hard-coded to antipodal here).
    grasp_sample_method = "antipodal"
    if grasp_sample_method == "uniform":
        ags = UniformGraspSampler(gripper, yaml_config)
    elif grasp_sample_method == "gaussian":
        ags = GaussianGraspSampler(gripper, yaml_config)
    elif grasp_sample_method == "antipodal":
        ags = AntipodalGraspSampler(gripper, yaml_config)
    elif grasp_sample_method == "gpg":
        ags = GpgGraspSampler(gripper, yaml_config)
    elif grasp_sample_method == "point":
        ags = PointGraspSampler(gripper, yaml_config)
    else:
        raise NameError("Can't support this sampler")
    print("Log: do job", i)
    # Load the mesh and its signed distance field; both files must exist.
    if os.path.exists(str(file_list_all[i]) + "/nontextured.obj"):
        of = ObjFile(str(file_list_all[i]) + "/nontextured.obj")
        sf = SdfFile(str(file_list_all[i]) + "/nontextured.sdf")
    else:
        print("can't find any obj or sdf file!")
        raise NameError("can't find any obj or sdf file!")
    mesh = of.read()
    sdf = sf.read()
    obj = GraspableObject3D(sdf, mesh)
    print("Log: opened object", i + 1, object_name)

    # Build one quality config per friction coefficient for both metrics.
    force_closure_quality_config = {}
    canny_quality_config = {}
    fc_list_sub1 = np.arange(2.0, 0.75, -0.4)
    fc_list_sub2 = np.arange(0.5, 0.36, -0.05)
    fc_list = np.concatenate(
        [fc_list_sub1,
         fc_list_sub2])  # friction coefficient list: fc_list = [2.  1.6  1.2  0.8  0.5  0.45 0.4]
    print("fc_list", fc_list, "fc_list[-1]", fc_list[-1])
    for value_fc in fc_list:
        value_fc = round(value_fc, 2)
        yaml_config['metrics']['force_closure']['friction_coef'] = value_fc
        yaml_config['metrics']['robust_ferrari_canny'][
            'friction_coef'] = value_fc

        force_closure_quality_config[
            value_fc] = GraspQualityConfigFactory.create_config(
                yaml_config['metrics']['force_closure'])
        canny_quality_config[
            value_fc] = GraspQualityConfigFactory.create_config(
                yaml_config['metrics']['robust_ferrari_canny'])

    good_count_perfect = np.zeros(len(fc_list))  # per-friction bucket counts
    count = 0  # total number of grasps sampled so far
    minimum_grasp_per_fc = grasp_amount

    # Generate grasps until every friction-coefficient bucket is full.
    while np.sum(good_count_perfect < minimum_grasp_per_fc
                 ) != 0 and good_count_perfect[-1] < minimum_grasp_per_fc:
        print("[INFO]:good | mini", good_count_perfect, minimum_grasp_per_fc)
        print("[INFO]:good < mini", good_count_perfect < minimum_grasp_per_fc,
              np.sum(good_count_perfect < minimum_grasp_per_fc))
        grasps = ags.generate_grasps(
            obj,
            target_num_grasps=sample_nums,
            grasp_gen_mult=10,  # generate grasp poses
            vis=False,
            random_approach_angle=True)  # randomize the approach direction
        print("\033[0;32m%s\033[0m" %
              "[INFO] Worker{} generate {} grasps.".format(i, len(grasps)))
        count += len(grasps)
        for j in grasps:  # for each sampled grasp, test force closure and find its minimum friction coefficient
            tmp, is_force_closure = False, False
            for ind_, value_fc in enumerate(fc_list):  # assign the grasp to a friction-coefficient bucket
                value_fc = round(value_fc, 2)
                tmp = is_force_closure
                is_force_closure = PointGraspMetrics3D.grasp_quality(
                    j,
                    obj,  # evaluate the grasp under friction coefficient value_fc
                    force_closure_quality_config[value_fc],
                    vis=False)
                print("[INFO] is_force_closure:", is_force_closure,
                      "value_fc:", value_fc, "tmp:", tmp)
                if tmp and not is_force_closure:  # force-closure at the previous (larger) friction but not the current one: the previous value is this grasp's minimum friction coefficient
                    print("[debug] tmp and not is_force_closure")
                    if good_count_perfect[ind_ - 1] < minimum_grasp_per_fc:
                        canny_quality = PointGraspMetrics3D.grasp_quality(
                            j,
                            obj,
                            canny_quality_config[round(fc_list[ind_ - 1], 2)],
                            vis=False)
                        good_grasp.append(
                            (j, round(fc_list[ind_ - 1],
                                      2), canny_quality))  # store the grasp under the previous friction value
                        good_count_perfect[ind_ - 1] += 1
                    break

                elif is_force_closure and value_fc == fc_list[
                        -1]:  # force-closure even at the smallest friction coefficient
                    print(
                        "[debug] is_force_closure and value_fc == fc_list[-1]")
                    if good_count_perfect[ind_] < minimum_grasp_per_fc:
                        canny_quality = PointGraspMetrics3D.grasp_quality(
                            j, obj, canny_quality_config[value_fc], vis=False)
                        good_grasp.append((j, value_fc, canny_quality))
                        good_count_perfect[ind_] += 1
                    break
        print('Worker:', i,
              'Object:{} GoodGrasp:{}'.format(object_name, good_count_perfect))

    # Final summary line, padded for column-aligned log output.
    object_name_len = len(object_name)
    object_name_ = str(object_name) + " " * (25 - object_name_len)
    if count == 0:
        good_grasp_rate = 0
    else:
        good_grasp_rate = len(good_grasp) / count
    print(
        'Worker:', i, 'Gripper:{} Object:{} Rate:{:.4f} {}/{}'.format(
            gripper_name, object_name_, good_grasp_rate, len(good_grasp),
            count))
Exemplo n.º 26
0
# Mayavi is optional; fall back to mlab = None when it is not installed.
try:
    from mayavi import mlab
except ImportError:
    print("Can not import mayavi")
    mlab = None
# Make the repository root and the PointNetGPD package importable.
sys.path.append(
    path.dirname(path.dirname(path.dirname(path.abspath("__file__")))))
sys.path.append(os.environ['HOME'] + "/code/grasp-pointnet/PointNetGPD")

# global config:
yaml_config = YamlConfig(
    os.environ['HOME'] +
    "/Projects/GPD_PointNet/dex-net/test/sample_config.yaml")
gripper_name = 'robotiq_85'
gripper = RobotGripper.load(
    gripper_name,
    os.environ['HOME'] + "/Projects/GPD_PointNet/dex-net/data/grippers")
ags = GpgGraspSamplerPclPcd(gripper, yaml_config)  # PCL/PCD-based GPG sampler
# ags = AntipodalGraspSampler(gripper, yaml_config)
value_fc = 0.4  # no use, set a random number
num_grasps = 40  # target number of candidate grasps per sampling round
num_workers = 20  # number of parallel sampling workers
max_num_samples = 150  # cap on samples per worker -- presumably; verify against sampler
n_voxel = 500  # voxel resolution used when down-sampling the cloud -- TODO confirm

minimal_points_send_to_point_net = 20  # skip grasps whose point patch has fewer points than this
marker_life_time = 8  # lifetime of visualization markers -- presumably seconds; verify against publisher

show_bad_grasp = False  # also visualize rejected grasps when True
save_grasp_related_file = False  # dump intermediate grasp data to disk when True
Exemplo n.º 27
0
    print("Please install grasp msgs from https://github.com/TAMS-Group/gpd_grasp_msgs in your ROS workspace")
    exit()

# Mayavi is optional; fall back to mlab = None when it is not installed.
try:
    from mayavi import mlab
except ImportError:
    print("Can not import mayavi")
    mlab = None
# Make the repository root and the PointNetGPD package importable.
sys.path.append(path.dirname(path.dirname(path.dirname(path.abspath("__file__")))))
sys.path.append(os.environ['HOME'] + "/code/PointNetGPD/PointNetGPD")
from main_test import test_network, model, args

# global config:
yaml_config = YamlConfig(os.environ['HOME'] + "/code/PointNetGPD/dex-net/test/config.yaml")
gripper_name = 'robotiq_85'
gripper = RobotGripper.load(gripper_name, os.environ['HOME'] + "/code/PointNetGPD/dex-net/data/grippers")
ags = GpgGraspSamplerPcl(gripper, yaml_config)  # PCL-based GPG sampler
value_fc = 0.4  # no use, set a random number
num_grasps = 40  # target number of candidate grasps per sampling round
num_workers = 20  # number of parallel sampling workers
max_num_samples = 150  # cap on samples per worker -- presumably; verify against sampler
n_voxel = 500  # voxel resolution used when down-sampling the cloud -- TODO confirm

minimal_points_send_to_point_net = 20  # skip grasps whose point patch has fewer points than this
marker_life_time = 8  # lifetime of visualization markers -- presumably seconds; verify against publisher

show_bad_grasp = False  # also visualize rejected grasps when True
save_grasp_related_file = False  # dump intermediate grasp data to disk when True

# Runtime flags forwarded from the command-line arguments of main_test.
show_final_grasp = args.show_final_grasp
tray_grasp = args.tray_grasp
Exemplo n.º 28
0
def save_dexnet_objects(output_path, database, target_object_keys, config,
                        pointers, num):
    """Export meshes, stable poses, grasps and metrics for pointed-to objects.

    For each entry in *pointers*, the referenced object is looked up in
    *database*, the pointed-to grasp is re-validated (table alignment and
    approach collision checking) and, when its contact points can be
    recovered, the object's mesh (.obj and a .ply copy), stable poses (.stp),
    candidate grasp info (.pkl) and grasp metrics (.json) are written under
    ``DATA_DIR``. A CSV index of all selected objects is written at the end.

    Args:
        output_path: directory that is created if missing and emptied first.
        database: dex-net database providing the datasets named in
            *target_object_keys*.
        target_object_keys: dict keyed by dataset name; its values are
            rebuilt in place with the object keys selected by *pointers*.
        config: configuration dict with 'gripper', 'table_alignment',
            'collision_checking' and 'stable_pose_min_p' entries.
        pointers: record with parallel fields ``obj_ids``, ``tensor``,
            ``array``, ``depths``, ``pose_num``, ``grasp_num`` and
            ``metrics`` — one entry per target grasp.
        num: optional maximum number of objects to process (None = no cap).
    """
    slice_dataset = False
    files = []

    # Start from an empty output directory.
    if not os.path.exists(output_path):
        os.mkdir(output_path)
    for each_file in os.listdir(output_path):
        os.remove(output_path + '/' + each_file)
    gripper = RobotGripper.load(config['gripper'])

    # Setup grasp params:
    table_alignment_params = config['table_alignment']
    min_grasp_approach_offset = -np.deg2rad(
        table_alignment_params['max_approach_offset'])
    max_grasp_approach_offset = np.deg2rad(
        table_alignment_params['max_approach_offset'])
    max_grasp_approach_table_angle = np.deg2rad(
        table_alignment_params['max_approach_table_angle'])
    num_grasp_approach_samples = table_alignment_params[
        'num_approach_offset_samples']

    # Sample approach-axis rotation offsets covering [min, max]. The +1 /
    # phi_inc=1 branches make the loop below emit exactly one offset when the
    # range is degenerate or a single sample was requested.
    phi_offsets = []
    if max_grasp_approach_offset == min_grasp_approach_offset:
        phi_inc = 1
    elif num_grasp_approach_samples == 1:
        phi_inc = max_grasp_approach_offset - min_grasp_approach_offset + 1
    else:
        phi_inc = (max_grasp_approach_offset - min_grasp_approach_offset) / (
            num_grasp_approach_samples - 1)

    phi = min_grasp_approach_offset
    while phi <= max_grasp_approach_offset:
        phi_offsets.append(phi)
        phi += phi_inc

    # Setup collision checking
    coll_check_params = config['collision_checking']
    approach_dist = coll_check_params['approach_dist']
    delta_approach = coll_check_params['delta_approach']
    table_offset = coll_check_params['table_offset']
    # Read but unused in this function; kept so a missing config key still
    # fails loudly here, as it did originally.
    stable_pose_min_p = config['stable_pose_min_p']

    table_mesh_filename = coll_check_params['table_mesh_filename']
    if not os.path.isabs(table_mesh_filename):
        table_mesh_filename = os.path.join(DATA_DIR, table_mesh_filename)

    dataset_names = target_object_keys.keys()
    datasets = [database.dataset(dn) for dn in dataset_names]

    if slice_dataset:
        datasets = [dataset.subset(0, 100) for dataset in datasets]

    # Map the global object ids in pointers onto per-dataset object keys.
    # Objects of all datasets are numbered consecutively, so each dataset
    # owns the id window [start, end).
    start = 0
    for dataset in datasets:
        target_object_keys[dataset.name] = []
        end = start + len(dataset.object_keys)
        for cnt, _id in enumerate(pointers.obj_ids):
            if _id >= end or _id < start:
                continue
            target_object_keys[dataset.name].append(dataset.object_keys[_id -
                                                                        start])
            files.append(
                tuple([
                    dataset.object_keys[_id - start], pointers.tensor[cnt],
                    pointers.array[cnt], pointers.depths[cnt]
                ]))
        # BUG FIX: was 'start += end' (i.e. start = 2*start + len(dataset)),
        # which mis-offsets the id window for the third and later datasets;
        # the window must simply advance to the end of the current dataset.
        start = end
    print(target_object_keys)
    print("target object keys:", len(target_object_keys['3dnet']),
          len(target_object_keys['kit']))
    files = np.array(files,
                     dtype=[('Object_id', (np.str_, 40)), ('Tensor', int),
                            ('Array', int), ('Depth', float)])

    # Precompute set of valid grasps
    candidate_grasps_dict = {}

    counter = 0
    for dataset in datasets:
        for obj in dataset:
            if obj.key not in target_object_keys[dataset.name]:
                continue
            print("Object in subset")
            # Initiate candidate grasp storage
            candidate_grasps_dict[obj.key] = {}

            # Setup collision checker
            collision_checker = GraspCollisionChecker(gripper)
            collision_checker.set_graspable_object(obj)

            # Read in the stable poses of the mesh
            stable_poses = dataset.stable_poses(obj.key)
            try:
                stable_pose = stable_poses[pointers.pose_num[counter]]
            except IndexError:
                # Pose index out of range: report, skip this object and keep
                # the pointer stream aligned by advancing the counter.
                print(
                    "Problems with reading pose. Tensor %d, Array %d, Pose %d"
                    % (pointers.tensor[counter], pointers.array[counter],
                       pointers.pose_num[counter]))
                print("Stable poses:", stable_poses)
                print("Pointers pose:", pointers.pose_num[counter])
                counter += 1
                print("Continue.")
                continue
            print("Read in stable pose")
            candidate_grasps_dict[obj.key][stable_pose.id] = []

            # Setup table in collision checker
            T_obj_stp = stable_pose.T_obj_table.as_frames('obj', 'stp')
            T_obj_table = obj.mesh.get_T_surface_obj(
                T_obj_stp, delta=table_offset).as_frames('obj', 'table')
            T_table_obj = T_obj_table.inverse()

            collision_checker.set_table(table_mesh_filename, T_table_obj)

            # read grasp and metrics
            grasps = dataset.grasps(obj.key, gripper=gripper.name)

            # align grasps with the table
            aligned_grasps = [
                grasp.perpendicular_table(stable_pose) for grasp in grasps
            ]
            i = 0
            found = False
            # NOTE(review): grasp_num is an index, so '<=' would arguably be
            # the right guard; kept as '<' to preserve original behavior.
            if len(aligned_grasps) < pointers.grasp_num[counter]:
                raise IndexError
            print("pointers grasp num", pointers.grasp_num[counter])
            print("tensor", pointers.tensor[counter])
            print("array", pointers.array[counter])
            # Check grasp validity
            for aligned_grasp in aligned_grasps:
                # Check angle with table plane and skip unaligned grasps
                _, grasp_approach_table_angle, _ = aligned_grasp.grasp_angles_from_stp_z(
                    stable_pose)
                perpendicular_table = (np.abs(grasp_approach_table_angle) <
                                       max_grasp_approach_table_angle)
                if not perpendicular_table:
                    continue

                # Check whether any valid approach directions are collision free
                collision_free = False
                for phi_offset in phi_offsets:
                    rotated_grasp = aligned_grasp.grasp_y_axis_offset(
                        phi_offset)
                    collides = collision_checker.collides_along_approach(
                        rotated_grasp, approach_dist, delta_approach)
                    if not collides:
                        collision_free = True
                        break

                # Store the grasp the pointer refers to (the grasp_num-th
                # table-aligned grasp), together with its contact points.
                if i == pointers.grasp_num[counter]:
                    found, contact_points = aligned_grasp.close_fingers(obj)
                    print("Contact points", contact_points)
                    if not found:
                        print("Could not find contact points. continue.")
                        break
                    else:
                        print("Original metric: ", pointers.metrics[counter])
                        print(
                            "Metrics mapped point: ",
                            dataset.grasp_metrics(obj.key, [aligned_grasp],
                                                  gripper=gripper.name))
                        candidate_grasps_dict[obj.key][stable_pose.id].append(
                            GraspInfo(aligned_grasp, collision_free, [
                                contact_points[0].point,
                                contact_points[1].point
                            ]))
                        break

                i += 1

            if found:
                # Save files
                print("Saving file: ", obj.key)
                savefile = ObjFile(DATA_DIR + "/meshes/dexnet/" + obj.key +
                                   ".obj")
                savefile.write(obj.mesh)
                # Also export a .ply copy named after tensor/array indices.
                mesh = o3d.geometry.TriangleMesh(
                    o3d.utility.Vector3dVector(obj.mesh.vertices),
                    o3d.utility.Vector3iVector(obj.mesh.triangles))
                o3d.io.write_triangle_mesh(
                    DATA_DIR + '/PerfectPredictions/3D_meshes/' +
                    "%05d_%03d.ply" %
                    (pointers.tensor[counter], pointers.array[counter]), mesh)
                # Save stable poses
                save_stp = StablePoseFile(DATA_DIR + "/meshes/dexnet/" +
                                          obj.key + ".stp")
                save_stp.write(stable_poses)
                # Save candidate grasp info (FIX: close the file handle that
                # was previously leaked by pkl.dump(..., open(...)))
                with open(DATA_DIR + "/meshes/dexnet/" + obj.key + ".pkl",
                          'wb') as pkl_file:
                    pkl.dump(candidate_grasps_dict[obj.key], pkl_file)
                # Save grasp metrics
                candidate_grasp_info = candidate_grasps_dict[obj.key][
                    stable_pose.id]
                candidate_grasps = [g.grasp for g in candidate_grasp_info]
                grasp_metrics = dataset.grasp_metrics(obj.key,
                                                      candidate_grasps,
                                                      gripper=gripper.name)
                write_metrics = json.dumps(grasp_metrics)
                # FIX: context manager instead of manual open/close.
                with open(DATA_DIR + "/meshes/dexnet/" + obj.key + ".json",
                          "w") as f:
                    f.write(write_metrics)
            counter += 1
            if num is not None and counter >= num:
                break
    # Write the CSV index of every selected object.
    with open(DATA_DIR + '/meshes/dexnet/files.csv', 'w') as csv_file:
        csv_writer = csv.writer(csv_file, delimiter=',')
        for point in files:
            csv_writer.writerow(point)
Exemplo n.º 29
0
# Mayavi is optional; fall back to mlab = None when it is not installed.
try:
    from mayavi import mlab
except ImportError:
    print("Can not import mayavi")
    mlab = None
# Make the repository root and the PointNetGPD package importable.
sys.path.append(
    path.dirname(path.dirname(path.dirname(path.abspath("__file__")))))
sys.path.append(os.environ['HOME'] + "/code/grasp-pointnet/PointNetGPD")
from main_test import test_network, model, args

# global config:
yaml_config = YamlConfig(os.environ['HOME'] +
                         "/code/grasp-pointnet/dex-net/test/config.yaml")
gripper_name = 'robotiq_85'
gripper = RobotGripper.load(
    gripper_name,
    os.environ['HOME'] + "/code/grasp-pointnet/dex-net/data/grippers")
ags = GpgGraspSamplerPcl(gripper, yaml_config)  # PCL-based GPG sampler
value_fc = 0.4  # no use, set a random number
num_grasps = 40  # target number of candidate grasps per sampling round
num_workers = 20  # number of parallel sampling workers
max_num_samples = 150  # cap on samples per worker -- presumably; verify against sampler
n_voxel = 500  # voxel resolution used when down-sampling the cloud -- TODO confirm

minimal_points_send_to_point_net = 20  # skip grasps whose point patch has fewer points than this
marker_life_time = 8  # lifetime of visualization markers -- presumably seconds; verify against publisher

show_bad_grasp = False  # also visualize rejected grasps when True
save_grasp_related_file = False  # dump intermediate grasp data to disk when True

# Runtime flag forwarded from the command-line arguments of main_test.
show_final_grasp = args.show_final_grasp
Exemplo n.º 30
0
    else:
        good_grasp_rate = len(good_grasp) / count
    print("Gripper:{} Object:{} Rate:{:.4f} {}/{}".
          format(gripper_name, object_name_, good_grasp_rate, len(good_grasp), count))


if __name__ == "__main__":
    if len(sys.argv) > 1:
        filename_prefix = sys.argv[1]
    else:
        filename_prefix = "default"
    home_dir = os.environ["HOME"]
    file_dir = home_dir + "/code/PointNetGPD/PointNetGPD/data/ycb-tools/models/ycb"
    yaml_config = YamlConfig(home_dir + "/code/PointNetGPD/dex-net/test/config.yaml")
    gripper_name = "robotiq_85"
    gripper = RobotGripper.load(gripper_name, home_dir + "/code/PointNetGPD/dex-net/data/grippers")
    grasp_sample_method = "antipodal"
    file_list_all = get_file_name(file_dir)
    object_numbers = file_list_all.__len__()

    job_list = np.arange(object_numbers)
    job_list = list(job_list)
    pool_size = 1  # number of jobs did at same time
    assert (pool_size <= len(job_list))
    # Initialize pool
    pool = []
    for _ in range(pool_size):
        job_i = job_list.pop(0)
        pool.append(multiprocessing.Process(target=do_job, args=(job_i,)))
    [p.start() for p in pool]
    # refill