def test_ik_arm_pose(self):
        env = Environment()
        attrs = {"name": ["pr2"], "pose": [(0, 0, 0)], "_type": ["Robot"], "geom": [], "backHeight": [0.2], "lGripper": [0.5], "rGripper": [0.5]}
        attrs["lArmPose"] = [(0,0,0,0,0,0,0)]
        attrs["rArmPose"] = [(0,0,0,0,0,0,0)]
        attr_types = {"name": str, "pose": matrix.Vector3d, "_type": str, "geom": PR2, "backHeight": matrix.Value, "lArmPose": matrix.Vector7d, "rArmPose": matrix.Vector7d, "lGripper": matrix.Value, "rGripper": matrix.Value}
        robot = parameter.Object(attrs, attr_types)
        # Set the initial arm poses so that they are not close to the joint limits
        robot.lArmPose = np.array([[np.pi/4, np.pi/8, np.pi/2, -np.pi/2, np.pi/8, -np.pi/8, np.pi/2]]).T
        robot.rArmPose = np.array([[-np.pi/4, np.pi/8, -np.pi/2, -np.pi/2, -np.pi/8, -np.pi/8, np.pi/2]]).T

        attrs = {"name": ["can"], "geom": (0.04, 0.25), "pose": ["undefined"], "rotation": [(0, 0, 0)], "_type": ["Can"]}
        attr_types = {"name": str, "geom": BlueCan, "pose": matrix.Vector3d, "rotation": matrix.Vector3d, "_type": str}
        can = parameter.Object(attrs, attr_types)
        can.pose = np.array([[5.77887566e-01,  -1.26743678e-01,   8.37601627e-01]]).T
        can.rotation = np.array([[np.pi/4, np.pi/4, np.pi/4]]).T
        # Create an OpenRAVEBody for each parameter
        can_body = OpenRAVEBody(env, can.name, can.geom)
        robot_body = OpenRAVEBody(env, robot.name, robot.geom)
        # Set the poses and dof values for each body
        can_body.set_pose(can.pose, can.rotation)
        robot_body.set_pose(robot.pose)
        robot_body.set_dof(robot.backHeight, robot.lArmPose, robot.lGripper, robot.rArmPose, robot.rGripper)
        # Solve IK for the can pose and take the first returned solution (back height + 7 right-arm joints)
        ik_arm = robot_body.ik_arm_pose(can.pose, can.rotation)[0]
        robot_body.set_dof(ik_arm[:1], robot.lArmPose, robot.lGripper, ik_arm[1:], robot.rGripper)
        robot_trans = robot_body.env_body.GetLink("r_gripper_tool_frame").GetTransform()
        robot_pos = OpenRAVEBody.obj_pose_from_transform(robot_trans)
        # The resulting end-effector pose should match the can pose within tolerance
        self.assertTrue(np.allclose(can.pose.flatten(), robot_pos[:3], atol=1e-4))
        self.assertTrue(np.allclose(can.rotation.flatten(), robot_pos[3:], atol=1e-4))
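
A hedged aside (not from the example's repository): the round-trip above could be factored into a small helper. The sketch below is an illustration only; it reuses just the calls that already appear in this test (ik_arm_pose, set_dof, obj_pose_from_transform), infers the solution layout (back height followed by the 7 right-arm joint values) from the ik_arm[:1] / ik_arm[1:] slicing above, and assumes the test module's imports (numpy as np, OpenRAVEBody).

    def check_ik_round_trip(robot_body, robot, target_pos, target_rot, atol=1e-4):
        # Solve right-arm IK for the target pose; bail out if it is unreachable.
        solutions = robot_body.ik_arm_pose(target_pos, target_rot)
        if len(solutions) == 0:
            return False
        ik_arm = solutions[0]
        # First entry is the back height, the remaining seven are the arm joints.
        robot_body.set_dof(ik_arm[:1], robot.lArmPose, robot.lGripper,
                           ik_arm[1:], robot.rGripper)
        trans = robot_body.env_body.GetLink("r_gripper_tool_frame").GetTransform()
        ee_pose = OpenRAVEBody.obj_pose_from_transform(trans)
        # The achieved end-effector pose should match the target within tolerance.
        return (np.allclose(target_pos.flatten(), ee_pose[:3], atol=atol) and
                np.allclose(target_rot.flatten(), ee_pose[3:], atol=atol))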
        """
Example #2
    def sample_rp_from_ee(t, plan, ee_poses, robot):
        # NOTE: `target` is resolved from an enclosing scope, and target_pose is not used below
        target_pose = target.value[:, 0]
        dummy_body = OpenRAVEBody(plan.env, robot.name+'_dummy', robot.geom)
        possible_rps = []
        for ee_pose in ee_poses:
            # Sample a collision-free base pose near the target, then solve
            # right-arm IK for the candidate end-effector pose.
            base_pose = sampling.get_col_free_base_pose_around_target(t, plan, ee_pose.value, robot).reshape((1,3))
            ik_arm_poses = dummy_body.ik_arm_pose(ee_pose[0], ee_pose[1])
            # Keep the IK solution closest to the current back-height + right-arm configuration.
            cur_arm_poses = np.c_[robot.backHeight, robot.rArmPose].reshape((8,))
            chosen_pose = sampling.closest_arm_pose(ik_arm_poses, cur_arm_poses)
            torso_pose, arm_pose = chosen_pose[0], chosen_pose[1:]
            # Assemble the full robot pose: base, torso, left arm/gripper (unchanged at t),
            # the chosen right-arm pose, and the right gripper.
            possible_rp = np.hstack((base_pose, torso_pose, robot.lArmPose[:,t], robot.lGripper[:,t], arm_pose, robot.rGripper[:,t]))
            possible_rps.append(possible_rp)

        dummy_body.delete()
        if not possible_rps:
            # No feasible robot pose was found for any candidate end-effector pose.
            import ipdb; ipdb.set_trace()

        return possible_rps
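
A hedged stand-in (assumption, not the project's code): sampling.closest_arm_pose is called above but not shown. A minimal version consistent with how it is used would pick the IK solution whose joint values are closest, in the Euclidean sense, to the current 8-dof configuration (back height plus the 7 right-arm joints):

    import numpy as np

    def closest_arm_pose_sketch(ik_arm_poses, cur_arm_pose):
        # Return None when IK produced no solutions for the candidate pose.
        if len(ik_arm_poses) == 0:
            return None
        # Distance of each candidate solution to the current configuration.
        dists = [np.linalg.norm(np.asarray(pose) - cur_arm_pose) for pose in ik_arm_poses]
        return ik_arm_poses[int(np.argmin(dists))]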
Example #3
    def sample_rp_from_ee(t, plan, ee_poses, robot):
        target_pose = target.value[:, 0]
        dummy_body = OpenRAVEBody(plan.env, robot.name + '_dummy', robot.geom)
        possible_rps = []
        for ee_pose in ee_poses:
            base_pose = sampling.get_col_free_base_pose_around_target(
                t, plan, ee_pose.value, robot).reshape((1, 3))
            ik_arm_poses = dummy_body.ik_arm_pose(ee_pose[0], ee_pose[1])
            cur_arm_poses = np.c_[robot.backHeight, robot.rArmPose].reshape(
                (8, ))
            chosen_pose = sampling.closest_arm_pose(ik_arm_poses,
                                                    cur_arm_poses)
            torso_pose, arm_pose = chosen_pose[0], chosen_pose[1:]
            possible_rp = np.hstack(
                (base_pose, torso_pose, robot.lArmPose[:, t],
                 robot.lGripper[:, t], arm_pose, robot.rGripper[:, t]))
            possible_rps.append(possible_rp)

        dummy_body.delete()
        if not possible_rps:
            print("something went wrong")
            # import ipdb; ipdb.set_trace()

        return possible_rps
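
A hedged usage sketch (hypothetical caller, not from the source): an empty result from sample_rp_from_ee means no collision-free, IK-feasible robot pose was found for any candidate end-effector pose, so a caller would typically fail or resample rather than drop into the debugger:

    rps = sample_rp_from_ee(t, plan, ee_poses, robot)
    if not rps:
        raise ValueError("no feasible robot pose found at timestep %d" % t)
    chosen_rp = rps[0]  # or rank the candidates further before committing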