Example #1
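Sets up a robot and a can, wraps the robot in an OpenRAVEBody, sets its base pose, applies its joint values through a DOF dictionary, and places the can at a chosen pose and rotation.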
    def test_get_ik_arm_pose(self):
        robot = self.setup_robot()
        can = self.setup_can()
        test_env = self.setup_environment()

        robot_body = OpenRAVEBody(test_env, robot.name, robot.geom)

        # robot.pose = np.array([-.5, 0, 0]).reshape((3,1))
        robot.pose = np.array([-0., 0, 0]).reshape((3, 1))
        robot_body.set_pose(robot.pose)
        dof_value_map = {
            "backHeight": robot.backHeight,
            "lArmPose": robot.lArmPose.flatten(),
            "lGripper": robot.lGripper,
            "rArmPose": robot.rArmPose.flatten(),
            "rGripper": robot.rGripper
        }
        robot_body.set_dof(dof_value_map)
        # can.pose = np.array([-0.31622543, -0.38142561,  1.19321209]).reshape((3,1))
        # can.pose = np.array([0.5, -0.2,  .8]).reshape((3,1))
        can.pose = np.array([.5, .2, .8]).reshape((3, 1))
        # can.rotation = np.array([ 0.04588155, -0.38504402,  0.19207589]).reshape((3,1))
        can.rotation = np.array([-.0, -0., 0.]).reshape((3, 1))
        can_body = OpenRAVEBody(test_env, can.name, can.geom)
        can_body.set_pose(can.pose, can.rotation)
Example #2
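Exercises Baxter's right-arm IK: a can is placed at a target transform, the grasp transform is swept through a range of roll angles, and each IK solution found (with collision checking) is applied to the robot and checked against the target position.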
 def test_baxter_ik(self):
     env = ParamSetup.setup_env()
     baxter = ParamSetup.setup_baxter()
     can = ParamSetup.setup_green_can(geom=(0.02, 0.25))
     baxter_body = OpenRAVEBody(env, baxter.name, baxter.geom)
     can_body = OpenRAVEBody(env, can.name, can.geom)
     baxter_body.set_transparency(0.5)
     can_body.set_transparency(0.5)
     manip = baxter_body.env_body.GetManipulator('right_arm')
     robot = baxter_body.env_body
     can = can_body.env_body
     dof = robot.GetActiveDOFValues()
     # Open the gripper so there won't be collisions between the gripper and the can
     dof[9], dof[-1] = 0.02, 0.02
     robot.SetActiveDOFValues(dof)
     iktype = IkParameterizationType.Transform6D
     thetas = np.linspace(0, np.pi * 2, 10)
     target_trans = OpenRAVEBody.transform_from_obj_pose([0.9, -0.23, 0.93],
                                                         [0, 0, 0])
     can_body.env_body.SetTransform(target_trans)
     target_trans[:3, :3] = target_trans[:3, :3].dot(
         matrixFromAxisAngle([0, np.pi / 2, 0])[:3, :3])
     can_trans = target_trans.copy()  # copy so the per-theta rotation does not overwrite target_trans
     body_trans = np.eye(4)
     """
     To check whether the Baxter IK model works, uncomment the line below to open a viewer
     """
     # env.SetViewer('qtcoin')
     for theta in thetas:
         can_trans[:3, :3] = target_trans[:3, :3].dot(
             matrixFromAxisAngle([theta, 0, 0])[:3, :3])
         solution = manip.FindIKSolutions(
             IkParameterization(can_trans, iktype),
             IkFilterOptions.CheckEnvCollisions)
         if len(solution) > 0:
             print "Solution found with pose and rotation:"
             print OpenRAVEBody.obj_pose_from_transform(can_trans)
         else:
             print "Solution not found with pose and rotation:"
             print OpenRAVEBody.obj_pose_from_transform(can_trans)
         for sols in solution:
             dof[10:17] = sols
             robot.SetActiveDOFValues(dof)
             time.sleep(.2)
             body_trans[:3, :3] = can_trans[:3, :3].dot(
                 matrixFromAxisAngle([0, -np.pi / 2, 0])[:3, :3])
             can_body.env_body.SetTransform(body_trans)
             self.assertTrue(
                 np.allclose([0.9, -0.23, 0.93],
                             manip.GetTransform()[:3, 3]))
Example #3
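Creates an Obstacle parameter, wraps it in an OpenRAVEBody, sets a 2D pose, and verifies the resulting transform.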
    def test_add_obstacle(self):
        attrs = {
            "geom": [],
            "pose": [(3, 5)],
            "_type": ["Obstacle"],
            "name": ["obstacle"]
        }
        attr_types = {
            "geom": Obstacle,
            "pose": Vector2d,
            "_type": str,
            "name": str
        }
        obstacle = parameter.Object(attrs, attr_types)

        env = Environment()
        """
        To verify that _add_obstacle is working, uncomment the line below to
        confirm visually that the obstacle is created
        """
        # env.SetViewer('qtcoin')

        obstacle_body = OpenRAVEBody(env, obstacle.name, obstacle.geom)
        obstacle_body.set_pose([2, 0])
        arr = np.eye(4)
        arr[0, 3] = 2
        self.assertTrue(np.allclose(obstacle_body.env_body.GetTransform(),
                                    arr))
Example #4
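Helper that reuses a parameter's existing OpenRAVE body when one is present (asserting that its geometry and environment match) and otherwise creates it lazily.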
 def lazy_spawn_or_body(self, param, name, geom):
     if param.openrave_body is not None:
         assert geom == param.openrave_body._geom
         assert self._env == param.openrave_body.env_body.GetEnv()
     else:
         param.openrave_body = OpenRAVEBody(self._env, name, geom)
     return param.openrave_body
Example #5
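Samples candidate base poses around an end-effector target and uses arm IK on a dummy PR2 body to enumerate full robot poses that reach it.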
    def sample_rp_from_ee(self, env, rs_param, ee_pose):
        """
            Use inverse kinematics to find all robot arm poses and base poses that reach this ee_pose.

            Returns a list of full robot poses, each in the format
            (basepose, backHeight, lArmPose, lGripper, rArmPose, rGripper).
        """
        ee_pos, ee_rot = ee_pose[0], ee_pose[1]
        base_radius = np.linspace(-1, 1, num=BASE_SAMPLE_SIZE)
        poses = list(itertools.product(base_radius, base_radius))
        # Use a dummy PR2 body to query arm IK at each sampled base pose
        dummy_body = OpenRAVEBody(env, rs_param.name, PR2())
        possible_robot_pose = []
        for pos in poses:
            bp = pos + ee_pos[:2]
            vec = ee_pos[:2] - bp
            vec = vec / np.linalg.norm(vec)
            theta = math.atan2(vec[1], vec[0])
            bp = np.array([bp[0], bp[1], theta])
            dummy_body.set_pose(bp)
            arm_poses = dummy_body.ik_arm_pose(ee_pos, ee_rot)
            for arm_pose in arm_poses:
                possible_robot_pose.append(
                    (bp, arm_pose[0], rs_param.lArmPose, rs_param.lGripper,
                     arm_pose[1:], rs_param.rGripper))

        dummy_body.delete()
        return possible_robot_pose
Example #6
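Creates a green and a blue circle can, wraps each in an OpenRAVEBody, sets their 2D poses, and verifies the resulting transforms.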
    def test_add_circle(self):
        attrs = {
            "geom": [1],
            "pose": [(3, 5)],
            "_type": ["Can"],
            "name": ["can0"]
        }
        attr_types = {
            "geom": circle.GreenCircle,
            "pose": Vector2d,
            "_type": str,
            "name": str
        }
        green_can = parameter.Object(attrs, attr_types)

        attrs = {
            "geom": [1],
            "pose": [(3, 5)],
            "_type": ["Can"],
            "name": ["can1"]
        }
        attr_types = {
            "geom": circle.BlueCircle,
            "pose": Vector2d,
            "_type": str,
            "name": str
        }
        blue_can = parameter.Object(attrs, attr_types)

        env = Environment()
        """
        To verify that _add_circle and create_cylinder are working, uncomment the
        line below to confirm visually that the cylinders are created
        """
        # env.SetViewer('qtcoin')

        green_body = OpenRAVEBody(env, "can0", green_can.geom)
        blue_body = OpenRAVEBody(env, "can1", blue_can.geom)

        green_body.set_pose([2, 0])
        arr = np.eye(4)
        arr[0, 3] = 2
        self.assertTrue(np.allclose(green_body.env_body.GetTransform(), arr))
        blue_body.set_pose(np.array([0, -1]))
        arr = np.eye(4)
        arr[1, 3] = -1
        self.assertTrue(np.allclose(blue_body.env_body.GetTransform(), arr))
Example #7
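Tests the Baxter EEReachable position and rotation predicates by building an approach/retreat trajectory from IK solutions and evaluating both predicates at each timestep.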
    def test_ee_reachable(self):

        # InGripper, Robot, Can

        robot = ParamSetup.setup_baxter()
        test_env = ParamSetup.setup_env()
        rPose = ParamSetup.setup_baxter_pose()
        ee_pose = ParamSetup.setup_ee_pose()

        pred = baxter_predicates.BaxterEEReachablePos("ee_reachable", [robot, rPose, ee_pose], ["Robot", "RobotPose", "EEPose"], test_env)
        pred2 = baxter_predicates.BaxterEEReachableRot("ee_reachable_rot", [robot, rPose, ee_pose], ["Robot", "RobotPose", "EEPose"], test_env)
        baxter = pred._param_to_body[robot]
        # Since this predicate is not yet concrete
        self.assertFalse(pred.test(0))

        ee_pose.value = np.array([[1.2, -0.1, 0.925]]).T
        ee_pose.rotation = np.array([[0,0,0]]).T
        ee_targ = ParamSetup.setup_green_can()
        ee_body = OpenRAVEBody(test_env, "EE_Pose", ee_targ.geom)
        ee_body.set_pose(ee_pose.value[:, 0], ee_pose.rotation[:, 0])

        robot.lArmPose = np.zeros((7,7))
        robot.lGripper = np.ones((1, 7))*0.02
        robot.rGripper = np.ones((1, 7))*0.02
        robot.pose = np.zeros((1,7))
        robot.rArmPose = np.zeros((7,7))
        # initialized pose value is not right
        self.assertFalse(pred.test(0))

        # Find IK Solution
        trajectory = []
        trajectory.append(baxter.get_ik_from_pose([1.2-3*const.APPROACH_DIST, -0.1, 0.925], [0,0,0], "right_arm")[0])    #s=-3
        trajectory.append(baxter.get_ik_from_pose([1.2-2*const.APPROACH_DIST, -0.1, 0.925], [0,0,0], "right_arm")[0])    #s=-2
        trajectory.append(baxter.get_ik_from_pose([1.2-const.APPROACH_DIST, -0.1, 0.925], [0,0,0], "right_arm")[0])       #s=-1
        trajectory.append(baxter.get_ik_from_pose([1.2, -0.1, 0.925], [0,0,0], "right_arm")[0])                     #s=0
        trajectory.append(baxter.get_ik_from_pose([1.2, -0.1, 0.925+const.RETREAT_DIST], [0,0,0], "right_arm")[0])        #s=1
        trajectory.append(baxter.get_ik_from_pose([1.2, -0.1, 0.925+2*const.RETREAT_DIST], [0,0,0], "right_arm")[0])      #s=2
        trajectory.append(baxter.get_ik_from_pose([1.2, -0.1, 0.925+3*const.RETREAT_DIST], [0,0,0], "right_arm")[0])      #s=3
        trajectory = np.array(trajectory)


        robot.rArmPose = trajectory.T
        # The predicate should succeed at the grasping pose at t=3;
        # EEReachableRot should always pass since the rotation is correct throughout
        self.assertFalse(pred.test(0))
        self.assertTrue(pred2.test(0))
        self.assertFalse(pred.test(1))
        self.assertTrue(pred2.test(1))
        self.assertFalse(pred.test(2))
        self.assertTrue(pred2.test(2))
        self.assertTrue(pred.test(3))
        self.assertTrue(pred2.test(3))
        # Since solving IK introduces some numerical error, relax the gradient tolerance to 1e-3
        if const.TEST_GRAD: pred.expr.expr.grad(pred.get_param_vector(3), True, 1e-3)

        if const.TEST_GRAD: pred2.expr.expr.grad(pred2.get_param_vector(3), True, 1e-3)
Example #8
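The PR2 counterpart of Example #7: builds the approach/retreat trajectory with the rightarm_torso manipulator and evaluates the EEReachable predicates at each timestep.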
    def test_ee_reachable(self):

        # InGripper, Robot, Can

        robot = ParamSetup.setup_pr2()
        test_env = ParamSetup.setup_env()
        rPose = ParamSetup.setup_pr2_pose()
        ee_pose = ParamSetup.setup_ee_pose()

        pred = pr2_predicates.PR2EEReachablePos("ee_reachable", [robot, rPose, ee_pose], ["Robot", "RobotPose", "EEPose"], test_env)
        pred2 = pr2_predicates.PR2EEReachableRot("ee_reachable_rot", [robot, rPose, ee_pose], ["Robot", "RobotPose", "EEPose"], test_env)
        pr2 = pred._param_to_body[robot]
        # Since this predicate is not yet concrete
        self.assertFalse(pred.test(0))
        # ee_pose.value = np.array([[0.951, -0.188, 0.790675]]).T
        ee_pose.value = np.array([[0.440, -0.339, 0.791]]).T
        ee_pose.rotation = np.array([[0,0,0]]).T
        ee_targ = ParamSetup.setup_green_can()
        ee_body = OpenRAVEBody(test_env, "EE_Pose", ee_targ.geom)
        ee_body.set_pose(ee_pose.value[:, 0], ee_pose.rotation[:, 0])

        robot.lArmPose = np.zeros((7,7))
        robot.lGripper = np.ones((1, 7))*0.528
        robot.rArmPose = np.zeros((7,7))
        robot.rGripper = np.ones((1, 7))*0.528
        robot.pose = np.zeros((3,7))
        robot.backHeight = np.zeros((1,7))
        # initialized pose value is not right
        self.assertFalse(pred.test(0))

        # Find IK Solution
        trajectory = []
        trajectory.append(pr2.get_ik_from_pose([0.440-3*const.APPROACH_DIST, -0.339, 0.791], [0,0,0], "rightarm_torso")[0])    #s=-3
        trajectory.append(pr2.get_ik_from_pose([0.440-2*const.APPROACH_DIST, -0.339, 0.791], [0,0,0], "rightarm_torso")[0])    #s=-2
        trajectory.append(pr2.get_ik_from_pose([0.440-const.APPROACH_DIST, -0.339, 0.791], [0,0,0], "rightarm_torso")[0])       #s=-1
        trajectory.append(pr2.get_ik_from_pose([0.440, -0.339, 0.791], [0,0,0], "rightarm_torso")[0])                     #s=0
        trajectory.append(pr2.get_ik_from_pose([0.440, -0.339, 0.791+const.RETREAT_DIST], [0,0,0], "rightarm_torso")[0])        #s=1
        trajectory.append(pr2.get_ik_from_pose([0.440, -0.339, 0.791+2*const.RETREAT_DIST], [0,0,0], "rightarm_torso")[0])      #s=2
        trajectory.append(pr2.get_ik_from_pose([0.440, -0.339, 0.791+3*const.RETREAT_DIST], [0,0,0], "rightarm_torso")[0])      #s=3
        trajectory = np.array(trajectory).T

        robot.backHeight = trajectory[[0], :]
        robot.rArmPose = trajectory[1:, :]
        # The predicate should succeed at the grasping pose at t=3;
        # EEReachableRot should always pass since the rotation is correct throughout
        self.assertFalse(pred.test(0))
        self.assertTrue(pred2.test(0))
        self.assertFalse(pred.test(1))
        self.assertTrue(pred2.test(1))
        self.assertFalse(pred.test(2))
        self.assertTrue(pred2.test(2))
        self.assertTrue(pred.test(3))
        self.assertTrue(pred2.test(3))
Example #9
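Places a PR2 and a can, rotates the can's grasp transform, and calls get_torso_arm_ik to obtain a torso and arm configuration for the right arm.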
 def test_closest_arm_pose(self):
     env = ParamSetup.setup_env()
     # env.SetViewer('qtcoin')
     can = ParamSetup.setup_blue_can()
     robot = ParamSetup.setup_pr2()
     can.pose = np.array([[0, -.2, .8]]).T
     can_body = OpenRAVEBody(env, can.name, can.geom)
     can_body.set_pose(can.pose.flatten(), can.rotation.flatten())
     can_body.set_transparency(.7)
     robot.pose = np.array([[-.5, 0, 0]]).T
     robot_body = OpenRAVEBody(env, robot.name, robot.geom)
     robot_body.set_transparency(.7)
     robot_body.set_pose(robot.pose.flatten())
     dof_value_map = {
         "backHeight": robot.backHeight,
         "lArmPose": robot.lArmPose.flatten(),
         "lGripper": robot.lGripper,
         "rArmPose": robot.rArmPose.flatten(),
         "rGripper": robot.rGripper
     }
     robot_body.set_dof(dof_value_map)
     can_trans = OpenRAVEBody.transform_from_obj_pose(
         can.pose, can.rotation)
     rot_mat = matrixFromAxisAngle([0, np.pi / 2, 0])
     rot_mat = can_trans[:3, :3].dot(rot_mat[:3, :3])
     can_trans[:3, :3] = rot_mat
     torso_pose, arm_pose = sampling.get_torso_arm_ik(
         robot_body, can_trans, robot.rArmPose)
     # Apply the torso/arm IK solution returned by get_torso_arm_ik
     dof_value_map = {
         "backHeight": torso_pose,
         "lArmPose": robot.lArmPose.flatten(),
         "lGripper": robot.lGripper,
         "rArmPose": arm_pose,
         "rGripper": robot.rGripper
     }
     robot_body.set_dof(dof_value_map)
Example #10
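Samples end-effector poses from a target via get_ee_from_target and visualizes each candidate as a transparent dummy can.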
    def test_sample_ee_from_target(self):
        solver = can_solver.CanSolver()
        env = ParamSetup.setup_env()
        # env.SetViewer('qtcoin')
        target = ParamSetup.setup_target()
        target.value = np.array([[0, 0, 0]]).T
        target.rotation = np.array([[1.1, .3, 0]]).T

        dummy_targ_geom = can.BlueCan(0.04, 0.25)
        target_body = OpenRAVEBody(env, target.name, dummy_targ_geom)
        target_body.set_pose(target.value.flatten(), target.rotation.flatten())
        target_body.set_transparency(.7)

        robot = ParamSetup.setup_pr2()
        robot_body = OpenRAVEBody(env, robot.name, robot.geom)
        robot_body.set_transparency(.7)
        robot_body.set_pose(robot.pose.flatten())
        dof_value_map = {
            "backHeight": robot.backHeight,
            "lArmPose": robot.lArmPose.flatten(),
            "lGripper": robot.lGripper,
            "rArmPose": robot.rArmPose.flatten(),
            "rGripper": robot.rGripper
        }
        robot_body.set_dof(dof_value_map)

        dummy_ee_pose_geom = can.GreenCan(.03, .3)
        ee_list = list(
            enumerate(
                sampling.get_ee_from_target(target.value, target.rotation)))
        for ee_pose in ee_list:
            ee_pos, ee_rot = ee_pose[1]
            body = OpenRAVEBody(env, "dummy" + str(ee_pose[0]),
                                dummy_ee_pose_geom)
            body.set_pose(ee_pos, ee_rot)
            body.set_transparency(.9)
Example #11
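Verifies that constructing an OpenRAVEBody with an unsupported geometry raises an OpenRAVEException.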
 def test_exception(self):
     env = Environment()
     attrs = {
         "geom": [],
         "pose": [(3, 5)],
         "_type": ["Can"],
         "name": ["can0"]
     }
     attr_types = {
         "geom": DummyGeom,
         "pose": Vector2d,
         "_type": str,
         "name": str
     }
     green_can = parameter.Object(attrs, attr_types)
     with self.assertRaises(OpenRAVEException) as cm:
         green_body = OpenRAVEBody(env, "can0", green_can.geom)
     self.assertTrue("Geometry not supported" in cm.exception.message)
Example #12
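For each candidate end-effector pose, finds a collision-free base pose, picks the IK arm pose closest to the current configuration, and assembles the full robot pose.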
    def sample_rp_from_ee(t, plan, ee_poses, robot):
        target_pose = target.value[:, 0]
        dummy_body = OpenRAVEBody(plan.env, robot.name + '_dummy', robot.geom)
        possible_rps = []
        for ee_pose in ee_poses:
            base_pose = sampling.get_col_free_base_pose_around_target(
                t, plan, ee_pose.value, robot).reshape((1, 3))
            ik_arm_poses = dummy_body.ik_arm_pose(ee_pose[0], ee_pose[1])
            cur_arm_poses = np.c_[robot.backHeight, robot.rArmPose].reshape(
                (8, ))
            chosen_pose = sampling.closest_arm_pose(ik_arm_poses,
                                                    cur_arm_poses)
            torso_pose, arm_pose = chosen_pose[0], chosen_pose[1:]
            possible_rp = np.hstack(
                (base_pose, torso_pose, robot.lArmPose[:, t],
                 robot.lGripper[:, t], arm_pose, robot.rGripper[:, t]))
            possible_rps.append(possible_rp)

        dummy_body.delete()
        if len(possible_rps) <= 0:
            print "something went wrong"
            # import ipdb; ipdb.set_trace()

        return possible_rps
Example #13
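End-to-end IK check: builds PR2 and can parameters from raw attribute dictionaries, solves ik_arm_pose for the can's pose, applies the solution, and asserts that the gripper tool frame matches the can's pose and rotation.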
    def test_ik_arm_pose(self):
        env = Environment()
        attrs = {
            "name": ["pr2"],
            "pose": [(0, 0, 0)],
            "_type": ["Robot"],
            "geom": [],
            "backHeight": [0.2],
            "lGripper": [0.5],
            "rGripper": [0.5]
        }
        attrs["lArmPose"] = [(0, 0, 0, 0, 0, 0, 0)]
        attrs["rArmPose"] = [(0, 0, 0, 0, 0, 0, 0)]
        attr_types = {
            "name": str,
            "pose": matrix.Vector3d,
            "_type": str,
            "geom": PR2,
            "backHeight": matrix.Value,
            "lArmPose": matrix.Vector7d,
            "rArmPose": matrix.Vector7d,
            "lGripper": matrix.Value,
            "rGripper": matrix.Value
        }
        robot = parameter.Object(attrs, attr_types)
        # Set the initial arm pose so that it is not close to a joint limit
        robot.lArmPose = np.array([[
            np.pi / 4, np.pi / 8, np.pi / 2, -np.pi / 2, np.pi / 8, -np.pi / 8,
            np.pi / 2
        ]]).T
        robot.rArmPose = np.array([[
            -np.pi / 4, np.pi / 8, -np.pi / 2, -np.pi / 2, -np.pi / 8,
            -np.pi / 8, np.pi / 2
        ]]).T

        attrs = {
            "name": ["can"],
            "geom": (0.04, 0.25),
            "pose": ["undefined"],
            "rotation": [(0, 0, 0)],
            "_type": ["Can"]
        }
        attr_types = {
            "name": str,
            "geom": BlueCan,
            "pose": matrix.Vector3d,
            "rotation": matrix.Vector3d,
            "_type": str
        }
        can = parameter.Object(attrs, attr_types)
        can.pose = np.array([[5.77887566e-01, -1.26743678e-01,
                              8.37601627e-01]]).T
        can.rotation = np.array([[np.pi / 4, np.pi / 4, np.pi / 4]]).T
        # Create openRAVEBody for each parameter
        can_body = OpenRAVEBody(env, can.name, can.geom)
        robot_body = OpenRAVEBody(env, robot.name, robot.geom)
        # Set the poses and dof values for each body
        can_body.set_pose(can.pose, can.rotation)
        robot_body.set_pose(robot.pose)
        robot_body.set_dof(robot.backHeight, robot.lArmPose, robot.lGripper,
                           robot.rArmPose, robot.rGripper)
        # Solve the IK solution
        ik_arm = robot_body.ik_arm_pose(can.pose, can.rotation)[0]
        robot_body.set_dof(ik_arm[:1], robot.lArmPose, robot.lGripper,
                           ik_arm[1:], robot.rGripper)
        robot_trans = robot_body.env_body.GetLink(
            "r_gripper_tool_frame").GetTransform()
        robot_pos = OpenRAVEBody.obj_pose_from_transform(robot_trans)
        # The resulting robot end-effector pose should match the can's pose and rotation
        self.assertTrue(
            np.allclose(can.pose.flatten(), robot_pos[:3], atol=1e-4))
        self.assertTrue(
            np.allclose(can.rotation.flatten(), robot_pos[3:], atol=1e-4))