コード例 #1
0
class ArmECM(PyRep):
    def __init__(self, pr):
        """Wrap an already-running PyRep session and look up the ECM arm's scene objects.

        :param pr: active PyRep instance (scene already launched and started by the caller)
        """
        self.pr = pr
        # Base link of the endoscope arm.
        self.base_handle = Shape('L0_respondable_ECM')
        # The four ECM joints, J1..J4.
        self.j1_handle, self.j2_handle, self.j3_handle, self.j4_handle = (
            Joint('J{}_ECM'.format(i)) for i in range(1, 5))
        # Stereo camera pair mounted on the endoscope.
        self.left_cam = VisionSensor('Vision_sensor_left')
        self.right_cam = VisionSensor('Vision_sensor_right')

    def getJointAngles(self):
        """Return the four ECM joint positions as a numpy array [J1, J2, J3, J4]."""
        handles = (self.j1_handle, self.j2_handle,
                   self.j3_handle, self.j4_handle)
        return np.array([h.get_joint_position() for h in handles])

    def setJointAngles(self, jointAngles):
        """Command all four ECM joints.

        :param jointAngles: indexable of at least four joint values [J1..J4]
        """
        handles = (self.j1_handle, self.j2_handle,
                   self.j3_handle, self.j4_handle)
        for i, handle in enumerate(handles):
            handle.set_joint_position(jointAngles[i])

    def getCurrentPose(self):
        """Return the left camera's pose expressed in the arm-base frame."""
        return self.left_cam.get_pose(relative_to=self.base_handle)

    def getStereoImagePairs(self):
        """Capture and return the tuple (left_rgb, right_rgb)."""
        left = self.left_cam.capture_rgb()
        right = self.right_cam.capture_rgb()
        return left, right

    def getDepthImagePairs(self):
        """Capture and return the tuple (left_depth, right_depth).

        The positional ``True`` presumably selects metric depth (it matches
        the ``in_meters`` keyword used with capture_depth elsewhere in this
        codebase) — confirm against the PyRep API.
        """
        left = self.left_cam.capture_depth(True)
        right = self.right_cam.capture_depth(True)
        return left, right

    """def stopSim(self):
コード例 #2
0
ファイル: conditions.py プロジェクト: yanweifu/RLBench
 def __init__(self, joint: Joint, position: float):
     """Record the joint to monitor and its target position.

     ``position`` is in radians for a revolute joint, or meters for a
     prismatic one.
     """
     self._pos = position
     self._joint = joint
     self._original_pos = joint.get_joint_position()
コード例 #3
0
ファイル: camera.py プロジェクト: zuoguoqing/pyrobot
class LoCoBotCamera(Camera):
    """Camera interface for the simulated LoCoBot.

    Wraps the Kinect RGB/depth vision sensors and the pan/tilt joints of the
    camera mount.
    """

    def __init__(self, configs, simulator):
        """
        :param configs: robot configuration; must expose ``CAMERA.RESET_PAN``
                        and ``CAMERA.RESET_TILT`` (read by :meth:`reset`)
        :param simulator: simulator wrapper exposing its handle as ``sim``
        """
        # BUG FIX: ``configs`` was previously never stored, so reset()
        # raised AttributeError when it read self.configs.
        self.configs = configs
        self.sim = simulator.sim

        self.rgb_cam = VisionSensor("kinect_rgb")
        self.depth_cam = VisionSensor("kinect_depth")
        self.rgb_cam.set_render_mode(RenderMode.OPENGL3)
        self.depth_cam.set_render_mode(RenderMode.OPENGL3)

        # Pan and tilt joints of the camera mount.
        self.pan_joint = Joint("LoCoBot_head_pan_joint")
        self.tilt_joint = Joint("LoCoBot_head_tilt_joint")

    def get_rgb(self):
        """Return the current RGB frame from the Kinect RGB sensor."""
        return self.rgb_cam.capture_rgb()

    def get_depth(self):
        """Return the current depth frame from the Kinect depth sensor."""
        return self.depth_cam.capture_depth()

    def get_rgb_depth(self):
        """Return the tuple (rgb_frame, depth_frame)."""
        return self.get_rgb(), self.get_depth()

    def get_intrinsics(self):
        """Return the intrinsic matrix of the camera (shape [3, 3]).

        Not implemented yet; the commented sketch below shows the intended
        construction from focal lengths and principal point.
        """
        # TODO: implement once the camera intrinsics are fixed.  The
        # original body kept unreachable code after the raise that
        # referenced undefined names (fx/fy/cx/cy); it is preserved here
        # as comments only.
        raise NotImplementedError
        # fx = self.configs['Camera.fx']
        # fy = self.configs['Camera.fy']
        # cx = self.configs['Camera.cx']
        # cy = self.configs['Camera.cy']
        # return np.array([[fx, 0, cx], [0, fy, cy], [0, 0, 1]])

    def pix_to_3dpt(self, rs, cs, in_cam=False):
        """Get the 3D points of the given pixels in the RGB image.

        :param rs: row indices of interest (list or 1D np.ndarray)
        :param cs: column indices of interest (list or 1D np.ndarray)
        :param in_cam: if True return points in the camera frame,
                       otherwise in the base frame
        :type rs: list or np.ndarray
        :type cs: list or np.ndarray
        :type in_cam: bool
        :returns: tuple (pts, colors); pts are point coordinates
                  (shape [N, 3]) and colors their RGB values (shape [N, 3])
        :rtype: tuple(np.ndarray, np.ndarray)
        """
        raise NotImplementedError

    def get_current_pcd(self, in_cam=True):
        """Return the point cloud of the current frame only.

        :param in_cam: if True return points in the camera frame,
                       otherwise in the base frame
        :type in_cam: bool
        :returns: tuple (pts, colors); pts are point coordinates
                  (shape [N, 3]) and colors their RGB values (shape [N, 3])
        :rtype: tuple(np.ndarray, np.ndarray)
        """
        raise NotImplementedError

    @property
    def state(self):
        """list: the current [pan angle, tilt angle] of the camera mount."""
        return self.get_state()

    def get_state(self):
        """Return the current pan and tilt joint angles.

        :return: list of the form [pan angle, tilt angle]
        :rtype: list
        """
        return [self.get_pan(), self.get_tilt()]

    def get_pan(self):
        """Return the current pan joint angle.

        :rtype: float
        """
        return self.pan_joint.get_joint_position()

    def get_tilt(self):
        """Return the current tilt joint angle.

        :rtype: float
        """
        return self.tilt_joint.get_joint_position()

    def set_pan(self, pan, wait=True):
        """Set the pan joint angle.

        :param pan: target pan angle
        :param wait: kept for API compatibility; blocking until the angle is
                     reached is currently not implemented (the simulation is
                     not stepped here)
        :type pan: float
        :type wait: bool
        """
        self.pan_joint.set_joint_position(pan)
        # [self.sim.step() for _ in range(50)]

    def set_tilt(self, tilt, wait=True):
        """Set the tilt joint angle.

        :param tilt: target tilt angle
        :param wait: kept for API compatibility; blocking is currently
                     not implemented
        :type tilt: float
        :type wait: bool
        """
        self.tilt_joint.set_joint_position(tilt)

    def set_pan_tilt(self, pan, tilt, wait=True):
        """Set both the pan and tilt joint angles.

        :param pan: target pan angle
        :param tilt: target tilt angle
        :param wait: kept for API compatibility; see set_pan / set_tilt
        :type pan: float
        :type tilt: float
        :type wait: bool
        """
        self.set_pan(pan)
        self.set_tilt(tilt)

    def reset(self):
        """Actuate the pan and tilt joints back to their home configuration."""
        self.set_pan_tilt(self.configs.CAMERA.RESET_PAN,
                          self.configs.CAMERA.RESET_TILT)
コード例 #4
0
class ArmPSM(PyRep):
    def __init__(self, pr, armNumber=1):
        """Bind to one PSM arm in an already-running PyRep scene.

        :param pr: active PyRep instance (scene launched and started by caller)
        :param armNumber: index of the PSM arm to control (default 1)
        """
        self.pr = pr
        self.psm = armNumber
        self.ik_mode = 1

        n = self.psm
        # Arm base plus the five arm joints; "joint 6" is realised by the two
        # coupled jaw joints (dx/sx) of the tool.
        self.base_handle = Shape('RCM_PSM{}'.format(n))
        self.j1_handle = Joint('J1_PSM{}'.format(n))
        self.j2_handle = Joint('J2_PSM{}'.format(n))
        self.j3_handle = Joint('J3_PSM{}'.format(n))
        self.j4_handle = Joint('J1_TOOL{}'.format(n))
        self.j5_handle = Joint('J2_TOOL{}'.format(n))
        self.j6d_handle = Joint('J3_dx_TOOL{}'.format(n))
        self.j6s_handle = Joint('J3_sx_TOOL{}'.format(n))

        # Dummies marking useful frames along the tool.
        self.j5_dummy_handle = Dummy('J2_virtual_TOOL{}'.format(n))
        self.j6d_tip_dummy_handle = Dummy('J3_dx_tip_TOOL{}'.format(n))
        self.j6s_tip_dummy_handle = Dummy('J3_sx_tip_TOOL{}'.format(n))

        # IK targets for the two jaw tips.
        self.ik_target_dx_dummy_handle = Dummy('IK_target_dx_PSM{}'.format(n))
        self.ik_target_sx_dummy_handle = Dummy('IK_target_sx_PSM{}'.format(n))

        self.marker = Shape('yaw_{}'.format(n))
        self.EE_virtual_handle = Dummy('EE_virtual_TOOL{}'.format(n))

        # Integer signals polled by the scene's embedded scripts.
        self.ik_signal = IntegerSignal("run_IK_PSM{}".format(n))
        self.dyn_signal = IntegerSignal("run_dyn_PSM{}".format(n))

        # Start with IK and dynamics disabled to save simulator computation.
        self.setIkMode(0)
        self.setDynamicsMode(0)

    def setDynamicsMode(self, dyn_mode):
        """Enable (1) or disable (0) dynamics simulation for this arm."""
        self.dyn_mode = dyn_mode
        self.dyn_signal.set(self.dyn_mode)

    def setIkMode(self, ik_mode):
        """Enable (1) or disable (0) inverse-kinematics mode for this arm."""
        self.ik_mode = ik_mode
        self.ik_signal.set(self.ik_mode)

    def posquat2Matrix(self, pos, quat):
        """Build a 4x4 homogeneous transform from a position and an
        [x, y, z, w] quaternion."""
        # quaternions.quat2mat expects [w, x, y, z] ordering.
        wxyz = [quat[-1], quat[0], quat[1], quat[2]]
        T = np.eye(4)
        T[0:3, 0:3] = quaternions.quat2mat(wxyz)
        T[0:3, 3] = pos
        return np.array(T)

    def matrix2posquat(self, T):
        """Split a 4x4 homogeneous transform into (position,
        [x, y, z, w] quaternion)."""
        pos = T[0:3, 3]
        # quaternions.mat2quat yields [w, x, y, z]; reorder to w-last.
        w, x, y, z = quaternions.mat2quat(T[0:3, 0:3])
        return np.array(pos), np.array([x, y, z, w])

    def getJawAngle(self):
        """Return the gripper jaw opening derived from the two jaw joints.

        The jaw angle is the scaled mean of the distal (dx) and proximal (sx)
        jaw joint positions; 0.4106 is the jaw coupling constant used
        throughout this class.
        """
        pos6d = self.j6d_handle.get_joint_position()
        pos6s = self.j6s_handle.get_joint_position()
        return 0.5 * (pos6d + pos6s) / 0.4106

    def getJointAngles(self):
        """Return (jointAngles, jawAngle).

        jointAngles is a 6-vector [J1..J6]: J6 is recovered as half the
        difference of the two jaw joints, while jawAngle is their scaled
        mean (0.4106 coupling constant).
        """
        arm_handles = (self.j1_handle, self.j2_handle, self.j3_handle,
                       self.j4_handle, self.j5_handle)
        pos1, pos2, pos3, pos4, pos5 = [
            h.get_joint_position() for h in arm_handles
        ]
        pos6s = self.j6s_handle.get_joint_position()
        pos6d = self.j6d_handle.get_joint_position()

        pos6 = 0.5 * (pos6d - pos6s)
        jawAngle = 0.5 * (pos6d + pos6s) / 0.4106

        return np.array([pos1, pos2, pos3, pos4, pos5, pos6]), jawAngle

    def getJointVelocities(self):
        """Return (jointVelocities, jawVel), mirroring getJointAngles().

        BUG FIX: vel6 previously used ``0.5 * (vel6s - vel6d)``.  The position
        counterpart in getJointAngles() is ``pos6 = 0.5 * (pos6d - pos6s)``,
        so differentiating gives ``vel6 = 0.5 * (vel6d - vel6s)`` — the
        original sign was inverted relative to the position convention.
        """
        vel1 = self.j1_handle.get_joint_velocity()
        vel2 = self.j2_handle.get_joint_velocity()
        vel3 = self.j3_handle.get_joint_velocity()
        vel4 = self.j4_handle.get_joint_velocity()
        vel5 = self.j5_handle.get_joint_velocity()
        vel6s = self.j6s_handle.get_joint_velocity()
        vel6d = self.j6d_handle.get_joint_velocity()

        # Half-difference = J6 velocity; scaled mean = jaw velocity.
        vel6 = 0.5 * (vel6d - vel6s)
        jawVel = 0.5 * (vel6s + vel6d) / 0.4106

        jointVelocities = np.array([vel1, vel2, vel3, vel4, vel5, vel6])

        return jointVelocities, jawVel

    def setJointAngles(self, jointAngles, jawAngle):
        """Command all six arm joints plus the jaw opening.

        The two physical jaw joints are driven so that their half-difference
        equals ``jointAngles[5]`` and their scaled mean equals ``jawAngle``
        (the inverse of the mapping in getJointAngles()).
        """
        arm_handles = (self.j1_handle, self.j2_handle, self.j3_handle,
                       self.j4_handle, self.j5_handle)
        for i, handle in enumerate(arm_handles):
            handle.set_joint_position(jointAngles[i])

        coupled = 0.4106 * jawAngle
        self.j6s_handle.set_joint_position(coupled - jointAngles[5])
        self.j6d_handle.set_joint_position(coupled + jointAngles[5])

    def getPoseAtJoint(self, j):
        """Return (pos, quat) of kinematic frame ``j`` in the arm-base frame.

        j == 0: world-frame pose of the base shape.
        j == 1..4: pose of the corresponding joint frame, re-oriented by a
        fixed rotation so the frames follow a common convention.  Note the
        frame for joint j is read from the *next* joint's handle (j == 1
        reads j2_handle, etc.) — presumably because each joint's origin sits
        at its successor; confirm against the scene.
        j == 5: pose of the virtual J2 tool dummy, as-is.
        any other j: the virtual EE dummy pose; unless j == 6, an extra fixed
        tool-tip offset is applied (rotation about x by -pi/2 plus a 10.2 mm
        translation along the resulting z axis).

        quat is in [x, y, z, w] order (see matrix2posquat).
        """
        if j == 0:
            # World-frame pose of the RCM base shape.
            pose = self.base_handle.get_pose()
            pos, quat = pose[0:3], pose[3:]
        elif j == 1:
            pose = self.j2_handle.get_pose(relative_to=self.base_handle)
            pos, quat = pose[0:3], pose[3:]
            T = self.posquat2Matrix(pos, quat)
            # Fixed +90 degree rotation about x.
            rot90x = [[1, 0, 0, 0], [0, 0, -1, 0], [0, 1, 0, 0], [0, 0, 0, 1]]
            pos, quat = self.matrix2posquat(np.dot(T, rot90x))
        elif j == 2:
            pose = self.j3_handle.get_pose(relative_to=self.base_handle)
            pos, quat = pose[0:3], pose[3:]
            T = self.posquat2Matrix(pos, quat)
            # Fixed axis permutation for the J2 frame.
            rot = [[0, 0, 1, 0], [-1, 0, 0, 0], [0, -1, 0, 0], [0, 0, 0, 1]]
            pos, quat = self.matrix2posquat(np.dot(T, rot))
        elif j == 3:
            pose = self.j4_handle.get_pose(relative_to=self.base_handle)
            pos, quat = pose[0:3], pose[3:]
            T = self.posquat2Matrix(pos, quat)
            # 180 degree rotation about z.
            rot = [[
                -1,
                0,
                0,
                0,
            ], [0, -1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]
            pos, quat = self.matrix2posquat(np.dot(T, rot))
        elif j == 4:
            pose = self.j5_handle.get_pose(relative_to=self.base_handle)
            pos, quat = pose[0:3], pose[3:]
            T = self.posquat2Matrix(pos, quat)
            # Fixed axis permutation for the J4 frame.
            rot = [[0, 0, -1, 0], [1, 0, 0, 0], [0, -1, 0, 0], [0, 0, 0, 1]]
            pos, quat = self.matrix2posquat(np.dot(T, rot))
        elif j == 5:
            pose = self.j5_dummy_handle.get_pose(relative_to=self.base_handle)
            pos, quat = pose[0:3], pose[3:]
        else:
            pose = self.EE_virtual_handle.get_pose(
                relative_to=self.base_handle)
            pos, quat = pose[0:3], pose[3:]
            if j != 6:
                # Apply the fixed tool-tip transform: rotate -pi/2 about x,
                # then translate 0.0102 m along the new z axis.  ct/st are
                # cos(0)/sin(0), so T_z is a pure translation.
                T = self.posquat2Matrix(pos, quat)

                ct = np.cos(0)
                st = np.sin(0)

                ca = np.cos(-np.pi / 2.0)
                sa = np.sin(-np.pi / 2.0)

                T_x = np.array([[1, 0, 0, 0], [0, ca, -sa, 0], [0, sa, ca, 0],
                                [0, 0, 0, 1]])
                T_z = np.array([[ct, -st, 0, 0], [st, ct, 0, 0],
                                [0, 0, 1, 0.0102], [0, 0, 0, 1]])
                T = np.dot(np.dot(T, T_x), T_z)

                pos, quat = self.matrix2posquat(T)

        return np.array(pos), np.array(quat)

    def getPoseAtEE(self):
        """Return (pos, quat) of the end effector relative to the arm base."""
        return self.getPoseAtJoint(6)

    #def getVelocityAtEE(self):
    #   return self.EE_virtual_handle.get_velocity()

    def setPoseAtEE(self, pos, quat, jawAngle):
        """Drive the two IK target dummies so the end effector reaches
        (pos, quat) with the requested jaw opening.

        :param pos: desired EE position in the arm-base frame
        :param quat: desired EE orientation, [x, y, z, w]
        :param jawAngle: desired jaw opening; scaled by the 0.4106 coupling
            constant used throughout this class
        """
        # Half-opening angle applied to each jaw.
        theta = 0.4106 * jawAngle

        b_T_ee = self.posquat2Matrix(pos, quat)

        # Fixed transforms from the EE frame to the two jaw (sx/dx) frames.
        # The values look like calibrated near-axis-aligned rotations with
        # sub-millimetre offsets — presumably identified numerically from
        # the simulated tool; do not edit by hand.
        ee_T_sx = np.array([
            [9.99191168e-01, 4.02120491e-02, -5.31786338e-06, 4.17232513e-07],
            [-4.01793160e-02, 9.98383134e-01, 4.02087139e-02, -1.16467476e-04],
            [1.62218404e-03, -4.01759782e-02, 9.99191303e-01, -3.61323357e-04],
            [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.00000000e+00]
        ])

        ee_T_dx = np.array([[
            -9.99191251e-01, -4.02099858e-02, -1.98098369e-06, 4.17232513e-07
        ], [-4.01773877e-02, 9.98383193e-01, -4.02091818e-02, -1.16467476e-04],
                            [
                                1.61878841e-03, -4.01765831e-02,
                                -9.99191284e-01, -3.61323357e-04
                            ],
                            [
                                0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
                                1.00000000e+00
                            ]])

        # Base-frame poses of the two jaw frames.
        b_T_sx = np.dot(b_T_ee, ee_T_sx)
        b_T_dx = np.dot(b_T_ee, ee_T_dx)

        ct = np.cos(theta)
        st = np.sin(theta)

        # Rotation by theta about z combined with a 9 mm lever-arm offset
        # (the point (0, 0.009) rotated by theta) — presumably the jaw
        # pivot-to-tip geometry; confirm against the tool model.
        x_T_ts = np.array([[
            ct,
            -st,
            0,
            -st * 0.009,
        ], [st, ct, 0, ct * 0.009], [
            0,
            0,
            1,
            0,
        ], [0, 0, 0, 1]])

        pos_sx, quat_sx = self.matrix2posquat(np.dot(b_T_sx, x_T_ts))
        pos_dx, quat_dx = self.matrix2posquat(np.dot(b_T_dx, x_T_ts))

        # Hand both targets to the scene's IK script.
        self.ik_target_dx_dummy_handle.set_pose(np.r_[pos_dx, quat_dx],
                                                relative_to=self.base_handle)
        self.ik_target_sx_dummy_handle.set_pose(np.r_[pos_sx, quat_sx],
                                                relative_to=self.base_handle)

    def get_marker_position(self, relative_to=None):
        """Return the yaw marker's position, optionally relative to another
        scene object (world frame when ``relative_to`` is None)."""
        marker = self.marker
        return marker.get_position(relative_to)

    """def stopSim(self):
コード例 #5
0
ファイル: specificworker.py プロジェクト: pawanw17/grasping
class SpecificWorker(GenericWorker):
    """Grasping-demo worker.

    Captures RGB/depth images from a shoulder camera in the simulator,
    queries two object-pose-estimation proxies (RGB and RGBD), and commands
    a gen3 arm through Lua child scripts embedded in the scene to grasp and
    lift a known object.
    """

    def __init__(self, proxy_map):
        super(SpecificWorker, self).__init__(proxy_map)

    def __del__(self):
        # Stop the simulation and release PyRep on teardown.
        print('SpecificWorker destructor')
        self.pr.stop()
        self.pr.shutdown()

    def setParams(self, params):
        """Launch the scene named by ``params["scene_dir"]`` and cache handles.

        Populates the camera-parameter dict, the graspable-object registry,
        the per-object point clouds, the intrinsic matrix, the Lua operation
        codes and the arm handles.
        """
        SCENE_FILE = params["scene_dir"]
        self.pr = PyRep()
        self.pr.launch(SCENE_FILE, headless=False)
        self.pr.start()

        self.cameras = {}
        cam = VisionSensor("Camera_Shoulder")
        # BUG FIX: the original dict defined "depth" twice ("depth": 3 for
        # the channel count, silently overwritten by the buffer placeholder
        # below); the dead first entry has been removed.
        self.cameras["Camera_Shoulder"] = {
            "handle": cam,
            "id": 1,
            "angle": np.radians(cam.get_perspective_angle()),
            "width": cam.get_resolution()[0],
            "height": cam.get_resolution()[1],
            # Focal length in pixels, derived from the perspective angle.
            "focal": (cam.get_resolution()[0] /
                      np.tan(np.radians(cam.get_perspective_angle()))),
            "position": cam.get_position(),
            "rotation": cam.get_quaternion(),
            "image_rgb": np.array(0),
            "image_rgbd": np.array(0),
            "depth": np.ndarray(0),
        }

        # Objects the demo can grasp: simulator ground-truth pose plus the
        # poses predicted from RGB and from RGBD.
        self.grasping_objects = {}
        can = Shape("can")
        self.grasping_objects["002_master_chef_can"] = {
            "handle": can,
            "sim_pose": None,
            "pred_pose_rgb": None,
            "pred_pose_rgbd": None
        }

        # Per-object point clouds used for pose visualization.
        # NOTE(review): pickle.load is unsafe on untrusted files; acceptable
        # here only because the pickle ships with the project.
        with open("objects_pcl.pickle", "rb") as file:
            self.object_pcl = pickle.load(file)

        # Pinhole intrinsic matrix of the shoulder camera.
        shoulder = self.cameras["Camera_Shoulder"]
        self.intrinsics = np.array(
            [[shoulder["focal"], 0.0, shoulder["width"] / 2.0],
             [0.0, shoulder["focal"], shoulder["height"] / 2.0],
             [0.0, 0.0, 1.0]])

        # Operation codes understood by the arm's embedded Lua script.
        self.arm_ops = {
            "MoveToHome": 1,
            "MoveToObj": 2,
            "CloseGripper": 3,
            "OpenGripper": 4
        }

        # Upper bound on simulation steps to wait for a gripper command.
        self.grasping_iter = 10

        self.arm_base = Shape("gen3")
        self.arm_target = Dummy("target")
        self.gripper = Joint("RG2_openCloseJoint")

    def compute(self):
        """Run one demo cycle: capture, estimate poses, grasp and lift.

        Always returns True so the worker keeps running; errors are printed
        and the cycle is abandoned (best-effort loop).
        """
        print('SpecificWorker.compute...')
        try:
            self.pr.step()

            # Open the arm gripper.
            self.move_gripper(self.arm_ops["OpenGripper"])

            # Read the shoulder camera RGB and depth signals.
            cam = self.cameras["Camera_Shoulder"]
            image_float = cam["handle"].capture_rgb()
            depth = cam["handle"].capture_depth(in_meters=False)
            # Convert the float image in [0, 1] to 8-bit for the proxies.
            image = cv2.normalize(src=image_float,
                                  dst=None,
                                  alpha=0,
                                  beta=255,
                                  norm_type=cv2.NORM_MINMAX,
                                  dtype=cv2.CV_8U)
            cam["image_rgb"] = RoboCompCameraRGBDSimple.TImage(
                width=cam["width"],
                height=cam["height"],
                depth=3,
                focalx=cam["focal"],
                focaly=cam["focal"],
                image=image.tobytes())
            cam["image_rgbd"] = RoboCompCameraRGBDSimple.TImage(
                width=cam["width"],
                height=cam["height"],
                depth=3,
                focalx=cam["focal"],
                focaly=cam["focal"],
                image=image.tobytes())
            cam["depth"] = RoboCompCameraRGBDSimple.TDepth(
                width=cam["width"],
                height=cam["height"],
                depthFactor=1.0,
                depth=depth.tobytes())

            # Ground-truth object poses from the simulator.
            for obj_name in self.grasping_objects.keys():
                self.grasping_objects[obj_name]["sim_pose"] = \
                    self.grasping_objects[obj_name]["handle"].get_pose()

            # Object poses predicted from RGB only.
            pred_poses = self.objectposeestimationrgb_proxy.getObjectPose(
                cam["image_rgb"])
            self.visualize_poses(image_float, pred_poses, "rgb_pose.png")
            for pose in pred_poses:
                if pose.objectname in self.grasping_objects.keys():
                    obj_trans = [pose.x, pose.y, pose.z]
                    obj_quat = [pose.qx, pose.qy, pose.qz, pose.qw]
                    obj_pose = self.process_pose(obj_trans, obj_quat)
                    self.grasping_objects[
                        pose.objectname]["pred_pose_rgb"] = obj_pose

            # Object poses predicted from RGBD.
            pred_poses = self.objectposeestimationrgbd_proxy.getObjectPose(
                cam["image_rgbd"], cam["depth"])
            self.visualize_poses(image_float, pred_poses, "rgbd_pose.png")
            for pose in pred_poses:
                if pose.objectname in self.grasping_objects.keys():
                    obj_trans = [pose.x, pose.y, pose.z]
                    obj_quat = [pose.qx, pose.qy, pose.qz, pose.qw]
                    obj_pose = self.process_pose(obj_trans, obj_quat)
                    self.grasping_objects[
                        pose.objectname]["pred_pose_rgbd"] = obj_pose

            # Dummy used as the arm's path-planning target.
            approach_dummy = Dummy.create()
            approach_dummy.set_name("approach_dummy")

            # Initialize the approach dummy inside the embedded Lua scripts.
            self.pr.script_call("initDummy@gen3",
                                vrepConst.sim_scripttype_childscript)

            # Pose the arm should reach.
            # NOTE: choose the simulator or a predicted pose here.
            dest_pose = self.grasping_objects["002_master_chef_can"][
                "pred_pose_rgbd"]
            # Small z offset so the gripper approaches the object top.
            dest_pose[2] += 0.04

            # Move to the object and close the gripper on it.
            approach_dummy.set_pose(dest_pose)
            self.move_arm(approach_dummy, self.arm_ops["MoveToObj"])
            self.move_gripper(self.arm_ops["CloseGripper"])

            # Lift the object to the final destination.
            dest_pose[2] += 0.4
            approach_dummy.set_pose(dest_pose)
            self.move_arm(approach_dummy, self.arm_ops["MoveToObj"])

            # Remove the created approach dummy.
            approach_dummy.remove()

        except Exception as e:
            # Best-effort loop: log and keep the worker alive.
            print(e)
        return True

    def process_pose(self, obj_trans, obj_rot):
        """Convert an object pose from the camera frame to the world frame.

        :param obj_trans: [x, y, z] translation in the camera frame
        :param obj_rot: [qx, qy, qz, qw] quaternion in the camera frame
        :returns: [x, y, z, qx, qy, qz, qw] pose in the world frame
        """
        # Camera pose and the z-axis flip matrix.
        cam_trans = self.cameras["Camera_Shoulder"]["position"]
        cam_rot_mat = R.from_quat(self.cameras["Camera_Shoulder"]["rotation"])
        z_flip = R.from_matrix(np.array([[-1, 0, 0], [0, -1, 0], [0, 0, 1]]))
        # Object position in world coordinates.
        obj_trans = np.dot(
            cam_rot_mat.as_matrix(),
            np.dot(z_flip.as_matrix(),
                   np.array(obj_trans).reshape(-1, )))
        final_trans = obj_trans + cam_trans
        # Object orientation in world coordinates.
        obj_rot_mat = R.from_quat(obj_rot)
        final_rot_mat = obj_rot_mat * z_flip * cam_rot_mat
        final_rot = final_rot_mat.as_quat()
        # Final object pose in world coordinates.
        final_pose = list(final_trans)
        final_pose.extend(list(final_rot))
        return final_pose

    def visualize_poses(self, image, poses, img_name):
        """Project the predicted poses onto the RGB image and save to output/.

        :param image: float RGB image in [0, 1]
        :param poses: predicted poses from a pose-estimation proxy
        :param img_name: output file name ("rgb_pose.png" or "rgbd_pose.png")
        """
        image = np.uint8(image * 255.0)
        for pose in poses:
            # Visualize only the objects the demo knows about.
            if pose.objectname not in self.grasping_objects.keys():
                continue
            obj_pcl = self.object_pcl[pose.objectname]
            obj_trans = np.array([pose.x, pose.y, pose.z])
            if img_name == "rgb_pose.png":
                # HACK: empirical depth offset applied to RGB-only
                # predictions — reason undocumented in the original.
                obj_trans[2] -= 0.2
            obj_rot = R.from_quat([pose.qx, pose.qy, pose.qz,
                                   pose.qw]).as_matrix()
            proj_pcl = self.vertices_reprojection(obj_pcl, obj_rot, obj_trans,
                                                  self.intrinsics)
            image = self.draw_pcl(image,
                                  proj_pcl,
                                  r=1,
                                  color=(randint(0, 255), randint(0, 255),
                                         randint(0, 255)))
        cv2.imwrite(os.path.join("output", img_name), image)

    def vertices_reprojection(self, vertices, r, t, k):
        """Re-project 3D vertices into pixel space.

        :param vertices: [N, 3] object points
        :param r: 3x3 rotation matrix
        :param t: translation vector
        :param k: 3x3 intrinsic matrix
        :returns: [N, 2] pixel coordinates
        """
        p = np.matmul(k, np.matmul(r, vertices.T) + t.reshape(-1, 1))
        # Perspective divide; the epsilon avoids division by zero.
        p[0] = p[0] / (p[2] + 1e-5)
        p[1] = p[1] / (p[2] + 1e-5)
        return p[:2].T

    def draw_pcl(self, img, p2ds, r=1, color=(255, 0, 0)):
        """Draw the projected object point cloud on the RGB image."""
        h, w = img.shape[0], img.shape[1]
        for pt_2d in p2ds:
            # Clamp to the image bounds before the int() cast.
            pt_2d[0] = np.clip(pt_2d[0], 0, w)
            pt_2d[1] = np.clip(pt_2d[1], 0, h)
            img = cv2.circle(img, (int(pt_2d[0]), int(pt_2d[1])), r, color, -1)
        return img

    def move_arm(self, dummy_dest, func_number):
        """Move the gen3 arm until its target reaches ``dummy_dest``.

        NOTE: this uses remote Lua scripts embedded in the arm model for
        path planning, so make sure the correct arm model is loaded.
        """
        call_function = True
        init_pose = np.array(
            self.arm_target.get_pose(relative_to=self.arm_base))
        # Loop until the arm has reached the destination.
        while True:
            self.pr.step()
            # Keep (re-)sending the operation until the arm starts moving.
            if call_function:
                try:
                    # Call the threaded child Lua script via PyRep.
                    self.pr.script_call("setFunction@gen3",
                                        vrepConst.sim_scripttype_childscript,
                                        ints=[func_number])
                except Exception as e:
                    print(e)
            # Current poses to compare against.
            actual_pose = self.arm_target.get_pose(relative_to=self.arm_base)
            object_pose = dummy_dest.get_pose(relative_to=self.arm_base)
            # Stop re-sending once the arm has visibly left its start pose.
            # NOTE: the original condition lacked these parentheses
            # (``call_function and a or b or c``); since the branch only
            # ever sets call_function to False the behavior is identical,
            # but this form states the intent.
            pose_diff = np.abs(np.array(actual_pose) - np.array(init_pose))
            if call_function and (pose_diff[0] > 0.01 or pose_diff[1] > 0.01
                                  or pose_diff[2] > 0.01):
                call_function = False
            # Done when the target is within 1.5 cm of the destination.
            dest_pose_diff = np.abs(
                np.array(actual_pose) - np.array(object_pose))
            if (dest_pose_diff[0] < 0.015 and dest_pose_diff[1] < 0.015
                    and dest_pose_diff[2] < 0.015):
                break

    def move_gripper(self, func_number):
        """Open or close the arm gripper.

        NOTE: this uses remote Lua scripts embedded in the arm model, so make
        sure the correct arm model is loaded.  The loop is bounded by
        ``self.grasping_iter`` simulation steps.
        """
        call_function = True
        open_percentage = init_position = self.gripper.get_joint_position()
        # Loop until the gripper is completely open (or closed).
        for _ in range(self.grasping_iter):
            self.pr.step()
            if call_function:
                try:
                    # Call the threaded child Lua script via PyRep.
                    self.pr.script_call("setFunction@gen3",
                                        vrepConst.sim_scripttype_childscript,
                                        ints=[func_number])
                except Exception as e:
                    print(e)
                # Once the gripper has clearly moved, stop re-sending.
                if abs(self.gripper.get_joint_position() -
                       init_position) > 0.005:
                    call_function = False
            # If the opening has stabilized, the gripper is done.
            if not call_function and abs(open_percentage - self.gripper.
                                         get_joint_position()) < 0.003:
                break
            # Update the current opening percentage.
            open_percentage = self.gripper.get_joint_position()
コード例 #6
0
class StereoVisionRobot:
    """Stereo vision head with per-eye pan joints and shared tilt joints.

    The pan joints encode both the common gaze direction ("pan") and the
    angle between the eyes ("vergence", used for depth fixation); the tilt
    joints move the gaze vertically.  Public getters/setters work in
    degrees; joint positions are stored in radians (converted with the
    module-level ``rad``/``deg`` helpers).
    """

    def __init__(self,
                 min_distance,
                 max_distance,
                 default_joint_limit_type="none"):
        """
        :param min_distance: closest fixation distance sampled on reset.
        :param max_distance: farthest fixation distance sampled on reset.
        :param default_joint_limit_type: ``"stick"`` clips joint commands
            at the mechanical limits, ``"none"`` applies no clipping.
        """
        self.cam_left = VisionSensor("vs_cam_left#")
        self.cam_right = VisionSensor("vs_cam_right#")
        self.tilt_left = Joint("vs_eye_tilt_left#")
        self.tilt_right = Joint("vs_eye_tilt_right#")
        self.pan_left = Joint("vs_eye_pan_left#")
        self.pan_right = Joint("vs_eye_pan_right#")
        self.min_distance = min_distance
        self.max_distance = max_distance
        self.default_joint_limit_type = default_joint_limit_type
        # Mechanical limits (radians) used by the "stick" limit policy.
        self._pan_max = rad(10)
        self._tilt_max = rad(10)
        # Integrated speeds (radians per step), updated by set_delta_*_speed.
        self._tilt_speed = 0
        self._pan_speed = 0
        self.episode_reset()

    def episode_reset(self):
        """Zero the tilt joints and set a random symmetric vergence for a
        fixation distance drawn uniformly in [min_distance, max_distance]."""
        self.reset_speed()
        self.tilt_right.set_joint_position(0)
        self.tilt_left.set_joint_position(0)
        fixation_distance = np.random.uniform(self.min_distance,
                                              self.max_distance)
        random_vergence = rad(to_angle(fixation_distance))
        # Symmetric vergence: eyes converge by equal and opposite pans.
        self.pan_left.set_joint_position(random_vergence / 2)
        self.pan_right.set_joint_position(-random_vergence / 2)

    def reset_speed(self):
        """Reset the integrated tilt/pan speeds to zero."""
        self._tilt_speed = 0
        self._pan_speed = 0

    def _check_pan_limit(self, left, right, joint_limit_type=None):
        """Apply the pan limit policy to candidate positions (radians).

        Raises ValueError on an unrecognised policy (previously this
        silently returned None, which crashed later inside
        ``set_joint_position``)."""
        if joint_limit_type is None:
            joint_limit_type = self.default_joint_limit_type
        if joint_limit_type == "stick":
            # Left eye pans inward in [0, max], right eye in [-max, 0].
            return (np.clip(left, 0, self._pan_max),
                    np.clip(right, -self._pan_max, 0))
        if joint_limit_type == "none":
            return left, right
        raise ValueError(
            "Unknown joint_limit_type: {!r}".format(joint_limit_type))

    def _check_tilt_limit(self, left, right, joint_limit_type=None):
        """Apply the tilt limit policy to candidate positions (radians).

        Raises ValueError on an unrecognised policy (see _check_pan_limit)."""
        if joint_limit_type is None:
            joint_limit_type = self.default_joint_limit_type
        if joint_limit_type == "stick":
            return (np.clip(left, -self._tilt_max, self._tilt_max),
                    np.clip(right, -self._tilt_max, self._tilt_max))
        if joint_limit_type == "none":
            return left, right
        raise ValueError(
            "Unknown joint_limit_type: {!r}".format(joint_limit_type))

    def reset_vergence_position(self, joint_limit_type=None):
        """Collapse the vergence to zero while keeping the mean pan angle."""
        mean = (self.pan_right.get_joint_position() +
                self.pan_left.get_joint_position()) / 2
        left, right = self._check_pan_limit(mean, mean, joint_limit_type)
        self.pan_left.set_joint_position(left)
        self.pan_right.set_joint_position(right)

    def set_vergence_position(self, alpha, joint_limit_type=None):
        """Set the vergence to *alpha* degrees, keeping the mean pan angle."""
        rad_alpha = rad(alpha)
        mean = (self.pan_right.get_joint_position() +
                self.pan_left.get_joint_position()) / 2
        left, right = self._check_pan_limit(mean + rad_alpha / 2,
                                            mean - rad_alpha / 2,
                                            joint_limit_type)
        self.pan_left.set_joint_position(left)
        self.pan_right.set_joint_position(right)

    def set_delta_vergence_position(self, delta, joint_limit_type=None):
        """Change the vergence by *delta* degrees (split across both eyes)."""
        rad_delta = rad(delta)
        left, right = self._check_pan_limit(
            self.pan_left.get_joint_position() + rad_delta / 2,
            self.pan_right.get_joint_position() - rad_delta / 2,
            joint_limit_type)
        self.pan_left.set_joint_position(left)
        self.pan_right.set_joint_position(right)

    def set_pan_position(self, alpha, joint_limit_type=None):
        """Set the mean pan angle to *alpha* degrees, keeping the vergence."""
        rad_alpha = rad(alpha)
        vergence = self.pan_left.get_joint_position(
        ) - self.pan_right.get_joint_position()
        left, right = self._check_pan_limit((vergence / 2) + rad_alpha,
                                            -(vergence / 2) + rad_alpha,
                                            joint_limit_type)
        self.pan_left.set_joint_position(left)
        self.pan_right.set_joint_position(right)

    def set_delta_pan_speed(self, delta, joint_limit_type=None):
        """Accumulate *delta* degrees into the pan speed and apply one step."""
        self._pan_speed += rad(delta)
        left, right = self._check_pan_limit(
            self.pan_left.get_joint_position() + self._pan_speed,
            self.pan_right.get_joint_position() + self._pan_speed,
            joint_limit_type)
        self.pan_left.set_joint_position(left)
        self.pan_right.set_joint_position(right)

    def set_tilt_position(self, alpha, joint_limit_type=None):
        """Set both tilt joints to *alpha* degrees."""
        rad_alpha = rad(alpha)
        left, right = self._check_tilt_limit(rad_alpha, rad_alpha,
                                             joint_limit_type)
        self.tilt_left.set_joint_position(left)
        self.tilt_right.set_joint_position(right)

    def set_delta_tilt_speed(self, delta, joint_limit_type=None):
        """Accumulate *delta* degrees into the tilt speed and apply one step."""
        self._tilt_speed += rad(delta)
        left, right = self._check_tilt_limit(
            self.tilt_left.get_joint_position() -
            self._tilt_speed,  # minus to get natural upward movement
            self.tilt_right.get_joint_position() -
            self._tilt_speed,  # minus to get natural upward movement
            joint_limit_type)
        self.tilt_left.set_joint_position(left)
        self.tilt_right.set_joint_position(right)

    def get_tilt_position(self):
        """Return the tilt angle in degrees (read from the right eye)."""
        return deg(self.tilt_right.get_joint_position())

    def get_pan_position(self):
        """Return the mean pan angle of the two eyes, in degrees."""
        return deg((self.pan_right.get_joint_position() +
                    self.pan_left.get_joint_position()) / 2)

    def get_vergence_position(self):
        """Return the vergence (left pan minus right pan), in degrees."""
        return deg(self.pan_left.get_joint_position() -
                   self.pan_right.get_joint_position())

    def get_position(self):
        """Return (tilt, pan, vergence) in degrees."""
        return self.tilt_position, self.pan_position, self.vergence_position

    def get_vergence_error(self, other_distance):
        """Return the vergence error (degrees) w.r.t. fixating at
        *other_distance*."""
        return self.vergence_position - to_angle(other_distance)

    def get_tilt_speed(self):
        """Return the accumulated tilt speed in degrees per step."""
        return deg(self._tilt_speed)

    def get_pan_speed(self):
        """Return the accumulated pan speed in degrees per step."""
        return deg(self._pan_speed)

    def get_speed(self):
        """Return (tilt_speed, pan_speed) in degrees per step."""
        return self.tilt_speed, self.pan_speed

    def set_action(self, action, joint_limit_type=None):
        """Apply a (tilt-speed delta, pan-speed delta, vergence delta)
        action, all in degrees."""
        self.set_delta_tilt_speed(float(action[0]), joint_limit_type)
        self.set_delta_pan_speed(float(action[1]), joint_limit_type)
        self.set_delta_vergence_position(float(action[2]), joint_limit_type)

    def set_position(self, position, joint_limit_type):
        """Set (tilt, pan, vergence) absolute positions, all in degrees."""
        self.set_tilt_position(position[0], joint_limit_type)
        self.set_pan_position(position[1], joint_limit_type)
        self.set_vergence_position(position[2], joint_limit_type)

    def get_vision(self):
        """Return the (left, right) RGB images from the two cameras."""
        return self.cam_left.capture_rgb(), self.cam_right.capture_rgb()

    tilt_position = property(get_tilt_position)
    pan_position = property(get_pan_position)
    vergence_position = property(get_vergence_position)
    position = property(get_position)
    tilt_speed = property(get_tilt_speed)
    pan_speed = property(get_pan_speed)
    speed = property(get_speed)
コード例 #7
0
class TestJoints(TestCore):

    def setUp(self):
        """Create handles to the three joints exercised by these tests."""
        super().setUp()
        for attr, joint_name in (
                ('prismatic', 'prismatic_joint'),
                ('prismatic_ctr', 'prismatic_joint_control_loop'),
                ('revolute', 'revolute_joint')):
            setattr(self, attr, Joint(joint_name))

    def test_get_joint_type(self):
        # The scene's prismatic joint must report the prismatic type.
        joint_type = self.prismatic.get_joint_type()
        self.assertEqual(joint_type, JointType.PRISMATIC)

    def test_get_set_joint_mode(self):
        # Round-trip: a mode written to the joint must be read back.
        self.prismatic.set_joint_mode(JointMode.IK)
        mode = self.prismatic.get_joint_mode()
        self.assertEqual(mode, JointMode.IK)

    def test_get_set_joint_position(self):
        # Round-trip: a position written to the joint must be read back.
        target = 0.5
        self.prismatic.set_joint_position(target)
        self.assertEqual(self.prismatic.get_joint_position(), target)

    def test_get_set_joint_target_position(self):
        # Round-trip the target, then let the control loop drive the joint.
        self.prismatic_ctr.set_joint_target_position(0.5)
        self.assertEqual(self.prismatic_ctr.get_joint_target_position(), 0.5)
        # Step the physics a few times so the joint reaches the target.
        for _ in range(10):
            self.pyrep.step()
        self.assertAlmostEqual(
            self.prismatic_ctr.get_joint_position(), 0.5, delta=0.01)

    def test_get_set_joint_target_velocity(self):
        # Round-trip the target velocity, then let the motor drive the joint.
        self.prismatic.set_joint_target_velocity(5.0)
        self.assertEqual(self.prismatic.get_joint_target_velocity(), 5.0)
        for _ in range(10):
            self.pyrep.step()
        # NOTE(review): 0.5 is presumably the joint's travel limit, which
        # the fast-moving joint saturates against — confirm in the scene.
        self.assertAlmostEqual(
            self.prismatic.get_joint_position(), 0.5, delta=0.01)

    def test_get_set_joint_force(self):
        # Let the simulation settle first.
        for _ in range(10):
            self.pyrep.step()
        # Set a really high velocity (torque control)
        self.prismatic.set_joint_target_velocity(-99999)
        self.prismatic.set_joint_force(0.6)
        self.pyrep.step()
        measured = self.prismatic.get_joint_force()
        self.assertAlmostEqual(measured, 0.6, delta=0.01)

    def test_get_velocity(self):
        # After one step towards a positive target velocity, the measured
        # joint velocity should be clearly positive.
        self.prismatic.set_joint_target_velocity(0.5)
        self.pyrep.step()
        measured = self.prismatic.get_joint_velocity()
        self.assertGreater(measured, 0.1)

    def test_get_set_joint_interval(self):
        # Round-trip: a non-cyclic interval written to the revolute joint
        # must be read back unchanged.
        self.revolute.set_joint_interval(False, [-2.0, 2.0])
        cyclic, interval = self.revolute.get_joint_interval()
        self.assertFalse(cyclic)
        self.assertEqual(interval, [-2.0, 2.0])

    def test_get_joint_upper_velocity_limit(self):
        # The scene's prismatic joint is configured with a 10.0 limit.
        self.assertEqual(
            self.prismatic.get_joint_upper_velocity_limit(), 10.0)

    def test_get_set_control_loop_enabled(self):
        # Starts disabled; enabling it must be observable via the getter.
        self.assertFalse(self.prismatic.is_control_loop_enabled())
        self.prismatic.set_control_loop_enabled(True)
        self.assertTrue(self.prismatic.is_control_loop_enabled())

    def test_get_set_motor_enabled(self):
        # Starts enabled; disabling it must be observable via the getter.
        self.assertTrue(self.prismatic.is_motor_enabled())
        self.prismatic.set_motor_enabled(False)
        self.assertFalse(self.prismatic.is_motor_enabled())

    def test_get_set_motor_locked_at_zero_velocity(self):
        # Starts unlocked; locking it must be observable via the getter.
        self.assertFalse(self.prismatic.is_motor_locked_at_zero_velocity())
        self.prismatic.set_motor_locked_at_zero_velocity(True)
        self.assertTrue(self.prismatic.is_motor_locked_at_zero_velocity())