Exemplo n.º 1
0
    def _rotate_gripper_z(self, target_angle, num_step=None):
        """Rotate the UR5 target dummy about the world z-axis to `target_angle`.

        Args:
            target_angle: desired z Euler angle in radians; values just past
                +/-pi are wrapped by 2*pi first.
            num_step: optional fixed number of interpolation steps; when
                None, roughly one step per 5 degrees of rotation is used.
        """
        _, current_angle = vrep.simxGetObjectOrientation(
            self.sim_client, self.UR5_target_handle, -1,
            vrep.simx_opmode_blocking)
        # Wrap the target into roughly (-pi, pi]; the 0.2 rad slack leaves
        # targets alone unless they are clearly past the boundary.
        if target_angle > np.pi + 0.2:
            target_angle -= 2 * np.pi
        if target_angle < -np.pi - 0.2:
            target_angle += 2 * np.pi
        ori_direction = np.asarray([0., 0., target_angle - current_angle[2]])
        if num_step is not None:
            num_move_step = num_step
        else:
            # One interpolation step per 5 degrees of remaining rotation.
            diff = abs(np.rad2deg(ori_direction[2]))
            num_move_step = int((diff // 5) + 1)

        ori_move_step = ori_direction / num_move_step
        for step_iter in range(num_move_step):
            vrep.simxSetObjectOrientation(
                self.sim_client, self.UR5_target_handle, -1,
                (current_angle[0] + ori_move_step[0], current_angle[1] +
                 ori_move_step[1], current_angle[2] + ori_move_step[2]),
                vrep.simx_opmode_blocking)
            _, current_angle = vrep.simxGetObjectOrientation(
                self.sim_client, self.UR5_target_handle, -1,
                vrep.simx_opmode_blocking)
        # Snap exactly onto the requested angle to remove any residual
        # interpolation error from the incremental steps above.
        vrep.simxSetObjectOrientation(
            self.sim_client, self.UR5_target_handle, -1,
            (current_angle[0], current_angle[1], target_angle),
            vrep.simx_opmode_blocking)
        time.sleep(0.1)
Exemplo n.º 2
0
    def setup_sim_camera(self):
        """Cache the orthographic vision sensor's handle, world pose,
        intrinsics, depth scale, and a background image pair."""
        # Handle of the orthographic vision sensor in the scene
        _, self.cam_handle = vrep.simxGetObjectHandle(
            self.sim_client, 'Vision_sensor_ortho', vrep.simx_opmode_blocking)

        # Camera extrinsics as reported by the simulator
        _, position = vrep.simxGetObjectPosition(
            self.sim_client, self.cam_handle, -1, vrep.simx_opmode_blocking)
        _, orientation = vrep.simxGetObjectOrientation(
            self.sim_client, self.cam_handle, -1, vrep.simx_opmode_blocking)

        # Build the 4x4 rigid transform for the camera pose: a translation
        # part and a rotation part (inverse rotation of the negated Euler
        # angles), composed as translation @ rotation.
        translation = np.eye(4, 4)
        translation[0:3, 3] = np.asarray(position)
        rotation = np.eye(4, 4)
        negated_angles = [-angle for angle in orientation]
        rotation[0:3, 0:3] = np.linalg.inv(utils.euler2rotm(negated_angles))
        self.cam_pose = np.dot(translation, rotation)

        # Fixed pinhole intrinsics and depth scale
        self.cam_intrinsics = np.asarray([[618.62, 0, 320], [0, 618.62, 240],
                                          [0, 0, 1]])
        self.cam_depth_scale = 1

        # Capture a background color/depth image pair
        self.bg_color_img, self.bg_depth_img = self.get_camera_data()
        self.bg_depth_img = self.bg_depth_img * self.cam_depth_scale
Exemplo n.º 3
0
 def setup_sim_camera(self, resolution_x=1024., resolution_y=1024.):
     """Cache the perspective vision sensor's handle, world pose,
     intrinsics, and depth scale.

     Args:
         resolution_x: image width in pixels.
         resolution_y: image height in pixels.
     """
     # Pinhole intrinsics from the sensor's perspective (field-of-view)
     # angle: focal length f = resolution / (2 * tan(fov / 2)), with the
     # principal point at the image center.
     perspectiveAngle = np.deg2rad(54.70)
     self.cam_intrinsics = np.asarray(
         [[
             resolution_x / (2 * np.tan(perspectiveAngle / 2)), 0,
             resolution_x / 2
         ],
          [
              0, resolution_y / (2 * np.tan(perspectiveAngle / 2)),
              resolution_y / 2
          ], [0, 0, 1]])
     _, self.cam_handle = vrep.simxGetObjectHandle(
         self.sim_client, 'Vision_sensor_persp_0',
         vrep.simx_opmode_blocking)
     # Get camera pose and intrinsics in simulation
     _, cam_position = vrep.simxGetObjectPosition(self.sim_client,
                                                  self.cam_handle, -1,
                                                  vrep.simx_opmode_blocking)
     _, cam_orientation = vrep.simxGetObjectOrientation(
         self.sim_client, self.cam_handle, -1, vrep.simx_opmode_blocking)
     # Homogeneous translation part of the camera pose
     cam_trans = np.eye(4, 4)
     cam_trans[0:3, 3] = np.asarray(cam_position)
     cam_orientation = [
         cam_orientation[0], cam_orientation[1], cam_orientation[2]
     ]
     # Homogeneous rotation part built from the Euler angles
     cam_rotm = np.eye(4, 4)
     cam_rotm[0:3, 0:3] = utils.euler2rotm(cam_orientation)
     self.cam_pose = np.dot(
         cam_trans, cam_rotm
     )  # Compute rigid transformation representing camera pose
     self.cam_depth_scale = 1
Exemplo n.º 4
0
	def rotate_object(self, axis, heightmap_rotation_angle, workspace_limits):
		"""Incrementally rotate the UR5 target dummy about one Euler axis.

		Args:
			axis: Euler-angle index to rotate (0, 1, or 2).
			heightmap_rotation_angle: target angle in degrees.
			workspace_limits: unused; kept for interface compatibility.
		"""
		# np.float was removed in NumPy 1.24; the builtin float() is equivalent.
		tool_rotation_angle = math.radians(float(heightmap_rotation_angle))

		# Compute gripper orientation and rotation increments
		sim_ret, gripper_orientation = vrep.simxGetObjectOrientation(self.sim_client, self.UR5_target_handle, -1,
																		vrep.simx_opmode_blocking)
		rotation_step = 0.15 if (tool_rotation_angle - gripper_orientation[axis] > 0) else -0.15
		num_rotation_steps = int(np.floor((tool_rotation_angle - gripper_orientation[axis]) / rotation_step))

		# Step only the chosen axis while holding the other two fixed; one
		# code path replaces the previous three duplicated per-axis branches
		# (and drops the no-op min(step_iter, num_rotation_steps), which
		# always equals step_iter inside the range).
		for step_iter in range(num_rotation_steps):
			target_orientation = list(gripper_orientation)
			target_orientation[axis] = gripper_orientation[axis] + rotation_step * step_iter
			vrep.simxSetObjectOrientation(self.sim_client, self.UR5_target_handle, -1,
											tuple(target_orientation), vrep.simx_opmode_blocking)
Exemplo n.º 5
0
	def get_ur5_orientation(self):
		"""Return the UR5 target dummy's Euler orientation converted to degrees."""
		_, gripper_orientation = vrep.simxGetObjectOrientation(self.sim_client, self.UR5_target_handle, -1,
																		vrep.simx_opmode_blocking)
		# Convert each of the three Euler angles from radians in place.
		for axis in range(3):
			gripper_orientation[axis] = math.degrees(gripper_orientation[axis])
		return gripper_orientation
Exemplo n.º 6
0
 def get_single_obj_orientations(self, object_handle):
     """Return one object's Euler angles normalized to [0, 2*pi).

     Args:
         object_handle: simulator handle of the object to query.

     Returns:
         np.ndarray of three Euler angles with negative values shifted
         up by 2*pi.
     """
     _, obj_orientation = vrep.simxGetObjectOrientation(
         self.sim_client, object_handle, -1, vrep.simx_opmode_blocking)
     # Comprehension replaces the manual append loop; each angle in
     # (-pi, pi] maps onto [0, 2*pi).
     return np.asarray([
         angle if angle >= 0 else angle + 2 * np.pi
         for angle in obj_orientation
     ])
Exemplo n.º 7
0
 def getangle_360(self):
     """Return the UR3 target's angle in degrees, derived from its y Euler
     angle offset by 90 deg and sign-flipped by the x Euler angle."""
     sim_ret, UR3_target_orientation = vrep.simxGetObjectOrientation(
         self.clientID, self.UR3_target_handle, -1,
         vrep.simx_opmode_blocking)
     offset_angle = UR3_target_orientation[1] + math.pi / 2
     # The sign of the x Euler angle decides which half-turn we are on.
     if UR3_target_orientation[0] < 0:
         offset_angle = -offset_angle
     return offset_angle / math.pi * 180
Exemplo n.º 8
0
    def get_obj_positions_and_orientations(self):
        """Query the simulator for every tracked object's world-frame pose.

        Returns:
            Tuple (positions, orientations): two parallel lists indexed
            like self.object_handles.
        """
        positions, orientations = [], []
        for handle in self.object_handles:
            _, pos = vrep.simxGetObjectPosition(
                self.sim_client, handle, -1, vrep.simx_opmode_blocking)
            _, ori = vrep.simxGetObjectOrientation(
                self.sim_client, handle, -1, vrep.simx_opmode_blocking)
            positions.append(pos)
            orientations.append(ori)
        return positions, orientations
Exemplo n.º 9
0
    def _setup_one_vision_sensor(self, sensor_name):
        # Fetch one vision sensor's handle, world position/orientation and
        # the pose matrix built from them.
        sim_ret, handle = vrep.simxGetObjectHandle(
            self.clientID, sensor_name, vrep.simx_opmode_blocking)
        ret, position = vrep.simxGetObjectPosition(
            self.clientID, handle, -1, vrep.simx_opmode_blocking)
        ret, orientation = vrep.simxGetObjectOrientation(
            self.clientID, handle, -1, vrep.simx_opmode_blocking)
        pose_mat = utils.Creat_posemat(orientation, position)
        return handle, position, orientation, pose_mat

    def setVisionSensor(self):
        """Cache handles, poses, pose matrices and intrinsics for the
        left/right vision sensor pair.

        The previously duplicated left/right setup is factored into
        _setup_one_vision_sensor.
        """
        (self.VS_left_handle, self.VS_left_position,
         self.VS_left_orientation,
         self.leftmat) = self._setup_one_vision_sensor('Vision_sensor_left')
        # Intrinsic parameters (pinhole model)
        self.left_cam_intrinsics = np.asarray([[561.82, 0, 320],
                                               [0, 561.82, 240], [0, 0, 1]])

        (self.VS_right_handle, self.VS_right_position,
         self.VS_right_orientation,
         self.rightmat) = self._setup_one_vision_sensor('Vision_sensor_right')
        # Both cameras share the same intrinsics; copy to avoid aliasing.
        self.right_cam_intrinsics = self.left_cam_intrinsics.copy()
    def get_obj_masks(self):
        """Transform every tracked object's mesh into the world frame and
        return the xy coordinates of its vertices.

        Returns:
            List of (num_vertices, 2) arrays, one per object in
            self.obj_mesh_ind.
        """
        obj_contours = []
        obj_number = len(self.obj_mesh_ind)
        for object_idx in range(obj_number):
            # Object pose (position + Euler orientation) from the simulator
            sim_ret, obj_position = vrep.simxGetObjectPosition(
                self.sim_client, self.object_handles[object_idx], -1,
                vrep.simx_opmode_blocking)
            sim_ret, obj_orientation = vrep.simxGetObjectOrientation(
                self.sim_client, self.object_handles[object_idx], -1,
                vrep.simx_opmode_blocking)
            # 4x4 rigid transform of the object in the world frame
            obj_trans = np.eye(4, 4)
            obj_trans[0:3, 3] = np.asarray(obj_position)
            obj_rotm = np.eye(4, 4)
            obj_rotm[0:3, 0:3] = utils.obj_euler2rotm(obj_orientation)
            obj_pose = np.dot(obj_trans, obj_rotm)

            # Load the object's .obj mesh
            obj_mesh_file = os.path.join(
                self.obj_mesh_dir,
                self.mesh_list[self.obj_mesh_ind[object_idx]])
            mesh = trimesh.load_mesh(obj_mesh_file)

            # os.path.basename is portable, unlike splitting on '/'.
            if os.path.basename(obj_mesh_file) not in ('2.obj', '6.obj'):
                # All meshes except 2.obj/6.obj get a fixed axis
                # permutation before the world pose is applied.
                mesh.apply_transform(
                    np.array([[0, 0, 1, 0], [0, -1, 0, 0], [1, 0, 0, 0],
                              [0, 0, 0, 1]]))
            mesh.apply_transform(obj_pose)

            obj_contours.append(mesh.vertices[:, 0:2])
        return obj_contours
    def get_test_obj_masks(self):
        """Transform every test object's mesh into the world frame and
        return the xy coordinates of its vertices.

        Returns:
            List of (num_vertices, 2) arrays, one per file in
            self.test_obj_mesh_files.
        """
        obj_contours = []
        obj_number = len(self.test_obj_mesh_files)
        for object_idx in range(obj_number):
            # Object pose (position + Euler orientation) from the simulator
            sim_ret, obj_position = vrep.simxGetObjectPosition(
                self.sim_client, self.object_handles[object_idx], -1,
                vrep.simx_opmode_blocking)
            sim_ret, obj_orientation = vrep.simxGetObjectOrientation(
                self.sim_client, self.object_handles[object_idx], -1,
                vrep.simx_opmode_blocking)
            # 4x4 rigid transform of the object in the world frame
            obj_trans = np.eye(4, 4)
            obj_trans[0:3, 3] = np.asarray(obj_position)
            obj_rotm = np.eye(4, 4)
            obj_rotm[0:3, 0:3] = utils.obj_euler2rotm(obj_orientation)
            obj_pose = np.dot(obj_trans, obj_rotm)

            # Load the test object's .obj mesh
            obj_mesh_file = self.test_obj_mesh_files[object_idx]
            mesh = trimesh.load_mesh(obj_mesh_file)

            # os.path.basename is portable, unlike splitting on '/'.
            if os.path.basename(obj_mesh_file) not in ('2.obj', '6.obj'):
                # All meshes except 2.obj/6.obj get a fixed axis
                # permutation before the world pose is applied.
                mesh.apply_transform(
                    np.array([[0, 0, 1, 0], [0, -1, 0, 0], [1, 0, 0, 0],
                              [0, 0, 0, 1]]))
            mesh.apply_transform(obj_pose)

            obj_contours.append(mesh.vertices[:, 0:2])
        return obj_contours
    def get_obj_mask(self, obj_ind):
        """Transform one object's mesh into the world frame and return the
        xy coordinates of its vertices.

        Args:
            obj_ind: index into self.object_handles / self.obj_mesh_ind.

        Returns:
            (num_vertices, 2) array of world-frame xy vertex coordinates.
        """
        # Object pose (position + Euler orientation) from the simulator
        sim_ret, obj_position = vrep.simxGetObjectPosition(
            self.sim_client, self.object_handles[obj_ind], -1,
            vrep.simx_opmode_blocking)
        sim_ret, obj_orientation = vrep.simxGetObjectOrientation(
            self.sim_client, self.object_handles[obj_ind], -1,
            vrep.simx_opmode_blocking)
        # 4x4 rigid transform of the object in the world frame
        obj_trans = np.eye(4, 4)
        obj_trans[0:3, 3] = np.asarray(obj_position)
        obj_rotm = np.eye(4, 4)
        obj_rotm[0:3, 0:3] = utils.obj_euler2rotm(obj_orientation)
        obj_pose = np.dot(obj_trans, obj_rotm)

        # Load the object's .obj mesh
        obj_mesh_file = os.path.join(
            self.obj_mesh_dir, self.mesh_list[self.obj_mesh_ind[obj_ind]])
        mesh = trimesh.load_mesh(obj_mesh_file)

        # Transform the mesh to the world frame; os.path.basename is
        # portable, unlike splitting on '/'.
        if os.path.basename(obj_mesh_file) not in ('2.obj', '6.obj'):
            # All meshes except 2.obj/6.obj get a fixed axis permutation
            # before the world pose is applied.
            mesh.apply_transform(
                np.array([[0, 0, 1, 0], [0, -1, 0, 0], [1, 0, 0, 0],
                          [0, 0, 0, 1]]))
        mesh.apply_transform(obj_pose)

        obj_contour = mesh.vertices[:, 0:2]

        return obj_contour
Exemplo n.º 13
0
    def push(self, position, heightmap_rotation_angle, workspace_limits):
        """Execute a push primitive: move above the push point, descend
        with the gripper closed, push horizontally, then retract.

        Args:
            position: [x, y, z] world-frame pushing point; z is adjusted
                to the fingertip (mutates the passed-in sequence).
            heightmap_rotation_angle: rotation in radians taken from the
                heightmap; sets both the tool yaw and the push direction.
            workspace_limits: 3x2 (min, max) bounds used to clip the push
                target to the workspace.
        """
        print('Executing: push at (%f, %f, %f)' %
              (position[0], position[1], position[2]))

        # Compute tool orientation from heightmap rotation angle
        tool_rotation_angle = (heightmap_rotation_angle % np.pi) - np.pi / 2

        # Adjust pushing point to be on tip of finger
        position[2] = position[2] + 0.015

        # Compute pushing direction: the unit x vector rotated by the
        # heightmap angle.
        push_orientation = [1.0, 0.0]
        push_direction = np.asarray([
            push_orientation[0] * np.cos(heightmap_rotation_angle) -
            push_orientation[1] * np.sin(heightmap_rotation_angle),
            push_orientation[0] * np.sin(heightmap_rotation_angle) +
            push_orientation[1] * np.cos(heightmap_rotation_angle)
        ])

        # Move gripper to location above pushing point
        pushing_point_margin = 0.1
        location_above_pushing_point = (position[0], position[1],
                                        position[2] + pushing_point_margin)

        # Compute gripper position and linear movement increments
        tool_position = location_above_pushing_point
        sim_ret, UR5_target_position = vrep.simxGetObjectPosition(
            self.sim_client, self.UR5_target_handle, -1,
            vrep.simx_opmode_blocking)
        move_direction = np.asarray([
            tool_position[0] - UR5_target_position[0],
            tool_position[1] - UR5_target_position[1],
            tool_position[2] - UR5_target_position[2]
        ])
        move_magnitude = np.linalg.norm(move_direction)
        # NOTE: the 1e-8 is added to the step components (not the
        # magnitude), which keeps move_step[0] non-zero so the step-count
        # division below cannot hit 0/0 when there is no x motion.
        move_step = 0.05 * move_direction / move_magnitude + 1e-8
        num_move_steps = int(np.floor(move_direction[0] / move_step[0]))

        # Compute gripper orientation and rotation increments
        sim_ret, gripper_orientation = vrep.simxGetObjectOrientation(
            self.sim_client, self.UR5_target_handle, -1,
            vrep.simx_opmode_blocking)
        rotation_step = 0.3 if (
            tool_rotation_angle - gripper_orientation[1] > 0) else -0.3
        num_rotation_steps = int(
            np.floor((tool_rotation_angle - gripper_orientation[1]) /
                     rotation_step))

        # Simultaneously move and rotate gripper; each quantity stops
        # stepping once its own step count is exhausted (min(...)).
        for step_iter in range(max(num_move_steps, num_rotation_steps)):
            vrep.simxSetObjectPosition(
                self.sim_client, self.UR5_target_handle, -1,
                (UR5_target_position[0] +
                 move_step[0] * min(step_iter, num_move_steps),
                 UR5_target_position[1] +
                 move_step[1] * min(step_iter, num_move_steps),
                 UR5_target_position[2] +
                 move_step[2] * min(step_iter, num_move_steps)),
                vrep.simx_opmode_blocking)
            vrep.simxSetObjectOrientation(
                self.sim_client, self.UR5_target_handle, -1,
                (np.pi / 2, gripper_orientation[1] +
                 rotation_step * min(step_iter, num_rotation_steps),
                 np.pi / 2), vrep.simx_opmode_blocking)
        # Snap exactly onto the final position and orientation.
        vrep.simxSetObjectPosition(
            self.sim_client, self.UR5_target_handle, -1,
            (tool_position[0], tool_position[1], tool_position[2]),
            vrep.simx_opmode_blocking)
        vrep.simxSetObjectOrientation(
            self.sim_client, self.UR5_target_handle, -1,
            (np.pi / 2, tool_rotation_angle, np.pi / 2),
            vrep.simx_opmode_blocking)

        # Ensure gripper is closed
        self.close_gripper()

        # Approach pushing point
        self.move_to(position, None)

        # Compute target location (push to the right), clipped to the
        # workspace; push_length is then recomputed from the clipped target.
        push_length = 0.1
        target_x = min(
            max(position[0] + push_direction[0] * push_length,
                workspace_limits[0][0]), workspace_limits[0][1])
        target_y = min(
            max(position[1] + push_direction[1] * push_length,
                workspace_limits[1][0]), workspace_limits[1][1])
        push_length = np.sqrt(
            np.power(target_x - position[0], 2) +
            np.power(target_y - position[1], 2))

        # Move in pushing direction towards target location
        self.move_to([target_x, target_y, position[2]], None)

        # Move gripper to location above grasp target
        self.move_to([target_x, target_y, location_above_pushing_point[2]],
                     None)
Exemplo n.º 14
0
    def grasp(self, position, heightmap_rotation_angle, workspace_limits):
        """Execute a grasp primitive: move above the target, descend with
        the gripper open, close, lift, and relocate the grasped object.

        Args:
            position: [x, y, z] world-frame grasp point.
            heightmap_rotation_angle: rotation in radians taken from the
                heightmap; sets the tool yaw.
            workspace_limits: 3x2 (min, max) bounds; the z minimum keeps
                the gripper off the floor.

        Returns:
            Name of the grasped object (removed from the tracked object
            lists), or None if the grasp failed.
        """
        print('Executing: grasp at (%f, %f, %f)' %
              (position[0], position[1], position[2]))

        # Compute tool orientation from heightmap rotation angle
        tool_rotation_angle = (heightmap_rotation_angle % np.pi) - np.pi / 2

        # Avoid collision with floor (copy so the caller's list/array is
        # not mutated)
        position = np.asarray(position).copy()
        position[2] = max(position[2] - 0.04, workspace_limits[2][0] + 0.02)

        # Move gripper to location above grasp target
        grasp_location_margin = 0.15
        # sim_ret, UR5_target_handle = vrep.simxGetObjectHandle(self.sim_client,'UR5_target',vrep.simx_opmode_blocking)
        location_above_grasp_target = (position[0], position[1],
                                       position[2] + grasp_location_margin)

        # Compute gripper position and linear movement increments
        tool_position = location_above_grasp_target
        sim_ret, UR5_target_position = vrep.simxGetObjectPosition(
            self.sim_client, self.UR5_target_handle, -1,
            vrep.simx_opmode_blocking)
        move_direction = np.asarray([
            tool_position[0] - UR5_target_position[0],
            tool_position[1] - UR5_target_position[1],
            tool_position[2] - UR5_target_position[2]
        ])
        move_magnitude = np.linalg.norm(move_direction)
        # NOTE: the 1e-8 is added to the step components (not the
        # magnitude), which keeps move_step[0] non-zero so the step-count
        # division below cannot hit 0/0 when there is no x motion.
        move_step = 0.05 * move_direction / move_magnitude + 1e-8
        num_move_steps = int(np.floor(move_direction[0] / move_step[0]))

        # Compute gripper orientation and rotation increments
        sim_ret, gripper_orientation = vrep.simxGetObjectOrientation(
            self.sim_client, self.UR5_target_handle, -1,
            vrep.simx_opmode_blocking)
        rotation_step = 0.3 if (
            tool_rotation_angle - gripper_orientation[1] > 0) else -0.3
        num_rotation_steps = int(
            np.floor((tool_rotation_angle - gripper_orientation[1]) /
                     rotation_step))

        # Simultaneously move and rotate gripper; each quantity stops
        # stepping once its own step count is exhausted (min(...)).
        for step_iter in range(max(num_move_steps, num_rotation_steps)):
            vrep.simxSetObjectPosition(
                self.sim_client, self.UR5_target_handle, -1,
                (UR5_target_position[0] +
                 move_step[0] * min(step_iter, num_move_steps),
                 UR5_target_position[1] +
                 move_step[1] * min(step_iter, num_move_steps),
                 UR5_target_position[2] +
                 move_step[2] * min(step_iter, num_move_steps)),
                vrep.simx_opmode_blocking)
            vrep.simxSetObjectOrientation(
                self.sim_client, self.UR5_target_handle, -1,
                (np.pi / 2, gripper_orientation[1] +
                 rotation_step * min(step_iter, num_rotation_steps),
                 np.pi / 2), vrep.simx_opmode_blocking)
        # Snap exactly onto the final position and orientation.
        vrep.simxSetObjectPosition(
            self.sim_client, self.UR5_target_handle, -1,
            (tool_position[0], tool_position[1], tool_position[2]),
            vrep.simx_opmode_blocking)
        vrep.simxSetObjectOrientation(
            self.sim_client, self.UR5_target_handle, -1,
            (np.pi / 2, tool_rotation_angle, np.pi / 2),
            vrep.simx_opmode_blocking)

        # Ensure gripper is open
        self.open_gripper()

        # Approach grasp target
        self.move_to(position, None)

        # Close gripper to grasp target
        gripper_full_closed = self.close_gripper()

        # Move gripper to location above grasp target
        self.move_to(location_above_grasp_target, None)

        # Check if grasp is successful: if the gripper closed fully, it is
        # holding nothing.
        gripper_full_closed = self.close_gripper()
        grasp_success = not gripper_full_closed

        # Move the grasped object elsewhere; the grasped object is assumed
        # to be the highest one (argmax over z) since it was just lifted.
        grasped_object_name = None
        if grasp_success:
            object_positions = np.asarray(self.get_obj_positions())
            object_positions = object_positions[:, 2]
            grasped_object_ind = np.argmax(object_positions)
            grasped_object_handle = self.object_handles[grasped_object_ind]
            grasped_object_name = self.object_names[grasped_object_ind]
            del self.object_handles[grasped_object_ind]
            del self.object_names[grasped_object_ind]
            vrep.simxSetObjectPosition(
                self.sim_client, grasped_object_handle, -1,
                (-1, -1 + 0.05 * float(grasped_object_ind), 0.1),
                vrep.simx_opmode_blocking)
        return grasped_object_name
    def grasp(self, position, heightmap_rotation_angle, workspace_limits):
        """Execute a grasp primitive and capture pre-grasp imagery.

        Args:
            position: [x, y, z] world-frame grasp point.
            heightmap_rotation_angle: rotation in radians taken from the
                heightmap; sets the tool yaw.
            workspace_limits: 3x2 (min, max) bounds; z minimum keeps the
                gripper off the floor, and the bounds feed the heightmap.

        Returns:
            On success: (True, color_img, depth_img, color_heightmap,
            valid_depth_heightmap, grasped_object_ind).
            On failure: (False, None, None, None, None, None).
        """
        print('Executing: grasp at (%f, %f, %f)' %
              (position[0], position[1], position[2]))
        # Compute tool orientation from heightmap rotation angle
        tool_rotation_angle = (heightmap_rotation_angle % np.pi) - np.pi / 2

        # Avoid collision with floor (copy so the caller's list/array is
        # not mutated)
        position = np.asarray(position).copy()
        position[2] = max(position[2] - 0.04, workspace_limits[2][0] + 0.02)

        # Move gripper to location above grasp target
        grasp_location_margin = 0.15
        # sim_ret, UR5_target_handle = vrep.simxGetObjectHandle(self.sim_client,'UR5_target',vrep.simx_opmode_blocking)
        location_above_grasp_target = (position[0], position[1],
                                       position[2] + grasp_location_margin)

        # Compute gripper position and linear movement increments
        tool_position = location_above_grasp_target
        sim_ret, UR5_target_position = vrep.simxGetObjectPosition(
            self.sim_client, self.UR5_target_handle, -1,
            vrep.simx_opmode_blocking)
        move_direction = np.asarray([
            tool_position[0] - UR5_target_position[0],
            tool_position[1] - UR5_target_position[1],
            tool_position[2] - UR5_target_position[2]
        ])
        move_magnitude = np.linalg.norm(move_direction)
        # NOTE(review): unlike the sibling push/grasp implementations there
        # is no + 1e-8 here, so move_step[0] can be exactly zero and the
        # division below raises/returns nan when there is no x motion —
        # confirm whether the epsilon was dropped intentionally.
        move_step = 0.05 * move_direction / move_magnitude
        num_move_steps = int(np.floor(move_direction[0] / move_step[0]))

        # Compute gripper orientation and rotation increments
        sim_ret, gripper_orientation = vrep.simxGetObjectOrientation(
            self.sim_client, self.UR5_target_handle, -1,
            vrep.simx_opmode_blocking)
        rotation_step = 0.3 if (
            tool_rotation_angle - gripper_orientation[1] > 0) else -0.3
        num_rotation_steps = int(
            np.floor((tool_rotation_angle - gripper_orientation[1]) /
                     rotation_step))

        # Simultaneously move and rotate gripper; each quantity stops
        # stepping once its own step count is exhausted (min(...)).
        for step_iter in range(max(num_move_steps, num_rotation_steps)):
            vrep.simxSetObjectPosition(
                self.sim_client, self.UR5_target_handle, -1,
                (UR5_target_position[0] +
                 move_step[0] * min(step_iter, num_move_steps),
                 UR5_target_position[1] +
                 move_step[1] * min(step_iter, num_move_steps),
                 UR5_target_position[2] +
                 move_step[2] * min(step_iter, num_move_steps)),
                vrep.simx_opmode_blocking)
            vrep.simxSetObjectOrientation(
                self.sim_client, self.UR5_target_handle, -1,
                (np.pi / 2, gripper_orientation[1] +
                 rotation_step * min(step_iter, num_rotation_steps),
                 np.pi / 2), vrep.simx_opmode_blocking)
        # Snap exactly onto the final position and orientation.
        vrep.simxSetObjectPosition(
            self.sim_client, self.UR5_target_handle, -1,
            (tool_position[0], tool_position[1], tool_position[2]),
            vrep.simx_opmode_blocking)
        vrep.simxSetObjectOrientation(
            self.sim_client, self.UR5_target_handle, -1,
            (np.pi / 2, tool_rotation_angle, np.pi / 2),
            vrep.simx_opmode_blocking)

        # Ensure gripper is open
        self.open_gripper()

        # Approach grasp target
        self.move_to(position, None)

        # Get images before grasping
        color_img, depth_img = self.get_camera_data()
        depth_img = depth_img * self.cam_depth_scale  # Apply depth scale from calibration

        # Get heightmaps beforew grasping
        color_heightmap, depth_heightmap = utils.get_heightmap(
            color_img, depth_img, self.cam_intrinsics, self.cam_pose,
            workspace_limits, 0.002)  # heightmap resolution from args
        valid_depth_heightmap = depth_heightmap.copy()
        valid_depth_heightmap[np.isnan(valid_depth_heightmap)] = 0

        # Close gripper to grasp target
        gripper_full_closed = self.close_gripper()

        # Move gripper to location above grasp target
        self.move_to(location_above_grasp_target, None)

        # Check if grasp is successful: if the gripper closed fully, it is
        # holding nothing.
        gripper_full_closed = self.close_gripper()
        grasp_success = not gripper_full_closed

        # Move the grasped object elsewhere; the grasped object is assumed
        # to be the highest one (argmax over z) since it was just lifted.
        if grasp_success:
            object_positions = np.asarray(self.get_obj_positions())
            object_positions = object_positions[:, 2]
            grasped_object_ind = np.argmax(object_positions)
            print('grasp obj z position', max(object_positions))
            grasped_object_handle = self.object_handles[grasped_object_ind]
            vrep.simxSetObjectPosition(
                self.sim_client, grasped_object_handle, -1,
                (-0.5, 0.5 + 0.05 * float(grasped_object_ind), 0.1),
                vrep.simx_opmode_blocking)

            return grasp_success, color_img, depth_img, color_heightmap, valid_depth_heightmap, grasped_object_ind
        else:
            return grasp_success, None, None, None, None, None
Exemplo n.º 16
0
def getObjectOrientation(sim_client, obj_handle):
    """Return (sim_ret, orientation) for obj_handle relative to the world
    frame, with the Euler angles converted to a NumPy array."""
    sim_ret, orientation = vrep.simxGetObjectOrientation(
        sim_client, obj_handle, -1, VREP_BLOCKING)
    orientation = np.asarray(orientation)
    return sim_ret, orientation