Example #1
        def setup_sim_camera(self):

            # Get handle to camera
            sim_ret, self.cam_handle = client.simxGetObjectHandle(
                'vision_sensor', client.simxServiceCall())

            # Get camera pose and intrinsics in simulation
            sim_ret, cam_position = client.simxGetObjectPosition(
                self.cam_handle, -1, client.simxServiceCall())
            sim_ret, cam_orientation = client.simxGetObjectOrientation(
                self.cam_handle, -1, client.simxServiceCall())
            cam_trans = np.eye(4, 4)
            cam_trans[0:3, 3] = np.asarray(cam_position)
            cam_orientation = [
                -cam_orientation[0], -cam_orientation[1], -cam_orientation[2]
            ]
            cam_rotm = np.eye(4, 4)
            cam_rotm[0:3, 0:3] = np.linalg.inv(euler2rotm(cam_orientation))
            self.cam_pose = np.dot(
                cam_trans, cam_rotm
            )  # Compute rigid transformation representing camera pose
            self.cam_intrinsics = np.asarray([[618.62, 0, 320],
                                              [0, 618.62, 240], [0, 0, 1]])
            self.cam_depth_scale = 1

            # Get background image
            self.bg_color_img, self.bg_depth_img = self.get_camera_data()
            self.bg_depth_img = self.bg_depth_img * self.cam_depth_scale
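All of these examples rely on a euler2rotm helper for converting Euler angles to a rotation matrix. Below is a minimal sketch of such a helper, assuming XYZ (roll-pitch-yaw) angles composed as Rz @ Ry @ Rx; the convention is assumed from common practice, not taken from the original utility.

    import numpy as np

    def euler2rotm(theta):
        # Sketch: 3x3 rotation matrix from XYZ Euler angles (radians),
        # composed as R = Rz @ Ry @ Rx. Convention assumed, may differ from the source.
        cx, sx = np.cos(theta[0]), np.sin(theta[0])
        cy, sy = np.cos(theta[1]), np.sin(theta[1])
        cz, sz = np.cos(theta[2]), np.sin(theta[2])
        R_x = np.array([[1, 0, 0], [0, cx, -sx], [0, sx, cx]])
        R_y = np.array([[cy, 0, sy], [0, 1, 0], [-sy, 0, cy]])
        R_z = np.array([[cz, -sz, 0], [sz, cz, 0], [0, 0, 1]])
        return R_z @ R_y @ R_x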
Example #2
    def push(self, position, heightmap_rotation_angle, workspace_limits):
        print('Executing: push at (%f, %f, %f)' % (position[0], position[1], position[2]))

        # Compute tool orientation from heightmap rotation angle
        push_orientation = [1.0, 0.0]
        tool_rotation_angle = heightmap_rotation_angle / 2
        tool_orientation = np.asarray([push_orientation[0] * np.cos(tool_rotation_angle) - push_orientation[1] * np.sin(tool_rotation_angle), push_orientation[0] * np.sin(tool_rotation_angle) + push_orientation[1] * np.cos(tool_rotation_angle), 0.0]) * np.pi
        tool_orientation_angle = np.linalg.norm(tool_orientation)
        tool_orientation_axis = tool_orientation / tool_orientation_angle
        tool_orientation_rotm = utils.angle2rotm(tool_orientation_angle, tool_orientation_axis, point=None)[:3, :3]

        # Compute push direction and endpoint (push to right of rotated heightmap)
        push_direction = np.asarray([push_orientation[0] * np.cos(heightmap_rotation_angle) - push_orientation[1] * np.sin(heightmap_rotation_angle), push_orientation[0] * np.sin(heightmap_rotation_angle) + push_orientation[1] * np.cos(heightmap_rotation_angle), 0.0])
        target_x = min(max(position[0] + push_direction[0] * 0.1, workspace_limits[0][0]), workspace_limits[0][1])
        target_y = min(max(position[1] + push_direction[1] * 0.1, workspace_limits[1][0]), workspace_limits[1][1])
        push_endpoint = np.asarray([target_x, target_y, position[2]])
        push_direction.shape = (3, 1)

        # Compute tilted tool orientation during push
        tilt_axis = np.dot(utils.euler2rotm(np.asarray([0, 0, np.pi / 2]))[:3, :3], push_direction)
        tilt_rotm = utils.angle2rotm(-np.pi / 8, tilt_axis, point=None)[:3, :3]
        tilted_tool_orientation_rotm = np.dot(tilt_rotm, tool_orientation_rotm)
        tilted_tool_orientation_axis_angle = utils.rotm2angle(tilted_tool_orientation_rotm)
        tilted_tool_orientation = tilted_tool_orientation_axis_angle[0] * np.asarray(tilted_tool_orientation_axis_angle[1:4])

        # Push only within workspace limits
        position = np.asarray(position).copy()
        position[0] = min(max(position[0], workspace_limits[0][0]), workspace_limits[0][1])
        position[1] = min(max(position[1], workspace_limits[1][0]), workspace_limits[1][1])
        position[2] = max(position[2] + 0.005, workspace_limits[2][0] + 0.005)  # Add buffer to surface

        home_position = [0.49, 0.11, 0.03]

        # Attempt push
        self.tcp_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.tcp_socket.connect((self.tcp_host_ip, self.tcp_port))
        tcp_command = "def process():\n"
        tcp_command += " set_digital_out(8,True)\n"
        tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=0.09)\n" % (position[0], position[1], position[2] + 0.1, tool_orientation[0], tool_orientation[1], tool_orientation[2], self.joint_acc * 0.5, self.joint_vel * 0.5)
        tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=0.00)\n" % (position[0], position[1], position[2], tool_orientation[0], tool_orientation[1], tool_orientation[2], self.joint_acc * 0.1, self.joint_vel * 0.1)
        tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=0.00)\n" % (push_endpoint[0], push_endpoint[1], push_endpoint[2], tilted_tool_orientation[0], tilted_tool_orientation[1], tilted_tool_orientation[2], self.joint_acc * 0.1, self.joint_vel * 0.1)
        tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=0.03)\n" % (position[0], position[1], position[2] + 0.1, tool_orientation[0], tool_orientation[1], tool_orientation[2], self.joint_acc * 0.5, self.joint_vel * 0.5)
        tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=0.00)\n" % (home_position[0], home_position[1], home_position[2], tool_orientation[0], tool_orientation[1], tool_orientation[2], self.joint_acc * 0.5, self.joint_vel * 0.5)
        tcp_command += "end\n"
        self.tcp_socket.send(str.encode(tcp_command))
        self.tcp_socket.close()

        # Block until robot returns to the home position
        state_data = self.get_state()
        while True:
            state_data = self.get_state()
            actual_tool_pose = self.parse_tcp_state_data(state_data, 'cartesian_info')
            if all([np.abs(actual_tool_pose[j] - home_position[j]) < self.tool_pose_tolerance[j] for j in range(3)]):
                break
        push_success = True
        time.sleep(0.5)

        return push_success
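The push example composes rotations through utils.angle2rotm and utils.rotm2angle. Judging from how the results are used above, angle2rotm returns a 4x4 homogeneous matrix (it is sliced with [:3, :3]) and rotm2angle returns [angle, x, y, z]. A minimal sketch of both using Rodrigues' formula follows; the signatures are inferred from the call sites, not from the original utils module, and the degenerate case angle close to pi is not handled.

    import numpy as np

    def angle2rotm(angle, axis, point=None):
        # Sketch: 4x4 homogeneous rotation from axis-angle (Rodrigues' formula).
        # point is accepted for signature compatibility but ignored here.
        axis = np.asarray(axis, dtype=float).flatten()
        axis = axis / np.linalg.norm(axis)
        kx, ky, kz = axis
        K = np.array([[0, -kz, ky],
                      [kz, 0, -kx],
                      [-ky, kx, 0]])
        R = np.eye(3) + np.sin(angle) * K + (1 - np.cos(angle)) * (K @ K)
        T = np.eye(4)
        T[:3, :3] = R
        return T

    def rotm2angle(R):
        # Sketch: inverse conversion returning [angle, x, y, z],
        # consistent with how tilted_tool_orientation is built above.
        angle = np.arccos(np.clip((np.trace(R[:3, :3]) - 1) / 2, -1.0, 1.0))
        if np.isclose(angle, 0.0):
            return [0.0, 0.0, 0.0, 1.0]
        rx = R[2, 1] - R[1, 2]
        ry = R[0, 2] - R[2, 0]
        rz = R[1, 0] - R[0, 1]
        axis = np.array([rx, ry, rz]) / (2 * np.sin(angle))
        return [angle, axis[0], axis[1], axis[2]]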
Example #3
        def __init__(self):
            # Create object handles
            _, self.target_right_handle = client.simxGetObjectHandle(
                "target_right", client.simxServiceCall())
            _, self.connector_handle = client.simxGetObjectHandle(
                'RG2_attachPoint', client.simxServiceCall())
            _, self.sensor_handle = client.simxGetObjectHandle(
                'RG2_attachProxSensor', client.simxServiceCall())
            _, self.gripper_joint_handle = client.simxGetObjectHandle(
                'RG2_openCloseJoint#0', client.simxServiceCall())
            _, self.cube_handle = client.simxGetObjectHandle(
                "cube", client.simxServiceCall())
            _, self.right_force_sensor_handle = client.simxGetObjectHandle(
                "RG2_rightForceSensor#0", client.simxServiceCall())
            _, self.vision_sensor_handle = client.simxGetObjectHandle(
                'vision_sensor', client.simxServiceCall())
            _, self.side_vision_sensor_handle = client.simxGetObjectHandle(
                'side_vision_sensor', client.simxServiceCall())

            # Camera intrinsics (values assumed, not calibrated for this scene)
            self.cam_intrinsics = np.asarray([[618.62, 0, 320],
                                              [0, 618.62, 240], [0, 0, 1]])
            self.workspace_limits = np.asarray([[-2, 2], [-6, -2],
                                                [-5, -4]])  # workspace limits (approximate)
            self.heightmap_resolution = 0.02  # heightmap resolution (value not verified)
            # Camera pose in V-REP (world frame, [R|t])
            self.cam_pose = np.asarray([[1, 0, 0, 0],
                                        [0, -0.70710679, -0.70710678, 1],
                                        [0, 0.70710678, -0.70710679, 0.5]])

            # Initiate simulation options
            client.simxSynchronous(False)
            client.simxGetSimulationStepStarted(
                client.simxDefaultSubscriber(simulationStepStarted))
            client.simxGetSimulationStepDone(
                client.simxDefaultSubscriber(simulationStepDone))
            client.simxStartSimulation(client.simxDefaultPublisher())

            client.simxAddStatusbarMessage("Starting!!!",
                                           client.simxDefaultPublisher())
            print('Started Simulation!')

            # Query camera pose from the simulator
            self.camera_position = client.simxGetObjectPosition(
                self.vision_sensor_handle, -1, client.simxServiceCall())
            self.camera_orientation = client.simxGetObjectOrientation(
                self.vision_sensor_handle, -1, client.simxServiceCall())
            self.rotation_matrix = euler2rotm(self.camera_orientation[1])
            self.camera_transformation = client.simxGetObjectMatrix(
                self.vision_sensor_handle, -1, client.simxServiceCall())

            self.model = reinforcement_net(use_cuda=False)
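The cam_intrinsics and cam_pose set up here are typically used to back-project the vision sensor's depth image into a world-frame point cloud (for example, before building a heightmap). Below is a minimal sketch of that back-projection, assuming a pinhole camera model and a depth image already scaled to meters; the function name and exact pipeline are illustrative, not taken from the source.

    import numpy as np

    def depth_to_world_points(depth_img, cam_intrinsics, cam_pose):
        # Back-project every depth pixel into the camera frame, then transform
        # into world coordinates with the camera pose ([R|t], 3x4 or 4x4).
        h, w = depth_img.shape
        pix_x, pix_y = np.meshgrid(np.arange(w), np.arange(h))
        z = depth_img
        x = (pix_x - cam_intrinsics[0, 2]) * z / cam_intrinsics[0, 0]
        y = (pix_y - cam_intrinsics[1, 2]) * z / cam_intrinsics[1, 1]
        cam_pts = np.stack([x, y, z], axis=-1).reshape(-1, 3)
        world_pts = (cam_pose[:3, :3] @ cam_pts.T).T + cam_pose[:3, 3]
        return world_pts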
Example #4
    def _get_coord(self,
                   obj_id,
                   position,
                   orientation,
                   vol_bnds=None,
                   voxel_size=None):
        # If vol_bnds is given, return voxel-grid indices; otherwise return world coordinates
        coord = self.voxel_coord[obj_id]
        mat = euler2rotm(p.getEulerFromQuaternion(orientation))
        coord = (mat @ coord.T).T + np.asarray(position)
        if vol_bnds is not None:
            coord = np.round(
                (coord - vol_bnds[:, 0]) / voxel_size).astype(int)
        return coord
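A hypothetical call, assuming a PyBullet object id and a TSDF-style volume. The names env and obj_id, the volume bounds, and the voxel size below are made-up values for illustration only.

    import numpy as np
    import pybullet as p

    # Hypothetical volume bounds in meters: rows are x/y/z, columns are min/max
    vol_bnds = np.array([[-0.3, 0.3],
                         [-0.3, 0.3],
                         [ 0.0, 0.3]])

    # Current pose of the object in the PyBullet world
    position, orientation = p.getBasePositionAndOrientation(obj_id)

    # Voxel indices of the object's sampled points (env is the hypothetical
    # instance whose class defines _get_coord above)
    voxel_idx = env._get_coord(obj_id, position, orientation,
                               vol_bnds=vol_bnds, voxel_size=0.005)

    # World coordinates instead, by omitting the volume bounds
    world_pts = env._get_coord(obj_id, position, orientation)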
Example #5
    def restart_real(self):
        # Compute tool orientation from heightmap rotation angle
        grasp_orientation = [1.0, 0.0]
        tool_rotation_angle = -np.pi / 4
        tool_orientation = np.asarray([grasp_orientation[0] * np.cos(tool_rotation_angle) - grasp_orientation[1] * np.sin(tool_rotation_angle), grasp_orientation[0] * np.sin(tool_rotation_angle) + grasp_orientation[1] * np.cos(tool_rotation_angle), 0.0]) * np.pi
        tool_orientation_angle = np.linalg.norm(tool_orientation)
        tool_orientation_axis = tool_orientation / tool_orientation_angle
        tool_orientation_rotm = utils.angle2rotm(tool_orientation_angle, tool_orientation_axis, point=None)[:3, :3]

        tilt_rotm = utils.euler2rotm(np.asarray([-np.pi / 4, 0, 0]))
        tilted_tool_orientation_rotm = np.dot(tilt_rotm, tool_orientation_rotm)
        tilted_tool_orientation_axis_angle = utils.rotm2angle(tilted_tool_orientation_rotm)
        tilted_tool_orientation = tilted_tool_orientation_axis_angle[0] * np.asarray(tilted_tool_orientation_axis_angle[1:4])

        # Move to box grabbing position
        box_grab_position = [0.5, -0.35, -0.12]
        self.tcp_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.tcp_socket.connect((self.tcp_host_ip, self.tcp_port))
        tcp_command = "def process():\n"
        tcp_command += " set_digital_out(8,False)\n"
        tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=0.09)\n" % (box_grab_position[0], box_grab_position[1], box_grab_position[2] + 0.1, tilted_tool_orientation[0], tilted_tool_orientation[1], tilted_tool_orientation[2], self.joint_acc, self.joint_vel)
        tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=0.00)\n" % (box_grab_position[0], box_grab_position[1], box_grab_position[2], tool_orientation[0], tool_orientation[1], tool_orientation[2], self.joint_acc, self.joint_vel)
        tcp_command += " set_digital_out(8,True)\n"
        tcp_command += "end\n"
        self.tcp_socket.send(str.encode(tcp_command))
        self.tcp_socket.close()

        # Block until robot reaches box grabbing position and gripper fingers have stopped moving
        state_data = self.get_state()
        tool_analog_input2 = self.parse_tcp_state_data(state_data, 'tool_data')
        while True:
            state_data = self.get_state()
            new_tool_analog_input2 = self.parse_tcp_state_data(state_data, 'tool_data')
            actual_tool_pose = self.parse_tcp_state_data(state_data, 'cartesian_info')
            if tool_analog_input2 < 3.7 and (abs(new_tool_analog_input2 - tool_analog_input2) < 0.01) and all([np.abs(actual_tool_pose[j] - box_grab_position[j]) < self.tool_pose_tolerance[j] for j in range(3)]):
                break
            tool_analog_input2 = new_tool_analog_input2

        # Move to box release position
        box_release_position = [0.5, 0.08, -0.12]
        home_position = [0.49, 0.11, 0.03]
        self.tcp_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.tcp_socket.connect((self.tcp_host_ip, self.tcp_port))
        tcp_command = "def process():\n"
        tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=0.00)\n" % (box_release_position[0], box_release_position[1], box_release_position[2], tool_orientation[0], tool_orientation[1], tool_orientation[2], self.joint_acc * 0.1, self.joint_vel * 0.1)
        tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=0.00)\n" % (box_release_position[0], box_release_position[1], box_release_position[2] + 0.3, tool_orientation[0], tool_orientation[1], tool_orientation[2], self.joint_acc * 0.02, self.joint_vel * 0.02)
        tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=0.29)\n" % (box_grab_position[0] - 0.05, box_grab_position[1] + 0.1, box_grab_position[2] + 0.3, tilted_tool_orientation[0], tilted_tool_orientation[1], tilted_tool_orientation[2], self.joint_acc * 0.5, self.joint_vel * 0.5)
        tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=0.00)\n" % (box_grab_position[0] - 0.05, box_grab_position[1] + 0.1, box_grab_position[2], tool_orientation[0], tool_orientation[1], tool_orientation[2], self.joint_acc * 0.5, self.joint_vel * 0.5)
        tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=0.00)\n" % (box_grab_position[0], box_grab_position[1], box_grab_position[2], tool_orientation[0], tool_orientation[1], tool_orientation[2], self.joint_acc * 0.1, self.joint_vel * 0.1)
        tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=0.00)\n" % (box_grab_position[0] + 0.05, box_grab_position[1], box_grab_position[2], tool_orientation[0], tool_orientation[1], tool_orientation[2], self.joint_acc * 0.1, self.joint_vel * 0.1)
        tcp_command += " set_digital_out(8,False)\n"
        tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=0.09)\n" % (box_grab_position[0], box_grab_position[1], box_grab_position[2] + 0.1, tilted_tool_orientation[0], tilted_tool_orientation[1], tilted_tool_orientation[2], self.joint_acc, self.joint_vel)
        tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=0.00)\n" % (home_position[0], home_position[1], home_position[2], tool_orientation[0], tool_orientation[1], tool_orientation[2], self.joint_acc, self.joint_vel)
        tcp_command += "end\n"
        self.tcp_socket.send(str.encode(tcp_command))
        self.tcp_socket.close()

        # Block until robot reaches home position
        state_data = self.get_state()
        tool_analog_input2 = self.parse_tcp_state_data(state_data, 'tool_data')
        while True:
            state_data = self.get_state()
            new_tool_analog_input2 = self.parse_tcp_state_data(state_data, 'tool_data')
            actual_tool_pose = self.parse_tcp_state_data(state_data, 'cartesian_info')
            if tool_analog_input2 > 3.0 and (abs(new_tool_analog_input2 - tool_analog_input2) < 0.01) and all([np.abs(actual_tool_pose[j] - home_position[j]) < self.tool_pose_tolerance[j] for j in range(3)]):
                break
            tool_analog_input2 = new_tool_analog_input2
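The two polling loops above (and the similar ones in the other real-robot examples) repeat the same pattern: read the robot state, parse the Cartesian pose and the tool analog input (gripper width feedback), and wait until the pose is within tolerance of a target and the width reading has settled. One possible refactoring into a single helper is sketched below, with an added timeout; this is not part of the original class.

    def _wait_until_reached(self, target_position, timeout=10.0):
        # Poll robot state until the tool pose is within tolerance of
        # target_position and the gripper width feedback stops changing.
        t0 = time.time()
        prev_width = self.parse_tcp_state_data(self.get_state(), 'tool_data')
        while time.time() - t0 < timeout:
            state_data = self.get_state()
            width = self.parse_tcp_state_data(state_data, 'tool_data')
            pose = self.parse_tcp_state_data(state_data, 'cartesian_info')
            at_target = all(np.abs(pose[j] - target_position[j]) < self.tool_pose_tolerance[j]
                            for j in range(3))
            if at_target and abs(width - prev_width) < 0.01:
                return True
            prev_width = width
        return False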
Example #6
    def grasp(self, position, heightmap_rotation_angle, workspace_limits):
        print('Executing: grasp at (%f, %f, %f)' % (position[0], position[1], position[2]))

        # Compute tool orientation from heightmap rotation angle
        grasp_orientation = [1.0, 0.0]
        if heightmap_rotation_angle > np.pi:
            heightmap_rotation_angle = heightmap_rotation_angle - 2 * np.pi
        tool_rotation_angle = heightmap_rotation_angle / 2
        tool_orientation = np.asarray([grasp_orientation[0] * np.cos(tool_rotation_angle) - grasp_orientation[1] * np.sin(tool_rotation_angle), grasp_orientation[0] * np.sin(tool_rotation_angle) + grasp_orientation[1] * np.cos(tool_rotation_angle), 0.0]) * np.pi
        tool_orientation_angle = np.linalg.norm(tool_orientation)
        tool_orientation_axis = tool_orientation / tool_orientation_angle
        tool_orientation_rotm = utils.angle2rotm(tool_orientation_angle, tool_orientation_axis, point=None)[:3, :3]

        # Compute tilted tool orientation during dropping into bin
        tilt_rotm = utils.euler2rotm(np.asarray([-np.pi / 4, 0, 0]))
        tilted_tool_orientation_rotm = np.dot(tilt_rotm, tool_orientation_rotm)
        tilted_tool_orientation_axis_angle = utils.rotm2angle(tilted_tool_orientation_rotm)
        tilted_tool_orientation = tilted_tool_orientation_axis_angle[0] * np.asarray(tilted_tool_orientation_axis_angle[1:4])

        # Attempt grasp
        position = np.asarray(position).copy()
        position[2] = max(position[2] - 0.05, workspace_limits[2][0])
        self.tcp_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.tcp_socket.connect((self.tcp_host_ip, self.tcp_port))
        tcp_command = "def process():\n"
        tcp_command += " set_digital_out(8,False)\n"
        tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=0.09)\n" % (position[0], position[1], position[2] + 0.1, tool_orientation[0], tool_orientation[1], 0.0, self.joint_acc * 0.5, self.joint_vel * 0.5)
        tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=0.00)\n" % (position[0], position[1], position[2], tool_orientation[0], tool_orientation[1], 0.0, self.joint_acc * 0.1, self.joint_vel * 0.1)
        tcp_command += " set_digital_out(8,True)\n"
        tcp_command += "end\n"
        self.tcp_socket.send(str.encode(tcp_command))
        self.tcp_socket.close()

        # Block until robot reaches target tool position and gripper fingers have stopped moving
        state_data = self.get_state()
        tool_analog_input2 = self.parse_tcp_state_data(state_data, 'tool_data')
        timeout_t0 = time.time()
        while True:
            state_data = self.get_state()
            new_tool_analog_input2 = self.parse_tcp_state_data(state_data, 'tool_data')
            actual_tool_pose = self.parse_tcp_state_data(state_data, 'cartesian_info')
            timeout_t1 = time.time()
            if (tool_analog_input2 < 3.7 and (abs(new_tool_analog_input2 - tool_analog_input2) < 0.01) and all([np.abs(actual_tool_pose[j] - position[j]) < self.tool_pose_tolerance[j] for j in range(3)])) or (timeout_t1 - timeout_t0) > 5:
                break
            tool_analog_input2 = new_tool_analog_input2

        # Check if gripper is open (grasp might be successful)
        gripper_open = tool_analog_input2 > 0.26

        # # Check if grasp is successful
        # grasp_success =  tool_analog_input2 > 0.26

        home_position = [0.49, 0.11, 0.03]
        bin_position = [0.5, -0.45, 0.1]

        # If gripper is open, drop object in bin and check if grasp is successful
        grasp_success = False
        if gripper_open:

            # Pre-compute blend radius
            blend_radius = min(abs(bin_position[1] - position[1]) / 2 - 0.01, 0.2)

            # Attempt placing
            self.tcp_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.tcp_socket.connect((self.tcp_host_ip, self.tcp_port))
            tcp_command = "def process():\n"
            tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=%f)\n" % (position[0], position[1], bin_position[2], tool_orientation[0], tool_orientation[1], 0.0, self.joint_acc, self.joint_vel, blend_radius)
            tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=%f)\n" % (bin_position[0], bin_position[1], bin_position[2], tilted_tool_orientation[0], tilted_tool_orientation[1], tilted_tool_orientation[2], self.joint_acc, self.joint_vel, blend_radius)
            tcp_command += " set_digital_out(8,False)\n"
            tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=0.0)\n" % (home_position[0], home_position[1], home_position[2], tool_orientation[0], tool_orientation[1], 0.0, self.joint_acc * 0.5, self.joint_vel * 0.5)
            tcp_command += "end\n"
            self.tcp_socket.send(str.encode(tcp_command))
            self.tcp_socket.close()
            # print(tcp_command) # Debug

            # Measure gripper width until robot reaches near bin location
            state_data = self.get_state()
            measurements = []
            while True:
                state_data = self.get_state()
                tool_analog_input2 = self.parse_tcp_state_data(state_data, 'tool_data')
                actual_tool_pose = self.parse_tcp_state_data(state_data, 'cartesian_info')
                measurements.append(tool_analog_input2)
                if abs(actual_tool_pose[1] - bin_position[1]) < 0.2 or all([np.abs(actual_tool_pose[j] - home_position[j]) < self.tool_pose_tolerance[j] for j in range(3)]):
                    break

            # If gripper width did not change before reaching bin location, then object is in grip and grasp is successful
            if len(measurements) >= 2:
                if abs(measurements[0] - measurements[1]) < 0.1:
                    grasp_success = True

        else:
            self.tcp_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.tcp_socket.connect((self.tcp_host_ip, self.tcp_port))
            tcp_command = "def process():\n"
            tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=0.09)\n" % (position[0], position[1], position[2] + 0.1, tool_orientation[0], tool_orientation[1], 0.0, self.joint_acc * 0.5, self.joint_vel * 0.5)
            tcp_command += " movej(p[%f,%f,%f,%f,%f,%f],a=%f,v=%f,t=0,r=0.0)\n" % (home_position[0], home_position[1], home_position[2], tool_orientation[0], tool_orientation[1], 0.0, self.joint_acc * 0.5, self.joint_vel * 0.5)
            tcp_command += "end\n"
            self.tcp_socket.send(str.encode(tcp_command))
            self.tcp_socket.close()

        # Block until robot reaches home location
        state_data = self.get_state()
        tool_analog_input2 = self.parse_tcp_state_data(state_data, 'tool_data')
        actual_tool_pose = self.parse_tcp_state_data(state_data, 'cartesian_info')
        while True:
            state_data = self.get_state()
            new_tool_analog_input2 = self.parse_tcp_state_data(state_data, 'tool_data')
            actual_tool_pose = self.parse_tcp_state_data(state_data, 'cartesian_info')
            if (abs(new_tool_analog_input2 - tool_analog_input2) < 0.01) and all([np.abs(actual_tool_pose[j] - home_position[j]) < self.tool_pose_tolerance[j] for j in range(3)]):
                break
            tool_analog_input2 = new_tool_analog_input2

        return grasp_success
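A hypothetical call of this grasp primitive, with made-up workspace limits and target; robot stands for an instance of the class these methods belong to.

    import numpy as np

    # Assumed workspace limits in meters: rows are x/y/z, columns are min/max
    workspace_limits = np.asarray([[0.3, 0.7],
                                   [-0.25, 0.25],
                                   [0.0, 0.3]])

    # Try to grasp at (0.5, -0.1, 0.05) with the gripper rotated 45 degrees
    grasp_success = robot.grasp([0.5, -0.1, 0.05],
                                heightmap_rotation_angle=np.pi / 4,
                                workspace_limits=workspace_limits)
    print('Grasp succeeded' if grasp_success else 'Grasp failed')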