Ejemplo n.º 1
0
import sys
import urx
from urx.robotiq_two_finger_gripper import Robotiq_Two_Finger_Gripper
import time

# Connect to the UR robot and attach the Robotiq two-finger gripper.
rob = urx.Robot("192.168.1.102")
robotiqgrip = Robotiq_Two_Finger_Gripper(rob)


def _sweep(positions):
    # Step the gripper through each commanded position, pausing between steps.
    for position in positions:
        robotiqgrip.gripper_action(position)
        time.sleep(0.2)


# Sweep from open toward closed, then back from closed toward open.
_sweep(range(0, 255, 10))
_sweep(range(255, 0, -10))


# robotiqgrip.open_gripper()
# robotiqgrip.close_gripper()

Ejemplo n.º 2
0
robot.movej(angular_pos, acc=acc,
            vel=vel)  # Move the robot in joint space (radians)
print(robot.get_pose())  # Returns the (x, y, z) of the TCP
robot.up()  # Move the tool up (presumably along base z -- confirm in urx docs)
robot.down()  # Move the tool down
robot.translate([0.1, 0.1, 0.1])  # Translate the TCP by (dx, dy, dz)

# Gripper commands
'''
Les différentes commandes sont disponibles dans le fichier python-urx-0.11.0/urx/robotiq_two_fingers_gripper.py
dans la classe Robotiq_Two_Finger_Gripper                                             
'''
gripper.open_gripper()  # Fully opens the gripper (original comment had open/close swapped)
gripper.close_gripper()  # Fully closes the gripper
gripper.gripper_action(
    150)  # Sets the gripper to a given opening (0: open, 255: closed)
print(gripper.send_opening(
    TCP_Mon_Ordi, TCP_PORT_GRIPPER))  # Returns the gripper opening
robot.close()  # Close the connection

# Force-sensor commands: open a connection to the force sensor
robot = urx.Robot(TCP_Robot, useForce=True)
robot.set_tcp(
    (0, 0, 0.3, 0, 0, -0.43)
)  # Tool Center Point pose relative to the robot flange (x, y, z, Rx, Ry, Rz)
# (original note says mm and radians -- TODO confirm, urx set_tcp usually takes meters)
robot.set_payload(1, (0, 0, 0.1))  # payload: 1 kg, center of gravity (0, 0, 0.1)
print(robot.force_sensor.getZForce())
robot.force_sensor.tare()  # Tares the force sensor
robot.force_sensor.getNormForce()  # Returns the force norm in newtons
print('Force en Z positif')
Ejemplo n.º 3
0
from urx.robotiq_two_finger_gripper import Robotiq_Two_Finger_Gripper

# fix: this snippet used urx, socket and time without importing them
# (only the gripper class was imported above).
import socket
import time

import urx

# Motion parameters (kept for reference; unused below).
l = 0.1
v = 0.07
a = 0.1
r = 0.05

# Connect to the UR robot and attach the Robotiq two-finger gripper.
rob = urx.Robot("192.168.0.3")
robotiqgrip = Robotiq_Two_Finger_Gripper(rob)

# Port 63352 is the Robotiq URCap's gripper socket interface.
PORT = 63352
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
    s.connect(("192.168.0.3", PORT))

    time.sleep(0.1)
    robotiqgrip.gripper_action(128)  # half-way position

    time.sleep(0.1)
    robotiqgrip.close_gripper()

    time.sleep(0.1)
    robotiqgrip.open_gripper()
finally:
    # fix: the socket (and the robot connection) were never closed on error.
    s.close()
    rob.close()
'''
import socket
import time
import struct

HOST = "192.168.0.3" # The remote host
PORT_63352 = 63352
SOCKET_NAME = "gripper_socket"
class ur5_reach:
    """Pick-and-place controller for a UR5 arm driven by a pretrained
    pick-and-place policy, with the goal located via AprilTag detections.

    NOTE(review): this class relies on names not visible in this file
    (rospy, AprilTagDetectionArray, URRobot, Robotiq_Two_Finger_Gripper,
    np, pi, time, pickle, Real_XY_of_BOX_on_Camera) -- presumably imported
    elsewhere; verify before reuse.
    """
    def __init__(self):
        # Subscribe to AprilTag detections; on_sub performs one control step
        # per received message.
        rospy.Subscriber(name='/tag_detections',
                         data_class=AprilTagDetectionArray,
                         callback=self.on_sub,
                         queue_size=1)  # queue_size: number of messages kept when the subscriber cannot keep up
        #image_sub = rospy.Subscriber("/camera/rgb/image_raw",data_class=Image,callback=self.rgb_callback)
        self.target_on_base = None
        self.rbt = URRobot('192.168.31.53')

        self.gripper = Robotiq_Two_Finger_Gripper(self.rbt)
        #self.gripper.gripper_action(0)

        # Move to a fixed start pose (x, y, z, rx, ry, rz) and wait for it.
        self.pos_init = [0.1, -0.3, 0.3, 0, pi, 0]
        self.rbt.movel(self.pos_init, 0.3, 0.3)
        #self.gripper.gripper_action(255)
        time.sleep(2)
        # Policy-query options (mirroring rollout-time flags).
        self.compute_Q = False
        self.noise_eps = 0
        self.random_eps = 0
        self.use_target_net = False
        self.exploit = False
        self.has_object = True
        self.block_gripper = False  # reach: true, pick: false
        self.detection_target_storage = []
        #self.detection_object_storage = []
        self.gripper_state = [0, 0]
        self.pick = False
        self.object_reach = False
        #self.pick = True

        #seed = 0
        #set_global_seeds(seed)
        policy_file = "./policy_best_picknplace.pkl"

        # Load policy.
        # NOTE(review): pickle.load executes code on load -- only use
        # trusted policy files.
        with open(policy_file, 'rb') as f:
            self.policy = pickle.load(f)

    def detection_object_by_color(self):
        """Return the object position in camera coordinates via color detection.

        NOTE(review): u, v and z are not defined anywhere visible here -- as
        written this raises NameError; presumably the commented-out
        detect_ball() call was meant to provide u and v. Confirm upstream.
        """
        #u,v=detect_ball()
        return Real_XY_of_BOX_on_Camera(u, v, z)

    # rbt Transfer (rbt_base to frame_base)
    # UR5--Base
    def on_sub(self, data):
        """Control-step callback: locate target and object, build the policy
        observation, query the policy, and command the robot and gripper.
        """
        # target Transfer (kinect2_rgb_optical_frame to frame_base),get g
        # Camera--Base
        detections = data.detections

        # Tag id 12 marks the target (goal) location.
        for i in range(len(detections)):
            if detections[i].id == 12:
                self.detection_target = detections[i]
                self.detection_target_storage.append([
                    self.detection_target.pose.pose.position.x,
                    self.detection_target.pose.pose.position.y,
                    self.detection_target.pose.pose.position.z
                ])
            '''
            if detections[i].id == 13:
                self.detection_object = detections[i]
                self.detection_object_storage.append([self.detection_object.pose.pose.position.x, self.detection_object.pose.pose.position.y, self.detection_object.pose.pose.position.z])
            '''

        # NOTE(review): only the FIRST stored detection is ever used, while
        # the storage list grows on every callback -- confirm this is intended.
        pose_on_camera_target = self.detection_target_storage[0]
        #pose_on_camera_object = self.detection_object_storage[0]
        pose_on_camera_object = self.detection_object_by_color().ravel()

        self.target_on_base = self.camera2base(pose_on_camera_target)
        self.object_on_base = self.camera2base(pose_on_camera_object)

        pose_rbt = self.rbt.getl()

        # Map robot-base coordinates into the simulation frame expected by
        # the policy (axis swap plus fixed offsets).
        grip_pos = [
            -pose_rbt[1] + 0.75, pose_rbt[0] + 0.74, pose_rbt[2] + 0.65
        ]  # grip_pos: end-effector position
        target_pos = [
            -self.target_on_base[1] + 0.75, self.target_on_base[0] + 0.74,
            self.target_on_base[2] + 0.70
        ]  # goal position; the goal sits 5 cm above the gripper tip on the z axis.

        # Once the gripper is within 0.1 (L1 distance) of the object while
        # picking, treat the object as grasped and moving with the gripper.
        if self.pick == True and sum(
                abs(
                    np.asarray(grip_pos) - np.asarray([
                        -self.object_on_base[1] + 0.75, self.object_on_base[0]
                        + 0.74, self.object_on_base[2] + 0.625
                    ]))) < 0.1:
            object_pos = grip_pos  # object position
            object_rel_pos = [0, 0, 0]
            gripper_state = [0.025, 0.025]
            self.object_reach = True

        if self.object_reach == True:
            object_pos = grip_pos  # object position
            object_rel_pos = [0, 0, 0]
            gripper_state = [0.025, 0.025]
        else:
            object_pos = [
                -self.object_on_base[1] + 0.75, self.object_on_base[0] + 0.74,
                self.object_on_base[2] + 0.625
            ]  # object position
            object_rel_pos = [
                object_pos[0] - grip_pos[0], object_pos[1] - grip_pos[1],
                object_pos[2] - grip_pos[2]
            ]
            gripper_state = self.gripper_state

        achieved_goal = np.squeeze(object_pos.copy())

        # Observation vector in the layout the policy was trained on.
        o = [
            np.concatenate([
                grip_pos,
                np.asarray(object_pos).ravel(),
                np.asarray(object_rel_pos).ravel(), gripper_state
            ])
        ]  # grip_pos
        ag = [achieved_goal]  # grip_pos object_pos
        g = [target_pos]

        print(o)

        # run model get action, pos_delta (frame_base)
        # The action is in simulation units: 1 action = 0.033 sim observation
        # = 0.033 real observation.
        action = self.policy.get_actions(
            o,
            ag,
            g,
            compute_Q=self.compute_Q,
            noise_eps=self.noise_eps if not self.exploit else 0.,
            random_eps=self.random_eps if not self.exploit else 0.,
            use_target_net=self.use_target_net)

        if self.compute_Q:
            u, Q = action
        else:
            u = action

        if u.ndim == 1:
            # The non-batched case should still have a reasonable shape.
            u = u.reshape(1, -1)
        # NOTE(review): this slices `action`, not the reshaped `u`; when
        # compute_Q is True, `action` is a (u, Q) tuple and this indexing
        # looks wrong -- confirm compute_Q is always False here.
        pos_ctrl, gripper_ctrl = action[:3], action[3]
        if gripper_ctrl < 0:
            self.pick = True
        else:
            self.pick = False

        pos_ctrl *= 0.033  #simulation action -> real world action.(1 sim action = 0.033 real action)
        gripper_ctrl *= 0.7636
        rot_ctrl = [
            1., 0., 1., 0.
        ]  # fixed rotation of the end effector, expressed as a quaternion

        gripper_ctrl = np.array([gripper_ctrl, gripper_ctrl])
        assert gripper_ctrl.shape == (2, )
        if self.block_gripper:
            gripper_ctrl = np.zeros_like(gripper_ctrl)

        action = np.concatenate([pos_ctrl, rot_ctrl, gripper_ctrl])  # action
        action, gripper_st = np.split(action, (1 * 7, ))
        action = action.reshape(1, 7)

        pos_delta = action[:, :3]
        #pos_delta = pos_ctrl
        # NOTE(review): when self.gripper_state is a plain list, `+=` with an
        # array EXTENDS the list instead of adding element-wise -- verify the
        # intended accumulation semantics.
        self.gripper_state += gripper_st
        # Clamp the commanded opening to [0, 0.05].
        if self.gripper_state[0] > 0.05:
            self.gripper_state = [0.05, 0.05]
        if self.gripper_state[0] < 0:
            self.gripper_state = [0, 0]

        act_on_base = pose_rbt[:3]
        her_pose = [act_on_base[0], act_on_base[1],
                    act_on_base[2]]  # end-effector position in the UR base frame

        # test on rule and get right position
        rule_pose = [0, 0, 0, 0, 0, 0]
        rule_pose[0:3] = [
            self.target_on_base[0], self.target_on_base[1],
            self.target_on_base[2]
        ]
        rule_pose[3] = pose_rbt[3]
        rule_pose[4] = pose_rbt[4]
        rule_pose[5] = pose_rbt[5]  # rx, ry, rz fixed

        # judge if the target is reached
        '''
        rule_pose_np = np.asarray(rule_pose[:3]).ravel()
        her_pose_np = np.asarray(her_pose[:3]).ravel()
        if np.matmul((rule_pose_np-her_pose_np).T, rule_pose_np-her_pose_np) < 0.02:
        '''
        if abs(her_pose[0] - rule_pose[0]) + abs(her_pose[1] -
                                                 rule_pose[1]) < 0.04:
            print("reach the target")
            return

        # Apply the (axis-swapped) position delta and keep orientation fixed.
        act_on_base[0] += pos_delta[0][1]  # left right
        act_on_base[1] -= pos_delta[0][0]  # front behind
        act_on_base[2] += pos_delta[0][2]  # up down
        robot_delta = [
            act_on_base[0], act_on_base[1], act_on_base[2], pose_rbt[3],
            pose_rbt[4], pose_rbt[5]
        ]

        self.rbt.movel(tpose=robot_delta, acc=0.1, vel=0.25)
        # Convert opening in meters to the gripper's 0-255 command range.
        self.gripper.gripper_action(int(255 - 5100 * self.gripper_state[0]))

    def camera2base(self, pose_on_camera):
        """Transform a camera-frame (x, y, z) point into the robot base frame.

        Tm is a fixed hand-eye calibration matrix; the per-axis offsets and
        scale factors applied afterwards are empirical corrections.
        """
        Tm = np.array([[-0.9992, 0.0082, 0.0383, 0.0183],
                       [-0.0036, 0.9543, -0.2987, -0.2376],
                       [-0.0390, -0.2986, -0.9536, 1.1127], [0, 0, 0, 1.0000]])
        pose_on_base = np.dot(
            [pose_on_camera[0], pose_on_camera[1], pose_on_camera[2], 1], Tm.T)
        pose_on_base[0] = pose_on_base[0] - 0.03
        pose_on_base[1] = pose_on_base[1] * 0.98 - 0.01
        pose_on_base[2] = pose_on_base[2] * 0.94 + 0.09
        return pose_on_base
Ejemplo n.º 5
0
import sys
import urx
from urx.robotiq_two_finger_gripper import Robotiq_Two_Finger_Gripper
import time

# Connect to the UR robot and attach the Robotiq two-finger gripper.
rob = urx.Robot("192.168.1.102")
robotiqgrip = Robotiq_Two_Finger_Gripper(rob)

# Usage: <script> close | open | <0-255>
if len(sys.argv) != 2:
    print("false")
    sys.exit()

# fix: the original used two independent `if` statements, so "close" fell
# through into the final else branch and crashed on int("close").
if sys.argv[1] == "close":
    robotiqgrip.close_gripper()
elif sys.argv[1] == "open":
    robotiqgrip.open_gripper()
else:
    # Numeric position: 0 (open) .. 255 (closed).
    robotiqgrip.gripper_action(int(sys.argv[1]))
# for i in range(25):
#     print(i)
#     robotiqgrip.gripper_action(i  * 10)
#     time.sleep(0.01)

rob.close()
Ejemplo n.º 6
0
import urx
from urx.robotiq_two_finger_gripper import Robotiq_Two_Finger_Gripper

# Connect to the UR controller and drive the attached Robotiq gripper.
robot = urx.Robot('192.168.1.109')
gripper = Robotiq_Two_Finger_Gripper(robot)

# Command a partial opening (96 of 255), then close fully.
gripper.gripper_action(96)
gripper.close_gripper()

class URRobotController(object):
    """Universal Robot (URx) controller RTC.

        This class is using urx package. It is a singleton: the first call
        to the constructor fixes ip/realtime; later calls return the cached
        instance and ignore their arguments.
    """
    # Exceptions
    URxException = urx.urrobot.RobotException

    # Private member
    __instance = None
    __robot = None
    __gripper = None
    __sync_mode = False
    _accel = 0.6
    _velocity = 0.6

    # singleton
    def __new__(cls, ip="192.168.1.101", realtime=True):
        if not cls.__instance:
            # fix: object.__new__() takes no extra arguments in Python 3;
            # object.__new__(cls, ip, realtime) raised TypeError.
            cls.__instance = object.__new__(cls)
        return cls.__instance

    def __init__(self, ip="192.168.1.101", realtime=True):
        # Singleton guard: skip re-initialization once connected.
        if self.__robot:
            logging.info("instance is already exist")
            return

        logging.basicConfig(format='%(asctime)s:%(levelname)s: %(message)s',
                            level=logging.INFO)

        # not use realtime port in stub mode
        if ip == "localhost":
            self.__realtime = False
        else:
            self.__realtime = realtime

        # fix: the extra argument had no matching placeholder, which made
        # logging emit a formatting error instead of the message.
        logging.info("Create URRobotController IP: %s realtime: %s",
                     ip, self.__realtime)

        try:
            self.__robot = urx.Robot(ip, use_rt=self.__realtime)
            self.__robot.set_tcp((0, 0, 0, 0, 0, 0))
            self.__robot.set_payload(0, (0, 0, 0))
        except self.URxException:
            logging.error("URx exception occurred in init robot")
            self.__robot = None
            return
        except Exception as e:
            logging.error("exception: " + format(str(e)) + " in init robot")
            self.__robot = None
            return

        try:
            self.__gripper = Robotiq_Two_Finger_Gripper(self.__robot)
        except self.URxException:
            logging.error("URx exception occurred in init gripper")
            self.__gripper = None
        except Exception as e:
            logging.error("exception: " + format(str(e)) + " in init gripper")
            self.__gripper = None

        self._update_send_time()

        # use pause() for service port
        self._joints_goal = []
        self._pause = False

    def __del__(self):
        self.finalize()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.finalize()

    def _update_send_time(self):
        # Record when a command was last sent to the controller.
        self.__latest_send_time = datetime.now()

    def _expire_send_time(self):
        """Return True when more than 100 ms passed since the last command."""
        diff = datetime.now() - self.__latest_send_time
        # fix: `.microseconds` is only the sub-second component, so a gap of
        # e.g. 1.05 s reported 50000 and was wrongly treated as not expired;
        # compare the full duration instead.
        return diff.total_seconds() > 0.1

    def set_middle(self, middle):
        """Set the middle (intermediate) pose."""
        self._middle = middle

    def unset_middle(self):
        """Clear the middle (intermediate) pose."""
        self._middle = None

    @property
    def robot_available(self):
        """Get robot instance is available or not.

        Returns:
            True: Available.
            False: NOT available.
        """
        if self.__robot:
            return True
        else:
            return False

    @property
    def gripper_available(self):
        """Get gripper instance is available or not.

        Returns:
            True: Available.
            False: NOT available.
        """
        if self.__gripper:
            return True
        else:
            return False

    @property
    def vel(self):
        """Get velocity for move.

        Returns:
            v: Target velocity.
        """
        return self._velocity

    @vel.setter
    def vel(self, v=None):
        """Set velocity for move.

        Args:
            v: Target velocity (ignored when falsy, keeping the old value).
        """
        if v:
            self._velocity = v

    @property
    def acc(self):
        """Get acceleration for move.

        Returns:
            a: Target acceleration.
        """
        return self._accel

    @acc.setter
    def acc(self, a=None):
        """Set acceleration for move.

        Args:
            a: Target acceleration (ignored when falsy, keeping the old value).
        """
        if a:
            self._accel = a

    def finalize(self):
        """Finalize URRobotController instance.

        Closes the robot connection if it is open.

        Returns:
            True: Success.
            False: Failed.
        """
        if self.__robot:
            self.__robot.close()
            self.__robot = None
            return True
        else:
            logging.error("robot is not initialized in " +
                          sys._getframe().f_code.co_name)
            return False

    def set_payload(self, weight, vector=(0, 0, 0)):
        """Set payload in kg and cog.

        Args:
            weight: Weight in kg.
            vector: Center of gravity in 3d vector.

        Returns:
            True: Success.
            False: Failed.
        """
        if self.__robot:
            # set payload in kg & cog
            self.__robot.set_payload(weight, vector)
            self._update_send_time()
            return True
        else:
            logging.error("robot is not initialized in " +
                          sys._getframe().f_code.co_name)
            return False

    @property
    def is_moving(self):
        """Get status of running program.

        A recently sent command (within 100 ms) also counts as moving.

        Returns:
            True: Program is running.
            False: Program is NOT running.
        """
        if self.__robot:
            r = self.__robot
            return r.is_program_running() or not self._expire_send_time()
        else:
            logging.error("robot is not initialized in " +
                          sys._getframe().f_code.co_name)
            return False

    @property
    def sync_mode(self):
        """Get synchronous mode.

        Returns:
            True: Synchronous mode.
            False: Asynchronous mode.
        """
        return self.__sync_mode

    @sync_mode.setter
    def sync_mode(self, val):
        """Set synchronous mode (wait until program ends).

        Args:
            val: True for synchronous, False for asynchronous.
        """
        self.__sync_mode = val

    def movel(self, pos, a=None, v=None):
        """Move the robot in a linear path.

        Args:
            pos: Target pose.
            a: Target acceleration.
            v: Target velocity.

        Returns:
            True: Success.
            False: Failed.
        """
        if self.__robot:
            self.acc = a
            self.vel = v
            self.__robot.movel(pos,
                               acc=self.acc,
                               vel=self.vel,
                               wait=self.sync_mode)
            self._update_send_time()
            return True
        else:
            logging.error("robot is not initialized in " +
                          sys._getframe().f_code.co_name)
            return False

    def movej(self, joints, a=None, v=None):
        """Move the robot by joint movement.

        NOTE: unlike movel/movels, this honors the pause() flag.

        Args:
            joints: Target joint positions.
            a: Target acceleration.
            v: Target velocity.

        Returns:
            True: Success.
            False: Failed.
        """
        if self._pause:
            return False

        if self.__robot:
            self._joints_goal = joints
            self.acc = a
            self.vel = v
            self.__robot.movej(joints,
                               acc=self.acc,
                               vel=self.vel,
                               wait=self.sync_mode)
            self._update_send_time()
            return True
        else:
            logging.error("robot is not initialized in " +
                          sys._getframe().f_code.co_name)
            return False

    def movels(self, poslist, a=None, v=None):
        """Sequential move the robot in a linear path.

        Args:
            poslist: Target position list
            a: Target acceleration.
            v: Target velocity.

        Returns:
            True: Success.
            False: Failed.
        """
        if self.__robot:
            self.acc = a
            self.vel = v
            self.__robot.movels(poslist,
                                acc=self.acc,
                                vel=self.vel,
                                wait=self.sync_mode)
            self._update_send_time()
            return True
        else:
            logging.error("robot is not initialized in " +
                          sys._getframe().f_code.co_name)
            return False

    def translate_tool(self, vec, a=None, v=None):
        """Move tool in base coordinate, keeping orientation.

        Args:
            vec: Translation vector.
            a: Target acceleration.
            v: Target velocity.

        Returns:
            True: Success.
            False: Failed.
        """
        if self.__robot:
            self.acc = a
            self.vel = v
            self.__robot.translate_tool(vec,
                                        acc=self.acc,
                                        vel=self.vel,
                                        wait=self.sync_mode)
            self._update_send_time()
            return True
        else:
            logging.error("robot is not initialized in " +
                          sys._getframe().f_code.co_name)
            return False

    def getl(self):
        """Get TCP position.

        Returns:
            position: list of TCP position (X, Y, Z, Rx, Ry, Rz)
                      if failed to get, return (0, 0, 0, 0, 0, 0)
        """
        if self.__robot:
            return self.__robot.getl()
        else:
            logging.error("robot is not initialized in " +
                          sys._getframe().f_code.co_name)
            return (0, 0, 0, 0, 0, 0)

    def getj(self):
        """Get joint position.

        Returns:
            position: list of joint position (J0, J1, J2, J3, J4, J5)
                      if failed to get, return (0, 0, 0, 0, 0, 0)
        """
        if self.__robot:
            return self.__robot.getj()
        else:
            logging.error("robot is not initialized in " +
                          sys._getframe().f_code.co_name)
            return (0, 0, 0, 0, 0, 0)

    def get_force(self):
        """Get TCP force.

        Requires the realtime port.

        Returns:
            force: value of TCP force
                   if failed to get, return 0
        """
        if not self.__realtime:
            logging.info("cannot use realtime port in " +
                         sys._getframe().f_code.co_name)
            return 0
        elif self.__robot:
            return self.__robot.get_force()
        else:
            logging.error("robot is not initialized in " +
                          sys._getframe().f_code.co_name)
            return 0

    def start_freedrive(self, time=60):
        """Start freedrive mode.

        Args:
            time: Time to keep freedrive mode in seconds (default=60s).
                  (Parameter name kept for caller compatibility even though
                  it shadows the stdlib module name.)

        Returns:
            True: Success.
            False: Failed.
        """
        if self.__robot:
            self.__robot.set_freedrive(True, timeout=time)
            self._update_send_time()
            return True
        else:
            logging.error("robot is not initialized in " +
                          sys._getframe().f_code.co_name)
            return False

    def end_freedrive(self):
        """End freedrive mode.

        Returns:
            True: Success.
            False: Failed.
        """
        if self.__robot:
            self.__robot.set_freedrive(None)
            self._update_send_time()
            return True
        else:
            logging.error("robot is not initialized in " +
                          sys._getframe().f_code.co_name)
            return False

    def open_gripper(self):
        """Open gripper.

        Returns:
            True: Success.
            False: Failed.
        """
        if self.__robot and self.__gripper:
            try:
                self.__gripper.open_gripper()
                self._update_send_time()
            except self.URxException:
                logging.error("URx exception occurred in " +
                              sys._getframe().f_code.co_name)
                return False
            except Exception as e:
                logging.error("except: " + format(str(e)) + " in " +
                              sys._getframe().f_code.co_name)
                return False
        else:
            logging.error("robot or gripper is not initialized in " +
                          sys._getframe().f_code.co_name)
            return False

        return True

    def gripper_action(self, value):
        """gripper action.

        Args:
            value: value from 0 to 255.
                   0 is open, 255 is close.

        Returns:
            True: Success.
            False: Failed (including out-of-range value).
        """
        if value < 0 or value > 255:
            return False

        if self.__robot and self.__gripper:
            try:
                self.__gripper.gripper_action(value)
                self._update_send_time()
            except self.URxException:
                logging.error("URx exception occurred in " +
                              sys._getframe().f_code.co_name)
                return False
            except Exception as e:
                logging.error("except: " + format(str(e)) + " in " +
                              sys._getframe().f_code.co_name)
                return False
        else:
            logging.error("robot or gripper is not initialized in " +
                          sys._getframe().f_code.co_name)
            return False

        return True

    def close_gripper(self):
        """Close gripper.

        Returns:
            True: Success.
            False: Failed.
        """
        if self.__robot and self.__gripper:
            try:
                self.__gripper.close_gripper()
                self._update_send_time()
            except self.URxException:
                logging.error("URx exception occurred in " +
                              sys._getframe().f_code.co_name)
                return False
            except Exception as e:
                logging.error("except: " + format(str(e)) + " in " +
                              sys._getframe().f_code.co_name)
                return False
        else:
            logging.error("robot or gripper is not initialized in " +
                          sys._getframe().f_code.co_name)
            return False

        return True

    def pause(self):
        """Pause: stop the joints and block further movej() calls.

        Returns:
            True: Success.
            False: Failed.
        """
        if self.__robot:
            self._pause = True
            self.__robot.stopj(self.acc)

            self._update_send_time()
            return True
        else:
            logging.error("robot is not initialized in " +
                          sys._getframe().f_code.co_name)
            return False

    def resume(self):
        """Resume: allow movej() calls again.

        Returns:
            True: Success.
            False: Failed.
        """
        if self.__robot:
            self._pause = False
            return True
        else:
            logging.error("robot is not initialized in " +
                          sys._getframe().f_code.co_name)
            return False

    def stopj(self, a=None):
        """Decelerate joint speeds to zero.

        Args:
            a: joint acceleration [rad/s^2]

        Returns:
            True: Success.
            False: Failed.
        """
        if self.__robot:
            self.acc = a
            self.__robot.stopj(self.acc)

            self._update_send_time()
            return True
        else:
            logging.error("robot is not initialized in " +
                          sys._getframe().f_code.co_name)
            return False

    def get_pos(self):
        """get current transform from base to tcp

        Returns:
            (True, pose): Success, with the current transform.
            (False, []): Failed (robot not initialized).
        """
        pose = []
        if self.__robot:
            pose = self.__robot.get_pos()
            self._update_send_time()
            return True, pose
        else:
            logging.error("robot is not initialized in " +
                          sys._getframe().f_code.co_name)
            return False, pose

    def get_joints_goal(self):
        """Return the joint goal of the most recent movej() call."""
        return self._joints_goal
Ejemplo n.º 8
0
class Robot():
    def __init__(self, tcp_host_ip):
        """Connect to a UR robot and initialize camera/gripper/sensor state.

        Args:
            tcp_host_ip: IP address of the UR controller,
                e.g. "192.168.1.102".
        """
        # Fixed RealSense stream resolution used by getCameraData().
        self.camera_width = 640
        self.camera_height = 480
        # Axis-aligned workspace bounds [[x_min, x_max], [y_min, y_max],
        # [z_min, z_max]] in the robot base frame (meters).
        self.workspace_limits = np.array([[-0.01, 0.12], [0.63, 0.76],
                                          [0.02, 0.08]])
        self.heightmap_resolution = 0.0013 / 4.0

        # robot gripper parameter — linkage/finger lengths (meters) used by
        # the scooping kinematics helpers below.
        self.finger_length = 0.125
        self.l0 = 0.0125
        self.l1 = 0.1
        self.l2l = 0.019
        self.l2r = 0.01
        logging.basicConfig(level=logging.WARN)
        self.rob = urx.Robot(tcp_host_ip)  #"192.168.1.102"

        # Arduino-driven extendable thumb used during scooping.
        self.control_exted_thumb = Arduino_motor()
        self.tcp_host_ip = tcp_host_ip
        self.resetRobot()
        self.resetFT300Sensor()
        self.setCamera()
        self.collision_check_initialization(bowl_position=[0.05, 0.695, 0.03])

    def resetRobot(self):
        """Home the arm and re-initialize the gripper and extendable thumb."""
        self.go_to_home_up()
        # Clear any TCP offset before homing.
        self.rob.set_tcp((0, 0.0, 0, 0, 0, 0))
        time.sleep(0.2)
        # Retract the extendable thumb.  NOTE(review): the meaning of the
        # 0-180 scale is defined by Arduino_motor — confirm there.
        self.control_exted_thumb.set_thumb_length_int(180)
        self.robotiqgrip = Robotiq_Two_Finger_Gripper(self.rob)
        self.robotiqgrip.gripper_activate()
        self.gp_control_distance(0.03)
        self.go_to_home()
        self.baseTee = self.rob.get_pose()
        # Force a canonical axis-aligned end-effector orientation so the
        # camera extrinsics composed in setCamera() are well defined.
        self.baseTee.orient = np.array([[0, 1, 0], [1, 0, 0], [0, 0, -1]])
        print("reset")

    def resetFT300Sensor(self):
        """Open a TCP stream to the Robotiq FT300 force/torque sensor.

        The FT300 publishes readings on port 63351 of the robot controller.
        """
        sensor_endpoint = (self.tcp_host_ip, 63351)
        self.serialFT300Sensor = socket.socket(socket.AF_INET,
                                               socket.SOCK_STREAM)
        self.serialFT300Sensor.connect(sensor_endpoint)

    def getFT300SensorData(self):
        """Block until one complete 6-axis reading arrives from the FT300.

        The sensor streams text packets such as "(fx, fy, fz, tx, ty, tz)".
        Partial or malformed chunks are skipped until a packet with exactly
        six float fields is parsed.

        Returns:
            list[float]: [Fx, Fy, Fz, Tx, Ty, Tz].
        """
        while True:
            data = str(self.serialFT300Sensor.recv(1024),
                       "utf-8").replace("(", "").replace(")", "").split(",")
            try:
                data = [float(x) for x in data]
                if len(data) == 6:
                    break
            except ValueError:
                # Malformed/partial packet — keep reading.  The original
                # bare ``except:`` also swallowed unrelated errors
                # (e.g. KeyboardInterrupt, socket failures).
                pass
        return data

    def setCamera(self):
        """Cache camera intrinsics/extrinsics and the base->camera transform."""
        # RealSense color intrinsics as a 3x3 K matrix.
        self.cam_intrinsics = np.asarray(
            [[612.0938720703125, 0, 321.8862609863281],
             [0, 611.785888671875, 238.18316650390625], [0, 0, 1]])
        # Homogeneous end-effector -> camera transform (hand-eye values).
        self.eeTcam = np.array([[0, -1, 0, 0.142], [1, 0, 0, -0.003],
                                [0, 0, 1, 0.0934057 + 0.03], [0, 0, 0, 1]])

        # Compose with the current base -> end-effector pose.
        base_to_ee = self.baseTee.get_matrix()
        self.baseTcam = np.matmul(base_to_ee, self.eeTcam)

    def getCameraData(self, data_order=0):
        """Capture one color + aligned depth frame from the RealSense.

        Args:
            data_order: index used only by the commented-out debug dumps.

        Returns:
            tuple: (color, depth_image) where ``color`` is a BGR uint8
            array and ``depth_image`` is depth in meters, aligned to the
            color stream.
        """
        # Set the camera settings.
        self.setCamera()
        # Setup: short-lived pipeline with depth (z16) and color (bgr8)
        # streams at the configured resolution, 30 FPS.
        pipeline = rs.pipeline()
        config = rs.config()
        config.enable_stream(rs.stream.depth, self.camera_width,
                             self.camera_height, rs.format.z16, 30)
        config.enable_stream(rs.stream.color, self.camera_width,
                             self.camera_height, rs.format.bgr8, 30)
        profile = pipeline.start(config)
        # Scale factor converting raw z16 units to meters.
        depth_scale = profile.get_device().first_depth_sensor(
        ).get_depth_scale()
        #print("depth_scale", depth_scale)

        # Store next frameset for later processing:
        frames = pipeline.wait_for_frames()
        color_frame = frames.get_color_frame()
        depth_frame = frames.get_depth_frame()

        # Cleanup: stop streaming before the CPU-side post-processing.
        pipeline.stop()
        #print("Frames Captured")

        color = np.asanyarray(color_frame.get_data())
        #color_img_path = './picture_20210422/color_img/'+str(data_order)+'_color.jpg'
        #cv2.imwrite(color_img_path, color)

        # Create alignment primitive with color as its target stream:
        align = rs.align(rs.stream.color)
        frames = align.process(frames)

        # Update color and depth frames:
        aligned_depth_frame = frames.get_depth_frame()
        depth_image_raw = np.asanyarray(aligned_depth_frame.get_data())
        # Convert raw depth units to meters.
        depth_image = depth_image_raw * depth_scale
        #depth_img_path = './picture_20210422/depth_img/'+str(data_order)+'_depth.jpg'
        #cv2.imwrite(depth_img_path, depth_image*1000)
        #colorizer = rs.colorizer()
        #colorized_depth = np.asanyarray(colorizer.colorize(aligned_depth_frame).get_data())
        return color, depth_image

        #return color, depth_image, color_img_path, depth_img_path

    def gp_control_int(self, aperture_int, delay_time=0.2):
        """Command the Robotiq gripper with a raw 0-255 aperture value.

        Args:
            aperture_int: raw gripper command (0 open .. 255 closed).
            delay_time: seconds to wait for the motion to settle.
        """
        self.robotiqgrip.gripper_action(aperture_int)
        # Give the gripper time to physically reach the target.
        time.sleep(delay_time)

    def gp_control_distance(self, aperture_distance, delay_time=0.2):  #meter
        """Open the gripper to a given fingertip separation (meters).

        A cubic interpolation over a measured calibration table maps the
        distance (0.125 m fully open .. 0 m closed) to the raw command
        value (0 .. 210).

        Args:
            aperture_distance: desired fingertip separation in meters.
            delay_time: seconds to wait for the motion to settle.
        """
        command_table = np.array([
            0, 20, 40, 60, 80, 100, 120, 130, 140, 150, 160, 170, 180, 185,
            190, 195, 200, 205, 210
        ])  #
        opening_table = np.array([
            125, 116.39, 105.33, 93.91, 82.48, 70.44, 58.09, 51.90, 45.47,
            39.05, 32.84, 26.76, 19.65, 16.39, 13.32, 10.20, 7.76, 3.89, 0
        ]) / 1000
        distance_to_command = interpolate.interp1d(opening_table,
                                                   command_table,
                                                   kind=3)
        self.robotiqgrip.gripper_action(
            int(distance_to_command(aperture_distance)))
        time.sleep(delay_time)

    def go_to_home(self):
        """Move (joint-space) to the scooping home configuration."""
        # Home configuration in degrees, converted to radians for movej.
        home_deg = [100.28, -80.88, -126.70, -61.56, 90.35, -81.93]
        home_rad = tuple(math.radians(angle) for angle in home_deg)
        self.rob.movej(home_rad, 0.3, 0.5)

    def go_to_home_up(self):
        """Move (joint-space) to the raised home configuration."""
        # Raised home configuration in degrees, converted for movej.
        home_deg = [100.21, -77.82, -121.66, -69.66, 90.38, -81.95]
        home_rad = tuple(math.radians(angle) for angle in home_deg)
        self.rob.movej(home_rad, 0.3, 0.5)

    def Frame(pos, ori):
        mat = R.from_quat(ori).as_matrix()
        F = np.concatenate([
            np.concatenate([mat, [[0, 0, 0]]], axis=0),
            np.reshape([*pos, 1.], [-1, 1])
        ],
                           axis=1)
        return F

    def exe_scoop(self,
                  pos,
                  rot_z,
                  ini_aperture,
                  theta=60 * pi / 180):  # rot_z rad   aperture distance
        """Execute one scooping attempt at ``pos`` with yaw ``rot_z``.

        Args:
            pos: target (x, y, z) in the robot base frame (meters).
            rot_z: gripper yaw about the tool z axis (radians).
            ini_aperture: initial fingertip separation (meters); must be
                one of the calibrated values below, else NameError.
            theta: scooping attack angle (radians), default 60 degrees.

        Returns:
            -1 if the pose would collide with the bowl; otherwise 0 or 1
            depending on the operator's success/failure answer.
        """
        if self.collision_check_scooping(pos, rot_z, ini_aperture,
                                         theta) == True:
            print("Collision!")
            return -1
        self.go_to_home()
        self.rob.translate((0, 0, 0.06), acc=0.5, vel=0.8)
        # Calibrated TCP offsets per supported initial aperture: the TCP is
        # placed at the fingertip so subsequent translate/rotate commands
        # are expressed about the contact point.
        if ini_aperture == 0.05:
            self.rob.set_tcp((0.0295, 0.0, 0.3389, 0, 0, 0))  #need change
            time.sleep(0.3)
        elif ini_aperture == 0.04:
            self.rob.set_tcp((0.0245, 0.0, 0.3396, 0, 0, 0))  #need change
            time.sleep(0.3)
        elif ini_aperture == 0.035:
            self.rob.set_tcp((0.022, 0.0, 0.3396, 0, 0, 0))  #need change
            time.sleep(0.3)
        elif ini_aperture == 0.03:
            self.rob.set_tcp((0.0195, 0.0, 0.3414, 0, 0, 0))  #need change
            time.sleep(0.3)
        elif ini_aperture == 0.025:
            self.rob.set_tcp((0.017, 0.0, 0.3414, 0, 0, 0))  #need change
            time.sleep(0.3)
        elif ini_aperture == 0.02:
            self.rob.set_tcp((0.0145, 0.0, 0.342, 0, 0, 0))  #need change
            time.sleep(0.3)
        elif ini_aperture == 0.015:
            self.rob.set_tcp((0.012, 0.0, 0.3425, 0, 0, 0))  #need change
            time.sleep(0.3)
        else:
            raise NameError("Wrong ini aperture!")
        self.gp_control_distance(ini_aperture, delay_time=0.2)
        self.control_exted_thumb.set_thumb_length_int(0)
        eefPose = self.rob.get_pose()
        time.sleep(0.5)
        eefPose = eefPose.get_pose_vector()
        # Move above the target in XY, apply the yaw, then descend close to
        # the target height before tilting to the attack angle.
        self.rob.translate((pos[0] - eefPose[0], pos[1] - eefPose[1], 0),
                           acc=0.5,
                           vel=0.8)
        self.rob.movel_tool((0, 0, 0, 0, 0, rot_z), acc=0.5, vel=0.8)
        self.rob.translate((0, 0, pos[2] + 0.01 - eefPose[2]),
                           acc=0.05,
                           vel=0.05)
        self.rob.movel_tool((0, 0, 0, 0, pi / 2 - theta, 0),
                            acc=0.5,
                            vel=0.8,
                            wait=True)
        # Descend slowly until the FT300 y-torque rises above baseline three
        # times in a row (contact), or 2.8 s elapses.
        self.rob.translate((0, 0, -0.02), acc=0.01, vel=0.05, wait=False)
        ini_torque_y = self.getFT300SensorData()[4]
        time0 = time.time()
        num_large_force = 0
        while True:
            if num_large_force == 3:
                self.rob.stopl()
                break
            torque_y = self.getFT300SensorData()[4]
            if torque_y > ini_torque_y + 0.1:
                num_large_force += 1
            if time.time() - time0 > 2.8:
                break
        time.sleep(0.1)

        # Extend the thumb in steps until it also registers contact torque.
        ini_torque_y = self.getFT300SensorData()[4]
        shortest_thumb_length = self.control_exted_thumb.shortest_thumb_length
        longest_thumb_length = self.finger_length - ini_aperture / tan(theta)
        for current_thumb_length in np.arange(
                shortest_thumb_length, longest_thumb_length + (1e-8),
            (longest_thumb_length - shortest_thumb_length) / 10):
            self.control_exted_thumb.set_thumb_length(current_thumb_length)
            torque_y = self.getFT300SensorData()[4]
            if torque_y > ini_torque_y + 0.2:
                break

        # Close the gripper in 2 mm steps; at each step translate the tool
        # and extend the thumb so the right fingertip stays fixed in the
        # world (see scooping_parameter_finger_fixed).
        for aperture_distance in np.arange(ini_aperture, -1e-5, -0.002):
            aperture_angle = self.from_aperture_distance_to_angle(
                aperture_distance, self.l0, self.l1, self.l2l, self.l2r)
            next_aperture_angle = self.from_aperture_distance_to_angle(
                aperture_distance - 0.002, self.l0, self.l1, self.l2l,
                self.l2r)
            translate_dir_dis, extension_distance = self.scooping_parameter_finger_fixed(
                aperture_angle, next_aperture_angle, theta * 180 / pi, self.l0,
                self.l1, self.l2l, self.l2r, self.finger_length)
            current_thumb_length += extension_distance
            self.control_exted_thumb.set_thumb_length(current_thumb_length)
            self.gp_control_distance(max(aperture_distance, 0), delay_time=0)
            self.rob.translate_tool((sin(theta) * translate_dir_dis[0] +
                                     cos(theta) * translate_dir_dis[1], 0,
                                     cos(theta) * translate_dir_dis[0] -
                                     sin(theta) * translate_dir_dis[1]),
                                    acc=0.1,
                                    vel=0.4,
                                    wait=False)
            #time.sleep(0.5)
        # Retreat along the tool axis, re-home, and ask the operator whether
        # the scoop succeeded.
        self.rob.translate_tool((0, 0, -0.12), acc=0.3, vel=0.8, wait=True)
        self.go_to_home_up()
        self.go_to_home()
        self.gp_control_distance(0.03)
        while True:
            whether_successful = input("Whether successful? (y or n)")
            if whether_successful == 'n':
                return 0
            elif whether_successful == 'y':
                return 1

    def finger_tip_position_wrt_gripper_frame(self, alpha, l0, l1, l2l, l2r,
                                              flength_l, flength_r):
        """Fingertip (x, y) coordinates in the gripper frame.

        Args:
            alpha: finger swing angle in degrees.
            l0, l1, l2l, l2r: gripper linkage lengths (meters).
            flength_l, flength_r: left/right finger lengths (meters).

        Returns:
            ([x_left, y_left], [x_right, y_right]) in meters.
        """
        alpha_rad = alpha * pi / 180
        spread = l1 * sin(alpha_rad)
        drop = l1 * cos(alpha_rad)
        left_tip = [-l0 - spread + l2l, -drop - flength_l]
        right_tip = [l0 + spread - l2r, -drop - flength_r]
        return left_tip, right_tip

    def from_aperture_distance_to_angle(self, distance, l0, l1, l2l,
                                        l2r):  #angle
        """Inverse aperture model: fingertip separation -> swing angle.

        Args:
            distance: fingertip separation in meters.
            l0, l1, l2l, l2r: gripper linkage lengths (meters).

        Returns:
            Finger swing angle in degrees.
        """
        ratio = (distance + l2l + l2r - 2 * l0) / (2 * l1)
        return asin(ratio) * 180 / pi

    def scooping_parameter_finger_fixed(self, current_alpha, next_alpha, theta,
                                        l0, l1, l2l, l2r, flength_r):
        """Per-step motion parameters for closing the gripper while scooping.

        Given the current and next finger swing angles (degrees) and the
        attack angle ``theta`` (degrees), computes how far the tool must
        translate so the RIGHT fingertip stays fixed in the world, and how
        much the extendable thumb must lengthen to keep surface contact.

        Returns:
            (translate_xy, extension_distance): world-frame [dx, dy]
            translation (meters) and thumb extension increment (meters).
        """
        theta = theta * pi / 180
        # Left-finger length that puts both fingertips on the theta-inclined
        # surface at the current aperture.
        current_flength_l = flength_r - (2 * l0 + 2 * l1 * sin(
            current_alpha * pi / 180) - l2l - l2r) / tan(theta)
        current_left_fingertip_pos_g, current_right_fingertip_pos_g = self.finger_tip_position_wrt_gripper_frame(
            current_alpha, l0, l1, l2l, l2r, current_flength_l, flength_r)
        next_left_fingertip_pos_g, next_right_fingertip_pos_g = self.finger_tip_position_wrt_gripper_frame(
            next_alpha, l0, l1, l2l, l2r, current_flength_l, flength_r)
        # Gripper-frame translation that cancels the right fingertip's
        # motion between the two apertures.
        traslating_wst_g = [
            current_right_fingertip_pos_g[0] - next_right_fingertip_pos_g[0],
            current_right_fingertip_pos_g[1] - next_right_fingertip_pos_g[1]
        ]
        # Rotate that translation into the world frame (tool tilted by theta).
        traslating_wst_world = np.array([[sin(theta), -cos(theta)],
                                         [cos(theta), sin(theta)]]).dot(
                                             np.array([[traslating_wst_g[0]],
                                                       [traslating_wst_g[1]]
                                                       ])).ravel().tolist()
        # Apply the compensating translation to both predicted fingertips.
        next_left_fingertip_pos_g = [
            next_left_fingertip_pos_g[0] + traslating_wst_g[0],
            next_left_fingertip_pos_g[1] + traslating_wst_g[1]
        ]
        next_right_fingertip_pos_g = [
            next_right_fingertip_pos_g[0] + traslating_wst_g[0],
            next_right_fingertip_pos_g[1] + traslating_wst_g[1]
        ]
        # Residual gap between the left fingertip and the inclined surface —
        # this is what the extendable thumb must make up.
        extension_distance = -(
            next_right_fingertip_pos_g[1] +
            (2 * l0 + 2 * l1 * sin(next_alpha * pi / 180) - l2l - l2r) /
            tan(theta) - next_left_fingertip_pos_g[1])
        return traslating_wst_world, extension_distance

    def set_joint_positions_collision_check(self, body, joints, values):
        """Teleport the given pybullet body's joints to ``values``."""
        assert len(joints) == len(values)
        for joint_index, joint_value in zip(joints, values):
            p.resetJointState(body, joint_index, joint_value)

    def collision_check_initialization(self, bowl_position):
        """Start a headless pybullet session used only for collision tests.

        Loads the finger, extendable-thumb and bowl URDF models so that
        collision_check_scooping() can query closest points between them.

        Args:
            bowl_position: (x, y, z) of the bowl in the base frame (meters).
        """
        # Joint indices of the thumb model repositioned on every check.
        self.THUMB_JOINT_INDICES = [0]
        # DIRECT = no GUI; this client exists purely for geometry queries.
        physicsClient = p.connect(p.DIRECT)
        p.setAdditionalSearchPath(pybullet_data.getDataPath())
        p.setPhysicsEngineParameter(enableFileCaching=0)
        p.setGravity(0, 0, -9.8)
        p.configureDebugVisualizer(p.COV_ENABLE_GUI, False)
        p.configureDebugVisualizer(p.COV_ENABLE_SHADOWS, True)
        p.resetDebugVisualizerCamera(cameraDistance=1.400,
                                     cameraYaw=58.000,
                                     cameraPitch=-42.200,
                                     cameraTargetPosition=(0.0, 0.0, 0.0))
        self.CLIENT = 0
        # NOTE(review): absolute, machine-specific URDF paths — consider
        # making these configurable.
        self.finger_collision_check = p.loadURDF(
            '/home/terry/catkin_ws/src/scoop/models/urdf/Finger-2.9-urdf.urdf',
            basePosition=[0, 0, 0],
            baseOrientation=[0, 0, 1, 0],
            useFixedBase=False)
        self.thumb_collision_check = p.loadURDF(
            '/home/terry/catkin_ws/src/scoop/models/urdf/Assem-1.9-urdf.urdf',
            basePosition=[0, 0, 0],
            baseOrientation=[0, 0, 1, 0],
            useFixedBase=False)
        self.bowl_position = bowl_position
        self.bowl_circular_collision_check = p.loadURDF(
            '/home/terry/catkin_ws/src/scoop/models/urdf/Bowl-circular-urdf.urdf',
            basePosition=self.bowl_position,
            baseOrientation=[0, 0, 0, 1],
            useFixedBase=True)
        self.obstacles_collision_check = [self.bowl_circular_collision_check]

    def collision_check_scooping(self,
                                 pos,
                                 rot_z,
                                 ini_aperture,
                                 theta=60 * pi / 180):
        """Check whether a scooping pose would collide with the bowl.

        First applies cheap 2D containment tests (both fingertips must fall
        inside the bowl rim), then poses the finger and thumb URDF models
        in pybullet and queries closest points against the bowl mesh.

        Args:
            pos: scoop target (x, y, z) in the base frame (meters).
            rot_z: gripper yaw (radians).
            ini_aperture: initial fingertip separation (meters).
            theta: attack angle (radians).

        Returns:
            True if any collision (or out-of-bowl condition) is detected,
            False otherwise.
        """
        # Reject targets whose fingertip lies outside the bowl footprint.
        if Point([pos[0], pos[1]]).within(
                Point([self.bowl_position[0],
                       self.bowl_position[1]]).buffer(0.06)) == False:
            return True
        # Same test for the opposite (thumb-side) contact point.
        if Point([
                pos[0] - ini_aperture * sin(rot_z),
                pos[1] - ini_aperture * cos(rot_z)
        ]).within(
                Point([self.bowl_position[0],
                       self.bowl_position[1]]).buffer(0.055)) == False:
            return True

        # Thumb prismatic-joint value matching the initial aperture.
        start_conf_thumb = [
            self.control_exted_thumb.shortest_thumb_length -
            (self.finger_length - ini_aperture / tan(theta))
        ]
        #start_conf_thumb = [0.1-(self.finger_length-ini_aperture/tan(theta))]
        # Finger orientation: canonical EE frame, then yaw, then tilt to the
        # attack angle.  NOTE(review): from_dcm/as_dcm are deprecated scipy
        # aliases (removed in scipy >= 1.6) — from_matrix/as_matrix are the
        # modern equivalents.
        fingerOrientation_raw = R.from_dcm([[0, 1, 0], [1, 0, 0], [
            0, 0, -1
        ]]) * R.from_euler('z', rot_z, degrees=False) * R.from_euler(
            'y', pi / 2 - theta, degrees=False)
        fingerOrientation = fingerOrientation_raw.as_quat().tolist()
        # Unit vector from thumb to finger in the world frame.
        ThumbTFinger_normal = fingerOrientation_raw.as_dcm()[:, 0].tolist()
        fingertipPosition = pos
        p.resetBasePositionAndOrientation(self.finger_collision_check,
                                          fingertipPosition, fingerOrientation)
        # Non-empty closest-points result at distance 0 means contact.
        collision_fn = [
            len(
                p.getClosestPoints(bodyA=self.finger_collision_check,
                                   bodyB=self.bowl_circular_collision_check,
                                   distance=0,
                                   physicsClientId=self.CLIENT)) != 0
        ]

        fingerBasePosition = list(
            p.getLinkState(self.finger_collision_check, 0)[0])
        thumbOrientation = fingerOrientation
        # Sweep the thumb across the closing range (5 samples) and collect a
        # collision flag for each placement.
        for finger_thumb_distance in np.arange(
                ini_aperture + self.l2l + self.l2r,
                ini_aperture - 0.0001 + self.l2l + self.l2r,
                -ini_aperture / 5):
            thumbBasePosition = [
                fingerBasePosition[0] -
                finger_thumb_distance * ThumbTFinger_normal[0],
                fingerBasePosition[1] -
                finger_thumb_distance * ThumbTFinger_normal[1],
                fingerBasePosition[2] -
                finger_thumb_distance * ThumbTFinger_normal[2]
            ]
            self.set_joint_positions_collision_check(
                self.thumb_collision_check, self.THUMB_JOINT_INDICES,
                start_conf_thumb)
            p.resetBasePositionAndOrientation(self.thumb_collision_check,
                                              thumbBasePosition,
                                              thumbOrientation)
            collision_fn.append(
                len(
                    p.getClosestPoints(
                        bodyA=self.thumb_collision_check,
                        bodyB=self.bowl_circular_collision_check,
                        distance=0,
                        physicsClientId=self.CLIENT)) != 0)
        # True if any sampled placement collided.
        result = False
        for element in collision_fn:
            result = result or element
        #print(collision_fn)
        return result

    def fromResultToRobotParameter(self, MaskRCNNResult, workspace_limits):
        """Pick a detected object and derive a scooping pose from it.

        Iterates over the (shuffled) Mask R-CNN detections, projects each
        into the robot base frame, skips detections with no depth or lying
        outside ``workspace_limits``, and derives a scooping position and
        yaw from the detection's surface normal.

        Args:
            MaskRCNNResult: list of dicts with keys 'x', 'y', 'z'
                (camera-frame coordinates, meters) and 'normal' (3-vector).
                Shuffled in place.
            workspace_limits: [[x_min, x_max], [y_min, y_max],
                [z_min, z_max]] in the base frame (meters).

        Returns:
            (pos, rot_z) for the first usable detection, or (None, None)
            when no detection qualifies.
        """
        random.shuffle(MaskRCNNResult)
        for ObjectPose in MaskRCNNResult:
            # Zero depth means the detector had no valid depth reading.
            if ObjectPose['z'] == 0:
                continue
            CamTObject = np.array(
                [ObjectPose['x'], ObjectPose['y'], ObjectPose['z'], 1])
            # baseTcam is an np.matrix (urx get_matrix), hence the
            # .tolist()[0] to flatten the 1xN row result.
            BaseTObject = np.matmul(self.baseTcam, CamTObject).tolist()[0][0:3]
            if BaseTObject[0] <= workspace_limits[0][0] or BaseTObject[
                    0] >= workspace_limits[0][1] or BaseTObject[
                        1] <= workspace_limits[1][0] or BaseTObject[
                            1] >= workspace_limits[1][1] or BaseTObject[
                                2] <= workspace_limits[2][0] or BaseTObject[
                                    2] >= workspace_limits[2][1]:
                continue
            NormalInCamera = ObjectPose['normal'] / np.linalg.norm(
                ObjectPose['normal'])
            NormalInTcp = np.matmul(self.eeTcam[0:3, 0:3],
                                    NormalInCamera).tolist()
            alpha0 = atan2(NormalInTcp[1], NormalInTcp[0])  # yaw of normal
            if alpha0 != alpha0:  # NaN check (NaN != NaN)
                continue
            beta = -(pi / 2 + atan2(
                NormalInTcp[2], sqrt(NormalInTcp[0]**2 + NormalInTcp[1]**2))
                     )  # tilt of the normal away from vertical
            if beta != beta:  # NaN check
                continue
            if abs(beta) < (5 * pi / 180):
                # Near-vertical surface: yaw is ill-defined, so pick one of
                # the 8 principal directions at random.
                alpha_set = [
                    0, pi / 4, -pi / 4, pi * 3 / 4, -pi * 3 / 4, pi, pi / 2,
                    -pi / 2
                ]
                # Bug fix: the original used
                # alpha_set[random.randint(0, len(alpha_set))]; randint is
                # inclusive on BOTH ends, so it could raise IndexError.
                rot_z = random.choice(alpha_set)
                pos = [
                    BaseTObject[0] + 0.01 * sin(rot_z),
                    BaseTObject[1] + 0.01 * cos(rot_z), BaseTObject[2]
                ]
            else:
                # Snap the yaw to the nearest 45-degree increment.
                rot_z = round(alpha0 / (pi / 4)) * (pi / 4)
                pos = [
                    BaseTObject[0] + 0.01 * cos(beta) * sin(rot_z),
                    BaseTObject[1] + 0.01 * cos(beta) * cos(rot_z),
                    BaseTObject[2]
                ]
            return pos, rot_z
        # Loop exhausted without a usable detection.
        return None, None