Example #1
#!/usr/bin/env python3

# A basic example of using BlueInterface for joint position control.
# It commands all seven joints to their zero positions and then idles in an
# infinite loop. When the script terminates (e.g. via Ctrl+C), the robot
# automatically stops its controllers and returns to gravity compensation mode.

import sys
from blue_interface import BlueInterface
import numpy as np
import time

if __name__ == '__main__':
    arm = "right"
    address = "127.0.0.1"

    blue = BlueInterface(arm, address)
    blue.set_joint_positions(np.zeros(7), duration=3.0)

    while True:
        pass
    # blue.shutdown()
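The commented-out blue.shutdown() above hints at explicit cleanup. A minimal variant of the same script, assuming such a shutdown() method exists (it only appears here as a comment), that holds the zero position until Ctrl+C and then shuts the interface down:

from blue_interface import BlueInterface
import numpy as np

if __name__ == '__main__':
    blue = BlueInterface("right", "127.0.0.1")
    blue.set_joint_positions(np.zeros(7), duration=3.0)
    try:
        while True:
            pass
    except KeyboardInterrupt:
        blue.shutdown()  # assumed cleanup call, taken from the commented-out line above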
Example #2

for i in range(num_cycles):  # iterate over the number of cycles
    if shuff == 'y':
        # shuffle the list so the poses are visited in a different random order each cycle
        random.shuffle(location_list)
    print("Cycle #%d" % (i + 1))
    for j in range(len(location_list)):  # iterate over the (possibly shuffled) pose list

        curr_position = kk.get_joint_positions()
        num_steps = 20
        final = np.array(location_list[j][1])
        increment = (final - curr_position) / num_steps
        for step in range(1, num_steps + 1):
            incremental = curr_position + step * increment
            kk.set_joint_positions(incremental)  # tell the robot to go to the next interpolated set of joint angles

        print('position set')
        time.sleep(3)  # wait for any residual motion to subside
        measured_pose = vv.get_vive_tracker_data(
        )  #measure the pose using the vive system
        #print(measured_position)
        if measured_pose == 0:
            print("Don't forget to restart the Vive system on the other computer. Quitting now.")
            sys.exit()
        pos = measured_pose.pose.position
        ori = measured_pose.pose.orientation
        measured_position = {
            'vive_position': [pos.x, pos.y, pos.z],
            'vive_orientation': [ori.x, ori.y, ori.z, ori.w]
        }
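The measured_position dictionary built above is presumably collected over all poses and saved for offline calibration. A hypothetical sketch of that bookkeeping (the record layout, list name, and filename are illustrative; pickle is also what Example #6 uses for stored trajectories):

import pickle

def save_measurements(records, path='vive_measurements.p'):
    # records: a list of dicts, e.g. {'commanded_joints': [...], 'measured': measured_position}
    with open(path, 'wb') as handle:
        pickle.dump(records, handle)

# Inside the measurement loop above, one might do:
#   records.append({'commanded_joints': list(final), 'measured': measured_position})
# and call save_measurements(records) once all cycles are done.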
Example #3
    # If no argument is passed for replay frequency, play the recording at the rate it was recorded.
    if args.frequency == 0:
        frequency = record_frequency  # In Hertz
    else:
        frequency = args.frequency

    input("Press enter to start replay. To exit, press <ctrl+c>.")

    while True:
        try:
            last_time = 0.0
            for i in range(len(joint_angle_list)):
                #if len(joint_angle_list[i]) == 7:
                blue.set_joint_positions(
                    np.array(joint_angle_list[i])
                )  # tell the robot to go to a set of joint angles
                #blue.command_gripper(gripper_list[i], 30.0)
                if gripper_list[i] > 0:
                    blue.command_gripper(2, 15.0)
                else:
                    blue.command_gripper(-0.8, 10.0)
                sleep_time = 1.0 / frequency - (time.time() - last_time)
                if sleep_time > 0:
                    time.sleep(sleep_time)
                last_time = time.time()

        except KeyboardInterrupt:
            break
        except Exception:
            print(sys.exc_info()[0])
            print("Something went wrong... exiting")
            break
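The replay loop above paces commands by hand with sleep_time = 1/frequency - elapsed. The same pacing logic, pulled out into a small reusable helper (the class name is an invention for illustration):

import time

class RateLimiter:
    # Keep a loop running at a fixed frequency by sleeping away the leftover time.
    def __init__(self, frequency_hz):
        self.period = 1.0 / frequency_hz
        self.last_time = time.time()

    def sleep(self):
        remaining = self.period - (time.time() - self.last_time)
        if remaining > 0:
            time.sleep(remaining)
        self.last_time = time.time()

# Usage in a replay loop:
#   limiter = RateLimiter(frequency)
#   for angles in joint_angle_list:
#       blue.set_joint_positions(np.array(angles))
#       limiter.sleep()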
Example #4
from blue_interface import BlueInterface
import spacemouse  # module providing the SpaceMouse class used below
import sys

if __name__ == '__main__':
    arm_side = "right"
    assert len(sys.argv) == 2
    address = sys.argv[1]

    # Python interface
    blue = BlueInterface(arm_side, address)
    home = [
        -0.433836, -1.16384, 0.75438465, -1.58150699, -0.05635529, -1.67967716,
        -0.13010218
    ]
    home = [0.0, -1.571, 0.0, -1.571, 0.0, -1.571, 0.0]
    blue.set_joint_positions(  # TODO: this sometimes fights with the IK solver, and makes the robot really jittery
        home,
        duration=1.0,
        # soft_position_control=True
    )
    position_control_mode = blue._control_mode  # super sketchy

    # Set up a publisher for our cartesian pose command (a little sketchy)
    RBC = blue._RBC
    pose_target_publisher = RBC.publisher(
        "/{}_arm/pose_target/command".format(arm_side),
        "geometry_msgs/PoseStamped")

    # 3D mouse ~~~
    mouse = spacemouse.SpaceMouse()

    # Initial position
    target_pose = blue.get_cartesian_pose()
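The snippet stops right after reading the initial cartesian pose. Below is a hypothetical sketch of the teleoperation loop that presumably follows: the SpaceMouse read method, the per-tick scaling, the frame_id, and the publisher's publish(dict) signature are all assumptions rather than documented blue_interface behavior; only the {'position', 'orientation'} layout of get_cartesian_pose() is taken from Example #9.

import time

# Hypothetical teleoperation loop, continuing from the variables defined above.
while True:
    dx, dy, dz = mouse.get_translation()  # assumed SpaceMouse read call; not a documented API

    # Nudge the cartesian target by a small step per axis (scaling is arbitrary).
    target_pose["position"][0] += 0.001 * dx
    target_pose["position"][1] += 0.001 * dy
    target_pose["position"][2] += 0.001 * dz

    # Publish as a geometry_msgs/PoseStamped dict; assumes the rosbridge-style
    # publisher accepts plain dicts via publish().
    pose_target_publisher.publish({
        "header": {"frame_id": "base_link"},
        "pose": {
            "position": dict(zip("xyz", map(float, target_pose["position"]))),
            "orientation": dict(zip("xyzw", map(float, target_pose["orientation"]))),
        },
    })
    time.sleep(0.01)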
Example #5
class BlueRobotEnv(object):
    def __init__(self, from_pixels=True, height=100, width=100, camera_id=0, control="torque", frame_skip=1):
        side = "right"
        ip = "127.0.0.1"
        self.blue = BlueInterface(side, ip)
        self.blue.calibrate_gripper()
        self._from_pixels = from_pixels
        self.control = control
        self._frame_skip = frame_skip
        self._channels_first = True
        
        self.camera = StreamVideo(height=height, width=width, camera_id=camera_id)
        self.camera.start()

        # true and normalized action spaces
        self._true_action_space = spaces.Box(
            low=np.array([-3.3999, -2.2944, -2.6761, -2.2944, -2.6761, -2.2944, -2.6761, 0.0]),
            high=np.array([2.3412, 0, 2.6761, 0, 2.6761, 0, 2.676, 0.0]),
            dtype=np.float32
        )
        self._norm_action_space = spaces.Box(
            low=-1.0,
            high=1.0,
            shape=self._true_action_space.shape,
            dtype=np.float32
        )

        # create observation space
        if from_pixels:
            shape = [3, height, width] if self._channels_first else [height, width, 3]
            self._observation_space = spaces.Box(
                low=0, high=255, shape=shape, dtype=np.uint8
            )
        else:
            self._observation_space = _spec_to_box(
                self._env.observation_spec().values()
            )
            
        self._state_space = _spec_to_box(
                self._env.observation_spec().values()
        )
        
        self.current_state = None

        # set seed
        self.seed(seed=task_kwargs.get('random', 1))

    def __del__(self):
        self.camera.stop()

    @property
    def observation_space(self):
        return self._observation_space

    @property
    def state_space(self):
        return self._state_space

    @property
    def action_space(self):
        return self._norm_action_space

    def step(self, a):
        for _ in range(self._frame_skip):
            # time_step = self._env.step(action)
            # reward += time_step.reward or 0
            if self.control == "torque":
                self.blue.set_joint_torques(a[:-1])
                self.blue.command_gripper(a[-1], 20.0, wait=True)
            else:
                self.blue.set_joint_positions(a[:-1], duration=0.1)
                self.blue.command_gripper(a[-1], 20.0, wait=True)
        obs = self._get_obs()
        reward = 0
        done = False
        return obs, reward, done, None

    def reset(self):
        init_pos = np.array([0, -2.31, 1.57, -0.75, -1.57, 0, 0])
        self.blue.set_joint_positions(init_pos, duration=0.0)
        self.blue.calibrate_gripper()
        self.blue.command_gripper(0.0, 20.0, wait=False)
        obs = self._get_obs()
        return obs

    def _get_obs(self):
        if self._from_pixels:
            obs = self.render()
            if self._channels_first:
                obs = obs.transpose(2, 0, 1).copy()
        else:
            obs = self.get_robot_joints()
        return obs

    def render(self):
        return self.camera()

    @property
    def get_qpos(self):
        joint = self.blue.get_joint_positions()
        gripper = self.blue.get_gripper_position()
        # get_gripper_position() is assumed to return a scalar, so wrap it before concatenating
        return np.concatenate((joint, [gripper]), axis=0)

    @property
    def get_qvel(self):
        joint = self.blue.get_joint_velocities()
        gripper = 0.0  # gripper velocity is not reported; assume zero
        return np.concatenate((joint, [gripper]), axis=0)

    def get_robot_joints(self):
        return np.concatenate([
            self.get_qpos,
            self.get_qvel,
        ])
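The class above defines both a normalized action space and the true joint-command bounds, but step() passes normalized actions straight through. A standalone sketch of the rescaling that pairing usually implies (the helper name is an invention, and the bounds are copied from _true_action_space):

import numpy as np

def convert_action(norm_action, true_low, true_high):
    # Map an action in [-1, 1] per dimension onto the true command bounds.
    norm_action = np.asarray(norm_action, dtype=np.float32)
    return true_low + (norm_action + 1.0) * 0.5 * (true_high - true_low)

low = np.array([-3.3999, -2.2944, -2.6761, -2.2944, -2.6761, -2.2944, -2.6761, 0.0])
high = np.array([2.3412, 0, 2.6761, 0, 2.6761, 0, 2.676, 0.0])
print(convert_action(np.zeros(8), low, high))  # midpoint of every joint's range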
Example #6
    args = parser.parse_args()

    arm = consts.default_arm
    address = args.address
    port = args.port
    filename = args.tape_file

    blue = BlueInterface(arm, address, port)  # create a BlueInterface object connected to the arm at the given address and port

    print("..")
    recorded_positions = []
    error = 0.5

    blue.disable_control()
    blue.disable_gripper()

    #TODO: make number of positions a command line arg
    with open(filename, 'rb') as handle:
        recorded_positions = pickle.load(handle)

    input("Press enter to start trajectory.")

    while True:
        for desired_position in recorded_positions:
            current_position = (blue.get_gripper_position(), blue.get_joint_positions())
            blue.set_joint_positions(np.array(desired_position[1]))
            blue.command_gripper(desired_position[0], 2.0, True)
            while np.linalg.norm(desired_position[1] - current_position[1]) > error:
                current_position = (blue.get_gripper_position(), blue.get_joint_positions())
                time.sleep(0.1)
            # Adjust the offset
            target_position[0] -= 0.4
            target_position[2] += 0.3
            target_orientation = list(euler2quat(ori))  # w, x, y, z
            # target_orientation = target_orientation[1:] + target_orientation[:1]

            # Compute IK solution
            goal_curr = blue.inverse_kinematics(target_position,
                                                target_orientation)
            # Send command to robot
            if len(goal_curr) > 0:
                goal = goal_curr
                print("goal: ", goal)
                blue.set_joint_positions(goal,
                                         duration=3,
                                         soft_position_control=False)
                blue.command_gripper(grab_strength, 10.0, wait=False)

        # Wait for system to settle
        i += 1
        time.sleep(3)

    # Direct motor angle mapping approach
    else:
        if "Right hand" in hands_data.keys():
            hand_data = hands_data["Right hand"]
            pos = hand_data['palm_position']
            ori = [
                hand_data['palm_pitch'], hand_data['palm_roll'],
                hand_data['palm_yaw']
            ]
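The hand-tracking branch above converts palm Euler angles to a quaternion with euler2quat and notes the (w, x, y, z) ordering, while inverse_kinematics (see Example #9) takes (x, y, z, w). A small sketch of that conversion and reordering, assuming euler2quat is transforms3d.euler.euler2quat and that the default static-axis convention matches the original script:

from transforms3d.euler import euler2quat  # assumption: this is the euler2quat used above

# Placeholder palm angles in radians.
palm_roll, palm_pitch, palm_yaw = 0.0, 0.1, -0.2

# transforms3d returns (w, x, y, z); the 'sxyz' axis convention is an assumption.
w, x, y, z = euler2quat(palm_roll, palm_pitch, palm_yaw)

# Reorder to (x, y, z, w) for inverse_kinematics, as the commented-out
# reordering line above suggests.
target_orientation = [x, y, z, w]
print(target_orientation)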
#!/usr/bin/env python3

# A basic example of using BlueInterface for joint position control.
# It reads the arm's current joint positions and commands it to hold them.

from blue_interface import BlueInterface
import numpy as np
import sys

assert len(sys.argv) == 2

side = "right"
ip = sys.argv[1]
blue = BlueInterface(side, ip)

current_joints = blue.get_joint_positions()
blue.set_joint_positions(current_joints, duration=3.0)

while True:
    pass

# When this script terminates (e.g. via Ctrl+C), the robot will automatically
# stop all controllers and go back to gravity compensation mode.

Example #9
from blue_interface import BlueInterface
import numpy as np
import time

side = "right"
ip = "127.0.0.1"
blue = BlueInterface(side, ip)

# Compute IK solution
target_position = [0.4, 0, 0]  # x, y, z
target_orientation = [0.6847088, -0.17378805, -0.69229771,
                      -0.1472938]  # x, y, z, w
target_joint_positions = blue.inverse_kinematics(target_position,
                                                 target_orientation)

# Send command to robot
blue.set_joint_positions(target_joint_positions, duration=5)

# Wait for system to settle
time.sleep(5)

# Print results
joint_positions = blue.get_joint_positions()
pose = blue.get_cartesian_pose()
print_aligned = lambda left, right: print("{:30} {}".format(
    left, np.round(right, 4)))
print_aligned("Target joint positions: ", target_joint_positions)
print_aligned("End joint positions: ", joint_positions)
print_aligned("Target cartesian position:", target_position)
print_aligned("End cartesian position:", pose["position"])
print_aligned("Target orientation:", target_orientation)
print_aligned("End orientation:", pose["orientation"])
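As a follow-on check, the final error can be quantified from the quantities Example #9 already has in scope (this continues the script above and reuses pose, target_position, and target_orientation; how small the error needs to be is up to the reader):

position_error = np.linalg.norm(np.array(pose["position"]) - np.array(target_position))
print_aligned("Cartesian position error:", position_error)

# Quaternions q and -q describe the same rotation, so compare via |dot| rather than subtraction.
orientation_agreement = abs(np.dot(np.array(pose["orientation"]), np.array(target_orientation)))
print_aligned("Orientation agreement:", orientation_agreement)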