Example #1
    joint_angle_list = []  #Initialize the list to hold our joint positions
    pose_list = []
    gripper_list = []

    input(
        "Press enter to start recording. To finish recording press <ctrl+c>.")

    try:
        last_time = 0.0
        while True:
            position = blue.get_joint_positions()  # record the current joint positions
            joint_angle_list.append(position)
            pose = blue.get_cartesian_pose()
            pose_list.append(pose)
            gripper_pos = blue.get_gripper_position()
            gripper_list.append(gripper_pos)
            print("Position recorded!")
            sleep_time = 1.0 / frequency - (time.time() - last_time)
            if sleep_time > 0:
                time.sleep(sleep_time)
            last_time = time.time()
    except KeyboardInterrupt:
        print(joint_angle_list)

    if len(joint_angle_list) == 0:
        print('You did not save any positions')
    else:
        # Placeholder dump target: the file name and the tuple layout below are
        # assumptions; any writable binary file works.
        with open("recorded_poses.pickle", "wb") as handle:
            pickle.dump((joint_angle_list, pose_list, gripper_list), handle,
                        protocol=pickle.HIGHEST_PROTOCOL)
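
For illustration, a minimal playback sketch for the recording above. It assumes the three lists were pickled together as one tuple into recorded_poses.pickle (placeholder name) and that blue, frequency, and numpy (np) are available as in the recording snippet; set_joint_positions and command_gripper are used the same way as in the other examples on this page.

import pickle
import time

with open("recorded_poses.pickle", "rb") as handle:  # placeholder file name
    joint_angle_list, pose_list, gripper_list = pickle.load(handle)

for joints, gripper in zip(joint_angle_list, gripper_list):
    blue.set_joint_positions(np.array(joints))   # command the recorded joint positions
    blue.command_gripper(gripper, 10.0, True)    # restore the recorded gripper position
    time.sleep(1.0 / frequency)                  # replay at roughly the recording frequency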
Example #2
import numpy as np
from gym import spaces

from blue_interface import BlueInterface
# StreamVideo is a project-specific camera helper; its import is assumed to be
# available in the original module.


class BlueRobotEnv(object):
    def __init__(self, from_pixels=True, height=100, width=100, camera_id=0,
                 control="torque", frame_skip=1, seed=1):
        side = "right"
        ip = "127.0.0.1"
        self.blue = BlueInterface(side, ip)
        self.blue.calibrate_gripper()
        self._from_pixels = from_pixels
        self.control = control
        self._frame_skip = frame_skip
        self.channels_first = True

        self.camera = StreamVideo(height=height, width=width, camera_id=camera_id)
        self.camera.start()

        # true and normalized action spaces
        self._true_action_space = spaces.Box(
            low=np.array([-3.3999, -2.2944, -2.6761, -2.2944, -2.6761, -2.2944, -2.6761, 0.0]),
            high=np.array([2.3412, 0.0, 2.6761, 0.0, 2.6761, 0.0, 2.676, 0.0]),
            dtype=np.float32
        )
        self._norm_action_space = spaces.Box(
            low=-1.0,
            high=1.0,
            shape=self._true_action_space.shape,
            dtype=np.float32
        )

        # create observation space
        if from_pixels:
            shape = [3, height, width] if self.channels_first else [height, width, 3]
            self._observation_space = spaces.Box(
                low=0, high=255, shape=shape, dtype=np.uint8
            )
        else:
            # Low-dimensional observations: joint/gripper positions and
            # velocities as returned by get_robot_joints() (8 + 8 values).
            self._observation_space = spaces.Box(
                low=-np.inf, high=np.inf, shape=(16,), dtype=np.float32
            )

        self._state_space = spaces.Box(
            low=-np.inf, high=np.inf, shape=(16,), dtype=np.float32
        )
        
        self.current_state = None

        # set seed
        np.random.seed(seed)

    def __del__(self):
        self.camera.stop()

    @property
    def observation_space(self):
        return self._observation_space

    @property
    def state_space(self):
        return self._state_space

    @property
    def action_space(self):
        return self._norm_action_space

    def step(self, a):
        # a = [7 joint commands, gripper position]
        for _ in range(self._frame_skip):
            if self.control == "torque":
                self.blue.set_joint_torques(a[:-1])
            else:
                self.blue.set_joint_positions(a[:-1], duration=0.1)
            self.blue.command_gripper(a[-1], 20.0, wait=True)
        obs = self._get_obs()
        reward = 0
        done = False
        return obs, reward, done, {}

    def reset(self):
        init_pos = np.array([0, -2.31, 1.57, -0.75, -1.57, 0, 0])
        self.blue.set_joint_positions(init_pos, duration=0.0)
        self.blue.calibrate_gripper()
        self.blue.command_gripper(0.0, 20.0, wait=False)
        obs = self._get_obs()
        return obs

    def _get_obs(self):
        if self._from_pixels:
            obs = self.render()
            if self.channels_first:
                obs = obs.transpose(2, 0, 1).copy()
        else:
            obs = self.get_robot_joints()
        return obs

    def render(self):
        return self.camera()

    @property
    def get_qpos(self):
        joint = np.asarray(self.blue.get_joint_positions())
        gripper = self.blue.get_gripper_position()
        return np.concatenate((joint, np.atleast_1d(gripper)), axis=0)

    @property
    def get_qvel(self):
        joint = np.asarray(self.blue.get_joint_velocities())
        # The interface does not report gripper velocity; assume zero.
        return np.concatenate((joint, [0.0]), axis=0)

    def get_robot_joints(self):
        return np.concatenate([
            self.get_qpos,
            self.get_qvel,
        ])
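
For illustration, a usage sketch of BlueRobotEnv as defined above; it assumes a reachable robot and rosbridge server at 127.0.0.1 and a working camera. The action layout (7 joints + gripper) follows the constructor and step(); note that step() passes actions straight to the robot without rescaling them from the normalized action space.

import numpy as np

env = BlueRobotEnv(from_pixels=True, height=100, width=100,
                   camera_id=0, control="position")
obs = env.reset()
for _ in range(10):
    # Placeholder action: hold zero commands; a real agent would output
    # values in the normalized [-1, 1] space.
    action = np.zeros(env.action_space.shape, dtype=np.float32)
    obs, reward, done, info = env.step(action)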
Example #3
#!/usr/bin/env python3

# A basic example of reading information about Blue's current state.

import time

import numpy as np

from blue_interface import BlueInterface

side = "right"
ip = "127.0.0.1"
blue = BlueInterface(side, ip)


def print_aligned(left, right):
    print("{:30} {}".format(left, np.round(right, 4)))


while True:
    print_aligned("End Effector Position:", blue.get_cartesian_pose()["position"])
    print_aligned("End Effector Orientation:", blue.get_cartesian_pose()["orientation"])
    print_aligned("Joint Positions:", blue.get_joint_positions())
    print_aligned("Joint Velocities:", blue.get_joint_velocities())
    print_aligned("Joint Torques:", blue.get_joint_torques())
    print_aligned("Gripper Position:", blue.get_gripper_position())
    print_aligned("Gripper Effort:", blue.get_gripper_effort())
    print("=" * 30)
    time.sleep(0.5)
Example #4
    args = parser.parse_args()

    arm = consts.default_arm
    address = args.address
    port = args.port
    filename = args.tape_file

    blue = BlueInterface(arm, address, port)  # connect to the arm through the BlueInterface client

    print("..")
    recorded_positions = []
    error = 0.5

    blue.disable_control()
    blue.disable_gripper()

    #TODO: make number of positions a command line arg
    with open(filename, 'rb') as handle:
        recorded_positions = pickle.load(handle)

    input("Press enter to start trajectory.")

    while True:
        for desired_position in recorded_positions:
            current_position = (blue.get_gripper_position(), blue.get_joint_positions())
            blue.set_joint_positions(np.array(desired_position[1]))
            blue.command_gripper(desired_position[0], 2.0, True)
            while np.linalg.norm(desired_position[1] - current_position[1]) > error:
                current_position = (blue.get_gripper_position(), blue.get_joint_positions())
                time.sleep(0.1)

    # --- Second part of this example: record a set of positions, then replay them ---
    port = args.port
    num_positions = args.num_positions
    filename = args.tape_file

    blue = BlueInterface(arm, address, port)  # connect to the arm through the BlueInterface client

    recorded_positions = []
    error = 0.1

    blue.disable_control()
    blue.disable_gripper()

    #TODO: make number of positions a command line arg
    for i in range(num_positions):
        input("Press enter to record current joint position " + str(i) + ".")
        recorded_positions.append((blue.get_gripper_position(), blue.get_joint_positions()))

    with open(filename, 'wb') as handle:
        pickle.dump(recorded_positions, handle, protocol=pickle.HIGHEST_PROTOCOL)

    input("Press enter to start trajectory.")

    while True:
        for desired_position in recorded_positions:
            current_position = (blue.get_gripper_position(), blue.get_joint_positions())
            blue.set_joint_positions(np.array(desired_position[1]))
            blue.command_gripper(desired_position[0], 10.0, True)
            while np.linalg.norm(desired_position[1] - current_position[1]) > error:
                current_position = (blue.get_gripper_position(), blue.get_joint_positions())
                time.sleep(0.1)
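
The argument parser used at the top of this example is outside the excerpt. A reconstruction consistent with the attributes accessed above (address, port, tape_file, num_positions) might look like the following; the option names mirror those attributes, and every default value here is a guess.

import argparse

parser = argparse.ArgumentParser(
    description="Record and replay Blue arm poses")
parser.add_argument("--address", default="127.0.0.1",
                    help="robot IP address (guessed default)")
parser.add_argument("--port", type=int, default=9090,
                    help="rosbridge websocket port (guessed default)")
parser.add_argument("--tape_file", default="tape.pickle",
                    help="pickle file for recorded positions (guessed default)")
parser.add_argument("--num_positions", type=int, default=5,
                    help="number of poses to record (guessed default)")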