コード例 #1
0
def center(robot: Robot, axis, ft=None, t=2.):
    """Find the compliant center of the TCP along *axis*.

    Pushes the TCP in both directions along/about *axis* using force mode,
    records the settled pose at each end, then moves to and returns the
    midpoint pose.

    axis: one of the entries of ``pose_elements`` (e.g. 'x' .. 'rz').
    ft: force [N] or torque [Nm] magnitude; defaults to 15 N / 2 Nm.
    t: total settle time, split evenly between the two pushes.
    """
    start_pose = robot.base_t_tcp()
    force_limits = np.ones(6)

    idx = pose_elements.index(axis)
    is_rotation = idx > 2
    if ft is None:
        ft = 2 if is_rotation else 15
    if is_rotation:
        # compliant about the chosen rotation axis, stiff about the others
        rot_sel = np.zeros(3, int)
        rot_sel[idx - 3] = 1
        selection = (*(1 - rot_sel), *rot_sel)
    else:
        selection = np.zeros(6, int)
        selection[idx] = 1
    wrench = np.zeros(6)
    wrench[idx] = ft

    endpoints = []  # type: List[Transform]
    for direction in (1, -1):
        robot.ctrl.forceMode(start_pose, selection, wrench * direction, 2,
                             force_limits)
        settle(robot, t=t / 2)
        endpoints.append(robot.base_t_tcp())
    robot.ctrl.forceModeStop()
    pose_a, pose_b = endpoints

    midpoint = Transform.lerp(pose_a, pose_b, 0.5)
    robot.ctrl.moveL(midpoint)
    return midpoint
コード例 #2
0
def get_stable_table_pose(robot: Robot,
                          force_down=25,
                          force_side=15,
                          torque=2):
    """Press the tool onto the table, center it, and return the TCP pose.

    Leaves the robot in teach mode so the operator can take over.
    """
    # align with table (z, rx, ry) by pressing straight down, fully compliant
    robot.ctrl.forceMode(robot.base_t_tcp(), (1, 1, 1, 1, 1, 1),
                         (0, 0, force_down, 0, 0, 0), 2, np.ones(6))
    settle(robot, t=2)

    # center rotation about z first, then translation along x and y
    center(robot, 'rz', ft=torque)
    center(robot, 'x', ft=force_side)
    center(robot, 'y', ft=force_side)

    stable_pose = robot.base_t_tcp()
    robot.ctrl.teachMode()
    return stable_pose
def main():
    """Replay a pruned demonstration, capturing a calibration image and the
    TCP pose at each sampled configuration.

    Saves images to <folder>/calibration-images/<i>.png and the matching TCP
    poses to <folder>/image_tcp_poses.npy.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--ur-ip', required=True)
    parser.add_argument('--image-topic', required=True)
    parser.add_argument('--exposure', type=float, default=1 / 30)
    parser.add_argument('--folder', required=True)
    parser.add_argument('--demonstration', required=True)
    parser.add_argument('--backwards', action='store_true')
    parser.add_argument('--bayered', action='store_true')

    rospy.init_node('image_capture_node', anonymous=True)

    args = parser.parse_args()

    folder = Path(args.folder)
    folder.mkdir(exist_ok=True)

    # deliberately no exist_ok here: refuses to overwrite a previous capture
    image_folder = folder / 'calibration-images'
    image_folder.mkdir()

    robot = Robot.from_ip(args.ur_ip)

    demonstration = np.load(args.demonstration)
    if args.backwards:
        demonstration = demonstration[::-1]

    print('demonstration length:', len(demonstration))
    # thin out near-duplicate configurations, then pick 100 well-spread ones
    prune_idxs = demo.prune(demonstration, min_q_dist=0.02)
    print('pruned length:', len(prune_idxs))
    pruned = demonstration[prune_idxs]
    fps_idxs = demo.farthest_point_sampling(pruned, n=100)

    cur_idx = 0
    poses = []
    for i, fps_idx in enumerate(tqdm(fps_idxs)):
        # presumably farthest_point_sampling returns indices in increasing
        # order — TODO confirm; the assert guards that assumption
        assert fps_idx >= cur_idx
        # subpath = pruned[cur_idx:fps_idx + 1]
        # demo.replay(subpath, rtde_c, blend=0.01)  # TODO: blending is still shaky?

        des_q = pruned[fps_idx][0]
        robot.ctrl.moveJ(des_q)

        # discard frames whose exposure started before the robot stopped:
        # keep polling until a frame's (stamp - exposure) is after 'now'
        now = time.time()
        while True:
            img = rospy.wait_for_message(args.image_topic, Image)  # type: Image
            if img.header.stamp.to_sec() - args.exposure > now:
                break
        img = image_to_numpy(img)
        if args.bayered:
            img = cv2.cvtColor(img, cv2.COLOR_BAYER_BG2BGR)
        img_path = image_folder / '{}.png'.format(i)
        # [..., ::-1] flips channel order for cv2.imwrite (expects BGR)
        cv2.imwrite(str(img_path), img[..., ::-1])

        poses.append(robot.recv.getActualTCPPose())

        cur_idx = fps_idx
    np.save(folder / 'image_tcp_poses.npy', poses)
コード例 #4
0
def record_data(name='data.npy', zero_payload=True):
    """Visit every joint configuration stored in qs.npy and record the TCP
    pose together with the force/torque reading at each stop.

    Saves the samples to *name* and returns them as a list.
    """
    joint_configs = np.load('qs.npy')
    robot = Robot.from_ip('192.168.1.130')
    if zero_payload:
        robot.ctrl.setPayload(0, (0, 0, 0))
    robot.ctrl.zeroFtSensor()
    samples = []
    for joint_config in joint_configs:
        robot.ctrl.moveJ(joint_config, 0.25)
        # brief pause so the F/T reading is taken at rest
        time.sleep(0.25)
        samples.append(list(robot.base_t_tcp()) + list(robot.ft_tcp()))

    np.save(name, samples)
    return samples
コード例 #5
0
def record_qs():
    """Interactively record joint configurations while in teach mode.

    Press enter to store the current joints; enter anything containing
    'n' to stop. Saves the list to qs.npy and returns it.
    """
    robot = Robot.from_ip('192.168.1.130')
    robot.ctrl.setPayload(1.7, (0, -0.008, 0.07))
    robot.ctrl.teachMode()
    recorded = []
    while 'n' not in input().lower():
        recorded.append(robot.recv.getActualQ())
    robot.ctrl.endTeachMode()

    np.save('qs.npy', recorded)
    return recorded
コード例 #6
0
def main():
    """Collect table-touch poses with the robot and calibrate the
    table-to-base transform, saved as 'table_t_base'."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--ur-ip', type=str, required=True)
    parser.add_argument('--n', type=int, default=3)
    parser.add_argument('--grid-size', type=float, default=0.05)
    parser.add_argument('--tcp-tool-offset', type=float, default=0.012)
    args = parser.parse_args()

    robot = Robot.from_ip(args.ur_ip)
    # zero the F/T sensor before any force-guided touching
    robot.ctrl.zeroFtSensor()

    table_poses, start_offset = collect_table_poses(robot, n=args.n)
    calibration = calibrate_from_table_poses(table_poses=table_poses,
                                             start_offset=start_offset,
                                             grid_size=args.grid_size,
                                             tcp_tool_offset=args.tcp_tool_offset)
    calibration.save('table_t_base')
コード例 #7
0
def main():
    """Calibrate the tool payload from recorded pose + F/T samples, apply it
    to the robot, and print residual force/torque statistics."""
    # record_data('data.npy', zero_payload=True)
    # quit()

    # each row: 6 pose values (xyz + rotvec) followed by 6 F/T values
    data = np.load('data.npy')
    base_t_tcps = [Transform.from_xyz_rotvec(d[:6]) for d in data]
    ft_tcps = [d[6:] for d in data]
    tool_mass, tcp_p_cog = calibrate_payload(base_t_tcps, ft_tcps)
    print(tool_mass, tcp_p_cog)
    r = Robot.from_ip('192.168.1.130')
    r.ctrl.setPayload(tool_mass, tcp_p_cog)

    # NOTE(review): this reloads the same data.npy recorded *before*
    # setPayload, so the "after" statistics only reflect the new payload if
    # record_data was re-run manually in between (cf. the commented call
    # above) — confirm the intended workflow.
    data_after = np.load('data.npy')
    # columns 6:9 are forces [N], 9: are torques [Nm]
    print('force max', np.abs(data_after[:, 6:9]).max())
    print('force mean', np.abs(data_after[:, 6:9]).mean())
    print('force rms', np.sqrt((data_after[:, 6:9]**2).mean()))
    print()
    print('torque max', np.abs(data_after[:, 9:]).max())
    print('torque mean', np.abs(data_after[:, 9:]).mean())
コード例 #8
0
def main():
    """Capture a demonstration to --file, or (with --replay) prune and
    replay a previously captured one."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--ur-ip', required=True)
    parser.add_argument('--file', required=True)
    parser.add_argument('--replay', action='store_true')
    parser.add_argument('--backwards', action='store_true')
    args = parser.parse_args()

    robot = Robot.from_ip(args.ur_ip)

    if not args.replay:
        # capture mode: record a fresh demonstration and store it
        np.save(args.file, capture(robot))
        return

    demonstration = np.load(args.file)
    if args.backwards:
        demonstration = demonstration[::-1]
    print(len(demonstration), 'samples in original')
    pruned = demonstration[prune(demonstration)]
    print(len(pruned), 'samples after pruning')
    # wait for operator confirmation before moving the robot
    input('replay?')
    replay(pruned, robot)
コード例 #9
0
# scripts/example_use_poses.py
from ur_control import Robot
from transform3d import Transform
import numpy as np

# Example: move the TCP above a previously calibrated board frame.
r = Robot.from_ip('192.168.1.130')
# known-collision-free joint configuration to pass through first
q_safe = (3.317805290222168, -1.3769071859172364, -1.9077906608581543,
          -1.4512933057597657, -4.752126518880026, -1.590332333241598)

# board frames calibrated earlier and stored on disk
base_t_taskboard = Transform.load('base_t_taskboard')
base_t_kitlayout = Transform.load('base_t_kitlayout')

# 0.24 m above the board origin; rotvec (pi, 0, 0) points the tool down
board_t_tcp_desired = Transform(p=(0, 0, 0.24), rotvec=(np.pi, 0, 0))

base_t_tcp = base_t_taskboard @ board_t_tcp_desired
# uncomment the following line to move above the kitlayout frame
# base_t_tcp = base_t_kitlayout @ board_t_tcp_desired
r.ctrl.moveJ(q_safe)
r.ctrl.moveL(base_t_tcp)
コード例 #10
0
ファイル: run.py プロジェクト: RasmusHaugaard/wrs-servo
np.random.seed(args.seed)

# pick a convergence/acceptance threshold based on the task named in the
# config path (presumably a positional tolerance in meters — TODO confirm;
# max_err is consumed later in this script, outside this excerpt)
if 'pinol' in args.config:
    max_err = 0.0035
elif 'm4' in args.config:
    max_err = 0.005
elif 'm3' in args.config:
    max_err = 0.0035
else:
    raise ValueError()

config = json.load(open(args.config))

rospy.init_node('servo')

# two robots: r_b does the servoing, r_c carries the camera
r_b = Robot.from_ip('192.168.1.129')
r_c = Robot.from_ip('192.168.1.131')

scene, state = utils.load_scene()

# cam safe q
r_c.ctrl.moveJ([
    2.2288451194763184, -1.490114526157715, 1.5134428183185022,
    -0.8572123807719727, -1.3860304991351526, -0.5580604712115687
])
# move both robots to their configured start configurations
for q, r in zip(config['robots_q_init'], [r_b, r_c]):
    r.ctrl.moveJ(q)

# zero the F/T sensor at the start pose and remember the initial TCP pose
r_b.zero_ft_sensor()
base_t_tcp_b_init = r_b.base_t_tcp()
コード例 #11
0
def main():
    """Locate the task board and kit layout frames from camera images and
    print the resulting base-frame transforms."""
    rospy.init_node('iros_vision_main_node')
    robot = Robot.from_ip('192.168.1.130')
    base_t_taskboard, base_t_kitlayout = capture_images_and_find_boards(robot)
    print('base_t_taskboard', base_t_taskboard)
    print('base_t_kitlayout', base_t_kitlayout)