Example 1: load a recorded demonstration, smooth it, and publish it as RViz markers
import random

import numpy as np
import rospy
from scipy.ndimage import gaussian_filter1d
from geometry_msgs.msg import Pose
from visualization_msgs.msg import Marker

import util  # project-local helper providing send_traj_point_marker()
# (The later examples additionally rely on sys, copy, moveit_commander,
#  tf.transformations and DMPs_discrete.)


def main():
    rospy.init_node('test', anonymous=True)
    raw_data = np.load('record_demo.npy')
    # ipdb.set_trace()
    filtered_data = gaussian_filter1d(raw_data.T, sigma=5).T
    # filtered_data = util.filter_static_points(filtered_data,0.03)
    marker_pub = rospy.Publisher("/iiwa/visualization_marker",
                                 Marker,
                                 queue_size=100)
    rospy.sleep(0.5)
    rgba_tuple = [
        random.uniform(0, 1),
        random.uniform(0, 1),
        random.uniform(0.5, 1), 1
    ]
    for idx, point in enumerate(filtered_data):
        # ipdb.set_trace()
        now_pose = Pose()
        now_pose.position.x = point[0]
        now_pose.position.y = point[1]
        now_pose.position.z = point[2]
        util.send_traj_point_marker(marker_pub=marker_pub,
                                    pose=now_pose,
                                    id=idx,
                                    rgba_tuple=rgba_tuple)
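
The double transpose around gaussian_filter1d is what makes the smoothing run along the time axis instead of across x/y/z. A minimal standalone sketch of that step, assuming a hypothetical (N, 3) array of positions:

import numpy as np
from scipy.ndimage import gaussian_filter1d

raw = np.random.rand(200, 3)   # hypothetical trajectory: one x/y/z sample per row

# gaussian_filter1d smooths along the last axis by default, so the data is transposed
# to (3, N), each coordinate is filtered over time, and the result is transposed back.
smoothed = gaussian_filter1d(raw.T, sigma=5).T
assert smoothed.shape == raw.shape

# Equivalent without the transposes: pass the time axis explicitly.
smoothed_axis0 = gaussian_filter1d(raw, sigma=5, axis=0)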
Example 2: compute the demonstration's mean point and republish the trajectory centred on the origin
def main():
    rospy.init_node('test', anonymous=True)
    # raw_data = np.load('record_demo.npy')
    raw_data = np.load('./demonstrations/record_demo_1_.npy')
    # ipdb.set_trace()
    filtered_data = gaussian_filter1d(raw_data.T, sigma=5).T
    # filtered_data = util.filter_static_points(filtered_data,0.03)
    marker_pub = rospy.Publisher("/iiwa/visualization_marker",
                                 Marker,
                                 queue_size=100)
    rospy.sleep(0.5)
    rgba_tuple = [
        random.uniform(0, 1),
        random.uniform(0, 1),
        random.uniform(0.5, 1), 1
    ]

    now_pose = Pose()  # buffer pose

    # calculate the mean point of the trajectory
    mean_point = [0.0, 0.0, 0.0]  # x, y, z
    for idx, point in enumerate(filtered_data):
        mean_point[0] += point[0]
        mean_point[1] += point[1]
        mean_point[2] += point[2]
    for i in range(len(mean_point)):
        mean_point[i] = mean_point[i] / len(filtered_data)

    # show the mean point in RViz with a marker
    now_pose.position.x = mean_point[0]
    now_pose.position.y = mean_point[1]
    now_pose.position.z = mean_point[2]
    util.send_traj_point_marker(marker_pub=marker_pub,
                                pose=now_pose,
                                id=1000,
                                rgba_tuple=[0.1, 0.1, 0.1, 1])

    # offset the trajectory so that it is centred on the origin
    filtered_data_zero = filtered_data.copy()  # copy, so filtered_data itself is not modified
    for idx, point in enumerate(filtered_data):
        filtered_data_zero[idx][:3] = point[:3] - mean_point

    # display the origin-centred trajectory
    for idx, point in enumerate(filtered_data_zero):
        # ipdb.set_trace()
        now_pose.position.x = point[0]  #+mean_point[0]
        now_pose.position.y = point[1]  #+mean_point[1]
        now_pose.position.z = point[2]  #+mean_point[2]
        util.send_traj_point_marker(marker_pub=marker_pub,
                                    pose=now_pose,
                                    id=idx,
                                    rgba_tuple=rgba_tuple)
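
The accumulation loops above can be written as two NumPy one-liners; a minimal sketch, assuming filtered_data is a hypothetical (N, 3) array like the one loaded above:

import numpy as np

filtered_data = np.random.rand(150, 3)   # hypothetical (N, 3) trajectory

# mean of each coordinate over all samples (same result as the accumulation loop)
mean_point = filtered_data[:, :3].mean(axis=0)

# origin-centred copy; copying avoids the aliasing pitfall fixed in the example above
filtered_data_zero = filtered_data.copy()
filtered_data_zero[:, :3] -= mean_point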
Example 3: transform hand-tracked points into the world frame and execute them as a MoveIt Cartesian path
def main():

    # moveit ros
    moveit_commander.roscpp_initialize(sys.argv)
    rospy.init_node('cartesian_move_test', anonymous=True)
    robot = moveit_commander.RobotCommander()
    group = moveit_commander.MoveGroupCommander('arm')
    scene = moveit_commander.PlanningSceneInterface()
    planning_frame = group.get_planning_frame()
    
    print "============ Reference frame: %s" % planning_frame
    eef_link = group.get_end_effector_link()
    print "============ End effector: %s" % eef_link
    group_names = robot.get_group_names()
    print "============ Robot Groups:", robot.get_group_names()
    print "============ Printing robot state"
    print robot.get_current_state()
    print ""

    print(group.get_current_pose())
    # move to the all-zero joint configuration before replaying the trajectory
    joint_goal = [0, 0, 0, 0, 0, 0, 0]
    group.go(joint_goal, wait=True)
    group.stop()

    raw_data = np.load(
        '/home/hayashi/worksp/tomato_ws/src/tomato_ws/handtracking/src/handtracking/hand_pub.npy')
    print(raw_data, raw_data.shape)

    filtered_data = gaussian_filter1d(raw_data.T, sigma=15).T
    filtered_data = util.filter_static_points(filtered_data)
    marker_pub = rospy.Publisher("/visualization_marker",
                                 Marker,
                                 queue_size=100)
    rospy.sleep(0.5)
    rgba_tuple = [
        random.uniform(0.5, 1),
        random.uniform(0.5, 1),
        random.uniform(0.5, 1), 1
    ]
    rospy.loginfo('publish visualization marker')  # display() exists only inside IPython/Jupyter
    print(filtered_data, raw_data.shape)

    # convert each pose from the camera frame to the world frame
    waypoints = []
    for idx, point in enumerate(filtered_data):
        # ipdb.set_trace()
        now_pose = Pose()
        now_pose.position.x = point[0]
        now_pose.position.y = point[1]
        now_pose.position.z = point[2]
        
        pos = now_pose.position
        ori = now_pose.orientation
        mat = np.dot(translation_matrix((pos.x, pos.y, pos.z)),
                     quaternion_matrix((ori.x, ori.y, ori.z, ori.w)))
        transform_mat = np.dot(
            translation_matrix((0.077, 0.035, 0.188)),
            quaternion_matrix((-0.500, 0.500, -0.500, 0.500)))
        new_mat = np.dot(transform_mat, mat)

        new_trans = translation_from_matrix(new_mat)
        new_quat = quaternion_from_matrix(new_mat)  # computed but not used below

        new_pose = Pose()
        new_pose.position.x = new_trans[0]
        new_pose.position.y = new_trans[1]
        new_pose.position.z = new_trans[2]

        # a fixed end-effector orientation is used for every waypoint instead of new_quat
        new_pose.orientation.x = 2.6944e-5
        new_pose.orientation.y = 0.70713
        new_pose.orientation.z = 2.5173e-5
        new_pose.orientation.w = 0.70708

        util.send_traj_point_marker(marker_pub=marker_pub,
                                    frameid="/world",
                                    pose=new_pose,
                                    id=idx + 300,
                                    rgba_tuple=rgba_tuple)

        waypoints.append(copy.deepcopy(new_pose))

    group.set_max_velocity_scaling_factor(0.01)
    # compute_cartesian_path returns (plan, fraction); this loop keeps only the last plan
    # and never inspects the achieved fraction (see the sketch after this example).
    for i in range(3):
        (plan, fraction) = group.compute_cartesian_path(
            waypoints,   # waypoints to follow
            0.005,       # eef_step
            0.0)         # jump_threshold

    # with open ('plan_handreplay.txt', 'w') as f: f.write (str(plan))
    group.execute(plan, wait=True)
    print group.get_current_pose()
    group.stop()
    group.clear_pose_targets()

    # Shut down MoveIt cleanly
    moveit_commander.roscpp_shutdown()
    moveit_commander.os._exit(0)
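
As noted above, the fraction returned by compute_cartesian_path says how much of the waypoint list the planner actually covered. A hedged sketch of a retry loop that uses it, written as a drop-in for the planning loop in Example 3 (same MoveIt call; the 0.99 threshold is a hypothetical choice):

    best_plan, best_fraction = None, 0.0
    for attempt in range(3):
        plan, fraction = group.compute_cartesian_path(
            waypoints,   # list of geometry_msgs/Pose waypoints
            0.005,       # eef_step: max end-effector step between interpolated points [m]
            0.0)         # jump_threshold: 0.0 disables joint-space jump checking
        if fraction > best_fraction:
            best_plan, best_fraction = plan, fraction
        if best_fraction >= 0.99:   # hypothetical "good enough" threshold
            break

    if best_fraction < 0.99:
        rospy.logwarn("Cartesian path only covers %.0f%% of the waypoints", 100 * best_fraction)
    if best_plan is not None:
        group.execute(best_plan, wait=True)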
Example 4: fit a discrete DMP to the filtered demonstration and execute its rollout with MoveIt
dmp = DMPs_discrete(n_dmps=filtered_data.shape[1], n_bfs=200)
dmp.imitate_path(y_des=filtered_data.T)
# dmp.y0[2] = dmp.y0[2] 
# dmp.goal[2] = dmp.goal[2] - 0.1
# dmp.goal[1] = dmp.goal[1]+ 0.1
y_track_d, dy_track_d, ddy_track_d = dmp.rollout()

marker_pub = rospy.Publisher("/visualization_marker", Marker, queue_size=100)
rospy.sleep(0.5)
rgba_tuple = [random.uniform(0, 1), random.uniform(0, 1), random.uniform(0.5, 1), 1]
for idx, point in enumerate(y_track_d):
    now_pose = Pose()
    now_pose.position.x = point[0]
    now_pose.position.y = point[1]
    now_pose.position.z = point[2]  
    util.send_traj_point_marker(marker_pub=marker_pub, pose=now_pose, id=idx+400, rgba_tuple=rgba_tuple)

# moveit
moveit_commander.roscpp_initialize(sys.argv)

robot = moveit_commander.RobotCommander()
group = moveit_commander.MoveGroupCommander('manipulator')
reference_frame = 'world'
group.set_pose_reference_frame(reference_frame)

plan = util.ik_cartesain_path(group, y_track_d)
util.execute_plan(group, plan)
# print "demonstration"
# print raw_data[-1]
# print "DMP"
# print group.get_current_pose().pose
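
The commented-out lines above hint at the main reason for fitting a DMP: after imitate_path, the start (y0) and goal can be changed and a new trajectory rolled out. A minimal sketch of that goal adaptation, assuming the same DMPs_discrete class (pydmps-style API) and the filtered_data array used above; the 10 cm offset is a hypothetical value:

# fit the DMP to the demonstration, shift its goal, and roll out a new trajectory
dmp = DMPs_discrete(n_dmps=filtered_data.shape[1], n_bfs=200)
dmp.imitate_path(y_des=filtered_data.T)

dmp.goal[2] -= 0.1   # e.g. place the goal 10 cm lower in z
y_track, dy_track, ddy_track = dmp.rollout()

# y_track has the same column layout as the demonstration, so it can be passed to
# util.ik_cartesain_path() and util.execute_plan() exactly like y_track_d above.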
Example 5: transform recorded camera-frame poses into the world frame and publish them as RViz markers
def main():
    rospy.init_node('test', anonymous=True)
    # raw_data = np.load('record_demo.npy')
    # raw_data = np.load('./demonstrations/record_demo_1_.npy')
    raw_data = np.load(
        '/home/hayashi/worksp/tomato_ws/src/tomato_ws/handtracking/src/handtracking/hand_pub.npy'
    )
    print(raw_data, raw_data.shape)
    # ipdb.set_trace()
    filtered_data = gaussian_filter1d(raw_data.T, sigma=15).T
    # filtered_data = raw_data
    filtered_data = util.filter_static_points(filtered_data)
    marker_pub = rospy.Publisher("/visualization_marker",
                                 Marker,
                                 queue_size=100)
    rospy.sleep(0.5)
    rgba_tuple = [
        random.uniform(0.5, 1),
        random.uniform(0.5, 1),
        random.uniform(0.5, 1), 1
    ]
    rospy.loginfo('publish visualization marker')  # display() exists only inside IPython/Jupyter
    print(filtered_data, raw_data.shape)

    # convert each pose from the camera frame to the world frame
    for idx, point in enumerate(filtered_data):
        # ipdb.set_trace()
        now_pose = Pose()
        now_pose.position.x = point[0]
        now_pose.position.y = point[1]
        now_pose.position.z = point[2]
        now_pose.orientation.x = point[3]
        now_pose.orientation.y = point[4]
        now_pose.orientation.z = point[5]
        now_pose.orientation.w = point[6]
        # util.send_traj_point_marker(marker_pub=marker_pub,frameid="/camera_color_optical_frame", pose=now_pose, id=idx, rgba_tuple=rgba_tuple)

        pos = now_pose.position
        ori = now_pose.orientation
        mat = np.dot(translation_matrix((pos.x, pos.y, pos.z)),
                     quaternion_matrix((ori.x, ori.y, ori.z, ori.w)))
        transform_mat = np.dot(
            translation_matrix((0.077, 0.035, 0.188)),
            quaternion_matrix((-0.500, 0.500, -0.500, 0.500)))
        new_mat = np.dot(transform_mat, mat)

        new_trans = translation_from_matrix(new_mat)
        new_quat = quaternion_from_matrix(new_mat)

        new_pose = Pose()
        new_pose.position.x = new_trans[0]
        new_pose.position.y = new_trans[1]
        new_pose.position.z = new_trans[2]
        new_pose.orientation.x = new_quat[0]
        new_pose.orientation.y = new_quat[1]
        new_pose.orientation.z = new_quat[2]
        new_pose.orientation.w = new_quat[3]
        util.send_traj_point_marker(marker_pub=marker_pub,
                                    frameid="/world",
                                    pose=new_pose,
                                    id=idx + 300,
                                    rgba_tuple=rgba_tuple)
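
The same hard-coded camera-to-world transform appears in both Example 3 and Example 5. A hedged sketch of pulling it into a single reusable helper (the function name transform_pose_to_world is hypothetical; the tf.transformations calls are the ones used above):

import numpy as np
from geometry_msgs.msg import Pose
from tf.transformations import (translation_matrix, quaternion_matrix,
                                translation_from_matrix, quaternion_from_matrix)

# fixed camera-to-world transform used in Examples 3 and 5
CAM_TO_WORLD = np.dot(translation_matrix((0.077, 0.035, 0.188)),
                      quaternion_matrix((-0.500, 0.500, -0.500, 0.500)))


def transform_pose_to_world(pose):
    """Hypothetical helper: express a camera-frame geometry_msgs/Pose in the world frame."""
    mat = np.dot(translation_matrix((pose.position.x, pose.position.y, pose.position.z)),
                 quaternion_matrix((pose.orientation.x, pose.orientation.y,
                                    pose.orientation.z, pose.orientation.w)))
    new_mat = np.dot(CAM_TO_WORLD, mat)

    trans = translation_from_matrix(new_mat)
    quat = quaternion_from_matrix(new_mat)   # [x, y, z, w]

    out = Pose()
    out.position.x = trans[0]
    out.position.y = trans[1]
    out.position.z = trans[2]
    out.orientation.x = quat[0]
    out.orientation.y = quat[1]
    out.orientation.z = quat[2]
    out.orientation.w = quat[3]
    return out

With such a helper, the body of the loop in Example 5 reduces to new_pose = transform_pose_to_world(now_pose) followed by the marker publishing call.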