Example #1
def make_traj_multi_stage(req, demo_name, stage_num, tool_stage_info, prev_exp_cloud_pc2, verb_data_accessor, transform_type):
    assert len(req.object_clouds) == 1
    exp_tool_cloud = None if stage_num == 0 else ru.pc2xyzrgb(prev_exp_cloud_pc2)[0]
    exp_target_cloud = ru.pc2xyzrgb(req.object_clouds[0])[0]
    clouds_frame_id = req.object_clouds[0].header.frame_id
    world_to_grip_transform_func = make_world_to_grip_transform_tf("%s_gripper_tool_frame" % tool_stage_info.arms_used)
    return make_traj_multi_stage_do_work(demo_name, exp_target_cloud, clouds_frame_id,
                                         stage_num, tool_stage_info, exp_tool_cloud,
                                         verb_data_accessor, world_to_grip_transform_func, transform_type)
Example #2
def load_cloud_from_sensor(input_topic, frame):
  import sensor_msgs.msg
  msg = rospy.wait_for_message(input_topic, sensor_msgs.msg.PointCloud2)
  xyz, rgb = ros_utils.pc2xyzrgb(msg)
  xyz = xyz.reshape(-1,3)
  xyz = ros_utils.transform_points(xyz, Globals.tf_listener, frame, msg.header.frame_id)
  return xyz
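
A minimal usage sketch (not part of the original source), assuming the surrounding module already provides rospy, ros_utils and a populated Globals.tf_listener as the snippet above requires; the topic and frame names are placeholders.

if rospy.get_name() == '/unnamed':
    rospy.init_node('cloud_reader_example', disable_signals=True)
xyz = load_cloud_from_sensor('/preprocessor/points', 'base_footprint')
print xyz.shape   # expected (N, 3), points expressed in base_footprint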
Example #3
def get_transformed_clouds(bag,times):
    """
    get transformed point clouds at times    
    """
    listener = mytf.TransformListener()

    all_clouds = []
    all_cloud_times = []
    all_hmats = []
    for (i,(topic, msg, t)) in enumerate(bag.read_messages()):
        if topic=="/tf":
            listener.callback(msg)
        elif topic.endswith("points"):
            xyz, bgr = ros_utils.pc2xyzrgb(msg)            
            all_clouds.append((xyz, bgr))
            try:
                all_hmats.append(listener.transformer.lookup_transform_mat("/base_footprint",msg.header.frame_id))
            except Exception:
                all_hmats.append(np.eye(4))
            all_cloud_times.append(t.to_sec())
    
    cloud_inds = np.searchsorted(all_cloud_times, times)
    
    clouds = []
    for i in cloud_inds:
        xyz,rgb = all_clouds[i]
        hmat = all_hmats[i]
        xyz1 = np.c_[xyz.reshape(-1,3), np.ones((xyz.size/3,1))]
        xyz1_tf = np.dot(xyz1, hmat.T)
        xyz_tf = xyz1_tf[:,:3].reshape(xyz.shape)
        clouds.append((xyz_tf, rgb))

    return clouds
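
A short usage sketch (added here, not from the original file), assuming the standard rosbag Python API and a bag containing /tf plus a topic ending in "points"; the bag path and query times are placeholders, and the times must be absolute ROS times since the function compares them against t.to_sec().

import rosbag
bag = rosbag.Bag('demo.bag')                                # placeholder path
clouds = get_transformed_clouds(bag, [1350000000.0, 1350000005.0])
for xyz_tf, bgr in clouds:
    print xyz_tf.shape   # points transformed into /base_footprint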
Example #4
    def execute(self,userdata):
        """
        - move head to the right place
        - get a point cloud
        returns: success, failure
        """
        global HANDLES
        HANDLES=[]
        
        # Globals.pr2.head.set_pan_tilt(0, HEAD_TILT)
        Globals.pr2.larm.goto_posture('side')
        Globals.pr2.rarm.goto_posture('side')
        Globals.pr2.join_all()
        if HUMAN_GET_ROPE:
            xyz,frame = human_get_rope()
            xyz = ros_utils.transform_points(xyz, Globals.pr2.tf_listener, "base_footprint", frame)
            pose_array = conversions.array_to_pose_array(xyz,"base_footprint")
            HANDLES.append(Globals.rviz.draw_curve(pose_array,id=3250864,rgba=(0,0,1,1)))
            xyz = curves.unif_resample(xyz, 100,tol=.01)

        else:
            msg = rospy.wait_for_message("/preprocessor/points", sensor_msgs.msg.PointCloud2)
            xyz, rgb = ros_utils.pc2xyzrgb(msg)
            xyz = xyz.reshape(-1,3)
            xyz = ros_utils.transform_points(xyz, Globals.pr2.tf_listener, "base_footprint", msg.header.frame_id)
        userdata.points = xyz
                
        return "success"
Example #5
def get_transformed_clouds(bag, times):
    """
    get transformed point clouds at times    
    """
    listener = mytf.TransformListener()

    all_clouds = []
    all_cloud_times = []
    all_hmats = []
    for (i, (topic, msg, t)) in enumerate(bag.read_messages()):
        if topic == "/tf":
            listener.callback(msg)
        elif topic.endswith("points"):
            xyz, bgr = ros_utils.pc2xyzrgb(msg)
            all_clouds.append((xyz, bgr))
            try:
                all_hmats.append(
                    listener.transformer.lookup_transform_mat(
                        "/base_footprint", msg.header.frame_id))
            except Exception:
                all_hmats.append(np.eye(4))
            all_cloud_times.append(t.to_sec())

    cloud_inds = np.searchsorted(all_cloud_times, times)

    clouds = []
    for i in cloud_inds:
        xyz, rgb = all_clouds[i]
        hmat = all_hmats[i]
        xyz1 = np.c_[xyz.reshape(-1, 3), np.ones((xyz.size / 3, 1))]
        xyz1_tf = np.dot(xyz1, hmat.T)
        xyz_tf = xyz1_tf[:, :3].reshape(xyz.shape)
        clouds.append((xyz_tf, rgb))

    return clouds
Example #6
def load_cloud_from_sensor(input_topic, frame):
    import sensor_msgs.msg
    msg = rospy.wait_for_message(input_topic, sensor_msgs.msg.PointCloud2)
    xyz, rgb = ros_utils.pc2xyzrgb(msg)
    xyz = xyz.reshape(-1, 3)
    xyz = ros_utils.transform_points(xyz, Globals.tf_listener, frame,
                                     msg.header.frame_id)
    return xyz
Example #7
def filter_pc2(cloud_pc2):
    # Downsample the cloud, build a proximity graph, and keep only the largest
    # connected component, which drops isolated outlier points.
    cloud_xyz = (pc2xyzrgb(cloud_pc2)[0]).reshape(-1,3)
    cloud_xyz_down = voxel_downsample(cloud_xyz, .02)
    graph = ri.points_to_graph(cloud_xyz_down, .03)
    cc = ri.largest_connected_component(graph)
    good_xyzs = np.array([graph.node[node_id]["xyz"] for node_id in cc.nodes()])
    # Draw the surviving points in rviz and wait for the user before continuing.
    pose_array = juc.array_to_pose_array(good_xyzs, "base_footprint")
    Globals.handles.append(Globals.rviz.draw_curve(pose_array, rgba = (1,1,0,1), type=Marker.CUBE_LIST, width=.001, ns="segmentation"))
    raw_input("press enter when done looking")
    del Globals.handles[:]
    return xyz2pc(good_xyzs, cloud_pc2.header.frame_id)
Example #8
def human_get_rope():
    point_cloud = rospy.wait_for_message("/camera/depth_registered/points", sm.PointCloud2)
    
    xyz, bgr = ros_utils.pc2xyzrgb(point_cloud)
    
    xys = roi.get_polyline(bgr,'draw curve')
    uvs = np.int32(xys)[:,::-1]
    us,vs = uvs.T
    xyzs = xyz[us,vs]
    xyzs_good = xyzs[np.isfinite(xyzs).all(axis=1)]
    print "%i out of %i labeled points are good"%(len(xyzs_good), len(xyzs))
    xyzs_unif = curves.unif_resample(xyzs_good,100,tol=.002)
    return xyzs_unif
Example #9
def read_from_ros(cloud_topic, frame='base_footprint'):
  '''format: ros:/my/topic/points(:frame)'''
  import rospy
  from brett2 import ros_utils
  import sensor_msgs.msg
  import time
  if rospy.get_name() == '/unnamed':
    rospy.init_node('cloud_reader', disable_signals=True)
  msg = rospy.wait_for_message(cloud_topic, sensor_msgs.msg.PointCloud2)
  xyz, rgb = ros_utils.pc2xyzrgb(msg)
  xyz = xyz.reshape(-1, 3)
  tf_listener = ros_utils.get_tf_listener()
  return ros_utils.transform_points(xyz, tf_listener, frame, msg.header.frame_id)
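
A hedged call sketch; both topics are placeholders, and the second call shows overriding the default target frame.

xyz_base = read_from_ros('/preprocessor/points')            # frame defaults to 'base_footprint'
xyz_ground = read_from_ros('/drop/points', frame='ground')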
Example #10
    def execute(self, userdata):
        """
        - move head to the right place
        - get a point cloud
        returns: success, failure
        """
        Globals.handles = []
        draw_table()

        Globals.pr2.rgrip.set_angle(.08)
        Globals.pr2.lgrip.set_angle(.08)
        Globals.pr2.join_all()
        if not args.use_tracking:
            Globals.pr2.larm.goto_posture('side')
            Globals.pr2.rarm.goto_posture('side')
        else:
            try:
                increment_pose(Globals.pr2.rarm, [0, 0, .02])
            except PR2.IKFail:
                print "couldn't raise right arm"
            try:
                increment_pose(Globals.pr2.larm, [0, 0, .02])
            except PR2.IKFail:
                print "couldn't raise left arm"
        Globals.pr2.rgrip.set_angle(.08)
        Globals.pr2.lgrip.set_angle(.08)
        Globals.pr2.join_all()

        if args.delay_before_look > 0:
            rospy.loginfo('sleeping for %f secs before looking',
                          args.delay_before_look)
            rospy.sleep(args.delay_before_look)

        if args.test:
            xyz = np.squeeze(
                np.asarray(demos[select_from_list(demos.keys())]["cloud_xyz"]))
        elif args.use_tracking:
            msg = rospy.wait_for_message("/tracker/object", TrackedObject)
            xyz = [(pt.x, pt.y, pt.z) for pt in msg.rope.nodes]
        else:
            msg = rospy.wait_for_message(args.cloud_topic,
                                         sensor_msgs.msg.PointCloud2)
            xyz, rgb = ros_utils.pc2xyzrgb(msg)
            xyz = xyz.reshape(-1, 3)
            xyz = ros_utils.transform_points(xyz, Globals.pr2.tf_listener,
                                             "base_footprint",
                                             msg.header.frame_id)

        userdata.points = xyz

        return "success"
Example #11
def main():
  import argparse
  parser = argparse.ArgumentParser()
  parser.add_argument('--dataset', default='overhand_knot', help='name of dataset')
  parser.add_argument('--method', choices=['geodesic_dist', 'shape_context', 'geodesic_dist+shape_context'], default='geodesic_dist', help='matching algorithm')
  parser.add_argument('--input_mode', choices=['from_dataset', 'kinect', 'file'], default='kinect', help='input cloud acquisition method')
  parser.add_argument('--cloud_topic', default='/preprocessor/kinect1/points', help='ros topic for kinect input mode')
  parser.add_argument('--input_file', help='input file for --input_mode=file')
  parser.add_argument('--show_shape_context', action='store_true')
  args = parser.parse_args()

  dataset = recognition.DataSet.LoadFromTaskDemos(args.dataset)

  # read input to match
  input_xyz = None
  if args.input_mode == 'from_dataset':
    key = select_from_list(sorted(dataset.keys()))
    input_xyz = np.squeeze(np.asarray(dataset[key]['cloud_xyz']))
  elif args.input_mode == 'kinect':
    import rospy
    from brett2 import ros_utils
    import sensor_msgs.msg
    import time
    if rospy.get_name() == '/unnamed':
      rospy.init_node('matcher', disable_signals=True)
    msg = rospy.wait_for_message(args.cloud_topic, sensor_msgs.msg.PointCloud2)
    xyz, rgb = ros_utils.pc2xyzrgb(msg)
    xyz = xyz.reshape(-1, 3)
    tf_listener = ros_utils.get_tf_listener()
    time.sleep(1) # so tf works
    input_xyz = ros_utils.transform_points(xyz, tf_listener, "ground", msg.header.frame_id)
  elif args.input_mode == 'file':
    input_xyz = np.load(args.input_file)['cloud_xyz']
  else:
    raise NotImplementedError

  # create the matcher
  matcher = None
  if args.method == 'geodesic_dist':
    matcher = recognition.GeodesicDistMatcher(dataset)
  elif args.method == 'shape_context':
    matcher = recognition.ShapeContextMatcher(dataset)
  elif args.method == 'geodesic_dist+shape_context':
    matcher = recognition.CombinedNNMatcher(dataset, [recognition.GeodesicDistMatcher, recognition.ShapeContextMatcher], [1, 0.1])
  else:
    raise NotImplementedError

  # do the matching
  best_match_name, best_match_cost = matcher.match(input_xyz)
  print 'Best match name:', best_match_name, 'with cost:', best_match_cost
  draw_comparison(left_cloud=input_xyz, right_cloud=dataset[best_match_name], show_shape_context=args.show_shape_context)
Example #12
def read_from_ros(cloud_topic, frame='ground'):
    '''format: ros:/my/topic/points(:frame)'''
    import rospy
    from brett2 import ros_utils
    import sensor_msgs.msg
    import time
    if rospy.get_name() == '/unnamed':
        rospy.init_node('cloud_reader', disable_signals=True)
    msg = rospy.wait_for_message(cloud_topic, sensor_msgs.msg.PointCloud2)
    xyz, rgb = ros_utils.pc2xyzrgb(msg)
    xyz = xyz.reshape(-1, 3)
    tf_listener = ros_utils.get_tf_listener()
    return ros_utils.transform_points(xyz, tf_listener, frame,
                                      msg.header.frame_id)
Example #13
def human_get_rope():
    point_cloud = rospy.wait_for_message("/camera/depth_registered/points",
                                         sm.PointCloud2)

    xyz, bgr = ros_utils.pc2xyzrgb(point_cloud)

    xys = roi.get_polyline(bgr, 'draw curve')
    uvs = np.int32(xys)[:, ::-1]
    us, vs = uvs.T
    xyzs = xyz[us, vs]
    xyzs_good = xyzs[np.isfinite(xyzs).all(axis=1)]
    print "%i out of %i labeled points are good" % (len(xyzs_good), len(xyzs))
    xyzs_unif = curves.unif_resample(xyzs_good, 100, tol=.002)
    return xyzs_unif
Example #14
def create_obstacle_env(env):
    # Clone the OpenRAVE environment and populate it with obstacles
    # segmented out of the current depth cloud.
    clone = env.CloneSelf(1)
    point_cloud = rospy.wait_for_message("/drop/points", sm.PointCloud2)
    xyz, bgr = ros_utils.pc2xyzrgb(point_cloud)
    xyz = ros_utils.transform_points(xyz, Globals.pr2.tf_listener, "base_footprint", point_cloud.header.frame_id)
    #xyz = xyz[250:, 250:]
    clusters = tabletop.segment_tabletop_scene(xyz, 'base_footprint')
    bounds = tabletop.get_table_dimensions(xyz)
    kinbodies.create_box_from_bounds(clone, bounds, "table")
    print len(clusters), "clusters"
    for (i_clu, clu) in enumerate(clusters):
        x, y, z, r, h = tabletop.get_cylinder(clu)
        #print kinbodies.create_cylinder(clone, (x,y,z),r,h, name="cylinder%i"%i_clu)
    return clone
Example #15
def handle_initialization_request(req):
    "rope initialization service"

    xyz, _ = pc2xyzrgb(req.cloud)
    xyz = np.squeeze(xyz)
    obj_type = determine_object_type(xyz, plotting=False)
    print "object type:", obj_type

    if obj_type == "towel":
        xyz = xyz[(xyz[:, 2] - np.median(xyz[:, 2])) < .05, :]
        center, (height, width), angle = cv2.minAreaRect(
            xyz[:, :2].astype('float32').reshape(1, -1, 2))
        angle *= np.pi / 180
        corners = np.array(center)[None, :] + np.dot(
            np.array([[-height / 2, -width / 2], [-height / 2, width / 2],
                      [height / 2, width / 2], [height / 2, -width / 2]]),
            np.array([[np.cos(angle), np.sin(angle)],
                      [-np.sin(angle), np.cos(angle)]]))
        resp = bs.InitializationResponse()
        resp.objectInit.type = "towel_corners"
        resp.objectInit.towel_corners.polygon.points = [
            gm.Point(corner[0], corner[1], np.median(xyz[:, 2]))
            for corner in corners
        ]
        resp.objectInit.towel_corners.header = req.cloud.header
        print req.cloud.header
        poly_pub.publish(resp.objectInit.towel_corners)

    if obj_type == "rope":
        total_path_3d = find_path_through_point_cloud(xyz, plotting=False)
        resp = bs.InitializationResponse()
        resp.objectInit.type = "rope"
        rope = resp.objectInit.rope = bm.Rope()

        rope.header = req.cloud.header
        rope.nodes = [gm.Point(x, y, z) for (x, y, z) in total_path_3d]
        rope.radius = .006
        rospy.logwarn(
            "TODO: actually figure out rope radius from data. setting to .6cm")

        pose_array = gm.PoseArray()
        pose_array.header = req.cloud.header
        pose_array.poses = [
            gm.Pose(position=point, orientation=gm.Quaternion(0, 0, 0, 1))
            for point in rope.nodes
        ]
        marker_handles[0] = rviz.draw_curve(pose_array, 0)

    return resp
Example #16
def handle_initialization_request(req):
    "rope initialization service"
    
    xyz, _ = pc2xyzrgb(req.cloud)
    xyz = np.squeeze(xyz)
    obj_type = determine_object_type(xyz)
    print "object type:", obj_type
    
    
    if obj_type == "towel":
        xyz = xyz[(xyz[:,2]-np.median(xyz[:,2]))<.05,:]
        center, (height, width), angle = cv2.minAreaRect(xyz[:,:2].astype('float32').reshape(1,-1,2))
        angle *= np.pi/180
        corners = np.array(center)[None,:] + np.dot(
            np.array([[-height/2,-width/2],
                      [-height/2,width/2],
                      [height/2, width/2],
                      [height/2, -width/2]]),
            np.array([[np.cos(angle), np.sin(angle)],
                      [-np.sin(angle),np.cos(angle)]])) 
        resp = bs.InitializationResponse()
        resp.objectInit.type = "towel_corners"
        resp.objectInit.towel_corners.polygon.points = [gm.Point(corner[0], corner[1], np.median(xyz[:,2])) for corner in corners]
        resp.objectInit.towel_corners.header = req.cloud.header
        print req.cloud.header
        poly_pub.publish(resp.objectInit.towel_corners)
    
    if obj_type == "rope":
        total_path_3d = find_path_through_point_cloud(xyz, plotting=False)            
        resp = bs.InitializationResponse()
        resp.objectInit.type = "rope"
        rope = resp.objectInit.rope = bm.Rope()
        
        rope.header = req.cloud.header
        rope.nodes = [gm.Point(x,y,z) for (x,y,z) in total_path_3d]    
        rope.radius = .005
        rospy.logwarn("TODO: actually figure out rope radius from data. setting to .5cm")
        
        pose_array = gm.PoseArray()
        pose_array.header = req.cloud.header
        pose_array.poses = [gm.Pose(position=point, orientation=gm.Quaternion(0,0,0,1)) for point in rope.nodes]
        marker_handles[0] = rviz.draw_curve(pose_array,0)

        
    
    return resp
Example #17
    def execute(self,userdata):
        """
        - move head to the right place
        - get a point cloud
        returns: success, failure
        """
        Globals.handles = []
        draw_table()

        Globals.pr2.rgrip.set_angle(.08)
        Globals.pr2.lgrip.set_angle(.08)
        Globals.pr2.join_all()
        if not args.use_tracking:
            Globals.pr2.larm.goto_posture('side')
            Globals.pr2.rarm.goto_posture('side')
        else:
            try: increment_pose(Globals.pr2.rarm, [0,0,.02])
            except PR2.IKFail: print "couldn't raise right arm"
            try: increment_pose(Globals.pr2.larm, [0,0,.02])
            except PR2.IKFail: print "couldn't raise left arm"
        Globals.pr2.rgrip.set_angle(.08)
        Globals.pr2.lgrip.set_angle(.08)
        Globals.pr2.join_all()

        if args.delay_before_look > 0:
            rospy.loginfo('sleeping for %f secs before looking', args.delay_before_look)
            rospy.sleep(args.delay_before_look)

        if args.test:
            xyz = np.squeeze(np.asarray(demos[select_from_list(demos.keys())]["cloud_xyz"]))
        elif args.use_tracking:
            msg = rospy.wait_for_message("/tracker/object", TrackedObject)
            xyz = [(pt.x, pt.y, pt.z) for pt in msg.rope.nodes]
        else:
            msg = rospy.wait_for_message(args.cloud_topic, sensor_msgs.msg.PointCloud2)
            xyz, rgb = ros_utils.pc2xyzrgb(msg)
            xyz = xyz.reshape(-1,3)
            xyz = ros_utils.transform_points(xyz, Globals.pr2.tf_listener, "base_footprint", msg.header.frame_id)

        ELOG.log('LookAtObject', 'xyz', xyz)
        userdata.points = xyz

        return "success"
Example #18
def get_center():
    pc = rospy.wait_for_message("/camera/depth_registered/points",
                                sm.PointCloud2)
    xyz, bgr = pc2xyzrgb(pc)
    bgr = bgr.copy()
    hsv = cv2.cvtColor(bgr, cv2.COLOR_BGR2HSV)
    h = hsv[:, :, 0]
    s = hsv[:, :, 1]
    v = hsv[:, :, 2]

    mask = ((h < 10) | (h > 150)) & (s > 100) & (v > 100)
    labels, max_label = ndi.label(mask)
    centers = []
    for label in xrange(1, max_label + 1):
        us, vs = np.nonzero(labels == label)
        if len(us) >= 8:
            pts = xyz[us, vs, :]
            centers.append(pts.mean(axis=0))
    centers = np.array(centers)

    if len(centers) > 0:
        listener.waitForTransform(pc.header.frame_id, "l_gripper_tool_frame",
                                  rospy.Time(0), rospy.Duration(1))
        gripper_trans, _ = listener.lookupTransform(pc.header.frame_id,
                                                    "l_gripper_tool_frame",
                                                    rospy.Time(0))
        gripper_trans = np.array(gripper_trans)
        dists = ssd.cdist(centers, gripper_trans[None, :])
        closest_center = centers[dists.argmin()]
        if dists.min() < .025:
            print "successfully got point"
            return gripper_trans, closest_center
        else:
            print "fail"
            return None

    contours = cv2.findContours(mask.astype('uint8'), cv2.RETR_LIST,
                                cv2.CHAIN_APPROX_NONE)[0]
    cv2.drawContours(bgr, contours, -1, (0, 255, 0))
    cv2.imshow("bgr", bgr)
    cv2.waitKey(10)
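
A small polling sketch around get_center (not from the original source), assuming a node is running and the module-level tf listener used inside the function has been constructed; the sleep interval is arbitrary.

result = None
while result is None and not rospy.is_shutdown():
    result = get_center()
    rospy.sleep(.1)
if result is not None:
    gripper_xyz, blob_center = result
    print "gripper at", gripper_xyz, "closest red blob at", blob_center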
Example #19
def get_center():
    pc = rospy.wait_for_message("/camera/depth_registered/points", sm.PointCloud2)  
    xyz, bgr = pc2xyzrgb(pc)
    bgr = bgr.copy()
    hsv = cv2.cvtColor(bgr,cv2.COLOR_BGR2HSV)
    h=hsv[:,:,0]
    s=hsv[:,:,1]
    v=hsv[:,:,2]
    
    mask = ((h<10) | (h>150)) & (s > 100) & (v > 100)
    labels,max_label = ndi.label(mask)
    centers = []
    for label in xrange(1, max_label+1):
        us, vs = np.nonzero(labels == label)
        if len(us) >= 8:
            pts = xyz[us, vs, :]
            centers.append(pts.mean(axis=0))
    centers= np.array(centers)
    
    if len(centers) > 0:
        listener.waitForTransform(pc.header.frame_id, "l_gripper_tool_frame", rospy.Time(0), rospy.Duration(1))    
        gripper_trans,_ = listener.lookupTransform(pc.header.frame_id, "l_gripper_tool_frame", rospy.Time(0))
        gripper_trans = np.array(gripper_trans)
        dists = ssd.cdist(centers, gripper_trans[None,:])
        closest_center = centers[dists.argmin()]
        if dists.min() < .025:
            print "successfully got point"
            return gripper_trans, closest_center
        else:
            print "fail"
            return None
            
    
    contours = cv2.findContours(mask.astype('uint8'),cv2.RETR_LIST,cv2.CHAIN_APPROX_NONE)[0]
    cv2.drawContours(bgr,contours,-1,(0,255,0))
    cv2.imshow("bgr",bgr)
    cv2.waitKey(10)
Example #20
    def callback(request):
        global HANDLES
        HANDLES = []
        xyzs, _ = pc2xyzrgb(request.point_cloud)
        new_mins = xyzs.reshape(-1, 3).min(axis=0)
        new_maxes = xyzs.reshape(-1, 3).max(axis=0)
        new_obj_xyz = (new_mins + new_maxes) / 2
        new_obj_dim = new_maxes - new_mins
        f.fit(np.atleast_2d(obj_xyz), np.atleast_2d(new_obj_xyz), 0.001, 0.001)

        new_xyzs, new_mats = f.transform_frames(old_xyzs, conversions.quats2mats(old_quats))
        new_quats = conversions.mats2quats(new_mats)

        show_object_and_trajectory(new_xyzs, new_obj_xyz, obj_quat, new_obj_dim, NEW_ID)
        show_object_and_trajectory(xyzquat[:, :3], obj_xyz, obj_quat, obj_dim, OLD_ID)

        Globals.pr2.update_rave()
        joint_positions, inds = trajectories.make_joint_traj(
            new_xyzs, new_quats, Globals.pr2.rarm.manip, "base_link", "r_wrist_roll_link", filter_options=18
        )
        response = PushResponse()

        if joint_positions is not None:
            Globals.pr2.rarm.goto_joint_positions(joint_positions[0])
            # current_joints = Globals.pr2.rarm.get_joint_positions()[None,:]
            joint_positions, joint_velocities, times = retiming.make_traj_with_limits(
                joint_positions, Globals.pr2.rarm.vel_limits, Globals.pr2.rarm.acc_limits, smooth=True
            )
            # joint_velocities = kinematics_utils.get_velocities(joint_positions, times, tol=.001)

            Globals.pr2.rarm.follow_timed_joint_trajectory(joint_positions, joint_velocities, times)

            response.success = True
        else:
            response.success = False
        return response
Example #21
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("out")
parser.add_argument("--cloud_in", default="/drop/points", nargs="?")
parser.add_argument("--dont_transform", action="store_true")
args = parser.parse_args()

from brett2 import ros_utils
from jds_utils import conversions
import roslib

roslib.load_manifest('tf')
import tf
import rospy
import numpy as np
import sensor_msgs.msg as sm
from brett2.ros_utils import transformPointCloud2

rospy.init_node("get_point_cloud")
listener = tf.TransformListener()
rospy.sleep(.3)

pc = rospy.wait_for_message(args.cloud_in, sm.PointCloud2)
if args.dont_transform:
    pc_tf = pc
else:
    pc_tf = transformPointCloud2(pc, listener, "base_footprint",
                                 pc.header.frame_id)

xyz, bgr = ros_utils.pc2xyzrgb(pc_tf)
np.savez(args.out, xyz=xyz, bgr=bgr)
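
The script above stores the cloud with np.savez; a matching read-back sketch, with the filename standing in for whatever was passed as the positional "out" argument:

import numpy as np
data = np.load('cloud.npz')   # placeholder filename
xyz, bgr = data['xyz'], data['bgr']
print xyz.shape, bgr.dtype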
Example #22
def main():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('--dataset',
                        default='overhand_knot',
                        help='name of dataset')
    parser.add_argument('--method',
                        choices=[
                            'geodesic_dist', 'shape_context',
                            'geodesic_dist+shape_context'
                        ],
                        default='geodesic_dist',
                        help='matching algorithm')
    parser.add_argument('--input_mode',
                        choices=['from_dataset', 'kinect', 'file'],
                        default='kinect',
                        help='input cloud acquisition method')
    parser.add_argument('--cloud_topic',
                        default='/preprocessor/kinect1/points',
                        help='ros topic for kinect input mode')
    parser.add_argument('--input_file',
                        help='input file for --input_mode=file')
    parser.add_argument('--show_shape_context', action='store_true')
    args = parser.parse_args()

    dataset = recognition.DataSet.LoadFromTaskDemos(args.dataset)

    # read input to match
    input_xyz = None
    if args.input_mode == 'from_dataset':
        key = select_from_list(sorted(dataset.keys()))
        input_xyz = np.squeeze(np.asarray(dataset[key]['cloud_xyz']))
    elif args.input_mode == 'kinect':
        import rospy
        from brett2 import ros_utils
        import sensor_msgs.msg
        import time
        if rospy.get_name() == '/unnamed':
            rospy.init_node('matcher', disable_signals=True)
        msg = rospy.wait_for_message(args.cloud_topic,
                                     sensor_msgs.msg.PointCloud2)
        xyz, rgb = ros_utils.pc2xyzrgb(msg)
        xyz = xyz.reshape(-1, 3)
        tf_listener = ros_utils.get_tf_listener()
        time.sleep(1)  # so tf works
        input_xyz = ros_utils.transform_points(xyz, tf_listener, "ground",
                                               msg.header.frame_id)
    elif args.input_mode == 'file':
        input_xyz = np.load(args.input_file)['cloud_xyz']
    else:
        raise NotImplementedError

    # create the matcher
    matcher = None
    if args.method == 'geodesic_dist':
        matcher = recognition.GeodesicDistMatcher(dataset)
    elif args.method == 'shape_context':
        matcher = recognition.ShapeContextMatcher(dataset)
    elif args.method == 'geodesic_dist+shape_context':
        matcher = recognition.CombinedNNMatcher(
            dataset,
            [recognition.GeodesicDistMatcher, recognition.ShapeContextMatcher],
            [1, 0.1])
    else:
        raise NotImplementedError

    # do the matching
    best_match_name, best_match_cost = matcher.match(input_xyz)
    print 'Best match name:', best_match_name, 'with cost:', best_match_cost
    draw_comparison(left_cloud=input_xyz,
                    right_cloud=dataset[best_match_name],
                    show_shape_context=args.show_shape_context)
Example #23
from image_proc.pcd_io import load_xyzrgb
import rope_vision.rope_initialization as ri
import numpy as np
import sys
import networkx as nx
import itertools
from mayavi import mlab
from brett2 import ros_utils
import sensor_msgs.msg as sm
            
    
import rospy
rospy.init_node("test_rope_init")
cloud = rospy.wait_for_message("/preprocessor/points", sm.PointCloud2)
xyz,rgb = ros_utils.pc2xyzrgb(cloud)
# xyz, rgb = load_xyzrgb("data000000000004.pcd")
# xyz = xyz.reshape(-1,3)
#xyz = np.squeeze(np.loadtxt("/tmp/rope.txt"))
total_path_3d = ri.find_path_through_point_cloud(xyz, plotting=True)
Example #24
import roslib
roslib.load_manifest("verb_msgs")
from verb_msgs.srv import *
from brett2.ros_utils import xyzrgb2pc
import geometry_msgs.msg as gm
import sensor_msgs.msg as sm
import numpy as np

if __name__ == "__main__":
    if rospy.get_name() == "/unnamed":
        rospy.init_node("test_tabletop", disable_signals=True)
    pr2 = PR2.create()
    rviz = ros_utils.RvizWrapper.create()
    rospy.sleep(1)
    point_cloud = rospy.wait_for_message("/drop/points", sm.PointCloud2)
    xyz, bgr = ros_utils.pc2xyzrgb(point_cloud)
    xyz = ros_utils.transform_points(xyz, pr2.tf_listener, "base_footprint",
                                     point_cloud.header.frame_id)
    clusters = tabletop.segment_tabletop_scene(xyz,
                                               "base_footprint",
                                               plotting2d=True,
                                               plotting3d=True)

    if False:
        push_svc = rospy.ServiceProxy("push", Push)
        req = PushRequest()
        xyz0 = clusters[np.argmax(map(len, clusters))].reshape(1, -1, 3)
        rgb0 = np.zeros(xyz0.shape, 'uint8')
        point_cloud = xyzrgb2pc(xyz0, rgb0, "base_footprint")
        req.point_cloud = point_cloud
        push_svc.call(req)
Example #25
def exec_traj(req, traj_ik_func=ik_functions.do_traj_ik_graph_search, obj_pc=None, obj_name="", can_move_lower=True):
    assert isinstance(req, ExecTrajectoryRequest)
    traj = req.traj
    return exec_traj_do_work(traj.l_gripper_poses.poses, traj.l_gripper_angles,
                             traj.r_gripper_poses.poses, traj.r_gripper_angles,
                             traj_ik_func, ros_utils.pc2xyzrgb(obj_pc)[0], obj_name, can_move_lower)
Example #26
import roslib;
roslib.load_manifest("verb_msgs")
from verb_msgs.srv import *
from brett2.ros_utils import xyzrgb2pc
import geometry_msgs.msg as gm
import sensor_msgs.msg as sm
import numpy as np
            
            
if __name__ == "__main__":
    if rospy.get_name() == "/unnamed": rospy.init_node("test_tabletop", disable_signals=True)
    pr2 = PR2.create()
    rviz = ros_utils.RvizWrapper.create()
    rospy.sleep(1)
    point_cloud = rospy.wait_for_message("/drop/points", sm.PointCloud2)     
    xyz, bgr = ros_utils.pc2xyzrgb(point_cloud)    
    xyz = ros_utils.transform_points(xyz, pr2.tf_listener, "base_footprint", point_cloud.header.frame_id)
    clusters = tabletop.segment_tabletop_scene(xyz,"base_footprint",plotting2d=True, plotting3d=True)
    
    if False:
        push_svc = rospy.ServiceProxy("push",Push)
        req = PushRequest()
        xyz0 = clusters[np.argmax(map(len,clusters))].reshape(1,-1,3)
        rgb0 = np.zeros(xyz0.shape,'uint8')
        point_cloud = xyzrgb2pc(xyz0, rgb0, "base_footprint")
        req.point_cloud = point_cloud
        push_svc.call(req)
        
    if False:
        pour_srv = rospy.ServiceProxy("pour", Pour)
        req = PourRequest()
Example #27
from jds_image_proc.pcd_io import load_xyzrgb
import rope_vision.rope_initialization as ri
import numpy as np
import sys
import networkx as nx
import itertools
from mayavi import mlab
from brett2 import ros_utils
import sensor_msgs.msg as sm

import rospy
rospy.init_node("test_rope_init")
cloud = rospy.wait_for_message("/preprocessor/points", sm.PointCloud2)
xyz, rgb = ros_utils.pc2xyzrgb(cloud)
# xyz, rgb = load_xyzrgb("data000000000004.pcd")
# xyz = xyz.reshape(-1,3)
#xyz = np.squeeze(np.loadtxt("/tmp/rope.txt"))
total_path_3d = ri.find_path_through_point_cloud(xyz, plotting=True)
Example #28
bagfile = traj["bag_file"].value

bag = rosbag.Bag(bagfile)
seg_dicts = yamlfile["segments"]

listener = mytf.TransformListener()

t_start_bag = bag.read_messages().next()[2].to_sec()

cloud_times, cloud_xyzs, cloud_bgrs = [],[],[]
for (topic, msg, t) in bag.read_messages():
    if topic=="/tf":
        listener.callback(msg)
    elif hasattr(msg, "row_step"):

        xyz, bgr = pc2xyzrgb(msg)
        try:
            xyz_tf = transform_points(xyz, listener, traj["ref_frame"].value, args.kinect_frame)
            cloud_xyzs.append(xyz_tf)
            cloud_bgrs.append(bgr)  
            cloud_times.append(t.to_sec())
        except Exception:
            print "failed to transform points"
            traceback.print_exc()
cloud_times = np.array(cloud_times) 
cloud_times -= t_start_bag

traj_times = np.asarray(traj["times"])-t_start_bag
for (i_seg,seg_dict) in enumerate(seg_dicts):
    i_start, i_stop = np.searchsorted(traj_times, (seg_dict["start"],seg_dict["stop"]))
    seg_name = "%s.%.2i"%(traj.name,i_seg)
Example #29
import roslib;
roslib.load_manifest("rospy")
import rospy
rospy.init_node("write clouds",disable_signals = True)

from brett2.ros_utils import pc2xyzrgb
import sensor_msgs.msg as sm
import numpy as np

while not rospy.is_shutdown():
    fname = raw_input("filename?: ")
    
    print "waiting for point cloud on input topic"
    msg = rospy.wait_for_message("cloud_in", sm.PointCloud2)
    print "ok"
    
    xyz,_ = pc2xyzrgb(msg)
    np.savetxt(fname, np.squeeze(xyz))
    

Example #30
def callback(req):
    xyz, rgb = pc2xyzrgb(req.cloud_in)
    rgb = rgb.copy()
    gc = GetClick()
    cv2.setMouseCallback("rgb", gc.callback)
    while not gc.done:
        cv2.imshow("rgb", rgb)
        cv2.waitKey(10)

    gm = GetMouse()
    xy_corner1 = np.array(gc.xy)

    cv2.setMouseCallback("rgb", gm.callback)
    while not gm.done:
        img = rgb.copy()

        if gm.xy is not None:
            xy_corner2 = np.array(gm.xy)
            cv2.rectangle(img, tuple(xy_corner1), tuple(xy_corner2), (0,255, 0))
        cv2.imshow("rgb",img)
        cv2.waitKey(10)
        
    xy_tl = np.array([xy_corner1, xy_corner2]).min(axis=0)
    xy_br = np.array([xy_corner1, xy_corner2]).max(axis=0)
    #mask = np.zeros(xyz.shape[:2],dtype='uint8')
    #rectangle = gm.xy + tuple(2*(center - np.r_[gm.xy]))
    #tmp1 = np.zeros((1, 13 * 5))
    #tmp2 = np.zeros((1, 13 * 5))    
    #result = cv2.grabCut(rgb,mask,rectangle,tmp1, tmp2,10,mode=cv2.GC_INIT_WITH_RECT)
        
    #from cv2 import cv
    #mask[mask == cv.GC_BGD] = 0
    #mask[mask == cv.GC_PR_BGD] = 63
    #mask[mask == cv.GC_FGD] = 255  
    #mask[mask == cv.GC_PR_FGD] = 192
    #cv2.imshow('mask',mask)
    #cv2.waitKey(10)


    #table_height = get_table_height(xyz)
    
    if args.grabcut:
        rospy.loginfo("using grabcut")
        xl, yl = xy_tl
        w, h = xy_br - xy_tl
        mask = np.zeros((h,w),dtype='uint8')    
        mask.fill(cv2.GC_PR_BGD)
        if args.init == "height":
            initial_height_thresh = stats.scoreatpercentile(xyz[yl:yl+h, xl:xl+w,2].flatten(), 50)
            mask[xyz[yl:yl+h, xl:xl+w,2] > initial_height_thresh] = cv2.GC_PR_FGD
        elif args.init == "middle":
            mask[h//4:3*h//4, w//4:3*w//4] = cv2.GC_PR_FGD
    
        tmp1 = np.zeros((1, 13 * 5))
        tmp2 = np.zeros((1, 13 * 5))    
        cv2.grabCut(rgb[yl:yl+h, xl:xl+w, :],mask,(0,0,0,0),tmp1, tmp2,10,mode=cv2.GC_INIT_WITH_MASK)
    else:
        rospy.loginfo("using table height")        
        table_height = rospy.get_param("table_height")
        xl, yl = xy_tl
        w, h = xy_br - xy_tl
        mask = np.zeros((h,w),dtype='uint8')
        mask[np.isfinite(xyz[yl:yl+h, xl:xl+w,2]) & 
             (xyz[yl:yl+h, xl:xl+w,2] > table_height+.02)] = 1

    mask = mask % 2
    if (args.erode > 0):
        mask = ndi.binary_erosion(mask, utils_images.disk(args.erode)).astype('uint8')
    contours = cv2.findContours(mask,cv2.RETR_LIST,cv2.CHAIN_APPROX_NONE)[0]
    cv2.drawContours(rgb[yl:yl+h, xl:xl+w, :],contours,-1,(0,255,0),thickness=2)
    
    cv2.imshow('rgb', rgb)
    cv2.waitKey(10)

    zsel = xyz[yl:yl+h, xl:xl+w, 2]
    mask = (mask%2==1) & np.isfinite(zsel)# & (zsel - table_height > -1)

    
    resp = ProcessCloudResponse()
    xyz_sel = xyz[yl:yl+h, xl:xl+w,:][mask.astype('bool')]
    rgb_sel = rgb[yl:yl+h, xl:xl+w,:][mask.astype('bool')]
    resp.cloud_out = xyzrgb2pc(xyz_sel, rgb_sel, req.cloud_in.header.frame_id)
    
    if (args.plotting):
        pose_array = conversions.array_to_pose_array(xyz_sel, req.cloud_in.header.frame_id)
        Globals.handles = [Globals.rviz.draw_curve(pose_array, rgba = (1,1,0,1), type=Marker.CUBE_LIST, width=.001, ns="segmentation")]
    
    return resp
Example #31
        arg_names = F[verb]["arg_names"]
        print "arguments: %s" % " ".join(arg_names)

        pr2.join_all()

        pc = rospy.wait_for_message("/drop/points", sm.PointCloud2)
        pc_tf = ros_utils.transformPointCloud2(pc, pr2.tf_listener,
                                               "base_footprint",
                                               pc.header.frame_id)

        n_sel = F[verb]["nargs"].value
        arg_clouds = []
        for i_sel in xrange(n_sel):
            pc_sel = seg_svc.call(
                ProcessCloudRequest(cloud_in=pc_tf)).cloud_out
            xyz, rgb = ros_utils.pc2xyzrgb(pc_sel)
            arg_clouds.append(xyz.reshape(-1, 3))

        make_and_execute_verb_traj(verb, arg_clouds)
        raw_input("press enter to continue")

    elif verb in ["swap-tool"]:

        l_switch_posture = np.array(
            [0.773, 0.595, 1.563, -1.433, 0.478, -0.862, .864])

        lr = select_from_list(["left", "right"])
        if lr == 'l':
            arm, grip, jpos = pr2.larm, pr2.lgrip, l_switch_posture
        else:
            arm, grip, jpos = pr2.rarm, pr2.rgrip, PR2.mirror_arm_joints(
Example #32
if os.path.exists(outfilename): os.remove(outfilename)
outfile = h5py.File(outfilename, "w")

def array_contains(arr, pt):
    return np.any(np.all((pt[None,:] == arr), axis=1))

def get_good_inds(orig_xyz, good_xyz):
    good_inds = []
    for i, xyz in enumerate(orig_xyz):
        if array_contains(good_xyz, xyz):
            good_inds.append(i)
    return good_inds

for object_name in object_names:
    pc_sel = seg_svc.call(ProcessCloudRequest(cloud_in = pc)).cloud_out
    xyz, rgb = ros_utils.pc2xyzrgb(pc_sel)
    xyz = xyz.reshape(-1,3)
    rgb = rgb.reshape(-1,3)
    if args.do_filtering:
        xyz_down = voxel_downsample(xyz, .02)
        graph = ri.points_to_graph(xyz_down, .03)
        cc = ri.largest_connected_component(graph)
        good_xyzs = np.array([graph.node[node_id]["xyz"] for node_id in cc.nodes()])
        good_inds = get_good_inds(xyz, good_xyzs)
        good_rgbs = rgb[good_inds]
    else:
        good_xyzs, good_rgbs = xyz, rgb

    outfile.create_group(object_name)
    outfile[object_name]["xyz"] = good_xyzs
    #outfile[object_name]["rgb"] = good_rgbs
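
A small check of the two helpers above (array_contains and get_good_inds), added for illustration; the toy points are made up and nothing else from the script is needed.

import numpy as np
pts = np.array([[0., 0., 0.], [1., 2., 3.]])
print array_contains(pts, np.array([1., 2., 3.]))   # True
print get_good_inds(pts, pts[:1])                   # [0]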
Example #33
def callback(req):
    assert isinstance(req, MakeTrajectoryRequest)
    if req.verb not in h5file:
        rospy.logerr("verb %s was not found in library",req.verb)
    
    demo_group = h5file[req.verb]["demos"]
    new_object_clouds = [pc2xyzrgb(cloud)[0] for cloud in req.object_clouds]
    
    rospy.logwarn("using first demo")
    best_demo_name = demo_group.keys()[0]
    best_demo = demo_group[best_demo_name]
                           
    assert "transform" in verb_info[req.verb]                       
        
    transform_type = verb_info[req.verb]["transform"]
    x_nd = voxel_downsample(asarray(best_demo["object_clouds"].values()[0]),.02)
    y_md = voxel_downsample(new_object_clouds[0],.02)
    
    if transform_type == "tps":
        warp = registration.tps_rpm(x_nd, y_md, plotting = 4, reg_init = 2, reg_final = .1, n_iter = 39)
    elif transform_type == "translation2d":
        warp = registration.Translation2d()
        warp.fit(x_nd, y_md)
    elif transform_type == "rigid2d":
        warp = registration.Rigid2d()
        warp.fit(x_nd, y_md)
    else:
        raise Exception("transform type %s is not yet implemented"%transform_type)        

    this_verb_info = verb_info[req.verb]
    l_offset,r_offset = np.zeros(3), np.zeros(3)
    if "r_tool" in this_verb_info:
        r_offset = asarray(tool_info[this_verb_info["r_tool"]]["translation"])
    if "l_tool" in this_verb_info:
        l_offset = asarray(tool_info[this_verb_info["l_tool"]]["translation"])
    if "r_offset" in this_verb_info:
        r_offset += asarray(this_verb_info["r_offset"])
    if "l_offset" in this_verb_info:
        l_offset += asarray(this_verb_info["l_offset"])



    arms_used = best_demo["arms_used"].value
    warped_demo = warping.transform_demo(warp, best_demo,arms_used in 'lb', arms_used in 'rb',object_clouds=True, l_offset = l_offset, r_offset = r_offset)
    
    

    resp = MakeTrajectoryResponse()
    traj = resp.traj
        
    if arms_used == "l":
        traj.arms_used = "l"
        traj.gripper0_poses.poses = xyzs_quats_to_poses(warped_demo["l_gripper_xyzs"], warped_demo["l_gripper_quats"])
        traj.gripper0_angles = warped_demo["l_gripper_angles"]
        if "l_tool" in this_verb_info: traj.gripper0_angles *= 0
    elif arms_used == "r":
        traj.arms_used = "r"
        traj.gripper0_poses.poses = xyzs_quats_to_poses(warped_demo["r_gripper_xyzs"], warped_demo["r_gripper_quats"])
        traj.gripper0_angles = warped_demo["r_gripper_angles"]
        if "r_tool" in this_verb_info: traj.gripper0_angles *= 0
    elif arms_used == "b":
        traj.arms_used = "b"
        traj.gripper0_poses.poses = xyzs_quats_to_poses(warped_demo["l_gripper_xyzs"], warped_demo["l_gripper_quats"])
        traj.gripper1_poses.poses = xyzs_quats_to_poses(warped_demo["r_gripper_xyzs"], warped_demo["r_gripper_quats"])
        traj.gripper0_angles = warped_demo["l_gripper_angles"]
        traj.gripper1_angles = warped_demo["r_gripper_angles"]

        if "l_tool" in this_verb_info: traj.gripper0_angles *= 0
        if "r_tool" in this_verb_info: traj.gripper1_angles *= 0
        
    traj.gripper0_poses.header.frame_id = req.object_clouds[0].header.frame_id
    traj.gripper1_poses.header.frame_id = req.object_clouds[0].header.frame_id

    Globals.handles = []
    plot_original_and_warped_demo(best_demo, warped_demo, traj)
    return resp
Example #34
def save_pcs(clouds_pc2, save_file):
    os.chdir(osp.join(osp.dirname(__file__), "exp_pcs"))
    for i, cloud_pc2 in enumerate(clouds_pc2):
        np.savetxt("%s.pc%i" % (save_file, i), voxel_downsample(pc2xyzrgb(cloud_pc2)[0], 0.02))
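
A hedged sketch of calling save_pcs with one freshly captured cloud, assuming a node is initialized and an exp_pcs directory exists next to the module; the topic and file stem are placeholders.

import rospy
import sensor_msgs.msg as sm
cloud = rospy.wait_for_message('/drop/points', sm.PointCloud2)
save_pcs([cloud], 'exp01')   # writes exp01.pc0 into ./exp_pcs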
Example #35
def handle_initialization_request(req):
    "rope initialization service"
    
    if args.save_requests:
        fname = "/tmp/init_req_%i.pkl"%time.time()
        with open(fname,"w") as fh:
            cPickle.dump(req, fh)
            print "saved", fname
    
    xyz, _ = pc2xyzrgb(req.cloud)
    xyz = np.squeeze(xyz)
    obj_type = determine_object_type(xyz, plotting=args.plotting)
    print "object type:", obj_type
    
    
    if obj_type == "towel":
        xyz = xyz[(xyz[:,2]-np.median(xyz[:,2]))<.05,:]
        center, (height, width), angle = cv2.minAreaRect(xyz[:,:2].astype('float32').reshape(1,-1,2))
        angle *= np.pi/180
        corners = np.array(center)[None,:] + np.dot(
            np.array([[-height/2,-width/2],
                      [-height/2,width/2],
                      [height/2, width/2],
                      [height/2, -width/2]]),
            np.array([[np.cos(angle), np.sin(angle)],
                      [-np.sin(angle),np.cos(angle)]])) 
        resp = bs.InitializationResponse()
        resp.objectInit.type = "towel_corners"
        resp.objectInit.towel_corners.polygon.points = [gm.Point(corner[0], corner[1], np.median(xyz[:,2])) for corner in corners]
        resp.objectInit.towel_corners.header = req.cloud.header
        print req.cloud.header
        poly_pub.publish(resp.objectInit.towel_corners)
    
    elif obj_type == "rope":
        if args.simple_rope_init:
            total_path_3d = ri.find_path_through_point_cloud_simple(xyz, plotting=args.plotting)            
        else:
            total_path_3d = ri.find_path_through_point_cloud(xyz, plotting=args.plotting)            
        resp = bs.InitializationResponse()
        resp.objectInit.type = "rope"
        rope = resp.objectInit.rope = bm.Rope()
        
        rope.header = req.cloud.header
        rope.nodes = [gm.Point(x,y,z) for (x,y,z) in total_path_3d]    
        rope.radius = .006
        print "lengths:", [np.linalg.norm(total_path_3d[i+1] - total_path_3d[i]) for i in xrange(len(total_path_3d)-1)]        
        rospy.loginfo("created a rope with %i nodes, each has length %.2f, radius %.2f", len(rope.nodes), 
                      np.linalg.norm(total_path_3d[i+1] - total_path_3d[i]), rope.radius)
        
        pose_array = gm.PoseArray()
        pose_array.header = req.cloud.header
        pose_array.poses = [gm.Pose(position=point, orientation=gm.Quaternion(0,0,0,1)) for point in rope.nodes]
#        marker_handles[0] = rviz.draw_curve(pose_array,0)

    elif obj_type == "box":
        xyz = xyz[abs(xyz[:,2]-np.median(xyz[:,2]))<.05,:]
        center, (height, width), angle = cv2.minAreaRect(xyz[:,:2].astype('float32').reshape(1,-1,2))
        angle *= np.pi/180
        print 'width', width, 'height', height, 'median', np.median(xyz[:,2]), 'angle', angle*180/np.pi

        resp = bs.InitializationResponse()
        resp.objectInit.type = "box"
        rope = resp.objectInit.box = bm.Box()

        #rope.header = req.cloud.header                                                 
        rope.center.x = center[0]
        rope.center.y = center[1]
        rope.center.z = np.median(xyz[:,2])/2.0
        rope.extents.x = height
        rope.extents.y = width
        rope.extents.z = np.median(xyz[:,2])
        rope.angle = angle
    
    return resp
Example #36
    rgb_sel = rgb[yl:yl+h, xl:xl+w,:][mask.astype('bool')]
    resp.cloud_out = xyzrgb2pc(xyz_sel, rgb_sel, req.cloud_in.header.frame_id)
    
    if (args.plotting):
        pose_array = conversions.array_to_pose_array(xyz_sel, req.cloud_in.header.frame_id)
        Globals.handles = [Globals.rviz.draw_curve(pose_array, rgba = (1,1,0,1), type=Marker.CUBE_LIST, width=.001, ns="segmentation")]
    
    return resp
    

if __name__ == "__main__":
    if args.test:        
        if rospy.get_name() == "/unnamed": 
            rospy.init_node("test_interactive_segmentation_service",disable_signals=True)
            listener = tf.TransformListener()
            pc = rospy.wait_for_message("/drop/points", sm.PointCloud2)     
            pc_tf = transformPointCloud2(pc, listener, "base_footprint", pc.header.frame_id)            
            Globals.setup()            
            req = ProcessCloudRequest()
            req.cloud_in = pc_tf
            resp = callback(req)
            xyz, rgb = pc2xyzrgb(resp.cloud_out)
            pose_array = conversions.array_to_pose_array(xyz.reshape(-1,3), 'base_footprint')
            Globals.handles.append(Globals.rviz.draw_curve(pose_array,rgba=(1,0,0,1),type=Marker.CUBE_LIST,width=.002, ns = 'segmentation'))
            
    else:
        rospy.init_node("test_interactive_segmentation_service",disable_signals=True)
        Globals.setup()
        service = rospy.Service("interactive_segmentation", ProcessCloud, callback)
        time.sleep(1000000)
Example #37
def make_traj_multi_stage(req, current_stage_info, stage_num, prev_stage_info, prev_exp_clouds, verb_data_accessor, to_gripper_frame_func=None):
    assert isinstance(req, MakeTrajectoryRequest)
    prev_exp_clouds = None if stage_num == 0 else [pc2xyzrgb(cloud)[0] for cloud in prev_exp_clouds]
    cur_exp_clouds = [pc2xyzrgb(cloud)[0] for cloud in req.object_clouds]
    clouds_frame_id = req.object_clouds[0].header.frame_id
    return make_traj_multi_stage_do_work(current_stage_info, cur_exp_clouds, clouds_frame_id, stage_num, prev_stage_info, prev_exp_clouds, verb_data_accessor, to_gripper_frame_func)
Example #38
def make_traj(req):
    """
    Generate a trajectory using warping
    See MakeTrajectory service description
    (TODO) should be able to specify a specific demo
    """
    assert isinstance(req, MakeTrajectoryRequest)

    new_object_clouds = [pc2xyzrgb(cloud)[0] for cloud in req.object_clouds]

    scene_info = "PLACEHOLDER"
    best_demo_name, best_demo_info = verbs.get_closest_demo(
        req.verb, scene_info)
    best_demo_data = verbs.get_demo_data(best_demo_name)

    transform_type = "tps"

    old_object_clouds = [
        best_demo_data["object_clouds"][obj_name]["xyz"]
        for obj_name in best_demo_data["object_clouds"].keys()
    ]

    if len(old_object_clouds) > 1:
        raise Exception("i don't know what to do with multiple object clouds")
    x_nd = voxel_downsample(old_object_clouds[0], .02)
    y_md = voxel_downsample(new_object_clouds[0], .02)

    if transform_type == "tps":
        #warp = registration.tps_rpm_zrot(x_nd, y_md, plotting=2,reg_init=2,reg_final=.05, n_iter=10, verbose=False)
        warp = registration.tps_rpm(x_nd,
                                    y_md,
                                    plotting=2,
                                    reg_init=2,
                                    reg_final=.05,
                                    n_iter=10,
                                    verbose=False)
    elif transform_type == "translation2d":
        warp = registration.Translation2d()
        warp.fit(x_nd, y_md)
    elif transform_type == "rigid2d":
        warp = registration.Rigid2d()
        warp.fit(x_nd, y_md)
    else:
        raise Exception("transform type %s is not yet implemented" %
                        transform_type)

    l_offset, r_offset = np.zeros(3), np.zeros(3)
    #if "r_tool" in best_demo_info:
    #r_offset = asarray(tool_info[this_verb_info["r_tool"]]["translation"])
    #if "l_tool" in best_demo_info:
    #l_offset = asarray(tool_info[this_verb_info["l_tool"]]["translation"])

    arms_used = best_demo_info["arms_used"]
    warped_demo_data = warping.transform_verb_demo(warp, best_demo_data)

    resp = MakeTrajectoryResponse()
    traj = resp.traj

    traj.arms_used = arms_used
    if arms_used in "lb":
        traj.l_gripper_poses.poses = juc.xyzs_quats_to_poses(
            warped_demo_data["l_gripper_tool_frame"]["position"],
            warped_demo_data["l_gripper_tool_frame"]["orientation"])
        traj.l_gripper_angles = warped_demo_data["l_gripper_joint"]
        traj.l_gripper_poses.header.frame_id = req.object_clouds[
            0].header.frame_id
        if "l_tool" in best_demo_info: traj.l_gripper_angles *= 0
    if arms_used in "rb":
        traj.r_gripper_poses.poses = juc.xyzs_quats_to_poses(
            warped_demo_data["r_gripper_tool_frame"]["position"],
            warped_demo_data["r_gripper_tool_frame"]["orientation"])
        traj.r_gripper_angles = warped_demo_data["r_gripper_joint"]
        traj.r_gripper_poses.header.frame_id = req.object_clouds[
            0].header.frame_id
        if "r_tool" in best_demo_info: traj.r_gripper_angles *= 0

    Globals.handles = []
    plot_original_and_warped_demo(best_demo_data, warped_demo_data, traj)

    pose_array = conversions.array_to_pose_array(y_md, 'base_footprint')
    Globals.handles.append(
        Globals.rviz.draw_curve(pose_array,
                                rgba=(0, 0, 1, 1),
                                width=.01,
                                type=Marker.CUBE_LIST))
    return resp
Example #39
def make_traj(req):
    """
    Generate a trajectory using warping
    See MakeTrajectory service description
    (TODO) should be able to specify a specific demo
    """
    assert isinstance(req, MakeTrajectoryRequest)
    
    new_object_clouds = [pc2xyzrgb(cloud)[0] for cloud in req.object_clouds]
    
    scene_info = "PLACEHOLDER"
    best_demo_name, best_demo_info = verbs.get_closest_demo(req.verb, scene_info)    
    best_demo_data = verbs.get_demo_data(best_demo_name)
        
    transform_type = "tps"
    
    old_object_clouds = [best_demo_data["object_clouds"][obj_name]["xyz"]
            for obj_name in best_demo_data["object_clouds"].keys()]

    if len(old_object_clouds) > 1:
        raise Exception("i don't know what to do with multiple object clouds")
    x_nd = voxel_downsample(old_object_clouds[0],.02)
    y_md = voxel_downsample(new_object_clouds[0],.02)
    
    if transform_type == "tps":
        #warp = registration.tps_rpm_zrot(x_nd, y_md, plotting=2,reg_init=2,reg_final=.05, n_iter=10, verbose=False)
        warp = registration.tps_rpm(x_nd, y_md, plotting=2,reg_init=2,reg_final=.05, n_iter=10, verbose=False)
    elif transform_type == "translation2d":
        warp = registration.Translation2d()
        warp.fit(x_nd, y_md)
    elif transform_type == "rigid2d":
        warp = registration.Rigid2d()
        warp.fit(x_nd, y_md)
    else:
        raise Exception("transform type %s is not yet implemented"%transform_type)        

    l_offset,r_offset = np.zeros(3), np.zeros(3)
    #if "r_tool" in best_demo_info:
        #r_offset = asarray(tool_info[this_verb_info["r_tool"]]["translation"])
    #if "l_tool" in best_demo_info:
        #l_offset = asarray(tool_info[this_verb_info["l_tool"]]["translation"])


    arms_used = best_demo_info["arms_used"]
    warped_demo_data = warping.transform_verb_demo(warp, best_demo_data)        

    resp = MakeTrajectoryResponse()
    traj = resp.traj

    traj.arms_used = arms_used
    if arms_used in "lb":
        traj.l_gripper_poses.poses = xyzs_quats_to_poses(warped_demo_data["l_gripper_tool_frame"]["position"], warped_demo_data["l_gripper_tool_frame"]["orientation"])
        traj.l_gripper_angles = warped_demo_data["l_gripper_joint"]
        traj.l_gripper_poses.header.frame_id = req.object_clouds[0].header.frame_id
        if "l_tool" in best_demo_info: traj.l_gripper_angles *= 0
    if arms_used in "rb":
        traj.r_gripper_poses.poses = xyzs_quats_to_poses(warped_demo_data["r_gripper_tool_frame"]["position"], warped_demo_data["r_gripper_tool_frame"]["orientation"])
        traj.r_gripper_angles = warped_demo_data["r_gripper_joint"]
        traj.r_gripper_poses.header.frame_id = req.object_clouds[0].header.frame_id
        if "r_tool" in best_demo_info: traj.r_gripper_angles *= 0

    Globals.handles = []
    plot_original_and_warped_demo(best_demo_data, warped_demo_data, traj)

    pose_array = conversions.array_to_pose_array(y_md, 'base_footprint')
    Globals.handles.append(Globals.rviz.draw_curve(pose_array, rgba = (0,0,1,1),width=.01,type=Marker.CUBE_LIST))
    return resp
Example #40
    zsel = xyz[yl:yl+h, xl:xl+w, 2]
    mask = (mask%2==1) & np.isfinite(zsel)# & (zsel - table_height > -1)
    
    resp = ProcessCloudResponse()
    xyz_sel = xyz[yl:yl+h, xl:xl+w,:][mask.astype('bool')]
    rgb_sel = rgb[yl:yl+h, xl:xl+w,:][mask.astype('bool')]
    resp.cloud_out = xyzrgb2pc(xyz_sel, rgb_sel, req.cloud_in.header.frame_id)
    return resp
    

if __name__ == "__main__":
    if args.test:        
        if rospy.get_name() == "/unnamed": 
            rospy.init_node("test_interactive_segmentation_service",disable_signals=True)
            listener = tf.TransformListener()
            pc = rospy.wait_for_message("/drop/points", sm.PointCloud2)     
            pc_tf = transformPointCloud2(pc, listener, "base_footprint", pc.header.frame_id)            
            Globals.setup()            
            req = ProcessCloudRequest()
            req.cloud_in = pc_tf
            resp = callback(req)
            xyz, rgb = pc2xyzrgb(resp.cloud_out)
            pose_array = conversions.array_to_pose_array(xyz.reshape(-1,3), 'base_footprint')
            Globals.handles.append(Globals.rviz.draw_curve(pose_array,rgba=(1,0,0,1),type=Marker.CUBE_LIST,width=.002, ns = 'segmentation'))
            
    else:
        rospy.init_node("test_interactive_segmentation_service",disable_signals=True)
        Globals.setup()
        service = rospy.Service("interactive_segmentation", ProcessCloud, callback)
        rospy.spin()
Example #41
def callback(req):
    xyz, rgb = pc2xyzrgb(req.cloud_in)
    rgb = rgb.copy()
    gc = GetClick()
    cv2.setMouseCallback("rgb", gc.callback)
    while not gc.done:
        cv2.imshow("rgb", rgb)
        cv2.waitKey(10)

    gm = GetMouse()
    xy_corner1 = np.array(gc.xy)

    cv2.setMouseCallback("rgb", gm.callback)
    while not gm.done:
        img = rgb.copy()

        if gm.xy is not None:
            xy_corner2 = np.array(gm.xy)
            cv2.rectangle(img, tuple(xy_corner1), tuple(xy_corner2), (0,255, 0))
        cv2.imshow("rgb",img)
        cv2.waitKey(10)
        
    xy_tl = np.array([xy_corner1, xy_corner2]).min(axis=0)
    xy_br = np.array([xy_corner1, xy_corner2]).max(axis=0)
    #mask = np.zeros(xyz.shape[:2],dtype='uint8')
    #rectangle = gm.xy + tuple(2*(center - np.r_[gm.xy]))
    #tmp1 = np.zeros((1, 13 * 5))
    #tmp2 = np.zeros((1, 13 * 5))    
    #result = cv2.grabCut(rgb,mask,rectangle,tmp1, tmp2,10,mode=cv2.GC_INIT_WITH_RECT)
        
    #from cv2 import cv
    #mask[mask == cv.GC_BGD] = 0
    #mask[mask == cv.GC_PR_BGD] = 63
    #mask[mask == cv.GC_FGD] = 255  
    #mask[mask == cv.GC_PR_FGD] = 192
    #cv2.imshow('mask',mask)
    #cv2.waitKey(10)


    #table_height = get_table_height(xyz)
    xl, yl = xy_tl
    w, h = xy_br - xy_tl
    mask = np.zeros((h,w),dtype='uint8')    
    mask.fill(cv2.GC_PR_BGD)
    if args.init == "height":
        initial_height_thresh = stats.scoreatpercentile(xyz[yl:yl+h, xl:xl+w,2].flatten(), 50)
        mask[xyz[yl:yl+h, xl:xl+w,2] > initial_height_thresh] = cv2.GC_PR_FGD
    elif args.init == "middle":
        print int(yl+h/4),int(yl+3*h/4),int(xl+w/4),int(xl+3*w/4)
        mask[h//4:3*h//4, w//4:3*w//4] = cv2.GC_PR_FGD
        print mask.mean()

    tmp1 = np.zeros((1, 13 * 5))
    tmp2 = np.zeros((1, 13 * 5))    
    cv2.grabCut(rgb[yl:yl+h, xl:xl+w, :],mask,(0,0,0,0),tmp1, tmp2,10,mode=cv2.GC_INIT_WITH_MASK)
            
    contours = cv2.findContours(mask.astype('uint8')%2,cv2.RETR_LIST,cv2.CHAIN_APPROX_NONE)[0]
    cv2.drawContours(rgb[yl:yl+h, xl:xl+w, :],contours,-1,(0,255,0))
    
    cv2.imshow('rgb', rgb)
    cv2.waitKey(10)

    zsel = xyz[yl:yl+h, xl:xl+w, 2]
    mask = (mask%2==1) & np.isfinite(zsel)# & (zsel - table_height > -1)
    
    resp = ProcessCloudResponse()
    xyz_sel = xyz[yl:yl+h, xl:xl+w,:][mask.astype('bool')]
    rgb_sel = rgb[yl:yl+h, xl:xl+w,:][mask.astype('bool')]
    resp.cloud_out = xyzrgb2pc(xyz_sel, rgb_sel, req.cloud_in.header.frame_id)
    return resp
Example #42
0
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("out")
parser.add_argument("--cloud_in",default="/drop/points",nargs = "?")
parser.add_argument("--dont_transform", action="store_true")
args = parser.parse_args()

from brett2 import ros_utils
from jds_utils import conversions
import roslib
roslib.load_manifest('tf')
import tf
import rospy
import numpy as np
import sensor_msgs.msg as sm
from brett2.ros_utils import transformPointCloud2

rospy.init_node("get_point_cloud")
listener = tf.TransformListener()
rospy.sleep(.3)

pc = rospy.wait_for_message(args.cloud_in, sm.PointCloud2)
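# transform the cloud into base_footprint unless --dont_transform was given, then save xyz/bgr arrays to the output file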
if args.dont_transform:
    pc_tf = pc
else:
    pc_tf = transformPointCloud2(pc, listener, "base_footprint", pc.header.frame_id)

xyz, bgr = ros_utils.pc2xyzrgb(pc_tf)
np.savez(args.out, xyz=xyz, bgr=bgr)
Example #43
0
def find_closest_demo(verb, exp_clouds_pc2):
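    # convert each PointCloud2 into an xyz array and ask scene_diff for the most similar demo for this verb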
    exp_clouds = [pc2xyzrgb(cloud)[0] for cloud in exp_clouds_pc2]
    closest_demo_name = scene_diff.get_closest_demo(verb, exp_clouds)
    return closest_demo_name
Example #44
0
def handle_initialization_request(req):
    "rope initialization service"

    if args.save_requests:
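        # pickle the incoming request to /tmp (e.g. for offline debugging)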
        fname = "/tmp/init_req_%i.pkl" % time.time()
        with open(fname, "w") as fh:
            cPickle.dump(req, fh)
            print "saved", fname

    xyz, _ = pc2xyzrgb(req.cloud)
    xyz = np.squeeze(xyz)
    obj_type = determine_object_type(xyz, plotting=args.plotting)
    print "object type:", obj_type

    if obj_type == "towel":
        xyz = xyz[(xyz[:, 2] - np.median(xyz[:, 2])) < .05, :]
        center, (height, width), angle = cv2.minAreaRect(
            xyz[:, :2].astype('float32').reshape(1, -1, 2))
        angle *= np.pi / 180
        corners = np.array(center)[None, :] + np.dot(
            np.array([[-height / 2, -width / 2], [-height / 2, width / 2],
                      [height / 2, width / 2], [height / 2, -width / 2]]),
            np.array([[np.cos(angle), np.sin(angle)],
                      [-np.sin(angle), np.cos(angle)]]))
        resp = bs.InitializationResponse()
        resp.objectInit.type = "towel_corners"
        resp.objectInit.towel_corners.polygon.points = [
            gm.Point(corner[0], corner[1], np.median(xyz[:, 2]))
            for corner in corners
        ]
        resp.objectInit.towel_corners.header = req.cloud.header
        print req.cloud.header
        poly_pub.publish(resp.objectInit.towel_corners)

    if obj_type == "rope":
        total_path_3d = find_path_through_point_cloud(xyz,
                                                      plotting=args.plotting)
        resp = bs.InitializationResponse()
        resp.objectInit.type = "rope"
        rope = resp.objectInit.rope = bm.Rope()

        rope.header = req.cloud.header
        rope.nodes = [gm.Point(x, y, z) for (x, y, z) in total_path_3d]
        rope.radius = .006
        print "lengths:", [
            np.linalg.norm(total_path_3d[i + 1] - total_path_3d[i])
            for i in xrange(len(total_path_3d) - 1)
        ]
        rospy.loginfo(
            "created a rope with %i nodes, each has length %.2f, radius %.2f",
            len(rope.nodes),
            np.linalg.norm(total_path_3d[i + 1] - total_path_3d[i]),
            rope.radius)

        pose_array = gm.PoseArray()
        pose_array.header = req.cloud.header
        pose_array.poses = [
            gm.Pose(position=point, orientation=gm.Quaternion(0, 0, 0, 1))
            for point in rope.nodes
        ]


#        marker_handles[0] = rviz.draw_curve(pose_array,0)

    return resp
Example #45
0
    def callback(request):
        Globals.pr2.rarm.goto_posture('side')
        Globals.pr2.larm.goto_posture('side')

        
        
        #Globals.rviz.remove_all_markers()
        #draw_table()        
        new_cloud1, _ = pc2xyzrgb(request.object_clouds[0])
        new_cloud2, _ = pc2xyzrgb(request.object_clouds[1])
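        # order the two clouds so the one with the smaller bounding box comes first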
        new_cloud1, new_cloud2 = sorted([new_cloud1, new_cloud2], key = lambda x: np.squeeze(x).ptp(axis=0).prod())
        
        new_cloud1 = new_cloud1.reshape(-1,3)
        new_cloud2 = new_cloud2.reshape(-1,3)
        
        
        new_xyz1 = (new_cloud1.max(axis=0) + new_cloud1.min(axis=0))/2
        new_xyz2 = (new_cloud2.max(axis=0) + new_cloud2.min(axis=0))/2

        f.fit(np.array([xyz1, xyz2]), np.array([new_xyz1, new_xyz2]), 1e6, 1e-3)
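        # apply the fitted warp f to the old gripper positions and orientations to get the new ones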
    
        new_gripper_xyzs, new_gripper_mats = f.transform_frames(old_gripper_xyzs, conversions.quats2mats(old_gripper_quats))
        new_gripper_quats = conversions.mats2quats(new_gripper_mats)
        #print "warning: using old oreitnations"
    
        show_objects_and_trajectory(new_gripper_xyzs, np.array([new_xyz1, new_xyz2]), np.array([quat1, quat2]), obj_dims, (0,1,0,1))
        show_objects_and_trajectory(old_gripper_xyzs, np.array([xyz1, xyz2]), np.array([quat1, quat2]), obj_dims, (0,0,1,1))
        grid_handle = draw_grid(Globals.rviz, f.transform_points, old_gripper_xyzs.min(axis=0), old_gripper_xyzs.max(axis=0), "base_footprint")
        HANDLES.append(grid_handle)

        Globals.pr2.join_all()        
        Globals.pr2.update_rave()


        best_traj_info, best_feasible_frac = None, 0

        env = Globals.pr2.robot.GetEnv()
        Globals.pr2.update_rave()
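        # build a collision environment around the obstacles and a BaseManipulation planner bound to its PR2 model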
        collision_env = create_obstacle_env(env)
        basemanip = openravepy.interfaces.BaseManipulation(collision_env.GetRobot("pr2"),plannername=None)
        rospy.sleep(.1)
        #collision_env.SetViewer("qtcoin")
        #raw_input("press enter to continue")

        for (lr, arm) in zip("lr",[Globals.pr2.larm,Globals.pr2.rarm]):
            name = arm.manip.GetName()
            manip = collision_env.GetRobot('pr2').GetManipulator(name)
            rospy.loginfo("trying to plan to initial position with %s",name)
            first_mat1 = np.eye(4)
            first_mat1[:3,:3] = new_gripper_mats[0]
            first_mat1[:3,3] = new_gripper_xyzs[0]
            first_mat = transform_relative_pose_for_ik(manip, first_mat1, "world", "%s_gripper_tool_frame"%lr)
            collision_env.GetRobot("pr2").SetActiveManipulator(name)
            trajobj = None
            try:
                trajobj = basemanip.MoveToHandPosition([first_mat],seedik=16,outputtrajobj=True,execute=0)
                rospy.loginfo("planning succeeded")
            except Exception:
                rospy.loginfo("planning failed")
                traceback.print_exc()
                print "initial ik result", manip.FindIKSolutions(first_mat,0)
                continue
            
            rospy.loginfo("trying ik")
            Globals.pr2.update_rave()
            joint_positions, inds = trajectories.make_joint_traj(new_gripper_xyzs, new_gripper_quats, manip, 'base_footprint', '%s_gripper_tool_frame'%lr, filter_options = 1+16)
            feasible_frac = float(len(inds)) / len(new_gripper_xyzs)  # float() so the fraction is not truncated by integer division
            print inds
            if feasible_frac > best_feasible_frac:
                best_feasible_frac = feasible_frac
                best_traj_info = dict(
                    feasible_frac = feasible_frac,
                    lr = 'l' if name == 'leftarm' else 'r',
                    manip = manip,
                    arm = arm,
                    initial_traj = trajobj,
                    joint_positions = joint_positions)
        
        collision_env.Destroy()
        response = PourResponse()
        
        if best_feasible_frac > .75:
            
            if best_traj_info["initial_traj"] is not None:
                follow_rave_traj(best_traj_info["arm"], best_traj_info["initial_traj"])
            else:
                print "no initial traj"
                #print "feasible inds", best_traj_info["inds"]
            
            body_traj = np.zeros(len(best_traj_info["joint_positions"]),dtype=trajectories.BodyState)
            lr = best_traj_info["lr"]
            body_traj["%s_arm"%lr] = best_traj_info["joint_positions"]
            body_traj["%s_gripper"%lr] = gripper_angles

            trajectories.follow_body_traj(Globals.pr2, body_traj, times, 
                    r_arm = (lr=='r'), r_gripper = (lr=='r'), l_arm = (lr=='l'), l_gripper= (lr=='l'))
            Globals.pr2.join_all()
            
            response.success = True
        else:
            rospy.logerr("could not execute trajectory because not enough points are reachable")
            response.success = False
        return response