Example #1
    def calculate_mk2_ik(self):

        #goal_point = Point(0.298, -0.249, -0.890) home position

        goal_pose = Pose()

        # Goto position 1
        goal_point = Point(0.298, -0.249, -0.890)
        #goal_point = Point(0.298, -0.5, -1.0)
        goal_pose.position = goal_point
        quat = quaternion_from_euler(0.0, 0.0, 0.0) # roll, pitch, yaw
        goal_pose.orientation = Quaternion(*quat.tolist())
        moveit_goal = self.create_move_group_pose_goal(goal_pose, group="manipulator", end_link_name="link_7", plan_only=False)
        rospy.loginfo("Sending goal...")
        self.moveit_ac.send_goal(moveit_goal)
        rospy.loginfo("Waiting for result...")
        self.moveit_ac.wait_for_result(rospy.Duration(10.0))
        moveit_result = self.moveit_ac.get_result()

        # Goto position 2
        goal_point = Point(0.305, -0.249, -1.05)
        goal_pose.position = goal_point
        quat = quaternion_from_euler(0.0, 0.0, 0.0) # roll, pitch, yaw
        goal_pose.orientation = Quaternion(*quat.tolist())
        moveit_goal = self.create_move_group_pose_goal(goal_pose, group="manipulator", end_link_name="link_7", plan_only=False)
        rospy.loginfo("Sending goal...")
        self.moveit_ac.send_goal(moveit_goal)
        rospy.loginfo("Waiting for result...")
        self.moveit_ac.wait_for_result(rospy.Duration(10.0))
        moveit_result = self.moveit_ac.get_result()
def move_to_box(objcolorl):
    if objcolorl == 0:
        #move to green box
        pose = Pose()
        pose.orientation = Quaternion(1.00, 0.0, 0.00, 0.00)
        pose.position = Point(0.570, -0.176, 0.283)

        path = '/home/ynazon1/Pictures/green_success.png' 
        img = cv2.imread(path)
        msg = cv_bridge.CvBridge().cv2_to_imgmsg(img, encoding="bgr8")
        pubpic = rospy.Publisher('/robot/xdisplay', Image, latch=True, queue_size=1)
        pubpic.publish(msg)
        # Sleep to allow for image to be published.
        rospy.sleep(1)

    else:
        #move to blue box
        pose = Pose()
        pose.orientation = Quaternion(1.00, 0.0, 0.00, 0.00)
        pose.position = Point(0.708, -0.153, 0.258)

        path = '/home/ynazon1/Pictures/red_success.png' 
        img = cv2.imread(path)
        msg = cv_bridge.CvBridge().cv2_to_imgmsg(img, encoding="bgr8")
        pubpic = rospy.Publisher('/robot/xdisplay', Image, latch=True, queue_size=1)
        pubpic.publish(msg)
        # Sleep to allow for image to be published.
        rospy.sleep(1)
    request_pose(pose, "left", left_group)
def pose_at_distance2(pose1, pose2, distance):
    """
    Returns a pose that has the same orientation as the original,
    but with its position at a given distance from the original.
    Very useful when you want to maintain a distance from an object.
    It takes into account where the robot is.
    """

    new_pose2 = Pose()
    new_pose2.position = substract_vector(pose2.position, pose1.position)
    new_pose2 = pose_at_distance(new_pose2, distance)

    new_pose2.position = add_vectors(new_pose2.position, pose1.position)
    return new_pose2
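
The vector helpers used above (substract_vector, add_vectors, pose_at_distance) are not included in the snippet. A minimal sketch of the two vector helpers, with names taken from the code and hypothetical implementations:

from geometry_msgs.msg import Point

def substract_vector(a, b):
    # Component-wise a - b for Point-like messages (hypothetical)
    return Point(a.x - b.x, a.y - b.y, a.z - b.z)

def add_vectors(a, b):
    # Component-wise a + b for Point-like messages (hypothetical)
    return Point(a.x + b.x, a.y + b.y, a.z + b.z)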
    def __init__(self, from_frame, to_frame, position, orientation):
        self.server = InteractiveMarkerServer("posestamped_im")
        o = orientation
        r, p, y = euler_from_quaternion([o.x, o.y, o.z, o.w])
        rospy.loginfo("Publishing transform and PoseStamped from: " +
                      from_frame + " to " + to_frame +
                      " at " + str(position.x) + " " + str(position.y) + " " + str(position.z) +
                      " and orientation " + str(o.x) + " " + str(o.y) +
                      " " + str(o.z) + " " + str(o.w) + " or rpy " +
                      str(r) + " " + str(p) + " " + str(y))
        self.menu_handler = MenuHandler()
        self.from_frame = from_frame
        self.to_frame = to_frame
        # Transform broadcaster
        self.tf_br = tf.TransformBroadcaster()

        self.pub = rospy.Publisher('/posestamped', PoseStamped, queue_size=1)
        rospy.loginfo("Publishing posestampeds at topic: " + str(self.pub.name))
        pose = Pose()
        pose.position = position
        pose.orientation = orientation
        self.last_pose = pose
        self.print_commands(pose)
        self.makeGraspIM(pose)

        self.server.applyChanges()
Example #5
 def find_joint_pose(self, pose, targ_x=0.0, targ_y=0.0, targ_z=0.0,
                     targ_ox=0.0, targ_oy=0.0, targ_oz=0.0):
     '''
     Finds the joint position of the arm given some pose and the
     offsets from it (to avoid opening the structure all the time
     outside of the function).
     '''
     ik_srv = "ExternalTools/right/PositionKinematicsNode/IKService"
     iksvc = rospy.ServiceProxy(ik_srv, SolvePositionIK)
     ik_request = SolvePositionIKRequest()
     the_pose = deepcopy(pose)
     the_pose['position'] = Point(x=targ_x + self.baxter_off.linear.x,
                                  y=targ_y + self.baxter_off.linear.y,
                                  z=targ_z + self.baxter_off.linear.z)
     angles = tf.transformations.quaternion_from_euler( \
         targ_ox + self.baxter_off.angular.x, \
         targ_oy + self.baxter_off.angular.y, \
         targ_oz + self.baxter_off.angular.z)
     the_pose['orientation'] = Quaternion(x=angles[0],
                                          y=angles[1],
                                          z=angles[2],
                                          w=angles[3])
     approach_pose = Pose()
     approach_pose.position = the_pose['position']
     approach_pose.orientation = the_pose['orientation']
     hdr = Header(stamp=rospy.Time.now(), frame_id='base')
     pose_req = PoseStamped(header=hdr, pose=approach_pose)
     ik_request.pose_stamp.append(pose_req)
     resp = iksvc(ik_request)
     return dict(zip(resp.joints[0].name, resp.joints[0].position))
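
A hedged usage sketch: the method expects pose as the dict returned by baxter_interface's endpoint_pose() (keys 'position' and 'orientation'); here `robot` stands in for whatever object owns find_joint_pose:

import rospy
import baxter_interface

rospy.init_node('ik_usage_demo')
limb = baxter_interface.Limb('right')
# Solve for joint angles 5 cm above the current endpoint, then move there
joints = robot.find_joint_pose(limb.endpoint_pose(), targ_z=0.05)
limb.move_to_joint_positions(joints)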
Example #6
def main(argv):
    x = float(sys.argv[1])
    y = float(sys.argv[2])
    th = float(sys.argv[3])

    print("x: %.3f  y: %.3f  Th: %.1f") % (x, y, th) # print 2D pose data to terminal
	
    time.sleep(0.5)
    #publish pose data to ros topic
    msg = Pose()

    q = tf.transformations.quaternion_from_euler(0, 0, th)

    msg.position = Point(x,y,0)

    msg.orientation.x = q[0]
    msg.orientation.y = q[1]
    msg.orientation.z = q[2]
    msg.orientation.w = q[3]

    pose_pub.publish(msg)
    
    key_pressed = False
    while key_pressed == False:
         key_pressed = select.select([sys.stdin], [], [], 0) == ([sys.stdin], [], []) # is key pressed?
         time.sleep(0.5)
         
    sys.exit()
Example #7
	def project(self, p, radius, width, height) :
		#print p
		#print width
		#print height
		#print "radius"
		#print radius

		xFOV = 63.38
		yFOV = 48.25
		cx = width /2
		cy = height /2
		fx = cx / np.tan((xFOV/2) * np.pi / 180)
		fy = cy / np.tan((yFOV/2) * np.pi / 180)
		
		toball = np.zeros(3)
		toball[0] = (p[0] - cx) / fx
		toball[1] = -(p[1] - cy) / fy
		toball[2] = 1
		toball = toball / np.linalg.norm(toball) #normalize so we can then multiply by distance
		distance = self.pixel_radius / radius
		toball = toball * distance

		pose = Pose()
		pose.position = Point(toball[0], toball[1], toball[2])
		pose.orientation = Quaternion(0,0,0,1)
		#print "FOUND ORANGE BALL!!!!"
		#print toball
		return pose
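
The focal lengths above come from the pinhole model, f = (size / 2) / tan(FOV / 2). A standalone sanity check with the same FOV values and an assumed 640x480 image:

import numpy as np

width, height = 640, 480                          # assumed image size
fx = (width / 2) / np.tan(np.radians(63.38 / 2))  # horizontal FOV
fy = (height / 2) / np.tan(np.radians(48.25 / 2)) # vertical FOV
print(fx, fy)                                     # roughly 518 and 536 pixels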
Example #8
    def publishSphere2(self, pose, color, scale, lifetime=None):
        """
        Publish a sphere Marker. This renders a smoother, flatter-looking sphere.

        @param pose (numpy matrix, numpy ndarray, ROS Pose)
        @param color name (string) or RGB color value (tuple or list)
        @param scale (ROS Vector3, float)
        @param lifetime (float, None = never expire)
        """

        if (self.muted == True):
            return True

        # Convert input pose to a ROS Pose Msg
        if (type(pose) == numpy.matrix) or (type(pose) == numpy.ndarray):
            sphere_pose = mat_to_pose(pose)
        elif type(pose) == Pose:
            sphere_pose = pose
        elif type(pose) == Point:
            pose_msg = Pose()
            pose_msg.position = pose
            sphere_pose = pose_msg
        else:
            rospy.logerr("Pose is unsupported type '%s' in publishSphere()", type(pose).__name__)
            return False

        # Convert input scale to a ROS Vector3 Msg
        if type(scale) == Vector3:
            sphere_scale = scale
        elif type(scale) == float:
            sphere_scale = Vector3(scale, scale, scale)
        else:
            rospy.logerr("Scale is unsupported type '%s' in publishSphere()", type(scale).__name__)
            return False

        # Increment the ID number
        self.sphere_marker.id += 1

        # Get the default parameters
        sphere_marker = self.sphere_marker2 # sphere_marker2 = SPHERE_LIST

        if lifetime == None:
            sphere_marker.lifetime = rospy.Duration(0.0) # 0 = Marker never expires
        else:
            sphere_marker.lifetime = rospy.Duration(lifetime) # in seconds

        # Set the timestamp
        sphere_marker.header.stamp = rospy.Time.now()

        # Set marker size
        sphere_marker.scale = sphere_scale

        # Set marker color
        sphere_marker.color = self.getColor(color)

        # Set the pose of one sphere in the list
        sphere_marker.points[0] = sphere_pose.position
        sphere_marker.colors[0] = self.getColor(color)

        return self.publishMarker(sphere_marker)
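
A hedged usage sketch, where `viz` stands in for an instance of the visualization class this method belongs to and 'blue' is resolved by its getColor():

from geometry_msgs.msg import Pose

p = Pose()
p.position.x = 0.5
p.orientation.w = 1.0
viz.publishSphere2(p, 'blue', 0.05, lifetime=5.0)  # 5 cm sphere, expires after 5 s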
 def __init__(self, nodes) :
     self.node_zone = MarkerArray()
     for node in nodes.nodes :
         marker = Marker()
         marker.header.frame_id = "/map"
         marker.type = marker.LINE_STRIP
         marker.scale.x = 0.1
         marker.color.a = 0.8
         marker.color.r = 0.7
         marker.color.g = 0.1
         marker.color.b = 0.2
         node._get_coords()
         count=0
         for i in node.vertices :
             #print i[0], i[1]
             Vert = Point()
             Vert.z = 0.05
             Vert.x = node.px + i[0]
             Vert.y = node.py + i[1]
             marker.points.append(Vert)
             #vertname = "%s-%d" %(node.name, count)
             Pos = Pose()
             Pos.position = Vert
             # NOTE: this still needs to be done somehow
             #self._vertex_marker(vertname, Pos, vertname)
             count+=1
         marker.points.append(marker.points[0])
         self.node_zone.markers.append(marker)
     
     idn = 0
     for m in self.node_zone.markers:
         m.id = idn
         idn += 1 
Example #10
 def callback_Distance(data):
   #print data
   global i_distance
   m = marker_array.markers[markers_list[id_]]
   m.header.stamp = rospy.Time.now()
   m.action = 0
   m.type = m.SPHERE
   p = Pose()
   #print data.data[0], data.data[1], data.data[2]
   
   p.position = Point(data.data[0], data.data[1], data.data[2])
   if data.data[3] == data.data[4] :
     scale = data.data[3]
   else :
     i_distance = i_distance + 1
     scale = data.data[3]
     if i_distance%2 == 0:
       scale = data.data[4]
   m.pose = p
   m.scale.x = scale * 2
   m.scale.y = scale * 2
   m.scale.z = scale * 2
   m.color.r = 0
   m.color.g = 1
   m.color.b = 0
   m.color.a = 0.2
   m.lifetime = rospy.Duration(0)
   m.frame_locked = False
            def detect_faces_cb_ros_pkg(userdata, status, result):

                '''This code detects faces using the ROS package and then checks the list of people who are not able to walk.
                If a detected face is close to a person who cannot walk, that face is popped from the list.
                The first person closer than ---closestPersonDistance--- is sent to userdata'''

                LOCATION_LIST_DISTANCE = 1
                print "CallBack is called"
                people = result.face_positions
                # pose_in_map = transform_pose(people.pos, people.header.frame_id, "/map")
                if userdata.location_list:
                    for person in list(people):  # iterate over a copy; faces may be popped below
                        pose_in_stereo = Pose()
                        pose_in_stereo.position = person.pos

                        pose_in_map = transform_pose(pose_in_stereo, userdata.message.header.frame_id, "/map")

                        for not_walking in userdata.location_list:
                            distance_from_list = ((pose_in_map.position.x - not_walking[0])**2 + (pose_in_map.position.y - not_walking[1])**2)
                            if distance_from_list < LOCATION_LIST_DISTANCE:
                                people.pop(people.index(person))

                closestPerson = None
                closestPersonDistance = sys.maxint
                for person in people:
                    dist = person.pos.z
                    if dist < closestPersonDistance:
                        closestPerson = person
                        closestPersonDistance = dist
                    # FIXME: blacklist already seen people
                userdata.closest_person = closestPerson
                print "Closest person (from find_person_with_visit_checker): ", closestPerson
                return succeeded if closestPerson else aborted
def get_grasp_pose(g_decision_making_state, object_grasp_pose, object_grasp_pose_set):
    
	# load pre-computed grasps
	# loaded_file = numpy.load('../grasp_data/' + g_decision_making_state.object_name + '.npz' )
	
	# loaded_grasp = loaded_file['grasp'][0]
	# print len(loaded_grasp[g_decision_making_state.object_name])

	loaded_file = numpy.load('../grasp_data/' + g_decision_making_state.object_name + '.npz' )
	loaded_grasp = loaded_file['grasp'][0]
	num_of_grasp = len(loaded_grasp[g_decision_making_state.object_name])
	temp = object_grasp_pose
	for i in range(0,num_of_grasp):
		print i
		print_angles(loaded_grasp[g_decision_making_state.object_name][i])
		print_angles(temp)
		object_grasp_pose_temp = temp
		print_angles(temp)
		object_grasp_pose_set[i] = Pose()
		object_grasp_pose_set[i].position = temp.position
		object_grasp_pose_set[i].orientation = rotate_orientation(object_grasp_pose_temp.orientation, loaded_grasp[g_decision_making_state.object_name][i].orientation)

		move_ok = plan_and_move_end_effector_to(object_grasp_pose, 0.3, 
                                            g_bin_positions[g_decision_making_state.bin_id]['map_robot_position_2_ik_seed'])
		print_angles(object_grasp_pose_set[i])
		# raw_input("press Enter to continue...")

	return object_grasp_pose_set
Example #13
def publisher(paths):
    publisher = rospy.Publisher('pathfinder', Path, queue_size=20)
    rospy.init_node('path_publisher', anonymous=True)
    rate = rospy.Rate(10) # 10 hertz
    msgs=[] #list for all the messages we need to publish
    for path in paths:
        msg = Path() #new path message
        msg.header = create_std_h() #add a header to the path message
        for x,y,z in path:
            #create a pose for each position in the path
            #position is the only field we care about, leave the rest as 0
            ps = PoseStamped()
            ps.header=create_std_h()
            i=Pose()
            i.position=Point(x,y,z)
            ps.pose=i
            msg.poses.append(ps)
        
        msgs.append(msg)
    while not rospy.is_shutdown(): # while we're going
        #publish each message
        for m in msgs:
            #rospy.loginfo(m)
            publisher.publish(m)
        rate.sleep()
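
The snippet relies on a create_std_h() helper it does not define; a plausible hypothetical version:

import rospy
from std_msgs.msg import Header

def create_std_h():
    # Hypothetical helper: a Header stamped now, in an assumed 'map' frame
    h = Header()
    h.stamp = rospy.Time.now()
    h.frame_id = 'map'
    return h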
Example #14
            def obj_response_cb(userdata, response):
                if response.exists:
                    pose = Pose()
                    #Won't there be problems mixing float32 with 64? TODO
                    pose.position = Point(response.base_coordinates.x, response.base_coordinates.y, 0)
                    #TODO, this may give problems
                    pose.orientation = Quaternion(*quaternion_from_euler(0, 0, response.base_coordinates.z))
                    if MANTAIN_DISTANCE_FROM_OBJECT:
                        pose = pose_at_distance(pose, DISTANCE_FROM_OBJECT)
                    userdata.object_location = pose

                    release_pose = ObjectPose()
                    release_pose.pose = response.arm_coordinates
                    userdata.object_release_location = release_pose

                    print "\n\n printing pose for move to object"
                    print pose
                    print "\n\n printing pose for releasing the object"
                    print release_pose
                    print "\n that was the pose of the object\n\n"
                    print userdata.object_name
                    print "is the object name"
                    return succeeded
                else:
                    userdata.object_location = None
                    return aborted
Example #15
 def execute(self, userdata):
     userdata.orders = [DrinkOrder('david', 'coke'), DrinkOrder('michael', 'milk')]
     pose = Pose()
     pose.position = Point(0.059, 6.26, 0.0)
     pose.orientation = Quaternion(0, 0, -0.59, 0.8)
     userdata.guests_location = pose
     return succeeded
Example #16
def transform_pose(pose_in, transform):
    '''
    Transforms a pose

    Takes a pose defined in the frame defined by transform (i.e. the frame in which transform is the origin) and 
    returns it in the frame in which transform is defined.  Calling this with the origin pose will return transform.
    This returns
        
        pose.point = transform_point(pose_in.point, transform)
        
        pose.orientation = transform_orientation(pose_in.orientation, transform)
    
    **Args:**

        **pose (geometry_msgs.msg.Pose):** Pose to transform

        **transform (geometry_msgs.msg.Pose):** The transform
    
    **Returns:**
        A geometry_msgs.msg.Pose
    '''
    pose = Pose()
    pose.position = transform_point(pose_in.position, transform)
    pose.orientation = transform_quaternion(pose_in.orientation, transform)
    return pose
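
transform_point and transform_quaternion are not shown; a minimal sketch of what they are assumed to do, using tf.transformations:

import tf.transformations as tfms
from geometry_msgs.msg import Point, Quaternion

def transform_point(point, transform):
    # Rotate by the transform's orientation, then translate by its position (assumed semantics)
    m = tfms.quaternion_matrix([transform.orientation.x, transform.orientation.y,
                                transform.orientation.z, transform.orientation.w])
    p = m[:3, :3].dot([point.x, point.y, point.z])
    return Point(p[0] + transform.position.x,
                 p[1] + transform.position.y,
                 p[2] + transform.position.z)

def transform_quaternion(quat, transform):
    # Compose rotations: apply quat first, then the transform (assumed semantics)
    q = tfms.quaternion_multiply(
        [transform.orientation.x, transform.orientation.y,
         transform.orientation.z, transform.orientation.w],
        [quat.x, quat.y, quat.z, quat.w])
    return Quaternion(*q)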
def eef_pose_pub():
  rospy.init_node('eef_publisher')
  rospy.loginfo("Publishing right EEF gripper location")
  listener = tf.TransformListener()
  pub = rospy.Publisher('eef_pose', Pose, queue_size=10)

  DEFAULT_LINK = '/right_ee_link'
  DEFAULT_RATE = 100

  # Pull from param server the hz and EEF link
  eef_link = rospy.get_param("~eef_link", DEFAULT_LINK)
  publish_rate = rospy.get_param("~eef_rate", DEFAULT_RATE)

  rate = rospy.Rate(publish_rate)
  while not rospy.is_shutdown():
    try: 
      trans, rot = listener.lookupTransform('/base_link', eef_link, rospy.Time(0))
    except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
      continue
    msg = Pose()
    msg.position = Point()
    msg.position.x, msg.position.y, msg.position.z = trans[0], trans[1], trans[2]
    msg.orientation = Quaternion() 
    msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w = rot[0], rot[1], rot[2], rot[3]
    pub.publish(msg)
    rate.sleep()
def homogeneous_product_pose_transform(pose, transform):
    """
     pose should be Pose() msg
     transform is Transform()
    """
    # 1. Get the transform homogenous matrix.
    Ht = tf.transformations.quaternion_matrix([transform.rotation.x, transform.rotation.y,
                                             transform.rotation.z, transform.rotation.w])

    Tt = [transform.translation.x, transform.translation.y, transform.translation.z]

    Ht[:3,3] = Tt

    # 2.Original pose to homogenous matrix
    H0 = tf.transformations.quaternion_matrix([pose.orientation.x, pose.orientation.y,
                                               pose.orientation.z, pose.orientation.w])

    T0 = [pose.position.x, pose.position.y, pose.position.z]
    H0[:3,3] = T0

    H1 = numpy.dot(H0, Ht)

    # Get results from matrix
    result_position = H1[:3,3].T
    result_position = geometry_msgs.msg.Vector3(result_position[0], result_position[1], result_position[2])
    result_quat     = tf.transformations.quaternion_from_matrix(H1)
    result_quat     = geometry_msgs.msg.Quaternion(result_quat[0], result_quat[1], result_quat[2], result_quat[3])
    result_pose             = Pose()
    result_pose.position    = result_position
    result_pose.orientation = result_quat
    return result_pose
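
A usage sketch, assuming the snippet's own imports (tf, numpy, geometry_msgs): composing a pose with a pure 10 cm translation along the pose's local x axis. Note the function returns the position as a Vector3, not a Point.

from geometry_msgs.msg import Pose, Transform

pose = Pose()
pose.orientation.w = 1.0    # identity orientation
shift = Transform()
shift.rotation.w = 1.0
shift.translation.x = 0.1   # 10 cm along the pose's local x axis
shifted = homogeneous_product_pose_transform(pose, shift)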
    def _create_vertices_array(self) :

        for node in self.topo_map.nodes :
            marker = Marker()
            marker.header.frame_id = "/map"
            marker.type = marker.LINE_STRIP
            #marker.lifetime=2
            marker.scale.x = 0.1
            marker.color.a = 0.8
            marker.color.r = 0.7
            marker.color.g = 0.1
            marker.color.b = 0.2
            node._get_coords()
            count=0
            for i in node.vertices :
                #print i[0], i[1]
                Vert = Point()
                Vert.z = 0.05
                Vert.x = node.px + i[0]
                Vert.y = node.py + i[1]
                marker.points.append(Vert)
                vertname = "%s-%d" %(node.name, count)
                Pos = Pose()
                Pos.position = Vert
                self._vertex_marker(vertname, Pos, vertname)
                count+=1
            marker.points.append(marker.points[0])
            self.node_zone.markers.append(marker)
        
        idn = 0
        for m in self.node_zone.markers:
            m.id = idn
            idn += 1 
    def __init__(self):

        smach.StateMachine.__init__(self, [succeeded, preempted, aborted],
        output_keys=['object_data'])

        with self:

            pose = Pose()
            pose.position = Point(0, 0, 0)
            pose.orientation = Quaternion(*quaternion_from_euler(0, 0, 45))
            
            smach.StateMachine.add(
                'N2_go_to_dep_policy',
                MoveActionState("/base_link", pose=pose),
                transitions = {succeeded: 'look_for_objects'})

            def moped_enable_cb(userdata, response):
                if response.correct != None:
                    rospy.loginfo("ENABLE_CLOSE_OBJECT_SEARCH response: " + \
                                  str(response.correct))
                    return succeeded
                else:
                    return aborted
                      
            smach.StateMachine.add(
                'ENABLE_CLOSE_OBJECT_SEARCH',
                ServiceState('/iri_moped_handler/enable', enable,
                    response_cb = moped_enable_cb,
                    #request_key = 'object_name',
                    request = True),
                transitions = {succeeded: 'look_for_objects'})

            # FIXME: why is there a smaller ctimeout & retry on aborted?
            # If something goes wrong it'd rather have it fail than having
            # it keep hanging on forever... --Siegfried
            smach.StateMachine.add(
                'look_for_objects',
                TopicReaderState(
                    topic_name='/iri_moped_handler/outputOPL',
                    msg_type=ObjectPoseList,
                    timeout=10),
                remapping= {'message' : 'object_data'},
                transitions = {succeeded: 'DISABLE_CLOSE_OBJECT_SEARCH', aborted: 'look_for_objects'})

            def moped_disable_cb(userdata, response):
                if response.correct != None:
                    rospy.loginfo("DISABLE_CLOSE_OBJECT_SEARCH response: " + \
                                  str(response.correct))
                    return succeeded
                else:
                    return aborted

            smach.StateMachine.add(
                'DISABLE_CLOSE_OBJECT_SEARCH',
                ServiceState('/iri_moped_handler/enable', enable,
                    response_cb = moped_disable_cb,
                    #request_key = 'object_name',
                    request = False),
                transitions = {succeeded: succeeded})
Example #21
def move_to_object(xposl, yposl, zposl, objcolorl):
    global grip_force
    pose = Pose()
    pose.position = Point(xposl-0.01, yposl, 0.00)
    pose.orientation = Quaternion(1.00, 0.0, 0.00, 0.00)


    request_pose(pose, "left", left_group)
    rospy.sleep(0.5)

    # Get left hand range state
    dist = baxter_interface.analog_io.AnalogIO('left_hand_range').state()
    rospy.sleep(1)
    if dist > 65000:
        print "Out of Range"
        truez = -0.13
    else:
        print "DISTANCE %f" % dist
        truez = dist/1000
        truez = truez - 0.06
        truez = - (truez)
        print truez


    poset = Pose()
    poset.position = Point(xposl, yposl, truez)
    poset.orientation = Quaternion(1.00, 0.0, 0.00, 0.00)
    request_pose(poset, "left", left_group)

    left_gripper.close()
    rospy.sleep(0.5)
    if grip_force == 0:
        left_gripper.open()
        move_to_vision()
    else:
        pose = Pose()
        pose.position = Point(xposl-0.01, yposl+0.01, 0.150)
        pose.orientation = Quaternion(1.00, 0.0, 0.00, 0.00)
        request_pose(pose, "left", left_group)
        if grip_force == 0:
            left_gripper.open()
            move_to_vision()
            return
        move_to_box(objcolorl)
        left_gripper.open()
        move_to_vision()
Example #22
def move_to_vision():
    # Set pose
    pose = Pose()
    pose.orientation = Quaternion(1.00, 0.0, 0.00, 0.00)
    pose.position = Point(0.712, 0.316, 0.250)

    # Request service
    request_pose(pose,"left", left_group)
def callback(data):
	#rospy.loginfo("kinect x: %f, y %f, z: %f", data.pose.position.x, data.pose.position.y, data.pose.orientation.z)
	pos = Pose()
	pos.position = data.pose.position
	#pub = rospy.Publisher('PosArrayFovis', PoseArray)
	pos.orientation = data.pose.orientation
	posArr.poses.append(pos)
	pub.publish(posArr)
Example #24
 def getPose(self):
     p=self.endpoint_pose()
     if len(p.keys())==0:
         rospy.logerr("ERROR: Pose is empty, you may want to wait a bit before calling getPose to populate data")
         return None
     pose=Pose()        
     pose.position=Point(*p["position"])
     pose.orientation=Quaternion(*p["orientation"])
     return pose
def getMsg():
	
	pub_navsat = rospy.Publisher('/rtklib/rover', NavSatFix, queue_size=10)
	pub_pose = rospy.Publisher('/rtklib/pose', PoseStamped, queue_size=10)
	
	#Initialize the RTKLIB ROS node	
	rospy.init_node('rtklib_messages', anonymous=True)
	
	#Define the publishing frequency of the node
	rate = rospy.Rate(10)
	
	#Create a socket
	sock = socket.socket()
	
	#Get the address of the local host
	host = socket.gethostname()
	
	#Connect to the RTKRCV server that is bound to port xxxx
	port = 5801
	sock.connect((host,port))
	
	e2 = 6.69437999014e-3
	a = 6378137.0
	
	while not rospy.is_shutdown():
		
		navsat = NavSatFix()
		ecef_xyz = Point()
		ecef_pose = Pose()
		ecef_stampedPose = PoseStamped()
		
		ecef_stampedPose.header.stamp = rospy.Time.now()
		navsat.header.stamp = rospy.Time.now()
		
		#Get the position message from the RTKRCV server
		msgStr = sock.recv(1024)
		
		#Split the message
		msg = msgStr.split()
		
		navsat.latitude = float(msg[2])
		navsat.longitude = float(msg[3])
		navsat.altitude = float(msg[4])
		
		N = 1.0*a/np.sqrt(1-e2*(np.sin(float(msg[2])*np.pi/180.0)**2))
		
		ecef_xyz.x = (N+float(msg[4]))*np.cos(float(msg[2])*np.pi/180.0)*np.cos(float(msg[3])*np.pi/180.0)
		ecef_xyz.y = (N+float(msg[4]))*np.cos(float(msg[2])*np.pi/180.0)*np.sin(float(msg[3])*np.pi/180.0)
		ecef_xyz.z = (N*(1-e2)+float(msg[4]))*np.sin(float(msg[2])*np.pi/180.0)
		
		ecef_pose.position = ecef_xyz
		ecef_stampedPose.pose = ecef_pose
		
		pub_navsat.publish(navsat)
		pub_pose.publish(ecef_stampedPose)
		
		rate.sleep()
Example #26
def init():
    #Wake up Baxter
    baxter_interface.RobotEnable().enable()
    rospy.sleep(0.25)
    print "Baxter is enabled"
    
    print "Intitializing clients for services"
    global ik_service_left
    ik_service_left = rospy.ServiceProxy(
            "ExternalTools/left/PositionKinematicsNode/IKService",
            SolvePositionIK)

    global ik_service_right
    ik_service_right = rospy.ServiceProxy(
            "ExternalTools/right/PositionKinematicsNode/IKService",
            SolvePositionIK)

    global obj_loc_service 
    obj_loc_service = rospy.ServiceProxy(
        "object_location_service", ObjLocation)

    global stopflag
    stopflag = False
    #Taken from the MoveIt Tutorials
    moveit_commander.roscpp_initialize(sys.argv)
    robot = moveit_commander.RobotCommander()

    global scene
    scene = moveit_commander.PlanningSceneInterface()

    #Activate Left Arm to be used with MoveIt
    global left_group
    left_group = MoveGroupCommander("left_arm")
    left_group.set_goal_position_tolerance(0.01)
    left_group.set_goal_orientation_tolerance(0.01)
    
    
    global right_group
    right_group = MoveGroupCommander("right_arm")
    pose_right = Pose()
    pose_right.position = Point(0.793, -0.586, 0.329)
    pose_right.orientation = Quaternion(1.0, 0.0, 0.0, 0.0)
    request_pose(pose_right, "right", right_group)
    

    global left_gripper
    left_gripper = baxter_interface.Gripper('left')
    left_gripper.calibrate()
    print left_gripper.parameters()

    global end_effector_subs
    end_effector_subs = rospy.Subscriber("/robot/end_effector/left_gripper/state", EndEffectorState, end_effector_callback)
    rospy.sleep(1)

    global pubpic
    pubpic = rospy.Publisher('/robot/xdisplay', Image, latch=True, queue_size=1)
    rospy.sleep(1)
Example #27
def get_pose(self):
    """ Get the pose from the local_data
    and return a ROS geometry_msgs.Pose
    """
    pose = Pose()
    pose.position = get_position(self)
    pose.orientation = get_orientation(self)

    return pose
Example #28
def generatePath(cmap, start, goal, parents):
	global path_pub
	global way_pub
	global curmap
	#Create GridCells() message to display path and waypoints in rviz
	path = GridCells()
	path.cell_width = cmap.info.resolution
	path.cell_height = cmap.info.resolution
	path.header.frame_id = 'map'
	way = GridCells()
	way.cell_width = cmap.info.resolution
	way.cell_height = cmap.info.resolution
	way.header.frame_id = 'map'
	waycells = [mapToWorldPos(cmap, goal)]
	#Create a list of cells starting with the start position
	cells = [mapToWorldPos(cmap, start)]
	#trace path from goal back to start
	current = parents[normalize(goal, cmap.info.width)]
	lastpt = goal
	last_ang = math.atan2((lastpt.y-current.y),(lastpt.x-current.x))
	p = Pose()
	p.position = mapToWorldPos(cmap, goal)
	p.orientation.z = 0
	ret = [p]
	while current != start:
	    cells.append(mapToWorldPos(cmap, current))
	    #if we change travel direction, add a waypoint
	    ang = math.atan2((lastpt.y-current.y),(lastpt.x-current.x))
	    if (abs(ang-last_ang) > 0.1):
			waycells.append(mapToWorldPos(cmap, lastpt))
			p = Pose() # fresh Pose per waypoint so earlier entries are not mutated
			p.position = mapToWorldPos(cmap, lastpt)
			p.orientation.z = math.sin(ang/2)
			ret.append(p)
	    last_ang = ang
	    lastpt = current
	    current = parents[normalize(current, cmap.info.width)]
	path.cells = cells
	way.cells = list(reversed(waycells))
	path_pub.publish(path)
	way_pub.publish(way)
	resp = GetPathResponse()
	resp.path = ret
	return resp
    def __init__(self):

        smach.StateMachine.__init__(self, [succeeded, preempted, aborted],
                output_keys=['person_name', 'person_pos'])

        with self:

            smach.StateMachine.add(
                'FIND_PERSON',
                FindPersonStateMachine(),
                transitions={succeeded: 'MOVE_TO_PERSON', aborted: 'ROTATE'})
            # outputs: closest_person

            def move_to_person_goal_cb(userdata, nav_goal):
                position = Point(userdata.closest_person.x, userdata.closest_person.y, 0)
                distance_vector = multiply_vector(normalize_vector(position), 1)
                position = substract_vector(position, distance_vector)

                nav_goal.target_pose.pose.position = position
                nav_goal.target_pose.pose.orientation = Quaternion(*quaternion_from_euler(0, 0, 0))
                userdata.person_pos = nav_goal.target_pose.pose

                return nav_goal

            smach.StateMachine.add(
                'MOVE_TO_PERSON',
                TimeoutContainer(APPROACH_UNKNOWN_PERSON_TIMEOUT, MoveActionState(
                    goal_cb=move_to_person_goal_cb,
                    input_keys=['closest_person'],
                    output_keys=['person_pos'])),
                transitions={succeeded: 'PERSON_FOUND_TTS',
                    aborted: 'RECOGNIZE_FACE', preempted: 'RECOGNIZE_FACE'})

            smach.StateMachine.add(
                'PERSON_FOUND_TTS',
                SpeakActionState("I've found you!"),
                transitions={succeeded: 'RECOGNIZE_FACE'})

            smach.StateMachine.add(
                'RECOGNIZE_FACE',
                TimeoutContainer(RECOGNIZE_UNKNOWN_PERSON_TIMEOUT,
                    RecognizeFaceStateMachine()),
                transitions={succeeded: succeeded, unknown_face: 'ROTATE',
                    aborted: 'ROTATE', preempted: 'ROTATE'}, 
                remapping={"out_person_name":"person_name"})
            # outputs: 'person_name'

            pose = Pose()
            pose.position = Point(0, 0, 0)
            pose.orientation = Quaternion(*quaternion_from_euler(0, 0, 30))

            smach.StateMachine.add(
                'ROTATE',
                MoveActionState("/base_link", pose=pose),
                transitions={succeeded: 'FIND_PERSON'})
def facecb(data):
    if len(data.faces) > 0:
        pa = PoseArray()
        pa.header = data.header
        for face in data.faces:
            print ".",
            face_pose = Pose()
            face_pose.position = face.position
            face_pose.orientation.w = 1.0
            pa.poses.append(face_pose)
        face_pub.publish(pa)
Example #31
    def GoToPose(self, x = 3.6, y = 0, qx = 0, qy = 0, qz = 0, qw = 1):
        print('GoToPose start')
        msg = PoseStamped()
        msg.header.frame_id = 'map'
        # msg.header.stamp = self.get_clock().now()

        point = Point()
        point.x = float(x)
        point.y = float(y)
        quaternion = Quaternion()
        quaternion.x = float(qx)
        quaternion.y = float(qy)
        quaternion.z = float(qz)
        quaternion.w = float(qw)

        print(point, quaternion)

        pose = Pose()
        pose.position = point
        pose.orientation = quaternion

        msg.pose = pose
        self.publisher_gotopose.publish(msg)
Example #32
    def test_safe_load_collision_matrix2(self, test_folder,
                                         delete_test_folder):
        r = self.cls(donbot_urdf(),
                     path_to_data_folder=test_folder,
                     calc_self_collision_matrix=True)
        r.update_self_collision_matrix()
        scm = r.get_self_collision_matrix()

        box = self.cls.from_world_body(make_world_body_box())
        p = Pose()
        p.position = Point(0, 0, 0)
        p.orientation = Quaternion(0, 0, 0, 1)
        r.attach_urdf_object(box, u'gripper_tool_frame', p)
        r.update_self_collision_matrix()
        scm_with_obj = r.get_self_collision_matrix()

        r.detach_sub_tree(box.get_name())
        r.load_self_collision_matrix(test_folder)
        assert scm == r.get_self_collision_matrix()

        r.attach_urdf_object(box, u'gripper_tool_frame', p)
        r.load_self_collision_matrix(test_folder)
        assert scm_with_obj == r.get_self_collision_matrix()
Example #33
    def __should_flip_contact_info(self, contact_info):
        """
        :type contact_info: ContactInfo
        :rtype: bool
        """
        new_p = Pose()
        new_p.position = Point(*contact_info.position_on_a)
        new_p.orientation.w = 1

        self.__move_hack(new_p)
        hack_id = self.__get_pybullet_object_id(self.hack_name)
        body_a_id = self.robot.get_pybullet_id()
        try:
            contact_info3 = ContactInfo(*[
                x for x in p.getClosestPoints(hack_id, body_a_id, 0.001)
                if abs(x[8] + 0.005) < 0.0005
            ][0])
            return not (contact_info3.body_unique_id_b == body_a_id
                        and contact_info3.link_index_b
                        == self.robot.get_pybullet_link_id(
                            contact_info.link_a))
        except Exception as e:
            return True
Example #34
    def move_cartesian_path_s(self, data=0):

        move_group = moveit_commander.MoveGroupCommander(data.arm_name[0])

        waypoints = []

        wpose = Pose()

        wpose.position = data.goal_position
        wpose.orientation = data.goal_orientation

        waypoints.append(copy.deepcopy(wpose))

        (plan, fraction) = move_group.compute_cartesian_path(
            waypoints,  # waypoints to follow
            0.01,  # eef_step
            0.0)  # jump_threshold

        # Note: We are just planning, not asking move_group to actually move the robot yet:
        move_group.execute(plan, wait=True)
        print "move_cartesian_path ends"
        print "current pose", move_group.get_current_pose().pose
        return arm_move_srvResponse(w_flag=1)
Example #35
def moveModel(model, position, r, p, y):
    rospy.wait_for_service('/gazebo/set_model_state')
    try:
        service = rospy.ServiceProxy('/gazebo/set_model_state', SetModelState)
        twist = Twist()
        pose = Pose()
        pose.position = position
        #rpy to quaternion:
        quat = quaternion_from_euler(r, p, y)
        pose.orientation.x = quat[0]
        pose.orientation.y = quat[1]
        pose.orientation.z = quat[2]
        pose.orientation.w = quat[3]
        req = ModelState()  # gazebo_msgs.msg.ModelState; the SetModelState service expects this, not the service class
        req.model_name = model
        req.pose = pose
        req.twist = twist
        req.reference_frame = ""

        resp = service(req)

    except rospy.ServiceException, e:
        print("Service call failed: %s" % e)
Example #36
    def test_5_move_armangle(self):
        self.client.move_joints(0, pi / 100, 0, -pi / 2, 0, pi / 2, 0)

        goal_pose = Pose()
        goal_pose.position = Point(-0.4, -0.5, .3)
        goal_pose.orientation = orientation_from_rpy(0, pi, 0)
        goal_arm_angle = pi / 2
        resp = self.client.move_ptp_armangle(goal_pose, goal_arm_angle)
        self.assert_last_srv_call_failed(resp,
                                         RLLErrorCode.MOVEIT_PLANNING_FAILED)

        self.assert_move_ptp_success(goal_pose)
        resp = self.client.move_lin_armangle(goal_pose, goal_arm_angle, True)
        self.assert_last_srv_call_failed(
            resp, RLLErrorCode.ONLY_PARTIAL_PATH_PLANNED)

        goal_arm_angle = 3 * pi / 2
        resp = self.client.move_lin_armangle(goal_pose, goal_arm_angle, True)
        self.assert_last_srv_call_failed(resp,
                                         RLLErrorCode.INVALID_INPUT)
        resp = self.client.move_ptp_armangle(goal_pose, goal_arm_angle)
        self.assert_last_srv_call_failed(resp,
                                         RLLErrorCode.INVALID_INPUT)
Example #37
  def move_path(self, positions, orientations):
    """
    Moves robot to list of specified positions and orientations in the world coordinate system
    :param positions: list of Points
    :param orientations: list of Quaternions
    :return: Boolean whether last pose is reached within tolerance
    """
    waypoints = []
    assert len(positions) == len(orientations)
    for i in range(len(positions)):
      pose_goal = Pose()
      pose_goal.position = positions[i]
      pose_goal.orientation = orientations[i]
      waypoints.append(pose_goal)

    #print("WAYPOINTS", waypoints)

    (plan, fraction) = self.group.compute_cartesian_path(waypoints,   # waypoints to follow
                                                         0.01,        # eef_step
                                                         0.0)         # jump_threshold
    print("PLAN:", plan)
    print("FRAC:", fraction)

    #display_trajectory = moveit_msgs.msg.DisplayTrajectory()
    #display_trajectory.trajectory_start = self.robot.get_current_state()
    #display_trajectory.trajectory.append(plan)
    #self.display_trajectory_publisher.publish(display_trajectory)

    self.group.execute(plan, wait=True)
    #self.group.execute(plan, wait=False)

    #pose_goal = Pose()
    #pose_goal.orientation = orientations[-1]
    #pose_goal.position = positions[-1]
    #current_pose = self.group.get_current_pose().pose
    #return all_close(pose_goal, current_pose, 0.01)
    return True
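
A hedged usage sketch, where `arm` stands in for the class instance wrapping the MoveGroupCommander as self.group:

from geometry_msgs.msg import Point, Quaternion

positions = [Point(0.4, 0.0, 0.3), Point(0.4, 0.1, 0.3)]
orientations = [Quaternion(0, 1, 0, 0)] * 2  # end effector pointing down (assumed convention)
arm.move_path(positions, orientations)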
Example #38
    def callback(self, msg):
        self.arr = msg
        if len(self.arr.people) != 0:
            try:
                (position, quaternion) = self.listener.lookupTransform(
                    'base_link', 'head_camera_rgb_optical_frame',
                    rospy.Time(0))
                pose = Pose()
                pose.position.x = position[0]
                pose.position.y = position[1]
                pose.position.z = position[2]
                facePose = Pose()
                facePose.position = self.arr.people[0].pos

                # newMat = np.dot(pose_to_transform(pose), pose_to_transform(facePose))
                ps = PoseStamped()
                # ps.pose = transform_to_pose(newMat)

                ps.pose.position.x = facePose.position.z
                ps.pose.position.y = -facePose.position.x
                ps.pose.position.z = -facePose.position.y
                ps.pose.position.x += pose.position.x
                ps.pose.position.y += pose.position.y
                ps.pose.position.z += pose.position.z
                ps.pose.position.x -= 0.45
                ps.pose.position.y -= 0
                ps.pose.position.z -= 0.43

                ps.pose.orientation.x = 0.727
                ps.pose.orientation.y = 0
                ps.pose.orientation.z = 0
                ps.pose.orientation.w = 0.687
                ps.header.frame_id = 'base_link'
                self.pose = ps
            except (tf.LookupException, tf.ConnectivityException,
                    tf.ExtrapolationException):
                pass
def main():
    global g_limb, g_position_neutral, g_orientation_hand_down
    init()

    # Move the arm to its neutral position
    g_limb.move_to_neutral()

    rospy.loginfo("Old Hand Pose:\n %s" % str(g_limb._tip_states.states[0].pose))
    rospy.loginfo("Old Joint Angles:\n %s" % str(g_limb.joint_angles()))

    # Create a new pose (Position and Orientation) to solve for
    target_pose = Pose()
    target_pose.position = copy.deepcopy(g_position_neutral)
    target_pose.orientation = copy.deepcopy(g_orientation_hand_down)

    target_pose.position.x += 0.2 # Add 20cm to the x axis position of the hand

    # Call the IK service to solve for joint angles for the desired pose
    target_joint_angles = g_limb.ik_request(target_pose, "right_hand")

    # The IK Service returns false if it can't find a joint configuration
    if target_joint_angles is False:
        rospy.logerr("Couldn't solve for position %s" % str(target_pose))
        return

    # Set the robot speed (takes a value between 0 and 1)
    g_limb.set_joint_position_speed(0.3)

    # Send the robot arm to the joint angles in target_joint_angles, wait up to 2 seconds to finish
    g_limb.move_to_joint_positions(target_joint_angles, timeout=2)

    # Find the new coordinates of the hand and the angles the motors are currently at
    new_hand_pose = copy.deepcopy(g_limb._tip_states.states[0].pose)
    new_angles = g_limb.joint_angles()
    rospy.loginfo("New Hand Pose:\n %s" % str(new_hand_pose))
    rospy.loginfo("Target Joint Angles:\n %s" % str(target_joint_angles))
    rospy.loginfo("New Joint Angles:\n %s" % str(new_angles))
Example #40
    def publish_object_marker(self):

        world_markerframe_quaternion = tfms.quaternion_multiply(
            self._object_kinematics._world_bodyframe_quaternion,
            BODY_MARKERFRAME_QUATERNION)

        world_bodyframe_rot = tfms.quaternion_matrix(
            self._object_kinematics._world_bodyframe_quaternion)
        world_diskcenter_marker_vec = np.matmul(world_bodyframe_rot,
                                                BODY_MARKERFRAME_POSITION)

        marker_pose = Pose()
        marker_pose.position = Point(
            self._object_kinematics._disk_center_position.x +
            world_diskcenter_marker_vec[0],
            self._object_kinematics._disk_center_position.y +
            world_diskcenter_marker_vec[1],
            self._object_kinematics._disk_center_position.z +
            world_diskcenter_marker_vec[2])

        marker_pose.orientation = Quaternion(world_markerframe_quaternion[0],
                                             world_markerframe_quaternion[1],
                                             world_markerframe_quaternion[2],
                                             world_markerframe_quaternion[3])

        marker = Marker(
            type=Marker.MESH_RESOURCE,
            action=Marker.ADD,
            id=0,
            pose=marker_pose,
            scale=Vector3(0.001, .001, .001),
            header=Header(frame_id="world"),
            color=ColorRGBA(.70, .70, .70, 1),
            mesh_resource=
            "package://rockwalk_kinematics/object_model/rockwalk_cone.dae")

        self._pub_kinematics._object_marker_publisher.publish(marker)
def handle_start_service(msg):

    # Publish the position information and obstacle count on the RobotPosition topic
    RobotPosition = Pose()
    RobotPosition.position = msg.start.position
    RobotPosition.orientation = msg.start.orientation

    pubRobotPosition.publish(RobotPosition)

    # Publish the position information and obstacle count on the GeneralPositions topic
    GeneralPositions = GeneralPos()
    GeneralPositions.start = msg.start
    GeneralPositions.goal = msg.goal
    GeneralPositions.n_obstacles = msg.n_obstacles
    GeneralPositions.obstacles = msg.obstacles

    pubGeneralPositions.publish(GeneralPositions)

    #Publish state 1 on the RobotStatus topic - this state means the robot has been ordered to start
    RobotStatus.data = 1

    pubRobotStatus.publish(RobotStatus)

    return StartServiceResponse()
    def get_target_position(self, target_name):
        position = copy.deepcopy(self.goal_list[target_name])
        # print("before transformation",position)

        p = Pose()
        p.position = position

        robot_pose = self.get_robot_pose()

        quaternion = (robot_pose.orientation.x, robot_pose.orientation.y,
                      robot_pose.orientation.z, robot_pose.orientation.w)
        euler = euler_from_quaternion(quaternion)
        yaw = -euler[2]

        x = position.x - robot_pose.position.x
        y = position.y - robot_pose.position.y

        position.x = math.cos(yaw) * x - math.sin(yaw) * y
        position.y = math.sin(yaw) * x + math.cos(yaw) * y
        # print("after transformation",position, yaw)

        position = self.baselink2arm(p).position

        return position
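
A standalone check of the world-to-base 2D rotation used above, with assumed values:

import math

yaw = -math.pi / 2   # robot faces +y in the world, so the code negates its yaw
x, y = 1.0, 0.0      # target offset in the world frame
bx = math.cos(yaw) * x - math.sin(yaw) * y
by = math.sin(yaw) * x + math.cos(yaw) * y
print(bx, by)        # approximately (0.0, -1.0): the target sits to the robot's right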
Example #43
    def _getPose(self):
        pos = self.sc.get_position()
        ori = self.sc.get_orientation(quaternion=True)

        if ROS_ENABLE:
            pose_msg = Pose()

            pos_msg = Point()
            pos_msg.x = pos[0]
            pos_msg.y = pos[1]
            pos_msg.z = pos[2]

            ori_msg = Quaternion()
            ori_msg.x = ori[0]
            ori_msg.y = ori[1]
            ori_msg.z = ori[2]
            ori_msg.w = ori[3]

            pose_msg.position = pos_msg
            pose_msg.orientation = ori_msg

            self.pose_pub.publish(pose_msg)

        return pos, ori
 def find_joint_pose(self,
                     pose,
                     targ_x=0.0,
                     targ_y=0.0,
                     targ_z=0.0,
                     targ_ox=0.0,
                     targ_oy=0.0,
                     targ_oz=0.0):
     '''
     Finds the joint position of the arm given some pose and the
     offsets from it (to avoid opening the structure all the time
     outside of the function).
     '''
     ik_srv = "ExternalTools/right/PositionKinematicsNode/IKService"
     iksvc = rospy.ServiceProxy(ik_srv, SolvePositionIK)
     ik_request = SolvePositionIKRequest()
     the_pose = deepcopy(pose)
     the_pose['position'] = Point(x=targ_x + self.baxter_off.linear.x,
                                  y=targ_y + self.baxter_off.linear.y,
                                  z=targ_z + self.baxter_off.linear.z)
     angles = tf.transformations.quaternion_from_euler( \
         targ_ox + self.baxter_off.angular.x, \
         targ_oy + self.baxter_off.angular.y, \
         targ_oz + self.baxter_off.angular.z)
     the_pose['orientation'] = Quaternion(x=angles[0],
                                          y=angles[1],
                                          z=angles[2],
                                          w=angles[3])
     approach_pose = Pose()
     approach_pose.position = the_pose['position']
     approach_pose.orientation = the_pose['orientation']
     hdr = Header(stamp=rospy.Time.now(), frame_id='base')
     pose_req = PoseStamped(header=hdr, pose=approach_pose)
     ik_request.pose_stamp.append(pose_req)
     resp = iksvc(ik_request)
     return dict(zip(resp.joints[0].name, resp.joints[0].position))
    def get_closest_stop_line_pose(self, closest_waypoint_index):
        if closest_waypoint_index == 0:
            return 0

        light_stop_line_positions = self.config['stop_line_positions']
        waypoint = self.waypoints[closest_waypoint_index]
        min_distance = float('inf')
        current_stop_line_position = waypoint.pose.pose.position
        for stop_line in light_stop_line_positions:
            stop_line_position = Point()
            stop_line_position.x = stop_line[0]
            stop_line_position.y = stop_line[1]
            stop_line_position.z = 0

            distance = waypoint_help_function.direct_position_distance(
                stop_line_position, waypoint.pose.pose.position)
            if distance < min_distance:
                min_distance = distance
                current_stop_line_position = stop_line_position

        current_stop_line_pose = Pose()
        current_stop_line_pose.position = current_stop_line_position

        return current_stop_line_pose
Example #46
    def __init__(self,
                 body,
                 topic,
                 mesh_path=None,
                 mesh_scale=(1,1,1),
                 mesh_rgba=(0,0,0,1)):

        self.body = body
        self.pub = rospy.Publisher(topic, PoseStamped, queue_size=1)


        # prepare the published stuff
        # these will simply be updated and not re-created
        point = Point()
        quat = Quaternion()
        pose = Pose()
        pose.position = point
        pose.orientation = quat
        self.pose = pose
        self.pose_stamped = PoseStamped()
        self.pose_stamped.header.frame_id = '/world'

        # start with the current state of the body
        self._update_pose()
Example #47
	def navigateTo(self, num):
		if (self.lastFacePos is not None and self.lock.acquire(blocking=False)):
			self.expressions.nod_head()
			pointStamped = self.getPointStampedInFrame(self.lastFacePos, "base_link")
			distance_to_base = sqrt(pointStamped.point.x ** 2 + pointStamped.point.y ** 2 + pointStamped.point.z ** 2)
			unit_vector = { "x": pointStamped.point.x / distance_to_base,
							"y": pointStamped.point.y / distance_to_base }

			pointStamped.point.x = unit_vector["x"] * (distance_to_base - 0.5)
			pointStamped.point.y = unit_vector["y"] * (distance_to_base - 0.5)

			quaternion = Quaternion()
			quaternion.w = 1

			pose = Pose()
			pose.position = pointStamped.point
			pose.orientation = quaternion

			poseStamped = PoseStamped()
			poseStamped.header = pointStamped.header
			poseStamped.pose = pose

			self.move_pub.publish(poseStamped)
			self.lock.release()
    def test_4_move_lin_collision(self):
        # ensuring same global configuration for IK
        resp = self.client.move_joints(0, -pi / 100, 0, -pi / 2, 0, pi / 2, 0)
        self.assert_last_srv_call_success(resp)
        goal_pose = Pose()
        goal_pose.position = Point(-0.15, .4, .2)
        goal_pose.orientation = orientation_from_rpy(0, pi, 0)
        self.assert_move_ptp_success(goal_pose)

        goal_pose.position.z = .1
        self.assert_move_lin_success(goal_pose)

        goal_pose.position.z = 0.0
        resp = self.client.move_lin(goal_pose)
        self.assert_last_srv_call_failed(resp,
                                         RLLErrorCode.NO_IK_SOLUTION_FOUND)

        goal_pose.position.y = .55
        goal_pose.position.z = .1
        self.assert_move_lin_success(goal_pose)

        goal_pose.position.y = 0.7
        resp = self.client.move_lin(goal_pose)
        self.assert_last_srv_call_failed(resp, RLLErrorCode.GOAL_IN_COLLISION)
Example #49
    def a_star_callback(self, req):
        """
		Brief: Performs A* from current robot position to requested goal
		Inputs: req.goal [PoseStamped]
		Return: waypoints_xy [PoseArray]
		"""

        # Start timing
        stopwatch = Stopwatch()

        # Wait for odom and cspace
        while not (self.got_robot_pose and self.got_cspace):
            pass

        # Convert start and goal to (i,j)
        goal = req.goal.pose.position
        start_i, start_j = og_xy_to_ij(self.start_x, self.start_y, self.cspace)
        goal_i, goal_j = og_xy_to_ij(goal.x, goal.y, self.cspace)
        start = (start_i, start_j)
        goal = (goal_i, goal_j)

        # Invalid start exception
        if og_get_ij(start_i, start_j, self.cspace) != 0:

            # Search for valid local starting coordinates
            found_alt_start = False
            for (i_off, j_off) in self.cspace_offsets:
                i_alt = start_i + i_off
                j_alt = start_j + j_off
                if og_get_ij(i_alt, j_alt, self.cspace) == 0:  # free cell
                    start_i = i_alt
                    start_j = j_alt
                    found_alt_start = True
                    break

            # Publish warning or exception
            if found_alt_start:
                self.debug_pub.publish('Warning: Invalid start, rerouted')
            else:
                self.debug_pub.publish('Exception: Invalid start')
                return CalcWaypointsResponse(PoseArray(), Float64(0.0),
                                             String('start'))

        # Invalid goal exception
        if og_get_ij(goal_i, goal_j, self.cspace) != 0:
            self.debug_pub.publish('Exception: Invalid goal')
            return CalcWaypointsResponse(PoseArray(), Float64(0.0),
                                         String('goal'))

        # Run A* pathfinding
        frontier = PriorityQueue()
        frontier.put(start, 0)
        came_from = {}
        came_from[start] = None
        g_cost = {}
        g_cost[start] = 0

        while not frontier.empty():

            # Expand highest priority node
            curr = frontier.pop()
            if curr == goal:
                break

            # Generate neighbors
            i = curr[0]
            j = curr[1]
            neighbors = []
            for i_n in range(i - 1, i + 2):
                for j_n in range(j - 1, j + 2):
                    if og_get_ij(i_n, j_n, self.cspace) == 0:
                        neighbors.append((i_n, j_n))

            # Explore neighbors
            for next in neighbors:

                # Compute g-cost from curr
                new_g_cost = g_cost[curr] + self.edge_cost(curr, next)

                # Expand or improve frontier
                if next not in g_cost or new_g_cost < g_cost[next]:
                    came_from[next] = curr
                    g_cost[next] = new_g_cost
                    priority = g_cost[next] + self.h_cost(next, goal)
                    frontier.put(next, priority)

        # No path found exception
        if curr != goal:
            self.debug_pub.publish('Exception: No path found')
            return CalcWaypointsResponse(PoseArray(), Float64(0.0),
                                         String('path'))

        # Construct waypoints from start to goal
        waypoints_ij = [goal]
        while True:
            first = came_from[waypoints_ij[0]]
            if first == start:
                break
            else:
                waypoints_ij.insert(0, first)

        # Convert waypoints from (i,j) to (x,y)
        waypoints_xy = PoseArray()
        cells = []
        for waypoint_ij in waypoints_ij:
            i = waypoint_ij[0]
            j = waypoint_ij[1]
            x, y = og_ij_to_xy(i, j, self.cspace)
            wp = Pose()
            wp.position = Point(x, y, 0.0)
            waypoints_xy.poses.append(wp)

        # Publish gridcell topics to RViz
        if req.pub_gridcells.data:
            og_pub_gridcells(waypoints_ij, 0.008, self.waypoint_gc_pub,
                             self.cspace)

        # Finish timing
        dur = stopwatch.stop()
        timing_msg = 'A* pathfinding: ' + str(dur) + ' [s]'
        self.timing_pub.publish(timing_msg)

        # Return service response object
        self.debug_pub.publish('Path found!')
        time_cost = g_cost[goal]
        return CalcWaypointsResponse(waypoints_xy, Float64(time_cost),
                                     String('none'))
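The service handler above assumes a PriorityQueue with a put(item, priority) / pop() / empty() interface, which is not what the standard library's queue.PriorityQueue provides. A minimal heapq-backed sketch of such a helper (the class itself is not part of the original snippet):

import heapq

class PriorityQueue(object):
    """Min-heap keyed by priority; pop() returns the lowest-priority item."""

    def __init__(self):
        self._heap = []
        self._count = 0  # tie-breaker so heapq never has to compare items

    def empty(self):
        return len(self._heap) == 0

    def put(self, item, priority):
        heapq.heappush(self._heap, (priority, self._count, item))
        self._count += 1

    def pop(self):
        return heapq.heappop(self._heap)[2]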
Example #50
0
def main():
    rospy.init_node("drone_simulation_node", anonymous=True)

    # Each line of the trajectory file holds one comma-separated waypoint
    file_path = '/home/msdc/jcgarciaca/catkin_ws/src/general_tests/trajectory/trajectory.txt'
    points = []
    with open(file_path, 'r') as f:
        for line in f:
            points.append([float(value.strip()) for value in line.split(',')])

    marker_points_pub = rospy.Publisher("points_marker", Marker, queue_size=1)
    marker_lines_pub = rospy.Publisher("lines_marker", Marker, queue_size=1)
    pose_pub = rospy.Publisher('drone_pose', PoseArray, queue_size=1)

    origin, xaxis, yaxis, zaxis = (0, 0, 0), (1, 0, 0), (0, 1, 0), (0, 0, 1)
    frame_name_r = 'map'
    rospy.loginfo('Node started')

    points_r = Marker()
    points_r.header.frame_id = frame_name_r
    points_r.header.stamp = rospy.Time.now()
    points_r.ns = "points_and_lines"
    points_r.action = Marker.ADD
    points_r.pose.orientation.w = 1.0

    lines_r = Marker()
    lines_r.header.frame_id = frame_name_r
    lines_r.header.stamp = rospy.Time.now()
    lines_r.ns = "points_and_lines"
    lines_r.action = Marker.ADD
    lines_r.pose.orientation.w = 1.0

    points_r.id = 0
    lines_r.id = 1

    points_r.type = Marker.POINTS
    lines_r.type = Marker.LINE_STRIP

    points_r.scale.x = 0.04
    points_r.scale.y = 0.04
    points_r.color.r = 1.0
    points_r.color.a = 1.0

    lines_r.scale.x = 0.02
    lines_r.scale.y = 0.02
    lines_r.color.g = 1.0
    lines_r.color.a = 1.0

    max_num = len(points) + 1  # sliding-window length for the line strip

    while not rospy.is_shutdown():
        points_r.points = []
        lines_r.points = []
        poses_array = PoseArray()
        poses_array.header.frame_id = 'map'
        for point in points:
            point_tmp = Point()
            point_tmp.x = point[0]
            point_tmp.y = point[1]
            point_tmp.z = point[2]

            pose_ref = Pose()
            pose_ref.position = deepcopy(point_tmp)
            q = transformations.quaternion_about_axis(radians(point[3]), zaxis)
            pose_ref.orientation.x = q[0]
            pose_ref.orientation.y = q[1]
            pose_ref.orientation.z = q[2]
            pose_ref.orientation.w = q[3]

            points_r.points.append(point_tmp)
            lines_r.points.append(point_tmp)
            poses_array.poses.append(pose_ref)

            marker_points_pub.publish(points_r)
            marker_lines_pub.publish(lines_r)
            pose_pub.publish(poses_array)

            if len(lines_r.points) == max_num:
                # Drop the oldest point so the line strip acts as a sliding window
                lines_r.points = lines_r.points[1:]
            rospy.sleep(1.)
        rospy.sleep(1.)
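The loader at the top of main() accepts one comma-separated record per line; indices 0-2 are read as x, y, z and index 3 as a yaw angle in degrees (it is passed through radians() before quaternion_about_axis). A plausible trajectory.txt under that assumption:

0.0, 0.0, 1.0, 0
1.0, 0.0, 1.0, 90
1.0, 1.0, 1.0, 180
0.0, 1.0, 1.0, 270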
Example #51
0
    def calculate_mk2_ik(self):

        #goal_point = Point(0.298, -0.249, -0.890) home position
        group = moveit_commander.MoveGroupCommander("manipulator")
        goal_pose = Pose()

        # Goto position 1 [HOME]
        # x ,y , z
        goal_point = Point(-0.0297, 0.3455, 0.41)
        goal_pose.position = goal_point
        # orientation hardcoded; the quaternion_from_euler result was unused
        goal_pose.orientation = Quaternion(0.005, -0.005, 0.707, 0.707)
        moveit_goal = self.create_move_group_pose_goal(
            goal_pose,
            group="manipulator",
            end_link_name="end_Link",
            plan_only=False)
        rospy.loginfo("Sending goal...")
        self.moveit_ac.send_goal(moveit_goal)
        rospy.loginfo("Waiting for result...")
        self.moveit_ac.wait_for_result(rospy.Duration(10.0))
        moveit_result = self.moveit_ac.get_result()
        time.sleep(1)
        print("Home position: ")
        print(group.get_current_pose().pose)
        #Gripper [OPEN]
        gripper = moveit_commander.MoveGroupCommander("arm_claw")
        joint_positions = [-0.7, 0.7]
        gripper.set_joint_value_target(joint_positions)
        gripper.go()

        # Goto position 2 [PICK]
        goal_point = Point(-0.245807, 0.306796, 0.0821576)
        goal_pose.position = goal_point
        # orientation hardcoded; the quaternion_from_euler result was unused
        goal_pose.orientation = Quaternion(-0.199651, 0.11139, 0.859318,
                                           0.459499)
        moveit_goal = self.create_move_group_pose_goal(
            goal_pose,
            group="manipulator",
            end_link_name="end_Link",
            plan_only=False)
        rospy.loginfo("Sending goal...")
        self.moveit_ac.send_goal(moveit_goal)
        rospy.loginfo("Waiting for result...")
        self.moveit_ac.wait_for_result(rospy.Duration(10.0))
        moveit_result = self.moveit_ac.get_result()
        time.sleep(5)
        print "Pick position: "
        print group.get_current_pose().pose

        #Gripper [CLOSED]
        gripper = moveit_commander.MoveGroupCommander("arm_claw")
        joint_positions = [0.0, 0.0]
        gripper.set_joint_value_target(joint_positions)
        gripper.go()

        # Goto position 3 [HOME]
        # x ,y , z
        goal_point = Point(-0.0297, 0.3455, 0.41)
        goal_pose.position = goal_point
        # orientation hardcoded; the quaternion_from_euler result was unused
        goal_pose.orientation = Quaternion(0.005, -0.005, 0.707, 0.707)
        moveit_goal = self.create_move_group_pose_goal(
            goal_pose,
            group="manipulator",
            end_link_name="end_Link",
            plan_only=False)
        rospy.loginfo("Sending goal...")
        self.moveit_ac.send_goal(moveit_goal)
        rospy.loginfo("Waiting for result...")
        self.moveit_ac.wait_for_result(rospy.Duration(10.0))
        moveit_result = self.moveit_ac.get_result()
        time.sleep(1)
        print("Home position: ")
        print(group.get_current_pose().pose)

        # Goto position 4 [PUT]
        goal_point = Point(0.110921, 0.383053, 0.0912104)
        goal_pose.position = goal_point
        # orientation hardcoded; the quaternion_from_euler result was unused
        goal_pose.orientation = Quaternion(-0.100325, 0.191939, 0.542264,
                                           0.811815)
        moveit_goal = self.create_move_group_pose_goal(
            goal_pose,
            group="manipulator",
            end_link_name="end_Link",
            plan_only=False)
        rospy.loginfo("Sending goal...")
        self.moveit_ac.send_goal(moveit_goal)
        rospy.loginfo("Waiting for result...")
        self.moveit_ac.wait_for_result(rospy.Duration(10.0))
        moveit_result = self.moveit_ac.get_result()
        time.sleep(5)
        print "Put position: "
        print group.get_current_pose().pose
        #Gripper [OPEN]
        gripper = moveit_commander.MoveGroupCommander("arm_claw")
        joint_positions = [-0.7, 0.7]
        gripper.set_joint_value_target(joint_positions)
        gripper.go()

        # Goto position 5 [HOME]
        # x ,y , z
        goal_point = Point(-0.0297, 0.3455, 0.41)
        goal_pose.position = goal_point
        # orientation hardcoded; the quaternion_from_euler result was unused
        goal_pose.orientation = Quaternion(0.005, -0.005, 0.707, 0.707)
        moveit_goal = self.create_move_group_pose_goal(
            goal_pose,
            group="manipulator",
            end_link_name="end_Link",
            plan_only=False)
        rospy.loginfo("Sending goal...")
        self.moveit_ac.send_goal(moveit_goal)
        rospy.loginfo("Waiting for result...")
        self.moveit_ac.wait_for_result(rospy.Duration(10.0))
        moveit_result = self.moveit_ac.get_result()
        time.sleep(1)
        print("Home position: ")
        print(group.get_current_pose().pose)
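The five motion blocks above differ only in the target pose and settle time. A deduplicating helper, sketched against the same assumed attributes (self.create_move_group_pose_goal, self.moveit_ac) rather than taken from the original:

    def go_to_pose(self, position, orientation, settle_time=1.0):
        """Send one pose goal to the manipulator group and wait for the result."""
        goal_pose = Pose()
        goal_pose.position = position
        goal_pose.orientation = orientation
        moveit_goal = self.create_move_group_pose_goal(
            goal_pose,
            group="manipulator",
            end_link_name="end_Link",
            plan_only=False)
        rospy.loginfo("Sending goal...")
        self.moveit_ac.send_goal(moveit_goal)
        rospy.loginfo("Waiting for result...")
        self.moveit_ac.wait_for_result(rospy.Duration(10.0))
        result = self.moveit_ac.get_result()
        time.sleep(settle_time)
        return result

Each numbered block then collapses to a single call such as self.go_to_pose(Point(-0.0297, 0.3455, 0.41), Quaternion(0.005, -0.005, 0.707, 0.707)).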
Example #52
0
def trans2pose(trans):
    pose = Pose()
    pose.orientation = trans.rotation
    pose.position = trans.translation
    return pose
def tfPoseToGeometry(pose):
    result = Pose()
    result.position = tfPositionToGeometry(pose[0])
    result.orientation = tfOrientationToGeometry(pose[1])
    return result
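tfPoseToGeometry relies on two converters that are not shown. Minimal sketches, assuming the tf convention of an (x, y, z) position tuple and an (x, y, z, w) orientation tuple:

def tfPositionToGeometry(position):
    # tf returns position as an (x, y, z) tuple
    return Point(*position)

def tfOrientationToGeometry(orientation):
    # tf returns orientation as an (x, y, z, w) tuple
    return Quaternion(*orientation)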
Example #54
0
    def execute_next(self):
        action = self.actions.pop(0)
        direction = None
        if action == "MoveF" or action == "MoveB":
            current_pose = self.pose
            quat = (current_pose.orientation.x, current_pose.orientation.y,
                    current_pose.orientation.z, current_pose.orientation.w)
            euler = tf.transformations.euler_from_quaternion(quat)
            current_yaw = euler[2]
            if -math.pi / 4.0 < current_yaw < math.pi / 4.0:
                print("Case 1")
                target_pose = copy.deepcopy(current_pose)
                target_pose.position.x += 0.5  # facing +x
            elif math.pi / 4.0 < current_yaw < 3.0 * math.pi / 4.0:
                print("Case 2")
                target_pose = copy.deepcopy(current_pose)
                target_pose.position.y += 0.5  # facing +y
            elif -3.0 * math.pi / 4.0 < current_yaw < -math.pi / 4.0:
                print("Case 3")
                target_pose = copy.deepcopy(current_pose)
                target_pose.position.y -= 0.5  # facing -y
            else:
                print("Case 4")
                target_pose = copy.deepcopy(current_pose)
                target_pose.position.x -= 0.5  # facing -x
            PID(target_pose, "linear").publish_velocity()

        elif action == "TurnCW" or action == "TurnCCW":
            current_pose = self.pose
            quat = (current_pose.orientation.x, current_pose.orientation.y,
                    current_pose.orientation.z, current_pose.orientation.w)
            euler = tf.transformations.euler_from_quaternion(quat)
            yaw = euler[2]
            if action == "TurnCW":
                target_yaw = yaw - (math.pi / 2.0)
                if target_yaw < -math.pi:
                    target_yaw += (math.pi * 2)
            else:
                target_yaw = yaw + (math.pi / 2.0)
                if target_yaw >= (math.pi):
                    target_yaw -= (math.pi * 2)
            target_pose = Pose()
            target_pose.position = current_pose.position
            target_quat = Quaternion(*tf.transformations.quaternion_from_euler(
                euler[0], euler[1], target_yaw))
            target_pose.orientation = target_quat
            print(target_pose.orientation)
            PID(target_pose, "rotational").publish_velocity()

        else:
            print("Invalid action")
            exit(-1)
        if len(self.actions) == 0:
            self.status_publisher.publish(self.free)
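execute_next drives the robot through an external PID helper that is not shown. An interface stub matching the calls above; the topic name, message type, and internals are assumptions, not the original implementation:

from geometry_msgs.msg import Twist

class PID(object):
    def __init__(self, target_pose, mode):
        self.target_pose = target_pose
        self.mode = mode  # "linear" or "rotational"
        self.cmd_pub = rospy.Publisher('/cmd_vel', Twist, queue_size=1)  # illustrative topic

    def publish_velocity(self):
        # A real implementation would loop: read the current pose, compute the
        # error against self.target_pose, and publish proportional Twist
        # commands until the error falls below a tolerance.
        pass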
yy = yy.flatten()
zz = zz.flatten()


def add_random_rotation(roll, pitch, yaw):
    roll += np.random.random() * 0.25
    pitch += -0.1 + np.random.random() * 0.2
    yaw += -0.1 + np.random.random() * 0.2
    return roll, pitch, yaw


i = 0
j = 0
rts = []
while not rospy.is_shutdown():
    pose.position = Point(xx[i], yy[i], zz[i])
    pose.orientation = Quaternion(*tf.transformations.quaternion_from_euler(
        *add_random_rotation(*rotation)))

    group.set_pose_target(pose)
    success = group.go(wait=True)  # the extra positional True was redundant
    group.stop()
    group.clear_pose_targets()

    rospy.sleep(2)

    if not success:
        actual = group.get_current_pose().pose.position
        difference = point_difference(pose.position, actual)
        if difference > 1e-02:
            rospy.logerr('\nFailed for position:\n{}\n'.format(pose))

    i = (i + 1) % len(xx)  # advance through the samples; i was never incremented
Example #56
0
def dq_to_geometry_msgs_pose(dq):
    p = Pose()
    p.orientation = dq_to_geometry_msgs_quaternion(rotation(dq))
    p.position = dq_to_geometry_msgs_point(translation(dq))
    return p
    def get_lsq_triangulation_error(self, observation, epsilon=0.5):
        # Get the camera intrinsics of both cameras
        P1 = self.get_cam_intrinsic()
        P2 = self.get_neighbor_cam_intrinsic()

        # Convert world coordinates to local camera coordinates for both cameras
        # We get the camera extrinsics as follows
        trans,rot = self.listener.lookupTransform(self.rotors_machine_name+'/xtion_rgb_optical_frame','world', rospy.Time(0)) #target to source frame
        (r,p,y) = tf.transformations.euler_from_quaternion(rot)
        H1 = tf.transformations.euler_matrix(r,p,y,axes='sxyz')
        H1[0:3,3] = trans
        print(str(self.env_id)+' '+str(self.rotors_machine_name)+':'+str(trans))
        trans,rot = self.listener.lookupTransform(self.rotors_neighbor_name+'/xtion_rgb_optical_frame','world', rospy.Time(0)) #target to source frame
        (r,p,y) = tf.transformations.euler_from_quaternion(rot)
        H2 = tf.transformations.euler_matrix(r,p,y,axes='sxyz')
        H2[0:3,3] = trans
        print(str(self.env_id)+' '+str(self.rotors_neighbor_name)+':'+str(trans))

        #Concatenate Intrinsic Matrices
        intrinsic = np.array((P1[0:3,0:3],P2[0:3,0:3]))
        #Concatenate Extrinsic Matrices
        extrinsic = np.array((H1,H2))
        joints_gt = self.get_joints_gt()
        error = 0
        joints = 0  # counted across the whole loop; resetting it per-iteration broke the average

        self.triangulated_root = PoseArray()
        for k,v in dict_joints.items():

            p2d1 = self.joints[dict_joints[k],0:2]
            p2d2 = self.joints_neighbor[dict_joints[k],0:2]
            points_2d = np.array((p2d1,p2d2))

            # Solve system of equations using least squares to estimate person position in robot 1 camera frame
            estimate_root  = self.lstsq_triangulation(intrinsic,extrinsic,points_2d,2)
            es_root_cam = Point()
            es_root_cam.x = estimate_root[0]
            es_root_cam.y = estimate_root[1]
            es_root_cam.z = estimate_root[2]

            err_cov = PoseWithCovarianceStamped()
            if v>4: #because head, eyes and ears lead to high error for the actor
            # if v==5 or v == 6:
                p = Pose()
                p.position = es_root_cam
                self.triangulated_root.poses.append(p)
                gt = joints_gt.poses[alpha_to_gt_joints[k]].position
                error += (self.get_distance_from_point(gt,es_root_cam))

                err_cov.pose.pose.position = es_root_cam
                err_cov.pose.covariance[0] = (gt.x - es_root_cam.x)**2
                err_cov.pose.covariance[7] = (gt.y - es_root_cam.y)**2
                err_cov.pose.covariance[14] = (gt.z - es_root_cam.z)**2
                err_cov.header.stamp = rospy.Time()
                err_cov.header.frame_id = 'world'
                self.triangulated_cov_pub[alpha_to_gt_joints[k]].publish(err_cov)
                joints += 1

        # Publish all estimates
        self.triangulated_root.header.stamp = rospy.Time()
        self.triangulated_root.header.frame_id = 'world'
        self.triangulated_pub.publish(self.triangulated_root)



        is_in_desired_pos = False
        error = error / joints if joints else float('inf')  # guard: no joints may pass the v > 4 filter
        if error <= epsilon:
            # error = 0.4*error/epsilon
            is_in_desired_pos = True
        # else:
        #     error = 0.4

        return [is_in_desired_pos, error]
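Both triangulation methods defer to self.lstsq_triangulation, which is not shown. A standard linear least-squares (DLT-style) triangulation consistent with the call sites (per-camera 3x3 intrinsics, 4x4 world-to-camera extrinsics, one 2D point per camera); the internals here are an assumption:

    def lstsq_triangulation(self, intrinsic, extrinsic, points_2d, n_cams):
        # Stack two DLT rows per camera: x*P[2] - P[0] and y*P[2] - P[1]
        rows = []
        for c in range(n_cams):
            P = np.dot(intrinsic[c], extrinsic[c][0:3, :])  # 3x4 projection matrix
            x, y = points_2d[c]
            rows.append(x * P[2, :] - P[0, :])
            rows.append(y * P[2, :] - P[1, :])
        A = np.array(rows)
        # The homogeneous solution is the right singular vector associated
        # with the smallest singular value
        _, _, vt = np.linalg.svd(A)
        X = vt[-1]
        return X[0:3] / X[3]  # dehomogenize to a 3D point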
    def get_lsq_triangulation_error_with_noisy_gt(self, observation, epsilon=0.5):

        noisy_joints = self.get_noisy_joints()
        stamp = noisy_joints.header.stamp
        noisy_joints_neighbor = self.get_noisy_joints_neighbor(stamp)
        try:
            self.joints = np.array(noisy_joints.res).reshape((14, 2))
            self.joints_prev = self.joints
        except (AttributeError, ValueError):  # avoid a bare except
            self.joints = self.joints_prev
            print('Unable to find noisy joints')
        try:
            self.joints_neighbor = np.array(noisy_joints_neighbor.res).reshape((14, 2))
            self.joints_neighbor_prev = self.joints_neighbor
        except (AttributeError, ValueError):
            self.joints_neighbor = self.joints_neighbor_prev
            print('Unable to find noisy joints from neighbor')
        # Get the camera intrinsics of both cameras
        P1 = self.get_cam_intrinsic()
        P2 = self.get_neighbor_cam_intrinsic()

        # Convert world coordinates to local camera coordinates for both cameras
        # We get the camera extrinsics as follows
        try:
            trans, rot = self.listener.lookupTransform(self.rotors_machine_name + '/xtion_rgb_optical_frame', 'world', stamp)  # target to source frame
            self.trans_prev = trans
            self.rot_prev = rot
        except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
            trans = self.trans_prev
            rot = self.rot_prev
            print('Robot transform unavailable')
        (r,p,y) = tf.transformations.euler_from_quaternion(rot)
        H1 = tf.transformations.euler_matrix(r,p,y,axes='sxyz')
        H1[0:3,3] = trans

        try:
            trans, rot = self.listener.lookupTransform(self.rotors_neighbor_name + '/xtion_rgb_optical_frame', 'world', stamp)  # target to source frame
            self.trans2_prev = trans
            self.rot2_prev = rot
        except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
            trans = self.trans2_prev
            rot = self.rot2_prev
            print('Robot neighbor transform unavailable')
        (r,p,y) = tf.transformations.euler_from_quaternion(rot)
        H2 = tf.transformations.euler_matrix(r,p,y,axes='sxyz')
        H2[0:3,3] = trans


        #Concatenate Intrinsic Matrices
        intrinsic = np.array((P1[0:3,0:3],P2[0:3,0:3]))
        #Concatenate Extrinsic Matrices
        extrinsic = np.array((H1,H2))
        joints_gt = self.get_joints_gt()
        error = 0
        self.triangulated_root = PoseArray()
        for k in range(len(gt_joints)):

            p2d1 = self.joints[k,:]
            p2d2 = self.joints_neighbor[k,:]
            points_2d = np.array((p2d1,p2d2))

            # Solve system of equations using least squares to estimate person position in robot 1 camera frame
            estimate_root  = self.lstsq_triangulation(intrinsic,extrinsic,points_2d,2)
            es_root_cam = Point()
            es_root_cam.x = estimate_root[0]
            es_root_cam.y = estimate_root[1]
            es_root_cam.z = estimate_root[2]

            err_cov = PoseWithCovarianceStamped()
            p = Pose()
            p.position = es_root_cam
            self.triangulated_root.poses.append(p)
            gt = joints_gt.poses[gt_joints[gt_joints_names[k]]].position
            error += (self.get_distance_from_point(gt,es_root_cam))

            err_cov.pose.pose.position = es_root_cam
            err_cov.pose.covariance[0] = (gt.x - es_root_cam.x)**2
            err_cov.pose.covariance[7] = (gt.y - es_root_cam.y)**2
            err_cov.pose.covariance[14] = (gt.z - es_root_cam.z)**2
            err_cov.header.stamp = rospy.Time()
            err_cov.header.frame_id = 'world'
            self.triangulated_cov_pub[k].publish(err_cov)

        # Publish all estimates
        self.triangulated_root.header.stamp = rospy.Time()
        self.triangulated_root.header.frame_id = 'world'
        self.triangulated_pub.publish(self.triangulated_root)



        is_in_desired_pos = False
        error = error / len(gt_joints)
        if error <= epsilon:
            # error = 0.4*error/epsilon
            is_in_desired_pos = True
        # else:
        #     error = 0.4

        return [is_in_desired_pos, error]
    def measurements_process(self, measurements):
        self.traffics_array = MarkerArray()

        # Function to get car position on map.
        ego_position = self.map.get_position_on_map([
            measurements.player_measurements.transform.location.x,
            measurements.player_measurements.transform.location.y,
            measurements.player_measurements.transform.location.z
        ])
        # Function to get orientation of the road car is in.
        ego_orientation = self.map.get_lane_orientation([
            measurements.player_measurements.transform.location.x,
            measurements.player_measurements.transform.location.y,
            measurements.player_measurements.transform.location.z
        ])
        # publish ego vehicle speed
        self.ego_speed = Float64()
        self.ego_speed.data = float(
            measurements.player_measurements.forward_speed) * KMH2MS
        self.ego_speed_pub.publish(self.ego_speed)
        # publish ego vehicle pose
        ego_point = Point()
        ego_point.x = ego_position[0]
        ego_point.y = ego_position[1]
        ego_point.z = 0.
        ego_h = np.arctan2(
            measurements.player_measurements.transform.orientation.x,
            measurements.player_measurements.transform.orientation.y)
        ego_h = ego_h if ego_h > 0 else 2 * np.pi + ego_h
        e2_quaternion = tf.transformations.quaternion_from_euler(
            0, 0, ego_h + np.pi / 2)  # roll, pitch, yaw
        ego_quaternion = Quaternion()
        ego_quaternion.x = e2_quaternion[0]
        ego_quaternion.y = e2_quaternion[1]
        ego_quaternion.z = e2_quaternion[2]
        ego_quaternion.w = e2_quaternion[3]
        ego_pose_frame = Pose()
        ego_pose_frame.position = ego_point
        ego_pose_frame.orientation = ego_quaternion
        self.ego_pose_pub.publish(ego_pose_frame)

        # publish ego marker
        self.create_marker(0,
                           0,
                           0,
                           shape=2,
                           cr=1.0,
                           cg=0.,
                           cb=0.,
                           marker_scale=3.0)
        # mark the ego vehicle on the bird's-eye-view (biv) image
        new_window_width = (float(WINDOW_HEIGHT) / float(
            self.map_shape[0])) * float(self.map_shape[1])
        w_pos = int(ego_position[0] *
                    (float(WINDOW_HEIGHT) / float(self.map_shape[0])))
        h_pos = int(ego_position[1] *
                    (new_window_width / float(self.map_shape[1])))
        self.ego_position = np.array(ego_position) * self.map.pixel_density
        self.ego_orientation = ego_orientation

        self.pedestrians = Pedestrians()
        agent_positions = measurements.non_player_agents
        for agent in agent_positions:
            if agent.HasField('vehicle'):
                agent_position = self.map.get_position_on_map([
                    agent.vehicle.transform.location.x,
                    agent.vehicle.transform.location.y,
                    agent.vehicle.transform.location.z
                ])
                self.create_marker(-agent_position[0] + ego_position[0],
                                   agent_position[1] - ego_position[1],
                                   0,
                                   shape=1,
                                   cr=0.0,
                                   cg=1.,
                                   cb=0.,
                                   marker_scale=2.0)

            if agent.HasField('pedestrian'):
                self.add_pedestrian(agent)
                agent_position = self.map.get_position_on_map([
                    agent.pedestrian.transform.location.x,
                    agent.pedestrian.transform.location.y,
                    agent.pedestrian.transform.location.z
                ])
                self.create_marker(-agent_position[0] + ego_position[0],
                                   agent_position[1] - ego_position[1],
                                   0,
                                   shape=1,
                                   cr=0.0,
                                   cg=1.,
                                   cb=1.)
        self.traffics_markers_pub.publish(self.traffics_array)
        self.traffics_array = None
        self.traffics_count = 0

        self.pub_pedestrian()

        if self.image_biv is not None:
            image_biv_frame = CvBridge().cv2_to_imgmsg(
                cv2.circle(np.copy(self.image_biv), (w_pos, h_pos), 8,
                           (0, 0, 255), -1), "rgb8")
            self.image_biv_pub.publish(image_biv_frame)
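create_marker is called throughout measurements_process but not shown. A sketch consistent with the call sites (ego-relative position, RGB channels cr/cg/cb, integer shape matching the visualization_msgs/Marker constants); the frame id is an assumption:

    def create_marker(self, x, y, z, shape=2, cr=1.0, cg=0.0, cb=0.0,
                      marker_scale=1.0):
        marker = Marker()
        marker.header.frame_id = 'ego_vehicle'  # assumed ego-centric frame
        marker.header.stamp = rospy.Time.now()
        marker.id = self.traffics_count
        marker.type = shape  # 1 = CUBE, 2 = SPHERE
        marker.action = Marker.ADD
        marker.pose.position = Point(x, y, z)
        marker.pose.orientation.w = 1.0
        marker.scale.x = marker.scale.y = marker.scale.z = marker_scale
        marker.color.r, marker.color.g, marker.color.b = cr, cg, cb
        marker.color.a = 1.0
        self.traffics_array.markers.append(marker)
        self.traffics_count += 1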
def main():

    # Initialize node
    rospy.init_node('minimization')

    # Start Moveit Commander
    InitializeMoveitCommander()

    # Initialization of KDL Kinematics for right and left grippers
    # expanduser is required: from_xml_file does not expand "~" on its own
    robot = URDF.from_xml_file(os.path.expanduser(
        "~/catkin_ws/src/baxter_common/baxter_description/urdf/baxter.urdf"))

    global kdl_kin_left, kdl_kin_right
    kdl_kin_left = KDLKinematics(robot, "base", "left_gripper")
    kdl_kin_right = KDLKinematics(robot, "base", "right_gripper")

    # Bounds for SLSQP: s0, s1, e0, e1, w0, w1, w2 (left arm, then right arm)
    bnds = ((-1.70167993878, 1.70167993878), (-2.147, 1.047),
            (-3.05417993878, 3.05417993878), (-0.05, 2.618),
            (-3.059, 3.059), (-1.57079632679, 2.094), (-3.059, 3.059),
            (-1.70167993878, 1.70167993878), (-2.147, 1.047),
            (-3.05417993878, 3.05417993878), (-0.05, 2.618),
            (-3.059, 3.059), (-1.57079632679, 2.094), (-3.059, 3.059))
    # lower and upper bounds for each q (length 14)

    # Initial guess
    #x0 = np.full((14,1), 0.75)
    #x0 = np.array([[random.uniform(bnds[i][0]/2.,bnds[i][1]/2.)] for i in range(14)])
    initial_left = Pose()
    initial_left.position = Point(
        x=0.3,  #(P_left_current[0,0] + P_right_current[0,0])/2.,
        y=(P_left_current[1, 0] + P_right_current[1, 0]) / 2.,
        z=(P_left_current[2, 0] + P_right_current[2, 0]) / 2.,
    )
    initial_left.orientation = Quaternion(
        x=0.0,
        y=0.0,
        z=0.0,
        w=1.0,
    )

    initial_right = Pose()
    initial_right.position = Point(
        x=0.3,  #(P_left_current[0,0] + P_right_current[0,0])/2.,
        y=(P_left_current[1, 0] + P_right_current[1, 0]) / 2.,
        z=(P_left_current[2, 0] + P_right_current[2, 0]) / 2.,
    )
    initial_right.orientation = Quaternion(
        x=0.0,
        y=0.0,
        z=0.0,
        w=1.0,
    )

    x0_left = kdl_kin_left.inverse(initial_left)
    x0_right = kdl_kin_right.inverse(initial_right)  # was kdl_kin_left (copy-paste bug)

    x0 = np.array([[x0_left[0]], [x0_left[1]], [x0_left[2]], [x0_left[3]],
                   [x0_left[4]], [x0_left[5]], [x0_left[6]], [x0_right[0]],
                   [x0_right[1]], [x0_right[2]], [x0_right[3]], [x0_right[4]],
                   [x0_right[5]], [x0_right[6]]])

    x0 = x0 + random.uniform(-0.2, 0.2)  # note: one shared random offset for all 14 joints

    # Constraint equality
    Handoff_separation = np.array([[0.0], [0.1], [0.0], [math.pi / 1.], [0],
                                   [-math.pi / 2.]])
    cons = (
        # x-sep-distance = 0
        {'type': 'eq',
         'fun': lambda q: JS_to_PrPlRrl(q)[9, 0] - Handoff_separation[0, 0]},
        # y-sep-distance = 0
        {'type': 'eq',
         'fun': lambda q: JS_to_PrPlRrl(q)[10, 0] - Handoff_separation[2, 0]},
        # z-sep-distance = 0.1
        {'type': 'eq',
         'fun': lambda q: JS_to_PrPlRrl(q)[11, 0] - Handoff_separation[1, 0]},
        # roll = pi
        {'type': 'eq',
         'fun': lambda q: math.fabs(JS_to_PrPlRrl(q)[6, 0]) - Handoff_separation[3, 0]},
        # pitch = 0
        {'type': 'eq',
         'fun': lambda q: math.fabs(JS_to_PrPlRrl(q)[7, 0]) - Handoff_separation[4, 0]},
        # yaw = -pi/2
        {'type': 'eq',
         'fun': lambda q: JS_to_PrPlRrl(q)[8, 0] - Handoff_separation[5, 0]},
        # {'type': 'ineq', 'fun': lambda q: JS_to_PrPlRrl(q)[0, 0] - 0.3},  # x-pos > 0.3 m to help avoid collisions
        # keep the handoff x at the midpoint of the current gripper positions
        {'type': 'eq',
         'fun': lambda q: JS_to_PrPlRrl(q)[0, 0] -
                (P_left_current[0, 0] + P_right_current[0, 0]) / 2.},
        # keep the handoff y at the midpoint of the current gripper positions
        {'type': 'eq',
         'fun': lambda q: JS_to_PrPlRrl(q)[1, 0] -
                (P_left_current[1, 0] + P_right_current[1, 0]) / 2.})

    # Minimization
    before = rospy.get_rostime()
    result = minimize(minimization,
                      x0,
                      method='SLSQP',
                      jac=jacobian,
                      bounds=bnds,
                      constraints=cons,
                      tol=0.1,
                      options={'maxiter': 100})
    after = rospy.get_rostime()

    print "\nNumber of iterations: \n", result.success, result.nit
    print result

    q_left = result.x[0:7]
    q_right = result.x[7:14]

    pose_left = kdl_kin_left.forward(q_left)
    pose_right = kdl_kin_right.forward(q_right)

    print "\nPose left: \n", pose_left
    print "Pose right: \n", pose_right

    #R_left = pose_left[:3,:3]
    #R_right = pose_right[:3,:3]

    #X_left,Y_left,Z_left = euler_from_matrix(R_left, 'sxyz')
    #X_right,Y_right,Z_right = euler_from_matrix(R_right, 'sxyz')

    q = np.array([
        q_left[0], q_left[1], q_left[2], q_left[3], q_left[4], q_left[5],
        q_left[6], q_right[0], q_right[1], q_right[2], q_right[3], q_right[4],
        q_right[5], q_right[6]
    ])
    #P = JS_to_PrPlRrl(q)

    handoff_distance = math.sqrt((pose_left[0, 3] - pose_right[0, 3])**2 +
                                 (pose_left[1, 3] - pose_right[1, 3])**2 +
                                 (pose_left[2, 3] - pose_right[2, 3])**2)
    print "\nDistance between handoff: ", handoff_distance
    print "Minimization time: ", (
        after.secs - before.secs) + (after.nsecs - before.nsecs) / 1e+9

    if not result.success or math.fabs(handoff_distance - 0.1) > 0.05:
        print("Minimization was a failure.")
    else:
        print("Minimization was a success.")
        print("Planning a trajectory in MoveIt!")

        #s0, s1, e0, e1, w0, w1, w2
        joints = {
            'left_s0': q_left[0],
            'left_s1': q_left[1],
            'left_e0': q_left[2],
            'left_e1': q_left[3],
            'left_w0': q_left[4],
            'left_w1': q_left[5],
            'left_w2': q_left[6],
            'right_s0': q_right[0],
            'right_s1': q_right[1],
            'right_e0': q_right[2],
            'right_e1': q_right[3],
            'right_w0': q_right[4],
            'right_w1': q_right[5],
            'right_w2': q_right[6]
        }

        group_both_arms.set_joint_value_target(joints)
        plan_both = group_both_arms.plan()

        print "Trajectory time (nsec): ", plan_both.joint_trajectory.points[
            len(plan_both.joint_trajectory.points) - 1].time_from_start
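group_both_arms.plan() only computes the trajectory. Assuming the usual moveit_commander flow, executing the planned handoff would take one more call:

        group_both_arms.execute(plan_both)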