def __init__(self):

    ## The number of child subprocesses.
    self._threads = \
        rospy.get_param("rapp_speech_detection_sphinx4_threads")

    ## The subprocesses structure that contains information used for the
    # subprocess handling
    self._availableProcesses = [{
      'sphinx': SpeechRecognitionSphinx4(), \
      'running': False, \
      'configuration_hash': 0\
      } for i in range(self._threads)]

    ## Thread conditional variable used for the subprocess scheduling
    self._lock = threading.Condition()
    ## Total service callback threads waiting to execute
    self._threadCounter = 0

    serv_batch_topic = \
        rospy.get_param("rapp_speech_detection_sphinx4_total_topic")
    if(not serv_batch_topic):
      rospy.logerror("Sphinx4 Speech detection batch topic param not found")

    ## Ros service server for sphinx speech recognition
    self._speech_recognition_batch_service = rospy.Service( \
        serv_batch_topic, SpeechRecognitionSphinx4TotalSrv, \
        self.handleSpeechRecognitionCallback)
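Note: the pool above is handed out under the Condition variable; a minimal, hedged sketch of such a scheduler (with a placeholder object standing in for SpeechRecognitionSphinx4) might look like this:

import threading

class WorkerPool(object):
    """Sketch of a Condition-guarded pool of worker slots (names are illustrative)."""

    def __init__(self, n_workers):
        self._slots = [{'worker': object(), 'running': False} for _ in range(n_workers)]
        self._cond = threading.Condition()

    def acquire(self):
        # Block until a slot is idle, then mark it as running and hand it out.
        with self._cond:
            while all(slot['running'] for slot in self._slots):
                self._cond.wait()
            slot = next(slot for slot in self._slots if not slot['running'])
            slot['running'] = True
            return slot

    def release(self, slot):
        # Mark the slot idle again and wake one waiting caller.
        with self._cond:
            slot['running'] = False
            self._cond.notify()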
Code example #2
    def run(self, verbose):
        # yrange = None
        # scopez = m3t.M3Scope(xwidth=100, yrange=yrange)
       
        ros_rate = rospy.Rate(np.amax(self.rates_idx))
       
        try:
            rospy.loginfo("Starting "+str(len(self.components_idx))+" publish threads!")
            for idx, val in enumerate(self.components_idx):
                t = PublisherThread(self.scope, self.comps[val], self.fields_idx[idx], self.dataTypes_idx[idx], self.rates_idx[idx]) 
                with self.lock: 
                    self.publishers[(self.components_idx[idx], self.fields_idx[idx], self.dataTypes_idx[idx])] = t                    
                t.start()
                
            while not rospy.is_shutdown():
                self.rt_proxy.step()
                rospy.Rate(self.max_rate).sleep()
            
        except Exception as e:
            rospy.logerr("Error: " + str(e) + ". Stopping threads!")

        for t in self.publishers.values():
            t.stop()
            t.join()
                
        self.rt_proxy.stop(force_safeop=False)  # allow other clients to continue running
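run() assumes every PublisherThread offers a cooperative stop()/join() pair; a hedged sketch of a thread with that contract (the real PublisherThread is not shown in this snippet):

import threading

class StoppableThread(threading.Thread):
    """Illustrative publisher-style thread that can be stopped from outside."""

    def __init__(self, rate_hz=10.0):
        super(StoppableThread, self).__init__()
        self._stop_event = threading.Event()
        self._period = 1.0 / rate_hz

    def run(self):
        while not self._stop_event.is_set():
            # publish one sample here, then wait out the remainder of the period
            self._stop_event.wait(self._period)

    def stop(self):
        self._stop_event.set()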
Code example #3
File: solution.py  Project: xunilrj/sandbox
    def callback(self, joint_values):
        # First, we must extract information about the kinematics of the robot from its URDF.
        # We will start at the root and add links and joints to lists
        link_name = self.robot.get_root()
        link_names = []
        joints = []
        while True:
            # Find the joint connected at the end of this link, or its "child"
            # Make sure this link has a child
            if link_name not in self.robot.child_map:
                break
            # Make sure it has a single child (we don't deal with forks)
            if len(self.robot.child_map[link_name]) != 1:
                rospy.logerror("Forked kinematic chain!");
                break
            # Get the name of the child joint, as well as the link it connects to
            (joint_name, next_link_name) = self.robot.child_map[link_name][0]
            # Get the actual joint based on its name
            if joint_name not in self.robot.joint_map:
                rospy.logerror("Joint not found in map")
                break;
            joints.append(self.robot.joint_map[joint_name])
            link_names.append(next_link_name)

            # Move to the next link
            link_name = next_link_name

        # Compute all the transforms based on the information we have
        all_transforms = self.compute_transforms(link_names, joints, joint_values)

        # Publish all the transforms
        self.pub_tf.publish(all_transforms)
def main():
    rospy.init_node('stamina_calibration_tf_publisher')
    robot_description = rospy.get_param("robot_description")
    r = rospy.Rate(5)
    
    if (len(rospy.myargv()) < 3):
        rospy.logerr(usage())
        return
    else:
        # the keys are the optical frames of the calibrated cameras
        # the values are the fixed joints of the cameras and their base
        cam_dict = {}
        urdf_xml = rospy.myargv()[1]
        try:
            for i in xrange(2, len(rospy.myargv())):
                [cam_link, joint] = map(lambda x: x.strip("/"), rospy.myargv()[i].split(':'))
                cam_dict[cam_link] = joint
        except:
            rospy.logerr(usage())
            return
    
    print cam_dict
    c = CalibrationManager(robot_description, urdf_xml, cam_dict)
    
    while not rospy.is_shutdown():
        if c.is_updated():
            c.update()
        
        try:
            r.sleep()
        except:
            rospy.loginfo("Shutting down")
Code example #5
File: scan2.py  Project: OSURoboticsClub/Rover2016
def right_scan(img):
    global new_right_img
    # convert ROS image message to numpy array for OpenCV
    try:
        new_right_img = bridge.imgmsg_to_cv2(img, desired_encoding="bgr8")
    except CvBridgeError as e:
        rospy.logerr(e)
Code example #6
File: scan2.py  Project: OSURoboticsClub/Rover2016
def left_scan(img):
    global new_left_img
    # convert ROS image message to numpy array for OpenCV
    try:
        new_left_img = bridge.imgmsg_to_cv2(img, "bgr8")
    except CvBridgeError as e:
        rospy.logerr(e)
Code example #7
File: get_remote_map.py  Project: jeguzzi/alma_robot
def main():
    rospy.init_node('get_remote_map_node', anonymous=False)

    if not (rospy.has_param('server_uri') and rospy.has_param('world')):
        rospy.logfatal("no server_uri or world params provided")
        return

    server = rospy.get_param("server_uri", None)
    world = rospy.get_param("world", None)
    resolution = rospy.get_param("resolution", 5)

    url = ("{server}/worlds/{world}/map.gmapping?resolution={resolution}"
           .format(**locals()))

    rospack = rospkg.RosPack()
    # TODO: not nice
    path = "{root}/worlds/{world}/map".format(
        root=rospack.get_path('alma_remote_planner'),
        world=world)

    response = requests.get(url, stream=True)
    rospy.loginfo(
        "Sent request to {url}. Got response {response}".format(**locals()))
    if response.status_code != 200:
        rospy.logerror("No valid response")
    else:
        z = zipfile.ZipFile(StringIO.StringIO(response.content))
        z.extractall(path=path)
Code example #8
File: nao_camera.py  Project: PHPDOTSQL/nao_robot
    def main_loop(self):
        img = Image()
        while not rospy.is_shutdown():
            #print "getting image..",
            image = self.camProxy.getImageRemote(self.nameId)
            #print "ok"
            # TODO: better time
            img.header.stamp = rospy.Time.now()
            img.header.frame_id = self.frame_id
            img.height = image[1]
            img.width = image[0]
            nbLayers = image[2]
            #colorspace = image[3]
            if image[3] == kYUVColorSpace:
                encoding = "mono8"
            elif image[3] == kRGBColorSpace:
                encoding = "rgb8"
            elif image[3] == kBGRColorSpace:
                encoding = "bgr8"
            else:
                rospy.logerror("Received unknown encoding: {0}".format(image[3]))

            img.encoding = encoding
            img.step = img.width * nbLayers
            img.data = image[6]
            self.info_.width = img.width
            self.info_.height = img.height
            self.info_.header = img.header
            self.pub_img_.publish(img)
            self.pub_info_.publish(self.info_)
            rospy.sleep(0.0001)# TODO: is this necessary?


        self.camProxy.unsubscribe(self.nameId)
Code example #9
File: client_cmd.py  Project: hugoxia/code
    def on_message(self, unused_channel, basic_deliver, properties, body):
        """Invoked by pika when a message is delivered from RabbitMQ. The
        channel is passed for your convenience. The basic_deliver object that
        is passed in carries the exchange, routing key, delivery tag and
        a redelivered flag for the message. The properties passed in is an
        instance of BasicProperties with the message properties and the body
        is the message that was sent.

        :param pika.channel.Channel unused_channel: The channel object
        :param pika.Spec.Basic.Deliver basic_deliver: The Basic.Deliver method
        :param pika.Spec.BasicProperties properties: The message properties
        :param str|unicode body: The message body

        """
        rospy.loginfo('Received message # %s from %s: %s',
                      basic_deliver.delivery_tag, properties.app_id, body)
        self.acknowledge_message(basic_deliver.delivery_tag)
        rosmsg = ClientCommand()
        request = json.loads(body)
        rosmsg.id = request['request_id']
        request_type = request['request_type']
        if request_type == 'move':
            rosmsg.position_x = request['request_body']['position']['x']
            rosmsg.position_y = request['request_body']['position']['y']
            rosmsg.position_z = 0
            rosmsg.tag_epc = request['request_body']['position']['rfid']
            self._pub.publish(rosmsg)
        else:
            rospy.logerror("invalid request type %s", request_type)
Code example #10
def bfs(root_cam, observations, cameras_seen, checkerboards_seen):
    q = [root_cam]

    frame = PyKDL.Frame()
    tf_listener = tf.TransformListener()
    while not rospy.is_shutdown():
        try:                
            tf_listener.waitForTransform("/base_link", "/checkerboard_id", rospy.Time(0), rospy.Duration(10))
            t = tf_listener.lookupTransform("/base_link", "/checkerboard_id", rospy.Time(0))
            v = PyKDL.Vector(t[0][0],t[0][1],t[0][2])
            r = PyKDL.Rotation.Quaternion(t[1][0],t[1][1],t[1][2], t[1][3])
            frame = PyKDL.Frame(r, v)
            break
        except (tf.LookupException, tf.ConnectivityException):
            rospy.logerror("No transform from /base_link to /checkerboard_id found. Retrying...")
    
    cameras_seen[root_cam] = frame
    while q:
        cam1, q = q[0], q[1:]  # pop
        cam1_pose = cameras_seen[cam1]

        for cam2, checkerboards in observations[cam1].iteritems():
            assert checkerboards
            if cam2 not in cameras_seen:
                q.append(cam2)

                # Cam2Pose = Cam1Pose * Cam1->CB * CB->Cam2
                cameras_seen[cam2] = cam1_pose * checkerboards[0][0] * checkerboards[0][1].Inverse()

            for cb in checkerboards:
                if cb[2] not in checkerboards_seen:
                    checkerboards_seen[cb[2]] = cam1_pose * cb[0]
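The comment "Cam2Pose = Cam1Pose * Cam1->CB * CB->Cam2" is ordinary frame composition; a small hedged check of that identity with PyKDL frames (numbers made up for illustration):

import PyKDL

# world->cam1, plus the two checkerboard observations cam1->cb and cam2->cb
cam1_pose = PyKDL.Frame(PyKDL.Rotation.RPY(0.0, 0.0, 0.1), PyKDL.Vector(1.0, 0.0, 0.5))
cam1_to_cb = PyKDL.Frame(PyKDL.Rotation.RPY(0.0, 0.2, 0.0), PyKDL.Vector(0.3, 0.1, 1.0))
cam2_to_cb = PyKDL.Frame(PyKDL.Rotation.RPY(0.1, 0.0, 0.0), PyKDL.Vector(-0.2, 0.0, 0.9))

# world->cam2 follows by chaining world->cam1 * cam1->cb * (cam2->cb)^-1
cam2_pose = cam1_pose * cam1_to_cb * cam2_to_cb.Inverse()
print(cam2_pose)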
Code example #11
def createDynEntitiesForRviz(entity, frame, type_elt):

  _entity_ = entity.replace('-', '_')
  
  addInstructionDyn("ros.rosPublish.add('vector', 'rviz_"+_entity_+"', '/RvizFeatureSoTRH/"+_entity_+"') if not 'rviz_"+_entity_+"' in ros.rosPublish.list() else None")
  addInstructionDyn("convert_"+_entity_+" = EntityToMatrix('convert_"+entity+"')")

  if type_elt == "":
    rospy.logerror("Entity type: UNKNOW. WRANING!")
  elif type_elt == "Point":
    addInstructionDyn("convert_"+_entity_+".sin1.value = robot.entities['"+entity+"'].position")
    addInstructionDyn("convert_"+_entity_+".sin2.value = robot.entities['"+entity+"'].position")
  elif type_elt == "Versor":
    addInstructionDyn("convert_"+_entity_+".sin1.value = robot.entities['"+entity+"'].versor")
    addInstructionDyn("convert_"+_entity_+".sin2.value = robot.entities['"+entity+"'].versor")
  elif type_elt == "Line" or type_elt == "Plane":
    addInstructionDyn("convert_"+_entity_+".sin1.value = robot.entities['"+entity+"'].position")
    addInstructionDyn("convert_"+_entity_+".sin2.value = robot.entities['"+entity+"'].normal")
  elif type_elt == "Plane":
    addInstructionDyn("convert_"+_entity_+".sin1.value = robot.entities['"+entity+"'].position")
    addInstructionDyn("convert_"+_entity_+".sin2.value = robot.entities['"+entity+"'].normal")

  addInstructionDyn("plug(convert_"+_entity_+".sout, ros.rosPublish.signal('rviz_"+_entity_+"'))")
    
  addInstructionDyn("robot.plug['convert_"+_entity_+"'] = convert_"+_entity_)
  addInstructionRviz([_entity_, frame, type_elt])
Code example #12
	def __init__(self):
		if rospy.has_param('joint_names'):
			self.joint_names = rospy.get_param('joint_names')
		else:
			rospy.logerror("joint_names not available")
			return
		self.configuration = [0,0,0,0,0,0,0]
		self.lock = threading.Lock()
		self.received_state = False
		self.listener = tf.TransformListener()
		time.sleep(0.5)
		self.service = rospy.Service("move_cart_abs", MoveCart, self.cbIKSolverAbs)
		#self.service = rospy.Service("move_cart_rel", MoveCart, self.cbIKSolverRel)
		self.client = actionlib.SimpleActionClient('joint_trajectory_action', JointTrajectoryAction)
		self.as_ = actionlib.SimpleActionServer("move_cart_rel", MoveCartAction, execute_cb=self.cbIKSolverRel)
		if not self.client.wait_for_server(rospy.Duration(15)):
			rospy.logerr("arm action server not ready within timeout, aborting...")
			return
		else:
			rospy.logdebug("arm action server ready")

		rospy.Subscriber('/joint_states', JointState, self.joint_states_callback)
		#self.thread = threading.Thread(target=self.joint_states_listener)
		#self.thread.start()
		rospy.wait_for_service('get_ik')
		self.iks = rospy.ServiceProxy('get_ik', GetPositionIK)
Code example #13
 def _add_waypoint_pose(self):
     current_pose = self._get_current_pose()
     
     if (None != current_pose):
         self._append_waypoint_pose(current_pose.pose.pose)
     else:
         rospy.logerror("Invalid waypoint pose")
Code example #14
 def find_checkerboard_pose(self, ros_image, cam_info):
 #we need to convert the ros image to an opencv image
     try:
         image = self.bridge.imgmsg_to_cv(ros_image, "mono8")
     except CvBridgeError as e:
         rospy.logerr("Error importing image %s" % e)
         return
Code example #15
 def getAnglesFromPose(self,pose):
     if type(pose)==Pose:
         goal=PoseStamped()
         goal.header.frame_id="/base"
         goal.pose=pose
     else:
         goal=pose
     
     
     if not self.ik:
         rospy.logerror("ERROR: Inverse Kinematics service was not loaded")
         return None
     goalstr="%f,%f,%f"%(goal.pose.position.x,goal.pose.position.y,goal.pose.position.z)
     ikreq = SolvePositionIKRequest()
     
     ikreq.pose_stamp.append(goal)
     try:
         resp = self.iksvc(ikreq)
         if (resp.isValid[0]):
             return resp.joints[0]
         else:
             rospy.logerr("FAILURE - No Valid Joint Solution Found for %s"%goalstr)
             return None
     except rospy.ServiceException as e:
         rospy.loginfo("Service call failed: %s" % (e,))
         return None
Code example #16
    def callback(self, range_msg, img_msg, userdata, finishdata):
        if self._finished:
            return

        height = range_msg.range
        try:
            image = self._bridge.imgmsg_to_cv2(img_msg,
                                               desired_encoding='bgr8')
        except CvBridgeError as error:
            rospy.logerr(error)
            return

        output = self.on_execute(userdata, image, height)

        if output is None:
            # Don't terminate this state yet,
            # try again with new sensor input.
            return

        self._finished = True

        finishdata.append(output)

        self._height_sub.sub.unregister()
        self._image_sub.sub.unregister()
        self._trigger_event.set()
Code example #17
File: robot.py  Project: argenos/ros_multirobot
    def list_to_trajectory(self, trajectory, G):
        """
        Args:
            trajectory (list): List containing a tuple with the time and nodes in the roadmap. The nodes must contain a 'pos' attribute in pixels.
        Returns:
            path(Path): A nav_msgs path containing the poses of each node in world coordinates.
        """
        if not trajectory:
            rospy.logerror("Trajectory is emtpty")

        path = Path()
        path.header.frame_id = '/map'
        path.header.stamp = rospy.Time.now()

        #l = l[0::3]
        for w in trajectory:
            t, node = w
            x2, y2 = G.node[node]['pos'] #This is in pixels
            x2, y2 = self.img_to_map(x2,y2)
            p = PoseStamped()
            p.header.frame_id = '/map'
            p.header.stamp = path.header.stamp + rospy.Duration.from_sec(t)
            p.pose.position = Point(float(x2),float(y2),0.0)

            path.poses.append(p)

        s = path.poses[0]
        for p in path.poses:
            angle_to_goal=math.atan2(p.pose.position.y-s.pose.position.y, p.pose.position.x-s.pose.position.x)
            q_angle = tf.transformations.quaternion_from_euler(0, 0, angle_to_goal, axes='sxyz')
            q = Quaternion(*q_angle)
            p.pose.orientation = q
            s = p

        return path
Code example #18
File: tests.py  Project: OSURoboticsClub/Rover2016
def handle_img(img):
    global new_crotch_img2
    # convert ROS image message to numpy array for OpenCV
    try:
        new_crotch_img2 = bridge.imgmsg_to_cv2(img, "bgr8")
    except CvBridgeError as e:
        rospy.logerr(e)
Code example #19
File: scenario.py  Project: DLu/path_planning_metrics
 def get_xml(self):
     if self.type=='box':
         return box(self.get_spawn_name(), self.size, 
                     self.xyz, self.rpy, is_static=True)
     else:
         rospy.logerror("unknown type %s"%t)
         return None
Code example #20
File: PatternGenXY.py  Project: ssafarik/Flylab
 def UpdatePatternPoints (self, pattern):      
     if (pattern.isDirty):  
         if (pattern.shape == 'constant'):
             pattern.points = self.GetPointsConstant(pattern)
         elif (pattern.shape == 'point'):
             pattern.points = self.GetPointsConstant(pattern)
         elif (pattern.shape == 'circle'):
             pattern.points = self.GetPointsCircle(pattern)
         elif (pattern.shape == 'square'):
             pattern.points = self.GetPointsSquare(pattern)
         elif (pattern.shape == 'flylogo'):
             pattern.points = self.GetPointsFlylogo(pattern)
         elif (pattern.shape == 'spiral'):
             pattern.points = self.GetPointsSpiral(pattern)
         elif (pattern.shape == 'ramp'):
             pattern.points = self.GetPointsRamp(pattern)
         elif (pattern.shape == 'peano'):
             pattern.points = self.GetPointsPeano(pattern)
         elif (pattern.shape == 'grid'):
             pattern.points = self.GetPointsGrid(pattern)
         elif (pattern.shape == 'raster'):
             pattern.points = self.GetPointsGridRaster(pattern)
         elif ((len(pattern.shape)==1) and (pattern.shape.isalnum())):
             pattern.points = self.GetPointsCharacter(pattern)
         elif (pattern.shape == 'bypoints'):
             pass
         elif (pattern.shape == 'none'):
             pattern.points = []
         else:
             pattern.points = []
             rospy.logerr('PatternGen: unknown pattern')
         
         pattern.isDirty = False
Code example #21
  def __init__(self):

    # Dependencies
    self.serv_topic = rospy.get_param('rapp_knowrob_wrapper_create_ontology_alias')
    if(not self.serv_topic):
      rospy.logerror("rapp_knowrob_wrapper_create_ontology_alias param not found")
    rospy.wait_for_service(self.serv_topic)
    
    self.serv_topic = rospy.get_param('rapp_knowrob_wrapper_register_image_object_to_ontology')
    if(not self.serv_topic):
      rospy.logerror("rapp_knowrob_wrapper_register_image_object_to_ontology param not found")
    rospy.wait_for_service(self.serv_topic)   

    #Declare Callbacks
    self.serv_topic = rospy.get_param("rapp_caffe_wrapper_image_classification")
    if(not self.serv_topic):
      rospy.logerror("rapp_caffe_wrapper_image_classification")
    self.serv=rospy.Service(self.serv_topic, imageClassificationSrv, self.imageClassificationDataHandler)  

    self.serv_topic = rospy.get_param("rapp_caffe_wrapper_get_ontology_class_equivalent")
    if(not self.serv_topic):
      rospy.logerror("rapp_caffe_wrapper_get_ontology_class_equivalent")
    self.serv=rospy.Service(self.serv_topic, ontologyClassBridgeSrv, self.ontologyClassBridgeDataHandler)     
    
    self.serv_topic = rospy.get_param("rapp_caffe_wrapper_register_image_to_ontology")
    if(not self.serv_topic):
      rospy.logerror("rapp_caffe_wrapper_register_image_to_ontology")
    self.serv=rospy.Service(self.serv_topic, registerImageToOntologySrv, self.registerImageToOntologyDataHandler)  
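The constructor above repeats the same read-param / warn / register-service sequence; a hedged helper factoring that pattern out (not part of the original node) could look like:

import rospy

def register_service(param_name, srv_type, handler):
    # Resolve the topic name from the parameter server, then register the service.
    topic = rospy.get_param(param_name, None)
    if not topic:
        rospy.logerr("%s param not found", param_name)
        return None
    return rospy.Service(topic, srv_type, handler)

# hypothetical usage inside __init__:
# self.serv = register_service("rapp_caffe_wrapper_image_classification",
#                              imageClassificationSrv,
#                              self.imageClassificationDataHandler)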
Code example #22
File: cartesian_control.py  Project: xunilrj/sandbox
    def process_link_recursive(self, link, T, joint_values):
        if link not in self.robot.child_map: 
            self.x_current = T
            return
        for i in range(0,len(self.robot.child_map[link])):
            (joint_name, next_link) = self.robot.child_map[link][i]
            if joint_name not in self.robot.joint_map:
                rospy.logerror("Joint not found in map")
                continue
            current_joint = self.robot.joint_map[joint_name]        

            trans_matrix = tf.transformations.translation_matrix((current_joint.origin.xyz[0], 
                                                                  current_joint.origin.xyz[1],
                                                                  current_joint.origin.xyz[2]))
            rot_matrix = tf.transformations.euler_matrix(current_joint.origin.rpy[0], 
                                                         current_joint.origin.rpy[1],
                                                         current_joint.origin.rpy[2], 'rxyz')
            origin_T = numpy.dot(trans_matrix, rot_matrix)
            current_joint_T = numpy.dot(T, origin_T)
            if current_joint.type != 'fixed':
                if current_joint.name not in joint_values.name:
                    rospy.logerror("Joint not found in list")
                    continue
                # compute transform that aligns rotation axis with z
                aligned_joint_T = numpy.dot(current_joint_T, self.align_with_z(current_joint.axis))
                self.joint_transforms.append(aligned_joint_T)
                index = joint_values.name.index(current_joint.name)
                angle = joint_values.position[index]
                joint_rot_T = tf.transformations.rotation_matrix(angle, 
                                                                 numpy.asarray(current_joint.axis))
                next_link_T = numpy.dot(current_joint_T, joint_rot_T) 
            else:
                next_link_T = current_joint_T

            self.process_link_recursive(next_link, next_link_T, joint_values)
Code example #23
File: nao_camera.py  Project: ahornung/nao_robot
    def main_loop(self):
        img = Image()
        r = rospy.Rate(self.fps)
        while not rospy.is_shutdown():
            image = self.camProxy.getImageRemote(self.nameId)
            stampAL = image[4] + image[5]*1e-6
            #print image[5],  stampAL, "%lf"%(stampAL)
            img.header.stamp = rospy.Time(stampAL)
            img.header.frame_id = self.frame_id
            img.height = image[1]
            img.width = image[0]
            nbLayers = image[2]
            if image[3] == kYUVColorSpace:
                encoding = "mono8"
            elif image[3] == kRGBColorSpace:
                encoding = "rgb8"
            elif image[3] == kBGRColorSpace:
                encoding = "bgr8"
            elif image[3] == kYUV422ColorSpace:
                encoding = "yuv422" # this works only in ROS groovy and later
            else:
                rospy.logerror("Received unknown encoding: {0}".format(image[3]))

            img.encoding = encoding
            img.step = img.width * nbLayers
            img.data = image[6]
            infomsg = self.cim.getCameraInfo()
            infomsg.header = img.header
            self.pub_info_.publish(infomsg)
            self.pub_img_.publish(img)
            r.sleep()


        self.camProxy.unsubscribe(self.nameId)
Code example #24
    def get_points_3d(self, left_points, right_points):
        """ this method assumes that corresponding points are in the right order
            and returns a list of 3d points """

        # both lists must be of the same length, otherwise return None
        if len(left_points) != len(right_points):
            rospy.logerr("The number of left points and the number of right points is not the same")
            return None

        points_3d = []
        for i in range(len(left_points)):
            a = left_points[i]
            b = right_points[i]
            disparity = abs(a[0]-b[0])
            pt = Util.convertStereo(a[0], a[1], disparity, self.info)
            points_3d.append(pt) 
        # IPython.embed()
        # publish the points if in debug mode
        if DEBUG:
            markers = MarkerArray()
            for i in range(len(points_3d)):
                point = points_3d[i]
                marker = Util.createMarker(tfx.pose(point).msg.PoseStamped(), id=i+1, lifetime=2)
                markers.markers.append(marker)

            self.points_3d_pub.publish(markers)
        return points_3d
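Util.convertStereo is not shown here; the usual pinhole-plus-disparity relation it presumably implements is Z = fx*b/d, X = (u-cx)*Z/fx, Y = (v-cy)*Z/fy. A hedged sketch with assumed intrinsics:

def disparity_to_point(u, v, disparity, fx, fy, cx, cy, baseline):
    """Back-project pixel (u, v) with disparity into camera coordinates.

    Sketch only: the real Util.convertStereo takes a stereo camera info
    structure and may use different conventions.
    """
    if disparity <= 0:
        return None  # no depth can be recovered from zero or negative disparity
    z = fx * baseline / disparity
    x = (u - cx) * z / fx
    y = (v - cy) * z / fy
    return (x, y, z)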
Code example #25
File: pr2_arm.py  Project: rgleichman/berkeley_demos
 def get_controller_state(self):
     with self.ctrl_state_lock:
         if self.ctrl_state_dict is None:
             rospy.logerror("[pr2_arm_base] get_controller_state NOT IMPLEMENTED!")
         elif len(self.ctrl_state_dict) == 0:
             rospy.logwarn("[pr2_arm_base] Controller state not yet published.")
         return self.ctrl_state_dict
Code example #26
File: cb_detector.py  Project: MorS25/camera_pose
 def find_checkerboard_pose(self, ros_image, corners_x, corners_y, spacing_x, spacing_y, width_scaling, height_scaling):
   #we need to convert the ros image to an opencv image
   try:
     image = self.bridge.imgmsg_to_cv(ros_image, "mono8")
   except CvBridgeError as e:
     rospy.logerr("Error importing image %s" % e)
     return
Code example #27
File: cb_detector.py  Project: ahendrix/calibration
 def callback(self, ros_image):
   #we need to convert the ros image to an opencv image
   try:
     image = self.bridge.imgmsg_to_cv(ros_image, "mono8")
   except CvBridgeError as e:
     rospy.logerr("Error importing image %s" % e)
     return
Code example #28
    def resolve(self):
        """ Resolves

        :return: entity in the <area_description> of the <surface_designator> that is closest to the robot
        """
        # Get the surface as an entity
        surface = self._surface_designator.resolve()
        if surface is None:
            rospy.logerror("Cannot resolve surface designator")
            return None

        # Get all entities and check which ones are on the table
        all_entities = self._robot.ed.get_entities()
        entities = []
        for e in all_entities:
            point = robot_skills.util.kdl_conversions.VectorStamped(frame_id=e.frame_id, vector=e._pose.p)
            if surface.in_volume(point=point, volume_id=self.area_description):
                entities.append(e)

        # Remove all entities that are too large or too small
        entities = [e for e in entities if (e.shape.z_max - e.shape.z_min) > MIN_GRAB_OBJECT_HEIGHT]
        entities = [e for e in entities if (e.shape.y_max - e.shape.y_min) < MAX_GRAB_OBJECT_WIDTH]
        entities = [e for e in entities if (e.shape.x_max - e.shape.x_min) < MAX_GRAB_OBJECT_WIDTH]

        # Check if there are any
        if not entities:
            return None

        # Sort the entities and return the closest one
        base_pose = self._robot.base.get_location().frame
        entities = sorted(entities, key=lambda e: e.distance_to_2d(base_pose.p))
        return entities[0]
Code example #29
def parseMonitorSpec(monitor):

  m_id = monitor.monitor_id
  exp_id = monitor.monitored_expr.id
  e_risen = monitor.event_risen

  comp = monitor.comparison_type
  type_ = monitor.monitored_variable_type
  lower_bound = monitor.lower_bound
  upper_bound = monitor.upper_bound

  if comp == monitor.LESS:
    fun = lambda x: x < lower_bound
    print "fun = lambda x: x < "+str(lower_bound)
  elif comp == monitor.MORE:
    fun = lambda x: x > upper_bound
    print "fun = lambda x: x > "+str(upper_bound)
  elif comp == monitor.IN_INTERVAL:
    fun = lambda x: x > lower_bound and x < upper_bound
    print "fun = lambda x: x > "+str(lower_bound)+" and x < "+str(upper_bound)
  elif comp == monitor.OUT_INTERVAL:
    fun = lambda x: x < lower_bound or x > upper_bound
    print "fun = lambda x: x < "+str(upper_bound)+" or x > "+str(upper_bound)
  else:
    rospy.logerr("Unknown comparison type")
    fun = None

  return [m_id, exp_id, e_risen, fun]
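The returned closure captures the bounds, so callers can simply filter incoming samples with it; a tiny illustration of the same pattern (standalone, not tied to the monitor message):

lower_bound, upper_bound = 0.0, 1.0
in_interval = lambda x: x > lower_bound and x < upper_bound  # same shape as the IN_INTERVAL case
print [x for x in (-0.5, 0.3, 2.0) if in_interval(x)]        # -> [0.3]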
Code example #30
File: video_node.py  Project: OARK/OARK
    def start_stream(self, req):
        """A ROS service to start streaming video towards a certain address.
        The address supplied should be a simple IPv4 address in the form
        of a string. This string is supplied directly to gstreamer's udpsink.
        """
        with self._stream_mutex:
            if self._streamer is not None:
                #Shut down old stream
                self._streamer.shutdown()

            success = True
            try:
                #initialise and start the streamer
                self._streamer = VideoStream(self._elements,
                                             source=self._video_src,
                                             address=req.address,
                                             port=self._port)
                self._streamer.stream()
                rospy.loginfo('Video streaming to ' + str(req.address))
            except StreamException as se:
                rospy.logerr('Could not start video stream: ' + str(se))
                self._streamer = None
                success = False

            #Return a C-style error code.
            return ConnectedResponse(1 if success else 0)
Code example #31
            rospy.loginfo(rospy.get_caller_id() +
                          ', forward right (left, right)=(%s,%s)' %
                          (y - 0.1, (-(0.2 + y) - 0.1)))
            self.set_speed(self.motor_left_ID, y - 0.1)
            self.set_speed(self.motor_right_ID, (-(0.2 + y) - 0.1))
        else:
            self.all_stop()

    # raw L/R motor commands (speed, speed)
    def on_cmd_raw(self, msg):
        rospy.loginfo(rospy.get_caller_id() + ' cmd_raw=%s', msg.data)
        move_data_recv = json.loads(msg.data)
        self.set_speed(self.motor_left_ID, float(move_data_recv['left']))
        self.set_speed(self.motor_right_ID, float(move_data_recv['right']))

    # simple string commands (left/right/forward/backward/stop)
    def on_cmd_str(self, msg):
        rospy.loginfo(rospy.get_caller_id() + ' cmd_str=%s', msg.data)
        self.move_dir(msg.data.lower())


# initialization
if __name__ == '__main__':
    move = Move()
    try:
        move.start()
    except Exception as e:
        rospy.logerr("move node failed: %s", e)
    finally:
        # stop motors before exiting
        move.all_stop()
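on_cmd_raw above expects a JSON object with 'left' and 'right' wheel speeds; a hedged publisher-side sketch of that payload (topic name and message type are assumptions):

import json
import rospy
from std_msgs.msg import String

def send_raw_command(pub, left, right):
    # Serialize the wheel speeds in the shape on_cmd_raw parses with json.loads().
    pub.publish(String(data=json.dumps({'left': left, 'right': right})))

# pub = rospy.Publisher('cmd_raw', String, queue_size=1)  # topic name is an assumption
# send_raw_command(pub, 0.4, 0.4)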
Code example #32
File: move_base.py  Project: HiroIshida/scikit-robot
    def move_trajectory_sequence(self, trajectory_points, time_list, stop=True,
                                 start_time=None, send_action=None):
        """Move base following the trajectory points at each time points

        trajectory-points [ list of #f(x y yaw) ([m] for x, y; [rad] for yaw) ]
        time-list [list of time span [sec] ]
        stop [ stop after msec moving ]
        start-time [ robot will move at start-time [sec or ros::Time] ]
        send-action [ send message to action server, it means robot will move ]

        """
        if len(trajectory_points) != len(time_list):
            raise ValueError
        while self.odom_msg is None:
            rospy.sleep(0.01)
        odom_coords = geometry_pose_to_coords(self.odom_msg.pose)
        goal = control_msgs.msg.FollowJointTrajectoryActionGoal()
        msg = trajectory_msgs.msg.JointTrajectory()
        msg.joint_names = self.move_base_trajectory_joint_names
        if start_time is not None:
            msg.header.stamp = start_time
        else:
            msg.header.stamp = rospy.Time.now()
        coords_list = [odom_coords]
        for traj_point in trajectory_points:
            cds = Coordinates(pos=odom_coords.translation,
                              rot=odom_coords.rotation)
            cds.translate((traj_point[0], traj_point[1], 0.0))
            cds.rotate(traj_point[2], 'z')
            coords_list.append(cds)
        cur_cds = odom_coords
        cur_yaw = cur_cds.rpy_angle()[0][0]
        cur_time = 0
        pts_msg_list = []
        for i, (cur_cds, next_cds) in enumerate(zip(
                coords_list[:-1],
                coords_list[1:])):
            next_time = time_list[i]
            tra = cur_cds.transformation(next_cds)
            rot = cur_cds.rotate_vector(tra.translation)
            diff_yaw = rotation_distance(cur_cds.rotation, next_cds.rotation)
            pts_msg_list.append(
                trajectory_msgs.msg.JointTrajectoryPoint(
                    positions=[cur_cds.translation[0],
                               cur_cds.translation[1],
                               cur_yaw],
                    velocities=[rot[0] / next_time,
                                rot[1] / next_time,
                                tra.rpy_angle()[0][0] / next_time],
                    time_from_start=rospy.Time(cur_time)))
            cur_time += next_time
            cur_cds = next_cds
            if tra.rpy_angle()[0][0] > 0:
                cur_yaw += abs(diff_yaw)
            else:
                cur_yaw -= abs(diff_yaw)

        # append last point
        if stop:
            velocities = [0, 0, 0]
        else:
            velocities = [rot[0] / next_time,
                          rot[1] / next_time,
                          tra.rpy_angle()[0][0] / next_time]
        pts_msg_list.append(
            trajectory_msgs.msg.JointTrajectoryPoint(
                positions=[cur_cds.translation[0],
                           cur_cds.translation[1],
                           cur_yaw],
                velocities=velocities,
                time_from_start=rospy.Time(cur_time)))

        msg.points = pts_msg_list
        goal.goal.trajectory = msg
        if send_action:
            if self.move_base_trajectory_action is None:
                rospy.logerr(
                    'send_action is True, '
                    'but move_base_trajectory_action is not found')
                return False
            self.move_base_trajectory_action.send_goal(goal.goal)
            if self.move_base_trajectory_action.wait_for_result():
                return self.move_base_trajectory_action.get_result()
            else:
                return False
        return goal
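A hedged usage example for the method above (values made up; the robot-interface instance name is hypothetical):

# Each tuple is (x [m], y [m], yaw [rad]); each entry of time_list is the
# duration [sec] of the corresponding segment.
trajectory_points = [(0.5, 0.0, 0.0), (0.5, 0.0, 1.57), (0.0, 0.5, 0.0)]
time_list = [1.0, 1.0, 1.0]
# robot_interface.move_trajectory_sequence(trajectory_points, time_list,
#                                          stop=True, send_action=True)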
Code example #33
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

__author__ = "[email protected] (Edward Venator)"

import roslib; roslib.load_manifest("cwru_utilities")
import rospy
from tabletop_object_detector.srv import TabletopSegmentation

if __name__ == '__main__':
    rospy.init_node('tabletop_object_detector_wrapper')
    rospy.loginfo('Waiting for tabletop_segmentation service')
    rospy.wait_for_service('tabletop_segmentation')
    tabletop_detection = rospy.ServiceProxy('tabletop_segmentation', TabletopSegmentation)
    rospy.loginfo('Connected to tabletop segmentation service')
    timer = rospy.Rate(.2)
    while not rospy.is_shutdown():
        resp = tabletop_detection()
        if resp.result == resp.SUCCESS:
            rospy.loginfo("Tabletop detection service returned %d clusters", len(resp.clusters))
        elif resp.result == resp.NO_TABLE:
            rospy.loginfo("No table detected")
        elif resp.result == resp.NO_CLOUD_RECEIVED:
            rospy.logwarn("No cloud received")
        elif resp.result == resp.OTHER_ERROR:
            rospy.logerror("Tabletop segmentation error")
        timer.sleep()
Code example #34
    def process(self, data):

        data = self.pre_processing(data)
        #Save the data in case it's not a pre-programmed sentence
        old_data = data

        sentence = self.sentences['sentences'].get(data)

        #If there is no match, remove courtesy words and try again
        if sentence is None:

            data = self.remove_part(data, 'you', 1)
            data = self.remove_part(data, 'can', 0)
            sentence = self.sentences['sentences'].get(data)

            if sentence is None:

                data = self.remove_part(data, 'please', -1)
                sentence = self.sentences['sentences'].get(data)

        if sentence is None:
            rospy.loginfo('Sentence not in the static vocabulary ...')
            data = old_data

            #Try to parse the sentence to retrieve keywords from the hot topic
            if self.hot_topic is not None:
                client = self.get_best_client(self.hot_topic, data)
                if client is not None:
                    #Reset the hot topic
                    self.hot_topic = None
                    self.publish_data(client.callback(data))
                    return
                rospy.loginfo('The sentence does not match the hot topic ...')

            #Try to parse the sentence to retrieve keywords if there are clients
            if self.clients:
                client = self.get_best_client(self.clients, data)
                if client is not None:
                    self.publish_data(client.callback(data))
                    return
                rospy.loginfo(
                    'The sentence does not match any passive keyword ...')
        """if sentence is None and self.clients:
            data = old_data
            client = self.get_best_client(self.clients)
            if client is not None:
                self.publish_data(client.callback(data))
                return"""
        """
        #Try to parse the sentence to retrieve keywords if there is clients
        if sentence is None and self.clients:
            data = old_data
            relevance = {client:0 for client in self.clients}
            print self.clients

            for client, patterns in self.clients.iteritems():
                for keyword, weight in patterns.iteritems():

                    if keyword in data:
                        relevance[client] += weight
                    elif keyword == '*':
                        relevance[client] = sys.maxint
            
            best = max(relevance, key=lambda key: relevance[key])
            print relevance

            if relevance[best] != 0:
                self.publish_data(best.callback(data))
                return
        """

        if sentence is None:
            rospy.logwarn('Did not understand ...')
            return

        sentence = str(sentence)
        rospy.loginfo(
            'Understood {} from the static vocabulary'.format(sentence))

        intent = self.intents['intents'].get(sentence)

        if intent is None:
            rospy.logerr(rospy.get_caller_id() +
                           ' There is no intent to match to this sentence ...')
            return

        for action in intent["actions"]:
            action = str(action)
            tmp = self.actions['actions'].get(action)

            if tmp is None:
                rospy.logerr(rospy.get_caller_id() +
                               ' There is no action match to this intent ...')
                return

            #TODO: Do smth with the action

            rospy.loginfo(rospy.get_caller_id() + 'Displaying face :' +
                          str(tmp['face']))
            rospy.loginfo(rospy.get_caller_id() + 'Displaying gesture :' +
                          str(tmp['gesture']))
            rospy.loginfo(rospy.get_caller_id() + 'Displaying voice :' +
                          str(tmp['voice']))

        rospy.loginfo(rospy.get_caller_id() + ' QT says :' + intent["answer"])

        text = intent["answer"]

        if text[0] == '$':
            tmp = text[1:].split('~')

            for c in self.classes:
                if c.__class__.__name__ == tmp[0]:
                    try:
                        text = getattr(c, tmp[1])()
                        #print '###############################################'
                        return
                    except AttributeError:
                        rospy.logfatal(
                            rospy.get_caller_id() +
                            ' The class "{}" has no method named "{}" !'.
                            format(tmp[0], tmp[1]))
                        return

            if text == intent["answer"]:
                rospy.logfatal(
                    rospy.get_caller_id() +
                    ' The class "{}" does not exists in the module "features" !'
                    .format(tmp[0], tmp[1]))
                return

        self.publish_data(text)
Code example #35
    def __init__(self, config={}):
        self.UR5RobotiqUtils_config = default_config
        self.UR5RobotiqUtils_config.update(config)

        rospy.init_node('ur5_robotiq_utils', anonymous=True)
        rospy.on_shutdown(self.cleanup)

        self.create_publishers_and_subscribers()

        #############################
        # Initiate member variables #
        #############################
        self.action_feedback = None
        self.current_goal_id = None

        ###############
        # Moveit init #
        ###############

        #### initialize move_group API ####
        moveit_commander.roscpp_initialize(sys.argv)

        #### initialize robot ####
        self.robot = moveit_commander.RobotCommander()

        #### initialize move group ####
        self.arm = moveit_commander.MoveGroupCommander('manipulator')

        #### set tool link ####
        self.arm.set_end_effector_link("tool0")

        #### get name of end-effector link ####
        self.ee_link = self.arm.get_end_effector_link()

        #### get reference frame for pose targets ####
        self.reference_frame = "base_link"

        #### setup ur5 reference frame ####
        self.arm.set_pose_reference_frame(self.reference_frame)

        #### allow replanning ####
        self.arm.allow_replanning(True)

        #### allow some leeway in position (m) and orientation (rad) ####
        self.arm.set_goal_position_tolerance(0.01)
        self.arm.set_goal_orientation_tolerance(0.1)

        #### neutral position for reset ####
        self.neutral_joint_positions = [
            2.996885061264038, -1.3486784140216272, 1.112940788269043,
            -0.805460278187887, -0.4705269972430628, -1.7602842489825647
        ]

        #### joint names ####
        self.joint_names = [
            'shoulder_pan_joint', 'shoulder_lift_joint', 'elbow_joint',
            'wrist_1_joint', 'wrist_2_joint', 'wrist_3_joint'
        ]

        #### Initialize IK Service ####
        try:
            self.moveit_ik = rospy.ServiceProxy('compute_ik', GetPositionIK)
            self.moveit_ik.wait_for_service()

        except rospy.ServiceException as e:
            rospy.logerr("Service call failed: %s" % e)
Code example #36
def image_callback(ros_data):
	global frame
	try:
		frame = bridge.imgmsg_to_cv2(ros_data, "bgr8")
	except CvBridgeError as e:
		rospy.logerr(e)
Code example #37
def handle_calculate_IK(req):
    global g_kuka_dh_model

    rospy.loginfo("Received %s eef-poses from the plan" % len(req.poses))
    if len(req.poses) < 1:
        print "No valid poses received"
        return -1
    else:

        ### Your FK code here
        ## CREATION OF THE DH MODEL WAS MADE BELOW ( in main ) ...
        ## AS A GLOBAL VARIABLE ( g_kuka_dh_model )

        # Initialize service response
        joint_trajectory_list = []
        for x in xrange(0, len(req.poses)):
            # IK code starts here
            joint_trajectory_point = JointTrajectoryPoint()

            # Extract end-effector position and orientation from request
            # px,py,pz = end-effector position
            # roll, pitch, yaw = end-effector orientation
            px = req.poses[x].position.x
            py = req.poses[x].position.y
            pz = req.poses[x].position.z

            (roll, pitch, yaw) = tf.transformations.euler_from_quaternion([
                req.poses[x].orientation.x, req.poses[x].orientation.y,
                req.poses[x].orientation.z, req.poses[x].orientation.w
            ])

            ### Your IK code here
            ## Extract data for ik solver
            _xyz = np.zeros((3, 1))
            _xyz[0, 0] = px
            _xyz[1, 0] = py
            _xyz[2, 0] = pz

            _rpy = np.zeros((3, 1))
            _rpy[0, 0] = roll
            _rpy[1, 0] = pitch
            _rpy[2, 0] = yaw

            ## Request ik solution
            _joints = g_kuka_dh_model.inverse(_xyz, _rpy)

            ## Assemble solution with a just-in-case check
            theta1 = 0.0
            theta2 = 0.0
            theta3 = 0.0
            theta4 = 0.0
            theta5 = 0.0
            theta6 = 0.0

            if _joints is not None:
                theta1 = _joints[0]
                theta2 = _joints[1]
                theta3 = _joints[2]
                theta4 = _joints[3]
                theta5 = _joints[4]
                theta6 = _joints[5]
            else:
                rospy.logerr('Non solvable ik request')
                print 'Non solvable ik request'

            # Populate response for the IK request
            # In the next line replace theta1,theta2...,theta6 by your joint angle variables
            joint_trajectory_point.positions = [
                theta1, theta2, theta3, theta4, theta5, theta6
            ]
            joint_trajectory_list.append(joint_trajectory_point)

        rospy.loginfo("length of Joint Trajectory List: %s" %
                      len(joint_trajectory_list))
        return CalculateIKResponse(joint_trajectory_list)
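In this style of node the handler above is typically bound to a ROS service; a hedged sketch (service name and srv package are assumptions):

import rospy
from kuka_arm.srv import CalculateIK  # assumed srv package; adjust to the actual definition

def IK_server():
    # Expose handle_calculate_IK as a service so a planner can request joint solutions.
    rospy.init_node('IK_server')
    rospy.Service('calculate_ik', CalculateIK, handle_calculate_IK)
    rospy.loginfo('Ready to receive an IK request')
    rospy.spin()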
Code example #38
File: test.py  Project: Addrick/Robots
 def get_transform(self):
     try:
         self.transform = self.tf_buffer.lookup_transform("base_link", "laser", rospy.Time(0), rospy.Duration(1.0))
     except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException):
         rospy.logerror("Error getting transform")
         print "Error"
Code example #39
            response = update_kb(
                rosplan_service.KnowledgeUpdateServiceRequest.REMOVE_GOAL, msg)
        else:
            rospy.logerr(
                'Error : Unknown update_type, admisible values are ADD_KNOWLEDGE, ADD_GOAL, REMOVE_KNOWLEDGE and REMOVE_GOAL'
            )
            rospy.logwarn('Knowledge base not updated...!')
            return False
    except rospy.ServiceException as e:
        rospy.logerr('Service call failed: %s' % e)
    if (response.success):
        rospy.loginfo('Knowledge base successfully updated')
        # service call successful
        return True
    else:
        rospy.logerr('Could not update world model, failed')
        # service call failed
        return False


def replan():
    rospy.logwarn(
        'Some component reported failure, therefore replan is needed')
    msg = String()
    msg.data = 'e_failure'
    event_out_pub.publish(msg)
    return False


def publish_success():
    rospy.loginfo('Plan successfully completed, everyone is happy now : )')
Code example #40
File: behavior_manager.py  Project: wklharry/hrl
 def preempt(self):
     rospy.logerror("[behavior] preempt must be overrided!")
Code example #41
 def set_angles(self, angles):
     for j, v in angles.items():
         if j not in self.joints:
             rospy.logerr('Invalid joint name "' + j + '"')
             continue
         self._pub_joints[j].publish(v)
Code example #42
File: nao_tactile.py  Project: sosswald/nao_robot
class NaoTactile(ALModule):
    "Sends callbacks for tactile touch, bumper press and foot contact to ROS"

    def __init__(self, moduleName):
        # get connection from command line:
        from optparse import OptionParser

        parser = OptionParser()
        parser.add_option(
            "--ip",
            dest="ip",
            default="",
            help=
            "IP/hostname of broker. Default is system's default IP address.",
            metavar="IP")
        parser.add_option(
            "--port",
            dest="port",
            default=0,
            help="IP/hostname of broker. Default is automatic port.",
            metavar="PORT")
        parser.add_option(
            "--pip",
            dest="pip",
            default="127.0.0.1",
            help="IP/hostname of parent broker. Default is 127.0.0.1.",
            metavar="IP")
        parser.add_option("--pport",
                          dest="pport",
                          default=9559,
                          help="port of parent broker. Default is 9559.",
                          metavar="PORT")

        (options, args) = parser.parse_args()
        self.ip = options.ip
        self.port = int(options.port)
        self.pip = options.pip
        self.pport = int(options.pport)
        self.moduleName = moduleName

        self.init_almodule()

        # ROS initialization:
        rospy.init_node('nao_tactile')

        # init. messages:
        self.tactile = TactileTouch()
        self.bumper = Bumper()
        self.tactilePub = rospy.Publisher("tactile_touch",
                                          TactileTouch,
                                          queue_size=10)
        self.bumperPub = rospy.Publisher("bumper", Bumper, queue_size=10)

        try:
            footContact = self.memProxy.getData("footContact", 0)
        except RuntimeError:
            footContact = None

        if footContact is None:
            self.hasFootContactKey = False
            rospy.loginfo(
                "Foot contact key is not present in ALMemory, will not publish to foot_contact topic."
            )
        else:
            self.hasFootContactKey = True
            self.footContactPub = rospy.Publisher("foot_contact",
                                                  Bool,
                                                  latch=True,
                                                  queue_size=10)
            self.footContactPub.publish(footContact > 0.0)

        # constants in TactileTouch and Bumper will not be available in callback functions
        # as they are executed in the parent broker context (i.e. on robot),
        # so they have to be copied to member variables
        self.tactileTouchFrontButton = TactileTouch.buttonFront
        self.tactileTouchMiddleButton = TactileTouch.buttonMiddle
        self.tactileTouchRearButton = TactileTouch.buttonRear
        self.bumperRightButton = Bumper.right
        self.bumperLeftButton = Bumper.left

        self.subscribe()

        rospy.loginfo("nao_tactile initialized")

    def init_almodule(self):
        # before we can instantiate an ALModule, an ALBroker has to be created
        rospy.loginfo("Connecting to NaoQi at %s:%d", self.pip, self.pport)
        try:
            self.broker = ALBroker("%sBroker" % self.moduleName, self.ip,
                                   self.port, self.pip, self.pport)
        except RuntimeError as e:
            print("Could not connect to NaoQi's main broker")
            exit(1)
        ALModule.__init__(self, self.moduleName)

        self.memProxy = ALProxy("ALMemory", self.pip, self.pport)
        # TODO: check self.memProxy.version() for > 1.6
        if self.memProxy is None:
            rospy.logerror("Could not get a proxy to ALMemory on %s:%d",
                           self.pip, self.pport)
            exit(1)
        'init_pose_topic_name': 'initialpose'
    }

    args = {}

    for name in arg_defaults:
        try:
            if rospy.search_param(name):
                args[name] = rospy.get_param(
                    '~' + name
                )  # prepend '~': the param is defined in the node's private namespace
            else:
                args[name] = arg_defaults[name]
            #print name
        except rospy.ROSException as e:
            rospy.logerr('%s: %s' % (e, _name))

    server = PointPathManager(
        _name,
        frame_id=args['frame_id'],
        goto_planner=args['goto_planner'],
        init_pose_topic_name=args['init_pose_topic_name'])

    t_sleep = 0.5
    running = True

    while not rospy.is_shutdown() and running:

        try:
            rospy.sleep(t_sleep)
        except rospy.exceptions.ROSInterruptException:
Code example #44
		rospy.spin()

	def on_tf(self, data):

		pose = data.pose[1]

		h = Header()
		h.stamp = rospy.Time.now()

		t = TransformStamped()
		t.transform.translation = pose.position
		t.transform.rotation = pose.orientation
		t.header = h

		print "Published Data:\n", t
		self.pub_tf.publish(t)

# Main function.
if __name__ == '__main__':
	# Initialize the node and name it.
	print "Initiating simulation node..."
	rospy.init_node('Simulation')
    
	try:
		Simulation_server = Simulation()

	except rospy.ROSInterruptException:
		rospy.logerror("Failed to start server node.")
		pass
Code example #45
def getColumns(t, msg, imageFile=""):
    #this function gets the data that is necessary for the csv file - one row at a time from the current msg.
    msgType = str(type(msg))
    msgType = msgType[msgType.index('.') + 1:]
    msgType = msgType[:msgType.index('\'')]

    if (msgType == '_sensor_msgs__LaserScan'):
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, \
                msg.angle_min, msg.angle_max, msg.angle_increment, msg.time_increment, msg.scan_time, msg.range_min, msg.range_max ]
        for i in range(len(msg.ranges)):
            columns.append(msg.ranges[i])

        if len(msg.intensities) >= 1:
            for i in range(len(msg.intensities)):
                columns.append(msg.intensities[i])

    elif (msgType == '_sensor_msgs__Imu'):
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, \
                msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w, \
                msg.angular_velocity.x, msg.angular_velocity.y, msg.angular_velocity.z, \
                msg.linear_acceleration.x, msg.linear_acceleration.y, msg.linear_acceleration.z]

    elif (msgType == '_sensor_msgs__Joy'):
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, \
                msg.axes[0], msg.axes[1], msg.axes[2], msg.axes[3], msg.axes[4], msg.axes[5],msg.axes[6],msg.axes[7], \
                msg.buttons[0], msg.buttons[1], msg.buttons[2], msg.buttons[3], msg.buttons[4],\
                msg.buttons[5], msg.buttons[6], msg.buttons[7], msg.buttons[8], msg.buttons[9],\
                msg.buttons[10]]

    elif (msgType == '_sensor_msgs__NavSatFix'):
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, \
                msg.latitude, msg.longitude, msg.altitude, msg.position_covariance]

    elif (msgType == '_gps_common__GPSFix'):
        columns =[t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, msg.status.status, \
                msg.latitude, msg.longitude, msg.altitude, msg.position_covariance, msg.position_covariance_type, \
                msg.track, msg.speed, msg.climb, msg.time, msg.pitch, msg.roll, msg.dip ]
    elif (msgType == '_sensor_msgs__Image'):
        columns = [
            t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs,
            imageFile
        ]

    elif (msgType == '_umrr_driver__radar_msg'):
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, \
                msg.sensorID, msg.numTargets, msg.mode, msg.submode, msg.status, \
                msg.targetRange, msg.targetAngle, msg.targetRadialSpeed, msg.targetSig2Threhold, msg.targetType ]
    elif (msgType == '_risc_msgs__Observed_angles'):
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, \
                msg.Obj[0].name, msg.Obj[0].landmarks[0].name, msg.Obj[0].landmarks[0].visible,msg.Obj[0].landmarks[0].azim,msg.Obj[0].landmarks[0].elev,msg.Obj[0].landmarks[1].name, msg.Obj[0].landmarks[1].visible,msg.Obj[0].landmarks[1].azim,msg.Obj[0].landmarks[1].elev,msg.Obj[0].landmarks[2].name, msg.Obj[0].landmarks[2].visible,msg.Obj[0].landmarks[2].azim,msg.Obj[0].landmarks[2].elev,msg.Obj[0].landmarks[3].name, msg.Obj[0].landmarks[3].visible,msg.Obj[0].landmarks[3].azim,msg.Obj[0].landmarks[3].elev, msg.Obj[0].quads[0].name, msg.Obj[0].quads[0].visible,msg.Obj[0].quads[0].azim,msg.Obj[0].quads[0].elev, \
                 msg.Obj[1].name, msg.Obj[1].landmarks[0].name, msg.Obj[1].landmarks[0].visible,msg.Obj[1].landmarks[0].azim,msg.Obj[1].landmarks[0].elev,msg.Obj[1].landmarks[1].name, msg.Obj[1].landmarks[1].visible,msg.Obj[1].landmarks[1].azim,msg.Obj[1].landmarks[1].elev,msg.Obj[1].landmarks[2].name, msg.Obj[1].landmarks[2].visible,msg.Obj[1].landmarks[2].azim,msg.Obj[1].landmarks[2].elev,msg.Obj[1].landmarks[3].name, msg.Obj[1].landmarks[3].visible,msg.Obj[1].landmarks[3].azim,msg.Obj[1].landmarks[3].elev, msg.Obj[1].quads[0].name, msg.Obj[1].quads[0].visible,msg.Obj[1].quads[0].azim,msg.Obj[1].quads[0].elev]

    elif (msgType == '_tf2_msgs__TFMessage'):
        columns = [t, msg.transforms[0].header.seq, msg.transforms[0].header.stamp.secs, msg.transforms[0].header.stamp.nsecs, \
                msg.transforms[0].header.frame_id, msg.transforms[0].child_frame_id, msg.transforms[0].transform.translation.x, msg.transforms[0].transform.translation.y, msg.transforms[0].transform.translation.z, msg.transforms[0].transform.rotation.x,msg.transforms[0].transform.rotation.y,msg.transforms[0].transform.rotation.z,msg.transforms[0].transform.rotation.w]

    elif (msgType == '_ardrone_autonomy__Navdata'):
        columns = [
            t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs,
            msg.batteryPercent, msg.state, msg.magX, msg.magY, msg.magZ,
            msg.pressure, msg.temp, msg.wind_speed, msg.wind_angle,
            msg.wind_comp_angle, msg.rotX, msg.rotY, msg.rotZ, msg.altd,
            msg.vx, msg.vy, msg.vz, msg.ax, msg.ay, msg.az, msg.motor1,
            msg.motor2, msg.motor3, msg.motor4, msg.tags_count, msg.tags_type,
            msg.tags_xc, msg.tags_yc, msg.tags_width, msg.tags_height,
            msg.tags_orientation, msg.tags_distance, msg.tm
        ]

    elif (msgType == '_risc_msgs__Controls'):
        columns = [
            t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs,
            msg.Obj[0].name, msg.Obj[0].phi, msg.Obj[0].theta, msg.Obj[0].psi,
            msg.Obj[0].alt
        ]


## two quads
    elif (msgType == '_risc_msgs__Cortex'):
        columns = [
            t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs,
            msg.Obj[0].name, msg.Obj[0].visible, msg.Obj[0].x, msg.Obj[0].y,
            msg.Obj[0].z, msg.Obj[0].u, msg.Obj[0].v, msg.Obj[0].w,
            msg.Obj[0].phi, msg.Obj[0].theta, msg.Obj[0].psi, msg.Obj[0].p,
            msg.Obj[0].q, msg.Obj[0].r, msg.Obj[1].name, msg.Obj[1].visible,
            msg.Obj[1].x, msg.Obj[1].y, msg.Obj[1].z, msg.Obj[1].u,
            msg.Obj[1].v, msg.Obj[1].w, msg.Obj[1].phi, msg.Obj[1].theta,
            msg.Obj[1].psi, msg.Obj[1].p, msg.Obj[1].q, msg.Obj[1].r
        ]

    elif (msgType == '_risc_msgs__Waypoints'):
        columns = [
            t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs,
            msg.Obj[0].name, msg.Obj[0].x, msg.Obj[0].y, msg.Obj[0].z,
            msg.Obj[0].heading
        ]

    elif (msgType == '_risc_msgs__Landmarks'):
        columns = [
            t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs,
            msg.Obj[0].name, msg.Obj[0].x, msg.Obj[0].y, msg.Obj[0].z,
            msg.Obj[1].name, msg.Obj[1].x, msg.Obj[1].y, msg.Obj[1].z,
            msg.Obj[2].name, msg.Obj[2].x, msg.Obj[2].y, msg.Obj[2].z,
            msg.Obj[3].name, msg.Obj[3].x, msg.Obj[3].y, msg.Obj[3].z
        ]

    else:
        rospy.logerror("Unexpected error - AGH!")
        usage()
        sys.exit(2)

    return columns
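
The function above only builds one row per message; a minimal sketch of a driver that feeds it from a bag file is shown below. The helper name, bag path and topic are illustrative assumptions and not part of the original script.

import csv

import rosbag


def write_topic_to_csv(bag_path, topic, csv_path):
    # Hypothetical helper: feed every message on `topic` through getColumns()
    # and write the resulting list as one CSV row.
    with rosbag.Bag(bag_path) as bag:
        with open(csv_path, 'w') as out:
            writer = csv.writer(out)
            for _, msg, stamp in bag.read_messages(topics=[topic]):
                writer.writerow(getColumns(stamp.to_sec(), msg))

# Example call (paths and topic are assumptions):
# write_topic_to_csv('session.bag', '/scan', 'scan.csv')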
コード例 #46
0
def getColumns(t, msg, imageFile = ""):
	#this function gets the data that is necessary for the csv file - one row at a time from the current msg.
	msgType = str(type(msg))
	msgType = msgType[msgType.index('.')+1:]
	msgType = msgType[:msgType.index('\'')]

	if (msgType == '_sensor_msgs__LaserScan'):
		columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, \
		                     msg.angle_min, msg.angle_max, msg.angle_increment, msg.time_increment, msg.scan_time, msg.range_min, msg.range_max ]
		for i in range(len(msg.ranges)):
			columns.append(msg.ranges[i])

		if len(msg.intensities) >= 1:
			for i in range(len(msg.intensities)):
				columns.append(msg.intensities[i])

	elif (msgType == '_sensor_msgs__Imu'):
		columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, \
		                     msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w, \
		                     msg.angular_velocity.x, msg.angular_velocity.y, msg.angular_velocity.z, \
		                     msg.linear_acceleration.x, msg.linear_acceleration.y, msg.linear_acceleration.z]

	elif (msgType == '_sensor_msgs__NavSatFix'):
		columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, \
		                     msg.latitude, msg.longitude, msg.altitude, msg.position_covariance]

	elif (msgType == '_gps_common__GPSFix'):
		columns =[t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, msg.status.status, \
		                     msg.latitude, msg.longitude, msg.altitude, msg.position_covariance, msg.position_covariance_type, \
		                     msg.track, msg.speed, msg.climb, msg.time, msg.pitch, msg.roll, msg.dip ]
	elif (msgType == '_sensor_msgs__Image'):
		columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, imageFile]
	elif (msgType == '_umrr_driver__radar_msg'):
		columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, \
					msg.sensorID, msg.numTargets, msg.mode, msg.submode, msg.status, \
					msg.targetRange, msg.targetAngle, msg.targetRadialSpeed, msg.targetSig2Threhold, msg.targetType ]
	elif (msgType == '_nav_msgs__Odometry'):
		columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, \
					msg.pose.pose.position.x, msg.pose.pose.position.y, msg.pose.pose.position.z, \
					msg.pose.pose.orientation.x, msg.pose.pose.orientation.y, msg.pose.pose.orientation.z, msg.pose.pose.orientation.w ]

	elif (msgType == '_tuw_vehicle_msgs__Wheelspeeds'):
		columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, \
					msg.fr, msg.fl, msg.rr, msg.rl]
	elif (msgType == '_tuw_vehicle_msgs__BatteryState'):
		columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, \
					msg.current]
	elif (msgType == '_geometry_msgs__PoseWithCovarianceStamped'):
		columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, \
					msg.pose.pose.position.x, msg.pose.pose.position.y, msg.pose.pose.position.z, \
					msg.pose.pose.orientation.x, msg.pose.pose.orientation.y, msg.pose.pose.orientation.z, msg.pose.pose.orientation.w , msg.pose.covariance ]

        elif (msgType == '_arf_msgs__controllerData'):
                columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs ]

                for i in range(len(msg.data)):
                        columns.append(ord(msg.data[i]))



	else:
		rospy.logerror("Unexpected error - AGH!")
		usage()
		sys.exit(2)

	return columns
コード例 #47
0
def getKey():
    tty.setraw(sys.stdin.fileno())
    select.select([sys.stdin], [], [], 0)
    key = sys.stdin.read(1)
    termios.tcsetattr(sys.stdin, termios.TCSADRAIN, settings)
    return key


if __name__ == "__main__":
    settings = termios.tcgetattr(sys.stdin)
    rospy.init_node('keyboard_teleop')
    """ ROS Parameters
    """
    pressed_key_pub = rospy.Publisher('pressed_key', String, queue_size=10)
    pressed_key_msg = String()
    try:
        print msg
        while (1):
            key = getKey()
            print key
            if (key == '\x03' or key == '\x1B'):
                # If the user pressed Ctrl+C or Escape, finish the program
                break
            pressed_key_msg.data = key
            pressed_key_pub.publish(pressed_key_msg)

    except:
        rospy.logerror("keyboard_teleop.py: exception")

    finally:
        termios.tcsetattr(sys.stdin, termios.TCSADRAIN, settings)
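
The loop above publishes every key press on the 'pressed_key' topic as a std_msgs/String. A minimal sketch of a node consuming that topic is shown below; the node name and callback are assumptions, not part of the original teleop script.

import rospy
from std_msgs.msg import String


def on_key(msg):
    # msg.data carries the single character read by the teleop node
    rospy.loginfo("pressed key: %s", msg.data)


if __name__ == '__main__':
    rospy.init_node('pressed_key_listener')
    rospy.Subscriber('pressed_key', String, on_key)
    rospy.spin()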
コード例 #48
0
    def callback_image(self, data):
        if (self.navdata_msg != None and \
            self.navdata_msg.state == 3):  # The actor-critic system works only when the drone is flying

            rospy.logdebug("Episode : " + str(self.count) + " Replay Buffer " +
                           str(self.buff.count()))

            loss = 0

            # Removing inf values from the laser ranges
            if self.laser_msg != None:
                laser_ranges = np.asarray(self.laser_msg.ranges)
                laser_ranges[laser_ranges == inf] = 0

            # Calculating laser punishment
            laser_cost = 0.0
            if self.laser_msg != None:
                inverted_ranges = 1 - (laser_ranges / self.laser_msg.range_max)
                gaussian_ranges = np.multiply(
                    inverted_ranges,
                    signal.gaussian(self.feature_dim,
                                    (self.feature_dim / 2 * 0.8)))
                laser_cost = -np.sum(gaussian_ranges) / self.feature_dim
            rospy.loginfo("Laser range punishment: " + str(laser_cost))

            # Calculating the punishment for colliding
            is_colliding = False
            collision_cost = 0.0
            if self.colliding_flag:
                is_colliding = True
                collision_cost = -10.0
            rospy.logdebug("Collisions punishment: " + str(collision_cost))

            # Calculating the time elapsed from the last respawn
            actual_time = rospy.get_rostime()
            time_stamp = actual_time.secs + \
                         actual_time.nsecs / 1000000000.0
            time_elapsed = time_stamp - self.start_time
            rospy.logdebug("Time elapsed: " + str(time_elapsed))

            # Calculating the aruco distance reward
            aruco_dist = 0.0
            aruco_cost = 0.0
            if self.aruco_msg != None:
                aruco_dist = np.sqrt(self.aruco_msg.linear.x**2 + \
                                     self.aruco_msg.linear.y**2 + \
                                     self.aruco_msg.linear.z**2)
                if aruco_dist == 0.0 or aruco_dist > self.aruco_limit:
                    aruco_dist = self.aruco_limit
                aruco_cost = 1.0 - (aruco_dist / self.aruco_limit)
                rospy.logdebug("Aruco distance reward: " + str(aruco_cost))

            # Calculating the traveled distance reward and the altitude punishment
            trav_dist = 0.0
            trav_cost = 0.0
            alt_cost = 0.0
            if self.model_states_pose_msg != None:
                actual_pos = self.model_states_pose_msg.position
                trav_dist = np.sqrt((actual_pos.x - self.start_pos.linear.x)**2 + \
                                    (actual_pos.y - self.start_pos.linear.y)**2)
                trav_cost = trav_dist / time_elapsed
                alt_cost = -abs(1 - actual_pos.z)
            rospy.logdebug("Travel distance reward: " + str(trav_cost))

            # Calculating the angular velocity punishment
            angular_cost = 0
            if self.imu_msg != None:
                angular_cost = -abs(self.imu_msg[6])
            rospy.loginfo("Angular punishment: " + str(angular_cost))

            # Calculating the step reward
            step_reward = collision_cost + \
                          (10 * aruco_cost) + \
                          trav_cost + alt_cost \
                          + laser_cost #+ angular_cost
            rospy.logdebug("Step reward: " + str(step_reward))

            # Calculating the total reward
            self.total_reward += step_reward
            rospy.logdebug("Total reward: " + str(self.total_reward))

            image_features = np.zeros(self.feature_dim)
            if self.input_mode == "camera":
                rospy.logwarn(self.input_mode + " CAMERA")
                # Reading the camera image from the topic
                try:
                    cv_image = self.bridge.imgmsg_to_cv2(data, "bgr8")
                except CvBridgeError as err:
                    rospy.logerror(err)

                # Resizing the image to 160x80 pixels for the convolutional network
                cv_image_resized = cv2.resize(cv_image, (160, 80))
                #array_image = np.asarray(cv_image_resized)
                array_image = img_to_array(cv_image_resized)
                input_image = np.zeros((1, 80, 160, 3))
                input_image[0] = array_image
                input_image /= 255.0

                # Calculating image features
                with self.graph.as_default():
                    image_features = self.autoencoder_network.run_network(
                        input_image)
            elif self.laser_msg != None:
                rospy.logwarn(self.input_mode + " LASER")
                # Reading laser data as state features
                image_features = laser_ranges / self.laser_msg.range_max

            # Adding the features to the features list
            if len(self.queue) == 0:
                self.queue.append(image_features)
                self.queue.append(image_features)
                self.queue.append(image_features)
            else:
                self.queue.popleft()
                self.queue.append(image_features)

            rospy.logdebug("Queue length: " + str(len(self.queue)))
            rospy.logdebug("Image Features: " + str(image_features.shape))
            rospy.logdebug("Imu length: " + str(len(self.imu_msg)))

            # Create the state vector
            new_state = np.concatenate(
                (self.queue[0].flatten(), self.queue[1].flatten(),
                 self.queue[2].flatten()))
            if self.imu_input_mod == 1:
                new_state = np.concatenate((new_state, self.imu_msg))
            if self.input_mode == "laser":
                # Creating aruco features
                aruco_features = np.zeros(self.aruco_dim)
                if self.aruco_msg != None:
                    if self.aruco_msg.linear.x == 0 or self.aruco_msg.linear.x > self.aruco_limit:
                        aruco_features[0] = 1.0
                    else:
                        aruco_features[
                            0] = self.aruco_msg.linear.x / self.aruco_limit
                    if self.aruco_msg.linear.y == 0 or self.aruco_msg.linear.y > self.aruco_limit:
                        aruco_features[1] = 1.0
                    else:
                        aruco_features[
                            1] = self.aruco_msg.linear.y / self.aruco_limit
                    if self.aruco_msg.linear.z == 0 or self.aruco_msg.linear.z > self.aruco_limit:
                        aruco_features[2] = 1.0
                    else:
                        aruco_features[
                            2] = self.aruco_msg.linear.z / self.aruco_limit
                    #aruco_features[3] = self.aruco_msg.angular.x / np.pi
                    #aruco_features[4] = self.aruco_msg.angular.y / np.pi
                    #aruco_features[5] = self.aruco_msg.angular.z / np.pi

                # Creating altitude feature
                altitude_feature = np.zeros(1)
                if self.model_states_pose_msg != None:
                    altitude_value = self.model_states_pose_msg.position.z
                    if altitude_value > self.altitude_limit:
                        altitude_feature[0] = 1.0
                    else:
                        altitude_feature[
                            0] = altitude_value / self.altitude_limit
                new_state = np.concatenate(
                    (new_state, aruco_features, altitude_feature))

            rospy.logdebug("State length: " + str(len(new_state)))
            rospy.loginfo("State: " + str(new_state))

            # Add replay buffer
            done = False
            if is_colliding or \
               (aruco_cost > 0.8):
                done = True
            self.buff.add(self.state, self.action[0], step_reward, new_state,
                          done)

            # Calculating new action
            with self.graph.as_default():
                a_t_original = self.actor.model.predict(
                    new_state.reshape(1, new_state.shape[0]))
            self.action_noise[0][0] = self.train_indicator * max(self.epsilon, 0) * \
                                   self.ou.function(a_t_original[0][0],  0.3, 0.5, 0.1)
            self.action_noise[0][1] = self.train_indicator * max(self.epsilon, 0) * \
                                   self.ou.function(a_t_original[0][1],  0.0, 0.5, 0.1)
            self.action_noise[0][2] = self.train_indicator * max(self.epsilon, 0) * \
                                   self.ou.function(a_t_original[0][2], 0.0, 0.5, 0.1)

            self.action[0][0] = a_t_original[0][0] + self.action_noise[0][0]
            self.action[0][1] = a_t_original[0][1] + self.action_noise[0][1]
            self.action[0][2] = a_t_original[0][2] + self.action_noise[0][2]
            #        with self.graph.as_default():
            #          a_t_original = self.actor.model.predict(new_state.reshape(1, new_state.shape[0]))
            #        self.action_noise[0][0] = self.train_indicator * max(self.epsilon, 0) * \
            #                               self.ou.function(a_t_original[0][0],  0.3, 0.6, 0.1)
            #        self.action_noise[0][1] = self.train_indicator * max(self.epsilon, 0) * \
            #                               self.ou.function(a_t_original[0][1],  0.0, 0.5, 0.1)
            #        self.action_noise[0][2] = self.train_indicator * max(self.epsilon, 0) * \
            #                               self.ou.function(a_t_original[0][2], 0.0, 0.9, 0.1)
            #        self.action_noise[0][3] = self.train_indicator * max(self.epsilon, 0) * \
            #                               self.ou.function(a_t_original[0][3], 0.0, 0.5, 0.1)
            #
            #        self.action[0][0] = a_t_original[0][0] + self.action_noise[0][0]
            #        self.action[0][1] = a_t_original[0][1] + self.action_noise[0][1]
            #        self.action[0][2] = a_t_original[0][2] + self.action_noise[0][2]
            #        self.action[0][3] = a_t_original[0][3] + self.action_noise[0][3]

            rospy.loginfo("motor command plus noise: " + str(self.action[0][2]) + \
                          " original motor command: " + str(a_t_original[0][2]) + \
                          " noise: " + str(self.action_noise[0][2]))

            # Perform an action
            cmd_input = Twist()
            #        cmd_input.linear.x = self.action[0][0]
            #        cmd_input.linear.y = self.action[0][1]
            #        cmd_input.linear.z = self.action[0][2]
            #        cmd_input.angular.z = self.action[0][3]
            cmd_input.linear.x = self.action[0][0]
            cmd_input.linear.y = 0.0
            cmd_input.linear.z = self.action[0][1]
            # TEMPORARY CHANGE FOR TESTING
            #        if abs(a_t_original[0][2]) > 0.7:
            #            self.action[0][2] = 0.0
            cmd_input.angular.z = self.action[0][2]
            self.cmd_vel_pub.publish(cmd_input)

            # Updating the state
            self.state = new_state

            #Do the batch update
            batch = self.buff.getBatch(self.batch_size)
            states = np.asarray([e[0] for e in batch])
            actions = np.asarray([e[1] for e in batch])
            rewards = np.asarray([e[2] for e in batch])
            new_states = np.asarray([e[3] for e in batch])
            dones = np.asarray([e[4] for e in batch])
            y_t = np.asarray([e[1] for e in batch])

            # Calculating Q value
            with self.graph.as_default():
                target_q_values = self.critic.target_model.predict(
                    [new_states,
                     self.actor.target_model.predict(new_states)])

            for k in range(len(batch)):
                if dones[k]:
                    y_t[k] = rewards[k]
                else:
                    y_t[k] = rewards[k] + self.gamma * target_q_values[k]

            # Training the network
            with self.graph.as_default():
                if (self.train_indicator):
                    loss += self.critic.model.train_on_batch([states, actions],
                                                             y_t)
                    a_for_grad = self.actor.model.predict(states)
                    grads = self.critic.gradients(states, a_for_grad)
                    self.actor.train(states, grads)
                    self.actor.target_train()
                    self.critic.target_train()

            rospy.logdebug("Episode: " + str(self.count) + \
                          " Step: " + str(self.step) + \
                          " Action: " + str(self.action) + \
                          " Reward: " + str(step_reward) + \
                          " Loss: " + str(loss))

            self.step += 1

            # Starting a new episode if the drone collides with something or if it is close enough to an ArUco board
            if is_colliding or \
               (aruco_cost > 0.8):
                model_state_msg = ModelState()
                empty_msg = Empty()

                # Generating a new random position chosen between 4
                new_position = random.sample(self.position_list, 1)

                # Generating a random orientation
                angle = random.random() * 2 * np.pi

                rospy.loginfo("New position: " + str(new_position) + \
                              " New angle: " + str(angle))

                # Creating the model state message to send to set_model_space topic
                model_state_msg.model_name = "quadrotor"
                model_state_msg.pose.position.x = new_position[0][0]
                model_state_msg.pose.position.y = new_position[0][1]
                model_state_msg.pose.position.z = 0.0
                quaternion = quaternion_from_euler(0, 0, angle)
                model_state_msg.pose.orientation.x = quaternion[0]
                model_state_msg.pose.orientation.y = quaternion[1]
                model_state_msg.pose.orientation.z = quaternion[2]
                model_state_msg.pose.orientation.w = quaternion[3]
                model_state_msg.reference_frame = "world"

                self.start_pos.linear.x = new_position[0][0]
                self.start_pos.linear.y = new_position[0][1]
                self.start_pos.linear.z = 0.0

                # Resetting the episode starting time
                actual_time = rospy.get_rostime()
                self.start_time = actual_time.secs + \
                                  actual_time.nsecs / 1000000000.0
                # Resetting the image queue
                self.queue = deque([])
                # Resetting state, action and noise vectors
                self.action = np.zeros((1, self.action_dim))
                self.action_noise = np.zeros((1, self.action_dim))
                self.state = np.zeros(self.state_dim)

                # Saving the weights
                with self.graph.as_default():
                    if (self.train_indicator):
                        rospy.loginfo("Saving the weights")
                        self.actor.model.save_weights(self.networks_dir +
                                                      "actormodel.h5",
                                                      overwrite=True)
                        with open(self.networks_dir + "actormodel.json",
                                  "w") as outfile:
                            json.dump(self.actor.model.to_json(), outfile)

                        self.critic.model.save_weights(self.networks_dir +
                                                       "criticmodel.h5",
                                                       overwrite=True)
                        with open(self.networks_dir + "criticmodel.json",
                                  "w") as outfile:
                            json.dump(self.critic.model.to_json(), outfile)

                rospy.loginfo("TOTAL REWARD @ " + str(self.count) +
                              "-th Episode  : Reward " +
                              str(self.total_reward))
                rospy.loginfo("Total Step: " + str(self.step))
                self.total_reward = 0.0
                self.count += 1
                self.step = 0

                # reset the actions
                cmd_input.linear.x = 0.0
                cmd_input.linear.y = 0.0
                cmd_input.linear.z = 0.0
                cmd_input.angular.z = 0.0
                self.cmd_vel_pub.publish(cmd_input)

                # Reinitializing position, orientation and status of the drone
                self.land_pub.publish(empty_msg)
                self.model_state_pub.publish(model_state_msg)
                self.reset_pub.publish(empty_msg)
                rospy.sleep(0.5)
                self.takeoff_pub.publish(empty_msg)
                rospy.sleep(0.5)
                self.colliding_flag = False
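
The exploration noise above comes from self.ou.function(x, mu, theta, sigma), which is not included in this snippet. The sketch below shows a common Ornstein-Uhlenbeck-style definition consistent with how it is called here; the class actually used by the project may differ.

import numpy as np


class OrnsteinUhlenbeckNoise(object):
    # Sketch of an OU-style exploration term: pull the action value x back
    # towards mu at rate theta and add Gaussian noise scaled by sigma.
    def function(self, x, mu, theta, sigma):
        return theta * (mu - x) + sigma * np.random.randn()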
コード例 #49
0
  def recordPerformance(self,req):
    try:
      res = recordUserCognitiveTestPerformanceSrvResponse()


      serv_topic = rospy.get_param('rapp_knowrob_wrapper_create_ontology_alias')
      if(not serv_topic):
        rospy.logerror("rapp_knowrob_wrapper_create_ontology_alias topic param not found")
        res.trace.append("rapp_knowrob_wrapper_create_ontology_alias topic param not found")
        res.error="rapp_knowrob_wrapper_create_ontology_alias topic param not found"
        res.success=False
        return res
      rospy.wait_for_service(serv_topic)
      knowrob_service = rospy.ServiceProxy(serv_topic, createOntologyAliasSrv)
      createOntologyAliasReq = createOntologyAliasSrvRequest()
      createOntologyAliasReq.username=req.username
      createOntologyAliasResponse = knowrob_service(createOntologyAliasReq)
      if(createOntologyAliasResponse.success!=True):
        res.trace=createOntologyAliasResponse.trace
        res.error=createOntologyAliasResponse.error
        res.success=False
        return res

      serv_topic = rospy.get_param('rapp_knowrob_wrapper_user_performance_cognitve_tests')
      if(not serv_topic):
        rospy.logerror("rapp_knowrob_wrapper_user_performance_cognitve_tests topic param not found")
        res.trace.append("rapp_knowrob_wrapper_user_performance_cognitve_tests topic param not found")
        res.error="rapp_knowrob_wrapper_user_performance_cognitve_tests topic param not found"
        res.success=False
        return res



      serv_topic = rospy.get_param('rapp_knowrob_wrapper_record_user_cognitive_tests_performance')
      if(not serv_topic):
        rospy.logerror("rapp_knowrob_wrapper_record_user_cognitive_tests_performance not found")
        res.trace.append("rapp_knowrob_wrapper_record_user_cognitive_tests_performance topic param not found")
        res.error="rapp_knowrob_wrapper_record_user_cognitive_tests_performance topic param not found"
        res.success=False
        return res
      rospy.wait_for_service(serv_topic)
      knowrob_service = rospy.ServiceProxy(serv_topic, recordUserPerformanceCognitiveTestsSrv)
      userPerformanceEntry = recordUserPerformanceCognitiveTestsSrvRequest()
      userPerformanceEntry.test=req.test
      #userPerformanceEntry.test_type=req.testType
      userPerformanceEntry.patient_ontology_alias=createOntologyAliasResponse.ontology_alias
      userPerformanceEntry.timestamp=int(time.time())
      userPerformanceEntry.score=req.score
      userPerformanceEntryResponse = knowrob_service(userPerformanceEntry)
      if(userPerformanceEntryResponse.success!=True):
        res.trace=userPerformanceEntryResponse.trace
        res.trace.append("Submitting query to ontology failed, either test or user ontology alias do not exist or test not of the given type")
        res.error=userPerformanceEntryResponse.error+"Submitting query to ontology failed, either test or user ontology alias do not exist or test not of the given type"
        res.success=False
        return res
      else:
        res.success=True
        res.userCognitiveTestPerformanceEntry=userPerformanceEntryResponse.cognitive_test_performance_entry


    except IndexError:
      res.trace.append("Wrong Query Input Format, check for empty required columns list or wrong/incomplete Query data format")
      res.success=False
      #print "Wrong Query Input Format, check for empty required columns list or wrong/incomplete Query data format"
    except IOError:
      print "Error: can\'t find login file or read data"
      res.success=False
      res.trace.append("Error: can\'t find login file or read data")
    return res
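
The handler above repeats the same look-up-parameter / wait-for-service / build-proxy sequence for every service it calls. A hedged helper capturing that pattern is sketched below (the helper name is an assumption; note that the stock rospy logging call is rospy.logerr).

import rospy


def make_service_proxy(param_name, srv_type):
    # Resolve the service name from the parameter server, wait for the
    # service to come up and return a ready-to-call proxy (or None).
    topic = rospy.get_param(param_name, None)
    if not topic:
        rospy.logerr("%s topic param not found", param_name)
        return None
    rospy.wait_for_service(topic)
    return rospy.ServiceProxy(topic, srv_type)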
コード例 #50
0
class NaoVisionInterface(ALModule, NaoqiNode):
    """Interface exposing NAOqi vision features (face, motion and landmark detection) as ROS services."""

    def __init__(self, moduleName):
        # ROS initialization
        NaoqiNode.__init__(self, Constants.NODE_NAME)

        # NAOQi initialization
        self.ip = ""
        self.port = 10601
        self.moduleName = moduleName
        self.init_almodule()

        #~ Variable initialization
        self.faces = FaceDetected()
        self.face_detection_enabled = False
        self.motion_detection_enabled = False
        self.landmark_detection_enabled = False

        #~ ROS initializations
        self.subscribeFaceSrv = rospy.Service(
            "nao_vision/face_detection/enable", Empty,
            self.serveSubscribeFaceSrv)
        self.unsubscribeFaceSrv = rospy.Service(
            "nao_vision/face_detection/disable", Empty,
            self.serveUnsubscribeFaceSrv)
        self.subscribeMotionSrv = rospy.Service(
            "nao_vision/motion_detection/enable", Empty,
            self.serveSubscribeMotionSrv)
        self.unsubscribeMotionSrv = rospy.Service(
            "nao_vision/motion_detection/disable", Empty,
            self.serveUnsubscribeMotionSrv)
        self.subscribeLandmarkSrv = rospy.Service(
            "nao_vision/landmark_detection/enable", Empty,
            self.serveSubscribeLandmarkSrv)
        self.unsubscribeLandmarkSrv = rospy.Service(
            "nao_vision/landmark_detection/disable", Empty,
            self.serveUnsubscribeLandmarkSrv)
        self.learnFaceSrv = rospy.Service(
            'nao_vision/face_detection/learn_face', LearnFace,
            self.learnFaceSrv)
        self.forgetPersonSrv = rospy.Service(
            'nao_vision/face_detection/forget_person', LearnFace,
            self.forgetPersonSrv)

        self.subscribe()

        rospy.loginfo("nao_vision_interface initialized")

    def init_almodule(self):
        # before we can instantiate an ALModule, an ALBroker has to be created
        rospy.loginfo("Connecting to NaoQi at %s:%d", self.pip, self.pport)
        try:
            self.broker = ALBroker("%sBroker" % self.moduleName, self.ip,
                                   self.port, self.pip, self.pport)
        except RuntimeError, e:
            print("Could not connect to NaoQi's main broker")
            exit(1)
        ALModule.__init__(self, self.moduleName)

        self.memProxy = ALProxy("ALMemory", self.pip, self.pport)
        if self.memProxy is None:
            rospy.logerror("Could not get a proxy to ALMemory on %s:%d",
                           self.pip, self.pport)
            exit(1)

        self.landmarkDetectionProxy = ALProxy("ALLandMarkDetection", self.pip,
                                              self.pport)
        if self.landmarkDetectionProxy is None:
            rospy.logerror(
                "Could not get a proxy to ALLandMarkDetection on %s:%d",
                self.pip, self.pport)
            exit(1)

        self.movementDetectionProxy = ALProxy("ALMovementDetection", self.pip,
                                              self.pport)
        if self.movementDetectionProxy is None:
            rospy.logerror(
                "Could not get a proxy to ALMovementDetection on %s:%d",
                self.pip, self.pport)
            exit(1)

        self.faceDetectionProxy = ALProxy("ALFaceDetection", self.pip,
                                          self.pport)
        if self.faceDetectionProxy is None:
            rospy.logerror("Could not get a proxy to ALFaceDetection on %s:%d",
                           self.pip, self.pport)
            exit(1)
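
Every proxy above is created with the same "get an ALProxy or bail out" check. A hedged helper expressing that pattern once is sketched below; the function name is an assumption, and it catches the RuntimeError that the NAOqi SDK raises rather than testing for None.

import rospy
from naoqi import ALProxy


def get_proxy_or_exit(module_name, pip, pport):
    # Create a proxy to a NAOqi module and abort the node if it is unavailable.
    try:
        proxy = ALProxy(module_name, pip, pport)
    except RuntimeError as e:
        rospy.logerr("Could not get a proxy to %s on %s:%d: %s",
                     module_name, pip, pport, e)
        exit(1)
    return proxy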
コード例 #51
0
ファイル: base.py プロジェクト: cjrunci/clearpath_common
 def cmd_vel(self, linear_velocity, angular_velocity):
     rospy.logerror(
         'Base class contains no command model. Use kinematic.py or raw.py.'
     )
     raise NotImplementedError()
コード例 #52
0
# enables GPIO pin that controls I2C bus for the IMU

import rospy
import pigpio


def enableIMU():
    # on the current PiHat for SigSent, the IMU enable pin is GPIO 23
    gpio_pin = 23

    #enable node
    rospy.init_node('enableIMU')

    #init pigpio
    p = pigpio.pi()

    #set GPIO to HIGH, enabling i2c bus
    p.write(gpio_pin, 1)

    # happy little message to let us know what's going on
    rospy.loginfo("IMU enabled on pin {}".format(gpio_pin))


if __name__ == '__main__':
    try:
        enableIMU()
    except rospy.ROSInterruptException:
        rospy.logerror("Could not enable IMU pin on GPIO pin 23")
        pass
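
A hypothetical counterpart that releases the enable pin again might look like the sketch below; it is not part of the original node.

import rospy
import pigpio


def disableIMU():
    # Pull the IMU enable pin (GPIO 23 on the same PiHat) low again.
    gpio_pin = 23
    p = pigpio.pi()
    p.write(gpio_pin, 0)
    rospy.loginfo("IMU disabled on pin {}".format(gpio_pin))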
コード例 #53
0
    def update_pddl_file(self):

        self.clean_pddl()

        types = ""
        predicates = ""
        actions = ""

        pddl_dirs = []
        if rospy.has_param('~pddl_packages'):
            pddl_pkgs = rospy.get_param('~pddl_packages')

            if isinstance(pddl_pkgs, (list, )):
                for pkg in pddl_pkgs:
                    try:
                        pkg_path = rospack.get_path(pkg)
                        pddl_dirs.append(pkg_path)
                    except:
                        rospy.logerror("Package [" + pkg + "] does not exist")
                        pass
            elif isinstance(pddl_pkgs, basestring):
                try:
                    pkg_path = rospack.get_path(pddl_pkgs)
                    pddl_dirs.append(pkg_path)
                except:
                    rospy.logerror("Package [" + pddl_pkgs + "] does not exist")
                    pass
            else:
                rospy.logerror("[pddl_builder] Unable to process: " +
                               str(pddl_pkgs))

            rospy.loginfo("[pddl_builder] set pddl_dirs to:")
            rospy.loginfo(pddl_dirs)
        else:
            rospy.logwarn("[pddl_builder] set pddl_pkgs to all")
            pddl_dirs.append(os.environ['ROS_PACKAGE_PATH'].split(':')[0])

        type_files = self.get_pddl_files_in_ws(pddl_dirs, "types.pddl")
        predicate_files = self.get_pddl_files_in_ws(pddl_dirs,
                                                    "predicates.pddl")
        action_files = self.get_pddl_files_in_ws(pddl_dirs, "actions.pddl")
        function_files = self.get_pddl_files_in_ws(pddl_dirs, "functions.pddl")

        types_content = self.get_file_content(type_files)
        predicates_content = self.get_file_content(predicate_files)
        functions_content = self.get_file_content(function_files)
        actions_content = self.get_file_content(action_files)
        template_content = self.get_file_content([self.template])

        types_content = self.remove_duplicated(types_content)
        predicates_content = self.remove_duplicated(predicates_content)

        template_content = template_content.replace("$$TYPES$$", types_content)
        template_content = template_content.replace("$$PREDICATES$$",
                                                    predicates_content)
        template_content = template_content.replace("$$FUNCTIONS$$",
                                                    functions_content)
        template_content = template_content.replace("$$ACTIONS$$",
                                                    actions_content)
        template_content = template_content.replace("$$DOMAIN$$", self.domain)

        try:
            with open(self.pddl_out, "w") as file:
                file.write(template_content)
        except IOError as e:
            rospy.logerr("I/O error %s returned the invalid value %s", e.errno,
                         e.strerror)
            return
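
get_pddl_files_in_ws() is called above but not shown in this snippet. One plausible implementation, given how it is used (a list of package directories plus a file name, returning matching paths), is sketched below; the original may differ.

import os


def get_pddl_files_in_ws(dirs, filename):
    # Walk each directory and collect every file whose name matches `filename`.
    matches = []
    for d in dirs:
        for root, _, files in os.walk(d):
            if filename in files:
                matches.append(os.path.join(root, filename))
    return matches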
コード例 #54
0
    def callback(self, unordered_joint_values):
        # We will start at the root
        link = self.robot.get_root()
        # This will hold our results, which we will publish at the end
        all_transforms = tf.msg.tfMessage()
        # Prepare structures to hold information
        link_transforms = []
        joints = []
        joint_values = []
        links = []
        # Append the root with a fake fixed joint
        links.append(link)
        (jn, nl) = self.robot.child_map[link][0]
        root_joint = self.robot.joint_map[jn]
        root_joint.type = 'fixed'
        joints.append(root_joint)
        joint_values.append(0)
        # Cycle through the joints and collect relevant information
        while True:
            # Find the joint connected at the end of this link, or its "child"
            # Make sure this link has a child
            if link not in self.robot.child_map:
                break
            # Make sure it has a single child (we don't deal with forks)
            if len(self.robot.child_map[link]) != 1:
                rospy.logerror("Forked kinematic chain!")
                break
            # Get the name of the child joint, as well as the link it connects to
            (joint_name, next_link) = self.robot.child_map[link][0]
            # Get the actual joint based on its name
            if joint_name not in self.robot.joint_map:
                rospy.logerror("Joint not found in map")
                break
            joint = self.robot.joint_map[joint_name]
            # Append link transform to list
            link_transforms.append(joint.origin)
            joints.append(joint)
            # Find the relevant joint axis and value and append to list
            if joint.type != 'fixed' and joint.name in unordered_joint_values.name:
                index = unordered_joint_values.name.index(joint.name)
                joint_values.append(unordered_joint_values.position[index])
            else:
                joint_values.append(0)
            # Append the next link to list
            links.append(next_link)
            # Move to the next link
            link = next_link

        # Append one final identity link transform which is not used
        link_transforms.append(link_transforms[-1])
        link_transforms[-1].rpy = (0, 0, 0)
        link_transforms[-1].xyz = (0, 0, 0)

        # Compute all transforms
        transforms = self.forward_kinematics(link_transforms, joints,
                                             joint_values)

        # Check if the right number of transforms has been returned
        if len(transforms) != len(links):
            print 'Transforms returned: ' + str(len(transforms))
            rospy.logerror("Incorrect number of transforms returned")
            return

        # Convert the result into a message and prepare it to be published
        for i in range(0, len(transforms)):
            transform_msg = convert_to_message(transforms[i], links[i],
                                               "world_origin")
            all_transforms.transforms.append(transform_msg)

        # Publish all the transforms
        self.pub_tf.publish(all_transforms)
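
forward_kinematics() itself is not part of this snippet. What it is expected to compute is the usual chaining of homogeneous transforms: each joint contributes its fixed origin offset plus its own motion (a rotation about its axis for revolute joints, a translation for prismatic ones). A hedged sketch under those assumptions, ignoring the exact indexing conventions of the original, follows.

import numpy as np
import tf.transformations as tft


def forward_kinematics_sketch(link_transforms, joints, joint_values):
    # Accumulate world-frame 4x4 transforms link by link.
    transforms = []
    T = tft.identity_matrix()
    for origin, joint, value in zip(link_transforms, joints, joint_values):
        # Fixed offset of this joint's frame relative to the previous link
        T_origin = np.dot(tft.translation_matrix(origin.xyz),
                          tft.euler_matrix(*origin.rpy))
        # Motion of the joint itself (identity for fixed joints)
        if joint.type in ('revolute', 'continuous'):
            T_joint = tft.rotation_matrix(value, joint.axis)
        elif joint.type == 'prismatic':
            T_joint = tft.translation_matrix(np.array(joint.axis) * value)
        else:
            T_joint = tft.identity_matrix()
        T = np.dot(T, np.dot(T_origin, T_joint))
        transforms.append(T)
    return transforms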
コード例 #55
0
    client = actionlib.SimpleActionClient('Deliver',
                                          ow_lander.msg.DeliverAction)

    client.wait_for_server()

    goal = ow_lander.msg.DeliverGoal()

    goal.delivery.x = args.x_start
    goal.delivery.y = args.y_start
    goal.delivery.z = args.z_start

    # Sends the goal to the action server.
    client.send_goal(goal)

    # Waits for the server to finish performing the action.
    client.wait_for_result()

    # Prints out the result of executing the action
    return client.get_result()


if __name__ == '__main__':
    try:
        # Initializes a rospy node so that the deliver client can
        # publish and subscribe over ROS.
        rospy.init_node('deliver_client_py')
        result = deliver_client()
        rospy.loginfo("Result: %s", result)
    except rospy.ROSInterruptException:
        rospy.logerror("program interrupted before completion")
コード例 #56
0
 def set_angles(self, angles):
     for j, v in angles.items():
         if j not in self.joints:
             rospy.logerror(j)
             continue
         self._pub_joints[j].publish(v)
コード例 #57
0
    def testTblUserWriteReadDeleteCheck(self):
        #Write
        serv_topic = rospy.get_param(
            'rapp_mysql_wrapper_user_write_data_topic')
        if (not serv_topic):
            rospy.logerror(
                "mysql_wrapper_user_write_data topic param not found")
        rospy.wait_for_service(serv_topic)
        db_service = rospy.ServiceProxy(serv_topic, writeDataSrv)
        req = writeDataSrv()
        req.req_cols = [
            "name", "email", "pwd", "activated", "language", "ontology_alias"
        ]
        #req.req_cols=["idsd","macddr", "model","owner", "timestamp"]
        entry1 = StringArrayMsg()
        entry1 = [
            "'rappMysqlTestTemp1'", "'*****@*****.**'",
            "'rappMysqlTestTemp1TestPass'", "'Y'", "'el'", "NULL"
        ]
        entry2 = StringArrayMsg()
        entry2 = [
            "'rappMysqlTestTemp2'", "'*****@*****.**'",
            "'rappMysqlTestTemp2TestPass'", "'Y'", "'el'", "NULL"
        ]
        req.req_data = [StringArrayMsg(s=entry1), StringArrayMsg(s=entry2)]

        response = db_service(req.req_cols, req.req_data)
        self.assertEqual(response.trace[0], "Success")
        self.assertTrue(response.success.data)
        #Read what was written
        serv_topic = rospy.get_param(
            'rapp_mysql_wrapper_user_fetch_data_topic')
        if (not serv_topic):
            rospy.logerror(
                "mysql_wrapper_user_fetch_data topic param not found")
        rospy.wait_for_service(serv_topic)
        db_service = rospy.ServiceProxy(serv_topic, fetchDataSrv)
        req = fetchDataSrv()
        req.req_cols = ["name", "email"]
        entry1 = ["name", "rappMysqlTestTemp1"]
        req.where_data = [StringArrayMsg(s=entry1)]
        response = db_service(req.req_cols, req.where_data)
        self.assertEqual(response.trace[0], "Success")
        self.assertTrue(response.success.data)
        self.assertEqual(response.res_data[0].s[0], "rappMysqlTestTemp1")
        #self.assertEqual(response.res_data[1].s[0],"*****@*****.**")
        #Update written
        serv_topic = rospy.get_param(
            'rapp_mysql_wrapper_user_update_data_topic')
        if (not serv_topic):
            rospy.logerror(
                "mysql_wrapper_user_update_data topic param not found")
        rospy.wait_for_service(serv_topic)
        db_service = rospy.ServiceProxy(serv_topic, updateDataSrv)
        req = updateDataSrv()
        req.set_cols = ["name='rappMysqlTestTemp1'"]
        entry1 = ["name", "rappMysqlTestTemp2"]
        req.where_data = [StringArrayMsg(s=entry1)]
        response = db_service(req.set_cols, req.where_data)
        self.assertEqual(response.trace[0], "Success")
        self.assertTrue(response.success.data)
        #Read again
        serv_topic = rospy.get_param(
            'rapp_mysql_wrapper_user_fetch_data_topic')
        if (not serv_topic):
            rospy.logerror(
                "mysql_wrapper_user_read_data topic param not found")
        rospy.wait_for_service(serv_topic)
        db_service = rospy.ServiceProxy(serv_topic, fetchDataSrv)
        req = fetchDataSrv()
        req.req_cols = ["name", "email"]
        entry1 = ["name", "rappMysqlTestTemp1"]
        req.where_data = [StringArrayMsg(s=entry1)]
        response = db_service(req.req_cols, req.where_data)
        self.assertEqual(response.trace[0], "Success")
        self.assertTrue(response.success.data)
        self.assertEqual(response.res_data[0].s[0], "rappMysqlTestTemp1")
        self.assertEqual(response.res_data[1].s[0], "rappMysqlTestTemp1")
        #Delete updated
        serv_topic = rospy.get_param(
            'rapp_mysql_wrapper_user_delete_data_topic')
        if (not serv_topic):
            rospy.logerror(
                "mysql_wrapper_user_delete_data topic param not found")
        rospy.wait_for_service(serv_topic)
        db_service = rospy.ServiceProxy(serv_topic, deleteDataSrv)
        req = deleteDataSrv()
        entry1 = ["name", "rappMysqlTestTemp1"]
        req.where_data = [StringArrayMsg(s=entry1)]
        response = db_service(req.where_data)
        self.assertEqual(response.trace[0], "Success")
        self.assertTrue(response.success.data)
        #Check if it was deleted
        serv_topic = rospy.get_param(
            'rapp_mysql_wrapper_user_fetch_data_topic')
        if (not serv_topic):
            rospy.logerror(
                "mysql_wrapper_user_read_data topic param not found")
        rospy.wait_for_service(serv_topic)
        db_service = rospy.ServiceProxy(serv_topic, fetchDataSrv)
        req = fetchDataSrv()
        req.req_cols = ["name", "email"]
        entry1 = ["name", "rappMysqlTestTemp1"]
        req.where_data = [StringArrayMsg(s=entry1)]
        response = db_service(req.req_cols, req.where_data)
        self.assertEqual(response.trace[0], "Success")
        self.assertTrue(response.success.data)
        self.assertTrue((len(response.res_data) < 1))
コード例 #58
0
class NaoAudioInterface(ALModule, NaoqiNode):
    def __init__(self, moduleName):
        # ROS initialization
        NaoqiNode.__init__(self, Constants.NODE_NAME)

        # NAOQi initialization
        self.ip = ""
        self.port = 10602
        self.moduleName = moduleName
        self.init_almodule()

        #~ ROS initializations
        self.playFileSrv = rospy.Service("nao_audio/play_file", AudioPlayback,
                                         self.playFileSrv)
        self.masterVolumeSrv = rospy.Service("nao_audio/master_volume",
                                             AudioMasterVolume,
                                             self.handleAudioMasterVolumeSrv)
        self.enableRecordSrv = rospy.Service("nao_audio/record", AudioRecorder,
                                             self.handleRecorderSrv)

        self.audioSourceLocalizationPub = rospy.Publisher(
            "nao_audio/audio_source_localization", AudioSourceLocalization)

        self.subscribe()

        rospy.loginfo(Constants.NODE_NAME + " initialized")

    def init_almodule(self):
        # before we can instantiate an ALModule, an ALBroker has to be created
        rospy.loginfo("Connecting to NaoQi at %s:%d", self.pip, self.pport)
        try:
            self.broker = ALBroker("%sBroker" % self.moduleName, self.ip,
                                   self.port, self.pip, self.pport)
        except RuntimeError, e:
            print("Could not connect to NaoQi's main broker")
            exit(1)
        ALModule.__init__(self, self.moduleName)

        #~ Memory proxy registration
        self.memProxy = ALProxy("ALMemory", self.pip, self.pport)
        if self.memProxy is None:
            rospy.logerror("Could not get a proxy to ALMemory on %s:%d",
                           self.pip, self.pport)
            exit(1)
        #~ ALAudioProxy registration
        self.audioPlayerProxy = ALProxy("ALAudioPlayer", self.pip, self.pport)
        if self.audioPlayerProxy is None:
            rospy.logerror("Could not get a proxy to ALAudioPlayer on %s:%d",
                           self.pip, self.pport)
            exit(1)
        #~ ALAudioRecorder proxy registration
        self.audioRecorderProxy = ALProxy("ALAudioRecorder", self.pip,
                                          self.pport)
        if self.audioRecorderProxy is None:
            rospy.logerror("Could not get a proxy to ALAudioRecorder on %s:%d",
                           self.pip, self.pport)
            exit(1)
        #~ ALAudioDevice proxy registration
        self.audioDeviceProxy = ALProxy("ALAudioDevice", self.pip, self.pport)
        if self.audioDeviceProxy is None:
            rospy.logerror("Could not get a proxy to ALAudioDevice on %s:%d",
                           self.pip, self.pport)
            exit(1)
        #~ ALAudioSourceLocalization registration
        self.audioSourceLocalizationProxy = ALProxy(
            "ALAudioSourceLocalization", self.pip, self.pport)
        if self.audioSourceLocalizationProxy is None:
            rospy.logerror(
                "Could not get a proxy to ALAudioSourceLocalization on %s:%d",
                self.pip, self.pport)
            exit(1)
        #~ ALAudioSourceLocalization parameter trimming
        self.audioSourceLocalizationProxy.setParameter("EnergyComputation",
                                                       True)
        self.audioSourceLocalizationProxy.setParameter("Sensibility", 0.8)
コード例 #59
0
ファイル: behavior_manager.py プロジェクト: wklharry/hrl
 def execute(self):
     rospy.logerror("[behavior] execute must be overridden!")
コード例 #60
0
def getColumns(t, objnum, msg, imageFile = ""):
    t = t.to_sec()
    #this function gets the data that is necessary for the csv file - one row at a time from the current msg.
    msgType = str(type(msg))
    msgType = msgType[msgType.index('.')+1:]
    msgType = msgType[:msgType.index('\'')]

    if (msgType == '_sensor_msgs__LaserScan'):
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, \
                msg.angle_min, msg.angle_max, msg.angle_increment, msg.time_increment, msg.scan_time, msg.range_min, msg.range_max ]
        for i in range(len(msg.ranges)):
            columns.append(msg.ranges[i])

        if len(msg.intensities) >= 1:
            for i in range(len(msg.intensities)):
                columns.append(msg.intensities[i])

    elif (msgType == '_sensor_msgs__Imu'):
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, \
                msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w, \
                msg.angular_velocity.x, msg.angular_velocity.y, msg.angular_velocity.z, \
                msg.linear_acceleration.x, msg.linear_acceleration.y, msg.linear_acceleration.z]

    elif (msgType == '_sensor_msgs__Joy'):
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, \
                msg.axes[0], msg.axes[1], msg.axes[2], msg.axes[3], msg.axes[4], msg.axes[5],msg.axes[6],msg.axes[7], \
                msg.buttons[0], msg.buttons[1], msg.buttons[2], msg.buttons[3], msg.buttons[4],\
                msg.buttons[5], msg.buttons[6], msg.buttons[7], msg.buttons[8], msg.buttons[9],\
                msg.buttons[10]]

    elif (msgType == '_sensor_msgs__NavSatFix'):
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, \
                msg.latitude, msg.longitude, msg.altitude, msg.position_covariance]

    elif (msgType == '_gps_common__GPSFix'):
        columns =[t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, msg.status.status, \
                msg.latitude, msg.longitude, msg.altitude, msg.position_covariance, msg.position_covariance_type, \
                msg.track, msg.speed, msg.climb, msg.time, msg.pitch, msg.roll, msg.dip ]
    elif (msgType == '_sensor_msgs__Image'):
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, imageFile]

    elif (msgType == '_umrr_driver__radar_msg'):
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, \
                msg.sensorID, msg.numTargets, msg.mode, msg.submode, msg.status, \
                msg.targetRange, msg.targetAngle, msg.targetRadialSpeed, msg.targetSig2Threhold, msg.targetType ]
    elif (msgType == '_risc_msgs__Observed_angles'):
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, \
                msg.Obj[0].name, msg.Obj[0].landmarks[0].name, msg.Obj[0].landmarks[0].visible,msg.Obj[0].landmarks[0].azim,msg.Obj[0].landmarks[0].elev,msg.Obj[0].landmarks[1].name, msg.Obj[0].landmarks[1].visible,msg.Obj[0].landmarks[1].azim,msg.Obj[0].landmarks[1].elev,msg.Obj[0].landmarks[2].name, msg.Obj[0].landmarks[2].visible,msg.Obj[0].landmarks[2].azim,msg.Obj[0].landmarks[2].elev,msg.Obj[0].landmarks[3].name, msg.Obj[0].landmarks[3].visible,msg.Obj[0].landmarks[3].azim,msg.Obj[0].landmarks[3].elev]

    elif (msgType == '_tf2_msgs__TFMessage'):
        columns = [t, msg.transforms[0].header.seq, msg.transforms[0].header.stamp.secs, msg.transforms[0].header.stamp.nsecs, \
                msg.transforms[0].header.frame_id, msg.transforms[0].child_frame_id, msg.transforms[0].transform.translation.x, msg.transforms[0].transform.translation.y, msg.transforms[0].transform.translation.z, msg.transforms[0].transform.rotation.x,msg.transforms[0].transform.rotation.y,msg.transforms[0].transform.rotation.z,msg.transforms[0].transform.rotation.w]

    elif (msgType == '_ardrone_autonomy__Navdata'):
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs, msg.batteryPercent, msg.state, msg.magX, msg.magY, msg.magZ, msg.pressure, msg.temp, msg.wind_speed, msg.wind_angle, msg.wind_comp_angle, msg.rotX, msg.rotY, msg.rotZ, msg.altd, msg.vx, msg.vy, msg.vz, msg.ax, msg.ay, msg.az, msg.motor1, msg.motor2, msg.motor3, msg.motor4, msg.tags_count, msg.tags_type, msg.tags_xc, msg.tags_yc, msg.tags_width, msg.tags_height, msg.tags_orientation, msg.tags_distance, msg.tm]
 
    elif (msgType == '_std_msgs__Bool'):
        rospy.loginfo("recognizes Boolean")
        columns = [t, msg.data]

    elif (msgType == '_roscopter__Status'):
        rospy.loginfo("recognizes roscopter Status")
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs,  msg.battery_voltage, msg.battery_current,  msg.battery_remaining, msg.sensors_enabled]

    elif (msgType == '_geometry_msgs__TwistStamped'):
        rospy.loginfo("recognizes TwistStamped")
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs,  msg.twist.linear.x, msg.twist.linear.y, msg.twist.linear.z,msg.twist.angular.x,msg.twist.angular.y,msg.twist.angular.z]

    elif (msgType == '_risc_msgs__Controls'):
        rospy.loginfo("recognizes Controls")
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs,  msg.Obj[0].name, msg.Obj[0].phi, msg.Obj[0].theta,msg.Obj[0].psi,msg.Obj[0].T]

    elif (msgType == '_risc_msgs__Trajectories'):
        rospy.loginfo("recognizes trajectories")
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs,  msg.Obj[0].name, msg.Obj[0].x,  msg.Obj[0].y, msg.Obj[0].z, msg.Obj[0].psi, msg.Obj[0].xdot, msg.Obj[0].ydot, msg.Obj[0].zdot, msg.Obj[0].psidot,msg.Obj[0].xddot,msg.Obj[0].yddot, msg.Obj[0].zddot, msg.Obj[0].psiddot]
## two quads
    elif (msgType == '_risc_msgs__Cortex'):
        num = int(float(objnum)) - 1
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs,  msg.Obj[num].name, msg.Obj[num].visible,msg.Obj[num].x,  msg.Obj[num].y, msg.Obj[num].z, msg.Obj[num].u, msg.Obj[num].v, msg.Obj[num].w, msg.Obj[num].phi, msg.Obj[num].theta,msg.Obj[num].psi,msg.Obj[num].p, msg.Obj[num].q, msg.Obj[num].r]

# for one object three markers
#    elif (msgType == '_risc_msgs__Mocap_data'):
#        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs,  msg.Obj[0].name, msg.Obj[0].residual,msg.Obj[0].marker[0].x, msg.Obj[0].marker[0].y, msg.Obj[0].marker[0].z,msg.Obj[0].marker[1].x,msg.Obj[0].marker[1].y,msg.Obj[0].marker[1].z,msg.Obj[0].marker[2].x,msg.Obj[0].marker[2].y,msg.Obj[0].marker[2].z]
# for two objects first with 6 markers and the second with 4
    elif (msgType == '_risc_msgs__Mocap_data'):
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs,  msg.Obj[0].name, msg.Obj[0].residual,msg.Obj[0].marker[0].x, msg.Obj[0].marker[0].y, msg.Obj[0].marker[0].z,msg.Obj[0].marker[1].x,msg.Obj[0].marker[1].y,msg.Obj[0].marker[1].z,msg.Obj[0].marker[2].x,msg.Obj[0].marker[2].y,msg.Obj[0].marker[2].z,msg.Obj[0].marker[3].x,msg.Obj[0].marker[3].y,msg.Obj[0].marker[3].z,msg.Obj[0].marker[4].x,msg.Obj[0].marker[4].y,msg.Obj[0].marker[4].z,msg.Obj[0].marker[5].x,msg.Obj[0].marker[5].y,msg.Obj[0].marker[5].z,msg.Obj[1].name, msg.Obj[1].residual,msg.Obj[1].marker[0].x, msg.Obj[1].marker[0].y, msg.Obj[1].marker[0].z,msg.Obj[1].marker[1].x,msg.Obj[1].marker[1].y,msg.Obj[1].marker[1].z,msg.Obj[1].marker[2].x,msg.Obj[1].marker[2].y,msg.Obj[1].marker[2].z,msg.Obj[1].marker[3].x,msg.Obj[1].marker[3].y,msg.Obj[1].marker[3].z]

    elif (msgType == '_risc_msgs__Waypoints'):
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs,  msg.Obj[0].name, msg.Obj[0].x,  msg.Obj[0].y, msg.Obj[0].z,msg.Obj[0].heading]

    elif (msgType == '_risc_msgs__Landmarks'):
        columns = [t, msg.header.seq, msg.header.stamp.secs, msg.header.stamp.nsecs,  msg.Obj[0].name, msg.Obj[0].x,  msg.Obj[0].y, msg.Obj[0].z,msg.Obj[1].name, msg.Obj[1].x,  msg.Obj[1].y, msg.Obj[1].z,msg.Obj[2].name, msg.Obj[2].x,  msg.Obj[2].y, msg.Obj[2].z,msg.Obj[3].name, msg.Obj[3].x,  msg.Obj[3].y, msg.Obj[3].z]

    elif (msgType == '_geometry_msgs__Point32'):
        columns = [t, msg.x, msg.y, msg.z]

    elif (msgType == '_three_pi_control__motor_command'):
        columns = [t, msg.left, msg.right]
    elif (msgType == '_three_pi_control__data_log'):
        num = int(float(objnum)) - 1
        columns = [t, msg.distance_error[num],  msg.velocity_error[num], msg.path_error[num], msg.distance[num],  msg.velocity[num],]
    else:
        rospy.logerror("Unexpected error - AGH!")
        usage()
        sys.exit(2)

    return columns