def navigateToNode(self, node, threshold):
    ## Express the goal node (map frame) in the robot frame and let
    ## ALNavigation try to reach it first.
    g_mf = Pose(node.x, node.y, node.th)
    g_rf = g_mf.inFrame(self.pose)
    arrived = self.sNavigation.navigateTo(g_rf.x * self.k, g_rf.y * self.k)

    self.pose = OdomConverter.getRobotInMap()
    self.pose = Pose(self.pose[0] / self.k, self.pose[1] / self.k, self.pose[2])
    dist = numpy.linalg.norm(self.pose.toVector()[0:2] - g_mf.toVector()[0:2]) * self.k
    arrived = dist < threshold

    ## Fall back to ALMotion: rotate towards the goal, then move straight ahead,
    ## repeating until the goal is within the threshold, the action is stopped,
    ## or the timeout expires.
    start = time.time()
    while not arrived and not self.is_stopped:
        g_rf = g_mf.inFrame(self.pose)
        angle = math.atan2(g_rf.y * self.k, g_rf.x * self.k)  # radians
        x_dist = g_rf.x * self.k / math.cos(angle)
        self.sMotion.moveTo(0.0, 0.0, angle)
        arrived = self.sMotion.moveTo(x_dist, 0.0, 0.0, 15)

        self.pose = OdomConverter.getRobotInMap()
        self.pose = Pose(self.pose[0] / self.k, self.pose[1] / self.k, self.pose[2])
        dist = numpy.linalg.norm(self.pose.toVector()[0:2] - g_mf.toVector()[0:2]) * self.k
        arrived = dist < threshold

        if time.time() - start > 15 and not arrived:
            break

    return arrived, dist
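
## Illustrative sketch, not part of the action above: the map-to-robot-frame
## conversion that navigateToNode relies on, assuming Pose.inFrame applies a
## standard 2D rigid-body transform. The function name and signature here are
## hypothetical and given only to document the expected convention.
def pose_in_robot_frame(goal_x, goal_y, goal_th, robot_x, robot_y, robot_th):
    """Express a map-frame goal (x, y, theta) in the robot's local frame."""
    dx = goal_x - robot_x
    dy = goal_y - robot_y
    cos_t = math.cos(robot_th)
    sin_t = math.sin(robot_th)
    x_rf = cos_t * dx + sin_t * dy    # forward offset in the robot frame
    y_rf = -sin_t * dx + cos_t * dy   # lateral offset in the robot frame
    th_rf = math.atan2(math.sin(goal_th - robot_th), math.cos(goal_th - robot_th))
    return x_rf, y_rf, th_rf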
def assumeNodeOrientation(self, node_theta):
    ## The robot heading is returned in radians, the node heading is stored in
    ## degrees: bring both to [0, 2*pi) before computing the rotation.
    robot_theta = OdomConverter.getRobotInMap()[2]
    if robot_theta < 0.0:
        robot_theta = 2 * math.pi + robot_theta
    if node_theta < 0.0:
        node_theta = 360.0 + node_theta
    node_theta = math.radians(node_theta)

    ## Rotate in place by the shortest angle towards the node orientation.
    angle = float(node_theta - robot_theta)
    if abs(angle) > math.pi:
        angle = float(angle - 2 * math.pi * math.copysign(1.0, angle))
    self.sMotion.moveTo(0.0, 0.0, angle)
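
## Equivalent compact formulation of the shortest-rotation computation above
## (up to the boundary case at exactly +/-pi), shown for clarity only:
## atan2 of (sin, cos) wraps any angle difference into (-pi, pi].
def shortest_rotation(target_rad, current_rad):
    return math.atan2(math.sin(target_rad - current_rad),
                      math.cos(target_rad - current_rad))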
def askToBeHelped(self):
    ## Unload the current work and run the SetNode action, so that the robot's
    ## node in the map can be set again; then reload the graph and refresh the
    ## goal node and the estimated pose.
    self.file.unloadCurrentWork(draw=DRAW)
    setnode_apar = self.graph
    setnode_cpar = "%s %s %s %s %s" % (str(self.volume), str(self.speed),
                                       str(self.pitch), self.language,
                                       self.username)
    sn = SetNode(setnode_apar, setnode_cpar, self.session)
    sn.run()

    self.loadGraph(self.graph_file)
    self.final_goal = findNodeFromLabel(self.final_goal.getLabel())
    self.pose = OdomConverter.getRobotInMap()
    self.pose = Pose(self.pose[0] / self.k, self.pose[1] / self.k, self.pose[2])
def run(self):
    try:
        self.onCharger = self.sMemory.getData(MEMORY_CHARGER)
    except:
        self.onCharger = False

    ## Get destination node through dialogue
    if not self.isAvailable(self.goal_id):
        self.goal_full = self.sp.dialog(self.__class__.__name__,
                                        self.location_options,
                                        sentence="0",
                                        checkValidity=True,
                                        askForConfirmation=True,
                                        sayFinalReply=False,
                                        noisy=self.asr)
        self.goal_id = self.getIDFromAttribute(self.location_params, "full", self.goal_full)
        if self.goal_id == "charger":
            bye_sentences = [s.encode('utf-8') for s in
                             self.sp.script["Chitchat"]["goodbye"][self.language]]
            self.sp.say(bye_sentences[randint(0, len(bye_sentences) - 1)], speech.TAGS[1])
        else:
            self.sp.monolog(self.__class__.__name__, "with-keyword",
                            param={"$KEYWORD$": self.goal_full},
                            group="parameter-answer",
                            tag=speech.TAGS[1])
    else:
        self.goal_full = self.getAttributeFromID(self.location_params, self.goal_id, "full")
        if self.goal_id == "charger":
            bye_sentences = [s.encode('utf-8') for s in
                             self.sp.script["Chitchat"]["goodbye"][self.language]]
            self.sp.say(bye_sentences[randint(0, len(bye_sentences) - 1)], speech.TAGS[1])

    self.final_goal = findNodeFromLabel(self.goal_id)
    if self.final_goal is None:
        self.logger.error(ERROR[1])
        self.sp.monolog(self.__class__.__name__, "2", tag=speech.TAGS[1])
        raise Exception("Invalid goal")

    ## Initialize motion
    caressestools.setAutonomousAbilities(self.session, False, False, False, False, False)
    self.sRecharge.stopAll()
    self.sPosture.goToPosture("Stand", 0.5)
    self.sMotion.moveInit()

    self.pose = OdomConverter.getRobotInMap()
    self.pose = Pose(self.pose[0] / self.k, self.pose[1] / self.k, self.pose[2])

    ## Leave the docking station if on top of it
    if self.onCharger:
        off = caressestools.getOffTheDockingStation(self.session)
        if off == False:
            self.logger.error(ERROR[2])
            self.end()
            return
        else:
            self.onCharger = False
        self.pose = OdomConverter.getRobotInMap()
        self.pose = Pose(self.pose[0] / self.k, self.pose[1] / self.k, self.pose[2])

    self.logger.info("Detected pose: (%g, %g, %g)" % (round(self.pose.x, 3),
                                                      round(self.pose.y, 3),
                                                      round(math.degrees(self.pose.th), 3)))

    goal_reached = False
    closest = 0
    attempts = 0

    while not goal_reached and not self.is_stopped:
        ## First reach the closest node in the graph if the robot is too far from it
        reachable_nodes = Graph.nodes[:]
        try:
            reachable_nodes.remove(findNodeFromLabel(CHARGER))
        except:
            pass
        closest_nodes = sortNodesFromClosestToFurthest(self.pose, reachable_nodes)

        g_mf = Pose(closest_nodes[closest].x, closest_nodes[closest].y,
                    closest_nodes[closest].th)
        dist = numpy.linalg.norm(self.pose.toVector()[0:2] - g_mf.toVector()[0:2]) * self.k

        if dist > self.closest_reached_threshold:
            self.logger.info("Heading towards closest node in the graph: " +
                             closest_nodes[closest].getLabel())
            goal_reached, dist = self.navigateToNode(closest_nodes[closest],
                                                     self.closest_reached_threshold)
            if not goal_reached:
                self.logger.info("Node not reached.")
                ## Fall back to the next closest node, without going past the last one
                closest = min(closest + 1, len(closest_nodes) - 1)
                continue

        if closest_nodes[0].getLabel() == self.final_goal.getLabel():
            goal_reached = True

        ## Then navigate through the graph using the A* algorithm.
        ## path[0] is the node the robot already stands on, so enumerate from 1:
        ## this way index == len(path) - 1 identifies the final node of the path.
        path = self.computePath(closest_nodes[closest], self.final_goal)

        for index, path_node in enumerate(path[1:], 1):
            if self.is_stopped:
                break
            self.logger.info("Heading towards next node in the path: " + path_node.getLabel())

            if path_node.getLabel() == CHARGER and caressestools.Settings.docking_station == True:
                docked = caressestools.goToDockingStation(self.session)
                if not docked:
                    self.logger.info("Lost!")
                    self.askToBeHelped()
                    break
                else:
                    goal_reached = True
                    self.sMemory.insertData(MEMORY_FIXED_POSE,
                                            [path_node.x * self.k,
                                             path_node.y * self.k,
                                             math.radians(path_node.th)])
            else:
                if index == len(path) - 1:
                    ## Final node of the path: use the goal-specific threshold
                    goal_reached, dist = self.navigateToNode(path_node,
                                                             self.goal_reached_threshold)
                else:
                    goal_reached, dist = self.navigateToNode(path_node,
                                                             self.node_reached_threshold)

                if not goal_reached:
                    attempts += 1
                    self.logger.info("Node not reached.")
                    if attempts == self.enough_attempts:
                        self.sp.monolog(self.__class__.__name__, "0", tag=speech.TAGS[1])
                        self.askToBeHelped()
                        attempts = 0
                    break

                self.logger.info("Node reached.")
                closest = 0

    ## Finally assume the orientation associated with the goal node, unless the
    ## goal is the charger and a real docking station is in use.
    if not self.final_goal.getLabel() == CHARGER:
        self.assumeNodeOrientation(self.final_goal.th)
    elif caressestools.Settings.docking_station == False:
        self.assumeNodeOrientation(self.final_goal.th)

    self.end()
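
## Minimal sketch of the A*-style search that computePath is expected to run
## over the topological graph (the real implementation is not shown here).
## It assumes each node exposes x, y and an iterable of neighbouring nodes;
## the attribute name "neighbours" and the function name are hypothetical.
import heapq

def a_star_sketch(start, goal):
    def heuristic(n):
        return math.hypot(n.x - goal.x, n.y - goal.y)

    ## Each queue entry is (f = g + h, g, tie-breaker, node, path so far).
    open_set = [(heuristic(start), 0.0, id(start), start, [start])]
    visited = set()
    while open_set:
        f, g, _, node, path = heapq.heappop(open_set)
        if node is goal:
            return path
        if id(node) in visited:
            continue
        visited.add(id(node))
        for neighbour in node.neighbours:
            step = math.hypot(neighbour.x - node.x, neighbour.y - node.y)
            heapq.heappush(open_set, (g + step + heuristic(neighbour), g + step,
                                      id(neighbour), neighbour, path + [neighbour]))
    return []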
def run(self):
    ## Save the current autonomous abilities so they can be restored at the end
    self.aa = caressestools.getAutonomousAbilities(self.session)
    caressestools.setAutonomousAbilities(self.session, False, False, False, False, False)
    self.sPosture.goToPosture("Stand", 0.5)

    # if self.output_handler is not None:
    #     msg = "[approached-user:false]"
    #     self.output_handler.writeSupplyMessage("publish", "D6.1", msg)

    self.subscriber = self.sMemory.subscriber("SoundDetected")
    self.subscriber.signal.connect(self.onSoundDetected)
    self.sSoundDet.subscribe(self.__class__.__name__)

    ## Register events: 'TouchChanged'
    touch = self.sMemory.subscriber("TouchChanged")
    id_touch = touch.signal.connect(functools.partial(self.onTouch, "TouchChanged"))

    ## Use NAOqi keyword detection instead of Google's to avoid too many useless API calls
    self.subscribeToKeywords()

    ## Wait for any user sentence...
    while not self.word_detected and not self.is_touched and not self.is_stopped:
        if self.notify_user:
            self.waitToBeCharged()
            self.notify_user = False

    ## ... and react
    if not self.is_stopped:
        i = 3
        question_index = str(randint(i,
                                     len(self.sp.script[self.__class__.__name__][speech.OTHER]) - 1))
        sentence_index = str(randint(0, i - 1))
        question = self.sp.script[self.__class__.__name__]["other"][question_index][self.language]
        sentence = self.sp.script[self.__class__.__name__]["other"][sentence_index][self.language]

        if self.word_detected:
            self.sp.userSaid(self.user_input)
            time.sleep(2)
            full = "%s %s" % (question, sentence)
        else:
            full = sentence
        self.sp.say(full, speech.TAGS[1])

        ## Leave the charger node if the robot is on it
        node = OdomConverter.getCurrentNode()
        try:
            on_charger = self.sMemory.getData(MEMORY_CHARGER)
        except:
            on_charger = True

        if node == "charger" or on_charger == True:
            left = caressestools.leaveChargerNode(self.session)
            if left == False:
                self.logger.error("ALRecharge getStatus() ERROR #4")
                return
            else:
                self.onCharger = False

        ## Publish the follow-up goals through the output handler
        if self.output_handler is not None:
            if caressestools.Settings.interactionNode == "":
                msg = "[(:goal(?G1 (approached-user pepper-1) true)(?G2 move-closer-farther true)(?G3 accept-request true)) (:temporal (before ?G1 ?G2 [1500 inf])(before ?G2 ?G3 [1500 inf]))]"
            else:
                self.output_handler.writeSupplyMessage("publish", "D6.2",
                                                       "[(robot-at pepper-1):\"n/a\"]")
                time.sleep(0.5)
                msg = "[(:goal(?G1 (robot-at pepper-1) %s))]" % caressestools.Settings.interactionNode
            self.output_handler.writeSupplyMessage("publish", "D5.1", msg)

    ## Restore the previous autonomous abilities and unsubscribe from events
    caressestools.setAutonomousAbilities(self.session, self.aa[0], self.aa[1],
                                         self.aa[2], self.aa[3], self.aa[4])
    self.sSoundDet.unsubscribe(self.__class__.__name__)
    try:
        self.sSpeechReco.unsubscribe("CARESSES/word-detected")
    except:
        pass
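
## Sketch of the NAOqi keyword spotting that subscribeToKeywords is assumed to
## set up, based on the "CARESSES/word-detected" subscriber name released in
## the cleanup above. The vocabulary, callback and helper name are illustrative
## only; the action's real implementation may differ.
def subscribe_to_keywords_sketch(session, vocabulary, on_word_recognized):
    memory = session.service("ALMemory")
    asr = session.service("ALSpeechRecognition")
    asr.pause(True)
    asr.setVocabulary(vocabulary, False)      # exact words, no word spotting
    asr.pause(False)
    ## Recognized words are raised on the "WordRecognized" ALMemory event
    word_subscriber = memory.subscriber("WordRecognized")
    word_subscriber.signal.connect(on_word_recognized)
    asr.subscribe("CARESSES/word-detected")
    return word_subscriber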
def main(startingNode, group, asr, loglevel, useFaceReco):
    experimental = group.lower() == "experimental"
    mode = "Experimental" if experimental else "Control"
    asr = "noisy" if asr.lower() == "noisy" else "normal"

    fh.setLevel(logging.DEBUG)
    ch.setLevel(loglevels[loglevel])
    logger.addHandler(ch)
    logger.addHandler(fh)
    # qi.logging.setLevel(qi.logging.VERBOSE)

    print("-----------------------------------------------")
    logger.info("Starting CAHRIM in %s mode" % mode)
    print("-----------------------------------------------\n")

    ## Load IPs and Ports
    robotIP = Settings.robotIP
    robotPort = Settings.robotPort
    cahrimIP = Settings.cahrimIP
    cahrimPort = int(Settings.cahrimPort)

    ## Connect to Pepper
    session = connectToPepper(robotIP, robotPort)

    ## Wake Pepper up and start autonomous abilities
    startPepper(session, startingNode)

    ## Start socket communication with uAAL-CAHRIM
    connected = False
    while not connected:
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.connect((cahrimIP, cahrimPort))
            connected = True
            logger.info("Socket communication with uAAL-CAHRIM started.")
        except Exception as e:
            logger.warning(e)
            logger.info("Trying to reconnect...")
            time.sleep(2)

    provided_event = Event()
    output_handler = OutputMsgHandler(output_queue)
    input_handler = InputMsgHandler(session, output_handler, input_queue,
                                    provided_event, experimental, asr)

    ## Create threads' instances
    threads = []
    threads.append(MsgReceiver(sock, input_handler))
    threads.append(MsgSender(sock, output_queue))
    threads.append(StateObserver(session, output_handler, asr))
    # threads.append(BehaviourPatternEstimator(output_handler))
    # threads.append(DBObserver(output_handler))
    threads.append(OdomConverter(session, startingNode))
    threads.append(DetectUserDepth(session, output_handler, useFaceReco))
    # threads.append(ActivityAndLocationRecognition(output_handler))
    threads.append(EstimateUserEmotion(session, output_handler))

    ## Start all threads
    for t in threads:
        t.start()
        t_name = t.id if hasattr(t, "id") else t.__class__.__name__
        logger.info("%s started." % t_name)

    print("-----------------------------------------------\n"
          " ===> All threads started.\n"
          "-----------------------------------------------\n")

    showImg(session, TABLET_IMG_DEFAULT)

    StateObserver.starting_node = startingNode

    try:
        while True:
            if StateObserver.killed_cahrim:
                logger.info("CAHRIM interrupted through fallback solution.")
                break
    except KeyboardInterrupt:
        logger.info("CAHRIM interrupted by user. Shutting down all running threads... "
                    "Please wait a few seconds.")

    ## Stop all threads
    for t in reversed(threads):
        t.stop()
        time.sleep(1)

    unloadImg(session)
    stopPepper(session)
    sock.close()
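
## Minimal sketch of what connectToPepper is assumed to do: open a qi session
## towards the robot. The real helper's error handling and retries are not
## shown, and the function name here is hypothetical.
def connect_to_pepper_sketch(robot_ip, robot_port):
    import qi
    session = qi.Session()
    session.connect("tcp://%s:%s" % (robot_ip, robot_port))
    return session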