def telegram_output(ctx: ContextWrapper):
    """
    If all telegram chats should be in the same context, sends the content of rawio:out
    to every currently active chat. Otherwise, output is only forwarded through the Pipe
    if this is a child process.
    """
    if ctx.conf(key=ALL_IN_ONE_CONTEXT_CONFIG_KEY):
        # TODO don't instantiate the updater every time
        token = ctx.conf(key=TOKEN_CONFIG_KEY)
        if not token:
            logger.error('telegram-token is not set. Shutting down telegramio')
            return Delete()
        updater: Updater = Updater(token)
        for chat_id in active_chats.keys():
            updater.bot.send_message(chat_id=chat_id, text=ctx["rawio:out:changed"])
    else:
        child_conn = ctx.conf(key=CHILD_CONN_CONFIG_KEY)
        if child_conn:
            # Child Process -> write to Pipe
            child_conn.send(ctx["rawio:out:changed"])
        else:
            # Master Process -> State not needed
            return Delete()
def console_input(ctx: ContextWrapper):

    @receptor(ctx_wrap=ctx, write="rawio:in")
    def write_console_input(ctx_input, value: str):
        ctx_input["rawio:in"] = value

    @receptor(ctx_wrap=ctx, write="interloc:all")
    def push_console_interloc(ctx: ContextWrapper, console_node: Node):
        if ctx.push(parentpath="interloc:all",
                    child=PropertyBase(name=DEFAULT_INTERLOC_ID, default_value=console_node)):
            logger.debug(f"Pushed {console_node} to interloc:all")

    @receptor(ctx_wrap=ctx, write="interloc:all")
    def pop_console_interloc(ctx: ContextWrapper):
        if ctx.pop(f"interloc:all:{DEFAULT_INTERLOC_ID}"):
            logger.debug(f"Popped interloc:all:{DEFAULT_INTERLOC_ID}")

    while not ctx.shutting_down():
        input_value = input("> ")
        write_console_input(input_value)

        console_interloc_exists = f"interloc:all:{DEFAULT_INTERLOC_ID}" in ctx.enum("interloc:all")

        # push Node if you got a greeting
        if input_value.strip() in get_phrase_list("greeting") and not console_interloc_exists:
            # set up scientio
            sess: Session = ravestate_ontology.get_session()
            onto: Ontology = ravestate_ontology.get_ontology()

            # create scientio Node of type Person
            query = Node(metatype=onto.get_type("Person"))
            query.set_name("x")
            console_node_list = sess.retrieve(query)
            if not console_node_list:
                console_node = sess.create(query)
                logger.info(f"Created new Node in scientio session: {console_node}")
            elif len(console_node_list) == 1:
                console_node = console_node_list[0]
            else:
                logger.error(f'Found multiple Persons with name {DEFAULT_INTERLOC_ID} in scientio session. '
                             f'Cannot push node to interloc:all!')
                continue

            # push interloc-Node
            push_console_interloc(console_node)

        # pop Node if you got a farewell
        elif input_value.strip() in get_phrase_list("farewells") and console_interloc_exists:
            pop_console_interloc()
def react_to_recognized_face(ctx: ContextWrapper):
    nonlocal face_names
    faces: RecognizedFaces = ctx[face_names.id()]
    rec_faces_dict: Dict = ctx[rec_faces.id()]

    phrases: List = ["Hey, aren't you, {}?!",
                     "How are you doing, {}?!",
                     "How are you, {}?!",
                     "What's up, {}?!",
                     "Nice to see you, {}?!",
                     "Looking great today, {}!",
                     "Hello, {}!",
                     "Hi, {}!",
                     "Greetings, {}!",
                     "Howdy, {}!",
                     "Hey, {}!",
                     "Greetings to {} over here!",
                     "Hi there, {}!",
                     "Grüß Gott, {}!"]

    best_name_and_confidence = "", 0
    for name_and_confidence in zip(faces.names, faces.confidence):
        logger.info(str(name_and_confidence[0]))
        if name_and_confidence[1] > best_name_and_confidence[1]:
            best_name_and_confidence = name_and_confidence

    if best_name_and_confidence[1] >= ctx.conf(key=FACE_CONFIDENCE_THRESHOLD):
        if best_name_and_confidence[0] not in rec_faces_dict.keys() or \
                datetime.timestamp(datetime.now()) - rec_faces_dict.get(best_name_and_confidence[0]) > 300:
            rec_faces_dict.update({best_name_and_confidence[0]: datetime.timestamp(datetime.now())})
            ctx[raw_out.id()] = phrases[randint(0, len(phrases) - 1)].format(best_name_and_confidence[0])
            ctx[rec_faces.id()] = rec_faces_dict
def push_telegram_interloc(ctx: ContextWrapper, telegram_node: Node, name: str):
    """
    Push the telegram_node into interloc:all:name
    """
    if ctx.push(parentpath="interloc:all",
                child=PropertyBase(name=name, default_value=telegram_node)):
        logger.debug(f"Pushed {telegram_node} to interloc:all")
def create_subscriber(ctx: ContextWrapper):

    face_names = Ros2SubProperty(
        "face_names",
        topic=ctx.conf(key=ROS2_FACE_TOPIC_CONFIG),
        msg_type=RecognizedFaces,
        always_signal_changed=False)

    rec_faces = PropertyBase(
        name="rec_faces",
        default_value={},
        always_signal_changed=False,
        allow_pop=True,
        allow_push=True)

    ctx.push(subscriber_parent.id(), face_names)
    ctx.push(subscriber_parent.id(), rec_faces)

    @state(read=(face_names.id(), rec_faces.id()), write=(rec_faces.id(), raw_out.id()))
    def react_to_recognized_face(ctx: ContextWrapper):
        nonlocal face_names
        faces: RecognizedFaces = ctx[face_names.id()]
        rec_faces_dict: Dict = ctx[rec_faces.id()]

        phrases: List = ["Hey, aren't you, {}?!",
                         "How are you doing, {}?!",
                         "How are you, {}?!",
                         "What's up, {}?!",
                         "Nice to see you, {}?!",
                         "Looking great today, {}!",
                         "Hello, {}!",
                         "Hi, {}!",
                         "Greetings, {}!",
                         "Howdy, {}!",
                         "Hey, {}!",
                         "Greetings to {} over here!",
                         "Hi there, {}!",
                         "Grüß Gott, {}!"]

        best_name_and_confidence = "", 0
        for name_and_confidence in zip(faces.names, faces.confidence):
            logger.info(str(name_and_confidence[0]))
            if name_and_confidence[1] > best_name_and_confidence[1]:
                best_name_and_confidence = name_and_confidence

        if best_name_and_confidence[1] >= ctx.conf(key=FACE_CONFIDENCE_THRESHOLD):
            if best_name_and_confidence[0] not in rec_faces_dict.keys() or \
                    datetime.timestamp(datetime.now()) - rec_faces_dict.get(best_name_and_confidence[0]) > 300:
                rec_faces_dict.update({best_name_and_confidence[0]: datetime.timestamp(datetime.now())})
                ctx[raw_out.id()] = phrases[randint(0, len(phrases) - 1)].format(best_name_and_confidence[0])
                ctx[rec_faces.id()] = rec_faces_dict

    mod.add(react_to_recognized_face)
    ctx.add_state(react_to_recognized_face)
def telegram_output(ctx: ContextWrapper):
    """
    Sends the content of rawio:out to every currently active chat
    """
    # TODO don't instantiate the updater every time
    token = ctx.conf(key=TOKEN_CONFIG_KEY)
    if not token:
        logger.error('telegram-token is not set. Shutting down telegramio')
        return Delete()
    updater: Updater = Updater(token)
    for chat_id in active_chats:
        updater.bot.send_message(chat_id=chat_id, text=ctx["rawio:out"])
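For orientation, a minimal registration sketch for the state above. It assumes the @state decorator accepts a plain property path for read, as seen in create_subscriber elsewhere in this section; the decorator line is illustrative wiring, not taken from the telegramio module itself.

@state(read="rawio:out")  # hypothetical wiring -- assumes @state(read=...) as used in create_subscriber
def telegram_output(ctx: ContextWrapper):
    ...  # body as defined above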
def store_face_and_name(ctx: ContextWrapper):
    tokens = ctx["nlp:tokens"]
    triples = ctx["nlp:triples"]
    if len(tokens) == 1:
        name = tokens[0]
    elif triples[0].get_object().text and triples[0].match_either_lemma(pred={"be"}):
        name = triples[0].get_object().text
    else:
        ctx["rawio:out"] = "Sorry, what was the name?"
        return Emit()
    ctx["rawio:out"] = f"Got it, I'm sure I'll remember {name} next time I see that face!"

    # Create memory entry
    sess: Session = ravestate_ontology.get_session()
    onto: Ontology = ravestate_ontology.get_ontology()
    query = Node(metatype=onto.get_type("Person"))
    query.set_properties({"name": name})
    node_list = sess.retrieve(query)
    if not node_list:
        node = sess.create(query)
        logger.info(f"Created new Node in scientio session: {node}")
    elif len(node_list) == 1:
        node = node_list[0]
    else:
        logger.error(f'Failed to create or retrieve Scientio Node for {name}!')
        return
    logger.info(f"Node ID for {name} in picture is {node.get_id()}!")

    # Store face vector with node id in redis
    try:
        redis_conn = redis.Redis(
            host=ctx.conf(key=REDIS_HOST_CONF),
            port=ctx.conf(key=REDIS_PORT_CONF),
            password=ctx.conf(key=REDIS_PASS_CONF))
        redis_conn.set(node.get_id(), ctx["sendpics:face_vec"])
    except redis.exceptions.ConnectionError as e:
        err_msg = "Looks like the redis connection is unavailable :-("
        logger.error(err_msg)
        ctx['rawio:out'] = err_msg
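For completeness, a hypothetical counterpart sketch that reads a stored face vector back out of redis by scientio node id. The connection parameters mirror store_face_and_name above; the helper name load_face_vec and its use are illustrative assumptions, not part of the module.

def load_face_vec(ctx: ContextWrapper, node_id):
    # Same connection config as in store_face_and_name above.
    redis_conn = redis.Redis(
        host=ctx.conf(key=REDIS_HOST_CONF),
        port=ctx.conf(key=REDIS_PORT_CONF),
        password=ctx.conf(key=REDIS_PASS_CONF))
    # redis returns None if no vector was ever stored under this node id
    return redis_conn.get(node_id)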
def _run_private(self):
    context_wrapper = ContextWrapper(
        ctx=self.ctx,
        state=self.state_to_activate,
        spike_parents=self.parent_spikes,
        spike_payloads=self.spike_payloads)

    # -- Run state function
    try:
        result = self.state_to_activate(context_wrapper, *self.args, **self.kwargs)
    except:
        logger.error(f"An exception occurred while activating {self}: {traceback.format_exc()}")
        result = Resign()

    # -- Process state function result
    if isinstance(result, Emit):
        if self.state_to_activate.signal:
            self.ctx.emit(
                self.state_to_activate.signal,
                parents=self.parent_spikes,
                wipe=result.wipe,
                boring=self.state_to_activate.boring)
        else:
            logger.error(f"Attempt to emit spike from state {self.name}, which does not specify a signal name!")
    elif isinstance(result, Wipe):
        if self.state_to_activate.signal:
            self.ctx.wipe(self.state_to_activate.signal)
        else:
            logger.error(f"Attempt to wipe spikes from state {self.name}, which does not specify a signal name!")
    elif isinstance(result, Delete):
        self.ctx.rm_state(st=self.state_to_activate)
    elif isinstance(result, Resign):
        pass

    # -- Remove references between causal groups <-> self. Note:
    #  Activations for receptors do not have constraints.
    if self.constraint:
        for signal in self.constraint.signals():
            if signal.spike:
                with signal.spike.causal_group() as cg:
                    cg.rejected(signal.spike, self, reason=2)
                signal.spike = None

    # -- Execute result-dependent causal-group action
    for cg in self._unique_consenting_causal_groups():
        with cg:
            if not result or result.causal_group_action == StateResult.CAUSAL_GROUP_CONSUME:
                cg.consumed(self.resources())
            elif result.causal_group_action == StateResult.CAUSAL_GROUP_RESIGN:
                cg.resigned(self)

    self.state_to_activate.activation_finished()
def _run_private(self):
    context_wrapper = ContextWrapper(ctx=self.ctx, st=self.state_to_activate, spike_parents=self.parent_spikes)

    # Run state function
    try:
        result = self.state_to_activate(context_wrapper, *self.args, **self.kwargs)
    except Exception as e:
        logger.error(f"An exception occurred while activating {self}: {e}")
        result = Resign()

    # Process state function result
    if isinstance(result, Emit):
        if self.state_to_activate.signal():
            self.ctx.emit(self.state_to_activate.signal(), parents=self.parent_spikes, wipe=result.wipe)
        else:
            logger.error(f"Attempt to emit spike from state {self.name}, which does not specify a signal name!")
    elif isinstance(result, Wipe):
        if self.state_to_activate.signal():
            self.ctx.wipe(self.state_to_activate.signal())
        else:
            logger.error(f"Attempt to wipe spikes from state {self.name}, which does not specify a signal name!")
    elif isinstance(result, Delete):
        self.ctx.rm_state(st=self.state_to_activate)
        if result.resign:
            for cg in self._unique_consenting_causal_groups():
                with cg:
                    cg.resigned(self)
            return
    elif isinstance(result, Resign):
        for cg in self._unique_consenting_causal_groups():
            with cg:
                cg.resigned(self)
        return

    # Let participating causal groups know about consumed properties
    for cg in self._unique_consenting_causal_groups():
        with cg:
            cg.consumed(self.resources())
def sync_ros_properties(ctx: ContextWrapper):
    """
    State that creates a ROS2-Node, registers all Ros2SubProperties and
    Ros2PubProperties in ROS2 and keeps them synced
    """
    # check for ROS2 availability
    if not ROS2_AVAILABLE:
        logger.error("ROS2 is not available, therefore all ROS2-Properties "
                     "will be just normal properties without connection to ROS2!")
        return Delete()

    # get config stuff
    node_name = ctx.conf(key=NODE_NAME_CONFIG_KEY)
    if not node_name:
        logger.error(f"{NODE_NAME_CONFIG_KEY} is not set. Shutting down ravestate_ros2")
        return Delete()
    spin_frequency = ctx.conf(key=SPIN_FREQUENCY_CONFIG_KEY)
    if spin_frequency is None or spin_frequency < 0:
        logger.error(f"{SPIN_FREQUENCY_CONFIG_KEY} is not set or less than 0. Shutting down ravestate_ros2")
        return Delete()
    if spin_frequency == 0:
        spin_sleep_time = 0
    else:
        spin_sleep_time = 1 / spin_frequency

    # init ROS
    rclpy.init()
    node = rclpy.create_node(node_name)

    global global_prop_set
    # current_props: hash -> subscription/publisher
    current_props: Dict = dict()

    # ROS-Context Sync Loop
    while not ctx.shutting_down():
        # remove deleted props
        removed_props = current_props.keys() - global_prop_set
        for prop_hash in removed_props:
            item = current_props[prop_hash]
            if isinstance(item, rclpy.subscription.Subscription):
                node.destroy_subscription(item)
            elif isinstance(item, rclpy.publisher.Publisher):
                node.destroy_publisher(item)
            elif isinstance(item, rclpy.client.Client):
                node.destroy_client(item)
            current_props.pop(prop_hash)

        # add new props
        new_props = global_prop_set - current_props.keys()
        for prop in new_props:
            # register subscribers in ROS
            if isinstance(prop, Ros2SubProperty):
                # register in context
                @receptor(ctx_wrap=ctx, write=prop.id())
                def ros_to_ctx_callback(ctx, msg, prop_name: str):
                    ctx[prop_name] = msg

                prop.ros_to_ctx_callback = ros_to_ctx_callback
                prop.subscription = node.create_subscription(prop.msg_type, prop.topic, prop.ros_subscription_callback)
                current_props[prop.__hash__()] = prop.subscription
            # register publishers in ROS
            if isinstance(prop, Ros2PubProperty):
                prop.publisher = node.create_publisher(prop.msg_type, prop.topic)
                current_props[prop.__hash__()] = prop.publisher
            # register clients in ROS
            if isinstance(prop, Ros2CallProperty):
                prop.client = node.create_client(prop.service_type, prop.service_name)
                current_props[prop.__hash__()] = prop.client

            # replace prop with hash in global_props
            global_prop_set.remove(prop)
            global_prop_set.add(prop.__hash__())

        # spin once
        rclpy.spin_once(node, timeout_sec=0)
        time.sleep(spin_sleep_time)

    node.destroy_node()
    rclpy.shutdown()
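A minimal config sketch for the two values sync_ros_properties reads above. The key names are the constants referenced in the function; the literal values are assumptions that illustrate how spin_sleep_time is derived as 1 / spin_frequency.

ros2_config = {
    NODE_NAME_CONFIG_KEY: "ravestate_ros2_node",  # example node name (assumption)
    SPIN_FREQUENCY_CONFIG_KEY: 10,                # example value: spin 10x per second -> spin_sleep_time == 0.1
}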
def under_test_context_wrapper(context_mock, state_mock):
    return ContextWrapper(ctx=context_mock, st=state_mock)
def telegram_run(ctx: ContextWrapper):
    """
    Starts up the telegram bot and adds a handler to write incoming messages to rawio:in
    """

    @receptor(ctx_wrap=ctx, write="rawio:in")
    def text_receptor(ctx: ContextWrapper, message_text: str):
        """
        Writes the message_text to rawio:in
        """
        ctx["rawio:in"] = message_text

    @receptor(ctx_wrap=ctx, write="rawio:pic_in")
    def photo_receptor(ctx: ContextWrapper, photo_path):
        """
        Handles photo messages, write to rawio:pic_in
        """
        ctx["rawio:pic_in"] = photo_path

    @receptor(ctx_wrap=ctx, write="interloc:all")
    def push_telegram_interloc(ctx: ContextWrapper, telegram_node: Node, name: str):
        """
        Push the telegram_node into interloc:all:name
        """
        if ctx.push(parentpath="interloc:all",
                    child=PropertyBase(name=name, default_value=telegram_node)):
            logger.debug(f"Pushed {telegram_node} to interloc:all")

    def make_sure_effective_user_exists(update: Update):
        """
        Retrieves scientio Node of User if it exists, otherwise creates it in the scientio session
        Calls the push_telegram_interloc receptor to push the scientio node into interloc:all
        Adds the User to the set of active_users and the chat to the set of active_chats
        """
        active_chats[update.effective_chat.id] = (Timestamp(), None)
        if update.effective_user.id in active_users:
            active_users[update.effective_user.id].add(update.effective_chat.id)
        else:
            # set up scientio
            if ravestate_ontology.initialized.wait():
                sess: Session = ravestate_ontology.get_session()
                onto: Ontology = ravestate_ontology.get_ontology()

            # create scientio Node of type TelegramPerson
            query = Node(metatype=onto.get_type("TelegramPerson"))
            prop_dict = {'telegram_id': update.effective_user.id}
            if update.effective_user.username:
                prop_dict['name'] = update.effective_user.username
            if update.effective_user.full_name:
                prop_dict['full_name'] = update.effective_user.full_name
            query.set_properties(prop_dict)

            node_list = sess.retrieve(query)
            if not node_list:
                telegram_node = sess.create(query)
                logger.info(f"Created new Node in scientio session: {telegram_node}")
            elif len(node_list) == 1:
                telegram_node = node_list[0]
            else:
                logger.error(f'Found multiple TelegramPersons that matched query: {update.message.chat_id} '
                             f'in scientio session. Cannot push node to interloc:all!')
                return

            # push chat-Node
            push_telegram_interloc(telegram_node, update.effective_chat.id)
            active_users[update.effective_user.id] = {update.effective_chat.id}

    def handle_text(bot: Bot, update: Update):
        """
        Handle incoming text messages
        """
        make_sure_effective_user_exists(update)
        text_receptor(update.effective_message.text)

    def handle_photo(bot: Bot, update: Update):
        """
        Handle incoming photo messages.
        """
        make_sure_effective_user_exists(update)
        photo_index = 2  # Seems like a good size index. TODO: Make configurable
        while photo_index >= len(update.effective_message.photo):
            photo_index -= 1
            if photo_index < 0:
                logger.error("Telegram photo handler was called, but no photo received!")
                return
        file_descr = bot.get_file(update.effective_message.photo[photo_index].file_id)
        photo = requests.get(file_descr.file_path)
        file_path = mkstemp()[1]
        with open(file_path, 'wb') as file:
            file.write(photo.content)
        photo_receptor(file_path)

    def handle_input_multiprocess(bot: Bot, update: Update):
        """
        Handle incoming messages
        """
        if update.effective_chat.id not in active_chats:
            add_new_child_process(update.effective_chat.id)
        # write (bot, update) to Pipe
        active_chats[update.effective_chat.id][0].update()
        active_chats[update.effective_chat.id][1].send((bot, update))

    def add_new_child_process(chat_id):
        """
        Adds the chat of the incoming message to the set of active_chats
        Creates new Ravestate Context in new Process for the new chat and sets up a
        bidirectional Pipe for communication between Master and Child Processes
        """
        # start method has to be 'spawn'
        mp_context = mp.get_context('spawn')
        # Pipe to communicate between Master Process and all children
        parent_conn, child_conn = mp.Pipe()
        # create commandline args for child config file
        args = []
        child_config_paths_list = ctx.conf(key=CHILD_FILES_CONFIG_KEY)
        for child_config_path in child_config_paths_list:
            args += ['-f', child_config_path]
        # set up new Process and override child_conn with the Pipe-Connection
        p = mp_context.Process(
            target=create_and_run_context,
            args=(*args,),
            kwargs={'runtime_overrides': [(MODULE_NAME, CHILD_CONN_CONFIG_KEY, child_conn)]})
        p.start()
        active_chats[chat_id] = (Timestamp(), parent_conn)

    def error(bot: Bot, update: Update, error: TelegramError):
        """
        Log Errors caused by Updates.
        """
        logger.warning('Update "%s" caused error "%s"', update, error)

    def _manage_children(updater):
        """
        Receive messages from children via Pipe and then send them to corresponding Telegram Chat.
        Remove chats when they get older than the chat lifetime.
        :param updater: The Updater of the telegram-Bot
        """
        chat_lifetime = ctx.conf(key=CHAT_LIFETIME) * 60  # conversion from minutes to seconds
        while not ctx.shutting_down():
            removable_chats = set()
            removable_users = set()
            # wait for children to write to Pipe and then send message to chat
            tick_interval = 1. / ctx.conf(mod=Context.core_module_name, key=Context.tick_rate_config)
            time.sleep(tick_interval)
            for chat_id, (last_msg_timestamp, parent_pipe) in active_chats.items():
                if parent_pipe.poll():
                    msg = parent_pipe.recv()
                    if isinstance(msg, str):
                        updater.bot.send_message(chat_id=chat_id, text=msg)
                    else:
                        logger.error(f'Tried sending non-str object as telegram message: {str(msg)}')
                # remove chat from active_chats if inactive for too long
                if last_msg_timestamp.age() > chat_lifetime:
                    parent_pipe.close()
                    removable_chats.add(chat_id)
            for chat_id in removable_chats:
                active_chats.pop(chat_id)
                for user_id, chat_ids in active_users.items():
                    # remove chat from chats that the user is part of
                    chat_ids.discard(chat_id)
                    if len(chat_ids) == 0:
                        # user is no longer part of any active chats
                        removable_users.add(user_id)
            for user_id in removable_users:
                active_users.pop(user_id)

    def _bootstrap_telegram_master():
        """
        Handle TelegramIO as the Master Process.
        Start the bot, and handle incoming telegram messages.
        """
        token = ctx.conf(key=TOKEN_CONFIG_KEY)
        if not token:
            logger.error(f'{TOKEN_CONFIG_KEY} is not set. Shutting down telegramio')
            return Delete()
        child_config_paths_list = ctx.conf(key=CHILD_FILES_CONFIG_KEY)
        if not ctx.conf(key=ALL_IN_ONE_CONTEXT_CONFIG_KEY) and (
                not child_config_paths_list or
                not isinstance(child_config_paths_list, list) or
                not all(os.path.isfile(child_config_path) for child_config_path in child_config_paths_list)):
            logger.error(f'{CHILD_FILES_CONFIG_KEY} is not set (correctly). Shutting down telegramio')
            return Delete()

        updater: Updater = Updater(token)
        # Get the dispatcher to register handlers
        dispatcher: Dispatcher = updater.dispatcher
        if ctx.conf(key=ALL_IN_ONE_CONTEXT_CONFIG_KEY):
            # handle noncommand-messages with the matching handler
            dispatcher.add_handler(MessageHandler(Filters.text, handle_text))
            dispatcher.add_handler(MessageHandler(Filters.photo, handle_photo))
        else:
            dispatcher.add_handler(MessageHandler(Filters.text | Filters.photo, handle_input_multiprocess))
        # log all errors
        dispatcher.add_error_handler(error)
        # Start the Bot
        updater.start_polling()  # non blocking
        if not ctx.conf(key=ALL_IN_ONE_CONTEXT_CONFIG_KEY):
            _manage_children(updater)

    def _bootstrap_telegram_child():
        """
        Handle TelegramIO as a Child Process.
        Listen to Pipe and handle incoming texts and photos.
        """
        try:
            while not ctx.shutting_down():
                # receive Bot, Update for telegram chat
                bot, update = child_conn.recv()  # blocking
                if update.effective_message.photo:
                    handle_photo(bot, update)
                elif update.effective_message.text:
                    handle_text(bot, update)
                else:
                    logger.error(f"{MODULE_NAME} received an update it cannot handle.")
        except EOFError:
            # Pipe was closed -> Parent was killed or parent has closed the pipe
            logger.info("Pipe was closed, therefore the telegram-child will shut down.")
            ctx.shutdown()

    child_conn = ctx.conf(key=CHILD_CONN_CONFIG_KEY)
    is_master_process = child_conn is None
    if is_master_process:
        _bootstrap_telegram_master()
    else:
        _bootstrap_telegram_child()
def push_console_interloc(ctx: ContextWrapper, console_node: Node):
    if ctx.push(parentpath="interloc:all",
                child=PropertyBase(name=DEFAULT_INTERLOC_ID, default_value=console_node)):
        logger.debug(f"Pushed {console_node} to interloc:all")
def pop_interloc(ctx: ContextWrapper):
    if ctx.pop(f"interloc:all:{id}"):
        logger.debug(f"Popped interloc:all:{id}")
def push_interloc(ctx: ContextWrapper, interlocutor_node: Node):
    if ctx.push(parentpath="interloc:all",
                child=PropertyBase(name=id, default_value=interlocutor_node)):
        logger.debug(f"Pushed {interlocutor_node} to interloc:all")
def handle_single_interlocutor_input(ctx: ContextWrapper, input_value: str, id="anonymous_interlocutor") -> None:
    """
    Forwards input to `rawio:in` and manages creation/deletion of a singleton interlocutor.
    A new interlocutor node is pushed when the input is a greeting and there is no interlocutor present.
    The interlocutor is popped if the input is a farewell and there is an interlocutor present.

    * `ctx`: Context Wrapper of the calling state. Must have read permissions for `interloc:all`.

    * `input_value`: The string input value that should be written to `rawio:in`.

    * `id`: Name of the interlocutor context property, and initial name for the interlocutor's Neo4j node
      (until a proper name is set by persqa).
    """

    @receptor(ctx_wrap=ctx, write="rawio:in")
    def write_input(ctx_input, value: str):
        ctx_input["rawio:in"] = value

    @receptor(ctx_wrap=ctx, write="interloc:all")
    def push_interloc(ctx: ContextWrapper, interlocutor_node: Node):
        if ctx.push(parentpath="interloc:all",
                    child=PropertyBase(name=id, default_value=interlocutor_node)):
            logger.debug(f"Pushed {interlocutor_node} to interloc:all")

    @receptor(ctx_wrap=ctx, write="interloc:all")
    def pop_interloc(ctx: ContextWrapper):
        if ctx.pop(f"interloc:all:{id}"):
            logger.debug(f"Popped interloc:all:{id}")

    write_input(input_value)

    interloc_exists = f"interloc:all:{id}" in ctx.enum("interloc:all")

    # push Node if you got a greeting
    if input_value.strip() in get_phrase_list("greeting") and not interloc_exists:
        # set up scientio
        sess: Session = ravestate_ontology.get_session()
        onto: Ontology = ravestate_ontology.get_ontology()

        # create scientio Node of type Person
        query = Node(metatype=onto.get_type("Person"))
        query.set_name(id)
        interlocutor_node_list = sess.retrieve(query)
        if not interlocutor_node_list:
            interlocutor_node = sess.create(query)
            logger.info(f"Created new Node in scientio session: {interlocutor_node}")
        else:
            interlocutor_node = interlocutor_node_list[0]

        # push interloc-node
        push_interloc(interlocutor_node)

    # pop Node if you got a farewell
    elif input_value.strip() in get_phrase_list("farewells") and interloc_exists:
        pop_interloc()
def console_input(ctx: ContextWrapper):
    while not ctx.shutting_down():
        input_value = input("> ")
        handle_single_interlocutor_input(ctx, input_value)
def telegram_run(ctx: ContextWrapper):
    """
    Starts up the telegram bot and adds a handler to write incoming messages to rawio:in
    """

    @receptor(ctx_wrap=ctx, write="rawio:in")
    def telegram_callback(ctx: ContextWrapper, message_text: str):
        """
        Writes the message_text to rawio:in
        """
        ctx["rawio:in"] = message_text

    @receptor(ctx_wrap=ctx, write="interloc:all")
    def push_telegram_interloc(ctx: ContextWrapper, telegram_node: Node, name: str):
        """
        Push the telegram_node into interloc:all:name
        """
        if ctx.push(parentpath="interloc:all",
                    child=PropertyBase(name=name, default_value=telegram_node)):
            logger.debug(f"Pushed {telegram_node} to interloc:all")

    def handle_input(bot: Bot, update: Update):
        """
        Handler for incoming messages
        Adds the chat/user of the incoming message to the set of active_chats/active_users
        Calls the telegram_callback receptor to process the incoming message
        Retrieves scientio Node of User if it exists, otherwise creates it in the scientio session
        Calls the push_telegram_interloc receptor to push the scientio node into interloc:all
        """
        telegram_callback(update.effective_message.text)
        active_chats.add(update.effective_chat.id)

        if update.effective_user not in active_users:
            # set up scientio
            sess: Session = ravestate_ontology.get_session()
            onto: Ontology = ravestate_ontology.get_ontology()

            # create scientio Node of type TelegramPerson
            query = Node(metatype=onto.get_type("TelegramPerson"))
            prop_dict = {'telegram_id': update.effective_user.id}
            if update.effective_user.username:
                prop_dict['name'] = update.effective_user.username
            if update.effective_user.full_name:
                prop_dict['full_name'] = update.effective_user.full_name
            query.set_properties(prop_dict)

            node_list = sess.retrieve(query)
            if not node_list:
                telegram_node = sess.create(query)
                logger.info(f"Created new Node in scientio session: {telegram_node}")
            elif len(node_list) == 1:
                telegram_node = node_list[0]
            else:
                logger.error(f'Found multiple TelegramPersons that matched query: {update.message.chat_id} '
                             f'in scientio session. Cannot push node to interloc:all!')
                return

            # push chat-Node
            push_telegram_interloc(telegram_node, update.effective_chat.id)
            active_users.add(update.effective_user)

    def error(bot: Bot, update: Update, error: TelegramError):
        """
        Log Errors caused by Updates.
        """
        logger.warning('Update "%s" caused error "%s"', update, error)

    """Start the bot."""
    # Create the EventHandler and pass it your bot's token.
    token = ctx.conf(key=TOKEN_CONFIG_KEY)
    if not token:
        logger.error('telegram-token is not set. Shutting down telegramio')
        return Delete()
    updater: Updater = Updater(token)
    # Get the dispatcher to register handlers
    dp: Dispatcher = updater.dispatcher
    # handle noncommand-messages with handle_input
    dp.add_handler(MessageHandler(Filters.text, handle_input))
    # log all errors
    dp.add_error_handler(error)
    # Start the Bot
    updater.start_polling()
def pop_console_interloc(ctx: ContextWrapper):
    if ctx.pop(f"interloc:all:{DEFAULT_INTERLOC_ID}"):
        logger.debug(f"Popped interloc:all:{DEFAULT_INTERLOC_ID}")
def context_wrapper_fixture(context_with_property_fixture, state_fixture):
    return ContextWrapper(ctx=context_with_property_fixture, st=state_fixture)