def load_model(self, model_path, session_id):
    """Copy the model into a per-session directory and load an agent from it.

    A private copy of the model archive is made under a directory named
    after ``session_id`` (next to the original model) so concurrent
    "try now" sessions do not share a model file.

    Args:
        model_path: Filesystem path to the Rasa model archive.
        session_id: Unique session identifier; becomes the name of the
            temporary directory the model is copied into.

    Returns:
        Status dict: ``{"Status": "Success", "Message": "Agent Loaded"}``.
    """
    from shutil import copyfile

    endpoints_file = './database_files/try_now_endpoints.yml'
    logger.debug("Making Temporary Try now model Path ")
    # Use os.path instead of hand-rolled '/' splitting so the path logic
    # is robust on any separator convention.
    model_name = os.path.basename(model_path)
    model_home_path = os.path.join(os.path.dirname(model_path), session_id)
    # exist_ok avoids a FileExistsError crash when the same session
    # loads a model more than once.
    os.makedirs(model_home_path, exist_ok=True)
    try_now_model_path = os.path.join(model_home_path, model_name)
    copyfile(model_path, try_now_model_path)
    self.agent = create_agent(try_now_model_path, endpoints=endpoints_file)
    return {"Status": "Success", "Message": "Agent Loaded"}
def chat(
    model_path: Optional[Text] = None,
    endpoints: Optional[Text] = None,
    agent: Optional["Agent"] = None,
    interpreter: Optional[NaturalLanguageInterpreter] = None,
) -> None:
    """Chat to the bot within a Jupyter notebook.

    Args:
        model_path: Path to a combined Rasa model.
        endpoints: Path to a yaml with the action server if custom
            actions are defined.
        agent: Rasa Core agent (used if no Rasa model given).
        interpreter: Rasa NLU interpreter (used with Rasa Core agent if
            no Rasa model is given).
    """
    if model_path:
        from rasa.run import create_agent

        agent = create_agent(model_path, endpoints)
    elif agent is not None and interpreter is not None:
        # The interpreter is already a NaturalLanguageInterpreter, so it
        # can be attached to the agent directly; no wrapper object is
        # needed (the previous RasaNLUInterpreter shim was never used).
        agent.interpreter = interpreter
    else:
        print_error(
            "You either have to define a model path or an agent and an interpreter."
        )
        return

    print("Your bot is ready to talk! Type your messages here or send '/stop'.")
    loop = asyncio.get_event_loop()
    while True:
        message = input()
        if message == "/stop":
            break

        # handle_text is a coroutine; drive it on the notebook's loop.
        responses = loop.run_until_complete(agent.handle_text(message))
        for response in responses:
            _display_bot_response(response)
def chat(
    model_path: Text = None,
    agent: "Agent" = None,
    interpreter: NaturalLanguageInterpreter = None,
) -> None:
    """Chat to the bot within a Jupyter notebook.

    Args:
        model_path: Path to a Rasa Stack model.
        agent: Rasa Core agent (used if no Rasa Stack model given).
        interpreter: Rasa NLU interpreter (used with Rasa Core agent if
            no Rasa Stack model is given).
    """
    if model_path:
        from rasa.run import create_agent

        unpacked = model.get_model(model_path)
        agent = create_agent(unpacked)
    elif agent and interpreter:
        # The interpreter is already a NaturalLanguageInterpreter, so it
        # can be attached to the agent directly; no wrapper object is
        # needed (the previous RasaNLUInterpreter shim was never used).
        agent.interpreter = interpreter
    else:
        print_error(
            "You either have to define a model path or an agent and an interpreter."
        )
        # BUG FIX: without this return, execution fell through into the
        # chat loop with agent=None and crashed on agent.handle_text.
        return

    print(
        "Your bot is ready to talk! Type your messages here or send '/stop'.")
    loop = asyncio.get_event_loop()
    while True:
        message = input()
        if message == "/stop":
            break

        # handle_text is a coroutine; drive it on the notebook's loop.
        responses = loop.run_until_complete(agent.handle_text(message))
        for response in responses:
            _display_bot_response(response)
import speech_recognition as sr
import asyncio
import pprint as pretty_print
import typing
from typing import Any, Dict, Text, Optional

from rasa.cli.utils import print_success, print_error
from rasa.core.interpreter import NaturalLanguageInterpreter, RasaNLUInterpreter
import rasa.model as model
from rasa.run import create_agent

# Voice chat loop: capture speech from the microphone, transcribe it with
# Google's speech API, and feed the text to a Rasa agent until '/stop'.
model_path = 'model'
agent = create_agent(model_path)
loop = asyncio.get_event_loop()

# Hoisted out of the loop: one recognizer instance serves every utterance.
recognizer = sr.Recognizer()

while True:
    with sr.Microphone() as source:
        print('say something')
        audio = recognizer.listen(source)

    # recognize_google raises on unintelligible audio or network trouble;
    # handle both so a single bad utterance does not kill the session.
    try:
        message = recognizer.recognize_google(audio)
    except sr.UnknownValueError:
        print_error("Could not understand the audio, please try again.")
        continue
    except sr.RequestError as e:
        print_error("Speech recognition request failed: {}".format(e))
        continue

    print(message)
    if message == "/stop":
        break

    responses = loop.run_until_complete(agent.handle_text(message))
    for response in responses:
        print(str(response) + '\n')