class NLU_unit(object):
    """Wrapper around the Rasa NLU interpreter for FIB-related user queries.

    Attributes:
        interpreter (rasa_core.interpreter.RasaNLUInterpreter): parses user
            messages into intents/entities; ``None`` until :meth:`load` runs.
    """

    def __init__(self):
        self.interpreter = None  # populated by load()

    def load(self, train=False):
        """Load the NLU model, optionally retraining it first.

        Parameters:
            train (bool): when True, retrain from ./Data/Dataset.json and
                persist the model before loading; when False, just load
                the previously persisted model.
        """
        if train:
            training_data = load_data('./Data/Dataset.json')
            print("Data Loaded")
            trainer = Trainer(RasaNLUConfig("./config/config_spacy.json"))
            print("NLU Trainer launched")
            trainer.train(training_data)
            print("NLU Training done")
            # persist() returns the storage directory; with
            # fixed_model_name='current' that is models/nlu/default/current,
            # which is exactly what we load below.
            trainer.persist('models/nlu', fixed_model_name='current')
        self.interpreter = RasaNLUInterpreter("./models/nlu/default/current")
        print("NLU loaded")

    def _parse(self, query):
        """Run the interpreter on *query* and return the raw parse dict."""
        return self.interpreter.parse(query)

    def get_intent(self, query):
        """Return the intent predicted by the interpreter for *query* (str)."""
        return self._parse(query)['intent']

    def get_entities(self, query):
        """Return the entities predicted by the interpreter for *query* (str)."""
        return self._parse(query)['entities']
def test_rasa_nlu(test, version):
    """Benchmark a persisted Rasa NLU model on a labelled test set.

    Parameters:
        test: DataFrame with 'utterance' and 'intent' columns; this function
            adds 'predict_intent' and 'match' columns in place.
        version: model folder name under models/rasa/benchmark/.

    Returns:
        Value counts of the 'match' column (1 = correct prediction).
    """
    from rasa_core.interpreter import RasaNLUInterpreter

    print('Loading model...')
    interpreter = RasaNLUInterpreter('models/rasa/benchmark/' + version)

    test['predict_intent'] = ''
    test['match'] = 0

    print('Testing...')
    total = len(test)
    processed = 0
    for idx, sample in test.iterrows():
        parsed = interpreter.parse(sample['utterance'])
        processed += 1
        printProgress(processed, total)
        predicted = parsed['intent']
        if predicted is not None:
            test.at[idx, 'predict_intent'] = predicted['name']
            if predicted['name'] == sample['intent']:
                test.at[idx, 'match'] = 1

    # Percentage of correct predictions.
    missed = test[test['match'] == 0]
    accuracy = 100 * (1 - len(missed) / len(test))
    print('Rasa NLU scores %0.2f%% with %d false predictions in total %d samples'
          % (accuracy, len(missed), len(test)))
    save_csv(missed, 'missed/' + version + '/rasa.csv')

    result = test['match'].value_counts()
    test['match'].astype(int).plot.hist()
    return result
def run_nlu(self):
    """Parse user input interactively and pretty-print each NLU result.

    Reads lines from stdin forever; never returns (interrupt to exit).
    """
    # Plain import instead of the original __import__('pprint') trick,
    # which hid the dependency and hurt readability.
    import pprint

    interpreter = RasaNLUInterpreter(
        os.path.join(self.base_path, "models/nlu/default/", self._model_name))
    while True:
        pprint.pprint(interpreter.parse(input("Type> ")))
def run_nlu():
    """Read messages from stdin and pretty-print the NLU parse.

    Python 2 script: uses raw_input() and decodes bytes to unicode before
    parsing. Loops until interrupted.
    """
    interpreter = RasaNLUInterpreter("models/nlu/default/current")
    while True:
        message = raw_input("Message for NLU\n:>")
        parsed = interpreter.parse(message.decode('utf8'))
        pprint.pprint(parsed, indent=2)
    return  # unreachable, kept from the original
def run(self, dispatcher, tracker, domain):
    """Custom action: suggest restaurants matching entities in the last message.

    Re-parses the user's latest message with a dedicated NLU model, builds a
    SQL filter from the extracted entities, queries the restaurant table and
    utters one matching restaurant name.

    Returns:
        []: no tracker events are emitted.
    """
    dispatcher.utter_message("looking for restaurants")
    user_message = tracker.latest_message["text"]  # renamed: was shadowing builtin input()
    interpreter = RasaNLUInterpreter('./models/nlu/default/chat')
    data = interpreter.parse(user_message)
    print(data)

    # entity name -> extracted value
    params = {ent["entity"]: ent["value"] for ent in data["entities"]}
    print(params)

    query = "select Restaurant_Name FROM restaurant"
    if params:
        # SECURITY: entity values come from user text and are interpolated
        # straight into SQL — this is injectable. Switch to parameterized
        # queries if the session API supports them.
        filters = [
            "{}='{}'".format("lower(" + key + ")", value)
            for key, value in params.items()
        ]
        print(filters)
        conditions = " and ".join(filters)
        print(conditions)
        query = " WHERE ".join([query, conditions])
    print(query)

    a = session.execute(query)
    result_set = a.fetchall()
    print(result_set)

    res = []
    for row in result_set:
        # NOTE: only the last row survives this loop; earlier matches are
        # discarded. Kept as-is to preserve existing behavior.
        res = row[0]
    print(res)

    responses = [
        "I'm sorry :( I couldn't find anything like that",
        "what about {}?",
        "{} is one option, but I know others too :)"
    ]
    print(len(result_set))
    # 0 rows -> apology, 1 row -> single suggestion, 2+ -> "others too".
    index = min(len(result_set), len(responses) - 1)
    print(responses[index].format(res))
    dispatcher.utter_message(responses[index].format(res))
    return []
class Bot():
    """Bot class to process user messages.

    Loads the Rasa NLU interpreter and dialogue agent once at construction
    and answers HTTP POST requests with the agent's reply (or a default
    message when intent confidence is too low).
    """

    def __init__(self):
        # Model locations come from module-level constants defined elsewhere
        # in the project.
        self.inter = RasaNLUInterpreter(RASA_NLU_MODEL_PATH +
                                        RASA_NLU_MODEL_NAME)
        self.agent = Agent.load(RASA_CORE_MODEL_PATH, interpreter=self.inter)
        # Conversation bookkeeping; populated elsewhere (not in this chunk).
        self.data = None
        self.channel = None
        self.output_channel = None
        self.sender = None

    def checkDefaultMessage(self, text_message):
        """Return True when the NLU is confident enough about the message.

        When intent confidence is at or below MINIMUM_INTENT_DEFAULT_CONFIDENCE,
        the message is logged to the DB and False is returned so the caller
        can answer with a default message instead.
        """
        from db_models.logging import Logging
        parsed_data = self.inter.parse(text_message)
        confidence = parsed_data['intent']['confidence']
        # NOTE(review): float(None) raises TypeError if the env var is unset —
        # presumably guaranteed by deployment config; confirm.
        if (confidence <= float(
                os.environ.get('MINIMUM_INTENT_DEFAULT_CONFIDENCE'))):
            Logging.create(text=parsed_data['text'],
                           intent=parsed_data['intent']['name'],
                           confidence=confidence)
            return False
        return True

    def on_post(self, req, resp):
        """Return the bot's response to the user query in the POST body.

        Expects a JSON body with a 'text' key.
        """
        try:
            data = req.bounded_stream.read()
            data = json.loads(data.decode('utf-8'))
            if (self.checkDefaultMessage(data['text'])):
                # Confident enough: let the dialogue agent answer; only the
                # first response message is returned to the client.
                result = self.agent.handle_message(data['text'])
                resp.body = str(result[0])
            else:
                resp.body = 'Default message'
        except Exception as e:
            # Broad catch keeps the endpoint alive; errors only go to stdout.
            print("Exception in bot- ", e)
# NOTE: the original fragment used logging, Agent and KerasPolicy without
# visible imports; they are added here (re-importing is harmless if they
# also exist further up the file).
import logging

from rasa_core.agent import Agent
from rasa_core.policies.memoization import MemoizationPolicy
from rasa_core.policies.keras_policy import KerasPolicy
from rasa_core.interpreter import RasaNLUInterpreter

logger = logging.getLogger(__name__)


def run_restaurant_online(input_channel, interpreter,
                          domain_file="restaurant_domain.yml",
                          training_data_file='data/stories.md'):
    """Interactively (online) train a restaurant dialogue agent.

    Parameters:
        input_channel: channel the online trainer reads user input from.
        interpreter: NLU interpreter used by the agent.
        domain_file (str): path to the dialogue domain definition.
        training_data_file (str): path to the training stories.

    Returns:
        The trained Agent.
    """
    agent = Agent(domain_file,
                  policies=[MemoizationPolicy(), KerasPolicy()],
                  interpreter=interpreter)
    agent.train_online(training_data_file,
                       input_channel=input_channel,
                       max_history=2,
                       batch_size=50,
                       epochs=200,
                       max_training_samples=300)
    return agent


if __name__ == '__main__':
    logging.basicConfig(level="INFO")
    nlu_interpreter = RasaNLUInterpreter(
        './models/foodiebot/nlu/default/current')
    print(nlu_interpreter.parse(u"Send an email on [email protected]"))
# Load saved agent, in case no training is needed. agent = Agent.load('./models/dialogue', interpreter=interpreter, action_endpoint=action_endpoint) # In[5]: # Generate tree of interaction of utterances in possible dialogs treeFile = "images/dialogtree.html" agent.visualize("data/stories.md", output_file=treeFile, max_history=2) IFrame(src=treeFile, width=1000, height=600) # In[6]: interpreter.parse(u"Pass the remote") # Parsed phrase # In[7]: # Interpret a sentence def parsetxt(sent, deptree=False): pp = interpreter.parse(sent) # Parsed phrase # Print entities present in the sentence print(pd.DataFrame(pp['entities'])) df = pd.DataFrame(pp['intent_ranking']).style.apply( lambda x: ['background: lightgreen' if x.name == 0 else '' for i in x], axis=1) if (deptree): doc = nlp(sent) displacy.render(doc,
# Quick manual check: load the persisted NLU model and dump one parse result.
from rasa_core.interpreter import RasaNLUInterpreter
import logging, io, json, warnings


def pprint(o):
    """Small helper to make dict dumps a bit prettier (indented JSON)."""
    print(json.dumps(o, indent=2))


interpreter = RasaNLUInterpreter("models/nlu/default/current")
pprint(interpreter.parse("search candidates with Ajax skills"))
def run_nlu(serve_forever=True):
    """Interactive NLU loop: read text from stdin and print its parse.

    The serve_forever flag is accepted for interface compatibility but is
    not consulted; the loop runs until interrupted.
    """
    interpreter = RasaNLUInterpreter("models/nlu/default/current")
    while True:
        user_text = input("Input text: ")
        parsed = interpreter.parse(user_text)
        print(parsed)
#!/usr/bin/env python
import pprint

from chatbot.config import CONF
from rasa_core.interpreter import RasaNLUInterpreter

# Load the classification model once, then parse stdin lines forever.
interpreter = RasaNLUInterpreter(CONF.get_value('classification-model-path'))
while True:
    text = input('Enter text: ')
    pprint.pprint(interpreter.parse(text))
# Ad-hoc manual test: run a few sample questions (in Chinese) through the
# 'longbi' NLU model and print the predicted intent and collected entities.
interpret = RasaNLUInterpreter('./models/nlu/longbi')
extracted_intents = None
extracted_values = None  # entities values
prediction = None
#a = input("input:")
# Sample user questions; more candidates are kept commented out below.
a_list = ['过度依赖父母怎么办', '总咬指甲会怎么样', '为什么爱说谎']
# ,'过度胆小怎么办','害怕分离怎么办','孩子经常夹腿怎么办','5岁了还尿床怎么办','多动不专心怎么办','不自主的挤眉弄眼怎么办','走路说话晚怎么办','不看人不理人怎么办','孩子易冲动、爱攻击别人怎么办','家长如何帮助孩子对应压力','家长如何帮助孩子理解和表达自己的感受'
#a_list=['孩子总是不理人是怎么样的']
#a=a_list[11]
#a='孩子总是压力大怎么办'
for a in a_list:
    extracted_entities = None
    extracted_intents = None
    prediction = interpret.parse(a)
    print(a, ':', prediction['intent'])
    if len(prediction.get("entities")) > 0:
        # extracted_entities = None
        # At least one entity: keep the first one's type and its
        # whitespace-stripped value.
        extracted_entities = prediction.get("entities")[0]['entity']
        #extracted_values = prediction.get("entities")[0]['value']
        extracted_values = ''.join(
            prediction.get("entities")[0]['value'].split())
    if len(prediction.get("entities")) > 1:
        print('sss')
        # Multiple entities: replace the single value with a
        # {entity_type: value} dict covering all of them.
        extracted_entities = dict()
        extracted_entities = \
            {prediction.get("entities")[entity]['entity']:
             prediction.get("entities")[entity]['value']
             for entity in range(len(prediction.get("entities")))}
#!/usr/bin/env python #-*- coding: utf-8 -*- from rasa_nlu.training_data import load_data from rasa_nlu.config import RasaNLUModelConfig from rasa_nlu.model import Trainer from rasa_nlu import config from rasa_nlu.model import Metadata, Interpreter from rasa_core.interpreter import RasaNLUInterpreter from rasa_core.agent import Agent interpreter = RasaNLUInterpreter("models/current/nlu") print(interpreter.parse("你好")) agent = Agent.load('models/dialogue', interpreter=interpreter) #print(agent.handle_message('你好呀')) print(agent.handle_message('你好呀')[0].get('text'))
class NLU_unit(object):
    """Multilingual NLU front-end for the FIB chatbot.

    Holds one Rasa interpreter per supported language and offers helpers to
    (re)train them and to query intents / entities.

    Attributes:
        interpreter_ca (rasa_core.interpreter.RasaNLUInterpreter): Catalan model.
        interpreter_es (rasa_core.interpreter.RasaNLUInterpreter): Spanish model.
        interpreter_en (rasa_core.interpreter.RasaNLUInterpreter): English model.
    """

    def __init__(self):
        self.interpreter_ca = None
        self.interpreter_es = None
        self.interpreter_en = None

    def _train_language(self, code):
        """Train and persist the NLU model for one language.

        Factors out the three near-identical per-language training blocks
        of the original implementation; console output is unchanged.

        Parameters:
            code (str): language code, one of 'ca', 'es', 'en'.
        """
        tag = code.upper()
        start = time()
        print(colored("INFO: Entrenando {} NLU model".format(tag), 'red'))
        print(colored("INFO: Cargando dataset {}".format(tag), 'red'))
        training_data = load_data('./Data/Dataset_{}.json'.format(code))
        trainer = Trainer(
            config.load("./config/config_spacy_{}.yml".format(code)))
        print(colored(
            "INFO: Generando características para {}".format(tag), 'red'))
        trainer.train(training_data, num_threads=3)
        # persist() returns the directory the model is stored in; with
        # fixed_model_name='current' that is models/nlu_<code>/default/current.
        trainer.persist('models/nlu_{}'.format(code),
                        fixed_model_name='current')
        print("Tiempo total para entrenar {}: {}".format(
            tag, colored(time() - start, 'green')))

    def load(self, train=False, train_list=None):
        """Load the three language models, optionally retraining them first.

        Parameters:
            train (bool): when True, retrain before loading.
            train_list (list[str] or None): languages to retrain
                ('ca', 'es', 'en'); None or empty retrains all of them.
        """
        if train:
            print(colored(
                "INFO: Entrenando los siguientes idiomas: {}".format(
                    train_list), 'red'))
            for code in ('ca', 'es', 'en'):
                if not train_list or code in train_list:
                    self._train_language(code)
            print(colored("INFO: Entrenamiento del NLU terminado", 'red'))
        # Persisted models live under models/nlu_<lang>/default/current.
        self.interpreter_ca = RasaNLUInterpreter(
            "./models/nlu_ca/default/current")
        self.interpreter_es = RasaNLUInterpreter(
            "./models/nlu_es/default/current")
        self.interpreter_en = RasaNLUInterpreter(
            "./models/nlu_en/default/current")

    def _interpreter_for(self, lang):
        """Return the interpreter for *lang*.

        'ca' -> Catalan, 'es' -> Spanish, anything else -> English
        (same fallback as the original if/elif/else chain).
        """
        if lang == 'ca':
            return self.interpreter_ca
        if lang == 'es':
            return self.interpreter_es
        return self.interpreter_en

    def get_intent(self, query, lang='es'):
        """Return the predicted intent for *query* (str) in *lang*."""
        return self._interpreter_for(lang).parse(query)['intent']

    def get_intent_ranking(self, query, lang='es'):
        """Return the full intent ranking for *query* (str) in *lang*."""
        return self._interpreter_for(lang).parse(query)['intent_ranking']

    def get_entities(self, query, lang='es'):
        """Return the predicted entities for *query* (str) in *lang*."""
        return self._interpreter_for(lang).parse(query)['entities']
# Ad-hoc exploration script for date/duration entity extraction.
interpreter = RasaNLUInterpreter("./models/nlu/default/latest_nlu")
agent = Agent.load("./models/dialogue",interpreter=interpreter)
agent.is_ready()
# Each assignment below overwrites the previous one; only the last value
# ("200 yrs") is actually parsed.
text="bla"
text="12 yrs"
text="12 years"
text="in 12 august 2015 "
text="since 1989"
text="since 319896778"
text="since 13 months"
text="since august 2018"
text="200 yrs"
io=interpreter.parse(text)
for i in range(len(io['entities'])):
    # NOTE(review): indexes entity 0 regardless of loop variable i —
    # presumably should be io['entities'][i]; left unchanged.
    if io['entities'][0]['entity']=='DATE':
        print(io['entities'][0]['value'])
agent.handle_text(text)
agent.predict_next(text)
digits=[i for i in re.findall('\d+', text )]
# NOTE(review): the line below is scratch pseudo-code, not valid Python;
# the source is unfinished/truncated here.
if date: x months x years x days x weaks
class JarvisProcessor():
    """Jarvis chatbot back-end.

    Loads the Rasa NLU interpreter and dialogue agent (with Mongo tracker
    store and action endpoint), offers (re)training helpers, and turns user
    petitions into structured responses.
    """

    # Intent groups used by formatearResultado to decide which slots to
    # attach to the response (replaces the original long `or`-chains).
    _INTENTS_LOCALIZACION = {
        'consulta_telefono', 'consulta_localizacion_empty',
        'consulta_telefono_empty', 'consulta_localizacion',
        'consulta_horario_close', 'consulta_horario_general',
        'consulta_horario_open',
    }
    _INTENTS_LIBRO = {
        'consulta_libros_kw', 'consulta_libro_kw', 'consulta_libros_titulo',
        'consulta_libro_autor', 'consulta_libros_titulo_autor',
        'consulta_libros_kw_autor', 'consulta_libro_kw_autor',
        'consulta_libros_autor', 'consulta_libro_titulo_autor',
        'consulta_libro_titulo',
    }
    _INTENTS_MAS = {
        'busca_mas', 'mas_info_primero', 'mas_info_segundo',
        'mas_info_tercero',
    }

    def __init__(self, log):
        """Load persisted NLU and dialogue models if their folders exist.

        Parameters:
            log: logger used for per-user processing messages.
        """
        self.logger = log
        directorioNLU = 'model/default/Jarvis'
        directorioDialogo = 'model/dialogue'
        if (os.path.isdir(directorioNLU)):
            self.interpreter = RasaNLUInterpreter(
                model_directory=directorioNLU)
        if (os.path.isdir(directorioDialogo)):
            # Endpoint and tracker-store settings come from config/endpoint.yml.
            with open("config/endpoint.yml", 'r') as stream:
                try:
                    config = yaml.safe_load(stream)
                except yaml.YAMLError as exc:
                    print(exc)
            action_endopoint = EndpointConfig(
                url=config["action_endpoint"]["url"])
            tracker_store = MongoTrackerStore(
                domain=Domain.load('model/dialogue/domain.yml'),
                host=config["tracker_store"]["url"],
                db=config["tracker_store"]["db"],
                username=config["tracker_store"]["username"],
                password=config["tracker_store"]["password"])
            self.agent = Agent.load(directorioDialogo,
                                    interpreter=self.interpreter,
                                    action_endpoint=action_endopoint,
                                    tracker_store=tracker_store)
        self._slots = {}  # last snapshot of the conversation slots

    def train_nlu(self):
        """Train the NLU model from data/nlu.md and persist it as 'Jarvis'.

        Returns:
            str: directory the trained model was persisted to.
        """
        builder = ComponentBuilder(use_cache=False)
        self.__trainer_data = load_data("data/nlu.md")
        self.__trainer = Trainer(config.load("config/config.yml"), builder)
        self.__trainer.train(self.__trainer_data)
        self.__model_directory = self.__trainer.persist(
            'model/', fixed_model_name='Jarvis')
        return self.__model_directory

    def train_dialogue(self,
                       domain_file='domain.yml',
                       stories_file='data/stories.md',
                       model_path='model/dialogue',
                       policy_config='config/config.yml'):
        """Train the dialogue model and return the resulting agent."""
        return train.train_dialogue_model(domain_file=domain_file,
                                          stories_file=stories_file,
                                          output_path=model_path,
                                          policy_config=policy_config)

    def train_all(self):
        """Train NLU then dialogue; return [nlu_model_dir, agent]."""
        model_directory = self.train_nlu()
        self.agent = self.train_dialogue()
        return [model_directory, self.agent]

    def train_interactive(self):
        """Train both models, then start interactive learning on the agent."""
        self.train_nlu()
        self.agent = self.train_dialogue()
        return interactive.run_interactive_learning(self.agent)

    def reiniciarSlots(self, senderid):
        """Restart the user's conversation, preserving the 'persona' slot."""
        tracker = self.agent.tracker_store.get_or_create_tracker(
            sender_id=senderid)
        temp = tracker.get_slot('persona')
        policy_config = 'config/config.yml'
        self.agent.execute_action(senderid, "action_restart", OutputChannel(),
                                  policy_config, 0.1)
        tracker = self.agent.tracker_store.get_or_create_tracker(
            sender_id=senderid)
        tracker.update(SlotSet('persona', temp))
        self.agent.tracker_store.save(tracker)
        self.logger.info('Usuario ' + senderid + ' reiniciado')

    def procesarPeticion(self, peticion, senderid='default'):
        """Process one user petition through NLU + dialogue agent.

        Retries once after restarting the conversation if anything fails;
        re-raises when the retry budget is exhausted.

        Returns:
            dict: {'nlu': parse result, 'text': last agent reply}.
        """
        respuesta = {}
        reintenta = True
        cuenta = 1  # remaining retries
        while reintenta:
            try:
                respuesta["nlu"] = self.interpreter.parse(peticion)
                mensaje = self.agent.handle_text(text_message=peticion,
                                                 sender_id=senderid)
                tracker = self.agent.tracker_store.get_or_create_tracker(
                    sender_id=senderid)
                self._slots = self.__rellenaSlots(tracker)
                if not mensaje:
                    raise Exception('Sin respuesta')
                # Only the last response message is kept (as before).
                for response in mensaje:
                    respuesta["text"] = response["text"]
                if not self._slots['autores'] and self._slots['libro']:
                    # BUGFIX: the original called .replace() and discarded the
                    # result (str.replace returns a new string), so "_autor"
                    # was never actually stripped; assign the result back.
                    respuesta["nlu"]["intent"]["name"] = \
                        respuesta["nlu"]["intent"]["name"].replace("_autor", "")
                self.logger.info('Usuario ' + senderid + ':\n' +
                                 str(respuesta))
                # Low-confidence intents are mapped to a fallback intent.
                if respuesta["nlu"]["intent"]["confidence"] < 0.15:
                    respuesta["nlu"]["intent"]["name"] = "no_entiendo"
                reintenta = False
            except Exception:
                # Restart the conversation and retry once.
                policy_config = 'config/config.yml'
                self.agent.execute_action(senderid, "action_restart",
                                          OutputChannel(), policy_config, 0.1)
                if cuenta <= 0:
                    raise Exception
                cuenta = cuenta - 1
        return respuesta

    def formatearResultado(self, peticion):
        """Shape a processed petition into {intent, entities, message}.

        Which slots are copied into 'entities' depends on the intent group.
        """
        resultado = {}
        resultado['intent'] = peticion['nlu']['intent']['name']
        resultado['entities'] = {}
        intent = resultado['intent']
        tmp = {}
        if intent in self._INTENTS_LOCALIZACION:
            if self._slots['localizacion'] is not None:
                tmp['localizacion'] = self._slots['localizacion']
        elif intent in self._INTENTS_LIBRO or intent in self._INTENTS_MAS:
            # Both groups carried identical slot payloads in the original;
            # merged into one branch.
            if self._slots['libro'] is not None:
                tmp['libro'] = self._slots['libro']
            if self._slots['autores'] is not None:
                tmp['autores'] = self._slots['autores']
            tmp['searchindex'] = self._slots['searchindex']
        resultado['message'] = peticion['text']
        resultado['entities'] = tmp
        return resultado

    def __rellenaSlots(self, tracker):
        """Snapshot the tracker's slots into a plain dict.

        (Local renamed from 'list', which shadowed the builtin.)
        """
        slots = {}
        for name in ('libro', 'articulos', 'autores', 'localizacion',
                     'musica', 'pelicula', 'persona', 'searchindex'):
            slots[name] = tracker.get_slot(name)
        return slots