def nlu(self):
    """Parse ``self.text`` with a language-specific Snips engine.

    Fallback chain: (1) engine for ``self.lang`` on the raw text,
    (2) after translating (``src=True`` — presumably translates toward the
    source language; TODO confirm), retry the same engine, (3) last resort:
    the English engine on ``self.translated``. The parsing result is stored
    in ``self.nlu_parsing``, annotated with the language, flattened slot
    values, and printed.
    """
    try:
        # First attempt: engine trained for the detected language, raw text.
        nlu_engine = SnipsNLUEngine.from_path(str(self.lang))
        self.nlu_parsing = nlu_engine.parse(self.text)
    except Exception as e:
        print(e)
        self.translate(src=True)
        try:
            # Second attempt: same engine again after translation.
            nlu_engine = SnipsNLUEngine.from_path(str(self.lang))
            self.nlu_parsing = nlu_engine.parse(self.text)
        except Exception as e:
            print(e)
            # Last resort: English engine on the translated text.
            nlu_engine = SnipsNLUEngine.from_path("en")
            self.nlu_parsing = nlu_engine.parse(self.translated)
    # Tag the result with the original language regardless of which engine won.
    self.nlu_parsing["lang"] = self.lang
    try:
        # Flatten each slot into a top-level key: result[slotName] = value.
        slots = self.nlu_parsing["slots"]
        for x in slots:
            self.nlu_parsing[x["slotName"]] = x["value"]["value"]
    except Exception as e:
        print(e)
    print(self.nlu_parsing)
def test_train(self):
    """train() must persist an engine that can be reloaded from disk."""
    # Given / When
    train(BEVERAGE_DATASET_PATH, str(self.tmp_file_path), config_path=None)

    # Then
    if not self.tmp_file_path.exists():
        self.fail("No trained engine generated")
    failure_msg = "Failed to create an engine from engine dict."
    with self.fail_if_exception(failure_msg):
        SnipsNLUEngine.from_path(self.tmp_file_path)
def test_train(self):
    """train() must persist an engine that can be reloaded from disk."""
    # Given / When
    train(self.beverage_dataset_path, str(self.tmp_file_path),
          config_path=None, verbose=False)

    # Then
    if not self.tmp_file_path.exists():
        self.fail("No trained engine generated")
    failure_msg = "Failed to create an engine from engine dict."
    with self.fail_if_exception(failure_msg):
        SnipsNLUEngine.from_path(self.tmp_file_path)
def EngineMode(self, mode):
    """
    Saving the engine to use the model for every question (Training Part)
    or use the model if it already exists (Testing Part).

    :param mode: "Train" or "Test" string
    :return: None; sets ``self.__engine`` (fitted or loaded) and
        ``self.__dataset``
    """
    if mode == "Train":
        load_resources('snips_nlu_en')
        self.__engine = SnipsNLUEngine(config=CONFIG_EN)
        with io.open("dataset.json") as f:
            self.__dataset = json.load(f)
        self.__engine.fit(self.__dataset)
        # Uncomment to save the model:
        # self.__engine.persist(r"Z:\FCIS-ASU\Semester 8\ChatbotModel")
    elif mode == "Test":
        with io.open("dataset.json") as f:
            self.__dataset = json.load(f)
        # FIX: raw string so the backslashes of the Windows path are not
        # parsed as (invalid) escape sequences; the value is unchanged
        # because \F, \S, \C are not recognized escapes, but the old form
        # raises a DeprecationWarning and breaks on future Python versions.
        self.__engine = SnipsNLUEngine.from_path(
            r"Z:\FCIS-ASU\Semester 8\ChatbotModel")
def nlu2(self):
    """Second-pass NLU: parse the translated text with a per-language engine.

    Engines are cached in the module-level ``snips_engines`` dict keyed by
    ``self.mainlang``. The parse result is stored in ``self.nlu_parsing``,
    slot values are flattened into top-level keys, and a neural-network
    check may override a low intent probability.
    """
    global snips_engines
    print("Second nlu")
    if self.mainlang in snips_engines:
        nlu_engine = snips_engines[self.mainlang]
    else:
        nlu_engine = SnipsNLUEngine.from_path("models/" + self.mainlang)
        # BUG FIX: the engine was previously cached under self.lang while
        # every lookup uses self.mainlang, so the cache never hit and the
        # model was reloaded from disk on every call.
        snips_engines[self.mainlang] = nlu_engine
    self.translate()
    self.nlu_parsing = nlu_engine.parse(self.translated)
    self.nlu_parsing["lang"] = self.lang
    try:
        # Flatten each slot into a top-level key: result[slotName] = value.
        for slot in self.nlu_parsing["slots"]:
            self.nlu_parsing[slot["slotName"]] = slot["value"]["value"]
    except Exception as e:
        print(e)
    # If Snips is not confident enough, ask the neural model to veto/boost.
    # NOTE(review): assumes nlu_parsing["intent"] is never None here — confirm.
    if self.probability >= float(self.nlu_parsing["intent"]["probability"]):
        neural = self.nn(str(self.lang), self.translated)
        if neural == False and neural is not None:
            self.nlu_parsing["intent"]["probability"] = float(0)
        elif neural == True and neural is not None:
            self.nlu_parsing["intent"]["probability"] = float(0.9)
    print(self.nlu_parsing)
def parse(training_path, query, verbose=False, intents_filter=None):
    """Load a trained NLU engine and play with its parsing API interactively"""
    import csv
    import logging
    from builtins import input, str
    from snips_nlu import SnipsNLUEngine
    from snips_nlu.cli.utils import set_nlu_logger

    # Verbosity mapping: 1 -> INFO, 2 or more -> DEBUG.
    if verbose == 1:
        set_nlu_logger(logging.INFO)
    elif verbose >= 2:
        set_nlu_logger(logging.DEBUG)
    if intents_filter:
        # use csv in order to properly handle commas and other special
        # characters in intent names
        intents_filter = next(csv.reader([intents_filter]))
    else:
        intents_filter = None
    engine = SnipsNLUEngine.from_path(training_path)
    if query:
        # One-shot mode: parse the provided query and exit.
        print_parsing_result(engine, query, intents_filter)
        return
    # Interactive REPL until the user types 'q'.
    while True:
        query = input("Enter a query (type 'q' to quit): ").strip()
        if not isinstance(query, str):
            # Python 2 compatibility: raw input may come back as bytes.
            query = query.decode("utf-8")
        if query == "q":
            break
        print_parsing_result(engine, query, intents_filter)
def parse(training_path, query, verbose=False):
    """Load a trained NLU engine and play with its parsing API interactively"""
    import logging
    from builtins import input, str
    from snips_nlu import SnipsNLUEngine
    from snips_nlu.cli.utils import set_nlu_logger

    # Map the verbosity level onto the NLU logger.
    if verbose == 1:
        set_nlu_logger(logging.INFO)
    elif verbose >= 2:
        set_nlu_logger(logging.DEBUG)

    engine = SnipsNLUEngine.from_path(training_path)

    # One-shot mode when a query was given on the command line.
    if query:
        print_parsing_result(engine, query)
        return

    # Otherwise loop as a small REPL until the user types 'q'.
    prompt = "Enter a query (type 'q' to quit): "
    while True:
        line = input(prompt).strip()
        if not isinstance(line, str):
            line = line.decode("utf-8")
        if line == "q":
            return
        print_parsing_result(engine, line)
def respond(self, request_dict):
    """
    Create a response to a user input through the CommandHandler
    :param request_dict: Dictionary of parameters associated with a user
        request, such as input_text, date, etc.
    :return : A string response from Voithos
    """
    try:
        # Reload the engine on every request so a retrained model is picked up.
        self.nlu_engine = SnipsNLUEngine.from_path(self.engine_path)
    # If the NLU engine is missing (eg retraining)
    except LoadingError:
        logging.exception('Failed to load NLU engine!')
        return self.error_msg
    cmd = self.cmd_handler.choose_command(request_dict)
    response = None
    cmd_name = None
    if not cmd:
        # No command matched: answer with a canned "unknown" phrase.
        response = random.choice(self.unknown_cmd_responses)
    else:
        cmd_name = cmd.name
        try:
            response = cmd.respond()
        except Exception:
            # A failing command must not crash the bot; fall through to the
            # generic error message below.
            logging.exception('Error generating response!')
    if not response:
        response = self.error_msg
    logging.info(
        f'Input "{request_dict["input_text"]}" generates response "{response}" from command "{cmd_name}"'
    )
    return response
def startup() -> tuple: """ This function returns the NLU model and the ML model after verifying its presence in the root directory It also creates a trainable dataset if it isnt present in the root directory Parameters Required: None Return data: tuple index 1: NLU Model index 2: ML Model """ # Initializing NLPU if os.path.isdir("nlumodel"): # If trained model exists, load it nluengine = SnipsNLUEngine.from_path("nlumodel") print("Loaded local nlumodel save found in directory") else: # If model doesnt exist, then create a new one nluengine = create_nlp_model() print("Trained and loaded new model") # Checking for training dataset if not os.path.isfile("dataset.csv"): # If dataset doesnt exist, create it create_dataset() print("Dataset Created and saved") else: # If dataset exists, proceed print("Dataset Found") if not os.path.isfile("MLModel.pickle"): mlmodel = create_ML_model() else: # Model exists, load into program with open("MLModel.pickle", "rb") as handle: mlmodel = pickle.load(handle) return nluengine, mlmodel
def maybe_load_engine(self):
    """Load Snips engine if not already loaded."""
    if self.engine:
        return  # already loaded

    path = self.engine_path
    if path and path.exists():
        _LOGGER.debug("Loading Snips engine from %s", path)
        self.engine = SnipsNLUEngine.from_path(path)
def _load_model(self, locale):
    """Resolve *locale* to its bundled engine directory and load the engine.

    Any failure is printed (with traceback) rather than raised.
    """
    try:
        resolved = NLU.get_locale(locale)
        engine_dir = NLU.ENGINES[resolved]
        base = Path(settings.BASE_DIR)
        engine_path = base.joinpath("nabd", engine_dir).as_posix()
        self.nlu_engine = SnipsNLUEngine.from_path(engine_path)
    except Exception:
        print(traceback.format_exc())
def debug_inference(engine_path):
    """Interactively parse user queries with the engine at *engine_path*."""
    engine = SnipsNLUEngine.from_path(engine_path)
    while True:
        raw = input("Enter a query (type 'q' to quit): ").strip()
        query = raw.decode("utf8") if isinstance(raw, bytes) else raw
        if query == "q":
            break
        result = engine.parse(query)
        print(json.dumps(result, indent=2))
def getCommand(sentence, language):
    """
    Parse a spoken robot command and extract its intent, robot name and
    storage slot.

    :param sentence: raw text to parse
    :param language: "de" selects the German model, anything else English
    :return: (intent, name, variable) strings; "None" when not recognized
    """
    # FIX: defaults so every return path is defined. The old code hit an
    # UnboundLocalError when a slot was missing, and it returned the
    # never-assigned `variable` while storing the parsed value in `slot`.
    intent = "None"
    name = "None"
    variable = "None"

    # The language model gets loaded.
    # For additional languages please alter/change here.
    if language == "de":
        nlu_engine = SnipsNLUEngine.from_path(
            "SnipsNLU/RobotControlGerman_TrainedModel")
    else:
        nlu_engine = SnipsNLUEngine.from_path(
            "SnipsNLU/RobotControlEnglish_TrainedModel")

    parsing = nlu_engine.parse(sentence)
    # parsing is a dict, parsing["slots"] a list of slot dicts.
    if parsing["intent"]["intentName"] is not None:
        intent = parsing["intent"]["intentName"]
    for x in parsing["slots"]:
        if x["slotName"] == "robo_name":
            name = x["value"]["value"]
            print(name)
    if intent != "PO":
        for x in parsing["slots"]:
            if x["slotName"] == "storage_slot":
                variable = x["value"]["value"]
                print(x["value"]["value"])
    else:
        # "PO" intent carries no storage slot.
        variable = "0"
    return intent, name, variable
def load_nlu_engine(self):
    """
    Try to load the NLU engine from the local drive.

    :return : Trained SnipsNLUEngine or none
    """
    try:
        return SnipsNLUEngine.from_path(self.engine_path)
    except LoadingError:
        # Missing or corrupt engine directory: log and signal with None.
        logging.exception('Failed to load NLU engine!')
        return None
def parse(training_path, query):
    """Load a trained NLU engine and play with its parsing API interactively"""
    engine = SnipsNLUEngine.from_path(training_path)

    # One-shot mode when a query was supplied.
    if query:
        print_parsing_result(engine, query)
        return

    # Interactive loop until the user types 'q'.
    while True:
        user_query = input("Enter a query (type 'q' to quit): ").strip()
        if user_query == "q":
            break
        print_parsing_result(engine, user_query)
def main():
    """Entry point: listen for the hotword 'james' and dispatch each
    activation to the NLU-backed command handler."""
    # TODO: plugin manager as static class
    plugin_manager = pm.PluginManager()
    # TODO: engine class as static class
    nlu_engine = SnipsNLUEngine.from_path("training/nlu_trained_engine")
    # Keyword spotting only (lm=False); very permissive threshold so the
    # hotword is rarely missed at the cost of false activations.
    speech = LiveSpeech(lm=False, keyphrase='james', kws_threshold=1e-20)
    # LiveSpeech yields once per detected keyphrase — this loop runs forever.
    for keyword in speech:
        print("activated")
        # TODO: remove param engine because it will be static
        command(nlu_engine)
def getCommand(sentence, language):
    """
    Parse a spoken robot command and extract its intent, robot name and
    storage slot.

    :param sentence: raw text to parse
    :param language: "de" selects the German model, anything else English
    :return: (intent, name, variable) strings; "None" when not recognized
    """
    # Default values due to runtime thingies -> Exception etc
    intent = "None"
    name = "None"
    variable = "None"
    # The language model gets loaded.
    # For additional languages please alter/change here.
    if language == "de":
        nlu_engine = SnipsNLUEngine.from_path(
            "SnipsNLU/RobotControlDeutsch_TrainedModel")
    else:
        nlu_engine = SnipsNLUEngine.from_path(
            "SnipsNLU/RobotControlEnglish_TrainedModel")
    parsing = nlu_engine.parse(sentence)
    # FIX: intentName is None when no intent matched; the previous
    # `intent.encode('utf-8')` comparison crashed with AttributeError in
    # that case, and encoding both sides of a str comparison is a no-op
    # anyway — compare the strings directly.
    if parsing["intent"]["intentName"] is not None:
        intent = parsing["intent"]["intentName"]
    # parsing is a dict, parsing["slots"] a list of slot dicts.
    for x in parsing["slots"]:
        if x["slotName"] == "robo_name":
            name = x["value"]["value"]
    if intent != "PO":
        for x in parsing["slots"]:
            if x["slotName"] == "storage_slot":
                variable = x["value"]["value"]
    return intent, name, variable
def parse():
    """Flask endpoint: run the trained Snips engine on the text posted in the
    'javascript_data' form field and return the parse result as a JSON string.
    """
    # FIX: dropped the unused local imports (io, os, load_resources,
    # CONFIG_EN) — only json and the engine class are actually needed here.
    import json
    from snips_nlu import SnipsNLUEngine

    text = request.form['javascript_data']
    # NOTE(review): the engine is reloaded from disk on every request, which
    # is slow; consider hoisting it to module scope once startup order allows.
    nlu_engine = SnipsNLUEngine.from_path("./nlu_engine")
    parsing = nlu_engine.parse(text)
    return (json.dumps(parsing))
def nlu2(self):
    """Second-pass NLU: translate the input, then parse it with the English
    engine and flatten slot values into the result."""
    print("Second nlu")
    engine = SnipsNLUEngine.from_path("en")
    self.translate()
    self.nlu_parsing = engine.parse(self.translated)
    self.nlu_parsing["lang"] = self.lang
    try:
        # result[slotName] = slot value, for convenient access by callers
        for slot in self.nlu_parsing["slots"]:
            self.nlu_parsing[slot["slotName"]] = slot["value"]["value"]
    except Exception as exc:
        print(exc)
    print(self.nlu_parsing)
def parse(training_path, query, verbose=False):
    """Load a trained NLU engine and play with its parsing API interactively"""
    if verbose:
        set_nlu_logger(logging.DEBUG)

    engine = SnipsNLUEngine.from_path(training_path)

    # One-shot mode: parse the supplied query and stop.
    if query:
        print_parsing_result(engine, query)
        return

    # Interactive mode: keep prompting until the user types 'q'.
    while True:
        line = input("Enter a query (type 'q' to quit): ").strip()
        if line == "q":
            return
        print_parsing_result(engine, line)
def __init__(self):
    """Load English language resources and the pre-trained NLU engine
    persisted in the 'nlu_engine' directory."""
    # load language resources
    load_resources(u"en")
    # FIX: the SnipsNLUEngine(config=CONFIG_EN) instance previously built
    # here was immediately discarded by the from_path() load below, so the
    # throwaway construction is removed.
    # To (re)train instead of loading: fit SnipsNLUEngine(config=CONFIG_EN)
    # on dataset.json and persist it to 'nlu_engine'.
    self.engine = SnipsNLUEngine.from_path('nlu_engine')
    print('snips engine ready')
def test_engine_with_keyword_slot_filler_should_be_serializable(self):
    """An engine using the keyword slot filler must survive a
    persist/from_path round trip and still parse slots correctly."""
    # Given
    dataset_stream = io.StringIO("""
---
type: intent
name: SetLightColor
utterances:
- set the light to [color](blue) in the [room](kitchen)
- please make the lights [color](red) in the [room](bathroom)""")
    dataset = Dataset.from_yaml_files("en", [dataset_stream]).json
    intent = "SetLightColor"
    # Keyword slot filler with lowercasing so "Red" matches "red".
    slot_filler_config = {
        "unit_name": "keyword_slot_filler",
        "lowercase": True
    }
    parser_config = ProbabilisticIntentParserConfig(
        slot_filler_config=slot_filler_config)
    engine_config = NLUEngineConfig([parser_config])
    engine = SnipsNLUEngine(engine_config).fit(dataset, intent)
    engine.persist(self.tmp_file_path)
    text = "I want Red lights in the kitchen now"

    # When
    loaded_engine = SnipsNLUEngine.from_path(self.tmp_file_path)
    res = loaded_engine.parse(text)

    # Then
    # "Red" resolves to "red" thanks to lowercase=True; "kitchen" resolves
    # to itself, so no explicit resolved value is passed.
    expected_slots = [
        custom_slot(
            unresolved_slot(match_range={
                START: 7,
                END: 10
            }, value="Red", entity="color", slot_name="color"), "red"),
        custom_slot(
            unresolved_slot(match_range={
                START: 25,
                END: 32
            }, value="kitchen", entity="room", slot_name="room"))
    ]
    self.assertListEqual(expected_slots, res["slots"])
def get_nlu_engine(self):
    """Load the fitted NLU engine, restoring it from the COS bucket when it
    is missing locally.

    :return: the loaded engine (also stored in ``self.nlu_engine``), or ""
        when no engine exists locally nor in the bucket.
    """
    if not ENGINE_PATH_NEW.exists():
        print("No engine found locally...")
        print("Searching in bucket...")
        if not self.cos_context.file_exist_in_bucket(NEW_ENGINE_NAME_ZIP):
            print("There are no engine in bucket!")
            print("Engine must be fitted! Please run 'start training'")
            return ""
        print("Found saved engine in bucket..")
        self._load_from_bucket(ENGINE_PATH_ZIP, NEW_ENGINE_NAME_ZIP,
                               ENGINE_PATH_ZIP)
        print("Restored saved engine from bucket to '{0}'".format(
            ENGINE_PATH_ZIP))
        # FIX: propagate the result of the recursive retry; previously the
        # return value was discarded, so the method returned None even
        # after a successful restore from the bucket.
        return self.get_nlu_engine()
    loaded_engine = SnipsNLUEngine.from_path(ENGINE_PATH_NEW)
    self.nlu_engine = loaded_engine
    print("Success! Engine was fitted...")
    return self.nlu_engine
def app(train_directory="data/train", endpoint="nlu"):
    """Build a Flask app exposing the trained NLU engine.

    :param train_directory: directory holding the persisted Snips engine
    :param endpoint: URL path segment the parse route is mounted under
    :return: the configured Flask application
    """
    nlu_engine = SnipsNLUEngine.from_path(train_directory)
    # NOTE: the local name shadows this factory function; harmless here
    # since the factory is not called recursively.
    app = Flask(__name__)

    @app.route(f"/{endpoint}", methods=["GET", "POST"])
    def nlu():
        # GET: text/language from the query string; POST: from a JSON body.
        if request.method == "GET":
            text = request.args.get("text")
            lang = request.args.get("language", "en")
        elif request.method == "POST":
            if request.mimetype != 'application/json':
                return ('', 400)
            data = request.get_json()
            text = data.get("text")
            lang = data.get("language", "en")
        if text is None:
            return ('', 400)
        # NOTE(review): `lang` is read but never used — presumably intended
        # to select an engine per language; confirm and wire up or drop.
        # Returning the dict lets Flask serialize it as JSON.
        return nlu_engine.parse(text)

    return app
def rollback_nlu(self):
    """Roll the NLU engine back to the saved backup, restoring it from the
    COS bucket when no backup exists locally.

    :return: True when the rollback succeeded, False otherwise.
    """
    result = False
    if not ENGINE_PATH_NEW.exists():
        print("No backups exist locally..")
        if not self.cos_context.file_exist_in_bucket(OLD_ENGINE_NAME_ZIP):
            print("There are no backups in bucket..")
            print("Data rollback is not possible!")
        else:
            print("Found saved backups in bucket..")
            self._load_from_bucket(ENGINE_PATH_ZIP, OLD_ENGINE_NAME_ZIP,
                                   ENGINE_PATH_ZIP)
            print("Restored backup from bucket to '{0}'".format(
                ENGINE_PATH_ZIP))
            # FIX: keep the result of the recursive retry; previously the
            # return value was discarded, so a successful restore still
            # reported False to the caller.
            result = self.rollback_nlu()
    else:
        loaded_engine = SnipsNLUEngine.from_path(ENGINE_PATH_NEW)
        self.nlu_engine = loaded_engine
        # Remove new/old local nlu folders. Save backup as new engine
        # shutil.rmtree(ENGINE_PATH_NEW)
        # shutil.rmtree(ENGINE_PATH_OLD)
        result = self._persist_nlu()
        print("Engine rollback was successful")
    return result
def __init__(self):
    """Load the pre-trained Snips NLU engine persisted under ./nlu_engine."""
    engine_dir = "./nlu_engine"
    self.nlu_engine = SnipsNLUEngine.from_path(engine_dir)
def __init__(self, model_path):
    """Wrap a trained Snips NLU engine loaded from *model_path*."""
    loaded = SnipsNLUEngine.from_path(model_path)
    self.engine = loaded
def label_data_with_snips_nlu_model(lang='en', save="", out='practice'):
    """
    Label counterfactual training data
    :param lang: abbreviate language name of model
    :param save: path name where model is saved
    :return: csv file
    :rtype: file
    """
    # NOTE(review): `lang` is accepted but never used below — confirm
    # whether per-language models were intended.
    from snips_nlu import SnipsNLUEngine
    from snips_nlu.default_configs import CONFIG_EN
    from snips_nlu_metrics import compute_train_test_metrics, compute_cross_val_metrics
    import pickle
    import json
    model = source_result / "snips_semeval_2020_model_task1_{}".format(save)
    # Silently does nothing when the model directory is missing.
    if Path(model).exists():
        print("\n--> Loading Snips model...")
        nlu_engine = SnipsNLUEngine.from_path(model)
        if out == 'evaluate':
            print("--> [EVALUATION] Start labeling with Snips model...")
            pd_data = pandas.read_csv(test_task_1)
            pred = []
            for i, row in pd_data.iterrows():
                sentence = row['sentence']
                sent_id = row['sentenceID']
                # NOTE(review): leftover debug print — consider removing.
                print(i, sentence, "dffffffffffffffffffffffffffffffffff")
                # Restrict parsing to the two labels of the task.
                sent_parse = nlu_engine.parse(
                    sentence, intents=["Counterfactual", "NoCounterfactual"])
                if sent_parse['intent']['intentName'] == "Counterfactual":
                    pred.append((sent_id, 1))
                elif sent_parse['intent']['intentName'] == "NoCounterfactual":
                    pred.append((sent_id, 0))
                else:
                    # No confident intent: re-parse with top_n=3 and fall
                    # back to the second-ranked candidate.
                    sent_parse = nlu_engine.parse(
                        sentence,
                        top_n=3,
                        intents=["Counterfactual", "NoCounterfactual"])
                    if sent_parse[1]['intent'][
                            'intentName'] == "Counterfactual":
                        pred.append((sent_id, 1))
                        print('NULL [1]- Counterfactual ')
                    elif sent_parse[1]['intent'][
                            'intentName'] == "NoCounterfactual":
                        pred.append((sent_id, 0))
                        #pred.append((sent_id, 0))
                        print('NULL [1]- NoCounterfactual ')
                    print(sent_parse['intent']['intentName'])
            results = pandas.DataFrame(data=pred,
                                       columns=["sentenceID", "pred_label"])
            model_saved = source_result / \
                "snips_semeval_2020_evaluation_task1_final_{}.csv".format(save)
            results.to_csv(model_saved, index=False)
        elif out == 'practice':
            print("--> [PRACTICE] Start labeling with Snips model...")
            test_task_prac_1 = source_data / "task1-train.csv"
            pd_data = pandas.read_csv(test_task_prac_1)
            pred = []
            for i, row in pd_data.iterrows():
                sentence = row['sentence']
                sent_parse = nlu_engine.parse(sentence)
                if sent_parse['intent']['intentName'] == "Counterfactual":
                    pred.append((row['sentenceID'], 1))
                elif sent_parse['intent']['intentName'] == "NoCounterfactual":
                    pred.append((row['sentenceID'], 0))
                else:
                    # Unlabelled sentence: just report the raw intent name.
                    print(sent_parse['intent']['intentName'])
                #print(predict[0], row['gold_label'])
            results = pandas.DataFrame(data=pred,
                                       columns=["sentenceID", "pred_label"])
            model_saved = source_result / \
                "snips_semeval_2020_evaluation_pratice_task1_{}.csv".format(save)
            results.to_csv(model_saved, index=False)
import os
import json
import sys

from snips_nlu import SnipsNLUEngine

# Locate the project root by finding the 'NLU' component of the CWD.
# NOTE(review): os.path.split() returns a (head, tail) 2-tuple, so
# .index('NLU') only succeeds when 'NLU' is exactly the head or the tail —
# presumably get_current_working_directory.split(os.sep) was intended;
# confirm before relying on this from deeper subdirectories.
get_current_working_directory = os.getcwd()
splitted_current_working_directory = os.path.split(
    get_current_working_directory)
directory_index = splitted_current_working_directory.index('NLU')
root_directory_path = os.path.join(
    *splitted_current_working_directory[:directory_index + 1])

# Load the persisted engine from <root>/trained_model at import time.
engine = SnipsNLUEngine.from_path(
    os.path.join(root_directory_path, 'trained_model'))

if __name__ == "__main__":
    # Read a single line from stdin, parse it, and dump both the full
    # parse and the ranked intents as JSON.
    user_intent_text = input()
    parsing = engine.parse(user_intent_text)
    intents = engine.get_intents(user_intent_text)
    print(json.dumps(parsing, indent=2))
    print(json.dumps(intents, indent=2))
#!/usr/bin/env python from __future__ import unicode_literals, print_function import json import sys import struct from snips_nlu import SnipsNLUEngine nlu_engine = SnipsNLUEngine.from_path("trained_engine") def getMessage(): rawLength = sys.stdin.read(4) if len(rawLength) == 0: sys.exit(0) messageLength = struct.unpack('@I', rawLength)[0] message = sys.stdin.read(messageLength) return json.loads(message) # Encode a message for transmission, # given its content. def encodeMessage(messageContent): encodedContent = json.dumps(messageContent) encodedLength = struct.pack('@I', len(encodedContent)) return {'length': encodedLength, 'content': encodedContent} # Send an encoded message to stdout def sendMessage(encodedMessage): sys.stdout.write(encodedMessage['length']) sys.stdout.write(encodedMessage['content']) sys.stdout.flush()