def train(cfg_name, project_name):
    from rasa_nlu import training_data

    cfg = config.load(cfg_name)
    trainer = Trainer(cfg, component_builder)
    training_data = training_data.load_data(data)
    trainer.train(training_data)
    trainer.persist("test_projects", project_name=project_name)
def train(cfg_name, model_name):
    from rasa_nlu.train import create_persistor
    from rasa_nlu.converters import load_data

    config = RasaNLUConfig(cfg_name)
    trainer = Trainer(config)
    training_data = load_data(config['data'])
    trainer.train(training_data)
    persistor = create_persistor(config)
    trainer.persist("test_models", persistor, model_name=model_name)
def do_train(cfg,  # type: RasaNLUModelConfig
             data,  # type: Text
             path=None,  # type: Text
             project=None,  # type: Optional[Text]
             fixed_model_name=None,  # type: Optional[Text]
             storage=None,  # type: Text
             component_builder=None,  # type: Optional[ComponentBuilder]
             **kwargs  # type: Any
             ):
    # type: (...) -> Tuple[Trainer, Interpreter, Text]
    """Loads the trainer and the data and runs the training of the model."""

    # Ensure we are training a model that we can save in the end
    # WARN: there is still a race condition if a model with the same name is
    # trained in another subprocess
    trainer = Trainer(cfg, component_builder)
    persistor = create_persistor(storage)
    training_data = load_data(data, cfg.language)
    interpreter = trainer.train(training_data, **kwargs)

    if path:
        persisted_path = trainer.persist(path, persistor, project,
                                         fixed_model_name)
    else:
        persisted_path = None

    return trainer, interpreter, persisted_path
def test_load_and_persist_without_train(component_builder):
    _config = utilities.base_test_conf("all_components")
    trainer = Trainer(_config, component_builder)
    persistor = create_persistor(_config)
    persisted_path = trainer.persist(_config['path'], persistor,
                                     model_name=_config['name'])
    loaded = utilities.load_interpreter_for_model(_config, persisted_path,
                                                  component_builder)
    assert loaded.pipeline
    assert loaded.parse("hello", time=None) is not None
def test_train_with_empty_data(component_builder):
    _config = utilities.base_test_conf("all_components")
    trainer = Trainer(_config, component_builder)
    trainer.train(TrainingData())
    persistor = create_persistor(_config)
    persisted_path = trainer.persist(_config['path'], persistor,
                                     model_name=_config['name'])
    loaded = utilities.load_interpreter_for_model(_config, persisted_path,
                                                  component_builder)
    assert loaded.pipeline
    assert loaded.parse("hello") is not None
    assert loaded.parse("Hello today is Monday, again!") is not None
def train_nlu():
    from rasa_nlu.converters import load_data
    from rasa_nlu.config import RasaNLUConfig
    from rasa_nlu.model import Trainer

    training_data = load_data("data/mobile_nlu_data.json")
    trainer = Trainer(RasaNLUConfig("mobile_nlu_model_config.json"))
    trainer.train(training_data)
    model_directory = trainer.persist("models/", project_name="ivr",
                                      fixed_model_name="demo")

    return model_directory
def test_train_with_empty_data(language, pipeline, component_builder, tmpdir):
    _config = RasaNLUModelConfig({"pipeline": pipeline, "language": language})
    trainer = Trainer(_config, component_builder)
    trainer.train(TrainingData())
    persistor = create_persistor(_config)
    persisted_path = trainer.persist(tmpdir.strpath, persistor,
                                     project_name="my_project")
    loaded = Interpreter.load(persisted_path, component_builder)
    assert loaded.pipeline
    assert loaded.parse("hello") is not None
    assert loaded.parse("Hello today is Monday, again!") is not None
def train_nlu():
    from rasa_nlu.training_data import load_data
    from rasa_nlu import config
    from rasa_nlu.model import Trainer

    training_data = load_data('data/nlu_data/')
    trainer = Trainer(config.load("nlu_model_config.yml"))
    trainer.train(training_data)
    model_directory = trainer.persist('models/nlu', fixed_model_name="current")

    return model_directory
def do_train(config, component_builder=None):
    # type: (RasaNLUConfig, Optional[ComponentBuilder]) -> Tuple[Trainer, Interpreter, Text]
    """Loads the trainer and the data and runs the training of the specified model."""

    # Ensure we are training a model that we can save in the end
    # WARN: there is still a race condition if a model with the same name is
    # trained in another subprocess
    trainer = Trainer(config, component_builder)
    persistor = create_persistor(config)
    training_data = load_data(config['data'])
    interpreter = trainer.train(training_data)
    persisted_path = trainer.persist(config['path'], persistor,
                                     model_name=config['name'])
    return trainer, interpreter, persisted_path
def run_cv_evaluation(data, n_folds, nlu_config):
    from sklearn import metrics
    from sklearn.model_selection import StratifiedKFold
    from collections import defaultdict
    # type: (List[rasa_nlu.training_data.Message], int, RasaNLUConfig) -> Dict[Text, List[float]]
    """Stratified cross validation on data

    :param data: list of rasa_nlu.training_data.Message objects
    :param n_folds: integer, number of cv folds
    :param nlu_config: nlu config file
    :return: dictionary with key, list structure, where each entry in list
             corresponds to the relevant result for one fold
    """
    trainer = Trainer(nlu_config)
    results = defaultdict(list)

    y_true = [e.get("intent") for e in data]

    skf = StratifiedKFold(n_splits=n_folds, random_state=11, shuffle=True)
    counter = 1
    logger.info("Evaluation started")
    for train_index, test_index in skf.split(data, y_true):
        train = [data[i] for i in train_index]
        test = [data[i] for i in test_index]

        logger.debug("Fold: {}".format(counter))
        logger.debug("Training ...")
        trainer.train(TrainingData(training_examples=train))
        model_directory = trainer.persist("projects/")  # Returns the directory the model is stored in

        logger.debug("Evaluation ...")
        interpreter = Interpreter.load(model_directory, nlu_config)

        test_y = [e.get("intent") for e in test]

        preds = []
        for e in test:
            res = interpreter.parse(e.text)
            if res.get('intent'):
                preds.append(res['intent'].get('name'))
            else:
                preds.append(None)

        # compute fold metrics (appended per fold, matching the docstring)
        results["Accuracy"].append(metrics.accuracy_score(test_y, preds))
        results["F1-score"].append(metrics.f1_score(test_y, preds, average='weighted'))
        results["Precision"].append(metrics.precision_score(test_y, preds, average='weighted'))

        # increase fold counter
        counter += 1

    return dict(results)
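# A minimal, hypothetical driver for run_cv_evaluation above. The config path,
# data path and fold count are illustrative assumptions, not part of the
# original snippet; the call pattern just follows the docstring (a list of
# Message objects, a fold count and a RasaNLUConfig).
from rasa_nlu.config import RasaNLUConfig
from rasa_nlu.converters import load_data

cv_config = RasaNLUConfig("config_spacy.json")
cv_data = load_data("data/examples/rasa/demo-rasa.json")
cv_results = run_cv_evaluation(cv_data.intent_examples, 3, cv_config)
print(cv_results)  # per-fold lists for "Accuracy", "F1-score" and "Precision"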
def train_nlu_gao():
    from rasa_nlu_gao.training_data import load_data
    from rasa_nlu_gao import config
    from rasa_nlu_gao.model import Trainer

    training_data = load_data('data/rasa_dataset_training.json')
    trainer = Trainer(config.load("config_embedding_bilstm.yml"))
    trainer.train(training_data)
    model_directory = trainer.persist('models/nlu_gao/', fixed_model_name="current")

    return model_directory
def zipped_nlu_model():
    spacy_config_path = "sample_configs/config_pretrained_embeddings_spacy.yml"

    cfg = config.load(spacy_config_path)
    trainer = Trainer(cfg)
    td = training_data.load_data(DEFAULT_DATA_PATH)
    trainer.train(td)
    trainer.persist("test_models",
                    project_name="test_model_pretrained_embeddings")

    model_dir_list = os.listdir(TEST_MODEL_PATH)

    # directory name of latest model
    model_dir = sorted(model_dir_list)[-1]

    # path of that directory
    model_path = os.path.join(TEST_MODEL_PATH, model_dir)

    zip_path = zip_folder(model_path)

    return zip_path
def train_models(languages):
    """Generate your trained model."""
    utils.check_languages(languages)
    config = utils.load_config()
    for language in languages:
        click.echo(_("================== Processing {lang} ==================")
                   .format(lang=language))
        training_data = load_data(utils.get_training_data_path(language, config))
        trainer = Trainer(RasaNLUConfig(cmdline_args=config))
        click.echo(_("Training data for language {lang}.").format(lang=language))
        trainer.train(training_data)
        click.echo(_("Persisting trained data for {lang}.").format(lang=language))
        model_dir = trainer.persist(utils.get_model_base_dir(language))
        click.echo(_("Stored data for {lang} in {path}.").format(lang=language,
                                                                 path=model_dir))
    click.echo(_("================ Finished Training ================"))
def train(nlu_config: Union[Text, RasaNLUModelConfig],
          data: Text,
          path: Optional[Text] = None,
          project: Optional[Text] = None,
          fixed_model_name: Optional[Text] = None,
          storage: Optional[Text] = None,
          component_builder: Optional[ComponentBuilder] = None,
          training_data_endpoint: Optional[EndpointConfig] = None,
          **kwargs: Any
          ) -> Tuple[Trainer, Interpreter, Text]:
    """Loads the trainer and the data and runs the training of the model."""

    if isinstance(nlu_config, str):
        nlu_config = config.load(nlu_config)

    # Ensure we are training a model that we can save in the end
    # WARN: there is still a race condition if a model with the same name is
    # trained in another subprocess
    trainer = Trainer(nlu_config, component_builder)
    persistor = create_persistor(storage)
    if training_data_endpoint is not None:
        training_data = load_data_from_endpoint(training_data_endpoint,
                                                nlu_config.language)
    else:
        training_data = load_data(data, nlu_config.language)
    interpreter = trainer.train(training_data, **kwargs)

    if path:
        persisted_path = trainer.persist(path, persistor, project,
                                         fixed_model_name)
    else:
        persisted_path = None

    return trainer, interpreter, persisted_path
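# A minimal, hypothetical invocation of the train() helper above; the config,
# data and output paths below are assumptions chosen for illustration only.
from rasa_nlu import config

trainer, interpreter, persisted_path = train(
    config.load("sample_configs/config_pretrained_embeddings_spacy.yml"),
    data="data/examples/rasa/demo-rasa.json",
    path="models/nlu",
    project="current",
    fixed_model_name="demo")
print(persisted_path)  # directory the trained model was persisted to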
#!/usr/bin/python3

from rasa_nlu.training_data import load_data
from rasa_nlu.model import Trainer
from rasa_nlu import config
import sys
import os

bindir = os.path.dirname(os.path.realpath(__file__))

if len(sys.argv) != 3:
    sys.stderr.write(
        "Usage: rasatrain.py <rasa-json-training-file> <output-directory>\n")
    exit(1)

jsonfile = sys.argv[1]
outdir = sys.argv[2]

training_data = load_data(jsonfile)
trainer = Trainer(config.load(bindir + "/config_spacy.yml"))
trainer.train(training_data)
model_directory = trainer.persist('.', None, '.', outdir)
print(model_directory)
def trainer(data, configure, modelExp):
    training_data = load_data(data)
    trainer = Trainer(config.load(configure))
    trainer.train(training_data)
    model_directory = trainer.persist(modelExp, fixed_model_name="noam_basic")
def train_nlu():
    training_data = load_data(NLU_DATA)
    trainer = Trainer(RasaNLUConfig(CONFIG_PATH))
    trainer.train(training_data)
    model_directory = trainer.persist('../models/nlu', fixed_model_name="current")
import logging
import warnings

logging.basicConfig(level="INFO")
warnings.filterwarnings('ignore')

# loading the nlu training samples
training_data = load_data("nlu.md")

# trainer to educate our pipeline
trainer = Trainer(config.load("nlu_config.yml"))

# train the model!
interpreter = trainer.train(training_data)

# store it for future use
model_directory = trainer.persist("./models/nlu",
                                  project_name='legal_project',
                                  fixed_model_name="legal_model")

# this will catch predictions the model isn't very certain about
# there is a threshold for the NLU predictions as well as the action predictions
fallback = FallbackPolicy(fallback_action_name="utter_unclear",
                          core_threshold=0.2,
                          nlu_threshold=0.1)

agent = Agent('domain.yml',
              policies=[MemoizationPolicy(), KerasPolicy(epochs=200), fallback])

# loading our neatly defined training dialogues
training_data = agent.load_data('stories.md')

agent.train(training_data, validation_split=0.0)

agent.persist('models/dialogue')
from rasa_nlu.training_data import load_data
from rasa_nlu import config
from rasa_nlu.components import ComponentBuilder
from rasa_nlu.model import Trainer

# Enables some performance savings through caching
builder = ComponentBuilder(use_cache=True)

# Load the training data
training_data = load_data('data/nlu')

# Create the trainer tied to the configuration
trainer = Trainer(config.load("./nlu_config.yml"), builder)

# Train
trainer.train(training_data)

# Persist the trained model
model_directory = trainer.persist('./projects/default/', fixed_model_name="model")
def train_nlu(config_data):
    training_data = load_data(config_data["data"])
    trainer = Trainer(config.load('./config_tensorflow.json'), builder)
    trainer.train(training_data)
    model_directory = trainer.persist(config_data["path"],
                                      fixed_model_name='restaurantnlu')
from rasa_nlu.training_data import load_data
from rasa_nlu.config import RasaNLUModelConfig
from rasa_nlu.model import Trainer
from rasa_nlu import config

training_data = load_data('app/data/training_data.json')
trainer = Trainer(config.load('app/data/config_spacy.yml'))
trainer.train(training_data)
model_directory = trainer.persist('./model', fixed_model_name='SpringBoardKMS')
def train_nlu(data, config_file, model_dir):
    training_data = load_data(data)
    trainer = Trainer(config.load(config_file), builder)
    trainer.train(training_data)
    model_directory = trainer.persist(model_dir, fixed_model_name='restaurantnlu')
from rasa_nlu.training_data import load_data
from rasa_nlu.model import Trainer
from rasa_nlu import config
from rasa_nlu.model import Interpreter

training_data = load_data('./data/move.md')
trainer = Trainer(config.load('./config_spacy.json'))
trainer.train(training_data)
model_directory = trainer.persist('./model', project_name="commandnlu")

interpreter = Interpreter.load(model_directory)


def run(inp):
    guess = interpreter.parse(inp)
    return guess['intent']['name']
def train_nlu(train_data, config_data, model_dir):
    training_data = load_data(train_data)
    trainer = Trainer(config.load(config_data))
    trainer.train(training_data)
    model_directory = trainer.persist(model_dir, fixed_model_name="nlu")
def train_nlu():
    training_data = load_data(DATA)
    trainer = Trainer(config.load(CONFIG_NLU))
    trainer.train(training_data)
    model_directory = trainer.persist(MODEL_DIR, fixed_model_name='chat')
    return model_directory
from rasa_nlu.model import Metadata, Interpreter
from rasa_nlu.converters import load_data
from rasa_nlu.config import RasaNLUConfig
from rasa_nlu.model import Trainer
import csv
import sys

training_data = load_data('/root/rasa_nlu/classification/testData5.json')
trainer = Trainer(RasaNLUConfig("/root/rasa_nlu/classification/config_all.json"))
trainer.train(training_data)

# Returns the directory the model is stored in;
# `model_directory` points to the folder the model is persisted in
model_directory = trainer.persist('./projects/default/')

f = open('testcase.csv', 'rb')
reader = csv.reader(f)
for row in reader:
    interpreter = Interpreter.load(model_directory,
                                   RasaNLUConfig("/root/rasa_nlu/classification/config_all.json"))
    interpreter.parse(row)
f.close()
app = Flask(__name__)


@app.route("/nlu/train/<model>", methods=['POST'])
def train(model):
    # TODO: accept the model and check whether it is already present, then
    # train the model with new data OR else create it
    pass


@app.route("/nlu/predict/<model>", methods=['POST'])
def predict(model):
    # TODO: call the model with user text, parse with the Rasa Interpreter and
    # return the intent and entities
    pass


def format_return_data():
    # TODO: parse the response and re-structure it
    pass


from rasa_nlu.training_data import load_data
from rasa_nlu.model import Trainer
from rasa_nlu import config

training_data = load_data('/home/mani/nlu/py36nlu/data.json')
trainer = Trainer(config.load("/home/mani/nlu/py36nlu/config_spacy.yml"))
instance = trainer.train(training_data)
model_directory = trainer.persist('/home/mani/nlu/py36nlu/nlucode/')
instance.parse(text="good bye")
def rasa_train_spacy():
    training_data = load_data('train_dataset.json')
    trainer = Trainer(RasaNLUConfig("./config_spacy.json"))
    trainer.train(training_data)
    model_directory = trainer.persist('./models/')
    print(model_directory)
def train(model_dir="./models", project="default", data_dir="./intents"): training_data = load_data(data_dir) trainer = Trainer(config.load("nlu_config.yml")) trainer.train(training_data) model_directory = trainer.persist(model_dir, fixed_model_name=project) print(model_directory)
import os
import json

from rasa_nlu.training_data import load_data
from rasa_nlu.model import Trainer
from rasa_nlu import config

pwd = os.path.dirname(os.path.abspath(__file__))
source_root = os.path.abspath(os.path.join(pwd, ".."))

train_data_path = os.path.join(
    source_root, "assets/empty_nlu_model/raw_training_data.json")
with open(train_data_path) as f:
    data = json.load(f)

data.pop("regex_features")
data.pop("key_words")
data.pop("intent_rules")
data.pop("intent_id2name")

nlu_data = os.path.join(os.path.dirname(train_data_path), "training_data.json")
with open(nlu_data, "w") as f:
    json.dump(data, f, ensure_ascii=False)

nlu_config = os.path.join(source_root, "assets/config_jieba_mitie_sklean.yml")

training_data = load_data(nlu_data)
trainer = Trainer(config.load(nlu_config))
trainer.train(training_data)
trainer.persist(source_root, project_name="assets",
                fixed_model_name="empty_nlu_model")
def train_nlu(data, config, model_dir):
    training_data = load_data(data)
    trainer = Trainer(RasaNLUConfig(config))
    trainer.train(training_data)
    model_directory = trainer.persist(model_dir, fixed_model_name='assistantnlu')
def train_nlu(data, config_json, model_dir):
    print("Training NLU model...")
    training_data = load_data(data)
    trainer = Trainer(config.load(config_json))
    trainer.train(training_data)
    model_directory = trainer.persist(model_dir, fixed_model_name='weathernlu')
def train_nlu(data, configs, model_dir): training_data = load_data(data) trainer = Trainer(RasaNLUConfig("config_spacy.json")) trainer.train(training_data) model_directory = trainer.persist(model_dir, fixed_model_name='weathernlu')
def train(data, config_file, model_dir):
    training_data = load_data(data)
    trainer = Trainer(config.load(config_file))
    trainer.train(training_data)
    model_directory = trainer.persist(model_dir, fixed_model_name='chat')
def train_appointmentbot():
    train_data = load_data('rasafiles/rasa_dataset.json')
    trainer = Trainer(config.load("rasafiles/config_spacy.yaml"))
    trainer.train(train_data)
    trainer.persist('./rasamodels/', fixed_model_name='appointmentbot')
def train_nlu(data, config, model_dir):
    training_data = load_data(data)
    trainer = Trainer(RasaNLUConfig(config))
    trainer.train(training_data)
    trainer.persist('./models/nlu', fixed_model_name='mybotone')
print("\n\n") return gen_result rasa_train_dir = "./Rasa_train/" if os.path.isfile(os.path.join(rasa_train_dir, "model2.pkl")): print ("Loading from already trained_model") trained_model = pickle.load(open(os.path.join(rasa_train_dir, 'model2.pkl'), 'rb')) print ("Model loaded") else: print ("No model.pkl file... Training the rasa_model") training_data = load_data(os.path.join(rasa_train_dir,'rasa_train2.json')) trainer = Trainer(config.load(os.path.join(rasa_train_dir,'config.json'))) trainer.train(training_data) trained_model = trainer.persist(rasa_train_dir) print ("Training done and Model loaded") output_model = open(os.path.join(rasa_train_dir, 'model2.pkl'), 'wb') pickle.dump(trained_model, output_model) output_model.close() # where model_directory points to the model folder interpreter = Interpreter.load(trained_model) while True: print ("\n\n") input_sentence = input("Enter: ").lower() interpreter_result = (interpreter.parse(input_sentence)) intents = (interpreter.parse(input_sentence)['intent']['name'])
from rasa_nlu.training_data import load_data
from rasa_nlu.model import Trainer
from rasa_nlu import config
from rasa_nlu.components import ComponentBuilder
import sklearn

builder = ComponentBuilder(use_cache=True)

training_data = load_data('training-data.json')
trainer = Trainer(config.load("rasa_config.yml"), builder)
trainer.train(training_data)
model_directory = trainer.persist('./model/')
# Rendering Audio for Output From Text
def audio_output(text):
    engine = pyttsx3.init()
    engine.setProperty('voice', voice.id[1])
    engine.setProperty('rate', 150)
    engine.say(text)
    engine.runAndWait()


# Training the Model
training_data = load_data("nlu.md")
trainer = Trainer(config.load("config.yml"))
interpreter = trainer.train(training_data)
model_directory = trainer.persist("./models/nlu", fixed_model_name="current")


# Evaluate NLU Model on Random Text
def pprint(o):
    print(json.dumps(o, indent=2))


pprint(interpreter.parse("I am very sad. Could you send me a cat picture? "))

# Evaluating on Test Data
run_evaluation("nlu.md", model_directory)


class ApiAction(Action):
    def name(self):
def train_bot(data_json, config_file, model_dir):
    training_data = load_data(data_json)
    trainer = Trainer(config.load(config_file))
    trainer.train(training_data)
    model_directory = trainer.persist(model_dir, fixed_model_name='vegabot')
def train_nlu(data, configs, model_dir):
    training_data = load_data(data)
    trainer = Trainer(config.load(configs))
    trainer.train(training_data)
    model_directory = trainer.persist(model_dir, fixed_model_name='weathernlu')
def train_nlu(data, config, model_dir):
    training_data = load_data(data)
    trainer = Trainer(RasaNLUConfig(config))
    trainer.train(training_data)
    trainer.persist(model_dir, fixed_model_name='nlu_model')
def train_nlu():
    training_data = load_data('nlu.md')
    trainer = Trainer(config.load("config.yml"))
    trainer.train(training_data)
    model_directory = trainer.persist('models/nlu/', fixed_model_name="current")
    return model_directory
def train(data=None, config_file=None, model_dir=None, model_name=None):
    training_data = load_data(data)
    configuration = config.load(config_file)
    trainer = Trainer(configuration)
    trainer.train(training_data)
    trainer.persist(model_dir, fixed_model_name=model_name)
def rasa_train_MITIE():
    training_data = load_data('data/examples/rasa/train_dataset.json')
    trainer = Trainer(RasaNLUConfig("./config_mitie.json"))
    trainer.train(training_data)
    model_directory = trainer.persist('./models/')
    print(model_directory)