Example No. 1
def test_blank_config():
    file_config = {}
    cmdline_args = {}
    env_vars = {}
    f = write_file_config(file_config)
    final_config = RasaNLUConfig(f.name, env_vars, cmdline_args)
    assert final_config.as_dict() == defaults
def train_nlu(data, config, model_dir):
    training_data = load_data(data)
    trainer = Trainer(RasaNLUConfig(config))
    trainer.train(training_data)
    model_directory = trainer.persist(model_dir, fixed_model_name='geonlu')
interpreter = ____

# Try it out
print(
    interpreter.parse(
        "I'm looking for a Mexican restaurant in the North of town"))

# SOLUTION
# Import necessary modules
from rasa_nlu.converters import load_data
from rasa_nlu.config import RasaNLUConfig
from rasa_nlu.model import Trainer

# Create args dictionary
args = {"pipeline": "spacy_sklearn"}

# Create a configuration and trainer
config = RasaNLUConfig(cmdline_args=args)
trainer = Trainer(config)

# Load the training data
training_data = load_data("./training_data.json")

# Create an interpreter by training the model
interpreter = trainer.train(training_data)

# Try it out
print(
    interpreter.parse(
        "I'm looking for a Mexican restaurant in the North of town"))
Example No. 4
    def __init__(self, model_dir):
        from rasa_nlu.model import Metadata, Interpreter
        self.interpreter = Interpreter.load(
            model_dir, RasaNLUConfig("nlu_model_config.json"))
        self._items = {}
Example No. 5
    def __init__(self,
                 channels_out,
                 channel_in='screen',
                 loglvl='',
                 config_override=''):
        """Initialises the core functionality and sets up various variables."""

        # TODO: add checks to confirm all necessary files are present and readable
        # (and writable if applicable)

        signal.signal(signal.SIGINT, self.handle_ctrl_c)

        self.logger = u.setup_custom_logger('root')

        if loglvl.lower().strip() == 'debug':
            self.logger.setLevel(logging.DEBUG)
            self.logger.info('Logging level set to DEBUG')
        elif loglvl.lower().strip() == 'info':
            self.logger.setLevel(logging.INFO)
            self.logger.info('Logging level set to INFO')
        elif loglvl.lower().strip() == 'warn':
            self.logger.setLevel(logging.WARN)
            self.logger.warn('Logging level set to WARN')
        elif loglvl.lower().strip() == '':
            self.logger.setLevel(logging.INFO)
        else:
            self.logger.setLevel(logging.INFO)
            self.logger.warn(
                'Unrecognised log level input. Defaulting to INFO.')

        self.logger.info('Initialisation started')

        channel_in_accepted = ['screen']
        if channel_in not in channel_in_accepted:
            self.logger.error(
                'Unrecognised channel input value. Must be one of: ' +
                ', '.join(channel_in_accepted) + '.')
            self.before_quit()
        else:
            self.CHANNEL_IN = channel_in
            self.CHANNELS_OUT = channels_out
            self.CHANNELS_OUT[self.CHANNEL_IN] = True

        config = configparser.ConfigParser()

        if config_override.strip() != '':
            config_file = os.path.abspath(config_override.strip())
        else:
            config_file = os.path.abspath(
                os.path.join('config', 'mlb_config.ini'))

        try:
            dataset = config.read([config_file])
            if len(dataset) == 0:
                raise IOError

        except IOError as e:
            self.logger.error('Unable to open config file: ' +
                              str(config_file))
            self.before_quit()
        except configparser.Error as e:
            self.logger.error('Error with config file: ' + str(config_file))
            self.before_quit()

        try:
            # bot items
            self.botname = config.get('bot', 'name')
            self.botsubject = config.get('bot', 'subject')
            # file items
            self.history_file = os.path.abspath(
                config.get('files', 'history_file'))
            self.pickle_file = os.path.abspath(
                config.get('files', 'pickle_file'))
        except configparser.Error as e:
            self.logger.error('Error reading configuration ' + str(e))
            self.before_quit()

        self.unpickle_user_dict()
        self.SESSION_TIME_LIMIT = 10  # Time in minutes to consider a subsequent interaction to be from a new session
        self.user_id = '1234'
        self.user = self.get_user(self.user_id)

        self.user['msg_output'] = ''
        self.user['rude_count'] = 0

        self.show_highlight = False
        self.show_parse = False
        self.user_stats = False
        self.show_language = True

        #self.langs_handled = {'en':'English'}
        #self.langs_handled = {'fr':'French'}
        self.langs_handled = {'en': 'English', 'fr': 'French'}
        #self.langs_handled = {'en':'English', 'fr':'French', 'de':'German'}

        self.lang_interpreters = {}

        # This is a more generic equivalent of:
        #   self.interpreter_de = Interpreter.load('projects/default/current_de', RasaNLUConfig('config/mlb_config_de.json'))

        for lang in self.langs_handled:
            try:
                self.logger.info(
                    'Configuring interpreter for {language}'.format(
                        language=self.langs_handled[lang]))
                self.lang_interpreters[lang] = Interpreter.load(
                    'projects/default/current_{lang}'.format(lang=lang),
                    RasaNLUConfig(
                        'config/mlb_config_{lang}.json'.format(lang=lang)))
            except Exception:
                self.logger.error(
                    'Error with creating interpreter for {language} (lang: {lang})'
                    .format(language=self.langs_handled[lang], lang=lang))
                self.logger.info(
                    'Maybe you need to train the model? Try equivalent of: python -m rasa_nlu.train -c config/mlb_config_XX.json'
                )
                self.before_quit()

        self.last_input = {}

        self.print_user_stats(self.user_stats)

        self.logger.info('Initialisation complete')
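The loop above leaves one interpreter per handled language in self.lang_interpreters. The method below is a hypothetical sketch, not part of the original class: it shows how an incoming message could be routed to the matching interpreter, and its name and lang parameter are assumptions.

    def parse_message(self, text, lang='en'):
        """Hypothetical helper (not in the original class): parse `text` with the
        interpreter for `lang`, falling back to English for unknown codes."""
        interpreter = self.lang_interpreters.get(lang, self.lang_interpreters['en'])
        return interpreter.parse(text)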
def run_nlu():
    interpreter = Interpreter.load('./models/nlu/default/foodnlu',
                                   RasaNLUConfig('config_spacy.json'))
    print(interpreter.parse(u"i would like to have a fish"))
Example No. 7
                        type=int,
                        help='port on which to run server')
    parser.add_argument(
        '-t', '--token',
        help="auth token. If set, reject requests which don't provide "
             "this token as a query parameter")
    parser.add_argument('-w', '--write', help='file where logs will be saved')

    return parser


if __name__ == "__main__":
    parser = create_argparser()
    cmdline_args = {
        key: val
        for key, val in vars(parser.parse_args()).items() if val is not None
    }
    config = RasaNLUConfig(cmdline_args.get("config"), os.environ,
                           cmdline_args)
    print(config.view())
    logging.basicConfig(filename=config.log_file, level=config.log_level)
    logging.captureWarnings(True)
    logging.debug(config.view())
    try:
        server = RasaNLUServer(config)
        server.start()
    except KeyboardInterrupt:
        server.stop()
Example No. 8
    def test_failure_on_invalid_lang(self):
        config = RasaNLUConfig("config_mitie.json")
        with pytest.raises(NotImplementedError):
            MITIETrainer(config.mitie_file, 'umpalumpa')
Example No. 9
def train(data, config, model_dir):
    train_data = load_data(data)
    trainer = Trainer(RasaNLUConfig(config))
    trainer.train(train_data)
    model_directory = trainer.persist(model_dir,
                                      fixed_model_name='request_bot')
Example No. 10
def run_nlu():
    interpreter = Interpreter.load('./models/nlu/default/weathernlu',
                                   RasaNLUConfig('config_spacy.json'))
    print(interpreter.parse(
        "I am planning my holiday to Lithuania. I wonder what is the weather out there."))
Example No. 11
# Import necessary modules
from rasa_nlu.config import RasaNLUConfig
from rasa_nlu.model import Trainer

pipeline = ["nlp_spacy", "tokenizer_spacy", "ner_crf"]

# Create a config that uses this pipeline
config = RasaNLUConfig(cmdline_args={'pipeline': pipeline})

# Create a trainer that uses this config
trainer = Trainer(config)

# Create an interpreter by training the model
# (`training_data` is assumed to have been loaded beforehand, e.g. with load_data)
interpreter = trainer.train(training_data)

# Parse some messages
print(interpreter.parse("show me Chinese food in the centre of town"))
print(interpreter.parse("I want an Indian restaurant in the west"))
print(interpreter.parse("are there any good pizza places in the center?"))
Example No. 12
def predict_restaurant_nlu(model_dir):
    from rasa_nlu.model import Metadata, Interpreter
    interpreter = Interpreter.load(model_dir, RasaNLUConfig("../restaurantData/config_nlu.json"))
    intent_entities = interpreter.parse('thanks, good bye')
    return intent_entities
Example No. 13
def train_restaurant_nlu():
    train_data = load_data('../restaurantData/franken_data.json')
    trainer = Trainer(RasaNLUConfig("../restaurantData/config_mitie.json"))
    trainer.train(train_data)
    model_dir = trainer.persist('../rst_models/rst_nlu/current')
    return model_dir
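Example No. 12 and Example No. 13 appear to come from the same project (both use paths under ../restaurantData). A driver along the following lines is an assumption, not part of either snippet, but it shows how the two functions could be combined:

# Hypothetical driver: train the restaurant NLU model, then parse a message
# with the helper from Example No. 12 using the returned model directory.
model_dir = train_restaurant_nlu()
print(predict_restaurant_nlu(model_dir))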
Example No. 14
from rasa_nlu.converters import load_data
from rasa_nlu.config import RasaNLUConfig
from rasa_nlu.model import Trainer

training_data = load_data('data/nlu.md')
trainer = Trainer(RasaNLUConfig("nlu_model_config.json"))
trainer.train(training_data)
model_directory = trainer.persist(
    './projects/default/')  # Returns the directory the model is stored in

from rasa_core.domain import TemplateDomain
domain = TemplateDomain.load("restaurant_domain.yml")
Example No. 15
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import os

import logging

from rasa_nlu.server import create_app

from rasa_nlu.config import RasaNLUConfig

if __name__ == '__main__':
    # Running in a WSGI container, the configuration is loaded from the default location.
    # WSGI runners have no common way of passing arguments to the application, so we fall
    # back to a default configuration location where all settings should be placed.
    rasa_config = RasaNLUConfig(env_vars=os.environ)
    app = create_app(rasa_config)
    logging.info("Finished setting up application")
    app.run()
Example No. 16
def default_config():
    return RasaNLUConfig(CONFIG_DEFAULTS_PATH)
Example No. 17
def run_train(_config):
    config = RasaNLUConfig(cmdline_args=_config)
    (trained, path) = do_train(config)
    return trained, path
Example No. 18
def run_nlu():
    interpreter = Interpreter.load('./models/nlu/default/pramata_chatter',
                                   RasaNLUConfig('config_spacy.json'))
    print(interpreter.parse(u"Who is my risky customer?"))
Example No. 19
def train_nlu(data='./data/data.json', config='./config_sapcy.json', model_dir='./models/nlu'):
    training_data = load_data(data)
    trainer = Trainer(RasaNLUConfig(config))
    trainer.train(training_data)
    model_directory = trainer.persist(model_dir, fixed_model_name='stagebot')
    return jsonify({"Training": model_directory}), 200
Example No. 20
def run_nlu():
    interpreter = Interpreter.load('./models/nlu/default/assistantnlu',
                                   RasaNLUConfig('config_spacy.json'))
    print(interpreter.parse(u"XCNS"))
Example No. 21
def run_nlu():
    query = request.args['q']
    print(query)
    interpreter = Interpreter.load('./models/nlu/default/stagebot',
                                   RasaNLUConfig('config_sapcy.json'))
    response = interpreter.parse(query)
    return jsonify({"Engine Status": response}), 200
Example No. 22
            request.setResponseCode(200)
            response = yield self.data_router.start_train_process(
                data_string, kwargs)
            returnValue(
                json_to_string(
                    {'info': 'new model trained: {}'.format(response)}))
        except AlreadyTrainingError as e:
            request.setResponseCode(403)
            returnValue(json_to_string({"error": "{}".format(e)}))
        except InvalidProjectError as e:
            request.setResponseCode(404)
            returnValue(json_to_string({"error": "{}".format(e)}))
        except TrainingException as e:
            request.setResponseCode(500)
            returnValue(json_to_string({"error": "{}".format(e)}))


if __name__ == '__main__':
    # Running as standalone python application
    arg_parser = create_argparser()
    cmdline_args = {
        key: val
        for key, val in list(vars(arg_parser.parse_args()).items())
        if val is not None
    }
    rasa_nlu_config = RasaNLUConfig(cmdline_args.get("config"), os.environ,
                                    cmdline_args)
    rasa = RasaNLU(rasa_nlu_config)
    logger.info('Started http server on port %s' % rasa_nlu_config['port'])
    rasa.app.run('0.0.0.0', rasa_nlu_config['port'])
Example No. 23
    results["F1-score"].append(metrics.f1_score(y, preds, average='weighted'))
    results["Precision"].append(
        metrics.precision_score(y, preds, average='weighted'))


if __name__ == '__main__':  # pragma: no cover
    parser = create_argparser()
    args = parser.parse_args()

    # manual check argument dependency
    if args.mode == "crossvalidation":
        if args.model is not None:
            parser.error("Crossvalidation will train a new model \
                         - do not specify external model")

    nlu_config = RasaNLUConfig(args.config, os.environ, vars(args))
    logging.basicConfig(level=nlu_config['log_level'])

    if args.mode == "crossvalidation":
        data = training_data.load_data(args.data)
        data = prepare_data(data, cutoff=5)
        results = run_cv_evaluation(data, int(args.folds), nlu_config)
        logger.info("CV evaluation (n={})".format(args.folds))
        for k, v in results.train.items():
            logger.info("train {}: {:.3f} ({:.3f})".format(
                k, np.mean(v), np.std(v)))
        for k, v in results.test.items():
            logger.info("test {}: {:.3f} ({:.3f})".format(
                k, np.mean(v), np.std(v)))

    elif args.mode == "evaluation":
Example No. 24
df.to_csv(r'Tag_results.csv', index=None, header=True)  # Don't forget to add '.csv' at the end of the path

def load_model_tag(x):
    interpreter = Interpreter.load('models/current/nlu_model')
    x = interpreter.parse(x)
    print(x)

load_model_tag("")


# ! python -m spacy download en
import spacy
import en_core_web_sm

nlp = en_core_web_sm.load()
doc = nlp(
    "Hi,     I got a Samung TV series 7 with HDR support, a Vizio soundar connected by HDMI with Atmos support and an apple TV 4K connected to the sound bar by HDMI.     My issue is that I can have HDR or Atmos, but no both at the same time. When I launch netflix from the apple TV and HDR is turned on, the movie starts flickering.     If I launch the built-in Netflix app I can get HDR but no Atmos.     I already talked to my set-box support and the soundbar manufacturer as well, but no one gives me a solution.     I tried with different ports, different cables, connecting directly the apple TV to my TV, resetting to factory settings, but nothing works.     TV specs:Model UE50NU7020Soft version: 1252     This is my last try, could be something related to the TV? Any setting?     Thanks  ")
for ent in doc.ents:
    print(ent.text, ent.label_)
    # doc.similarity(nlp("request"))


from rasa_nlu.config import RasaNLUConfig
from rasa_nlu.model import Trainer

config = RasaNLUConfig(cmdline_args={"pipeline": "spacy_sklearn"})
trainer = Trainer(config)
# `training_data` is assumed to have been loaded earlier (e.g. with rasa_nlu's load_data)
interpreter = trainer.train(training_data)

Example No. 25
def test_default_config():
    final_config = RasaNLUConfig()
    assert dict(final_config.items()) == defaults
Example No. 26
from rasa_nlu.converters import load_data
from rasa_nlu.config import RasaNLUConfig
from rasa_nlu.model import Trainer

training_data = load_data('data/iHCM_intents/navigation')
trainer = Trainer(RasaNLUConfig("configs/config_spacy.json"))
trainer.train(training_data)
model_directory = trainer.persist('projects/')  # Returns the directory the model is stored in
Example No. 27
from rasa_nlu.converters import load_data
from rasa_nlu.config import RasaNLUConfig
from rasa_nlu.model import Trainer
import os
import shutil

# Path to save the model
SAVE_PATH = './models/trained/'

# Load the training data
training_data = load_data('./data/training.json')
print('training data loaded...')

# Load and train the model
print('training initiated...')
trainer = Trainer(RasaNLUConfig('./config.json'))
trainer.train(training_data)

# Save the model
model_directory = trainer.persist('./models/')
if os.path.exists(SAVE_PATH):  # remove the old model if it exists
    shutil.rmtree(SAVE_PATH)
    print('{} is removed to save the new model.'.format(SAVE_PATH))

os.rename(model_directory, SAVE_PATH)  # rename new model to SAVE_PATH

print('done. model saved @ {}'.format(SAVE_PATH))
def run_nlu():
    interpreter = Interpreter.load('./models/nlu/default/geonlu',
                                   RasaNLUConfig('config_spacy.json'))
    print(
        interpreter.parse(
            "My name is Alex,I want to find coordinates of Panjab"))
Example No. 29
def train_rasa_nlu():
    training_data = load_data('data/intent.md')
    trainer = Trainer(RasaNLUConfig("data/config_nlu.json"))
    trainer.train(training_data)
    model_directory = trainer.persist('models/nlu/', fixed_model_name=model_name)
    return model_directory
Example No. 30
import warnings
warnings.simplefilter("ignore")


from rasa_nlu.config import RasaNLUConfig
from rasa_nlu.model import Trainer
from rasa_nlu.converters import load_data

training_data = load_data("training_data.json")

pipeline = [
    "nlp_spacy",
    "tokenizer_spacy",
    "ner_crf"
]

config = RasaNLUConfig(cmdline_args={"pipeline": pipeline})

trainer = Trainer(config)

# Train first, then persist, so that the fitted pipeline is what gets written to disk
interpreter = trainer.train(training_data)
model_directory = trainer.persist('./models/')

print(f"Saving metadata at {model_directory}")
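The snippet above never parses anything with the trained interpreter. A quick check in the spirit of Example No. 11, which builds the same ner_crf pipeline, could be appended; the test sentence below is reused from that example.

# Quick sanity check of the trained pipeline (sentence reused from Example No. 11)
print(interpreter.parse("show me Chinese food in the centre of town"))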
Example No. 31
from rasa_nlu.converters import load_data
from rasa_nlu.config import RasaNLUConfig
from rasa_nlu.model import Trainer

training_data = load_data('/root/rasa_nlu/classification/testData2.json')
trainer = Trainer(RasaNLUConfig("/root/rasa_nlu/classification/nlu_config.json"))
trainer.train(training_data)
model_directory = trainer.persist('./projects/default/')  # Returns the directory the model is stored in
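To round off the collection, the persisted model can be reloaded with the same two-argument Interpreter.load pattern used in the earlier examples. A minimal sketch, assuming the same configuration file that was used for training:

# Reload the persisted model (sketch; assumes the training-time nlu_config.json)
from rasa_nlu.model import Interpreter

interpreter = Interpreter.load(
    model_directory, RasaNLUConfig("/root/rasa_nlu/classification/nlu_config.json"))
print(interpreter.parse("hello"))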