示例#1
0
    def test_train(self):
        """Training on the beverage dataset must yield a loadable engine."""
        # Given / When
        train(BEVERAGE_DATASET_PATH, str(self.tmp_file_path), config_path=None)

        # Then: the output file must exist ...
        if not self.tmp_file_path.exists():
            self.fail("No trained engine generated")
        # ... and its JSON content must deserialize back into an engine
        failure_msg = "Failed to create an engine from engine dict."
        with self.fail_if_exception(failure_msg):
            with self.tmp_file_path.open(mode="r", encoding="utf8") as f:
                engine_dict = json.load(f)
            SnipsNLUEngine.from_dict(engine_dict)
    def test_main_train_engine(self):
        """Running the CLI training entry point must write a valid engine."""
        # Given
        cli_args = [BEVERAGE_DATASET_PATH, self.tmp_file_path]
        with patch.object(sys, "argv", mk_sys_argv(cli_args)):
            # When
            main_train_engine()

            # Then: the engine file was produced ...
            if not os.path.exists(self.tmp_file_path):
                self.fail("No trained engine generated")
            # ... and it deserializes back into an engine without raising
            failure_msg = "Failed to create an engine from engine dict."
            with self.fail_if_exception(failure_msg):
                with io.open(self.tmp_file_path, "r", encoding="utf8") as f:
                    engine_dict = json.load(f)
                SnipsNLUEngine.from_dict(engine_dict)
示例#3
0
    def test_main_train_engine(self):
        """End-to-end check of the training CLI entry point."""
        # Given: fake the command line seen by the entry point
        argv = mk_sys_argv([BEVERAGE_DATASET_PATH, self.tmp_file_path])
        with patch.object(sys, "argv", argv):
            # When
            main_train_engine()

            # Then: an engine file was written ...
            if not os.path.exists(self.tmp_file_path):
                self.fail("No trained engine generated")
            # ... and its content round-trips through from_dict
            message = "Failed to create an engine from engine dict."
            with self.fail_if_exception(message):
                with io.open(self.tmp_file_path, "r", encoding="utf8") as fp:
                    engine_dict = json.load(fp)
                SnipsNLUEngine.from_dict(engine_dict)
示例#4
0
def parse(event, content):
    """Lambda-style handler: fetch a trained engine from S3 and parse a query.

    ``content`` is unused; it is kept for the handler signature.
    The engine is rebuilt on every invocation from the serialized dict
    stored at ``trained_engine.json`` in the configured bucket.
    """
    bucket_object = boto3.resource('s3').Object(
        os.environ.get('RESOURCE_BUCKET'), 'trained_engine.json')
    load_resources('en')
    engine_dict = json.load(bucket_object.get().get('Body'))
    engine = SnipsNLUEngine.from_dict(engine_dict)
    return engine.parse(event.get('statement'))
示例#5
0
    def __init__(self, modelFilePath, action, intentsDictionary):
        """Build the handler around a JSON-serialized NLU engine dict.

        Args:
            modelFilePath: path to a JSON file holding an engine dict.
            action: callback stored as-is; semantics defined by callers.
            intentsDictionary: intent mapping stored as-is.
        """
        self.modelFilePath = modelFilePath
        self.action = action
        self.intentsDictionary = intentsDictionary

        # Deserialize the model once at construction time.
        with io.open(self.modelFilePath) as model_file:
            model_dict = json.load(model_file)
        self.nlu_engine = SnipsNLUEngine.from_dict(model_dict)
示例#6
0
    def __init__(self, lang, trained_engine_file):
        """Load language resources, then a serialized engine from disk.

        Args:
            lang: language code handed to ``load_resources``.
            trained_engine_file: path to a JSON engine dict.
        """
        self.__lang = lang
        self.__trained_engine_file = trained_engine_file

        load_resources(self.__lang)

        with io.open(self.__trained_engine_file) as engine_file:
            self.__loaded_engine = SnipsNLUEngine.from_dict(
                json.load(engine_file))
示例#7
0
def debug_inference(engine_path):
    """Interactive loop: parse typed queries with a stored engine until 'q'."""
    with io.open(os.path.abspath(engine_path), "r", encoding="utf8") as fp:
        engine = SnipsNLUEngine.from_dict(json.load(fp))

    while True:
        user_query = input("Enter a query (type 'q' to quit): ").strip()
        # Python 2 compatibility: input may come back as bytes.
        if isinstance(user_query, bytes):
            user_query = user_query.decode("utf8")
        if user_query == "q":
            break
        print(json.dumps(engine.parse(user_query), indent=2))
示例#8
0
def debug_inference(engine_path):
    """Interactively parse user queries with a previously trained engine."""
    engine_file_path = os.path.abspath(engine_path)
    with io.open(engine_file_path, "r", encoding="utf8") as engine_file:
        engine_dict = json.load(engine_file)

    # The engine dict carries its own language; fetch matching resources.
    load_resources(engine_dict["dataset_metadata"]["language_code"])
    engine = SnipsNLUEngine.from_dict(engine_dict)

    while True:
        user_query = input("Enter a query (type 'q' to quit): ").strip()
        if isinstance(user_query, bytes):  # Python 2 compatibility
            user_query = user_query.decode("utf8")
        if user_query == "q":
            break
        print(json.dumps(engine.parse(user_query), indent=2))
示例#9
0
def debug_inference(engine_path):
    """REPL over a trained engine stored at *engine_path* (quit with 'q')."""
    with Path(engine_path).open("r", encoding="utf8") as engine_file:
        engine_dict = json.load(engine_file)

    # Language resources are resolved from the engine's own metadata.
    load_resources(engine_dict["dataset_metadata"]["language_code"])
    engine = SnipsNLUEngine.from_dict(engine_dict)

    while True:
        raw = input("Enter a query (type 'q' to quit): ")
        query = raw.strip()
        if isinstance(query, bytes):  # Python 2 compatibility
            query = query.decode("utf8")
        if query == "q":
            break
        print(json.dumps(engine.parse(query), indent=2))
示例#10
0
def main_engine_inference():
    """CLI loop: load a trained engine and parse typed queries until 'q'."""
    cli_args = vars(parse_inference_args(sys.argv[1:]))

    engine_path = os.path.abspath(cli_args.pop("training_path"))
    with io.open(engine_path, "r", encoding="utf8") as engine_file:
        engine_dict = json.load(engine_file)
    engine = SnipsNLUEngine.from_dict(engine_dict)
    # The language is read from the engine's (private) dataset metadata.
    language = engine._dataset_metadata[  # pylint: disable=protected-access
        "language_code"]
    load_resources(language)

    while True:
        query = input("Enter a query (type 'q' to quit): ").strip()
        if isinstance(query, bytes):  # Python 2 compatibility
            query = query.decode("utf8")
        if query == "q":
            break
        print(json.dumps(engine.parse(query), indent=2))
示例#11
0
def parse(training_path, query):
    """Load a trained NLU engine and play with its parsing API interactively"""
    with Path(training_path).open("r", encoding="utf8") as engine_file:
        engine_dict = json.load(engine_file)
    # Language resources must be loaded before the engine is rebuilt.
    load_resources(engine_dict["dataset_metadata"]["language_code"])
    engine = SnipsNLUEngine.from_dict(engine_dict)

    if query:
        # One-shot mode: parse the provided query and return.
        print_parsing_result(engine, query)
        return

    # Interactive mode: keep reading queries until the user types 'q'.
    while True:
        query = input("Enter a query (type 'q' to quit): ").strip()
        if query == "q":
            break
        print_parsing_result(engine, query)
示例#12
0
File: cli.py  Project: lym0302/snips-nlu
def main_engine_inference():
    """Interactive inference shell for a serialized Snips NLU engine."""
    parsed_args = vars(parse_inference_args(sys.argv[1:]))

    training_path = parsed_args.pop("training_path")
    with io.open(os.path.abspath(training_path), "r", encoding="utf8") as fp:
        engine = SnipsNLUEngine.from_dict(json.load(fp))
    # pylint: disable=protected-access
    load_resources(engine._dataset_metadata["language_code"])

    while True:
        typed = input("Enter a query (type 'q' to quit): ")
        typed = typed.strip()
        if isinstance(typed, bytes):  # Python 2 compatibility
            typed = typed.decode("utf8")
        if typed == "q":
            break
        print(json.dumps(engine.parse(typed), indent=2))
示例#13
0
    def fit(self, training_file_path, trained_directory_path):
        """Train (or reload) the Snips NLU engine for a training file.

        A trained engine is cached on disk next to a checksum of the
        training data: if the checksum matches an existing trained file,
        the engine is loaded from disk instead of being retrained.

        Args:
            training_file_path: path to the JSON training dataset.
            trained_directory_path: directory holding cached artifacts.
        """
        filename, _ = os.path.splitext(os.path.basename(training_file_path))

        # TODO check what should be in the base Interpreter class

        # Cache file names are derived from the training file's basename.
        trained_path = os.path.join(trained_directory_path,
                                    '%s.trained.json' % filename)
        checksum_path = os.path.join(trained_directory_path,
                                     '%s.checksum' % filename)

        # Read the dataset and load language resources for its language.
        with open(training_file_path) as f:
            training_str = f.read()
            self._training_data = json.loads(training_str)
            self._lang = self._training_data['language']
            self._log.info('Loading resources for language %s' % self._lang)
            load_resources(self._lang)

        same, computed_checksum = self.checksum_match(training_str,
                                                      checksum_path)

        # Checksums match, load the engine from trained file
        if same and os.path.isfile(trained_path):
            self._log.info('Checksum matched, loading trained engine')
            with open(trained_path) as f:
                self._engine = SnipsNLUEngine.from_dict(json.load(f))
        else:
            self._log.info('Checksum has changed, retraining the engine')
            self._engine = SnipsNLUEngine()
            self._engine.fit(self._training_data)

            # Persist both the trained engine and the new checksum so the
            # next call can skip training.
            with open(trained_path, mode='w') as f:
                json.dump(self._engine.to_dict(), f)

            with open(checksum_path, mode='w') as f:
                f.write(computed_checksum)

        self._entity_parser = BuiltinEntityParser(self._lang)
        # Map each intent name to the list of its slot names, taken from
        # the engine's private dataset metadata.
        self._meta = {
            k: list(v.keys())
            for k, v in
            self._engine._dataset_metadata['slot_name_mappings'].items()
        }
示例#14
0
# NOTE(review): Python 2 only — reload()/sys.setdefaultencoding() and the
# `unicode` builtin used below do not exist on Python 3.
import sys

reload(sys)
sys.setdefaultencoding('utf8')
import snips_nlu

# Fetch Spanish language resources before any parsing.
snips_nlu.load_resources("es")

import io
import json
from snips_nlu import SnipsNLUEngine, load_resources

# Deserialize the trained engine once at import time.
with io.open("trained.json") as f:
    engine_dict = json.load(f)

engine = SnipsNLUEngine.from_dict(engine_dict)

#phrase = raw_input("Pregunta: ")


def pregunta(phrase):
    """Parse *phrase* with the module-level engine; return pretty JSON."""
    r = engine.parse(unicode(phrase))
    return json.dumps(r, indent=2)


from SPARQLWrapper import SPARQLWrapper, JSON


def consulta_formula1(formula1):

    sparql = SPARQLWrapper("http://localhost:8890/sparql/AutomovilismoNuevo")
示例#15
0
def start():
    """Load English resources and return the engine stored in trained.json."""
    load_resources("en")
    with io.open("trained.json") as trained_file:
        engine_dict = json.load(trained_file)
    return SnipsNLUEngine.from_dict(engine_dict)
示例#16
0
        sendToHost(s, result)


if __name__ == '__main__':
    # NOTE(review): Python 2 script (print statements, SocketServer module).
    # `global` at module level is a no-op; NLU_engine is read by Handler.
    global NLU_engine

    # NOTE(review): argv always contains at least the script name, so this
    # guard can never trigger; it was presumably meant to be `< 2`.
    if len(sys.argv) < 1:
        print "Syntax: %s <trained_assistant_path> [<port>]" % sys.argv[0]
        sys.exit(1)

    DATASET_PATH = sys.argv[1]
    # NOTE(review): an explicit port stays a *string* here (no int() cast),
    # so the `PORT > 65535` check below compares str to int — unreliable in
    # Python 2; confirm and convert with int(sys.argv[2]).
    PORT = sys.argv[2] if len(sys.argv) == 3 else 80

    if PORT > 65535:
        print "Error. Provide a valid port number"
        sys.exit(1)

    # Load the serialized engine dict and build the engine served by Handler.
    with codecs.open(DATASET_PATH, 'r', 'utf-8') as ds:
        dataset_dict = json.load(ds)

    load_resources(u"en")
    NLU_engine = SnipsNLUEngine.from_dict(dataset_dict)
    httpd = SocketServer.TCPServer(("", PORT), Handler)
    print time.asctime(), "Server Starts - port:%s" % (PORT)
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        pass
    httpd.server_close()
    print time.asctime(), "Server Stops - port:%s" % (PORT)
示例#17
0
    def reload_engine(self):
        """Re-read the trained engine file and swap in a fresh engine."""
        with io.open(self.__trained_engine_file) as engine_file:
            engine_dict = json.load(engine_file)
        self.__loaded_engine = SnipsNLUEngine.from_dict(engine_dict)
示例#18
0
from __future__ import unicode_literals, print_function

import io
import json
from os.path import dirname, abspath, join

from snips_nlu import SnipsNLUEngine

MODEL_PATH = join(dirname(abspath(__file__)), "model_output.json")

# Deserialize the engine once, next to this file, at import time.
with io.open(MODEL_PATH) as f:
    model = json.load(f)

nlu_engine = SnipsNLUEngine.from_dict(model)

# Read one query from stdin, parse it, and pretty-print the result.
text = input('Enter text: ')
print(json.dumps(nlu_engine.parse(text), indent=2))