def from_path(cls, path):
    """Load a :class:`ProbabilisticIntentParser` instance from a path

    The data at the given path must have been generated using
    :func:`~ProbabilisticIntentParser.persist`

    Args:
        path (str): Directory containing the persisted parser

    Raises:
        OSError: When the intent parser model file is missing
    """
    path = Path(path)
    model_path = path / "intent_parser.json"
    if not model_path.exists():
        raise OSError("Missing probabilistic intent parser model file: "
                      "%s" % model_path.name)

    # Persisted model is JSON; read it as utf8 explicitly so loading does
    # not depend on the platform's locale default encoding (matches the
    # engine-level from_path loader)
    with model_path.open(encoding="utf8") as f:
        model = json.load(f)

    parser = cls(config=cls.config_type.from_dict(model["config"]))

    # The intent classifier sub-unit is optional: only load it when its
    # directory was persisted
    classifier = None
    intent_classifier_path = path / "intent_classifier"
    if intent_classifier_path.exists():
        classifier = load_processing_unit(intent_classifier_path)

    # One slot filler per intent, each persisted in its own sub-directory
    slot_fillers = dict()
    for slot_filler_conf in model["slot_fillers"]:
        intent = slot_filler_conf["intent"]
        slot_filler_path = path / slot_filler_conf["slot_filler_name"]
        slot_fillers[intent] = load_processing_unit(slot_filler_path)

    parser.intent_classifier = classifier
    parser.slot_fillers = slot_fillers
    return parser
def from_dict(cls, unit_dict):
    """Creates a :class:`SnipsNLUEngine` instance from a dict

    The dict must have been generated with :func:`~SnipsNLUEngine.to_dict`

    Raises:
        ValueError: When there is a mismatch with the model version
    """
    # Refuse to load data persisted by a different library version
    persisted_version = unit_dict.get("model_version")
    if persisted_version is None or persisted_version != __model_version__:
        raise ValueError(
            "Incompatible data model: persisted object=%s, python lib=%s"
            % (persisted_version, __model_version__))

    dataset_metadata = unit_dict["dataset_metadata"]
    if dataset_metadata is not None:
        # Language resources are needed before the parsers can run
        load_resources(dataset_metadata["language_code"])

    nlu_engine = cls(config=unit_dict["config"])
    # pylint:disable=protected-access
    nlu_engine._dataset_metadata = dataset_metadata
    # pylint:enable=protected-access

    parsers = []
    for parser_dict in unit_dict["intent_parsers"]:
        parsers.append(load_processing_unit(parser_dict))
    nlu_engine.intent_parsers = parsers
    return nlu_engine
def from_dict(cls, unit_dict):
    """Creates a :class:`ProbabilisticIntentParser` instance from a dict

    The dict must have been generated with
    :func:`~ProbabilisticIntentParser.to_dict`
    """
    parser = cls(config=cls.config_type.from_dict(unit_dict["config"]))

    # The intent classifier is optional in the persisted dict
    classifier_dict = unit_dict["intent_classifier"]
    if classifier_dict is not None:
        parser.intent_classifier = load_processing_unit(classifier_dict)
    else:
        parser.intent_classifier = None

    # Rebuild one slot filler unit per intent
    slot_fillers = dict()
    for intent, slot_filler_dict in iteritems(unit_dict["slot_fillers"]):
        slot_fillers[intent] = load_processing_unit(slot_filler_dict)
    parser.slot_fillers = slot_fillers
    return parser
def from_path(cls, path, **shared):
    """Load a :class:`SnipsNLUEngine` instance from a directory path

    The data at the given path must have been generated using
    :func:`~SnipsNLUEngine.persist`

    Args:
        path (str): The path where the nlu engine is stored.
        **shared: Shared resources (entity parsers) reused across units;
            missing parsers are loaded from the persisted engine directory.

    Raises:
        OSError: When the engine model file is missing
        ValueError: When there is a mismatch with the model version
    """
    directory_path = Path(path)
    model_path = directory_path / "nlu_engine.json"
    if not model_path.exists():
        raise OSError("Missing nlu engine model file: %s"
                      % model_path.name)
    with model_path.open(encoding="utf8") as f:
        model = json.load(f)

    # Refuse to load data persisted by a different library version
    model_version = model.get("model_version")
    if model_version is None or model_version != __model_version__:
        raise ValueError(
            "Incompatible data model: persisted object=%s, python lib=%s"
            % (model_version, __model_version__))

    dataset_metadata = model["dataset_metadata"]
    if dataset_metadata is not None:
        language = dataset_metadata["language_code"]
        resources_dir = directory_path / "resources" / language
        if resources_dir.is_dir():
            load_resources_from_dir(resources_dir)

    # Load the shared entity parsers from disk unless the caller already
    # provided them.  Use dedicated names instead of reusing the `path`
    # parameter, which previously shadowed the function argument.
    if shared.get(BUILTIN_ENTITY_PARSER) is None:
        builtin_parser_name = model["builtin_entity_parser"]
        if builtin_parser_name is not None:
            builtin_parser_path = directory_path / builtin_parser_name
            shared[BUILTIN_ENTITY_PARSER] = BuiltinEntityParser.from_path(
                builtin_parser_path)
    if shared.get(CUSTOM_ENTITY_PARSER) is None:
        custom_parser_name = model["custom_entity_parser"]
        if custom_parser_name is not None:
            custom_parser_path = directory_path / custom_parser_name
            shared[CUSTOM_ENTITY_PARSER] = CustomEntityParser.from_path(
                custom_parser_path)

    nlu_engine = cls(config=model["config"], **shared)
    # pylint:disable=protected-access
    nlu_engine._dataset_metadata = dataset_metadata
    # pylint:enable=protected-access

    intent_parsers = []
    for intent_parser_name in model["intent_parsers"]:
        intent_parser_path = directory_path / intent_parser_name
        intent_parser = load_processing_unit(intent_parser_path, **shared)
        intent_parsers.append(intent_parser)

    nlu_engine.intent_parsers = intent_parsers
    return nlu_engine
def from_path(cls, path):
    """Load a :class:`SnipsNLUEngine` instance from a directory path

    The data at the given path must have been generated using
    :func:`~SnipsNLUEngine.persist`

    Args:
        path (str): The path where the nlu engine is stored.

    Raises:
        OSError: When the engine model file is missing
        ValueError: When there is a mismatch with the model version
    """
    directory_path = Path(path)
    model_path = directory_path / "nlu_engine.json"
    if not model_path.exists():
        raise OSError("Missing nlu engine model file: %s"
                      % model_path.name)
    # Persisted model is JSON; read it as utf8 explicitly so loading does
    # not depend on the platform's locale default encoding
    with model_path.open(encoding="utf8") as f:
        model = json.load(f)

    # Refuse to load data persisted by a different library version
    model_version = model.get("model_version")
    if model_version is None or model_version != __model_version__:
        raise ValueError(
            "Incompatible data model: persisted object=%s, python lib=%s"
            % (model_version, __model_version__))

    # Load every persisted language-resource sub-directory, if any
    resources_dir = (directory_path / "resources")
    if resources_dir.is_dir():
        for subdir in resources_dir.iterdir():
            load_resources_from_dir(subdir)

    nlu_engine = cls(config=model["config"])
    # pylint:disable=protected-access
    nlu_engine._dataset_metadata = model["dataset_metadata"]
    # pylint:enable=protected-access

    intent_parsers = []
    for intent_parser_name in model["intent_parsers"]:
        intent_parser_path = directory_path / intent_parser_name
        intent_parser = load_processing_unit(intent_parser_path)
        intent_parsers.append(intent_parser)

    nlu_engine.intent_parsers = intent_parsers
    return nlu_engine
def from_dict(cls, unit_dict):
    """Creates a :class:`SnipsNLUEngine` instance from a dict

    The dict must have been generated with :func:`~SnipsNLUEngine.to_dict`

    Raises:
        ValueError: When there is a mismatch with the model version
    """
    # Refuse to load data persisted by a different library version
    persisted_version = unit_dict.get("model_version")
    version_ok = (persisted_version is not None
                  and persisted_version == __model_version__)
    if not version_ok:
        raise ValueError(
            "Incompatible data model: persisted object=%s, python lib=%s"
            % (persisted_version, __model_version__))

    nlu_engine = cls(config=unit_dict["config"])
    # pylint:disable=protected-access
    nlu_engine._dataset_metadata = unit_dict["dataset_metadata"]
    # pylint:enable=protected-access

    parsers = []
    for parser_dict in unit_dict["intent_parsers"]:
        parsers.append(load_processing_unit(parser_dict))
    nlu_engine.intent_parsers = parsers
    return nlu_engine