def load(
    cls,
    meta: Dict[Text, Any],
    model_dir: Text = None,
    model_metadata: Metadata = None,
    cached_component=None,
    **kwargs: Any,
) -> "Featurizer":
    """Loads the trained model from the provided directory.

    Args:
        meta: Component configuration; must contain a "file" key naming
            the persisted artifact files.
        model_dir: Directory the model was persisted to.
        model_metadata: Metadata of the overall model (unused here).
        cached_component: Previously cached component (unused here).
        **kwargs: Additional arguments (unused here).

    Returns:
        A featurizer restored from disk, or a fresh (untrained) one when
        the model files cannot be located.
    """
    if not model_dir or not meta.get("file"):
        # BUG FIX: os.path.abspath(None) raises TypeError, so only resolve
        # the path when model_dir is actually set; otherwise log it as-is.
        path_hint = os.path.abspath(model_dir) if model_dir else model_dir
        logger.debug(
            f"Failed to load model. "
            f"Maybe the path '{path_hint}' doesn't exist?"
        )
        return cls(component_config=meta)

    file_name = meta.get("file")
    tf_model_path = os.path.join(model_dir, f"{file_name}.tf_model")
    model = tf.keras.models.load_model(tf_model_path)

    char_to_idx = io_utils.pickle_load(
        os.path.join(model_dir, f"{file_name}.char_to_idx.pkl")
    )
    index_label_id_mapping = io_utils.pickle_load(
        os.path.join(model_dir, f"{file_name}.index_label_id_mapping.pkl")
    )
    # BUG FIX: the original path contained a stray space before ".pkl"
    # ("...index_tag_id_mapping .pkl") which can never match the
    # conventionally persisted file name — TODO confirm against the
    # corresponding persist() implementation.
    index_tag_id_mapping = io_utils.pickle_load(
        os.path.join(model_dir, f"{file_name}.index_tag_id_mapping.pkl")
    )

    return cls(
        component_config=meta,
        index_label_id_mapping=index_label_id_mapping,
        index_tag_id_mapping=index_tag_id_mapping,
        model=model,
        char_to_idx=char_to_idx,
    )
def load(cls, path: Text) -> Policy:
    """Loads a trained sklearn policy (plus its featurizer) from ``path``."""
    model_file = os.path.join(path, "sklearn_model.pkl")
    zero_features_file = os.path.join(path, "zero_state_features.pkl")

    if not os.path.exists(path):
        raise OSError("Failed to load dialogue model. Path {} "
                      "doesn't exist".format(os.path.abspath(model_file)))

    featurizer = TrackerFeaturizer.load(path)
    assert isinstance(featurizer, MaxHistoryTrackerFeaturizer), (
        "Loaded featurizer of type {}, should be "
        "MaxHistoryTrackerFeaturizer.".format(type(featurizer).__name__))

    meta = json.loads(
        rasa.shared.utils.io.read_file(os.path.join(path, "sklearn_policy.json"))
    )

    policy = cls(
        featurizer=featurizer,
        priority=meta["priority"],
        zero_state_features=io_utils.pickle_load(zero_features_file),
    )
    # Restore the full trained state (classifier, encoders, ...) in place.
    vars(policy).update(io_utils.pickle_load(model_file))

    logger.info("Loaded sklearn model")
    return policy
def load(cls, path: Union[Text, Path]) -> Policy:
    """Loads a trained sklearn policy and its featurizer from ``path``."""
    model_dir = Path(path)
    model_file = model_dir / "sklearn_model.pkl"

    if not model_dir.exists():
        raise OSError(
            f"Failed to load dialogue model. Path {model_file.absolute()} "
            f"doesn't exist.")

    featurizer = TrackerFeaturizer.load(path)
    assert isinstance(featurizer, MaxHistoryTrackerFeaturizer), (
        f"Loaded featurizer of type {type(featurizer).__name__}, should be "
        f"MaxHistoryTrackerFeaturizer.")

    meta = json.loads(
        rasa.shared.utils.io.read_file(model_dir / "sklearn_policy.json")
    )

    policy = cls(
        featurizer=featurizer,
        priority=meta["priority"],
        zero_state_features=io_utils.pickle_load(
            model_dir / "zero_state_features.pkl"
        ),
    )
    # Restore the full trained state (classifier, encoders, ...) in place.
    vars(policy).update(io_utils.pickle_load(model_file))

    logger.info("Loaded sklearn model")
    return policy
def _load_from_files(cls, meta: Dict[Text, Any], model_dir: Text):
    """Loads this component's persisted training artifacts from ``model_dir``.

    Returns:
        Tuple of (index_label_id_mapping, index_tag_id_mapping, label_data,
        meta, data_example).
    """
    file_name = meta.get("file")
    base = Path(model_dir)

    data_example = io_utils.pickle_load(base / f"{file_name}.data_example.pkl")
    label_data = io_utils.pickle_load(base / f"{file_name}.label_data.pkl")
    index_label_id_mapping = io_utils.json_unpickle(
        base / f"{file_name}.index_label_id_mapping.pkl")
    index_tag_id_mapping = io_utils.json_unpickle(
        base / f"{file_name}.index_tag_id_mapping.pkl")

    # jsonpickle converts dictionary keys to strings; restore integer keys.
    index_label_id_mapping = {
        int(k): v for k, v in index_label_id_mapping.items()
    }
    if index_tag_id_mapping is not None:
        index_tag_id_mapping = {
            int(k): v for k, v in index_tag_id_mapping.items()
        }

    return (
        index_label_id_mapping,
        index_tag_id_mapping,
        label_data,
        meta,
        data_example,
    )
def load(
    cls,
    meta: Dict[Text, Any],
    model_dir: Text = None,
    model_metadata: Metadata = None,
    cached_component=None,
    **kwargs: Any,
) -> "Featurizer":
    """Loads the trained model from the provided directory.

    Args:
        meta: Component configuration; must contain a "file" key naming
            the persisted artifact files.
        model_dir: Directory the model was persisted to.
        model_metadata: Metadata of the overall model (unused here).
        cached_component: Previously cached component (unused here).
        **kwargs: Additional arguments (unused here).

    Returns:
        A featurizer restored from disk, or a fresh (untrained) one when
        the model files cannot be located.
    """
    if not model_dir or not meta.get("file"):
        # BUG FIX: os.path.abspath(None) raises TypeError, so only resolve
        # the path when model_dir is actually set; otherwise log it as-is.
        path_hint = os.path.abspath(model_dir) if model_dir else model_dir
        logger.debug(
            f"Failed to load model. "
            f"Maybe the path '{path_hint}' doesn't exist?"
        )
        return cls(component_config=meta)

    file_name = meta["file"]
    gensim_model_path = os.path.join(model_dir, f"{file_name}.model")
    hash_embedding_path = os.path.join(
        model_dir, f"{file_name}.hash_embedding.pkl"
    )

    model_class = cls.model_class(meta)
    model = model_class.load(gensim_model_path)
    hash_embedding = io_utils.pickle_load(hash_embedding_path)

    return cls(
        component_config=meta,
        model=model,
        hash_embedding=hash_embedding,
    )
def load(cls, path: Text) -> "TEDPolicy":
    """Loads a policy from the storage.

    **Needs to load its featurizer**
    """
    if not os.path.exists(path):
        raise Exception(f"Failed to load TED policy model. Path "
                        f"'{os.path.abspath(path)}' doesn't exist.")

    model_path = Path(path)
    featurizer = TrackerFeaturizer.load(path)

    data_example_file = model_path / f"{SAVE_MODEL_FILE_NAME}.data_example.pkl"
    if not data_example_file.is_file():
        # No training artifacts persisted -> return an untrained policy.
        return cls(featurizer=featurizer)

    loaded_data = io_utils.json_unpickle(data_example_file)
    label_data = io_utils.json_unpickle(
        model_path / f"{SAVE_MODEL_FILE_NAME}.label_data.pkl")
    meta = io_utils.pickle_load(model_path / f"{SAVE_MODEL_FILE_NAME}.meta.pkl")
    priority = io_utils.json_unpickle(
        model_path / f"{SAVE_MODEL_FILE_NAME}.priority.pkl")

    model_data_example = RasaModelData(label_key=LABEL_IDS, data=loaded_data)
    meta = train_utils.update_similarity_type(meta)

    model = TED.load(
        str(model_path / f"{SAVE_MODEL_FILE_NAME}.tf_model"),
        model_data_example,
        data_signature=model_data_example.get_signature(),
        config=meta,
        max_history_tracker_featurizer_used=isinstance(
            featurizer, MaxHistoryTrackerFeaturizer),
        label_data=label_data,
    )

    # build the graph for prediction
    predict_data_example = RasaModelData(
        label_key=LABEL_IDS,
        data={
            name: feats
            for name, feats in model_data_example.items()
            if DIALOGUE in name
        },
    )
    model.build_for_predict(predict_data_example)

    return cls(featurizer=featurizer, priority=priority, model=model, **meta)
def load(cls, meta: Dict[Text, Any], model_dir=None, model_metadata=None,
         cached_component=None, **kwargs):
    """Restores the persisted classifier object from ``model_dir``.

    NOTE(review): returns the unpickled classifier directly rather than an
    instance of ``cls`` — presumably the pickle holds the component itself;
    verify against the matching persist().
    """
    classifier_file = os.path.join(model_dir, meta.get("classifier_file"))
    return utils_io.pickle_load(classifier_file)
def _load_model_utilities(cls, model_path: Path) -> Dict[Text, Any]:
    """Loads model's utility attributes.

    Args:
        model_path: Path where model is to be persisted.

    Returns:
        Dict of the restored artifacts keyed by attribute name.
    """
    prefix = cls._metadata_filename()

    def _artifact(suffix: Text) -> Path:
        # All artifacts share the "<metadata_filename>.<suffix>" naming scheme.
        return model_path / f"{prefix}.{suffix}"

    tf_model_file = _artifact("tf_model")
    loaded_data = io_utils.pickle_load(_artifact("data_example.pkl"))
    fake_features = io_utils.pickle_load(_artifact("fake_features.pkl"))
    label_data = RasaModelData(
        data=io_utils.pickle_load(_artifact("label_data.pkl"))
    )
    meta = io_utils.pickle_load(_artifact("meta.pkl"))
    priority = io_utils.json_unpickle(_artifact("priority.pkl"))

    raw_tag_specs = rasa.shared.utils.io.read_json_file(
        _artifact("entity_tag_specs.json")
    )
    # JSON stores mapping keys/values as strings; restore the integer ids.
    entity_tag_specs = [
        EntityTagSpec(
            tag_name=spec["tag_name"],
            ids_to_tags={int(k): v for k, v in spec["ids_to_tags"].items()},
            tags_to_ids={k: int(v) for k, v in spec["tags_to_ids"].items()},
            num_tags=spec["num_tags"],
        )
        for spec in raw_tag_specs
    ]

    return {
        "tf_model_file": tf_model_file,
        "loaded_data": loaded_data,
        "fake_features": fake_features,
        "label_data": label_data,
        "meta": meta,
        "priority": priority,
        "entity_tag_specs": entity_tag_specs,
    }
def _load_model_utilities(cls, model_path: Path) -> Dict[Text, Any]:
    """Loads model's utility attributes.

    Args:
        model_path: Path where model is to be persisted.

    Returns:
        The parent's utilities extended with this subclass's label quantiles.
    """
    utilities = super()._load_model_utilities(model_path)
    utilities["label_quantiles"] = io_utils.pickle_load(
        model_path / f"{cls._metadata_filename()}.label_quantiles.pkl"
    )
    return utilities
def load(cls, meta: Dict[Text, Any],
         model_dir: Text = None,
         model_metadata: Metadata = None,
         cached_component: Optional[
             "IntentRankingCanonicalExampleInjector"] = None,
         **kwargs: Any) -> "IntentRankingCanonicalExampleInjector":
    """Restores the injector from its pickled state, if one was persisted."""
    from rasa.utils.io import pickle_load

    persisted = os.path.join(model_dir, meta.get("file"))
    if not os.path.exists(persisted):
        # Nothing persisted -> fall back to a component built from config only.
        return cls(meta)
    return cls(meta, **pickle_load(persisted))
def load(cls, path: Union[Text, Path]) -> "TEDPolicy":
    """Loads a policy from the storage.

    **Needs to load its featurizer**
    """
    model_path = Path(path)

    if not model_path.exists():
        logger.error(
            f"Failed to load TED policy model. Path "
            f"'{model_path.absolute()}' doesn't exist."
        )
        return

    featurizer = TrackerFeaturizer.load(path)

    data_example_file = model_path / f"{SAVE_MODEL_FILE_NAME}.data_example.pkl"
    if not data_example_file.is_file():
        # No training artifacts persisted -> return an untrained policy.
        return cls(featurizer=featurizer)

    loaded_data = io_utils.pickle_load(data_example_file)
    label_data = RasaModelData(
        data=io_utils.pickle_load(
            model_path / f"{SAVE_MODEL_FILE_NAME}.label_data.pkl"
        )
    )
    zero_state_features = io_utils.pickle_load(
        model_path / f"{SAVE_MODEL_FILE_NAME}.zero_state_features.pkl"
    )
    meta = io_utils.pickle_load(model_path / f"{SAVE_MODEL_FILE_NAME}.meta.pkl")
    priority = io_utils.json_unpickle(
        model_path / f"{SAVE_MODEL_FILE_NAME}.priority.pkl"
    )

    meta = train_utils.update_similarity_type(meta)
    model_data_example = RasaModelData(
        label_key=LABEL_KEY, label_sub_key=LABEL_SUB_KEY, data=loaded_data
    )

    model = TED.load(
        str(model_path / f"{SAVE_MODEL_FILE_NAME}.tf_model"),
        model_data_example,
        data_signature=model_data_example.get_signature(),
        config=meta,
        max_history_tracker_featurizer_used=isinstance(
            featurizer, MaxHistoryTrackerFeaturizer
        ),
        label_data=label_data,
    )

    # build the graph for prediction
    wanted_features = STATE_LEVEL_FEATURES + FEATURES_TO_ENCODE + [DIALOGUE]
    predict_data_example = RasaModelData(
        label_key=LABEL_KEY,
        label_sub_key=LABEL_SUB_KEY,
        data={
            name: feats
            for name, feats in model_data_example.items()
            if name in wanted_features
        },
    )
    model.build_for_predict(predict_data_example)

    return cls(
        featurizer=featurizer,
        priority=priority,
        model=model,
        zero_state_features=zero_state_features,
        **meta,
    )