def validate_requirements(component_names: List[Text]) -> None:
    """Validates that all required importable python packages are installed.

    Args:
        component_names: The list of component names.

    Raises:
        MissingDependencyException: If any component's required packages
            cannot be imported.
    """
    from rasa.nlu import registry

    # Map each component to the list of packages it needs that are missing.
    missing_by_component = {}
    for name in component_names:
        clazz = registry.get_component_class(name)
        not_installed = find_unavailable_packages(clazz.required_packages())
        if not_installed:
            missing_by_component[name] = not_installed

    if not missing_by_component:
        return

    # pragma: no cover — invert the mapping: package -> components needing it,
    # so each missing package is reported once together with its dependents.
    components_by_package = defaultdict(list)
    for comp, packages in missing_by_component.items():
        for package in packages:
            components_by_package[package].append(comp)

    missing = "\n - ".join(
        f"{pkg} (needed for {', '.join(comps)})"
        for pkg, comps in components_by_package.items()
    )
    raise MissingDependencyException(
        f"Not all required importable packages are installed to use "
        f"the configured NLU pipeline. "
        f"To use this pipeline, you need to install the "
        f"missing modules: \n"
        f" - {missing}\n"
        f"Please install the packages that contain the missing modules.")
def validate_requirements(component_names: List[Text]) -> None:
    """Validates that all required importable python packages are installed.

    Args:
        component_names: The list of component names.

    Raises:
        Exception: If any required package cannot be imported.
    """
    from rasa.nlu import registry

    # Union of every package, across all components, that fails to import.
    unavailable = set()
    for name in component_names:
        clazz = registry.get_component_class(name)
        unavailable.update(find_unavailable_packages(clazz.required_packages()))

    if unavailable:  # pragma: no cover
        raise Exception(
            f"Not all required importable packages are installed. "
            f"To use this pipeline, you need to install the "
            f"missing dependencies. "
            f"Please install the package(s) that contain the module(s): "
            f"{', '.join(unavailable)}"
        )
def validate_requirements(component_names: List[Optional[Text]]) -> None:
    """Validates that all required importable python packages are installed.

    Raises:
        InvalidConfigException: If one of the component names is `None`,
            likely indicates that a custom implementation is missing this
            property or that there is an invalid configuration file that we
            did not catch earlier.
        MissingDependencyException: If any component's required packages
            cannot be imported.

    Args:
        component_names: The list of component names.
    """
    from rasa.nlu import registry

    # Map each component to the list of packages it needs that are missing.
    missing_by_component = {}
    for name in component_names:
        if name is None:
            raise InvalidConfigException(
                "Your pipeline configuration contains a component that is missing "
                "a name. Please double check your configuration or if this is a "
                "custom component make sure to implement the name property for "
                "the component."
            )
        clazz = registry.get_component_class(name)
        not_installed = find_unavailable_packages(clazz.required_packages())
        if not_installed:
            missing_by_component[name] = not_installed

    if not missing_by_component:
        return

    # pragma: no cover — invert the mapping: package -> components needing it,
    # so each missing package is reported once together with its dependents.
    components_by_package = defaultdict(list)
    for comp, packages in missing_by_component.items():
        for package in packages:
            components_by_package[package].append(comp)

    missing = "\n - ".join(
        f"{pkg} (needed for {', '.join(comps)})"
        for pkg, comps in components_by_package.items()
    )
    raise MissingDependencyException(
        f"Not all required importable packages are installed to use "
        f"the configured NLU pipeline. "
        f"To use this pipeline, you need to install the "
        f"missing modules: \n"
        f" - {missing}\n"
        f"Please install the packages that contain the missing modules."
    )
def inner(
    diet: DIETClassifier,
    pipeline: Optional[List[Dict[Text, Any]]] = None,
    training_data: str = nlu_data_path,
    message_text: Text = "Rasa is great!",
    expect_intent: bool = True,
) -> Message:
    """Trains ``diet`` on top of a small pipeline and classifies one message.

    Also verifies that a freshly loaded copy of the classifier produces a
    message with an identical fingerprint.
    """
    if not pipeline:
        pipeline = [
            {"name": "WhitespaceTokenizer"},
            {"name": "CountVectorsFeaturizer"},
        ]

    # Instantiate each component; deepcopy so popping "name" leaves the
    # caller-supplied `pipeline` configs intact.
    components = [
        registry.get_component_class(cfg.pop("name"))(cfg)
        for cfg in copy.deepcopy(pipeline)
    ]

    importer = RasaFileImporter(training_data_paths=[training_data])
    nlu_data = importer.get_nlu_data()
    for component in components:
        component.train(nlu_data)
    diet.train(training_data=nlu_data)

    message = Message(data={TEXT: message_text})
    for component in components:
        component.process(message)
    message_copy = copy.deepcopy(message)

    classified_message = diet.process([message])[0]
    if expect_intent:
        assert classified_message.data["intent"]["name"]

    # The reloaded classifier must produce an identical classification.
    loaded_diet = create_diet(diet.component_config, load=True)
    classified_copy = loaded_diet.process([message_copy])[0]
    assert classified_copy.fingerprint() == classified_message.fingerprint()

    return classified_message
def __get_cached_component(
    self, component_meta: Dict[Text, Any], model_metadata: 'Metadata'
) -> Tuple[Optional[Component], Optional[Text]]:
    """Load a component from the cache, if it exists.

    Returns the component, if found, and the cache key.
    """
    from rasa.nlu import registry

    # Prefer an explicit 'class' entry; fall back to the component's name.
    component_name = component_meta.get('class', component_meta['name'])
    component_class = registry.get_component_class(component_name)
    cache_key = component_class.cache_key(component_meta, model_metadata)

    can_cache = cache_key is not None and self.use_cache
    if can_cache and cache_key in self.component_cache:
        return self.component_cache[cache_key], cache_key
    return None, cache_key
def validate_requirements(component_names: List[Text]) -> None:
    """Ensures every python package required by the given components can be
    imported, raising otherwise."""
    from rasa.nlu import registry

    # Union of every package, across all components, that fails to import.
    unavailable = set()
    for name in component_names:
        clazz = registry.get_component_class(name)
        unavailable.update(find_unavailable_packages(clazz.required_packages()))

    if unavailable:  # pragma: no cover
        # if available, use the development file to figure out the correct
        # version numbers for each requirement
        raise Exception(
            "Not all required packages are installed. "
            "To use this pipeline, you need to install the "
            "missing dependencies. "
            "Please install {}".format(", ".join(unavailable))
        )
def test_persist_and_load(
    training_data: TrainingData,
    default_sklearn_intent_classifier: SklearnIntentClassifierGraphComponent,
    default_model_storage: ModelStorage,
    default_execution_context: ExecutionContext,
):
    """The classifier must predict the same intents after a persist/load cycle."""
    pipeline = [
        {"name": "SpacyNLP", "model": "en_core_web_md"},
        {"name": "SpacyTokenizer"},
        {"name": "SpacyFeaturizer"},
    ]
    # deepcopy so popping "name" does not mutate the configs above
    components = [
        registry.get_component_class(cfg.pop("name")).create(
            cfg, RasaNLUModelConfig()
        )
        for cfg in copy.deepcopy(pipeline)
    ]
    for component in components:
        component.train(training_data)

    default_sklearn_intent_classifier.train(training_data)

    loaded = SklearnIntentClassifierGraphComponent.load(
        SklearnIntentClassifierGraphComponent.get_default_config(),
        default_model_storage,
        Resource("sklearn"),
        default_execution_context,
    )

    # Process independent copies so the two classifiers cannot interfere.
    predicted = copy.deepcopy(training_data)
    actual = copy.deepcopy(training_data)
    loaded_messages = loaded.process(predicted.training_examples)
    trained_messages = default_sklearn_intent_classifier.process(
        actual.training_examples
    )
    for loaded_msg, trained_msg in zip(loaded_messages, trained_messages):
        assert loaded_msg.get("intent") == trained_msg.get("intent")
def _get_default_value_for_component(name: Text, key: Text) -> Any:
    """Looks up the default config value ``key`` of the component ``name``."""
    from rasa.nlu.registry import get_component_class

    component_class = get_component_class(name)
    return component_class.defaults[key]
async def test_train_persist_with_different_configurations(
    crf_entity_extractor: Callable[
        [Dict[Text, Any]], CRFEntityExtractorGraphComponent
    ],
    config_params: Dict[Text, Any],
    default_model_storage: ModelStorage,
    default_execution_context: ExecutionContext,
):
    """A trained CRF extractor and its reloaded copy must process identically."""
    pipeline = [
        {"name": "SpacyNLP", "model": "en_core_web_md"},
        {"name": "SpacyTokenizer"},
    ]
    # deepcopy so popping "name" leaves the configs above untouched
    components = [
        registry.get_component_class(cfg.pop("name")).create(
            cfg, RasaNLUModelConfig()
        )
        for cfg in copy.deepcopy(pipeline)
    ]

    crf_extractor = crf_entity_extractor(config_params)

    importer = RasaFileImporter(training_data_paths=["data/examples/rasa"])
    training_data = importer.get_nlu_data()
    for component in components:
        component.train(training_data)
    crf_extractor.train(training_data)

    message = Message(data={TEXT: "I am looking for an italian restaurant"})
    for component in components:
        component.process(message)
    message_copy = copy.deepcopy(message)

    processed_message = crf_extractor.process([message])[0]

    loaded_extractor = CRFEntityExtractorGraphComponent.load(
        {
            **CRFEntityExtractorGraphComponent.get_default_config(),
            **config_params,
        },
        default_model_storage,
        Resource("CRFEntityExtractor"),
        default_execution_context,
    )
    processed_copy = loaded_extractor.process([message_copy])[0]

    assert processed_copy.fingerprint() == processed_message.fingerprint()

    detected_entities = processed_copy.get(ENTITIES)
    assert len(detected_entities) == 1
    assert detected_entities[0]["entity"] == "cuisine"
    assert detected_entities[0]["value"] == "italian"