def parse_training_example(example: Text, intent: Optional[Text] = None) -> "Message":
    """Extract entities and synonyms, and convert to plain text."""
    # Pull the entity annotations out of the markup, then strip the markup
    # itself so only the plain utterance text remains.
    entity_annotations = find_entities_in_training_example(example)
    stripped_text = replace_entities(example)
    return Message.build(stripped_text, intent, entity_annotations)
def _collect_messages(evts: List[Dict[Text, Any]]) -> List[Message]:
    """Collect the message text and parsed data from the UserMessage events
    into a list.

    Entities attributed to pretrained extractors (Duckling, spaCy, MITIE)
    are dropped, since they cannot be learned from written-out NLU data.

    Args:
        evts: serialized tracker events (dicts with an ``event`` key).

    Returns:
        One ``Message`` per ``UserUttered`` event, built from its parse data.
    """
    from rasa.nlu.extractors.duckling_http_extractor import \
        DucklingHTTPExtractor
    from rasa.nlu.extractors.mitie_entity_extractor import MitieEntityExtractor
    from rasa.nlu.extractors.spacy_entity_extractor import SpacyEntityExtractor

    # Invariant across all events/entities — build and log it once, not per
    # entity inside the loop.
    excluded_extractors = [
        DucklingHTTPExtractor.__name__,
        SpacyEntityExtractor.__name__,
        MitieEntityExtractor.__name__,
    ]
    logger.debug("Exclude entity marking of following extractors"
                 " {} when writing nlu data "
                 "to file.".format(excluded_extractors))

    msgs = []
    for evt in evts:
        if evt.get("event") == UserUttered.type_name:
            data = evt.get("parse_data")
            # Bug fix: the original called ``data["entities"].remove(entity)``
            # while iterating that same list, which skips the element right
            # after each removal — consecutive excluded entities survived.
            # Rebuild the list with a filter instead.
            data["entities"] = [
                entity
                for entity in data.get("entities", [])
                if entity.get("extractor") not in excluded_extractors
            ]
            msg = Message.build(data["text"], data["intent"]["name"],
                                data["entities"])
            msgs.append(msg)

    return msgs