def _generate_lookup_regex(
    self, lookup_table: Dict[Text, Union[Text, List[Text]]]
) -> Text:
    """Creates a regex out of the contents of a lookup table file."""
    lookup_elements = lookup_table["elements"]
    elements_to_regex = []

    # if it's a list, it should be the elements directly
    if isinstance(lookup_elements, list):
        elements_to_regex = lookup_elements
        common_utils.raise_warning(
            "Directly including lookup tables as a list is deprecated since "
            "Rasa 1.6.",
            FutureWarning,
            docs=DOCS_URL_TRAINING_DATA_NLU + "#lookup-tables",
        )

    # otherwise it's a file path.
    else:
        try:
            f = open(lookup_elements, "r", encoding=rasa.utils.io.DEFAULT_ENCODING)
        except OSError:
            raise ValueError(
                f"Could not load lookup table {lookup_elements}. "
                f"Please make sure you've provided the correct path."
            )

        with f:
            for line in f:
                new_element = line.strip()
                if new_element:
                    elements_to_regex.append(new_element)

    # sanitize the regex, escape special characters
    elements_sanitized = [re.escape(e) for e in elements_to_regex]

    # regex matching elements with word boundaries on either side
    regex_string = "(?i)(\\b" + "\\b|\\b".join(elements_sanitized) + "\\b)"
    return regex_string
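# Illustrative sketch, not part of the original code: how the word-boundary
# regex built above behaves. Assumes only the standard library `re`.
import re

elements = ["New York", "Berlin"]
pattern = "(?i)(\\b" + "\\b|\\b".join(re.escape(e) for e in elements) + "\\b)"

assert re.search(pattern, "I live in new york") is not None  # case-insensitive
assert re.search(pattern, "a Berliner") is None  # no match inside longer words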
def _get_events_from_request_body(request: Request) -> List[Event]:
    events = request.json

    if not isinstance(events, list):
        events = [events]

    events = [Event.from_parameters(event) for event in events]
    events = [event for event in events if event]

    if not events:
        common_utils.raise_warning(
            f"Append event called, but could not extract a valid event. "
            f"Request JSON: {request.json}"
        )
        raise ErrorResponse(
            400,
            "BadRequest",
            "Couldn't extract a proper event from the request body.",
            {"parameter": "", "in": "body"},
        )

    return events
async def send_text_with_buttons(
    self,
    recipient_id: Text,
    text: Text,
    buttons: List[Dict[Text, Any]],
    **kwargs: Any,
) -> None:
    recipient = self.slack_channel or recipient_id

    text_block = {"type": "section", "text": {"type": "plain_text", "text": text}}

    if len(buttons) > 5:
        raise_warning(
            "Slack API currently allows only up to 5 buttons. "
            "Since you added more than 5, the buttons will be ignored "
            "and the message will be sent as plain text."
        )
        return await self.send_text_message(recipient, text, **kwargs)

    button_block = {"type": "actions", "elements": []}
    for button in buttons:
        button_block["elements"].append(
            {
                "type": "button",
                "text": {"type": "plain_text", "text": button["title"]},
                "value": button["payload"],
            }
        )

    await self.client.chat_postMessage(
        channel=recipient,
        as_user=True,
        text=text,
        blocks=[text_block, button_block],
    )
def _load_from_module_string(
    domain: Domain, store: EndpointConfig, event_broker: Optional[EventBroker] = None
) -> "TrackerStore":
    """Initializes a custom tracker.

    Defaults to the InMemoryTrackerStore if the module path can not be found.

    Args:
        domain: defines the universe in which the assistant operates
        store: the specific tracker store
        event_broker: an event broker to publish events

    Returns:
        a tracker store from a specified type in a stores endpoint configuration
    """
    try:
        tracker_store_class = class_from_module_path(store.type)
        init_args = arguments_of(tracker_store_class.__init__)
        if "url" in init_args and "host" not in init_args:
            raise_warning(
                "The `url` initialization argument for custom tracker stores is "
                "deprecated. Your custom tracker store should take a `host` "
                "argument in its `__init__()` instead.",
                DeprecationWarning,
            )
            store.kwargs["url"] = store.url
        else:
            store.kwargs["host"] = store.url

        return tracker_store_class(
            domain=domain, event_broker=event_broker, **store.kwargs
        )
    except (AttributeError, ImportError):
        raise_warning(
            f"Tracker store with type '{store.type}' not found. "
            f"Using `InMemoryTrackerStore` instead."
        )
        return InMemoryTrackerStore(domain)
def _user_intent_from_step(self, step: Dict[Text, Any]) -> Text:
    user_intent = step.get(KEY_USER_INTENT, "").strip()

    if not user_intent:
        common_utils.raise_warning(
            f"Issue found in '{self.source_name}':\n"
            f"User utterance cannot be empty. "
            f"This {self._get_item_title()} step will be skipped:\n"
            f"{step}",
            docs=self._get_docs_link(),
        )

    if user_intent.startswith(INTENT_MESSAGE_PREFIX):
        common_utils.raise_warning(
            f"Issue found in '{self.source_name}':\n"
            f"User intent '{user_intent}' starts with "
            f"'{INTENT_MESSAGE_PREFIX}'. This is not required.",
            docs=self._get_docs_link(),
        )
        # Remove leading slash
        user_intent = user_intent[1:]

    return user_intent
async def trigger_external_user_uttered(
    self,
    intent_name: Text,
    entities: Optional[Union[List[Dict[Text, Any]], Dict[Text, Text]]],
    tracker: DialogueStateTracker,
    output_channel: OutputChannel,
) -> None:
    """Triggers an external message.

    Triggers an external message (like a user message, but invisible;
    used, e.g., by a reminder or the trigger_intent endpoint).

    Args:
        intent_name: Name of the intent to be triggered.
        entities: Entities to be passed on.
        tracker: The tracker to which the event should be added.
        output_channel: The output channel.
    """
    if isinstance(entities, list):
        entity_list = entities
    elif isinstance(entities, dict):
        # Allow for a short-hand notation {"ent1": "val1", "ent2": "val2", ...}.
        # Useful if properties like 'start', 'end', or 'extractor' are not given,
        # e.g. for external events.
        entity_list = [
            {"entity": ent, "value": val} for ent, val in entities.items()
        ]
    elif not entities:
        entity_list = []
    else:
        raise_warning(
            f"Invalid entity specification: {entities}. Assuming no entities."
        )
        entity_list = []

    tracker.update(UserUttered.create_external(intent_name, entity_list))
    await self._predict_and_execute_next_action(output_channel, tracker)
    # save tracker state to continue conversation from this state
    self._save_tracker(tracker)
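# Illustrative sketch, not part of the original code: expanding the short-hand
# entity notation accepted by `trigger_external_user_uttered` above.
entities = {"name": "Joe", "cuisine": "italian"}
entity_list = [{"entity": ent, "value": val} for ent, val in entities.items()]
assert entity_list == [
    {"entity": "name", "value": "Joe"},
    {"entity": "cuisine", "value": "italian"},
]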
def _parse_step(self, step: Dict[Text, Any]) -> None:
    if KEY_STORY_USER_UTTERANCE in step.keys():
        self._parse_user_utterance(step)
    elif KEY_OR in step.keys():
        self._parse_or_statement(step)
    elif KEY_SLOT_NAME in step.keys():
        self._parse_slot(step)
    elif KEY_ACTION in step.keys():
        self._parse_action(step)
    elif KEY_CHECKPOINT in step.keys():
        self._parse_checkpoint(step)
    elif KEY_METADATA in step.keys():
        pass
    else:
        common_utils.raise_warning(
            f"Issue found in '{self.source_name}':\n"
            f"Found an unexpected step in the story description:\n"
            f"{step}\n"
            "It will be skipped.",
            docs=DOCS_URL_STORIES,
        )
def _check_OOV_present(self, all_tokens: List[List[Text]], attribute: Text) -> None:
    """Check if an OOV word is present."""
    if not self.OOV_token or self.OOV_words or not all_tokens:
        return

    for tokens in all_tokens:
        for text in tokens:
            if self.OOV_token in text or (
                self.lowercase and self.OOV_token in text.lower()
            ):
                return

    if any(text for tokens in all_tokens for text in tokens):
        training_data_type = "NLU" if attribute == TEXT else "ResponseSelector"

        # if there is some text in tokens, warn if there is no oov token
        common_utils.raise_warning(
            f"The out of vocabulary token '{self.OOV_token}' was configured, but "
            f"could not be found in any one of the {training_data_type} "
            f"training examples. All unseen words will be ignored during "
            f"prediction.",
            docs=DOCS_URL_COMPONENTS + "#countvectorsfeaturizer",
        )
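# Illustrative sketch, not part of the original code: the scan used by
# `_check_OOV_present` to decide whether the configured token occurs anywhere
# in the tokenised training examples.
OOV_token = "oov"
all_tokens = [["hello", "world"], ["where", "is", "OOV"]]
found = any(
    OOV_token in text or OOV_token in text.lower()
    for tokens in all_tokens
    for text in tokens
)
assert found  # "OOV".lower() contains the token, so no warning would be raised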
def validate_training_data_format_version(
    yaml_file_content: Dict[Text, Any], filename: Text
) -> bool:
    """Validates the format version of the training data content using the
    `version` field and warns users if the file is not compatible with the
    current version of Rasa Open Source.

    Args:
        yaml_file_content: Raw content of training data file as a dictionary.
        filename: Name of the validated file.

    Returns:
        `True` if the file can be processed by current version of Rasa Open
        Source, `False` otherwise.
    """
    if not isinstance(yaml_file_content, dict):
        raise ValueError(f"Failed to validate {filename}.")

    version_value = yaml_file_content.get(KEY_TRAINING_DATA_FORMAT_VERSION)

    if not version_value:
        raise_warning(
            f"Training data file {filename} doesn't have a "
            f"'{KEY_TRAINING_DATA_FORMAT_VERSION}' key. "
            f"Rasa Open Source will read the file as a "
            f"version '{LATEST_TRAINING_DATA_FORMAT_VERSION}' file.",
            docs=DOCS_BASE_URL,
        )
        return True

    try:
        parsed_version = version.parse(version_value)
        if isinstance(parsed_version, LegacyVersion):
            raise TypeError

        if version.parse(LATEST_TRAINING_DATA_FORMAT_VERSION) >= parsed_version:
            return True

    except TypeError:
        raise_warning(
            f"Training data file {filename} must specify "
            f"'{KEY_TRAINING_DATA_FORMAT_VERSION}' as string, for example:\n"
            f"{KEY_TRAINING_DATA_FORMAT_VERSION}: '{LATEST_TRAINING_DATA_FORMAT_VERSION}'\n"
            f"Rasa Open Source will read the file as a "
            f"version '{LATEST_TRAINING_DATA_FORMAT_VERSION}' file.",
            docs=DOCS_BASE_URL,
        )
        return True

    raise_warning(
        f"Training data file {filename} has a greater format version than "
        f"your Rasa Open Source installation: "
        f"{version_value} > {LATEST_TRAINING_DATA_FORMAT_VERSION}. "
        f"Please consider updating to the latest version of Rasa Open Source. "
        f"This file will be skipped.",
        docs=DOCS_BASE_URL,
    )
    return False
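# Illustrative sketch, not part of the original code: the comparison performed
# above, assuming the `packaging` library. An unquoted `version: 2.0` in YAML
# is parsed as a float, which `version.parse` rejects with a TypeError; this
# is why the validator insists the key is written as a string.
from packaging import version

assert version.parse("2.0") >= version.parse("1.0")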
async def read_from_file(self, filename: Text) -> List[StoryStep]:
    try:
        yaml_content = rasa.utils.io.read_yaml_file(filename)
        if not isinstance(yaml_content, dict):
            common_utils.raise_warning(
                f"Failed to read '{filename}'. It should be a YAML dict."
            )
            return []

        stories = yaml_content.get(KEY_STORIES)  # pytype: disable=attribute-error
        if not stories:
            return []

        self._parse_stories(stories)
        self._add_current_stories_to_result()
        return self.story_steps

    except ValueError as e:
        common_utils.raise_warning(
            f"Failed to read YAML from '{filename}', it will be skipped. "
            f"Error: {e}"
        )

    return []
def read_from_json(self, js: Dict[Text, Any], **_) -> "TrainingData":
    """Loads training data stored in the rasa NLU data format."""
    from rasa.nlu.training_data import Message, TrainingData
    import rasa.nlu.schemas.data_schema as schema
    import rasa.utils.validation as validation_utils

    validation_utils.validate_training_data(js, schema.rasa_nlu_data_schema())

    data = js["rasa_nlu_data"]
    common_examples = data.get("common_examples", [])
    intent_examples = data.get("intent_examples", [])
    entity_examples = data.get("entity_examples", [])
    entity_synonyms = data.get("entity_synonyms", [])
    regex_features = data.get("regex_features", [])
    lookup_tables = data.get("lookup_tables", [])

    entity_synonyms = transform_entity_synonyms(entity_synonyms)

    if intent_examples or entity_examples:
        raise_warning(
            "Your rasa data contains 'intent_examples' or 'entity_examples' "
            "which will be removed in the future. Consider putting all your "
            "examples into the 'common_examples' section.",
            FutureWarning,
            docs=DOCS_URL_TRAINING_DATA_NLU,
        )

    all_examples = common_examples + intent_examples + entity_examples
    training_examples = [Message.build(**ex) for ex in all_examples]

    return TrainingData(
        training_examples, entity_synonyms, regex_features, lookup_tables
    )
def _parse_checkpoint(self, step: Dict[Text, Any]) -> None:
    checkpoint_name = step.get(KEY_CHECKPOINT, "")
    slots = step.get(KEY_CHECKPOINT_SLOTS, [])

    slots_dict = {}
    for slot in slots:
        if not isinstance(slot, dict):
            common_utils.raise_warning(
                f"Issue found in '{self.source_name}':\n"
                f"Checkpoint '{checkpoint_name}' has an invalid slot: "
                f"{slots}\nItems under the '{KEY_CHECKPOINT_SLOTS}' key must be "
                f"YAML dictionaries. The checkpoint will be skipped.",
                docs=self._get_docs_link(),
            )
            return

        for key, value in slot.items():
            slots_dict[key] = value

    self._add_checkpoint(checkpoint_name, slots_dict)
async def scheduler() -> AsyncIOScheduler:
    """Thread global scheduler to handle all recurring tasks.

    If no scheduler exists yet, this will instantiate one."""
    global __scheduler

    if not __scheduler:
        try:
            __scheduler = AsyncIOScheduler(event_loop=asyncio.get_event_loop())
            __scheduler.start()
            return __scheduler
        except UnknownTimeZoneError:
            raise_warning(
                "apscheduler could not find a timezone and is "
                "defaulting to UTC. This is probably because "
                "your system timezone is not set. "
                'Set it with e.g. echo "Europe/Berlin" > '
                "/etc/timezone"
            )
            __scheduler = AsyncIOScheduler(
                event_loop=asyncio.get_event_loop(), timezone=utc
            )
            __scheduler.start()
            return __scheduler
    else:
        # scheduler already created, make sure it is running on the correct loop
        # noinspection PyProtectedMember
        if __scheduler._eventloop != asyncio.get_event_loop():
            raise RuntimeError(
                "Detected inconsistent loop usage. "
                "Trying to schedule a task on a new event "
                "loop, but scheduler was created with a "
                "different event loop. Make sure there "
                "is only one event loop in use and that the "
                "scheduler is running on that one."
            )
        return __scheduler
def get_tests_from_file(filename: Optional[Text] = None) -> List[Text]:
    """Returns a list of tests from a `filename`.

    Args:
        filename: Path to a test file.
    """
    if not filename:
        filename = utils.get_project_directory() / DEFAULT_FILENAME

    try:
        content = io_utils.read_file(filename)
        return _split_tests(content)
    except ValueError as e:
        rasa_utils.raise_warning(
            f"Unable to get tests from {filename}:\n{e} "
            f"Please make sure you have end-to-end tests added to your assistant. "
            f"See https://rasa.com/docs/rasa/user-guide/testing-your-assistant/ "
            f"for more information.",
            UserWarning,
        )
        return []
def collect_templates(
    yml_templates: Dict[Text, List[Any]]
) -> Dict[Text, List[Dict[Text, Any]]]:
    """Go through the templates and make sure they are all in dict format."""
    templates = {}
    for template_key, template_variations in yml_templates.items():
        validated_variations = []
        if template_variations is None:
            raise InvalidDomain(
                "Utterance '{}' does not have any defined templates.".format(
                    template_key
                )
            )

        for t in template_variations:
            # templates should be a dict with options
            if isinstance(t, str):
                raise_warning(
                    f"Templates should not be strings anymore. "
                    f"Utterance template '{template_key}' should contain "
                    f"either a '- text: ' or a '- custom: ' "
                    f"attribute to be a proper template.",
                    FutureWarning,
                    docs=DOCS_URL_DOMAINS + "#utterance-templates",
                )
                validated_variations.append({"text": t})
            elif "text" not in t and "custom" not in t:
                raise InvalidDomain(
                    f"Utter template '{template_key}' needs to contain either "
                    f"'- text: ' or '- custom: ' attribute to be a proper "
                    f"template."
                )
            else:
                validated_variations.append(t)

        templates[template_key] = validated_variations

    return templates
def _parse_training_examples(
    self, examples: Union[Text, List[Dict[Text, Any]]], intent: Text
) -> List[Tuple[Text, List[Dict[Text, Any]]]]:
    import rasa.nlu.training_data.entities_parser as entities_parser

    if isinstance(examples, list):
        # pytype: disable=attribute-error
        example_strings = [
            example.get(KEY_INTENT_TEXT, "") for example in examples if example
        ]
        # pytype: enable=attribute-error
    elif isinstance(examples, str):
        example_strings = self._parse_multiline_example(intent, examples)
    else:
        raise_warning(
            f"Unexpected block found in '{self.filename}' "
            f"while processing intent '{intent}':\n"
            f"{examples}\n"
            f"This block will be skipped.",
            docs=DOCS_URL_TRAINING_DATA_NLU,
        )
        return []

    if not example_strings:
        raise_warning(
            f"Issue found while processing '{self.filename}': "
            f"Intent '{intent}' has no examples.",
            docs=DOCS_URL_TRAINING_DATA_NLU,
        )

    results = []
    for example in example_strings:
        entities = entities_parser.find_entities_in_training_example(example)
        results.append((example, entities))

    return results
def read_from_json(self, js, **kwargs) -> "TrainingData":
    """Loads training data stored in the rasa NLU data format."""
    from rasa.nlu.training_data import Message, TrainingData

    validate_rasa_nlu_data(js)

    data = js["rasa_nlu_data"]
    common_examples = data.get("common_examples", [])
    intent_examples = data.get("intent_examples", [])
    entity_examples = data.get("entity_examples", [])
    entity_synonyms = data.get("entity_synonyms", [])
    regex_features = data.get("regex_features", [])
    lookup_tables = data.get("lookup_tables", [])
    gazette = data.get("gazette", [])

    entity_synonyms = transform_entity_synonyms(entity_synonyms)

    if intent_examples or entity_examples:
        raise_warning(
            "Your rasa data contains 'intent_examples' or 'entity_examples' "
            "which will be removed in the future. Consider putting all your "
            "examples into the 'common_examples' section.",
            FutureWarning,
            docs=DOCS_URL_TRAINING_DATA_NLU,
        )

    all_examples = common_examples + intent_examples + entity_examples
    training_examples = [
        Message.build(ex["text"], ex.get("intent"), ex.get("entities"))
        for ex in all_examples
    ]

    return TrainingData(
        training_examples, entity_synonyms, regex_features, lookup_tables, gazette
    )
async def handle_message(sid: Text, data: Dict) -> Any:
    output_channel = SocketIOOutput(sio, sid, self.bot_message_evt)

    if self.session_persistence:
        if not data.get("session_id"):
            raise_warning(
                "A message without a valid session_id "
                "was received. This message will be "
                "ignored. Make sure to set a proper "
                "session id using the "
                "`session_request` socketIO event."
            )
            return
        sender_id = data["session_id"]
    else:
        sender_id = sid

    message = UserMessage(
        data["message"],
        output_channel,
        sender_id,
        input_channel=self.name(),
        metadata=data["metadata"],
    )
    await on_new_message(message)
def add_checkpoint(self, name: Text, conditions: Optional[Dict[Text, Any]]) -> None:
    # Depending on the state of the story part this
    # is either a start or an end checkpoint
    if not self.current_steps:
        self.start_checkpoints.append(Checkpoint(name, conditions))
    else:
        if conditions:
            common_utils.raise_warning(
                f"End or intermediate checkpoints "
                f"do not support conditions! "
                f"(checkpoint: {name})",
                docs=DOCS_URL_STORIES + "#checkpoints",
            )
        additional_steps = []
        for t in self.current_steps:
            if t.end_checkpoints:
                tcp = t.create_copy(use_new_id=True)
                tcp.end_checkpoints = [Checkpoint(name)]
                additional_steps.append(tcp)
            else:
                t.end_checkpoints = [Checkpoint(name)]
        self.current_steps.extend(additional_steps)
def _setup_gpu_environment() -> None:
    """Sets configuration for the TensorFlow GPU environment based on the
    `ENV_GPU_CONFIG` environment variable."""
    gpu_memory_config = os.getenv(ENV_GPU_CONFIG)

    if not gpu_memory_config:
        return

    # Import from tensorflow only if necessary (environment variable was set)
    from tensorflow import config as tf_config

    parsed_gpu_config = _parse_gpu_config(gpu_memory_config)
    physical_gpus = tf_config.list_physical_devices("GPU")

    # Logic taken from https://www.tensorflow.org/guide/gpu
    if physical_gpus:
        for gpu_id, gpu_id_memory in parsed_gpu_config.items():
            _allocate_gpu_memory(physical_gpus[gpu_id], gpu_id_memory)
    else:
        rasa_utils.raise_warning(
            f"You have an environment variable '{ENV_GPU_CONFIG}' set but no "
            f"GPUs were detected to configure."
        )
def verify_example_repetition_in_intents(self, ignore_warnings: bool = True) -> bool:
    """Checks if any training example is labeled with multiple different intents."""
    everything_is_alright = True

    duplication_hash = defaultdict(set)
    for example in self.intents.intent_examples:
        text = example.text
        duplication_hash[text].add(example.get("intent"))

    for text, intents in duplication_hash.items():
        if len(intents) > 1:
            everything_is_alright = ignore_warnings and everything_is_alright
            intents_string = ", ".join(sorted(intents))
            raise_warning(
                f"The example '{text}' was found labeled with multiple "
                f"different intents in the training data. Each annotated message "
                f"should only appear with one intent. You should fix that "
                f"conflict. The example is labeled with: {intents_string}."
            )

    return everything_is_alright
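# Illustrative sketch, not part of the original code: how the duplication
# check above groups intents by example text.
from collections import defaultdict

examples = [("hi", "greet"), ("hi", "goodbye"), ("bye", "goodbye")]
duplication_hash = defaultdict(set)
for text, intent in examples:
    duplication_hash[text].add(intent)

conflicts = {t: ints for t, ints in duplication_hash.items() if len(ints) > 1}
assert conflicts == {"hi": {"greet", "goodbye"}}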
def as_feature(self) -> List[float]:
    r = [0.0] * self.feature_dimensionality()
    try:
        for i, v in enumerate(self.values):
            if v == str(self.value).lower():
                r[i] = 1.0
                break
        else:
            # the `else` clause runs only if the loop never hit `break`,
            # i.e. the current value matched none of the domain values
            if self.value is not None:
                raise_warning(
                    f"Categorical slot '{self.name}' is set to a value "
                    f"('{self.value}') "
                    "that is not specified in the domain. "
                    "Value will be ignored and the slot will "
                    "behave as if no value is set. "
                    "Make sure to add all values a categorical "
                    "slot should store to the domain."
                )
    except (TypeError, ValueError):
        logger.exception("Failed to featurize categorical slot.")
        return r
    return r
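# Illustrative sketch, not part of the original code: the one-hot encoding
# `as_feature` produces for a categorical slot with values
# ["low", "medium", "high"] that is currently set to "Medium".
values = ["low", "medium", "high"]
value = "Medium"
feature = [1.0 if v == str(value).lower() else 0.0 for v in values]
assert feature == [0.0, 1.0, 0.0]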
def __init__(
    self,
    component_config: Optional[Dict[Text, Any]] = None,
    clf: Optional["sklearn.model_selection.GridSearchCV"] = None,
    le: Optional["sklearn.preprocessing.LabelEncoder"] = None,
) -> None:
    """Construct a new intent classifier using the sklearn framework."""
    from sklearn.preprocessing import LabelEncoder

    super().__init__(component_config)

    if le is not None:
        self.le = le
    else:
        self.le = LabelEncoder()
    self.clf = clf

    common_utils.raise_warning(
        "'SklearnIntentClassifier' is deprecated and will be removed in version "
        "2.0. Use 'DIETClassifier' instead.",
        category=FutureWarning,
        docs=DOCS_URL_MIGRATION_GUIDE,
    )
def validate_required_components_from_data(
    pipeline: List["Component"], data: TrainingData
) -> None:
    """Validates that all components are present in the pipeline based on data.

    Args:
        pipeline: The list of the :class:`rasa.nlu.components.Component`.
        data: The :class:`rasa.nlu.training_data.training_data.TrainingData`.
    """
    from rasa.nlu.selectors.response_selector import ResponseSelector

    response_selector_exists = False
    for component in pipeline:
        # check if a response selector is part of NLU pipeline
        if isinstance(component, ResponseSelector):
            response_selector_exists = True

    if len(data.response_examples) and not response_selector_exists:
        raise_warning(
            "Training data consists of examples for training a response "
            "selector, but no response selector component is specified in the "
            "NLU pipeline."
        )
def process(self, message: Message, **kwargs: Any) -> None:
    if self._url() is not None:
        reference_time = self._reference_time_from_message(message)
        matches = self._duckling_parse(message.text, reference_time)
        all_extracted = convert_duckling_format_to_rasa(matches)
        dimensions = self.component_config["dimensions"]
        extracted = DucklingHTTPExtractor.filter_irrelevant_entities(
            all_extracted, dimensions
        )
    else:
        extracted = []
        raise_warning(
            "Duckling HTTP component in pipeline, but no `url` is configured "
            "in the config file and `RASA_DUCKLING_HTTP_URL` is not set as an "
            "environment variable. No entities will be extracted!",
            docs=DOCS_URL_COMPONENTS + "#ducklinghttpextractor",
        )

    extracted = self.add_extractor_name(extracted)
    message.set(
        ENTITIES, message.get(ENTITIES, []) + extracted, add_to_output=True
    )
def handle_channels(
    self,
    channels: List[InputChannel],
    http_port: int = constants.DEFAULT_SERVER_PORT,
    route: Text = "/webhooks/",
    cors: Union[Text, List[Text], None] = None,
) -> Sanic:
    """Start a webserver attaching the input channels and handling msgs."""
    from rasa.core import run

    raise_warning(
        "Using `handle_channels` is deprecated. "
        "Please use `rasa.run(...)` or see "
        "`rasa.core.run.configure_app(...)` if you want to implement "
        "this on a more detailed level.",
        DeprecationWarning,
    )

    app = run.configure_app(channels, cors, None, enable_api=False, route=route)
    app.agent = self
    update_sanic_log_level()
    app.run(
        host="0.0.0.0",
        port=http_port,
        backlog=int(os.environ.get(ENV_SANIC_BACKLOG, "100")),
        workers=rasa.core.utils.number_of_sanic_workers(self.lock_store),
    )

    # this might seem unnecessary (as run does not return until the server
    # is killed) - but we use it for tests where we mock `.run` to directly
    # return and need the app to inspect if we created a properly
    # configured server
    return app
def create(
    obj: Union["NaturalLanguageInterpreter", EndpointConfig, Text, None],
    # this second parameter is deprecated!
    endpoint: Optional[EndpointConfig] = None,
) -> "NaturalLanguageInterpreter":
    """Factory to create a natural language interpreter."""
    if endpoint is not None:
        raise_warning(
            "Calling `NaturalLanguageInterpreter.create` with two parameters "
            "is deprecated. The `endpoint` parameter will be removed in the "
            "future. You should replace a call "
            "`NaturalLanguageInterpreter.create(s, e)` "
            "with the single parameter version "
            "`NaturalLanguageInterpreter.create(e or s)`.",
            category=DeprecationWarning,
        )
        obj = endpoint or obj

    from rasa.nlu.model import UnsupportedModelError

    try:
        if isinstance(obj, NaturalLanguageInterpreter):
            return obj
        elif isinstance(obj, str) and os.path.exists(obj):
            return RasaNLUInterpreter(model_directory=obj)
        elif isinstance(obj, str) and not os.path.exists(obj):
            # user passed in a string, but file does not exist
            logger.warning(
                f"No local NLU model '{obj}' found. Using RegexInterpreter instead."
            )
            return RegexInterpreter()
        else:
            return _create_from_endpoint_config(obj)
    except UnsupportedModelError as e:
        logger.warning(e.message)
        return RegexInterpreter()
def train(
    self,
    training_data: TrainingData,
    config: Optional[RasaNLUModelConfig] = None,
    **kwargs: Any,
) -> None:
    """Train the intent classifier on a data set."""
    num_threads = kwargs.get("num_threads", 1)

    labels = [e.get("intent") for e in training_data.intent_examples]

    if len(set(labels)) < 2:
        common_utils.raise_warning(
            "Can not train an intent classifier as there are not "
            "enough intents. Need at least 2 different intents. "
            "Skipping training of intent classifier.",
            docs=DOCS_URL_TRAINING_DATA_NLU,
        )
    else:
        y = self.transform_labels_str2num(labels)
        X = np.stack(
            [
                sequence_to_sentence_features(example.get(DENSE_FEATURE_NAMES[TEXT]))
                for example in training_data.intent_examples
            ]
        )
        # reduce dimensionality
        X = np.reshape(X, (len(X), -1))

        self.clf = self._create_classifier(num_threads, y)

        with warnings.catch_warnings():
            # sklearn raises lots of
            # "UndefinedMetricWarning: F-score is ill-defined"
            # if there are few intent examples, this is needed to prevent it
            warnings.simplefilter("ignore")
            self.clf.fit(X, y)
def handle_deprecated_request_parameters(
    request: Request, old_name: Text, new_name: Text
) -> None:
    """Modify a request to account for a deprecated request parameter
    following a rename.

    Replace the deprecated parameter in the input request with the
    corresponding new one. Also do this when the parameter is used as part of
    a `fields` parameter.

    Args:
        request: The request to fix.
        old_name: The deprecated name of the parameter.
        new_name: The new name of the parameter.
    """
    if request.args.get(old_name):
        rasa_utils.raise_warning(
            f"Your request includes the parameter '{old_name}'. This has been "
            f"deprecated and renamed to '{new_name}'. The '{old_name}' parameter "
            f"will no longer work in future versions of Rasa X. Please use "
            f"'{new_name}' instead.",
            FutureWarning,
        )
        request.args[new_name] = request.args.pop(old_name)

    keys = [k for k in request.args.keys() if "fields" in k]
    for k in keys:
        new_k = "fields"
        # get the content between brackets, [<CONTENT>], as a list,
        # e.g. "fields[a][b]" becomes ["a", "b"]
        d = re.findall(r"\[(.*?)\]", k)
        for field in d:
            if field == old_name:
                new_k += f"[{new_name}]"
            else:
                new_k += f"[{field}]"
        request.args[new_k] = request.args.pop(k)
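# Illustrative sketch, not part of the original code: extracting and renaming
# the bracketed parts of a `fields` parameter, using only the standard library.
import re

old_name, new_name = "old", "new"
key = "fields[old][nested]"
parts = re.findall(r"\[(.*?)\]", key)  # -> ["old", "nested"]
renamed = "fields" + "".join(
    f"[{new_name if part == old_name else part}]" for part in parts
)
assert renamed == "fields[new][nested]"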
def _check_deprecated_attributes(component: "Component") -> None:
    """Checks that the component doesn't have deprecated attributes.

    Args:
        component: The :class:`rasa.nlu.components.Component`.
    """
    if hasattr(component, "provides"):
        raise_warning(
            f"'{component.name}' contains property 'provides', "
            f"which is deprecated. There is no need to specify "
            f"the list of attributes that a component provides.",
            category=FutureWarning,
            docs=DOCS_URL_MIGRATION_GUIDE,
        )
    if hasattr(component, "requires"):
        raise_warning(
            f"'{component.name}' contains property 'requires', "
            f"which is deprecated. Use the 'required_components()' method "
            f"to specify which components are required to be present "
            f"in the pipeline by this component.",
            category=FutureWarning,
            docs=DOCS_URL_MIGRATION_GUIDE,
        )