Example #1
    def __init__(self, name: Text, config: Dict[Text, Any]) -> None:
        """Initializes a `ConveRTFeaturizer`.

        Args:
            name: An identifier for this featurizer.
            config: The configuration.
        """
        super().__init__(name=name, config=config)

        model_url = self._config["model_url"]
        self.model_url = (
            model_url
            if rasa.nlu.utils.is_url(model_url)
            else os.path.abspath(model_url)
        )

        self.module = train_utils.load_tf_hub_model(self.model_url)

        self.tokenize_signature: WrappedFunction = self._get_signature(
            "tokenize", self.module
        )
        self.sequence_encoding_signature: WrappedFunction = self._get_signature(
            "encode_sequence", self.module
        )
        self.sentence_encoding_signature: WrappedFunction = self._get_signature(
            "default", self.module
        )
Example #2
    def __init__(self, component_config: Dict[Text, Any] = None) -> None:
        """Construct a new tokenizer using the WhitespaceTokenizer framework."""

        super().__init__(component_config)

        self.module = train_utils.load_tf_hub_model(TF_HUB_MODULE_URL)

        self.tokenize_signature = self.module.signatures["tokenize"]
Example #3
    def __init__(self, component_config: Dict[Text, Any] = None) -> None:
        """Construct a new tokenizer using the WhitespaceTokenizer framework."""

        super().__init__(component_config)

        model_url = "http://models.poly-ai.com/convert/v1/model.tar.gz"
        self.module = train_utils.load_tf_hub_model(model_url)

        self.tokenize_signature = self.module.signatures["tokenize"]
Example #4
    def __init__(self, component_config: Optional[Dict[Text, Any]] = None) -> None:

        super(ConveRTFeaturizer, self).__init__(component_config)

        model_url = "http://models.poly-ai.com/convert/v1/model.tar.gz"
        self.module = train_utils.load_tf_hub_model(model_url)

        self.sentence_encoding_signature = self.module.signatures["default"]
        self.sequence_encoding_signature = self.module.signatures["encode_sequence"]
Example #5
    def __init__(self,
                 component_config: Optional[Dict[Text, Any]] = None) -> None:

        super().__init__(component_config)

        self.model_url = self.component_config.get("model_url",
                                                   TF_HUB_MODULE_URL)

        self.module = train_utils.load_tf_hub_model(self.model_url)

        self.sentence_encoding_signature = self.module.signatures["default"]
        self.sequence_encoding_signature = self.module.signatures[
            "encode_sequence"]
Example #6
    def __init__(self, component_config: Dict[Text, Any] = None) -> None:
        """Construct a new tokenizer using the WhitespaceTokenizer framework.

        Args:
            component_config: User configuration for the component
        """
        super().__init__(component_config)

        self.model_url = self._get_validated_model_url()

        self.module = train_utils.load_tf_hub_model(self.model_url)

        self.tokenize_signature = self.module.signatures["tokenize"]
Example #7
    def __init__(self, component_config: Optional[Dict[Text, Any]] = None) -> None:
        """Initializes ConveRTFeaturizer with the model and different
        encoding signatures.

        Args:
            component_config: Configuration for the component.
        """
        super(ConveRTFeaturizer, self).__init__(component_config)
        self.model_url = self._get_validated_model_url()

        self.module = train_utils.load_tf_hub_model(self.model_url)

        self.tokenize_signature = self._get_signature("tokenize", self.module)
        self.sequence_encoding_signature = self._get_signature(
            "encode_sequence", self.module
        )
        self.sentence_encoding_signature = self._get_signature("default", self.module)