Example 1
    def _prepare_layers(self) -> None:
        self.text_name = TEXT
        self.label_name = TEXT if self.config[SHARE_HIDDEN_LAYERS] else LABEL

        # For user text and response text, prepare layers that combine different feature
        # types, embed everything using a transformer and optionally also do masked
        # language modeling. Omit input dropout for label features.
        label_config = self.config.copy()
        label_config.update({
            SPARSE_INPUT_DROPOUT: False,
            DENSE_INPUT_DROPOUT: False
        })
        for attribute, config in [
            (self.text_name, self.config),
            (self.label_name, label_config),
        ]:
            self._tf_layers[
                f"sequence_layer.{attribute}"] = rasa_layers.RasaSequenceLayer(
                    attribute, self.data_signature[attribute], config)

        if self.config[MASKED_LM]:
            self._prepare_mask_lm_loss(self.text_name)

        self._prepare_label_classification_layers(
            predictor_attribute=self.text_name)
Example 2
    def _prepare_layers(self) -> None:
        self.text_name = TEXT
        self.label_name = TEXT if self.config[SHARE_HIDDEN_LAYERS] else LABEL

        # For user text and response text, prepare layers that combine different feature
        # types, embed everything using a transformer and optionally also do masked
        # language modeling.
        for attribute in [self.text_name, self.label_name]:
            self._tf_layers[
                f"sequence_layer.{attribute}"] = rasa_layers.RasaSequenceLayer(
                    attribute, self.data_signature[attribute], self.config)

        if self.config[MASKED_LM]:
            self._prepare_mask_lm_loss(self.text_name)

        self._prepare_label_classification_layers(
            predictor_attribute=self.text_name)