Code Example #1
    def __init__(self, data_hparams, hparams=None):
        ModelBase.__init__(self, hparams)

        # Merge the user-provided data hyperparameters with the defaults
        # of PairedTextData.
        self._data_hparams = HParams(data_hparams,
                                     PairedTextData.default_hparams())

        # Sub-modules are not built here; they are created later from the
        # model hyperparameters.
        self._src_vocab = None
        self._tgt_vocab = None
        self._src_embedder = None
        self._tgt_embedder = None
        self._connector = None
        self._encoder = None
        self._decoder = None
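The constructor above only stores configuration: it merges the user-supplied data_hparams with PairedTextData's defaults and leaves every sub-module unset. A minimal sketch of that merge in isolation, assuming a texar-tf installation; the dataset keys and file paths below are illustrative placeholders, not values taken from the excerpt:

import texar as tx

# Illustrative user settings; any field left out keeps the value from
# PairedTextData.default_hparams().
data_hparams = {
    "source_dataset": {"files": "data/train.src", "vocab_file": "data/vocab.src"},
    "target_dataset": {"files": "data/train.tgt", "vocab_file": "data/vocab.tgt"},
}

# The same merge the constructor performs: user values override defaults.
merged = tx.HParams(data_hparams, tx.data.PairedTextData.default_hparams())
print(merged.source_dataset.files)  # "data/train.src"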
Code Example #2
    def default_hparams():
        """Returns a dictionary of hyperparameters with default values.
        """
        hparams = ModelBase.default_hparams()
        hparams.update({
            "name": "seq2seq",
            "source_embedder": "WordEmbedder",
            "source_embedder_hparams": {},
            "target_embedder": "WordEmbedder",
            "target_embedder_hparams": {},
            "embedder_share": True,
            "embedder_hparams_share": True,
            "encoder": "UnidirectionalRNNEncoder",
            "encoder_hparams": {},
            "decoder": "BasicRNNDecoder",
            "decoder_hparams": {},
            "decoding_strategy_train": "train_greedy",
            "decoding_strategy_infer": "infer_greedy",
            "beam_search_width": 0,
            "connector": "MLPTransformConnector",
            "connector_hparams": {},
            "optimization": {}
        })
        return hparams
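In typical use these defaults are not edited directly; a small dict of overrides is merged on top of them, in the same way the constructor in Code Example #1 handles data_hparams. A minimal sketch, where default_hparams() stands for the function above (assumed to be in scope, e.g. called on the model class) and the override values are illustrative only:

import texar as tx

# Override only the fields of interest; everything else keeps the
# default value listed above.
overrides = {
    "encoder": "BidirectionalRNNEncoder",
    "beam_search_width": 5,    # > 1 enables beam search at inference time
    "embedder_share": False,   # build separate source/target embedders
}
hparams = tx.HParams(overrides, default_hparams())
print(hparams.beam_search_width)  # 5
print(hparams.decoder)            # "BasicRNNDecoder" (unchanged)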
Code Example #3
    def default_hparams():
        """Returns a dictionary of hyperparameters with default values.

        .. code-block:: python

            {
                "source_embedder": "WordEmbedder",
                "source_embedder_hparams": {},
                "target_embedder": "WordEmbedder",
                "target_embedder_hparams": {},
                "embedder_share": True,
                "embedder_hparams_share": True,
                "encoder": "UnidirectionalRNNEncoder",
                "encoder_hparams": {},
                "decoder": "BasicRNNDecoder",
                "decoder_hparams": {},
                "decoding_strategy_train": "train_greedy",
                "decoding_strategy_infer": "infer_greedy",
                "beam_search_width": 0,
                "connector": "MLPTransformConnector",
                "connector_hparams": {},
                "optimization": {},
                "name": "seq2seq",
            }

        Here:

        "source_embedder" : str or class or instance
            Word embedder for source text. Can be a class, its name or module
            path, or a class instance.

        "source_embedder_hparams" : dict
            Hyperparameters for constructing the source embedder. See, e.g.,
            :meth:`~texar.modules.WordEmbedder.default_hparams` for the
            hyperparameters of :class:`~texar.modules.WordEmbedder`. Ignored
            if "source_embedder" is an instance.

        "target_embedder", "target_embedder_hparams" :
            Same as "source_embedder" and "source_embedder_hparams" but for
            target text embedder.

        "embedder_share" : bool
            Whether to share the source and target embedder. If `True`,
            source embedder will be used to embed target text.

        "embedder_hparams_share" : bool
            Whether to share the embedder configurations. If `True`,
            target embedder will be created with "source_embedder_hparams".
            But the two embedders have different set of trainable variables.

        "encoder", "encoder_hparams" :
            Same as "source_embedder" and "source_embedder_hparams" but for
            encoder.

        "decoder", "decoder_hparams" :
            Same as "source_embedder" and "source_embedder_hparams" but for
            decoder.

        "decoding_strategy_train" : str
            The decoding strategy in training mode. See
            :meth:`~texar.modules.RNNDecoderBase._build` for details.

        "decoding_strategy_infer" : str
            The decoding strategy in eval/inference mode.

        "beam_search_width" : int
            Beam width. If > 1, beam search is used in eval/inference mode.

        "connector", "connector_hparams" :
            The connector class and hyperparameters. A connector transforms
            an encoder final state to a decoder initial state.

        "optimization" : dict
            Hyperparameters of optimizating the model. See
            :func:`~texar.core.default_optimization_hparams` for details.

        "name" : str
            Name of the model.
        """
        hparams = ModelBase.default_hparams()
        hparams.update({
            "name": "seq2seq",
            "source_embedder": "WordEmbedder",
            "source_embedder_hparams": {},
            "target_embedder": "WordEmbedder",
            "target_embedder_hparams": {},
            "embedder_share": True,
            "embedder_hparams_share": True,
            "encoder": "UnidirectionalRNNEncoder",
            "encoder_hparams": {},
            "decoder": "BasicRNNDecoder",
            "decoder_hparams": {},
            "decoding_strategy_train": "train_greedy",
            "decoding_strategy_infer": "infer_greedy",
            "beam_search_width": 0,
            "connector": "MLPTransformConnector",
            "connector_hparams": {},
            "optimization": {}
        })
        return hparams
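As the docstring notes, fields such as "source_embedder" accept a class name, the class itself, or an instance. A minimal sketch of the first two variants, assuming texar-tf's WordEmbedder; the embedding dimension is an illustrative value:

import texar as tx

# Variant 1: name the embedder class; the library resolves the string.
hparams_by_name = {
    "source_embedder": "WordEmbedder",
    "source_embedder_hparams": {"dim": 256},
}

# Variant 2: pass the class object itself.
hparams_by_class = {
    "source_embedder": tx.modules.WordEmbedder,
    "source_embedder_hparams": {"dim": 256},
}

Either form is merged with the defaults above in the same way; per the docstring, the accompanying _hparams dict is ignored when a ready-made instance is passed instead.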