Example #1
    def test_get_embedding(self):
        """Tests :func:`~texar.modules.embedder.embedder_utils.get_embedding`.
        """
        vocab_size = 100
        emb = embedder_utils.get_embedding(num_embeds=vocab_size)
        self.assertEqual(emb.shape[0], vocab_size)
        self.assertEqual(emb.shape[1],
                         embedder_utils.default_embedding_hparams()["dim"])

        hparams = {
            "initializer": {
                "type": "torch.nn.init.uniform_",
                "kwargs": {
                    "a": -0.1,
                    "b": 0.1
                }
            }
        }
        emb = embedder_utils.get_embedding(
            hparams=hparams,
            num_embeds=vocab_size,
        )
        self.assertEqual(emb.shape[0], vocab_size)
        self.assertEqual(emb.shape[1],
                         embedder_utils.default_embedding_hparams()["dim"])
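For reference, the "initializer" hparams in the second call above amount to the following plain-PyTorch construction (a minimal sketch; make_uniform_embedding is a hypothetical stand-in for embedder_utils.get_embedding, and dim=100 assumes texar's default embedding dimension):

    import torch

    def make_uniform_embedding(num_embeds, dim=100):
        # Allocate a [num_embeds, dim] table and fill it uniformly
        # in [-0.1, 0.1], mirroring the hparams above.
        emb = torch.empty(num_embeds, dim)
        torch.nn.init.uniform_(emb, a=-0.1, b=0.1)
        return emb

    emb = make_uniform_embedding(100)
    assert emb.shape == (100, 100)
    assert emb.min() >= -0.1 and emb.max() <= 0.1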
Example #2
    def __init__(self, embedding=None, vocab_size=None, hparams=None):
        ModuleBase.__init__(self, hparams)
        self._vocab_size = vocab_size
        self._embedding = None
        self.sampling_method = self._hparams.sampling_method
        with tf.variable_scope(self.variable_scope):
            if self._hparams.initializer:
                tf.get_variable_scope().set_initializer(
                    layers.get_initializer(self._hparams.initializer))
            if self._hparams.position_embedder.name == 'sinusoids':
                self.position_embedder = (
                    position_embedders.SinusoidsSegmentalPositionEmbedder(
                        self._hparams.position_embedder.hparams))

        if self._hparams.use_embedding:
            if embedding is None and vocab_size is None:
                raise ValueError("If 'embedding' is not provided, "
                                 "'vocab_size' must be specified.")
            if isinstance(embedding, (tf.Tensor, tf.Variable)):
                self._embedding = embedding
            else:
                self._embedding = embedder_utils.get_embedding(
                    self._hparams.embedding,
                    embedding,
                    vocab_size,
                    variable_scope=self.variable_scope)
                self._embed_dim = shape_list(self._embedding)[-1]
                if self._hparams.zero_pad:
                    self._embedding = tf.concat(
                        (tf.zeros(shape=[1, self._embed_dim]),
                         self._embedding[1:, :]), 0)
            if self._vocab_size is None:
                self._vocab_size = self._embedding.get_shape().as_list()[0]
        self.output_layer = self.build_output_layer(
            shape_list(self._embedding)[-1])
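The zero_pad branch above is the usual padding-row trick: row 0 (the padding id) is replaced by zeros so padded positions embed to the zero vector. A minimal self-contained sketch of the same operation (the 5x4 table is made up for illustration):

    import tensorflow as tf

    emb = tf.random.normal([5, 4])
    emb = tf.concat((tf.zeros([1, 4]), emb[1:, :]), axis=0)
    assert tf.reduce_all(emb[0] == 0)  # padding row now embeds to zeros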
Example #3
    def _init_parameterized_embedding(self, init_value, num_embeds, hparams):
        self._embedding = embedder_utils.get_embedding(
            hparams, init_value, num_embeds, self.variable_scope)
        if hparams.trainable:
            self._add_trainable_variable(self._embedding)

        self._num_embeds = self._embedding.get_shape().as_list()[0]

        self._dim = self._embedding.get_shape().as_list()[1:]
        self._dim_rank = len(self._dim)
        if self._dim_rank == 1:
            self._dim = self._dim[0]
Example #4
    def __init__(self, num_embeds=None, init_value=None, hparams=None):
        ModuleBase.__init__(self, hparams)

        if num_embeds is not None or init_value is not None:
            self._embedding = Parameter(
                embedder_utils.get_embedding(num_embeds, init_value, hparams))

            self._num_embeds = self._embedding.shape[0]

            self._dim = self._embedding.shape[1:]
            self._dim_rank = len(self._dim)
            if self._dim_rank == 1:
                self._dim = self._dim[0]
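The _dim/_dim_rank bookkeeping in both constructors above collapses a rank-1 dimension to a scalar and keeps higher ranks as-is; a small sketch of that logic on plain tensors (shapes made up for illustration):

    import torch

    table_2d = torch.empty(100, 16)        # the usual [vocab, dim] table
    dim = table_2d.shape[1:]
    assert len(dim) == 1 and dim[0] == 16  # rank 1 -> stored as the scalar 16

    table_3d = torch.empty(100, 4, 8)      # e.g. a factorized embedding
    dim = table_3d.shape[1:]
    assert len(dim) == 2                   # rank 2 -> kept as a size tuple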
Example #5
    def test_get_embedding(self):
        """Tests :func:`~texar.modules.embedder.embedder_utils.get_embedding`.
        """
        vocab_size = 100
        emb = embedder_utils.get_embedding(num_embeds=vocab_size)
        self.assertEqual(emb.shape[0].value, vocab_size)
        self.assertEqual(emb.shape[1].value,
                         embedder_utils.default_embedding_hparams()["dim"])

        hparams = {
            "initializer": {
                "type": tf.random_uniform_initializer(minval=-0.1, maxval=0.1)
            },
            "regularizer": {
                "type": tf.keras.regularizers.L1L2(0.1, 0.1)
            }
        }
        emb = embedder_utils.get_embedding(hparams=hparams,
                                           num_embeds=vocab_size,
                                           variable_scope='embedding_2')
        self.assertEqual(emb.shape[0].value, vocab_size)
        self.assertEqual(emb.shape[1].value,
                         embedder_utils.default_embedding_hparams()["dim"])
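Spelled out in plain TensorFlow, the initializer/regularizer hparams above do roughly the following (a sketch; texar's variable_scope bookkeeping is omitted, and the shape assumes the default dim of 100):

    import tensorflow as tf

    init = tf.random_uniform_initializer(minval=-0.1, maxval=0.1)
    reg = tf.keras.regularizers.L1L2(0.1, 0.1)
    emb = tf.Variable(init(shape=(100, 100)), name="embedding_2")
    penalty = reg(emb)  # L1+L2 term texar adds to the regularization losses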
Example #6
    def __init__(self, num_embeds: Optional[int] = None,
                 init_value: Optional[torch.Tensor] = None, hparams=None):
        super().__init__(hparams)

        if num_embeds is not None or init_value is not None:
            self._embedding = nn.Parameter(embedder_utils.get_embedding(
                num_embeds, init_value, hparams))

            self._num_embeds = self._embedding.size(0)

            self._dim_rank = self._embedding.dim() - 1
            if self._dim_rank == 1:
                self._dim = self._embedding.size(1)
            else:
                self._dim = self._embedding.size()[1:]  # type: ignore
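How the resulting parameter is consumed is left to the embedder subclass; a minimal lookup sketch with made-up shapes (an assumption, not the module's actual forward pass):

    import torch
    import torch.nn.functional as F

    emb = torch.nn.Parameter(torch.randn(100, 16))  # [num_embeds, dim]
    ids = torch.tensor([[3, 7, 0], [1, 1, 2]])      # batch of id sequences
    out = F.embedding(ids, emb)                     # gather rows -> [2, 3, 16]
    assert out.shape == (2, 3, 16)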