Code example #1
0
    def _build(self, ids):
        """Looks up an embedding vector for each value in `ids`.

        All ids must be within [0, vocab_size), else an
        `InvalidArgumentError` is raised at runtime.

        Args:
          ids: Tensor of dtype int64.

        Returns:
          Tensor of tf.shape(ids) + [embedding_dim] and dtype float32.
        """
        if self._existing_vocab is not None:
            # Bootstrap the embedding table from the supplied vocabulary
            # tensor; its shape determines the variable's shape.
            self._embeddings = tf.get_variable(
                "embeddings",
                dtype=tf.float32,
                initializer=self._existing_vocab,
                regularizer=self._regularizers.get(self.EMBEDDINGS, None),
                trainable=self._trainable)
        else:
            # Lazily register a default initializer, keeping any the caller
            # already configured.
            if self.EMBEDDINGS not in self._initializers:
                self._initializers[self.EMBEDDINGS] = (
                    basic.create_linear_initializer(self._vocab_size))
            self._embeddings = tf.get_variable(
                "embeddings",
                shape=[self._vocab_size, self._embed_dim],
                dtype=tf.float32,
                initializer=self._initializers[self.EMBEDDINGS],
                partitioner=self._partitioners.get(self.EMBEDDINGS, None),
                regularizer=self._regularizers.get(self.EMBEDDINGS, None),
                trainable=self._trainable)

        if self._densify_gradients:
            # On the backwards pass, convert the gradient from indexed-slices
            # to a regular tensor before sending it back to the parameter
            # server; this avoids excess computation there. In eager mode the
            # conversion is not needed — add an eager check once supported.
            lookup_table = util.convert_gradient_to_tensor(self._embeddings)
        else:
            lookup_table = self._embeddings

        # Gather one embedding row per id.
        return tf.nn.embedding_lookup(lookup_table, ids,
                                      name="embedding_lookup")
Code example #2
0
  def _create_gate_variables(self, input_shape, dtype):
    """Initialize the variables used for the gates."""
    rank = len(input_shape)
    if rank != 2:
      raise ValueError(
          "Rank of shape must be {} not: {}".format(2, rank))
    input_size = input_shape.dims[1].value

    # All four gates are stacked along the output dimension.
    gate_width = 4 * self._hidden_size

    # Default initializer scaled as if hidden and input were concatenated.
    default_init = basic.create_linear_initializer(
        self._hidden_size + input_size)

    def _gate_variable(name, key, shape):
      # Create one gate variable, honoring any caller-supplied
      # initializer/partitioner/regularizer registered under `key`.
      return tf.get_variable(
          name,
          shape=shape,
          dtype=dtype,
          initializer=self._initializers.get(key, default_init),
          partitioner=self._partitioners.get(key),
          regularizer=self._regularizers.get(key))

    if self._use_batch_norm_h or self._use_batch_norm_x:
      # Separate hidden and input weights so each can be normalized.
      self._w_h = _gate_variable(
          LSTM.W_GATES + "_H", LSTM.W_GATES,
          [self._hidden_size, gate_width])
      self._w_x = _gate_variable(
          LSTM.W_GATES + "_X", LSTM.W_GATES,
          [input_size, gate_width])
    else:
      # Single fused weight over the concatenated [h, x] input.
      self._w_xh = _gate_variable(
          LSTM.W_GATES, LSTM.W_GATES,
          [self._hidden_size + input_size, gate_width])
    self._b = _gate_variable(LSTM.B_GATES, LSTM.B_GATES, [gate_width])
Code example #3
0
    def _build(self, ids):
        """Looks up an embedding vector for each value in `ids`.

        All ids must be within [0, vocab_size), else an
        `InvalidArgumentError` is raised at runtime.

        Args:
          ids: Tensor of dtype int64.

        Returns:
          Tensor of tf.shape(ids) + [embedding_dim] and dtype float32.
        """
        if self._existing_vocab is not None:
            # Seed the table from the pre-existing vocabulary tensor; its
            # shape determines the variable's shape.
            self._embeddings = tf.get_variable(
                "embeddings",
                dtype=tf.float32,
                initializer=self._existing_vocab,
                regularizer=self._regularizers.get(self.EMBEDDINGS, None),
                trainable=self._trainable,
            )
        else:
            # Register a default initializer only if the caller did not
            # supply one.
            if self.EMBEDDINGS not in self._initializers:
                self._initializers[self.EMBEDDINGS] = (
                    basic.create_linear_initializer(self._vocab_size))
            self._embeddings = tf.get_variable(
                "embeddings",
                shape=[self._vocab_size, self._embed_dim],
                dtype=tf.float32,
                initializer=self._initializers[self.EMBEDDINGS],
                partitioner=self._partitioners.get(self.EMBEDDINGS, None),
                regularizer=self._regularizers.get(self.EMBEDDINGS, None),
                trainable=self._trainable,
            )

        # Gather one embedding row per id.
        return tf.nn.embedding_lookup(
            self._embeddings, ids, name="embedding_lookup")
Code example #4
0
File: embed.py  Project: TianjiPang/sonnet
  def _build(self, ids):
    """Looks up an embedding vector for each value in `ids`.

    All ids must be within [0, vocab_size), else an
    `InvalidArgumentError` is raised at runtime.

    Args:
      ids: Tensor of dtype int64.

    Returns:
      Tensor of tf.shape(ids) + [embedding_dim] and dtype float32.
    """
    if self._existing_vocab is not None:
      # Seed the table from the pre-existing vocabulary tensor; its shape
      # determines the variable's shape.
      self._embeddings = tf.get_variable(
          "embeddings",
          dtype=tf.float32,
          initializer=self._existing_vocab,
          regularizer=self._regularizers.get(self.EMBEDDINGS, None),
          trainable=self._trainable)
    else:
      # Register a default initializer only if the caller did not supply one.
      if self.EMBEDDINGS not in self._initializers:
        self._initializers[self.EMBEDDINGS] = (
            basic.create_linear_initializer(self._vocab_size))
      self._embeddings = tf.get_variable(
          "embeddings",
          shape=[self._vocab_size, self._embed_dim],
          dtype=tf.float32,
          initializer=self._initializers[self.EMBEDDINGS],
          partitioner=self._partitioners.get(self.EMBEDDINGS, None),
          regularizer=self._regularizers.get(self.EMBEDDINGS, None),
          trainable=self._trainable)

    # Gather one embedding row per id.
    return tf.nn.embedding_lookup(
        self._embeddings, ids, name="embedding_lookup")