Example 1
def build_graph(parameters):
    """Build the segment_sum op testing graph."""
    data = tf.compat.v1.placeholder(dtype=parameters["data_dtype"],
                                    name="data",
                                    shape=parameters["data_shape"])
    segment_ids = tf.constant(parameters["segment_ids"], dtype=tf.int32)
    out = tf.segment_sum(data, segment_ids)
    return [data], [out]
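As context for the graph above, a minimal standalone sketch of what the op computes (using the tf.math.segment_sum alias and TF2 eager execution; not part of the test harness): it sums consecutive rows of data that share a segment id, so the ids must be sorted.

    import tensorflow as tf

    data = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0])
    segment_ids = tf.constant([0, 0, 1, 1, 1], dtype=tf.int32)
    # Rows sharing a segment id are summed: [1+2, 3+4+5]
    print(tf.math.segment_sum(data, segment_ids).numpy())  # [ 3. 12.]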
Example 2
  def forward(self, self_vecs, neigh_vecs, segment_ids=None):
    """Calculates attention coefficients.

    The following code implements equations (1)-(4) in the paper, the
    **self-attention** step:

    \alpha_{ij}^{l} = \mathrm{softmax}_i(e_{ij}^{l})
    e_{ij}^{l} = \mathrm{LeakyReLU}(\vec{a}^T [W h_i \Vert W h_j])

    Args:
      self_vecs: Tensor, batch nodes' embedding vectors, shape [B, D].
      neigh_vecs: Tensor, the corresponding neighbor nodes' embedding
        vectors, shape [total_nbrs, D].
      segment_ids: Tensor, segment ids indicating which batch node each
        neighbor belongs to, shape [total_nbrs].

    Returns:
      the aggregated neighbor embeddings for each batch node.
    """

    self_vecs = self._fc(self_vecs)
    neigh_vecs = self._fc(neigh_vecs)
    if segment_ids is None:  # sampled GAT
      num_neighbors = neigh_vecs.shape[1]
      self_vecs_extend = tf.tile(tf.expand_dims(self_vecs, 1),
                                 [1, num_neighbors, 1])
      coefficients = tf.nn.softmax(tf.nn.leaky_relu(self._attn_fc(
          tf.concat([self_vecs_extend, neigh_vecs], axis=-1))))
      coefficients = tf.nn.dropout(coefficients, 1 - self._attn_drop)
      neigh_vecs = tf.multiply(coefficients, neigh_vecs)
      neigh_vecs = tf.reduce_sum(neigh_vecs, axis=1)
    else:  # full neighbor GAT
      self_vecs_extend = tf.gather(self_vecs, segment_ids)
      # Softmax over each node's variable-size neighbor set:
      # exponentiate, sum the exponentials per segment, and divide.
      coefficients = tf.math.exp(tf.nn.leaky_relu(self._attn_fc(
          tf.concat([self_vecs_extend, neigh_vecs], axis=-1))))
      seg_sum = tf.gather(tf.segment_sum(coefficients, segment_ids),
                          segment_ids)
      coefficients = coefficients / seg_sum
      coefficients = tf.nn.dropout(coefficients, 1 - self._attn_drop)
      neigh_vecs = tf.multiply(coefficients, neigh_vecs)
      neigh_vecs = tf.segment_sum(neigh_vecs, segment_ids)

    if self._activation is not None:
      neigh_vecs = self._activation(neigh_vecs)

    return neigh_vecs
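The exp / segment_sum / gather sequence in the full-neighbor branch is a softmax over variable-size neighbor sets. A minimal standalone sketch of the same trick (the helper name segment_softmax and the per-segment max subtraction for numerical stability are additions for illustration, not part of the example above):

    import tensorflow as tf

    def segment_softmax(logits, segment_ids):
        # Subtract the per-segment max before exponentiating (a standard
        # stabilization the snippet above omits), then normalize by the
        # per-segment sum, broadcast back to each row via tf.gather.
        seg_max = tf.gather(tf.math.segment_max(logits, segment_ids),
                            segment_ids)
        exp = tf.math.exp(logits - seg_max)
        seg_sum = tf.gather(tf.math.segment_sum(exp, segment_ids),
                            segment_ids)
        return exp / seg_sum

    logits = tf.constant([1.0, 2.0, 3.0, 0.0])
    segment_ids = tf.constant([0, 0, 0, 1], dtype=tf.int32)
    # Segment 0's three weights sum to 1; segment 1 has a single
    # neighbor, so its weight is exactly 1.
    print(segment_softmax(logits, segment_ids).numpy())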
Example 3
def _random_segmentation(num_items, num_segments):
    """Partition a sequence of items randomly into non-empty segments.

    Args:
      num_items: an integer scalar > 0
      num_segments: an integer scalar in [1, num_items]

    Returns:
      a Tensor with shape [num_segments] containing positive integers that
      add up to num_items
    """
    # Randomly scatter (num_segments - 1) boundary markers among the
    # num_items - 1 interior positions, then prepend a 0 for the first
    # item. to_int casts the boolean mask to integers (helper defined
    # elsewhere in the module).
    first_in_segment = tf.pad(
        tf.random.shuffle(
            to_int(tf.range(num_items - 1) < num_segments - 1), seed=123),
        [[1, 0]])
    # The cumulative sum turns boundary markers into segment ids, and
    # segment_sum of ones counts the items in each segment.
    segment_id = tf.cumsum(first_in_segment)
    segment_length = tf.segment_sum(tf.ones_like(segment_id), segment_id)
    return segment_length
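A hand-worked trace of the boundary-marker trick above (the mask values are fixed by hand here rather than produced by the shuffle):

    import tensorflow as tf

    # 5 items, 3 segments: a 1 marks the first item of a new segment,
    # with the leading 0 that tf.pad would prepend for item 0.
    first_in_segment = tf.constant([0, 1, 0, 0, 1])
    segment_id = tf.cumsum(first_in_segment)   # [0, 1, 1, 1, 2]
    segment_length = tf.math.segment_sum(
        tf.ones_like(segment_id), segment_id)
    print(segment_length.numpy())              # [1 3 1], sums to 5 items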
Example 4
    def forward(self, self_vecs, neigh_vecs, segment_ids=None):
        """ Update node's embedding based on its neighbors.

    Args:
      self_vecs: batch nodes' embeddings with shape [B, D]
      neigh_vecs: neighbor nodes' embeddings with shape [total_nbrs, D]
      segment_ids: segment ids that indicates neighbor nodes' belonging,
      shape [total_nbrs]

    Returns:
      updated batch nodes' embedding vector [B, H]
    """
        if segment_ids is None:  # sampled GCN
            neigh_vecs = tf.reduce_sum(neigh_vecs, axis=1)
        else:  # full neighbor GCN
            neigh_vecs = tf.segment_sum(data=neigh_vecs,
                                        segment_ids=segment_ids)
        # tf.reduce_sum over the stacked pair is an element-wise
        # self + aggregated-neighbor sum.
        updated_vecs = tf.reduce_sum([self_vecs, neigh_vecs], axis=0)
        return self._fc(updated_vecs)
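To make the full-neighbor branch concrete, a toy run with invented shapes (three neighbors belonging to batch node 0 and one to batch node 1; not taken from the example's data):

    import tensorflow as tf

    neigh_vecs = tf.constant([[1., 1.], [2., 2.],
                              [3., 3.], [4., 4.]])            # [total_nbrs, D]
    segment_ids = tf.constant([0, 0, 0, 1], dtype=tf.int32)   # [total_nbrs]
    # Each batch node receives the sum of its own neighbors' vectors:
    # node 0 -> [6., 6.], node 1 -> [4., 4.]
    print(tf.math.segment_sum(neigh_vecs, segment_ids).numpy())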