Example #1
  def testIndexedSlicesGradientInCondInWhileLoop(self):
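    """Checks IndexedSlices gradients through a tf.cond inside a while loop.

    Assumed context (not shown in the snippet): TF 1.x, a `tf.test.TestCase`
    subclass providing `self.test_session()`, `import tensorflow as tf`, and
    the internal modules `ops` (tensorflow.python.framework.ops),
    `embedding_ops`, and `control_flow_ops` (tensorflow.python.ops).
    """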
    with ops.Graph().as_default():
      embedding_matrix = tf.get_variable(
          "embedding_matrix", [5, 5],
          initializer=tf.random_normal_initializer())

      def Cond(it, _):
        return it < 5
      def Body(it, cost):
        embedding = embedding_ops.embedding_lookup(embedding_matrix, [0])
        cost = tf.cond(tf.equal(it, 3),
                       lambda: tf.square(cost),
                       lambda: cost + tf.reduce_sum(embedding))
        return it + 1, cost
      _, cost = control_flow_ops.while_loop(
          Cond, Body, [tf.constant(0), tf.constant(0.0)])

      dynamic_grads = tf.gradients(cost, [embedding_matrix])[0]
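      # The gradient w.r.t. the embedding matrix is an IndexedSlices;
      # segment_sum folds its repeated row indices into dense per-row totals.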
      dynamic_grads = tf.segment_sum(dynamic_grads.values,
                                     dynamic_grads.indices)

      embedding = embedding_ops.embedding_lookup(embedding_matrix, [0])
      static = tf.square(
          tf.reduce_sum(embedding) +
          tf.reduce_sum(embedding) +
          tf.reduce_sum(embedding)) + tf.reduce_sum(embedding)
      static_grads = tf.gradients(static, [embedding_matrix])[0]
      static_grads = tf.segment_sum(static_grads.values, static_grads.indices)

      with self.test_session() as sess:
        sess.run(tf.global_variables_initializer())
        self.assertAllEqual(*sess.run([static_grads, dynamic_grads]))
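For reference, the loop above unrolls to the `static` expression it is
compared against: iterations 0-2 each add reduce_sum(embedding), iteration 3
squares the accumulated cost, and iteration 4 adds one more term. A minimal
pure-Python trace of that schedule (no TensorFlow; `s` and the value 2.0 are
stand-ins for tf.reduce_sum(embedding)):

  def body(it, cost, s):
      cost = cost ** 2 if it == 3 else cost + s
      return it + 1, cost

  it, cost, s = 0, 0.0, 2.0
  while it < 5:
      it, cost = body(it, cost, s)
  assert cost == (s + s + s) ** 2 + s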
Example #2
def _apply_activation_with_summaries(x, activation_fn):
  """Returns activation_fn(x).

  This applies the given activation and adds useful summaries specific to the
  activation.

  Args:
    x: The tensor to apply activation to.
    activation_fn: An activation function.
  Returns:
    A tensor with activation applied to x.
  """
  if activation_fn is None:
    return x
  y = activation_fn(x)
  if activation_fn in (nn.relu, nn.softplus, nn.relu6):
    # Using x for comparison to avoid floating point equality and/or epsilons.
    _add_scalar_summary(
        standard_ops.reduce_mean(standard_ops.to_float(standard_ops.less(
            x, 0.0))), '%s/zeros' % y.op.name)
  if activation_fn is nn.relu6:
    _add_scalar_summary(
        standard_ops.reduce_mean(standard_ops.to_float(standard_ops.greater(
            x, 6.0))), '%s/sixes' % y.op.name)
  if activation_fn is nn.l2_normalize:
    _add_scalar_summary(
        standard_ops.reduce_mean(standard_ops.sqrt(standard_ops.reduce_sum(
            standard_ops.square(x), 1))), '%s/length' % y.op.name)
  _add_histogram_summary(y, '%s/activations' % y.op.name)
  return y
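The `_add_scalar_summary` and `_add_histogram_summary` helpers are not shown
above. A minimal sketch of what they might look like in TF 1.x (an assumption
for illustration, assuming `import tensorflow as tf`, not the library's actual
implementation):

  def _add_scalar_summary(tensor, tag):
    # Assumed helper: record `tensor` as a scalar summary named `tag`.
    return tf.summary.scalar(tag, tensor)

  def _add_histogram_summary(tensor, tag):
    # Assumed helper: record the distribution of `tensor` under `tag`.
    return tf.summary.histogram(tag, tensor)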
Example #3
def _apply_activation_with_summaries(x, activation_fn):
    """Returns activation_fn(x).

    This applies the given activation and adds useful summaries specific
    to the activation.

    Args:
      x: The tensor to apply activation to.
      activation_fn: An activation function.
    Returns:
      A tensor with activation applied to x.
    """
    if activation_fn is None:
        return x
    y = activation_fn(x)
    if activation_fn in (nn.relu, nn.softplus, nn.relu6):
        # Using x for comparison to avoid floating point equality and/or
        # epsilons.
        _add_scalar_summary(
            standard_ops.reduce_mean(
                standard_ops.to_float(standard_ops.less(x, 0.0))),
            '%s/zeros' % y.op.name)
    if activation_fn is nn.relu6:
        _add_scalar_summary(
            standard_ops.reduce_mean(
                standard_ops.to_float(standard_ops.greater(x, 6.0))),
            '%s/sixes' % y.op.name)
    if activation_fn is nn.l2_normalize:
        _add_scalar_summary(
            standard_ops.reduce_mean(
                standard_ops.sqrt(
                    standard_ops.reduce_sum(standard_ops.square(x), 1))),
            '%s/length' % y.op.name)
    _add_histogram_summary(y, '%s/activations' % y.op.name)
    return y
Example #4
 def _apply_variational_kernel(self, inputs):
     if (not isinstance(self.kernel_posterior, independent_lib.Independent)
             or not isinstance(self.kernel_posterior.distribution,
                               normal_lib.Normal)):
         raise TypeError(
             "`DenseLocalReparameterization` requires "
             "`kernel_posterior_fn` produce an instance of "
             "`tf.distributions.Independent(tf.distributions.Normal)` "
             "(saw: \"{}\").".format(self.kernel_posterior.name))
     self.kernel_posterior_affine = normal_lib.Normal(
         loc=self._matmul(inputs, self.kernel_posterior.distribution.loc),
         scale=standard_ops.sqrt(
             self._matmul(
                 standard_ops.square(inputs),
                 standard_ops.square(
                     self.kernel_posterior.distribution.scale))))
     self.kernel_posterior_affine_tensor = (self.kernel_posterior_tensor_fn(
         self.kernel_posterior_affine))
     self.kernel_posterior_tensor = None
     return self.kernel_posterior_affine_tensor
Example #5
 def _apply_variational_kernel(self, inputs):
   if not self.kernel_use_local_reparameterization:
     self.kernel.posterior_tensor = self.kernel.posterior_tensor_fn(
         self.kernel.posterior)
     self.kernel.posterior_affine = None
     self.kernel.posterior_affine_tensor = None
     return self._matmul(inputs, self.kernel.posterior_tensor)
   if not isinstance(self.kernel.posterior, normal_lib.Normal):
     raise TypeError("`kernel_use_local_reparameterization=True` requires "
                     "`kernel_posterior_fn` produce an instance of "
                     "`tf.distributions.Normal` (saw: \"{}\").".format(
                         type(self.kernel.posterior).__name__))
   self.kernel.posterior_affine = normal_lib.Normal(
       loc=self._matmul(inputs, self.kernel.posterior.loc),
       scale=standard_ops.sqrt(self._matmul(
           standard_ops.square(inputs),
           standard_ops.square(self.kernel.posterior.scale))))
   self.kernel.posterior_affine_tensor = (
       self.kernel.posterior_tensor_fn(self.kernel.posterior_affine))
   self.kernel.posterior_tensor = None
   return self.kernel.posterior_affine_tensor
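The affine construction above uses the fact that for an elementwise
W ~ Normal(mu, sigma), the product x @ W is Normal with mean x @ mu and
variance (x ** 2) @ (sigma ** 2). A quick NumPy simulation checking that
identity (illustrative only; none of these names come from the code above):

  import numpy as np

  rng = np.random.default_rng(0)
  x = rng.normal(size=(1, 3))
  mu = rng.normal(size=(3, 2))
  sigma = rng.uniform(0.5, 1.5, size=(3, 2))

  # Monte Carlo estimate of the mean/variance of x @ W.
  draws = np.stack(
      [x @ (mu + sigma * rng.normal(size=mu.shape)) for _ in range(100000)])
  print(draws.mean(axis=0))  # close to x @ mu
  print(draws.var(axis=0))   # close to (x ** 2) @ (sigma ** 2)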
Example #6
 def _apply_variational_kernel(self, inputs):
     if not self.kernel_use_local_reparameterization:
         self.kernel.posterior_tensor = self.kernel.posterior_tensor_fn(
             self.kernel.posterior)
         self.kernel.posterior_affine = None
         self.kernel.posterior_affine_tensor = None
         return self._matmul(inputs, self.kernel.posterior_tensor)
     if not isinstance(self.kernel.posterior, normal_lib.Normal):
         raise TypeError(
             "`kernel_use_local_reparameterization=True` requires "
             "`kernel_posterior_fn` produce an instance of "
             "`tf.distributions.Normal` (saw: \"{}\").".format(
                 type(self.kernel.posterior).__name__))
     self.kernel.posterior_affine = normal_lib.Normal(
         loc=self._matmul(inputs, self.kernel.posterior.loc),
         scale=standard_ops.sqrt(
             self._matmul(standard_ops.square(inputs),
                          standard_ops.square(
                              self.kernel.posterior.scale))))
     self.kernel.posterior_affine_tensor = (self.kernel.posterior_tensor_fn(
         self.kernel.posterior_affine))
     self.kernel.posterior_tensor = None
     return self.kernel.posterior_affine_tensor
Example #7
 def Body(it, cost):
   embedding = embedding_ops.embedding_lookup(embedding_matrix, [0])
   cost = tf.cond(tf.equal(it, 3),
                  lambda: tf.square(cost),
                  lambda: cost + tf.reduce_sum(embedding))
   return it + 1, cost