Example #1
  def testMultipleOfNoneGradRaisesError(self):
    gradient = constant_op.constant(self._grad_vec, dtype=dtypes.float32)
    variable = variables_lib.Variable(array_ops.zeros_like(gradient))
    grad_to_var = (None, variable)
    gradient_multipliers = {variable: self._multiplier}
    with self.assertRaises(ValueError):
      learning.multiply_gradients(grad_to_var, gradient_multipliers)
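All of these tests read fixture attributes (self._grad_vec, self._multiplier, self._multiplied_grad_vec) that the snippets do not show. A minimal setUp sketch that would provide them, assuming NumPy is imported as np; the concrete values are illustrative, not necessarily the ones used by the original test class:

  def setUp(self):
    # Illustrative fixture values: a small gradient vector, a scalar
    # multiplier, and the expected scaled result.
    self._grad_vec = np.array([1.0, 2.0, 3.0], dtype=np.float32)
    self._multiplier = 3.7
    self._multiplied_grad_vec = self._grad_vec * self._multiplier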
Example #2
    def transform_grads_fn(grads):
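        # Scale gradients of the variables listed in gradient_multipliers.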
        if gradient_multipliers:
            with tf.name_scope('multiply_grads'):
                grads = multiply_gradients(grads, gradient_multipliers)

        # Clip gradients.
        if clip_gradient_norm > 0:
            with tf.name_scope('clip_grads'):
                grads = clip_gradient_norms(grads, clip_gradient_norm)
        return grads
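Example #2 only defines the transform. A short sketch of how such a transform_grads_fn could be wired into a plain TF 1.x training step, assuming graph mode and that gradient_multipliers and clip_gradient_norm are in scope as above; the weights variable and loss here are illustrative:

weights = tf.Variable([1.0, 2.0, 3.0], name='weights')
total_loss = tf.reduce_sum(tf.square(weights))
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)

# Compute raw gradients, rescale/clip them, then apply the result.
grads_and_vars = optimizer.compute_gradients(total_loss, var_list=[weights])
grads_and_vars = transform_grads_fn(grads_and_vars)
train_op = optimizer.apply_gradients(grads_and_vars)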
Example #3
  def testMultipleGradientsWithVariables(self):
    gradient = constant_op.constant(self._grad_vec, dtype=dtypes.float32)
    variable = variables_lib.Variable(array_ops.zeros_like(gradient))
    grad_to_var = (gradient, variable)
    gradient_multipliers = {variable: self._multiplier}

    [grad_to_var] = learning.multiply_gradients([grad_to_var],
                                                gradient_multipliers)

    # Ensure the variable passed through.
    self.assertEqual(grad_to_var[1], variable)

    with self.test_session() as sess:
      actual_gradient = sess.run(grad_to_var[0])
    np_testing.assert_almost_equal(actual_gradient, self._multiplied_grad_vec,
                                   5)
Example #4
  def testTensorMultiplierOfGradient(self):
    gradient = constant_op.constant(self._grad_vec, dtype=dtypes.float32)
    variable = variables_lib.Variable(array_ops.zeros_like(gradient))
    multiplier_flag = variables_lib.Variable(True)
    tensor_multiplier = array_ops.where(multiplier_flag, self._multiplier, 1.0)
    grad_to_var = (gradient, variable)
    gradient_multipliers = {variable: tensor_multiplier}

    [grad_to_var] = learning.multiply_gradients([grad_to_var],
                                                gradient_multipliers)

    with self.test_session() as sess:
      sess.run(variables_lib.global_variables_initializer())
      gradient_true_flag = sess.run(grad_to_var[0])
      sess.run(multiplier_flag.assign(False))
      gradient_false_flag = sess.run(grad_to_var[0])
    np_testing.assert_almost_equal(gradient_true_flag,
                                   self._multiplied_grad_vec, 5)
    np_testing.assert_almost_equal(gradient_false_flag, self._grad_vec, 5)
Example #5
  def testIndexedSlicesGradIsMultiplied(self):
    values = constant_op.constant(self._grad_vec, dtype=dtypes.float32)
    indices = constant_op.constant([0, 1, 2], dtype=dtypes.int32)
    dense_shape = constant_op.constant(
        [self._grad_vec.size], dtype=dtypes.int32)

    gradient = ops.IndexedSlices(values, indices, dense_shape)
    variable = variables_lib.Variable(array_ops.zeros((1, 3)))
    grad_to_var = (gradient, variable)
    gradient_multipliers = {variable: self._multiplier}

    [grad_to_var] = learning.multiply_gradients([grad_to_var],
                                                gradient_multipliers)

    # Ensure the built IndexedSlices has the right form.
    self.assertEqual(grad_to_var[1], variable)
    self.assertEqual(grad_to_var[0].indices, indices)
    self.assertEqual(grad_to_var[0].dense_shape, dense_shape)

    with self.test_session() as sess:
      actual_gradient = sess.run(grad_to_var[0].values)
    np_testing.assert_almost_equal(actual_gradient, self._multiplied_grad_vec,
                                   5)
Example #6
  def testNonDictMultiplierRaisesError(self):
    gradient = constant_op.constant(self._grad_vec, dtype=dtypes.float32)
    variable = variables_lib.Variable(array_ops.zeros_like(gradient))
    grad_to_var = (gradient, variable)
    with self.assertRaises(ValueError):
      learning.multiply_gradients([grad_to_var], 3)
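Taken together, the examples pin down the interface of multiply_gradients: it takes a list of (gradient, variable) pairs plus a dict mapping variables to scalar or tensor multipliers, scales both dense and IndexedSlices gradients, and raises ValueError for a None gradient or a non-dict multiplier argument. A minimal standalone usage sketch, assuming TF 1.x with contrib available; the import path follows the one these tests use and may differ by version, and the variable, loss, and 0.1 multiplier are illustrative:

import tensorflow as tf
from tensorflow.contrib.slim.python.slim import learning

weights = tf.Variable([1.0, 2.0, 3.0], name='weights')
loss = tf.reduce_sum(tf.square(weights))
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)

grads_and_vars = optimizer.compute_gradients(loss, var_list=[weights])
# Scale the gradient of `weights` by 0.1 before it is applied.
grads_and_vars = learning.multiply_gradients(grads_and_vars, {weights: 0.1})
train_op = optimizer.apply_gradients(grads_and_vars)

with tf.Session() as sess:
  sess.run(tf.global_variables_initializer())
  sess.run(train_op)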