Пример #1
0
  def testNoneGradPassesThroughCorrectly(self):
    """A (None, variable) pair must come out of clipping unchanged.

    Variables without gradients are represented by a None gradient;
    clip_gradient_norms must pass such pairs through untouched.
    """
    gradient = None
    variable = variables_lib.Variable(self._zero_vec, dtype=dtypes.float32)

    gradients_to_variables = (gradient, variable)
    [gradients_to_variables] = learning.clip_gradient_norms(
        [gradients_to_variables], self._max_norm)

    # assertIsNone is the idiomatic identity check for None and produces
    # a clearer failure message than assertEqual(..., None).
    self.assertIsNone(gradients_to_variables[0])
    self.assertEqual(gradients_to_variables[1], variable)
Пример #2
0
    def transform_grads_fn(grads):
        """Optionally scale, then norm-clip, (gradient, variable) pairs.

        Reads `gradient_multipliers` and `clip_gradient_norm` from the
        enclosing scope; returns the transformed pairs.
        """
        transformed = grads
        # Scaling happens first so clipping sees the multiplied gradients.
        if gradient_multipliers:
            with tf.name_scope('multiply_grads'):
                transformed = multiply_gradients(transformed,
                                                 gradient_multipliers)
        # Clip gradients.
        if clip_gradient_norm > 0:
            with tf.name_scope('clip_grads'):
                transformed = clip_gradient_norms(transformed,
                                                  clip_gradient_norm)
        return transformed
Пример #3
0
 def transform_grads_fn(grads):
     """Clip gradients by norm or by value, whichever params configures.

     Raises ValueError if both clipping modes are enabled at once.
     """
     # Hoist the repeated attribute lookups into locals for readability.
     norm_limit = params.clip_gradient_norm
     value_limit = params.clip_gradient_value
     if norm_limit > 0 and value_limit > 0:
         raise ValueError("Only one of clip_gradient_norm or clip_gradient_value should be set")
     if norm_limit > 0:
         with tf.name_scope('clip_grads'):
             grads = clip_gradient_norms(grads, norm_limit)
     if value_limit > 0:
         with tf.name_scope('clip_grads'):
             # Symmetric clipping: values forced into [-limit, +limit].
             grads = clip_gradient_values(grads, -value_limit, value_limit)
     return grads
Пример #4
0
  def testOrdinaryGradIsClippedCorrectly(self):
    """A dense gradient comes back clipped; its variable passes through."""
    grad = constant_op.constant(self._grad_vec, dtype=dtypes.float32)
    var = variables_lib.Variable(self._zero_vec, dtype=dtypes.float32)
    clipped = learning.clip_gradient_norms([(grad, var)], self._max_norm)[0]

    # Ensure the variable passed through.
    self.assertEqual(clipped[1], var)

    with self.test_session() as sess:
      actual_gradient = sess.run(clipped[0])
    np_testing.assert_almost_equal(actual_gradient, self._clipped_grad_vec)
Пример #5
0
  def testIndexedSlicesGradIsClippedCorrectly(self):
    """Clipping an IndexedSlices grad clips values but keeps its structure."""
    slice_indices = np.array([0, 1, 4])
    slice_shape = [self._grad_vec.size]

    values = constant_op.constant(self._grad_vec, dtype=dtypes.float32)
    indices = constant_op.constant(slice_indices, dtype=dtypes.int32)
    dense_shape = constant_op.constant(slice_shape, dtype=dtypes.int32)

    grad = ops.IndexedSlices(values, indices, dense_shape)
    var = variables_lib.Variable(self._zero_vec, dtype=dtypes.float32)

    clipped_grad, passed_var = learning.clip_gradient_norms(
        [(grad, var)], self._max_norm)[0]

    # Ensure the built IndexedSlice has the right form: the variable,
    # indices, and dense_shape must be the same objects that went in.
    self.assertEqual(passed_var, var)
    self.assertEqual(clipped_grad.indices, indices)
    self.assertEqual(clipped_grad.dense_shape, dense_shape)

    with session.Session() as sess:
      actual_gradient = sess.run(clipped_grad.values)
    np_testing.assert_almost_equal(actual_gradient, self._clipped_grad_vec)