def testForward(self):
  with self.session(), test_util.force_cpu():
    for logits_dtype in [np.float16, np.float32, np.float64]:
      for labels_dtype in [np.int32, np.int64]:
        for trial in range(5):
          seed = 123 + trial
          labels, logits = self._generateInputs(
              labels_dtype, logits_dtype, seed=seed)
          result_a = nn_ops.sparse_softmax_cross_entropy_with_logits_v2(
              labels=labels, logits=logits)
          result_b = nn_ops.sparse_softmax_cross_entropy_with_logits_v2(
              labels=labels, logits=logits)
          # Two runs on identical inputs must produce bitwise-identical
          # losses for the forward pass to count as deterministic.
          self.assertAllEqual(result_a, result_b)
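# `testForward` above relies on a `_generateInputs` helper that is not shown
# in this listing. A minimal sketch of what such a helper might look like;
# the batch/class sizes and the body are assumptions, not the original:
def _generateInputs(self, labels_dtype, logits_dtype, seed):
  batch_size, classes_count = 32, 10  # hypothetical sizes
  np.random.seed(seed)
  labels = np.random.randint(
      classes_count, size=batch_size).astype(labels_dtype)
  logits = np.random.randn(batch_size, classes_count).astype(logits_dtype)
  return labels, logits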
def xent_grad(logits):
    with backprop_lib.GradientTape() as tape:
        tape.watch(logits)
        return tape.gradient(
            nn_ops.sparse_softmax_cross_entropy_with_logits_v2(
                labels=labels, logits=logits, name="xent"),
            [logits])[0]
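# A closure like `xent_grad` is typically handed to
# gradient_checker_v2.compute_gradient so that the *second* derivative of the
# loss gets checked (differentiating the gradient function). Roughly, as a
# sketch with `logits` assumed defined in the enclosing test:
#   analytical, numerical = gradient_checker_v2.compute_gradient(
#       xent_grad, [logits])
#   self.assertAllClose(analytical, numerical, rtol=1e-4, atol=1e-4)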
def _testLabelsPlaceholderScalar(self, expected_error_message):
    with ops_lib.Graph().as_default(), self.session():
        labels = array_ops.placeholder(np.int32)
        y = nn_ops.sparse_softmax_cross_entropy_with_logits_v2(
            labels=labels, logits=[[7.]])
        with self.assertRaisesOpError(expected_error_message):
            y.eval(feed_dict={labels: 0})
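# A caller would exercise the helper above with whatever message the kernel
# raises for a scalar label, e.g. (the exact message is an assumption based
# on the op's shape requirements):
#   self._testLabelsPlaceholderScalar("labels must be 1-D")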
def _testHighDim(self, labels, logits):
    np_loss, np_gradient = self._npXent(labels=np.array(labels),
                                        logits=np.array(logits))
    # manually reshape loss
    np_loss = np.reshape(np_loss, np.array(labels).shape)
    tf_loss = nn_ops.sparse_softmax_cross_entropy_with_logits_v2(
        labels=labels, logits=logits)
    with backprop_lib.GradientTape() as tape:
        logits = constant_op.constant(logits)
        tape.watch(logits)
        tf_gradient = tape.gradient(
            nn_ops.sparse_softmax_cross_entropy_with_logits_v2(
                labels=labels, logits=logits), [logits])[0]
        tf_gradient = array_ops.reshape(tf_gradient, np_gradient.shape)

    self.assertAllCloseAccordingToType(np_loss, tf_loss)
    self.assertAllCloseAccordingToType(np_gradient, tf_gradient)
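# `_testHighDim` compares against a NumPy reference helper `_npXent` that is
# not shown in this listing. A plausible sketch, assuming the standard
# softmax cross-entropy math (this body is an illustration, not the original
# helper):
def _npXent(self, labels, logits):
    logits_2d = np.reshape(logits, (-1, logits.shape[-1]))
    labels_flat = np.reshape(labels, (-1,))
    # Numerically stable softmax: subtract the row-wise max before exp.
    shifted = logits_2d - np.amax(logits_2d, axis=1, keepdims=True)
    probs = np.exp(shifted) / np.sum(np.exp(shifted), axis=1, keepdims=True)
    rows = np.arange(labels_flat.shape[0])
    loss = -np.log(probs[rows, labels_flat])
    # d(loss)/d(logits) = softmax(logits) - one_hot(labels)
    gradient = probs.copy()
    gradient[rows, labels_flat] -= 1.0
    return loss, gradient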
def _opFwdBwd(self, labels, logits):
    """Runs the op-under-test both forwards and backwards."""
    logits = ops_lib.convert_to_tensor(logits)  # needed for the gradient tape
    with backprop_lib.GradientTape() as tape:
        tape.watch(logits)
        loss = nn_ops.sparse_softmax_cross_entropy_with_logits_v2(
            labels=labels, logits=logits)
    return loss, tape.gradient(loss, logits)
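# Usage sketch (the values are illustrative): the helper returns the
# per-example loss together with d(loss)/d(logits):
#   loss, grad = self._opFwdBwd(labels=[0, 2],
#                               logits=[[1., 0., 0.], [0., 0., 1.]])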
def gradients(seed=789):
  np.random.seed(seed)
  upstream_gradients = self._randomFloats(output_shape, logits_dtype)
  with backprop.GradientTape(persistent=True) as tape:
    tape.watch(logits)
    op_output = nn_ops.sparse_softmax_cross_entropy_with_logits_v2(
        labels=labels, logits=logits)
    # Multiplying by random upstream gradients exercises the backward pass
    # with a nontrivial incoming gradient ("gradient injection").
    gradient_injector_output = op_output * upstream_gradients
  return tape.gradient(gradient_injector_output, logits)
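# `gradients` above closes over `labels`, `logits`, `logits_dtype`, and
# `output_shape` from the enclosing test, and assumes a `_randomFloats`
# helper on the test class. A minimal sketch of such a helper (an
# assumption, not the original):
def _randomFloats(self, shape, dtype):
  return constant_op.constant(np.random.random_sample(shape).astype(dtype))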
def _testInvalidLabelCPU(self, expected_regex="Received a label value of"):
    # Labels 4 and -1 are out of range for num_classes == 4, so the CPU
    # kernel must reject them.
    labels = [4, 3, 0, -1]
    logits = [[1., 1., 1., 1.], [1., 1., 1., 1.], [1., 2., 3., 4.],
              [1., 2., 3., 4.]]
    with self.assertRaisesRegex(
            (errors_impl.InvalidArgumentError, errors_impl.UnknownError),
            expected_regex):
        self.evaluate(
            nn_ops.sparse_softmax_cross_entropy_with_logits_v2(
                labels=labels, logits=logits))
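# Note: only the CPU kernel raises on out-of-range labels; the GPU kernel
# has historically produced NaN losses for the affected rows instead, which
# is why this helper is CPU-specific.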
def testExceptionThrowing(self):
  # Assumes op determinism has been enabled for the suite (e.g. via
  # tf.config.experimental.enable_op_determinism() in setUp), so the GPU
  # kernel must refuse to run rather than return nondeterministic results.
  with self.session(force_gpu=True):
    for logits_dtype in [dtypes.float16, dtypes.float32]:
      for labels_dtype in [dtypes.int32, dtypes.int64]:
        labels = constant_op.constant([1, 0], dtype=labels_dtype)
        logits = constant_op.constant([[0.3, 0.5], [0.2, 0.6]],
                                      dtype=logits_dtype)
        with self.assertRaisesRegex(
            errors_impl.UnimplementedError,
            "Deterministic GPU implementation of " +
            "SparseSoftmaxXentWithLogitsOp not available."):
          result = nn_ops.sparse_softmax_cross_entropy_with_logits_v2(
              labels=labels, logits=logits)
          self.evaluate(result)
def _testScalarHandling(self, expected_regex):
    with ops_lib.Graph().as_default(), self.session(use_gpu=False) as sess:
        with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
                                    expected_regex):
            labels = array_ops.placeholder(dtypes.int32, shape=[None, 1])
            logits = array_ops.placeholder(dtypes.float32, shape=[None, 3])
            ce = nn_ops.sparse_softmax_cross_entropy_with_logits_v2(
                labels=array_ops.squeeze(labels), logits=logits)
            labels_v2 = np.zeros((1, 1), dtype=np.int32)
            logits_v2 = np.random.randn(1, 3)
            # squeeze() drops every size-1 dimension, so the (1, 1) labels
            # feed collapses to a scalar at run time and the op must reject
            # it.
            sess.run([ce],
                     feed_dict={
                         labels: labels_v2,
                         logits: logits_v2
                     })
def xent(logits):
    # gradient_checker_v2.compute_gradient doesn't accept int32/int64
    # inputs, and labels must be int32/int64, so close over the labels here
    # instead of passing them as an argument.
    return nn_ops.sparse_softmax_cross_entropy_with_logits_v2(
        labels=labels, logits=logits, name="xent")
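# Typical first-order gradient check using the closure above (a sketch;
# `logits` is assumed to be defined in the enclosing test):
#   analytical, numerical = gradient_checker_v2.compute_gradient(
#       xent, [logits])
#   self.assertAllClose(analytical, numerical, rtol=1e-4, atol=1e-4)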
def testVector(self):
    # With a single class the softmax probability is exactly 1, so the
    # cross-entropy loss is -log(1) = 0.
    loss = nn_ops.sparse_softmax_cross_entropy_with_logits_v2(
        labels=constant_op.constant(0), logits=constant_op.constant([1.0]))
    self.assertAllClose(0.0, loss)
def testScalar(self):
    with self.assertRaisesRegex(ValueError, "`logits` cannot be a scalar"):
        nn_ops.sparse_softmax_cross_entropy_with_logits_v2(
            labels=constant_op.constant(0),
            logits=constant_op.constant(1.0))
def testShapeMismatch(self):
    # labels has rank 2, but it must have rank logits.rank - 1 == 1.
    with self.assertRaisesRegex(
            ValueError,
            "`labels.shape.rank` must equal `logits.shape.rank - 1`"):
        nn_ops.sparse_softmax_cross_entropy_with_logits_v2(
            labels=[[0, 2]], logits=[[0., 1.], [2., 3.], [2., 3.]])