Example #1
0
    def testNCELoss(self):
        """Numerically check nn_impl.nce_loss against a NumPy reference.

        Uses fixed sampled values so the loss is deterministic, and runs
        the check twice: once with dense weight/bias tables and once with
        the tables split into shards.
        """

        def _sigmoid_xent(logits, targets):
            # NumPy reference sigmoid cross-entropy; probabilities are
            # clipped away from 0/1 so the logs stay finite.
            assert logits.shape == targets.shape
            prob = 1. / (1. + np.exp(-logits))
            eps = 0.0001
            prob = np.minimum(np.maximum(prob, eps), 1 - eps)
            return -targets * np.log(prob) - (1. - targets) * np.log(1. - prob)

        np.random.seed(0)
        num_classes = 5
        batch_size = 3
        labels = [0, 1, 2]
        (weights, biases, hidden_acts, sampled_vals, exp_logits,
         exp_labels) = self._GenerateTestData(num_classes=num_classes,
                                              dim=10,
                                              batch_size=batch_size,
                                              num_true=1,
                                              labels=labels,
                                              sampled=[1, 0, 2, 3],
                                              subtract_log_q=True)
        exp_nce_loss = np.sum(_sigmoid_xent(exp_logits, exp_labels), 1)

        with self.test_session():
            # Arguments shared by the dense and the sharded invocation.
            common = dict(
                labels=constant_op.constant(labels, shape=(batch_size, 1)),
                inputs=constant_op.constant(hidden_acts),
                num_sampled=4,
                num_classes=num_classes,
                num_true=1,
                sampled_values=sampled_vals,
                partition_strategy="div")

            dense_loss = nn_impl.nce_loss(
                weights=constant_op.constant(weights),
                biases=constant_op.constant(biases),
                **common)
            self.assertAllClose(exp_nce_loss, dense_loss.eval(), 1e-4)

            # Repeat with weights and biases split across shards.
            weight_shards, bias_shards = self._ShardTestEmbeddings(
                weights, biases, num_shards=3)
            sharded_loss = nn_impl.nce_loss(
                weights=[constant_op.constant(w) for w in weight_shards],
                biases=[constant_op.constant(b) for b in bias_shards],
                **common)
            self.assertAllClose(exp_nce_loss, sharded_loss.eval(), 1e-4)
Example #2
0
  def testNCELoss(self):
    """Check nce_loss numerics against a hand-rolled NumPy computation."""

    def _ReferenceSigmoidXent(logits, targets):
      # Elementwise sigmoid cross-entropy; clip probabilities so the
      # logarithms remain finite.
      assert logits.shape == targets.shape
      eps = 0.0001
      p = np.clip(1. / (1. + np.exp(-logits)), eps, 1 - eps)
      return -targets * np.log(p) - (1. - targets) * np.log(1. - p)

    np.random.seed(0)
    num_classes = 5
    batch_size = 3
    labels = [0, 1, 2]
    (weights, biases, hidden_acts, sampled_vals, exp_logits,
     exp_labels) = self._GenerateTestData(
         num_classes=num_classes,
         dim=10,
         batch_size=batch_size,
         num_true=1,
         labels=labels,
         sampled=[1, 0, 2, 3],
         subtract_log_q=True)
    expected = np.sum(_ReferenceSigmoidXent(exp_logits, exp_labels), axis=1)

    with self.test_session():
      # Build the shared input tensors once; both invocations reuse them.
      labels_t = constant_op.constant(labels, shape=(batch_size, 1))
      inputs_t = constant_op.constant(hidden_acts)

      dense_loss = nn_impl.nce_loss(
          weights=constant_op.constant(weights),
          biases=constant_op.constant(biases),
          labels=labels_t,
          inputs=inputs_t,
          num_sampled=4,
          num_classes=num_classes,
          num_true=1,
          sampled_values=sampled_vals,
          partition_strategy="div")
      self.assertAllClose(expected, dense_loss.eval(), 1e-4)

      # Same computation, but with the embedding tables sharded.
      weight_shards, bias_shards = self._ShardTestEmbeddings(
          weights, biases, num_shards=3)
      sharded_loss = nn_impl.nce_loss(
          weights=[constant_op.constant(s) for s in weight_shards],
          biases=[constant_op.constant(s) for s in bias_shards],
          labels=labels_t,
          inputs=inputs_t,
          num_sampled=4,
          num_classes=num_classes,
          num_true=1,
          sampled_values=sampled_vals,
          partition_strategy="div")
      self.assertAllClose(expected, sharded_loss.eval(), 1e-4)
Example #3
0
    def testNCELoss(self):
        """Verify nn_impl.nce_loss numerics against a NumPy reference.

        The sampled values are supplied explicitly (so the loss is
        deterministic) and the TF result is compared against the same
        computation done in NumPy, with dense and with sharded weights.
        """

        def _SigmoidCrossEntropyWithLogits(logits, targets):
            # logits, targets: float arrays of the same shape.
            assert logits.shape == targets.shape
            pred = 1. / (1. + np.exp(-logits))
            eps = 0.0001
            # Clip probabilities away from 0/1 so the logs stay finite.
            pred = np.minimum(np.maximum(pred, eps), 1 - eps)
            return -targets * np.log(pred) - (1. - targets) * np.log(1. - pred)

        weights, biases, hidden_acts, sharded_weights = self._GenerateTestInputs(
        )
        labels = [0, 1, 2]
        true_w, true_b = weights[labels], biases[labels]
        sampled = [1, 0, 2, 3]
        num_sampled = len(sampled)
        # Expected counts are fixed at 0.5 so subtract_log_q is deterministic.
        true_exp = np.empty([self._batch_size, 1], dtype=np.float32)
        true_exp.fill(0.5)
        sampled_exp = np.empty([num_sampled], dtype=np.float32)
        sampled_exp.fill(0.5)
        sampled_w, sampled_b = weights[sampled], biases[sampled]
        test_sampled_vals = (sampled, true_exp, sampled_exp)

        with self.test_session():
            logits_np, labels_np = self._ComputeSampledLogitsNP(
                true_w,
                true_b,
                sampled_w,
                sampled_b,
                hidden_acts,
                true_expected=true_exp,
                sampled_expected=sampled_exp)
            nce_loss_np = np.sum(
                _SigmoidCrossEntropyWithLogits(logits_np, labels_np), 1)

            labels_tf = constant_op.constant(labels,
                                             shape=(self._batch_size, 1))
            weights_tf = constant_op.constant(weights)
            biases_tf = constant_op.constant(biases)
            inputs_tf = constant_op.constant(hidden_acts)

            # FIX: pass num_sampled consistent with the four explicitly
            # provided sampled values instead of the misleading hard-coded 1.
            # nce_loss ignores num_sampled when sampled_values is supplied,
            # so the numerics are unchanged.
            nce_loss_tf = nn_impl.nce_loss(weights_tf,
                                           biases_tf,
                                           labels_tf,
                                           inputs_tf,
                                           num_sampled=num_sampled,
                                           num_classes=self._num_classes,
                                           num_true=1,
                                           sampled_values=test_sampled_vals)

            self.assertAllClose(nce_loss_np, nce_loss_tf.eval(), 1e-4)

            # Test with sharded weights
            nce_loss_tf = nn_impl.nce_loss(
                [constant_op.constant(shard) for shard in sharded_weights],
                biases_tf,
                labels_tf,
                inputs_tf,
                num_sampled=num_sampled,
                num_classes=self._num_classes,
                num_true=1,
                sampled_values=test_sampled_vals)

            self.assertAllClose(nce_loss_np, nce_loss_tf.eval(), 1e-4)
Example #4
0
  def testNCELoss(self):
    """Compare nce_loss with a NumPy reference, dense and sharded."""

    def _xent_np(logits, targets):
      # NumPy sigmoid cross-entropy; clip to keep log() finite.
      assert logits.shape == targets.shape
      eps = 0.0001
      prob = np.clip(1. / (1. + np.exp(-logits)), eps, 1 - eps)
      return -targets * np.log(prob) - (1. - targets) * np.log(1. - prob)

    (weights, biases, hidden_acts, sharded_weights,
     sharded_biases) = self._GenerateTestInputs()
    labels = [0, 1, 2]
    true_w, true_b = weights[labels], biases[labels]
    sampled = [1, 0, 2, 3]
    num_sampled = len(sampled)
    # Fixed expected counts make the subtract_log_q path deterministic.
    true_exp = np.full([self._batch_size, 1], 0.5, dtype=np.float32)
    sampled_exp = np.full([num_sampled], 0.5, dtype=np.float32)
    sampled_w, sampled_b = weights[sampled], biases[sampled]
    test_sampled_vals = (sampled, true_exp, sampled_exp)

    with self.test_session():
      logits_np, labels_np = self._ComputeSampledLogitsNP(
          true_w,
          true_b,
          sampled_w,
          sampled_b,
          hidden_acts,
          true_expected=true_exp,
          sampled_expected=sampled_exp)
      expected_loss = np.sum(_xent_np(logits_np, labels_np), axis=1)

      labels_tf = constant_op.constant(labels, shape=(self._batch_size, 1))
      inputs_tf = constant_op.constant(hidden_acts)

      # Run the identical check for the dense and the sharded tables.
      for w, b in ((constant_op.constant(weights),
                    constant_op.constant(biases)),
                   (sharded_weights, sharded_biases)):
        got_loss = nn_impl.nce_loss(
            w,
            b,
            labels_tf,
            inputs_tf,
            num_sampled=num_sampled,
            num_classes=self._num_classes,
            num_true=1,
            sampled_values=test_sampled_vals,
            partition_strategy="div")
        self.assertAllClose(expected_loss, got_loss.eval(), 1e-4)