Example No. 1
  def test_metric_op_is_tensor(self):
    """Tests that ops.Operation is wrapped by a tensor for metric_ops."""
    with context.graph_mode():
      loss = {'my_loss': constant_op.constant([0])}
      predictions = {u'output1': constant_op.constant(['foo'])}
      mean, update_op = metrics_module.mean_tensor(constant_op.constant([0]))
      metrics = {
          'metrics_1': (mean, update_op),
          'metrics_2': (constant_op.constant([0]), control_flow_ops.no_op())
      }

      outputter = MockSupervisedOutput(loss, predictions, metrics)

      self.assertTrue(outputter.metrics['metrics_1/update_op'].name.startswith(
          'mean/update_op'))
      self.assertIsInstance(
          outputter.metrics['metrics_1/update_op'], ops.Tensor)
      self.assertIsInstance(outputter.metrics['metrics_1/value'], ops.Tensor)

      self.assertEqual(outputter.metrics['metrics_2/value'],
                       metrics['metrics_2'][0])
      self.assertTrue(outputter.metrics['metrics_2/update_op'].name.startswith(
          'metric_op_wrapper'))
      self.assertIsInstance(
          outputter.metrics['metrics_2/update_op'], ops.Tensor)
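Both this test and the ones below are methods of a test case for TensorFlow's export_output module; they depend on a handful of framework imports and on a MockSupervisedOutput helper defined elsewhere in the same test file. A minimal sketch of that scaffolding is shown here for context; the exact module paths (and whether the base class lives under saved_model.model_utils or the estimator package) vary across TensorFlow versions, so treat this as an assumption rather than the library's canonical layout.

from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import metrics as metrics_module
from tensorflow.python.ops import variables
from tensorflow.python.saved_model.model_utils import export_output as export_output_lib


class MockSupervisedOutput(export_output_lib._SupervisedOutput):
  """Assumed helper: a concrete subclass so the abstract base can be built."""

  def _get_signature_def_fn(self):
    # These tests never build a signature from the mock itself, so a no-op
    # placeholder is enough here.
    pass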
Example No. 2
  def test_supervised_outputs_valid(self):
    """Tests that no errors are raised when provided outputs are valid."""
    with context.graph_mode():
      loss = {'my_loss': constant_op.constant([0])}
      predictions = {u'output1': constant_op.constant(['foo'])}
      mean, update_op = metrics_module.mean_tensor(constant_op.constant([0]))
      metrics = {
          'metrics': (mean, update_op),
          'metrics2': (constant_op.constant([0]), constant_op.constant([10]))
      }

      outputter = MockSupervisedOutput(loss, predictions, metrics)
      self.assertEqual(outputter.loss['loss/my_loss'], loss['my_loss'])
      self.assertEqual(
          outputter.predictions['predictions/output1'], predictions['output1'])
      self.assertEqual(outputter.metrics['metrics/update_op'].name,
                       'mean/update_op:0')
      self.assertEqual(
          outputter.metrics['metrics2/update_op'], metrics['metrics2'][1])

      # Single Tensor is OK too
      outputter = MockSupervisedOutput(
          loss['my_loss'], predictions['output1'], metrics['metrics'])
      self.assertEqual(outputter.loss, {'loss': loss['my_loss']})
      self.assertEqual(
          outputter.predictions, {'predictions': predictions['output1']})
      self.assertEqual(outputter.metrics['metrics/update_op'].name,
                       'mean/update_op:0')
Example No. 3
    def test_train_signature_def(self):
        with context.graph_mode():
            loss = {'my_loss': constant_op.constant([0])}
            predictions = {u'output1': constant_op.constant(['foo'])}
            mean, update_op = metrics_module.mean_tensor(
                constant_op.constant([0]))
            metrics = {
                'metrics_1': (mean, update_op),
                'metrics_2':
                (constant_op.constant([0]), constant_op.constant([10]))
            }

            outputter = export_output_lib.TrainOutput(loss, predictions,
                                                      metrics)

            receiver = {
                u'features': constant_op.constant(100, shape=(100, 2)),
                'labels': constant_op.constant(100, shape=(100, 1))
            }
            sig_def = outputter.as_signature_def(receiver)

            self.assertIn('loss/my_loss', sig_def.outputs)
            self.assertIn('metrics_1/value', sig_def.outputs)
            self.assertIn('metrics_2/value', sig_def.outputs)
            self.assertIn('predictions/output1', sig_def.outputs)
            self.assertIn('features', sig_def.inputs)
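as_signature_def returns a SignatureDef protocol buffer: the receiver tensors become its inputs, while the loss, metric, and prediction tensors become its outputs. A small, purely illustrative way to inspect the result built in the test above (this loop is not part of the original test):

# Illustrative inspection of the SignatureDef built above.
for name, tensor_info in sorted(sig_def.outputs.items()):
  print(name, '->', tensor_info.name)
for name, tensor_info in sorted(sig_def.inputs.items()):
  print(name, '->', tensor_info.name)
# For TrainOutput this is expected to be the supervised training method name.
print(sig_def.method_name)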
Example No. 4
  def test_metric_op_is_tensor(self):
    """Tests that ops.Operation is wrapped by a tensor for metric_ops."""
    with context.graph_mode():
      loss = {'my_loss': constant_op.constant([0])}
      predictions = {u'output1': constant_op.constant(['foo'])}
      mean, update_op = metrics_module.mean_tensor(constant_op.constant([0]))
      metrics = {
          'metrics_1': (mean, update_op),
          'metrics_2': (constant_op.constant([0]), control_flow_ops.no_op()),
          # Keras metric's update_state() could return a Variable, rather than
          # an Operation or Tensor.
          'keras_1': (constant_op.constant([0.5]),
                      variables.Variable(1.0, name='AssignAddVariableOp_3'))
      }

      outputter = MockSupervisedOutput(loss, predictions, metrics)
      # If we get here, the constructor succeeded, which is sufficient for
      # testing the constructor.

      self.assertTrue(outputter.metrics['metrics_1/update_op'].name.startswith(
          'mean/update_op'))
      self.assertIsInstance(
          outputter.metrics['metrics_1/update_op'], ops.Tensor)
      self.assertIsInstance(outputter.metrics['metrics_1/value'], ops.Tensor)

      self.assertEqual(outputter.metrics['metrics_2/value'],
                       metrics['metrics_2'][0])
      self.assertTrue(outputter.metrics['metrics_2/update_op'].name.startswith(
          'metric_op_wrapper'))
      self.assertIsInstance(
          outputter.metrics['metrics_2/update_op'], ops.Tensor)
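The 'metric_op_wrapper' name asserted above comes from how the base class turns a non-Tensor update op into a Tensor. A rough, hypothetical sketch of the Operation case (the helper name is made up, and the handling of Variable update ops like keras_1 may differ):

def _wrap_metric_update_op(update_op):
  # Hypothetical helper: if the update op is an ops.Operation rather than a
  # Tensor, return a dummy Tensor that runs it via a control dependency.
  if isinstance(update_op, ops.Operation):
    with ops.control_dependencies([update_op]):
      return constant_op.constant([], name='metric_op_wrapper')
  return update_op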
Example No. 5
  def test_supervised_outputs_no_prepend(self):
    """Tests that no errors are raised when provided outputs are valid."""
    with context.graph_mode():
      loss = {'loss': constant_op.constant([0])}
      predictions = {u'predictions': constant_op.constant(['foo'])}
      mean, update_op = metrics_module.mean_tensor(constant_op.constant([0]))
      metrics = {
          'metrics_1': (mean, update_op),
          'metrics_2': (constant_op.constant([0]), constant_op.constant([10]))
      }

      outputter = MockSupervisedOutput(loss, predictions, metrics)
      self.assertEqual(set(outputter.loss.keys()), set(['loss']))
      self.assertEqual(set(outputter.predictions.keys()), set(['predictions']))
      self.assertEqual(
          set(outputter.metrics.keys()),
          set([
              'metrics_1/value', 'metrics_1/update_op', 'metrics_2/update_op',
              'metrics_2/value'
          ]))
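Taken together with Example No. 2, this shows the naming rule these tests imply: a loss or prediction key is prefixed with 'loss/' or 'predictions/' only when it is not already exactly that default name. A hypothetical sketch of that rule (the helper name is illustrative, not from the library):

def _prefix_output_key(key, prefix):
  # Hypothetical: 'my_loss' with prefix 'loss' becomes 'loss/my_loss', while a
  # key already named 'loss' is left untouched.
  return key if key == prefix else '%s/%s' % (prefix, key)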
Example No. 6
  def test_supervised_outputs_tuples(self):
    """Tests that no errors are raised when provided outputs are valid."""
    with context.graph_mode():
      loss = {('my', 'loss'): constant_op.constant([0])}
      predictions = {(u'output1', '2'): constant_op.constant(['foo'])}
      mean, update_op = metrics_module.mean_tensor(constant_op.constant([0]))
      metrics = {
          ('metrics', '1'): (mean, update_op),
          ('metrics', '2'): (constant_op.constant([0]),
                             constant_op.constant([10]))
      }

      outputter = MockSupervisedOutput(loss, predictions, metrics)
      self.assertEqual(set(outputter.loss.keys()), set(['loss/my/loss']))
      self.assertEqual(set(outputter.predictions.keys()),
                       set(['predictions/output1/2']))
      self.assertEqual(
          set(outputter.metrics.keys()),
          set([
              'metrics/1/value', 'metrics/1/update_op', 'metrics/2/value',
              'metrics/2/update_op'
          ]))
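The assertions above imply that tuple keys are flattened into single '/'-joined names before the usual prefixes and metric suffixes are applied. A hypothetical helper with that behavior (the name is illustrative, not the library's):

def _flatten_output_key(key):
  # Hypothetical: ('metrics', '1') becomes 'metrics/1'; plain string keys pass
  # through unchanged.
  if isinstance(key, tuple):
    return '/'.join(str(part) for part in key)
  return key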