Example #1
0
  def test_from_scratch_validate_summary(self):
    """Trains a fresh DNNClassifier; verifies optimizer calls, per-step
    summaries, and the resulting checkpoint."""
    layer_sizes = (2, 2)
    optimizer_mock = dnn_testing_utils.mock_optimizer(
        self, hidden_units=layer_sizes)
    classifier = dnn.DNNClassifier(
        hidden_units=layer_sizes,
        feature_columns=(feature_column.numeric_column('age'),),
        optimizer=optimizer_mock,
        model_dir=self._model_dir)
    # The optimizer must not run before training starts.
    self.assertEqual(0, optimizer_mock.minimize.call_count)

    # Train a few steps, then check optimizer usage, checkpoint contents,
    # and recorded summaries.
    step_count = 5
    hook = _SummaryHook()
    classifier.train(
        input_fn=lambda: ({'age': [[10.]]}, [[1]]),
        steps=step_count,
        hooks=(hook,))
    # One train() call should trigger exactly one minimize() call.
    self.assertEqual(1, optimizer_mock.minimize.call_count)
    _assert_checkpoint(
        self, step_count, input_units=1, hidden_units=layer_sizes,
        output_units=1, model_dir=self._model_dir)
    # Each step must emit a summary carrying both loss metrics.
    recorded = hook.summaries()
    self.assertEqual(step_count, len(recorded))
    for proto in recorded:
      tags = [value.tag for value in proto.value]
      self.assertIn(metric_keys.MetricKeys.LOSS, tags)
      self.assertIn(metric_keys.MetricKeys.LOSS_MEAN, tags)
Example #2
0
    def test_from_scratch(self):
        """Trains a new DNNRegressor; verifies optimizer calls, per-step
        summaries, and the saved checkpoint."""
        layer_sizes = (2, 2)
        optimizer_mock = dnn_testing_utils.mock_optimizer(
            self, hidden_units=layer_sizes)
        regressor = dnn.DNNRegressor(
            hidden_units=layer_sizes,
            feature_columns=(feature_column.numeric_column('age'), ),
            optimizer=optimizer_mock,
            model_dir=self._model_dir)
        # Nothing should have been minimized before train() runs.
        self.assertEqual(0, optimizer_mock.minimize.call_count)

        # Train a few steps, then validate optimizer usage, checkpoint,
        # and recorded summaries.
        step_count = 5
        hook = _SummaryHook()
        regressor.train(
            input_fn=lambda: ({'age': ((1, ), )}, ((5., ), )),
            steps=step_count,
            hooks=(hook, ))
        # A single train() call implies exactly one minimize() call.
        self.assertEqual(1, optimizer_mock.minimize.call_count)
        _assert_checkpoint(
            self,
            step_count,
            input_units=1,
            hidden_units=layer_sizes,
            output_units=1,
            model_dir=self._model_dir)
        # Every step should record a summary with both loss metrics.
        recorded = hook.summaries()
        self.assertEqual(step_count, len(recorded))
        for proto in recorded:
            tags = [value.tag for value in proto.value]
            self.assertIn(metric_keys.MetricKeys.LOSS, tags)
            self.assertIn(metric_keys.MetricKeys.LOSS_MEAN, tags)
Example #3
0
    def test_multi_dim(self):
        """Asserts train loss for multi-dimensional input and logits."""
        initial_step = 100
        layer_sizes = (2, 2)
        # Seed the model directory with fixed weights/biases so the loss
        # below is deterministic.
        dnn_testing_utils.create_checkpoint((
            ([[.6, .5], [-.6, -.5]], [.1, -.1]),
            ([[1., .8], [-.8, -1.]], [.2, -.2]),
            ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
        ), initial_step, self._model_dir)
        input_dimension = 2
        label_dimension = 3

        # Uses identical numbers as
        # DNNModelFnTest.test_multi_dim_input_multi_dim_logits.
        # See that test for calculation of logits.
        # logits = [[-0.48, 0.48, 0.39]]
        # loss = (1+0.48)^2 + (-1-0.48)^2 + (0.5-0.39)^2 = 4.3929
        expected_loss = 4.3929
        optimizer_mock = dnn_testing_utils.mock_optimizer(
            self, hidden_units=layer_sizes, expected_loss=expected_loss)
        regressor = dnn.DNNRegressor(
            hidden_units=layer_sizes,
            feature_columns=[
                feature_column.numeric_column('age', shape=[input_dimension])
            ],
            label_dimension=label_dimension,
            optimizer=optimizer_mock,
            model_dir=self._model_dir)
        # The optimizer must be untouched before training.
        self.assertEqual(0, optimizer_mock.minimize.call_count)

        # Train a few steps, then validate optimizer usage, summaries,
        # and the checkpoint.
        step_count = 5
        hook = _SummaryHook()
        regressor.train(
            input_fn=lambda: ({'age': [[10., 8.]]}, [[1., -1., 0.5]]),
            steps=step_count,
            hooks=(hook, ))
        self.assertEqual(1, optimizer_mock.minimize.call_count)
        # Every step's summary must report the expected losses and the
        # fraction-of-zero activation statistics.
        recorded = hook.summaries()
        self.assertEqual(step_count, len(recorded))
        for proto in recorded:
            _assert_simple_summary(
                self, {
                    metric_keys.MetricKeys.LOSS_MEAN:
                    expected_loss / label_dimension,
                    'dnn/dnn/hiddenlayer_0_fraction_of_zero_values': 0.,
                    'dnn/dnn/hiddenlayer_1_fraction_of_zero_values': 0.5,
                    'dnn/dnn/logits_fraction_of_zero_values': 0.,
                    metric_keys.MetricKeys.LOSS: expected_loss,
                }, proto)
        _assert_checkpoint(
            self,
            initial_step + step_count,
            input_units=input_dimension,
            hidden_units=layer_sizes,
            output_units=label_dimension,
            model_dir=self._model_dir)
Example #4
0
  def test_multi_dim(self):
    """Asserts train loss for multi-dimensional input and logits."""
    initial_step = 100
    layer_sizes = (2, 2)
    # Pre-seed the model dir with fixed weights so the loss is known.
    dnn_testing_utils.create_checkpoint(
        (([[.6, .5], [-.6, -.5]], [.1, -.1]),
         ([[1., .8], [-.8, -1.]], [.2, -.2]),
         ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),),
        initial_step, self._model_dir)
    input_dimension = 2
    label_dimension = 3

    # Uses identical numbers as
    # DNNModelFnTest.test_multi_dim_input_multi_dim_logits.
    # See that test for calculation of logits.
    # logits = [[-0.48, 0.48, 0.39]]
    # loss = (1+0.48)^2 + (-1-0.48)^2 + (0.5-0.39)^2 = 4.3929
    expected_loss = 4.3929
    optimizer_mock = dnn_testing_utils.mock_optimizer(
        self, hidden_units=layer_sizes, expected_loss=expected_loss)
    regressor = dnn.DNNRegressor(
        hidden_units=layer_sizes,
        feature_columns=[
            feature_column.numeric_column('age', shape=[input_dimension])
        ],
        label_dimension=label_dimension,
        optimizer=optimizer_mock,
        model_dir=self._model_dir)
    # The optimizer should be untouched before training.
    self.assertEqual(0, optimizer_mock.minimize.call_count)

    # Train a few steps, then validate optimizer usage, summaries, and
    # the checkpoint state.
    step_count = 5
    hook = _SummaryHook()
    regressor.train(
        input_fn=lambda: ({'age': [[10., 8.]]}, [[1., -1., 0.5]]),
        steps=step_count,
        hooks=(hook,))
    self.assertEqual(1, optimizer_mock.minimize.call_count)
    # Every per-step summary carries the expected loss values and
    # activation-sparsity fractions.
    recorded = hook.summaries()
    self.assertEqual(step_count, len(recorded))
    for proto in recorded:
      _assert_simple_summary(
          self,
          {
              metric_keys.MetricKeys.LOSS_MEAN: expected_loss / label_dimension,
              'dnn/dnn/hiddenlayer_0_fraction_of_zero_values': 0.,
              'dnn/dnn/hiddenlayer_1_fraction_of_zero_values': 0.5,
              'dnn/dnn/logits_fraction_of_zero_values': 0.,
              metric_keys.MetricKeys.LOSS: expected_loss,
          },
          proto)
    _assert_checkpoint(
        self, initial_step + step_count, input_units=input_dimension,
        hidden_units=layer_sizes, output_units=label_dimension,
        model_dir=self._model_dir)
Example #5
0
    def test_multi_class(self):
        """Asserts train loss and summaries for a 3-class DNNClassifier
        warm-started from a fixed checkpoint."""
        n_classes = 3
        initial_step = 100
        layer_sizes = (2, 2)
        # Seed the model directory with fixed weights/biases so the loss
        # below is deterministic.
        dnn_testing_utils.create_checkpoint((
            ([[.6, .5]], [.1, -.1]),
            ([[1., .8], [-.8, -1.]], [.2, -.2]),
            ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),
        ), initial_step, self._model_dir)

        # Uses identical numbers as DNNModelFnTest.test_multi_dim_logits.
        # See that test for calculation of logits.
        # logits = [-2.08, 2.08, 1.19] => probabilities = [0.0109, 0.7011, 0.2879]
        # loss = -1. * log(0.7011) = 0.35505795
        expected_loss = 0.35505795
        optimizer_mock = dnn_testing_utils.mock_optimizer(
            self, hidden_units=layer_sizes, expected_loss=expected_loss)
        classifier = dnn.DNNClassifier(
            n_classes=n_classes,
            hidden_units=layer_sizes,
            feature_columns=(feature_column.numeric_column('age'), ),
            optimizer=optimizer_mock,
            model_dir=self._model_dir)
        # No optimizer activity before train() is called.
        self.assertEqual(0, optimizer_mock.minimize.call_count)

        # Train a few steps, then validate optimizer usage, summaries,
        # and the checkpoint.
        step_count = 5
        hook = _SummaryHook()
        classifier.train(
            input_fn=lambda: ({'age': [[10.]]}, [[1]]),
            steps=step_count,
            hooks=(hook, ))
        self.assertEqual(1, optimizer_mock.minimize.call_count)
        # Every step's summary must show the expected losses and the
        # fraction-of-zero activation statistics.
        recorded = hook.summaries()
        self.assertEqual(step_count, len(recorded))
        for proto in recorded:
            _assert_simple_summary(
                self, {
                    metric_keys.MetricKeys.LOSS_MEAN: expected_loss,
                    'dnn/dnn/hiddenlayer_0_fraction_of_zero_values': 0.,
                    'dnn/dnn/hiddenlayer_1_fraction_of_zero_values': .5,
                    'dnn/dnn/logits_fraction_of_zero_values': 0.,
                    metric_keys.MetricKeys.LOSS: expected_loss,
                }, proto)
        _assert_checkpoint(
            self,
            initial_step + step_count,
            input_units=1,
            hidden_units=layer_sizes,
            output_units=n_classes,
            model_dir=self._model_dir)
Example #6
0
    def test_one_dim(self):
        """Asserts train loss for one-dimensional input and logits."""
        initial_step = 100
        layer_sizes = (2, 2)
        # Seed the model directory with fixed weights so the loss is known.
        dnn_testing_utils.create_checkpoint((
            ([[.6, .5]], [.1, -.1]),
            ([[1., .8], [-.8, -1.]], [.2, -.2]),
            ([[-1.], [1.]], [.3]),
        ), initial_step, self._model_dir)

        # Uses identical numbers as DNNModelFnTest.test_one_dim_logits.
        # See that test for calculation of logits.
        # logits = [-2.08] => predictions = [-2.08]
        # loss = (1 + 2.08)^2 = 9.4864
        expected_loss = 9.4864
        optimizer_mock = dnn_testing_utils.mock_optimizer(
            self, hidden_units=layer_sizes, expected_loss=expected_loss)
        regressor = dnn.DNNRegressor(
            hidden_units=layer_sizes,
            feature_columns=(feature_column.numeric_column('age'), ),
            optimizer=optimizer_mock,
            model_dir=self._model_dir)
        # The optimizer should be idle before training.
        self.assertEqual(0, optimizer_mock.minimize.call_count)

        # Train a few steps, then validate optimizer usage, summaries,
        # and the checkpoint.
        step_count = 5
        hook = _SummaryHook()
        regressor.train(
            input_fn=lambda: ({'age': [[10.]]}, [[1.]]),
            steps=step_count,
            hooks=(hook, ))
        self.assertEqual(1, optimizer_mock.minimize.call_count)
        # Per-step summaries must carry the expected losses and the
        # activation-sparsity fractions.
        recorded = hook.summaries()
        self.assertEqual(step_count, len(recorded))
        for proto in recorded:
            _assert_simple_summary(
                self, {
                    metric_keys.MetricKeys.LOSS_MEAN: expected_loss,
                    'dnn/dnn/hiddenlayer_0_fraction_of_zero_values': 0.,
                    'dnn/dnn/hiddenlayer_1_fraction_of_zero_values': 0.5,
                    'dnn/dnn/logits_fraction_of_zero_values': 0.,
                    metric_keys.MetricKeys.LOSS: expected_loss,
                }, proto)
        _assert_checkpoint(
            self,
            initial_step + step_count,
            input_units=1,
            hidden_units=layer_sizes,
            output_units=1,
            model_dir=self._model_dir)
Example #7
0
  def test_multi_class(self):
    """Asserts train loss and summaries for a 3-class DNNClassifier
    warm-started from a fixed checkpoint."""
    n_classes = 3
    initial_step = 100
    layer_sizes = (2, 2)
    # Seed the model directory with fixed weights/biases so the expected
    # loss below is deterministic.
    dnn_testing_utils.create_checkpoint(
        (([[.6, .5]], [.1, -.1]),
         ([[1., .8], [-.8, -1.]], [.2, -.2]),
         ([[-1., 1., .5], [-1., 1., .5]], [.3, -.3, .0]),),
        initial_step, self._model_dir)

    # Uses identical numbers as DNNModelFnTest.test_multi_dim_logits.
    # See that test for calculation of logits.
    # logits = [-2.08, 2.08, 1.19] => probabilities = [0.0109, 0.7011, 0.2879]
    # loss = -1. * log(0.7011) = 0.35505795
    expected_loss = 0.35505795
    optimizer_mock = dnn_testing_utils.mock_optimizer(
        self, hidden_units=layer_sizes, expected_loss=expected_loss)
    classifier = dnn.DNNClassifier(
        n_classes=n_classes,
        hidden_units=layer_sizes,
        feature_columns=(feature_column.numeric_column('age'),),
        optimizer=optimizer_mock,
        model_dir=self._model_dir)
    # No optimizer activity before train() is called.
    self.assertEqual(0, optimizer_mock.minimize.call_count)

    # Train a few steps, then validate optimizer usage, summaries, and
    # the checkpoint state.
    step_count = 5
    hook = _SummaryHook()
    classifier.train(
        input_fn=lambda: ({'age': [[10.]]}, [[1]]),
        steps=step_count,
        hooks=(hook,))
    self.assertEqual(1, optimizer_mock.minimize.call_count)
    # Each step's summary reports the expected losses plus the
    # fraction-of-zero activation statistics.
    recorded = hook.summaries()
    self.assertEqual(step_count, len(recorded))
    for proto in recorded:
      _assert_simple_summary(
          self,
          {
              metric_keys.MetricKeys.LOSS_MEAN: expected_loss,
              'dnn/dnn/hiddenlayer_0_fraction_of_zero_values': 0.,
              'dnn/dnn/hiddenlayer_1_fraction_of_zero_values': .5,
              'dnn/dnn/logits_fraction_of_zero_values': 0.,
              metric_keys.MetricKeys.LOSS: expected_loss,
          },
          proto)
    _assert_checkpoint(
        self, initial_step + step_count, input_units=1,
        hidden_units=layer_sizes, output_units=n_classes,
        model_dir=self._model_dir)
Example #8
0
  def test_one_dim(self):
    """Asserts train loss for one-dimensional input and logits."""
    initial_step = 100
    layer_sizes = (2, 2)
    # Seed the model directory with fixed weights so the loss is known.
    dnn_testing_utils.create_checkpoint(
        (([[.6, .5]], [.1, -.1]),
         ([[1., .8], [-.8, -1.]], [.2, -.2]),
         ([[-1.], [1.]], [.3]),),
        initial_step, self._model_dir)

    # Uses identical numbers as DNNModelFnTest.test_one_dim_logits.
    # See that test for calculation of logits.
    # logits = [-2.08] => predictions = [-2.08]
    # loss = (1 + 2.08)^2 = 9.4864
    expected_loss = 9.4864
    optimizer_mock = dnn_testing_utils.mock_optimizer(
        self, hidden_units=layer_sizes, expected_loss=expected_loss)
    regressor = dnn.DNNRegressor(
        hidden_units=layer_sizes,
        feature_columns=(feature_column.numeric_column('age'),),
        optimizer=optimizer_mock,
        model_dir=self._model_dir)
    # The optimizer should be idle before training.
    self.assertEqual(0, optimizer_mock.minimize.call_count)

    # Train a few steps, then validate optimizer usage, summaries, and
    # the checkpoint state.
    step_count = 5
    hook = _SummaryHook()
    regressor.train(
        input_fn=lambda: ({'age': [[10.]]}, [[1.]]),
        steps=step_count,
        hooks=(hook,))
    self.assertEqual(1, optimizer_mock.minimize.call_count)
    # Per-step summaries must carry the expected losses and the
    # activation-sparsity fractions.
    recorded = hook.summaries()
    self.assertEqual(step_count, len(recorded))
    for proto in recorded:
      _assert_simple_summary(
          self,
          {
              metric_keys.MetricKeys.LOSS_MEAN: expected_loss,
              'dnn/dnn/hiddenlayer_0_fraction_of_zero_values': 0.,
              'dnn/dnn/hiddenlayer_1_fraction_of_zero_values': 0.5,
              'dnn/dnn/logits_fraction_of_zero_values': 0.,
              metric_keys.MetricKeys.LOSS: expected_loss,
          },
          proto)
    _assert_checkpoint(
        self, initial_step + step_count, input_units=1,
        hidden_units=layer_sizes, output_units=1, model_dir=self._model_dir)