def test_dnn_and_linear_logits_are_added(self):
    with ops.Graph().as_default():
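      # Write fixed weights to a checkpoint so the prediction below is deterministic.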
      variables_lib.Variable([[1.0]], name='linear/linear_model/x/weights')
      variables_lib.Variable([2.0], name='linear/linear_model/bias_weights')
      variables_lib.Variable([[3.0]], name='dnn/hiddenlayer_0/kernel')
      variables_lib.Variable([4.0], name='dnn/hiddenlayer_0/bias')
      variables_lib.Variable([[5.0]], name='dnn/logits/kernel')
      variables_lib.Variable([6.0], name='dnn/logits/bias')
      variables_lib.Variable(1, name='global_step', dtype=dtypes.int64)
      linear_testing_utils.save_variables_to_ckpt(self._model_dir)

    x_column = feature_column.numeric_column('x')
    est = dnn_linear_combined.DNNLinearCombinedRegressor(
        linear_feature_columns=[x_column],
        dnn_hidden_units=[1],
        dnn_feature_columns=[x_column],
        model_dir=self._model_dir)
    input_fn = numpy_io.numpy_input_fn(
        x={'x': np.array([[10.]])}, batch_size=1, shuffle=False)
    # linear logits = 10*1 + 2 = 12
    # dnn logits = relu(10*3 + 4)*5 + 6 = 34*5 + 6 = 176
    #   (the default relu activation is a no-op here since 34 > 0)
    # combined logits = dnn + linear = 176 + 12 = 188
    self.assertAllClose(
        {
            prediction_keys.PredictionKeys.PREDICTIONS: [188.],
        },
        next(est.predict(input_fn=input_fn)))
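
As a sanity check on the arithmetic in the comments above, the expected prediction can be reproduced outside the estimator. A minimal sketch in plain NumPy (the weights and input mirror the checkpoint above; the default ReLU hidden activation is assumed, and it is a no-op here because the pre-activation value 34 is positive):

import numpy as np

x = np.array([[10.0]])

# Linear tower: x @ W + b = 10 * 1 + 2 = 12.
linear_logits = x @ np.array([[1.0]]) + np.array([2.0])

# DNN tower: hidden = relu(x @ K0 + b0) = relu(34) = 34,
# then logits = hidden @ K1 + b1 = 34 * 5 + 6 = 176.
hidden = np.maximum(x @ np.array([[3.0]]) + np.array([4.0]), 0.0)
dnn_logits = hidden @ np.array([[5.0]]) + np.array([6.0])

# The combined head adds the two towers: 12 + 176 = 188.
assert np.allclose(linear_logits + dnn_logits, [[188.0]])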
Example #3
    def testFromCheckpoint(self):
        # Create initial checkpoint.
        n_classes = self._n_classes
        label = 1
        age = 17
        # For binary case, the expected weight has shape (1,1). For multi class
        # case, the shape is (1, n_classes). In order to test the weights, set
        # weights as 2.0 * range(n_classes).
        age_weight = ([[2.0]] if n_classes == 2 else np.reshape(
            2.0 * np.arange(n_classes, dtype=np.float32), (1, n_classes)))
        bias = [-35.0] if n_classes == 2 else [-35.0] * n_classes
        initial_global_step = 100
        with ops.Graph().as_default():
            variables.Variable(age_weight,
                               name=linear_testing_utils.AGE_WEIGHT_NAME)
            variables.Variable(bias, name=linear_testing_utils.BIAS_NAME)
            variables.Variable(initial_global_step,
                               name=ops.GraphKeys.GLOBAL_STEP,
                               dtype=dtypes.int64)
            linear_testing_utils.save_variables_to_ckpt(self._model_dir)

        # For the binary classifier:
        #   logits = age * age_weight + bias = 17 * 2. - 35. = -1.
        #   loss = sigmoid_cross_entropy(logits, label)
        #   so, loss = 1 * -log(sigmoid(-1)) = 1.3133
        # For the multi-class classifier:
        #   loss = cross_entropy(logits, label)
        #   where logits = 17 * age_weight + bias and label = 1
        #   so, loss = 1 * -log(softmax(logits)[label])
        if n_classes == 2:
            expected_loss = 1.3133
        else:
            logits = age_weight * age + bias
            logits_exp = np.exp(logits)
            softmax = logits_exp / logits_exp.sum()
            expected_loss = -1 * math.log(softmax[0, label])

        mock_optimizer = self._mock_optimizer(expected_loss=expected_loss)

        est = linear.LinearClassifier(
            feature_columns=(feature_column_lib.numeric_column('age'), ),
            n_classes=n_classes,
            optimizer=mock_optimizer,
            model_dir=self._model_dir)
        self.assertEqual(0, mock_optimizer.minimize.call_count)

        # Train for a few steps, and validate optimizer and final checkpoint.
        num_steps = 10
        est.train(
            input_fn=lambda: ({'age': ((age, ), )}, ((label, ), )),
            steps=num_steps)
        self.assertEqual(1, mock_optimizer.minimize.call_count)
        self._assert_checkpoint(
            expected_global_step=initial_global_step + num_steps,
            expected_age_weight=age_weight,
            expected_bias=bias)
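
The expected_loss values above can be reproduced independently of the estimator. A minimal sketch in plain NumPy (the binary case follows the test exactly; n_classes = 4 in the multi-class branch is an illustrative assumption, since the test parameterizes it):

import numpy as np

age, label = 17.0, 1

# Binary case: logits = 17 * 2 - 35 = -1, so the sigmoid cross-entropy
# loss for label = 1 is -log(sigmoid(-1)) ~= 1.3133.
logits = age * 2.0 - 35.0
binary_loss = -np.log(1.0 / (1.0 + np.exp(-logits)))
assert np.isclose(binary_loss, 1.3133, atol=1e-4)

# Multi-class case (n_classes = 4 chosen for illustration):
# logits = 17 * age_weight + bias, loss = -log(softmax(logits)[label]).
n_classes = 4
age_weight = np.reshape(2.0 * np.arange(n_classes, dtype=np.float32),
                        (1, n_classes))
bias = np.array([-35.0] * n_classes)
logits = age * age_weight + bias
softmax = np.exp(logits) / np.exp(logits).sum()
multiclass_loss = -np.log(softmax[0, label])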