# Imports assumed by these snippets; the module paths follow the
# tensorflow_estimator test code the examples are drawn from.
import tempfile

import numpy as np
import six
import tensorflow as tf

from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.keras.optimizer_v2 import gradient_descent as gradient_descent_v2
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.platform import gfile
from tensorflow_estimator.python.estimator.canned import dnn_linear_combined
from tensorflow_estimator.python.estimator.canned import linear_testing_utils
from tensorflow_estimator.python.estimator.canned import prediction_keys
from tensorflow_estimator.python.estimator.export import export
from tensorflow_estimator.python.estimator.inputs import numpy_io

    def test_regressor_basic_warm_starting(self, fc_impl):
        """Tests correctness of DNNLinearCombinedRegressor default warm-start."""
        age = fc_impl.numeric_column('age')
        city = fc_impl.embedding_column(
            fc_impl.categorical_column_with_vocabulary_list(
                'city', vocabulary_list=['Mountain View', 'Palo Alto']),
            dimension=5)

        # Create a DNNLinearCombinedRegressor and train to save a checkpoint.
        dnn_lc_regressor = dnn_linear_combined.DNNLinearCombinedRegressorV2(
            linear_feature_columns=[age],
            dnn_feature_columns=[city],
            dnn_hidden_units=[256, 128],
            model_dir=self._ckpt_and_vocab_dir,
            linear_optimizer='SGD',
            dnn_optimizer='SGD')
        dnn_lc_regressor.train(input_fn=self._input_fn, max_steps=1)

        # Create a second DNNLinearCombinedRegressor, warm-started from the first.
        # Use a learning_rate = 0.0 optimizer to check values (use SGD so we don't
        # have accumulator values that change).
        # To avoid an optimizer naming issue during warm-starting, the
        # dnn_optimizer instance must be created before the linear_optimizer,
        # matching the order the model function uses internally.
        # Enter a default graph context so the optimizer instances are created
        # in a v1 Graph, consistent with the graph the estimator builds.
        with tf.Graph().as_default():
            warm_started_dnn_lc_regressor = (
                dnn_linear_combined.DNNLinearCombinedRegressorV2(
                    linear_feature_columns=[age],
                    dnn_feature_columns=[city],
                    dnn_hidden_units=[256, 128],
                    dnn_optimizer=gradient_descent_v2.SGD(learning_rate=0.0),
                    linear_optimizer=gradient_descent_v2.SGD(
                        learning_rate=0.0),
                    warm_start_from=dnn_lc_regressor.model_dir))

        warm_started_dnn_lc_regressor.train(input_fn=self._input_fn,
                                            max_steps=1)
        for variable_name in warm_started_dnn_lc_regressor.get_variable_names(
        ):
            if 'learning_rate' in variable_name:
                self.assertAllClose(
                    0.0,
                    warm_started_dnn_lc_regressor.get_variable_value(
                        variable_name))
            else:
                self.assertAllClose(
                    dnn_lc_regressor.get_variable_value(variable_name),
                    warm_started_dnn_lc_regressor.get_variable_value(
                        variable_name))
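
The test above warm-starts every variable by passing a checkpoint directory. A minimal sketch (not part of the original test, assuming the same age/city columns) of restricting warm-starting to the DNN tower via tf.estimator.WarmStartSettings:

# Hypothetical variant: warm-start only the DNN variables, leaving the
# linear tower freshly initialized.
ws = tf.estimator.WarmStartSettings(
    ckpt_to_initialize_from=dnn_lc_regressor.model_dir,
    vars_to_warm_start='dnn.*')  # regex matched against variable names
partially_warm_started = dnn_linear_combined.DNNLinearCombinedRegressorV2(
    linear_feature_columns=[age],
    dnn_feature_columns=[city],
    dnn_hidden_units=[256, 128],
    warm_start_from=ws)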
Example No. 2
    def test_dnn_and_linear_logits_are_added(self, fc_impl):
        with ops.Graph().as_default():
            variables_lib.Variable([[1.0]],
                                   name='linear/linear_model/x/weights')
            variables_lib.Variable([2.0],
                                   name='linear/linear_model/bias_weights')
            variables_lib.Variable([[3.0]], name='dnn/hiddenlayer_0/kernel')
            variables_lib.Variable([4.0], name='dnn/hiddenlayer_0/bias')
            variables_lib.Variable([[5.0]], name='dnn/logits/kernel')
            variables_lib.Variable([6.0], name='dnn/logits/bias')
            variables_lib.Variable(1, name='global_step', dtype=dtypes.int64)
            linear_testing_utils.save_variables_to_ckpt(self._model_dir)

        x_column = fc_impl.numeric_column('x')
        est = dnn_linear_combined.DNNLinearCombinedRegressorV2(
            linear_feature_columns=[x_column],
            dnn_hidden_units=[1],
            dnn_feature_columns=[x_column],
            model_dir=self._model_dir)
        input_fn = numpy_io.numpy_input_fn(x={'x': np.array([[10.]])},
                                           batch_size=1,
                                           shuffle=False)
        # linear logits = 10*1 + 2 = 12
        # dnn logits = (10*3 + 4)*5 + 6 = 176
        # logits = dnn + linear = 176 + 12 = 188
        self.assertAllClose(
            {
                prediction_keys.PredictionKeys.PREDICTIONS: [188.],
            }, next(est.predict(input_fn=input_fn)))
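
As a standalone sanity check of the arithmetic in the comments above, the same computation in plain NumPy (the hidden layer's default ReLU passes 34 through unchanged since it is positive):

x = np.array([[10.0]])
linear_logits = x @ np.array([[1.0]]) + np.array([2.0])             # 10*1 + 2 = 12
hidden = np.maximum(x @ np.array([[3.0]]) + np.array([4.0]), 0.0)   # ReLU(34) = 34
dnn_logits = hidden @ np.array([[5.0]]) + np.array([6.0])           # 34*5 + 6 = 176
assert float(linear_logits + dnn_logits) == 188.0                   # matches PREDICTIONS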
Example No. 3
    def _test_complete_flow_helper(self, linear_feature_columns,
                                   dnn_feature_columns, feature_spec,
                                   train_input_fn, eval_input_fn,
                                   predict_input_fn, input_dimension,
                                   label_dimension, batch_size):
        est = dnn_linear_combined.DNNLinearCombinedRegressorV2(
            linear_feature_columns=linear_feature_columns,
            dnn_hidden_units=(2, 2),
            dnn_feature_columns=dnn_feature_columns,
            label_dimension=label_dimension,
            model_dir=self._model_dir)

        # TRAIN
        num_steps = 10
        est.train(train_input_fn, steps=num_steps)

        # EVALUATE
        scores = est.evaluate(eval_input_fn)
        self.assertEqual(num_steps, scores[ops.GraphKeys.GLOBAL_STEP])
        self.assertIn('loss', six.iterkeys(scores))

        # PREDICT
        predictions = np.array([
            x[prediction_keys.PredictionKeys.PREDICTIONS]
            for x in est.predict(predict_input_fn)
        ])
        self.assertAllEqual((batch_size, label_dimension), predictions.shape)

        # EXPORT
        serving_input_receiver_fn = export.build_parsing_serving_input_receiver_fn(
            feature_spec)
        export_dir = est.export_saved_model(tempfile.mkdtemp(),
                                            serving_input_receiver_fn)
        self.assertTrue(gfile.Exists(export_dir))
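
A minimal sketch of the body of a hypothetical test method in the same class (so self is available) invoking this helper; the feature name 'x' and the random data are illustrative, not from the original suite:

input_dimension, label_dimension, batch_size = 2, 1, 4
data = np.random.rand(batch_size, input_dimension).astype(np.float32)
labels = np.random.rand(batch_size, label_dimension).astype(np.float32)

feature_columns = [fc_impl.numeric_column('x', shape=(input_dimension,))]
feature_spec = {'x': tf.io.FixedLenFeature((input_dimension,), tf.float32)}

train_input_fn = numpy_io.numpy_input_fn(
    x={'x': data}, y=labels, batch_size=batch_size, num_epochs=None,
    shuffle=True)
eval_input_fn = numpy_io.numpy_input_fn(
    x={'x': data}, y=labels, batch_size=batch_size, shuffle=False)
predict_input_fn = numpy_io.numpy_input_fn(
    x={'x': data}, batch_size=batch_size, shuffle=False)

self._test_complete_flow_helper(
    linear_feature_columns=feature_columns,
    dnn_feature_columns=feature_columns,
    feature_spec=feature_spec,
    train_input_fn=train_input_fn,
    eval_input_fn=eval_input_fn,
    predict_input_fn=predict_input_fn,
    input_dimension=input_dimension,
    label_dimension=label_dimension,
    batch_size=batch_size)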
Example No. 4
    def test_regressor_basic_warm_starting(self, fc_impl):
        """Tests correctness of DNNLinearCombinedRegressor default warm-start."""
        self.skipTest("b/133320890 causes naming issue during warm starting")
        age = fc_impl.numeric_column('age')
        city = fc_impl.embedding_column(
            fc_impl.categorical_column_with_vocabulary_list(
                'city', vocabulary_list=['Mountain View', 'Palo Alto']),
            dimension=5)

        # Create a DNNLinearCombinedRegressor and train to save a checkpoint.
        dnn_lc_regressor = dnn_linear_combined.DNNLinearCombinedRegressorV2(
            linear_feature_columns=[age],
            dnn_feature_columns=[city],
            dnn_hidden_units=[256, 128],
            model_dir=self._ckpt_and_vocab_dir,
            linear_optimizer='SGD',
            dnn_optimizer='SGD')
        dnn_lc_regressor.train(input_fn=self._input_fn, max_steps=1)

        # Create a second DNNLinearCombinedRegressor, warm-started from the first.
        # Use a learning_rate = 0.0 optimizer to check values (use SGD so we don't
        # have accumulator values that change).
        warm_started_dnn_lc_regressor = (
            dnn_linear_combined.DNNLinearCombinedRegressorV2(
                linear_feature_columns=[age],
                dnn_feature_columns=[city],
                dnn_hidden_units=[256, 128],
                linear_optimizer=gradient_descent_v2.SGD(learning_rate=0.0),
                dnn_optimizer=gradient_descent_v2.SGD(learning_rate=0.0),
                warm_start_from=dnn_lc_regressor.model_dir))

        warm_started_dnn_lc_regressor.train(input_fn=self._input_fn,
                                            max_steps=1)
        for variable_name in warm_started_dnn_lc_regressor.get_variable_names(
        ):
            if 'learning_rate' in variable_name:
                self.assertAllClose(
                    0.0,
                    warm_started_dnn_lc_regressor.get_variable_value(
                        variable_name))
            else:
                self.assertAllClose(
                    dnn_lc_regressor.get_variable_value(variable_name),
                    warm_started_dnn_lc_regressor.get_variable_value(
                        variable_name))
Example No. 5
def _linear_regressor_fn(feature_columns,
                         model_dir=None,
                         label_dimension=1,
                         weight_column=None,
                         optimizer='Ftrl',
                         config=None,
                         sparse_combiner='sum'):
    return dnn_linear_combined.DNNLinearCombinedRegressorV2(
        model_dir=model_dir,
        linear_feature_columns=feature_columns,
        linear_optimizer=optimizer,
        label_dimension=label_dimension,
        weight_column=weight_column,
        config=config,
        linear_sparse_combiner=sparse_combiner)
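
This wrapper builds a linear-only regressor, so shared linear test suites can exercise DNNLinearCombinedRegressorV2 through the same interface as LinearRegressor. A hypothetical call (names and data are illustrative):

est = _linear_regressor_fn(
    feature_columns=[tf.feature_column.numeric_column('x')],
    model_dir=tempfile.mkdtemp())
train_input_fn = numpy_io.numpy_input_fn(
    x={'x': np.array([[1.0], [2.0]])},
    y=np.array([[2.0], [4.0]]),
    batch_size=2, num_epochs=None, shuffle=True)
est.train(train_input_fn, steps=5)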
Example No. 6
def _dnn_regressor_fn(hidden_units,
                      feature_columns,
                      model_dir=None,
                      label_dimension=1,
                      weight_column=None,
                      optimizer='Adagrad',
                      config=None):
    return dnn_linear_combined.DNNLinearCombinedRegressorV2(
        model_dir=model_dir,
        dnn_hidden_units=hidden_units,
        dnn_feature_columns=feature_columns,
        dnn_optimizer=optimizer,
        label_dimension=label_dimension,
        weight_column=weight_column,
        config=config)
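
Similarly, this wrapper forwards only dnn_* arguments, so the linear tower is absent and shared DNN test suites can drive the combined estimator as if it were DNNRegressor. A hypothetical call (illustrative names and data):

est = _dnn_regressor_fn(
    hidden_units=[4, 2],
    feature_columns=[tf.feature_column.numeric_column('x')],
    model_dir=tempfile.mkdtemp())
est.train(
    numpy_io.numpy_input_fn(
        x={'x': np.array([[1.0], [2.0]])},
        y=np.array([[2.0], [4.0]]),
        batch_size=2, num_epochs=None, shuffle=True),
    steps=5)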