def _linear_only_estimator_fn(feature_columns,
                              model_dir=None,
                              label_dimension=1,
                              weight_column=None,
                              optimizer='Ftrl',
                              config=None,
                              partitioner=None):
  return dnn_linear_combined.DNNLinearCombinedEstimator(
      head=head_lib.regression_head(
          weight_column=weight_column, label_dimension=label_dimension),
      model_dir=model_dir,
      linear_feature_columns=feature_columns,
      linear_optimizer=optimizer,
      input_layer_partitioner=partitioner,
      config=config)
def _linear_only_estimator_fn(feature_columns,
                              model_dir=None,
                              label_dimension=1,
                              weight_column=None,
                              optimizer='Ftrl',
                              config=None,
                              partitioner=None):
  return dnn_linear_combined.DNNLinearCombinedEstimator(
      head=head_lib.regression_head(
          weight_column=weight_column,
          label_dimension=label_dimension,
          # Tests in core (from which this test inherits) test the sum loss.
          loss_reduction=losses.Reduction.SUM),
      model_dir=model_dir,
      linear_feature_columns=feature_columns,
      linear_optimizer=optimizer,
      input_layer_partitioner=partitioner,
      config=config)
def _dnn_only_estimator_fn(hidden_units,
                           feature_columns,
                           model_dir=None,
                           label_dimension=1,
                           weight_column=None,
                           optimizer='Adagrad',
                           activation_fn=nn.relu,
                           dropout=None,
                           input_layer_partitioner=None,
                           config=None):
  return dnn_linear_combined.DNNLinearCombinedEstimator(
      head=head_lib.regression_head(
          weight_column=weight_column, label_dimension=label_dimension),
      model_dir=model_dir,
      dnn_feature_columns=feature_columns,
      dnn_optimizer=optimizer,
      dnn_hidden_units=hidden_units,
      dnn_activation_fn=activation_fn,
      dnn_dropout=dropout,
      input_layer_partitioner=input_layer_partitioner,
      config=config)
def _test_complete_flow(self, train_input_fn, eval_input_fn, predict_input_fn,
                        input_dimension, label_dimension, batch_size):
  linear_feature_columns = [
      feature_column.numeric_column('x', shape=(input_dimension,))
  ]
  dnn_feature_columns = [
      feature_column.numeric_column('x', shape=(input_dimension,))
  ]
  feature_columns = linear_feature_columns + dnn_feature_columns
  est = dnn_linear_combined.DNNLinearCombinedEstimator(
      head=head_lib.regression_head(label_dimension=label_dimension),
      linear_feature_columns=linear_feature_columns,
      dnn_feature_columns=dnn_feature_columns,
      dnn_hidden_units=(2, 2),
      model_dir=self._model_dir)

  # TRAIN
  num_steps = 10
  est.train(train_input_fn, steps=num_steps)

  # EVALUATE
  scores = est.evaluate(eval_input_fn)
  self.assertEqual(num_steps, scores[ops.GraphKeys.GLOBAL_STEP])
  self.assertIn('loss', six.iterkeys(scores))

  # PREDICT
  predictions = np.array([
      x[prediction_keys.PredictionKeys.PREDICTIONS]
      for x in est.predict(predict_input_fn)
  ])
  self.assertAllEqual((batch_size, label_dimension), predictions.shape)

  # EXPORT
  feature_spec = feature_column.make_parse_example_spec(feature_columns)
  serving_input_receiver_fn = export.build_parsing_serving_input_receiver_fn(
      feature_spec)
  export_dir = est.export_savedmodel(tempfile.mkdtemp(),
                                     serving_input_receiver_fn)
  self.assertTrue(gfile.Exists(export_dir))
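# A minimal sketch of how the three input_fns passed to _test_complete_flow
# could be built from synthetic data. The helper name `_make_input_fns`, the
# synthetic values, and the use of `numpy_io.numpy_input_fn` are assumptions
# for illustration; the real fixtures are defined elsewhere in the test file.
def _make_input_fns(batch_size=10, input_dimension=2, label_dimension=2):
  data = np.linspace(
      0., 2., batch_size * input_dimension,
      dtype=np.float32).reshape(batch_size, input_dimension)
  labels = np.zeros((batch_size, label_dimension), dtype=np.float32)
  # Training repeats and shuffles the data; eval and predict make one pass.
  train_input_fn = numpy_io.numpy_input_fn(
      x={'x': data}, y=labels, batch_size=batch_size, num_epochs=None,
      shuffle=True)
  eval_input_fn = numpy_io.numpy_input_fn(
      x={'x': data}, y=labels, batch_size=batch_size, shuffle=False)
  predict_input_fn = numpy_io.numpy_input_fn(
      x={'x': data}, batch_size=batch_size, shuffle=False)
  return train_input_fn, eval_input_fn, predict_input_fn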
def _dnn_only_estimator_fn(hidden_units,
                           feature_columns,
                           model_dir=None,
                           label_dimension=1,
                           weight_column=None,
                           optimizer='Adagrad',
                           activation_fn=nn.relu,
                           dropout=None,
                           input_layer_partitioner=None,
                           config=None):
  return dnn_linear_combined.DNNLinearCombinedEstimator(
      head=head_lib.regression_head(
          weight_column=weight_column,
          label_dimension=label_dimension,
          # Tests in core (from which this test inherits) test the sum loss.
          loss_reduction=losses.Reduction.SUM),
      model_dir=model_dir,
      dnn_feature_columns=feature_columns,
      dnn_optimizer=optimizer,
      dnn_hidden_units=hidden_units,
      dnn_activation_fn=activation_fn,
      dnn_dropout=dropout,
      input_layer_partitioner=input_layer_partitioner,
      config=config)
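# Hypothetical usage of the DNN-only factory above: build an estimator over a
# single numeric feature and run one training step. The feature name 'x', the
# zero-valued data, and the numpy_io input_fn are illustration-only assumptions
# and not part of the original test.
def _demo_dnn_only_train(model_dir):
  columns = [feature_column.numeric_column('x', shape=(2,))]
  est = _dnn_only_estimator_fn(
      hidden_units=(2, 2), feature_columns=columns, model_dir=model_dir)
  train_input_fn = numpy_io.numpy_input_fn(
      x={'x': np.zeros((4, 2), dtype=np.float32)},
      y=np.zeros((4, 1), dtype=np.float32),
      batch_size=4, num_epochs=None, shuffle=True)
  est.train(train_input_fn, steps=1)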