def test_auto_ensemble_estimator_lifecycle(self, candidate_pool, want_loss,
                                           max_train_steps=30):
  features = {"input_1": [[1., 0.]]}
  labels = [[1.]]

  run_config = tf.estimator.RunConfig(tf_random_seed=42)
  head = regression_head.RegressionHead()

  # Always create optimizers in a lambda to prevent error like:
  # `RuntimeError: Cannot set `iterations` to a new Variable after the
  # Optimizer weights have been created`
  optimizer = lambda: tf.keras.optimizers.SGD(lr=.01)
  feature_columns = [tf.feature_column.numeric_column("input_1", shape=[2])]

  def train_input_fn():
    input_features = {}
    for key, feature in features.items():
      input_features[key] = tf.constant(feature, name=key)
    input_labels = tf.constant(labels, name="labels")
    return input_features, input_labels

  def test_input_fn():
    dataset = tf.data.Dataset.from_tensors([tf.constant(features["input_1"])])
    input_features = tf.compat.v1.data.make_one_shot_iterator(
        dataset).get_next()
    return {"input_1": input_features}, None

  estimator = AutoEnsembleEstimator(
      head=head,
      candidate_pool=candidate_pool(head, feature_columns, optimizer),
      max_iteration_steps=10,
      force_grow=True,
      model_dir=self.test_subdirectory,
      config=run_config)

  # Train for three iterations.
  estimator.train(input_fn=train_input_fn, max_steps=max_train_steps)

  # Evaluate.
  eval_results = estimator.evaluate(input_fn=train_input_fn, steps=1)
  self.assertAllClose(max_train_steps, eval_results["global_step"])
  self.assertAllClose(want_loss, eval_results["loss"], atol=.3)

  # Predict.
  predictions = estimator.predict(input_fn=test_input_fn)
  for prediction in predictions:
    self.assertIsNotNone(prediction["predictions"])

  # Export SavedModel.
  def serving_input_fn():
    """Input fn for serving export, starting from serialized example."""
    serialized_example = tf.compat.v1.placeholder(
        dtype=tf.string, shape=(None), name="serialized_example")
    for key, value in features.items():
      features[key] = tf.constant(value)
    return export.SupervisedInputReceiver(
        features=features,
        labels=tf.constant(labels),
        receiver_tensors=serialized_example)

  export_dir_base = os.path.join(self.test_subdirectory, "export")
  estimator.export_saved_model(
      export_dir_base=export_dir_base,
      serving_input_receiver_fn=serving_input_fn)

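# For reference, a `candidate_pool` compatible with the
# `candidate_pool(head, feature_columns, optimizer)` call above is a callable
# returning estimators keyed by name. A minimal sketch (hypothetical helper;
# the pools actually supplied by the test parameterization are not shown in
# this excerpt, and mirror the linear/DNN pools built in the variants below):
#
#   def _make_candidate_pool(head, feature_columns, optimizer):
#     return {
#         "linear":
#             tf.estimator.LinearEstimator(
#                 head=head,
#                 feature_columns=feature_columns,
#                 optimizer=optimizer),
#         "dnn":
#             tf.estimator.DNNEstimator(
#                 head=head,
#                 feature_columns=feature_columns,
#                 optimizer=optimizer,
#                 hidden_units=[3]),
#     }
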
def test_auto_ensemble_estimator_lifecycle(self):
  features = {"input_1": [[1., 0.]]}
  labels = [[1.]]

  run_config = tf.estimator.RunConfig(tf_random_seed=42)
  head = tf.contrib.estimator.regression_head(
      loss_reduction=tf.losses.Reduction.SUM_OVER_BATCH_SIZE)
  optimizer = tf.train.GradientDescentOptimizer(learning_rate=.01)
  feature_columns = [tf.feature_column.numeric_column("input_1", shape=[2])]

  def train_input_fn():
    input_features = {}
    for key, feature in features.items():
      input_features[key] = tf.constant(feature, name=key)
    input_labels = tf.constant(labels, name="labels")
    return input_features, input_labels

  def test_input_fn():
    input_features = tf.data.Dataset.from_tensors([
        tf.constant(features["input_1"])
    ]).make_one_shot_iterator().get_next()
    return {"input_1": input_features}, None

  # Prefer the core estimators; fall back to tf.contrib on TF versions where
  # they have not yet moved out of contrib.
  if hasattr(tf.estimator, "LinearEstimator"):
    linear_estimator_fn = tf.estimator.LinearEstimator
  else:
    linear_estimator_fn = tf.contrib.estimator.LinearEstimator
  if hasattr(tf.estimator, "DNNEstimator"):
    dnn_estimator_fn = tf.estimator.DNNEstimator
  else:
    dnn_estimator_fn = tf.contrib.estimator.DNNEstimator

  estimator = AutoEnsembleEstimator(
      head=head,
      candidate_pool=[
          linear_estimator_fn(
              head=head, feature_columns=feature_columns,
              optimizer=optimizer),
          dnn_estimator_fn(
              head=head,
              feature_columns=feature_columns,
              optimizer=optimizer,
              hidden_units=[3]),
      ],
      max_iteration_steps=4,
      force_grow=True,
      model_dir=self.test_subdirectory,
      config=run_config)

  # Train for three iterations (12 steps at 4 steps per iteration).
  estimator.train(input_fn=train_input_fn, max_steps=12)

  # Evaluate.
  eval_results = estimator.evaluate(input_fn=train_input_fn, steps=3)
  self.assertIsNotNone(eval_results["loss"])

  # Predict.
  predictions = estimator.predict(input_fn=test_input_fn)
  for prediction in predictions:
    self.assertIsNotNone(prediction["predictions"])

  # Export SavedModel.
  def serving_input_fn():
    """Input fn for serving export, starting from serialized example."""
    serialized_example = tf.placeholder(
        dtype=tf.string, shape=(None), name="serialized_example")
    for key, value in features.items():
      features[key] = tf.constant(value)
    return export.SupervisedInputReceiver(
        features=features,
        labels=tf.constant(labels),
        receiver_tensors=serialized_example)

  export_dir_base = os.path.join(self.test_subdirectory, "export")
  tf.contrib.estimator.export_saved_model_for_mode(
      estimator,
      export_dir_base=export_dir_base,
      input_receiver_fn=serving_input_fn,
      mode=tf.estimator.ModeKeys.PREDICT)

def test_auto_ensemble_estimator_lifecycle(self, candidate_pool, want_loss,
                                           max_train_steps=30):
  features = {"input_1": [[1., 0.]]}
  labels = [[1.]]

  run_config = tf.estimator.RunConfig(tf_random_seed=42)
  head = tf.contrib.estimator.regression_head(
      loss_reduction=tf.losses.Reduction.SUM_OVER_BATCH_SIZE)
  optimizer = tf.train.GradientDescentOptimizer(learning_rate=.01)
  feature_columns = [tf.feature_column.numeric_column("input_1", shape=[2])]

  def train_input_fn():
    input_features = {}
    for key, feature in features.items():
      input_features[key] = tf.constant(feature, name=key)
    input_labels = tf.constant(labels, name="labels")
    return input_features, input_labels

  def test_input_fn():
    input_features = tf.data.Dataset.from_tensors([
        tf.constant(features["input_1"])
    ]).make_one_shot_iterator().get_next()
    return {"input_1": input_features}, None

  estimator = AutoEnsembleEstimator(
      head=head,
      candidate_pool=candidate_pool(head, feature_columns, optimizer),
      max_iteration_steps=10,
      force_grow=True,
      model_dir=self.test_subdirectory,
      config=run_config)

  # Train for three iterations.
  estimator.train(input_fn=train_input_fn, max_steps=max_train_steps)

  # Evaluate.
  eval_results = estimator.evaluate(input_fn=train_input_fn, steps=1)
  self.assertAllClose(max_train_steps, eval_results["global_step"])
  self.assertAllClose(want_loss, eval_results["loss"], atol=.3)

  # Predict.
  predictions = estimator.predict(input_fn=test_input_fn)
  for prediction in predictions:
    self.assertIsNotNone(prediction["predictions"])

  # Export SavedModel.
  def serving_input_fn():
    """Input fn for serving export, starting from serialized example."""
    serialized_example = tf.placeholder(
        dtype=tf.string, shape=(None), name="serialized_example")
    for key, value in features.items():
      features[key] = tf.constant(value)
    return export.SupervisedInputReceiver(
        features=features,
        labels=tf.constant(labels),
        receiver_tensors=serialized_example)

  export_dir_base = os.path.join(self.test_subdirectory, "export")
  # `Estimator.export_savedmodel` was renamed to `export_saved_model` in
  # newer TF releases; use whichever this version provides.
  export_saved_model_fn = getattr(estimator, "export_saved_model", None)
  if not callable(export_saved_model_fn):
    export_saved_model_fn = estimator.export_savedmodel
  export_saved_model_fn(
      export_dir_base=export_dir_base,
      serving_input_receiver_fn=serving_input_fn)

def test_auto_ensemble_estimator_lifecycle(self, list_candidate_pool):
  features = {"input_1": [[1., 0.]]}
  labels = [[1.]]

  run_config = tf.estimator.RunConfig(tf_random_seed=42)
  head = tf.contrib.estimator.regression_head(
      loss_reduction=tf.losses.Reduction.SUM_OVER_BATCH_SIZE)
  optimizer = tf.train.GradientDescentOptimizer(learning_rate=.01)
  feature_columns = [tf.feature_column.numeric_column("input_1", shape=[2])]

  def train_input_fn():
    input_features = {}
    for key, feature in features.items():
      input_features[key] = tf.constant(feature, name=key)
    input_labels = tf.constant(labels, name="labels")
    return input_features, input_labels

  def test_input_fn():
    input_features = tf.data.Dataset.from_tensors([
        tf.constant(features["input_1"])
    ]).make_one_shot_iterator().get_next()
    return {"input_1": input_features}, None

  if hasattr(tf.estimator, "LinearEstimator"):
    linear_estimator_fn = tf.estimator.LinearEstimator
  else:
    linear_estimator_fn = tf.contrib.estimator.LinearEstimator
  if hasattr(tf.estimator, "DNNEstimator"):
    dnn_estimator_fn = tf.estimator.DNNEstimator
  else:
    dnn_estimator_fn = tf.contrib.estimator.DNNEstimator

  candidate_pool = {
      "linear":
          linear_estimator_fn(
              head=head, feature_columns=feature_columns,
              optimizer=optimizer),
      "dnn":
          dnn_estimator_fn(
              head=head,
              feature_columns=feature_columns,
              optimizer=optimizer,
              hidden_units=[3])
  }
  if list_candidate_pool:
    candidate_pool = [candidate_pool[k] for k in sorted(candidate_pool)]

  estimator = AutoEnsembleEstimator(
      head=head,
      candidate_pool=candidate_pool,
      max_iteration_steps=10,
      force_grow=True,
      model_dir=self.test_subdirectory,
      config=run_config)

  # Train for three iterations.
  estimator.train(input_fn=train_input_fn, max_steps=30)

  # Evaluate.
  eval_results = estimator.evaluate(input_fn=train_input_fn, steps=3)
  want_loss = .209
  if tf_compat.version_greater_or_equal("1.10.0") and (
      not tf_compat.version_greater_or_equal("1.12.0")):
    # Only TF 1.10 and 1.11.
    want_loss = .079514
  self.assertAllClose(want_loss, eval_results["loss"], atol=.05)

  # Predict.
  predictions = estimator.predict(input_fn=test_input_fn)
  for prediction in predictions:
    self.assertIsNotNone(prediction["predictions"])

  # Export SavedModel.
  def serving_input_fn():
    """Input fn for serving export, starting from serialized example."""
    serialized_example = tf.placeholder(
        dtype=tf.string, shape=(None), name="serialized_example")
    for key, value in features.items():
      features[key] = tf.constant(value)
    return export.SupervisedInputReceiver(
        features=features,
        labels=tf.constant(labels),
        receiver_tensors=serialized_example)

  export_dir_base = os.path.join(self.test_subdirectory, "export")
  export_saved_model_fn = getattr(estimator, "export_saved_model", None)
  if not callable(export_saved_model_fn):
    export_saved_model_fn = estimator.export_savedmodel
  export_saved_model_fn(
      export_dir_base=export_dir_base,
      serving_input_receiver_fn=serving_input_fn)

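# The `list_candidate_pool` flag above implies this test is driven by a
# parameterized runner. A sketch of the decorator (an assumption, not shown
# in this excerpt, using absl's `parameterized` test library):
#
#   from absl.testing import parameterized
#
#   @parameterized.named_parameters(
#       {"testcase_name": "dict_pool", "list_candidate_pool": False},
#       {"testcase_name": "list_pool", "list_candidate_pool": True})
#   def test_auto_ensemble_estimator_lifecycle(self, list_candidate_pool):
#     ...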