def _build_estimator_for_export_tests(tmpdir):

  def _input_fn():
    iris = tf.contrib.learn.datasets.load_iris()
    return {
        'feature': tf.constant(iris.data, dtype=tf.float32)
    }, tf.constant(iris.target, shape=[150], dtype=tf.int32)

  feature_columns = [
      tf.contrib.layers.real_valued_column('feature', dimension=4)
  ]

  est = tf.contrib.learn.LinearRegressor(feature_columns)
  est.fit(input_fn=_input_fn, steps=20)

  feature_spec = tf.contrib.layers.create_feature_spec_for_parsing(
      feature_columns)
  export_input_fn = input_fn_utils.build_parsing_serving_input_fn(feature_spec)

  # Hack in an op that uses an asset, in order to test asset export.
  # This is not actually valid, of course.
  def export_input_fn_with_asset():
    features, labels, inputs = export_input_fn()

    vocab_file_name = os.path.join(tmpdir, 'my_vocab_file')
    vocab_file = tf.gfile.GFile(vocab_file_name, mode='w')
    vocab_file.write(VOCAB_FILE_CONTENT)
    vocab_file.close()
    hashtable = tf.contrib.lookup.HashTable(
        tf.contrib.lookup.TextFileStringTableInitializer(vocab_file_name), 'x')
    features['bogus_lookup'] = hashtable.lookup(
        tf.to_int64(features['feature']))

    return input_fn_utils.InputFnOps(features, labels, inputs)

  return est, export_input_fn_with_asset
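A minimal usage sketch (not part of the original snippet) of exporting the estimator this helper returns, assuming a TF 1.x environment where `tf.contrib.learn` is available and `os` and `input_fn_utils` are in scope as above:

# Hedged sketch: drive the helper above and export a SavedModel.
import tempfile

tmpdir = tempfile.mkdtemp()
est, export_input_fn = _build_estimator_for_export_tests(tmpdir)
# export_savedmodel writes a timestamped SavedModel directory under the
# given base directory; the vocab file becomes a SavedModel asset.
export_path = est.export_savedmodel(
    os.path.join(tmpdir, 'export'), export_input_fn)
print('SavedModel written to:', export_path)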
def _make_experiment_fn(output_dir):
  """Creates experiment for DNNBoostedTreeCombinedRegressor."""

  (x_train, y_train), (x_test,
                       y_test) = tf.keras.datasets.boston_housing.load_data()

  train_input_fn = tf.compat.v1.estimator.inputs.numpy_input_fn(
      x={"x": x_train},
      y=y_train,
      batch_size=FLAGS.batch_size,
      num_epochs=None,
      shuffle=True)
  eval_input_fn = tf.compat.v1.estimator.inputs.numpy_input_fn(
      x={"x": x_test}, y=y_test, num_epochs=1, shuffle=False)

  feature_columns = [
      feature_column.real_valued_column("x", dimension=_BOSTON_NUM_FEATURES)
  ]

  feature_spec = tf.contrib.layers.create_feature_spec_for_parsing(
      feature_columns)
  serving_input_fn = input_fn_utils.build_parsing_serving_input_fn(feature_spec)
  export_strategies = [
      saved_model_export_utils.make_export_strategy(serving_input_fn)
  ]
  return tf.contrib.learn.Experiment(
      estimator=_get_estimator(output_dir, feature_columns),
      train_input_fn=train_input_fn,
      eval_input_fn=eval_input_fn,
      train_steps=None,
      eval_steps=FLAGS.num_eval_steps,
      eval_metrics=None,
      export_strategies=export_strategies)
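For context, a hedged sketch of how an experiment_fn like this would typically be driven, assuming the contrib-era `learn_runner`; the output directory is illustrative:

from tensorflow.contrib.learn.python.learn import learn_runner

# learn_runner calls _make_experiment_fn(output_dir), runs the returned
# Experiment's train-and-evaluate schedule, and applies the export
# strategies when training completes.
learn_runner.run(experiment_fn=_make_experiment_fn,
                 output_dir='/tmp/boston_model')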
def search(layers, units, optimizer, learning_rate, activation_fn, dropout):
    params['layers'] = layers
    params['units'] = units
    params['optimizer'] = optimizer
    params['learning_rate'] = learning_rate
    params['activation_fn'] = activation_fn
    params['dropout'] = dropout
    print(params)

    new_model_dir = utils.create_model_dir(model_dir, params)
    estimator = get_estimator(params, new_model_dir, feature_column, CONFIG)

    val_x, val_y = train_input_fn(VALIDATION_DATA_MINI)
    validation_monitor = utils.get_validation_monitor(val_x, val_y)
    history = estimator.fit(x=x, y=y, steps=EPOCHS, batch_size=BATCH_SIZE,
                            monitors=[validation_monitor])

    validation_input = create_callable_train_input_fn(VALIDATION_DATA_MINI)
    accuracy = evaluate.get_rmse_model(estimator, validation_input)

    global BEST_ACCURACY
    print('rmse: ' + str(accuracy))
    if accuracy < BEST_ACCURACY:
        print('Exporting model...')
        feature_spec = tf.feature_column.make_parse_example_spec(
            feature_column)
        serving_input_fn = build_parsing_serving_input_fn(feature_spec)
        estimator.export_savedmodel(new_model_dir, serving_input_fn)
        BEST_ACCURACY = accuracy
        print('Best rmse: ' + str(BEST_ACCURACY))

    del estimator
    tf.reset_default_graph()
    return accuracy
def make_parsing_export_strategy(feature_columns,
                                 default_output_alternative_key=None,
                                 assets_extra=None,
                                 as_text=False,
                                 exports_to_keep=5,
                                 target_core=False,
                                 strip_default_attrs=False):
  # pylint: disable=line-too-long
  """Create an ExportStrategy for use with Experiment, using `FeatureColumn`s.

  Creates a SavedModel export that expects to be fed with a single string
  Tensor containing serialized tf.Examples.  At serving time, incoming
  tf.Examples will be parsed according to the provided `FeatureColumn`s.

  Args:
    feature_columns: An iterable of `FeatureColumn`s representing the features
      that must be provided at serving time (excluding labels!).
    default_output_alternative_key: the name of the head to serve when an
      incoming serving request does not explicitly request a specific head.
      Must be `None` if the estimator inherits from ${tf.estimator.Estimator}
      or for single-headed models.
    assets_extra: A dict specifying how to populate the assets.extra directory
      within the exported SavedModel.  Each key should give the destination
      path (including the filename) relative to the assets.extra directory.
      The corresponding value gives the full path of the source file to be
      copied.  For example, the simple case of copying a single file without
      renaming it is specified as
      `{'my_asset_file.txt': '/path/to/my_asset_file.txt'}`.
    as_text: whether to write the SavedModel proto in text format.
    exports_to_keep: Number of exports to keep.  Older exports will be
      garbage-collected.  Defaults to 5.  Set to None to disable garbage
      collection.
    target_core: If True, prepare an ExportStrategy for use with
      tensorflow.python.estimator.*.  If False (default), prepare an
      ExportStrategy for use with tensorflow.contrib.learn.python.learn.*.
    strip_default_attrs: Boolean. If `True`, default-valued attributes will be
      removed from the NodeDefs. For a detailed guide, see [Stripping
      Default-Valued Attributes](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/saved_model/README.md#stripping-default-valued-attributes).

  Returns:
    An ExportStrategy that can be passed to the Experiment constructor.
  """
  # pylint: enable=line-too-long
  feature_spec = feature_column.create_feature_spec_for_parsing(
      feature_columns)
  if target_core:
    serving_input_fn = (
        core_export.build_parsing_serving_input_receiver_fn(feature_spec))
  else:
    serving_input_fn = (
        input_fn_utils.build_parsing_serving_input_fn(feature_spec))
  return make_export_strategy(
      serving_input_fn,
      default_output_alternative_key=default_output_alternative_key,
      assets_extra=assets_extra,
      as_text=as_text,
      exports_to_keep=exports_to_keep,
      strip_default_attrs=strip_default_attrs)
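A hypothetical call site for the strategy above (the column definition and asset path are assumptions, not from the source):

# Build an export strategy from the same columns used for training; the
# resulting export parses serialized tf.Examples at serving time.
columns = [feature_column.real_valued_column('x', dimension=4)]
strategy = make_parsing_export_strategy(
    columns,
    assets_extra={'vocab.txt': '/path/to/vocab.txt'},  # illustrative
    exports_to_keep=3)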
def parsing_transforming_serving_input_fn():
  """Serving input_fn that applies transforms to raw data in tf.Examples."""
  raw_input_fn = input_fn_utils.build_parsing_serving_input_fn(
      raw_serving_feature_spec, default_batch_size=None)
  raw_features, _, inputs = raw_input_fn()
  _, transformed_features = (
      saved_transform_io.partially_apply_saved_transform(
          transform_savedmodel_dir, raw_features))
  return input_fn_utils.InputFnOps(transformed_features, None, inputs)
def parsing_transforming_serving_input_receiver_fn():
  """Serving input_fn that applies transforms to raw data in tf.Examples."""
  raw_input_fn = input_fn_utils.build_parsing_serving_input_fn(
      raw_serving_feature_spec, default_batch_size=None)
  raw_features, _, inputs = raw_input_fn()
  _, transformed_features = (
      saved_transform_io.partially_apply_saved_transform_internal(
          transform_savedmodel_dir, raw_features))
  if convert_scalars_to_vectors:
    transformed_features = _convert_scalars_to_vectors(transformed_features)
  return tf.estimator.export.ServingInputReceiver(
      transformed_features, inputs)
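`_convert_scalars_to_vectors` is a helper defined elsewhere in the originating codebase; a plausible sketch of its behavior (an assumption, not the verbatim implementation) is giving batched scalar features an explicit last axis so downstream feature columns see shape [batch, 1]:

def _convert_scalars_to_vectors(features):
  """Plausible sketch: reshape rank-1 (batched scalar) features to vectors."""
  converted = {}
  for name, tensor in features.items():
    if isinstance(tensor, tf.Tensor) and tensor.shape.ndims == 1:
      # [batch] -> [batch, 1]
      converted[name] = tf.expand_dims(tensor, axis=-1)
    else:
      converted[name] = tensor
  return converted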
def _maybe_export(self):
  """Performs model export if an export spec was provided."""
  if self._model_export_spec is None:
    return

  feature_spec = create_feature_spec_for_parsing(
      self._model_export_spec.features)
  serving_input_fn = input_fn_utils.build_parsing_serving_input_fn(
      feature_spec)

  tmp_export_model_dir = tempfile.mkdtemp()
  tmp_export_model_path = self._estimator.export_savedmodel(
      tmp_export_model_dir, serving_input_fn)
  print("temp export model path: {}".format(tmp_export_model_path))
  os.rename(tmp_export_model_path, self._model_export_spec.export_dir)
  print('Succeeded in renaming "{0}" to "{1}"'.format(
      tmp_export_model_path, self._model_export_spec.export_dir))
def serving_input_fn():
  """Input function for serving."""
  # Get raw features by generating the basic serving input_fn and calling it.
  # Here we generate an input_fn that expects a parsed Example proto to be fed
  # to the model at serving time.  See also
  # input_fn_utils.build_default_serving_input_fn.
  raw_input_fn = input_fn_utils.build_parsing_serving_input_fn(
      raw_feature_spec)
  raw_features, _, default_inputs = raw_input_fn()

  # Apply the transform function that was used to generate the materialized
  # data.
  transformed_features = tf_transform_output.transform_raw_features(
      raw_features)

  return input_fn_utils.InputFnOps(transformed_features, None, default_inputs)
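A hedged sketch of where the `tf_transform_output` and `raw_feature_spec` objects this function closes over would come from, assuming the tensorflow_transform package; the working directory is illustrative:

import tensorflow_transform as tft

# TFTransformOutput wraps the output directory of a tf.Transform run and
# exposes both the raw feature spec and the transform graph.
tf_transform_output = tft.TFTransformOutput('/tmp/tft_working_dir')
raw_feature_spec = tf_transform_output.raw_feature_spec()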
def _build_estimator_for_resource_export_test():

  def _input_fn():
    iris = base.load_iris()
    return {
        'feature': constant_op.constant(iris.data, dtype=dtypes.float32)
    }, constant_op.constant(iris.target, shape=[150], dtype=dtypes.int32)

  feature_columns = [
      feature_column_lib.real_valued_column('feature', dimension=4)
  ]

  def resource_constant_model_fn(unused_features, unused_labels, mode):
    """A model_fn that loads a constant from a resource and serves it."""
    assert mode in (model_fn.ModeKeys.TRAIN, model_fn.ModeKeys.EVAL,
                    model_fn.ModeKeys.INFER)

    const = constant_op.constant(-1, dtype=dtypes.int64)
    table = lookup.MutableHashTable(
        dtypes.string, dtypes.int64, const, name='LookupTableModel')
    if mode in (model_fn.ModeKeys.TRAIN, model_fn.ModeKeys.EVAL):
      key = constant_op.constant(['key'])
      value = constant_op.constant([42], dtype=dtypes.int64)
      train_op_1 = table.insert(key, value)
      training_state = lookup.MutableHashTable(
          dtypes.string, dtypes.int64, const, name='LookupTableTrainingState')
      training_op_2 = training_state.insert(key, value)
      return const, const, control_flow_ops.group(train_op_1, training_op_2)
    if mode == model_fn.ModeKeys.INFER:
      key = constant_op.constant(['key'])
      prediction = table.lookup(key)
      return prediction, const, control_flow_ops.no_op()

  est = estimator.Estimator(model_fn=resource_constant_model_fn)
  est.fit(input_fn=_input_fn, steps=1)

  feature_spec = feature_column_lib.create_feature_spec_for_parsing(
      feature_columns)
  serving_input_fn = input_fn_utils.build_parsing_serving_input_fn(
      feature_spec)
  return est, serving_input_fn
def serving_input_fn():
  """Input function for serving."""
  # Get raw features by generating the basic serving input_fn and calling it.
  # Here we generate an input_fn that expects a parsed Example proto to be fed
  # to the model at serving time.  See also
  # input_fn_utils.build_default_serving_input_fn.
  raw_input_fn = input_fn_utils.build_parsing_serving_input_fn(
      raw_feature_spec)
  raw_features, _, default_inputs = raw_input_fn()

  # Apply the transform function that was used to generate the materialized
  # data.
  _, transformed_features = (
      tft.partially_apply_saved_transform(
          os.path.join(working_dir, tft.TRANSFORM_FN_DIR), raw_features))

  return input_fn_utils.InputFnOps(transformed_features, None, default_inputs)
def _build_estimator_for_resource_export_test():

  def _input_fn():
    iris = base.load_iris()
    return {
        'feature': constant_op.constant(iris.data, dtype=dtypes.float32)
    }, constant_op.constant(iris.target, shape=[150], dtype=dtypes.int32)

  feature_columns = [
      feature_column_lib.real_valued_column('feature', dimension=4)
  ]

  def resource_constant_model_fn(unused_features, unused_labels, mode):
    """A model_fn that loads a constant from a resource and serves it."""
    assert mode in (model_fn.ModeKeys.TRAIN, model_fn.ModeKeys.EVAL,
                    model_fn.ModeKeys.INFER)

    const = constant_op.constant(-1, dtype=dtypes.int64)
    table = lookup.MutableHashTable(
        dtypes.string, dtypes.int64, const, name='LookupTableModel')
    update_global_step = variables.get_global_step().assign_add(1)
    if mode in (model_fn.ModeKeys.TRAIN, model_fn.ModeKeys.EVAL):
      key = constant_op.constant(['key'])
      value = constant_op.constant([42], dtype=dtypes.int64)
      train_op_1 = table.insert(key, value)
      training_state = lookup.MutableHashTable(
          dtypes.string, dtypes.int64, const, name='LookupTableTrainingState')
      training_op_2 = training_state.insert(key, value)
      return (const, const,
              control_flow_ops.group(train_op_1, training_op_2,
                                     update_global_step))
    if mode == model_fn.ModeKeys.INFER:
      key = constant_op.constant(['key'])
      prediction = table.lookup(key)
      return prediction, const, update_global_step

  est = estimator.Estimator(model_fn=resource_constant_model_fn)
  est.fit(input_fn=_input_fn, steps=1)

  feature_spec = feature_column_lib.create_feature_spec_for_parsing(
      feature_columns)
  serving_input_fn = input_fn_utils.build_parsing_serving_input_fn(
      feature_spec)
  return est, serving_input_fn
def make_parsing_export_strategy(feature_columns,
                                 default_output_alternative_key=None,
                                 assets_extra=None,
                                 as_text=False,
                                 exports_to_keep=5):
  """Create an ExportStrategy for use with Experiment, using `FeatureColumn`s.

  Creates a SavedModel export that expects to be fed with a single string
  Tensor containing serialized tf.Examples.  At serving time, incoming
  tf.Examples will be parsed according to the provided `FeatureColumn`s.

  Args:
    feature_columns: An iterable of `FeatureColumn`s representing the features
      that must be provided at serving time (excluding labels!).
    default_output_alternative_key: the name of the head to serve when an
      incoming serving request does not explicitly request a specific head.
      Must be `None` if the estimator inherits from ${tf.estimator.Estimator}
      or for single-headed models.
    assets_extra: A dict specifying how to populate the assets.extra directory
      within the exported SavedModel.  Each key should give the destination
      path (including the filename) relative to the assets.extra directory.
      The corresponding value gives the full path of the source file to be
      copied.  For example, the simple case of copying a single file without
      renaming it is specified as
      `{'my_asset_file.txt': '/path/to/my_asset_file.txt'}`.
    as_text: whether to write the SavedModel proto in text format.
    exports_to_keep: Number of exports to keep.  Older exports will be
      garbage-collected.  Defaults to 5.  Set to None to disable garbage
      collection.

  Returns:
    An ExportStrategy that can be passed to the Experiment constructor.
  """
  feature_spec = feature_column.create_feature_spec_for_parsing(
      feature_columns)
  serving_input_fn = input_fn_utils.build_parsing_serving_input_fn(
      feature_spec)
  return make_export_strategy(
      serving_input_fn,
      default_output_alternative_key=default_output_alternative_key,
      assets_extra=assets_extra,
      as_text=as_text,
      exports_to_keep=exports_to_keep)
def make_parsing_export_strategy(feature_columns, exports_to_keep=5):
  """Create an ExportStrategy for use with Experiment, using `FeatureColumn`s.

  Creates a SavedModel export that expects to be fed with a single string
  Tensor containing serialized tf.Examples.  At serving time, incoming
  tf.Examples will be parsed according to the provided `FeatureColumn`s.

  Args:
    feature_columns: An iterable of `FeatureColumn`s representing the features
      that must be provided at serving time (excluding labels!).
    exports_to_keep: Number of exports to keep.  Older exports will be
      garbage-collected.  Defaults to 5.  Set to None to disable garbage
      collection.

  Returns:
    An ExportStrategy that can be passed to the Experiment constructor.
  """
  feature_spec = feature_column.create_feature_spec_for_parsing(
      feature_columns)
  serving_input_fn = input_fn_utils.build_parsing_serving_input_fn(
      feature_spec)
  return make_export_strategy(serving_input_fn,
                              exports_to_keep=exports_to_keep)
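A hypothetical call site (the estimator, input_fns, and columns are assumed, not from the source) showing how the returned strategy plugs into an Experiment:

export_strategy = make_parsing_export_strategy(feature_columns,
                                               exports_to_keep=3)
experiment = tf.contrib.learn.Experiment(
    estimator=my_estimator,             # assumed to exist
    train_input_fn=my_train_input_fn,   # assumed to exist
    eval_input_fn=my_eval_input_fn,     # assumed to exist
    export_strategies=[export_strategy])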
def save_tf_learn_model(estimator, model_name, export_dir, feature_columns):
    feature_spec = create_feature_spec_for_parsing(feature_columns)
    serving_input_fn = input_fn_utils.build_parsing_serving_input_fn(
        feature_spec)
    export_dir = os.path.join(export_dir, model_name)
    estimator.export_savedmodel(export_dir, serving_input_fn)
    print("Done exporting tf.learn model to " + export_dir + "!")
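An illustrative call (all names are assumptions for the sketch):

# Export a trained contrib.learn estimator under <export_dir>/<model_name>;
# `trained_estimator` and `columns` stand in for real objects.
save_tf_learn_model(trained_estimator, 'iris_linear', '/tmp/models', columns)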
def main(unused_argv):
  # Load training and eval data
  mnist = learn.datasets.load_dataset("mnist")
  train_data = mnist.train.images  # Returns np.array
  train_labels = np.asarray(mnist.train.labels, dtype=np.int32)
  eval_data = mnist.test.images  # Returns np.array
  eval_labels = np.asarray(mnist.test.labels, dtype=np.int32)

  # Create the Estimator
  mnist_classifier = learn.Estimator(
      model_fn=cnn_model_fn,
      model_dir="/home/ubuntu/nealtest/mnist_convnet_model")

  # Set up logging for predictions
  # Log the values in the "Softmax" tensor with label "probabilities"
  tensors_to_log = {"probabilities": "softmax_tensor"}
  logging_hook = tf.train.LoggingTensorHook(
      tensors=tensors_to_log, every_n_iter=50)

  # Train the model
  mnist_classifier.fit(
      x=train_data,
      y=train_labels,
      batch_size=100,
      steps=20,
      monitors=[logging_hook])

  # Configure the accuracy metric for evaluation
  metrics = {
      "accuracy":
          learn.MetricSpec(
              metric_fn=tf.metrics.accuracy, prediction_key="classes"),
  }

  # Evaluate the model and print results
  eval_results = mnist_classifier.evaluate(
      x=eval_data, y=eval_labels, metrics=metrics)
  print(eval_results)

  # Export the model
  # Tensor("input:0", shape=(?, 784), dtype=float32)
  feature_spec = {
      'input': tf.FixedLenSequenceFeature(
          shape=[784], dtype=tf.float32, allow_missing=True)
  }
  # feature_spec = {'input': tf.FixedLenFeature(shape=[784], dtype=tf.float32)}
  serving_input_receiver_fn1 = input_fn_utils.build_parsing_serving_input_fn(
      feature_spec)

  def serving_input_receiver_fn3():
    """An input receiver that expects a serialized tf.Example."""
    serialized_tf_example = tf.placeholder(
        dtype=tf.string, shape=[100], name='input_example_tensor')
    receiver_tensors = {'examples': serialized_tf_example}
    features = tf.parse_example(serialized_tf_example, feature_spec)
    return tf.estimator.export.ServingInputReceiver(features, receiver_tensors)

  def serving_input_receiver_fn2():
    """An input receiver that expects a serialized tf.Example."""
    serialized_tf_example = tf.placeholder(tf.string, name='tf_example')
    receiver_tensors = {'examples': serialized_tf_example}
    feature_configs = {
        'x': tf.FixedLenFeature(shape=[784], dtype=tf.float32),
    }
    tf_example = tf.parse_example(serialized_tf_example, feature_configs)
    tf.identity(tf_example['x'], name='x')  # use tf.identity() to assign name
    return tf.estimator.export.ServingInputReceiver(
        tf_example, receiver_tensors)

  mnist_classifier.export_savedmodel(
      "/home/ubuntu/nealtest/mnist_export/",
      serving_input_receiver_fn1,
      as_text=True)
import tensorflow as tf
from tensorflow.contrib.learn.python.learn.utils import input_fn_utils


def input_fn():
  # Use float values for 'b' to match the float32 real-valued column below.
  features = {'a': tf.constant([["1"], ["2"]]),
              'b': tf.constant([[3.0], [4.0]])}
  labels = tf.constant([0, 1])
  return features, labels


# feature_a = tf.contrib.layers.sparse_column_with_integerized_feature("a", bucket_size=10)
# feature_b = tf.contrib.layers.sparse_column_with_integerized_feature("b", bucket_size=10)
# feature_c = tf.contrib.layers.crossed_column([feature_a, feature_b], hash_bucket_size=100)
# feature_columns = [feature_a, feature_b, feature_c]

feature_a = tf.contrib.layers.sparse_column_with_hash_bucket(
    "a", hash_bucket_size=1000)
feature_b = tf.contrib.layers.real_valued_column("b")
feature_columns = [feature_a, feature_b]

model = tf.contrib.learn.LinearClassifier(feature_columns=feature_columns)
model.fit(input_fn=input_fn, steps=10)

feature_spec = tf.contrib.layers.create_feature_spec_for_parsing(
    feature_columns)
serving_input_fn = input_fn_utils.build_parsing_serving_input_fn(feature_spec)

savedmodel_path = "./savedmodel"
model.export_savedmodel(savedmodel_path, serving_input_fn)
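Because the exported serving graph parses serialized tf.Examples, a client would feed protos like the following. A hedged sketch, with feature types matching the columns above (bytes for the hashed string column 'a', float for the real-valued column 'b'):

example = tf.train.Example(features=tf.train.Features(feature={
    'a': tf.train.Feature(bytes_list=tf.train.BytesList(value=[b'1'])),
    'b': tf.train.Feature(float_list=tf.train.FloatList(value=[3.0])),
}))
serialized = example.SerializeToString()
# `serialized` is the kind of input the exported model's default serving
# signature expects (e.g., when the SavedModel is hosted by TF Serving).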
import numpy as np
import pandas
import tensorflow as tf
from tensorflow.python.ops import parsing_ops
from tensorflow.contrib.tensor_forest.python import tensor_forest
from tensorflow.contrib.learn.python.learn.utils import input_fn_utils

# Renamed from `input` to avoid shadowing the built-in.
input_data = pandas.read_csv(
    "/home/glenn/git/clojure-news-feed/client/ml/etl/throughput.csv")
data = np.array(input_data[input_data.columns[6:9]], dtype=np.float32)
target = np.array(
    input_data['cloud'].apply(lambda x: 1.0 if x == 'GKE' else 0.0),
    dtype=np.float32)

hparams = tensor_forest.ForestHParams(
    num_classes=2, num_features=3, num_trees=1, regression=False,
    max_nodes=500).fill()
classifier = tf.contrib.tensor_forest.client.random_forest.TensorForestEstimator(
    hparams, model_dir="/home/glenn/git/clojure-news-feed/client/ml/dt/e1")

feature_spec = {
    "friends": parsing_ops.FixedLenFeature([1], dtype=tf.float32),
    "outbound": parsing_ops.FixedLenFeature([1], dtype=tf.float32),
    "participant": parsing_ops.FixedLenFeature([1], dtype=tf.float32)
}
serving_input_fn = input_fn_utils.build_parsing_serving_input_fn(feature_spec)

c = classifier.fit(x=data, y=target)
c.export_savedmodel(
    "/home/glenn/git/clojure-news-feed/client/ml/dt/export", serving_input_fn)
print(c.evaluate(x=data, y=target))