Example #1
    def _example_serving_input_fn():
        feature_spec = tf_lib.get_feature_spec(model_name, ctx, training=False)
        # Placeholder for a batch of serialized tf.Example protos sent at serving time
        example_bytestring = tf.placeholder(shape=[None], dtype=tf.string)
        # tf.parse_example handles the batched ([None]-shaped) input;
        # tf.parse_single_example would require a scalar serialized proto
        parsed_features = tf.parse_example(example_bytestring, feature_spec)
        # Give each dense feature an explicit trailing dimension: [batch] -> [batch, 1]
        features = {key: tf.expand_dims(tensor, -1) for key, tensor in parsed_features.items()}

        return tf.estimator.export.ServingInputReceiver(
            features=features, receiver_tensors={"example_proto": example_bytestring}
        )
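
A serving_input_receiver_fn like the one above is what an Estimator calls when exporting a SavedModel for serving. As a point of reference, here is a minimal, self-contained sketch of the same pattern; the feature spec is hypothetical and stands in for whatever tf_lib.get_feature_spec returns, and the commented-out export call assumes an already trained Estimator (TF 1.x APIs):

import tensorflow as tf

# Hypothetical feature spec; in the snippet above it comes from tf_lib.get_feature_spec
feature_spec = {
    "age": tf.FixedLenFeature([], tf.float32),
    "occupation": tf.FixedLenFeature([], tf.string),
}

def serving_input_fn():
    # Batch of serialized tf.Example protos received at serving time
    example_bytestring = tf.placeholder(shape=[None], dtype=tf.string)
    features = tf.parse_example(example_bytestring, feature_spec)
    return tf.estimator.export.ServingInputReceiver(
        features=features, receiver_tensors={"example_proto": example_bytestring}
    )

# With a trained Estimator, the function above is passed to the export call, e.g.:
# estimator.export_savedmodel("export_dir", serving_input_fn)
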
Example #2
def generate_example_parsing_fn(model_name, ctx, training=True):
    """Returns a function that parses one serialized tf.Example into (features, target)."""
    model = ctx.models[model_name]

    feature_spec = tf_lib.get_feature_spec(model_name, ctx, training)

    def _parse_example(example_proto):
        # Parse a single serialized Example proto into a dict of feature tensors
        features = tf.parse_single_example(serialized=example_proto, features=feature_spec)
        # Split off the label; it may be absent from the spec when training=False
        target = features.pop(model["target_column"], None)
        return features, target

    return _parse_example
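
The function returned by generate_example_parsing_fn is meant to be mapped over a dataset of serialized tf.Example records. The sketch below shows one plausible way to wire it into an Estimator input_fn; the file names, shuffle buffer, and batch size are illustrative, and parse_fn stands for the function returned above (TF 1.x tf.data APIs):

import tensorflow as tf

def make_input_fn(filenames, parse_fn, batch_size=128):
    # filenames: TFRecord files containing serialized tf.Example protos
    def input_fn():
        dataset = tf.data.TFRecordDataset(filenames)
        dataset = dataset.map(parse_fn)  # -> (features, target) per record
        dataset = dataset.shuffle(10000).repeat().batch(batch_size)
        return dataset
    return input_fn

# e.g. estimator.train(input_fn=make_input_fn(["train.tfrecord"],
#                                             generate_example_parsing_fn(model_name, ctx)))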