# Example #1
# 0
def _input_tensor_test(data_file, batch_size=100):
    """test for categorical_column and cross_column input.

    Smoke-test helper (TF1 graph mode): builds hashed and crossed feature
    columns, converts them into dense tensors with
    tf.feature_column.input_layer, and prints the evaluated values for
    manual inspection.

    Args:
        data_file: path to the CSV file consumed by _CsvDataset.
        batch_size: number of rows per batch produced by input_fn.
    """
    # InteractiveSession installs itself as the default session so that
    # Tensor.eval() below works without an explicit session argument.
    sess = tf.InteractiveSession()
    features, labels = _CsvDataset(data_file).input_fn('train',
                                                       batch_size=batch_size)
    # NOTE(review): each eval() pulls a fresh batch from the dataset
    # iterator, so these prints and the later ones likely show different
    # batches — confirm that is intended.
    print(features['age'].eval())
    print(features['min_term_6m'].eval())
    # Hash-bucket categorical columns (10000 and 30 buckets); no vocabulary
    # table is required for these.
    tag_list = tf.feature_column.categorical_column_with_hash_bucket(
        'tag_list', 10000)
    min_term_6m = tf.feature_column.categorical_column_with_hash_bucket(
        'min_term_6m', 30)
    # Cross of cell_province x age, hashed into 600 buckets.
    cell_provinceXage = tf.feature_column.crossed_column(
        ['cell_province', 'age'], 600)
    for f in [tag_list, min_term_6m, cell_provinceXage]:
        # f_dense = tf.feature_column.indicator_column(f)
        f_embed = tf.feature_column.embedding_column(f, 5)
        input_tensor = tf.feature_column.input_layer(features, [f_embed])
        # input_layer above just created new embedding variables, so the
        # initializer must run inside the loop, before eval().
        sess.run(tf.global_variables_initializer())
        # input_tensor = tf.feature_column.input_layer(features, [f_dense])
        print('{} input tensor:\n {}'.format(f, input_tensor.eval()))
    # NOTE(review): input_layer normally expects dense columns (embedding/
    # indicator); passing raw categorical/crossed columns here may raise at
    # runtime — verify this path.
    dense_tensor = tf.feature_column.input_layer(
        features, [min_term_6m, tag_list, cell_provinceXage])
    print('total input tensor:\n {}'.format(sess.run(dense_tensor)))

    wide_columns, deep_columns = _build_model_columns()
    dense_tensor = tf.feature_column.input_layer(features, deep_columns)
    sess.run(tf.global_variables_initializer()
             )  # fix Attempting to use uninitialized value error.
    sess.run(tf.tables_initializer())  # fix Table not initialized error.
    print(sess.run(dense_tensor))
def main(_):
    """Export the trained estimator as a SavedModel for serving.

    Validates the requested model version, derives the serving input
    receiver from the model's own feature columns, then builds the custom
    estimator and writes the SavedModel to FLAGS.export_dir.
    """
    # Guard: a non-positive version number cannot name an export.
    if FLAGS.model_version <= 0:
        print('Please specify a positive value for version number.')
        sys.exit(-1)

    # tf.estimator.export offers build_parsing_serving_input_receiver_fn and
    # build_raw_serving_input_receiver_fn out of the box. If neither fits, a
    # custom serving_input_receiver_fn() can be written instead: create a
    # string placeholder for serialized tf.Example protos, parse it with
    # tf.parse_example against a feature spec (FixedLenFeature /
    # VarLenFeature per key), and return a ServingInputReceiver that pairs
    # the parsed features with the receiver tensors.

    # The parse spec is derived from the model's columns, so serving-time
    # parsing stays consistent with training-time features. It maps each
    # feature key to a FixedLenFeature or VarLenFeature.
    wide_columns, deep_columns = _build_model_columns()
    feature_spec = tf.feature_column.make_parse_example_spec(
        wide_columns + deep_columns)
    serving_input_receiver_fn = (
        tf.estimator.export.build_parsing_serving_input_receiver_fn(
            feature_spec))

    model_dir = os.path.join(model_base_dir, FLAGS.model_type)
    export_dir = os.path.join(FLAGS.export_dir, FLAGS.model_type)
    print("building custom estimator")
    model = build_custom_estimator(model_dir, FLAGS.model_type)
    print("exporting saved model")
    model.export_savedmodel(export_dir,
                            serving_input_receiver_fn,
                            as_text=CONF['as_text'],
                            checkpoint_path=FLAGS.checkpoint_path)