Example #1
import tensorflow as tf
import tensorflow_hub as hub
from tensorflow.saved_model import simple_save

def download_model():
    export_dir = "./simple_tensorflow_serving/models/use/001"
    with tf.Session(graph=tf.Graph()) as sess:
        module = hub.Module(
            "https://tfhub.dev/google/universal-sentence-encoder-multilingual-large/1"
        )
        text_input = tf.placeholder(dtype=tf.string, shape=[None])

        sess.run([tf.global_variables_initializer(), tf.tables_initializer()])

        embeddings = module(text_input)

        simple_save(
            sess,
            export_dir,
            inputs={"text": text_input},
            outputs={"embeddings": embeddings},
            legacy_init_op=tf.tables_initializer(),
        )
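A quick sanity check is to load the exported SavedModel back into a fresh session and run one batch through the serving signature. This is a minimal sketch, assuming the export directory above and TF 1.x APIs:

import tensorflow as tf

export_dir = "./simple_tensorflow_serving/models/use/001"
with tf.Session(graph=tf.Graph()) as sess:
    # Loading also runs the saved legacy_init_op, so lookup tables are initialized.
    meta = tf.saved_model.loader.load(
        sess, [tf.saved_model.tag_constants.SERVING], export_dir)
    sig = meta.signature_def["serving_default"]
    text_t = sess.graph.get_tensor_by_name(sig.inputs["text"].name)
    emb_t = sess.graph.get_tensor_by_name(sig.outputs["embeddings"].name)
    print(sess.run(emb_t, {text_t: ["hello world"]}).shape)  # expected: (1, 512)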
Example #2
def save_h5_pb(path_pb: str, path_json='', path_h5='', model=None, **kwargs):
    '''
    Restores the model architecture from a .json file,
    builds a Keras model in .h5 format, and converts the
    Keras model from .h5 to .pb (for TF Serving).
    :param path_pb: string
    :param path_json: string
    :param path_h5: string
    :param model: object, a tf.keras model
    :param kwargs: dict, passed through for custom_objects in the tf.keras model
    '''
    # __version__, load_structure, get_session, simple_save and clear_session
    # are imported in this function's enclosing module.
    if int(__version__.split('.')[0]) == 2:
        if path_json:
            print('load structure.json')
            model = load_structure(path_json, **kwargs)
        if path_h5:
            print('load weights.h5')
            filename = os.path.join(path_h5, 'weights.h5')
            model.load_weights(filename)
        model.save(path_pb)
    else:
        if path_json:
            print('load structure.json')
            model = load_structure(path_json, **kwargs)
        if path_h5:
            print('load weights.h5')
            filename = os.path.join(path_h5, 'weights.h5')
            model.load_weights(filename)
            filename = os.path.join(path_h5, 'model.h5')
            model.save(filename)
        assert len(
            os.listdir(path_pb)) == 0, f'Directory {path_pb} is not empty'
        with get_session() as sess:
            simple_save(sess,
                        path_pb,
                        inputs={'input_image': model.input},
                        outputs={t.name: t
                                 for t in model.outputs})
    clear_session()
    print(f'\nData successfully saved to {os.path.dirname(path_pb)}')
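A hypothetical invocation, assuming structure.json and weights.h5 sit in the same directory and load_structure is defined in the enclosing module:

save_h5_pb(path_pb='./export/1', path_json='./model', path_h5='./model')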
Example #3

import tensorflow as tf
from tensorflow import saved_model

def add_string():
    tf.reset_default_graph()
    sess = tf.Session()
    input1 = tf.placeholder(tf.string, name="input1")
    input2 = tf.placeholder(tf.string, name="input2")
    total = input1 + input2  # renamed from `sum` to avoid shadowing the built-in
    output = tf.identity(total, "output")
    print(input1)
    print(input2)
    print(total)
    print(output)

    feed_dict = {input1: "hello", input2: "world"}  # renamed from `dict` to avoid shadowing the built-in
    result = sess.run(output, feed_dict).decode()  # decode the bytestring
    print(result)

    saved_model.simple_save(sess,
                            'add_string',
                            inputs={
                                "input1": input1,
                                "input2": input2
                            },
                            outputs={"output": output})
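If the add_string directory is then served by TensorFlow Serving (the model name and port below are assumptions), the signature can be exercised over the REST API in row format, one object per instance keyed by input name:

import requests

payload = {"instances": [{"input1": "hello", "input2": "world"}]}
resp = requests.post("http://localhost:8501/v1/models/add_string:predict",
                     json=payload)
print(resp.json())  # expected: {"predictions": ["helloworld"]}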
Example #4

import tensorflow as tf
from tensorflow import saved_model

def boolean_logic():
    tf.reset_default_graph()
    sess = tf.Session()
    input1 = tf.placeholder(tf.bool, name="input1")
    input2 = tf.placeholder(tf.bool, name="input2")
    output_and = tf.identity(input1 & input2, "output_and")
    output_or = tf.identity(input1 | input2, "output_or")
    output_not_and = tf.identity(~(input1 & input2), "output_not_and")
    output_not_or = tf.identity(~(input1 | input2), "output_not_or")
    output_xor = tf.identity(input1 ^ input2, "output_xor")
    print(input1)
    print(input2)
    print(output_and)
    print(output_or)
    print(output_not_and)
    print(output_not_or)
    print(output_xor)

    feed_dict = {input1: True, input2: False}  # renamed from `dict` to avoid shadowing the built-in
    result = sess.run(
        [output_and, output_or, output_not_and, output_not_or, output_xor],
        feed_dict)
    print(result)

    saved_model.simple_save(sess,
                            'boolean_logic',
                            inputs={
                                "input1": input1,
                                "input2": input2
                            },
                            outputs={
                                "and": output_and,
                                "or": output_or,
                                "not_and": output_not_and,
                                "not_or": output_not_or,
                                "xor": output_xor
                            })
Example #5
# This script shows how to download and set up the universal-sentence-encoder
# to be served by TensorFlow Serving over a REST API

import tensorflow as tf
import tensorflow_hub as hub
from tensorflow.saved_model import simple_save

export_dir = "./use/00000001"
with tf.Session(graph=tf.Graph()) as sess:
    module = hub.Module(
        "https://tfhub.dev/google/universal-sentence-encoder/2")
    input_params = module.get_input_info_dict()
    text_input = tf.placeholder(name='text',
                                dtype=input_params['text'].dtype,
                                shape=input_params['text'].get_shape())
    sess.run([tf.global_variables_initializer(), tf.tables_initializer()])
    embeddings = module(text_input)

    simple_save(sess,
                export_dir,
                inputs={'text': text_input},
                outputs={'embeddings': embeddings},
                legacy_init_op=tf.tables_initializer())
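With tensorflow_model_server pointed at ./use (the model name use and port 8501 are assumptions), embeddings can be requested over the same REST API; because the signature has a single input, the instances list may hold bare strings:

import requests

payload = {"instances": ["The quick brown fox", "jumps over the lazy dog"]}
resp = requests.post("http://localhost:8501/v1/models/use:predict", json=payload)
embeddings = resp.json()["predictions"]
print(len(embeddings), len(embeddings[0]))  # expected: 2 512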
Example #6
                   # (snippet begins mid-way through a model.fit(...) call;
                   # the earlier arguments are not shown)
                   stop_on_nan],  # optionally also: reduce_lr
        use_multiprocessing=True,
        workers=8,
        validation_data=gen_x_test)
    # ## Plot performance
    # Here, we plot the history of the training and the performance in a ROC curve

    plot_history(history)
    plt.savefig(f'{path}/history.pdf', bbox_inches='tight')
    model.save(f'{path}/model.h5')

    from tensorflow import saved_model
    saved_model.simple_save(K.get_session(),
                            f'{path}/saved_model',
                            inputs={t.name: t
                                    for t in model.input},  # assumes model.input is a list (multi-input model)
                            outputs={t.name: t
                                     for t in model.outputs})

    from tensorflow.python.framework import graph_util
    frozen_graph = graph_util.convert_variables_to_constants(
        K.get_session(),
        K.get_session().graph_def, ['output/BiasAdd'])
    # train.write_graph(graph_or_graph_def=K.get_session().graph_def, logdir=f'{path}', name='saved_model.pb', as_text=False)
    train.write_graph(graph_or_graph_def=frozen_graph,
                      logdir=f'{path}',
                      name='saved_model.pb',
                      as_text=False)

# Print info about weights
names = [weight.name for layer in model.layers for weight in layer.weights]
Example #7
def save_pretrained_model(sess, outputs, feeds, out_dir, model_name="pretrained"):
    """Save pretrained model and config"""
    try:
        import os
        import sys
        import subprocess
        import numpy as np
        import tensorflow as tf
        to_onnx_path = "{}/to_onnx".format(out_dir)
        if not os.path.isdir(to_onnx_path):
            os.makedirs(to_onnx_path)
        saved_model = "{}/saved_model".format(to_onnx_path)
        inputs_path = "{}/inputs.npy".format(to_onnx_path)
        pretrained_model_yaml_path = "{}/pretrained.yaml".format(to_onnx_path)
        envars_path = "{}/environment.txt".format(to_onnx_path)
        pip_requirement_path = "{}/requirements.txt".format(to_onnx_path)

        print("===============Save Saved Model========================")
        if os.path.exists(saved_model):
            print("{} already exists, SKIP".format(saved_model))
            return

        print("Save tf version, python version and installed packages")
        tf_version = tf.__version__
        py_version = sys.version
        pip_packages = subprocess.check_output([sys.executable, "-m", "pip", "freeze", "--all"])
        pip_packages = pip_packages.decode("UTF-8")
        with open(envars_path, "w") as fp:
            fp.write(tf_version + os.linesep)
            fp.write(py_version)
        with open(pip_requirement_path, "w") as fp:
            fp.write(pip_packages)

        print("Save model for tf2onnx: {}".format(to_onnx_path))
        # save inputs
        inputs = {}
        for inp, value in feeds.items():
            if isinstance(inp, str):
                inputs[inp] = value
            else:
                inputs[inp.name] = value
        np.save(inputs_path, inputs)
        print("Saved inputs to {}".format(inputs_path))

        # save graph and weights
        from tensorflow.saved_model import simple_save
        # pylint: disable=unnecessary-comprehension
        simple_save(sess, saved_model,
                    {n: i for n, i in zip(inputs.keys(), feeds.keys())},
                    {op.name: op for op in outputs})
        print("Saved model to {}".format(saved_model))

        # generate config
        pretrained_model_yaml = '''
{}:
  model: ./saved_model
  model_type: saved_model
  input_get: get_ramp
'''.format(model_name)
        pretrained_model_yaml += "  inputs:\n"
        for inp, _ in inputs.items():
            pretrained_model_yaml += \
                "    \"{input}\": np.array(np.load(\"./inputs.npy\")[()][\"{input}\"])\n".format(input=inp)
        outputs = [op.name for op in outputs]
        pretrained_model_yaml += "  outputs:\n"
        for out in outputs:
            pretrained_model_yaml += "    - {}\n".format(out)
        with open(pretrained_model_yaml_path, "w") as f:
            f.write(pretrained_model_yaml)
        print("Saved pretrained model yaml to {}".format(pretrained_model_yaml_path))
        print("=========================================================")
    except Exception as ex:  # pylint: disable=broad-except
        print("Error: {}".format(ex))
Example #8
def train(dataset_path: str = None, batch_size=256, epochs=100, use_horovod=False,
          output_path=None, model_name='keras_cifar100_trained_model.h5'):
    x_train, y_train, x_test, y_test = load_data(dataset_path)
    print('x_train shape:', x_train.shape)
    print(x_train.shape[0], 'train samples')
    print(x_test.shape[0], 'test samples')

    num_classes = 100

    if not output_path:
        output_path = os.path.join(os.getcwd(), 'saved_models')

    # Convert class vectors to binary class matrices.
    y_train = keras.utils.to_categorical(y_train, num_classes)
    y_test = keras.utils.to_categorical(y_test, num_classes)

    model = create_model(input_shape=x_train.shape[1:], num_classes=num_classes)
    if use_horovod:
        import horovod.keras as hvd
        hvd.init()
        opt = keras.optimizers.rmsprop(lr=0.0001 * hvd.size(), decay=1e-6)
        opt = hvd.DistributedOptimizer(opt)
        with tf.Graph().as_default():
            inference_model = create_model(input_shape=x_train.shape[1:], num_classes=num_classes)
            inference_dummy_opt = keras.optimizers.rmsprop(lr=0.0001, decay=1e-6)
            inference_model.compile(loss='categorical_crossentropy', optimizer=inference_dummy_opt,
                                    metrics=['accuracy'])
    else:
        opt = keras.optimizers.rmsprop(lr=0.0001, decay=1e-6)
        inference_model = model

    # Compile and export inference model graph
    serve_graph_file = f'{output_path}/servegraph.meta'
    tf.train.export_meta_graph(serve_graph_file, as_text=True)

    model.compile(loss='categorical_crossentropy', optimizer=opt, metrics=['accuracy'])

    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')
    x_train /= 255
    x_test /= 255

    model.summary()

    callbacks = []
    if use_horovod:
        callbacks.append(hvd.callbacks.BroadcastGlobalVariablesCallback(0))
    # Save checkpoints/metrics only on first worker
    if not use_horovod or hvd.rank() == 0:
        callbacks.append(TensorflowModelCheckpoint(f'{output_path}/checkpoint-{{epoch}}.h5'))
        callbacks.append(NautaExperimentMetricsCallback())
        callbacks.append(keras.callbacks.TensorBoard(log_dir=f'{output_path}/tensorboard', update_freq=1000,
                                                     histogram_freq=0, write_graph=True))

    model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs, validation_data=(x_test, y_test),
              shuffle=True, callbacks=callbacks)

    # Save model and weights
    if not os.path.isdir(output_path):
        os.makedirs(output_path)
    model_path = os.path.join(output_path, model_name)
    model.save(model_path)
    print(f'Saved trained model at {model_path}')

    # Save model in Tensorflow Serving compatible format
    # https://gist.github.com/dmwendt/ed2779f07aa849eda2e1756cd3b9fcb0
    if not use_horovod or hvd.rank() == 0:
        # Now save the model for inference.
        # First, load the parameters from the latest checkpoint file.
        checkpoint_file = tf.train.latest_checkpoint(output_path)
        tf_model_export_dir = f'{output_path}/cifar100_tf_model/1'
        # Create a new graph to import the previously exported one.
        with tf.Graph().as_default():
            # Import the saved graph.
            restorer = tf.train.import_meta_graph(serve_graph_file)
            with tf.Session() as sess:
                restorer.restore(sess, checkpoint_file)
                saved_model.simple_save(session=sess, export_dir=tf_model_export_dir,
                                        inputs={'x': inference_model.layers[0].input},
                                        outputs={'y': inference_model.layers[-1].output})
        print(f'Saved trained model in TF format at {tf_model_export_dir}')

    # Score trained model.
    scores = model.evaluate(x_test, y_test, verbose=1)
    print('Test loss:', scores[0])
    print('Test accuracy:', scores[1])
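A hypothetical invocation; the dataset directory layout is whatever load_data() in the enclosing module expects:

train(dataset_path='./cifar-100-python', batch_size=128, epochs=10,
      output_path='./saved_models')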