def main(_):

    # 1. change the dataset
    # dataset = imagenet.get_split('train', FLAGS.data_dir)
    dataset = flowers.get_split(FLAGS.data_split, FLAGS.data_dir)

    model = InceptionModel(checkpoints_file=FLAGS.checkpoint_file_path)

    # 2. set the model to inference mode (is_training=False instead of True)
    # op, graph = model.build(dataset, image_height=224, image_width=224, num_classes=1000, is_training=True)
    op, graph = model.build(dataset,
                            image_height=224,
                            image_width=224,
                            num_classes=1000,
                            is_training=False)

    # 3. comment out the actual training code
    # slim.learning.train(
    #     op,
    #     logdir=train_dir,
    #     init_fn=model.init_fn,
    #     number_of_steps=100)

    # 4. dump model to the specified path
    from bigdl.util.tf_utils import dump_model
    dump_model(path=FLAGS.dump_model_path,
               ckpt_file=FLAGS.checkpoint_file_path,
               graph=graph)
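
# Note: the function above assumes FLAGS and an entry point defined elsewhere in
# the script. A minimal, hypothetical sketch of those definitions (flag names are
# taken from the snippet; default values are purely illustrative) could look like:
import tensorflow as tf

FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string('data_split', 'train', 'dataset split to read')
tf.app.flags.DEFINE_string('data_dir', '/tmp/flowers', 'directory containing the flowers dataset')
tf.app.flags.DEFINE_string('checkpoint_file_path', '', 'path to the pre-trained checkpoint')
tf.app.flags.DEFINE_string('dump_model_path', '/tmp/model', 'where to write the BigDL-readable dump')

if __name__ == '__main__':
    tf.app.run()  # invokes main(_) defined above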
Example #2
def main():
    """
    How to run this script:
    python export_tf_checkpoint.py meta_file chkp_file save_path
    """
    # Load the graph definition from the meta file
    saver = tf.train.import_meta_graph(argv[1])
    with tf.Session() as sess:
        # Restore the variable values from the checkpoint
        saver.restore(sess, argv[2])
        # Dump the graph and weights to save_path in a BigDL-readable format
        dump_model(argv[3], sess)
Example #3
def main():
    meta_file = None
    checkpoint = None
    save_path = "model"
    saver_folder = None

    # Work out the meta file, checkpoint and save path from the command-line arguments
    if len(argv) == 2:
        if op.isdir(argv[1]):
            saver_folder = argv[1]
        else:
            meta_file = argv[1] + ".meta"
            checkpoint = argv[1]
    elif len(argv) == 3:
        if op.isdir(argv[1]):
            saver_folder = argv[1]
        else:
            meta_file = argv[1] + ".meta"
            checkpoint = argv[1]
        save_path = argv[2]
    elif len(argv) == 4:
        meta_file = argv[1]
        checkpoint = argv[2]
        save_path = argv[3]
    else:
        print(
            "Invalid script arguments. How to run the script:\n" +
            "python export_tf_checkpoint.py checkpoint_name\n" +
            "python export_tf_checkpoint.py saver_folder\n" +
            "python export_tf_checkpoint.py checkpoint_name save_path\n" +
            "python export_tf_checkpoint.py saver_folder save_path\n" +
            "python export_tf_checkpoint.py meta_file checkpoint_name save_path"
        )
        exit(1)

    if op.isfile(save_path):
        print("The save path points to an existing file. Exiting.")
        exit(1)

    if not op.exists(save_path):
        print("Creating folder " + save_path)
        os.makedirs(save_path)

    with tf.Session() as sess:
        if saver_folder is None:
            # Restore the graph and weights from the meta file and checkpoint
            saver = tf.train.import_meta_graph(meta_file, clear_devices=True)
            saver.restore(sess, checkpoint)
        else:
            # Load a SavedModel, then re-save its variables as a checkpoint so
            # that dump_model can read the weights from it
            tf.saved_model.loader.load(sess,
                                       [tf.saved_model.tag_constants.SERVING],
                                       saver_folder)
            checkpoint = save_path + '/model.ckpt'
            saver = tf.train.Saver()
            saver.save(sess, checkpoint)
        # Dump the graph definition and weights in a BigDL-readable format
        dump_model(save_path, None, sess, checkpoint)
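
# For completeness: the function above uses module-level names (argv, op, os, tf,
# dump_model) that are not shown in the excerpt. A plausible set of imports and
# an entry point, assuming `op` is an alias for os.path, would be:
import os
from os import path as op
from sys import argv

import tensorflow as tf
from bigdl.util.tf_utils import dump_model

if __name__ == "__main__":
    main()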
Example #4
import tensorflow as tf

# This is your model definition.
xs = tf.placeholder(tf.float32, [None, 1])

W1 = tf.Variable(tf.zeros([1, 10]) + 0.2)
b1 = tf.Variable(tf.zeros([10]) + 0.1)
Wx_plus_b1 = tf.nn.bias_add(tf.matmul(xs, W1), b1)
output = tf.nn.tanh(Wx_plus_b1, name="output")

# Add the following lines right after your model definition.
from bigdl.util.tf_utils import dump_model
dump_model_path = "/tmp/model"
# This call creates a Session, initializes all the Variables, and saves the model
# definition and variables to dump_model_path in a BigDL-readable format.
dump_model(path=dump_model_path)
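
# For reference, a folder produced by dump_model can then be loaded on the BigDL
# side. The call below is a hypothetical sketch based on the BigDL 0.x Python API
# (bigdl.nn.layer.Model.load_tensorflow); the file names model.pb / model.bin are
# assumptions, not guaranteed outputs of dump_model, and "Placeholder" is only the
# default node name TensorFlow gives the unnamed placeholder xs above (giving it
# an explicit name in the model definition would make this less fragile).
from bigdl.nn.layer import Model

bigdl_model = Model.load_tensorflow(dump_model_path + "/model.pb",
                                    ["Placeholder"], ["output"],
                                    byte_order="little_endian",
                                    bigdl_type="float",
                                    bin_file=dump_model_path + "/model.bin")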