Example 1
def tf1_export_ema_ckpt():
    """Restore variables from a given checkpoint."""
    with tf1.Session() as sess:
        model = effnetv2_model.EffNetV2Model(FLAGS.model_name,
                                             FLAGS.hparam_str)
        batch_size = FLAGS.batch_size
        isize = FLAGS.image_size or model.cfg.eval.isize
        inputs = tf.ones((batch_size, isize, isize, 3), tf.float32)
        _ = model(inputs, training=False)
        sess.run(tf1.global_variables_initializer())
        if tf.io.gfile.isdir(FLAGS.model_dir):
            ckpt_path = tf1.train.latest_checkpoint(FLAGS.model_dir)
        else:
            ckpt_path = FLAGS.model_dir

        ema = tf1.train.ExponentialMovingAverage(decay=0.0)
        ema_vars = utils.get_ema_vars()
        var_dict = ema.variables_to_restore(ema_vars)
        ema_assign_op = ema.apply(ema_vars)

        tf1.train.get_or_create_global_step()
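        # Re-run the initializer so the EMA shadow variables created by
        # ema.apply() and the newly created global step start initialized.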
        sess.run(tf1.global_variables_initializer())
        saver = tf1.train.Saver(var_dict, max_to_keep=1)
        # Restore all variables from ckpt.
        saver.restore(sess, ckpt_path)

        print('export model to {}'.format(FLAGS.export_dir))
        sess.run(ema_assign_op)
        saver = tf1.train.Saver(max_to_keep=1, save_relative_paths=True)
        saver.save(sess, FLAGS.export_dir)
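The decay=0.0 average is the key trick here: a single apply copies the live weights into the shadow variables, and variables_to_restore maps each checkpoint shadow name to its live variable, so restoring through that map loads the averaged weights directly into the model. A minimal sketch of that name mapping, using the TF1 compat API (the variable name is illustrative):

import tensorflow.compat.v1 as tf1

tf1.disable_eager_execution()

w = tf1.Variable(1.0, name='w')
ema = tf1.train.ExponentialMovingAverage(decay=0.0)
ema.apply([w])

# Keys are the shadow names stored in the checkpoint; values are the live
# variables they should be restored into.
var_dict = ema.variables_to_restore([w])
print(var_dict)  # {'w/ExponentialMovingAverage': <tf.Variable 'w:0' ...>}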
Example 2
def tf1_benchmark():
    """Run TF1 inference and benchmark."""
    # pylint: disable=g-direct-tensorflow-import,g-import-not-at-top
    from tensorflow.python.client import timeline
    with tf1.Session() as sess:
        model = effnetv2_model.EffNetV2Model(FLAGS.model_name,
                                             FLAGS.hparam_str)
        batch_size = FLAGS.batch_size
        run_options = tf1.RunOptions(trace_level=tf1.RunOptions.FULL_TRACE)
        run_metadata = tf1.RunMetadata()
        isize = FLAGS.image_size or model.cfg.eval.isize
        data_dtype = tf.float16 if FLAGS.mixed_precision else tf.float32
        inputs = tf.ones((batch_size, isize, isize, 3), data_dtype)
        output = model(inputs, training=False)
        sess.run(tf1.global_variables_initializer())

        print('starting warmup.')
        for _ in range(5):
            sess.run(output)

        print('starting benchmark.')
        start = time.perf_counter()
        for _ in range(10):
            sess.run(output)
        end = time.perf_counter()
        inference_time = (end - start) / 10

        print('Per batch inference time: ', inference_time)
        print('FPS: ', batch_size / inference_time)

        if FLAGS.trace_file:
            sess.run(output, options=run_options, run_metadata=run_metadata)
            with tf.io.gfile.GFile(FLAGS.trace_file, 'w') as f:
                trace = timeline.Timeline(step_stats=run_metadata.step_stats)
                f.write(trace.generate_chrome_trace_format(show_memory=True))
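The warmup-then-measure split generalizes beyond TF1 sessions: the first few runs absorb one-time graph construction and autotuning costs that would otherwise skew the average. A minimal harness in the same spirit (a hypothetical helper; `run_once` stands in for `sess.run(output)`):

import time

def benchmark(run_once, warmup=5, iters=10):
    """Return mean seconds per call, excluding warmup runs."""
    for _ in range(warmup):  # absorb one-time graph/compile/autotune costs
        run_once()
    start = time.perf_counter()
    for _ in range(iters):
        run_once()
    return (time.perf_counter() - start) / iters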
Example 3
def build_tf2_model():
    """Build the tf2 model."""
    tf.config.run_functions_eagerly(FLAGS.debug)
    config = get_config(FLAGS.model_name, FLAGS.dataset_cfg, FLAGS.hparam_str)
    if config.runtime.mixed_precision:
        # Use 'mixed_float16' if running on GPUs.
        policy = tf.keras.mixed_precision.Policy('mixed_float16')
        tf.keras.mixed_precision.set_global_policy(policy)

    model = effnetv2_model.EffNetV2Model(FLAGS.model_name, config.model)
    # Use call (not build) to match the namescope: tensorflow issues/29576
    model(tf.ones([1, 224, 224, 3]), False)
    if FLAGS.model_dir:
        ckpt = FLAGS.model_dir
        if tf.io.gfile.isdir(ckpt):
            ckpt = tf.train.latest_checkpoint(FLAGS.model_dir)
        model.load_weights(ckpt)
    model.summary()

    class ExportModel(tf.Module):
        """Export a saved model."""
        def __init__(self, model):
            super().__init__()
            self.model = model

        @tf.function
        def f(self, images):
            return self.model(images, training=False)[0]

    return ExportModel(model)
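The ExportModel wrapper exists so the tf.function method can be given an explicit serving signature at export time. A follow-up sketch, assuming flags are already parsed and using a placeholder output path:

export_model = build_tf2_model()
tf.saved_model.save(
    export_model,
    '/tmp/effnetv2_saved_model',  # placeholder path
    signatures=export_model.f.get_concrete_function(
        tf.TensorSpec(shape=[None, 224, 224, 3], dtype=tf.float32)))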
Example 4
def build_model(in_images):
    """Build model using the model_name given through the command line."""
    config.model.num_classes = config.data.num_classes
    model = effnetv2_model.EffNetV2Model(config.model.model_name,
                                         config.model)
    logits = model(in_images, training=is_training)[0]
    return logits
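Note that `config` and `is_training` are free variables captured from the enclosing scope, so this excerpt is not runnable on its own. A self-contained sketch that passes them in explicitly:

def build_model_standalone(in_images, config, is_training=False):
    """Like build_model above, but without relying on enclosing-scope names."""
    config.model.num_classes = config.data.num_classes
    model = effnetv2_model.EffNetV2Model(config.model.model_name, config.model)
    return model(in_images, training=is_training)[0]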
Example 5
def main(_):
    """Export model to MLIR."""
    config = get_config(FLAGS.model_name, FLAGS.dataset_cfg, FLAGS.hparam_str)
    model = effnetv2_model.EffNetV2Model(FLAGS.model_name, config.model)
    # Use call (not build) to match the namescope: tensorflow issues/29576
    model(tf.ones([1, 224, 224, 3]), False)
    if FLAGS.model_dir:
        ckpt = FLAGS.model_dir
        if tf.io.gfile.isdir(ckpt):
            ckpt = tf.train.latest_checkpoint(FLAGS.model_dir)
        utils.restore_tf2_ckpt(model,
                               ckpt,
                               exclude_layers=('_head', 'optimizer'))
    model.summary()

    from tensorflow.lite.python.util import run_graph_optimizations, get_grappler_config
    from tensorflow.python.framework.convert_to_constants import convert_variables_to_constants_v2_as_graph

    fff = tf.function(model).get_concrete_function(
        tf.TensorSpec([1, 224, 224, 3], tf.float32))

    frozen_func, graph_def = convert_variables_to_constants_v2_as_graph(fff)

    input_tensors = [
        tensor for tensor in frozen_func.inputs if tensor.dtype != tf.resource
    ]
    output_tensors = frozen_func.outputs

    graph_def = run_graph_optimizations(graph_def,
                                        input_tensors,
                                        output_tensors,
                                        config=get_grappler_config([
                                            'pruning', 'function', 'constfold',
                                            'shape', 'remap', 'memory',
                                            'common_subgraph_elimination',
                                            'arithmetic', 'loop', 'dependency',
                                            'debug_stripper'
                                        ]),
                                        graph=frozen_func.graph)

    tf_mlir_graph = tf.mlir.experimental.convert_graph_def(graph_def)

    print('export model to {}.mlir'.format(FLAGS.model_name))
    export_dir = FLAGS.export_dir
    if export_dir is None:
        export_dir = '.'
    os.makedirs(export_dir, exist_ok=True)
    with open('{}/{}.mlir'.format(export_dir, FLAGS.model_name), 'wb') as outfile:
        outfile.write(tf_mlir_graph.encode())
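The same concrete function can feed other backends besides MLIR. For comparison, a sketch of a TFLite export that could be appended at the end of main, reusing the `fff`, `model`, and `export_dir` defined above (the two-argument form of `from_concrete_functions` requires TF 2.7+):

    converter = tf.lite.TFLiteConverter.from_concrete_functions([fff], model)
    tflite_bytes = converter.convert()
    with open('{}/{}.tflite'.format(export_dir, FLAGS.model_name), 'wb') as f:
        f.write(tflite_bytes)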
Example 6
def test_effnetv2(self, model_name, expected_params):
    images = tf.zeros((10, 224, 224, 3), dtype=tf.float32)
    model = effnetv2_model.EffNetV2Model(model_name)
    _ = model(images)
    self.assertEqual(model.count_params(), expected_params)
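The `self`, `model_name`, and `expected_params` arguments imply a parameterized test class around this method. A sketch of the scaffolding it assumes (the parameter pair is a placeholder, not a verified count):

from absl.testing import parameterized
import tensorflow as tf


class EffNetV2ModelTest(tf.test.TestCase, parameterized.TestCase):

    @parameterized.parameters(
        ('efficientnetv2-s', 0),  # placeholder: (model_name, expected_params)
    )
    def test_effnetv2(self, model_name, expected_params):
        ...  # method body as in the example above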