Example 1
def export():
    with tf.Graph().as_default():
        # Build Aquila model.
        # Please refer to the TensorFlow Inception model for details.

        flat_image_size = 3 * FLAGS.image_size**2
        input_data = tf.placeholder(tf.float32, shape=(None, flat_image_size))
        # reshape the images appropriately
        images = tf.reshape(input_data,
                            (-1, FLAGS.image_size, FLAGS.image_size, 3))

        # Run inference.
        logits, _ = aquila_model.inference(images,
                                           for_training=False,
                                           restore_logits=True)

        # Restore variables from training checkpoint.
        variable_averages = tf.train.ExponentialMovingAverage(
            aquila_model.MOVING_AVERAGE_DECAY)
        variables_to_restore = variable_averages.variables_to_restore()
        saver = tf.train.Saver(variables_to_restore)
        with tf.Session() as sess:
            # Restore variables from training checkpoints.
            ckpt = tf.train.get_checkpoint_state(FLAGS.checkpoint_dir)
            if ckpt and ckpt.model_checkpoint_path:
                saver.restore(sess, ckpt.model_checkpoint_path)
                # Assuming model_checkpoint_path looks something like:
                #   /my-favorite-path/imagenet_train/model.ckpt-0,
                # extract global_step from it.
                global_step = ckpt.model_checkpoint_path.split('/')[-1].split(
                    '-')[-1]
                print('Successfully loaded model from %s at step=%s.' %
                      (ckpt.model_checkpoint_path, global_step))
            else:
                print('No checkpoint file found at %s' % FLAGS.checkpoint_dir)
                return

            # Export inference model.
            model_exporter = exporter.Exporter(saver)
            # Bind the flat-image placeholder (not the undefined `jpegs` from
            # the original Inception script) to the logits output.
            signature = exporter.regression_signature(input_data, logits)
            model_exporter.init(default_graph_signature=signature)
            model_exporter.export(FLAGS.export_dir, tf.constant(global_step),
                                  sess)
            print('Successfully exported model to %s' % FLAGS.export_dir)
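
For orientation, here is a minimal sketch of how an export produced by export() could be loaded back and queried through its default regression signature. It assumes the legacy tf.contrib.session_bundle loader used alongside these exporter APIs; the export directory path and the image size are illustrative assumptions, not values from the original code.

import numpy as np
from tensorflow.contrib.session_bundle import constants
from tensorflow.contrib.session_bundle import manifest_pb2
from tensorflow.contrib.session_bundle import session_bundle

# Hypothetical versioned export directory written by export() above.
sess, meta_graph_def = session_bundle.load_session_bundle_from_path(
    "/tmp/aquila_export/00000001")

# Recover the default regression signature recorded at export time.
signatures_any = meta_graph_def.collection_def[
    constants.SIGNATURES_KEY].any_list.value
signatures = manifest_pb2.Signatures()
signatures_any[0].Unpack(signatures)
regression = signatures.default_signature.regression_signature

# Feed one flattened image; 299 is an assumed value for FLAGS.image_size.
batch = np.zeros((1, 3 * 299 * 299), dtype=np.float32)
scores = sess.run(regression.output.tensor_name,
                  {regression.input.tensor_name: batch})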
Example 2
    def doBasicsOneExportPath(self,
                              export_path,
                              clear_devices=False,
                              global_step=GLOBAL_STEP):
        # Build a graph with 2 parameter nodes on different devices.
        tf.reset_default_graph()
        with tf.Session(target="",
                        config=config_pb2.ConfigProto(
                            device_count={"CPU": 2})) as sess:
            # v2 is an unsaved variable derived from v0 and v1.  It is used to
            # exercise the ability to run an init op when restoring a graph.
            with sess.graph.device("/cpu:0"):
                v0 = tf.Variable(10, name="v0")
            with sess.graph.device("/cpu:1"):
                v1 = tf.Variable(20, name="v1")
            v2 = tf.Variable(1, name="v2", trainable=False, collections=[])
            assign_v2 = tf.assign(v2, tf.add(v0, v1))
            init_op = tf.group(assign_v2, name="init_op")

            tf.add_to_collection("v", v0)
            tf.add_to_collection("v", v1)
            tf.add_to_collection("v", v2)

            global_step_tensor = tf.Variable(global_step, name="global_step")
            named_tensor_bindings = {
                "logical_input_A": v0,
                "logical_input_B": v1
            }
            signatures = {
                "foo":
                exporter.regression_signature(input_tensor=v0,
                                              output_tensor=v1),
                "generic":
                exporter.generic_signature(named_tensor_bindings)
            }

            def write_asset(path):
                file_path = os.path.join(path, "file.txt")
                with gfile.FastGFile(file_path, "w") as f:
                    f.write("your data here")

            asset_file = tf.Variable("hello42.txt", name="filename42")
            assets = {("hello42.txt", asset_file)}

            tf.initialize_all_variables().run()

            # Run an export.
            save = tf.train.Saver({
                "v0": v0,
                "v1": v1
            },
                                  restore_sequentially=True,
                                  sharded=True)
            export = exporter.Exporter(save)
            export.init(
                sess.graph.as_graph_def(),
                init_op=init_op,
                clear_devices=clear_devices,
                default_graph_signature=exporter.classification_signature(
                    input_tensor=v0),
                named_graph_signatures=signatures,
                assets=assets,
                assets_callback=write_asset)
            export.export(export_path,
                          global_step_tensor,
                          sess,
                          exports_to_keep=gc.largest_export_versions(2))

        # Restore graph.
        compare_def = tf.get_default_graph().as_graph_def()
        tf.reset_default_graph()
        with tf.Session(target="",
                        config=config_pb2.ConfigProto(
                            device_count={"CPU": 2})) as sess:
            save = tf.train.import_meta_graph(
                os.path.join(export_path,
                             exporter.VERSION_FORMAT_SPECIFIER % global_step,
                             exporter.META_GRAPH_DEF_FILENAME))
            self.assertIsNotNone(save)
            meta_graph_def = save.export_meta_graph()
            collection_def = meta_graph_def.collection_def

            # Validate custom graph_def.
            graph_def_any = collection_def[exporter.GRAPH_KEY].any_list.value
            self.assertEquals(len(graph_def_any), 1)
            graph_def = tf.GraphDef()
            graph_def_any[0].Unpack(graph_def)
            if clear_devices:
                for node in compare_def.node:
                    node.device = ""
            self.assertProtoEquals(compare_def, graph_def)

            # Validate init_op.
            init_ops = collection_def[exporter.INIT_OP_KEY].node_list.value
            self.assertEquals(len(init_ops), 1)
            self.assertEquals(init_ops[0], "init_op")

            # Validate signatures.
            signatures_any = collection_def[
                exporter.SIGNATURES_KEY].any_list.value
            self.assertEquals(len(signatures_any), 1)
            signatures = manifest_pb2.Signatures()
            signatures_any[0].Unpack(signatures)
            default_signature = signatures.default_signature
            self.assertEqual(
                default_signature.classification_signature.input.tensor_name,
                "v0:0")
            bindings = signatures.named_signatures[
                "generic"].generic_signature.map
            self.assertEquals(bindings["logical_input_A"].tensor_name, "v0:0")
            self.assertEquals(bindings["logical_input_B"].tensor_name, "v1:0")
            read_foo_signature = (
                signatures.named_signatures["foo"].regression_signature)
            self.assertEquals(read_foo_signature.input.tensor_name, "v0:0")
            self.assertEquals(read_foo_signature.output.tensor_name, "v1:0")

            # Validate the assets.
            assets_any = collection_def[exporter.ASSETS_KEY].any_list.value
            self.assertEquals(len(assets_any), 1)
            asset = manifest_pb2.AssetFile()
            assets_any[0].Unpack(asset)
            assets_path = os.path.join(
                export_path, exporter.VERSION_FORMAT_SPECIFIER % global_step,
                exporter.ASSETS_DIRECTORY, "file.txt")
            asset_contents = gfile.GFile(assets_path).read()
            self.assertEqual(asset_contents, "your data here")
            self.assertEquals("hello42.txt", asset.filename)
            self.assertEquals("filename42:0", asset.tensor_binding.tensor_name)

            # Validate graph restoration.
            save.restore(
                sess,
                os.path.join(export_path,
                             exporter.VERSION_FORMAT_SPECIFIER % global_step,
                             exporter.VARIABLES_DIRECTORY))
            self.assertEqual(10, tf.get_collection("v")[0].eval())
            self.assertEqual(20, tf.get_collection("v")[1].eval())
            tf.get_collection(exporter.INIT_OP_KEY)[0].run()
            self.assertEqual(30, tf.get_collection("v")[2].eval())
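
The helper above is not a test on its own; it would be driven by concrete test methods. A hypothetical driver in the style of the surrounding test code might look like this (class, method, and path names are illustrative):

class ExporterTest(tf.test.TestCase):

    def testBasics(self):
        self.doBasicsOneExportPath(
            os.path.join(tf.test.get_temp_dir(), "basic_export"))

    def testClearDevices(self):
        self.doBasicsOneExportPath(
            os.path.join(tf.test.get_temp_dir(), "clear_device_export"),
            clear_devices=True)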
Example 3
def Export():
  export_path = "/tmp/half_plus_two"
  with tf.Session() as sess:
    # Make model parameters a and b variables instead of constants to
    # exercise the variable reloading mechanisms.
    a = tf.Variable(0.5, name="a")
    b = tf.Variable(2.0, name="b")

    # Calculate y = a*x + b. Here we use a placeholder 'x', which is fed at
    # inference time.
    x = tf.placeholder(tf.float32, name="x")
    y = tf.add(tf.mul(a, x), b, name="y")

    # Setup a standard Saver for our variables.
    save = tf.train.Saver({"a": a, "b": b}, sharded=True)

    # asset_path contains the base directory of assets used in training (e.g.
    # vocabulary files).
    original_asset_path = tf.constant("/tmp/original/export/assets")
    # Ops reading asset files should reference the asset_path tensor
    # which stores the original asset path at training time and the
    # overridden assets directory at restore time.
    asset_path = tf.Variable(original_asset_path,
                             name="asset_path",
                             trainable=False,
                             collections=[])
    assign_asset_path = asset_path.assign(original_asset_path)

    # CopyAssets is used as a callback during export to copy files to the
    # given export directory.
    def CopyAssets(export_path):
      print("copying asset files to: %s" % export_path)

    # Use a fixed global step number.
    global_step_tensor = tf.Variable(123, name="global_step")

    # Create a RegressionSignature for our input and output.
    signature = exporter.regression_signature(input_tensor=x, output_tensor=y)

    # Create two filename assets and corresponding tensors.
    # TODO(b/26254158) Consider adding validation of file existence as well as
    # hashes (e.g. sha1) for consistency.
    original_filename1 = tf.constant("hello1.txt")
    filename1 = tf.Variable(original_filename1,
                            name="filename1",
                            trainable=False,
                            collections=[])
    assign_filename1 = filename1.assign(original_filename1)
    original_filename2 = tf.constant("hello2.txt")
    filename2 = tf.Variable(original_filename2,
                            name="filename2",
                            trainable=False,
                            collections=[])
    assign_filename2 = filename2.assign(original_filename2)
    assets = {("hello1.txt", original_filename1),
              ("hello2.txt", original_filename2)}

    # Init op contains a group of all variables that we assign.
    init_op = tf.group(assign_asset_path, assign_filename1, assign_filename2)

    # Run an export.
    tf.initialize_all_variables().run()
    export = exporter.Exporter(save)
    export.init(sess.graph.as_graph_def(),
                init_op=init_op,
                default_graph_signature=signature,
                assets=assets,
                assets_callback=CopyAssets)
    export.export(export_path, global_step_tensor, sess)
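
As a usage sketch (not part of the original example): once Export() has run, the bundle can be loaded back and queried, and the output should follow y = 0.5*x + 2 for any fed x. The version subdirectory name below is an assumption based on the fixed global step of 123.

from tensorflow.contrib.session_bundle import session_bundle

# The exporter appends a version subdirectory derived from global_step;
# "00000123" is the assumed name for global_step == 123.
sess, _ = session_bundle.load_session_bundle_from_path(
    "/tmp/half_plus_two/00000123")
print(sess.run("y:0", {"x:0": [1.0, 5.0]}))  # expected: [2.5, 4.5]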
Example 4
  def doBasicsOneExportPath(self,
                            export_path,
                            clear_devices=False,
                            global_step=GLOBAL_STEP,
                            sharded=True):
    # Build a graph with 2 parameter nodes on different devices.
    tf.reset_default_graph()
    with tf.Session(
        target="",
        config=config_pb2.ConfigProto(device_count={"CPU": 2})) as sess:
      # v2 is an unsaved variable derived from v0 and v1.  It is used to
      # exercise the ability to run an init op when restoring a graph.
      with sess.graph.device("/cpu:0"):
        v0 = tf.Variable(10, name="v0")
      with sess.graph.device("/cpu:1"):
        v1 = tf.Variable(20, name="v1")
      v2 = tf.Variable(1, name="v2", trainable=False, collections=[])
      assign_v2 = tf.assign(v2, tf.add(v0, v1))
      init_op = tf.group(assign_v2, name="init_op")

      tf.add_to_collection("v", v0)
      tf.add_to_collection("v", v1)
      tf.add_to_collection("v", v2)

      global_step_tensor = tf.Variable(global_step, name="global_step")
      named_tensor_bindings = {"logical_input_A": v0, "logical_input_B": v1}
      signatures = {
          "foo": exporter.regression_signature(input_tensor=v0,
                                               output_tensor=v1),
          "generic": exporter.generic_signature(named_tensor_bindings)
      }

      asset_filepath_orig = os.path.join(tf.test.get_temp_dir(), "hello42.txt")
      asset_file = tf.constant(asset_filepath_orig, name="filename42")
      tf.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, asset_file)

      with gfile.FastGFile(asset_filepath_orig, "w") as f:
        f.write("your data here")
      assets_collection = tf.get_collection(tf.GraphKeys.ASSET_FILEPATHS)

      ignored_asset = os.path.join(tf.test.get_temp_dir(), "ignored.txt")
      with gfile.FastGFile(ignored_asset, "w") as f:
        f.write("additional data here")

      tf.initialize_all_variables().run()

      # Run an export.
      save = tf.train.Saver({"v0": v0,
                             "v1": v1},
                            restore_sequentially=True,
                            sharded=sharded)
      export = exporter.Exporter(save)
      export.init(sess.graph.as_graph_def(),
                  init_op=init_op,
                  clear_devices=clear_devices,
                  default_graph_signature=exporter.classification_signature(
                      input_tensor=v0),
                  named_graph_signatures=signatures,
                  assets_collection=assets_collection)
      export.export(export_path,
                    global_step_tensor,
                    sess,
                    exports_to_keep=gc.largest_export_versions(2))

    # Restore graph.
    compare_def = tf.get_default_graph().as_graph_def()
    tf.reset_default_graph()
    with tf.Session(
        target="",
        config=config_pb2.ConfigProto(device_count={"CPU": 2})) as sess:
      save = tf.train.import_meta_graph(
          os.path.join(export_path, constants.VERSION_FORMAT_SPECIFIER %
                       global_step, constants.META_GRAPH_DEF_FILENAME))
      self.assertIsNotNone(save)
      meta_graph_def = save.export_meta_graph()
      collection_def = meta_graph_def.collection_def

      # Validate custom graph_def.
      graph_def_any = collection_def[constants.GRAPH_KEY].any_list.value
      self.assertEquals(len(graph_def_any), 1)
      graph_def = tf.GraphDef()
      graph_def_any[0].Unpack(graph_def)
      if clear_devices:
        for node in compare_def.node:
          node.device = ""
      self.assertProtoEquals(compare_def, graph_def)

      # Validate init_op.
      init_ops = collection_def[constants.INIT_OP_KEY].node_list.value
      self.assertEquals(len(init_ops), 1)
      self.assertEquals(init_ops[0], "init_op")

      # Validate signatures.
      signatures_any = collection_def[constants.SIGNATURES_KEY].any_list.value
      self.assertEquals(len(signatures_any), 1)
      signatures = manifest_pb2.Signatures()
      signatures_any[0].Unpack(signatures)
      default_signature = signatures.default_signature
      self.assertEqual(
          default_signature.classification_signature.input.tensor_name, "v0:0")
      bindings = signatures.named_signatures["generic"].generic_signature.map
      self.assertEquals(bindings["logical_input_A"].tensor_name, "v0:0")
      self.assertEquals(bindings["logical_input_B"].tensor_name, "v1:0")
      read_foo_signature = (
          signatures.named_signatures["foo"].regression_signature)
      self.assertEquals(read_foo_signature.input.tensor_name, "v0:0")
      self.assertEquals(read_foo_signature.output.tensor_name, "v1:0")

      # Validate the assets.
      assets_any = collection_def[constants.ASSETS_KEY].any_list.value
      self.assertEquals(len(assets_any), 1)
      asset = manifest_pb2.AssetFile()
      assets_any[0].Unpack(asset)
      assets_path = os.path.join(export_path,
                                 constants.VERSION_FORMAT_SPECIFIER %
                                 global_step, constants.ASSETS_DIRECTORY,
                                 "hello42.txt")
      asset_contents = gfile.GFile(assets_path).read()
      self.assertEqual(asset_contents, "your data here")
      self.assertEquals("hello42.txt", asset.filename)
      self.assertEquals("filename42:0", asset.tensor_binding.tensor_name)
      ignored_asset_path = os.path.join(export_path,
                                        constants.VERSION_FORMAT_SPECIFIER %
                                        global_step, constants.ASSETS_DIRECTORY,
                                        "ignored.txt")
      self.assertFalse(gfile.Exists(ignored_asset_path))

      # Validate graph restoration.
      if sharded:
        save.restore(sess,
                     os.path.join(
                        export_path, constants.VERSION_FORMAT_SPECIFIER %
                        global_step, constants.VARIABLES_FILENAME_PATTERN))
      else:
        save.restore(sess,
                     os.path.join(
                        export_path, constants.VERSION_FORMAT_SPECIFIER %
                        global_step, constants.VARIABLES_FILENAME))
      self.assertEqual(10, tf.get_collection("v")[0].eval())
      self.assertEqual(20, tf.get_collection("v")[1].eval())
      tf.get_collection(constants.INIT_OP_KEY)[0].run()
      self.assertEqual(30, tf.get_collection("v")[2].eval())
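
As with Example 2, this helper would be invoked by concrete test methods. A hypothetical pair exercising both restore paths of the sharded flag could read (names are illustrative):

class ExporterTest(tf.test.TestCase):

  def testShardedExport(self):
    self.doBasicsOneExportPath(
        os.path.join(tf.test.get_temp_dir(), "sharded_export"), sharded=True)

  def testNonShardedExport(self):
    self.doBasicsOneExportPath(
        os.path.join(tf.test.get_temp_dir(), "non_sharded_export"),
        sharded=False)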