Example 1
def logistic_regression_signature_fn(examples, unused_features, predictions):
  """Creates logistic regression signature from given examples and predictions.

  Args:
    examples: `Tensor`.
    unused_features: `dict` of `Tensor`s.
    predictions: `Tensor` of shape [batch_size, 2] of predicted probabilities.

  Returns:
    Tuple of the default regression signature and a dict of named signatures.
  """
  # predictions should have shape [batch_size, 2] where first column is P(Y=0|x)
  # while second column is P(Y=1|x). We are only interested in the second
  # column for inference.
  predictions_shape = predictions.get_shape()
  predictions_rank = len(predictions_shape)
  if predictions_rank != 2:
    logging.fatal(
        'Expected predictions to have rank 2, but received predictions with '
        'rank: {} and shape: {}'.format(predictions_rank, predictions_shape))
  if predictions_shape[1] != 2:
    logging.fatal(
        'Expected predictions to have 2nd dimension: 2, but received '
        'predictions with 2nd dimension: {} and shape: {}. Did you mean to use '
        'regression_signature_fn instead?'.format(predictions_shape[1],
                                                  predictions_shape))

  positive_predictions = predictions[:, 1]
  signatures = {}
  signatures['regression'] = exporter.regression_signature(examples,
                                                           positive_predictions)
  return signatures['regression'], signatures
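
These signature_fn callbacks were consumed by the old tf.contrib.learn export path. Below is a minimal sketch of wiring one in, assuming a trained tf.contrib.learn Estimator of the same era; the my_estimator variable and the export directory are hypothetical, not part of the example above:

my_estimator.export("/tmp/logistic_export",
                    signature_fn=logistic_regression_signature_fn)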
Example 2
    def _regression_signature_fn(examples, unused_features, predictions):
      if isinstance(predictions, dict):
        score = predictions[PredictionKey.SCORES]
      else:
        score = predictions

      default_signature = exporter.regression_signature(
          input_tensor=examples, output_tensor=score)
      # TODO(zakaria): add validation
      return default_signature, {}
Example 3
    def _regression_signature_fn(examples, features, predictions):
      # pylint: disable=missing-docstring
      del features
      if isinstance(predictions, dict):
        score = predictions[prediction_key.PredictionKey.SCORES]
      else:
        score = predictions

      default_signature = exporter.regression_signature(
          input_tensor=examples, output_tensor=score)
      # TODO(zakaria): add validation
      return default_signature, {}
Example 4
def regression_signature_fn(examples, unused_features, predictions):
  """Creates regression signature from given examples and predictions.

  Args:
    examples: `Tensor`.
    unused_features: `dict` of `Tensor`s.
    predictions: `Tensor`.

  Returns:
    Tuple of default regression signature and empty named signatures.
  """
  default_signature = exporter.regression_signature(
      input_tensor=examples, output_tensor=predictions)
  return default_signature, {}
Example 5
def logistic_regression_signature_fn(examples, unused_features, predictions):
    """Creates logistic regression signature from given examples and predictions.

  Args:
    examples: `Tensor`.
    unused_features: `dict` of `Tensor`s.
    predictions: `Tensor` of shape [batch_size, 2] of predicted probabilities or
      dict that contains the probabilities tensor as in
      {'probabilities': `Tensor`}.

  Returns:
    Tuple of default regression signature and named signature.

  Raises:
    ValueError: If examples is `None`.
  """
    if examples is None:
        raise ValueError("examples cannot be None when using this signature fn.")

    if isinstance(predictions, dict):
        predictions_tensor = predictions["probabilities"]
    else:
        predictions_tensor = predictions
    # predictions should have shape [batch_size, 2] where first column is P(Y=0|x)
    # while second column is P(Y=1|x). We are only interested in the second
    # column for inference.
    predictions_shape = predictions_tensor.get_shape()
    predictions_rank = len(predictions_shape)
    if predictions_rank != 2:
        logging.fatal(
            "Expected predictions to have rank 2, but received predictions with "
            "rank: {} and shape: {}".format(predictions_rank, predictions_shape)
        )
    if predictions_shape[1] != 2:
        logging.fatal(
            "Expected predictions to have 2nd dimension: 2, but received "
            "predictions with 2nd dimension: {} and shape: {}. Did you mean to use "
            "regression_signature_fn or classification_signature_fn_with_prob "
            "instead?".format(predictions_shape[1], predictions_shape)
        )

    positive_predictions = predictions_tensor[:, 1]
    default_signature = exporter.regression_signature(input_tensor=examples, output_tensor=positive_predictions)
    return default_signature, {}
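
For a sense of the shapes involved, here is a minimal sketch that calls the function above directly with placeholder tensors, assuming its definition and imports are in scope; it exercises the dict branch and the rank/shape checks only, with no serving machinery:

import tensorflow as tf

examples = tf.placeholder(tf.string, name="input_example_tensor")
probabilities = tf.placeholder(tf.float32, shape=[None, 2])
default_signature, named_signatures = logistic_regression_signature_fn(
    examples, unused_features=None,
    predictions={"probabilities": probabilities})
# default_signature wraps P(Y=1|x) (column 1); named_signatures is empty here.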
Example 6
def logistic_regression_signature_fn(examples, unused_features, predictions):
  """Creates logistic regression signature from given examples and predictions.

  Args:
    examples: `Tensor`.
    unused_features: `dict` of `Tensor`s.
    predictions: `Tensor` of shape [batch_size, 2] of predicted probabilities or
      dict that contains the probabilities tensor as in
      {'probabilities': `Tensor`}.

  Returns:
    Tuple of default regression signature and named signature.

  Raises:
    ValueError: If examples is `None`.
  """
  if examples is None:
    raise ValueError('examples cannot be None when using this signature fn.')

  if isinstance(predictions, dict):
    predictions_tensor = predictions['probabilities']
  else:
    predictions_tensor = predictions
  # predictions should have shape [batch_size, 2] where first column is P(Y=0|x)
  # while second column is P(Y=1|x). We are only interested in the second
  # column for inference.
  predictions_shape = predictions_tensor.get_shape()
  predictions_rank = len(predictions_shape)
  if predictions_rank != 2:
    logging.fatal(
        'Expected predictions to have rank 2, but received predictions with '
        'rank: {} and shape: {}'.format(predictions_rank, predictions_shape))
  if predictions_shape[1] != 2:
    logging.fatal(
        'Expected predictions to have 2nd dimension: 2, but received '
        'predictions with 2nd dimension: {} and shape: {}. Did you mean to use '
        'regression_signature_fn or classification_signature_fn_with_prob '
        'instead?'.format(predictions_shape[1], predictions_shape))

  positive_predictions = predictions_tensor[:, 1]
  default_signature = exporter.regression_signature(
      input_tensor=examples, output_tensor=positive_predictions)
  return default_signature, {}
Example 7
def regression_signature_fn(examples, unused_features, predictions):
    """Creates regression signature from given examples and predictions.

  Args:
    examples: `Tensor`.
    unused_features: `dict` of `Tensor`s.
    predictions: `Tensor`.

  Returns:
    Tuple of default regression signature and empty named signatures.

  Raises:
    ValueError: If examples is `None`.
  """
    if examples is None:
        raise ValueError("examples cannot be None when using this signature fn.")

    default_signature = exporter.regression_signature(input_tensor=examples, output_tensor=predictions)
    return default_signature, {}
Example 8
def regression_signature_fn(examples, unused_features, predictions):
  """Creates regression signature from given examples and predictions.

  Args:
    examples: `Tensor`.
    unused_features: `dict` of `Tensor`s.
    predictions: `Tensor`.

  Returns:
    Tuple of default regression signature and empty named signatures.

  Raises:
    ValueError: If examples is `None`.
  """
  if examples is None:
    raise ValueError('examples cannot be None when using this signature fn.')

  default_signature = exporter.regression_signature(
      input_tensor=examples, output_tensor=predictions)
  return default_signature, {}
Example 9
def logistic_regression_signature_fn(examples, unused_features, predictions):
    """Creates regression signature from given examples and predictions.

  Args:
    examples: `Tensor`.
    unused_features: `dict` of `Tensor`s.
    predictions: `Tensor` of shape [batch_size, 2] of predicted probabilities.

  Returns:
    Tuple of the default regression signature and a dict of named signatures.
  """
    # predictions has shape [batch_size, 2] where first column is P(Y=0|x)
    # while second column is P(Y=1|x). We are only interested in the second
    # column for inference.
    assert predictions.get_shape()[1] == 2
    positive_predictions = predictions[:, 1]

    signatures = {}
    signatures['regression'] = exporter.regression_signature(
        examples, positive_predictions)
    return signatures['regression'], signatures
Example 10
def logistic_regression_signature_fn(examples, unused_features, predictions):
  """Creates regression signature from given examples and predictions.

  Args:
    examples: `Tensor`.
    unused_features: `dict` of `Tensor`s.
    predictions: `Tensor` of shape [batch_size, 2] of predicted probabilities.

  Returns:
    Tuple of the default regression signature and a dict of named signatures.
  """
  # predictions has shape [batch_size, 2] where first column is P(Y=0|x)
  # while second column is P(Y=1|x). We are only interested in the second
  # column for inference.
  assert predictions.get_shape()[1] == 2
  positive_predictions = predictions[:, 1]

  signatures = {}
  signatures['regression'] = exporter.regression_signature(examples,
                                                           positive_predictions)
  return signatures['regression'], signatures
Example 11
def ExportModel(sess, model_dir, input_tensor, output_tensor, assets):
    if os.path.isdir(model_dir):
        shutil.rmtree(model_dir)

    # using TF Serving exporter to load into a TF Serving session bundle
    logging.info('Exporting trained model to %s', model_dir)
    saver = tf.train.Saver()
    model_exporter = exporter.Exporter(saver)
    signature = exporter.regression_signature(input_tensor=input_tensor,
                                              output_tensor=output_tensor)
    model_exporter.init(sess.graph.as_graph_def(),
                        default_graph_signature=signature,
                        assets_collection=assets)
    model_exporter.export(model_dir, tf.constant(1), sess)

    # using a SummaryWriter so graph can be loaded in TensorBoard
    writer = tf.train.SummaryWriter(model_dir, sess.graph)
    writer.flush()

    # Export the graph as a text protobuf so it can be inspected manually.
    with open(os.path.join(model_dir, 'graph.pbtxt'), 'w') as f:
        f.write(str(tf.get_default_graph().as_graph_def()))
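
A hypothetical invocation of ExportModel; the graph must contain at least one variable, since tf.train.Saver() with no arguments fails on a variable-free graph. The export path here is a placeholder:

with tf.Session() as sess:
    x = tf.placeholder(tf.float32, name="x")
    w = tf.Variable(2.0, name="w")
    y = tf.add(tf.mul(w, x), 1.0, name="y")
    tf.initialize_all_variables().run()
    ExportModel(sess, "/tmp/regression_export", x, y, assets=None)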
Example 12
def Export():
  export_path = "/tmp/half_plus_two"
  with tf.Session() as sess:
    # Make model parameters a&b variables instead of constants to
    # exercise the variable reloading mechanisms.
    a = tf.Variable(0.5)
    b = tf.Variable(2.0)

    # Calculate, y = a*x + b
    # here we use a placeholder 'x' which is fed at inference time.
    x = tf.placeholder(tf.float32)
    y = tf.add(tf.multiply(a, x), b)

    # Run an export.
    tf.initialize_all_variables().run()
    export = exporter.Exporter(tf.train.Saver())
    export.init(named_graph_signatures={
        "inputs": exporter.generic_signature({"x": x}),
        "outputs": exporter.generic_signature({"y": y}),
        "regress": exporter.regression_signature(x, y)
    })
    export.export(export_path, tf.constant(123), sess)
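
The examples above only write the bundle; reading it back is left implicit. Here is a sketch of loading and querying the "regress" signature, assuming the session_bundle loader API of the same era and its default version directory naming (export_path/00000123 for global step 123):

from tensorflow.contrib.session_bundle import constants, manifest_pb2, session_bundle

sess, meta_graph_def = session_bundle.load_session_bundle_from_path(
    "/tmp/half_plus_two/00000123")
signatures_any = meta_graph_def.collection_def[
    constants.SIGNATURES_KEY].any_list.value
signatures = manifest_pb2.Signatures()
signatures_any[0].Unpack(signatures)
regress = signatures.named_signatures["regress"].regression_signature
# Feed x through the signature's recorded tensor names: y = 0.5*x + 2.0.
y_value = sess.run(regress.output.tensor_name,
                   {regress.input.tensor_name: [10.0]})  # -> [7.0]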
Example 13
def Export():
    export_path = "/tmp/half_plus_two"
    with tf.Session() as sess:
        # Make model parameters a&b variables instead of constants to
        # exercise the variable reloading mechanisms.
        a = tf.Variable(0.5)
        b = tf.Variable(2.0)

        # Calculate, y = a*x + b
        # here we use a placeholder 'x' which is fed at inference time.
        x = tf.placeholder(tf.float32)
        y = tf.add(tf.mul(a, x), b)

        # Run an export.
        tf.initialize_all_variables().run()
        export = exporter.Exporter(tf.train.Saver())
        export.init(
            named_graph_signatures={
                "inputs": exporter.generic_signature({"x": x}),
                "outputs": exporter.generic_signature({"y": y}),
                "regress": exporter.regression_signature(x, y)
            })
        export.export(export_path, tf.constant(123), sess)
Example 14
    def doBasicsOneExportPath(self,
                              export_path,
                              clear_devices=False,
                              global_step=GLOBAL_STEP,
                              sharded=True):
        # Build a graph with 2 parameter nodes on different devices.
        tf.reset_default_graph()
        with tf.Session(target="",
                        config=config_pb2.ConfigProto(
                            device_count={"CPU": 2})) as sess:
            # v2 is an unsaved variable derived from v0 and v1.  It is used to
            # exercise the ability to run an init op when restoring a graph.
            with sess.graph.device("/cpu:0"):
                v0 = tf.Variable(10, name="v0")
            with sess.graph.device("/cpu:1"):
                v1 = tf.Variable(20, name="v1")
            v2 = tf.Variable(1, name="v2", trainable=False, collections=[])
            assign_v2 = tf.assign(v2, tf.add(v0, v1))
            init_op = tf.group(assign_v2, name="init_op")

            tf.add_to_collection("v", v0)
            tf.add_to_collection("v", v1)
            tf.add_to_collection("v", v2)

            global_step_tensor = tf.Variable(global_step, name="global_step")
            named_tensor_bindings = {
                "logical_input_A": v0,
                "logical_input_B": v1
            }
            signatures = {
                "foo":
                exporter.regression_signature(input_tensor=v0,
                                              output_tensor=v1),
                "generic":
                exporter.generic_signature(named_tensor_bindings)
            }

            asset_filepath_orig = os.path.join(tf.test.get_temp_dir(),
                                               "hello42.txt")
            asset_file = tf.constant(asset_filepath_orig, name="filename42")
            tf.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, asset_file)

            with gfile.FastGFile(asset_filepath_orig, "w") as f:
                f.write("your data here")
            assets_collection = tf.get_collection(tf.GraphKeys.ASSET_FILEPATHS)

            ignored_asset = os.path.join(tf.test.get_temp_dir(), "ignored.txt")
            with gfile.FastGFile(ignored_asset, "w") as f:
                f.write("additional data here")

            tf.initialize_all_variables().run()

            # Run an export.
            save = tf.train.Saver({
                "v0": v0,
                "v1": v1
            },
                                  restore_sequentially=True,
                                  sharded=sharded)
            export = exporter.Exporter(save)
            export.init(
                sess.graph.as_graph_def(),
                init_op=init_op,
                clear_devices=clear_devices,
                default_graph_signature=exporter.classification_signature(
                    input_tensor=v0),
                named_graph_signatures=signatures,
                assets_collection=assets_collection)
            export.export(export_path,
                          global_step_tensor,
                          sess,
                          exports_to_keep=gc.largest_export_versions(2))

        # Restore graph.
        compare_def = tf.get_default_graph().as_graph_def()
        tf.reset_default_graph()
        with tf.Session(target="",
                        config=config_pb2.ConfigProto(
                            device_count={"CPU": 2})) as sess:
            save = tf.train.import_meta_graph(
                os.path.join(export_path,
                             constants.VERSION_FORMAT_SPECIFIER % global_step,
                             constants.META_GRAPH_DEF_FILENAME))
            self.assertIsNotNone(save)
            meta_graph_def = save.export_meta_graph()
            collection_def = meta_graph_def.collection_def

            # Validate custom graph_def.
            graph_def_any = collection_def[constants.GRAPH_KEY].any_list.value
            self.assertEquals(len(graph_def_any), 1)
            graph_def = tf.GraphDef()
            graph_def_any[0].Unpack(graph_def)
            if clear_devices:
                for node in compare_def.node:
                    node.device = ""
            self.assertProtoEquals(compare_def, graph_def)

            # Validate init_op.
            init_ops = collection_def[constants.INIT_OP_KEY].node_list.value
            self.assertEquals(len(init_ops), 1)
            self.assertEquals(init_ops[0], "init_op")

            # Validate signatures.
            signatures_any = collection_def[
                constants.SIGNATURES_KEY].any_list.value
            self.assertEquals(len(signatures_any), 1)
            signatures = manifest_pb2.Signatures()
            signatures_any[0].Unpack(signatures)
            default_signature = signatures.default_signature
            self.assertEqual(
                default_signature.classification_signature.input.tensor_name,
                "v0:0")
            bindings = signatures.named_signatures[
                "generic"].generic_signature.map
            self.assertEquals(bindings["logical_input_A"].tensor_name, "v0:0")
            self.assertEquals(bindings["logical_input_B"].tensor_name, "v1:0")
            read_foo_signature = (
                signatures.named_signatures["foo"].regression_signature)
            self.assertEquals(read_foo_signature.input.tensor_name, "v0:0")
            self.assertEquals(read_foo_signature.output.tensor_name, "v1:0")

            # Validate the assets.
            assets_any = collection_def[constants.ASSETS_KEY].any_list.value
            self.assertEquals(len(assets_any), 1)
            asset = manifest_pb2.AssetFile()
            assets_any[0].Unpack(asset)
            assets_path = os.path.join(
                export_path, constants.VERSION_FORMAT_SPECIFIER % global_step,
                constants.ASSETS_DIRECTORY, "hello42.txt")
            asset_contents = gfile.GFile(assets_path).read()
            self.assertEqual(asset_contents, "your data here")
            self.assertEquals("hello42.txt", asset.filename)
            self.assertEquals("filename42:0", asset.tensor_binding.tensor_name)
            ignored_asset_path = os.path.join(
                export_path, constants.VERSION_FORMAT_SPECIFIER % global_step,
                constants.ASSETS_DIRECTORY, "ignored.txt")
            self.assertFalse(gfile.Exists(ignored_asset_path))

            # Validate graph restoration.
            if sharded:
                save.restore(
                    sess,
                    os.path.join(
                        export_path,
                        constants.VERSION_FORMAT_SPECIFIER % global_step,
                        constants.VARIABLES_FILENAME_PATTERN))
            else:
                save.restore(
                    sess,
                    os.path.join(
                        export_path,
                        constants.VERSION_FORMAT_SPECIFIER % global_step,
                        constants.VARIABLES_FILENAME))
            self.assertEqual(10, tf.get_collection("v")[0].eval())
            self.assertEqual(20, tf.get_collection("v")[1].eval())
            tf.get_collection(constants.INIT_OP_KEY)[0].run()
            self.assertEqual(30, tf.get_collection("v")[2].eval())
Example 15
  def doBasicsOneExportPath(self,
                            export_path,
                            clear_devices=False,
                            global_step=GLOBAL_STEP,
                            sharded=True):
    # Build a graph with 2 parameter nodes on different devices.
    tf.reset_default_graph()
    with tf.Session(
        target="",
        config=config_pb2.ConfigProto(device_count={"CPU": 2})) as sess:
      # v2 is an unsaved variable derived from v0 and v1.  It is used to
      # exercise the ability to run an init op when restoring a graph.
      with sess.graph.device("/cpu:0"):
        v0 = tf.Variable(10, name="v0")
      with sess.graph.device("/cpu:1"):
        v1 = tf.Variable(20, name="v1")
      v2 = tf.Variable(1, name="v2", trainable=False, collections=[])
      assign_v2 = tf.assign(v2, tf.add(v0, v1))
      init_op = tf.group(assign_v2, name="init_op")

      tf.add_to_collection("v", v0)
      tf.add_to_collection("v", v1)
      tf.add_to_collection("v", v2)

      global_step_tensor = tf.Variable(global_step, name="global_step")
      named_tensor_bindings = {"logical_input_A": v0, "logical_input_B": v1}
      signatures = {
          "foo": exporter.regression_signature(input_tensor=v0,
                                               output_tensor=v1),
          "generic": exporter.generic_signature(named_tensor_bindings)
      }

      asset_filepath_orig = os.path.join(tf.test.get_temp_dir(), "hello42.txt")
      asset_file = tf.constant(asset_filepath_orig, name="filename42")
      tf.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, asset_file)

      with gfile.FastGFile(asset_filepath_orig, "w") as f:
        f.write("your data here")
      assets_collection = tf.get_collection(tf.GraphKeys.ASSET_FILEPATHS)

      ignored_asset = os.path.join(tf.test.get_temp_dir(), "ignored.txt")
      with gfile.FastGFile(ignored_asset, "w") as f:
        f.write("additional data here")

      tf.initialize_all_variables().run()

      # Run an export.
      save = tf.train.Saver({"v0": v0,
                             "v1": v1},
                            restore_sequentially=True,
                            sharded=sharded)
      export = exporter.Exporter(save)
      export.init(sess.graph.as_graph_def(),
                  init_op=init_op,
                  clear_devices=clear_devices,
                  default_graph_signature=exporter.classification_signature(
                      input_tensor=v0),
                  named_graph_signatures=signatures,
                  assets_collection=assets_collection)
      export.export(export_path,
                    global_step_tensor,
                    sess,
                    exports_to_keep=gc.largest_export_versions(2))

    # Restore graph.
    compare_def = tf.get_default_graph().as_graph_def()
    tf.reset_default_graph()
    with tf.Session(
        target="",
        config=config_pb2.ConfigProto(device_count={"CPU": 2})) as sess:
      save = tf.train.import_meta_graph(
          os.path.join(export_path, constants.VERSION_FORMAT_SPECIFIER %
                       global_step, constants.META_GRAPH_DEF_FILENAME))
      self.assertIsNotNone(save)
      meta_graph_def = save.export_meta_graph()
      collection_def = meta_graph_def.collection_def

      # Validate custom graph_def.
      graph_def_any = collection_def[constants.GRAPH_KEY].any_list.value
      self.assertEquals(len(graph_def_any), 1)
      graph_def = tf.GraphDef()
      graph_def_any[0].Unpack(graph_def)
      if clear_devices:
        for node in compare_def.node:
          node.device = ""
      self.assertProtoEquals(compare_def, graph_def)

      # Validate init_op.
      init_ops = collection_def[constants.INIT_OP_KEY].node_list.value
      self.assertEquals(len(init_ops), 1)
      self.assertEquals(init_ops[0], "init_op")

      # Validate signatures.
      signatures_any = collection_def[constants.SIGNATURES_KEY].any_list.value
      self.assertEquals(len(signatures_any), 1)
      signatures = manifest_pb2.Signatures()
      signatures_any[0].Unpack(signatures)
      default_signature = signatures.default_signature
      self.assertEqual(
          default_signature.classification_signature.input.tensor_name, "v0:0")
      bindings = signatures.named_signatures["generic"].generic_signature.map
      self.assertEquals(bindings["logical_input_A"].tensor_name, "v0:0")
      self.assertEquals(bindings["logical_input_B"].tensor_name, "v1:0")
      read_foo_signature = (
          signatures.named_signatures["foo"].regression_signature)
      self.assertEquals(read_foo_signature.input.tensor_name, "v0:0")
      self.assertEquals(read_foo_signature.output.tensor_name, "v1:0")

      # Validate the assets.
      assets_any = collection_def[constants.ASSETS_KEY].any_list.value
      self.assertEquals(len(assets_any), 1)
      asset = manifest_pb2.AssetFile()
      assets_any[0].Unpack(asset)
      assets_path = os.path.join(export_path,
                                 constants.VERSION_FORMAT_SPECIFIER %
                                 global_step, constants.ASSETS_DIRECTORY,
                                 "hello42.txt")
      asset_contents = gfile.GFile(assets_path).read()
      self.assertEqual(asset_contents, "your data here")
      self.assertEquals("hello42.txt", asset.filename)
      self.assertEquals("filename42:0", asset.tensor_binding.tensor_name)
      ignored_asset_path = os.path.join(export_path,
                                        constants.VERSION_FORMAT_SPECIFIER %
                                        global_step, constants.ASSETS_DIRECTORY,
                                        "ignored.txt")
      self.assertFalse(gfile.Exists(ignored_asset_path))

      # Validate graph restoration.
      if sharded:
        save.restore(sess,
                     os.path.join(
                        export_path, constants.VERSION_FORMAT_SPECIFIER %
                        global_step, constants.VARIABLES_FILENAME_PATTERN))
      else:
        save.restore(sess,
                     os.path.join(
                        export_path, constants.VERSION_FORMAT_SPECIFIER %
                        global_step, constants.VARIABLES_FILENAME))
      self.assertEqual(10, tf.get_collection("v")[0].eval())
      self.assertEqual(20, tf.get_collection("v")[1].eval())
      tf.get_collection(constants.INIT_OP_KEY)[0].run()
      self.assertEqual(30, tf.get_collection("v")[2].eval())
Example 16
def Export():
    with tf.Session() as sess:
        # Make model parameters a&b variables instead of constants to
        # exercise the variable reloading mechanisms.
        a = tf.Variable(0.5, name="a")
        b = tf.Variable(2.0, name="b")

        # Create a placeholder for serialized tensorflow.Example messages to be fed.
        serialized_tf_example = tf.placeholder(tf.string, name="tf_example")

        # Parse the tensorflow.Example looking for a feature named "x" with a single
        # floating point value.
        feature_configs = {
            "x": tf.FixedLenFeature([1], dtype=tf.float32),
        }
        tf_example = tf.parse_example(serialized_tf_example, feature_configs)
        # Use tf.identity() to assign a name.
        x = tf.identity(tf_example["x"], name="x")

        # Calculate, y = a*x + b
        y = tf.add(tf.mul(a, x), b, name="y")

        # Setup a standard Saver for our variables.
        save = tf.train.Saver(
            {
                "a": a,
                "b": b
            },
            sharded=True,
            write_version=tf.train.SaverDef.V2
            if FLAGS.use_checkpoint_v2 else tf.train.SaverDef.V1)

        # asset_path contains the base directory of assets used in training (e.g.
        # vocabulary files).
        original_asset_path = tf.constant("/tmp/original/export/assets")
        # Ops reading asset files should reference the asset_path tensor
        # which stores the original asset path at training time and the
        # overridden assets directory at restore time.
        asset_path = tf.Variable(original_asset_path,
                                 name="asset_path",
                                 trainable=False,
                                 collections=[])
        assign_asset_path = asset_path.assign(original_asset_path)

        # Use a fixed global step number.
        global_step_tensor = tf.Variable(123, name="global_step")

        # Create a RegressionSignature for our input and output.
        regression_signature = exporter.regression_signature(
            input_tensor=serialized_tf_example,
            # Use tf.identity here because we export two signatures here.
            # Otherwise only the graph for one of the signatures will be loaded
            # (whichever is created first) during serving.
            output_tensor=tf.identity(y))
        named_graph_signature = {
            "inputs": exporter.generic_signature({"x": x}),
            "outputs": exporter.generic_signature({"y": y})
        }

        # Create two filename assets and corresponding tensors.
        # TODO(b/26254158) Consider adding validation of file existence as well as
        # hashes (e.g. sha1) for consistency.
        original_filename1 = tf.constant("hello1.txt")
        tf.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, original_filename1)
        filename1 = tf.Variable(original_filename1,
                                name="filename1",
                                trainable=False,
                                collections=[])
        assign_filename1 = filename1.assign(original_filename1)
        original_filename2 = tf.constant("hello2.txt")
        tf.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, original_filename2)
        filename2 = tf.Variable(original_filename2,
                                name="filename2",
                                trainable=False,
                                collections=[])
        assign_filename2 = filename2.assign(original_filename2)

        # Init op contains a group of all variables that we assign.
        init_op = tf.group(assign_asset_path, assign_filename1,
                           assign_filename2)

        # CopyAssets is used as a callback during export to copy files to the
        # given export directory.
        def CopyAssets(filepaths, export_path):
            print("copying asset files to: %s" % export_path)
            for filepath in filepaths:
                print("copying asset file: %s" % filepath)

        # Run an export.
        tf.initialize_all_variables().run()
        export = exporter.Exporter(save)
        export.init(sess.graph.as_graph_def(),
                    init_op=init_op,
                    default_graph_signature=regression_signature,
                    named_graph_signatures=named_graph_signature,
                    assets_collection=tf.get_collection(
                        tf.GraphKeys.ASSET_FILEPATHS),
                    assets_callback=CopyAssets)
        export.export(FLAGS.export_dir, global_step_tensor, sess)
Example 17
def Export():
  with tf.Session() as sess:
    # Make model parameters a&b variables instead of constants to
    # exercise the variable reloading mechanisms.
    a = tf.Variable(0.5, name="a")
    b = tf.Variable(2.0, name="b")

    # Create a placeholder for serialized tensorflow.Example messages to be fed.
    serialized_tf_example = tf.placeholder(tf.string, name="tf_example")

    # Parse the tensorflow.Example looking for a feature named "x" with a single
    # floating point value.
    feature_configs = {"x": tf.FixedLenFeature([1], dtype=tf.float32),}
    tf_example = tf.parse_example(serialized_tf_example, feature_configs)
    # Use tf.identity() to assign a name.
    x = tf.identity(tf_example["x"], name="x")

    # Calculate, y = a*x + b
    y = tf.add(tf.mul(a, x), b, name="y")

    # Setup a standard Saver for our variables.
    save = tf.train.Saver(
        {
            "a": a,
            "b": b
        },
        sharded=True,
        write_version=tf.train.SaverDef.V2 if FLAGS.use_checkpoint_v2 else
        tf.train.SaverDef.V1)

    # asset_path contains the base directory of assets used in training (e.g.
    # vocabulary files).
    original_asset_path = tf.constant("/tmp/original/export/assets")
    # Ops reading asset files should reference the asset_path tensor
    # which stores the original asset path at training time and the
    # overridden assets directory at restore time.
    asset_path = tf.Variable(original_asset_path,
                             name="asset_path",
                             trainable=False,
                             collections=[])
    assign_asset_path = asset_path.assign(original_asset_path)

    # Use a fixed global step number.
    global_step_tensor = tf.Variable(123, name="global_step")

    # Create a RegressionSignature for our input and output.
    regression_signature = exporter.regression_signature(
        input_tensor=serialized_tf_example,
        # Use tf.identity here because we export two signatures here.
        # Otherwise only the graph for one of the signatures will be loaded
        # (whichever is created first) during serving.
        output_tensor=tf.identity(y))
    named_graph_signature = {
        "inputs": exporter.generic_signature({"x": x}),
        "outputs": exporter.generic_signature({"y": y})
    }

    # Create two filename assets and corresponding tensors.
    # TODO(b/26254158) Consider adding validation of file existence as well as
    # hashes (e.g. sha1) for consistency.
    original_filename1 = tf.constant("hello1.txt")
    tf.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, original_filename1)
    filename1 = tf.Variable(original_filename1,
                            name="filename1",
                            trainable=False,
                            collections=[])
    assign_filename1 = filename1.assign(original_filename1)
    original_filename2 = tf.constant("hello2.txt")
    tf.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, original_filename2)
    filename2 = tf.Variable(original_filename2,
                            name="filename2",
                            trainable=False,
                            collections=[])
    assign_filename2 = filename2.assign(original_filename2)

    # Init op contains a group of all variables that we assign.
    init_op = tf.group(assign_asset_path, assign_filename1, assign_filename2)

    # CopyAssets is used as a callback during export to copy files to the
    # given export directory.
    def CopyAssets(filepaths, export_path):
      print("copying asset files to: %s" % export_path)
      for filepath in filepaths:
        print("copying asset file: %s" % filepath)

    # Run an export.
    tf.initialize_all_variables().run()
    export = exporter.Exporter(save)
    export.init(
        sess.graph.as_graph_def(),
        init_op=init_op,
        default_graph_signature=regression_signature,
        named_graph_signatures=named_graph_signature,
        assets_collection=tf.get_collection(tf.GraphKeys.ASSET_FILEPATHS),
        assets_callback=CopyAssets)
    export.export(FLAGS.export_dir, global_step_tensor, sess)
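
At inference time the regression signature above takes a serialized tensorflow.Example string, not a raw float. A small sketch of building that input; the feature key "x" matches feature_configs above:

import tensorflow as tf

example = tf.train.Example(features=tf.train.Features(feature={
    "x": tf.train.Feature(float_list=tf.train.FloatList(value=[10.0])),
}))
serialized = example.SerializeToString()
# Feeding this string to the "tf_example" placeholder yields
# y = 0.5 * 10.0 + 2.0 = 7.0.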
Example 18
def Export():
    export_path = "/tmp/half_plus_two"
    with tf.Session() as sess:
        # Make model parameters a&b variables instead of constants to
        # exercise the variable reloading mechanisms.
        a = tf.Variable(0.5, name="a")
        b = tf.Variable(2.0, name="b")

        # Calculate, y = a*x + b
        # here we use a placeholder 'x' which is fed at inference time.
        x = tf.placeholder(tf.float32, name="x")
        y = tf.add(tf.mul(a, x), b, name="y")

        # Setup a standard Saver for our variables.
        save = tf.train.Saver({"a": a, "b": b}, sharded=True)

        # asset_path contains the base directory of assets used in training (e.g.
        # vocabulary files).
        original_asset_path = tf.constant("/tmp/original/export/assets")
        # Ops reading asset files should reference the asset_path tensor
        # which stores the original asset path at training time and the
        # overridden assets directory at restore time.
        asset_path = tf.Variable(original_asset_path,
                                 name="asset_path",
                                 trainable=False,
                                 collections=[])
        assign_asset_path = asset_path.assign(original_asset_path)

        # Use a fixed global step number.
        global_step_tensor = tf.Variable(123, name="global_step")

        # Create a RegressionSignature for our input and output.
        signature = exporter.regression_signature(input_tensor=x,
                                                  output_tensor=y)

        # Create two filename assets and corresponding tensors.
        # TODO(b/26254158) Consider adding validation of file existence as well as
        # hashes (e.g. sha1) for consistency.
        original_filename1 = tf.constant("hello1.txt")
        tf.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, original_filename1)
        filename1 = tf.Variable(original_filename1,
                                name="filename1",
                                trainable=False,
                                collections=[])
        assign_filename1 = filename1.assign(original_filename1)
        original_filename2 = tf.constant("hello2.txt")
        tf.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, original_filename2)
        filename2 = tf.Variable(original_filename2,
                                name="filename2",
                                trainable=False,
                                collections=[])
        assign_filename2 = filename2.assign(original_filename2)

        # Init op contains a group of all variables that we assign.
        init_op = tf.group(assign_asset_path, assign_filename1,
                           assign_filename2)

        # CopyAssets is used as a callback during export to copy files to the
        # given export directory.
        def CopyAssets(filepaths, export_path):
            print("copying asset files to: %s" % export_path)
            for filepath in filepaths:
                print("copying asset file: %s" % filepath)

        # Run an export.
        tf.initialize_all_variables().run()
        export = exporter.Exporter(save)
        export.init(sess.graph.as_graph_def(),
                    init_op=init_op,
                    default_graph_signature=signature,
                    assets_collection=tf.get_collection(
                        tf.GraphKeys.ASSET_FILEPATHS),
                    assets_callback=CopyAssets)
        export.export(export_path, global_step_tensor, sess)
Example 19
def train(mnist):
    x = tf.placeholder(tf.float32, [None, INPUT_NODE], name='x-input')
    y_ = tf.placeholder(tf.float32, [None, OUTPUT_NODE], name='y-input')

    regularizer = tf.contrib.layers.l2_regularizer(REGULARIZATION_RATE)
    y = inference(x, regularizer)

    global_step = tf.Variable(0, trainable=False)

    variable_averages = tf.train.ExponentialMovingAverage(
        MOVING_AVERAGE_DECAY, global_step)
    variables_averages_op = variable_averages.apply(tf.trainable_variables())

    cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits=y, labels=tf.argmax(y_, 1))
    cross_entropy_mean = tf.reduce_mean(cross_entropy)
    loss = cross_entropy_mean + tf.add_n(tf.get_collection('losses'))

    learning_rate = tf.train.exponential_decay(LEARNING_RATE_BASE,
                                               global_step,
                                               mnist.train.num_examples /
                                               BATCH_SIZE,
                                               LEARNING_RATE_DECAY,
                                               staircase=True)

    train_step = tf.train.GradientDescentOptimizer(learning_rate).minimize(
        loss, global_step=global_step)

    with tf.control_dependencies([train_step, variables_averages_op]):
        train_op = tf.no_op(name='train')

    saver = tf.train.Saver()
    with tf.Session() as sess:
        tf.global_variables_initializer().run()
        export = exporter.Exporter(saver)
        for i in range(TRAINING_STEPS):
            xs, ys = mnist.train.next_batch(BATCH_SIZE)
            _, loss_value, step = sess.run([train_op, loss, global_step],
                                           feed_dict={
                                               x: xs,
                                               y_: ys
                                           })
            if i % 1000 == 0:
                print(
                    "After %d training step(s), loss on training batch is %g."
                    % (step, loss_value))
                saver.save(sess,
                           os.path.join(MODEL_SAVE_PATH, MODEL_NAME),
                           global_step=global_step)
        saver.export_meta_graph(os.path.join(MODEL_SAVE_PATH, MODEL_NAME) +
                                ".json",
                                as_text=True)
        export.init(
            named_graph_signatures={
                "inputs":
                exporter.generic_signature({"input_matrix": x}),
                "outputs":
                exporter.generic_signature({"output_lable": tf.argmax(y, 1)}),
                "regress":
                exporter.regression_signature(x, y)
            })
        export.export(os.path.join(MODEL_SAVE_PATH, MODEL_NAME),
                      tf.constant(123), sess)
Example 20
def _regression_signature(examples, unused_features, predictions):
    signatures = {}
    signatures['regression'] = (exporter.regression_signature(
        examples, predictions))
    return signatures['regression'], signatures
Example 21
def Export():
  export_path = "/tmp/half_plus_two"
  with tf.Session() as sess:
    # Make model parameters a&b variables instead of constants to
    # exercise the variable reloading mechanisms.
    a = tf.Variable(0.5, name="a")
    b = tf.Variable(2.0, name="b")

    # Calculate, y = a*x + b
    # here we use a placeholder 'x' which is fed at inference time.
    x = tf.placeholder(tf.float32, name="x")
    y = tf.add(tf.mul(a, x), b, name="y")

    # Setup a standard Saver for our variables.
    save = tf.train.Saver({"a": a, "b": b}, sharded=True)

    # asset_path contains the base directory of assets used in training (e.g.
    # vocabulary files).
    original_asset_path = tf.constant("/tmp/original/export/assets")
    # Ops reading asset files should reference the asset_path tensor
    # which stores the original asset path at training time and the
    # overridden assets directory at restore time.
    asset_path = tf.Variable(original_asset_path,
                             name="asset_path",
                             trainable=False,
                             collections=[])
    assign_asset_path = asset_path.assign(original_asset_path)

    # Use a fixed global step number.
    global_step_tensor = tf.Variable(123, name="global_step")

    # Create a RegressionSignature for our input and output.
    signature = exporter.regression_signature(input_tensor=x, output_tensor=y)

    # Create two filename assets and corresponding tensors.
    # TODO(b/26254158) Consider adding validation of file existence as well as
    # hashes (e.g. sha1) for consistency.
    original_filename1 = tf.constant("hello1.txt")
    tf.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, original_filename1)
    filename1 = tf.Variable(original_filename1,
                            name="filename1",
                            trainable=False,
                            collections=[])
    assign_filename1 = filename1.assign(original_filename1)
    original_filename2 = tf.constant("hello2.txt")
    tf.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, original_filename2)
    filename2 = tf.Variable(original_filename2,
                            name="filename2",
                            trainable=False,
                            collections=[])
    assign_filename2 = filename2.assign(original_filename2)

    # Init op contains a group of all variables that we assign.
    init_op = tf.group(assign_asset_path, assign_filename1, assign_filename2)

    # CopyAssets is used as a callback during export to copy files to the
    # given export directory.
    def CopyAssets(filepaths, export_path):
      print("copying asset files to: %s" % export_path)
      for filepath in filepaths:
        print("copying asset file: %s" % filepath)

    # Run an export.
    tf.initialize_all_variables().run()
    export = exporter.Exporter(save)
    export.init(
        sess.graph.as_graph_def(),
        init_op=init_op,
        default_graph_signature=signature,
        assets_collection=tf.get_collection(tf.GraphKeys.ASSET_FILEPATHS),
        assets_callback=CopyAssets)
    export.export(export_path, global_step_tensor, sess)
Example 22
def _regression_signature(examples, unused_features, predictions):
  signatures = {}
  signatures['regression'] = (
      exporter.regression_signature(examples, predictions))
  return signatures['regression'], signatures