Example #1
  def test_asset_loading(self):
    first_path = self._v1_asset_saved_model()
    imported = load.load(first_path)
    self.evaluate(lookup_ops.tables_initializer())
    fn = imported.signatures["serving_default"]
    self.assertAllClose({"output": [2, 0]},
                        fn(start=constant_op.constant(["gamma", "alpha"])))
    second_path = os.path.join(self.get_temp_dir(), "saved_model",
                               str(ops.uid()))
    save.save(imported, second_path, signatures=imported.signatures)
    shutil.rmtree(first_path)
    del ops.get_collection_ref(ops.GraphKeys.TABLE_INITIALIZERS)[:]
    second_import = load.load(second_path)
    self.evaluate(lookup_ops.tables_initializer())
    fn = second_import.signatures["serving_default"]
    self.assertAllClose({"output": [2, 0]},
                        fn(start=constant_op.constant(["gamma", "alpha"])))

    third_path = os.path.join(self.get_temp_dir(), "saved_model",
                              str(ops.uid()))
    save.save(second_import, third_path, signatures=second_import.signatures)
    shutil.rmtree(second_path)
    del ops.get_collection_ref(ops.GraphKeys.TABLE_INITIALIZERS)[:]
    third_import = load.load(third_path)
    self.evaluate(lookup_ops.tables_initializer())
    fn = third_import.signatures["serving_default"]
    self.assertAllClose({"output": [2, 0]},
                        fn(start=constant_op.constant(["gamma", "alpha"])))
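The `_v1_asset_saved_model` helper is not shown. As a hedged sketch of the kind of asset-backed lookup model the test round-trips (the vocabulary contents, paths, and public-API style are assumptions; the real helper builds a V1 SavedModel), it might look like:

import tensorflow as tf

# Hypothetical vocabulary file: "alpha" -> 0, "gamma" -> 2, as asserted above.
vocab_path = "/tmp/vocab.txt"
with open(vocab_path, "w") as f:
  f.write("alpha\nbeta\ngamma\n")

class LookupModule(tf.Module):

  def __init__(self, path):
    initializer = tf.lookup.TextFileInitializer(
        path, tf.string, tf.lookup.TextFileIndex.WHOLE_LINE,
        tf.int64, tf.lookup.TextFileIndex.LINE_NUMBER)
    # The vocabulary file is tracked as a SavedModel asset when saving.
    self.table = tf.lookup.StaticHashTable(initializer, default_value=-1)

  @tf.function(input_signature=[tf.TensorSpec([None], tf.string)])
  def lookup(self, start):
    return {"output": self.table.lookup(start)}

tf.saved_model.save(LookupModule(vocab_path), "/tmp/asset_model")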
Example #2
 def test_trainable_not_set_in_proto(self):
   """If a VariableDef has no 'trainable', we fall back to collections."""
   real_tf_version = versions.__version__
   # Pretend to be exported from an older version of TensorFlow, so trainable
   # will follow collections instead of checking VariableDefs.
   versions.__version__ = "1.7.0"
   path = self._no_trainable_variable_attribute(trainable=True)
   root = load.load(path)
   self.assertTrue(root.variables[0].trainable)
   path = self._no_trainable_variable_attribute(trainable=False)
   root = load.load(path)
   self.assertFalse(root.variables[0].trainable)
   versions.__version__ = real_tf_version
Example #3
 def test_trainable_in_proto(self):
   """If a VariableDef has a trainable property, we do not use collections."""
   path = self._export_variable(
       trainable=True,
       collections=[ops.GraphKeys.GLOBAL_VARIABLES])
   root = load.load(path)
   self.assertTrue(root.variables[0].trainable)
   path = self._export_variable(
       trainable=False,
       collections=[ops.GraphKeys.GLOBAL_VARIABLES,
                    ops.GraphKeys.TRAINABLE_VARIABLES])
   root = load.load(path)
   self.assertFalse(root.variables[0].trainable)
Example #4
  def testConstSavedModel(self):
    """Test a basic model with functions to make sure functions are inlined."""
    input_data = constant_op.constant(1., shape=[1])
    root = tracking.AutoTrackable()
    root.f = def_function.function(lambda x: 2. * x)
    to_save = root.f.get_concrete_function(input_data)

    save_dir = os.path.join(self.get_temp_dir(), "saved_model")
    save(root, save_dir, to_save)
    saved_model = load(save_dir)
    input_func = saved_model.signatures["serving_default"]

    variable_graph_def = input_func.graph.as_graph_def()
    self.assertEqual(0, self._getNumVariables(variable_graph_def))
    self.assertTrue(variable_graph_def.library.function)

    output_func = convert_to_constants.convert_variables_to_constants_v2(
        input_func)
    constant_graph_def = output_func.graph.as_graph_def()
    self.assertEqual(0, self._getNumVariables(constant_graph_def))
    self.assertFalse(constant_graph_def.library.function)

    # Check value.
    expected_value = root.f(input_data)
    actual_value = self._evaluateGraphDef(constant_graph_def, input_func,
                                          [input_data.numpy()])
    self.assertEqual(expected_value.numpy(), actual_value)
Example #5
  def testVariableSavedModel(self):
    """Test a basic model with Variables with saving/loading the SavedModel."""
    input_data = constant_op.constant(1., shape=[1])
    root = tracking.AutoTrackable()
    root.v1 = variables.Variable(3.)
    root.v2 = variables.Variable(2.)
    root.f = def_function.function(lambda x: root.v1 * root.v2 * x)
    to_save = root.f.get_concrete_function(input_data)

    save_dir = os.path.join(self.get_temp_dir(), "saved_model")
    save(root, save_dir, to_save)
    saved_model = load(save_dir)
    input_func = saved_model.signatures["serving_default"]

    variable_graph_def = input_func.graph.as_graph_def()
    self.assertTrue(self._hasStatefulPartitionedCallOp(variable_graph_def))

    output_func = convert_to_constants.convert_variables_to_constants_v2(
        input_func)
    constant_graph_def = output_func.graph.as_graph_def()
    self.assertEqual(0, self._getNumVariables(constant_graph_def))
    self.assertFalse(self._hasStatefulPartitionedCallOp(constant_graph_def))

    # Check value.
    expected_value = root.f(input_data)
    actual_value = self._evaluateGraphDef(constant_graph_def, input_func,
                                          [input_data.numpy()])
    self.assertEqual(expected_value.numpy(), actual_value)
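For reference, a minimal hedged sketch of the freezing step these tests exercise, using the same `convert_variables_to_constants_v2` helper on a fresh toy function (the toy model itself is an assumption):

import tensorflow as tf
from tensorflow.python.framework import convert_to_constants

v = tf.Variable(3.)

@tf.function(input_signature=[tf.TensorSpec([1], tf.float32)])
def f(x):
  return v * x

frozen = convert_to_constants.convert_variables_to_constants_v2(
    f.get_concrete_function())
# Reads of `v` are now constants: no variable handles remain in the graph.
assert all(node.op != "VarHandleOp"
           for node in frozen.graph.as_graph_def().node)
print(frozen(tf.constant([2.])))  # [6.]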
Example #6
def test_saved_model_v2(directory,
                        tag_set=None,
                        signature_key=None,
                        input_data=None,
                        **kwargs):
  """Validates the TensorFlow SavedModel converts to a TFLite model.

  Converts the TensorFlow SavedModel to TFLite and checks the accuracy of the
  model on random data.

  Args:
    directory: SavedModel directory to convert.
    tag_set: Set of tags identifying the MetaGraphDef within the SavedModel to
      analyze. All tags in the tag set must be present.
    signature_key: Key identifying SignatureDef containing inputs and outputs.
    input_data: np.ndarray to pass into models during inference. (default None)
    **kwargs: Additional arguments to be passed into the converter.
  """
  model = _load.load(directory, tags=tag_set)
  if not signature_key:
    signature_key = _signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
  concrete_func = model.signatures[signature_key]

  converter = _lite.TFLiteConverterV2.from_concrete_functions([concrete_func])
  tflite_model = _convert(converter, version=2, **kwargs)

  compare_models_v2(tflite_model, concrete_func, input_data=input_data)
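A hedged usage sketch (the SavedModel path is hypothetical); with `input_data=None`, the comparison runs on random data per the docstring:

test_saved_model_v2("/tmp/my_saved_model", signature_key="serving_default")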
Example #7
  def _TestTrtGraphConverter(self,
                             input_saved_model_dir=None,
                             output_saved_model_dir=None,
                             need_calibration=False,
                             is_dynamic_op=False):
    """General method to test trt_convert.TrtGraphConverter()."""
    output_graph_def = self._ConvertGraph(
        input_saved_model_dir=input_saved_model_dir,
        output_saved_model_dir=output_saved_model_dir,
        need_calibration=need_calibration,
        is_dynamic_op=is_dynamic_op,
        use_function_backup=need_calibration)
    graph_defs_to_verify = [output_graph_def]

    if output_saved_model_dir:
      if context.executing_eagerly():
        root = load.load(output_saved_model_dir)
        saved_model_graph_def = root.signatures[
            signature_constants
            .DEFAULT_SERVING_SIGNATURE_DEF_KEY].graph.as_graph_def()
      else:
        saved_model_graph_def = saved_model_utils.get_meta_graph_def(
            output_saved_model_dir, tag_constants.SERVING).graph_def
      self.assertTrue(isinstance(saved_model_graph_def, graph_pb2.GraphDef))
      graph_defs_to_verify.append(saved_model_graph_def)

    for graph_def in graph_defs_to_verify:
      node_name_to_op = {node.name: node.op for node in graph_def.node}
      if context.executing_eagerly():
        # In V2 the actual graph could be inside a function.
        for func in graph_def.library.function:
          node_name_to_op.update({node.name: node.op for node in func.node_def})
        self.assertIn("TRTEngineOp_0", node_name_to_op)
        self.assertEqual("TRTEngineOp", node_name_to_op["TRTEngineOp_0"])
      else:
        self.assertEqual({
            "input": "Placeholder",
            "TRTEngineOp_0": "TRTEngineOp",
            "output": "Identity"
        }, node_name_to_op)

      if need_calibration:
        trt_engine_nodes = [
            node for node in graph_def.node if node.op == "TRTEngineOp"
        ]
        self.assertNotEmpty(trt_engine_nodes)
        for node in trt_engine_nodes:
          self.assertTrue(len(node.attr["calibration_data"].s))
        # Run the calibrated graph.
        # TODO(laigd): consider having some input where the answer is different.
        with ops.Graph().as_default():
          importer.import_graph_def(graph_def, name="")
          with self.session(config=self._GetConfigProto()) as sess:
            for test_data in range(10):
              self.assertEqual((test_data + 1.0)**2,
                               sess.run(
                                   "output:0",
                                   feed_dict={"input:0": [[[test_data]]]}))
Example #8
 def test_multi_meta_graph_loading(self):
   with self.assertRaisesRegex(ValueError, "2 MetaGraphs"):
     load.load(self._v1_multi_metagraph_saved_model())
   first_imported = load.load(self._v1_multi_metagraph_saved_model(),
                              tags=["first"])
   self.assertEqual({"first_output": 42.},
                    self.evaluate(first_imported.signatures["first_key"](
                        first_start=constant_op.constant(2.))))
   second_imported = load.load(self._v1_multi_metagraph_saved_model(),
                               tags=set(["second"]))
   with self.assertRaisesRegex(TypeError, "second_start"):
     second_imported.signatures["second_key"](x=constant_op.constant(2.))
   with self.assertRaisesRegex(TypeError, "second_start"):
     second_imported.signatures["second_key"](
         second_start=constant_op.constant(2.),
         x=constant_op.constant(2.))
   self.assertEqual({"second_output": 21.},
                    self.evaluate(second_imported.signatures["second_key"](
                        second_start=constant_op.constant(2.))))
Example #9
 def cycle(self, obj, cycles=1, signatures=None):
   to_save = obj
   # TODO(vbardiovsky): It would be nice if exported protos reached a fixed
   # point w.r.t. saving/restoring, ideally after 2nd saving.
   for _ in range(cycles):
     path = tempfile.mkdtemp(prefix=self.get_temp_dir())
     save.save(to_save, path, signatures)
     loaded = load.load(path)
     to_save = loaded
   return loaded
Example #10
def cycle(obj, cycles, signatures=None, options=None):
    to_save = obj
    for _ in range(cycles):
        path = tempfile.mkdtemp(prefix=test.get_temp_dir())
        # If available, we'll run the save and restore preferring the GPU. This
        # just makes sure we aren't throwing errors and have enough
        # device("CPU") blocks to satisfy the placer.
        with test_util.use_gpu():
            save.save(to_save, path, signatures, options=options)
            loaded = load.load(path)
            signatures = loaded.signatures
        to_save = loaded
    return loaded
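A hedged usage sketch for `cycle` (the toy trackable is an assumption), using the same internal modules as the tests above:

root = tracking.AutoTrackable()
root.v = variables.Variable(1.)
root.f = def_function.function(
    lambda x: root.v * x,
    input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
loaded = cycle(root, cycles=2)
assert loaded.f(constant_op.constant(3.)).numpy() == 3.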
Example #11
    def testSaveAndLoadModuleUnderStrategy(self):
        class Dense(module.Module):
            def __init__(self):
                self.kernel = variables_lib.Variable(random_ops.random_uniform(
                    (6, 6)),
                                                     name='kernel')
                self.bias = variables_lib.Variable(random_ops.random_uniform(
                    (6, )),
                                                   name='bias')

            @def_function.function
            def __call__(self, x):
                out = math_ops.matmul(self.kernel, x)
                out = out + self.bias
                return out

        x = constant_op.constant(math_ops.range(6, dtype=dtypes.float32),
                                 shape=[6, 1])

        strategy = self._create_strategy(2)
        with strategy.scope():
            layer = Dense()
            expect = layer(x)

        model_dir = self.get_temp_dir()
        save.save(layer, model_dir)

        strategy2 = self._create_strategy(3)
        with strategy2.scope():
            loaded_layer = load.load(model_dir)
            # Should fail with informative error
            with self.assertRaisesRegex(ValueError, 'run a loaded non-Keras'):
                got = loaded_layer(x)

        # Loading without a strategy should work, because the tf.function is traced
        # with a single variable as input
        loaded_layer = load.load(model_dir)
        got = loaded_layer(x)
        self.assertAllClose(got, expect)
Example #12
 def test_load_with_tags(self):
     root = tracking.AutoTrackable()
     path = tempfile.mkdtemp(prefix=self.get_temp_dir())
     save.save(root, path)
     with self.assertRaises(ValueError):
         load.load(path, tags=[tag_constants.EVAL])
     load.load(path, tags=[tag_constants.SERVING])
     load.load(path, tags=tag_constants.SERVING)
Example #13
    def testLoadSavedModelWithUnregisteredExtensionType(self):
        def f(x, y):
            x_values = x.values if isinstance(x, MaskedTensorV1) else x
            y_values = y.values if isinstance(y, MaskedTensorV1) else y
            x_mask = x.mask if isinstance(x, MaskedTensorV1) else True
            y_mask = y.mask if isinstance(y, MaskedTensorV1) else True
            return MaskedTensorV1(x_values + y_values, x_mask & y_mask)

        t_spec = tensor_spec.TensorSpec(None, dtypes.int32)
        b_spec = tensor_spec.TensorSpec(None, dtypes.bool)
        mt_spec = MaskedTensorV1.Spec(values=t_spec, mask=b_spec)
        model = module.Module()
        model.f = def_function.function(f)
        model.f.get_concrete_function(t_spec, t_spec)
        model.f.get_concrete_function(t_spec, mt_spec)
        model.f.get_concrete_function(mt_spec, t_spec)
        model.f.get_concrete_function(mt_spec, mt_spec)

        path = tempfile.mkdtemp(prefix=test.get_temp_dir())
        with temporarily_register_type_spec('tf.test.MaskedTensorV1.Spec',
                                            MaskedTensorV1.Spec):
            save.save(model, path)
        loaded_model = load.load(path)

        with self.assertRaises(ValueError):
            type_spec.lookup('tf.test.MaskedTensorV1')

        t = constant_op.constant([10, 20, 30])
        v1 = loaded_model.f(t, t)
        self.assertIsInstance(v1, extension_type.AnonymousExtensionType)
        self.assertAllEqual(v1.values, [20, 40, 60])
        self.assertAllEqual(v1.mask, True)

        v2 = loaded_model.f(v1, v1)
        self.assertIsInstance(v2, extension_type.AnonymousExtensionType)
        self.assertAllEqual(v2.values, [40, 80, 120])
        self.assertAllEqual(v2.mask, True)

        mt = MaskedTensorV1([1, 2, 3], [True, True, False])
        v3 = loaded_model.f(
            t,
            extension_type.reinterpret(mt,
                                       extension_type.AnonymousExtensionType))
        self.assertIsInstance(v3, extension_type.AnonymousExtensionType)
        self.assertAllEqual(v3.values, [11, 22, 33])
        self.assertAllEqual(v3.mask, [True, True, False])

        v4 = extension_type.reinterpret(v3, MaskedTensorV1)
        self.assertIsInstance(v4, MaskedTensorV1)
        self.assertAllEqual(v4.values, [11, 22, 33])
        self.assertAllEqual(v4.mask, [True, True, False])
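`MaskedTensorV1` and `temporarily_register_type_spec` come from the test's support code and are not shown. A minimal hedged sketch of a comparable extension type, written against the public API:

import tensorflow as tf

class MaskedTensorV1(tf.experimental.ExtensionType):
  """A tensor paired with a boolean mask; `.Spec` is generated automatically."""
  values: tf.Tensor
  mask: tf.Tensor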
Example #14
  def test_saved_model(self):
    with self.device:
      different_values = self.device.pack(
          [constant_op.constant(-1.),
           constant_op.constant(3.)])
      m = module.Module()
      m.v = variables.Variable(different_values)
      m.f = def_function.function(lambda: m.v * 2.)
      self.assertAllClose([-2., 6.], self.device.unpack(m.f()))
    saved_model_path = os.path.join(self.get_temp_dir(), "saved_model")
    save.save(m, saved_model_path)

    context._reset_context()
    self.setUp()

    single_device_loaded = load.load(saved_model_path)
    self.assertAllClose(-2., single_device_loaded.f())
    with self.device:
      parallel_loaded = load.load(saved_model_path)
      self.assertAllClose([-2., 6.], self.device.unpack(parallel_loaded.f()))
      self.assertAllClose([-1., 3.], self.device.unpack(parallel_loaded.v))
      parallel_loaded.v.assign(self.device.pack([.1, .2]))
      self.assertAllClose([.2, .4], self.device.unpack(parallel_loaded.f()))
Example #15
    def testRefVariableImport(self):
        saved = self._singleMetaGraphSavedModel()
        imported = load(saved)
        fn = imported.signatures["serving_default"]
        output_func = convert_to_constants.convert_variables_to_constants_v2(
            fn)
        constant_graph_def = output_func.graph.as_graph_def()
        self.assertEqual(0, self._getNumVariables(constant_graph_def))
        self.assertFalse(
            self._hasStatefulPartitionedCallOp(constant_graph_def))

        input_data = {"start": constant_op.constant(1., shape=[1, 1])}
        root = tracking.AutoTrackable()
        self._testConvertedFunction(root, fn, output_func, input_data)
Example #16
def convert_tf_saved_model(saved_model_dir,
                           output_dir,
                           signature_def='serving_default',
                           saved_model_tags='serve',
                           quantization_dtype=None,
                           skip_op_check=False,
                           strip_debug_ops=False):
    """Freeze the SavedModel and check the model compatibility with Tensorflow.js.

  Optimize and convert the model to Tensorflow.js format, when the model passes
  the compatiblity check.

  Args:
    saved_model_dir: string The saved model directory.
    : string The names of the output nodes, comma separated.
    output_dir: string The name of the output directory. The directory
      will consist of
      - a file named 'model.json'
      - possibly sharded binary weight files.
    signature_def: string Tagset of the SignatureDef to load. Defaults to
      'serving_default'.
    saved_model_tags: tags of the GraphDef to load. Defaults to 'serve'.
    quantization_dtype: An optional numpy dtype to quantize weights to for
      compression. Only np.uint8 and np.uint16 are supported.
    skip_op_check: Bool whether to skip the op check.
    strip_debug_ops: Bool whether to strip debug ops.
  """
    if signature_def is None:
        signature_def = 'serving_default'

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    output_graph = os.path.join(output_dir,
                                common.ARTIFACT_MODEL_JSON_FILE_NAME)

    saved_model_tags = saved_model_tags.split(', ')
    model = load(saved_model_dir, saved_model_tags)

    _check_signature_in_model(model, signature_def)

    concrete_func = model.signatures[signature_def]
    frozen_func = convert_to_constants.convert_variables_to_constants_v2(
        concrete_func)

    optimize_graph(frozen_func,
                   output_graph,
                   model.tensorflow_version,
                   quantization_dtype=quantization_dtype,
                   skip_op_check=skip_op_check,
                   strip_debug_ops=strip_debug_ops)
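A hedged usage sketch (paths are hypothetical):

convert_tf_saved_model(
    '/tmp/my_saved_model',
    '/tmp/tfjs_model',
    signature_def='serving_default',
    saved_model_tags='serve')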
Example #17
 def test_structure_import(self):
   root = tracking.Checkpointable()
   root.f = def_function.function(
       lambda x: 2. * x,
       input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])
   root.dep_one = tracking.Checkpointable()
   root.dep_two = tracking.Checkpointable()
   root.dep_two.dep = tracking.Checkpointable()
   root.dep_three = root.dep_two.dep
   save_dir = os.path.join(self.get_temp_dir(), "saved_model")
   save.save(root, save_dir)
   imported = load.load(save_dir)
   self.assertIs(imported.dep_three, imported.dep_two.dep)
   self.assertIsNot(imported.dep_one, imported.dep_two)
Example #18
  def test_asset_loading(self):
    first_path = self._v1_asset_saved_model()
    imported = load.load(first_path)
    fn = imported.signatures["serving_default"]
    self.assertAllClose({"output": [2, 0]},
                        fn(start=constant_op.constant(["gamma", "alpha"])))
    second_path = os.path.join(self.get_temp_dir(), "saved_model",
                               str(ops.uid()))
    save.save(imported, second_path, signatures=imported.signatures)
    shutil.rmtree(first_path)
    second_import = load.load(second_path)
    fn = second_import.signatures["serving_default"]
    self.assertAllClose({"output": [2, 0]},
                        fn(start=constant_op.constant(["gamma", "alpha"])))

    third_path = os.path.join(self.get_temp_dir(), "saved_model",
                              str(ops.uid()))
    save.save(second_import, third_path, signatures=second_import.signatures)
    shutil.rmtree(second_path)
    third_import = load.load(third_path)
    fn = third_import.signatures["serving_default"]
    self.assertAllClose({"output": [2, 0]},
                        fn(start=constant_op.constant(["gamma", "alpha"])))
Example #19
 def test_capture_assets(self):
   root = tracking.Checkpointable()
   root.vocab = tracking.TrackableAsset(self._make_asset("contents"))
   root.f = def_function.function(
       lambda: root.vocab.asset_path,
       input_signature=[])
   save_dir = os.path.join(self.get_temp_dir(), "save_dir")
   save.save(root, save_dir)
   imported = load.load(save_dir)
   origin_output = root.f().numpy()
   imported_output = imported.f().numpy()
   self.assertNotEqual(origin_output, imported_output)
   with open(imported_output, "r") as f:
     self.assertEquals("contents", f.read())
Example #21
    def testTrtGraphConverter_Int8Conversion_v2(self):
        if not is_tensorrt_enabled():
            return

        np_input = np.random.random_sample([4, 1, 1]).astype(np.float32)

        # Create a model and save it.
        input_saved_model_dir = tempfile.mkdtemp(dir=self.get_temp_dir())
        root = self._GetModelForV2()
        expected_output = root.run(np_input)
        save.save(root, input_saved_model_dir,
                  {_SAVED_MODEL_SIGNATURE_KEY: root.run})

        # Run TRT conversion.
        converter = self._CreateConverterV2(
            input_saved_model_dir,
            precision_mode=trt_convert.TrtPrecisionMode.INT8)
        converted_func = converter.convert()

        # Run the converted function for INT8 calibration.
        calibration_output = converted_func(np_input)
        self.assertEqual(1, len(calibration_output))
        self.assertAllClose(expected_output,
                            list(calibration_output.values())[0],
                            atol=1e-6,
                            rtol=1e-6)

        # Save the converted model again with serialized engine cache.
        output_saved_model_dir = self.mkdtemp()
        converter.save(output_saved_model_dir)
        expected_asset_file = os.path.join(
            output_saved_model_dir,
            "assets/trt-serialized-engine.TRTEngineOp_0")
        self.assertTrue(os.path.exists(expected_asset_file))
        self.assertTrue(os.path.getsize(expected_asset_file))

        # Load and verify the converted model.
        root_with_trt = load.load(output_saved_model_dir)
        converted_signature = root_with_trt.signatures[
            _SAVED_MODEL_SIGNATURE_KEY]
        output_with_trt = converted_signature(ops.convert_to_tensor(np_input))
        self.assertEqual(1, len(output_with_trt))

        # The output of running the converted signature is a dict due to
        # compatibility reasons with V1 SavedModel signature mechanism.
        self.assertAllClose(expected_output,
                            list(output_with_trt.values())[0],
                            atol=1e-6,
                            rtol=1e-6)
Example #22
  def testTrtGraphConverter_ShapeOp_v2(self):
    """Test case for TrtGraphConverterV2 with ShapeOp."""

    class ShapeOpModel(tracking.AutoTrackable):

      def __init__(self):
        self.v = None

      @def_function.function(input_signature=[
          tensor_spec.TensorSpec(shape=[None, None], dtype=dtypes.float32)
      ])
      def run(self, x):
        q = x + 1
        q_shape = array_ops.shape(q)
        return array_ops.identity(q_shape, name="output")

    np_input = np.random.random_sample([5, 3]).astype(np.float32)

    def _InputFunc():
      yield (np_input,)

    # Create the SavedModel.
    root = ShapeOpModel()
    expected_output = root.run(np_input)
    input_saved_model_dir = self.mkdtemp()
    save.save(root, input_saved_model_dir, signatures=root.run)

    # Convert the graph to TF-TRT.
    conv_params = trt_convert.TrtConversionParams(minimum_segment_size=2)
    converter = trt_convert.TrtGraphConverterV2(
        input_saved_model_dir=input_saved_model_dir,
        conversion_params=conv_params,
        use_dynamic_shape=True)
    converter.convert()

    # Build the graph with the input generator. This runs the TRTEngineOp native
    # segment.
    converter.build(_InputFunc)
    output_saved_model_dir = self.mkdtemp()
    converter.save(output_saved_model_dir)

    root_with_trt = load.load(output_saved_model_dir)
    converted_signature = root_with_trt.signatures["serving_default"]
    # Check that the graph is converted to one TRTEngineOp.
    self._CheckTrtOps(converted_signature)
    # Run the graph.
    output_with_trt = converted_signature(x=ops.convert_to_tensor(np_input))
    # Check the result of the run.
    self.assertAllClose(expected_output, list(output_with_trt.values())[0])
Example #23
    def testTrainingDefaults(self):
        def assert_training_default(fn, default_value):
            arg_spec = tf_inspect.getfullargspec(fn)
            index = len(arg_spec.args) - arg_spec.args.index('training')
            self.assertEqual(arg_spec.defaults[-index], default_value)

        class LayerWithTrainingRequiredArg(keras.engine.base_layer.Layer):
            def call(self, inputs, training):
                return tf_utils.smart_cond(training, lambda: inputs * 0,
                                           lambda: array_ops.identity(inputs))

        class LayerWithTrainingDefaultTrue(keras.engine.base_layer.Layer):
            def call(self, inputs, training=True):
                return tf_utils.smart_cond(training, lambda: inputs * 0,
                                           lambda: array_ops.identity(inputs))

        class Model(keras.models.Model):
            def __init__(self):
                super(Model, self).__init__()
                self.layer_with_training_default_none = (
                    LayerWithLearningPhase())
                self.layer_with_training_default_true = (
                    LayerWithTrainingDefaultTrue())
                self.layer_with_required_training_arg = (
                    LayerWithTrainingRequiredArg())

            def call(self, inputs):
                x = self.layer_with_training_default_none(inputs)
                x += self.layer_with_training_default_true(inputs)
                x += self.layer_with_required_training_arg(inputs, False)
                return x

        model = Model()
        # Build and set model inputs
        model.predict(np.ones([1, 3]).astype('float32'))
        saved_model_dir = self._save_model_dir()
        model.save(saved_model_dir, save_format='tf')
        load = tf_load.load(saved_model_dir)

        assert_training_default(load.__call__, False)
        assert_training_default(load.layer_with_training_default_none.__call__,
                                False)
        assert_training_default(load.layer_with_training_default_true.__call__,
                                True)

        # Assert that there are no defaults for layer with required training arg
        arg_spec = tf_inspect.getfullargspec(
            load.layer_with_required_training_arg.__call__)
        self.assertFalse(arg_spec.defaults)  # defaults is None or empty
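The index arithmetic in `assert_training_default` pairs `args` with `defaults` from the right, since `defaults` only covers the trailing arguments. A small worked sketch with plain `inspect` (which `tf_inspect` mirrors):

import inspect

def call(self, inputs, mask=None, training=False):
  pass

spec = inspect.getfullargspec(call)
# spec.args = ['self', 'inputs', 'mask', 'training']; spec.defaults = (None, False)
index = len(spec.args) - spec.args.index('training')  # 4 - 3 == 1
assert spec.defaults[-index] is False                 # i.e. defaults[-1]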
Example #24
  def test_save_uninitialized_variable(self):
    root = tracking.AutoTrackable()
    root.uninitialized_variable = resource_variable_ops.UninitializedVariable(
        name="uninitialized_variable", dtype=dtypes.float32)
    root.initialized_variable = variables.Variable(
        1.0, name="initialized_variable")

    # TODO(b/149594077): Python loading does not work now partly because it
    # shouldn't, as the public API and semantics of uninitialized variables
    # are not properly defined, and officially supporting loading would end up
    # defining semantics "by usage." We should only allow loading once the API
    # is made official.
    export_dir = os.path.join(self.get_temp_dir(), "saved_model")
    save.save(root, export_dir)
    with self.assertRaisesRegex(FileNotFoundError,
                                "Key uninitialized_variable"):
      load.load(export_dir)
    with ops.Graph().as_default(), session_lib.Session() as session:
      # The final ValueError here (with "no variables to save") is confusing,
      # but errors upstream give the user the correct information (a
      # NotFoundError stating that the uninitialized_variable was not found in
      # the checkpoint).
      with self.assertRaises(ValueError):
        loader.load(session, [tag_constants.SERVING], export_dir)
Example #25
  def test_concrete_function_with_set_shape(self):
    # Serialized concrete function should retain the shape from the TensorSpec,
    # instead of using the shape of the inputs (which are changed by set_shape).
    @def_function.function
    def f(x):
      x.set_shape((5, 1))
      return x

    root = tracking.AutoTrackable()
    path = os.path.join(self.get_temp_dir(), "saved_model")
    concrete = f.get_concrete_function(
        tensor_spec.TensorSpec((None, 1), name="name"))
    save.save(root, path, signatures={"key": concrete})
    imported = load.load(path)
    self.assertEqual(imported.signatures["key"].structured_input_signature[1],
                     {"name": tensor_spec.TensorSpec((None, 1), name="name")})
Example #26
    def _testConvertedFunction(self, obj, func, converted_concrete_func,
                               input_data):
        # Check that the converted ConcreteFunction produces the same result as the
        # original Function.
        expected_value = func(input_data)
        actual_value = nest.flatten(converted_concrete_func(input_data))
        self.assertEqual(expected_value.numpy(), actual_value)

        # Save the converted ConcreteFunction as a signature.
        save_dir = os.path.join(self.get_temp_dir(), "frozen_saved_model")
        save(obj, save_dir, {"mykey": converted_concrete_func})

        # Load it back and make sure it works.
        loaded_obj = load(save_dir)
        actual_value = nest.flatten(loaded_obj.signatures["mykey"](input_data))
        self.assertEqual(expected_value.numpy(), actual_value)
Example #27
    def test_assets_dedup(self):
        vocab = self._make_asset("contents")
        root = tracking.Checkpointable()
        root.f = def_function.function(
            lambda x: 2. * x,
            input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])

        root.asset1 = tracking.TrackableAsset(vocab)
        root.asset2 = tracking.TrackableAsset(vocab)

        export_dir = os.path.join(self.get_temp_dir(), "save_dir")
        save.save(root, export_dir)
        imported = load.load(export_dir)

        self.assertEqual(imported.asset1.asset_path.numpy(),
                         imported.asset2.asset_path.numpy())
Example #28
  def testBackwardCompatibility(self):
    """Load and execute a model that was saved in TF2.0."""

    model_dir = test.test_src_dir_path(
        "python/compiler/tensorrt/test/testdata/tftrt_2.0_saved_model")
    saved_model_loaded = load.load(model_dir, tags=[tag_constants.SERVING])
    graph_func = saved_model_loaded.signatures[
        signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY]

    np_input1 = ops.convert_to_tensor(np.ones([4, 1, 1]).astype(np.float32))
    np_input2 = ops.convert_to_tensor(np.ones([4, 1, 1]).astype(np.float32))
    output = graph_func(input1=np_input1, input2=np_input2)["output_0"]

    self.assertEqual(output.shape, (4, 1, 1))
    self.assertAllClose(
        np.asarray([5.0, 5.0, 5.0, 5.0]).reshape([4, 1, 1]), output)
Example #29
def _show_defined_functions(saved_model_dir):
    """Prints the callable concrete and polymorphic functions of the Saved Model.

  Args:
    saved_model_dir: Directory containing the SavedModel to inspect.
  """
    meta_graphs = saved_model_utils.read_saved_model(
        saved_model_dir).meta_graphs
    has_object_graph_def = False

    for meta_graph_def in meta_graphs:
        has_object_graph_def |= meta_graph_def.HasField('object_graph_def')
    if not has_object_graph_def:
        return
    with ops_lib.Graph().as_default():
        trackable_object = load.load(saved_model_dir)

    print('\nDefined Functions:', end='')
    children = list(
        save._AugmentedGraphView(trackable_object)  # pylint: disable=protected-access
        .list_children(trackable_object))
    children = sorted(children, key=lambda x: x.name)
    for name, child in children:
        concrete_functions = []
        if isinstance(child, defun.ConcreteFunction):
            concrete_functions.append(child)
        elif isinstance(child, def_function.Function):
            concrete_functions.extend(
                child._list_all_concrete_functions_for_serialization())  # pylint: disable=protected-access
        else:
            continue
        print('\n  Function Name: \'%s\'' % name)
        concrete_functions = sorted(concrete_functions, key=lambda x: x.name)
        for index, concrete_function in enumerate(concrete_functions, 1):
            args, kwargs = None, None
            if concrete_function.structured_input_signature:
                args, kwargs = concrete_function.structured_input_signature
            elif concrete_function._arg_keywords:  # pylint: disable=protected-access
                # For pure ConcreteFunctions we might have nothing better than
                # _arg_keywords.
                args = concrete_function._arg_keywords  # pylint: disable=protected-access
            if args:
                print('    Option #%d' % index)
                print('      Callable with:')
                _print_args(args, indent=4)
            if kwargs:
                _print_args(kwargs, 'Named Argument', indent=4)
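A hedged usage sketch (the directory is hypothetical); the shape of the output follows the print calls above:

_show_defined_functions('/tmp/my_saved_model')
# Prints, roughly:
#
# Defined Functions:
#   Function Name: 'f'
#     Option #1
#       Callable with:
#         ...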
Example #30
    def test_save_composite_tensor_signature(self):
        @def_function.function(
            input_signature=[ragged_tensor.RaggedTensorSpec(ragged_rank=2)])
        def f(x):
            return {"output_key": x}

        root = tracking.AutoTrackable()
        path = os.path.join(self.get_temp_dir(), "saved_model")
        inp = ragged_factory_ops.constant([[[1.0, 2.0], [3.0]], [[5.]]])
        flat_inp = {
            "x": constant_op.constant([1., 2., 3., 5]),
            "x_1": constant_op.constant([0, 2, 3], dtype=dtypes.int64),
            "x_2": constant_op.constant([0, 2, 3, 4], dtype=dtypes.int64)
        }
        save.save(root, path, signatures={"key": f.get_concrete_function()})

        # Test that the ragged signature can be loaded back into Python with V2 APIs
        imported = load.load(path)
        self.assertAllEqual(
            inp, imported.signatures["key"](**flat_inp)["output_key"])
        graph = ops.Graph()

        # Try running the signature with V1 APIs.
        with graph.as_default(), session_lib.Session() as session:
            meta_graph_def = loader.load(session, [tag_constants.SERVING],
                                         path)
            signature = meta_graph_def.signature_def["key"]

            feed_dict = {}
            for arg_name in flat_inp:
                input_tensor = session.graph.get_tensor_by_name(
                    signature.inputs[arg_name].name)
                feed_dict[input_tensor] = flat_inp[arg_name].numpy()

            # Get composite tensor components
            output_components = (
                signature.outputs["output_key"].composite_tensor.components)
            fetches = {}
            components_keys = ["x", "x_1", "x_2"]
            for k, output_tensor_info in zip(components_keys,
                                             output_components):
                fetches[k] = session.graph.get_tensor_by_name(
                    output_tensor_info.name)

            outputs = session.run(fetches, feed_dict)

        self.assertAllClose(flat_inp, outputs)
Example #31
  def _convert_saved_model_v2(self):
    """Convert the input SavedModel in 2.0 format."""
    self._saved_model = load.load(self._input_saved_model_dir,
                                  self._input_saved_model_tags)
    func = self._saved_model.signatures[self._input_saved_model_signature_key]
    frozen_func = convert_to_constants.convert_variables_to_constants_v2(func)
    self._grappler_meta_graph_def = saver.export_meta_graph(
        graph_def=frozen_func.graph.as_graph_def(), graph=frozen_func.graph)

    # Add a collection 'train_op' so that Grappler knows the outputs.
    fetch_collection = meta_graph_pb2.CollectionDef()
    for array in func.inputs + func.outputs:
      fetch_collection.node_list.value.append(array.name)
    self._grappler_meta_graph_def.collection_def["train_op"].CopyFrom(
        fetch_collection)

    # Run TRT optimizer in Grappler to convert the graph.
    self._run_conversion()

    def _get_tensor(graph, tensors):
      new_tensors = []
      for tensor in tensors:
        new_tensor = graph.get_tensor_by_name(tensor.name)
        new_tensor.set_shape(tensor.shape)
        new_tensors.append(new_tensor)
      return new_tensors

    # TODO(laigd): do we need to use different name e.g. "trt_func_graph"?
    converted_graph = func_graph.FuncGraph(func.graph.name)
    with converted_graph.as_default():
      importer.import_graph_def(self._converted_graph_def, name="")

    converted_graph.inputs = _get_tensor(converted_graph, func.graph.inputs)
    converted_graph.outputs = _get_tensor(converted_graph, func.graph.outputs)
    converted_graph.structured_outputs = func.graph.structured_outputs
    converted_graph.structured_input_signature = (
        func.graph.structured_input_signature)

    # pylint: disable=protected-access
    # TODO(laigd): should we set up the signature as well?
    self._converted_func = function.ConcreteFunction(
        converted_graph, attrs=None, signature=None)
    self._converted_func.add_to_graph()
    self._converted_func._arg_keywords = func._arg_keywords
    self._converted_func._num_positional_args = func._num_positional_args
    self._converted_func._captured_inputs = func._captured_inputs
    self._converted_func.graph.variables = func.graph.variables
Example #33
    def test_only_implicit_signatures(self):
        def func(x):
            return 2 * x

        root = tracking.Checkpointable()
        root.f = def_function.function(func)

        # Add two traces.
        root.f(constant_op.constant(1.))
        root.f(constant_op.constant(1))

        save_dir = os.path.join(self.get_temp_dir(), "saved_model")
        save.save(root, save_dir, signatures=dict())
        imported = load.load(save_dir)

        self.assertEqual(4., imported.f(constant_op.constant(2.)).numpy())
        self.assertEqual(14, imported.f(constant_op.constant(7)).numpy())
Example #34
  def test_table_in_graph(self, cycles):
    root = self._make_model_with_tables()

    if cycles > 1:
      root = self.cycle(root, cycles - 1)
    path = tempfile.mkdtemp(prefix=self.get_temp_dir())
    save.save(root, path)
    imported = self.cycle(root, 1)

    with ops.Graph().as_default():
      imported = load.load(path)
      keys = constant_op.constant(["brain", "test", "foo", "surgery"])
      output1 = imported.lookup1(keys)
      output2 = imported.lookup2(keys)
      with monitored_session.MonitoredSession() as sess:
        self.assertAllEqual([0, -1, -1, 2], sess.run(output1))
        self.assertAllEqual([2, 0, 1, -1], sess.run(output2))
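`_make_model_with_tables` is not shown. A hedged sketch consistent with the asserted outputs above (the vocabularies are reverse-engineered assumptions: table 1 maps brain/salad/surgery to 0/1/2, table 2 maps test/foo/brain to 0/1/2):

import tensorflow as tf

def _make_model_with_tables_sketch():
  root = tf.Module()
  init1 = tf.lookup.KeyValueTensorInitializer(
      ["brain", "salad", "surgery"], tf.constant([0, 1, 2], dtype=tf.int64))
  root.table1 = tf.lookup.StaticHashTable(init1, default_value=-1)
  init2 = tf.lookup.KeyValueTensorInitializer(
      ["test", "foo", "brain"], tf.constant([0, 1, 2], dtype=tf.int64))
  root.table2 = tf.lookup.StaticHashTable(init2, default_value=-1)
  root.lookup1 = tf.function(
      root.table1.lookup,
      input_signature=[tf.TensorSpec([None], tf.string)])
  root.lookup2 = tf.function(
      root.table2.lookup,
      input_signature=[tf.TensorSpec([None], tf.string)])
  return root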
Example #35
  def test_save_without_tracing(self):

    class DoNotTrace(keras.layers.Layer):

      def __init__(self):
        super(DoNotTrace, self).__init__()
        self.input_spec = keras.layers.InputSpec(shape=[None])
        self.built = True

      def call(self, inputs):
        raise ValueError('I said do not trace')

      def get_config(self):
        return {}

      @property
      def _use_input_spec_as_call_signature(self):
        return True

    root = keras.models.Sequential()
    root.add(keras.layers.Input(shape=(3,)))
    root.attached_layer = DoNotTrace()

    saved_model_dir = self._save_model_dir()

    # With the default settings, the call function is traced.
    with self.assertRaisesRegex(ValueError, 'do not trace'):
      root.save(saved_model_dir, save_format='tf')

    # When saving the config only, the layer call function should not be
    # traced.
    root.save(saved_model_dir, save_format='tf', save_traces=False)
    loaded = tf_load.load(saved_model_dir)
    self.assertTrue(hasattr(loaded, 'attached_layer'))

    # This should raise an error when loaded without the custom object
    loaded = keras_load.load(saved_model_dir)
    with self.assertRaisesRegex(ValueError, 'Cannot call custom layer'):
      loaded.attached_layer(constant_op.constant([1.]))

    # Try loading with the custom objects
    with generic_utils.CustomObjectScope({'DoNotTrace': DoNotTrace}):
      loaded = keras_load.load(saved_model_dir)
    with self.assertRaisesRegex(ValueError, 'I said do not trace'):
      loaded.attached_layer(constant_op.constant([1.]))
Example #36
    def convert(self):
        """Convert the input SavedModel in 2.0 format.

    Returns:
      The TF-TRT converted Function.
    """
        assert not self._converted
        self._saved_model = load.load(self._input_saved_model_dir,
                                      self._input_saved_model_tags)
        func = self._saved_model.signatures[
            self._input_saved_model_signature_key]
        frozen_func = convert_to_constants.convert_variables_to_constants_v2(
            func)
        grappler_meta_graph_def = saver.export_meta_graph(
            graph_def=frozen_func.graph.as_graph_def(),
            graph=frozen_func.graph)

        # Add a collection 'train_op' so that Grappler knows the outputs.
        fetch_collection = meta_graph_pb2.CollectionDef()
        for array in frozen_func.inputs + frozen_func.outputs:
            fetch_collection.node_list.value.append(array.name)
        grappler_meta_graph_def.collection_def["train_op"].CopyFrom(
            fetch_collection)

        # Run TRT optimizer in Grappler to convert the graph.
        self._converted_graph_def = self._run_conversion(
            grappler_meta_graph_def)
        self._converted_func = wrap_function.function_from_graph_def(
            self._converted_graph_def,
            [tensor.name for tensor in frozen_func.inputs],
            [tensor.name for tensor in frozen_func.outputs])
        # Reconstruct the output signatures using the ones from original model.
        self._converted_func.graph.structured_outputs = nest.pack_sequence_as(
            func.graph.structured_outputs,
            self._converted_func.graph.structured_outputs)

        self._converted = True

        # Wrap the converted ConcreteFunction in a Function so it can accept numpy
        # arrays as input.
        @def_function.function
        def wrapper_func(*args, **kwargs):
            return self._converted_func(*args, **kwargs)

        return wrapper_func
Example #37
    def testConstSavedModel(self):
        """Test a basic model with functions to make sure functions are inlined."""
        input_data = {"x": constant_op.constant(1., shape=[1])}
        root = tracking.AutoTrackable()
        root.f = def_function.function(lambda x: 2. * x)
        to_save = root.f.get_concrete_function(input_data["x"])

        save_dir = os.path.join(self.get_temp_dir(), "saved_model")
        save(root, save_dir, to_save)
        saved_model = load(save_dir)
        input_func = saved_model.signatures["serving_default"]

        variable_graph_def = input_func.graph.as_graph_def()
        self.assertEqual(0, self._getNumVariables(variable_graph_def))
        self.assertTrue(variable_graph_def.library.function)

        output_func = convert_to_constants.convert_variables_to_constants_v2(
            input_func)
        self._testConvertedFunction(root, root.f, output_func, input_data)
Example #38
 def test_resource_variable_import(self):
   imported = load.load(self._v1_single_metagraph_saved_model(
       use_resource=True))
   fn = imported.signatures["serving_default"]
   self.assertEqual({"output": 6.},
                    self.evaluate(fn(constant_op.constant(2.))))
   self.assertAllEqual([3., 1.], self.evaluate(imported.variables))
   imported.variables[0].assign(4.)
   self.assertEqual({"output": 8.},
                    self.evaluate(fn(start=constant_op.constant(2.))))
   imported.variables[1].assign(2.)
   self.assertEqual({"output": 24.},
                    self.evaluate(fn(start=constant_op.constant(3.))))
   self.assertTrue(imported.variables[0].trainable)
   self.assertFalse(imported.variables[1].trainable)
   with backprop.GradientTape() as tape:
     output = fn(start=constant_op.constant(4.))
   self.assertEqual(imported.variables[:1], list(tape.watched_variables()))
   self.assertEqual(8., tape.gradient(output, imported.variables[0]).numpy())
Example #39
    def testSavedModelSupport(self):
        class TestModule(module.Module):
            @def_function.function
            def f(self, s):
                return s.x[0] + s.x[1] + s.y

        s1 = self.StructWithName((1, 2), 3)
        s2 = self.StructWithName((1.0, 2), [3.0, 4.0])

        m = TestModule()
        m.f.get_concrete_function(s1)
        m.f.get_concrete_function(s2)

        path = tempfile.mkdtemp(prefix=test.get_temp_dir())
        save.save(m, path)
        loaded = load.load(path)

        self.assertAllEqual(loaded.f(s1), 6)
        self.assertAllEqual(loaded.f(s2), [6.0, 7.0])
Example #40
 def _GetGraphDef(self, run_params, gdef_or_saved_model_dir):
     if isinstance(gdef_or_saved_model_dir, str):
         if run_params.is_v2:
             root = load.load(gdef_or_saved_model_dir)
             func = root.signatures[
                 signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY]
             gdef = func.graph.as_graph_def()
             # Manually unref the loaded saved model and force GC to destroy the TRT
             # engine cache after load(). There is currently a reference cycle in 2.0
             # which prevents auto deletion of the resource.
             # TODO(laigd): fix this.
             del func
             del root
             gc.collect()
             return gdef
         return saved_model_utils.get_meta_graph_def(
             gdef_or_saved_model_dir, tag_constants.SERVING).graph_def
     assert isinstance(gdef_or_saved_model_dir, graph_pb2.GraphDef)
     return gdef_or_saved_model_dir
Example #41
  def test_load_in_graph_mode(self, cycles):
    root = tracking.AutoCheckpointable()
    root.v1 = variables.Variable(1.)
    root.v2 = variables.Variable(2.)
    root.f = def_function.function(
        lambda x: root.v2 * x,
        input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])

    if cycles > 1:
      root = self.cycle(root, cycles - 1)
    path = tempfile.mkdtemp(prefix=self.get_temp_dir())
    save.save(root, path)

    with ops.Graph().as_default():
      imported = load.load(path)
      var_v1 = imported.v1
      output = imported.f(constant_op.constant(2.))
      with monitored_session.MonitoredSession() as sess:
        self.assertEqual(1.0, sess.run(var_v1))
        self.assertEqual(4.0, sess.run(output))
Example #42
    def testVariableSavedModel(self):
        """Test a basic model with Variables with saving/loading the SavedModel."""
        input_data = {"x": constant_op.constant(1., shape=[1])}
        root = tracking.AutoTrackable()
        root.v1 = variables.Variable(3.)
        root.v2 = variables.Variable(2.)
        root.f = def_function.function(lambda x: root.v1 * root.v2 * x)
        to_save = root.f.get_concrete_function(input_data["x"])

        save_dir = os.path.join(self.get_temp_dir(), "saved_model")
        save(root, save_dir, to_save)
        saved_model = load(save_dir)
        input_func = saved_model.signatures["serving_default"]

        variable_graph_def = input_func.graph.as_graph_def()
        self.assertTrue(self._hasStatefulPartitionedCallOp(variable_graph_def))

        output_func = convert_to_constants.convert_variables_to_constants_v2(
            input_func)
        self._testConvertedFunction(root, root.f, output_func, input_data)
Example #43
  def test_load_in_graph_mode(self, cycles):
    root = tracking.AutoTrackable()
    root.v1 = variables.Variable(1.)
    root.v2 = variables.Variable(2.)
    root.f = def_function.function(
        lambda x: root.v2 * x,
        input_signature=[tensor_spec.TensorSpec(None, dtypes.float32)])

    if cycles > 1:
      root = self.cycle(root, cycles - 1)
    path = tempfile.mkdtemp(prefix=self.get_temp_dir())
    save.save(root, path)

    with ops.Graph().as_default():
      imported = load.load(path)
      var_v1 = imported.v1
      output = imported.f(constant_op.constant(2.))
      with monitored_session.MonitoredSession() as sess:
        self.assertEqual(1.0, sess.run(var_v1))
        self.assertEqual(4.0, sess.run(output))
Example #44
  def testConstSavedModel(self):
    """Test a basic model with functions to make sure functions are inlined."""
    self.skipTest('b/124205572')
    input_data = constant_op.constant(1., shape=[1])
    root = tracking.AutoTrackable()
    root.f = def_function.function(lambda x: 2. * x)
    to_save = root.f.get_concrete_function(input_data)

    save_dir = os.path.join(self.get_temp_dir(), 'saved_model')
    save(root, save_dir, to_save)
    saved_model = load(save_dir)
    concrete_func = saved_model.signatures['serving_default']

    # Convert model and ensure model is not None.
    converter = lite.TFLiteConverterV2.from_concrete_function(concrete_func)
    tflite_model = converter.convert()

    # Check values from converted model.
    expected_value = root.f(input_data)
    actual_value = self._evaluateTFLiteModel(tflite_model, [input_data])
    self.assertEqual(expected_value.numpy(), actual_value)
Example #45
  def test_assets(self):
    file1 = self._make_asset("contents 1")
    file2 = self._make_asset("contents 2")

    root = tracking.AutoCheckpointable()
    root.asset1 = tracking.TrackableAsset(file1)
    root.asset2 = tracking.TrackableAsset(file2)

    save_dir = os.path.join(self.get_temp_dir(), "save_dir")
    save.save(root, save_dir, signatures={})

    file_io.delete_file(file1)
    file_io.delete_file(file2)
    load_dir = os.path.join(self.get_temp_dir(), "load_dir")
    file_io.rename(save_dir, load_dir)

    imported = load.load(load_dir)
    with open(imported.asset1.asset_path.numpy(), "r") as f:
      self.assertEquals("contents 1", f.read())
    with open(imported.asset2.asset_path.numpy(), "r") as f:
      self.assertEquals("contents 2", f.read())
Example #46
  def convert(self):
    """Convert the input SavedModel in 2.0 format.

    Returns:
      The TF-TRT converted Function.
    """
    assert not self._converted
    self._saved_model = load.load(self._input_saved_model_dir,
                                  self._input_saved_model_tags)
    func = self._saved_model.signatures[self._input_saved_model_signature_key]
    frozen_func = convert_to_constants.convert_variables_to_constants_v2(func)
    grappler_meta_graph_def = saver.export_meta_graph(
        graph_def=frozen_func.graph.as_graph_def(), graph=frozen_func.graph)

    # Add a collection 'train_op' so that Grappler knows the outputs.
    fetch_collection = meta_graph_pb2.CollectionDef()
    for array in frozen_func.inputs + frozen_func.outputs:
      fetch_collection.node_list.value.append(array.name)
    grappler_meta_graph_def.collection_def["train_op"].CopyFrom(
        fetch_collection)

    # Run TRT optimizer in Grappler to convert the graph.
    self._converted_graph_def = self._run_conversion(grappler_meta_graph_def)
    self._converted_func = wrap_function.function_from_graph_def(
        self._converted_graph_def,
        [tensor.name for tensor in frozen_func.inputs],
        [tensor.name for tensor in frozen_func.outputs])

    self._converted = True

    # Wrap the converted ConcreteFunction in a Function so it can accept numpy
    # arrays as input.
    @def_function.function
    def wrapper_func(*args, **kwargs):
      return self._converted_func(*args, **kwargs)

    return wrapper_func
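A hedged sketch of the public workflow this `convert()` implements, using the `TrtGraphConverterV2` entry point seen in the other examples (paths are hypothetical):

from tensorflow.python.compiler.tensorrt import trt_convert

converter = trt_convert.TrtGraphConverterV2(
    input_saved_model_dir="/tmp/my_saved_model")
trt_func = converter.convert()
converter.save("/tmp/my_trt_saved_model")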
Example #47
  def _convert_saved_model_v2(self):
    """Convert the input SavedModel in 2.0 format."""
    assert context.executing_eagerly()

    self._saved_model = load.load(self._input_saved_model_dir,
                                  self._input_saved_model_tags)
    func = self._saved_model.signatures[self._input_saved_model_signature_key]
    frozen_func = convert_to_constants.convert_variables_to_constants_v2(func)
    self._grappler_meta_graph_def = saver.export_meta_graph(
        graph_def=frozen_func.graph.as_graph_def(), graph=frozen_func.graph)

    # Add a collection 'train_op' so that Grappler knows the outputs.
    fetch_collection = meta_graph_pb2.CollectionDef()
    for array in frozen_func.inputs + frozen_func.outputs:
      fetch_collection.node_list.value.append(array.name)
    self._grappler_meta_graph_def.collection_def["train_op"].CopyFrom(
        fetch_collection)

    # Run TRT optimizer in Grappler to convert the graph.
    self._run_conversion()
    self._converted_func = wrap_function.function_from_graph_def(
        self._converted_graph_def,
        [tensor.name for tensor in frozen_func.inputs],
        [tensor.name for tensor in frozen_func.outputs])
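The _run_conversion step is implemented elsewhere in the converter. A
minimal sketch, assuming it hands the MetaGraphDef to Grappler with only
the TF-TRT rewriter enabled (the "TensorRTOptimizer" registration name and
the config plumbing are assumptions):

from tensorflow.core.protobuf import config_pb2
from tensorflow.python.grappler import tf_optimizer

def run_trt_grappler_pass(grappler_meta_graph_def):
  """Assumed shape of _run_conversion: one Grappler pass running TF-TRT."""
  config = config_pb2.ConfigProto()
  custom_optimizer = (
      config.graph_options.rewrite_options.custom_optimizers.add())
  custom_optimizer.name = "TensorRTOptimizer"  # assumed registration name
  # Returns the rewritten GraphDef with TRTEngineOp nodes where possible.
  return tf_optimizer.OptimizeGraph(
      config, grappler_meta_graph_def, graph_id=b"tf_graph")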
Example No. 48
  def testTrtGraphConverter_BasicConversion_v2(self):
    """Test case for trt_convert.TrtGraphConverter()."""
    if not is_tensorrt_enabled():
      return

    np_input = np.random.random_sample([4, 1, 1]).astype(np.float32)

    # Create a model and save it.
    input_saved_model_dir = tempfile.mkdtemp(dir=self.get_temp_dir())
    root = self._GetModelForV2()
    expected_output = root.run(np_input)
    save.save(root, input_saved_model_dir,
              {_SAVED_MODEL_SIGNATURE_KEY: root.run})

    # Run TRT conversion.
    converter = trt_convert.TrtGraphConverterV2(
        input_saved_model_dir=input_saved_model_dir,
        input_saved_model_signature_key=_SAVED_MODEL_SIGNATURE_KEY,
        conversion_params=trt_convert.DEFAULT_TRT_CONVERSION_PARAMS._replace(
            precision_mode=trt_convert.TrtPrecisionMode.FP32,
            is_dynamic_op=True,
            maximum_cached_engines=2,
            use_function_backup=False))
    converted_func = converter.convert()

    def _check_trt_ops(graph_def):
      trt_op_names = [
          node.name for node in graph_def.node if node.op == "TRTEngineOp"
      ]
      for func in graph_def.library.function:
        for node in func.node_def:
          if node.op == "TRTEngineOp":
            trt_op_names.append(node.name)
      self.assertEqual(1, len(trt_op_names))
      self.assertIn("TRTEngineOp_0", trt_op_names[0])

    # Verify the converted GraphDef and ConcreteFunction.
    self.assertIsInstance(converted_func, def_function.Function)
    converted_concrete_func = converted_func.get_concrete_function(
        tensor_spec.TensorSpec(shape=[None, 1, 1], dtype=dtypes.float32))
    _check_trt_ops(converted_concrete_func.graph.as_graph_def())

    # Save the converted model without any TRT engine cache.
    output_saved_model_dir = tempfile.mkdtemp(dir=self.get_temp_dir())
    converter.save(output_saved_model_dir)
    unexpected_asset_file = os.path.join(
        output_saved_model_dir, "assets/trt-serialized-engine.TRTEngineOp_0")
    self.assertFalse(os.path.exists(unexpected_asset_file))

    # Run the converted function to populate the engine cache.
    output_with_trt = converted_func(np_input)
    self.assertEqual(1, len(output_with_trt))
    self.assertAllClose(
        expected_output, output_with_trt[0], atol=1e-6, rtol=1e-6)

    # Save the converted model again with serialized engine cache.
    output_saved_model_dir = tempfile.mkdtemp(dir=self.get_temp_dir())
    converter.save(output_saved_model_dir)
    expected_asset_file = os.path.join(
        output_saved_model_dir, "assets/trt-serialized-engine.TRTEngineOp_0")
    self.assertTrue(os.path.exists(expected_asset_file))
    self.assertTrue(os.path.getsize(expected_asset_file))

    # Load and verify the converted model.
    #
    # TODO(laigd): the name of the new input_signature of the
    # `root_with_trt.run` function is an empty string (originally it was
    # None); investigate why.
    root_with_trt = load.load(output_saved_model_dir)
    # TODO(laigd): `root_with_trt.run` still uses the original graph without
    # TRT. Consider changing that.
    # _check_trt_ops(
    #     root_with_trt.run.get_concrete_function().graph.as_graph_def())
    converted_signature = root_with_trt.signatures[_SAVED_MODEL_SIGNATURE_KEY]
    _check_trt_ops(converted_signature.graph.as_graph_def())
    output_with_trt = converted_signature(ops.convert_to_tensor(np_input))
    # The output of running the converted signature is a dict, for
    # compatibility with the V1 SavedModel signature mechanism.
    output_with_trt = output_with_trt[list(output_with_trt.keys())[0]]
    self.assertAllClose(expected_output, output_with_trt, atol=1e-6, rtol=1e-6)
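The _GetModelForV2 helper is defined elsewhere in the test. A minimal
sketch, assuming a trackable whose run method matches the [None, 1, 1]
float32 spec used above; the real model must produce exactly one
TRT-convertible segment, and the ops below (plus the math_ops import) are
assumptions:

  def _GetModelForV2(self):

    class SimpleModel(tracking.AutoTrackable):

      @def_function.function(input_signature=[
          tensor_spec.TensorSpec(shape=[None, 1, 1], dtype=dtypes.float32)
      ])
      def run(self, inp):
        # Two fusable elementwise ops, so TF-TRT has a segment to convert.
        return math_ops.multiply(inp, inp) + inp

    return SimpleModel()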
Example No. 49
  def func(x):
    if not hasattr(closure, "model"):
      closure.model = load.load(path)
    return closure.model.f(x)
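This fragment relies on closure and path coming from an enclosing scope. A
self-contained sketch of that lazy-load-and-cache pattern, assuming closure
is a plain namespace object and path points at a SavedModel directory (both
are assumptions):

import types

closure = types.SimpleNamespace()  # any mutable holder works here
path = "/tmp/saved_model"          # hypothetical SavedModel directory

def func(x):
  # Load the SavedModel lazily, once, and cache it across calls.
  if not hasattr(closure, "model"):
    closure.model = load.load(path)
  return closure.model.f(x)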
Example No. 50
  def test_restore_output_shapes(self):
    saved = self._v1_output_shape_saved_model()
    imported = load.load(saved)
    fn = imported.signatures["serving_default"]
    self.assertEqual(tensor_shape.TensorShape([1]), fn.outputs[0].shape)
Example No. 51
  def test_version_info(self):
    path = self._signature_with_no_inputs()
    imported = load.load(path)
    self.assertEqual(versions.__version__, imported.tensorflow_version)
    self.assertEqual(versions.__git_version__,
                     imported.tensorflow_git_version)
Example No. 52
  def test_unfed_placeholder_exception(self):
    path = self._unfed_placeholder_signature()
    with self.assertRaisesRegex(
        lift_to_graph.UnliftableError,
        "signature needs an input for each placeholder.*\n\nUnable to lift"):
      load.load(path)
Example No. 53
  def test_custom_pruning(self):
    path = self._no_signatures_model()
    root = load.load(path)
    fn = root.prune("x:0", "out:0")
    self.assertEqual(2., self.evaluate(fn(x=array_ops.ones([]))))
    root.graph.as_graph_element("x:0")
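The _no_signatures_model helper (also used in Example No. 58) is not shown.
A plausible sketch, assuming it exports a v1 graph with tensors "x:0" and
"out:0" and no signature defs; the doubling op matches the assertion above,
but the builder plumbing and the session, builder, and tag_constants
imports are assumptions:

from tensorflow.python.client import session
from tensorflow.python.saved_model import builder
from tensorflow.python.saved_model import tag_constants

def export_no_signatures_model(export_dir):
  """Assumed shape of _no_signatures_model: a v1 export with no signatures."""
  with ops.Graph().as_default():
    x = array_ops.placeholder(dtypes.float32, shape=[], name="x")
    math_ops.add(x, x, name="out")  # ones([]) + ones([]) == 2.
    with session.Session() as sess:
      b = builder.SavedModelBuilder(export_dir)
      b.add_meta_graph_and_variables(sess, [tag_constants.SERVING])
      b.save()
  return export_dir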
Example No. 54
  def cycle(self, obj, signatures=None):
    path = tempfile.mkdtemp(prefix=self.get_temp_dir())
    save.save(obj, path, signatures=signatures or {})
    return load.load(path)
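A hedged usage sketch of the cycle helper; the round-tripped model and the
expected values are illustrative only:

  def test_cycle_roundtrip(self):
    root = tracking.AutoTrackable()
    root.f = def_function.function(lambda x: 2. * x)
    root.f(constant_op.constant(1.))  # trace once so the function is saved
    imported = self.cycle(root)
    self.assertEqual(4., imported.f(constant_op.constant(2.)).numpy())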
Example No. 55
  def test_ref_variable_import(self):
    saved = self._v1_single_metagraph_saved_model(use_resource=False)
    imported = load.load(saved)
    fn = imported.signatures["serving_default"]
    self.assertEqual(6., fn(start=constant_op.constant(2.))["output"].numpy())
Example No. 56
  def test_cond(self):
    first_path = self._v1_cond_saved_model()
    imported = load.load(first_path)
    function = imported.signatures["serving_default"]
    self.assertAllClose({"output": 1.}, function(constant_op.constant(True)))
    self.assertAllClose({"output": 0.}, function(constant_op.constant(False)))
Example No. 57
  def test_nested_while(self):
    first_path = self._v1_nested_while_saved_model()
    imported = load.load(first_path)
    function = imported.signatures["serving_default"]
    self.assertAllClose({"output": 20}, function(constant_op.constant(4)))
    self.assertAllClose({"output": 35}, function(constant_op.constant(5)))
Example No. 58
  def test_no_signature(self):
    path = self._no_signatures_model()
    imported = load.load(path)
    self.assertEqual([], list(imported.signatures.keys()))
Example No. 59
  def test_signature_with_no_inputs(self):
    path = self._signature_with_no_inputs()
    imported = load.load(path)
    self.assertEqual([2], imported.signatures["key"]()["value"].shape)