Example #1
def models():
    tf.keras.backend.set_learning_phase(False)
    tf_test_utils.set_random_seed()

    input_shape = get_input_shape(FLAGS.data)
    # The Keras model takes the image size as input; the batch size is not
    # specified and is dynamic by default.
    if FLAGS.model in APP_MODELS:
        weights = 'imagenet' if FLAGS.data == 'imagenet' else None
        # TODO(rybakov): add include_top=True once
        # https://github.com/google/iree/issues/1660 is fixed.

        # If weights == 'imagenet', the weights are loaded from the external tf.keras URL.
        model = APP_MODELS[FLAGS.model](weights=weights,
                                        input_shape=input_shape[1:])

        if FLAGS.data == 'cifar10' and FLAGS.url:
            file_name = 'cifar10' + FLAGS.model
            # Download the model weights from the publicly available folder (PATH),
            # cache them in cache_dir=~/.keras, and return the local path.
            weights_path = tf.keras.utils.get_file(
                file_name, os.path.join(FLAGS.url, file_name + '.h5'))
            model.load_weights(weights_path)
    else:
        raise ValueError('Unsupported model', FLAGS.model)

    module = tf.Module()
    module.m = model
    # Specify the input size with a static batch size.
    # TODO(b/142948097): once dynamic shapes are supported, replace input_shape
    # with model.input_shape so that the batch size becomes dynamic (-1).
    module.predict = tf.function(input_signature=[tf.TensorSpec(input_shape)])(
        model.call)
    return module
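
The wrapper pattern above (a Keras model attached to a tf.Module, with a tf.function exposing an explicit input signature) can be exported and invoked with plain TensorFlow. The following is a minimal sketch, not part of the source: it assumes the FLAGS and APP_MODELS configuration of the surrounding script, and the save path and (1, 224, 224, 3) input shape are illustrative.

import tensorflow as tf

module = models()
tf.saved_model.save(module, '/tmp/app_model')  # illustrative export path

loaded = tf.saved_model.load('/tmp/app_model')
# The static batch size baked into the input signature must be matched exactly;
# (1, 224, 224, 3) is an assumed shape, the real one comes from get_input_shape.
images = tf.zeros([1, 224, 224, 3], dtype=tf.float32)
predictions = loaded.predict(images)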
Example #2
    def __init__(self, build_fn, *args, also_track=None, **kwargs):
        """Defers initialization of an object with transformed arguments.

        Args:
          build_fn: Python callable specifying a deferred transformation of the
            provided arguments. This must have signature
            `module = build_fn(*args, **kwargs)`. The return value `module` is an
            instance of `tf.Module`.
          *args: Optional positional arguments to `build_fn`.
          also_track: Optional instance or structure of instances of `tf.Variable`
            and/or `tf.Module`, containing any additional trainable variables that
            the `build_fn` may access beyond the given `args` and `kwargs`. This
            ensures that such variables will be correctly tracked in
            `self.trainable_variables`.
            Default value: `None`.
          **kwargs: Optional keyword arguments to `build_fn`.
        """
        self._build_fn = build_fn
        self._param_args = args
        self._param_kwargs = kwargs
        self._deferred_module_also_track = also_track

        # In order for DeferredModule to work as a tf.Module, we need to ensure that
        # attrs used by tf.Module are handled directly, rather than being forwarded
        # to the inner class.
        self._module_attrs = set(dir(tf.Module()))
        super(DeferredModule, self).__init__()
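
A minimal usage sketch for the build_fn/also_track form documented above. It assumes the rest of the DeferredModule class (which lazily calls build_fn(*args, **kwargs) and forwards attribute access to the resulting module, as in TensorFlow Probability's tfp.experimental.util.DeferredModule); the Scaler class and variable names are illustrative.

import tensorflow as tf

class Scaler(tf.Module):
    """Toy module whose behavior depends on a transformed parameter."""

    def __init__(self, scale):
        super().__init__()
        self.scale = scale

    def __call__(self, x):
        return x * self.scale

raw_scale = tf.Variable(0.0, name='raw_scale')

def build_scaler(raw):
    # The positive scale is recomputed from `raw` each time the module is rebuilt.
    return Scaler(tf.nn.softplus(raw))

deferred = DeferredModule(build_scaler, raw_scale, also_track=raw_scale)
# `also_track` is what surfaces `raw_scale` in deferred.trainable_variables;
# the module returned by build_scaler only holds the derived, non-variable scale.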
Example #3
    def __init__(self, base_class, args_fn, *args, **kwargs):
        """Defers initialization of an object with transformed arguments.

        Args:
          base_class: Python type or callable such that `base_class(**args_fn(...))`
            is an instance of `tf.Module`, for example a TFP Distribution or
            Bijector.
          args_fn: Python callable specifying a deferred transformation of the
            provided arguments. This must have signature
            `base_class_init_args = args_fn(*args, **kwargs)`. The return value
            `base_class_init_args` may be either a dictionary or an iterable
            (list/tuple), in which case the class will be initialized as
            `base_class(**base_class_init_args)` or
            `base_class(*base_class_init_args)`, respectively.
          *args: Optional positional arguments to `args_fn`.
          **kwargs: Optional keyword arguments to `args_fn`.
        """
        self._base_class = base_class
        self._args_fn = args_fn
        self._param_args = args
        self._param_kwargs = kwargs

        # In order for DeferredModule to work as a tf.Module, we need to ensure that
        # attrs used by tf.Module are handled directly, rather than being forwarded
        # to the inner class.
        self._module_attrs = set(dir(tf.Module()))

        super(DeferredModule, self).__init__()
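
For the base_class/args_fn form, returning a dictionary from args_fn means the object is built as base_class(**args), as the docstring above describes. A small illustrative sketch, reusing the toy Scaler module from the previous sketch and again assuming the full DeferredModule implementation:

import tensorflow as tf

raw_scale = tf.Variable(0.0, name='raw_scale')

def scaler_args(raw):
    # Returning a dict: the object is constructed as Scaler(**scaler_args(raw)).
    return dict(scale=tf.nn.softplus(raw))

deferred = DeferredModule(Scaler, scaler_args, raw_scale)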
Example #4
    def _testConvertedFunction(self, obj, func, converted_concrete_func,
                               input_data):
        # Ensure the converted graph has no variables and no function calls.
        constant_graph_def = converted_concrete_func.graph.as_graph_def()
        self.assertEqual(0, self._getNumVariables(constant_graph_def))
        self.assertFalse(
            self._hasStatefulPartitionedCallOp(constant_graph_def))

        # Check that the converted ConcreteFunction produces the same result as the
        # original Function.
        expected_value = tf.nest.flatten(func(**input_data))
        actual_value = tf.nest.flatten(converted_concrete_func(**input_data))

        for expected, actual in zip(expected_value, actual_value):
            np.testing.assert_almost_equal(expected.numpy(), actual.numpy())

        # Ensure the shape is retained.
        for tensor in converted_concrete_func.inputs:
            actual_shape = input_data[tensor.name.split(":")[0]].shape
            self.assertEqual(tensor.shape, actual_shape)

        # Save the converted ConcreteFunction as a signature.
        save_dir = os.path.join(self.get_temp_dir(), "frozen_saved_model")
        root = tf.Module()
        root.f = converted_concrete_func
        save(root, save_dir, {"mykey": converted_concrete_func})

        # Load it back and make sure it works.
        loaded_obj = load(save_dir)
        actual_value = tf.nest.flatten(
            loaded_obj.signatures["mykey"](**input_data))
        for expected, actual in zip(expected_value, actual_value):
            np.testing.assert_almost_equal(expected.numpy(), actual.numpy())
Example #5
 def test_model_wrapped_in_module_discovers_submodules(self):
     linear = tf.keras.models.Sequential(
         [tf.keras.layers.Dense(units=1, input_shape=[1])])
     linear.compile(optimizer="sgd", loss="mean_squared_error")
     m = tf.Module()
     m.l = linear
     self.assertNotEmpty(m.submodules)
     self.assertLen(m.variables, 2)
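
A brief public-API sketch of why this discovery matters in practice: because the Sequential model is picked up as a submodule, its kernel and bias are reachable through the wrapping tf.Module and can be optimized directly. The toy data and learning rate below are illustrative.

import tensorflow as tf

linear = tf.keras.models.Sequential(
    [tf.keras.layers.Dense(units=1, input_shape=[1])])
m = tf.Module()
m.l = linear

x = tf.constant([[1.0], [2.0]])
y = tf.constant([[2.0], [4.0]])
opt = tf.keras.optimizers.SGD(learning_rate=0.1)
with tf.GradientTape() as tape:
    loss = tf.reduce_mean(tf.square(m.l(x) - y))
# m.trainable_variables contains the Dense kernel and bias discovered via m.l.
grads = tape.gradient(loss, m.trainable_variables)
opt.apply_gradients(zip(grads, m.trainable_variables))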
Example #6
 def testDictWrapperBadKeys(self):
   a = tf.Module()
   a.d = {}
   a.d[1] = data_structures.wrap_or_unwrap([])
   model = training.Model()
   model.sub = a
   save_path = os.path.join(self.get_temp_dir(), "ckpt")
   with self.assertRaisesRegex(ValueError, "non-string key"):
     model.save_weights(save_path)
Example #7
 def testDictWrapperNoDependency(self):
   a = tf.Module()
   a.d = data_structures.NoDependency({})
   a.d[1] = [3]
   self.assertEqual([a], util.list_objects(a))
   model = training.Model()
   model.sub = a
   save_path = os.path.join(self.get_temp_dir(), "ckpt")
   model.save_weights(save_path)
   model.load_weights(save_path)
Example #8
    def testNoDependency(self):
        root = tf.Module()
        hasdep = tf.Module()
        root.hasdep = hasdep
        nodep = tf.Module()
        root.nodep = data_structures.NoDependency(nodep)
        self.assertEqual(1, len(root._checkpoint_dependencies))
        self.assertIs(root._checkpoint_dependencies[0].ref, root.hasdep)
        self.assertIs(root.hasdep, hasdep)
        self.assertIs(root.nodep, nodep)

        class NoDependencyModel(training.Model):
            @base.no_automatic_dependency_tracking
            def __init__(self):
                super(NoDependencyModel, self).__init__()
                self.a = []
                self.b = tf.Module()

        nodeps = NoDependencyModel()
        self.assertEqual([nodeps], util.list_objects(nodeps))
Example #9
    def testNoDependency(self):
        root = tf.Module()
        hasdep = tf.Module()
        root.hasdep = hasdep
        nodep = tf.Module()
        root.nodep = data_structures.NoDependency(nodep)
        self.assertLen(root._trackable_children(), 1)
        self.assertIs(root._trackable_children()["hasdep"], root.hasdep)
        self.assertIs(root.hasdep, hasdep)
        self.assertIs(root.nodep, nodep)

        class NoDependencyModel(training.Model):
            @tf.__internal__.tracking.no_automatic_dependency_tracking
            def __init__(self):
                super(NoDependencyModel, self).__init__()
                self.a = []
                self.b = tf.Module()

        nodeps = NoDependencyModel()
        self.assertEqual([nodeps], util.list_objects(nodeps))
Example #10
 def testNoDepList(self):
   a = training.Model()
   a.l1 = data_structures.NoDependency([])
   a.l1.insert(1, 0)
   self.assertIsInstance(a.l1, list)
   checkpoint = tf.train.Checkpoint(a=a)
   checkpoint.save(os.path.join(self.get_temp_dir(), "ckpt"))
   a.l2 = []
   a.l2.insert(1, tf.Module())
   with self.assertRaisesRegex(ValueError, "A list element was replaced"):
     checkpoint.save(os.path.join(self.get_temp_dir(), "ckpt"))
Example #11
 def testNonAppendNotTrackable(self):
   # Non-append mutations (deleting or overwriting values) are OK when the
   # values aren't tracked.
   a = tf.Module()
   a.d = {}
   a.d["a"] = [3]
   a.d[1] = 3
   a.d[1] = 2
   self.assertEqual(2, a.d[1])
   del a.d[1]
   a.d[2] = data_structures.NoDependency(tf.Module())
   second = tf.Module()
   a.d[2] = data_structures.NoDependency(second)
   self.assertIs(second, a.d[2])
   self.assertEqual([a, a.d, a.d["a"]], util.list_objects(a))
   model = training.Model()
   model.sub = a
   save_path = os.path.join(self.get_temp_dir(), "ckpt")
   model.save_weights(save_path)
   model.load_weights(save_path)
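
The dict-tracking behavior these tests exercise can also be observed with public APIs only. A minimal sketch, assuming a writable /tmp path: a dict assigned to a tf.Module attribute is wrapped for tracking, and its string-keyed trackable values are saved and restored by tf.train.Checkpoint.

import tensorflow as tf

m = tf.Module()
m.d = {}                      # attribute assignment wraps the dict for tracking
m.d["w"] = tf.Variable(1.0)   # string key + trackable value: checkpointed

ckpt = tf.train.Checkpoint(m=m)
path = ckpt.save("/tmp/dict_tracking_demo")

m.d["w"].assign(5.0)
ckpt.restore(path)
print(m.d["w"].numpy())       # 1.0, the tracked dict entry was restored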
Example #12
 def testNonStringKeyNotTrackableValue(self):
   a = tf.Module()
   a.d = {}
   a.d["a"] = [3]
   a.d[1] = data_structures.NoDependency([3])
   self.assertEqual([a, a.d, a.d["a"]], util.list_objects(a))
   model = training.Model()
   model.sub = a
   save_path = os.path.join(self.get_temp_dir(), "ckpt")
   model.save_weights(save_path)
   model.load_weights(save_path)
Example #13
def lstm_module():
  tf_utils.set_random_seed()
  inputs = tf.keras.layers.Input(batch_size=NUM_BATCH, shape=INPUT_SHAPE[1:])
  outputs = tf.keras.layers.LSTM(units=NUM_UNITS, return_sequences=True)(inputs)
  model = tf.keras.Model(inputs, outputs)
  module = tf.Module()
  module.m = model
  module.predict = tf.function(
      input_signature=[tf.TensorSpec(INPUT_SHAPE, tf.float32)])(
          model.call)
  return module
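
A short illustrative call of the module built above, assuming the NUM_BATCH, INPUT_SHAPE and NUM_UNITS constants configured by the surrounding test file:

module = lstm_module()
sample = tf.random.uniform(INPUT_SHAPE, dtype=tf.float32)
# With return_sequences=True the output keeps the time dimension:
# (NUM_BATCH, time_steps, NUM_UNITS).
sequences = module.predict(sample)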
Example #14
 def testLayerCollectionWithExternalMutation(self):
   d = {}
   root = tf.Module()
   root.wrapper = d
   self.assertEqual([], root.wrapper.layers)
   self.assertEqual([], root.wrapper.trainable_weights)
   layer1 = core.Dense(1)
   layer2 = core.Dense(1)
   d["a"] = layer1
   d["b"] = layer2
   self.assertEqual([layer1, layer2], root.wrapper.layers)
    # The layers still have not created any variables.
   self.assertEqual([], root.wrapper.trainable_weights)
Example #15
    def _freezeModel(self, model):
        """Freezes the model.

        Args:
          model: Function.

        Returns:
          root: AutoTrackable object with original ConcreteFunction.
          output_func: frozen ConcreteFunction.
        """
        root = tf.Module()
        root.f = model
        input_func = root.f.get_concrete_function()

        output_func = convert_to_constants.convert_variables_to_constants_v2(
            input_func, lower_control_flow=False)
        return root, output_func
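
A standalone sketch of the freezing step this helper wraps, using the same private convert_to_constants module the test imports; the toy function and captured variable are illustrative.

import tensorflow as tf
from tensorflow.python.framework import convert_to_constants

v = tf.Variable(3.0)

@tf.function(input_signature=[tf.TensorSpec([None], tf.float32)])
def model(x):
    return x * v

root = tf.Module()
root.f = model
frozen_func = convert_to_constants.convert_variables_to_constants_v2(
    root.f.get_concrete_function(), lower_control_flow=False)
# The captured variable is now inlined as a constant in the frozen graph.
print(frozen_func(tf.constant([1.0, 2.0])))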
Example #16
    def test_module_discover_layer_variable(self):
        m = tf.Module()
        m.a = tf.keras.layers.Dense(1)
        m.b = tf.keras.layers.Dense(2)

        # The weights of the layers have not been created yet.
        self.assertEmpty(m.variables)
        self.assertLen(m.submodules, 2)

        inputs = tf.keras.layers.Input((1, ))
        m.a(inputs)
        m.b(inputs)

        variable_list = m.variables
        self.assertLen(variable_list, 4)
        self.assertIs(variable_list[0], m.a.kernel)
        self.assertIs(variable_list[1], m.a.bias)
        self.assertIs(variable_list[2], m.b.kernel)
        self.assertIs(variable_list[3], m.b.bias)
Example #17
def models():
    tf.keras.backend.set_learning_phase(False)

    # The Keras model takes the image size as input; the batch size is not
    # specified and is dynamic by default.
    if FLAGS.model in APP_MODELS:
        model = APP_MODELS[FLAGS.model](weights=None,
                                        include_top=False,
                                        input_shape=INPUT_SHAPE[1:])
    else:
        raise ValueError('unsupported model', FLAGS.model)

    module = tf.Module()
    module.m = model
    # Specify the input size with a static batch size.
    # TODO(b/142948097): once dynamic shapes are supported, replace INPUT_SHAPE
    # with model.input_shape so that the batch size becomes dynamic (-1).
    module.predict = tf.function(input_signature=[tf.TensorSpec(INPUT_SHAPE)])(
        model.call)
    return module
Example #18
    def __init__(self, build_fn, *args, **kwargs):
        """Defers initialization of an object with transformed arguments.

        Args:
          build_fn: Python callable specifying a deferred transformation of the
            provided arguments. This must have signature
            `module = build_fn(*args, **kwargs)`. The return value `module` is an
            instance of `tf.Module`.
          *args: Optional positional arguments to `build_fn`.
          **kwargs: Optional keyword arguments to `build_fn`.
        """
        self._build_fn = build_fn
        self._param_args = args
        self._param_kwargs = kwargs

        # In order for DeferredModule to work as a tf.Module, we need to ensure that
        # attrs used by tf.Module are handled directly, rather than being forwarded
        # to the inner class.
        self._module_attrs = set(dir(tf.Module()))

        super(DeferredModule, self).__init__()
Example #19
 def __init__(self):
     super().__init__()
     self.a = []
     self.b = tf.Module()
Example #20
 def __init__(self):
   super(NoDependencyModel, self).__init__()
   self.a = []
   self.b = tf.Module()