Example #1
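  # Inner wrapper returned by the tracking decorator; `method`, `vs`,
  # `base_layer`, and `_EagerVariableStore` are names from the enclosing
  # TF source file and are not part of this excerpt.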
  def _method_wrapper(self, *args, **kwargs):
    var_store = getattr(self, "_tf1_style_var_store", None)
    if not var_store:
      if not isinstance(self, tf.Module):
        # Raise an error if you incorrectly decorate a method
        # that is not a method of a Module, Layer, or Model:
        raise ValueError(
            "`@tf.compat.v1.keras.utils.track_tf1_layers_and_variables` must "
            "be applied to a method of a subclassed `tf.Module`, "
            "`tf.keras.layers.Layer`, or `tf.keras.Model` that takes "
            "`self` as the first argument. But the first argument passed "
            "to the decorated method was {}, which does not "
            "extend Module, Layer, or Model.".format(self))
      var_store = _EagerVariableStore()
      self._tf1_style_var_store = var_store  # pylint: disable=protected-access

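    # Snapshot the variables that already have regularizers, then run the
    # wrapped method with this instance's variable store installed as the
    # default store for TF1-style get_variable calls.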
    existing_regularized_variables = set(var_store._regularizers.keys())  # pylint: disable=protected-access
    with vs.with_variable_store(var_store):
      out = method(self, *args, **kwargs)

    # If this is a layer method, add the regularization losses
    # to the layer for any newly-created regularized variables
    if isinstance(self, base_layer.Layer):
      for var_name, regularizer in var_store._regularizers.items():  # pylint: disable=protected-access
        if var_name not in existing_regularized_variables:
          self.add_loss(regularizer)

    return out
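
For context, here is a minimal usage sketch of a decorator like the one above. The decorator name is copied from the error message in this example (recent TF releases expose the equivalent public decorator as `tf.compat.v1.keras.utils.track_tf1_style_variables`), and the `DenseShim` layer, its parameter names, and the regularizer are illustrative assumptions rather than part of the original example:

import tensorflow as tf

class DenseShim(tf.keras.layers.Layer):
  """Hypothetical layer that uses TF1-style get_variable calls in call()."""

  def __init__(self, units):
    super().__init__()
    self.units = units

  # Decorator name assumed from the error message above; it may differ by TF version.
  @tf.compat.v1.keras.utils.track_tf1_layers_and_variables
  def call(self, inputs):
    # get_variable calls are captured by the per-instance _EagerVariableStore
    # installed by the wrapper, so repeated calls reuse the same variables and
    # any regularization losses are forwarded to self.add_loss().
    w = tf.compat.v1.get_variable(
        "kernel", shape=[int(inputs.shape[-1]), self.units],
        regularizer=tf.keras.regularizers.l2(1e-4))
    b = tf.compat.v1.get_variable(
        "bias", shape=[self.units], initializer=tf.zeros_initializer())
    return tf.matmul(inputs, w) + b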
Example #2
    def test_eager_delayed_store_pickup(self):
        """This test shows a very simple line model with test_loss in eager mode.

        The template is used to share parameters between a training and test model.

        This test also shows that a template can pick up an explicitly set
        variable store even if it is only set before the first template usage.
        """
        with context.eager_mode():
            training_input, training_output = ([1., 2., 3.,
                                                4.], [2.8, 5.1, 7.2, 8.7])
            test_input, test_output = ([5., 6., 7., 8.], [11, 13, 15, 17])

            random_seed.set_random_seed(1234)

            def test_line(x):
                m = variable_scope.get_variable(
                    "w",
                    shape=[],
                    initializer=init_ops.truncated_normal_initializer())
                b = variable_scope.get_variable(
                    "b",
                    shape=[],
                    initializer=init_ops.truncated_normal_initializer())
                return x * m + b

            line_template = template.make_template("line", test_line)

            def train_loss():
                train_prediction = line_template(training_input)
                return math_ops.reduce_mean(
                    math_ops.square(train_prediction - training_output))

            def test_loss():
                test_prediction = line_template(test_input)
                return math_ops.reduce_mean(
                    math_ops.square(test_prediction - test_output))

            store = variable_scope._VariableStore()
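            # Record eagerly created variables in the store, keyed by full variable name.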
            store._store_eager_variables = True

            with variable_scope.with_variable_store(store):
                optimizer = gradient_descent.GradientDescentOptimizer(0.1)
                initial_test_loss = test_loss()
                optimizer.minimize(train_loss)
                final_test_loss = test_loss()

                # Parameters are tied, so the loss should have gone down after training.
                self.assertLess(final_test_loss.numpy(),
                                initial_test_loss.numpy())

            # Verify that the explicitly set store is not empty
            # and the make_template picked it up
            self.assertEqual(set(store._vars.keys()), {"line/w", "line/b"})

            # But the store should only get picked up once, so a second
            # store will go unused:
            second_store = variable_scope._VariableStore()
            second_store._store_eager_variables = True

            with variable_scope.with_variable_store(second_store):
                optimizer = gradient_descent.GradientDescentOptimizer(0.1)
                test_loss()
                optimizer.minimize(train_loss)
                test_loss()
            self.assertEmpty(second_store._vars)
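
Outside the test harness, the same "explicitly set store" pattern looks roughly like the sketch below. It relies on the same private internals the test uses (`_VariableStore`, `_store_eager_variables`, `_vars`), so treat it as illustrative rather than stable API; the `line` function and constants are placeholders:

import tensorflow as tf
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import template
from tensorflow.python.ops import variable_scope

store = variable_scope._VariableStore()
store._store_eager_variables = True  # record eagerly created variables by name

def line(x):
  m = variable_scope.get_variable(
      "w", shape=[], initializer=init_ops.truncated_normal_initializer())
  b = variable_scope.get_variable(
      "b", shape=[], initializer=init_ops.truncated_normal_initializer())
  return x * m + b

line_template = template.make_template("line", line)

# The template picks up whatever store is active at its first call.
with variable_scope.with_variable_store(store):
  train_pred = line_template(tf.constant([1., 2., 3., 4.]))  # creates line/w and line/b
  test_pred = line_template(tf.constant([5., 6., 7., 8.]))   # reuses the same variables

print(sorted(store._vars.keys()))  # ['line/b', 'line/w']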
Example #3
  def scope(self):
    with vs.with_variable_store(self):
      yield
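
A sketch of how a scope() helper like the one above is typically used, assuming it is declared with @contextlib.contextmanager on a subclass of the internal _VariableStore (the MyEagerStore class and variable names below are hypothetical):

import contextlib

import tensorflow as tf
from tensorflow.python.ops import variable_scope as vs

class MyEagerStore(vs._VariableStore):

  def __init__(self):
    super().__init__()
    self._store_eager_variables = True  # keep eagerly created variables in _vars

  @contextlib.contextmanager
  def scope(self):
    with vs.with_variable_store(self):
      yield

store = MyEagerStore()
with store.scope():
  # get_variable calls inside the scope are recorded in `store._vars`.
  v = tf.compat.v1.get_variable(
      "v", shape=[2], initializer=tf.zeros_initializer())

print(sorted(store._vars.keys()))  # ['v']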
Example #4
  def scope(self):
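    # Enter two nested contexts: a variable creator scope, so any variables
    # created inside are routed through self._variable_creator (e.g. so the
    # owning object can track them), and a variable store scope, so TF1-style
    # get_variable lookups go to self._var_store.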
    with tf.variable_creator_scope(
        self._variable_creator), vs.with_variable_store(self._var_store):
      yield