def _get_checkpoint_name(self, name):
  """Returns the serialized checkpoint key for one variable named `name`.

  Builds a fresh AutoCheckpointable root, attaches a single float64
  variable to it, serializes the object graph, and returns the resulting
  checkpoint name. Also sanity-checks that the name is usable as a
  prefixed op name.
  """
  owner = tracking.AutoCheckpointable()
  checkpointable_utils.add_variable(
      owner, name=name, shape=[1, 2], dtype=dtypes.float64)
  saveables, _, _ = checkpointable_utils._serialize_object_graph(
      owner, saveables_cache=None)
  # Exactly one saveable is expected; unpacking enforces that.
  (saveable,) = saveables
  # Make sure we can use this as an op name if we prefix it.
  with ops.name_scope("root/" + saveable.name):
    pass
  return saveable.name
 def _get_checkpoint_name(self, name):
   root = tracking.AutoCheckpointable()
   checkpointable_utils.add_variable(
       root, name=name, shape=[1, 2], dtype=dtypes.float64)
   (named_variable,), _, _ = checkpointable_utils._serialize_object_graph(
       root, saveables_cache=None)
   with ops.name_scope("root/" + named_variable.name):
     pass  # Make sure we can use this as an op name if we prefix it.
   return named_variable.name
 def testNamingWithOptimizer(self):
   input_value = constant_op.constant([[3.]])
   model = MyModel()
   # A nuisance Model using the same optimizer. Its slot variables should not
   # go in the checkpoint, since it is never depended on.
   other_model = MyModel()
   optimizer = adam.AdamOptimizer(0.001)
   optimizer_step = training_util.get_or_create_global_step()
   root_checkpointable = checkpointable_utils.Checkpoint(
       optimizer=optimizer, model=model, optimizer_step=optimizer_step)
   if context.executing_eagerly():
     optimizer.minimize(
         lambda: model(input_value),
         global_step=optimizer_step)
     optimizer.minimize(
         lambda: other_model(input_value),
         global_step=optimizer_step)
   else:
     train_op = optimizer.minimize(
         model(input_value), global_step=optimizer_step)
     optimizer.minimize(
         other_model(input_value),
         global_step=optimizer_step)
     self.evaluate(checkpointable_utils.gather_initializers(
         root_checkpointable))
     self.evaluate(train_op)
   named_variables, serialized_graph, _ = (
       checkpointable_utils._serialize_object_graph(
           root_checkpointable, saveables_cache=None))
   expected_checkpoint_names = (
       # Created in the root node, so no prefix.
       "optimizer_step",
       "model/_second/kernel",
       "model/_named_dense/kernel",
       "model/_named_dense/bias",
       # non-Layer dependency of the model
       "model/_non_layer/a_variable",
       # The optimizer creates two non-slot variables
       "optimizer/beta1_power",
       "optimizer/beta2_power",
       # Slot variables
       "model/_second/kernel/.OPTIMIZER_SLOT/optimizer/m",
       "model/_second/kernel/.OPTIMIZER_SLOT/optimizer/v",
       "model/_named_dense/kernel/.OPTIMIZER_SLOT/optimizer/m",
       "model/_named_dense/kernel/.OPTIMIZER_SLOT/optimizer/v",
       "model/_named_dense/bias/.OPTIMIZER_SLOT/optimizer/m",
       "model/_named_dense/bias/.OPTIMIZER_SLOT/optimizer/v",
   )
   suffix = "/.ATTRIBUTES/VARIABLE_VALUE"
   expected_checkpoint_names = [
       name + suffix for name in expected_checkpoint_names]
   # The Dense layers also save get_config() JSON
   expected_checkpoint_names.extend(
       ["model/_second/.ATTRIBUTES/OBJECT_CONFIG_JSON",
        "model/_named_dense/.ATTRIBUTES/OBJECT_CONFIG_JSON"])
   named_variables = {v.name: v for v in named_variables}
   six.assertCountEqual(self, expected_checkpoint_names,
                        named_variables.keys())
   # Check that we've mapped to the right variable objects (not exhaustive)
   self.assertEqual(
       "global_step",
       named_variables["optimizer_step" + suffix].full_name)
   self.assertEqual(
       "my_model/dense_1/kernel",
       named_variables["model/_second/kernel" + suffix].full_name)
   self.assertEqual(
       "my_model/dense/kernel",
       named_variables["model/_named_dense/kernel" + suffix].full_name)
   self.assertEqual(
       "beta1_power",
       named_variables["optimizer/beta1_power" + suffix].full_name)
   self.assertEqual(
       "beta2_power",
       named_variables["optimizer/beta2_power" + suffix].full_name)
   # Spot check the generated protocol buffers.
   self.assertEqual("optimizer",
                    serialized_graph.nodes[0].children[1].local_name)
   optimizer_node = serialized_graph.nodes[serialized_graph.nodes[0].children[
       1].node_id]
   self.assertEqual("beta1_power",
                    optimizer_node.children[0].local_name)
   self.assertEqual("beta1_power",
                    serialized_graph.nodes[optimizer_node.children[0].node_id]
                    .attributes[0].full_name)
   self.assertEqual(
       "my_model/dense/kernel",
       serialized_graph.nodes[optimizer_node.slot_variables[0]
                              .original_variable_node_id]
       .attributes[0].full_name)
   # We strip off the :0 suffix, as variable.name-based saving does.
   self.assertEqual(
       "my_model/dense/kernel/Adam",
       serialized_graph.nodes[optimizer_node.slot_variables[0]
                              .slot_variable_node_id]
       .attributes[0].full_name)
   self.assertEqual(
       "my_model/dense/kernel/Adam:0",
       optimizer.get_slot(
           var=model._named_dense.kernel,
           name="m").name)
   self.assertEqual(
       "model/_named_dense/kernel" + suffix,
       serialized_graph.nodes[
           optimizer_node.slot_variables[0]
           .original_variable_node_id].attributes[0].checkpoint_key)
   self.assertEqual("m", optimizer_node.slot_variables[0].slot_name)
   self.assertEqual(
       "model/_named_dense/kernel/.OPTIMIZER_SLOT/optimizer/m" + suffix,
       serialized_graph.nodes[
           optimizer_node.slot_variables[0]
           .slot_variable_node_id].attributes[0].checkpoint_key)
 def testNamingWithOptimizer(self):
     input_value = constant_op.constant([[3.]])
     model = MyModel()
     # A nuisance Model using the same optimizer. Its slot variables should not
     # go in the checkpoint, since it is never depended on.
     other_model = MyModel()
     optimizer = adam.AdamOptimizer(0.001)
     optimizer_step = training_util.get_or_create_global_step()
     root_checkpointable = checkpointable_utils.Checkpoint(
         optimizer=optimizer, model=model, optimizer_step=optimizer_step)
     if context.executing_eagerly():
         optimizer.minimize(lambda: model(input_value),
                            global_step=optimizer_step)
         optimizer.minimize(lambda: other_model(input_value),
                            global_step=optimizer_step)
     else:
         train_op = optimizer.minimize(model(input_value),
                                       global_step=optimizer_step)
         optimizer.minimize(other_model(input_value),
                            global_step=optimizer_step)
         self.evaluate(
             checkpointable_utils.gather_initializers(root_checkpointable))
         self.evaluate(train_op)
     named_variables, serialized_graph, _ = (
         checkpointable_utils._serialize_object_graph(root_checkpointable,
                                                      saveables_cache=None))
     expected_checkpoint_names = (
         # Created in the root node, so no prefix.
         "optimizer_step",
         "model/_second/kernel",
         "model/_named_dense/kernel",
         "model/_named_dense/bias",
         # non-Layer dependency of the model
         "model/_non_layer/a_variable",
         # The optimizer creates two non-slot variables
         "optimizer/beta1_power",
         "optimizer/beta2_power",
         # Slot variables
         "model/_second/kernel/.OPTIMIZER_SLOT/optimizer/m",
         "model/_second/kernel/.OPTIMIZER_SLOT/optimizer/v",
         "model/_named_dense/kernel/.OPTIMIZER_SLOT/optimizer/m",
         "model/_named_dense/kernel/.OPTIMIZER_SLOT/optimizer/v",
         "model/_named_dense/bias/.OPTIMIZER_SLOT/optimizer/m",
         "model/_named_dense/bias/.OPTIMIZER_SLOT/optimizer/v",
     )
     suffix = "/.ATTRIBUTES/VARIABLE_VALUE"
     expected_checkpoint_names = [
         name + suffix for name in expected_checkpoint_names
     ]
     # The Dense layers also save get_config() JSON
     expected_checkpoint_names.extend([
         "model/_second/.ATTRIBUTES/OBJECT_CONFIG_JSON",
         "model/_named_dense/.ATTRIBUTES/OBJECT_CONFIG_JSON"
     ])
     named_variables = {v.name: v for v in named_variables}
     six.assertCountEqual(self, expected_checkpoint_names,
                          named_variables.keys())
     # Check that we've mapped to the right variable objects (not exhaustive)
     self.assertEqual("global_step",
                      named_variables["optimizer_step" + suffix].full_name)
     self.assertEqual(
         "my_model/dense_1/kernel",
         named_variables["model/_second/kernel" + suffix].full_name)
     self.assertEqual(
         "my_model/dense/kernel",
         named_variables["model/_named_dense/kernel" + suffix].full_name)
     self.assertEqual(
         "beta1_power",
         named_variables["optimizer/beta1_power" + suffix].full_name)
     self.assertEqual(
         "beta2_power",
         named_variables["optimizer/beta2_power" + suffix].full_name)
     # Spot check the generated protocol buffers.
     self.assertEqual("optimizer",
                      serialized_graph.nodes[0].children[1].local_name)
     optimizer_node = serialized_graph.nodes[
         serialized_graph.nodes[0].children[1].node_id]
     self.assertEqual("beta1_power", optimizer_node.children[0].local_name)
     self.assertEqual(
         "beta1_power", serialized_graph.nodes[
             optimizer_node.children[0].node_id].attributes[0].full_name)
     self.assertEqual(
         "my_model/dense/kernel",
         serialized_graph.nodes[optimizer_node.slot_variables[
             0].original_variable_node_id].attributes[0].full_name)
     # We strip off the :0 suffix, as variable.name-based saving does.
     self.assertEqual(
         "my_model/dense/kernel/Adam",
         serialized_graph.nodes[optimizer_node.slot_variables[
             0].slot_variable_node_id].attributes[0].full_name)
     self.assertEqual(
         "my_model/dense/kernel/Adam:0",
         optimizer.get_slot(var=model._named_dense.kernel, name="m").name)
     self.assertEqual(
         "model/_named_dense/kernel" + suffix,
         serialized_graph.nodes[optimizer_node.slot_variables[
             0].original_variable_node_id].attributes[0].checkpoint_key)
     self.assertEqual("m", optimizer_node.slot_variables[0].slot_name)
     self.assertEqual(
         "model/_named_dense/kernel/.OPTIMIZER_SLOT/optimizer/m" + suffix,
         serialized_graph.nodes[optimizer_node.slot_variables[
             0].slot_variable_node_id].attributes[0].checkpoint_key)