Example #1
 def construct_objective(self):
   with tf.name_scope("objective"):
     self.total_loss = 0
     self.total_constraints = 0
     self.loss_by_layer.clear()
     self.error_by_layer.clear()
     for name, layer in sorted(self.layers.items()):
       assert isinstance(layer, LayerBase)
       with reuse_name_scope(layer.tf_scope_name):
         loss = layer.get_loss_value()
         error = layer.get_error_value()
         constraints = layer.get_constraints_value()
         if loss is not None:
           tf.summary.scalar("loss_%s" % layer.name, loss)
         if error is not None:
           tf.summary.scalar("error_%s" % layer.name, error)
       if loss is not None:
         self.loss_by_layer[name] = loss
         self.total_loss += loss
       if error is not None:
         self.error_by_layer[name] = error
       if constraints is not None:
         self.total_constraints += constraints
     tf.summary.scalar("loss", self.total_loss)
     tf.summary.scalar("constraints", self.total_constraints)
     self.total_objective = self.total_loss + self.total_constraints
     tf.summary.scalar("objective", self.total_objective)
Example #2
import contextlib


@contextlib.contextmanager  # the yield-based body below is meant to be used as a context manager
def helper_variable_scope():
  """
  :rtype: tf.VariableScope
  """
  from TFUtil import reuse_name_scope
  with reuse_name_scope("IO", absolute=True) as scope:
    yield scope
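Since the helper yields the scope produced by reuse_name_scope("IO", absolute=True), a hedged usage sketch might look like this (the placeholder is illustrative; everything created inside lands under the absolute "IO" name scope):

# Sketch only: ops created in this block get names like "IO/data".
with helper_variable_scope():
  data_placeholder = tf.placeholder(tf.float32, shape=(None, None), name="data")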
Example #3
 def add_layer(self, name, layer_class, **layer_desc):
   """
   :param str name:
   :param (()->LayerBase)|LayerBase layer_class:
   """
   layer_desc = layer_desc.copy()
   assert "name" not in layer_desc
   assert "network" not in layer_desc
   assert "output" not in layer_desc
   layer_desc["name"] = name
   layer_desc["network"] = self
   debug_print_layer_output_template = self._config and self._config.bool("debug_print_layer_output_template", False)
   debug_print_layer_output_shape = self._config and self._config.bool("debug_print_layer_output_shape", False)
   with reuse_name_scope(layer_class.cls_get_tf_scope_name(name)):
     output = layer_class.get_out_data_from_opts(**layer_desc)
     if debug_print_layer_output_template:
       print("layer %r output: %r" % (name, output))
     layer = layer_class(output=output, **layer_desc)
     layer.post_init()
     if debug_print_layer_output_shape:
       layer.output.placeholder = tf.Print(
         layer.output.placeholder, [layer_class.cls_get_tf_scope_name(name), "shape:", tf.shape(layer.output.placeholder)],
         summarize=10, name="debug_print_layer_output_shape")
   assert layer.output
   assert layer.output.placeholder is not None
   assert layer.output.size_placeholder is not None
   self.layers[name] = layer
   if layer.recurrent:
     self.recurrent = True
   return layer
Example #4
 def get_var_assigner(self, var):
   """
   :param tf.Variable var:
   """
   if var in self._assigner_cache:
     return self._assigner_cache[var]
   with reuse_name_scope("var_assigner"):
     assigner = VariableAssigner(var)
   self._assigner_cache[var] = assigner
   return assigner
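A short usage sketch based only on the snippet above (network is assumed to be the containing network instance and var a tf.Variable):

# Sketch only: the assigner is cached per variable, so the underlying assign op is built once.
assigner_a = network.get_var_assigner(var)
assigner_b = network.get_var_assigner(var)
assert assigner_a is assigner_b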
Example #5
 def _add_layer(self, name, layer_class, **layer_desc):
     """
      :param str name:
      :param ()->LayerBase layer_class:
      """
     with reuse_name_scope(layer_class.cls_get_tf_scope_name(name)):
         layer = layer_class(name=name, network=self, **layer_desc)
     assert layer.output
     assert layer.output.placeholder is not None
     assert layer.output.size_placeholder is not None
     self.layers[name] = layer
     if layer.recurrent:
         self.recurrent = True
     return layer
Example #6
 def add_layer(self, name, layer_class, **layer_desc):
     """
      :param str name:
      :param (()->LayerBase)|LayerBase layer_class:
      """
     from Util import help_on_type_error_wrong_args
     layer_desc = layer_desc.copy()
     assert "name" not in layer_desc
     assert "network" not in layer_desc
     assert "output" not in layer_desc
     layer_desc["name"] = name
     layer_desc["network"] = self
     debug_print_layer_output_template = self._config and self._config.bool(
         "debug_print_layer_output_template", False)
     debug_print_layer_output_sizes = self._config and self._config.bool(
         "debug_print_layer_output_sizes", False)
     debug_print_layer_output_shape = self._config and self._config.bool(
         "debug_print_layer_output_shape", False)
     with reuse_name_scope(layer_class.cls_get_tf_scope_name(name)):
         try:
             output = layer_class.get_out_data_from_opts(**layer_desc)
             if debug_print_layer_output_template:
                 print("layer %r output: %r" % (name, output))
             layer = layer_class(output=output, **layer_desc)
         except TypeError:
             help_on_type_error_wrong_args(cls=layer_class,
                                           kwargs=list(layer_desc.keys()))
             raise
         layer.post_init()
         if debug_print_layer_output_sizes:
             print("layer %r output sizes: %r" %
                   (name, output.size_placeholder))
         if debug_print_layer_output_shape:
             layer.output.placeholder = tf.Print(
                 layer.output.placeholder, [
                     layer_class.cls_get_tf_scope_name(name), "shape:",
                     tf.shape(layer.output.placeholder)
                 ],
                 summarize=10,
                 name="debug_print_layer_output_shape")
     assert layer.output
     assert layer.output.placeholder is not None
     layer.output.placeholder.set_shape(layer.output.batch_shape)
     assert layer.output.size_placeholder is not None
     self.layers[name] = layer
     if layer.recurrent:
         self.recurrent = True
     return layer
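The three debug flags above are read from the config; a hedged sketch of the corresponding entries in a Python-style RETURNN config file (keys taken directly from the snippet, values illustrative):

# Sketch only: enable the layer-output debug printing shown in add_layer.
debug_print_layer_output_template = True
debug_print_layer_output_sizes = True
debug_print_layer_output_shape = True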
Example #7
 def add_layer(self, name, layer_class, **layer_desc):
   """
   :param str name:
   :param (()->LayerBase)|LayerBase layer_class:
   """
   layer_desc = layer_desc.copy()
   assert "name" not in layer_desc
   assert "network" not in layer_desc
   assert "output" not in layer_desc
   layer_desc["name"] = name
   layer_desc["network"] = self
   with reuse_name_scope(layer_class.cls_get_tf_scope_name(name)):
     output = layer_class.get_out_data_from_opts(**layer_desc)
     layer = layer_class(output=output, **layer_desc)
     layer.post_init()
   assert layer.output
   assert layer.output.placeholder is not None
   assert layer.output.size_placeholder is not None
   self.layers[name] = layer
   if layer.recurrent:
     self.recurrent = True
   return layer
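A hedged call sketch for add_layer (the layer class and its keyword options are illustrative and not guaranteed to match the actual RETURNN layer API):

# Sketch only: layer_desc kwargs are forwarded to the layer class;
# "name", "network" and "output" are filled in by add_layer itself.
layer = network.add_layer("output", SoftmaxLayer, n_out=num_classes, loss="ce")
assert network.layers["output"] is layer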