def __init__(self, save_steps=100, output_dir=None, summary_writer=None,
             scaffold=None, summary_op=None):
    """Initializes a `SummarySaver` monitor.

    Args:
      save_steps: `int`, save summaries every N steps. See `EveryN`.
      output_dir: `string`, directory to write the summaries to. Used only
        when no `summary_writer` is supplied.
      summary_writer: `SummaryWriter`. If `None` and an `output_dir` was
        passed, one is fetched from the writer cache for that directory.
      scaffold: `Scaffold` to get summary_op if it's not provided.
      summary_op: `Tensor` of type `string`. A serialized `Summary`
        protocol buffer, as output by TF summary methods like
        `scalar_summary` or `merge_all_summaries`.
    """
    # TODO(ipolosukhin): Implement every N seconds.
    self._summary_op = summary_op
    self._scaffold = scaffold
    self._save_steps = save_steps
    # Fall back to a cached writer for `output_dir` when none was given.
    if summary_writer is None and output_dir:
        summary_writer = SummaryWriterCache.get(output_dir)
    self._summary_writer = summary_writer
def __init__(self,
             checkpoint_dir,
             save_secs=None,
             save_steps=None,
             saver=None,
             checkpoint_basename="model.ckpt",
             scaffold=None):
    """Initialize CheckpointSaverHook monitor.

    Args:
      checkpoint_dir: `str`, base directory for the checkpoint files.
      save_secs: `int`, save every N secs. Mutually exclusive with
        `save_steps`.
      save_steps: `int`, save every N steps. Mutually exclusive with
        `save_secs`.
      saver: `Saver` object, used for saving.
      checkpoint_basename: `str`, base name for the checkpoint files.
      scaffold: `Scaffold`, use to get saver object.

    Raises:
      ValueError: Exactly one of `save_steps` or `save_secs` should be set.
    """
    # Validate the arguments up front so that a bad configuration fails
    # fast, before any side effects (logging, creating a summary writer).
    if save_steps is None and save_secs is None:
        raise ValueError("Either save_steps or save_secs should be provided")
    if (save_steps is not None) and (save_secs is not None):
        raise ValueError("Can not provide both save_steps and save_secs.")
    logging.info("Create CheckpointSaverHook.")
    self._saver = saver
    self._checkpoint_dir = checkpoint_dir
    self._summary_writer = SummaryWriterCache.get(checkpoint_dir)
    self._save_path = os.path.join(checkpoint_dir, checkpoint_basename)
    self._scaffold = scaffold
    self._save_secs = save_secs
    self._save_steps = save_steps
    # Bookkeeping for the every-N-secs / every-N-steps saving policy.
    self._last_saved_time = None
    self._last_saved_step = None
def __init__(self,
             checkpoint_dir,
             save_secs=None,
             save_steps=None,
             saver=None,
             checkpoint_basename="model.ckpt",
             scaffold=None):
    """Initialize CheckpointSaverHook monitor.

    Args:
      checkpoint_dir: `str`, base directory for the checkpoint files.
      save_secs: `int`, save every N secs. Mutually exclusive with
        `save_steps`.
      save_steps: `int`, save every N steps. Mutually exclusive with
        `save_secs`.
      saver: `Saver` object, used for saving.
      checkpoint_basename: `str`, base name for the checkpoint files.
      scaffold: `Scaffold`, use to get saver object.

    Raises:
      ValueError: Exactly one of `save_steps` or `save_secs` should be set.
    """
    # Check the save-policy arguments first: invalid combinations should
    # raise before this hook creates a summary writer or logs anything.
    if save_steps is None and save_secs is None:
        raise ValueError("Either save_steps or save_secs should be provided")
    if (save_steps is not None) and (save_secs is not None):
        raise ValueError("Can not provide both save_steps and save_secs.")
    logging.info("Create CheckpointSaverHook")
    self._saver = saver
    self._checkpoint_dir = checkpoint_dir
    self._summary_writer = SummaryWriterCache.get(checkpoint_dir)
    self._save_path = os.path.join(checkpoint_dir, checkpoint_basename)
    self._scaffold = scaffold
    self._save_secs = save_secs
    self._save_steps = save_steps
    # Track the last save so the timer/step policy knows when to fire next.
    self._last_saved_time = None
    self._last_saved_step = None
def __init__(self, every_n_steps=100, output_dir=None, summary_writer=None):
    """Creates a step counter reporting under the `global_step/sec` tag.

    Args:
      every_n_steps: `int`, report the step rate every N global steps.
      output_dir: `string`, directory used to look up a cached summary
        writer when `summary_writer` is not supplied.
      summary_writer: `SummaryWriter`, optional pre-built writer.
    """
    super(StepCounter, self).__init__(every_n_steps=every_n_steps)
    self._summary_tag = "global_step/sec"
    # No rate can be computed until a first step/time pair is recorded.
    self._last_reported_step = None
    self._last_reported_time = None
    if summary_writer is None and output_dir:
        summary_writer = SummaryWriterCache.get(output_dir)
    self._summary_writer = summary_writer
def __init__(self, every_n_steps=100, output_dir=None, summary_writer=None):
    """Configures the steps-per-second counter.

    Args:
      every_n_steps: `int`, how often (in global steps) to report the rate.
      output_dir: `string`, directory used to look up a cached summary
        writer when `summary_writer` is not supplied.
      summary_writer: `SummaryWriter`, optional pre-built writer.
    """
    self._summary_tag = "global_step/sec"
    self._every_n_steps = every_n_steps
    # Prefer an explicit writer; otherwise fall back to the cache keyed
    # on `output_dir` (when one was given).
    if summary_writer is None and output_dir:
        summary_writer = SummaryWriterCache.get(output_dir)
    self._summary_writer = summary_writer
def begin(self):
    """Resolves the tensors this hook reads, before the session is built.

    Creates a summary writer for `self.output_dir` when none was supplied,
    then looks up the generator/discriminator tensors by name in the
    default graph.

    Raises:
      RuntimeError: If the global-step tensor or the fake-sequence tensor
        cannot be found.
    """
    if self.summary_writer is None and self.output_dir:
        self.summary_writer = SummaryWriterCache.get(self.output_dir)
    graph = ops.get_default_graph()
    # NOTE(review): these names assume the model graph was built inside a
    # "model/" name scope -- confirm against the graph-construction code.
    self.fake_seq = graph.get_tensor_by_name("model/" + FAKE_PROTEINS + ":0")
    self.labels = graph.get_tensor_by_name("model/" + LABELS + ":0")
    self.d_score = graph.get_tensor_by_name("model/d_score:0")
    self.global_step_tensor = training_util._get_or_create_global_step_read()
    if self.global_step_tensor is None:
        # Fixed: original message read "Could not global step tensor".
        raise RuntimeError("Could not get global step tensor")
    if self.fake_seq is None:
        raise RuntimeError("Could not get fake seq tensor")
def __init__(self, every_n_steps, saver, checkpoint_dir,
             checkpoint_basename="model3124.ckpt", first_n_steps=-1):
    """Initialize CheckpointSaver monitor.

    Args:
      every_n_steps: `int`, save every N steps.
      saver: `Saver` object, used for saving.
      checkpoint_dir: `str`, base directory for the checkpoint files.
      checkpoint_basename: `str`, base name for the checkpoint files.
        NOTE(review): the "model3124" default looks project-specific; the
        sibling monitor defaults to "model.ckpt" -- confirm it is
        intentional before unifying.
      first_n_steps: `int`, if positive, save every step during the first
        `first_n_steps` steps.
    """
    logging.info("Create CheckpointSaver")
    super(CheckpointSaver, self).__init__(every_n_steps=every_n_steps,
                                          first_n_steps=first_n_steps)
    self._summary_writer = SummaryWriterCache.get(checkpoint_dir)
    self._save_path = os.path.join(checkpoint_dir, checkpoint_basename)
    self._saver = saver
def __init__(self, every_n_steps, saver, checkpoint_dir,
             checkpoint_basename="model.ckpt", first_n_steps=-1):
    """Initialize CheckpointSaver monitor.

    Args:
      every_n_steps: `int`, save every N steps.
      saver: `Saver` object, used for saving.
      checkpoint_dir: `str`, base directory for the checkpoint files.
      checkpoint_basename: `str`, base name for the checkpoint files.
      first_n_steps: `int`, if positive, save every step during the first
        `first_n_steps` steps.
    """
    logging.info("Create CheckpointSaver")
    super(CheckpointSaver, self).__init__(every_n_steps=every_n_steps,
                                          first_n_steps=first_n_steps)
    self._saver = saver
    # Checkpoints land at <checkpoint_dir>/<checkpoint_basename>-<step>.
    save_path = os.path.join(checkpoint_dir, checkpoint_basename)
    self._save_path = save_path
    self._summary_writer = SummaryWriterCache.get(checkpoint_dir)
def add_custom_scalar(logdir):
    """Writes a TensorBoard custom-scalars layout summary into `logdir`.

    Registers a single 'Loss' category holding one multiline chart per
    tag group below.

    Args:
      logdir: `str`, log directory whose cached summary writer receives
        the layout summary.
    """
    # (chart title, tag regexes) pairs, in display order.
    # NOTE(review): 'BLOMSUM45' is probably a misspelling of 'BLOSUM45',
    # but the tag regex must match whatever the summary writers emit --
    # confirm before renaming either side.
    chart_specs = [
        ('Loss', [r'1_loss/*']),
        ('Loss Component', [r'2_loss_component/*']),
        ('Discriminator Values', [r'3_discriminator_values/*']),
        ('Variation of sequences', [r'Stddev/*']),
        ('BLOMSUM45', [r'Blast/*/BLOMSUM45']),
        ('Evalue', [r'Blast/*/Evalue']),
        ('Identity', [r'Blast/*/Identity']),
    ]
    charts = [
        layout_pb2.Chart(
            title=title,
            multiline=layout_pb2.MultilineChartContent(tag=tags))
        for title, tags in chart_specs
    ]
    layout_summary = summary.custom_scalar_pb(
        layout_pb2.Layout(
            category=[layout_pb2.Category(title='Loss', chart=charts)]))
    summary_writer = SummaryWriterCache.get(logdir)
    summary_writer.add_summary(layout_summary)
def set_estimator(self, estimator):
    """Binds the counter to `estimator`, defaulting the summary writer.

    Args:
      estimator: The estimator this monitor runs against; its `model_dir`
        supplies a cached summary writer when none was configured.
    """
    super(StepCounter, self).set_estimator(estimator)
    if self._summary_writer is not None:
        return
    self._summary_writer = SummaryWriterCache.get(estimator.model_dir)
def after_create_session(self, session, coord):
    """Caches the global-step tensor and a summary writer for this run.

    Args:
      session: The newly created session (unused here).
      coord: A coordinator for the session's threads (unused here).
    """
    self._writer = SummaryWriterCache.get(self.summary_dir)
    self._global_step_tensor = tf.train.get_global_step()