Example #3
 def testCreate_immediateSetAsDefault_retainsReference(self):
   logdir = self.get_temp_dir()
   try:
     with context.eager_mode():
       summary_ops.create_file_writer_v2(logdir).set_as_default()
       summary_ops.flush()
   finally:
     # Ensure we clean up no matter how the test executes.
     summary_ops._summary_state.writer = None  # pylint: disable=protected-access
 def testCreate_immediateSetAsDefault_retainsReference(self):
   logdir = self.get_temp_dir()
   try:
     with context.eager_mode():
       summary_ops.create_file_writer_v2(logdir).set_as_default()
       summary_ops.flush()
   finally:
     # Ensure we clean up no matter how the test executes.
     context.context().summary_writer_resource = None
Example #5
    def begin(self):
        """Call when session hook begins"""
        self._init_ops = []
        self._finalize_ops = []

        summary_writer_init_ops = contrib_summary.summary_writer_initializer_op(
        )
        self._init_ops.extend(summary_writer_init_ops)
        # Get all the writer resources from the initializer, so we know what to flush.
        for op in summary_writer_init_ops:
            self._finalize_ops.append(
                contrib_summary.flush(writer=op.inputs[0]))
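
For context, a minimal sketch of how a begin() fragment like the one above could be wired into a complete hook (assumptions: TF 1.x with tf.contrib.summary; the SummaryFlushHook class, its import aliases, and its other method bodies are illustrative, not the original project's code):

import tensorflow as tf
from tensorflow.contrib import summary as contrib_summary


class SummaryFlushHook(tf.train.SessionRunHook):
    """Initializes contrib summary writers and flushes them when the session ends."""

    def begin(self):
        # Each writer initializer op takes the writer resource as its first
        # input, which is exactly what flush() needs.
        self._init_ops = list(contrib_summary.summary_writer_initializer_op())
        self._finalize_ops = [
            contrib_summary.flush(writer=op.inputs[0]) for op in self._init_ops
        ]

    def after_create_session(self, session, coord):
        session.run(self._init_ops)

    def end(self, session):
        session.run(self._finalize_ops)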
Example #6
    def begin(self):
        self._init_ops = []
        self._finalize_ops = [
            npu_ops.stop_outfeed_dequeue_op(self._channel_name)
        ]

        summary_writer_init_ops = contrib_summary.summary_writer_initializer_op(
        )
        self._init_ops.extend(summary_writer_init_ops)
        # Get all the writer resources from the initializer, so we know what to flush.
        for op in summary_writer_init_ops:
            self._finalize_ops.append(
                contrib_summary.flush(writer=op.inputs[0]))
 def testFlushFunction(self):
   logdir = self.get_temp_dir()
   writer = summary_ops.create_file_writer(
       logdir, max_queue=999999, flush_millis=999999)
   with writer.as_default(), summary_ops.always_record_summaries():
     summary_ops.scalar('scalar', 2.0, step=1)
     flush_op = summary_ops.flush()
   with self.cached_session() as sess:
     sess.run(summary_ops.summary_writer_initializer_op())
     get_total = lambda: len(summary_test_util.events_from_logdir(logdir))
     # Note: First tf.Event is always file_version.
     self.assertEqual(1, get_total())
     sess.run(summary_ops.all_summary_ops())
     self.assertEqual(1, get_total())
     sess.run(flush_op)
     self.assertEqual(2, get_total())
     # Test "writer" parameter
     sess.run(summary_ops.all_summary_ops())
     sess.run(summary_ops.flush(writer=writer))
     self.assertEqual(3, get_total())
     sess.run(summary_ops.all_summary_ops())
     sess.run(summary_ops.flush(writer=writer._resource))  # pylint:disable=protected-access
     self.assertEqual(4, get_total())
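
The same buffer-then-flush behaviour can also be sketched against the public TF2 summary API (assumption: TF 2.x eager execution; the logdir value is illustrative and not taken from the test above):

import tempfile

import tensorflow as tf

logdir = tempfile.mkdtemp()
# Large max_queue/flush_millis keep events buffered until an explicit flush.
writer = tf.summary.create_file_writer(logdir, max_queue=999999,
                                       flush_millis=999999)
with writer.as_default():
  tf.summary.scalar('scalar', 2.0, step=1)  # buffered in memory
  tf.summary.flush()                        # forces the event file to be written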
 def testFlushFunction(self):
   logs = tempfile.mkdtemp()
   writer = summary_ops.create_file_writer(
       logs, max_queue=999999, flush_millis=999999, name='lol')
   with writer.as_default(), summary_ops.always_record_summaries():
     get_total = lambda: len(summary_test_util.events_from_logdir(logs))
     # Note: First tf.Event is always file_version.
     self.assertEqual(1, get_total())
     summary_ops.scalar('scalar', 2.0, step=1)
     summary_ops.scalar('scalar', 2.0, step=2)
     self.assertEqual(1, get_total())
     summary_ops.flush()
     self.assertEqual(3, get_total())
     # Test "writer" parameter
     summary_ops.scalar('scalar', 2.0, step=3)
     summary_ops.flush(writer=writer)
     self.assertEqual(4, get_total())
     summary_ops.scalar('scalar', 2.0, step=4)
     summary_ops.flush(writer=writer._resource)  # pylint:disable=protected-access
     self.assertEqual(5, get_total())
  def testSharedName(self):
    logdir = self.get_temp_dir()
    with summary_ops.always_record_summaries():
      # Create with default shared name (should match logdir)
      writer1 = summary_ops.create_file_writer(logdir)
      with writer1.as_default():
        summary_ops.scalar('one', 1.0, step=1)
        summary_ops.flush()
      # Create with explicit logdir shared name (should be same resource/file)
      shared_name = 'logdir:' + logdir
      writer2 = summary_ops.create_file_writer(logdir, name=shared_name)
      with writer2.as_default():
        summary_ops.scalar('two', 2.0, step=2)
        summary_ops.flush()
      # Create with different shared name (should be separate resource/file)
      time.sleep(1.1)  # Ensure filename has a different timestamp
      writer3 = summary_ops.create_file_writer(logdir, name='other')
      with writer3.as_default():
        summary_ops.scalar('three', 3.0, step=3)
        summary_ops.flush()

    event_files = iter(sorted(gfile.Glob(os.path.join(logdir, '*tfevents*'))))

    # First file has tags "one" and "two"
    events = iter(summary_test_util.events_from_file(next(event_files)))
    self.assertEqual('brain.Event:2', next(events).file_version)
    self.assertEqual('one', next(events).summary.value[0].tag)
    self.assertEqual('two', next(events).summary.value[0].tag)
    self.assertRaises(StopIteration, lambda: next(events))

    # Second file has tag "three"
    events = iter(summary_test_util.events_from_file(next(event_files)))
    self.assertEqual('brain.Event:2', next(events).file_version)
    self.assertEqual('three', next(events).summary.value[0].tag)
    self.assertRaises(StopIteration, lambda: next(events))

    # No more files
    self.assertRaises(StopIteration, lambda: next(event_files))
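
As a quick reference, the shared-name behaviour this test relies on can be sketched as follows (assumptions: tf.contrib.summary-era create_file_writer, whose default shared name is 'logdir:' + logdir; the import path and variable names here are illustrative):

import tempfile

from tensorflow.python.ops import summary_ops_v2 as summary_ops

logdir = tempfile.mkdtemp()
# Same shared name -> same writer resource, appending to one event file.
writer_default = summary_ops.create_file_writer(logdir)
writer_same = summary_ops.create_file_writer(logdir, name='logdir:' + logdir)
# Different shared name -> a separate resource and a second event file.
writer_other = summary_ops.create_file_writer(logdir, name='other')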
Example #12
  def testSharedName(self):
    logdir = self.get_temp_dir()
    with context.eager_mode():
      # Create with default shared name (should match logdir)
      writer1 = summary_ops.create_file_writer(logdir)
      with writer1.as_default():
        summary_ops.write('tag', 1, step=1)
        summary_ops.flush()
      # Create with explicit logdir shared name (should be same resource/file)
      shared_name = 'logdir:' + logdir
      writer2 = summary_ops.create_file_writer(logdir, name=shared_name)
      with writer2.as_default():
        summary_ops.write('tag', 1, step=2)
        summary_ops.flush()
      # Create with different shared name (should be separate resource/file)
      time.sleep(1.1)  # Ensure filename has a different timestamp
      writer3 = summary_ops.create_file_writer(logdir, name='other')
      with writer3.as_default():
        summary_ops.write('tag', 1, step=3)
        summary_ops.flush()

    event_files = iter(sorted(gfile.Glob(os.path.join(logdir, '*'))))

    # First file has events at steps 1 and 2
    events = iter(events_from_file(next(event_files)))
    self.assertEqual('brain.Event:2', next(events).file_version)
    self.assertEqual(1, next(events).step)
    self.assertEqual(2, next(events).step)
    self.assertRaises(StopIteration, lambda: next(events))

    # Second file has an event at step 3
    events = iter(events_from_file(next(event_files)))
    self.assertEqual('brain.Event:2', next(events).file_version)
    self.assertEqual(3, next(events).step)
    self.assertRaises(StopIteration, lambda: next(events))

    # No more files
    self.assertRaises(StopIteration, lambda: next(event_files))
Example #13
 def testFlushFunction(self):
   logdir = self.get_temp_dir()
   with context.eager_mode():
     writer = summary_ops.create_file_writer_v2(
         logdir, max_queue=999999, flush_millis=999999)
     with writer.as_default(), summary_ops.always_record_summaries():
       get_total = lambda: len(events_from_logdir(logdir))
       # Note: First tf.Event is always file_version.
       self.assertEqual(1, get_total())
       summary_ops.write('tag', 1, step=0)
       summary_ops.write('tag', 1, step=0)
       self.assertEqual(1, get_total())
       summary_ops.flush()
       self.assertEqual(3, get_total())
       # Test "writer" parameter
       summary_ops.write('tag', 1, step=0)
       self.assertEqual(3, get_total())
       summary_ops.flush(writer=writer)
       self.assertEqual(4, get_total())
       summary_ops.write('tag', 1, step=0)
       self.assertEqual(4, get_total())
       summary_ops.flush(writer=writer._resource)  # pylint:disable=protected-access
       self.assertEqual(5, get_total())
 def testFlushFunction(self):
   logdir = self.get_temp_dir()
   with context.eager_mode():
     writer = summary_ops.create_file_writer_v2(
         logdir, max_queue=999999, flush_millis=999999)
     with writer.as_default():
       get_total = lambda: len(events_from_logdir(logdir))
       # Note: First tf.Event is always file_version.
       self.assertEqual(1, get_total())
       summary_ops.write('tag', 1, step=0)
       summary_ops.write('tag', 1, step=0)
       self.assertEqual(1, get_total())
       summary_ops.flush()
       self.assertEqual(3, get_total())
       # Test "writer" parameter
       summary_ops.write('tag', 1, step=0)
       self.assertEqual(3, get_total())
       summary_ops.flush(writer=writer)
       self.assertEqual(4, get_total())
       summary_ops.write('tag', 1, step=0)
       self.assertEqual(4, get_total())
       summary_ops.flush(writer=writer._resource)  # pylint:disable=protected-access
       self.assertEqual(5, get_total())
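
The events_from_logdir() helper these tests rely on is not shown here; a rough stand-in can be sketched with the public summary_iterator (assumption: the real helper may differ, and this version only handles a single event file):

import glob
import os

import tensorflow as tf


def events_from_logdir(logdir):
  """Returns all tf.Event protos from the single event file in logdir."""
  files = glob.glob(os.path.join(logdir, '*tfevents*'))
  assert len(files) == 1, files
  return list(tf.compat.v1.train.summary_iterator(files[0]))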
 def testCreate_immediateAsDefault_retainsReference(self):
   logdir = self.get_temp_dir()
   with context.eager_mode():
     with summary_ops.create_file_writer_v2(logdir).as_default():
       summary_ops.flush()
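
For comparison, a minimal sketch of the two default-writer patterns these tests exercise, written against the public TF2 API (assumption: TF 2.x eager execution; the logdir is illustrative):

import tempfile

import tensorflow as tf

logdir = tempfile.mkdtemp()

# Pattern 1: as_default() scopes the writer to the block; it stops being the
# default writer once the block exits.
with tf.summary.create_file_writer(logdir).as_default():
  tf.summary.flush()

# Pattern 2: set_as_default() installs the writer until it is replaced, so the
# summary machinery retains a reference even without a local variable.
tf.summary.create_file_writer(logdir).set_as_default()
tf.summary.flush()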