def testSummaryMetadata_FirstMetadataWins(self):
    logdir = self.get_temp_dir()
    summary_metadata_1 = summary_pb2.SummaryMetadata(
        display_name="current tagee",
        summary_description="no",
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name="outlet", content=b"120v"
        ),
    )
    self._writeMetadata(logdir, summary_metadata_1, nonce="1")
    acc = ea.EventAccumulator(logdir)
    acc.Reload()
    summary_metadata_2 = summary_pb2.SummaryMetadata(
        display_name="tagee of the future",
        summary_description="definitely not",
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name="plug", content=b"110v"
        ),
    )
    self._writeMetadata(logdir, summary_metadata_2, nonce="2")
    acc.Reload()
    self.assertProtoEquals(
        summary_metadata_1, acc.SummaryMetadata("you_are_it")
    )
def testNewStyleScalarSummary(self):
    """Verify processing of tensorboard.plugins.scalar.summary."""
    event_sink = _EventGenerator(self, zero_out_timestamps=True)
    writer = test_util.FileWriter(self.get_temp_dir())
    writer.event_writer = event_sink
    with self.test_session() as sess:
        step = tf.compat.v1.placeholder(tf.float32, shape=[])
        scalar_summary.op('accuracy', 1.0 - 1.0 / (step + tf.constant(1.0)))
        scalar_summary.op('xent', 1.0 / (step + tf.constant(1.0)))
        merged = tf.compat.v1.summary.merge_all()
        writer.add_graph(sess.graph)
        for i in xrange(10):
            summ = sess.run(merged, feed_dict={step: float(i)})
            writer.add_summary(summ, global_step=i)

    accumulator = ea.EventAccumulator(event_sink)
    accumulator.Reload()

    tags = [
        u'accuracy/scalar_summary',
        u'xent/scalar_summary',
    ]
    self.assertTagsEqual(accumulator.Tags(), {
        ea.TENSORS: tags,
        ea.GRAPH: True,
        ea.META_GRAPH: False,
    })
def testTFSummaryTensor(self):
    """Verify processing of tf.summary.tensor."""
    event_sink = _EventGenerator(self, zero_out_timestamps=True)
    writer = test_util.FileWriter(self.get_temp_dir())
    writer.event_writer = event_sink
    with tf.compat.v1.Graph().as_default():
        with self.test_session() as sess:
            tensor_summary = tf.compat.v1.summary.tensor_summary
            tensor_summary("scalar", tf.constant(1.0))
            tensor_summary("vector", tf.constant([1.0, 2.0, 3.0]))
            tensor_summary("string", tf.constant(six.b("foobar")))
            merged = tf.compat.v1.summary.merge_all()
            summ = sess.run(merged)
            writer.add_summary(summ, 0)

    accumulator = ea.EventAccumulator(event_sink)
    accumulator.Reload()

    self.assertTagsEqual(accumulator.Tags(), {
        ea.TENSORS: ["scalar", "vector", "string"],
    })

    scalar_proto = accumulator.Tensors("scalar")[0].tensor_proto
    scalar = tensor_util.make_ndarray(scalar_proto)
    vector_proto = accumulator.Tensors("vector")[0].tensor_proto
    vector = tensor_util.make_ndarray(vector_proto)
    string_proto = accumulator.Tensors("string")[0].tensor_proto
    string = tensor_util.make_ndarray(string_proto)
    self.assertTrue(np.array_equal(scalar, 1.0))
    self.assertTrue(np.array_equal(vector, [1.0, 2.0, 3.0]))
    self.assertTrue(np.array_equal(string, six.b("foobar")))

    self.assertItemsEqual(accumulator.ActivePlugins(), [])
def testSessionLogStartMessageDiscardsExpiredEvents(self):
    """Test that SessionLog.START message discards expired events.

    This discard logic is preferred over the out-of-order step discard
    logic, but this logic can only be used for event protos which have the
    SessionLog enum, which was introduced to event.proto for
    file_version >= brain.Event:2.
    """
    gen = _EventGenerator(self)
    acc = ea.EventAccumulator(gen)

    gen.AddEvent(
        event_pb2.Event(wall_time=0, step=1, file_version='brain.Event:2'))
    gen.AddScalarTensor('s1', wall_time=1, step=100, value=20)
    gen.AddScalarTensor('s1', wall_time=1, step=200, value=20)
    gen.AddScalarTensor('s1', wall_time=1, step=300, value=20)
    gen.AddScalarTensor('s1', wall_time=1, step=400, value=20)

    gen.AddScalarTensor('s2', wall_time=1, step=202, value=20)
    gen.AddScalarTensor('s2', wall_time=1, step=203, value=20)

    slog = event_pb2.SessionLog(status=event_pb2.SessionLog.START)
    gen.AddEvent(
        event_pb2.Event(wall_time=2, step=201, session_log=slog))
    acc.Reload()
    self.assertEqual([x.step for x in acc.Tensors('s1')], [100, 200])
    self.assertEqual([x.step for x in acc.Tensors('s2')], [])
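# Side note (illustrative sketch, not part of the test suite): the restart
# marker synthesized above is an ordinary SessionLog.START event. Assuming a
# plain tf.compat.v1.summary.FileWriter (which exposes add_event), writing
# such a marker by hand might look like the hypothetical helper below;
# `logdir` and `step` are assumed arguments, not values used by these tests.
def _write_restart_marker_sketch(logdir, step):
    """Hypothetical helper: writes a SessionLog.START event at `step`."""
    writer = tf.compat.v1.summary.FileWriter(logdir)
    start = event_pb2.SessionLog(status=event_pb2.SessionLog.START)
    writer.add_event(event_pb2.Event(step=step, session_log=start))
    writer.flush()
    writer.close()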
def _testTFSummaryTensor_SizeGuidance(self,
                                      plugin_name,
                                      tensor_size_guidance,
                                      steps,
                                      expected_count):
    event_sink = _EventGenerator(self, zero_out_timestamps=True)
    writer = test_util.FileWriter(self.get_temp_dir())
    writer.event_writer = event_sink
    with self.test_session() as sess:
        summary_metadata = summary_pb2.SummaryMetadata(
            plugin_data=summary_pb2.SummaryMetadata.PluginData(
                plugin_name=plugin_name, content=b'{}'))
        tf.compat.v1.summary.tensor_summary(
            'scalar', tf.constant(1.0), summary_metadata=summary_metadata)
        merged = tf.compat.v1.summary.merge_all()
        for step in xrange(steps):
            writer.add_summary(sess.run(merged), global_step=step)

    accumulator = ea.EventAccumulator(
        event_sink, tensor_size_guidance=tensor_size_guidance)
    accumulator.Reload()

    tensors = accumulator.Tensors('scalar')
    self.assertEqual(len(tensors), expected_count)
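# For context (illustrative): `tensor_size_guidance` maps a plugin name to
# the number of tensor events the accumulator keeps for tags owned by that
# plugin, overriding the default reservoir size for the TENSORS key. A
# caller might construct an accumulator like the sketch below; the plugin
# name and count are arbitrary example values, not ones used by this helper.
#
#     accumulator = ea.EventAccumulator(
#         logdir, tensor_size_guidance={"scalars": 1000})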
def testKeyError(self):
    """KeyError should be raised when accessing non-existing keys."""
    gen = _EventGenerator(self)
    acc = ea.EventAccumulator(gen)
    acc.Reload()
    with self.assertRaises(KeyError):
        acc.Tensors('s1')
def testExpiredDataDiscardedAfterRestartForFileVersionLessThan2(self):
    """Tests that events are discarded after a restart is detected.

    If a step value is observed to be lower than what was previously seen,
    this should force a discard of all previous items with the same tag
    that are outdated.

    Only file versions < 2 use this out-of-order discard logic. Later
    versions discard events based on the step value of SessionLog.START.
    """
    warnings = []
    self.stubs.Set(logger, 'warn', warnings.append)

    gen = _EventGenerator(self)
    acc = ea.EventAccumulator(gen)

    gen.AddEvent(
        event_pb2.Event(wall_time=0, step=0, file_version='brain.Event:1'))
    gen.AddScalarTensor('s1', wall_time=1, step=100, value=20)
    gen.AddScalarTensor('s1', wall_time=1, step=200, value=20)
    gen.AddScalarTensor('s1', wall_time=1, step=300, value=20)
    acc.Reload()
    ## Check that number of items are what they should be
    self.assertEqual([x.step for x in acc.Tensors('s1')], [100, 200, 300])

    gen.AddScalarTensor('s1', wall_time=1, step=101, value=20)
    gen.AddScalarTensor('s1', wall_time=1, step=201, value=20)
    gen.AddScalarTensor('s1', wall_time=1, step=301, value=20)
    acc.Reload()
    ## Check that we have discarded 200 and 300 from s1
    self.assertEqual([x.step for x in acc.Tensors('s1')],
                     [100, 101, 201, 301])
def testPluginTagToContent_PluginsCannotJumpOnTheBandwagon(self):
    # If there are multiple `SummaryMetadata` for a given tag, and the
    # set of plugins in the `plugin_data` of the second is different from
    # that of the first, then the second set should be ignored.
    logdir = self.get_temp_dir()
    summary_metadata_1 = summary_pb2.SummaryMetadata(
        display_name="current tagee",
        summary_description="no",
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name="outlet", content=b"120v"),
    )
    self._writeMetadata(logdir, summary_metadata_1, nonce="1")
    acc = ea.EventAccumulator(logdir)
    acc.Reload()
    summary_metadata_2 = summary_pb2.SummaryMetadata(
        display_name="tagee of the future",
        summary_description="definitely not",
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name="plug", content=b"110v"),
    )
    self._writeMetadata(logdir, summary_metadata_2, nonce="2")
    acc.Reload()

    self.assertEqual(
        acc.PluginTagToContent("outlet"), {"you_are_it": b"120v"})
    with self.assertRaisesRegex(KeyError, "plug"):
        acc.PluginTagToContent("plug")
    self.assertItemsEqual(acc.ActivePlugins(), ["outlet"])
def testGraphFromMetaGraphBecomesAvailable(self):
    """Test accumulator by writing values and then reading them."""

    directory = os.path.join(self.get_temp_dir(), 'metagraph_test_values_dir')
    if tf.gfile.IsDirectory(directory):
        tf.gfile.DeleteRecursively(directory)
    tf.gfile.MkDir(directory)

    writer = tf.summary.FileWriter(directory, max_queue=100)

    with tf.Graph().as_default() as graph:
        _ = tf.constant([2.0, 1.0])
        # Add a graph to the summary writer.
        meta_graph_def = tf.train.export_meta_graph(
            graph_def=graph.as_graph_def(add_shapes=True))
        writer.add_meta_graph(meta_graph_def)

    writer.flush()

    # Verify that we can load those events properly
    acc = ea.EventAccumulator(directory)
    acc.Reload()
    self.assertTagsEqual(acc.Tags(), {
        ea.GRAPH: True,
        ea.META_GRAPH: True,
    })
    self.assertProtoEquals(graph.as_graph_def(add_shapes=True), acc.Graph())
    self.assertProtoEquals(meta_graph_def, acc.MetaGraph())
def testGraphFromMetaGraphBecomesAvailable(self):
    """Test accumulator by writing values and then reading them."""

    directory = os.path.join(self.get_temp_dir(), 'metagraph_test_values_dir')
    if tf.io.gfile.isdir(directory):
        tf.io.gfile.rmtree(directory)
    tf.io.gfile.mkdir(directory)

    writer = test_util.FileWriter(directory, max_queue=100)

    with tf.Graph().as_default() as graph:
        _ = tf.constant([2.0, 1.0])
        # Add a graph to the summary writer.
        meta_graph_def = tf.compat.v1.train.export_meta_graph(
            graph_def=graph.as_graph_def(add_shapes=True))
        writer.add_meta_graph(meta_graph_def)

    writer.flush()

    # Verify that we can load those events properly
    acc = ea.EventAccumulator(directory)
    acc.Reload()
    self.assertTagsEqual(acc.Tags(), {
        ea.GRAPH: True,
        ea.META_GRAPH: True,
    })
    expected_graph_def = graph_pb2.GraphDef.FromString(
        graph.as_graph_def(add_shapes=True).SerializeToString())
    self.assertProtoEquals(expected_graph_def, acc.Graph())
    expected_meta_graph = meta_graph_pb2.MetaGraphDef.FromString(
        meta_graph_def.SerializeToString())
    self.assertProtoEquals(expected_meta_graph, acc.MetaGraph())
def testPluginTagToContent_PluginsCannotJumpOnTheBandwagon(self):
    # If there are multiple `SummaryMetadata` for a given tag, and the
    # set of plugins in the `plugin_data` of the second is different from
    # that of the first, then the second set should be ignored.
    logdir = self.get_temp_dir()
    summary_metadata_1 = summary_pb2.SummaryMetadata(
        display_name='current tagee',
        summary_description='no',
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name='outlet', content=b'120v'))
    self._writeMetadata(logdir, summary_metadata_1, nonce='1')
    acc = ea.EventAccumulator(logdir)
    acc.Reload()
    summary_metadata_2 = summary_pb2.SummaryMetadata(
        display_name='tagee of the future',
        summary_description='definitely not',
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name='plug', content=b'110v'))
    self._writeMetadata(logdir, summary_metadata_2, nonce='2')
    acc.Reload()

    self.assertEqual(acc.PluginTagToContent('outlet'),
                     {'you_are_it': b'120v'})
    with six.assertRaisesRegex(self, KeyError, 'plug'):
        acc.PluginTagToContent('plug')
def testFirstEventTimestamp(self):
    """Test that FirstEventTimestamp() returns wall_time of the first event."""
    gen = _EventGenerator(self)
    acc = ea.EventAccumulator(gen)
    gen.AddEvent(
        tf.Event(wall_time=10, step=20, file_version='brain.Event:2'))
    gen.AddScalarTensor('s1', wall_time=30, step=40, value=20)
    self.assertEqual(acc.FirstEventTimestamp(), 10)
def testTFSummaryTensor(self):
    """Verify processing of tf.summary.tensor."""
    event_sink = _EventGenerator(self, zero_out_timestamps=True)
    writer = test_util.FileWriter(self.get_temp_dir())
    writer.event_writer = event_sink
    with self.test_session() as sess:
        tf.compat.v1.summary.tensor_summary('scalar', tf.constant(1.0))
        tf.compat.v1.summary.tensor_summary(
            'vector', tf.constant([1.0, 2.0, 3.0]))
        tf.compat.v1.summary.tensor_summary(
            'string', tf.constant(six.b('foobar')))
        merged = tf.compat.v1.summary.merge_all()
        summ = sess.run(merged)
        writer.add_summary(summ, 0)

    accumulator = ea.EventAccumulator(event_sink)
    accumulator.Reload()

    self.assertTagsEqual(accumulator.Tags(), {
        ea.TENSORS: ['scalar', 'vector', 'string'],
    })

    scalar_proto = accumulator.Tensors('scalar')[0].tensor_proto
    scalar = tensor_util.make_ndarray(scalar_proto)
    vector_proto = accumulator.Tensors('vector')[0].tensor_proto
    vector = tensor_util.make_ndarray(vector_proto)
    string_proto = accumulator.Tensors('string')[0].tensor_proto
    string = tensor_util.make_ndarray(string_proto)
    self.assertTrue(np.array_equal(scalar, 1.0))
    self.assertTrue(np.array_equal(vector, [1.0, 2.0, 3.0]))
    self.assertTrue(np.array_equal(string, six.b('foobar')))
def testNewStyleAudioSummary(self):
    """Verify processing of tensorboard.plugins.audio.summary."""
    event_sink = _EventGenerator(self, zero_out_timestamps=True)
    writer = test_util.FileWriter(self.get_temp_dir())
    writer.event_writer = event_sink
    with self.test_session() as sess:
        ipt = tf.random.normal(shape=[5, 441, 2])
        with tf.name_scope('1'):
            audio_summary.op('one', ipt, sample_rate=44100, max_outputs=1)
        with tf.name_scope('2'):
            audio_summary.op('two', ipt, sample_rate=44100, max_outputs=2)
        with tf.name_scope('3'):
            audio_summary.op('three', ipt, sample_rate=44100, max_outputs=3)
        merged = tf.compat.v1.summary.merge_all()
        writer.add_graph(sess.graph)
        for i in xrange(10):
            summ = sess.run(merged)
            writer.add_summary(summ, global_step=i)

    accumulator = ea.EventAccumulator(event_sink)
    accumulator.Reload()

    tags = [
        u'1/one/audio_summary',
        u'2/two/audio_summary',
        u'3/three/audio_summary',
    ]
    self.assertTagsEqual(accumulator.Tags(), {
        ea.TENSORS: tags,
        ea.GRAPH: True,
        ea.META_GRAPH: False,
    })
def enable_runs(self, runs):
    for run in runs:
        self._multiplexer._accumulators[run] = (
            event_accumulator.EventAccumulator(
                os.path.join(self.logdir, run),
                size_guidance=self._multiplexer._size_guidance,
                tensor_size_guidance=self._multiplexer._tensor_size_guidance,
                purge_orphaned_data=self._multiplexer.purge_orphaned_data))
        self._multiplexer._paths[run] = os.path.join(self.logdir, run)
def testFirstEventTimestampLoadsEvent(self):
    """Test that FirstEventTimestamp() doesn't discard the loaded event."""
    gen = _EventGenerator(self)
    acc = ea.EventAccumulator(gen)
    gen.AddEvent(
        tf.Event(wall_time=1, step=2, file_version='brain.Event:2'))

    self.assertEqual(acc.FirstEventTimestamp(), 1)
    acc.Reload()
    self.assertEqual(acc.file_version, 2.0)
def readGraphDef(self, logdir=None):
    """Reads the graph definition from the event files on disk."""
    # A caller-supplied logdir is currently ignored; the graph is only read
    # from self._logdir when no logdir is passed.
    if logdir is None:
        accumulator = event_accumulator.EventAccumulator(self._logdir)
        accumulator.Reload()
        self._graphDef = accumulator.Graph()
    return
def testReload(self):
    """EventAccumulator contains suitable tags after calling Reload."""
    gen = _EventGenerator(self)
    acc = ea.EventAccumulator(gen)
    acc.Reload()
    self.assertTagsEqual(acc.Tags(), {})
    gen.AddScalarTensor("s1", wall_time=1, step=10, value=50)
    gen.AddScalarTensor("s2", wall_time=1, step=10, value=80)
    acc.Reload()
    self.assertTagsEqual(acc.Tags(), {
        ea.TENSORS: ["s1", "s2"],
    })
def testSummaryMetadata(self):
    logdir = self.get_temp_dir()
    summary_metadata = tf.SummaryMetadata(
        display_name='current tagee',
        summary_description='no',
        plugin_data=tf.SummaryMetadata.PluginData(plugin_name='outlet'))
    self._writeMetadata(logdir, summary_metadata)
    acc = ea.EventAccumulator(logdir)
    acc.Reload()
    self.assertProtoEquals(summary_metadata,
                           acc.SummaryMetadata('you_are_it'))
def testOnlySummaryEventsTriggerDiscards(self):
    """Test that file version event does not trigger data purge."""
    gen = _EventGenerator(self)
    acc = ea.EventAccumulator(gen)
    gen.AddScalarTensor("s1", wall_time=1, step=100, value=20)
    ev1 = event_pb2.Event(wall_time=2, step=0, file_version="brain.Event:1")
    graph_bytes = tf.compat.v1.GraphDef().SerializeToString()
    ev2 = event_pb2.Event(wall_time=3, step=0, graph_def=graph_bytes)
    gen.AddEvent(ev1)
    gen.AddEvent(ev2)
    acc.Reload()
    self.assertEqual([x.step for x in acc.Tensors("s1")], [100])
def testNonValueEvents(self):
    """Non-value events in the generator don't cause early exits."""
    gen = _EventGenerator(self)
    acc = ea.EventAccumulator(gen)
    gen.AddScalarTensor("s1", wall_time=1, step=10, value=20)
    gen.AddEvent(
        event_pb2.Event(wall_time=2, step=20, file_version="nots2")
    )
    gen.AddScalarTensor("s3", wall_time=3, step=100, value=1)

    acc.Reload()
    self.assertTagsEqual(acc.Tags(), {
        ea.TENSORS: ["s1", "s3"],
    })
def testSummaryMetadata(self):
    logdir = self.get_temp_dir()
    summary_metadata = summary_pb2.SummaryMetadata(
        display_name="current tagee",
        summary_description="no",
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name="outlet"),
    )
    self._writeMetadata(logdir, summary_metadata)
    acc = ea.EventAccumulator(logdir)
    acc.Reload()
    self.assertProtoEquals(
        summary_metadata, acc.SummaryMetadata("you_are_it"))
def testReloadPopulatesFirstEventTimestamp(self):
    """Test that Reload() means FirstEventTimestamp() won't load events."""
    gen = _EventGenerator(self)
    acc = ea.EventAccumulator(gen)
    gen.AddEvent(
        tf.Event(wall_time=1, step=2, file_version='brain.Event:2'))

    acc.Reload()

    def _Die(*args, **kwargs):  # pylint: disable=unused-argument
        raise RuntimeError('Load() should not be called')

    self.stubs.Set(gen, 'Load', _Die)

    self.assertEqual(acc.FirstEventTimestamp(), 1)
def testNewStyleAudioSummary(self):
    """Verify processing of tensorboard.plugins.audio.summary."""
    event_sink = _EventGenerator(self, zero_out_timestamps=True)
    writer = test_util.FileWriter(self.get_temp_dir())
    writer.event_writer = event_sink
    with tf.compat.v1.Graph().as_default():
        with self.test_session() as sess:
            ipt = tf.random.normal(shape=[5, 441, 2])
            with tf.name_scope("1"):
                audio_summary.op("one", ipt, sample_rate=44100, max_outputs=1)
            with tf.name_scope("2"):
                audio_summary.op("two", ipt, sample_rate=44100, max_outputs=2)
            with tf.name_scope("3"):
                audio_summary.op(
                    "three", ipt, sample_rate=44100, max_outputs=3)
            merged = tf.compat.v1.summary.merge_all()
            writer.add_graph(sess.graph)
            for i in range(10):
                summ = sess.run(merged)
                writer.add_summary(summ, global_step=i)

    accumulator = ea.EventAccumulator(event_sink)
    accumulator.Reload()

    tags = [
        graph_metadata.RUN_GRAPH_NAME,
        "1/one/audio_summary",
        "2/two/audio_summary",
        "3/three/audio_summary",
    ]
    self.assertTagsEqual(
        accumulator.Tags(),
        {
            ea.TENSORS: tags,
            ea.GRAPH: True,
            ea.META_GRAPH: False,
        },
    )
    self.assertItemsEqual(
        accumulator.ActivePlugins(),
        [audio_metadata.PLUGIN_NAME, graph_metadata.PLUGIN_NAME],
    )
def AddRun(self, path, name=None):
    """Add a run to the multiplexer.

    If the name is not specified, it is the same as the path.

    If a run by that name exists, and we are already watching the right
    path, do nothing. If we are watching a different path, replace the
    event accumulator.

    If `Reload` has been called, it will `Reload` the newly created
    accumulators.

    Args:
      path: Path to the event files (or event directory) for given run.
      name: Name of the run to add. If not provided, is set to path.

    Returns:
      The `EventMultiplexer`.
    """
    name = name or path
    accumulator = None
    with self._accumulators_mutex:
        if name not in self._accumulators or self._paths[name] != path:
            if name in self._paths and self._paths[name] != path:
                # TODO(@decentralion) - Make it impossible to overwrite an old
                # path with a new path (just give the new path a distinct name)
                logger.warning(
                    "Conflict for name %s: old path %s, new path %s",
                    name,
                    self._paths[name],
                    path,
                )
            logger.info("Constructing EventAccumulator for %s", path)
            accumulator = event_accumulator.EventAccumulator(
                path,
                size_guidance=self._size_guidance,
                tensor_size_guidance=self._tensor_size_guidance,
                purge_orphaned_data=self.purge_orphaned_data,
                event_file_active_filter=self._event_file_active_filter,
                detect_file_replacement=self._detect_file_replacement,
            )
            self._accumulators[name] = accumulator
            self._paths[name] = path
    if accumulator:
        if self._reload_called:
            accumulator.Reload()
    return self
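# Usage sketch (illustrative, not part of this module): a typical caller
# constructs one multiplexer, registers each run directory under a name, and
# then reloads. The logdir paths below are hypothetical, and the sketch
# assumes the multiplexer module is importable as `event_multiplexer`; AddRun
# is the method defined above, while Reload and GetAccumulator are its
# sibling EventMultiplexer methods.
def _add_runs_sketch():
    multiplexer = event_multiplexer.EventMultiplexer()
    multiplexer.AddRun("/tmp/logs/train", name="train")  # hypothetical path
    multiplexer.AddRun("/tmp/logs/eval", name="eval")  # hypothetical path
    multiplexer.Reload()  # reloads every registered accumulator
    return multiplexer.GetAccumulator("train").Tags()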
def testNewStyleImageSummary(self):
    """Verify processing of tensorboard.plugins.image.summary."""
    event_sink = _EventGenerator(self, zero_out_timestamps=True)
    writer = test_util.FileWriter(self.get_temp_dir())
    writer.event_writer = event_sink
    with tf.compat.v1.Graph().as_default():
        with self.test_session() as sess:
            ipt = tf.ones([10, 4, 4, 3], tf.uint8)
            # This is an interesting example, because the old tf.image_summary
            # op would throw an error here, because it would be tag reuse.
            # Using the tf node name instead allows argument re-use to the
            # image summary.
            with tf.name_scope("1"):
                image_summary.op("images", ipt, max_outputs=1)
            with tf.name_scope("2"):
                image_summary.op("images", ipt, max_outputs=2)
            with tf.name_scope("3"):
                image_summary.op("images", ipt, max_outputs=3)
            merged = tf.compat.v1.summary.merge_all()
            writer.add_graph(sess.graph)
            for i in range(10):
                summ = sess.run(merged)
                writer.add_summary(summ, global_step=i)

    accumulator = ea.EventAccumulator(event_sink)
    accumulator.Reload()

    tags = [
        graph_metadata.RUN_GRAPH_NAME,
        "1/images/image_summary",
        "2/images/image_summary",
        "3/images/image_summary",
    ]
    self.assertTagsEqual(
        accumulator.Tags(),
        {
            ea.TENSORS: tags,
            ea.GRAPH: True,
            ea.META_GRAPH: False,
        },
    )
    self.assertItemsEqual(
        accumulator.ActivePlugins(),
        [image_metadata.PLUGIN_NAME, graph_metadata.PLUGIN_NAME],
    )
def testEventsDiscardedPerTagAfterRestartForFileVersionLessThan2(self):
    """Tests that event discards after restart only affect the misordered tag.

    If a step value is observed to be lower than what was previously seen,
    this should force a discard of all previous items that are outdated, but
    only for the out of order tag. Other tags should remain unaffected.

    Only file versions < 2 use this out-of-order discard logic. Later
    versions discard events based on the step value of SessionLog.START.
    """
    warnings = []
    self.stubs.Set(logger, "warn", warnings.append)

    gen = _EventGenerator(self)
    acc = ea.EventAccumulator(gen)

    gen.AddEvent(
        event_pb2.Event(wall_time=0, step=0, file_version="brain.Event:1")
    )
    gen.AddScalarTensor("s1", wall_time=1, step=100, value=20)
    gen.AddScalarTensor("s2", wall_time=1, step=101, value=20)
    gen.AddScalarTensor("s1", wall_time=1, step=200, value=20)
    gen.AddScalarTensor("s2", wall_time=1, step=201, value=20)
    gen.AddScalarTensor("s1", wall_time=1, step=300, value=20)
    gen.AddScalarTensor("s2", wall_time=1, step=301, value=20)
    gen.AddScalarTensor("s1", wall_time=1, step=101, value=20)
    gen.AddScalarTensor("s3", wall_time=1, step=101, value=20)
    gen.AddScalarTensor("s1", wall_time=1, step=201, value=20)
    gen.AddScalarTensor("s1", wall_time=1, step=301, value=20)

    acc.Reload()
    ## Check that we have discarded 200 and 300 for s1
    self.assertEqual(
        [x.step for x in acc.Tensors("s1")], [100, 101, 201, 301]
    )

    ## Check that s1 discards do not affect s2 (written before out-of-order)
    ## or s3 (written after out-of-order).
    ## i.e. check that only events from the out of order tag are discarded
    self.assertEqual([x.step for x in acc.Tensors("s2")], [101, 201, 301])
    self.assertEqual([x.step for x in acc.Tensors("s3")], [101])
def testSummaryMetadata_FirstMetadataWins(self):
    logdir = self.get_temp_dir()
    summary_metadata_1 = summary_pb2.SummaryMetadata(
        display_name='current tagee',
        summary_description='no',
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name='outlet', content=b'120v'))
    self._writeMetadata(logdir, summary_metadata_1, nonce='1')
    acc = ea.EventAccumulator(logdir)
    acc.Reload()
    summary_metadata_2 = summary_pb2.SummaryMetadata(
        display_name='tagee of the future',
        summary_description='definitely not',
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name='plug', content=b'110v'))
    self._writeMetadata(logdir, summary_metadata_2, nonce='2')
    acc.Reload()

    self.assertProtoEquals(summary_metadata_1,
                           acc.SummaryMetadata('you_are_it'))
def testOrphanedDataNotDiscardedIfFlagUnset(self):
    """Tests that events are not discarded if purge_orphaned_data is false."""
    gen = _EventGenerator(self)
    acc = ea.EventAccumulator(gen, purge_orphaned_data=False)
    gen.AddEvent(
        tf.Event(wall_time=0, step=0, file_version='brain.Event:1'))
    gen.AddScalarTensor('s1', wall_time=1, step=100, value=20)
    gen.AddScalarTensor('s1', wall_time=1, step=200, value=20)
    gen.AddScalarTensor('s1', wall_time=1, step=300, value=20)
    acc.Reload()
    ## Check that number of items are what they should be
    self.assertEqual([x.step for x in acc.Tensors('s1')], [100, 200, 300])

    gen.AddScalarTensor('s1', wall_time=1, step=101, value=20)
    gen.AddScalarTensor('s1', wall_time=1, step=201, value=20)
    gen.AddScalarTensor('s1', wall_time=1, step=301, value=20)
    acc.Reload()
    ## Check that we have NOT discarded 200 and 300 from s1, since the
    ## purge_orphaned_data flag is unset.
    self.assertEqual([x.step for x in acc.Tensors('s1')],
                     [100, 200, 300, 101, 201, 301])
def testNewStyleImageSummary(self):
    """Verify processing of tensorboard.plugins.image.summary."""
    event_sink = _EventGenerator(self, zero_out_timestamps=True)
    writer = test_util.FileWriter(self.get_temp_dir())
    writer.event_writer = event_sink
    with self.test_session() as sess:
        ipt = tf.ones([10, 4, 4, 3], tf.uint8)
        # This is an interesting example, because the old tf.image_summary op
        # would throw an error here, because it would be tag reuse.
        # Using the tf node name instead allows argument re-use to the image
        # summary.
        with tf.name_scope('1'):
            image_summary.op('images', ipt, max_outputs=1)
        with tf.name_scope('2'):
            image_summary.op('images', ipt, max_outputs=2)
        with tf.name_scope('3'):
            image_summary.op('images', ipt, max_outputs=3)
        merged = tf.summary.merge_all()
        writer.add_graph(sess.graph)
        for i in xrange(10):
            summ = sess.run(merged)
            writer.add_summary(summ, global_step=i)

    accumulator = ea.EventAccumulator(event_sink)
    accumulator.Reload()

    tags = [
        u'1/images/image_summary',
        u'2/images/image_summary',
        u'3/images/image_summary',
    ]
    self.assertTagsEqual(accumulator.Tags(), {
        ea.TENSORS: tags,
        ea.GRAPH: True,
        ea.META_GRAPH: False,
    })