def testIsNotActiveDueToNoScalarsData(self):
  # Generate a directory with a layout but no scalars data.
  directory = os.path.join(self.logdir, "no_scalars")
  with test_util.FileWriterCache.get(directory) as writer:
    writer.add_summary(
        test_util.ensure_tb_summary_proto(
            summary.pb(self.logdir_layout)))
  local_plugin = self.createPlugin(directory)
  self.assertFalse(local_plugin.is_active())
def normalize_summary_pb(self, pb):
  """Pass `pb`'s `TensorProto` through a marshalling roundtrip.

  `TensorProto`s can be equal in value even if they are not identical
  in representation, because data can be stored in either the
  `tensor_content` field or the `${dtype}_value` field. This
  normalization ensures a canonical form, and should be used before
  comparing two `Summary`s for equality.
  """
  result = summary_pb2.Summary()
  if not isinstance(pb, summary_pb2.Summary):
    # `pb` can come from `pb_via_op`, which creates a TB Summary.
    pb = test_util.ensure_tb_summary_proto(pb)
  result.MergeFrom(pb)
  for value in result.value:
    if value.HasField('tensor'):
      new_tensor = tensor_util.make_tensor_proto(
          tensor_util.make_ndarray(value.tensor))
      value.ClearField('tensor')
      value.tensor.MergeFrom(new_tensor)
  return result
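# The following helper is an illustrative sketch, not part of the original
# test suite: it shows how `normalize_summary_pb` would typically be used,
# namely to normalize both sides before a protobuf equality assertion. The
# name `_assertSummariesEqualNormalized` is an assumption for illustration.
def _assertSummariesEqualNormalized(self, expected_pb, actual_pb):
  self.assertProtoEquals(
      self.normalize_summary_pb(expected_pb),
      self.normalize_summary_pb(actual_pb))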
def compute_and_check_summary_pb(self, name, audio, max_outputs=3,
                                 display_name=None, description=None,
                                 audio_tensor=None, feed_dict=None):
  """Use both `op` and `pb` to get a summary, asserting validity.

  "Validity" means that the `op` and `pb` functions must return the
  same protobufs, and also that each encoded audio value appears to be
  a valid WAV file. If either of these conditions fails, the test will
  immediately fail. Otherwise, the valid protobuf will be returned.

  Returns:
    A `Summary` protocol buffer.
  """
  if audio_tensor is None:
    audio_tensor = tf.constant(audio)
  op = summary.op(name, audio_tensor, self.samples_per_second,
                  max_outputs=max_outputs,
                  display_name=display_name, description=description)
  pb = summary.pb(name, audio, self.samples_per_second,
                  max_outputs=max_outputs,
                  display_name=display_name, description=description)
  pb = test_util.ensure_tb_summary_proto(pb)
  pb_via_op = self.pb_via_op(op, feed_dict=feed_dict)
  self.assertProtoEquals(pb, pb_via_op)
  audios = tensor_util.make_ndarray(pb.value[0].tensor)[:, 0].tolist()
  invalid_audios = [x for x in audios if not x.startswith(b'RIFF')]
  self.assertFalse(invalid_audios)
  return pb
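# Illustrative sketch (not a test that ships with this file) of how the
# helper above is typically driven: build a small batch of audio clips and
# let `compute_and_check_summary_pb` cross-check `op` against `pb`. The
# audio shape, tag name, and the assumption that `numpy` is imported as
# `np` are all illustrative, not fixtures defined elsewhere in this module.
def _example_compute_and_check_usage(self):
  # Shape is (clips, frames, channels).
  audio = np.zeros((3, 44100, 2), dtype=np.float32)
  pb = self.compute_and_check_summary_pb(
      'silence', audio, max_outputs=2,
      display_name='Silence', description='All-zero audio, for illustration.')
  # The audio summary packs all encoded clips into a single value.
  self.assertEqual(1, len(pb.value))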
def __init__(self, *args, **kwargs):
  super(CustomScalarsPluginTest, self).__init__(*args, **kwargs)
  self.logdir = os.path.join(self.get_temp_dir(), "logdir")
  os.makedirs(self.logdir)

  self.logdir_layout = layout_pb2.Layout(category=[
      layout_pb2.Category(
          title="cross entropy",
          chart=[
              layout_pb2.Chart(
                  title="cross entropy",
                  multiline=layout_pb2.MultilineChartContent(
                      tag=[r"cross entropy"],
                  ),
              ),
          ],
          closed=True,
      )
  ])
  self.foo_layout = layout_pb2.Layout(category=[
      layout_pb2.Category(
          title="mean biases",
          chart=[
              layout_pb2.Chart(
                  title="mean layer biases",
                  multiline=layout_pb2.MultilineChartContent(
                      tag=[
                          r"mean/layer0/biases",
                          r"mean/layer1/biases",
                      ],
                  ),
              ),
          ],
      ),
      layout_pb2.Category(
          title="std weights",
          chart=[
              layout_pb2.Chart(
                  title="stddev layer weights",
                  multiline=layout_pb2.MultilineChartContent(
                      tag=[r"stddev/layer\d+/weights"],
                  ),
              ),
          ],
      ),
      # A category with this name is also present in a layout for a
      # different run (the logdir run) and also contains a duplicate chart.
      layout_pb2.Category(
          title="cross entropy",
          chart=[
              layout_pb2.Chart(
                  title="cross entropy margin chart",
                  margin=layout_pb2.MarginChartContent(
                      series=[
                          layout_pb2.MarginChartContent.Series(
                              value="cross entropy",
                              lower="cross entropy lower",
                              upper="cross entropy upper",
                          ),
                      ],
                  ),
              ),
              layout_pb2.Chart(
                  title="cross entropy",
                  multiline=layout_pb2.MultilineChartContent(
                      tag=[r"cross entropy"],
                  ),
              ),
          ],
      ),
  ])

  # Generate test data.
  with test_util.FileWriterCache.get(
      os.path.join(self.logdir, "foo")) as writer:
    writer.add_summary(
        test_util.ensure_tb_summary_proto(summary.pb(self.foo_layout)))
    for step in range(4):
      writer.add_summary(
          test_util.ensure_tb_summary_proto(
              scalar_summary.pb("squares", step * step)),
          step,
      )

  with test_util.FileWriterCache.get(
      os.path.join(self.logdir, "bar")) as writer:
    for step in range(3):
      writer.add_summary(
          test_util.ensure_tb_summary_proto(
              scalar_summary.pb("increments", step + 1)),
          step,
      )

  # The '.' run lacks scalar data but has a layout.
  with test_util.FileWriterCache.get(self.logdir) as writer:
    writer.add_summary(
        test_util.ensure_tb_summary_proto(
            summary.pb(self.logdir_layout)))

  self.plugin = self.createPlugin(self.logdir)
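# Illustrative sketch, not part of the original tests: the `tag` entries in
# `MultilineChartContent` are regular expressions, so the pattern
# r"stddev/layer\d+/weights" used in `foo_layout` above matches any per-layer
# weight tag. The method name here is hypothetical.
def _example_multiline_tag_regex(self):
  import re  # Stdlib; imported locally to keep the sketch self-contained.
  pattern = re.compile(r"stddev/layer\d+/weights")
  self.assertIsNotNone(pattern.match("stddev/layer0/weights"))
  self.assertIsNotNone(pattern.match("stddev/layer12/weights"))
  self.assertIsNone(pattern.match("mean/layer0/biases"))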
def scalar(self, *args, **kwargs):
  return test_util.ensure_tb_summary_proto(summary.pb(*args, **kwargs))