Example #1

    def testPluginTagToContent_PluginsCannotJumpOnTheBandwagon(self):
        # If there are multiple `SummaryMetadata` for a given tag, and the
        # set of plugins in the `plugin_data` of the second is different from
        # that of the first, then the second set should be ignored.
        logdir = self.get_temp_dir()
        summary_metadata_1 = summary_pb2.SummaryMetadata(
            display_name="current tagee",
            summary_description="no",
            plugin_data=summary_pb2.SummaryMetadata.PluginData(
                plugin_name="outlet", content=b"120v"
            ),
        )
        self._writeMetadata(logdir, summary_metadata_1, nonce="1")
        acc = ea.EventAccumulator(logdir)
        acc.Reload()
        summary_metadata_2 = summary_pb2.SummaryMetadata(
            display_name="tagee of the future",
            summary_description="definitely not",
            plugin_data=summary_pb2.SummaryMetadata.PluginData(
                plugin_name="plug", content=b"110v"
            ),
        )
        self._writeMetadata(logdir, summary_metadata_2, nonce="2")
        acc.Reload()

        self.assertEqual(
            acc.PluginTagToContent("outlet"), {"you_are_it": b"120v"}
        )
        with six.assertRaisesRegex(self, KeyError, "plug"):
            acc.PluginTagToContent("plug")
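
The `_writeMetadata` helper these accumulator tests call is not shown in any example here. A plausible reconstruction, assuming the `tensor_util` and `test_util` helpers from the same codebase, writes a single tensor summary under the fixed tag "you_are_it", which is why the assertions above look up that tag:

    def _writeMetadata(self, logdir, summary_metadata, nonce=""):
        # Hypothetical sketch: write one summary event carrying
        # `summary_metadata` to `logdir`. `nonce` varies the event file
        # name so each call produces a separate file.
        summary = summary_pb2.Summary()
        summary.value.add(
            tensor=tensor_util.make_tensor_proto(["po", "ta", "to"]),
            tag="you_are_it",
            metadata=summary_metadata,
        )
        writer = test_util.FileWriter(logdir, filename_suffix=nonce)
        writer.add_summary(summary.SerializeToString())
        writer.close()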

Example #2

  def testPluginTagToContent_PluginsCannotJumpOnTheBandwagon(self):
    # If there are multiple `SummaryMetadata` for a given tag, and the
    # set of plugins in the `plugin_data` of the second is different from
    # that of the first, then the second set should be ignored.
    logdir = self.get_temp_dir()
    summary_metadata_1 = summary_pb2.SummaryMetadata(
        display_name='current tagee',
        summary_description='no',
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name='outlet', content=b'120v'))
    self._writeMetadata(logdir, summary_metadata_1, nonce='1')
    acc = ea.EventAccumulator(logdir)
    acc.Reload()
    summary_metadata_2 = summary_pb2.SummaryMetadata(
        display_name='tagee of the future',
        summary_description='definitely not',
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name='plug', content=b'110v'))
    self._writeMetadata(logdir, summary_metadata_2, nonce='2')
    acc.Reload()

    self.assertEqual(acc.PluginTagToContent('outlet'),
                     {'you_are_it': b'120v'})
    with six.assertRaisesRegex(self, KeyError, 'plug'):
      acc.PluginTagToContent('plug')

Example #3

    def testSummaryMetadata_FirstMetadataWins(self):
        logdir = self.get_temp_dir()
        summary_metadata_1 = summary_pb2.SummaryMetadata(
            display_name="current tagee",
            summary_description="no",
            plugin_data=summary_pb2.SummaryMetadata.PluginData(
                plugin_name="outlet", content=b"120v"
            ),
        )
        self._writeMetadata(logdir, summary_metadata_1, nonce="1")
        acc = ea.EventAccumulator(logdir)
        acc.Reload()
        summary_metadata_2 = summary_pb2.SummaryMetadata(
            display_name="tagee of the future",
            summary_description="definitely not",
            plugin_data=summary_pb2.SummaryMetadata.PluginData(
                plugin_name="plug", content=b"110v"
            ),
        )
        self._writeMetadata(logdir, summary_metadata_2, nonce="2")
        acc.Reload()

        self.assertProtoEquals(
            summary_metadata_1, acc.SummaryMetadata("you_are_it")
        )

Example #4

    def setUp(self):
        super(MultiplexerDataProviderTest, self).setUp()
        self.logdir = self.get_temp_dir()

        logdir = os.path.join(self.logdir, "polynomials")
        with tf.summary.create_file_writer(logdir).as_default():
            for i in xrange(10):
                scalar_summary.scalar("square",
                                      i**2,
                                      step=2 * i,
                                      description="boxen")
                scalar_summary.scalar("cube", i**3, step=3 * i)

        logdir = os.path.join(self.logdir, "waves")
        with tf.summary.create_file_writer(logdir).as_default():
            for i in xrange(10):
                scalar_summary.scalar("sine", tf.sin(float(i)), step=i)
                scalar_summary.scalar("square",
                                      tf.sign(tf.sin(float(i))),
                                      step=i)
                # Summary with rank-0 data but not owned by the scalars plugin.
                metadata = summary_pb2.SummaryMetadata()
                metadata.plugin_data.plugin_name = "marigraphs"
                metadata.data_class = summary_pb2.DATA_CLASS_SCALAR
                tf.summary.write("high_tide",
                                 tensor=i,
                                 step=i,
                                 metadata=metadata)
                # Summary with rank-1 data of scalar data class (bad!).
                metadata = summary_pb2.SummaryMetadata()
                metadata.plugin_data.plugin_name = "greetings"
                metadata.data_class = summary_pb2.DATA_CLASS_SCALAR
                tf.summary.write("bad",
                                 tensor=[i, i],
                                 step=i,
                                 metadata=metadata)

        logdir = os.path.join(self.logdir, "lebesgue")
        with tf.summary.create_file_writer(logdir).as_default():
            data = [
                ("very smooth", (0.0, 0.25, 0.5, 0.75, 1.0), "uniform"),
                ("very smoothn't", (0.0, 0.01, 0.99, 1.0), "bimodal"),
            ]
            for (description, distribution, name) in data:
                tensor = tf.constant([distribution], dtype=tf.float64)
                for i in xrange(1, 11):
                    histogram_summary.histogram(name,
                                                tensor * i,
                                                step=i,
                                                description=description)

Example #5

    def _testTFSummaryTensor_SizeGuidance(
        self, plugin_name, tensor_size_guidance, steps, expected_count
    ):
        event_sink = _EventGenerator(self, zero_out_timestamps=True)
        writer = test_util.FileWriter(self.get_temp_dir())
        writer.event_writer = event_sink
        with tf.compat.v1.Graph().as_default():
            with self.test_session() as sess:
                summary_metadata = summary_pb2.SummaryMetadata(
                    plugin_data=summary_pb2.SummaryMetadata.PluginData(
                        plugin_name=plugin_name, content=b"{}"
                    )
                )
                tf.compat.v1.summary.tensor_summary(
                    "scalar",
                    tf.constant(1.0),
                    summary_metadata=summary_metadata,
                )
                merged = tf.compat.v1.summary.merge_all()
                for step in xrange(steps):
                    writer.add_summary(sess.run(merged), global_step=step)

        accumulator = ea.EventAccumulator(
            event_sink, tensor_size_guidance=tensor_size_guidance
        )
        accumulator.Reload()

        tensors = accumulator.Tensors("scalar")
        self.assertEqual(len(tensors), expected_count)
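
A concrete test would drive this helper with a `tensor_size_guidance` dict keyed by plugin name; the plugin name and counts below are made up for illustration:

    def testTFSummaryTensor_sizeGuidance(self):
        # Retain only 2 of the 10 written steps for this plugin's tensors.
        self._testTFSummaryTensor_SizeGuidance(
            plugin_name="jabberwocky",
            tensor_size_guidance={"jabberwocky": 2},
            steps=10,
            expected_count=2,
        )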

Example #6

 def emit_scalar(
     self,
     *,
     plugin_name,
     tag,
     data,
     step,
     wall_time,
     tag_metadata=None,
     description=None,
 ):
     """See `Output`."""
     # TODO(#4581): cache summary metadata to emit only once.
     summary_metadata = summary_pb2.SummaryMetadata(
         plugin_data=summary_pb2.SummaryMetadata.PluginData(
             plugin_name=plugin_name, content=tag_metadata
         ),
         summary_description=description,
         data_class=summary_pb2.DataClass.DATA_CLASS_SCALAR,
     )
     tensor_proto = tensor_util.make_tensor_proto(data)
     event = event_pb2.Event(wall_time=wall_time, step=step)
     event.summary.value.add(
         tag=tag, tensor=tensor_proto, metadata=summary_metadata
     )
     self._ev_writer.add_event(event)
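
A call site for `emit_scalar` might look like the following; `output` and the argument values are hypothetical, since the `Output` implementation this method belongs to is not shown:

import time

output.emit_scalar(
    plugin_name="scalars",
    tag="loss",
    data=0.25,
    step=7,
    wall_time=time.time(),
    description="training loss",
)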

Example #7

 def test_experiment_with_experiment_tag(self):
     experiment = """
         description: 'Test experiment'
         metric_infos: [
           { name: { tag: 'current_temp' } }
         ]
     """
     run = "exp"
     tag = metadata.EXPERIMENT_TAG
     m = summary_pb2.SummaryMetadata()
     m.data_class = summary_pb2.DATA_CLASS_TENSOR
     m.plugin_data.plugin_name = metadata.PLUGIN_NAME
     m.plugin_data.content = self._serialized_plugin_data(
         DATA_TYPE_EXPERIMENT, experiment)
     self._mock_multiplexer.AllSummaryMetadata.side_effect = None
     self._mock_multiplexer.AllSummaryMetadata.return_value = {
         run: {
             tag: m
         }
     }
     ctxt = backend_context.Context(self._mock_tb_context)
     request_ctx = context.RequestContext()
     self.assertProtoEquals(
         experiment,
         ctxt.experiment_from_metadata(
             request_ctx, "123", ctxt.hparams_metadata(request_ctx, "123")),
     )

Example #8

def _migrate_value(value, initial_metadata):
    """Convert an old value to a stream of new values. May mutate."""
    metadata = initial_metadata.get(value.tag)
    initial = False
    if metadata is None:
        initial = True
        # Retain a copy of the initial metadata, so that even after we
        # update its data class we know whether to also transform later
        # events in this time series.
        metadata = summary_pb2.SummaryMetadata()
        metadata.CopyFrom(value.metadata)
        initial_metadata[value.tag] = metadata
    if metadata.data_class != summary_pb2.DATA_CLASS_UNKNOWN:
        return (value, )
    plugin_name = metadata.plugin_data.plugin_name
    if plugin_name == histograms_metadata.PLUGIN_NAME:
        return _migrate_histogram_value(value)
    if plugin_name == images_metadata.PLUGIN_NAME:
        return _migrate_image_value(value)
    if plugin_name == audio_metadata.PLUGIN_NAME:
        return _migrate_audio_value(value)
    if plugin_name == scalars_metadata.PLUGIN_NAME:
        return _migrate_scalar_value(value)
    if plugin_name == text_metadata.PLUGIN_NAME:
        return _migrate_text_value(value)
    if plugin_name == hparams_metadata.PLUGIN_NAME:
        return _migrate_hparams_value(value)
    if plugin_name == pr_curves_metadata.PLUGIN_NAME:
        return _migrate_pr_curve_value(value)
    return (value, )
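
Callers are expected to thread a single `initial_metadata` dict through an entire event stream, so that every later value of a tag is handled consistently with the first value seen; a minimal sketch, with `events` standing in for one run's event stream:

initial_metadata = {}  # tag -> first-seen SummaryMetadata
for event in events:
    migrated_values = []
    for value in event.summary.value:
        migrated_values.extend(_migrate_value(value, initial_metadata))
    del event.summary.value[:]
    event.summary.value.extend(migrated_values)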

Example #9

def create_summary_metadata(hparams_plugin_data_pb):
    """Returns a summary metadata for the HParams plugin.

    Returns a summary_pb2.SummaryMetadata holding a copy of the given
    HParamsPluginData message in its plugin_data.content field.
    Sets the version field of the hparams_plugin_data_pb copy to
    PLUGIN_DATA_VERSION.

    Args:
      hparams_plugin_data_pb: the HParamsPluginData protobuffer to use.
    """
    if not isinstance(
        hparams_plugin_data_pb, plugin_data_pb2.HParamsPluginData
    ):
        raise TypeError(
            "Needed an instance of plugin_data_pb2.HParamsPluginData."
            " Got: %s" % type(hparams_plugin_data_pb)
        )
    content = plugin_data_pb2.HParamsPluginData()
    content.CopyFrom(hparams_plugin_data_pb)
    content.version = PLUGIN_DATA_VERSION
    return summary_pb2.SummaryMetadata(
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name=PLUGIN_NAME, content=content.SerializeToString()
        )
    )
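
Usage would look roughly as follows; treat `api_pb2.Experiment` and its `description` field as assumptions about the hparams plugin's protos:

plugin_data = plugin_data_pb2.HParamsPluginData(
    experiment=api_pb2.Experiment(description="learning-rate sweep")
)
metadata = create_summary_metadata(plugin_data)
# metadata.plugin_data.content now holds the serialized copy, with its
# version field forced to PLUGIN_DATA_VERSION.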

Example #10

  def setUp(self):
    super(MultiplexerDataProviderTest, self).setUp()
    self.logdir = self.get_temp_dir()

    logdir = os.path.join(self.logdir, "polynomials")
    with tf.summary.create_file_writer(logdir).as_default():
      for i in xrange(10):
        scalar_summary.scalar("square", i ** 2, step=2 * i, description="boxen")
        scalar_summary.scalar("cube", i ** 3, step=3 * i)

    logdir = os.path.join(self.logdir, "waves")
    with tf.summary.create_file_writer(logdir).as_default():
      for i in xrange(10):
        scalar_summary.scalar("sine", tf.sin(float(i)), step=i)
        scalar_summary.scalar("square", tf.sign(tf.sin(float(i))), step=i)
        # Summary with rank-0 data but not owned by the scalars plugin.
        metadata = summary_pb2.SummaryMetadata()
        metadata.plugin_data.plugin_name = "marigraphs"
        tf.summary.write("high_tide", tensor=i, step=i, metadata=metadata)

    logdir = os.path.join(self.logdir, "pictures")
    with tf.summary.create_file_writer(logdir).as_default():
      purple = tf.constant([[[255, 0, 255]]], dtype=tf.uint8)
      for i in xrange(1, 11):
        image_summary.image("purple", [tf.tile(purple, [i, i, 1])], step=i)

Example #11

def scalar_metadata(display_name):
    """Makes a scalar metadata proto, for constructing expected requests."""
    metadata = summary_pb2.SummaryMetadata(
        display_name=display_name, data_class=summary_pb2.DATA_CLASS_SCALAR
    )
    metadata.plugin_data.plugin_name = "scalars"
    return metadata

Example #12

 def __call__(self):
     entries = []
     for tag, value in self.items():
         if isinstance(value, DelayedScalar):
             entries.append(
                 summary_pb2.Summary.Value(tag=tag, simple_value=value()))
         elif isinstance(value, Image):
             image_summary = summary_pb2.Summary.Image(
                 encoded_image_string=value.png,
                 colorspace=value.shape[0],
                 height=value.shape[1],
                 width=value.shape[2])
             entries.append(
                 summary_pb2.Summary.Value(tag=tag, image=image_summary))
         elif isinstance(value, Text):
             metadata = summary_pb2.SummaryMetadata(
                 plugin_data=summary_pb2.SummaryMetadata.PluginData(
                     plugin_name='text'))
             entries.append(
                 summary_pb2.Summary.Value(
                     tag=tag,
                     metadata=metadata,
                     tensor=make_tensor_proto(
                         values=value.text.encode('utf-8'), shape=(1, ))))
         else:
             raise NotImplementedError(tag, value)
     return summary_pb2.Summary(value=entries)

Example #13

def _create_summary_metadata(description):
    return summary_pb2.SummaryMetadata(
        summary_description=description,
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name=metadata.PLUGIN_NAME,
            content=b"",  # no need for summary-specific metadata
        ),
    )

Example #14

def create_summary_metadata():
    """Create a `SummaryMetadata` proto for custom scalar plugin data.

  Returns:
    A `summary_pb2.SummaryMetadata` protobuf object.
  """
    return summary_pb2.SummaryMetadata(
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name=PLUGIN_NAME))

Example #15

def create_summary_metadata(description):
    content = plugin_data_pb2.NpmiPluginData(version=PROTO_VERSION)
    return summary_pb2.SummaryMetadata(
        summary_description=description,
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name=PLUGIN_NAME,
            content=content.SerializeToString(),
        ),
        data_class=summary_pb2.DATA_CLASS_TENSOR,
    )

Example #16

 def testSummaryMetadata(self):
     logdir = self.get_temp_dir()
     summary_metadata = summary_pb2.SummaryMetadata(
         display_name='current tagee', summary_description='no')
     summary_metadata.plugin_data.plugin_name = 'outlet'
     self._writeMetadata(logdir, summary_metadata)
     acc = ea.EventAccumulator(logdir)
     acc.Reload()
     self.assertProtoEquals(summary_metadata,
                            acc.SummaryMetadata('you_are_it'))

Example #17

  def testSummaryMetadata_FirstMetadataWins(self):
    logdir = self.get_temp_dir()
    summary_metadata_1 = summary_pb2.SummaryMetadata(
        display_name='current tagee',
        summary_description='no',
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name='outlet', content=b'120v'))
    self._writeMetadata(logdir, summary_metadata_1, nonce='1')
    acc = ea.EventAccumulator(logdir)
    acc.Reload()
    summary_metadata_2 = summary_pb2.SummaryMetadata(
        display_name='tagee of the future',
        summary_description='definitely not',
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name='plug', content=b'110v'))
    self._writeMetadata(logdir, summary_metadata_2, nonce='2')
    acc.Reload()

    self.assertProtoEquals(summary_metadata_1,
                           acc.SummaryMetadata('you_are_it'))

Example #18

def create_summary_metadata(display_name, description):
    """Create a `summary_pb2.SummaryMetadata` proto for bar plugin data.

  Returns:
    A `summary_pb2.SummaryMetadata` protobuf object.
  """
    content = plugin_data_pb2.BarPluginData(version=PROTO_VERSION)
    return summary_pb2.SummaryMetadata(
        display_name=display_name,
        summary_description=description,
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name=PLUGIN_NAME, content=content.SerializeToString()))

Example #19

 def testSummaryMetadata(self):
     logdir = self.get_temp_dir()
     summary_metadata = summary_pb2.SummaryMetadata(
         display_name="current tagee",
         summary_description="no",
         plugin_data=summary_pb2.SummaryMetadata.PluginData(
             plugin_name="outlet"),
     )
     self._writeMetadata(logdir, summary_metadata)
     acc = ea.EventAccumulator(logdir)
     acc.Reload()
     self.assertProtoEquals(summary_metadata,
                            acc.SummaryMetadata("you_are_it"))

Example #20

def create_summary_metadata(display_name, description, encoding):
    """Create a `SummaryMetadata` proto for audio plugin data.

  Returns:
    A `SummaryMetadata` protobuf object.
  """
    content = plugin_data_pb2.AudioPluginData(version=PROTO_VERSION,
                                              encoding=encoding)
    metadata = summary_pb2.SummaryMetadata(
        display_name=display_name,
        summary_description=description,
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name=PLUGIN_NAME, content=content.SerializeToString()))
    return metadata
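
A usage sketch, assuming the audio plugin's `AudioPluginData` proto exposes a `WAV` encoding value:

metadata = create_summary_metadata(
    display_name="spoken digits",
    description="decoded model output",
    encoding=plugin_data_pb2.AudioPluginData.WAV,
)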

Example #21

 def test_fully_populated_tensor(self):
     metadata = summary_pb2.SummaryMetadata(
         plugin_data=summary_pb2.SummaryMetadata.PluginData(
             plugin_name='font_of_wisdom', content=b'adobe_garamond'))
     op = tf.summary.tensor_summary(
         name='tensorpocalypse',
         tensor=tf.constant([[0.0, 2.0], [float('inf'),
                                          float('nan')]]),
         display_name='TENSORPOCALYPSE',
         summary_description='look on my works ye mighty and despair',
         summary_metadata=metadata)
     value = self._value_from_op(op)
     assert value.HasField('tensor'), value
     self._assert_noop(value)

Example #22

 def test_fully_populated_tensor(self):
     with tf.compat.v1.Graph().as_default():
         metadata = summary_pb2.SummaryMetadata(
             plugin_data=summary_pb2.SummaryMetadata.PluginData(
                 plugin_name="font_of_wisdom", content=b"adobe_garamond"))
         op = tf.compat.v1.summary.tensor_summary(
             name="tensorpocalypse",
             tensor=tf.constant([[0.0, 2.0], [float("inf"),
                                              float("nan")]]),
             display_name="TENSORPOCALYPSE",
             summary_description="look on my works ye mighty and despair",
             summary_metadata=metadata,
         )
         value = self._value_from_op(op)
     assert value.HasField("tensor"), value
     self._assert_noop(value)

Example #23

def create_summary_metadata(
    name,
    display_name,
    content_type,
    components,
    shape,
    description=None,
    json_config=None,
):
    """Creates summary metadata which defined at MeshPluginData proto.

    Arguments:
      name: Original merged (summaries of different types) summary name.
      display_name: The display name used in TensorBoard.
      content_type: Value from MeshPluginData.ContentType enum describing data.
      components: Bitmask representing present parts (vertices, colors, etc.) that
        belong to the summary.
      shape: list of dimensions sizes of the tensor.
      description: The description to show in TensorBoard.
      json_config: A string, JSON-serialized dictionary of ThreeJS classes
        configuration.

    Returns:
      A `summary_pb2.SummaryMetadata` protobuf object.
    """
    # Shape should be at least BxNx3, where B represents the batch dimensions
    # and N the number of points, each with x, y, z coordinates.
    if len(shape) != 3:
        raise ValueError("Tensor shape should be of shape BxNx3, but got %s." %
                         str(shape))
    mesh_plugin_data = plugin_data_pb2.MeshPluginData(
        version=get_current_version(),
        name=name,
        content_type=content_type,
        components=components,
        shape=shape,
        json_config=json_config,
    )
    content = mesh_plugin_data.SerializeToString()
    return summary_pb2.SummaryMetadata(
        display_name=display_name,  # Will not be used in TensorBoard UI.
        summary_description=description,
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name=PLUGIN_NAME, content=content),
    )
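
For illustration, a call for a batch of 1024-point clouds; `VERTEX` is assumed to come from the `MeshPluginData.ContentType` enum, and the zero `components` bitmask is a placeholder:

metadata = create_summary_metadata(
    name="point_cloud",
    display_name="Point cloud",
    content_type=plugin_data_pb2.MeshPluginData.VERTEX,
    components=0,  # placeholder: no component bits set
    shape=[1, 1024, 3],  # B=1 batch, N=1024 points, x/y/z
    description="lidar sweep",
)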

Example #24

        def stream_experiment_data(request, **kwargs):
            self.assertEqual(kwargs["metadata"], grpc_util.version_metadata())

            tag = "__default_graph__"
            for run in ("train", "test"):
                response = export_service_pb2.StreamExperimentDataResponse()
                response.run_name = run
                response.tag_name = tag
                display_name = "%s:%s" % (request.experiment_id, tag)
                response.tag_metadata.CopyFrom(
                    summary_pb2.SummaryMetadata(
                        data_class=summary_pb2.DATA_CLASS_BLOB_SEQUENCE
                    )
                )
                for step in range(1):
                    response.blob_sequences.steps.append(step)
                    response.blob_sequences.wall_times.add(
                        seconds=1571084520 + step, nanos=862939144
                    )
                    blob_sequence = blob_pb2.BlobSequence()
                    if run == "train":
                        # A finished blob sequence.
                        blob = blob_pb2.Blob(
                            blob_id="%s_blob" % run,
                            state=blob_pb2.BlobState.BLOB_STATE_CURRENT,
                        )
                        blob_sequence.entries.append(
                            blob_pb2.BlobSequenceEntry(blob=blob)
                        )
                        # An unfinished blob sequence.
                        blob = blob_pb2.Blob(
                            state=blob_pb2.BlobState.BLOB_STATE_UNFINALIZED,
                        )
                        blob_sequence.entries.append(
                            blob_pb2.BlobSequenceEntry(blob=blob)
                        )
                    elif run == "test":
                        blob_sequence.entries.append(
                            # `blob` unspecified: a hole in the blob sequence.
                            blob_pb2.BlobSequenceEntry()
                        )
                    response.blob_sequences.values.append(blob_sequence)
                yield response

Example #25

def create_summary_metadata(display_name, description, num_thresholds):
  """Create a `summary_pb2.SummaryMetadata` proto for pr_curves plugin data.

  Arguments:
    display_name: The display name used in TensorBoard.
    description: The description to show in TensorBoard.
    num_thresholds: The number of thresholds to use for PR curves.

  Returns:
    A `summary_pb2.SummaryMetadata` protobuf object.
  """
  pr_curve_plugin_data = plugin_data_pb2.PrCurvePluginData(
      version=PROTO_VERSION, num_thresholds=num_thresholds)
  content = pr_curve_plugin_data.SerializeToString()
  return summary_pb2.SummaryMetadata(
      display_name=display_name,
      summary_description=description,
      plugin_data=summary_pb2.SummaryMetadata.PluginData(
          plugin_name=PLUGIN_NAME,
          content=content))
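
A typical call, with an illustrative threshold count; the count travels inside `metadata.plugin_data.content` as a serialized `PrCurvePluginData` message:

metadata = create_summary_metadata(
    display_name="PR curve",
    description="validation precision vs. recall",
    num_thresholds=201,
)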

Example #26

def create_summary_metadata(display_name,
                            description,
                            *,
                            converted_to_tensor=None):
    """Create a `summary_pb2.SummaryMetadata` proto for image plugin data.

    Returns:
      A `summary_pb2.SummaryMetadata` protobuf object.
    """
    content = plugin_data_pb2.ImagePluginData(
        version=PROTO_VERSION,
        converted_to_tensor=converted_to_tensor,
    )
    metadata = summary_pb2.SummaryMetadata(
        display_name=display_name,
        summary_description=description,
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name=PLUGIN_NAME, content=content.SerializeToString()),
    )
    return metadata
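
Usage, with made-up names; note that `converted_to_tensor` is keyword-only:

metadata = create_summary_metadata(
    "training batch",
    "first few input images",
    converted_to_tensor=True,
)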

Example #27

 def _mock_all_summary_metadata(self):
     result = {}
     hparams_content = {
         "": {
             metadata.EXPERIMENT_TAG:
             self._serialized_plugin_data(
                 DATA_TYPE_EXPERIMENT,
                 """
                 description: 'Test experiment'
                 user: '******'
                 hparam_infos: [
                   {
                     name: 'initial_temp'
                     type: DATA_TYPE_FLOAT64
                   },
                   {
                     name: 'final_temp'
                     type: DATA_TYPE_FLOAT64
                   },
                   { name: 'string_hparam' },
                   { name: 'bool_hparam' },
                   { name: 'optional_string_hparam' }
                 ]
                 metric_infos: [
                   { name: { tag: 'current_temp' } },
                   { name: { tag: 'delta_temp' } },
                   { name: { tag: 'optional_metric' } }
                 ]
                 """,
             )
         },
         "session_1": {
             metadata.SESSION_START_INFO_TAG:
             self._serialized_plugin_data(
                 DATA_TYPE_SESSION_START_INFO,
                 """
                 hparams:{ key: 'initial_temp' value: { number_value: 270 } },
                 hparams:{ key: 'final_temp' value: { number_value: 150 } },
                 hparams:{
                   key: 'string_hparam' value: { string_value: 'a string' }
                 },
                 hparams:{ key: 'bool_hparam' value: { bool_value: true } }
                 group_name: 'group_1'
                 start_time_secs: 314159
                 """,
             ),
             metadata.SESSION_END_INFO_TAG:
             self._serialized_plugin_data(
                 DATA_TYPE_SESSION_END_INFO,
                 """
                 status: STATUS_SUCCESS
                 end_time_secs: 314164
                 """,
             ),
         },
         "session_2": {
             metadata.SESSION_START_INFO_TAG:
             self._serialized_plugin_data(
                 DATA_TYPE_SESSION_START_INFO,
                 """
                 hparams:{ key: 'initial_temp' value: { number_value: 280 } },
                 hparams:{ key: 'final_temp' value: { number_value: 100 } },
                 hparams:{
                   key: 'string_hparam' value: { string_value: 'AAAAA' }
                 },
                 hparams:{ key: 'bool_hparam' value: { bool_value: false } }
                 group_name: 'group_2'
                 start_time_secs: 314159
                 """,
             ),
             metadata.SESSION_END_INFO_TAG:
             self._serialized_plugin_data(
                 DATA_TYPE_SESSION_END_INFO,
                 """
                 status: STATUS_SUCCESS
                 end_time_secs: 314164
                 """,
             ),
         },
         "session_3": {
             metadata.SESSION_START_INFO_TAG:
             self._serialized_plugin_data(
                 DATA_TYPE_SESSION_START_INFO,
                 """
                 hparams:{ key: 'initial_temp' value: { number_value: 280 } },
                 hparams:{ key: 'final_temp' value: { number_value: 100 } },
                 hparams:{
                   key: 'string_hparam' value: { string_value: 'AAAAA' }
                 },
                 hparams:{ key: 'bool_hparam' value: { bool_value: false } }
                 group_name: 'group_2'
                 start_time_secs: 314159
                 """,
             ),
             metadata.SESSION_END_INFO_TAG:
             self._serialized_plugin_data(
                 DATA_TYPE_SESSION_END_INFO,
                 """
                 status: STATUS_FAILURE
                 end_time_secs: 314164
                 """,
             ),
         },
         "session_4": {
             metadata.SESSION_START_INFO_TAG:
             self._serialized_plugin_data(
                 DATA_TYPE_SESSION_START_INFO,
                 """
                 hparams:{ key: 'initial_temp' value: { number_value: 300 } },
                 hparams:{ key: 'final_temp' value: { number_value: 120 } },
                 hparams:{
                   key: 'string_hparam' value: { string_value: 'a string_3' }
                 },
                 hparams:{ key: 'bool_hparam' value: { bool_value: true } }
                 hparams:{
                   key: 'optional_string_hparam' value { string_value: 'BB' }
                 },
                 group_name: 'group_3'
                 start_time_secs: 314159
                 """,
             ),
             metadata.SESSION_END_INFO_TAG:
             self._serialized_plugin_data(
                 DATA_TYPE_SESSION_END_INFO,
                 """
                 status: STATUS_UNKNOWN
                 end_time_secs: 314164
                 """,
             ),
         },
         "session_5": {
             metadata.SESSION_START_INFO_TAG:
             self._serialized_plugin_data(
                 DATA_TYPE_SESSION_START_INFO,
                 """
                 hparams:{ key: 'initial_temp' value: { number_value: 280 } },
                 hparams:{ key: 'final_temp' value: { number_value: 100 } },
                 hparams:{
                   key: 'string_hparam' value: { string_value: 'AAAAA' }
                 },
                 hparams:{ key: 'bool_hparam' value: { bool_value: false } }
                 group_name: 'group_2'
                 start_time_secs: 314159
                 """,
             ),
             metadata.SESSION_END_INFO_TAG:
             self._serialized_plugin_data(
                 DATA_TYPE_SESSION_END_INFO,
                 """
                 status: STATUS_SUCCESS
                 end_time_secs: 314164
                 """,
             ),
         },
     }
     scalars_content = {
         "session_1": {
             "current_temp": b"",
             "delta_temp": b"",
             "optional_metric": b"",
         },
         "session_2": {
             "current_temp": b"",
             "delta_temp": b""
         },
         "session_3": {
             "current_temp": b"",
             "delta_temp": b""
         },
         "session_4": {
             "current_temp": b"",
             "delta_temp": b""
         },
         "session_5": {
             "current_temp": b"",
             "delta_temp": b""
         },
     }
     for (run, tag_to_content) in hparams_content.items():
         result.setdefault(run, {})
         for (tag, content) in tag_to_content.items():
             m = summary_pb2.SummaryMetadata()
             m.data_class = summary_pb2.DATA_CLASS_TENSOR
             m.plugin_data.plugin_name = metadata.PLUGIN_NAME
             m.plugin_data.content = content
             result[run][tag] = m
     for (run, tag_to_content) in scalars_content.items():
         result.setdefault(run, {})
         for (tag, content) in tag_to_content.items():
             m = summary_pb2.SummaryMetadata()
             m.data_class = summary_pb2.DATA_CLASS_SCALAR
             m.plugin_data.plugin_name = scalars_metadata.PLUGIN_NAME
             m.plugin_data.content = content
             result[run][tag] = m
     return result
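
A test would then wire this map into the mocked multiplexer the same way Example #7 does with `AllSummaryMetadata`:

self._mock_multiplexer.AllSummaryMetadata.side_effect = None
self._mock_multiplexer.AllSummaryMetadata.return_value = (
    self._mock_all_summary_metadata()
)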

Example #28

def CreateSummaryMetadata(description=None):
  return summary_pb2.SummaryMetadata(
      summary_description=description,
      plugin_data=summary_pb2.SummaryMetadata.PluginData(
          plugin_name=PLUGIN_NAME))

Example #29

    def test_e2e_success_case_with_blob_sequence_data(self):
        """Covers exporting of complete and incomplete blob sequences

        as well as rpc error during blob streaming.
        """
        mock_api_client = self._create_mock_api_client()

        def stream_experiments(request, **kwargs):
            del request  # unused
            self.assertEqual(kwargs["metadata"], grpc_util.version_metadata())

            response = export_service_pb2.StreamExperimentsResponse()
            response.experiments.add(experiment_id="123")
            yield response
            response = export_service_pb2.StreamExperimentsResponse()
            response.experiments.add(experiment_id="456")
            yield response

        def stream_experiment_data(request, **kwargs):
            self.assertEqual(kwargs["metadata"], grpc_util.version_metadata())

            tag = "__default_graph__"
            for run in ("train", "test"):
                response = export_service_pb2.StreamExperimentDataResponse()
                response.run_name = run
                response.tag_name = tag
                display_name = "%s:%s" % (request.experiment_id, tag)
                response.tag_metadata.CopyFrom(
                    summary_pb2.SummaryMetadata(
                        data_class=summary_pb2.DATA_CLASS_BLOB_SEQUENCE))
                for step in range(1):
                    response.blob_sequences.steps.append(step)
                    response.blob_sequences.wall_times.add(seconds=1571084520 +
                                                           step,
                                                           nanos=862939144)
                    blob_sequence = blob_pb2.BlobSequence()
                    if run == "train":
                        # A finished blob sequence.
                        blob = blob_pb2.Blob(
                            blob_id="%s_blob" % run,
                            state=blob_pb2.BlobState.BLOB_STATE_CURRENT,
                        )
                        blob_sequence.entries.append(
                            blob_pb2.BlobSequenceEntry(blob=blob))
                        # An unfinished blob sequence.
                        blob = blob_pb2.Blob(
                            state=blob_pb2.BlobState.BLOB_STATE_UNFINALIZED, )
                        blob_sequence.entries.append(
                            blob_pb2.BlobSequenceEntry(blob=blob))
                    elif run == "test":
                        blob_sequence.entries.append(
                            # `blob` unspecified: a hole in the blob sequence.
                            blob_pb2.BlobSequenceEntry())
                    response.blob_sequences.values.append(blob_sequence)
                yield response

        mock_api_client.StreamExperiments = mock.Mock(wraps=stream_experiments)
        mock_api_client.StreamExperimentData = mock.Mock(
            wraps=stream_experiment_data)
        mock_api_client.StreamBlobData.side_effect = [
            iter([
                export_service_pb2.StreamBlobDataResponse(
                    data=b"4321",
                    offset=0,
                    final_chunk=False,
                ),
                export_service_pb2.StreamBlobDataResponse(
                    data=b"8765",
                    offset=4,
                    final_chunk=True,
                ),
            ]),
            # Raise error from `StreamBlobData` to test the grpc-error
            # condition.
            test_util.grpc_error(grpc.StatusCode.INTERNAL,
                                 "Error for testing"),
        ]

        outdir = os.path.join(self.get_temp_dir(), "outdir")
        exporter = exporter_lib.TensorBoardExporter(mock_api_client, outdir)
        start_time = 1571084846.25
        start_time_pb = test_util.timestamp_pb(1571084846250000000)

        generator = exporter.export(read_time=start_time)
        expected_files = []
        self.assertTrue(os.path.isdir(outdir))
        self.assertCountEqual(expected_files, _outdir_files(outdir))
        mock_api_client.StreamExperiments.assert_not_called()
        mock_api_client.StreamExperimentData.assert_not_called()

        # The first iteration should request the list of experiments and
        # data for one of them.
        self.assertEqual(next(generator), "123")
        expected_files.append(os.path.join("experiment_123", "metadata.json"))
        # scalars.json and tensors.json should exist and be empty.
        expected_files.append(os.path.join("experiment_123", "scalars.json"))
        expected_files.append(os.path.join("experiment_123", "tensors.json"))
        expected_files.append(
            os.path.join("experiment_123", "blob_sequences.json"))
        expected_files.append(
            os.path.join("experiment_123", "blobs", "blob_train_blob.bin"))
        # blobs/blob_test_blob.bin should not exist, because it contains
        # an unfinished blob.
        self.assertCountEqual(expected_files, _outdir_files(outdir))

        # Check that the scalars and tensors data files are empty, because
        # there are no scalars or tensors.
        with open(os.path.join(outdir, "experiment_123",
                               "scalars.json")) as infile:
            self.assertEqual(infile.read(), "")
        with open(os.path.join(outdir, "experiment_123",
                               "tensors.json")) as infile:
            self.assertEqual(infile.read(), "")

        # Check the blob_sequences.json file.
        with open(os.path.join(outdir, "experiment_123",
                               "blob_sequences.json")) as infile:
            jsons = [json.loads(line) for line in infile]
        self.assertLen(jsons, 2)

        datum = jsons[0]
        self.assertEqual(datum.pop("run"), "train")
        self.assertEqual(datum.pop("tag"), "__default_graph__")
        summary_metadata = summary_pb2.SummaryMetadata.FromString(
            base64.b64decode(datum.pop("summary_metadata")))
        expected_summary_metadata = summary_pb2.SummaryMetadata(
            data_class=summary_pb2.DATA_CLASS_BLOB_SEQUENCE)
        self.assertEqual(summary_metadata, expected_summary_metadata)
        points = datum.pop("points")
        self.assertEqual(datum, {})
        self.assertEqual(points.pop("steps"), [0])
        self.assertEqual(points.pop("wall_times"), [1571084520.862939144])
        # The 1st blob is finished; the 2nd is unfinished.
        self.assertEqual(points.pop("blob_file_paths"),
                         [["blobs/blob_train_blob.bin", None]])
        self.assertEqual(points, {})

        datum = jsons[1]
        self.assertEqual(datum.pop("run"), "test")
        self.assertEqual(datum.pop("tag"), "__default_graph__")
        summary_metadata = summary_pb2.SummaryMetadata.FromString(
            base64.b64decode(datum.pop("summary_metadata")))
        self.assertEqual(summary_metadata, expected_summary_metadata)
        points = datum.pop("points")
        self.assertEqual(datum, {})
        self.assertEqual(points.pop("steps"), [0])
        self.assertEqual(points.pop("wall_times"), [1571084520.862939144])
        # `None` blob file path indicates an unfinished blob.
        self.assertEqual(points.pop("blob_file_paths"), [[None]])
        self.assertEqual(points, {})

        # Check the BLOB files.
        with open(
                os.path.join(outdir, "experiment_123", "blobs",
                             "blob_train_blob.bin"),
                "rb",
        ) as f:
            self.assertEqual(f.read(), b"43218765")

        # Check call to StreamBlobData.
        expected_blob_data_request = export_service_pb2.StreamBlobDataRequest(
            blob_id="train_blob")
        mock_api_client.StreamBlobData.assert_called_once_with(
            expected_blob_data_request, metadata=grpc_util.version_metadata())

        # Test the case where blob streaming errors out.
        self.assertEqual(next(generator), "456")
        # Check the blob_sequences.json file.
        with open(os.path.join(outdir, "experiment_456",
                               "blob_sequences.json")) as infile:
            jsons = [json.loads(line) for line in infile]
        self.assertLen(jsons, 2)

        datum = jsons[0]
        self.assertEqual(datum.pop("run"), "train")
        self.assertEqual(datum.pop("tag"), "__default_graph__")
        summary_metadata = summary_pb2.SummaryMetadata.FromString(
            base64.b64decode(datum.pop("summary_metadata")))
        self.assertEqual(summary_metadata, expected_summary_metadata)
        points = datum.pop("points")
        self.assertEqual(datum, {})
        self.assertEqual(points.pop("steps"), [0])
        self.assertEqual(points.pop("wall_times"), [1571084520.862939144])
        # `None` represents the blob that experienced error during downloading
        # and hence is missing.
        self.assertEqual(points.pop("blob_file_paths"), [[None, None]])
        self.assertEqual(points, {})

        datum = jsons[1]
        self.assertEqual(datum.pop("run"), "test")
        self.assertEqual(datum.pop("tag"), "__default_graph__")
        summary_metadata = summary_pb2.SummaryMetadata.FromString(
            base64.b64decode(datum.pop("summary_metadata")))
        self.assertEqual(summary_metadata, expected_summary_metadata)
        points = datum.pop("points")
        self.assertEqual(datum, {})
        self.assertEqual(points.pop("steps"), [0])
        self.assertEqual(points.pop("wall_times"), [1571084520.862939144])
        # `None` represents the blob that experienced error during downloading
        # and hence is missing.
        self.assertEqual(points.pop("blob_file_paths"), [[None]])
        self.assertEqual(points, {})

Example #30

    def setUp(self):
        super(MultiplexerDataProviderTest, self).setUp()
        self.logdir = self.get_temp_dir()
        self.ctx = context.RequestContext()

        logdir = os.path.join(self.logdir, "polynomials")
        with tf.summary.create_file_writer(logdir).as_default():
            for i in range(10):
                scalar_summary.scalar(
                    "square", i ** 2, step=2 * i, description="boxen"
                )
                scalar_summary.scalar("cube", i ** 3, step=3 * i)

        logdir = os.path.join(self.logdir, "waves")
        with tf.summary.create_file_writer(logdir).as_default():
            for i in range(10):
                scalar_summary.scalar("sine", tf.sin(float(i)), step=i)
                scalar_summary.scalar(
                    "square", tf.sign(tf.sin(float(i))), step=i
                )
                # Summary with rank-0 data but not owned by the scalars plugin.
                metadata = summary_pb2.SummaryMetadata()
                metadata.plugin_data.plugin_name = "marigraphs"
                metadata.data_class = summary_pb2.DATA_CLASS_SCALAR
                tf.summary.write(
                    "high_tide", tensor=i, step=i, metadata=metadata
                )
                # Summary with rank-1 data of scalar data class (bad!).
                metadata = summary_pb2.SummaryMetadata()
                metadata.plugin_data.plugin_name = "greetings"
                metadata.data_class = summary_pb2.DATA_CLASS_SCALAR
                tf.summary.write(
                    "bad", tensor=[i, i], step=i, metadata=metadata
                )

        logdir = os.path.join(self.logdir, "lebesgue")
        with tf.summary.create_file_writer(logdir).as_default():
            data = [
                ("very smooth", (0.0, 0.25, 0.5, 0.75, 1.0), "uniform"),
                ("very smoothn't", (0.0, 0.01, 0.99, 1.0), "bimodal"),
            ]
            for (description, distribution, name) in data:
                tensor = tf.constant([distribution], dtype=tf.float64)
                for i in range(1, 11):
                    histogram_summary.histogram(
                        name, tensor * i, step=i, description=description
                    )

        logdir = os.path.join(self.logdir, "mondrian")
        with tf.summary.create_file_writer(logdir).as_default():
            data = [
                ("red", (221, 28, 38), "top-right"),
                ("blue", (1, 91, 158), "bottom-left"),
                ("yellow", (239, 220, 111), "bottom-right"),
            ]
            for (name, color, description) in data:
                image_1x1 = tf.constant([[[color]]], dtype=tf.uint8)
                for i in range(1, 11):
                    # Use a non-monotonic sequence of sample sizes to
                    # test `max_length` calculation.
                    k = 6 - abs(6 - i)  # 1, .., 6, .., 2
                    # a `k`-sample image summary of `i`-by-`i` images
                    image = tf.tile(image_1x1, [k, i, i, 1])
                    image_summary.image(
                        name,
                        image,
                        step=i,
                        description=description,
                        max_outputs=99,
                    )