Example 1
    def test_graph_def_experimental_filter_graph(self):
        # Create a `GraphDef`
        graph_def = graph_pb2.GraphDef()
        graph_def.node.add(name="alice", op="Person")
        graph_def.node.add(name="bob", op="Person")

        graph_def.node[1].attr["small"].s = b"small_attr_value"
        graph_def.node[1].attr["large"].s = (
            b"large_attr_value" * 100  # 1600 bytes > 1024 limit
        )
        graph_def.node.add(name="friendship",
                           op="Friendship",
                           input=["alice", "bob"])

        # Simulate legacy graph event
        old_event = event_pb2.Event()
        old_event.step = 0
        old_event.wall_time = 456.75
        old_event.graph_def = graph_def.SerializeToString()

        new_events = self._migrate_event(old_event,
                                         experimental_filter_graph=True)

        new_event = new_events[1]
        tensor = tensor_util.make_ndarray(new_event.summary.value[0].tensor)
        new_graph_def_bytes = tensor[0]
        new_graph_def = graph_pb2.GraphDef.FromString(new_graph_def_bytes)

        expected_graph_def = graph_pb2.GraphDef()
        expected_graph_def.CopyFrom(graph_def)
        del expected_graph_def.node[1].attr["large"]
        expected_graph_def.node[1].attr["_too_large_attrs"].list.s.append(
            b"large")

        self.assertProtoEquals(expected_graph_def, new_graph_def)
Example 2
def merge_graph_defs(graph_defs):
    """Merges GraphDefs by adding unique prefix, `graph_{ind}`, to names.

    All GraphDefs are expected to be TensorBoard's own GraphDef protos.

    When collecting graphs using the `tf.summary.trace` API, node names are not
    guaranteed to be unique.  If non-unique names are left unhandled, the graph
    visualization can render distinct nodes as one, creating an inaccurate
    depiction of the flow of the graph (e.g., if there are A -> B -> C and D ->
    B -> E, you may see {A, D} -> B -> {C, E}).  To prevent such graphs, we used
    to check for uniqueness while merging, but that resulted in
    https://github.com/tensorflow/tensorboard/issues/1929.

    To remedy these issues, we simply "apply a name scope" to each graph by
    prefixing it with a unique name (with a chance of collision) to create
    unconnected groups of graphs.

    In case there is only one graph def passed, it returns the original
    graph_def. In case no graph defs are passed, it returns an empty GraphDef.

    Args:
      graph_defs: TensorBoard GraphDefs to merge.

    Returns:
      TensorBoard GraphDef that merges all graph_defs with unique prefixes.

    Raises:
      ValueError in case GraphDef versions mismatch.
    """
    if len(graph_defs) == 1:
        return graph_defs[0]
    elif len(graph_defs) == 0:
        return graph_pb2.GraphDef()

    dst_graph_def = graph_pb2.GraphDef()

    if graph_defs[0].versions.producer:
        dst_graph_def.versions.CopyFrom(graph_defs[0].versions)

    for index, graph_def in enumerate(graph_defs):
        if dst_graph_def.versions.producer != graph_def.versions.producer:
            raise ValueError("Cannot combine GraphDefs of different versions.")

        _add_with_prepended_names(
            "graph_%d" % (index + 1),
            graph_def,
            dst_graph_def,
        )

    return dst_graph_def
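
A minimal usage sketch for merge_graph_defs; the import path and the "/" scope
separator applied by _add_with_prepended_names (not shown here) are
assumptions:

from tensorboard.compat.proto import graph_pb2  # assumed import path

g1 = graph_pb2.GraphDef()
g1.node.add(name="add", op="Add")
g2 = graph_pb2.GraphDef()
g2.node.add(name="add", op="Add")  # deliberately collides with g1's node

merged = merge_graph_defs([g1, g2])
# Assuming "/"-separated name scoping, the merged graph holds two distinct
# nodes, "graph_1/add" and "graph_2/add", rather than one conflated "add".
print([node.name for node in merged.node])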
Example 3
def _migrate_graph_event(old_event, experimental_filter_graph=False):
    result = event_pb2.Event()
    result.wall_time = old_event.wall_time
    result.step = old_event.step
    value = result.summary.value.add(tag=graphs_metadata.RUN_GRAPH_NAME)
    graph_bytes = old_event.graph_def

    # TODO(@davidsoergel): Move this stopgap to a more appropriate place.
    if experimental_filter_graph:
        try:
            graph_def = graph_pb2.GraphDef().FromString(graph_bytes)
        # The reason for the RuntimeWarning catch here is b/27494216, whereby
        # some proto parsers incorrectly raise that instead of DecodeError
        # on certain kinds of malformed input.  Triggering this seems to require
        # a combination of mysterious circumstances.
        except (message.DecodeError, RuntimeWarning):
            logger.warning(
                "Could not parse GraphDef of size %d. Skipping.",
                len(graph_bytes),
            )
            return (old_event, )
        # Use the default filter parameters:
        # limit_attr_size=1024, large_attrs_key="_too_large_attrs"
        process_graph.prepare_graph_for_ui(graph_def)
        graph_bytes = graph_def.SerializeToString()

    value.tensor.CopyFrom(tensor_util.make_tensor_proto([graph_bytes]))
    value.metadata.plugin_data.plugin_name = graphs_metadata.PLUGIN_NAME
    # `value.metadata.plugin_data.content` left as the empty proto
    value.metadata.data_class = summary_pb2.DATA_CLASS_BLOB_SEQUENCE
    # In the short term, keep both the old event and the new event to
    # maintain compatibility.
    return (old_event, result)
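
A sketch of driving the migration above, based only on the behavior visible in
the snippet: a 1-tuple comes back only when parsing fails; otherwise the old
event is kept alongside the new summary event.

graph_def = graph_pb2.GraphDef()
graph_def.node.add(name="alice", op="Person")

old_event = event_pb2.Event()
old_event.step = 0
old_event.wall_time = 456.75
old_event.graph_def = graph_def.SerializeToString()

events = _migrate_graph_event(old_event, experimental_filter_graph=True)
assert events[0] is old_event
if len(events) == 2:
    new_event = events[1]  # the migrated blob-sequence summary event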
Example 4
    def graph_impl(
        self,
        run,
        tag,
        is_conceptual,
        experiment=None,
        limit_attr_size=None,
        large_attrs_key=None,
    ):
        """Result of the form `(body, mime_type)`, or `None` if no graph
        exists."""
        if self._data_provider:
            graph_blob_sequences = self._data_provider.read_blob_sequences(
                experiment_id=experiment,
                plugin_name=metadata.PLUGIN_NAME,
                run_tag_filter=provider.RunTagFilter(runs=[run], tags=[tag]),
            )
            blob_datum_list = graph_blob_sequences.get(run, {}).get(tag, ())
            try:
                blob_ref = blob_datum_list[0].values[0]
            except IndexError:
                return None
            # Always use the blob_key approach for now, even if there is a direct url.
            graph_raw = self._data_provider.read_blob(blob_ref.blob_key)
            # This method ultimately returns pbtxt, but we have to deserialize and
            # later reserialize this anyway, because a) this way we accept binary
            # protobufs too, and b) below we run `prepare_graph_for_ui` on the graph.
            graph = graph_pb2.GraphDef.FromString(graph_raw)

        elif is_conceptual:
            tensor_events = self._multiplexer.Tensors(run, tag)
            # Take the first event if there are multiple events written from different
            # steps.
            keras_model_config = json.loads(
                tensor_events[0].tensor_proto.string_val[0]
            )
            graph = keras_util.keras_model_to_graph_def(keras_model_config)

        elif tag:
            tensor_events = self._multiplexer.Tensors(run, tag)
            # Take the first event if there are multiple events written from different
            # steps.
            run_metadata = config_pb2.RunMetadata.FromString(
                tensor_events[0].tensor_proto.string_val[0]
            )
            graph = graph_pb2.GraphDef()

            for func_graph in run_metadata.function_graphs:
                graph_util.combine_graph_defs(
                    graph, func_graph.pre_optimization_graph
                )
        else:
            graph = self._multiplexer.Graph(run)

        # This next line might raise a ValueError if the limit parameters
        # are invalid (size is negative, size present but key absent, etc.).
        process_graph.prepare_graph_for_ui(
            graph, limit_attr_size, large_attrs_key
        )
        return (str(graph), "text/x-protobuf")  # pbtxt
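
A sketch of how a caller might invoke graph_impl above; the run, tag, and
experiment values are hypothetical, and parsing the pbtxt body back mirrors
the _get_graph helper shown later in Example 12:

from google.protobuf import text_format

result = plugin.graph_impl(
    run="train", tag=None, is_conceptual=False, experiment="exp_id"
)  # "train" and "exp_id" are made-up values for illustration
if result is not None:  # None means no graph exists
    body, mime_type = result  # mime_type == "text/x-protobuf"
    graph = text_format.Parse(body, graph_pb2.GraphDef())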
Example 5
    def graph_impl(self,
                   run,
                   tag,
                   is_conceptual,
                   limit_attr_size=None,
                   large_attrs_key=None):
        """Result of the form `(body, mime_type)`, or `None` if no graph exists."""
        if is_conceptual:
            tensor_events = self._multiplexer.Tensors(run, tag)
            # Take the first event if there are multiple events written from different
            # steps.
            keras_model_config = json.loads(
                tensor_events[0].tensor_proto.string_val[0])
            graph = keras_util.keras_model_to_graph_def(keras_model_config)
        elif tag:
            tensor_events = self._multiplexer.Tensors(run, tag)
            # Take the first event if there are multiple events written from different
            # steps.
            run_metadata = config_pb2.RunMetadata.FromString(
                tensor_events[0].tensor_proto.string_val[0])
            graph = graph_pb2.GraphDef()

            for func_graph in run_metadata.function_graphs:
                graph_util.combine_graph_defs(
                    graph, func_graph.pre_optimization_graph)
        else:
            graph = self._multiplexer.Graph(run)
            self.graph = graph

        # This next line might raise a ValueError if the limit parameters
        # are invalid (size is negative, size present but key absent, etc.).
        process_graph.prepare_graph_for_ui(graph, limit_attr_size,
                                           large_attrs_key)
        return (str(graph), 'text/x-protobuf')  # pbtxt
Example 6
def _migrate_graph_event(old_event, experimental_filter_graph=False):
    result = event_pb2.Event()
    result.wall_time = old_event.wall_time
    result.step = old_event.step
    value = result.summary.value.add(tag=graphs_metadata.RUN_GRAPH_NAME)
    graph_bytes = old_event.graph_def

    # TODO(@davidsoergel): Move this stopgap to a more appropriate place.
    if experimental_filter_graph:
        try:
            graph_def = graph_pb2.GraphDef().FromString(graph_bytes)
        except message.DecodeError:
            logger.warning(
                "Could not parse GraphDef of size %d. Skipping.",
                len(graph_bytes),
            )
            return (old_event, )
        # Use the default filter parameters:
        # limit_attr_size=1024, large_attrs_key="_too_large_attrs"
        process_graph.prepare_graph_for_ui(graph_def)
        graph_bytes = graph_def.SerializeToString()

    value.tensor.CopyFrom(tensor_util.make_tensor_proto([graph_bytes]))
    value.metadata.plugin_data.plugin_name = graphs_metadata.PLUGIN_NAME
    # `value.metadata.plugin_data.content` left as the empty proto
    value.metadata.data_class = summary_pb2.DATA_CLASS_BLOB_SEQUENCE
    # In the short term, keep both the old event and the new event to
    # maintain compatibility.
    return (old_event, result)
Example 7
def _create_example_graph_bytes(large_attr_size):
    graph_def = graph_pb2.GraphDef()
    graph_def.node.add(name="alice", op="Person")
    graph_def.node.add(name="bob", op="Person")

    graph_def.node[1].attr["small"].s = b"small_attr_value"
    graph_def.node[1].attr["large"].s = b"l" * large_attr_size
    graph_def.node.add(name="friendship",
                       op="Friendship",
                       input=["alice", "bob"])
    return graph_def.SerializeToString()
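
The helper above makes it easy to exercise both sides of the default 1024-byte
attribute limit used elsewhere in these examples; a quick sketch:

small_bytes = _create_example_graph_bytes(100)   # under the 1024-byte limit
large_bytes = _create_example_graph_bytes(2048)  # over the limit

graph_def = graph_pb2.GraphDef.FromString(large_bytes)
assert len(graph_def.node[1].attr["large"].s) == 2048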
Example 8
    def test_graph_def(self):
        # Create a `GraphDef` and write it to disk as an event.
        logdir = self.get_temp_dir()
        writer = test_util.FileWriter(logdir)
        graph_def = graph_pb2.GraphDef()
        graph_def.node.add(name="alice", op="Person")
        graph_def.node.add(name="bob", op="Person")
        graph_def.node.add(name="friendship",
                           op="Friendship",
                           input=["alice", "bob"])
        writer.add_graph(graph=None, graph_def=graph_def, global_step=123)
        writer.flush()

        # Read in the `Event` containing the written `graph_def`.
        files = os.listdir(logdir)
        self.assertLen(files, 1)
        event_file = os.path.join(logdir, files[0])
        self.assertIn("tfevents", event_file)
        loader = event_file_loader.EventFileLoader(event_file)
        events = list(loader.Load())
        self.assertLen(events, 2)
        self.assertEqual(events[0].WhichOneof("what"), "file_version")
        self.assertEqual(events[1].WhichOneof("what"), "graph_def")
        old_event = events[1]

        new_events = self._migrate_event(old_event)
        self.assertLen(new_events, 2)
        self.assertIs(new_events[0], old_event)
        new_event = new_events[1]

        self.assertEqual(new_event.WhichOneof("what"), "summary")
        self.assertLen(new_event.summary.value, 1)
        tensor = tensor_util.make_ndarray(new_event.summary.value[0].tensor)
        self.assertEqual(
            new_event.summary.value[0].metadata.data_class,
            summary_pb2.DATA_CLASS_BLOB_SEQUENCE,
        )
        self.assertEqual(
            new_event.summary.value[0].metadata.plugin_data.plugin_name,
            graphs_metadata.PLUGIN_NAME,
        )
        self.assertEqual(tensor.shape, (1, ))
        new_graph_def_bytes = tensor[0]
        self.assertIsInstance(new_graph_def_bytes, bytes)
        self.assertGreaterEqual(len(new_graph_def_bytes), 16)
        new_graph_def = graph_pb2.GraphDef.FromString(new_graph_def_bytes)

        self.assertProtoEquals(graph_def, new_graph_def)
Example 9
def _filtered_graph_bytes(graph_bytes):
    try:
        graph_def = graph_pb2.GraphDef().FromString(graph_bytes)
    # The reason for the RuntimeWarning catch here is b/27494216, whereby
    # some proto parsers incorrectly raise that instead of DecodeError
    # on certain kinds of malformed input. Triggering this seems to require
    # a combination of mysterious circumstances.
    except (message.DecodeError, RuntimeWarning):
        logger.warning(
            "Could not parse GraphDef of size %d. Skipping.", len(graph_bytes),
        )
        return None
    # Use the default filter parameters:
    # limit_attr_size=1024, large_attrs_key="_too_large_attrs"
    process_graph.prepare_graph_for_ui(graph_def)
    return graph_def.SerializeToString()
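
Combined with _create_example_graph_bytes from Example 7, this gives a quick
check of the default filtering; a sketch, assuming prepare_graph_for_ui moves
oversized attrs into the "_too_large_attrs" marker exactly as Example 1 shows:

filtered = _filtered_graph_bytes(_create_example_graph_bytes(2048))
if filtered is not None:
    node = graph_pb2.GraphDef.FromString(filtered).node[1]
    assert "large" not in node.attr  # stripped: 2048 bytes > 1024 limit
    assert b"large" in node.attr["_too_large_attrs"].list.s

# Unparseable input is logged and swallowed rather than raised:
assert _filtered_graph_bytes(b"\xff\xff") is None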
Example 10
    def Graph(self):
        """Return the graph definition, if there is one.

        If the graph is stored directly, return that.  If no graph is stored
        directly but a metagraph is stored containing a graph, return that.

        Raises:
          ValueError: If there is no graph for this run.

        Returns:
          The `graph_def` proto.
        """
        graph = graph_pb2.GraphDef()
        if self._graph is not None:
            graph.ParseFromString(self._graph)
            return graph
        raise ValueError("There is no graph in this EventAccumulator")
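
Because Graph() raises ValueError rather than returning None when no graph was
loaded, callers typically guard it; a minimal sketch:

def try_get_graph(accumulator):
    """Return the run's GraphDef, or None if the run logged no graph."""
    try:
        return accumulator.Graph()
    except ValueError:
        return None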
Example 11
    def _generate_test_data(self, run_name, experiment_name):
        """Generates the test data directory.

        The test data has a single run of the given name, containing:
          - a graph definition and metagraph definition

        Arguments:
          run_name: The directory under self.logdir into which to write
              events.
          experiment_name: The name of the experiment under which to record
              the run in the database.
        """
        run_path = os.path.join(self.logdir, run_name)
        with test_util.FileWriterCache.get(run_path) as writer:

            # Add a simple graph event.
            graph_def = graph_pb2.GraphDef()
            node1 = graph_def.node.add()
            node1.name = "a"
            node2 = graph_def.node.add()
            node2.name = "b"
            node2.attr["very_large_attr"].s = b"a" * 2048  # 2 KB attribute

            meta_graph_def = meta_graph_pb2.MetaGraphDef(graph_def=graph_def)

            if self._only_use_meta_graph:
                writer.add_meta_graph(meta_graph_def)
            else:
                writer.add_graph(graph=None, graph_def=graph_def)

        # Write data for the run to the database.
        # TODO(nickfelt): Figure out why resetting the graph is necessary.
        tf.compat.v1.reset_default_graph()
        db_writer = tf.contrib.summary.create_db_writer(
            db_uri=self.db_path,
            experiment_name=experiment_name,
            run_name=run_name,
            user_name="user",
        )
        with db_writer.as_default(
        ), tf.contrib.summary.always_record_summaries():
            tf.contrib.summary.scalar("mytag", 1)

        with tf.compat.v1.Session() as sess:
            sess.run(tf.compat.v1.global_variables_initializer())
            sess.run(tf.contrib.summary.summary_writer_initializer_op())
            sess.run(tf.contrib.summary.all_summary_ops())
Example 12
    def _get_graph(self, plugin, *args, **kwargs):
        """Fetch and return the graph as a proto."""
        (graph_pbtxt, mime_type) = plugin.graph_impl(*args, **kwargs)
        self.assertEqual(mime_type, "text/x-protobuf")
        return text_format.Parse(graph_pbtxt, graph_pb2.GraphDef())
Example 13
    def init_graph(self, request):
        self.graph = graph_pb2.GraphDef()
        return http_util.Respond(request, 'init', 'text/plain')
Example 14
    def __init__(self):
        self._tb_graph = graph_pb2.GraphDef()