def _get_graph(self, *args, **kwargs):
    """Set up runs, then fetch the run's graph and parse it into a proto.

    Asserts that the plugin serves the graph as text protobuf
    ('text/x-protobuf') and returns the parsed `tf.GraphDef`.
    """
    self.set_up_with_runs()
    pbtxt, mime = self.plugin.graph_impl(
        self._RUN_WITH_GRAPH, *args, **kwargs)
    self.assertEqual(mime, 'text/x-protobuf')
    return text_format.Parse(pbtxt, tf.GraphDef())
def testOnlySummaryEventsTriggerDiscards(self):
    """Test that file version event does not trigger data purge."""
    generator = _EventGenerator(self)
    accumulator = ea.EventAccumulator(generator)
    generator.AddScalar('s1', wall_time=1, step=100, value=20)

    # Neither a file_version event nor a graph event carries summary data,
    # so their out-of-order step 0 must not purge the step-100 scalar.
    version_event = tf.Event(
        wall_time=2, step=0, file_version='brain.Event:1')
    graph_event = tf.Event(
        wall_time=3, step=0,
        graph_def=tf.GraphDef().SerializeToString())
    generator.AddEvent(version_event)
    generator.AddEvent(graph_event)
    accumulator.Reload()

    self.assertEqual([e.step for e in accumulator.Scalars('s1')], [100])
def Graph(self):
    """Return the graph definition, if there is one.

    Returns whatever serialized graph the accumulator recorded in
    `self._graph`. NOTE(review): the original doc stated this also covers a
    graph extracted from a stored metagraph; that population happens in
    loading code outside this method — confirm against the loader.

    Raises:
      ValueError: If there is no graph for this run.

    Returns:
      The `graph_def` proto.
    """
    # Guard first: avoid constructing a GraphDef we would only discard
    # on the error path.
    if self._graph is None:
        raise ValueError('There is no graph in this EventAccumulator')
    graph = tf.GraphDef()
    graph.ParseFromString(self._graph)
    return graph
def _generate_test_data(self, run_name, experiment_name):
    """Generates the test data directory.

    The test data has a single run of the given name, containing:
      - a graph definition and metagraph definition

    The same run is also mirrored as a scalar summary into the SQLite
    database at `self.db_path` via the tf.contrib summary DB writer.

    Arguments:
      run_name: The directory under self.logdir into which to write events.
      experiment_name: The experiment name recorded when writing the run's
        summary data into the database.
    """
    run_path = os.path.join(self.logdir, run_name)
    writer = tf.summary.FileWriter(run_path)

    # Add a simple graph event: two named nodes, the second carrying a
    # 2 KB attribute so the serialized graph has nontrivial size.
    graph_def = tf.GraphDef()
    node1 = graph_def.node.add()
    node1.name = 'a'
    node2 = graph_def.node.add()
    node2.name = 'b'
    node2.attr['very_large_attr'].s = b'a' * 2048  # 2 KB attribute

    meta_graph_def = tf.MetaGraphDef(graph_def=graph_def)

    # Depending on the test flavor, record the graph either via a
    # metagraph event or via a plain graph event.
    if self._only_use_meta_graph:
        writer.add_meta_graph(meta_graph_def)
    else:
        writer.add_graph(graph_def)

    writer.flush()
    writer.close()

    # Write data for the run to the database.
    # TODO(nickfelt): Figure out why resetting the graph is necessary.
    tf.reset_default_graph()
    db_writer = tf.contrib.summary.create_db_writer(
        db_uri=self.db_path,
        experiment_name=experiment_name,
        run_name=run_name,
        user_name='user')
    with db_writer.as_default(), tf.contrib.summary.always_record_summaries():
        tf.contrib.summary.scalar('mytag', 1)
        with tf.Session() as sess:
            sess.run(tf.global_variables_initializer())
            sess.run(tf.contrib.summary.summary_writer_initializer_op())
            sess.run(tf.contrib.summary.all_summary_ops())
def GenerateTestData(path):
    """Generates the test data directory."""
    # First run: scalars, one histogram, two image series, one audio series.
    run1_path = os.path.join(path, "run1")
    os.makedirs(run1_path)
    writer1 = tf.summary.FileWriter(run1_path)
    for tag, series_fn in [
            ("foo/square", lambda x: x * x),
            ("bar/square", lambda x: x * x),
            ("foo/sin", math.sin),
            ("foo/cos", math.cos),
    ]:
        WriteScalarSeries(writer1, tag, series_fn)
    WriteHistogramSeries(
        writer1, "histo1", [[v, 1] for v in (0, 0.3, 0.5, 0.7, 1)])
    for image_tag in ("im1", "im2"):
        WriteImageSeries(writer1, image_tag)
    WriteAudioSeries(writer1, "au1")

    # Second run: scaled variants of the same scalar tags, two histograms,
    # one image series, one audio series.
    run2_path = os.path.join(path, "run2")
    os.makedirs(run2_path)
    writer2 = tf.summary.FileWriter(run2_path)
    for tag, series_fn in [
            ("foo/square", lambda x: x * x * 2),
            ("bar/square", lambda x: x * x * 3),
            ("foo/cos", lambda x: math.cos(x) * 2),
    ]:
        WriteScalarSeries(writer2, tag, series_fn)
    WriteHistogramSeries(
        writer2, "histo1", [[v, 2] for v in (0, 0.3, 0.5, 0.7, 1)])
    WriteHistogramSeries(
        writer2, "histo2", [[v, 1] for v in (0, 0.3, 0.5, 0.7, 1)])
    WriteImageSeries(writer2, "im1")
    WriteAudioSeries(writer2, "au2")

    # Build a tiny matmul graph incrementally: run1's graph has nodes a
    # and b; run2's graph additionally has node c.
    graph_def = tf.GraphDef()
    node_a = graph_def.node.add()
    node_a.name = "a"
    node_a.op = "matmul"
    node_b = graph_def.node.add()
    node_b.name = "b"
    node_b.op = "matmul"
    node_b.input.extend(["a:0"])
    writer1.add_graph(graph_def)
    node_c = graph_def.node.add()
    node_c.name = "c"
    node_c.op = "matmul"
    node_c.input.extend(["a:0", "b:0"])
    writer2.add_graph(graph_def)

    writer1.close()
    writer2.close()