def _test(self, route, should_be_okay):
    """Checks whether TensorBoardWSGIApp accepts or rejects a plugin route.

    Args:
      route: The route string registered by a fake plugin.
      should_be_okay: If True, app construction must succeed; if False it
        must raise a ValueError matching 'invalid route'.
    """
    temp_dir = tempfile.mkdtemp(prefix=self.get_temp_dir())
    self.addCleanup(shutil.rmtree, temp_dir)
    multiplexer = event_multiplexer.EventMultiplexer(
        size_guidance=application.DEFAULT_SIZE_GUIDANCE,
        purge_orphaned_data=True)
    fake_plugin = FakePlugin(
        None,
        plugin_name='foo',
        is_active_value=True,
        routes_mapping={route: lambda environ, start_response: None})

    def construct_app():
        # Both branches build the app identically; only the expectation
        # about success differs.
        application.TensorBoardWSGIApp(
            temp_dir, [fake_plugin], multiplexer, reload_interval=0,
            path_prefix='')

    if should_be_okay:
        construct_app()
    else:
        with six.assertRaisesRegex(self, ValueError, r'invalid route'):
            construct_app()
def testExceptions(self):
    """TensorBoardWSGIApp rejects unnamed plugins and duplicate names."""
    logdir = '/fake/foo'
    multiplexer = event_multiplexer.EventMultiplexer()

    # A plugin without a name must be rejected.
    with self.assertRaises(ValueError):
        nameless_plugins = [
            FakePlugin(
                None,
                plugin_name=None,
                is_active_value=True,
                routes_mapping={}),
        ]
        application.TensorBoardWSGIApp(
            logdir, nameless_plugins, multiplexer, 0, '')

    # Two plugins that share a name must be rejected.
    with self.assertRaises(ValueError):
        clashing_plugins = [
            FakePlugin(
                None,
                plugin_name='foo',
                is_active_value=True,
                routes_mapping={}),
            FakePlugin(
                None,
                plugin_name='foo',
                is_active_value=True,
                routes_mapping={}),
        ]
        application.TensorBoardWSGIApp(
            logdir, clashing_plugins, multiplexer, 0, '')
def setUp(self):
    """Builds a two-plugin TensorBoard app and serves it on a local port."""
    self.logdir = self.get_temp_dir()
    self._GenerateTestData(run_name='run1')
    self._multiplexer = event_multiplexer.EventMultiplexer(
        size_guidance=application.DEFAULT_SIZE_GUIDANCE,
        purge_orphaned_data=True)
    # One active and one inactive plugin, neither serving any routes.
    plugins = [
        FakePlugin(
            None, plugin_name='foo', is_active_value=True, routes_mapping={}),
        FakePlugin(
            None, plugin_name='bar', is_active_value=False, routes_mapping={}),
    ]
    app = application.TensorBoardWSGIApp(
        self.logdir, plugins, self._multiplexer, reload_interval=0)
    try:
        # 0 to pick an unused port.
        self._server = serving.BaseWSGIServer('localhost', 0, app)
    except IOError:
        # BaseWSGIServer has a preference for IPv4. If that didn't work, try
        # again with an explicit IPv6 address.
        self._server = serving.BaseWSGIServer('::1', 0, app)
    self._server_thread = threading.Thread(
        target=self._server.serve_forever)
    # Daemonize so a hung server cannot keep the test process alive.
    self._server_thread.daemon = True
    self._server_thread.start()
    # Connect to whatever ephemeral port the server actually bound.
    self._connection = http_client.HTTPConnection(
        'localhost', self._server.server_address[1])
def create_tb_app(logdir, reload_interval, purge_orphaned_data):
    """Creates a TensorBoard WSGI app for the given log directory.

    Args:
      logdir: Path to the directory containing event files.
      reload_interval: Seconds between data reloads; stringified for argv.
      purge_orphaned_data: Whether to purge orphaned data; stringified
        for argv.

    Returns:
      A WSGI application serving TensorBoard, built with whichever API
      matches the installed TensorBoard version.
    """
    argv = [
        "",
        "--logdir", logdir,
        "--reload_interval", str(reload_interval),
        "--purge_orphaned_data", str(purge_orphaned_data),
    ]
    tensorboard = program.TensorBoard()
    tensorboard.configure(argv)
    if version.parse(TB_VERSION) < version.parse("2.4"):
        # Older TensorBoard exposes a one-call constructor.
        return application.standard_tensorboard_wsgi(
            tensorboard.flags,
            tensorboard.plugin_loaders,
            tensorboard.assets_zip_provider,
        )
    else:
        (data_provider, deprecated_multiplexer) = (
            tensorboard._make_data_provider())
        app = application.TensorBoardWSGIApp(
            tensorboard.flags,
            tensorboard.plugin_loaders,
            data_provider,
            tensorboard.assets_zip_provider,
            deprecated_multiplexer,
        )
        # Bug fix: the app was previously constructed but never returned,
        # so callers on TensorBoard >= 2.4 received None.
        return app
def setUp(self):
    """Wires three fake plugins into a WSGI app behind a test client."""
    self.context = None
    bar_routes = {
        "/bar_route": self._bar_handler,
        "/wildcard/*": self._wildcard_handler,
        "/wildcard/special/*": self._wildcard_special_handler,
        "/wildcard/special/exact": self._foo_handler,
    }
    loaders = [
        FakePluginLoader(
            plugin_name="foo",
            is_active_value=True,
            routes_mapping={"/foo_route": self._foo_handler},
            construction_callback=self._construction_callback,
        ),
        FakePluginLoader(
            plugin_name="bar",
            is_active_value=True,
            routes_mapping=bar_routes,
            construction_callback=self._construction_callback,
        ),
        FakePluginLoader(
            plugin_name="whoami",
            routes_mapping={"/eid": self._eid_handler,},
        ),
    ]
    # The application should have added routes for both plugins.
    self.app = application.TensorBoardWSGIApp(
        FakeFlags(logdir=self.get_temp_dir()),
        loaders,
        data_provider=FakeDataProvider(),
        auth_providers={HeaderAuthProvider: HeaderAuthProvider()},
    )
    self.server = werkzeug_test.Client(self.app, wrappers.BaseResponse)
def setUp(self): self.log_dir = tempfile.mkdtemp() # We use numpy.random to generate images. We seed to avoid non-determinism # in this test. numpy.random.seed(42) # Create old-style image summaries for run "foo". tf.reset_default_graph() sess = tf.Session() placeholder = tf.placeholder(tf.uint8) tf.summary.image(name="baz", tensor=placeholder) merged_summary_op = tf.summary.merge_all() foo_directory = os.path.join(self.log_dir, "foo") writer = tf.summary.FileWriter(foo_directory) writer.add_graph(sess.graph) for step in xrange(2): writer.add_summary(sess.run(merged_summary_op, feed_dict={ placeholder: (numpy.random.rand(1, 16, 42, 3) * 255).astype(numpy.uint8) }), global_step=step) writer.close() # Create new-style image summaries for run bar. tf.reset_default_graph() sess = tf.Session() placeholder = tf.placeholder(tf.uint8) summary.op(name="quux", images=placeholder, description="how do you pronounce that, anyway?") merged_summary_op = tf.summary.merge_all() bar_directory = os.path.join(self.log_dir, "bar") writer = tf.summary.FileWriter(bar_directory) writer.add_graph(sess.graph) for step in xrange(2): writer.add_summary(sess.run(merged_summary_op, feed_dict={ placeholder: (numpy.random.rand(1, 6, 8, 3) * 255).astype(numpy.uint8) }), global_step=step) writer.close() # Start a server with the plugin. multiplexer = event_multiplexer.EventMultiplexer({ "foo": foo_directory, "bar": bar_directory, }) context = base_plugin.TBContext(logdir=self.log_dir, multiplexer=multiplexer) plugin = images_plugin.ImagesPlugin(context) wsgi_app = application.TensorBoardWSGIApp(self.log_dir, [plugin], multiplexer, reload_interval=0) self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse) self.routes = plugin.get_plugin_apps()
def standard_tensorboard_wsgi(flags, plugin_loaders, assets_zip_provider):
    """Builds the standard TensorBoard WSGI app from a local data ingester.

    Args:
      flags: Parsed TensorBoard flags object.
      plugin_loaders: Plugin loader instances for the app.
      assets_zip_provider: Provider of the frontend assets zip.

    Returns:
      The constructed TensorBoard WSGI application.
    """
    from tensorboard.backend.event_processing import data_ingester

    ingester = data_ingester.LocalDataIngester(flags)
    ingester.start()
    app = application.TensorBoardWSGIApp(
        flags,
        plugin_loaders,
        ingester.data_provider,
        assets_zip_provider,
        ingester.deprecated_multiplexer,
    )
    return app
def setUp(self):
    """Writes old- and new-style audio summaries, then serves the plugin."""
    self.log_dir = tempfile.mkdtemp()

    # We use numpy.random to generate audio. We seed to avoid non-determinism
    # in this test.
    numpy.random.seed(42)

    # Create old-style audio summaries for run "foo".
    tf.reset_default_graph()
    sess = tf.Session()
    placeholder = tf.placeholder(tf.float32)
    tf.summary.audio(name="baz", tensor=placeholder, sample_rate=44100)
    merged_summary_op = tf.summary.merge_all()
    foo_directory = os.path.join(self.log_dir, "foo")
    writer = tf.summary.FileWriter(foo_directory)
    writer.add_graph(sess.graph)
    for step in xrange(2):
        # The floats (sample data) range from -1 to 1.
        writer.add_summary(
            sess.run(merged_summary_op, feed_dict={
                placeholder: numpy.random.rand(42, 22050) * 2 - 1
            }),
            global_step=step)
    writer.close()

    # Create new-style audio summaries for run "bar".
    tf.reset_default_graph()
    sess = tf.Session()
    audio_placeholder = tf.placeholder(tf.float32)
    labels_placeholder = tf.placeholder(tf.string)
    summary.op("quux", audio_placeholder, sample_rate=44100,
               labels=labels_placeholder,
               description="how do you pronounce that, anyway?")
    merged_summary_op = tf.summary.merge_all()
    bar_directory = os.path.join(self.log_dir, "bar")
    writer = tf.summary.FileWriter(bar_directory)
    writer.add_graph(sess.graph)
    for step in xrange(2):
        # The floats (sample data) range from -1 to 1. One Markdown label
        # string is attached per sample.
        writer.add_summary(
            sess.run(merged_summary_op, feed_dict={
                audio_placeholder: numpy.random.rand(42, 11025, 1) * 2 - 1,
                labels_placeholder: [
                    tf.compat.as_bytes('step **%s**, sample %s'
                                       % (step, sample))
                    for sample in xrange(42)
                ],
            }),
            global_step=step)
    writer.close()

    # Start a server with the plugin.
    multiplexer = event_multiplexer.EventMultiplexer({
        "foo": foo_directory,
        "bar": bar_directory,
    })
    context = base_plugin.TBContext(
        logdir=self.log_dir, multiplexer=multiplexer)
    self.plugin = audio_plugin.AudioPlugin(context)
    wsgi_app = application.TensorBoardWSGIApp(
        self.log_dir, [self.plugin], multiplexer, reload_interval=0)
    self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
def setUp(self):
    """Writes image summaries via compat.v1 APIs, then serves the plugin."""
    self.log_dir = tempfile.mkdtemp()

    # We use numpy.random to generate images. We seed to avoid non-determinism
    # in this test.
    numpy.random.seed(42)

    # Create old-style image summaries for run "foo".
    tf.compat.v1.reset_default_graph()
    sess = tf.compat.v1.Session()
    placeholder = tf.compat.v1.placeholder(tf.uint8)
    tf.compat.v1.summary.image(name="baz", tensor=placeholder)
    merged_summary_op = tf.compat.v1.summary.merge_all()
    foo_directory = os.path.join(self.log_dir, "foo")
    with test_util.FileWriterCache.get(foo_directory) as writer:
        writer.add_graph(sess.graph)
        for step in xrange(2):
            writer.add_summary(
                sess.run(merged_summary_op, feed_dict={
                    placeholder: (numpy.random.rand(1, 16, 42, 3) * 255).astype(
                        numpy.uint8)
                }),
                global_step=step)

    # Create new-style image summaries for run bar.
    tf.compat.v1.reset_default_graph()
    sess = tf.compat.v1.Session()
    placeholder = tf.compat.v1.placeholder(tf.uint8)
    summary.op(name="quux", images=placeholder,
               description="how do you pronounce that, anyway?")
    merged_summary_op = tf.compat.v1.summary.merge_all()
    bar_directory = os.path.join(self.log_dir, "bar")
    with test_util.FileWriterCache.get(bar_directory) as writer:
        writer.add_graph(sess.graph)
        for step in xrange(2):
            writer.add_summary(
                sess.run(merged_summary_op, feed_dict={
                    placeholder: (numpy.random.rand(1, 8, 6, 3) * 255).astype(
                        numpy.uint8)
                }),
                global_step=step)

    # Start a server with the plugin.
    multiplexer = event_multiplexer.EventMultiplexer({
        "foo": foo_directory,
        "bar": bar_directory,
    })
    context = base_plugin.TBContext(
        logdir=self.log_dir, multiplexer=multiplexer)
    plugin = images_plugin.ImagesPlugin(context)
    # Setting a reload interval of -1 disables reloading. We disable reloading
    # because we seek to block tests from running til after one reload finishes.
    # This setUp method thus manually reloads the multiplexer. TensorBoard would
    # otherwise reload in a non-blocking thread.
    wsgi_app = application.TensorBoardWSGIApp(
        self.log_dir, [plugin], multiplexer, reload_interval=-1,
        path_prefix='')
    self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
    multiplexer.Reload()
    self.routes = plugin.get_plugin_apps()
def _make_server(self):
    """Constructs the TensorBoard WSGI app and instantiates the server."""
    data_provider, deprecated_multiplexer = self._make_data_provider()
    wsgi_app = application.TensorBoardWSGIApp(
        self.flags,
        self.plugin_loaders,
        data_provider,
        self.assets_zip_provider,
        deprecated_multiplexer,
    )
    return self.server_class(wsgi_app, self.flags)
def _SetupWSGIApp(self):
    """Builds a projector-plugin TensorBoard app behind a test client."""
    multiplexer = event_multiplexer.EventMultiplexer(
        size_guidance=application.DEFAULT_SIZE_GUIDANCE,
        purge_orphaned_data=True)
    plugin_context = base_plugin.TBContext(
        logdir=self.log_dir, multiplexer=multiplexer)
    self.plugin = projector_plugin.ProjectorPlugin(plugin_context)
    app = application.TensorBoardWSGIApp(
        self.log_dir, [self.plugin], multiplexer, reload_interval=0)
    self.server = werkzeug_test.Client(app, wrappers.BaseResponse)
def _make_server(self):
    """Constructs the TensorBoard WSGI app and instantiates the server."""
    ingester = data_ingester.LocalDataIngester(self.flags)
    # Data loading must begin before the app is built on top of it.
    ingester.start()
    wsgi_app = application.TensorBoardWSGIApp(
        self.flags,
        self.plugin_loaders,
        ingester.data_provider,
        self.assets_zip_provider,
        ingester.deprecated_multiplexer,
    )
    return self.server_class(wsgi_app, self.flags)
def setUp(self):
    """Serves the interactive inference plugin through a test client."""
    self.logdir = tf.test.get_temp_dir()
    self.context = base_plugin.TBContext(logdir=self.logdir)
    self.plugin = interactive_inference_plugin.InteractiveInferencePlugin(
        self.context)
    empty_multiplexer = event_multiplexer.EventMultiplexer({})
    app = application.TensorBoardWSGIApp(
        self.logdir,
        [self.plugin],
        multiplexer=empty_multiplexer,
        reload_interval=0,
        path_prefix='')
    self.server = werkzeug_test.Client(app, wrappers.BaseResponse)
def setUp(self): self.log_dir = tempfile.mkdtemp() # We use numpy.random to generate audio. We seed to avoid non-determinism # in this test. numpy.random.seed(42) # Create audio summaries for run foo. ops.reset_default_graph() sess = tf.Session() placeholder = array_ops.placeholder(dtypes.float32) summary_lib.audio(name="baz", tensor=placeholder, sample_rate=44100) merged_summary_op = summary_lib.merge_all() foo_directory = os.path.join(self.log_dir, "foo") writer = FileWriter(foo_directory) writer.add_graph(sess.graph) for step in xrange(2): # The floats (sample data) range from -1 to 1. writer.add_summary(sess.run(merged_summary_op, feed_dict={ placeholder: numpy.random.rand(42, 22050) * 2 - 1 }), global_step=step) writer.close() # Create audio summaries for run bar. ops.reset_default_graph() sess = tf.Session() placeholder = array_ops.placeholder(dtypes.float32) summary_lib.audio(name="quux", tensor=placeholder, sample_rate=44100) merged_summary_op = summary_lib.merge_all() bar_directory = os.path.join(self.log_dir, "bar") writer = summary_lib.FileWriter(bar_directory) writer.add_graph(sess.graph) for step in xrange(2): # The floats (sample data) range from -1 to 1. writer.add_summary(sess.run(merged_summary_op, feed_dict={ placeholder: numpy.random.rand(42, 11025) * 2 - 1 }), global_step=step) writer.close() # Start a server with the plugin. multiplexer = event_multiplexer.EventMultiplexer({ "foo": foo_directory, "bar": bar_directory, }) context = base_plugin.TBContext( logdir=self.log_dir, multiplexer=multiplexer) self.plugin = audio_plugin.AudioPlugin(context) wsgi_app = application.TensorBoardWSGIApp( self.log_dir, [self.plugin], multiplexer, reload_interval=0) self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
def setUp(self):
    """Generates run data and serves the core plugin via a test client."""
    self.logdir = self.get_temp_dir()
    self.addCleanup(shutil.rmtree, self.logdir)
    self._generate_test_data(run_name='run1')
    self.multiplexer = event_multiplexer.EventMultiplexer(
        size_guidance=application.DEFAULT_SIZE_GUIDANCE,
        purge_orphaned_data=True)
    self._context = base_plugin.TBContext(
        assets_zip_provider=application.get_default_assets_zip_provider(),
        logdir=self.logdir,
        multiplexer=self.multiplexer)
    self.plugin = core_plugin.CorePlugin(self._context)
    wsgi_app = application.TensorBoardWSGIApp(
        self.logdir, [self.plugin], self.multiplexer, 0)
    self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
def _start_logdir_based_server(self, temp_dir):
    """Starts a logdir-backed CorePlugin app behind a werkzeug client.

    Args:
      temp_dir: Directory to treat as the TensorBoard logdir.
    """
    self.logdir = temp_dir
    self.multiplexer = event_multiplexer.EventMultiplexer(
        size_guidance=application.DEFAULT_SIZE_GUIDANCE,
        purge_orphaned_data=True)
    plugin_context = base_plugin.TBContext(
        assets_zip_provider=get_test_assets_zip_provider(),
        logdir=self.logdir,
        multiplexer=self.multiplexer,
        window_title='title foo')
    self.logdir_based_plugin = core_plugin.CorePlugin(plugin_context)
    wsgi_app = application.TensorBoardWSGIApp(
        self.logdir,
        [self.logdir_based_plugin],
        self.multiplexer,
        0,
        path_prefix='')
    self.logdir_based_server = werkzeug_test.Client(
        wsgi_app, wrappers.BaseResponse)
def setUp(self):
    """Starts the interactive debugger plugin on a freshly picked port."""
    super(InteractiveDebuggerPluginTest, self).setUp()
    self._dummy_logdir = tempfile.mkdtemp()
    self._dummy_multiplexer = event_multiplexer.EventMultiplexer({})
    self._debugger_port = portpicker.pick_unused_port()
    self._debugger_url = 'grpc://localhost:%d' % self._debugger_port
    plugin_context = base_plugin.TBContext(
        logdir=self._dummy_logdir,
        multiplexer=self._dummy_multiplexer)
    self._debugger_plugin = (
        interactive_debugger_plugin.InteractiveDebuggerPlugin(plugin_context))
    # The plugin begins listening before the app is assembled (original
    # ordering preserved).
    self._debugger_plugin.listen(self._debugger_port)
    app = application.TensorBoardWSGIApp(
        self._dummy_logdir,
        [self._debugger_plugin],
        self._dummy_multiplexer,
        reload_interval=0,
        path_prefix='')
    self._server = werkzeug_test.Client(app, wrappers.BaseResponse)
def setUp(self):
    """Writes debugger events for two runs and wires a mocked data server."""
    super(DebuggerPluginTestBase, self).setUp()
    # Importing the debugger_plugin can sometimes unfortunately produce errors.
    try:
        # pylint: disable=g-import-not-at-top
        from tensorboard.plugins.debugger import debugger_plugin
        from tensorboard.plugins.debugger import debugger_server_lib
        # pylint: enable=g-import-not-at-top
    except Exception as e:  # pylint: disable=broad-except
        raise self.skipTest(
            'Skipping test because importing some modules failed: %r' % e)
    self.debugger_plugin_module = debugger_plugin

    # Populate the log directory with debugger event for run '.'.
    self.log_dir = self.get_temp_dir()
    file_prefix = tf.compat.as_bytes(
        os.path.join(self.log_dir, 'events.debugger'))
    writer = tf.pywrap_tensorflow.EventsWriter(file_prefix)
    device_name = '/job:localhost/replica:0/task:0/cpu:0'
    # Each event carries a DebugNumericSummary tensor: 12 leading slots
    # followed by a dtype enum and shape values — TODO confirm layout against
    # _CreateEventWithDebugNumericSummary.
    writer.WriteEvent(
        self._CreateEventWithDebugNumericSummary(
            device_name=device_name,
            op_name='layers/Matmul',
            output_slot=0,
            wall_time=42,
            step=2,
            list_of_values=(list(range(12)) +
                            [float(tf.float32.as_datatype_enum), 1.0, 3.0])))
    writer.WriteEvent(
        self._CreateEventWithDebugNumericSummary(
            device_name=device_name,
            op_name='layers/Matmul',
            output_slot=1,
            wall_time=43,
            step=7,
            list_of_values=(
                list(range(12)) +
                [float(tf.float64.as_datatype_enum), 2.0, 3.0, 3.0])))
    writer.WriteEvent(
        self._CreateEventWithDebugNumericSummary(
            device_name=device_name,
            op_name='logits/Add',
            output_slot=0,
            wall_time=1337,
            step=7,
            list_of_values=(list(range(12)) +
                            [float(tf.int32.as_datatype_enum), 2.0, 3.0, 3.0])))
    writer.WriteEvent(
        self._CreateEventWithDebugNumericSummary(
            device_name=device_name,
            op_name='logits/Add',
            output_slot=0,
            wall_time=1338,
            step=8,
            list_of_values=(list(range(12)) +
                            [float(tf.int16.as_datatype_enum), 0.0])))
    writer.Close()

    # Populate the log directory with debugger event for run 'run_foo'.
    run_foo_directory = os.path.join(self.log_dir, 'run_foo')
    os.mkdir(run_foo_directory)
    file_prefix = tf.compat.as_bytes(
        os.path.join(run_foo_directory, 'events.debugger'))
    writer = tf.pywrap_tensorflow.EventsWriter(file_prefix)
    writer.WriteEvent(
        self._CreateEventWithDebugNumericSummary(
            device_name=device_name,
            op_name='layers/Variable',
            output_slot=0,
            wall_time=4242,
            step=42,
            list_of_values=(list(range(12)) +
                            [float(tf.int16.as_datatype_enum), 1.0, 8.0])))
    writer.Close()

    # Start a server that will receive requests and respond with health pills.
    self.multiplexer = event_multiplexer.EventMultiplexer({
        '.': self.log_dir,
        'run_foo': run_foo_directory,
    })
    self.debugger_data_server_grpc_port = portpicker.pick_unused_port()

    # Fake threading behavior so that threads are synchronous.
    tf.test.mock.patch('threading.Thread.start', threading.Thread.run).start()

    # Replace the real DebuggerDataServer with a mock so no gRPC server is
    # actually started.
    self.mock_debugger_data_server = tf.test.mock.Mock(
        debugger_server_lib.DebuggerDataServer)
    self.mock_debugger_data_server_class = tf.test.mock.Mock(
        debugger_server_lib.DebuggerDataServer,
        return_value=self.mock_debugger_data_server)
    tf.test.mock.patch.object(
        debugger_server_lib, 'DebuggerDataServer',
        self.mock_debugger_data_server_class).start()

    self.context = base_plugin.TBContext(
        logdir=self.log_dir, multiplexer=self.multiplexer)
    self.plugin = debugger_plugin.DebuggerPlugin(self.context)
    self.plugin.listen(self.debugger_data_server_grpc_port)
    wsgi_app = application.TensorBoardWSGIApp(
        self.log_dir, [self.plugin], self.multiplexer,
        reload_interval=0, path_prefix='')
    self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)

    # The debugger data server should be started at the correct port.
    self.mock_debugger_data_server_class.assert_called_once_with(
        self.debugger_data_server_grpc_port, self.log_dir)
    mock_debugger_data_server = self.mock_debugger_data_server
    start = mock_debugger_data_server.start_the_debugger_data_receiving_server
    self.assertEqual(1, start.call_count)
def setUp(self):
    """Writes mesh summaries for one run and serves the mesh plugin."""
    # We use numpy.random to generate meshes. We seed to avoid non-determinism
    # in this test.
    np.random.seed(17)

    # Log dir to save temp events into.
    self.log_dir = self.get_temp_dir()

    # Create mesh summary.
    # NOTE(review): the span from here through the writer loop is assumed to
    # sit inside this graph context; confirm against the original formatting.
    with tf.compat.v1.Graph().as_default():
        tf_placeholder = tf.compat.v1.placeholder
        sess = tf.compat.v1.Session()
        point_cloud = test_utils.get_random_mesh(1000)
        point_cloud_vertices = tf_placeholder(tf.float32,
                                              point_cloud.vertices.shape)
        mesh_no_color = test_utils.get_random_mesh(2000, add_faces=True)
        mesh_no_color_extended = test_utils.get_random_mesh(2500,
                                                            add_faces=True)
        mesh_no_color_vertices = tf_placeholder(tf.float32, [1, None, 3])
        mesh_no_color_faces = tf_placeholder(tf.int32, [1, None, 3])
        mesh_color = test_utils.get_random_mesh(3000, add_faces=True,
                                                add_colors=True)
        mesh_color_vertices = tf_placeholder(tf.float32,
                                             mesh_color.vertices.shape)
        mesh_color_faces = tf_placeholder(tf.int32, mesh_color.faces.shape)
        mesh_color_colors = tf_placeholder(tf.uint8, mesh_color.colors.shape)
        self.data = [
            point_cloud, mesh_no_color, mesh_no_color_extended, mesh_color
        ]

        # In case when name is present and display_name is not, we will reuse
        # name as display_name. Summaries below intended to test both cases.
        self.names = ["point_cloud", "mesh_no_color", "mesh_color"]
        summary.op(self.names[0], point_cloud_vertices,
                   description="just point cloud")
        summary.op(self.names[1], mesh_no_color_vertices,
                   faces=mesh_no_color_faces,
                   display_name="name_to_display_in_ui",
                   description="beautiful mesh in grayscale")
        summary.op(self.names[2], mesh_color_vertices,
                   faces=mesh_color_faces,
                   colors=mesh_color_colors,
                   description="mesh with random colors")
        merged_summary_op = tf.compat.v1.summary.merge_all()
        self.runs = ["bar"]
        self.steps = 20
        bar_directory = os.path.join(self.log_dir, self.runs[0])
        with tensorboard_test_util.FileWriterCache.get(
                bar_directory) as writer:
            writer.add_graph(sess.graph)
            for step in range(self.steps):
                # Alternate between two random meshes with different number of
                # vertices.
                no_color = mesh_no_color if step % 2 == 0 else mesh_no_color_extended
                # Freeze time so each event's wall time equals its step.
                with patch.object(time, 'time', return_value=step):
                    writer.add_summary(
                        sess.run(merged_summary_op, feed_dict={
                            point_cloud_vertices: point_cloud.vertices,
                            mesh_no_color_vertices: no_color.vertices,
                            mesh_no_color_faces: no_color.faces,
                            mesh_color_vertices: mesh_color.vertices,
                            mesh_color_faces: mesh_color.faces,
                            mesh_color_colors: mesh_color.colors,
                        }),
                        global_step=step)

    # Start a server that will receive requests.
    self.multiplexer = event_multiplexer.EventMultiplexer({
        "bar": bar_directory,
    })
    self.context = base_plugin.TBContext(logdir=self.log_dir,
                                         multiplexer=self.multiplexer)
    self.plugin = mesh_plugin.MeshPlugin(self.context)
    wsgi_app = application.TensorBoardWSGIApp(
        self.log_dir, [self.plugin], self.multiplexer,
        reload_interval=0, path_prefix="")
    self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
    self.multiplexer.Reload()
    self.routes = self.plugin.get_plugin_apps()
def setUp(self):
    """Writes audio summaries via compat.v1 APIs, then serves the plugin."""
    self.log_dir = tempfile.mkdtemp()

    # We use numpy.random to generate audio. We seed to avoid non-determinism
    # in this test.
    numpy.random.seed(42)

    # Create old-style audio summaries for run "foo".
    tf.compat.v1.reset_default_graph()
    sess = tf.compat.v1.Session()
    placeholder = tf.compat.v1.placeholder(tf.float32)
    tf.compat.v1.summary.audio(name="baz", tensor=placeholder,
                               sample_rate=44100)
    merged_summary_op = tf.compat.v1.summary.merge_all()
    foo_directory = os.path.join(self.log_dir, "foo")
    with test_util.FileWriterCache.get(foo_directory) as writer:
        writer.add_graph(sess.graph)
        for step in xrange(2):
            # The floats (sample data) range from -1 to 1.
            writer.add_summary(
                sess.run(merged_summary_op, feed_dict={
                    placeholder: numpy.random.rand(42, 22050) * 2 - 1
                }),
                global_step=step)

    # Create new-style audio summaries for run "bar".
    tf.compat.v1.reset_default_graph()
    sess = tf.compat.v1.Session()
    audio_placeholder = tf.compat.v1.placeholder(tf.float32)
    labels_placeholder = tf.compat.v1.placeholder(tf.string)
    summary.op("quux", audio_placeholder, sample_rate=44100,
               labels=labels_placeholder,
               description="how do you pronounce that, anyway?")
    merged_summary_op = tf.compat.v1.summary.merge_all()
    bar_directory = os.path.join(self.log_dir, "bar")
    with test_util.FileWriterCache.get(bar_directory) as writer:
        writer.add_graph(sess.graph)
        for step in xrange(2):
            # The floats (sample data) range from -1 to 1. One Markdown label
            # string is attached per sample.
            writer.add_summary(
                sess.run(merged_summary_op, feed_dict={
                    audio_placeholder:
                        numpy.random.rand(42, 11025, 1) * 2 - 1,
                    labels_placeholder: [
                        tf.compat.as_bytes('step **%s**, sample %s'
                                           % (step, sample))
                        for sample in xrange(42)
                    ],
                }),
                global_step=step)

    # Start a server with the plugin.
    multiplexer = event_multiplexer.EventMultiplexer({
        "foo": foo_directory,
        "bar": bar_directory,
    })
    context = base_plugin.TBContext(
        logdir=self.log_dir, multiplexer=multiplexer)
    self.plugin = audio_plugin.AudioPlugin(context)
    # Setting a reload interval of -1 disables reloading. We disable reloading
    # because we seek to block tests from running til after one reload finishes.
    # This setUp method thus manually reloads the multiplexer. TensorBoard would
    # otherwise reload in a non-blocking thread.
    wsgi_app = application.TensorBoardWSGIApp(
        self.log_dir, [self.plugin], multiplexer, reload_interval=-1,
        path_prefix='')
    self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
    multiplexer.Reload()