def setUp(self):
    """Build a CorePlugin over a fresh temp logdir and wrap it in a test client."""
    super(CorePluginTestBase, self).setUp()
    self.logdir = self.get_temp_dir()
    self.multiplexer = event_multiplexer.EventMultiplexer()
    plugin_context = base_plugin.TBContext(
        assets_zip_provider=get_test_assets_zip_provider(),
        logdir=self.logdir,
        multiplexer=self.multiplexer,
    )
    self.plugin = core_plugin.CorePlugin(plugin_context)
    wsgi_app = application.TensorBoardWSGI([self.plugin])
    self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
def _SetupWSGIApp(self):
    """Create a ProjectorPlugin WSGI app over self.log_dir plus a test client."""
    mux = event_multiplexer.EventMultiplexer(
        size_guidance=application.DEFAULT_SIZE_GUIDANCE,
        purge_orphaned_data=True,
    )
    context = base_plugin.TBContext(logdir=self.log_dir, multiplexer=mux)
    self.plugin = projector_plugin.ProjectorPlugin(context)
    wsgi_app = application.TensorBoardWSGIApp(
        self.log_dir,
        [self.plugin],
        mux,
        reload_interval=0,
        path_prefix='',
    )
    self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
def set_up_with_runs(self, with_graph=True, without_graph=True):
    """Generate the requested runs, load them, and construct a GraphsPlugin.

    Args:
      with_graph: Whether to generate the run that includes a graph.
      without_graph: Whether to generate the run without a graph.
    """
    self.logdir = self.get_temp_dir()
    if with_graph:
        self.generate_run(self._RUN_WITH_GRAPH, include_graph=True)
    if without_graph:
        self.generate_run(self._RUN_WITHOUT_GRAPH, include_graph=False)
    mux = event_multiplexer.EventMultiplexer()
    mux.AddRunsFromDirectory(self.logdir)
    mux.Reload()
    ctx = base_plugin.TBContext(logdir=self.logdir, multiplexer=mux)
    self.plugin = graphs_plugin.GraphsPlugin(ctx)
def setUp(self):
    """Stand up an InteractiveInferencePlugin app with an empty multiplexer."""
    self.logdir = tf.test.get_temp_dir()
    self.context = base_plugin.TBContext(logdir=self.logdir)
    self.plugin = interactive_inference_plugin.InteractiveInferencePlugin(
        self.context)
    empty_multiplexer = event_multiplexer.EventMultiplexer({})
    wsgi_app = application.TensorBoardWSGIApp(
        self.logdir,
        [self.plugin],
        multiplexer=empty_multiplexer,
        reload_interval=0,
        path_prefix='')
    self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
def standard_tensorboard_wsgi(
    logdir,
    purge_orphaned_data,
    reload_interval,
    plugins,
    db_uri="",
    assets_zip_provider=None,
    path_prefix=""):
  """Construct a TensorBoardWSGIApp with standard plugins and multiplexer.

  Args:
    logdir: The path to the directory containing events files.
    purge_orphaned_data: Whether to purge orphaned data.
    reload_interval: The interval at which the backend reloads more data in
        seconds.
    plugins: A list of constructor functions for TBPlugin subclasses.
    db_uri: A String containing the URI of the SQL database for persisting
        data, or empty for memory-only mode.
    assets_zip_provider: See TBContext documentation for more information.
        If this value is not specified, this function will attempt to load
        the `tensorboard.default` module to use the default. This behavior
        might be removed in the future.
    path_prefix: A prefix of the path when app isn't served from root.

  Returns:
    The new TensorBoard WSGI application.
  """
  if assets_zip_provider is None:
    # Deferred import avoids a hard dependency on `tensorboard.default`
    # for callers that always pass an explicit provider.
    from tensorboard import default
    assets_zip_provider = default.get_assets_zip_provider()
  multiplexer = event_multiplexer.EventMultiplexer(
      size_guidance=DEFAULT_SIZE_GUIDANCE,
      tensor_size_guidance=DEFAULT_TENSOR_SIZE_GUIDANCE,
      purge_orphaned_data=purge_orphaned_data)
  db_module, db_connection_provider = get_database_info(db_uri)
  if db_connection_provider is not None:
    # SQL mode: make sure the schema exists before any plugin touches the DB.
    with contextlib.closing(db_connection_provider()) as db_conn:
      schema = db.Schema(db_conn)
      schema.create_tables()
      schema.create_indexes()
  # NOTE: this dict is intentionally handed to the context *before* the
  # plugins are constructed, then populated afterwards — plugins hold a
  # reference to it and can look up sibling plugin instances lazily.
  plugin_name_to_instance = {}
  context = base_plugin.TBContext(
      db_module=db_module,
      db_connection_provider=db_connection_provider,
      logdir=logdir,
      multiplexer=multiplexer,
      assets_zip_provider=assets_zip_provider,
      plugin_name_to_instance=plugin_name_to_instance)
  plugin_instances = [constructor(context) for constructor in plugins]
  for plugin_instance in plugin_instances:
    plugin_name_to_instance[plugin_instance.plugin_name] = plugin_instance
  return TensorBoardWSGIApp(
      logdir, plugin_instances, multiplexer,
      reload_interval, path_prefix)
def set_up_with_runs(self, run_names):
    """Generate the named runs, load them, and construct a ScalarsPlugin."""
    self.logdir = self.get_temp_dir()
    for name in run_names:
        self.generate_run(name)
    mux = event_multiplexer.EventMultiplexer(size_guidance={
        # don't truncate my test data, please
        event_accumulator.TENSORS: self._STEPS,
    })
    mux.AddRunsFromDirectory(self.logdir)
    mux.Reload()
    ctx = base_plugin.TBContext(logdir=self.logdir, multiplexer=mux)
    self.plugin = scalars_plugin.ScalarsPlugin(ctx)
def testAddRunsFromDirectory(self):
    """Tests AddRunsFromDirectory function.

    Tests the following scenarios:
    - When the directory does not exist.
    - When the directory is empty.
    - When the directory has empty subdirectory.
    - Contains proper EventAccumulators after adding events.
    """
    x = event_multiplexer.EventMultiplexer()
    tmpdir = self.get_temp_dir()
    join = os.path.join
    fakedir = join(tmpdir, "fake_accumulator_directory")
    realdir = join(tmpdir, "real_accumulator_directory")
    # A fresh multiplexer knows about no runs at all.
    self.assertEqual(x.Runs(), {})
    # Nonexistent directory: silently a no-op.
    x.AddRunsFromDirectory(fakedir)
    self.assertEqual(x.Runs(), {}, "loading fakedir had no effect")
    # Existing but empty directory: still no runs.
    _CreateCleanDirectory(realdir)
    x.AddRunsFromDirectory(realdir)
    self.assertEqual(x.Runs(), {}, "loading empty directory had no effect")
    # An empty subdirectory (no events files) also yields no runs.
    path1 = join(realdir, "path1")
    tf.io.gfile.mkdir(path1)
    x.AddRunsFromDirectory(realdir)
    self.assertEqual(
        x.Runs(), {}, "creating empty subdirectory had no effect"
    )
    # Once events exist in the subdirectory, it is picked up as a run.
    _AddEvents(path1)
    x.AddRunsFromDirectory(realdir)
    self.assertItemsEqual(x.Runs(), ["path1"], "loaded run: path1")
    loader1 = x.GetAccumulator("path1")
    self.assertEqual(loader1._path, path1, "has the correct path")
    # Adding a sibling run must not rebuild the existing accumulator.
    path2 = join(realdir, "path2")
    _AddEvents(path2)
    x.AddRunsFromDirectory(realdir)
    self.assertItemsEqual(x.Runs(), ["path1", "path2"])
    self.assertEqual(
        x.GetAccumulator("path1"), loader1, "loader1 not regenerated"
    )
    # Nested run directories get slash-joined run names.
    path2_2 = join(path2, "path2")
    _AddEvents(path2_2)
    x.AddRunsFromDirectory(realdir)
    self.assertItemsEqual(x.Runs(), ["path1", "path2", "path2/path2"])
    self.assertEqual(
        x.GetAccumulator("path2/path2")._path,
        path2_2,
        "loader2 path correct",
    )
def setUp(self): self.log_dir = tempfile.mkdtemp() # We use numpy.random to generate images. We seed to avoid non-determinism # in this test. numpy.random.seed(42) # Create old-style image summaries for run "foo". tf.reset_default_graph() sess = tf.Session() placeholder = tf.placeholder(tf.uint8) tf.summary.image(name="baz", tensor=placeholder) merged_summary_op = tf.summary.merge_all() foo_directory = os.path.join(self.log_dir, "foo") writer = tf.summary.FileWriter(foo_directory) writer.add_graph(sess.graph) for step in xrange(2): writer.add_summary(sess.run(merged_summary_op, feed_dict={ placeholder: (numpy.random.rand(1, 16, 42, 3) * 255).astype( numpy.uint8) }), global_step=step) writer.close() # Create new-style image summaries for run bar. tf.reset_default_graph() sess = tf.Session() placeholder = tf.placeholder(tf.uint8) summary.op(name="quux", images=placeholder, description="how do you pronounce that, anyway?") merged_summary_op = tf.summary.merge_all() bar_directory = os.path.join(self.log_dir, "bar") writer = tf.summary.FileWriter(bar_directory) writer.add_graph(sess.graph) for step in xrange(2): writer.add_summary(sess.run(merged_summary_op, feed_dict={ placeholder: (numpy.random.rand(1, 6, 8, 3) * 255).astype( numpy.uint8) }), global_step=step) writer.close() # Start a server with the plugin. multiplexer = event_multiplexer.EventMultiplexer({ "foo": foo_directory, "bar": bar_directory, }) context = base_plugin.TBContext( logdir=self.log_dir, multiplexer=multiplexer) plugin = images_plugin.ImagesPlugin(context) wsgi_app = application.TensorBoardWSGIApp( self.log_dir, [plugin], multiplexer, reload_interval=0) self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse) self.routes = plugin.get_plugin_apps()
def load_runs(self, run_names):
    """Generate the named runs and return (logdir, loaded multiplexer)."""
    logdir = self.get_temp_dir()
    for name in run_names:
        self.generate_run(logdir, name)
    mux = event_multiplexer.EventMultiplexer(
        size_guidance={
            # don't truncate my test data, please
            event_accumulator.TENSORS: self._STEPS,
        })
    mux.AddRunsFromDirectory(logdir)
    mux.Reload()
    return (logdir, mux)
def _start_logdir_based_server(self, temp_dir):
    """Start a logdir-backed CorePlugin server rooted at temp_dir."""
    self.logdir = temp_dir
    self.multiplexer = event_multiplexer.EventMultiplexer(
        size_guidance=application.DEFAULT_SIZE_GUIDANCE,
        purge_orphaned_data=True)
    plugin_context = base_plugin.TBContext(
        assets_zip_provider=get_test_assets_zip_provider(),
        logdir=self.logdir,
        multiplexer=self.multiplexer,
        window_title='title foo')
    self.logdir_based_plugin = core_plugin.CorePlugin(plugin_context)
    wsgi_app = application.TensorBoardWSGI([self.logdir_based_plugin])
    self.logdir_based_server = werkzeug_test.Client(
        wsgi_app, wrappers.BaseResponse)
def load_plugin(self, run_specs):
    """Generate runs from run_specs and return a GraphsPlugin over them."""
    logdir = self.get_temp_dir()
    for spec in run_specs:
        self.generate_run(logdir, *spec)
    mux = event_multiplexer.EventMultiplexer()
    mux.AddRunsFromDirectory(logdir)
    mux.Reload()
    data_prov = data_provider.MultiplexerDataProvider(mux, logdir)
    context = base_plugin.TBContext(
        logdir=logdir,
        multiplexer=mux,
        data_provider=data_prov,
    )
    return graphs_plugin.GraphsPlugin(context)
def setUp(self):
    """Build a CorePlugin with an empty data provider and a test client."""
    super(CorePluginNoDataTest, self).setUp()
    mux = event_multiplexer.EventMultiplexer()
    logdir = self.get_temp_dir()
    data_prov = data_provider.MultiplexerDataProvider(mux, logdir)
    plugin_context = base_plugin.TBContext(
        assets_zip_provider=get_test_assets_zip_provider(),
        logdir=logdir,
        data_provider=data_prov,
        window_title="title foo",
    )
    self.plugin = core_plugin.CorePlugin(plugin_context)
    wsgi_app = application.TensorBoardWSGI([self.plugin])
    self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
def set_up_with_runs(self, run_names):
    """Generate the named runs and construct a DistributionsPlugin over them."""
    logdir = self.get_temp_dir()
    for name in run_names:
        self.generate_run(logdir, name)
    mux = event_multiplexer.EventMultiplexer(
        size_guidance={
            # don't truncate my test data, please
            tag_types.TENSORS: self._STEPS,
        })
    mux.AddRunsFromDirectory(logdir)
    mux.Reload()
    data_prov = data_provider.MultiplexerDataProvider(mux, logdir)
    ctx = base_plugin.TBContext(logdir=logdir, data_provider=data_prov)
    self.plugin = distributions_plugin.DistributionsPlugin(ctx)
def setUp(self):
    """Start an InteractiveDebuggerPlugin listening on an unused gRPC port."""
    super(InteractiveDebuggerPluginTest, self).setUp()
    self._dummy_logdir = tempfile.mkdtemp()
    empty_multiplexer = event_multiplexer.EventMultiplexer({})
    self._debugger_port = portpicker.pick_unused_port()
    self._debugger_url = "grpc://localhost:%d" % self._debugger_port
    plugin_context = base_plugin.TBContext(
        logdir=self._dummy_logdir,
        multiplexer=empty_multiplexer)
    self._debugger_plugin = (
        interactive_debugger_plugin.InteractiveDebuggerPlugin(plugin_context))
    self._debugger_plugin.listen(self._debugger_port)
    wsgi_app = application.TensorBoardWSGI([self._debugger_plugin])
    self._server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
def setUp(self):
    """Generate run1 data and serve it through a CorePlugin test client."""
    self.logdir = self.get_temp_dir()
    self.addCleanup(shutil.rmtree, self.logdir)
    self._generate_test_data(run_name='run1')
    self.multiplexer = event_multiplexer.EventMultiplexer(
        size_guidance=application.DEFAULT_SIZE_GUIDANCE,
        purge_orphaned_data=True)
    self._context = base_plugin.TBContext(
        assets_zip_provider=get_test_assets_zip_provider(),
        logdir=self.logdir,
        multiplexer=self.multiplexer)
    self.plugin = core_plugin.CorePlugin(self._context)
    wsgi_app = application.TensorBoardWSGIApp(
        self.logdir, [self.plugin], self.multiplexer, 0, path_prefix='')
    self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
def setUp(self):
    """Construct a MetricsPlugin backed by an empty multiplexer."""
    super(MetricsPluginTest, self).setUp()
    self._logdir = self.get_temp_dir()
    self._multiplexer = event_multiplexer.EventMultiplexer()
    # The plugin reads the generic-data flag from the context flags.
    flags = argparse.Namespace(generic_data="true")
    data_prov = data_provider.MultiplexerDataProvider(
        self._multiplexer, self._logdir)
    plugin_context = base_plugin.TBContext(
        flags=flags,
        logdir=self._logdir,
        multiplexer=self._multiplexer,
        data_provider=data_prov,
    )
    self._plugin = metrics_plugin.MetricsPlugin(plugin_context)
def setUp(self):
    """Run the PR-curve demo into a temp logdir and load it into a multiplexer."""
    super(PrCurveTest, self).setUp()
    self.logdir = self.get_temp_dir()
    tf.reset_default_graph()
    # Generate data.
    pr_curve_demo.run_all(
        logdir=self.logdir,
        steps=3,
        thresholds=5,
        verbose=False)
    # Create a multiplexer for reading the data we just wrote.
    self.multiplexer = event_multiplexer.EventMultiplexer()
    self.multiplexer.AddRunsFromDirectory(self.logdir)
    self.multiplexer.Reload()
def _send_request(self, path_prefix, pathname):
    """Build a prefix-mounted CorePlugin app and GET `pathname` against it."""
    mux = event_multiplexer.EventMultiplexer()
    logdir = self.get_temp_dir()
    data_prov = data_provider.MultiplexerDataProvider(mux, logdir)
    plugin_context = base_plugin.TBContext(
        assets_zip_provider=get_test_assets_zip_provider(),
        logdir=logdir,
        data_provider=data_prov,
        window_title="",
        flags=FakeFlags(path_prefix=path_prefix),
    )
    plugin = core_plugin.CorePlugin(plugin_context)
    wsgi_app = application.TensorBoardWSGI([plugin], path_prefix=path_prefix)
    client = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
    return client.get(pathname)
def testAddRunsFromDirectory(self):
  """Tests AddRunsFromDirectory function.

  Tests the following scenarios:
  - When the directory does not exist.
  - When the directory is empty.
  - When the directory has empty subdirectory.
  - Contains proper EventAccumulators after adding events.
  """
  x = event_multiplexer.EventMultiplexer()
  tmpdir = self.get_temp_dir()
  join = os.path.join
  fakedir = join(tmpdir, 'fake_accumulator_directory')
  realdir = join(tmpdir, 'real_accumulator_directory')
  # A fresh multiplexer knows about no runs at all.
  self.assertEqual(x.Runs(), {})
  # Nonexistent directory: silently a no-op.
  x.AddRunsFromDirectory(fakedir)
  self.assertEqual(x.Runs(), {}, 'loading fakedir had no effect')
  # Existing but empty directory: still no runs.
  _CreateCleanDirectory(realdir)
  x.AddRunsFromDirectory(realdir)
  self.assertEqual(x.Runs(), {}, 'loading empty directory had no effect')
  # An empty subdirectory (no events files) also yields no runs.
  path1 = join(realdir, 'path1')
  tf.gfile.MkDir(path1)
  x.AddRunsFromDirectory(realdir)
  self.assertEqual(x.Runs(), {},
                   'creating empty subdirectory had no effect')
  # Once events exist in the subdirectory, it is picked up as a run.
  _AddEvents(path1)
  x.AddRunsFromDirectory(realdir)
  self.assertItemsEqual(x.Runs(), ['path1'], 'loaded run: path1')
  loader1 = x.GetAccumulator('path1')
  self.assertEqual(loader1._path, path1, 'has the correct path')
  # Adding a sibling run must not rebuild the existing accumulator.
  path2 = join(realdir, 'path2')
  _AddEvents(path2)
  x.AddRunsFromDirectory(realdir)
  self.assertItemsEqual(x.Runs(), ['path1', 'path2'])
  self.assertEqual(x.GetAccumulator('path1'), loader1,
                   'loader1 not regenerated')
  # Nested run directories get slash-joined run names.
  path2_2 = join(path2, 'path2')
  _AddEvents(path2_2)
  x.AddRunsFromDirectory(realdir)
  self.assertItemsEqual(x.Runs(), ['path1', 'path2', 'path2/path2'])
  self.assertEqual(
      x.GetAccumulator('path2/path2')._path, path2_2, 'loader2 path correct')
def create_plugin(self, generate_testdata=True):
    """Return an NpmiPlugin over self.logdir, optionally generating test data.

    Args:
      generate_testdata: Whether to write fresh test data first.
    """
    if generate_testdata:
        self.generate_testdata()
    mux = event_multiplexer.EventMultiplexer()
    mux.AddRunsFromDirectory(self.logdir)
    mux.Reload()
    data_prov = data_provider.MultiplexerDataProvider(mux, self.logdir)
    context = base_plugin.TBContext(
        logdir=self.logdir,
        multiplexer=mux,
        data_provider=data_prov)
    return npmi_plugin.NpmiPlugin(context)
def testEnvironmentDebugOnExplicitly(self):
    """The /data/environment payload carries debug info when requested."""
    mux = event_multiplexer.EventMultiplexer()
    logdir = self.get_temp_dir()
    data_prov = data_provider.MultiplexerDataProvider(mux, logdir)
    plugin_context = base_plugin.TBContext(
        assets_zip_provider=get_test_assets_zip_provider(),
        logdir=logdir,
        data_provider=data_prov,
        window_title="title foo",
    )
    plugin = core_plugin.CorePlugin(plugin_context, include_debug_info=True)
    wsgi_app = application.TensorBoardWSGI([plugin])
    client = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
    payload = self._get_json(client, "/data/environment")
    self.assertIn("debug", payload)
def testPluginIsActiveWhenTextRuns(self):
    """The plugin should be active when there are runs with text."""
    multiplexer = event_multiplexer.EventMultiplexer()
    context = base_plugin.TBContext(logdir=self.logdir,
                                    multiplexer=multiplexer)
    plugin = text_plugin.TextPlugin(context)
    multiplexer.AddRunsFromDirectory(self.logdir)
    multiplexer.Reload()
    # Patch thread start so we can verify no background reload is kicked off.
    patcher = tf.test.mock.patch('threading.Thread.start', autospec=True)
    mock = patcher.start()
    self.addCleanup(patcher.stop)
    # Fixed: the second positional argument of assertTrue is the failure
    # *message*, so the previous `assertTrue(plugin.is_active(), True)`
    # passed a meaningless msg; assert the truthiness directly.
    self.assertTrue(plugin.is_active())
    # Data is available within the multiplexer. No thread should have started
    # for checking plugin assets data.
    self.assertFalse(mock.called)
def setUp(self):
    """Run the PR-curve demo and build a PrCurvesPlugin over its output."""
    super(PrCurvesPluginTest, self).setUp()
    logdir = os.path.join(self.get_temp_dir(), "logdir")
    # Generate data.
    pr_curve_demo.run_all(
        logdir=logdir, steps=3, thresholds=5, verbose=False
    )
    # Create a multiplexer for reading the data we just wrote.
    mux = event_multiplexer.EventMultiplexer()
    mux.AddRunsFromDirectory(logdir)
    mux.Reload()
    plugin_context = base_plugin.TBContext(logdir=logdir, multiplexer=mux)
    self.plugin = pr_curves_plugin.PrCurvesPlugin(plugin_context)
def create_profile_plugin(logdir, master_tpu_unsecure_channel=''):
    """Instantiates ProfilePlugin with data from the specified directory.

    Args:
      logdir: Directory containing TensorBoard data.
      master_tpu_unsecure_channel: Master TPU address for streaming trace
        viewer.

    Returns:
      An instance of ProfilePlugin.
    """
    mux = plugin_event_multiplexer.EventMultiplexer()
    mux.AddRunsFromDirectory(logdir)
    flags = _FakeFlags(logdir, master_tpu_unsecure_channel)
    context = base_plugin.TBContext(
        logdir=logdir, multiplexer=mux, flags=flags)
    return profile_plugin.ProfilePlugin(context)
def testPluginRunToTagToContent(self):
    """Tests the method that produces the run to tag to content mapping."""
    mux = event_multiplexer.EventMultiplexer({
        'run1': 'path1',
        'run2': 'path2'
    })
    expected = {
        'run1': {
            'path1_foo': 'foo_content',
            'path1_bar': 'bar_content',
        },
        'run2': {
            'path2_foo': 'foo_content',
            'path2_bar': 'bar_content',
        }
    }
    self.assertDictEqual(expected, mux.PluginRunToTagToContent('baz_plugin'))
def testSetLayout(self):
    """Round-trips a custom-scalars Layout proto through a summary on disk."""
    layout_proto_to_write = layout_pb2.Layout(category=[
        layout_pb2.Category(title='mean biases', chart=[
            layout_pb2.Chart(
                title='mean layer biases',
                tag=[r'mean/layer\d+/biases'])
        ]),
        layout_pb2.Category(title='std weights', chart=[
            layout_pb2.Chart(
                title='stddev layer weights',
                tag=[r'stddev/layer\d+/weights'])
        ]),
        layout_pb2.Category(
            title='cross entropy ... and maybe some other values',
            chart=[
                layout_pb2.Chart(title='cross entropy',
                                 tag=[r'cross entropy']),
                layout_pb2.Chart(title='accuracy', tag=[r'accuracy']),
                layout_pb2.Chart(title='max layer weights',
                                 tag=[r'max/layer1/.*', r'max/layer2/.*'])
            ],
            closed=True)
    ])
    # Write the data as a summary for the '.' run.
    with tf.Session() as s, tf.summary.FileWriter(self.logdir) as writer:
        writer.add_summary(s.run(summary.op(layout_proto_to_write)))
    # Read the data from disk.
    multiplexer = event_multiplexer.EventMultiplexer()
    multiplexer.AddRunsFromDirectory(self.logdir)
    multiplexer.Reload()
    tensor_events = multiplexer.Tensors('.', metadata.CONFIG_SUMMARY_TAG)
    self.assertEqual(1, len(tensor_events))
    # Parse the data. Use ndarray.item() to extract the scalar bytes value:
    # np.asscalar was deprecated in NumPy 1.16 and removed in 1.23.
    string_array = tf.make_ndarray(tensor_events[0].tensor_proto)
    content = string_array.item()
    layout_proto_from_disk = layout_pb2.Layout()
    layout_proto_from_disk.ParseFromString(tf.compat.as_bytes(content))
    # Verify the content.
    self.assertProtoEquals(layout_proto_to_write, layout_proto_from_disk)
def testPluginRunToTagToContent(self):
    """Tests the method that produces the run to tag to content mapping."""
    mux = event_multiplexer.EventMultiplexer(
        {"run1": "path1", "run2": "path2"}
    )
    expected = {
        "run1": {
            "path1_foo": "foo_content",
            "path1_bar": "bar_content",
        },
        "run2": {
            "path2_foo": "foo_content",
            "path2_bar": "bar_content",
        },
    }
    self.assertDictEqual(expected, mux.PluginRunToTagToContent("baz_plugin"))
def testAddRunsFromDirectoryThatContainsEvents(self):
    """Events at the directory root load as run '.'; subdirs get their name."""
    mux = event_multiplexer.EventMultiplexer()
    realdir = os.path.join(self.get_temp_dir(), "event_containing_directory")
    _CreateCleanDirectory(realdir)
    self.assertEqual(mux.Runs(), {})
    # Events directly in the root become the '.' run.
    _AddEvents(realdir)
    mux.AddRunsFromDirectory(realdir)
    self.assertItemsEqual(mux.Runs(), ["."])
    # A subdirectory with events becomes its own run.
    _AddEvents(os.path.join(realdir, "subdir"))
    mux.AddRunsFromDirectory(realdir)
    self.assertItemsEqual(mux.Runs(), [".", "subdir"])
def testAddRun(self):
    """AddRun reuses loaders for the same path and rebuilds on a new path."""
    mux = event_multiplexer.EventMultiplexer()
    mux.AddRun("run1_path", "run1")
    first_loader = mux.GetAccumulator("run1")
    self.assertEqual(sorted(mux.Runs().keys()), ["run1"])
    self.assertEqual(first_loader._path, "run1_path")
    # Re-adding the same (path, name) pair keeps the existing accumulator.
    mux.AddRun("run1_path", "run1")
    self.assertEqual(first_loader, mux.GetAccumulator("run1"),
                     "loader not recreated")
    # A new path under the same run name forces a fresh accumulator.
    mux.AddRun("run2_path", "run1")
    replacement_loader = mux.GetAccumulator("run1")
    self.assertEqual(replacement_loader._path, "run2_path")
    self.assertNotEqual(first_loader, replacement_loader)
    # Omitting the name uses the path itself as the run name.
    mux.AddRun("runName3")
    self.assertItemsEqual(sorted(mux.Runs().keys()), ["run1", "runName3"])
    self.assertEqual(mux.GetAccumulator("runName3")._path, "runName3")
def testAddRunsFromDirectoryWithRunNames(self):
    """A name argument prefixes every discovered run with 'name/'."""
    mux = event_multiplexer.EventMultiplexer()
    realdir = os.path.join(self.get_temp_dir(), 'event_containing_directory')
    _CreateCleanDirectory(realdir)
    self.assertEqual(mux.Runs(), {})
    # Root-level events become 'foo/.' when the name prefix is supplied.
    _AddEvents(realdir)
    mux.AddRunsFromDirectory(realdir, 'foo')
    self.assertItemsEqual(mux.Runs(), ['foo/.'])
    # Subdirectory runs are likewise prefixed.
    _AddEvents(os.path.join(realdir, 'subdir'))
    mux.AddRunsFromDirectory(realdir, 'foo')
    self.assertItemsEqual(mux.Runs(), ['foo/.', 'foo/subdir'])