def testInitialization(self):
   """Tests EventMultiplexer is created properly with its params."""
   x = event_multiplexer.EventMultiplexer()
   self.assertEqual(x.Runs(), {})
   x = event_multiplexer.EventMultiplexer({'run1': 'path1', 'run2': 'path2'})
   self.assertItemsEqual(x.Runs(), ['run1', 'run2'])
   self.assertEqual(x.GetAccumulator('run1')._path, 'path1')
   self.assertEqual(x.GetAccumulator('run2')._path, 'path2')
Example 2
 def testInitialization(self):
     """Tests EventMultiplexer is created properly with its params."""
     x = event_multiplexer.EventMultiplexer()
     self.assertEqual(x.Runs(), {})
     x = event_multiplexer.EventMultiplexer({
         "run1": "path1",
         "run2": "path2"
     })
     self.assertItemsEqual(x.Runs(), ["run1", "run2"])
     self.assertEqual(x.GetAccumulator("run1")._path, "path1")
     self.assertEqual(x.GetAccumulator("run2")._path, "path2")
Example 3
 def testGetOpsWithHealthPillsWhenHealthPillsAreNotAvailable(self):
     # The event accumulator lacks health pills for the run.
     x = event_multiplexer.EventMultiplexer({
         'run1': 'path1',
         'run2': 'path2'
     })
     self.assertItemsEqual([], x.GetOpsWithHealthPills('run1'))
Example 4
def read_metrics_from_events_dir(events_dir,
                                 tags_to_ignore=None,
                                 use_run_name_prefix=False):
    """Collect the TensorBoard summary values for each metric.

    Args:
      events_dir (string): Path to the directory containing TensorBoard summaries.
      tags_to_ignore (set[string]): Set of TensorBoard tag names to skip.
      use_run_name_prefix (bool): If True, prefix tag names with the name
        of the run that contains them (e.g. `train/` or `eval/`).

    Returns:
      raw_metrics (dict): Keys are TensorBoard tags and values are lists of
        MetricPoint, one per data point recorded for that tag.
    """
    tags_to_ignore = tags_to_ignore or set()

    em = event_multiplexer.EventMultiplexer()
    em.AddRunsFromDirectory(events_dir)
    em.Reload()

    raw_metrics = collections.defaultdict(list)
    for run, tags in em.Runs().items():
        # 'Old-style' runs have a simple format and store values directly.
        for tag in tags['scalars']:
            if use_run_name_prefix and run != '.':
                tag_final = '/'.join((run, tag))
            else:
                tag_final = tag
            if tag_final in tags_to_ignore:
                continue
            raw_metrics[tag_final].extend([
                MetricPoint(metric_value=x.value, wall_time=x.wall_time)
                for x in em.Scalars(run, tag)
            ])
        # 'New-style' runs store values inside Tensor protos.
        for tag in tags['tensors']:
            if use_run_name_prefix and run != '.':
                tag_final = '/'.join((run, tag))
            else:
                tag_final = tag
            if tag_final in tags_to_ignore:
                continue
            for t in em.Tensors(run, tag):
                tensor_dtype = tf.dtypes.as_dtype(t.tensor_proto.dtype)
                try:
                    val = np.frombuffer(t.tensor_proto.tensor_content,
                                        tensor_dtype.as_numpy_dtype).tolist()
                    assert len(val) == 1  # There should be 1 value per tensor.
                    raw_metrics[tag_final].append(
                        MetricPoint(metric_value=val[0],
                                    wall_time=t.wall_time))
                except ValueError as e:
                    logging.warning(
                        'Unable to parse tag: `{}` from tensor_content: {}. '
                        'Error: {}. Consider adding this tag to tags_to_ignore '
                        'in config.'.format(tag, t.tensor_proto.tensor_content,
                                            e))

    return raw_metrics
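A minimal usage sketch for the helper above; the events directory path and the ignored tag are placeholder assumptions, and MetricPoint is the (metric_value, wall_time) record used in the snippet.

# Hypothetical call to read_metrics_from_events_dir defined above;
# '/tmp/my_model' and 'learning_rate' are placeholder values.
metrics = read_metrics_from_events_dir(
    '/tmp/my_model',                   # directory containing event files
    tags_to_ignore={'learning_rate'},  # tags we do not want to collect
    use_run_name_prefix=True)          # keys become e.g. 'eval/accuracy'
for tag, points in metrics.items():
    print(tag, len(points), points[0].wall_time)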
Example 5
 def testAddRunMaintainsLoading(self):
   x = event_multiplexer.EventMultiplexer()
   x.Reload()
   x.AddRun('run1')
   x.AddRun('run2')
   self.assertTrue(x.GetAccumulator('run1').reload_called)
   self.assertTrue(x.GetAccumulator('run2').reload_called)
Example 6
 def _test(self, route, should_be_okay):
     temp_dir = tempfile.mkdtemp(prefix=self.get_temp_dir())
     self.addCleanup(shutil.rmtree, temp_dir)
     multiplexer = event_multiplexer.EventMultiplexer(
         size_guidance=application.DEFAULT_SIZE_GUIDANCE,
         purge_orphaned_data=True)
     plugins = [
         FakePlugin(None,
                    plugin_name='foo',
                    is_active_value=True,
                    routes_mapping={
                        route: lambda environ, start_response: None
                    }),
     ]
     if should_be_okay:
         application.TensorBoardWSGIApp(temp_dir,
                                        plugins,
                                        multiplexer,
                                        reload_interval=0)
     else:
         with six.assertRaisesRegex(self, ValueError, r'invalid route'):
             application.TensorBoardWSGIApp(temp_dir,
                                            plugins,
                                            multiplexer,
                                            reload_interval=0)
Example 7
def standard_tensorboard_wsgi(
    logdir,
    purge_orphaned_data,
    reload_interval,
    plugins):
  """Construct a TensorBoardWSGIApp with standard plugins and multiplexer.

  Args:
    logdir: The path to the directory containing events files.
    purge_orphaned_data: Whether to purge orphaned data.
    reload_interval: The interval, in seconds, at which the backend reloads
        more data.
    plugins: A list of constructor functions for TBPlugin subclasses.

  Returns:
    The new TensorBoard WSGI application.
  """
  multiplexer = event_multiplexer.EventMultiplexer(
      size_guidance=DEFAULT_SIZE_GUIDANCE,
      purge_orphaned_data=purge_orphaned_data)
  context = base_plugin.TBContext(
      assets_zip_provider=get_default_assets_zip_provider(),
      logdir=logdir,
      multiplexer=multiplexer)
  plugins = [constructor(context) for constructor in plugins]
  return TensorBoardWSGIApp(logdir, plugins, multiplexer, reload_interval)
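A hedged sketch of wiring this helper to a development server; the logdir path is a placeholder, and werkzeug's run_simple is used only as one possible way to serve the resulting WSGI app.

from werkzeug import serving

# Build the app with no extra plugins; plugin constructors could be passed
# in the `plugins` list (each receives the TBContext built inside the helper).
app = standard_tensorboard_wsgi(
    logdir='/tmp/logs',        # placeholder events directory
    purge_orphaned_data=True,
    reload_interval=60,        # reload event data every 60 seconds
    plugins=[])
serving.run_simple('localhost', 6006, app)  # serve on the usual TensorBoard port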
Example 8
    def setUp(self):
        self.logdir = self.get_temp_dir()

        self._GenerateTestData(run_name='run1')
        self._multiplexer = event_multiplexer.EventMultiplexer(
            size_guidance=application.DEFAULT_SIZE_GUIDANCE,
            purge_orphaned_data=True)
        plugins = [
            FakePlugin(None,
                       plugin_name='foo',
                       is_active_value=True,
                       routes_mapping={}),
            FakePlugin(None,
                       plugin_name='bar',
                       is_active_value=False,
                       routes_mapping={}),
        ]
        app = application.TensorBoardWSGIApp(self.logdir,
                                             plugins,
                                             self._multiplexer,
                                             reload_interval=0)
        try:
            self._server = serving.BaseWSGIServer('localhost', 0, app)
            # 0 to pick an unused port.
        except IOError:
            # BaseWSGIServer has a preference for IPv4. If that didn't work, try again
            # with an explicit IPv6 address.
            self._server = serving.BaseWSGIServer('::1', 0, app)
        self._server_thread = threading.Thread(
            target=self._server.serve_forever)
        self._server_thread.daemon = True
        self._server_thread.start()
        self._connection = http_client.HTTPConnection(
            'localhost', self._server.server_address[1])
Example 9
def standard_tensorboard_wsgi(logdir,
                              purge_orphaned_data,
                              reload_interval,
                              plugins,
                              db_uri="",
                              assets_zip_provider=None):
    """Construct a TensorBoardWSGIApp with standard plugins and multiplexer.

    Args:
      logdir: The path to the directory containing events files.
      purge_orphaned_data: Whether to purge orphaned data.
      reload_interval: The interval, in seconds, at which the backend reloads
          more data.
      plugins: A list of constructor functions for TBPlugin subclasses.
      db_uri: A string containing the URI of the SQL database for persisting
          data, or empty for memory-only mode.
      assets_zip_provider: Delegates to TBContext or uses the default if None.

    Returns:
      The new TensorBoard WSGI application.
    """
    multiplexer = event_multiplexer.EventMultiplexer(
        size_guidance=DEFAULT_SIZE_GUIDANCE,
        purge_orphaned_data=purge_orphaned_data)
    db_module, db_connection_provider = get_database_info(db_uri)
    context = base_plugin.TBContext(
        db_module=db_module,
        db_connection_provider=db_connection_provider,
        logdir=logdir,
        multiplexer=multiplexer,
        assets_zip_provider=(assets_zip_provider
                             or get_default_assets_zip_provider()))
    plugins = [constructor(context) for constructor in plugins]
    return TensorBoardWSGIApp(logdir, plugins, multiplexer, reload_interval)
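The `plugins` argument is a list of callables that each receive the TBContext built inside the helper; a sketch of one such constructor follows, where the scalars plugin import path is an assumption that may differ across TensorBoard versions.

# Illustrative constructor compatible with the `plugins` argument above.
def make_scalars_plugin(context):
    from tensorboard.plugins.scalar import scalars_plugin  # assumed import path
    return scalars_plugin.ScalarsPlugin(context)

app = standard_tensorboard_wsgi(
    '/tmp/logs',                    # placeholder events directory
    purge_orphaned_data=True,
    reload_interval=60,
    plugins=[make_scalars_plugin])  # constructors are called with the TBContext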
Example 10
  def testAddRunsFromDirectoryThrowsException(self):
    x = event_multiplexer.EventMultiplexer()
    tmpdir = self.get_temp_dir()

    filepath = _AddEvents(tmpdir)
    with self.assertRaises(ValueError):
      x.AddRunsFromDirectory(filepath)
Example 11
    def testExceptions(self):
        logdir = '/fake/foo'
        multiplexer = event_multiplexer.EventMultiplexer()

        # Fails if there is an unnamed plugin
        with self.assertRaises(ValueError):
            # This plugin lacks a name.
            plugins = [
                FakePlugin(None,
                           plugin_name=None,
                           is_active_value=True,
                           routes_mapping={}),
            ]
            application.TensorBoardWSGIApp(logdir, plugins, multiplexer, 0)

        # Fails if there are two plugins with the same name
        with self.assertRaises(ValueError):
            plugins = [
                FakePlugin(None,
                           plugin_name='foo',
                           is_active_value=True,
                           routes_mapping={}),
                FakePlugin(None,
                           plugin_name='foo',
                           is_active_value=True,
                           routes_mapping={}),
            ]
            application.TensorBoardWSGIApp(logdir, plugins, multiplexer, 0)
Example 12
 def _init_multiplexer(self):
     dirs = [
         os.path.join(self._events_dir, subdir) for subdir in self._subdirs
     ]
     run_path_map = {self._RUN_NAME % i: d for i, d in enumerate(dirs)}
     return event_multiplexer.EventMultiplexer(run_path_map)
Example 13
    def setUp(self):
        self.log_dir = tempfile.mkdtemp()

        # We use numpy.random to generate images. We seed to avoid non-determinism
        # in this test.
        numpy.random.seed(42)

        # Create old-style image summaries for run "foo".
        tf.reset_default_graph()
        sess = tf.Session()
        placeholder = tf.placeholder(tf.uint8)
        tf.summary.image(name="baz", tensor=placeholder)
        merged_summary_op = tf.summary.merge_all()
        foo_directory = os.path.join(self.log_dir, "foo")
        writer = tf.summary.FileWriter(foo_directory)
        writer.add_graph(sess.graph)
        for step in xrange(2):
            writer.add_summary(sess.run(merged_summary_op,
                                        feed_dict={
                                            placeholder:
                                            (numpy.random.rand(1, 16, 42, 3) *
                                             255).astype(numpy.uint8)
                                        }),
                               global_step=step)
        writer.close()

        # Create new-style image summaries for run bar.
        tf.reset_default_graph()
        sess = tf.Session()
        placeholder = tf.placeholder(tf.uint8)
        summary.op(name="quux",
                   images=placeholder,
                   description="how do you pronounce that, anyway?")
        merged_summary_op = tf.summary.merge_all()
        bar_directory = os.path.join(self.log_dir, "bar")
        writer = tf.summary.FileWriter(bar_directory)
        writer.add_graph(sess.graph)
        for step in xrange(2):
            writer.add_summary(sess.run(merged_summary_op,
                                        feed_dict={
                                            placeholder:
                                            (numpy.random.rand(1, 6, 8, 3) *
                                             255).astype(numpy.uint8)
                                        }),
                               global_step=step)
        writer.close()

        # Start a server with the plugin.
        multiplexer = event_multiplexer.EventMultiplexer({
            "foo": foo_directory,
            "bar": bar_directory,
        })
        context = base_plugin.TBContext(logdir=self.log_dir,
                                        multiplexer=multiplexer)
        plugin = images_plugin.ImagesPlugin(context)
        wsgi_app = application.TensorBoardWSGIApp(self.log_dir, [plugin],
                                                  multiplexer,
                                                  reload_interval=0)
        self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
        self.routes = plugin.get_plugin_apps()
Example 14
 def testPluginIsActiveWhenTextRuns(self):
     """The plugin should be active when there are runs with text."""
     multiplexer = event_multiplexer.EventMultiplexer()
     context = base_plugin.TBContext(logdir=None, multiplexer=multiplexer)
     plugin = text_plugin.TextPlugin(context)
     multiplexer.AddRunsFromDirectory(self.logdir)
     multiplexer.Reload()
     self.assertTrue(plugin.is_active())
Example 15
 def testReload(self):
   """EventAccumulators should Reload after EventMultiplexer call it."""
   x = event_multiplexer.EventMultiplexer({'run1': 'path1', 'run2': 'path2'})
   self.assertFalse(x.GetAccumulator('run1').reload_called)
   self.assertFalse(x.GetAccumulator('run2').reload_called)
   x.Reload()
   self.assertTrue(x.GetAccumulator('run1').reload_called)
   self.assertTrue(x.GetAccumulator('run2').reload_called)
Example 16
  def testScalars(self):
    """Tests Scalars function returns suitable values."""
    x = event_multiplexer.EventMultiplexer({'run1': 'path1', 'run2': 'path2'})

    run1_actual = x.Scalars('run1', 'sv1')
    run1_expected = ['path1/sv1']

    self.assertEqual(run1_expected, run1_actual)
Example 17
 def setUp(self):
   self.logdir = self.get_temp_dir()
   self.generate_testdata()
   multiplexer = event_multiplexer.EventMultiplexer()
   multiplexer.AddRunsFromDirectory(self.logdir)
   multiplexer.Reload()
   context = base_plugin.TBContext(logdir=self.logdir, multiplexer=multiplexer)
   self.plugin = text_plugin.TextPlugin(context)
Example 18
 def testExceptions(self):
     """KeyError should be raised when accessing non-existing keys."""
     x = event_multiplexer.EventMultiplexer({
         'run1': 'path1',
         'run2': 'path2'
     })
     with self.assertRaises(KeyError):
         x.Scalars('sv1', 'xxx')
Example 19
 def testExceptions(self):
     """KeyError should be raised when accessing non-existing keys."""
     x = event_multiplexer.EventMultiplexer({
         "run1": "path1",
         "run2": "path2"
     })
     with self.assertRaises(KeyError):
         x.Scalars("sv1", "xxx")
Example 20
 def testRunNamesRespected(self):
     """Tests two EventAccumulators inserted/accessed in EventMultiplexer."""
     x = event_multiplexer.EventMultiplexer({
         'run1': 'path1',
         'run2': 'path2'
     })
     self.assertItemsEqual(sorted(x.Runs().keys()), ['run1', 'run2'])
     self.assertEqual(x.GetAccumulator('run1')._path, 'path1')
     self.assertEqual(x.GetAccumulator('run2')._path, 'path2')
Example 21
 def set_up_with_runs(self, run_names):
     self.logdir = self.get_temp_dir()
     for run_name in run_names:
         self.generate_run(run_name)
     multiplexer = event_multiplexer.EventMultiplexer()
     multiplexer.AddRunsFromDirectory(self.logdir)
     multiplexer.Reload()
     context = base_plugin.TBContext(logdir=self.logdir,
                                     multiplexer=multiplexer)
     self.plugin = scalars_plugin.ScalarsPlugin(context)
Example 22
 def testRunNamesRespected(self):
     """Tests two EventAccumulators inserted/accessed in
     EventMultiplexer."""
     x = event_multiplexer.EventMultiplexer({
         "run1": "path1",
         "run2": "path2"
     })
     self.assertItemsEqual(sorted(x.Runs().keys()), ["run1", "run2"])
     self.assertEqual(x.GetAccumulator("run1")._path, "path1")
     self.assertEqual(x.GetAccumulator("run2")._path, "path2")
Example 23
 def testPluginIsActiveWhenRunsButNoText(self):
     """The plugin should be inactive when there are runs but none has text."""
     multiplexer = event_multiplexer.EventMultiplexer()
     context = base_plugin.TBContext(logdir=None, multiplexer=multiplexer)
     plugin = text_plugin.TextPlugin(context)
     logdir = os.path.join(self.get_temp_dir(), 'runs_with_no_text')
     self.generate_testdata(include_text=False, logdir=logdir)
     multiplexer.AddRunsFromDirectory(logdir)
     multiplexer.Reload()
     self.assertFalse(plugin.is_active())
Example 24
 def testReload(self):
     """EventAccumulators should Reload after EventMultiplexer call it."""
     x = event_multiplexer.EventMultiplexer({
         "run1": "path1",
         "run2": "path2"
     })
     self.assertFalse(x.GetAccumulator("run1").reload_called)
     self.assertFalse(x.GetAccumulator("run2").reload_called)
     x.Reload()
     self.assertTrue(x.GetAccumulator("run1").reload_called)
     self.assertTrue(x.GetAccumulator("run2").reload_called)
Example 25
 def testGetOpsWithHealthPillsWhenHealthPillsAreAvailable(self):
     # The event accumulator has health pills for the run.
     self.stubs.Set(
         event_accumulator, 'EventAccumulator',
         functools.partial(_GetFakeAccumulator,
                           health_pill_mapping={'Add': ['hp1', 'hp2']}))
     x = event_multiplexer.EventMultiplexer({
         'run1': 'path1',
         'run2': 'path2'
     })
     self.assertItemsEqual(['Add'], x.GetOpsWithHealthPills('run1'))
Example 26
    def testScalars(self):
        """Tests Scalars function returns suitable values."""
        x = event_multiplexer.EventMultiplexer({
            "run1": "path1",
            "run2": "path2"
        })

        run1_actual = x.Scalars("run1", "sv1")
        run1_expected = ["path1/sv1"]

        self.assertEqual(run1_expected, run1_actual)
Example 27
 def _SetupWSGIApp(self):
     multiplexer = event_multiplexer.EventMultiplexer(
         size_guidance=application.DEFAULT_SIZE_GUIDANCE,
         purge_orphaned_data=True)
     context = base_plugin.TBContext(logdir=self.log_dir,
                                     multiplexer=multiplexer)
     self.plugin = projector_plugin.ProjectorPlugin(context)
     wsgi_app = application.TensorBoardWSGIApp(self.log_dir, [self.plugin],
                                               multiplexer,
                                               reload_interval=0)
     self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
Example 28
    def setUp(self):
        self.log_dir = tempfile.mkdtemp()

        # We use numpy.random to generate audio. We seed to avoid non-determinism
        # in this test.
        numpy.random.seed(42)

        # Create audio summaries for run foo.
        tf.reset_default_graph()
        sess = tf.Session()
        placeholder = tf.placeholder(tf.float32)
        tf.summary.audio(name="baz", tensor=placeholder, sample_rate=44100)
        merged_summary_op = tf.summary.merge_all()
        foo_directory = os.path.join(self.log_dir, "foo")
        writer = tf.summary.FileWriter(foo_directory)
        writer.add_graph(sess.graph)
        for step in xrange(2):
            # The floats (sample data) range from -1 to 1.
            writer.add_summary(sess.run(
                merged_summary_op,
                feed_dict={placeholder: numpy.random.rand(42, 22050) * 2 - 1}),
                               global_step=step)
        writer.close()

        # Create audio summaries for run bar.
        tf.reset_default_graph()
        sess = tf.Session()
        placeholder = tf.placeholder(tf.float32)
        tf.summary.audio(name="quux", tensor=placeholder, sample_rate=44100)
        merged_summary_op = tf.summary.merge_all()
        bar_directory = os.path.join(self.log_dir, "bar")
        writer = tf.summary.FileWriter(bar_directory)
        writer.add_graph(sess.graph)
        for step in xrange(2):
            # The floats (sample data) range from -1 to 1.
            writer.add_summary(sess.run(
                merged_summary_op,
                feed_dict={placeholder: numpy.random.rand(42, 11025) * 2 - 1}),
                               global_step=step)
        writer.close()

        # Start a server with the plugin.
        multiplexer = event_multiplexer.EventMultiplexer({
            "foo": foo_directory,
            "bar": bar_directory,
        })
        context = base_plugin.TBContext(logdir=self.log_dir,
                                        multiplexer=multiplexer)
        self.plugin = audio_plugin.AudioPlugin(context)
        wsgi_app = application.TensorBoardWSGIApp(self.log_dir, [self.plugin],
                                                  multiplexer,
                                                  reload_interval=0)
        self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
Example 29
  def __init__(self, logdir):
    """Loads the events of all runs under the given directory.

        The directory should contain runs and event files in the same format
        used by TensorBoard.

        Args:
          logdir: The string directory name containing event files.
    """
    self.logdir = logdir
    # AddRunsFromDirectory returns the multiplexer itself, so the call can be chained.
    self.event_mux = emux.EventMultiplexer().AddRunsFromDirectory(self.logdir)
    self.event_mux.Reload()
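A brief sketch of querying the multiplexer once this loader exists; the class name EventLoader and the directory path are assumptions for illustration.

# Hypothetical use of the loader above, assuming the __init__ belongs to a
# class named EventLoader and that '/tmp/logs' contains event files.
loader = EventLoader('/tmp/logs')
for run, tags in loader.event_mux.Runs().items():
    for tag in tags['scalars']:
        for scalar_event in loader.event_mux.Scalars(run, tag):
            print(run, tag, scalar_event.step, scalar_event.value)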
Example 30
 def set_up_with_runs(self, with_graph=True, without_graph=True):
     self.logdir = self.get_temp_dir()
     if with_graph:
         self.generate_run(self._RUN_WITH_GRAPH, include_graph=True)
     if without_graph:
         self.generate_run(self._RUN_WITHOUT_GRAPH, include_graph=False)
     multiplexer = event_multiplexer.EventMultiplexer()
     multiplexer.AddRunsFromDirectory(self.logdir)
     multiplexer.Reload()
     context = base_plugin.TBContext(logdir=self.logdir,
                                     multiplexer=multiplexer)
     self.plugin = graphs_plugin.GraphsPlugin(context)