Example #1
 def _start_logdir_based_server(self, temp_dir):
     self.logdir = temp_dir
     self.multiplexer = event_multiplexer.EventMultiplexer(
         size_guidance=application.DEFAULT_SIZE_GUIDANCE,
         purge_orphaned_data=True)
     context = base_plugin.TBContext(
         assets_zip_provider=get_test_assets_zip_provider(),
         logdir=self.logdir,
         multiplexer=self.multiplexer,
         window_title='title foo')
     self.logdir_based_plugin = core_plugin.CorePlugin(context)
     app = application.TensorBoardWSGI([self.logdir_based_plugin])
     self.logdir_based_server = werkzeug_test.Client(
         app, wrappers.BaseResponse)
Example #2
 def setUp(self):
     plugins = [
         FakePlugin(None,
                    plugin_name='foo',
                    is_active_value=True,
                    routes_mapping={}),
         FakePlugin(None,
                    plugin_name='bar',
                    is_active_value=False,
                    routes_mapping={}),
     ]
     app = application.TensorBoardWSGI(plugins,
                                       path_prefix=self.path_prefix)
     self.server = werkzeug_test.Client(app, wrappers.BaseResponse)
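FakePlugin itself is a test double defined outside this excerpt. A minimal sketch of what it might look like, assuming it only needs to satisfy the base_plugin.TBPlugin surface that TensorBoardWSGI touches (the constructor keywords mirror the ones used across these examples; the real helper may differ):

class FakePlugin(base_plugin.TBPlugin):
    # Hedged sketch of the test double used throughout these examples.
    def __init__(self, context=None, plugin_name="fake", is_active_value=True,
                 routes_mapping=None, element_name_value=None,
                 es_module_path_value=None, is_ng_component=False,
                 data_plugin_names=None):
        self.plugin_name = plugin_name
        self._is_active_value = is_active_value
        self._routes_mapping = routes_mapping or {}
        self._element_name_value = element_name_value
        self._es_module_path_value = es_module_path_value
        self._is_ng_component = is_ng_component
        self._data_plugin_names = data_plugin_names

    def get_plugin_apps(self):
        return self._routes_mapping

    def is_active(self):
        return self._is_active_value

    def data_plugin_names(self):
        # Fall back to the default (the plugin's own name) when not overridden.
        if self._data_plugin_names is None:
            return (self.plugin_name,)
        return self._data_plugin_names

    def frontend_metadata(self):
        return base_plugin.FrontendMetadata(
            element_name=self._element_name_value,
            es_module_path=self._es_module_path_value,
            is_ng_component=self._is_ng_component,
        )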
Example #3
 def setUp(self):
     super(CorePluginDbModeTest, self).setUp()
     self.db_path = os.path.join(self.get_temp_dir(), "db.db")
     self.db_uri = "sqlite:" + self.db_path
     db_connection_provider = application.create_sqlite_connection_provider(
         self.db_uri)
     context = base_plugin.TBContext(
         assets_zip_provider=get_test_assets_zip_provider(),
         db_connection_provider=db_connection_provider,
         db_uri=self.db_uri,
     )
     self.plugin = core_plugin.CorePlugin(context)
     app = application.TensorBoardWSGI([self.plugin])
     self.server = werkzeug_test.Client(app, wrappers.BaseResponse)
Example #4
def TensorBoardWSGIApp(logdir,
                       plugins,
                       multiplexer,
                       reload_interval,
                       path_prefix="",
                       reload_task='auto'):
    thread = None
    path_to_run = application.parse_event_files_spec(logdir)
    if reload_interval >= 0:
        thread = start_reloading_multiplexer(multiplexer, path_to_run,
                                             reload_interval)
    tb_app = application.TensorBoardWSGI(plugins)
    manager.add_instance(logdir, tb_app, thread)
    return tb_app
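This factory depends on helpers (start_reloading_multiplexer, manager) defined elsewhere in the calling project. A hypothetical invocation, assuming a plain EventMultiplexer and a list of already-constructed plugin instances (my_plugins, the logdir path, and the interval are all assumptions), might look like:

multiplexer = event_multiplexer.EventMultiplexer()
app = TensorBoardWSGIApp(
    logdir="/tmp/logs",       # assumed log directory
    plugins=my_plugins,       # assumed list of loaded TBPlugin instances
    multiplexer=multiplexer,
    reload_interval=5,
)
client = werkzeug_test.Client(app, wrappers.BaseResponse)  # e.g. to exercise routes in tests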
Example #5
 def test_download_url_csv(self):
     wsgi_app = application.TensorBoardWSGI([self.plugin])
     server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
     response = server.get(
         "/data/plugin/custom_scalars/download_data?run=%s&tag=%s&format=csv"
         % ("foo", "squares/scalar_summary"))
     self.assertEqual(200, response.status_code)
     self.assertEqual("text/csv; charset=utf-8",
                      response.headers["Content-Type"])
     payload = response.get_data()
     s = io.StringIO(payload.decode("utf-8"))
     reader = csv.reader(s)
     self.assertEqual(["Wall time", "Step", "Value"], next(reader))
     self.assertEqual(len(list(reader)), 4)
Example #6
 def setUp(self):
     super(CorePluginNoDataTest, self).setUp()
     multiplexer = event_multiplexer.EventMultiplexer()
     logdir = self.get_temp_dir()
     provider = data_provider.MultiplexerDataProvider(multiplexer, logdir)
     context = base_plugin.TBContext(
         assets_zip_provider=get_test_assets_zip_provider(),
         logdir=logdir,
         data_provider=provider,
         window_title="title foo",
     )
     self.plugin = core_plugin.CorePlugin(context)
     app = application.TensorBoardWSGI([self.plugin])
     self.server = werkzeug_test.Client(app, wrappers.BaseResponse)
Example #7
 def testNgComponentPluginWithIncompatibleEsModulePath(self):
     plugins = [
         FakePlugin(
             plugin_name="quux",
             is_ng_component=True,
             es_module_path_value="//incompatible",
         ),
     ]
     app = application.TensorBoardWSGI(plugins)
     server = werkzeug_test.Client(app, wrappers.BaseResponse)
     with six.assertRaisesRegex(
         self, ValueError, "quux.*declared.*both Angular.*iframed"
     ):
         server.get("/data/plugins_listing")
Example #8
    def setUp(self):
        super(InteractiveDebuggerPluginTest, self).setUp()

        self._dummy_logdir = tempfile.mkdtemp()
        dummy_multiplexer = event_multiplexer.EventMultiplexer({})
        self._debugger_port = portpicker.pick_unused_port()
        self._debugger_url = "grpc://localhost:%d" % self._debugger_port
        context = base_plugin.TBContext(logdir=self._dummy_logdir,
                                        multiplexer=dummy_multiplexer)
        self._debugger_plugin = interactive_debugger_plugin.InteractiveDebuggerPlugin(
            context)
        self._debugger_plugin.listen(self._debugger_port)

        wsgi_app = application.TensorBoardWSGI([self._debugger_plugin])
        self._server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
Example #9
 def _send_request(self, path_prefix, pathname):
     multiplexer = event_multiplexer.EventMultiplexer()
     logdir = self.get_temp_dir()
     provider = data_provider.MultiplexerDataProvider(multiplexer, logdir)
     context = base_plugin.TBContext(
         assets_zip_provider=get_test_assets_zip_provider(),
         logdir=logdir,
         data_provider=provider,
         window_title="",
         flags=FakeFlags(path_prefix=path_prefix),
     )
     plugin = core_plugin.CorePlugin(context)
     app = application.TensorBoardWSGI([plugin], path_prefix=path_prefix)
     server = werkzeug_test.Client(app, wrappers.BaseResponse)
     return server.get(pathname)
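FakeFlags is another helper outside the excerpt. A hedged sketch covering only the attributes these examples read (path_prefix, generic_data); the real stub presumably carries many more flag defaults:

class FakeFlags(object):
    # Hedged sketch of the flags stub used above; extra keyword arguments become
    # attributes so individual tests can override specific flags.
    def __init__(self, path_prefix="", generic_data="auto", **kwargs):
        self.path_prefix = path_prefix
        self.generic_data = generic_data
        for name, value in kwargs.items():
            setattr(self, name, value)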
Example #10
 def test_download_url_json(self):
     wsgi_app = application.TensorBoardWSGI([self.plugin])
     server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
     response = server.get(
         "/data/plugin/custom_scalars/download_data?run=%s&tag=%s" %
         ("foo", "squares/scalar_summary"))
     self.assertEqual(200, response.status_code)
     self.assertEqual("application/json", response.headers["Content-Type"])
     body = json.loads(response.get_data())
     self.assertEqual(4, len(body))
     for step, entry in enumerate(body):
         # The time stamp should be reasonable.
         self.assertGreater(entry[0], 0)
         self.assertEqual(step, entry[1])
         np.testing.assert_allclose(step * step, entry[2])
Example #11
 def test_download_url_json(self):
     plugin = self.load_plugin([self._RUN_WITH_SCALARS])
     wsgi_app = application.TensorBoardWSGI([plugin])
     server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
     response = server.get(
         "/data/plugin/scalars/scalars?run=%s&tag=%s"
         % (
             self._RUN_WITH_SCALARS,
             "%s/scalar_summary" % self._SCALAR_TAG,
         )
     )
     self.assertEqual(200, response.status_code)
     self.assertEqual("application/json", response.headers["Content-Type"])
     payload = json.loads(response.get_data())
     self.assertEqual(len(payload), self._STEPS)
Example #12
 def setUp(self):
     plugins = [
         FakePlugin(plugin_name="foo"),
         FakePlugin(
             plugin_name="bar",
             is_active_value=False,
             element_name_value="tf-bar-dashboard",
         ),
         FakePlugin(
             plugin_name="baz",
             routes_mapping={"/esmodule": lambda req: None,},
             es_module_path_value="/esmodule",
         ),
     ]
     app = application.TensorBoardWSGI(plugins, path_prefix=self.path_prefix)
     self.server = werkzeug_test.Client(app, wrappers.BaseResponse)
Example #13
def MyTensorBoardWSGIApp(logdir,
                         plugins,
                         multiplexer,
                         reload_interval,
                         path_prefix=''):
    if reload_interval <= 0:
        raise ValueError('reload_interval is negative or zero: %d' %
                         reload_interval)

    path_to_run = application.parse_event_files_spec(logdir)
    thread = my_start_reloading_multiplexer(multiplexer, path_to_run,
                                            reload_interval)
    app = application.TensorBoardWSGI(plugins, path_prefix)
    app.multiplexer_thread = thread

    return app
Example #14
def TensorBoardWSGIApp(logdir,
                       plugins,
                       multiplexer,
                       reload_interval,
                       using_db,
                       path_prefix=""):
    path_to_run = application.parse_event_files_spec(logdir)
    if reload_interval >= 0:
        thread = start_reloading_multiplexer(multiplexer, path_to_run,
                                             reload_interval)
    else:
        application.reload_multiplexer(multiplexer, path_to_run)
        thread = None
    tb_app = application.TensorBoardWSGI(plugins, using_db, path_prefix)
    manager.add_instance(logdir, tb_app, thread)
    return tb_app
Example #15
    def testEnvironmentDebugOnExplicitly(self):
        multiplexer = event_multiplexer.EventMultiplexer()
        logdir = self.get_temp_dir()
        provider = data_provider.MultiplexerDataProvider(multiplexer, logdir)
        context = base_plugin.TBContext(
            assets_zip_provider=get_test_assets_zip_provider(),
            logdir=logdir,
            data_provider=provider,
            window_title="title foo",
        )
        plugin = core_plugin.CorePlugin(context, include_debug_info=True)
        app = application.TensorBoardWSGI([plugin])
        server = werkzeug_test.Client(app, wrappers.BaseResponse)

        parsed_object = self._get_json(server, "/data/environment")
        self.assertIn("debug", parsed_object)
Example #16
 def setUp(self):
     plugins = [
         FakePlugin(plugin_name='foo'),
         FakePlugin(
             plugin_name='bar',
             is_active_value=False,
             element_name_value='tf-bar-dashboard',
         ),
         FakePlugin(plugin_name='baz',
                    routes_mapping={
                        '/esmodule': lambda req: None,
                    },
                    es_module_path_value='/esmodule'),
     ]
     app = application.TensorBoardWSGI(plugins)
     self.server = werkzeug_test.Client(app, wrappers.BaseResponse)
Example #17
def TensorBoardWSGIApp_2x(
        flags, plugins,
        data_provider=None,
        assets_zip_provider=None,
        deprecated_multiplexer=None):

    logdir = flags.logdir
    multiplexer = deprecated_multiplexer
    reload_interval = flags.reload_interval
    from tensorboard.backend.event_processing import data_ingester

    path_to_run = data_ingester._parse_event_files_spec(logdir)
    if reload_interval:
        thread = start_reloading_multiplexer(
            multiplexer, path_to_run, reload_interval)
    else:
        application.reload_multiplexer(multiplexer, path_to_run)
        thread = None

    db_uri = None
    db_connection_provider = None

    plugin_name_to_instance = {}

    from tensorboard.plugins import base_plugin
    context = base_plugin.TBContext(
        data_provider=data_provider,
        # db_connection_provider=db_connection_provider,
        # db_uri=db_uri,
        flags=flags,
        logdir=flags.logdir,
        multiplexer=deprecated_multiplexer,
        assets_zip_provider=assets_zip_provider,
        plugin_name_to_instance=plugin_name_to_instance,
        window_title=flags.window_title)

    tbplugins = []
    for loader in plugins:
        plugin = loader.load(context)
        if plugin is None:
            continue
        tbplugins.append(plugin)
        plugin_name_to_instance[plugin.plugin_name] = plugin

    tb_app = application.TensorBoardWSGI(tbplugins)
    manager.add_instance(logdir, tb_app, thread)
    return tb_app
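The flags object this 2.x-style factory reads can be anything attribute-shaped; a hedged sketch using argparse.Namespace, listing only the fields the function above actually touches (the values are placeholders):

import argparse

flags = argparse.Namespace(
    logdir="/tmp/logs",    # read as flags.logdir
    reload_interval=5,     # read as flags.reload_interval
    window_title="",       # read as flags.window_title
)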
Example #18
 def test_download_url_csv(self):
     plugin = self.load_plugin([self._RUN_WITH_SCALARS])
     wsgi_app = application.TensorBoardWSGI([plugin])
     server = werkzeug_test.Client(wsgi_app, wrappers.Response)
     response = server.get(
         "/data/plugin/scalars/scalars?run=%s&tag=%s&format=csv" % (
             self._RUN_WITH_SCALARS,
             "%s/scalar_summary" % self._SCALAR_TAG,
         ))
     self.assertEqual(200, response.status_code)
     self.assertEqual("text/csv; charset=utf-8",
                      response.headers["Content-Type"])
     payload = response.get_data()
     s = io.StringIO(payload.decode("utf-8"))
     reader = csv.reader(s)
     self.assertEqual(["Wall time", "Step", "Value"], next(reader))
     self.assertEqual(len(list(reader)), self._STEPS)
Example #19
 def startDbBasedServer(self, temp_dir):
     self.db_uri = 'sqlite:' + os.path.join(temp_dir, 'db.sqlite')
     db_module, db_connection_provider = application.get_database_info(
         self.db_uri)
     if db_connection_provider is not None:
         with contextlib.closing(db_connection_provider()) as db_conn:
             schema = db.Schema(db_conn)
             schema.create_tables()
             schema.create_indexes()
     context = base_plugin.TBContext(
         assets_zip_provider=get_test_assets_zip_provider(),
         db_module=db_module,
         db_connection_provider=db_connection_provider,
         db_uri=self.db_uri,
         window_title='title foo')
     self.db_based_plugin = core_plugin.CorePlugin(context)
     app = application.TensorBoardWSGI([self.db_based_plugin])
     self.db_based_server = werkzeug_test.Client(app, wrappers.BaseResponse)
Example #20
    def setUp(self):
        super(DebuggerV2PluginTest, self).setUp()
        self.logdir = self.get_temp_dir()
        context = base_plugin.TBContext(logdir=self.logdir)
        self.plugin = debugger_v2_plugin.DebuggerV2Plugin(context)
        wsgi_app = application.TensorBoardWSGI([self.plugin])
        self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
        # The multiplexer reads data asynchronously on a separate thread, so
        # as not to block the main thread of the TensorBoard backend. During
        # unit tests, we disable the asynchronous behavior so that we can
        # load the debugger data synchronously on the main thread and get
        # deterministic behavior in the tests.
        def run_in_background_mock(target):
            target()

        self.run_in_background_patch = tf.compat.v1.test.mock.patch.object(
            debug_data_multiplexer, "run_in_background", run_in_background_mock
        )
        self.run_in_background_patch.start()
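The matching teardown is not part of the excerpt; presumably the patch started above is undone there, roughly like this sketch:

def tearDown(self):
    self.run_in_background_patch.stop()  # undo the run_in_background mock
    super(DebuggerV2PluginTest, self).tearDown()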
Example #21
 def setUp(self):
     plugins = [
         FakePlugin(plugin_name="foo"),
         FakePlugin(
             plugin_name="bar",
             is_active_value=False,
             element_name_value="tf-bar-dashboard",
         ),
         FakePlugin(
             plugin_name="baz",
             routes_mapping={"/esmodule": lambda req: None,},
             es_module_path_value="/esmodule",
         ),
         FakePlugin(
             plugin_name="qux", is_active_value=False, is_ng_component=True,
         ),
     ]
     app = application.TensorBoardWSGI(plugins)
     self._install_server(app)
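_install_server is a small helper defined elsewhere in the test base class; judging by the other fixtures here, it probably just wraps the WSGI app in a werkzeug test client (a hedged sketch):

def _install_server(self, app):
    # Hedged sketch: mirror how the other setUp methods build self.server.
    self.server = werkzeug_test.Client(app, wrappers.BaseResponse)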
Example #22
 def setUp(self):
     super(CorePluginTestBase, self).setUp()
     self.logdir = self.get_temp_dir()
     self.multiplexer = self.create_multiplexer()
     db_uri = None
     db_connection_provider = None
     if isinstance(self.multiplexer,
                   db_import_multiplexer.DbImportMultiplexer):
         db_uri = self.multiplexer.db_uri
         db_connection_provider = self.multiplexer.db_connection_provider
     context = base_plugin.TBContext(
         assets_zip_provider=get_test_assets_zip_provider(),
         logdir=self.logdir,
         multiplexer=self.multiplexer,
         db_uri=db_uri,
         db_connection_provider=db_connection_provider,
     )
     self.plugin = core_plugin.CorePlugin(context)
     app = application.TensorBoardWSGI([self.plugin])
     self.server = werkzeug_test.Client(app, wrappers.BaseResponse)
Example #23
    def testPluginsListingWithDataProviderListActivePlugins(self):
        prov = FakeDataProvider()
        self.assertIsNotNone(prov.list_plugins)
        prov.list_plugins = lambda ctx, *, experiment_id: ("foo", "bar")

        plugins = [
            FakePlugin(plugin_name="foo", is_active_value=False),
            FakePlugin(
                plugin_name="bar",
                is_active_value=False,
                data_plugin_names=(),
            ),
            FakePlugin(plugin_name="baz", is_active_value=False),
            FakePlugin(
                plugin_name="quux",
                is_active_value=False,
                data_plugin_names=("bar", "baz"),
            ),
            FakePlugin(
                plugin_name="zod",
                is_active_value=True,
                data_plugin_names=("none_but_should_fall_back"),
            ),
        ]
        app = application.TensorBoardWSGI(plugins, data_provider=prov)
        self._install_server(app)

        parsed_object = self._get_json("/data/plugins_listing")
        actives = {k: v["enabled"] for (k, v) in parsed_object.items()}
        self.assertEqual(
            actives,
            {
                "foo": True,  # directly has data
                "bar": False,  # has data, but does not depend on itself
                "baz": False,  # no data, and no dependencies
                "quux": True,  # no data, but depends on "bar"
                "zod": True,  # no data, but `is_active` return `True`
            },
        )
Example #24
    def setUp(self):
        super(LitePluginTest, self).setUp()
        logdir = os.path.join(self.get_temp_dir(), "logdir")
        run_logdir = os.path.join(logdir, "0")
        saved_model_dir = os.path.join(logdir, "0", "exported_saved_model")
        model = lite_demo_model.generate_run(run_logdir, saved_model_dir)

        self.input_arrays = lite_demo_model.INPUT_TENSOR_ARRAYS
        self.output_arrays = lite_demo_model.OUTPUT_TENSOR_ARRAYS

        # Create a multiplexer for reading the data we just wrote.
        multiplexer = event_multiplexer.EventMultiplexer()
        multiplexer.AddRunsFromDirectory(logdir)
        multiplexer.Reload()
        context = base_plugin.TBContext(logdir=logdir, multiplexer=multiplexer)

        self.plugin = lite_plugin.LitePlugin(context)
        # TODO(tensorflow/tensorboard#2573): Remove TensorBoardWSGI wrapper.
        wsgi_app = application.TensorBoardWSGI([self.plugin])
        self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)

        self.routes = self.plugin.get_plugin_apps()
Example #25
    def testGetEnvironmentDataWithDataLocationFallback(self):
        """Test environment route returns correct metadata about experiment."""
        class FakeDataProvider(object):
            def data_location(self, ctx, *, experiment_id):
                del experiment_id  # Unused.
                return "fallback (pick me)"

            def experiment_metadata(self, ctx, *, experiment_id):
                del experiment_id  # Unused.
                # note: no data location provided
                return provider.ExperimentMetadata(experiment_name="name")

        self.context = base_plugin.TBContext(
            flags=FakeFlags(generic_data="true"),
            data_provider=FakeDataProvider(),
        )

        self.plugin = core_plugin.CorePlugin(self.context)
        app = application.TensorBoardWSGI([self.plugin])
        self.server = werkzeug_test.Client(app, wrappers.BaseResponse)

        parsed_object = self._get_json(self.server, "/data/environment")
        self.assertEqual(parsed_object["data_location"], "fallback (pick me)")
Example #26
    def setUp(self):
        self.log_dir = tempfile.mkdtemp()

        # We use numpy.random to generate audio. We seed to avoid non-determinism
        # in this test.
        numpy.random.seed(42)

        # Create old-style audio summaries for run "foo".
        tf.compat.v1.reset_default_graph()
        with tf.compat.v1.Graph().as_default():
            sess = tf.compat.v1.Session()
            placeholder = tf.compat.v1.placeholder(tf.float32)
            tf.compat.v1.summary.audio(name="baz",
                                       tensor=placeholder,
                                       sample_rate=44100)
            merged_summary_op = tf.compat.v1.summary.merge_all()
            foo_directory = os.path.join(self.log_dir, "foo")
            with test_util.FileWriterCache.get(foo_directory) as writer:
                writer.add_graph(sess.graph)
                for step in xrange(2):
                    # The floats (sample data) range from -1 to 1.
                    writer.add_summary(
                        sess.run(
                            merged_summary_op,
                            feed_dict={
                                placeholder:
                                numpy.random.rand(42, 22050) * 2 - 1
                            },
                        ),
                        global_step=step,
                    )

        # Create new-style audio summaries for run "bar".
        tf.compat.v1.reset_default_graph()
        with tf.compat.v1.Graph().as_default():
            sess = tf.compat.v1.Session()
            audio_placeholder = tf.compat.v1.placeholder(tf.float32)
            labels_placeholder = tf.compat.v1.placeholder(tf.string)
            summary.op(
                "quux",
                audio_placeholder,
                sample_rate=44100,
                labels=labels_placeholder,
                description="how do you pronounce that, anyway?",
            )
            merged_summary_op = tf.compat.v1.summary.merge_all()
            bar_directory = os.path.join(self.log_dir, "bar")
            with test_util.FileWriterCache.get(bar_directory) as writer:
                writer.add_graph(sess.graph)
                for step in xrange(2):
                    # The floats (sample data) range from -1 to 1.
                    writer.add_summary(
                        sess.run(
                            merged_summary_op,
                            feed_dict={
                                audio_placeholder:
                                numpy.random.rand(42, 11025, 1) * 2 - 1,
                                labels_placeholder: [
                                    tf.compat.as_bytes(
                                        "step **%s**, sample %s" %
                                        (step, sample))
                                    for sample in xrange(42)
                                ],
                            },
                        ),
                        global_step=step,
                    )

        # Start a server with the plugin.
        multiplexer = event_multiplexer.EventMultiplexer({
            "foo": foo_directory,
            "bar": bar_directory,
        })
        multiplexer.Reload()
        context = base_plugin.TBContext(logdir=self.log_dir,
                                        multiplexer=multiplexer)
        self.plugin = audio_plugin.AudioPlugin(context)
        wsgi_app = application.TensorBoardWSGI([self.plugin])
        self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
Example #27
    def setUp(self):
        # We use numpy.random to generate meshes. We seed to avoid non-determinism
        # in this test.
        np.random.seed(17)

        # Log dir to save temp events into.
        self.log_dir = self.get_temp_dir()

        # Create mesh summary.
        with tf.compat.v1.Graph().as_default():
            tf_placeholder = tf.compat.v1.placeholder
            sess = tf.compat.v1.Session()
            point_cloud = test_utils.get_random_mesh(1000)
            point_cloud_vertices = tf_placeholder(tf.float32,
                                                  point_cloud.vertices.shape)

            mesh_no_color = test_utils.get_random_mesh(2000, add_faces=True)
            mesh_no_color_extended = test_utils.get_random_mesh(2500,
                                                                add_faces=True)
            mesh_no_color_vertices = tf_placeholder(tf.float32, [1, None, 3])
            mesh_no_color_faces = tf_placeholder(tf.int32, [1, None, 3])

            mesh_color = test_utils.get_random_mesh(3000,
                                                    add_faces=True,
                                                    add_colors=True)
            mesh_color_vertices = tf_placeholder(tf.float32,
                                                 mesh_color.vertices.shape)
            mesh_color_faces = tf_placeholder(tf.int32, mesh_color.faces.shape)
            mesh_color_colors = tf_placeholder(tf.uint8,
                                               mesh_color.colors.shape)

            self.data = [
                point_cloud, mesh_no_color, mesh_no_color_extended, mesh_color
            ]

            # When name is present and display_name is not, name is reused as
            # display_name. The summaries below are intended to test both cases.
            self.names = ["point_cloud", "mesh_no_color", "mesh_color"]
            summary.op(self.names[0],
                       point_cloud_vertices,
                       description="just point cloud")
            summary.op(self.names[1],
                       mesh_no_color_vertices,
                       faces=mesh_no_color_faces,
                       display_name="name_to_display_in_ui",
                       description="beautiful mesh in grayscale")
            summary.op(self.names[2],
                       mesh_color_vertices,
                       faces=mesh_color_faces,
                       colors=mesh_color_colors,
                       description="mesh with random colors")

            merged_summary_op = tf.compat.v1.summary.merge_all()
            self.runs = ["bar"]
            self.steps = 20
            bar_directory = os.path.join(self.log_dir, self.runs[0])
            with tensorboard_test_util.FileWriterCache.get(
                    bar_directory) as writer:
                writer.add_graph(sess.graph)
                for step in range(self.steps):
                    # Alternate between two random meshes with different numbers
                    # of vertices.
                    no_color = mesh_no_color if step % 2 == 0 else mesh_no_color_extended
                    with patch.object(time, 'time', return_value=step):
                        writer.add_summary(sess.run(merged_summary_op,
                                                    feed_dict={
                                                        point_cloud_vertices:
                                                        point_cloud.vertices,
                                                        mesh_no_color_vertices:
                                                        no_color.vertices,
                                                        mesh_no_color_faces:
                                                        no_color.faces,
                                                        mesh_color_vertices:
                                                        mesh_color.vertices,
                                                        mesh_color_faces:
                                                        mesh_color.faces,
                                                        mesh_color_colors:
                                                        mesh_color.colors,
                                                    }),
                                           global_step=step)

        # Start a server that will receive requests.
        self.multiplexer = event_multiplexer.EventMultiplexer({
            "bar":
            bar_directory,
        })
        self.context = base_plugin.TBContext(logdir=self.log_dir,
                                             multiplexer=self.multiplexer)
        self.plugin = mesh_plugin.MeshPlugin(self.context)
        # Wait until after plugin construction to reload the multiplexer because the
        # plugin caches data from the multiplexer upon construction and this affects
        # logic tested further down.
        # TODO(https://github.com/tensorflow/tensorboard/issues/2579): Eliminate the
        # caching of data at construction time and move this Reload() up to just
        # after the multiplexer is created.
        self.multiplexer.Reload()
        wsgi_app = application.TensorBoardWSGI([self.plugin])
        self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
        self.routes = self.plugin.get_plugin_apps()
Example #28
 def load_server(self, run_names):
     plugin = self.load_plugin(run_names)
     wsgi_app = application.TensorBoardWSGI([plugin])
     server = werkzeug_test.Client(wsgi_app, wrappers.Response)
     return server
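load_plugin, referenced by load_server above and by the scalar-download tests, is defined outside the excerpt. A hedged sketch, assuming it wires an EventMultiplexer over the requested runs into a ScalarsPlugin; the scalars_plugin import and the assumption that the per-run event files already exist under the temp logdir are not part of the original:

def load_plugin(self, run_names):
    # Hedged sketch: point a multiplexer at per-run subdirectories of the temp
    # logdir (assumed to already contain the test events) and build the plugin.
    logdir = self.get_temp_dir()
    multiplexer = event_multiplexer.EventMultiplexer(
        {name: os.path.join(logdir, name) for name in run_names})
    multiplexer.Reload()
    provider = data_provider.MultiplexerDataProvider(multiplexer, logdir)
    context = base_plugin.TBContext(logdir=logdir, data_provider=provider)
    return scalars_plugin.ScalarsPlugin(context)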
Example #29
    def setUp(self):
        self.log_dir = tempfile.mkdtemp()

        # We use numpy.random to generate images. We seed to avoid non-determinism
        # in this test.
        numpy.random.seed(42)

        # Create old-style image summaries for run "foo".
        tf.compat.v1.reset_default_graph()
        sess = tf.compat.v1.Session()
        placeholder = tf.compat.v1.placeholder(tf.uint8)
        tf.compat.v1.summary.image(name="baz", tensor=placeholder)
        merged_summary_op = tf.compat.v1.summary.merge_all()
        foo_directory = os.path.join(self.log_dir, "foo")
        with test_util.FileWriterCache.get(foo_directory) as writer:
            writer.add_graph(sess.graph)
            for step in xrange(2):
                writer.add_summary(
                    sess.run(
                        merged_summary_op,
                        feed_dict={
                            placeholder: (numpy.random.rand(1, 16, 42, 3) *
                                          255).astype(numpy.uint8)
                        },
                    ),
                    global_step=step,
                )

        # Create new-style image summaries for run bar.
        tf.compat.v1.reset_default_graph()
        sess = tf.compat.v1.Session()
        placeholder = tf.compat.v1.placeholder(tf.uint8)
        summary.op(
            name="quux",
            images=placeholder,
            description="how do you pronounce that, anyway?",
        )
        merged_summary_op = tf.compat.v1.summary.merge_all()
        bar_directory = os.path.join(self.log_dir, "bar")
        with test_util.FileWriterCache.get(bar_directory) as writer:
            writer.add_graph(sess.graph)
            for step in xrange(2):
                writer.add_summary(
                    sess.run(
                        merged_summary_op,
                        feed_dict={
                            placeholder: (numpy.random.rand(1, 8, 6, 3) *
                                          255).astype(numpy.uint8)
                        },
                    ),
                    global_step=step,
                )

        # Start a server with the plugin.
        multiplexer = event_multiplexer.EventMultiplexer({
            "foo": foo_directory,
            "bar": bar_directory,
        })
        multiplexer.Reload()
        context = base_plugin.TBContext(logdir=self.log_dir,
                                        multiplexer=multiplexer)
        plugin = images_plugin.ImagesPlugin(context)
        wsgi_app = application.TensorBoardWSGI([plugin])
        self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)
        self.routes = plugin.get_plugin_apps()
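A test built on this fixture would then exercise the registered routes through self.server; a hedged sketch (the exact route name "/tags" and its JSON content type are assumptions about the images plugin):

def test_tags_route_registered(self):
    # "/tags" is assumed to be one of the routes returned by get_plugin_apps().
    self.assertIn("/tags", self.routes)
    response = self.server.get("/data/plugin/images/tags")
    self.assertEqual(200, response.status_code)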
Example #30
    def setUp(self):
        super(DebuggerPluginTestBase, self).setUp()
        # Importing the debugger_plugin can unfortunately fail in some environments.
        try:
            # pylint: disable=g-import-not-at-top
            from tensorboard.plugins.debugger import debugger_plugin
            from tensorboard.plugins.debugger import debugger_server_lib
            # pylint: enable=g-import-not-at-top
        except Exception as e:  # pylint: disable=broad-except
            raise self.skipTest(
                'Skipping test because importing some modules failed: %r' % e)
        self.debugger_plugin_module = debugger_plugin

        # Populate the log directory with debugger event for run '.'.
        self.log_dir = self.get_temp_dir()
        file_prefix = tf.compat.as_bytes(
            os.path.join(self.log_dir, 'events.debugger'))
        writer = pywrap_tensorflow.EventsWriter(file_prefix)
        device_name = '/job:localhost/replica:0/task:0/cpu:0'
        writer.WriteEvent(
            self._CreateEventWithDebugNumericSummary(
                device_name=device_name,
                op_name='layers/Matmul',
                output_slot=0,
                wall_time=42,
                step=2,
                list_of_values=(
                    list(range(12)) +
                    [float(tf.float32.as_datatype_enum), 1.0, 3.0])))
        writer.WriteEvent(
            self._CreateEventWithDebugNumericSummary(
                device_name=device_name,
                op_name='layers/Matmul',
                output_slot=1,
                wall_time=43,
                step=7,
                list_of_values=(
                    list(range(12)) +
                    [float(tf.float64.as_datatype_enum), 2.0, 3.0, 3.0])))
        writer.WriteEvent(
            self._CreateEventWithDebugNumericSummary(
                device_name=device_name,
                op_name='logits/Add',
                output_slot=0,
                wall_time=1337,
                step=7,
                list_of_values=(
                    list(range(12)) +
                    [float(tf.int32.as_datatype_enum), 2.0, 3.0, 3.0])))
        writer.WriteEvent(
            self._CreateEventWithDebugNumericSummary(
                device_name=device_name,
                op_name='logits/Add',
                output_slot=0,
                wall_time=1338,
                step=8,
                list_of_values=(list(range(12)) +
                                [float(tf.int16.as_datatype_enum), 0.0])))
        writer.Close()

        # Populate the log directory with debugger event for run 'run_foo'.
        run_foo_directory = os.path.join(self.log_dir, 'run_foo')
        os.mkdir(run_foo_directory)
        file_prefix = tf.compat.as_bytes(
            os.path.join(run_foo_directory, 'events.debugger'))
        writer = pywrap_tensorflow.EventsWriter(file_prefix)
        writer.WriteEvent(
            self._CreateEventWithDebugNumericSummary(
                device_name=device_name,
                op_name='layers/Variable',
                output_slot=0,
                wall_time=4242,
                step=42,
                list_of_values=(list(range(12)) +
                                [float(tf.int16.as_datatype_enum), 1.0, 8.0])))
        writer.Close()

        # Start a server that will receive requests and respond with health pills.
        multiplexer = event_multiplexer.EventMultiplexer({
            '.':
            self.log_dir,
            'run_foo':
            run_foo_directory,
        })
        multiplexer.Reload()
        self.debugger_data_server_grpc_port = portpicker.pick_unused_port()

        # Fake threading behavior so that threads are synchronous.
        tf.compat.v1.test.mock.patch('threading.Thread.start',
                                     threading.Thread.run).start()

        self.mock_debugger_data_server = tf.compat.v1.test.mock.Mock(
            debugger_server_lib.DebuggerDataServer)
        self.mock_debugger_data_server_class = tf.compat.v1.test.mock.Mock(
            debugger_server_lib.DebuggerDataServer,
            return_value=self.mock_debugger_data_server)

        tf.compat.v1.test.mock.patch.object(
            debugger_server_lib, 'DebuggerDataServer',
            self.mock_debugger_data_server_class).start()

        self.context = base_plugin.TBContext(logdir=self.log_dir,
                                             multiplexer=multiplexer)
        self.plugin = debugger_plugin.DebuggerPlugin(self.context)
        self.plugin.listen(self.debugger_data_server_grpc_port)
        wsgi_app = application.TensorBoardWSGI([self.plugin])
        self.server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse)

        # The debugger data server should be started at the correct port.
        self.mock_debugger_data_server_class.assert_called_once_with(
            self.debugger_data_server_grpc_port, self.log_dir)

        mock_debugger_data_server = self.mock_debugger_data_server
        start = mock_debugger_data_server.start_the_debugger_data_receiving_server
        self.assertEqual(1, start.call_count)
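The corresponding tearDown is not included in the excerpt; since several mock patches are started with .start() here, it presumably undoes them, for example:

def tearDown(self):
    # Hedged sketch: stop every patch started in setUp and clean up the base class.
    tf.compat.v1.test.mock.patch.stopall()
    super(DebuggerPluginTestBase, self).tearDown()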