def _create_uploader(
    writer_client=_USE_DEFAULT,
    logdir=None,
    allowed_plugins=_USE_DEFAULT,
    logdir_poll_rate_limiter=_USE_DEFAULT,
    rpc_rate_limiter=_USE_DEFAULT,
    blob_rpc_rate_limiter=_USE_DEFAULT,
    name=None,
    description=None,
):
    if writer_client is _USE_DEFAULT:
        writer_client = _create_mock_client()
    if allowed_plugins is _USE_DEFAULT:
        allowed_plugins = _SCALARS_ONLY
    if logdir_poll_rate_limiter is _USE_DEFAULT:
        logdir_poll_rate_limiter = util.RateLimiter(0)
    if rpc_rate_limiter is _USE_DEFAULT:
        rpc_rate_limiter = util.RateLimiter(0)
    if blob_rpc_rate_limiter is _USE_DEFAULT:
        blob_rpc_rate_limiter = util.RateLimiter(0)
    return uploader_lib.TensorBoardUploader(
        writer_client,
        logdir,
        allowed_plugins=allowed_plugins,
        logdir_poll_rate_limiter=logdir_poll_rate_limiter,
        rpc_rate_limiter=rpc_rate_limiter,
        blob_rpc_rate_limiter=blob_rpc_rate_limiter,
        name=name,
        description=description,
    )
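# The helpers and tests in this excerpt call a module-level _create_mock_client()
# that is not included here.  A minimal sketch of what it could look like,
# assuming the CreateExperiment RPC should answer with experiment_id "123" so
# that the assertions below (eid == "123", experiment_id="123" in requests)
# hold; the project's real helper likely autospecs the generated gRPC stub and
# configures additional RPCs.
def _create_mock_client():
    mock_client = mock.MagicMock()
    # Assumed response shape: CreateExperimentResponse carrying the id of the
    # newly created experiment.
    mock_client.CreateExperiment.return_value = (
        write_service_pb2.CreateExperimentResponse(experiment_id="123")
    )
    return mock_client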
def execute(self, server_info, channel):
    api_client = write_service_pb2_grpc.TensorBoardWriterServiceStub(
        channel
    )
    _die_if_bad_experiment_name(self.name)
    _die_if_bad_experiment_description(self.description)
    uploader = uploader_lib.TensorBoardUploader(
        api_client,
        self.logdir,
        name=self.name,
        description=self.description,
    )
    experiment_id = uploader.create_experiment()
    url = server_info_lib.experiment_url(server_info, experiment_id)
    print(
        "Upload started and will continue reading any new data as it's added"
    )
    print("to the logdir. To stop uploading, press Ctrl-C.")
    print("View your TensorBoard live at: %s" % url)
    try:
        uploader.start_uploading()
    except uploader_lib.ExperimentNotFoundError:
        print("Experiment was deleted; uploading has been cancelled")
        return
    except KeyboardInterrupt:
        print()
        print("Upload stopped. View your TensorBoard at %s" % url)
        return
    # TODO(@nfelt): make it possible for the upload cycle to end once we
    # detect that no more runs are active, so this code can be reached.
    print("Done! View your TensorBoard at %s" % url)
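# The execute() variants in this excerpt validate user input via
# _die_if_bad_experiment_name / _die_if_bad_experiment_description, which are
# not shown here.  A hedged sketch of the name check; the length limit and the
# _die helper are assumptions for illustration, not the project's actual
# values.
import sys

_EXPERIMENT_NAME_MAX_CHARS = 100  # hypothetical limit, for illustration only


def _die(message):
    sys.stderr.write("%s\n" % message)
    sys.stderr.flush()
    sys.exit(1)


def _die_if_bad_experiment_name(name):
    if name and len(name) > _EXPERIMENT_NAME_MAX_CHARS:
        _die(
            "Experiment name is too long.  Limit is %d characters."
            % _EXPERIMENT_NAME_MAX_CHARS
        )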
def test_upload_preserves_wall_time(self):
    logdir = self.get_temp_dir()
    with tb_test_util.FileWriter(logdir) as writer:
        # Add a raw event so we can specify the wall_time value deterministically.
        writer.add_event(
            event_pb2.Event(
                step=1,
                wall_time=123.123123123,
                summary=scalar_v2.scalar_pb("foo", 5.0),
            )
        )
    mock_client = _create_mock_client()
    mock_rate_limiter = mock.create_autospec(util.RateLimiter)
    uploader = uploader_lib.TensorBoardUploader(
        mock_client, logdir, mock_rate_limiter
    )
    uploader.create_experiment()
    uploader._upload_once()
    mock_client.WriteScalar.assert_called_once()
    request = mock_client.WriteScalar.call_args[0][0]
    # Just check the wall_time value; everything else is covered in the full
    # logdir test below.
    self.assertEqual(
        123123123123,
        request.runs[0].tags[0].points[0].wall_time.ToNanoseconds(),
    )
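# Sanity check of the wall-time arithmetic asserted above (not part of the
# test file; illustration only): 123.123123123 seconds round-trips to
# 123123123123 nanoseconds via the protobuf Timestamp well-known type.
from google.protobuf import timestamp_pb2

_ts = timestamp_pb2.Timestamp()
_ts.FromNanoseconds(123123123123)
assert _ts.ToNanoseconds() == 123123123123  # == 123.123123123 s in ns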
def test_start_uploading(self):
    mock_client = _create_mock_client()
    mock_rate_limiter = mock.create_autospec(util.RateLimiter)
    uploader = uploader_lib.TensorBoardUploader(
        mock_client, "/logs/foo", mock_rate_limiter
    )
    uploader.create_experiment()

    def scalar_event(tag, value):
        return event_pb2.Event(summary=scalar_v2.scalar_pb(tag, value))

    mock_logdir_loader = mock.create_autospec(logdir_loader.LogdirLoader)
    mock_logdir_loader.get_run_events.side_effect = [
        {
            "run 1": [scalar_event("1.1", 5.0), scalar_event("1.2", 5.0)],
            "run 2": [scalar_event("2.1", 5.0), scalar_event("2.2", 5.0)],
        },
        {
            "run 3": [scalar_event("3.1", 5.0), scalar_event("3.2", 5.0)],
            "run 4": [scalar_event("4.1", 5.0), scalar_event("4.2", 5.0)],
            "run 5": [scalar_event("5.1", 5.0), scalar_event("5.2", 5.0)],
        },
        AbortUploadError,
    ]

    with mock.patch.object(
        uploader, "_logdir_loader", mock_logdir_loader
    ), self.assertRaises(AbortUploadError):
        uploader.start_uploading()

    self.assertEqual(4 + 6, mock_client.WriteScalar.call_count)
    self.assertEqual(4 + 6, mock_rate_limiter.tick.call_count)
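# AbortUploadError is raised from the mocked side_effect lists above to break
# out of the otherwise-endless upload loop.  It is not defined in this
# excerpt; a plausible definition is simply a dedicated exception type used as
# a test-only sentinel (the docstring is an assumption):
class AbortUploadError(Exception):
    """Sentinel raised from a mock to stop start_uploading() during tests."""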
def execute(self, server_info, channel):
    if self.dry_run:
        api_client = dry_run_stubs.DryRunTensorBoardWriterStub()
    else:
        api_client = write_service_pb2_grpc.TensorBoardWriterServiceStub(
            channel
        )

    _die_if_bad_experiment_name(self.name)
    _die_if_bad_experiment_description(self.description)

    uploader = uploader_lib.TensorBoardUploader(
        api_client,
        self.logdir,
        allowed_plugins=server_info_lib.allowed_plugins(server_info),
        upload_limits=server_info_lib.upload_limits(server_info),
        name=self.name,
        description=self.description,
        verbosity=self.verbosity,
        one_shot=self.one_shot,
    )

    if self.one_shot and not os.path.isdir(self.logdir):
        print("%s: No such directory." % self.logdir)
        print(
            "User specified `one_shot` mode with an unavailable "
            "logdir. Exiting without creating an experiment."
        )
        return

    experiment_id = uploader.create_experiment()
    url = server_info_lib.experiment_url(server_info, experiment_id)
    if self.experiment_url_callback is not None:
        self.experiment_url_callback(url)
    print(
        "Upload started and will continue reading any new data as it's "
        "added to the logdir.\n\nTo stop uploading, press Ctrl-C."
    )
    if self.dry_run:
        print(
            "\n** This is a dry run. "
            "No data will be sent to tensorboard.dev. **\n"
        )
    else:
        print("\nView your TensorBoard live at: %s\n" % url)

    interrupted = False
    try:
        uploader.start_uploading()
    except uploader_lib.ExperimentNotFoundError:
        print("Experiment was deleted; uploading has been cancelled")
        return
    except KeyboardInterrupt:
        interrupted = True
    finally:
        end_message = "\n"
        if interrupted:
            end_message += "Interrupted."
        else:
            end_message += "Done."
        if not self.dry_run:
            end_message += " View your TensorBoard at %s" % url
        sys.stdout.write(end_message + "\n")
        sys.stdout.flush()
def test_upload_empty_logdir(self):
    logdir = self.get_temp_dir()
    mock_client = self._create_mock_client()
    mock_rate_limiter = mock.create_autospec(util.RateLimiter)
    uploader = uploader_lib.TensorBoardUploader(
        mock_client, logdir, mock_rate_limiter
    )
    uploader.create_experiment()
    uploader._upload_once()
    mock_client.WriteScalar.assert_not_called()
def test_upload_swallows_rpc_failure(self):
    logdir = self.get_temp_dir()
    with tb_test_util.FileWriter(logdir) as writer:
        writer.add_test_summary("foo")
    mock_client = self._create_mock_client()
    mock_rate_limiter = mock.create_autospec(util.RateLimiter)
    uploader = uploader_lib.TensorBoardUploader(
        mock_client, logdir, mock_rate_limiter
    )
    uploader.create_experiment()
    error = test_util.grpc_error(grpc.StatusCode.INTERNAL, "Failure")
    mock_client.WriteScalar.side_effect = error
    uploader._upload_once()
    mock_client.WriteScalar.assert_called_once()
def test_upload_propagates_experiment_deletion(self):
    logdir = self.get_temp_dir()
    with tb_test_util.FileWriter(logdir) as writer:
        writer.add_test_summary("foo")
    mock_client = self._create_mock_client()
    mock_rate_limiter = mock.create_autospec(util.RateLimiter)
    uploader = uploader_lib.TensorBoardUploader(
        mock_client, logdir, mock_rate_limiter
    )
    uploader.create_experiment()
    error = test_util.grpc_error(grpc.StatusCode.NOT_FOUND, "nope")
    mock_client.WriteScalar.side_effect = error
    with self.assertRaises(uploader_lib.ExperimentNotFoundError):
        uploader._upload_once()
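# The RPC-failure tests above fabricate errors with test_util.grpc_error(...),
# which is not part of this excerpt.  A minimal stand-in might look like the
# following; the _FakeGrpcError name and structure are assumptions rather than
# the project's actual implementation.
import grpc


class _FakeGrpcError(grpc.RpcError):
    """A grpc.RpcError exposing a status code and a details string."""

    def __init__(self, code, details):
        super().__init__()
        self._code = code
        self._details = details

    def code(self):
        return self._code

    def details(self):
        return self._details


def grpc_error(code, details):
    # Usable as `mock_client.WriteScalar.side_effect = grpc_error(...)`.
    return _FakeGrpcError(code, details)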
def test_create_experiment_with_name(self):
    logdir = "/logs/foo"
    mock_client = _create_mock_client()
    new_name = "This is the new name"
    uploader = uploader_lib.TensorBoardUploader(
        mock_client, logdir, name=new_name
    )
    eid = uploader.create_experiment()
    self.assertEqual(eid, "123")
    mock_client.CreateExperiment.assert_called_once()
    (args, _) = mock_client.CreateExperiment.call_args
    expected_request = write_service_pb2.CreateExperimentRequest(
        name=new_name,
    )
    self.assertEqual(args[0], expected_request)
def test_create_experiment_with_description(self):
    logdir = "/logs/foo"
    mock_client = _create_mock_client()
    new_description = (
        """ **description**" may have "strange" unicode chars 🌴 \\/<> """
    )
    uploader = uploader_lib.TensorBoardUploader(
        mock_client, logdir, description=new_description
    )
    eid = uploader.create_experiment()
    self.assertEqual(eid, "123")
    mock_client.CreateExperiment.assert_called_once()
    (args, _) = mock_client.CreateExperiment.call_args
    expected_request = write_service_pb2.CreateExperimentRequest(
        description=new_description,
    )
    self.assertEqual(args[0], expected_request)
def execute(self, server_info, channel):
    if self.dry_run:
        api_client = dry_run_stubs.DryRunTensorBoardWriterStub()
    else:
        api_client = write_service_pb2_grpc.TensorBoardWriterServiceStub(
            channel
        )

    _die_if_bad_experiment_name(self.name)
    _die_if_bad_experiment_description(self.description)

    uploader = uploader_lib.TensorBoardUploader(
        api_client,
        self.logdir,
        allowed_plugins=server_info_lib.allowed_plugins(server_info),
        upload_limits=server_info_lib.upload_limits(server_info),
        name=self.name,
        description=self.description,
        verbosity=self.verbosity,
        one_shot=self.one_shot,
    )
    experiment_id = uploader.create_experiment()
    url = server_info_lib.experiment_url(server_info, experiment_id)
    print(
        "Upload started and will continue reading any new data as it's added"
    )
    print("to the logdir. To stop uploading, press Ctrl-C.")
    if self.dry_run:
        print(
            "\n** This is a dry run. "
            "No data will be sent to tensorboard.dev. **\n"
        )
    else:
        print("View your TensorBoard live at: %s" % url)
    try:
        uploader.start_uploading()
    except uploader_lib.ExperimentNotFoundError:
        print("Experiment was deleted; uploading has been cancelled")
        return
    except KeyboardInterrupt:
        pass
    finally:
        if not self.dry_run:
            print()
            print("Done! View your TensorBoard at %s" % url)
def test_start_uploading(self):
    mock_client = self._create_mock_client()
    mock_rate_limiter = mock.create_autospec(util.RateLimiter)
    uploader = uploader_lib.TensorBoardUploader(
        mock_client, "/logs/foo", mock_rate_limiter
    )
    uploader.create_experiment()

    mock_builder = mock.create_autospec(uploader_lib._RequestBuilder)
    request = write_service_pb2.WriteScalarRequest()
    mock_builder.build_requests.side_effect = [
        iter([request, request]),
        iter([request, request, request, request, request]),
        AbortUploadError,
    ]
    # pylint: disable=g-backslash-continuation
    with mock.patch.object(uploader, "_upload") as mock_upload, \
            mock.patch.object(uploader, "_request_builder", mock_builder), \
            self.assertRaises(AbortUploadError):
        uploader.start_uploading()
    # pylint: enable=g-backslash-continuation

    self.assertEqual(7, mock_upload.call_count)
    self.assertEqual(2 + 5 + 1, mock_rate_limiter.tick.call_count)
def execute(self):
    api_client = write_service_pb2_grpc.TensorBoardWriterServiceStub(
        self.channel
    )
    uploader = uploader_lib.TensorBoardUploader(
        api_client,
        self.args.logdir,
        allowed_plugins=server_info_lib.allowed_plugins(self.server_info),
        name=self.args.name,
        description=self.args.description,
    )
    experiment_id = uploader.create_experiment()
    url = server_info_lib.experiment_url(self.server_info, experiment_id)

    # Blocks forever to continuously upload data from the logdir:
    # print("Upload started and will continue reading any new data as it's added")
    # print("View your TensorBoard live at: %s" % url)
    # uploader.start_uploading()

    # Runs one upload cycle
    uploader._upload_once()
    return url
def test_start_uploading_without_create_experiment_fails(self):
    mock_client = _create_mock_client()
    uploader = uploader_lib.TensorBoardUploader(mock_client, "/logs/foo")
    with self.assertRaisesRegex(RuntimeError, "call create_experiment()"):
        uploader.start_uploading()
def test_create_experiment(self): logdir = "/logs/foo" mock_client = _create_mock_client() uploader = uploader_lib.TensorBoardUploader(mock_client, logdir) eid = uploader.create_experiment() self.assertEqual(eid, "123")
def test_upload_full_logdir(self):
    logdir = self.get_temp_dir()
    mock_client = _create_mock_client()
    mock_rate_limiter = mock.create_autospec(util.RateLimiter)
    uploader = uploader_lib.TensorBoardUploader(
        mock_client, logdir, mock_rate_limiter
    )
    uploader.create_experiment()

    # Convenience helpers for constructing expected requests.
    run = write_service_pb2.WriteScalarRequest.Run
    tag = write_service_pb2.WriteScalarRequest.Tag
    point = scalar_pb2.ScalarPoint

    # First round
    writer = tb_test_util.FileWriter(logdir)
    writer.add_test_summary("foo", simple_value=5.0, step=1)
    writer.add_test_summary("foo", simple_value=6.0, step=2)
    writer.add_test_summary("foo", simple_value=7.0, step=3)
    writer.add_test_summary("bar", simple_value=8.0, step=3)
    writer.flush()
    writer_a = tb_test_util.FileWriter(os.path.join(logdir, "a"))
    writer_a.add_test_summary("qux", simple_value=9.0, step=2)
    writer_a.flush()
    uploader._upload_once()
    self.assertEqual(1, mock_client.WriteScalar.call_count)
    request1 = mock_client.WriteScalar.call_args[0][0]
    _clear_wall_times(request1)
    expected_request1 = write_service_pb2.WriteScalarRequest(
        experiment_id="123",
        runs=[
            run(
                name=".",
                tags=[
                    tag(
                        name="foo",
                        metadata=test_util.scalar_metadata("foo"),
                        points=[
                            point(step=1, value=5.0),
                            point(step=2, value=6.0),
                            point(step=3, value=7.0),
                        ],
                    ),
                    tag(
                        name="bar",
                        metadata=test_util.scalar_metadata("bar"),
                        points=[point(step=3, value=8.0)],
                    ),
                ],
            ),
            run(
                name="a",
                tags=[
                    tag(
                        name="qux",
                        metadata=test_util.scalar_metadata("qux"),
                        points=[point(step=2, value=9.0)],
                    )
                ],
            ),
        ],
    )
    self.assertProtoEquals(expected_request1, request1)
    mock_client.WriteScalar.reset_mock()

    # Second round
    writer.add_test_summary("foo", simple_value=10.0, step=5)
    writer.add_test_summary("baz", simple_value=11.0, step=1)
    writer.flush()
    writer_b = tb_test_util.FileWriter(os.path.join(logdir, "b"))
    writer_b.add_test_summary("xyz", simple_value=12.0, step=1)
    writer_b.flush()
    uploader._upload_once()
    self.assertEqual(1, mock_client.WriteScalar.call_count)
    request2 = mock_client.WriteScalar.call_args[0][0]
    _clear_wall_times(request2)
    expected_request2 = write_service_pb2.WriteScalarRequest(
        experiment_id="123",
        runs=[
            run(
                name=".",
                tags=[
                    tag(
                        name="foo",
                        metadata=test_util.scalar_metadata("foo"),
                        points=[point(step=5, value=10.0)],
                    ),
                    tag(
                        name="baz",
                        metadata=test_util.scalar_metadata("baz"),
                        points=[point(step=1, value=11.0)],
                    ),
                ],
            ),
            run(
                name="b",
                tags=[
                    tag(
                        name="xyz",
                        metadata=test_util.scalar_metadata("xyz"),
                        points=[point(step=1, value=12.0)],
                    )
                ],
            ),
        ],
    )
    self.assertProtoEquals(expected_request2, request2)
    mock_client.WriteScalar.reset_mock()

    # Empty third round
    uploader._upload_once()
    mock_client.WriteScalar.assert_not_called()
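# test_upload_full_logdir strips nondeterministic timestamps with a
# _clear_wall_times helper before comparing protos.  The helper is not shown
# in this excerpt; a sketch, assuming each ScalarPoint carries a protobuf
# wall_time field that can simply be cleared in place:
def _clear_wall_times(request):
    """Clears the wall_time of every point in a WriteScalarRequest."""
    for run in request.runs:
        for tag in run.tags:
            for point in tag.points:
                point.ClearField("wall_time")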
def test_create_experiment(self): logdir = "/logs/foo" mock_client = self._create_mock_client() uploader = uploader_lib.TensorBoardUploader(mock_client, logdir) url = uploader.create_experiment() self.assertEqual(url, "https://example.com/123")