def execute(self, server_info, channel):
    """Create a new experiment and continuously upload the logdir to it.

    Args:
      server_info: `ServerInfoResponse` describing server capabilities.
      channel: gRPC channel to the TensorBoard writer service.
    """
    writer_stub = write_service_pb2_grpc.TensorBoardWriterServiceStub(
        channel)
    # Reject invalid user-supplied metadata before any network calls.
    _die_if_bad_experiment_name(self.name)
    _die_if_bad_experiment_description(self.description)
    uploader = uploader_lib.TensorBoardUploader(
        writer_stub,
        self.logdir,
        allowed_plugins=server_info_lib.allowed_plugins(server_info),
        upload_limits=server_info_lib.upload_limits(server_info),
        name=self.name,
        description=self.description,
    )
    experiment_url = server_info_lib.experiment_url(
        server_info, uploader.create_experiment())
    print(
        "Upload started and will continue reading any new data as it's added"
    )
    print("to the logdir. To stop uploading, press Ctrl-C.")
    print("View your TensorBoard live at: %s" % experiment_url)
    try:
        uploader.start_uploading()
    except uploader_lib.ExperimentNotFoundError:
        print("Experiment was deleted; uploading has been cancelled")
        return
    except KeyboardInterrupt:
        print()
        print("Upload stopped. View your TensorBoard at %s" % experiment_url)
        return
    # TODO(@nfelt): make it possible for the upload cycle to end once we
    # detect that no more runs are active, so this code can be reached.
    print("Done! View your TensorBoard at %s" % experiment_url)
def execute(self, server_info, channel):
    """Create an experiment (or dry-run stub) and upload the logdir.

    Args:
      server_info: `ServerInfoResponse` describing server capabilities.
      channel: gRPC channel to the writer service (unused in dry-run mode).
    """
    if self.dry_run:
        stub = dry_run_stubs.DryRunTensorBoardWriterStub()
    else:
        stub = write_service_pb2_grpc.TensorBoardWriterServiceStub(channel)
    # Reject invalid user-supplied metadata before any network calls.
    _die_if_bad_experiment_name(self.name)
    _die_if_bad_experiment_description(self.description)
    uploader = uploader_lib.TensorBoardUploader(
        stub,
        self.logdir,
        allowed_plugins=server_info_lib.allowed_plugins(server_info),
        upload_limits=server_info_lib.upload_limits(server_info),
        name=self.name,
        description=self.description,
        verbosity=self.verbosity,
        one_shot=self.one_shot,
    )
    if self.one_shot and not os.path.isdir(self.logdir):
        # One-shot mode with nothing to read: exit before creating a
        # server-side experiment that would stay empty.
        print("%s: No such directory." % self.logdir)
        print(
            "User specified `one_shot` mode with an unavailable "
            "logdir. Exiting without creating an experiment."
        )
        return
    experiment_id = uploader.create_experiment()
    url = server_info_lib.experiment_url(server_info, experiment_id)
    if self.experiment_url_callback is not None:
        self.experiment_url_callback(url)
    print(
        "Upload started and will continue reading any new data as it's "
        "added to the logdir.\n\nTo stop uploading, press Ctrl-C."
    )
    if self.dry_run:
        print(
            "\n** This is a dry run. "
            "No data will be sent to tensorboard.dev. **\n"
        )
    else:
        print("\nView your TensorBoard live at: %s\n" % url)
    interrupted = False
    try:
        uploader.start_uploading()
    except uploader_lib.ExperimentNotFoundError:
        print("Experiment was deleted; uploading has been cancelled")
        return
    except KeyboardInterrupt:
        interrupted = True
    finally:
        # Note: this also runs on the early `return` above, so a
        # deletion still gets a "Done." trailer.
        end_message = "\n" + ("Interrupted." if interrupted else "Done.")
        if not self.dry_run:
            end_message += " View your TensorBoard at %s" % url
        sys.stdout.write(end_message + "\n")
        sys.stdout.flush()
def test_missing_max_blob_size_in_upload_limits(self):
    """Covers the one field left out of test_missing_fields_in_upload_limits."""
    response = server_info_pb2.ServerInfoResponse()
    response.upload_limits.max_tensor_point_size = 22
    limits = server_info.upload_limits(response)
    # The unset field falls back to its default; the set field is kept.
    self.assertEqual(limits.max_blob_size, server_info._DEFAULT_MAX_BLOB_SIZE)
    self.assertEqual(limits.max_tensor_point_size, 22)
def test_upload_limits_from_server_info(self):
    """Fully-populated server limits are passed through unchanged."""
    field_values = {
        "max_scalar_request_size": 1,
        "max_tensor_request_size": 2,
        "max_blob_request_size": 3,
        "min_scalar_request_interval": 4,
        "min_tensor_request_interval": 5,
        "min_blob_request_interval": 6,
        "max_blob_size": 7,
        "max_tensor_point_size": 8,
    }
    expected = server_info_pb2.UploadLimits()
    for field, value in field_values.items():
        setattr(expected, field, value)
    response = server_info_pb2.ServerInfoResponse()
    response.upload_limits.CopyFrom(expected)
    self.assertEqual(server_info.upload_limits(response), expected)
def execute(self, server_info, channel):
    """Create an experiment (or dry-run stub) and upload the logdir.

    Args:
      server_info: `ServerInfoResponse` describing server capabilities.
      channel: gRPC channel to the writer service (unused in dry-run mode).
    """
    if self.dry_run:
        stub = dry_run_stubs.DryRunTensorBoardWriterStub()
    else:
        stub = write_service_pb2_grpc.TensorBoardWriterServiceStub(channel)
    # Reject invalid user-supplied metadata before any network calls.
    _die_if_bad_experiment_name(self.name)
    _die_if_bad_experiment_description(self.description)
    uploader = uploader_lib.TensorBoardUploader(
        stub,
        self.logdir,
        allowed_plugins=server_info_lib.allowed_plugins(server_info),
        upload_limits=server_info_lib.upload_limits(server_info),
        name=self.name,
        description=self.description,
        verbosity=self.verbosity,
        one_shot=self.one_shot,
    )
    experiment_url = server_info_lib.experiment_url(
        server_info, uploader.create_experiment())
    print(
        "Upload started and will continue reading any new data as it's added"
    )
    print("to the logdir. To stop uploading, press Ctrl-C.")
    if self.dry_run:
        print(
            "\n** This is a dry run. "
            "No data will be sent to tensorboard.dev. **\n"
        )
    else:
        print("View your TensorBoard live at: %s" % experiment_url)
    try:
        uploader.start_uploading()
    except uploader_lib.ExperimentNotFoundError:
        print("Experiment was deleted; uploading has been cancelled")
        return
    except KeyboardInterrupt:
        pass
    finally:
        # Runs on every exit path, including the early `return` above.
        if not self.dry_run:
            print()
            print("Done! View your TensorBoard at %s" % experiment_url)
def test_no_upload_limits_in_server_info(self):
    """An empty response yields the library's default upload limits."""
    response = server_info_pb2.ServerInfoResponse()
    actual = server_info.upload_limits(response)
    defaults = {
        "max_scalar_request_size": server_info._DEFAULT_MAX_SCALAR_REQUEST_SIZE,
        "max_tensor_request_size": server_info._DEFAULT_MAX_TENSOR_REQUEST_SIZE,
        "max_blob_request_size": server_info._DEFAULT_MAX_BLOB_REQUEST_SIZE,
        "min_scalar_request_interval": (
            server_info._DEFAULT_MIN_SCALAR_REQUEST_INTERVAL),
        "min_tensor_request_interval": (
            server_info._DEFAULT_MIN_TENSOR_REQUEST_INTERVAL),
        "min_blob_request_interval": (
            server_info._DEFAULT_MIN_BLOB_REQUEST_INTERVAL),
        "max_blob_size": server_info._DEFAULT_MAX_BLOB_SIZE,
        "max_tensor_point_size": server_info._DEFAULT_MAX_TENSOR_POINT_SIZE,
    }
    expected = server_info_pb2.UploadLimits()
    for field, value in defaults.items():
        setattr(expected, field, value)
    self.assertEqual(actual, expected)