def testInvalidVerbosityRaisesError(self):
    """The constructor rejects every verbosity outside the supported ints."""
    # Strings, negatives, fractions, out-of-range ints, and None all fail.
    for bad_verbosity in ("1", -1, 0.5, 100, None):
        with self.assertRaises(ValueError):
            upload_tracker.UploadTracker(verbosity=bad_verbosity)
def testBlobTrackerWithVerbosity0(self):
    """Verbosity 0 keeps the blob tracker completely silent."""
    silent_tracker = upload_tracker.UploadTracker(verbosity=0)
    with silent_tracker.blob_tracker(blob_bytes=2048):
        # No console traffic while the blob upload is in flight...
        self.assertEqual(self.mock_write.call_count, 0)
        self.assertEqual(self.mock_flush.call_count, 0)
    # ...and none after the context exits either.
    self.assertEqual(self.mock_write.call_count, 0)
    self.assertEqual(self.mock_flush.call_count, 0)
def testBlobTrackerNotUploaded(self):
    """A blob marked not-uploaded is reported on the "Total skipped" line."""
    tracker = upload_tracker.UploadTracker(verbosity=1)
    with tracker.send_tracker():
        # Entering send_tracker produces two writes / two flushes; the first
        # write contains the "Started scanning" banner.
        self.assertEqual(self.mock_write.call_count, 2)
        self.assertEqual(self.mock_flush.call_count, 2)
        self.assertIn(
            "Started scanning",
            self.mock_write.call_args_list[0][0][0],
        )
        with tracker.blob_tracker(
            blob_bytes=2048 * 1024 * 1024
        ) as blob_tracker:
            # Starting a blob upload adds one write announcing the size.
            self.assertEqual(self.mock_write.call_count, 3)
            self.assertEqual(self.mock_flush.call_count, 3)
            self.assertIn(
                "Uploading binary object (2048.0 MB)",
                self.mock_write.call_args[0][0],
            )
            # Flag this blob as skipped rather than uploaded.
            blob_tracker.mark_uploaded(is_uploaded=False)
    # After send_tracker exits, the summary writes bring the totals to
    # 6 writes / 5 flushes; writes 4 and 5 carry the totals lines.
    self.assertEqual(self.mock_write.call_count, 6)
    self.assertEqual(self.mock_flush.call_count, 5)
    self.assertIn(
        "Total uploaded: 0 scalars, 0 tensors, 0 binary objects\n",
        self.mock_write.call_args_list[3][0][0],
    )
    self.assertIn(
        "Total skipped: 1 binary objects (2048.0 MB)\n",
        self.mock_write.call_args_list[4][0][0],
    )
    # Even a skipped blob counts as data having been seen.
    self.assertEqual(tracker.has_data(), True)
def testScalarsTrackerWithVerbosity0(self):
    """With verbosity 0 the scalars tracker writes nothing to the console."""
    quiet_tracker = upload_tracker.UploadTracker(verbosity=0)
    with quiet_tracker.scalars_tracker(123):
        # Silent while the scalars are being sent...
        self.assertEqual(self.mock_write.call_count, 0)
        self.assertEqual(self.mock_flush.call_count, 0)
    # ...and still silent once the context has closed.
    self.assertEqual(self.mock_write.call_count, 0)
    self.assertEqual(self.mock_flush.call_count, 0)
def testBlobTrackerUploaded(self):
    """Starting a blob upload announces the blob with a human-readable size."""
    stats_tracker = upload_tracker.UploadTracker(verbosity=1)
    with stats_tracker.blob_tracker(blob_bytes=2048) as blob_tracker:
        # Exactly one message (and flush) on entry, naming the size in kB.
        self.assertEqual(self.mock_write.call_count, 1)
        self.assertEqual(self.mock_flush.call_count, 1)
        self.assertIn(
            "Uploading binary object (2.0 kB)",
            self.mock_write.call_args[0][0],
        )
def testTensorsTrackerWithVerbosity0(self):
    """Verbosity 0 silences the tensors tracker entirely."""
    muted_tracker = upload_tracker.UploadTracker(verbosity=0)
    with muted_tracker.tensors_tracker(
        num_tensors=200,
        num_tensors_skipped=50,
        tensor_bytes=6000,
        tensor_bytes_skipped=4000,
    ):
        # Nothing is written during the tensor upload...
        self.assertEqual(self.mock_write.call_count, 0)
        self.assertEqual(self.mock_flush.call_count, 0)
    # ...nor after it finishes.
    self.assertEqual(self.mock_write.call_count, 0)
    self.assertEqual(self.mock_flush.call_count, 0)
def testScalarsTracker(self):
    """scalars_tracker prints one progress message and records data."""
    stats_tracker = upload_tracker.UploadTracker(verbosity=1)
    with stats_tracker.scalars_tracker(123):
        # A single announcement naming the scalar count.
        self.assertEqual(self.mock_write.call_count, 1)
        self.assertEqual(self.mock_flush.call_count, 1)
        self.assertIn(
            "Uploading 123 scalars...",
            self.mock_write.call_args[0][0],
        )
    # Leaving the context produces no further output.
    self.assertEqual(self.mock_write.call_count, 1)
    self.assertEqual(self.mock_flush.call_count, 1)
    self.assertEqual(stats_tracker.has_data(), True)
def testTensorsTrackerWithoutSkippedTensors(self):
    """When nothing is skipped, the message mentions only the upload size."""
    stats_tracker = upload_tracker.UploadTracker(verbosity=1)
    with stats_tracker.tensors_tracker(
        num_tensors=200,
        num_tensors_skipped=0,
        tensor_bytes=6000,
        tensor_bytes_skipped=0,
    ):
        # One write/flush pair announcing the 200 tensors and their size.
        self.assertEqual(self.mock_write.call_count, 1)
        self.assertEqual(self.mock_flush.call_count, 1)
        self.assertIn(
            "Uploading 200 tensors (5.9 kB)",
            self.mock_write.call_args[0][0],
        )
    self.assertEqual(stats_tracker.has_data(), True)
def testSendTracker(self):
    """send_tracker prints a start banner, then a listening banner on exit."""
    send_stats_tracker = upload_tracker.UploadTracker(verbosity=1)
    with send_stats_tracker.send_tracker():
        # Two writes/flushes on entry; the latest names the upload start.
        self.assertEqual(self.mock_write.call_count, 2)
        self.assertEqual(self.mock_flush.call_count, 2)
        self.assertIn(
            "Data upload starting...",
            self.mock_write.call_args[0][0],
        )
    # One more write on exit: the "listening" banner.
    self.assertEqual(self.mock_write.call_count, 3)
    self.assertEqual(self.mock_flush.call_count, 3)
    self.assertIn(
        "Listening for new data in logdir...",
        self.mock_write.call_args[0][0],
    )
    # No scalars/tensors/blobs were tracked, so no data was seen.
    self.assertEqual(send_stats_tracker.has_data(), False)
def create_experiment(self):
    """Creates an Experiment for this upload session and returns the ID."""
    logger.info("Creating experiment")
    # Ask the backend to create the experiment with this uploader's
    # configured name and description.
    create_request = write_service_pb2.CreateExperimentRequest(
        name=self._name, description=self._description
    )
    create_response = grpc_util.call_with_retries(
        self._api.CreateExperiment, create_request
    )
    # Set up progress tracking and the batched sender bound to the new
    # experiment before handing the ID back to the caller.
    self._tracker = upload_tracker.UploadTracker(verbosity=self._verbosity)
    self._request_sender = _BatchedRequestSender(
        create_response.experiment_id,
        self._api,
        allowed_plugins=self._allowed_plugins,
        upload_limits=self._upload_limits,
        rpc_rate_limiter=self._rpc_rate_limiter,
        tensor_rpc_rate_limiter=self._tensor_rpc_rate_limiter,
        blob_rpc_rate_limiter=self._blob_rpc_rate_limiter,
        tracker=self._tracker,
    )
    return create_response.experiment_id
def create_profile_request_sender() -> profile_uploader.ProfileRequestSender:
    """Creates the `ProfileRequestSender` for the profile plugin.

    A profile request sender is created for the plugin so that after
    profiling runs have finished, data can be uploaded to the tensorboard
    backend.

    Returns:
        A ProfileRequestSender object.
    """
    api_client = _get_api_client()
    experiment_name = _get_or_create_experiment(
        api_client, training_utils.environment_variables.cloud_ml_job_id
    )
    upload_limits = _make_upload_limits()
    # Rate-limit blob RPCs based on the configured minimum interval.
    blob_rpc_rate_limiter = util.RateLimiter(
        upload_limits.min_blob_request_interval / 100
    )
    blob_storage_bucket, blob_storage_folder = _get_blob_items(api_client)
    source_bucket = uploader_utils.get_source_bucket(
        training_utils.environment_variables.tensorboard_log_dir
    )
    # Assemble the sender directly; verbosity=1 enables upload statistics.
    return profile_uploader.ProfileRequestSender(
        experiment_name,
        api_client,
        upload_limits=upload_limits,
        blob_rpc_rate_limiter=blob_rpc_rate_limiter,
        blob_storage_bucket=blob_storage_bucket,
        blob_storage_folder=blob_storage_folder,
        source_bucket=source_bucket,
        tracker=upload_tracker.UploadTracker(verbosity=1),
        logdir=training_utils.environment_variables.tensorboard_log_dir,
    )
def __init__(
    self,
    writer_client,
    logdir,
    allowed_plugins,
    upload_limits,
    logdir_poll_rate_limiter=None,
    rpc_rate_limiter=None,
    tensor_rpc_rate_limiter=None,
    blob_rpc_rate_limiter=None,
    name=None,
    description=None,
    verbosity=None,
    one_shot=None,
):
    """Constructs a TensorBoardUploader.

    Args:
      writer_client: a TensorBoardWriterService stub instance
      logdir: path of the log directory to upload
      allowed_plugins: collection of string plugin names; events will only
        be uploaded if their time series's metadata specifies one of these
        plugin names
      upload_limits: instance of tensorboard.service.UploadLimits proto.
      logdir_poll_rate_limiter: a `RateLimiter` to use to limit logdir
        polling frequency, to avoid thrashing disks, especially on networked
        file systems
      rpc_rate_limiter: a `RateLimiter` to use to limit write RPC frequency.
        Note this limit applies at the level of single RPCs in the Scalar
        and Tensor case, but at the level of an entire blob upload in the
        Blob case--which may require a few preparatory RPCs and a stream of
        chunks. Note the chunk stream is internally rate-limited by
        backpressure from the server, so it is not a concern that we do not
        explicitly rate-limit within the stream here.
      tensor_rpc_rate_limiter: a `RateLimiter` to use to limit write RPC
        frequency for tensor requests; defaults to one derived from
        `upload_limits.min_tensor_request_interval`.
      blob_rpc_rate_limiter: a `RateLimiter` to use to limit write RPC
        frequency for blob uploads; defaults to one derived from
        `upload_limits.min_blob_request_interval`.
      name: String name to assign to the experiment.
      description: String description to assign to the experiment.
      verbosity: Level of verbosity, an integer. Supported value: 0 - No
        upload statistics is printed. 1 - Print upload statistics while
        uploading data (default).
      one_shot: Once uploading starts, upload only the existing data in the
        logdir and then return immediately, instead of the default behavior
        of continuing to listen for new data in the logdir and upload them
        when it appears.
    """
    self._api = writer_client
    self._logdir = logdir
    self._allowed_plugins = frozenset(allowed_plugins)
    self._upload_limits = upload_limits
    self._name = name
    self._description = description
    # Defaults: verbose statistics (1) and continuous (non one-shot) upload.
    self._verbosity = 1 if verbosity is None else verbosity
    self._one_shot = False if one_shot is None else one_shot
    # Populated later (e.g. when the experiment is created).
    self._request_sender = None
    self._experiment_id = None
    if logdir_poll_rate_limiter is None:
        self._logdir_poll_rate_limiter = util.RateLimiter(
            _MIN_LOGDIR_POLL_INTERVAL_SECS)
    else:
        self._logdir_poll_rate_limiter = logdir_poll_rate_limiter
    if rpc_rate_limiter is None:
        # NOTE(review): the `/ 1000` suggests the proto intervals are in
        # milliseconds -- confirm against the UploadLimits definition.
        self._rpc_rate_limiter = util.RateLimiter(
            self._upload_limits.min_scalar_request_interval / 1000)
    else:
        self._rpc_rate_limiter = rpc_rate_limiter
    if tensor_rpc_rate_limiter is None:
        self._tensor_rpc_rate_limiter = util.RateLimiter(
            self._upload_limits.min_tensor_request_interval / 1000)
    else:
        self._tensor_rpc_rate_limiter = tensor_rpc_rate_limiter
    if blob_rpc_rate_limiter is None:
        self._blob_rpc_rate_limiter = util.RateLimiter(
            self._upload_limits.min_blob_request_interval / 1000)
    else:
        self._blob_rpc_rate_limiter = blob_rpc_rate_limiter
    # An event file counts as "active" if it was modified within the last
    # _EVENT_FILE_INACTIVE_SECS seconds.
    active_filter = (
        lambda secs: secs + _EVENT_FILE_INACTIVE_SECS >= time.time())
    directory_loader_factory = functools.partial(
        directory_loader.DirectoryLoader,
        loader_factory=event_file_loader.TimestampedEventFileLoader,
        path_filter=io_wrapper.IsTensorFlowEventsFile,
        active_filter=active_filter,
    )
    self._logdir_loader = logdir_loader.LogdirLoader(
        self._logdir, directory_loader_factory)
    self._tracker = upload_tracker.UploadTracker(verbosity=self._verbosity,
                                                 one_shot=self._one_shot)