def execute(self, server_info, channel):
    """Lists the user's experiments on the remote server.

    Prints one formatted entry per experiment (JSON or human-readable,
    depending on `self.json`) to stdout, and a summary count to stderr.

    Args:
      server_info: Server info forwarded to
        `server_info_lib.experiment_url` to build each experiment's URL.
      channel: A gRPC channel used to construct the exporter stub.
    """
    api_client = export_service_pb2_grpc.TensorBoardExporterServiceStub(
        channel
    )
    # Request every metadata field the formatters can display.
    fieldmask = experiment_pb2.ExperimentMask(
        name=True,
        description=True,
        create_time=True,
        update_time=True,
        num_runs=True,
        num_tags=True,
        num_scalars=True,
        total_tensor_bytes=True,
        total_blob_bytes=True,
    )
    formatter = (
        formatters.JsonFormatter()
        if self.json
        else formatters.ReadableFormatter()
    )
    num_listed = 0
    for experiment in exporter_lib.list_experiments(
        api_client, fieldmask=fieldmask
    ):
        num_listed += 1
        url = server_info_lib.experiment_url(
            server_info, experiment.experiment_id
        )
        print(formatter.format_experiment(experiment, url))
        sys.stdout.flush()
    if num_listed:
        sys.stderr.write("Total: %d experiment(s)\n" % num_listed)
    else:
        sys.stderr.write(
            "No experiments. Use `tensorboard dev upload` to get started.\n"
        )
    sys.stderr.flush()
def export(self, read_time=None):
    """Executes the export flow.

    Args:
      read_time: A fixed timestamp from which to export data, as float
        seconds since epoch (like `time.time()`). Optional; defaults to
        the current time.

    Yields:
      After each experiment is successfully downloaded, the ID of that
      experiment, as a string.
    """
    if read_time is None:
        read_time = time.time()
    # Only fetch the metadata fields serialized below.
    metadata_mask = experiment_pb2.ExperimentMask(
        create_time=True,
        update_time=True,
        name=True,
        description=True,
    )
    for experiment in list_experiments(
        self._api, fieldmask=metadata_mask, read_time=read_time
    ):
        experiment_id = experiment.experiment_id
        metadata = {
            "name": experiment.name,
            "description": experiment.description,
            "create_time": util.format_time_absolute(experiment.create_time),
            "update_time": util.format_time_absolute(experiment.update_time),
        }
        directory = _experiment_directory(self._outdir, experiment_id)
        os.mkdir(directory)  # Fails loudly if the directory already exists.
        with _open_excl(os.path.join(directory, _FILENAME_METADATA)) as f:
            json.dump(metadata, f, sort_keys=True)
            f.write("\n")
        try:
            with _open_excl(os.path.join(directory, _FILENAME_SCALARS)) as f:
                for block in self._request_scalar_data(
                    experiment_id, read_time
                ):
                    json.dump(block, f, sort_keys=True)
                    f.write("\n")
                    # Flush each block so partial progress hits disk.
                    f.flush()
            yield experiment_id
        except grpc.RpcError as e:
            # A cancelled RPC presumably indicates the client-side
            # timeout fired; surface it as a domain-specific error.
            if e.code() != grpc.StatusCode.CANCELLED:
                raise
            raise GrpcTimeoutException(experiment_id)
def test_success(self):
    """Updating an experiment's name issues the expected RPC request."""
    mock_client = _create_mock_client()
    mock_client.UpdateExperiment.return_value = (
        write_service_pb2.UpdateExperimentResponse()
    )
    updated_name = "a new name"
    uploader_lib.update_experiment_metadata(
        mock_client, "123", name=updated_name
    )
    expected_request = write_service_pb2.UpdateExperimentRequest(
        experiment=experiment_pb2.Experiment(
            experiment_id="123", name=updated_name
        ),
        experiment_mask=experiment_pb2.ExperimentMask(name=True),
    )
    mock_client.UpdateExperiment.assert_called_once()
    (args, _) = mock_client.UpdateExperiment.call_args
    self.assertEqual(args[0], expected_request)
def execute(self, server_info, channel):
    """Lists the user's experiments, printing a summary block for each.

    Prints each experiment's URL plus a tab-indented table of its
    metadata to stdout, and a total count to stderr.

    Args:
      server_info: Server info forwarded to
        `server_info_lib.experiment_url` to build each experiment's URL.
      channel: A gRPC channel used to construct the exporter stub.
    """
    api_client = export_service_pb2_grpc.TensorBoardExporterServiceStub(
        channel
    )
    # Fix: also request `name` and `description`, which the summary
    # below reads. Without them in the mask the server may omit those
    # fields, so the "[No Name]"/"[No Description]" fallbacks would
    # print even for experiments that have them set.
    fieldmask = experiment_pb2.ExperimentMask(
        name=True,
        description=True,
        create_time=True,
        update_time=True,
        num_scalars=True,
        num_runs=True,
        num_tags=True,
    )
    gen = exporter_lib.list_experiments(api_client, fieldmask=fieldmask)
    count = 0
    for experiment in gen:
        count += 1
        if not isinstance(experiment, experiment_pb2.Experiment):
            # Presumably a bare experiment ID from an older server
            # response — TODO confirm; print only the URL in that case.
            url = server_info_lib.experiment_url(server_info, experiment)
            print(url)
            continue
        experiment_id = experiment.experiment_id
        url = server_info_lib.experiment_url(server_info, experiment_id)
        print(url)
        data = [
            ("Name", experiment.name or "[No Name]"),
            ("Description", experiment.description or "[No Description]"),
            ("Id", experiment.experiment_id),
            ("Created", util.format_time(experiment.create_time)),
            ("Updated", util.format_time(experiment.update_time)),
            ("Scalars", str(experiment.num_scalars)),
            ("Runs", str(experiment.num_runs)),
            ("Tags", str(experiment.num_tags)),
        ]
        for (name, value) in data:
            print("\t%s %s" % (name.ljust(12), value))
        sys.stdout.flush()
    if not count:
        sys.stderr.write(
            "No experiments. Use `tensorboard dev upload` to get started.\n"
        )
    else:
        sys.stderr.write("Total: %d experiment(s)\n" % count)
    sys.stderr.flush()
def testListIntentSetsExperimentMask(self):
    """The list intent must request the full set of metadata fields."""
    mock_server_info = mock.MagicMock()
    mock_channel = mock.MagicMock()
    expected_mask = experiment_pb2.ExperimentMask(
        name=True,
        description=True,
        create_time=True,
        update_time=True,
        num_runs=True,
        num_tags=True,
        num_scalars=True,
        total_tensor_bytes=True,
        total_blob_bytes=True,
    )
    with mock.patch.object(
        exporter_lib,
        "list_experiments",
    ):
        intent = uploader_subcommand._ListIntent()
        intent.execute(mock_server_info, mock_channel)
        actual_mask = exporter_lib.list_experiments.call_args[1][
            "fieldmask"
        ]
        # Fix: `assertEquals` is a deprecated alias (removed in recent
        # Python versions); use `assertEqual` instead.
        self.assertEqual(actual_mask, expected_mask)