def execute(self, server_info, channel):
    """List the user's experiments, one formatted record per experiment.

    Requests every metadata field from the server and renders each
    experiment (with its URL) through either the JSON or the
    human-readable formatter, depending on the `--json` flag.
    """
    stub = export_service_pb2_grpc.TensorBoardExporterServiceStub(channel)
    # Ask for the full metadata record so the formatter has everything.
    mask = experiment_pb2.ExperimentMask(
        name=True,
        description=True,
        create_time=True,
        update_time=True,
        num_runs=True,
        num_tags=True,
        num_scalars=True,
        total_tensor_bytes=True,
        total_blob_bytes=True,
    )
    formatter = (
        formatters.JsonFormatter()
        if self.json
        else formatters.ReadableFormatter()
    )
    num_seen = 0
    for experiment in exporter_lib.list_experiments(stub, fieldmask=mask):
        num_seen += 1
        url = server_info_lib.experiment_url(
            server_info, experiment.experiment_id
        )
        print(formatter.format_experiment(experiment, url))
        sys.stdout.flush()
    # Summary (or getting-started hint) goes to stderr so stdout stays
    # machine-consumable.
    if num_seen:
        sys.stderr.write("Total: %d experiment(s)\n" % num_seen)
    else:
        sys.stderr.write(
            "No experiments. Use `tensorboard dev upload` to get started.\n"
        )
    sys.stderr.flush()
def _create_mock_api_client():
    """Return an autospecced mock of the exporter-service stub.

    Autospeccing TensorBoardExporterServiceStub itself does not work
    because grpc builds stubs via metaclassing, so we first instantiate
    a real stub against a throwaway test channel and derive the mock
    from that concrete instance instead.
    """
    test_channel = grpc_testing.channel(
        service_descriptors=[], time=grpc_testing.strict_real_time()
    )
    real_stub = export_service_pb2_grpc.TensorBoardExporterServiceStub(
        test_channel
    )
    return mock.create_autospec(real_stub)
def get_api_client(api_endpoint=None):
    """Build an exporter-service stub over a secure channel.

    Args:
      api_endpoint: Optional server endpoint override; forwarded to
        `_get_server_info`.

    Returns:
      A `TensorBoardExporterServiceStub` connected to the API server,
      with ID-token call credentials layered on when stored credentials
      are available.
    """
    server_info = _get_server_info(api_endpoint=api_endpoint)
    _handle_server_info(server_info)
    creds = grpc.ssl_channel_credentials()
    stored = auth.CredentialsStore().read_credentials()
    if stored:
        # Attach per-call ID-token auth on top of the TLS channel creds.
        creds = grpc.composite_channel_credentials(
            creds, auth.id_token_call_credentials(stored)
        )
    channel = grpc.secure_channel(server_info.api_server.endpoint, creds)
    return export_service_pb2_grpc.TensorBoardExporterServiceStub(channel)
def execute(self, channel):
    """Download every experiment into `self.output_dir`.

    Raises:
      base_plugin.FlagsError: if the output directory already exists.
    """
    stub = export_service_pb2_grpc.TensorBoardExporterServiceStub(channel)
    target_dir = self.output_dir
    try:
        exporter = exporter_lib.TensorBoardExporter(stub, target_dir)
    except exporter_lib.OutputDirectoryExistsError:
        # Surface as a flags error so the CLI reports it cleanly.
        raise base_plugin.FlagsError(
            'Output directory already exists: %r' % target_dir
        )
    downloaded = 0
    for experiment_id in exporter.export():
        downloaded += 1
        print('Downloaded experiment %s' % experiment_id)
    print(
        'Done. Downloaded %d experiments to: %s' % (downloaded, target_dir)
    )
def execute(self, server_info, channel):
    """Print the URL of each of the user's experiments, one per line."""
    stub = export_service_pb2_grpc.TensorBoardExporterServiceStub(channel)
    total = 0
    for experiment_id in exporter_lib.list_experiments(stub):
        total += 1
        print(server_info_lib.experiment_url(server_info, experiment_id))
        sys.stdout.flush()
    # Summary (or hint) on stderr so stdout stays a clean URL list.
    if total:
        sys.stderr.write('Total: %d experiment(s)\n' % total)
    else:
        sys.stderr.write(
            'No experiments. Use `tensorboard dev upload` to get started.\n'
        )
    sys.stderr.flush()
def execute(self, channel):
    """Print the URL of each of the user's experiments, one per line."""
    stub = export_service_pb2_grpc.TensorBoardExporterServiceStub(channel)
    total = 0
    for experiment_id in exporter_lib.list_experiments(stub):
        total += 1
        # TODO(@wchargin): Once #2879 is in, remove this hard-coded URL pattern.
        print('https://tensorboard.dev/experiment/%s/' % experiment_id)
        sys.stdout.flush()
    if total:
        sys.stderr.write('Total: %d experiment(s)\n' % total)
    else:
        sys.stderr.write(
            'No experiments. Use `tensorboard dev upload` to get started.\n'
        )
    sys.stderr.flush()
def execute(self, server_info, channel):
    """List experiments, printing each URL plus a metadata summary table."""
    stub = export_service_pb2_grpc.TensorBoardExporterServiceStub(channel)
    mask = experiment_pb2.ExperimentMask(
        create_time=True,
        update_time=True,
        num_scalars=True,
        num_runs=True,
        num_tags=True,
    )
    total = 0
    for experiment in exporter_lib.list_experiments(stub, fieldmask=mask):
        total += 1
        # The generator may yield a bare experiment ID instead of an
        # Experiment proto (presumably from older servers — confirm with
        # exporter_lib); in that case only the URL can be printed.
        if not isinstance(experiment, experiment_pb2.Experiment):
            print(server_info_lib.experiment_url(server_info, experiment))
            continue
        print(
            server_info_lib.experiment_url(
                server_info, experiment.experiment_id
            )
        )
        rows = [
            ("Name", experiment.name or "[No Name]"),
            ("Description", experiment.description or "[No Description]"),
            ("Id", experiment.experiment_id),
            ("Created", util.format_time(experiment.create_time)),
            ("Updated", util.format_time(experiment.update_time)),
            ("Scalars", str(experiment.num_scalars)),
            ("Runs", str(experiment.num_runs)),
            ("Tags", str(experiment.num_tags)),
        ]
        for label, value in rows:
            print("\t%s %s" % (label.ljust(12), value))
        sys.stdout.flush()
    if total:
        sys.stderr.write("Total: %d experiment(s)\n" % total)
    else:
        sys.stderr.write(
            "No experiments. Use `tensorboard dev upload` to get started.\n"
        )
    sys.stderr.flush()
def execute(self, server_info, channel):
    """Download every experiment into `self.output_dir`.

    Partial progress is kept on a timeout: the error is reported and
    the count of experiments downloaded so far is still printed.

    Raises:
      base_plugin.FlagsError: if the output directory already exists.
    """
    api_client = export_service_pb2_grpc.TensorBoardExporterServiceStub(
        channel)
    outdir = self.output_dir
    try:
        exporter = exporter_lib.TensorBoardExporter(api_client, outdir)
    except exporter_lib.OutputDirectoryExistsError:
        msg = "Output directory already exists: %r" % outdir
        raise base_plugin.FlagsError(msg)
    num_experiments = 0
    try:
        for experiment_id in exporter.export():
            num_experiments += 1
            print("Downloaded experiment %s" % experiment_id)
    except exporter_lib.GrpcTimeoutException as e:
        # Fixed: this is the export (download) path, so the failure
        # message should not blame the "Uploader".
        print(
            "\nExport has failed because of a timeout error. Please reach "
            "out via e-mail to [email protected] to get help "
            "completing your export of experiment %s." % e.experiment_id)
    print("Done. Downloaded %d experiments to: %s"
          % (num_experiments, outdir))
def execute(self, server_info, channel):
    """List experiments, printing each URL plus a metadata summary table."""
    stub = export_service_pb2_grpc.TensorBoardExporterServiceStub(channel)
    mask = export_service_pb2.ExperimentMask(
        create_time=True,
        update_time=True,
        num_scalars=True,
        num_runs=True,
        num_tags=True,
    )
    total = 0
    for experiment in exporter_lib.list_experiments(stub, fieldmask=mask):
        total += 1
        # The generator may yield a bare experiment ID instead of an
        # Experiment proto (presumably from older servers — confirm with
        # exporter_lib); in that case only the URL can be printed.
        if not isinstance(experiment, export_service_pb2.Experiment):
            print(server_info_lib.experiment_url(server_info, experiment))
            continue
        print(
            server_info_lib.experiment_url(
                server_info, experiment.experiment_id
            )
        )
        rows = [
            ('Id', experiment.experiment_id),
            ('Created', util.format_time(experiment.create_time)),
            ('Updated', util.format_time(experiment.update_time)),
            ('Scalars', str(experiment.num_scalars)),
            ('Runs', str(experiment.num_runs)),
            ('Tags', str(experiment.num_tags)),
        ]
        for label, value in rows:
            print('\t%s %s' % (label.ljust(10), value))
        sys.stdout.flush()
    if total:
        sys.stderr.write('Total: %d experiment(s)\n' % total)
    else:
        sys.stderr.write(
            'No experiments. Use `tensorboard dev upload` to get started.\n'
        )
    sys.stderr.flush()