Example #1
def _get_intent(flags):
    """Determines what the program should do (upload, delete, ...).

    Args:
      flags: An `argparse.Namespace` with the parsed flags.

    Returns:
      An `_Intent` instance.

    Raises:
      base_plugin.FlagsError: If the command-line `flags` do not correctly
        specify an intent.
    """
    cmd = getattr(flags, _SUBCOMMAND_FLAG, None)
    if cmd is None:
        raise base_plugin.FlagsError("Must specify subcommand (try --help).")
    if cmd == _SUBCOMMAND_KEY_UPLOAD:
        if flags.logdir:
            return _UploadIntent(os.path.expanduser(flags.logdir))
        else:
            raise base_plugin.FlagsError(
                "Must specify directory to upload via `--logdir`."
            )
    elif cmd == _SUBCOMMAND_KEY_DELETE:
        if flags.experiment_id:
            return _DeleteExperimentIntent(flags.experiment_id)
        else:
            raise base_plugin.FlagsError(
                "Must specify experiment to delete via `--experiment_id`."
            )
    elif cmd == _SUBCOMMAND_KEY_LIST:
        return _ListIntent()
    elif cmd == _SUBCOMMAND_KEY_EXPORT:
        if flags.outdir:
            return _ExportIntent(flags.outdir)
        else:
            raise base_plugin.FlagsError(
                "Must specify output directory via `--outdir`."
            )
    elif cmd == _SUBCOMMAND_KEY_AUTH:
        auth_cmd = getattr(flags, _AUTH_SUBCOMMAND_FLAG, None)
        if auth_cmd is None:
            raise base_plugin.FlagsError("Must specify a subcommand to `auth`.")
        if auth_cmd == _AUTH_SUBCOMMAND_KEY_REVOKE:
            return _AuthRevokeIntent()
        else:
            raise AssertionError("Unknown auth subcommand %r" % (auth_cmd,))
    else:
        raise AssertionError("Unknown subcommand %r" % (cmd,))
Example #2
def _run(flags):
    """Runs the main uploader program given parsed flags.

    Args:
      flags: An `argparse.Namespace`.
    """

    logging.set_stderrthreshold(logging.WARNING)
    intent = _get_intent(flags)

    store = auth.CredentialsStore()
    if isinstance(intent, _AuthRevokeIntent):
        store.clear()
        sys.stderr.write("Logged out of uploader.\n")
        sys.stderr.flush()
        return
    # TODO(b/141723268): maybe reconfirm Google Account prior to reuse.
    credentials = store.read_credentials()
    if not credentials:
        _prompt_for_user_ack(intent)
        client_config = json.loads(auth.OAUTH_CLIENT_CONFIG)
        flow = auth.build_installed_app_flow(client_config)
        credentials = flow.run(force_console=flags.auth_force_console)
        sys.stderr.write("\n")  # Extra newline after auth flow messages.
        store.write_credentials(credentials)

    channel_options = None
    if flags.grpc_creds_type == "local":
        channel_creds = grpc.local_channel_credentials()
    elif flags.grpc_creds_type == "ssl":
        channel_creds = grpc.ssl_channel_credentials()
    elif flags.grpc_creds_type == "ssl_dev":
        channel_creds = grpc.ssl_channel_credentials(dev_creds.DEV_SSL_CERT)
        channel_options = [("grpc.ssl_target_name_override", "localhost")]
    else:
        msg = "Invalid --grpc_creds_type %s" % flags.grpc_creds_type
        raise base_plugin.FlagsError(msg)

    try:
        server_info = _get_server_info(flags)
    except server_info_lib.CommunicationError as e:
        _die(str(e))
    _handle_server_info(server_info)

    if not server_info.api_server.endpoint:
        logging.error("Server info response: %s", server_info)
        _die("Internal error: frontend did not specify an API server")
    composite_channel_creds = grpc.composite_channel_credentials(
        channel_creds, auth.id_token_call_credentials(credentials)
    )

    # TODO(@nfelt): In the `_UploadIntent` case, consider waiting until
    # logdir exists to open channel.
    channel = grpc.secure_channel(
        server_info.api_server.endpoint,
        composite_channel_creds,
        options=channel_options,
    )
    with channel:
        intent.execute(server_info, channel)
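The channel setup above composes channel-level credentials (how the connection is secured) with call-level credentials (who is making the request). A minimal sketch of that composition with plain grpcio, using a placeholder endpoint and `grpc.access_token_call_credentials` as a stand-in for `auth.id_token_call_credentials`:

import grpc

# Channel credentials secure the transport (TLS); call credentials attach an
# identity (here a bearer token) to every RPC made on the channel.
channel_creds = grpc.ssl_channel_credentials()
call_creds = grpc.access_token_call_credentials("placeholder-token")
composite_creds = grpc.composite_channel_credentials(channel_creds, call_creds)

# The ssl_target_name_override option is only needed for dev setups where the
# server presents a certificate issued for a different hostname.
channel = grpc.secure_channel(
    "api.example.com:443",
    composite_creds,
    options=[("grpc.ssl_target_name_override", "localhost")],
)
with channel:
    pass  # a real client would build a service stub here and issue RPCs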
Example #3
 def execute(self, server_info, channel):
     api_client = write_service_pb2_grpc.TensorBoardWriterServiceStub(
         channel)
     experiment_id = self.experiment_id
     _die_if_bad_experiment_name(self.name)
     _die_if_bad_experiment_description(self.description)
     if not experiment_id:
         raise base_plugin.FlagsError(
             "Must specify a non-empty experiment ID to modify.")
     try:
         uploader_lib.update_experiment_metadata(
             api_client,
             experiment_id,
             name=self.name,
             description=self.description,
         )
     except uploader_lib.ExperimentNotFoundError:
         _die("No such experiment %s. Either it never existed or it has "
              "already been deleted." % experiment_id)
     except uploader_lib.PermissionDeniedError:
         _die("Cannot modify experiment %s because it is owned by a "
              "different user." % experiment_id)
     except uploader_lib.InvalidArgumentError as e:
         _die("Server cannot modify experiment as requested: %s" % e)
     except grpc.RpcError as e:
         _die("Internal error modifying experiment: %s" % e)
     logging.info("Modified experiment %s.", experiment_id)
     if self.name is not None:
         logging.info("Set name to %r", self.name)
     if self.description is not None:
         logging.info("Set description to %r", repr(self.description))
Example #4
    def fix_flags(self, flags):
        """Fixes Debugger related flags.

    Raises:
      ValueError: If both the `debugger_data_server_grpc_port` and
        `debugger_port` flags are specified as >= 0.
    """
        # Check that not both grpc port flags are specified.
        if flags.debugger_data_server_grpc_port > 0 and flags.debugger_port > 0:
            raise base_plugin.FlagsError(
                '--debugger_data_server_grpc_port and --debugger_port are mutually '
                'exclusive. Do not use both of them at the same time.')
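A short, self-contained sketch of the same mutual-exclusion check with plain argparse; the two flag names come from the snippet above, while the parser and the use of ValueError (instead of base_plugin.FlagsError) are illustrative assumptions:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--debugger_port", type=int, default=-1)
parser.add_argument("--debugger_data_server_grpc_port", type=int, default=-1)


def fix_flags(flags):
    """Rejects flag combinations that request both debugger ports at once."""
    if flags.debugger_data_server_grpc_port > 0 and flags.debugger_port > 0:
        raise ValueError(
            "--debugger_data_server_grpc_port and --debugger_port are "
            "mutually exclusive. Do not use both of them at the same time."
        )


fix_flags(parser.parse_args(["--debugger_port", "6064"]))  # passes

argparse's `add_mutually_exclusive_group()` could enforce this at parse time, but a post-parse check lets the error surface as the plugin's own flags error type.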
Example #5
 def execute(self, server_info, channel):
     api_client = write_service_pb2_grpc.TensorBoardWriterServiceStub(
         channel)
     if not self.experiment_id_list:
         raise base_plugin.FlagsError(
             "Must specify at least one experiment ID to delete.")
     # Map from eid to (msg, action) pair.
     results = {}
     NO_ACTION = "NO_ACTION"
     DIE_ACTION = "DIE_ACTION"
     for experiment_id in set(self.experiment_id_list):
         if not experiment_id:
             results[experiment_id] = (
                 "Skipping empty experiment_id.",
                 NO_ACTION,
             )
             continue
         try:
             uploader_lib.delete_experiment(api_client, experiment_id)
             results[experiment_id] = (
                 "Deleted experiment %s." % experiment_id,
                 NO_ACTION,
             )
         except uploader_lib.ExperimentNotFoundError:
             results[experiment_id] = (
                 "No such experiment %s. Either it never existed or it has "
                 "already been deleted." % experiment_id,
                 DIE_ACTION,
             )
         except uploader_lib.PermissionDeniedError:
             results[experiment_id] = (
                 "Cannot delete experiment %s because it is owned by a "
                 "different user." % experiment_id,
                 DIE_ACTION,
             )
         except grpc.RpcError as e:
             results[experiment_id] = (
                 ("Internal error deleting experiment %s: %s." %
                  (experiment_id, e)),
                 DIE_ACTION,
             )
     # Walk the receipt: print successes and aggregate any failures.
     any_die_action = False
     err_msg = ""
     for (msg, action) in results.values():
         if action == NO_ACTION:
             print(msg)
         if action == DIE_ACTION:
             err_msg += msg + "\n"
             any_die_action = True
     if any_die_action:
         _die(err_msg)
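The delete-many intent above uses a "receipt" pattern: every experiment ID is processed, each outcome is recorded as a (message, action) pair, successes are printed, and all failures are reported in a single non-zero exit at the end. A standalone sketch with a hypothetical `delete` operation:

import sys

NO_ACTION = "NO_ACTION"
DIE_ACTION = "DIE_ACTION"


def delete(item):
    # Hypothetical operation that fails for one specific item.
    if item == "exp2":
        raise KeyError(item)


results = {}
for item in ("exp1", "exp2", "exp3"):
    try:
        delete(item)
        results[item] = ("Deleted %s." % item, NO_ACTION)
    except KeyError:
        results[item] = ("No such item %s." % item, DIE_ACTION)

# Walk the receipt: report successes immediately, aggregate failures, and
# exit non-zero only once at the end.
errors = [msg for (msg, action) in results.values() if action == DIE_ACTION]
for msg, action in results.values():
    if action == NO_ACTION:
        print(msg)
if errors:
    sys.stderr.write("\n".join(errors) + "\n")
    sys.exit(1)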
Example #6
 def execute(self, channel):
   api_client = export_service_pb2_grpc.TensorBoardExporterServiceStub(channel)
   outdir = self.output_dir
   try:
     exporter = exporter_lib.TensorBoardExporter(api_client, outdir)
   except exporter_lib.OutputDirectoryExistsError:
     msg = 'Output directory already exists: %r' % outdir
     raise base_plugin.FlagsError(msg)
   num_experiments = 0
   for experiment_id in exporter.export():
     num_experiments += 1
     print('Downloaded experiment %s' % experiment_id)
   print('Done. Downloaded %d experiments to: %s' % (num_experiments, outdir))
Example #7
def _run(flags):
    """Runs the main uploader program given parsed flags.

    Args:
      flags: An `argparse.Namespace`.
    """

    logging.set_stderrthreshold(logging.WARNING)
    intent = _get_intent(flags)

    store = auth.CredentialsStore()
    if isinstance(intent, _AuthRevokeIntent):
        store.clear()
        sys.stderr.write('Logged out of uploader.\n')
        sys.stderr.flush()
        return
    # TODO(b/141723268): maybe reconfirm Google Account prior to reuse.
    credentials = store.read_credentials()
    if not credentials:
        _prompt_for_user_ack(intent)
        client_config = json.loads(auth.OAUTH_CLIENT_CONFIG)
        flow = auth.build_installed_app_flow(client_config)
        credentials = flow.run(force_console=flags.auth_force_console)
        sys.stderr.write('\n')  # Extra newline after auth flow messages.
        store.write_credentials(credentials)

    channel_options = None
    if flags.grpc_creds_type == 'local':
        channel_creds = grpc.local_channel_credentials()
    elif flags.grpc_creds_type == 'ssl':
        channel_creds = grpc.ssl_channel_credentials()
    elif flags.grpc_creds_type == 'ssl_dev':
        channel_creds = grpc.ssl_channel_credentials(dev_creds.DEV_SSL_CERT)
        channel_options = [('grpc.ssl_target_name_override', 'localhost')]
    else:
        msg = 'Invalid --grpc_creds_type %s' % flags.grpc_creds_type
        raise base_plugin.FlagsError(msg)

    composite_channel_creds = grpc.composite_channel_credentials(
        channel_creds, auth.id_token_call_credentials(credentials))

    # TODO(@nfelt): In the `_UploadIntent` case, consider waiting until
    # logdir exists to open channel.
    channel = grpc.secure_channel(flags.endpoint,
                                  composite_channel_creds,
                                  options=channel_options)
    with channel:
        intent.execute(channel)
Example #8
 def execute(self, server_info, channel):
     api_client = write_service_pb2_grpc.TensorBoardWriterServiceStub(
         channel)
     experiment_id = self.experiment_id
     if not experiment_id:
         raise base_plugin.FlagsError(
             "Must specify a non-empty experiment ID to delete.")
     try:
         uploader_lib.delete_experiment(api_client, experiment_id)
     except uploader_lib.ExperimentNotFoundError:
         _die("No such experiment %s. Either it never existed or it has "
              "already been deleted." % experiment_id)
     except uploader_lib.PermissionDeniedError:
         _die("Cannot delete experiment %s because it is owned by a "
              "different user." % experiment_id)
     except grpc.RpcError as e:
         _die("Internal error deleting experiment: %s" % e)
     print("Deleted experiment %s." % experiment_id)
Example #9
 def execute(self, server_info, channel):
     api_client = export_service_pb2_grpc.TensorBoardExporterServiceStub(
         channel)
     outdir = self.output_dir
     try:
         exporter = exporter_lib.TensorBoardExporter(api_client, outdir)
     except exporter_lib.OutputDirectoryExistsError:
         msg = "Output directory already exists: %r" % outdir
         raise base_plugin.FlagsError(msg)
     num_experiments = 0
     try:
         for experiment_id in exporter.export():
             num_experiments += 1
             print("Downloaded experiment %s" % experiment_id)
     except exporter_lib.GrpcTimeoutException as e:
         print(
             "\nUploader has failed because of a timeout error.  Please reach "
             "out via e-mail to [email protected] to get help "
             "completing your export of experiment %s." % e.experiment_id)
     print("Done. Downloaded %d experiments to: %s" %
           (num_experiments, outdir))
Example #10
def _get_intent(flags, experiment_url_callback=None):
    """Determines what the program should do (upload, delete, ...).

    Args:
      flags: An `argparse.Namespace` with the parsed flags.
      experiment_url_callback: A function accepting a single string argument
        containing the full TB.dev URL of the uploaded experiment.

    Returns:
      An `_Intent` instance.

    Raises:
      base_plugin.FlagsError: If the command-line `flags` do not correctly
        specify an intent.
    """
    cmd = getattr(flags, flags_parser.SUBCOMMAND_FLAG, None)
    if cmd is None:
        raise base_plugin.FlagsError("Must specify subcommand (try --help).")
    if cmd == flags_parser.SUBCOMMAND_KEY_UPLOAD:
        if flags.logdir:
            return UploadIntent(
                os.path.expanduser(flags.logdir),
                name=flags.name,
                description=flags.description,
                verbosity=flags.verbose,
                dry_run=flags.dry_run,
                one_shot=flags.one_shot,
                experiment_url_callback=experiment_url_callback,
            )
        else:
            raise base_plugin.FlagsError(
                "Must specify directory to upload via `--logdir`.")
    if cmd == flags_parser.SUBCOMMAND_KEY_UPDATE_METADATA:
        if flags.experiment_id:
            if flags.name is not None or flags.description is not None:
                return _UpdateMetadataIntent(
                    flags.experiment_id,
                    name=flags.name,
                    description=flags.description,
                )
            else:
                raise base_plugin.FlagsError(
                    "Must specify either `--name` or `--description`.")
        else:
            raise base_plugin.FlagsError(
                "Must specify experiment to modify via `--experiment_id`.")
    elif cmd == flags_parser.SUBCOMMAND_KEY_DELETE:
        if flags.experiment_id:
            return _DeleteExperimentIntent(flags.experiment_id)
        else:
            raise base_plugin.FlagsError(
                "Must specify experiment to delete via `--experiment_id`.")
    elif cmd == flags_parser.SUBCOMMAND_KEY_LIST:
        return _ListIntent(json=flags.json)
    elif cmd == flags_parser.SUBCOMMAND_KEY_EXPORT:
        if flags.outdir:
            return _ExportIntent(flags.outdir)
        else:
            raise base_plugin.FlagsError(
                "Must specify output directory via `--outdir`.")
    elif cmd == flags_parser.SUBCOMMAND_KEY_AUTH:
        auth_cmd = getattr(flags, flags_parser.AUTH_SUBCOMMAND_FLAG, None)
        if auth_cmd is None:
            raise base_plugin.FlagsError(
                "Must specify a subcommand to `auth`.")
        if auth_cmd == flags_parser.AUTH_SUBCOMMAND_KEY_REVOKE:
            return _AuthRevokeIntent()
        else:
            raise AssertionError("Unknown auth subcommand %r" % (auth_cmd, ))
    else:
        raise AssertionError("Unknown subcommand %r" % (cmd, ))
Example #11
def standard_tensorboard_wsgi(flags, plugin_loaders, assets_zip_provider):
    """Construct a TensorBoardWSGIApp with standard plugins and multiplexer.

  Args:
    flags: An argparse.Namespace containing TensorBoard CLI flags.
    plugin_loaders: A list of TBLoader instances.
    assets_zip_provider: See TBContext documentation for more information.

  Returns:
    The new TensorBoard WSGI application.

  :type plugin_loaders: list[base_plugin.TBLoader]
  :rtype: TensorBoardWSGI
  """
    event_file_active_filter = _get_event_file_active_filter(flags)
    multiplexer = event_multiplexer.EventMultiplexer(
        size_guidance=DEFAULT_SIZE_GUIDANCE,
        tensor_size_guidance=tensor_size_guidance_from_flags(flags),
        purge_orphaned_data=flags.purge_orphaned_data,
        max_reload_threads=flags.max_reload_threads,
        event_file_active_filter=event_file_active_filter)
    if flags.generic_data == 'false':
        data_provider = None
    else:
        data_provider = event_data_provider.MultiplexerDataProvider(
            multiplexer)
    loading_multiplexer = multiplexer
    reload_interval = flags.reload_interval
    # For db import op mode, prefer reloading in a child process. See
    # https://github.com/tensorflow/tensorboard/issues/1467
    reload_task = flags.reload_task
    if reload_task == 'auto' and flags.db_import and flags.db_import_use_op:
        reload_task = 'process'
    db_uri = flags.db
    # For DB import mode, create a DB file if we weren't given one.
    if flags.db_import and not flags.db:
        tmpdir = tempfile.mkdtemp(prefix='tbimport')
        atexit.register(shutil.rmtree, tmpdir)
        db_uri = 'sqlite:%s/tmp.sqlite' % tmpdir
    db_module, db_connection_provider = get_database_info(db_uri)
    if flags.db_import:
        # DB import mode.
        if db_module != sqlite3:
            raise base_plugin.FlagsError(
                '--db_import is only compatible with sqlite DBs')
        logger.info('Importing logdir into DB at %s', db_uri)
        loading_multiplexer = db_import_multiplexer.DbImportMultiplexer(
            db_connection_provider=db_connection_provider,
            purge_orphaned_data=flags.purge_orphaned_data,
            max_reload_threads=flags.max_reload_threads,
            use_import_op=flags.db_import_use_op)
    elif flags.db:
        # DB read-only mode, never load event logs.
        reload_interval = -1
    plugin_name_to_instance = {}
    context = base_plugin.TBContext(
        data_provider=data_provider,
        db_module=db_module,
        db_connection_provider=db_connection_provider,
        db_uri=db_uri,
        flags=flags,
        logdir=flags.logdir,
        multiplexer=multiplexer,
        assets_zip_provider=assets_zip_provider,
        plugin_name_to_instance=plugin_name_to_instance,
        window_title=flags.window_title)
    plugins = []
    for loader in plugin_loaders:
        plugin = loader.load(context)
        if plugin is None:
            continue
        plugins.append(plugin)
        plugin_name_to_instance[plugin.plugin_name] = plugin
    return TensorBoardWSGIApp(flags.logdir, plugins, loading_multiplexer,
                              reload_interval, flags.path_prefix, reload_task)
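One detail worth isolating from the function above is the throwaway database used by DB import mode when no `--db` flag is given: a temporary directory is created and scheduled for removal at interpreter exit, and the "sqlite:<path>" URI from the snippet points into it. A minimal sketch of that arrangement using only the standard library:

import atexit
import shutil
import sqlite3
import tempfile

# Create a scratch directory and make sure it is removed when the process
# exits, mirroring the tmpdir handling in the function above.
tmpdir = tempfile.mkdtemp(prefix="tbimport")
atexit.register(shutil.rmtree, tmpdir)

db_uri = "sqlite:%s/tmp.sqlite" % tmpdir
print("Using throwaway DB at", db_uri)

# The "sqlite:" prefix is the URI convention used above; the sqlite3 module
# itself only needs the filesystem path.
conn = sqlite3.connect("%s/tmp.sqlite" % tmpdir)
conn.close()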