class FakeCommand(Command):
    """Fake command class for overriding command instance state."""

    # Minimal command specification; see base class for field meanings.
    command_spec = Command.CreateCommandSpec(
        'fake',
        command_name_aliases=[],
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='fake',
        help_name_aliases=[],
        help_type='command_help',
        help_one_line_summary='Something to take up space.',
        help_text='Something else to take up space.',
        subcommand_help_text={},
    )

    def __init__(self, do_parallel):
        """Populates fake command state without running Command.__init__.

        Args:
          do_parallel: Bool indicating whether parallel operations are enabled.
        """
        self.bucket_storage_uri_class = BucketStorageUri
        # gs speaks JSON and s3 speaks XML; build the API map accordingly.
        api_support = {'gs': ['JSON'], 's3': ['XML']}
        api_default = {'gs': 'JSON', 's3': 'XML'}
        self.gsutil_api_map = cs_api_map.GsutilApiMapFactory.GetApiMap(
            cs_api_map.GsutilApiClassMapFactory, api_support, api_default)
        self.logger = CreateGsutilLogger('FakeCommand')
        self.parallel_operations = do_parallel
        self.failure_count = 0
        self.gsutil_api = MockCloudApi()
        self.multiprocessing_is_available = (
            CheckMultiprocessingAvailableAndInit().is_available)
        self.debug = 0
        self.user_project = None
# --- Example 2 ---
class FakeCommandWithCompleters(Command):
  """Command with various completer types."""

  # One argument of each completer-bearing kind, in a fixed order that the
  # completer tests depend on.
  command_spec = Command.CreateCommandSpec(
      'fake2',
      argparse_arguments=[
          CommandArgument.MakeZeroOrMoreCloudURLsArgument(),
          CommandArgument.MakeZeroOrMoreFileURLsArgument(),
          CommandArgument.MakeZeroOrMoreCloudOrFileURLsArgument(),
          CommandArgument.MakeFreeTextArgument(),
          CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument(),
          CommandArgument.MakeFileURLOrCannedACLArgument(),
      ])

  help_spec = Command.HelpSpec(help_name='fake2',
                               help_name_aliases=[],
                               help_type='command_help',
                               help_one_line_summary='fake command for tests',
                               help_text='fake command for tests',
                               subcommand_help_text={})

  def __init__(self):
    # Deliberately skip Command.__init__; no instance state is needed.
    pass
# --- Example 3 ---
class CatCommand(Command):
    """Implementation of gsutil cat command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'cat',
        command_name_aliases=[],
        usage_synopsis=_SYNOPSIS,
        min_args=1,
        max_args=NO_MAX,
        supported_sub_args='hr:',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=0,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments=[CommandArgument.MakeZeroOrMoreCloudURLsArgument()])
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='cat',
        help_name_aliases=[],
        help_type='command_help',
        help_one_line_summary='Concatenate object content to stdout',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={},
    )

    # Command entry point.
    def RunCommand(self):
        """Command entry point for the cat command."""
        show_header = False
        start_byte = 0
        end_byte = None
        for flag, flag_value in self.sub_opts or []:
            if flag == '-h':
                show_header = True
            elif flag == '-r':
                # Accept 'start-end', 'start-', or '-suffixlen' byte ranges.
                request_range = flag_value.strip()
                range_match = re.match(
                    '^(?P<start>[0-9]+)-(?P<end>[0-9]*)$|^(?P<endslice>-[0-9]+)$',
                    request_range)
                if not range_match:
                    raise CommandException('Invalid range (%s)' %
                                           request_range)
                if range_match.group('start'):
                    start_byte = long(range_match.group('start'))
                if range_match.group('end'):
                    end_byte = long(range_match.group('end'))
                if range_match.group('endslice'):
                    # A negative start byte means 'the last N bytes'.
                    start_byte = long(range_match.group('endslice'))
            else:
                self.RaiseInvalidArgumentException()

        return CatHelper(self).CatUrlStrings(self.args,
                                             show_header=show_header,
                                             start_byte=start_byte,
                                             end_byte=end_byte)
# --- Example 4 ---
class MvCommand(Command):
    """Implementation of gsutil mv command.

     Note that there is no atomic rename operation - this command is simply
     a shorthand for 'cp' followed by 'rm'.
  """

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'mv',
        command_name_aliases=['move', 'ren', 'rename'],
        min_args=2,
        max_args=NO_MAX,
        # Flags for mv are passed through to cp.
        supported_sub_args=CP_SUB_ARGS,
        file_url_ok=True,
        provider_url_ok=False,
        urls_start_arg=0,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='mv',
        help_name_aliases=['move', 'rename'],
        help_type='command_help',
        help_one_line_summary='Move/rename objects and/or subdirectories',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={},
    )

    def RunCommand(self):
        """Command entry point for the mv command."""
        # Refuse to delete a source bucket; users must do that explicitly as
        # a separate operation after moving its contents.
        destination = self.args[-1]
        for src_arg in self.args[0:-1]:
            src_url = StorageUrlFromString(src_arg)
            if src_url.IsCloudUrl() and (src_url.IsBucket() or
                                         src_url.IsProvider()):
                raise CommandException(
                    'You cannot move a source bucket using the mv '
                    'command. If you meant to move\nall objects in '
                    'the bucket, you can use a command like:\n'
                    '\tgsutil mv %s/* %s' % (src_arg, destination))

        # Delegate to cp, prepending the undocumented (internal use-only) -M
        # option: it deletes each original object after a successful copy and
        # yields Unix-mv-style naming behavior (see comments in
        # ConstructDstUrl). Command-line opts go in front of args so they are
        # picked up by the cp and rm commands (e.g., for -p option).
        cp_args = ['-M']
        if self.recursion_requested:
            cp_args.append('-R')
        cp_args.extend(self.unparsed_args)
        self.command_runner.RunNamedCommand('cp', cp_args, self.headers,
                                            self.debug,
                                            self.parallel_operations)

        return 0
# --- Example 5 ---
class AclCommand(Command):
    """Implementation of gsutil acl command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'acl',
        command_name_aliases=['getacl', 'setacl', 'chacl'],
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='afRrg:u:d:',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='acl',
        help_name_aliases=['getacl', 'setacl', 'chmod', 'chacl'],
        help_type='command_help',
        help_one_line_summary='Get, set, or change bucket and/or object ACLs',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'get': _get_help_text,
            'set': _set_help_text,
            'ch': _ch_help_text
        },
    )

    def _CalculateUrlsStartArg(self):
        """Returns the index in self.args where the URL arguments begin."""
        if not self.args:
            self._RaiseWrongNumberOfArgumentsException()
        # The 'set' subcommand takes an ACL argument before the URLs.
        set_requested = (self.args[0].lower() == 'set' or
                         self.command_alias_used == 'setacl')
        return 1 if set_requested else 0

    def _SetAcl(self):
        """Parses options and sets ACLs on the specified buckets/objects."""
        self.continue_on_error = False
        for flag, _ in self.sub_opts or []:
            if flag == '-a':
                self.all_versions = True
            elif flag == '-f':
                self.continue_on_error = True
            elif flag in ('-r', '-R'):
                self.recursion_requested = True
        try:
            self.SetAclCommandHelper(SetAclFuncWrapper, SetAclExceptionHandler)
        except AccessDeniedException:
            # Warn about service-account limitations, then propagate.
            self._WarnServiceAccounts()
            raise
        if not self.everything_set_okay:
            raise CommandException('ACLs for some objects could not be set.')
# --- Example 6 ---
class ConfigCommand(Command):
    """Implementation of gsutil config command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'config',
        command_name_aliases=['cfg', 'conf', 'configure'],
        usage_synopsis=_SYNOPSIS,
        min_args=0,
        max_args=0,
        supported_sub_args='habefwrs:o:',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=0,
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='config',
        help_name_aliases=['cfg', 'conf', 'configure', 'proxy', 'aws', 's3'],
        help_type='command_help',
        help_one_line_summary=(
            'Obtain credentials and create configuration file'),
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={},
    )

    def _OpenConfigFile(self, file_path):
        """Creates and opens a configuration file for writing.

    The file is created with mode 0600, and attempts to open existing files
    will fail (the latter is important to prevent symlink attacks).

    It is the caller's responsibility to close the file.

    Args:
      file_path: Path of the file to be created.

    Returns:
      A writable file object for the opened file.

    Raises:
      CommandException: if an error occurred when opening the file (including
          when the file already exists).
    """
        # O_EXCL makes creation fail if the path already exists (even as a
        # symlink), which is what prevents symlink attacks.
        open_flags = os.O_RDWR | os.O_CREAT | os.O_EXCL
        # Accommodate Windows; copied from python2.6/tempfile.py.
        if hasattr(os, 'O_NOINHERIT'):
            open_flags |= os.O_NOINHERIT
        try:
            fd = os.open(file_path, open_flags, 0o600)
        except (OSError, IOError) as e:
            raise CommandException('Failed to open %s for writing: %s' %
                                   (file_path, e))
        return os.fdopen(fd, 'w')
# --- Example 7 ---
class FakeCommandWithInvalidCompleter(Command):
  """Command with an invalid completer on an argument."""

  # 'BAD' is not a recognized completer; used to exercise error handling.
  command_spec = Command.CreateCommandSpec(
      'fake1',
      argparse_arguments=[CommandArgument('arg', completer='BAD')])

  help_spec = Command.HelpSpec(
      help_name='fake1',
      help_name_aliases=[],
      help_type='command_help',
      help_one_line_summary='fake command for tests',
      help_text='fake command for tests',
      subcommand_help_text={})

  def __init__(self):
    # Deliberately skip Command.__init__; no instance state is needed.
    pass
# --- Example 8 ---
 def test_help_runs_for_all_commands(self):
     """Runs `gsutil help <name>` for every registered command."""
     # The `help` command can fail under unusual circumstances (e.g. someone
     # adds a new command whose "one-line" summary exceeds the defined
     # character limit), so exercise it against every Command subclass.
     for cmd_class in Command.__subclasses__():
         # RunGsUtil raises an exception if the exit code is non-zero.
         self.RunGsUtil(['help', cmd_class.command_spec.command_name])
# --- Example 9 ---
 def _LoadHelpMaps(self):
     """Returns tuple (help type -> [HelpProviders],
                   help name->HelpProvider dict,
                  )."""
     # Walk gslib/commands and gslib/addlhelp, importing every .py module so
     # that all HelpProvider subclasses get defined.
     for package in ('commands', 'addlhelp'):
         package_dir = os.path.join(self.gsutil_bin_dir, 'gslib', package)
         for filename in os.listdir(package_dir):
             # Handles no-extension files, etc.
             (module_name, ext) = os.path.splitext(filename)
             if ext == '.py':
                 __import__('gslib.%s.%s' % (package, module_name))
     help_type_map = dict(
         (help_type, []) for help_type in gslib.help_provider.ALL_HELP_TYPES)
     help_name_map = {}
     # Only include HelpProvider subclasses in the dict. The Command base
     # class itself is skipped; the help command text lives in its
     # subclasses (in addition to non-Command HelpProviders, like naming.py).
     for provider in itertools.chain(HelpProvider.__subclasses__(),
                                     Command.__subclasses__()):
         if provider is Command:
             continue
         gslib.help_provider.SanityCheck(provider, help_name_map)
         help_name_map[provider.help_spec[HELP_NAME]] = provider
         for alias in provider.help_spec[HELP_NAME_ALIASES]:
             help_name_map[alias] = provider
         help_type_map[provider.help_spec[HELP_TYPE]].append(provider)
     return (help_type_map, help_name_map)
# --- Example 10 ---
 def _LoadHelpMaps(self):
   """Returns tuple (help type -> [HelpProviders],
                     help name->HelpProvider dict,
                    )."""
   # Import every gslib.commands and gslib.addlhelp module so that all
   # HelpProvider subclasses become visible via __subclasses__().
   for subdir in ('commands', 'addlhelp'):
     for fname in os.listdir(os.path.join(gslib.GSLIB_DIR, subdir)):
       # Handles no-extension files, etc.
       module_name, ext = os.path.splitext(fname)
       if ext == '.py':
         __import__('gslib.%s.%s' % (subdir, module_name))
   help_type_map = {}
   for help_type in gslib.help_provider.ALL_HELP_TYPES:
     help_type_map[help_type] = []
   help_name_map = {}
   # Register each provider under its name and all of its aliases. The
   # Command base class itself is skipped: only its subclasses carry help
   # text (in addition to non-Command HelpProviders, like naming.py).
   for provider in itertools.chain(HelpProvider.__subclasses__(),
                                   Command.__subclasses__()):
     if provider is Command:
       continue
     gslib.help_provider.SanityCheck(provider, help_name_map)
     help_name_map[provider.help_spec[HELP_NAME]] = provider
     for alias in provider.help_spec[HELP_NAME_ALIASES]:
       help_name_map[alias] = provider
     help_type_map[provider.help_spec[HELP_TYPE]].append(provider)
   return (help_type_map, help_name_map)
# --- Example 11 ---
  def _LoadHelpMaps(self):
    """Returns tuple (help type -> [HelpProviders],
                      help name->HelpProvider dict,
                     )."""

    # Importing every gslib.commands / gslib.addlhelp submodule registers
    # all HelpProvider subclasses.
    for pkg_name, pkg_path in (('gslib.commands', gslib.commands.__path__),
                               ('gslib.addlhelp', gslib.addlhelp.__path__)):
      for _, module_name, _ in pkgutil.iter_modules(pkg_path):
        __import__('%s.%s' % (pkg_name, module_name))

    help_type_map = dict(
        (help_type, []) for help_type in gslib.help_provider.ALL_HELP_TYPES)
    help_name_map = {}
    # Only include HelpProvider subclasses in the dict; the Command base
    # class itself carries no help text (its subclasses do, alongside
    # non-Command HelpProviders, like naming.py).
    for provider in itertools.chain(HelpProvider.__subclasses__(),
                                    Command.__subclasses__()):
      if provider is Command:
        continue
      gslib.help_provider.SanityCheck(provider, help_name_map)
      help_name_map[provider.help_spec[HELP_NAME]] = provider
      for alias in provider.help_spec[HELP_NAME_ALIASES]:
        help_name_map[alias] = provider
      help_type_map[provider.help_spec[HELP_TYPE]].append(provider)
    return (help_type_map, help_name_map)
# --- Example 12 ---
    def _LoadCommandMap(self):
        """Returns dict mapping each command_name to implementing class."""
        # Import all gslib.commands submodules so every Command subclass is
        # defined before we inspect Command.__subclasses__().
        for _, module_name, _ in pkgutil.iter_modules(gslib.commands.__path__):
            __import__('gslib.commands.%s' % module_name)

        # Register each command class under its canonical name and every
        # alias; only Command subclasses end up in the dict.
        command_map = {}
        for cmd_class in Command.__subclasses__():
            names = [cmd_class.command_spec.command_name]
            names.extend(cmd_class.command_spec.command_name_aliases)
            for name in names:
                command_map[name] = cmd_class
        return command_map
# --- Example 13 ---
  def _LoadCommandMap(self):
    """Returns dict mapping each command_name to implementing class."""
    # Importing the submodules defines every Command subclass.
    for _, module_name, _ in pkgutil.iter_modules(gslib.commands.__path__):
      __import__('gslib.commands.%s' % module_name)

    command_map = {}
    # Map canonical names first, then aliases, to their command classes;
    # only Command subclasses are included.
    for command_class in Command.__subclasses__():
      command_map[command_class.command_spec.command_name] = command_class
      for alias in command_class.command_spec.command_name_aliases:
        command_map[alias] = command_class
    return command_map
# --- Example 14 ---
class FakeCommandWithNestedArguments(object):
  """Fake command whose argparse arguments are nested per subcommand."""

  subcommand_name = 'event'
  subcommand_subname = 'set'
  # Matches the number of CommandArguments in argparse_arguments below.
  num_args = 2

  command_spec = Command.CreateCommandSpec(
      'FakeCommandWithNestedArguments',
      argparse_arguments={
          subcommand_name: {
              subcommand_subname: [
                  CommandArgument('arg1'),
                  CommandArgument('arg2'),
              ]
          }
      })
# --- Example 15 ---
 def _LoadCommandMap(self):
   """Returns dict mapping each command_name to implementing class."""
   # Walk gslib/commands, importing each .py module so that every Command
   # subclass gets defined.
   commands_dir = os.path.join(self.gsutil_bin_dir, 'gslib', 'commands')
   for entry in os.listdir(commands_dir):
     # Handles no-extension files, etc.
     module_name, ext = os.path.splitext(entry)
     if ext == '.py':
       __import__('gslib.commands.%s' % module_name)
   # Register each Command subclass under its name and all of its aliases;
   # only Command subclasses are included.
   command_map = {}
   for cmd_class in Command.__subclasses__():
     command_map[cmd_class.command_spec[COMMAND_NAME]] = cmd_class
     for alias in cmd_class.command_spec[COMMAND_NAME_ALIASES]:
       command_map[alias] = cmd_class
   return command_map
# --- Example 16 ---
 def _LoadCommandMap(self):
     """Returns dict mapping each command_name to implementing class.

     Walks gslib/commands and imports each .py module (which defines the
     Command subclasses), then builds the lookup table keyed by canonical
     command name and every alias.

     Returns:
       Dict mapping command name/alias strings to Command subclasses.
     """
     # Walk gslib/commands and find all commands.
     commands_dir = os.path.join(self.gsutil_bin_dir, 'gslib', 'commands')
     for f in os.listdir(commands_dir):
         # Handles no-extension files, etc.
         (module_name, ext) = os.path.splitext(f)
         if ext == '.py':
             __import__('gslib.commands.%s' % module_name)
     # Only include Command subclasses in the dict. Renamed from 'map' to
     # avoid shadowing the builtin of the same name.
     command_map = {}
     for command in Command.__subclasses__():
         command_map[command.command_spec[COMMAND_NAME]] = command
         for command_name_alias in command.command_spec[
                 COMMAND_NAME_ALIASES]:
             command_map[command_name_alias] = command
     return command_map
# --- Example 17 ---
  def _LoadHelpMaps(self):
    """Returns tuple of help type and help name.

    help type is a dict with key: help type
                             value: list of HelpProviders
    help name is a dict with key: help command name or alias
                             value: HelpProvider

    Returns:
      (help type, help name)
    """

    # Importing every gslib.commands and gslib.addlhelp submodule registers
    # all HelpProvider subclasses.
    for prefix, package_path in (('gslib.commands', gslib.commands.__path__),
                                 ('gslib.addlhelp', gslib.addlhelp.__path__)):
      for _, module_name, _ in pkgutil.iter_modules(package_path):
        __import__('%s.%s' % (prefix, module_name))

    help_type_map = dict(
        (help_type, []) for help_type in gslib.help_provider.ALL_HELP_TYPES)
    help_name_map = {}
    # Register every provider under its name and aliases. The Command base
    # class itself is skipped since only its subclasses carry help text
    # (alongside non-Command HelpProviders, like naming.py).
    for provider in itertools.chain(HelpProvider.__subclasses__(),
                                    Command.__subclasses__()):
      if provider is Command:
        continue
      gslib.help_provider.SanityCheck(provider, help_name_map)
      help_name_map[provider.help_spec.help_name] = provider
      for alias in provider.help_spec.help_name_aliases:
        help_name_map[alias] = provider
      help_type_map[provider.help_spec.help_type].append(provider)
    return (help_type_map, help_name_map)
# --- Example 18 ---
  def _LoadHelpMaps(self):
    """Returns tuple of help type and help name.

    help type is a dict with key: help type
                             value: list of HelpProviders
    help name is a dict with key: help command name or alias
                             value: HelpProvider

    Returns:
      (help type, help name)
    """

    # Import all gslib.commands submodules.
    for _, name, _ in pkgutil.iter_modules(gslib.commands.__path__):
      __import__('gslib.commands.%s' % name)
    # Import all gslib.addlhelp submodules.
    for _, name, _ in pkgutil.iter_modules(gslib.addlhelp.__path__):
      __import__('gslib.addlhelp.%s' % name)

    help_type_map = {}
    help_name_map = {}
    for help_type in gslib.help_provider.ALL_HELP_TYPES:
      help_type_map[help_type] = []
    # Only HelpProvider subclasses belong in the maps. The Command base
    # class itself carries no help text (its subclasses do, alongside
    # non-Command HelpProviders, like naming.py), so it is skipped.
    all_providers = itertools.chain(HelpProvider.__subclasses__(),
                                    Command.__subclasses__())
    for prov in all_providers:
      if prov is Command:
        continue
      gslib.help_provider.SanityCheck(prov, help_name_map)
      spec = prov.help_spec
      help_name_map[spec.help_name] = prov
      for alias in spec.help_name_aliases:
        help_name_map[alias] = prov
      help_type_map[spec.help_type].append(prov)
    return (help_type_map, help_name_map)
# --- Example 19 ---
class StatCommand(Command):
  """Implementation of gsutil stat command."""

  # Command specification. See base class for documentation.
  command_spec = Command.CreateCommandSpec(
      'stat',
      command_name_aliases=[],
      usage_synopsis=_SYNOPSIS,
      min_args=1,
      max_args=NO_MAX,
      supported_sub_args='',
      file_url_ok=False,
      provider_url_ok=False,
      urls_start_arg=0,
      gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
      gs_default_api=ApiSelector.JSON,
      argparse_arguments=[
          CommandArgument.MakeZeroOrMoreCloudURLsArgument()
      ]
  )
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='stat',
      help_name_aliases=[],
      help_type='command_help',
      help_one_line_summary='Display object status',
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={},
  )

  def RunCommand(self):
    """Command entry point for stat command.

    Returns:
      0 if every URL argument matched at least one object, 1 otherwise.

    Raises:
      CommandException: if an argument is not an object URL.
    """
    # Request hash/encryption fields up front; a retry below drops them if
    # the metadata fetch raises EncryptionException.
    stat_fields = ENCRYPTED_FIELDS + UNENCRYPTED_FULL_LISTING_FIELDS
    found_nonmatching_arg = False
    for url_str in self.args:
      # Number of objects matched by this single URL argument.
      arg_matches = 0
      url = StorageUrlFromString(url_str)
      if not url.IsObject():
        raise CommandException('The stat command only works with object URLs')
      try:
        if ContainsWildcard(url_str):
          # Wildcard URL: iterate every matching object listing.
          blr_iter = self.WildcardIterator(url_str).IterObjects(
              bucket_listing_fields=stat_fields)
        else:
          # Literal URL: fetch the single object's metadata directly.
          try:
            single_obj = self.gsutil_api.GetObjectMetadata(
                url.bucket_name, url.object_name, generation=url.generation,
                provider=url.scheme, fields=stat_fields)
          except EncryptionException:
            # Retry without requesting hashes.
            single_obj = self.gsutil_api.GetObjectMetadata(
                url.bucket_name, url.object_name, generation=url.generation,
                provider=url.scheme, fields=UNENCRYPTED_FULL_LISTING_FIELDS)
          # Wrap the result so both branches yield BucketListing objects.
          blr_iter = [BucketListingObject(url, root_object=single_obj)]
        for blr in blr_iter:
          if blr.IsObject():
            arg_matches += 1
            # TODO: Request fewer fields if we're not printing the object.
            if logging.getLogger().isEnabledFor(logging.INFO):
              PrintFullInfoAboutObject(blr, incl_acl=False)
      except AccessDeniedException:
        # Denied access is reported (at INFO level) but does not count as a
        # match for this argument.
        if logging.getLogger().isEnabledFor(logging.INFO):
          sys.stderr.write('You aren\'t authorized to read %s - skipping' %
                           url_str)
      except InvalidUrlError:
        raise
      except NotFoundException:
        # A missing object is handled below via the arg_matches counter.
        pass
      if not arg_matches:
        if logging.getLogger().isEnabledFor(logging.INFO):
          sys.stderr.write(NO_URLS_MATCHED_TARGET % url_str)
        found_nonmatching_arg = True
    if found_nonmatching_arg:
      return 1
    return 0
# --- Example 20 ---
class NotificationCommand(Command):
    """Implementation of gsutil notification command."""

    # Notification names might look like one of these:
    #  canonical form:  projects/_/buckets/bucket/notificationConfigs/3
    #  JSON API form:   b/bucket/notificationConfigs/5
    # Either of the above might start with a / if a user is copying & pasting.
    def _GetNotificationPathRegex(self):
        """Returns the compiled notification-config path regex (lazy init)."""
        if not NotificationCommand._notification_path_regex:
            NotificationCommand._notification_path_regex = re.compile(
                ('/?(projects/[^/]+/)?b(uckets)?/(?P<bucket>[^/]+)/'
                 'notificationConfigs/(?P<notification>[0-9]+)'))
        return NotificationCommand._notification_path_regex

    # Class-level cache for the compiled path regex above.
    _notification_path_regex = None

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'notification',
        command_name_aliases=[
            'notify',
            'notifyconfig',
            'notifications',
            'notif',
        ],
        usage_synopsis=_SYNOPSIS,
        min_args=2,
        max_args=NO_MAX,
        # Fixed: 't:' was previously listed twice in this option string.
        supported_sub_args='i:t:m:of:e:p:s',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'watchbucket': [
                CommandArgument.MakeFreeTextArgument(),
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument(),
            ],
            'stopchannel': [],
            'list': [
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument(),
            ],
            'delete': [
                # Takes a list of one of the following:
                #   notification: projects/_/buckets/bla/notificationConfigs/5,
                #   bucket: gs://foobar
                CommandArgument.MakeZeroOrMoreCloudURLsArgument(),
            ],
            'create': [
                CommandArgument.MakeFreeTextArgument(),  # Cloud Pub/Sub topic
                CommandArgument.MakeNCloudBucketURLsArgument(1),
            ]
        },
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='notification',
        help_name_aliases=[
            'watchbucket',
            'stopchannel',
            'notifyconfig',
        ],
        help_type='command_help',
        help_one_line_summary='Configure object change notification',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'create': _create_help_text,
            'list': _list_help_text,
            'delete': _delete_help_text,
            'watchbucket': _watchbucket_help_text,
            'stopchannel': _stopchannel_help_text,
        },
    )

    def _WatchBucket(self):
        """Creates a watch on a bucket given in self.args.

        Returns:
          0 on success.

        Raises:
          CommandException: if the application URL is not https:// or the
              target is not a gs:// bucket URL.
          AccessDeniedException: if the caller lacks permission to watch.
        """
        self.CheckArguments()
        identifier = None
        client_token = None
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-i':
                    identifier = a
                if o == '-t':
                    client_token = a

        # If the user didn't supply a channel identifier, generate one.
        identifier = identifier or str(uuid.uuid4())
        watch_url = self.args[0]
        bucket_arg = self.args[-1]

        if not watch_url.lower().startswith('https://'):
            raise CommandException(
                'The application URL must be an https:// URL.')

        bucket_url = StorageUrlFromString(bucket_arg)
        if not (bucket_url.IsBucket() and bucket_url.scheme == 'gs'):
            raise CommandException(
                'The %s command can only be used with gs:// bucket URLs.' %
                self.command_name)
        if not bucket_url.IsBucket():
            raise CommandException(
                'URL must name a bucket for the %s command.' %
                self.command_name)

        self.logger.info('Watching bucket %s with application URL %s ...',
                         bucket_url, watch_url)

        try:
            channel = self.gsutil_api.WatchBucket(bucket_url.bucket_name,
                                                  watch_url,
                                                  identifier,
                                                  token=client_token,
                                                  provider=bucket_url.scheme)
        except AccessDeniedException as e:
            self.logger.warning(
                NOTIFICATION_AUTHORIZATION_FAILED_MESSAGE.format(
                    watch_error=str(e), watch_url=watch_url))
            raise

        channel_id = channel.id
        resource_id = channel.resourceId
        client_token = channel.token
        self.logger.info('Successfully created watch notification channel.')
        self.logger.info('Watch channel identifier: %s', channel_id)
        self.logger.info('Canonicalized resource identifier: %s', resource_id)
        self.logger.info('Client state token: %s', client_token)

        return 0

    def _StopChannel(self):
        """Stops a notification channel given by channel and resource IDs.

        Returns:
          0 on success.
        """
        channel_id = self.args[0]
        resource_id = self.args[1]

        self.logger.info('Removing channel %s with resource identifier %s ...',
                         channel_id, resource_id)
        self.gsutil_api.StopChannel(channel_id, resource_id, provider='gs')
        # Fixed typo in the original message ("Succesfully").
        self.logger.info('Successfully removed channel.')

        return 0

    def _ListChannels(self, bucket_arg):
        """Lists active channel watches on a bucket given in self.args.

        Args:
          bucket_arg: gs:// bucket URL string to list channels for.

        Returns:
          0 on success.

        Raises:
          CommandException: if bucket_arg is not a gs:// bucket URL.
        """
        bucket_url = StorageUrlFromString(bucket_arg)
        if not (bucket_url.IsBucket() and bucket_url.scheme == 'gs'):
            raise CommandException(
                'The %s command can only be used with gs:// bucket URLs.' %
                self.command_name)
        if not bucket_url.IsBucket():
            raise CommandException(
                'URL must name a bucket for the %s command.' %
                self.command_name)
        channels = self.gsutil_api.ListChannels(bucket_url.bucket_name,
                                                provider='gs').items
        self.logger.info(
            'Bucket %s has the following active Object Change Notifications:',
            bucket_url.bucket_name)
        for idx, channel in enumerate(channels):
            self.logger.info('\tNotification channel %d:', idx + 1)
            self.logger.info('\t\tChannel identifier: %s', channel.channel_id)
            self.logger.info('\t\tResource identifier: %s',
                             channel.resource_id)
            self.logger.info('\t\tApplication URL: %s', channel.push_url)
            self.logger.info('\t\tCreated by: %s', channel.subscriber_email)
            # creation_time_ms is milliseconds since epoch; convert to seconds.
            self.logger.info(
                '\t\tCreation time: %s',
                str(datetime.fromtimestamp(channel.creation_time_ms / 1000)))

        return 0

    def _Create(self):
        """Creates a notification config (and topic/permissions if needed).

        Returns:
          0 on success.

        Raises:
          CommandException: on bad -m syntax, missing/invalid -f payload
              format, or a non-gs:// bucket argument.
        """
        self.CheckArguments()

        # User-specified options
        pubsub_topic = None
        payload_format = None
        custom_attributes = {}
        event_types = []
        object_name_prefix = None
        should_setup_topic = True

        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-e':
                    event_types.append(a)
                elif o == '-f':
                    payload_format = a
                elif o == '-m':
                    if ':' not in a:
                        raise CommandException(
                            'Custom attributes specified with -m should be of the form '
                            'key:value')
                    # maxsplit=1 so attribute values may themselves contain
                    # ':' (the original unbounded split raised ValueError).
                    key, value = a.split(':', 1)
                    custom_attributes[key] = value
                elif o == '-p':
                    object_name_prefix = a
                elif o == '-s':
                    should_setup_topic = False
                elif o == '-t':
                    pubsub_topic = a

        if payload_format not in PAYLOAD_FORMAT_MAP:
            raise CommandException(
                "Must provide a payload format with -f of either 'json' or 'none'"
            )
        payload_format = PAYLOAD_FORMAT_MAP[payload_format]

        bucket_arg = self.args[-1]

        bucket_url = StorageUrlFromString(bucket_arg)
        if not bucket_url.IsCloudUrl() or not bucket_url.IsBucket():
            raise CommandException(
                "%s %s requires a GCS bucket name, but got '%s'" %
                (self.command_name, self.subcommand_name, bucket_arg))
        if bucket_url.scheme != 'gs':
            raise CommandException(
                'The %s command can only be used with gs:// bucket URLs.' %
                self.command_name)
        bucket_name = bucket_url.bucket_name
        self.logger.debug('Creating notification for bucket %s', bucket_url)

        # Find the project this bucket belongs to
        bucket_metadata = self.gsutil_api.GetBucket(bucket_name,
                                                    fields=['projectNumber'],
                                                    provider=bucket_url.scheme)
        bucket_project_number = bucket_metadata.projectNumber

        # If not specified, choose a sensible default for the Cloud Pub/Sub topic
        # name.
        if not pubsub_topic:
            pubsub_topic = 'projects/%s/topics/%s' % (PopulateProjectId(None),
                                                      bucket_name)
        if not pubsub_topic.startswith('projects/'):
            # If a user picks a topic ID (mytopic) but doesn't pass the whole name (
            # projects/my-project/topics/mytopic ), pick a default project.
            pubsub_topic = 'projects/%s/topics/%s' % (PopulateProjectId(None),
                                                      pubsub_topic)
        self.logger.debug('Using Cloud Pub/Sub topic %s', pubsub_topic)

        just_modified_topic_permissions = False
        if should_setup_topic:
            # Ask GCS for the email address that represents GCS's permission to
            # publish to a Cloud Pub/Sub topic from this project.
            service_account = self.gsutil_api.GetProjectServiceAccount(
                bucket_project_number,
                provider=bucket_url.scheme).email_address
            self.logger.debug('Service account for project %d: %s',
                              bucket_project_number, service_account)
            just_modified_topic_permissions = self._CreateTopic(
                pubsub_topic, service_account)

        # Up to two attempts: a freshly-granted IAM binding can take several
        # seconds to propagate, so a first PublishPermissionDenied is retried.
        for attempt_number in range(0, 2):
            try:
                create_response = self.gsutil_api.CreateNotificationConfig(
                    bucket_name,
                    pubsub_topic=pubsub_topic,
                    payload_format=payload_format,
                    custom_attributes=custom_attributes,
                    event_types=event_types if event_types else None,
                    object_name_prefix=object_name_prefix,
                    provider=bucket_url.scheme)
                break
            except PublishPermissionDeniedException:
                if attempt_number == 0 and just_modified_topic_permissions:
                    # If we have just set the IAM policy, it may take up to 10 seconds to
                    # take effect.
                    self.logger.info(
                        'Retrying create notification in 10 seconds '
                        '(new permissions may take up to 10 seconds to take effect.)'
                    )
                    time.sleep(10)
                else:
                    raise

        notification_name = 'projects/_/buckets/%s/notificationConfigs/%s' % (
            bucket_name, create_response.id)
        self.logger.info('Created notification config %s', notification_name)

        return 0

    def _CreateTopic(self, pubsub_topic, service_account):
        """Assures that a topic exists, creating it if necessary.

    Also adds GCS as a publisher on that bucket, if necessary.

    Args:
      pubsub_topic: name of the Cloud Pub/Sub topic to use/create.
      service_account: the GCS service account that needs publish permission.

    Returns:
      true if we modified IAM permissions, otherwise false.
    """

        pubsub_api = PubsubApi(logger=self.logger)

        # Verify that the Pub/Sub topic exists. If it does not, create it.
        try:
            pubsub_api.GetTopic(topic_name=pubsub_topic)
            self.logger.debug('Topic %s already exists', pubsub_topic)
        except NotFoundException:
            self.logger.debug('Creating topic %s', pubsub_topic)
            pubsub_api.CreateTopic(topic_name=pubsub_topic)
            self.logger.info('Created Cloud Pub/Sub topic %s', pubsub_topic)

        # Verify that the service account is in the IAM policy.
        policy = pubsub_api.GetTopicIamPolicy(topic_name=pubsub_topic)
        binding = Binding(role='roles/pubsub.publisher',
                          members=['serviceAccount:%s' % service_account])

        # This could be more extensive. We could, for instance, check for roles
        # that are stronger that pubsub.publisher, like owner. We could also
        # recurse up the hierarchy looking to see if there are project-level
        # permissions. This can get very complex very quickly, as the caller
        # may not necessarily have access to the project-level IAM policy.
        # There's no danger in double-granting permission just to make sure it's
        # there, though.
        if binding not in policy.bindings:
            policy.bindings.append(binding)
            # transactional safety via etag field.
            pubsub_api.SetTopicIamPolicy(topic_name=pubsub_topic,
                                         policy=policy)
            return True
        else:
            self.logger.debug(
                'GCS already has publish permission to topic %s.',
                pubsub_topic)
            return False

    def _EnumerateNotificationsFromArgs(self,
                                        accept_notification_configs=True):
        """Yields bucket/notification tuples from command-line args.

    Given a list of strings that are bucket names (gs://foo) or notification
    config IDs, yield tuples of bucket names and their associated notifications.

    Args:
      accept_notification_configs: whether notification configs are valid args.
    Yields:
      Tuples of the form (bucket_name, Notification)
    """
        path_regex = self._GetNotificationPathRegex()

        for list_entry in self.args:
            match = path_regex.match(list_entry)
            if match:
                if not accept_notification_configs:
                    raise CommandException(
                        '%s %s accepts only bucket names, but you provided %s'
                        %
                        (self.command_name, self.subcommand_name, list_entry))
                bucket_name = match.group('bucket')
                notification_id = match.group('notification')
                found = False
                for notification in self.gsutil_api.ListNotificationConfigs(
                        bucket_name, provider='gs'):
                    if notification.id == notification_id:
                        yield (bucket_name, notification)
                        found = True
                        break
                if not found:
                    raise NotFoundException('Could not find notification %s' %
                                            list_entry)
            else:
                storage_url = StorageUrlFromString(list_entry)
                if not storage_url.IsCloudUrl():
                    raise CommandException(
                        'The %s command must be used on cloud buckets or notification '
                        'config names.' % self.command_name)
                if storage_url.scheme != 'gs':
                    # Fixed: the format argument was previously missing, so
                    # users saw a literal '%s' in this error message.
                    raise CommandException(
                        'The %s command only works on gs:// buckets.' %
                        self.command_name)
                path = None
                if storage_url.IsProvider():
                    path = 'gs://*'
                elif storage_url.IsBucket():
                    path = list_entry
                if not path:
                    raise CommandException(
                        'The %s command cannot be used on cloud objects, only buckets'
                        % self.command_name)
                for blr in self.WildcardIterator(path).IterBuckets(
                        bucket_fields=['id']):
                    for notification in self.gsutil_api.ListNotificationConfigs(
                            blr.storage_url.bucket_name, provider='gs'):
                        yield (blr.storage_url.bucket_name, notification)

    def _List(self):
        """Lists notification configs (or, with -o, channel watches).

        Returns:
          0 on success.
        """
        self.CheckArguments()
        if self.sub_opts:
            if '-o' in dict(self.sub_opts):
                for bucket_name in self.args:
                    self._ListChannels(bucket_name)
        else:
            for bucket_name, notification in self._EnumerateNotificationsFromArgs(
                    accept_notification_configs=False):
                self._PrintNotificationDetails(bucket_name, notification)
        return 0

    def _PrintNotificationDetails(self, bucket, notification):
        """Prints one notification config in human-readable form."""
        print(
            'projects/_/buckets/{bucket}/notificationConfigs/{notification}\n'
            '\tCloud Pub/Sub topic: {topic}'.format(
                bucket=bucket,
                notification=notification.id,
                topic=notification.topic[len('//pubsub.googleapis.com/'):]))
        if notification.custom_attributes:
            print('\tCustom attributes:')
            for attr in notification.custom_attributes.additionalProperties:
                print('\t\t%s: %s' % (attr.key, attr.value))
        filters = []
        if notification.event_types:
            filters.append('\t\tEvent Types: %s' %
                           ', '.join(notification.event_types))
        if notification.object_name_prefix:
            filters.append("\t\tObject name prefix: '%s'" %
                           notification.object_name_prefix)
        if filters:
            print('\tFilters:')
            for line in filters:
                print(line)
        # Blank line between entries.
        self.logger.info('')

    def _Delete(self):
        """Deletes all notification configs named by the args.

        Returns:
          0 on success.
        """
        for bucket_name, notification in self._EnumerateNotificationsFromArgs(
        ):
            self._DeleteNotification(bucket_name, notification.id)
        return 0

    def _DeleteNotification(self, bucket_name, notification_id):
        """Deletes a single notification config from a bucket."""
        self.gsutil_api.DeleteNotificationConfig(bucket_name,
                                                 notification=notification_id,
                                                 provider='gs')
        return 0

    def _RunSubCommand(self, func):
        """Parses suboptions for a subcommand, then invokes it."""
        try:
            (self.sub_opts,
             self.args) = getopt.getopt(self.args,
                                        self.command_spec.supported_sub_args)
            # Commands with both suboptions and subcommands need to reparse for
            # suboptions, so we log again.
            metrics.LogCommandParams(sub_opts=self.sub_opts)
            return func(self)
        except getopt.GetoptError:
            self.RaiseInvalidArgumentException()

    # Dispatch table mapping subcommand name to unbound handler method.
    SUBCOMMANDS = {
        'create': _Create,
        'list': _List,
        'delete': _Delete,
        'watchbucket': _WatchBucket,
        'stopchannel': _StopChannel
    }

    def RunCommand(self):
        """Command entry point for the notification command."""
        self.subcommand_name = self.args.pop(0)
        if self.subcommand_name in NotificationCommand.SUBCOMMANDS:
            metrics.LogCommandParams(subcommands=[self.subcommand_name])
            return self._RunSubCommand(
                NotificationCommand.SUBCOMMANDS[self.subcommand_name])
        else:
            raise CommandException(
                'Invalid subcommand "%s" for the %s command.' %
                (self.subcommand_name, self.command_name))
# Esempio n. 21
# 0
class RmCommand(Command):
    """Implementation of gsutil rm command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'rm',
        command_name_aliases=['del', 'delete', 'remove'],
        usage_synopsis=_SYNOPSIS,
        min_args=0,
        max_args=NO_MAX,
        supported_sub_args='afIrR',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=0,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments=[CommandArgument.MakeZeroOrMoreCloudURLsArgument()])
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='rm',
        help_name_aliases=['del', 'delete', 'remove'],
        help_type='command_help',
        help_one_line_summary='Remove objects',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={},
    )

    def RunCommand(self):
        """Command entry point for the rm command.

        Returns:
          0 on success.

        Raises:
          CommandException: on argument errors, missing buckets, or removal
              failures (unless -f/continue-on-error applies).
        """
        # self.recursion_requested is initialized in command.py (so it can be
        # checked in parent class for all commands).
        self.continue_on_error = self.parallel_operations
        self.read_args_from_stdin = False
        self.all_versions = False
        if self.sub_opts:
            for o, unused_a in self.sub_opts:
                if o == '-a':
                    self.all_versions = True
                elif o == '-f':
                    self.continue_on_error = True
                elif o == '-I':
                    self.read_args_from_stdin = True
                elif o == '-r' or o == '-R':
                    # Recursive removal implies deleting all versions so the
                    # containing bucket can subsequently be deleted.
                    self.recursion_requested = True
                    self.all_versions = True

        if self.read_args_from_stdin:
            if self.args:
                raise CommandException(
                    'No arguments allowed with the -I flag.')
            url_strs = StdinIterator()
        else:
            if not self.args:
                raise CommandException(
                    'The rm command (without -I) expects at '
                    'least one URL.')
            url_strs = self.args

        # Tracks number of object deletes that failed.
        self.op_failure_count = 0

        # Tracks if any buckets were missing.
        self.bucket_not_found_count = 0

        # Tracks buckets that are slated for recursive deletion.
        bucket_urls_to_delete = []
        self.bucket_strings_to_delete = []

        if self.recursion_requested:
            bucket_fields = ['id']
            for url_str in url_strs:
                url = StorageUrlFromString(url_str)
                if url.IsBucket() or url.IsProvider():
                    for blr in self.WildcardIterator(url_str).IterBuckets(
                            bucket_fields=bucket_fields):
                        bucket_urls_to_delete.append(blr.storage_url)
                        self.bucket_strings_to_delete.append(url_str)

        self.preconditions = PreconditionsFromHeaders(self.headers or {})

        try:
            # Expand wildcards, dirs, buckets, and bucket subdirs in URLs.
            name_expansion_iterator = NameExpansionIterator(
                self.command_name,
                self.debug,
                self.logger,
                self.gsutil_api,
                url_strs,
                self.recursion_requested,
                project_id=self.project_id,
                all_versions=self.all_versions,
                continue_on_error=self.continue_on_error
                or self.parallel_operations)

            seek_ahead_iterator = None
            # Cannot seek ahead with stdin args, since we can only iterate them
            # once without buffering in memory.
            if not self.read_args_from_stdin:
                seek_ahead_iterator = SeekAheadNameExpansionIterator(
                    self.command_name,
                    self.debug,
                    self.GetSeekAheadGsutilApi(),
                    url_strs,
                    self.recursion_requested,
                    all_versions=self.all_versions,
                    project_id=self.project_id)

            # Perform remove requests in parallel (-m) mode, if requested, using
            # configured number of parallel processes and threads. Otherwise,
            # perform requests with sequential function calls in current process.
            self.Apply(
                _RemoveFuncWrapper,
                name_expansion_iterator,
                _RemoveExceptionHandler,
                fail_on_error=(not self.continue_on_error),
                shared_attrs=['op_failure_count', 'bucket_not_found_count'],
                seek_ahead_iterator=seek_ahead_iterator)

        # Assuming the bucket has versioning enabled, url's that don't map to
        # objects should throw an error even with all_versions, since the prior
        # round of deletes only sends objects to a history table.
        # This assumption that rm -a is only called for versioned buckets should be
        # corrected, but the fix is non-trivial.
        except CommandException as e:
            # Don't raise if there are buckets to delete -- it's valid to say:
            #   gsutil rm -r gs://some_bucket
            # if the bucket is empty.
            if _ExceptionMatchesBucketToDelete(self.bucket_strings_to_delete,
                                               e):
                DecrementFailureCount()
            else:
                raise
        # Fixed: this handler used Python 2 'except X, e' syntax, which is a
        # SyntaxError on Python 3 (the exception value was unused anyway).
        except ServiceException:
            if not self.continue_on_error:
                raise

        if self.bucket_not_found_count:
            raise CommandException(
                'Encountered non-existent bucket during listing')

        if self.op_failure_count and not self.continue_on_error:
            raise CommandException('Some files could not be removed.')

        # If this was a gsutil rm -r command covering any bucket subdirs,
        # remove any dir_$folder$ objects (which are created by various web UI
        # tools to simulate folders).
        if self.recursion_requested:
            folder_object_wildcards = []
            for url_str in url_strs:
                url = StorageUrlFromString(url_str)
                if url.IsObject():
                    folder_object_wildcards.append('%s**_$folder$' % url_str)
            if folder_object_wildcards:
                self.continue_on_error = True
                try:
                    name_expansion_iterator = NameExpansionIterator(
                        self.command_name,
                        self.debug,
                        self.logger,
                        self.gsutil_api,
                        folder_object_wildcards,
                        self.recursion_requested,
                        project_id=self.project_id,
                        all_versions=self.all_versions)
                    # When we're removing folder objects, always continue on error
                    self.Apply(_RemoveFuncWrapper,
                               name_expansion_iterator,
                               _RemoveFoldersExceptionHandler,
                               fail_on_error=False)
                except CommandException as e:
                    # Ignore exception from name expansion due to an absent folder file.
                    if not e.reason.startswith(NO_URLS_MATCHED_GENERIC):
                        raise
        # Now that all data has been deleted, delete any bucket URLs.
        for url in bucket_urls_to_delete:
            self.logger.info('Removing %s...', url)

            # Bucket deletion can transiently report non-empty just after the
            # object deletes above; retry briefly to absorb that lag.
            @Retry(NotEmptyException, tries=3, timeout_secs=1)
            def BucketDeleteWithRetry():
                self.gsutil_api.DeleteBucket(url.bucket_name,
                                             provider=url.scheme)

            BucketDeleteWithRetry()

        if self.op_failure_count:
            # Fixed: previously pluralized whenever the count was nonzero,
            # producing "1 files/objects"; only pluralize for count > 1.
            plural_str = 's' if self.op_failure_count > 1 else ''
            raise CommandException(
                '%d file%s/object%s could not be removed.' %
                (self.op_failure_count, plural_str, plural_str))

        return 0
# Esempio n. 22
# 0
class WebCommand(Command):
    """Implementation of gsutil web command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'web',
        command_name_aliases=['setwebcfg', 'getwebcfg'],
        usage_synopsis=_SYNOPSIS,
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='m:e:',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'set': [CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()],
            'get': [CommandArgument.MakeNCloudBucketURLsArgument(1)]
        })
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='web',
        help_name_aliases=['getwebcfg', 'setwebcfg'],
        help_type='command_help',
        help_one_line_summary=(
            'Set a main page and/or error page for one or more buckets'),
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'get': _get_help_text,
            'set': _set_help_text
        },
    )

    def _GetWeb(self):
        """Prints the website configuration for a single bucket.

        Returns:
          0 on success.
        """
        bucket_url, bucket_metadata = self.GetSingleBucketUrlFromArg(
            self.args[0], bucket_fields=['website'])

        # S3 has no JSON representation of website config; pass the raw XML
        # response straight through.
        if bucket_url.scheme == 's3':
            sys.stdout.write(
                self.gsutil_api.XmlPassThroughGetWebsite(
                    bucket_url, provider=bucket_url.scheme))
            return 0

        website = bucket_metadata.website
        if website and (website.mainPageSuffix or website.notFoundPage):
            sys.stdout.write(str(encoding.MessageToJson(website)) + '\n')
        else:
            sys.stdout.write('%s has no website configuration.\n' %
                             bucket_url)
        return 0

    def _SetWeb(self):
        """Sets the website configuration on every bucket named in args.

        Returns:
          0 on success.

        Raises:
          CommandException: if no URL argument matched a bucket.
        """
        main_page_suffix = None
        error_page = None
        for flag, value in (self.sub_opts or []):
            if flag == '-m':
                main_page_suffix = value
            elif flag == '-e':
                error_page = value

        url_args = self.args

        website = apitools_messages.Bucket.WebsiteValue(
            mainPageSuffix=main_page_suffix, notFoundPage=error_page)

        # Expand wildcards and apply the configuration to each matched bucket.
        some_matched = False
        for url_str in url_args:
            for blr in self.GetBucketUrlIterFromArg(url_str,
                                                    bucket_fields=['id']):
                some_matched = True
                target_url = blr.storage_url
                self.logger.info('Setting website configuration on %s...', blr)
                patch_metadata = apitools_messages.Bucket(website=website)
                self.gsutil_api.PatchBucket(target_url.bucket_name,
                                            patch_metadata,
                                            provider=target_url.scheme,
                                            fields=['id'])
        if not some_matched:
            raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))
        return 0

    def RunCommand(self):
        """Command entry point for the web command."""
        action_subcommand = self.args.pop(0)
        self.ParseSubOpts(check_args=True)
        dispatch = {'get': self._GetWeb, 'set': self._SetWeb}
        func = dispatch.get(action_subcommand)
        if func is None:
            raise CommandException(
                ('Invalid subcommand "%s" for the %s command.\n'
                 'See "gsutil help web".') %
                (action_subcommand, self.command_name))

        # Commands with both suboptions and subcommands need to reparse for
        # suboptions, so we log again.
        metrics.LogCommandParams(subcommands=[action_subcommand],
                                 sub_opts=self.sub_opts)
        return func()
# Esempio n. 23
# 0
class BucketPolicyOnlyCommand(Command):
  """Implements the gsutil bucketpolicyonly command."""

  command_spec = Command.CreateCommandSpec(
      'bucketpolicyonly',
      usage_synopsis=_SYNOPSIS,
      min_args=2,
      max_args=NO_MAX,
      supported_sub_args='',
      file_url_ok=False,
      provider_url_ok=False,
      urls_start_arg=2,
      gs_api_support=[ApiSelector.JSON],
      gs_default_api=ApiSelector.JSON,
      argparse_arguments={
          'get': [CommandArgument.MakeNCloudURLsArgument(1),],
          'set': [
              CommandArgument('mode', choices=['on', 'off']),
              CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
          ],
      })
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='bucketpolicyonly',
      help_name_aliases=[],
      help_type='command_help',
      help_one_line_summary='Configure uniform bucket-level access',
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={
          'get': _get_help_text,
          'set': _set_help_text,
      },
  )

  def _ValidateBucketListingRefAndReturnBucketName(self, blr):
    """Validates that a listing ref names a gs:// bucket.

    Args:
      blr: BucketListingRef to validate.

    Returns:
      The bucket name string (callers may ignore it).

    Raises:
      CommandException: If the URL scheme is not gs://.
    """
    if blr.storage_url.scheme != 'gs':
      raise CommandException(
          'The %s command can only be used with gs:// bucket URLs.' %
          self.command_name)
    # Return the name so the method matches its own name's contract;
    # existing callers ignore the return value, so this is backward
    # compatible.
    return blr.storage_url.bucket_name

  def _GetBucketPolicyOnly(self, blr):
    """Gets the Bucket Policy Only setting for a bucket."""
    self._ValidateBucketListingRefAndReturnBucketName(blr)
    bucket_url = blr.storage_url

    bucket_metadata = self.gsutil_api.GetBucket(bucket_url.bucket_name,
                                                fields=['iamConfiguration'],
                                                provider=bucket_url.scheme)
    iam_config = bucket_metadata.iamConfiguration
    bucket_policy_only = iam_config.bucketPolicyOnly

    # Bug fix: check truthiness BEFORE touching attributes. Previously the
    # .enabled / .lockedTime reads happened ahead of the guard, so a bucket
    # with no bucketPolicyOnly message raised AttributeError instead of
    # printing nothing.
    if not bucket_policy_only:
      return

    fields = {
        'bucket': str(bucket_url).rstrip('/'),
        'enabled': bucket_policy_only.enabled
    }

    locked_time_line = ''
    if bucket_policy_only.lockedTime:
      fields['locked_time'] = bucket_policy_only.lockedTime
      locked_time_line = '  LockedTime: {locked_time}\n'

    print(('Bucket Policy Only setting for {bucket}:\n'
           '  Enabled: {enabled}\n' + locked_time_line).format(**fields))

  def _SetBucketPolicyOnly(self, blr, setting_arg):
    """Sets the Bucket Policy Only setting for a bucket on or off."""
    self._ValidateBucketListingRefAndReturnBucketName(blr)
    bucket_url = blr.storage_url

    iam_config = IamConfigurationValue()
    iam_config.bucketPolicyOnly = BucketPolicyOnlyValue()
    iam_config.bucketPolicyOnly.enabled = (setting_arg == 'on')

    bucket_metadata = apitools_messages.Bucket(iamConfiguration=iam_config)

    setting_verb = 'Enabling' if setting_arg == 'on' else 'Disabling'
    print('%s Bucket Policy Only for %s...' %
          (setting_verb, str(bucket_url).rstrip('/')))

    self.gsutil_api.PatchBucket(bucket_url.bucket_name,
                                bucket_metadata,
                                fields=['iamConfiguration'],
                                provider=bucket_url.scheme)
    return 0

  def _BucketPolicyOnly(self):
    """Handles bucketpolicyonly command on a Cloud Storage bucket.

    Returns:
      0 on success.

    Raises:
      CommandException: If the subcommand is invalid or no URLs match.
    """
    subcommand = self.args.pop(0)

    if subcommand not in ('get', 'set'):
      raise CommandException('bucketpolicyonly only supports get|set')

    subcommand_func = None
    subcommand_args = []
    setting_arg = None

    if subcommand == 'get':
      subcommand_func = self._GetBucketPolicyOnly
    elif subcommand == 'set':
      subcommand_func = self._SetBucketPolicyOnly
      setting_arg = self.args.pop(0)
      InsistOnOrOff(setting_arg,
                    'Only on and off values allowed for set option')
      subcommand_args.append(setting_arg)

    # Iterate over bucket args, performing the specified subsubcommand.
    some_matched = False
    url_args = self.args
    if not url_args:
      self.RaiseWrongNumberOfArgumentsException()
    for url_str in url_args:
      # Throws a CommandException if the argument is not a bucket.
      bucket_iter = self.GetBucketUrlIterFromArg(url_str)
      for bucket_listing_ref in bucket_iter:
        some_matched = True
        subcommand_func(bucket_listing_ref, *subcommand_args)

    if not some_matched:
      raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))
    return 0

  def RunCommand(self):
    """Command entry point for the bucketpolicyonly command."""
    if self.gsutil_api.GetApiSelector(provider='gs') != ApiSelector.JSON:
      raise CommandException('\n'.join(
          textwrap.wrap(
              'The "%s" command can only be used with the Cloud Storage JSON API.'
              % self.command_name)))

    # Peek (don't pop) the subcommand here; _BucketPolicyOnly pops it after
    # suboption parsing.
    action_subcommand = self.args[0]
    self.ParseSubOpts(check_args=True)

    if action_subcommand == 'get' or action_subcommand == 'set':
      metrics.LogCommandParams(sub_opts=self.sub_opts)
      metrics.LogCommandParams(subcommands=[action_subcommand])
      # Bug fix: propagate the subcommand's exit status. Previously this
      # method fell through and implicitly returned None instead of 0.
      return self._BucketPolicyOnly()
    else:
      raise CommandException('Invalid subcommand "%s", use get|set instead.' %
                             action_subcommand)
# Esempio n. 24
# 0
class UpdateCommand(Command):
    """Implementation of gsutil update command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'update',
        command_name_aliases=['refresh'],
        usage_synopsis=_SYNOPSIS,
        min_args=0,
        max_args=1,
        supported_sub_args='fn',
        file_url_ok=True,
        provider_url_ok=False,
        urls_start_arg=0,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='update',
        help_name_aliases=['refresh'],
        help_type='command_help',
        help_one_line_summary='Update to the latest gsutil release',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={},
    )

    # NOTE(review): renamed from _DisallowUpdataIfDataInGsutilDir to fix the
    # "Updata" typo; the method is private and its only call site is in
    # RunCommand below.
    def _DisallowUpdateIfDataInGsutilDir(self):
        """Disallows the update command if files not in the gsutil distro are found.

        This prevents users from losing data if they are in the habit of
        running gsutil from the gsutil directory and leaving data in that
        directory.

        This will also detect someone attempting to run gsutil update from a
        git repo, since the top-level directory will contain git files and
        dirs (like .git) that are not distributed with gsutil.

        Raises:
          CommandException: if files other than those distributed with gsutil
              found.
        """
        # Manifest includes recursive-includes of gslib. Directly add
        # those to the list here so we will skip them in os.listdir() loop without
        # having to build deeper handling of the MANIFEST file here. Also include
        # 'third_party', which isn't present in manifest but gets added to the
        # gsutil distro by the gsutil submodule configuration; and the MANIFEST.in
        # and CHANGES.md files.
        manifest_lines = ['gslib', 'third_party', 'MANIFEST.in', 'CHANGES.md']

        try:
            with open(os.path.join(gslib.GSUTIL_DIR, 'MANIFEST.in'),
                      'r') as fp:
                for line in fp:
                    if line.startswith('include '):
                        manifest_lines.append(line.split()[-1])
        except IOError:
            # Best-effort: without the manifest we can't know what belongs,
            # so skip the check rather than block the update.
            self.logger.warn(
                'MANIFEST.in not found in %s.\nSkipping user data '
                'check.\n', gslib.GSUTIL_DIR)
            return

        # Look just at top-level directory. We don't try to catch data dropped into
        # subdirs (like gslib) because that would require deeper parsing of
        # MANFFEST.in, and most users who drop data into gsutil dir do so at the top
        # level directory.
        for filename in os.listdir(gslib.GSUTIL_DIR):
            if filename.endswith('.pyc'):
                # Ignore compiled code.
                continue
            if filename not in manifest_lines:
                raise CommandException('\n'.join(
                    textwrap.wrap(
                        'A file (%s) that is not distributed with gsutil was found in '
                        'the gsutil directory. The update command cannot run with user '
                        'data in the gsutil directory.' %
                        os.path.join(gslib.GSUTIL_DIR, filename))))

    def _ExplainIfSudoNeeded(self, tf, dirs_to_remove):
        """Explains what to do if sudo needed to update gsutil software.

        Happens if gsutil was previously installed by a different user
        (typically if someone originally installed in a shared file system
        location, using sudo).

        Args:
          tf: Opened TarFile.
          dirs_to_remove: List of directories to remove.

        Raises:
          CommandException: if errors encountered.
        """
        # If running under Windows or Cygwin we don't need (or have) sudo.
        if IS_CYGWIN or IS_WINDOWS:
            return

        user_id = os.getuid()
        if os.stat(gslib.GSUTIL_DIR).st_uid == user_id:
            return

        # Won't fail - this command runs after main startup code that insists on
        # having a config file.
        config_file_list = GetBotoConfigFileList()
        config_files = ' '.join(config_file_list)
        self._CleanUpUpdateCommand(tf, dirs_to_remove)

        # Pick current protection of each boto config file for command that restores
        # protection (rather than fixing at 600) to support use cases like how GCE
        # installs a service account with an /etc/boto.cfg file protected to 644.
        chmod_cmds = []
        for config_file in config_file_list:
            mode = oct(stat.S_IMODE((os.stat(config_file)[stat.ST_MODE])))
            chmod_cmds.append('\n\tsudo chmod %s %s' % (mode, config_file))

        raise CommandException('\n'.join(
            textwrap.wrap(
                'Since it was installed by a different user previously, you will need '
                'to update using the following commands. You will be prompted for your '
                'password, and the install will run as "root". If you\'re unsure what '
                'this means please ask your system administrator for help:')
        ) + ('\n\tsudo chmod 0644 %s\n\tsudo env BOTO_CONFIG="%s" %s update'
             '%s') % (config_files, config_files, self.gsutil_path,
                      ' '.join(chmod_cmds)),
                               informational=True)

    # This list is checked during gsutil update by doing a lowercased
    # slash-left-stripped check. For example "/Dev" would match the "dev" entry.
    unsafe_update_dirs = [
        'applications',
        'auto',
        'bin',
        'boot',
        'desktop',
        'dev',
        'documents and settings',
        'etc',
        'export',
        'home',
        'kernel',
        'lib',
        'lib32',
        'library',
        'lost+found',
        'mach_kernel',
        'media',
        'mnt',
        'net',
        'null',
        'network',
        'opt',
        'private',
        'proc',
        'program files',
        'python',
        'root',
        'sbin',
        'scripts',
        'srv',
        'sys',
        'system',
        'tmp',
        'users',
        'usr',
        'var',
        'volumes',
        'win',
        'win32',
        'windows',
        'winnt',
    ]

    def _EnsureDirsSafeForUpdate(self, dirs):
        """Raises Exception if any of dirs is known to be unsafe for gsutil update.

        This provides a fail-safe check to ensure we don't try to overwrite
        or delete any important directories. (That shouldn't happen given the
        way we construct tmp dirs, etc., but since the gsutil update cleanup
        uses shutil.rmtree() it's prudent to add extra checks.)

        Args:
          dirs: List of directories to check.

        Raises:
          CommandException: If unsafe directory encountered.
        """
        for d in dirs:
            if not d:
                d = 'null'
            if d.lstrip(os.sep).lower() in self.unsafe_update_dirs:
                raise CommandException(
                    'EnsureDirsSafeForUpdate: encountered unsafe '
                    'directory (%s); aborting update' % d)

    def _CleanUpUpdateCommand(self, tf, dirs_to_remove):
        """Cleans up temp files etc. from running update command.

        Args:
          tf: Opened TarFile, or None if none currently open.
          dirs_to_remove: List of directories to remove.
        """
        if tf:
            tf.close()
        self._EnsureDirsSafeForUpdate(dirs_to_remove)
        for directory in dirs_to_remove:
            try:
                shutil.rmtree(directory)
            except OSError:
                # Ignore errors while attempting to remove old dirs under Windows. They
                # happen because of Windows exclusive file locking, and the update
                # actually succeeds but just leaves the old versions around in the
                # user's temp dir.
                if not IS_WINDOWS:
                    raise

    def RunCommand(self):
        """Command entry point for the update command."""

        if gslib.IS_PACKAGE_INSTALL:
            raise CommandException(
                'The update command is only available for gsutil installed from a '
                'tarball. If you installed gsutil via another method, use the same '
                'method to update it.')

        if os.environ.get('CLOUDSDK_WRAPPER') == '1':
            raise CommandException(
                'The update command is disabled for Cloud SDK installs. Please run '
                '"gcloud components update" to update it. Note: the Cloud SDK '
                'incorporates updates to the underlying tools approximately every 2 '
                'weeks, so if you are attempting to update to a recently created '
                'release / pre-release of gsutil it may not yet be available via '
                'the Cloud SDK.')

        https_validate_certificates = CERTIFICATE_VALIDATION_ENABLED
        if not https_validate_certificates:
            raise CommandException(
                'Your boto configuration has https_validate_certificates = False.\n'
                'The update command cannot be run this way, for security reasons.'
            )

        self._DisallowUpdateIfDataInGsutilDir()

        force_update = False
        no_prompt = False
        if self.sub_opts:
            for o, unused_a in self.sub_opts:
                if o == '-f':
                    force_update = True
                if o == '-n':
                    no_prompt = True

        dirs_to_remove = []
        tmp_dir = tempfile.mkdtemp()
        dirs_to_remove.append(tmp_dir)
        os.chdir(tmp_dir)

        if not no_prompt:
            self.logger.info('Checking for software update...')
        if self.args:
            update_from_url_str = self.args[0]
            if not update_from_url_str.endswith('.tar.gz'):
                raise CommandException(
                    'The update command only works with tar.gz files.')
            for i, result in enumerate(
                    self.WildcardIterator(update_from_url_str)):
                if i > 0:
                    raise CommandException(
                        'Invalid update URL. Must name a single .tar.gz file.')
                storage_url = result.storage_url
                if storage_url.IsFileUrl() and not storage_url.IsDirectory():
                    if not force_update:
                        raise CommandException((
                            '"update" command does not support "file://" URLs without the '
                            '-f option.'))
                elif not (storage_url.IsCloudUrl() and storage_url.IsObject()):
                    raise CommandException(
                        'Invalid update object URL. Must name a single .tar.gz file.'
                    )
        else:
            update_from_url_str = GSUTIL_PUB_TARBALL

        # Try to retrieve version info from tarball metadata; failing that; download
        # the tarball and extract the VERSION file. The version lookup will fail
        # when running the update system test, because it retrieves the tarball from
        # a temp file rather than a cloud URL (files lack the version metadata).
        tarball_version = LookUpGsutilVersion(self.gsutil_api,
                                              update_from_url_str)
        if tarball_version:
            tf = None
        else:
            tf = self._FetchAndOpenGsutilTarball(update_from_url_str)
            tf.extractall()
            with open(os.path.join('gsutil', 'VERSION'), 'r') as ver_file:
                tarball_version = ver_file.read().strip()

        if not force_update and gslib.VERSION == tarball_version:
            self._CleanUpUpdateCommand(tf, dirs_to_remove)
            if self.args:
                raise CommandException('You already have %s installed.' %
                                       update_from_url_str,
                                       informational=True)
            else:
                raise CommandException(
                    'You already have the latest gsutil release '
                    'installed.',
                    informational=True)

        if not no_prompt:
            (_, major) = CompareVersions(tarball_version, gslib.VERSION)
            if major:
                print('\n'.join(
                    textwrap.wrap(
                        'This command will update to the "%s" version of gsutil at %s. '
                        'NOTE: This a major new version, so it is strongly recommended '
                        'that you review the release note details at %s before updating to '
                        'this version, especially if you use gsutil in scripts.'
                        % (tarball_version, gslib.GSUTIL_DIR,
                           RELEASE_NOTES_URL))))
            else:
                print(
                    'This command will update to the "%s" version of\ngsutil at %s'
                    % (tarball_version, gslib.GSUTIL_DIR))
        self._ExplainIfSudoNeeded(tf, dirs_to_remove)

        if no_prompt:
            answer = 'y'
        else:
            # raw_input is intentional: this file targets Python 2.
            answer = raw_input('Proceed? [y/N] ')
        if not answer or answer.lower()[0] != 'y':
            self._CleanUpUpdateCommand(tf, dirs_to_remove)
            raise CommandException('Not running update.', informational=True)

        if not tf:
            tf = self._FetchAndOpenGsutilTarball(update_from_url_str)

        # Ignore keyboard interrupts during the update to reduce the chance someone
        # hitting ^C leaves gsutil in a broken state.
        RegisterSignalHandler(signal.SIGINT, signal.SIG_IGN)

        # gslib.GSUTIL_DIR lists the path where the code should end up (like
        # /usr/local/gsutil), which is one level down from the relative path in the
        # tarball (since the latter creates files in ./gsutil). So, we need to
        # extract at the parent directory level.
        gsutil_bin_parent_dir = os.path.normpath(
            os.path.join(gslib.GSUTIL_DIR, '..'))

        # Extract tarball to a temporary directory in a sibling to GSUTIL_DIR.
        old_dir = tempfile.mkdtemp(dir=gsutil_bin_parent_dir)
        new_dir = tempfile.mkdtemp(dir=gsutil_bin_parent_dir)
        dirs_to_remove.append(old_dir)
        dirs_to_remove.append(new_dir)
        self._EnsureDirsSafeForUpdate(dirs_to_remove)
        try:
            tf.extractall(path=new_dir)
        except Exception as e:
            # Bug fix: 'except Exception, e' is Python-2-only syntax; the
            # 'as' form is valid on Python 2.6+ and Python 3.
            self._CleanUpUpdateCommand(tf, dirs_to_remove)
            raise CommandException('Update failed: %s.' % e)

        # For enterprise mode (shared/central) installation, users with
        # different user/group than the installation user/group must be
        # able to run gsutil so we need to do some permissions adjustments
        # here. Since enterprise mode is not not supported for Windows
        # users, we can skip this step when running on Windows, which
        # avoids the problem that Windows has no find or xargs command.
        if not IS_WINDOWS:
            # Make all files and dirs in updated area owner-RW and world-R, and make
            # all directories owner-RWX and world-RX.
            for dirname, subdirs, filenames in os.walk(new_dir):
                for filename in filenames:
                    fd = os.open(os.path.join(dirname, filename), os.O_RDONLY)
                    os.fchmod(
                        fd, stat.S_IWRITE | stat.S_IRUSR | stat.S_IRGRP
                        | stat.S_IROTH)
                    os.close(fd)
                for subdir in subdirs:
                    fd = os.open(os.path.join(dirname, subdir), os.O_RDONLY)
                    os.fchmod(
                        fd, stat.S_IRWXU | stat.S_IXGRP | stat.S_IXOTH
                        | stat.S_IRGRP | stat.S_IROTH)
                    os.close(fd)

            # Make main gsutil script owner-RWX and world-RX.
            fd = os.open(os.path.join(new_dir, 'gsutil', 'gsutil'),
                         os.O_RDONLY)
            os.fchmod(
                fd, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH
                | stat.S_IXOTH)
            os.close(fd)

        # Move old installation aside and new into place.
        os.rename(gslib.GSUTIL_DIR, os.path.join(old_dir, 'old'))
        os.rename(os.path.join(new_dir, 'gsutil'), gslib.GSUTIL_DIR)
        self._CleanUpUpdateCommand(tf, dirs_to_remove)
        RegisterSignalHandler(signal.SIGINT, signal.SIG_DFL)
        self.logger.info('Update complete.')
        return 0
class LoggingCommand(Command):
  """Implementation of gsutil logging command."""

  # Command specification. See base class for documentation.
  command_spec = Command.CreateCommandSpec(
      'logging',
      command_name_aliases=['disablelogging', 'enablelogging', 'getlogging'],
      usage_synopsis=_SYNOPSIS,
      min_args=2,
      max_args=NO_MAX,
      supported_sub_args='b:o:',
      file_url_ok=False,
      provider_url_ok=False,
      urls_start_arg=0,
      gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
      gs_default_api=ApiSelector.JSON,
      argparse_arguments=[
          CommandArgument('mode', choices=['on', 'off']),
          CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
      ]
  )
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='logging',
      help_name_aliases=['loggingconfig', 'logs', 'log', 'getlogging',
                         'enablelogging', 'disablelogging'],
      help_type='command_help',
      help_one_line_summary='Configure or retrieve logging on buckets',
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={'get': _get_help_text, 'set': _set_help_text},
  )

  def _Get(self):
    """Gets logging configuration for a bucket.

    Reads the bucket named by the first remaining positional argument and
    writes its logging configuration to stdout.

    Returns:
      0 on success.
    """
    bucket_url, bucket_metadata = self.GetSingleBucketUrlFromArg(
        self.args[0], bucket_fields=['logging'])

    if bucket_url.scheme == 's3':
      # S3 has no JSON representation here; pass the raw XML through.
      sys.stdout.write(self.gsutil_api.XmlPassThroughGetLogging(
          bucket_url, provider=bucket_url.scheme))
    else:
      # Only report a configuration when both the target bucket and the
      # object prefix are present; otherwise the config is incomplete.
      if (bucket_metadata.logging and bucket_metadata.logging.logBucket and
          bucket_metadata.logging.logObjectPrefix):
        sys.stdout.write(str(encoding.MessageToJson(
            bucket_metadata.logging)) + '\n')
      else:
        sys.stdout.write('%s has no logging configuration.\n' % bucket_url)
    return 0

  def _Enable(self):
    """Enables logging configuration for a bucket.

    Requires the -b <log_bucket> suboption; -o <prefix> is optional and
    defaults to each target bucket's own name.

    Returns:
      0 on success.

    Raises:
      CommandException: If URLs span providers, -b is missing/invalid, or
          no URLs matched.
    """
    # Disallow multi-provider 'logging set on' calls, because the schemas
    # differ.
    if not UrlsAreForSingleProvider(self.args):
      raise CommandException('"logging set on" command spanning providers not '
                             'allowed.')
    target_bucket_url = None
    target_prefix = None
    # self.sub_opts is populated by ParseSubOpts in RunCommand before this
    # method is invoked.
    for opt, opt_arg in self.sub_opts:
      if opt == '-b':
        target_bucket_url = StorageUrlFromString(opt_arg)
      if opt == '-o':
        target_prefix = opt_arg

    if not target_bucket_url:
      raise CommandException('"logging set on" requires \'-b <log_bucket>\' '
                             'option')
    if not target_bucket_url.IsBucket():
      raise CommandException('-b option must specify a bucket URL.')

    # Iterate over URLs, expanding wildcards and setting logging on each.
    some_matched = False
    for url_str in self.args:
      bucket_iter = self.GetBucketUrlIterFromArg(url_str, bucket_fields=['id'])
      for blr in bucket_iter:
        url = blr.storage_url
        some_matched = True
        self.logger.info('Enabling logging on %s...', blr)
        logging = apitools_messages.Bucket.LoggingValue(
            logBucket=target_bucket_url.bucket_name,
            logObjectPrefix=target_prefix or url.bucket_name)

        bucket_metadata = apitools_messages.Bucket(logging=logging)
        self.gsutil_api.PatchBucket(url.bucket_name, bucket_metadata,
                                    provider=url.scheme, fields=['id'])
    if not some_matched:
      raise CommandException(NO_URLS_MATCHED_TARGET % list(self.args))
    return 0

  def _Disable(self):
    """Disables logging configuration for a bucket.

    Returns:
      0 on success.

    Raises:
      CommandException: If no URLs matched.
    """
    # Iterate over URLs, expanding wildcards, and disabling logging on each.
    some_matched = False
    for url_str in self.args:
      bucket_iter = self.GetBucketUrlIterFromArg(url_str, bucket_fields=['id'])
      for blr in bucket_iter:
        url = blr.storage_url
        some_matched = True
        self.logger.info('Disabling logging on %s...', blr)
        # Patching an empty LoggingValue clears the configuration.
        logging = apitools_messages.Bucket.LoggingValue()

        bucket_metadata = apitools_messages.Bucket(logging=logging)
        self.gsutil_api.PatchBucket(url.bucket_name, bucket_metadata,
                                    provider=url.scheme, fields=['id'])
    if not some_matched:
      raise CommandException(NO_URLS_MATCHED_TARGET % list(self.args))
    return 0

  def RunCommand(self):
    """Command entry point for the logging command."""
    # Parse the subcommand and alias for the new logging command.
    # NOTE: the order matters here — both pops must happen before
    # ParseSubOpts so that suboptions are parsed against the remaining
    # (bucket URL) arguments only.
    action_subcommand = self.args.pop(0)
    if action_subcommand == 'get':
      func = self._Get
    elif action_subcommand == 'set':
      state_subcommand = self.args.pop(0)
      if not self.args:
        self.RaiseWrongNumberOfArgumentsException()
      if state_subcommand == 'on':
        func = self._Enable
      elif state_subcommand == 'off':
        func = self._Disable
      else:
        raise CommandException((
            'Invalid subcommand "%s" for the "%s %s" command.\n'
            'See "gsutil help logging".') % (
                state_subcommand, self.command_name, action_subcommand))
    else:
      raise CommandException(('Invalid subcommand "%s" for the %s command.\n'
                              'See "gsutil help logging".') %
                             (action_subcommand, self.command_name))
    self.ParseSubOpts(check_args=True)
    func()
    return 0
# Esempio n. 26
# 0
class MbCommand(Command):
  """Implementation of gsutil mb command."""

  # Command specification. See base class for documentation.
  command_spec = Command.CreateCommandSpec(
      'mb',
      command_name_aliases=['makebucket', 'createbucket', 'md', 'mkdir'],
      usage_synopsis=_SYNOPSIS,
      min_args=1,
      max_args=NO_MAX,
      supported_sub_args='c:l:p:s:',
      file_url_ok=False,
      provider_url_ok=False,
      urls_start_arg=0,
      gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
      gs_default_api=ApiSelector.JSON,
      argparse_arguments=[
          CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
      ]
  )
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='mb',
      help_name_aliases=[
          'createbucket', 'makebucket', 'md', 'mkdir', 'location', 'dra',
          'dras', 'reduced_availability', 'durable_reduced_availability', 'rr',
          'reduced_redundancy', 'standard', 'storage class', 'nearline', 'nl'],
      help_type='command_help',
      help_one_line_summary='Make buckets',
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={},
  )

  def RunCommand(self):
    """Command entry point for the mb command.

    Creates one bucket per positional URL argument, honoring the -l
    (location), -c/-s (storage class), and -p (project) suboptions.

    Returns:
      0 on success.

    Raises:
      CommandException: If a URL does not name a bucket, or creation fails.
      InvalidUrlError: If a bucket name is syntactically invalid.
    """
    # Collect the optional location / storage-class / project suboptions.
    location = None
    storage_class = None
    for opt, opt_arg in (self.sub_opts or []):
      if opt == '-l':
        location = opt_arg
      elif opt == '-p':
        # Project IDs are sent as header values when using gs and s3 XML APIs.
        InsistAscii(opt_arg, 'Invalid non-ASCII character found in project ID')
        self.project_id = opt_arg
      elif opt in ('-c', '-s'):
        storage_class = NormalizeStorageClass(opt_arg)

    bucket_metadata = apitools_messages.Bucket(location=location,
                                               storageClass=storage_class)

    for bucket_url_str in self.args:
      bucket_url = StorageUrlFromString(bucket_url_str)
      if not bucket_url.IsBucket():
        raise CommandException('The mb command requires a URL that specifies a '
                               'bucket.\n"%s" is not valid.' % bucket_url)
      name = bucket_url.bucket_name
      if not BUCKET_NAME_RE.match(name) or TOO_LONG_DNS_NAME_COMP.search(name):
        raise InvalidUrlError(
            'Invalid bucket name in URL "%s"' % name)

      self.logger.info('Creating %s...', bucket_url)
      # Pass storage_class param only if this is a GCS bucket. (In S3 the
      # storage class is specified on the key object.)
      try:
        self.gsutil_api.CreateBucket(
            name, project_id=self.project_id,
            metadata=bucket_metadata, provider=bucket_url.scheme)
      except BadRequestException as e:
        dotful_tld_failure = (
            e.status == 400 and e.reason == 'DotfulBucketNameNotUnderTld' and
            bucket_url.scheme == 'gs')
        if not dotful_tld_failure:
          raise
        final_comp = name[name.rfind('.') + 1:]
        raise CommandException('\n'.join(textwrap.wrap(
            'Buckets with "." in the name must be valid DNS names. The bucket'
            ' you are attempting to create (%s) is not a valid DNS name,'
            ' because the final component (%s) is not currently a valid part'
            ' of the top-level DNS tree.' % (name, final_comp))))

    return 0
# Esempio n. 27
# 0
class VersionCommand(Command):
    """Implementation of gsutil version command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'version',
        command_name_aliases=['ver'],
        usage_synopsis=_SYNOPSIS,
        min_args=0,
        max_args=0,
        supported_sub_args='l',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=0,
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='version',
        help_name_aliases=['ver'],
        help_type='command_help',
        help_one_line_summary='Print version info about gsutil',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={},
    )

    def RunCommand(self):
        """Command entry point for the version command.

        Prints the gsutil version; with the -l flag also prints extended
        environment details (code checksum, boto/python versions, OS,
        config paths, install type, etc.).

        Returns:
          0 on success.
        """
        long_form = False
        if self.sub_opts:
            for o, _ in self.sub_opts:
                if o == '-l':
                    long_form = True

        # Call GetConfigFilePaths once instead of twice (it may scan the
        # filesystem for candidate config files).
        config_file_paths = GetConfigFilePaths()
        if config_file_paths:
            config_paths = ', '.join(config_file_paths)
        else:
            config_paths = 'no config found'

        shipped_checksum = gslib.CHECKSUM
        try:
            cur_checksum = self._ComputeCodeChecksum()
        except IOError:
            # One or more expected source files could not be read.
            cur_checksum = 'MISSING FILES'
        if shipped_checksum == cur_checksum:
            checksum_ok_str = 'OK'
        else:
            checksum_ok_str = '!= %s' % shipped_checksum

        sys.stdout.write('gsutil version: %s\n' % gslib.VERSION)

        if long_form:

            long_form_output = (
                'checksum: {checksum} ({checksum_ok})\n'
                'boto version: {boto_version}\n'
                'python version: {python_version}\n'
                'OS: {os_version}\n'
                'multiprocessing available: {multiprocessing_available}\n'
                'using cloud sdk: {cloud_sdk}\n'
                'pass cloud sdk credentials to gsutil: {cloud_sdk_credentials}\n'
                'config path(s): {config_paths}\n'
                'gsutil path: {gsutil_path}\n'
                'compiled crcmod: {compiled_crcmod}\n'
                'installed via package manager: {is_package_install}\n'
                'editable install: {is_editable_install}\n')

            sys.stdout.write(
                long_form_output.format(
                    checksum=cur_checksum,
                    checksum_ok=checksum_ok_str,
                    boto_version=boto.__version__,
                    python_version=sys.version.replace('\n', ''),
                    os_version='%s %s' %
                    (platform.system(), platform.release()),
                    multiprocessing_available=(
                        CheckMultiprocessingAvailableAndInit().is_available),
                    cloud_sdk=(os.environ.get('CLOUDSDK_WRAPPER') == '1'),
                    cloud_sdk_credentials=(os.environ.get(
                        'CLOUDSDK_CORE_PASS_CREDENTIALS_TO_GSUTIL') == '1'),
                    config_paths=config_paths,
                    gsutil_path=gslib.GSUTIL_PATH,
                    compiled_crcmod=UsingCrcmodExtension(crcmod),
                    is_package_install=gslib.IS_PACKAGE_INSTALL,
                    is_editable_install=gslib.IS_EDITABLE_INSTALL,
                ))

        return 0

    def _ComputeCodeChecksum(self):
        """Computes a checksum of gsutil code.

        This checksum can be used to determine if users locally modified
        gsutil when requesting support. (It's fine for users to make local
        mods, but when users ask for support we ask them to run a stock
        version of gsutil so we can reduce possible variables.)

        Returns:
          MD5 checksum of gsutil code.

        Raises:
          IOError: if any file to be checksummed cannot be read.
        """
        if gslib.IS_PACKAGE_INSTALL:
            return 'PACKAGED_GSUTIL_INSTALLS_DO_NOT_HAVE_CHECKSUMS'
        m = md5()
        # Checksum gsutil and all .py files under gslib directory.
        files_to_checksum = [gslib.GSUTIL_PATH]
        for root, _, files in os.walk(gslib.GSLIB_DIR):
            for filepath in files:
                if filepath.endswith('.py'):
                    files_to_checksum.append(os.path.join(root, filepath))
        # Sort to ensure consistent checksum build, no matter how os.walk
        # orders the list.
        for filepath in sorted(files_to_checksum):
            # Use a context manager so the file is closed even if read()
            # raises.
            with open(filepath, 'r') as f:
                content = f.read()
            content = re.sub(r'(\r\n|\r|\n)', '\n', content)
            # hashlib requires bytes; passing str raises TypeError on
            # Python 3, so encode before updating the digest.
            m.update(content.encode('utf-8'))
        return m.hexdigest()
# Esempio n. 28
# 0
class HelpCommand(Command):
  """Implementation of gsutil help command."""

  # Command specification. See base class for documentation.
  command_spec = Command.CreateCommandSpec(
      'help',
      command_name_aliases=['?', 'man'],
      usage_synopsis=_SYNOPSIS,
      min_args=0,
      max_args=2,
      supported_sub_args='',
      file_url_ok=True,
      provider_url_ok=False,
      urls_start_arg=0,
  )
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='help',
      help_name_aliases=['?'],
      help_type='command_help',
      help_one_line_summary='Get help about commands and topics',
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={},
  )

  def RunCommand(self):
    """Command entry point for the help command.

    With no args, prints the top-level usage string plus one-line summaries
    of every command and additional help topic. With one arg, prints the
    full help text for that command or topic; with two args, prints help
    for the named subcommand (or a suggestion list if it doesn't exist).

    Returns:
      0 on success. Unknown help names do not raise; a message is printed
      and 0 is still returned.
    """
    (help_type_map, help_name_map) = self._LoadHelpMaps()
    output = []
    if not self.args:
      # No argument: list all commands, then all additional help topics,
      # each padded to a fixed-width name column.
      output.append('%s\nAvailable commands:\n' % top_level_usage_string)
      format_str = '  %-' + str(MAX_HELP_NAME_LEN) + 's%s\n'
      for help_prov in sorted(help_type_map['command_help'],
                              key=lambda hp: hp.help_spec.help_name):
        output.append(format_str % (help_prov.help_spec.help_name,
                                    help_prov.help_spec.help_one_line_summary))
      output.append('\nAdditional help topics:\n')
      for help_prov in sorted(help_type_map['additional_help'],
                              key=lambda hp: hp.help_spec.help_name):
        output.append(format_str % (help_prov.help_spec.help_name,
                                    help_prov.help_spec.help_one_line_summary))
      output.append('\nUse gsutil help <command or topic> for detailed help.')
    else:
      invalid_subcommand = False
      arg = self.args[0]
      if arg not in help_name_map:
        output.append('No help available for "%s"' % arg)
      else:
        help_prov = help_name_map[arg]
        help_name = None
        if len(self.args) > 1:  # We also have a subcommand argument.
          subcommand_map = help_prov.help_spec.subcommand_help_text
          if subcommand_map and self.args[1] in subcommand_map:
            help_name = arg + ' ' + self.args[1]
            help_text = subcommand_map[self.args[1]]
          else:
            # Unknown subcommand: explain, and suggest the valid
            # subcommand names when the command has any.
            invalid_subcommand = True
            if not subcommand_map:
              output.append(
                  ('The "%s" command has no subcommands. You can ask for the '
                   'full help by running:\n\n\tgsutil help %s\n') % (arg, arg))
            else:
              subcommand_examples = []
              for subcommand in subcommand_map:
                subcommand_examples.append('\tgsutil help %s %s' %
                                           (arg, subcommand))
              output.append(
                  ('Subcommand "%s" does not exist for command "%s".\n'
                   'You can either ask for the full help about the command by '
                   'running:\n\n\tgsutil help %s\n\n'
                   'Or you can ask for help about one of the subcommands:\n\n%s'
                  ) % (self.args[1], arg, arg, '\n'.join(subcommand_examples)))
        if not invalid_subcommand:
          if not help_name:  # No subcommand or invalid subcommand.
            help_name = help_prov.help_spec.help_name
            help_text = help_prov.help_spec.help_text

          output.append('<B>NAME</B>\n')
          output.append('  %s - %s\n' %
                        (help_name, help_prov.help_spec.help_one_line_summary))
          output.append('\n\n')
          output.append(help_text.strip('\n'))
          # If the requested name is a deprecated alias (e.g. "getacl"),
          # append a deprecation warning pointing at the new command name.
          new_alias = OLD_ALIAS_MAP.get(arg, [None])[0]
          if new_alias:
            deprecation_warning = """
  The "%s" alias is deprecated, and will eventually be removed completely.
  Please use the "%s" command instead.""" % (arg, new_alias)

            output.append('\n\n\n<B>DEPRECATION WARNING</B>\n')
            output.append(deprecation_warning)
    self._OutputHelp(''.join(output))
    return 0

  def _OutputHelp(self, help_str):
    """Outputs simply formatted string.

    This function paginates if the string is too long, PAGER is defined, and
    the output is a tty.

    Args:
      help_str: String to format.

    Raises:
      CommandException: if the configured pager cannot be launched.
    """
    # Remove <B> and </B> tags and replace them with ANSI control codes if
    # writing to a compatible tty.
    if IS_WINDOWS or not IsRunningInteractively():
      help_str = re.sub('<B>', '', help_str)
      help_str = re.sub('</B>', '', help_str)
      text_util.print_to_fd(help_str)
      return
    # ANSI bold on / attributes off.
    help_str = re.sub('<B>', '\033[1m', help_str)
    help_str = re.sub('</B>', '\033[0;0m', help_str)
    num_lines = len(help_str.split('\n'))
    # Only paginate when the text would scroll off the terminal.
    if 'PAGER' in os.environ and num_lines >= GetTermLines():
      # Use -r option for less to make bolding work right.
      pager = os.environ['PAGER'].split(' ')
      if pager[0].endswith('less'):
        pager.append('-r')
      try:
        Popen(pager, stdin=PIPE,
              universal_newlines=True).communicate(input=help_str)
      except OSError as e:
        raise CommandException('Unable to open pager (%s): %s' %
                               (' '.join(pager), e))
    else:
      text_util.print_to_fd(help_str)

  def _LoadHelpMaps(self):
    """Builds lookup maps over all discoverable HelpProviders.

    help type is a dict with key: help type
                             value: list of HelpProviders
    help name is a dict with key: help command name or alias
                             value: HelpProvider

    Returns:
      Tuple of (help type map, help name map).
    """

    # Import all gslib.commands submodules so their Command subclasses
    # register themselves via __subclasses__ below.
    for _, module_name, _ in pkgutil.iter_modules(gslib.commands.__path__):
      __import__('gslib.commands.%s' % module_name)
    # Import all gslib.addlhelp submodules.
    for _, module_name, _ in pkgutil.iter_modules(gslib.addlhelp.__path__):
      __import__('gslib.addlhelp.%s' % module_name)

    help_type_map = {}
    help_name_map = {}
    for s in gslib.help_provider.ALL_HELP_TYPES:
      help_type_map[s] = []
    # Only include HelpProvider subclasses in the dict.
    for help_prov in itertools.chain(HelpProvider.__subclasses__(),
                                     Command.__subclasses__()):
      if help_prov is Command:
        # Skip the Command base class itself; we just want its subclasses,
        # where the help command text lives (in addition to non-Command
        # HelpProviders, like naming.py).
        continue
      gslib.help_provider.SanityCheck(help_prov, help_name_map)
      # Register the provider under its canonical name and every alias.
      help_name_map[help_prov.help_spec.help_name] = help_prov
      for help_name_aliases in help_prov.help_spec.help_name_aliases:
        help_name_map[help_name_aliases] = help_prov
      help_type_map[help_prov.help_spec.help_type].append(help_prov)
    return (help_type_map, help_name_map)
# Esempio n. 29
# 0
class HmacCommand(Command):
    """Implementation of gsutil hmac command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'hmac',
        min_args=1,
        max_args=8,
        supported_sub_args='ae:lp:s:u:',
        file_url_ok=True,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        usage_synopsis=_SYNOPSIS,
        argparse_arguments={
            'create':
            [CommandArgument.MakeZeroOrMoreCloudOrFileURLsArgument()],
            'delete':
            [CommandArgument.MakeZeroOrMoreCloudOrFileURLsArgument()],
            'get': [CommandArgument.MakeZeroOrMoreCloudOrFileURLsArgument()],
            'list': [CommandArgument.MakeZeroOrMoreCloudOrFileURLsArgument()],
            'update':
            [CommandArgument.MakeZeroOrMoreCloudOrFileURLsArgument()],
        },
    )

    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='hmac',
        help_name_aliases=[],
        help_type='command_help',
        help_one_line_summary=(
            'CRUD operations on service account HMAC keys.'),
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'create': _create_help_text,
            'delete': _delete_help_text,
            'get': _get_help_text,
            'list': _list_help_text,
            'update': _update_help_text,
        })

    def _CreateHmacKey(self, thread_state=None):
        """Creates HMAC key for a service account.

        The service account email is taken from the first positional
        argument. Prints the new key's access ID and secret to stdout.

        Raises:
          CommandException: if no service account argument was supplied.
        """
        if self.args:
            self.service_account_email = self.args[0]
        else:
            err_msg = (
                '%s %s requires a service account to be specified as the '
                'last argument.\n%s')
            raise CommandException(
                err_msg %
                (self.command_name, self.action_subcommand, _CREATE_SYNOPSIS))

        gsutil_api = GetCloudApiInstance(self, thread_state=thread_state)

        response = gsutil_api.CreateHmacKey(self.project_id,
                                            self.service_account_email,
                                            provider='gs')

        # The secret is only returned at creation time, so show it here.
        print('%-12s %s' % ('Access ID:', response.metadata.accessId))
        print('%-12s %s' % ('Secret:', response.secret))

    def _DeleteHmacKey(self, thread_state=None):
        """Deletes an HMAC key identified by the access ID argument.

        Raises:
          _AccessIdException: if no access ID argument was supplied.
        """
        if self.args:
            access_id = self.args[0]
        else:
            raise _AccessIdException(self.command_name, self.action_subcommand,
                                     _DELETE_SYNOPSIS)

        gsutil_api = GetCloudApiInstance(self, thread_state=thread_state)

        gsutil_api.DeleteHmacKey(self.project_id, access_id, provider='gs')

    def _GetHmacKey(self, thread_state=None):
        """Gets HMAC key from its Access Id and prints its metadata.

        Raises:
          _AccessIdException: if no access ID argument was supplied.
        """
        if self.args:
            access_id = self.args[0]
        else:
            raise _AccessIdException(self.command_name, self.action_subcommand,
                                     _GET_SYNOPSIS)

        gsutil_api = GetCloudApiInstance(self, thread_state=thread_state)

        response = gsutil_api.GetHmacKey(self.project_id,
                                         access_id,
                                         provider='gs')

        print(_KeyMetadataOutput(response))

    def _ListHmacKeys(self, thread_state=None):
        """Lists HMAC keys for a project or service account.

        Honors the -u (service account filter), -a (include deleted keys)
        and -l (long listing) flags parsed in RunCommand.

        Raises:
          CommandException: if any positional arguments were supplied
            (list takes none).
        """
        if self.args:
            raise CommandException(
                '%s %s received unexpected arguments.\n%s' %
                (self.command_name, self.action_subcommand, _LIST_SYNOPSIS))

        gsutil_api = GetCloudApiInstance(self, thread_state=thread_state)

        response = gsutil_api.ListHmacKeys(self.project_id,
                                           self.service_account_email,
                                           self.show_all,
                                           provider='gs')

        short_list_format = '%s\t%-12s %s'
        if self.long_list:
            for item in response:
                print(_KeyMetadataOutput(item))
                print()
        else:
            for item in response:
                print(short_list_format %
                      (item.accessId, item.state, item.serviceAccountEmail))

    def _UpdateHmacKey(self, thread_state=None):
        """Update an HMAC key's state.

        Requires the -s state flag and an access ID argument; honors the
        optional -e etag flag for optimistic concurrency.

        Raises:
          CommandException: if -s is missing or its value is invalid.
          _AccessIdException: if no access ID argument was supplied.
        """
        if not self.state:
            raise CommandException(
                'A state flag must be supplied for %s %s\n%s' %
                (self.command_name, self.action_subcommand, _UPDATE_SYNOPSIS))
        elif self.state not in _VALID_UPDATE_STATES:
            raise CommandException('The state flag value must be one of %s' %
                                   ', '.join(_VALID_UPDATE_STATES))
        if self.args:
            access_id = self.args[0]
        else:
            raise _AccessIdException(self.command_name, self.action_subcommand,
                                     _UPDATE_SYNOPSIS)

        gsutil_api = GetCloudApiInstance(self, thread_state=thread_state)

        response = gsutil_api.UpdateHmacKey(self.project_id,
                                            access_id,
                                            self.state,
                                            self.etag,
                                            provider='gs')

        print(_KeyMetadataOutput(response))

    def RunCommand(self):
        """Command entry point for the hmac command.

        Pops the subcommand off self.args, parses per-subcommand flags,
        then dispatches to the matching handler.

        Returns:
          0 on success.

        Raises:
          CommandException: if the JSON API is unavailable or the
            subcommand is unknown.
        """

        if self.gsutil_api.GetApiSelector(provider='gs') != ApiSelector.JSON:
            raise CommandException(
                'The "hmac" command can only be used with the GCS JSON API')

        self.action_subcommand = self.args.pop(0)
        self.ParseSubOpts(check_args=True)
        # Commands with both suboptions and subcommands need to reparse for
        # suboptions, so we log again.
        LogCommandParams(sub_opts=self.sub_opts)

        # Defaults for flags that the per-subcommand handlers read.
        self.service_account_email = None
        self.state = None
        self.show_all = False
        self.long_list = False
        self.etag = None

        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-u':
                    self.service_account_email = a
                elif o == '-p':
                    # Project IDs are sent as header values when using gs and s3 XML APIs.
                    InsistAscii(
                        a, 'Invalid non-ASCII character found in project ID')
                    self.project_id = a
                elif o == '-s':
                    self.state = a
                elif o == '-a':
                    self.show_all = True
                elif o == '-l':
                    self.long_list = True
                elif o == '-e':
                    self.etag = a

        # NOTE(review): assumes self.project_id was initialized by the base
        # Command class when -p is not given — confirm against command.py.
        if not self.project_id:
            self.project_id = PopulateProjectId(None)

        method_for_arg = {
            'create': self._CreateHmacKey,
            'delete': self._DeleteHmacKey,
            'get': self._GetHmacKey,
            'list': self._ListHmacKeys,
            'update': self._UpdateHmacKey,
        }
        if self.action_subcommand not in method_for_arg:
            raise CommandException(
                'Invalid subcommand "%s" for the %s command.\n'
                'See "gsutil help hmac".' %
                (self.action_subcommand, self.command_name))

        LogCommandParams(subcommands=[self.action_subcommand])
        method_for_arg[self.action_subcommand]()

        return 0
# Esempio n. 30
# 0
class CorsCommand(Command):
    """Implementation of gsutil cors command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'cors',
        command_name_aliases=['getcors', 'setcors'],
        usage_synopsis=_SYNOPSIS,
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'set': [
                CommandArgument.MakeNFileURLsArgument(1),
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
            ],
            'get': [CommandArgument.MakeNCloudBucketURLsArgument(1)]
        })
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='cors',
        help_name_aliases=['getcors', 'setcors', 'cross-origin'],
        help_type='command_help',
        help_one_line_summary=(
            'Set a CORS JSON document for one or more buckets'),
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'get': _get_help_text,
            'set': _set_help_text
        },
    )

    def _CalculateUrlsStartArg(self):
        """Returns the index in self.args where the bucket URLs begin.

        "cors set <file> <url>..." carries a JSON file argument before
        the URLs; "cors get <url>" does not.
        """
        if not self.args:
            self.RaiseWrongNumberOfArgumentsException()
        if self.args[0].lower() == 'set':
            return 2
        else:
            return 1

    def _SetCors(self):
        """Sets CORS configuration on a Google Cloud Storage bucket.

        Reads the JSON document named by the first argument and applies
        it to every bucket matched by the remaining URL arguments.

        Returns:
          0 on success.

        Raises:
          CommandException: if URLs span multiple providers or no URLs
            matched a bucket.
        """
        cors_arg = self.args[0]
        url_args = self.args[1:]
        # Disallow multi-provider 'cors set' requests.
        if not UrlsAreForSingleProvider(url_args):
            raise CommandException(
                '"%s" command spanning providers not allowed.' %
                self.command_name)

        # Open and read the file containing the JSON document. Use a
        # context manager so the file is closed even if read() raises.
        with open(cors_arg, 'r') as cors_file:
            cors_txt = cors_file.read()

        self.api = self.gsutil_api.GetApiSelector(
            StorageUrlFromString(url_args[0]).scheme)

        # Iterate over URLs, expanding wildcards and setting the CORS on each.
        some_matched = False
        for url_str in url_args:
            bucket_iter = self.GetBucketUrlIterFromArg(url_str,
                                                       bucket_fields=['id'])
            for blr in bucket_iter:
                url = blr.storage_url
                some_matched = True
                self.logger.info('Setting CORS on %s...', blr)
                if url.scheme == 's3':
                    # S3 takes the raw XML/JSON text through the XML API.
                    self.gsutil_api.XmlPassThroughSetCors(cors_txt,
                                                          url,
                                                          provider=url.scheme)
                else:
                    cors = CorsTranslation.JsonCorsToMessageEntries(cors_txt)
                    # An empty document means "remove the CORS config".
                    if not cors:
                        cors = REMOVE_CORS_CONFIG
                    bucket_metadata = apitools_messages.Bucket(cors=cors)
                    self.gsutil_api.PatchBucket(url.bucket_name,
                                                bucket_metadata,
                                                provider=url.scheme,
                                                fields=['id'])
        if not some_matched:
            raise CommandException('No URLs matched')
        return 0

    def _GetCors(self):
        """Gets CORS configuration for a Google Cloud Storage bucket.

        Writes the bucket's CORS document (or a "no CORS configuration"
        notice) to stdout.

        Returns:
          0 on success.
        """
        bucket_url, bucket_metadata = self.GetSingleBucketUrlFromArg(
            self.args[0], bucket_fields=['cors'])

        if bucket_url.scheme == 's3':
            sys.stdout.write(
                self.gsutil_api.XmlPassThroughGetCors(
                    bucket_url, provider=bucket_url.scheme))
        else:
            if bucket_metadata.cors:
                sys.stdout.write(
                    CorsTranslation.MessageEntriesToJson(bucket_metadata.cors))
            else:
                sys.stdout.write('%s has no CORS configuration.\n' %
                                 bucket_url)
        return 0

    def RunCommand(self):
        """Command entry point for the cors command.

        Returns:
          0 on success.

        Raises:
          CommandException: if the subcommand is not "get" or "set".
        """
        action_subcommand = self.args.pop(0)
        if action_subcommand == 'get':
            func = self._GetCors
        elif action_subcommand == 'set':
            func = self._SetCors
        else:
            raise CommandException(
                ('Invalid subcommand "%s" for the %s command.\n'
                 'See "gsutil help cors".') %
                (action_subcommand, self.command_name))
        return func()
# Esempio n. 31
# 0
class DefStorageClassCommand(Command):
    """Implementation of gsutil defstorageclass command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'defstorageclass',
        usage_synopsis=_SYNOPSIS,
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=2,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'set': [
                # FreeTextArgument allows for using storage class abbreviations.
                CommandArgument.MakeFreeTextArgument(),
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument(),
            ],
            'get': [
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument(),
            ],
        },
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='defstorageclass',
        help_name_aliases=['defaultstorageclass'],
        help_type='command_help',
        help_one_line_summary='Get or set the default storage class on buckets',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'get': _get_help_text,
            'set': _set_help_text,
        },
    )

    def _CheckIsGsUrl(self, url_str):
        """Raises CommandException unless url_str is a gs:// URL."""
        if url_str.startswith('gs://'):
            return
        raise CommandException(
            '"%s" does not support the URL "%s". Did you mean to use a gs:// '
            'URL?' % (self.command_name, url_str))

    def _CalculateUrlsStartArg(self):
        """Returns the index in self.args where the bucket URLs begin."""
        if not self.args:
            self.RaiseWrongNumberOfArgumentsException()
        # "set <class> <url>..." has one extra non-URL argument before
        # the URLs; "get <url>..." does not.
        return 2 if self.args[0].lower() == 'set' else 1

    def _SetDefStorageClass(self):
        """Sets the default storage class for a bucket."""
        # At this point, "set" has been popped off the front of self.args.
        storage_class = NormalizeStorageClass(self.args[0])
        bucket_url_strings = self.args[1:]
        if not bucket_url_strings:
            self.RaiseWrongNumberOfArgumentsException()

        matched_any = False
        for bucket_url_string in bucket_url_strings:
            self._CheckIsGsUrl(bucket_url_string)
            # Throws a CommandException if the argument is not a bucket.
            for blr in self.GetBucketUrlIterFromArg(bucket_url_string,
                                                    bucket_fields=['id']):
                matched_any = True
                self.logger.info(
                    'Setting default storage class to "%s" for bucket %s' %
                    (storage_class, blr.url_string.rstrip('/')))
                patch = apitools_messages.Bucket()
                patch.storageClass = storage_class
                self.gsutil_api.PatchBucket(blr.storage_url.bucket_name,
                                            patch,
                                            provider=blr.storage_url.scheme,
                                            fields=['id'])
        if not matched_any:
            raise CommandException(NO_URLS_MATCHED_TARGET %
                                   list(bucket_url_strings))

    def _GetDefStorageClass(self):
        """Gets the default storage class for a bucket."""
        # At this point, "get" has been popped off the front of self.args.
        matched_any = False
        for bucket_url_string in self.args:
            self._CheckIsGsUrl(bucket_url_string)
            for blr in self.GetBucketUrlIterFromArg(
                    bucket_url_string, bucket_fields=['storageClass']):
                matched_any = True
                print(
                    '%s: %s' %
                    (blr.url_string.rstrip('/'), blr.root_object.storageClass))
        if not matched_any:
            raise CommandException(NO_URLS_MATCHED_TARGET % list(self.args))

    def RunCommand(self):
        """Command entry point for the defstorageclass command."""
        action_subcommand = self.args.pop(0)
        subcommand_args = [action_subcommand]
        if action_subcommand == 'get':
            func = self._GetDefStorageClass
        elif action_subcommand == 'set':
            func = self._SetDefStorageClass
            # Log the normalized class name alongside the subcommand.
            subcommand_args.append(NormalizeStorageClass(self.args[0]))
        else:
            raise CommandException(
                ('Invalid subcommand "%s" for the %s command.\n'
                 'See "gsutil help %s".') %
                (action_subcommand, self.command_name, self.command_name))
        metrics.LogCommandParams(subcommands=subcommand_args)
        func()
        return 0
# Esempio n. 32
# 0
class KmsCommand(Command):
  """Implementation of the gsutil kms command."""

  # Command specification. See base class for documentation.
  command_spec = Command.CreateCommandSpec(
      'kms',
      usage_synopsis=_SYNOPSIS,
      min_args=1,
      max_args=NO_MAX,
      supported_sub_args='dk:p:w',
      file_url_ok=False,
      provider_url_ok=False,
      urls_start_arg=1,
      gs_api_support=[ApiSelector.JSON],
      gs_default_api=ApiSelector.JSON,
      argparse_arguments={
          'authorize': [],
          'encryption': [CommandArgument.MakeNCloudBucketURLsArgument(1)],
          'serviceaccount': [],
      })
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='kms',
      help_name_aliases=[],
      help_type='command_help',
      help_one_line_summary='Configure Cloud KMS encryption',
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={
          'authorize': _authorize_help_text,
          'encryption': _encryption_help_text,
          'serviceaccount': _serviceaccount_help_text
      },
  )

  def _GatherSubOptions(self, subcommand_name):
    """Parses the flags shared by the kms subcommands.

    Sets self.kms_key, self.clear_kms_key, self.warn_on_key_authorize_failure
    and self.project_id based on self.sub_opts.

    Args:
      subcommand_name: (str) Name of the subcommand being run.
          NOTE(review): currently unused — the "-w" validation below reads
          self.subcommand_name, which RunCommand sets to the same value
          before dispatching.

    Raises:
      CommandException: If "-w" is used with a subcommand other than
          "encryption", or without "-k".
    """
    self.CheckArguments()
    self.clear_kms_key = False
    self.kms_key = None
    self.warn_on_key_authorize_failure = False

    if self.sub_opts:
      for o, a in self.sub_opts:
        if o == '-p':
          self.project_id = a
        elif o == '-k':
          self.kms_key = a
          ValidateCMEK(self.kms_key)
        elif o == '-d':
          self.clear_kms_key = True
        elif o == '-w':
          self.warn_on_key_authorize_failure = True

    if self.warn_on_key_authorize_failure and (
        self.subcommand_name != 'encryption' or not self.kms_key):
      raise CommandException('\n'.join(
          textwrap.wrap(
              'The "-w" option should only be specified for the "encryption" '
              'subcommand and must be used with the "-k" option.')))
    # Determine the project (used in the serviceaccount and authorize
    # subcommands), either from the "-p" option's value or the default
    # specified in the user's Boto config file.
    if not self.project_id:
      self.project_id = PopulateProjectId(None)

  def _AuthorizeProject(self, project_id, kms_key):
    """Authorizes a project's service account to be used with a KMS key.

    Authorizes the Cloud Storage-owned service account for project_id to be
    used with kms_key.

    Args:
      project_id: (str) Project id string (not number).
      kms_key: (str) Fully qualified resource name for the KMS key.

    Returns:
      (str, bool) A 2-tuple consisting of:
      1) The email address for the service account associated with the project,
         which is authorized to encrypt/decrypt with the specified key.
      2) A bool value - True if we had to grant the service account permission
         to encrypt/decrypt with the given key; False if the required
         permission was already present.
    """
    # Request the Cloud Storage-owned service account for project_id, creating
    # it if it does not exist.
    service_account = self.gsutil_api.GetProjectServiceAccount(
        project_id, provider='gs').email_address

    kms_api = KmsApi(logger=self.logger)
    self.logger.debug('Getting IAM policy for %s', kms_key)
    try:
      policy = kms_api.GetKeyIamPolicy(kms_key)
      self.logger.debug('Current policy is %s', policy)

      # Check if the required binding is already present; if not, add it and
      # update the key's IAM policy.
      added_new_binding = False
      binding = Binding(role='roles/cloudkms.cryptoKeyEncrypterDecrypter',
                        members=['serviceAccount:%s' % service_account])
      if binding not in policy.bindings:
        policy.bindings.append(binding)
        kms_api.SetKeyIamPolicy(kms_key, policy)
        added_new_binding = True
      return (service_account, added_new_binding)
    except AccessDeniedException:
      # With "-w", a failed authorization is downgraded to a warning so that
      # "encryption -k" can still proceed (e.g. when the caller lacks KMS
      # admin rights but the binding already exists).
      if self.warn_on_key_authorize_failure:
        text_util.print_to_fd('\n'.join(
            textwrap.wrap(
                'Warning: Check that your Cloud Platform project\'s service '
                'account has the "cloudkms.cryptoKeyEncrypterDecrypter" role '
                'for the specified key. Without this role, you may not be '
                'able to encrypt or decrypt objects using the key which will '
                'prevent you from uploading or downloading objects.')))
        return (service_account, False)
      else:
        raise

  def _Authorize(self):
    """Handles the "authorize" subcommand; requires a key via "-k"."""
    self._GatherSubOptions('authorize')
    if not self.kms_key:
      raise CommandException('%s %s requires a key to be specified with -k' %
                             (self.command_name, self.subcommand_name))

    _, newly_authorized = self._AuthorizeProject(self.project_id, self.kms_key)
    if newly_authorized:
      print('Authorized project %s to encrypt and decrypt with key:\n%s' %
            (self.project_id, self.kms_key))
    else:
      print('Project %s was already authorized to encrypt and decrypt with '
            'key:\n%s.' % (self.project_id, self.kms_key))
    return 0

  def _EncryptionClearKey(self, bucket_metadata, bucket_url):
    """Clears the defaultKmsKeyName on a Cloud Storage bucket.

    Args:
      bucket_metadata: (apitools_messages.Bucket) Metadata for the given
          bucket.
      bucket_url: (gslib.storage_url.StorageUrl) StorageUrl of the given
          bucket.
    """
    # An empty EncryptionValue clears the default key on the bucket.
    bucket_metadata.encryption = apitools_messages.Bucket.EncryptionValue()
    print('Clearing default encryption key for %s...' %
          str(bucket_url).rstrip('/'))
    self.gsutil_api.PatchBucket(bucket_url.bucket_name,
                                bucket_metadata,
                                fields=['encryption'],
                                provider=bucket_url.scheme)

  def _EncryptionSetKey(self, bucket_metadata, bucket_url,
                        svc_acct_for_project_num):
    """Sets defaultKmsKeyName on a Cloud Storage bucket.

    Args:
      bucket_metadata: (apitools_messages.Bucket) Metadata for the given
          bucket.
      bucket_url: (gslib.storage_url.StorageUrl) StorageUrl of the given
          bucket.
      svc_acct_for_project_num: (Dict[int, str]) Mapping of project numbers to
          their corresponding service account.
    """
    bucket_project_number = bucket_metadata.projectNumber
    try:
      # newly_authorized will always be False if the project number is in our
      # cache dict, since we've already called _AuthorizeProject on it.
      service_account, newly_authorized = (
          svc_acct_for_project_num[bucket_project_number], False)
    except KeyError:
      service_account, newly_authorized = self._AuthorizeProject(
          bucket_project_number, self.kms_key)
      svc_acct_for_project_num[bucket_project_number] = service_account
    if newly_authorized:
      text_util.print_to_fd('Authorized service account %s to use key:\n%s' %
                            (service_account, self.kms_key))

    bucket_metadata.encryption = apitools_messages.Bucket.EncryptionValue(
        defaultKmsKeyName=self.kms_key)
    print('Setting default KMS key for bucket %s...' %
          str(bucket_url).rstrip('/'))
    self.gsutil_api.PatchBucket(bucket_url.bucket_name,
                                bucket_metadata,
                                fields=['encryption'],
                                provider=bucket_url.scheme)

  def _Encryption(self):
    """Handles the "encryption" subcommand (get, set "-k", or clear "-d")."""
    self._GatherSubOptions('encryption')
    # For each project, we should only make one API call to look up its
    # associated Cloud Storage-owned service account; subsequent lookups can
    # be pulled from this cache dict.
    svc_acct_for_project_num = {}

    def _EncryptionForBucket(blr):
      """Set, clear, or get the defaultKmsKeyName for a bucket."""
      bucket_url = blr.storage_url

      if bucket_url.scheme != 'gs':
        raise CommandException(
            'The %s command can only be used with gs:// bucket URLs.' %
            self.command_name)

      # Determine the project from the provided bucket.
      bucket_metadata = self.gsutil_api.GetBucket(
          bucket_url.bucket_name,
          fields=['encryption', 'projectNumber'],
          provider=bucket_url.scheme)

      # "-d" flag was specified, so clear the default KMS key and return.
      if self.clear_kms_key:
        self._EncryptionClearKey(bucket_metadata, bucket_url)
        return 0
      # "-k" flag was specified, so set the default KMS key and return.
      if self.kms_key:
        self._EncryptionSetKey(bucket_metadata, bucket_url,
                               svc_acct_for_project_num)
        return 0
      # Neither "-d" nor "-k" was specified, so emit the default KMS key and
      # return.
      bucket_url_string = str(bucket_url).rstrip('/')
      if (bucket_metadata.encryption and
          bucket_metadata.encryption.defaultKmsKeyName):
        print('Default encryption key for %s:\n%s' %
              (bucket_url_string, bucket_metadata.encryption.defaultKmsKeyName))
      else:
        print('Bucket %s has no default encryption key' % bucket_url_string)
      return 0

    # Iterate over bucket args, performing the specified encryption operation
    # for each.
    some_matched = False
    url_args = self.args
    if not url_args:
      self.RaiseWrongNumberOfArgumentsException()
    for url_str in url_args:
      # Throws a CommandException if the argument is not a bucket.
      bucket_iter = self.GetBucketUrlIterFromArg(url_str)
      for bucket_listing_ref in bucket_iter:
        some_matched = True
        _EncryptionForBucket(bucket_listing_ref)

    if not some_matched:
      raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))
    return 0

  def _ServiceAccount(self):
    """Handles the "serviceaccount" subcommand: prints the GCS service acct."""
    self.CheckArguments()
    if not self.args:
      self.args = ['gs://']
    if self.sub_opts:
      for o, a in self.sub_opts:
        if o == '-p':
          self.project_id = a

    if not self.project_id:
      self.project_id = PopulateProjectId(None)

    # Request the service account for that project; this might create the
    # service account if it doesn't already exist.
    self.logger.debug('Checking service account for project %s',
                      self.project_id)

    service_account = self.gsutil_api.GetProjectServiceAccount(
        self.project_id, provider='gs').email_address

    print(service_account)

    return 0

  def _RunSubCommand(self, func):
    """Reparses sub-options, logs them, and invokes the given subcommand."""
    try:
      self.sub_opts, self.args = getopt.getopt(
          self.args, self.command_spec.supported_sub_args)
      # Commands with both suboptions and subcommands need to reparse for
      # suboptions, so we log again.
      metrics.LogCommandParams(sub_opts=self.sub_opts)
      return func(self)
    except getopt.GetoptError:
      self.RaiseInvalidArgumentException()

  def RunCommand(self):
    """Command entry point for the kms command."""
    # NOTE: an earlier, duplicate definition of RunCommand (dead code that was
    # silently shadowed by this one) has been removed.
    # If the only credential type the user supplies in their boto file is hmac,
    # GetApiSelector logic will force us to use the XML API. As the XML API
    # does not support all the operations needed for kms subcommands, fail
    # early.
    if self.gsutil_api.GetApiSelector(provider='gs') != ApiSelector.JSON:
      raise CommandException('\n'.join(
          textwrap.wrap(
              'The "%s" command can only be used with the GCS JSON API, which '
              'cannot use HMAC credentials. Please supply a credential '
              'type that is compatible with the JSON API (e.g. OAuth2) in your '
              'boto config file.' % self.command_name)))

    method_for_subcommand = {
        'authorize': KmsCommand._Authorize,
        'encryption': KmsCommand._Encryption,
        'serviceaccount': KmsCommand._ServiceAccount
    }
    self.subcommand_name = self.args.pop(0)
    if self.subcommand_name in method_for_subcommand:
      metrics.LogCommandParams(subcommands=[self.subcommand_name])
      return self._RunSubCommand(method_for_subcommand[self.subcommand_name])
    else:
      raise CommandException('Invalid subcommand "%s" for the %s command.' %
                             (self.subcommand_name, self.command_name))
# Esempio n. 33 ("Example no. 33" — scraper artifact, not part of the code)
# 0
class NotificationCommand(Command):
    """Implementation of gsutil notification command."""

    # Notification names might look like one of these:
    #  canonical form:  projects/_/buckets/bucket/notificationConfigs/3
    #  JSON API form:   b/bucket/notificationConfigs/5
    # Either of the above might start with a / if a user is copying & pasting.
    def _GetNotificationPathRegex(self):
        """Lazily compiles and returns the notification-path regex."""
        if not NotificationCommand._notification_path_regex:
            NotificationCommand._notification_path_regex = re.compile(
                ('/?(projects/[^/]+/)?b(uckets)?/(?P<bucket>[^/]+)/'
                 'notificationConfigs/(?P<notification>[0-9]+)'))
        return NotificationCommand._notification_path_regex

    # Class-level cache for the compiled regex above.
    _notification_path_regex = None

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'notification',
        command_name_aliases=[
            'notify', 'notifyconfig', 'notifications', 'notif'
        ],
        usage_synopsis=_SYNOPSIS,
        min_args=2,
        max_args=NO_MAX,
        # Fixed: the option string previously listed 't:' twice; getopt
        # tolerates the duplicate, so removing it does not change parsing.
        supported_sub_args='i:t:m:o:f:e:p:s',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'watchbucket': [
                CommandArgument.MakeFreeTextArgument(),
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
            ],
            'stopchannel': [],
            'list': [CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()],
            'delete': [
                # Takes a list of one of the following:
                #   notification: projects/_/buckets/bla/notificationConfigs/5,
                #   bucket: gs://foobar
                CommandArgument.MakeZeroOrMoreCloudURLsArgument()
            ],
            'create': [
                CommandArgument.MakeFreeTextArgument(),  # Cloud Pub/Sub topic
                CommandArgument.MakeNCloudBucketURLsArgument(1)
            ]
        })
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='notification',
        help_name_aliases=['watchbucket', 'stopchannel', 'notifyconfig'],
        help_type='command_help',
        help_one_line_summary='Configure object change notification',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'create': _create_help_text,
            'list': _list_help_text,
            'delete': _delete_help_text,
            'watchbucket': _watchbucket_help_text,
            'stopchannel': _stopchannel_help_text
        },
    )

    def _WatchBucket(self):
        """Creates a watch on a bucket given in self.args.

        Returns:
          0 on success.

        Raises:
          CommandException: If the application URL is not https://, or the
              target is not a gs:// bucket URL.
        """
        self.CheckArguments()
        identifier = None
        client_token = None
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-i':
                    identifier = a
                if o == '-t':
                    client_token = a

        # Generate a channel id if the user didn't supply one with -i.
        identifier = identifier or str(uuid.uuid4())
        watch_url = self.args[0]
        bucket_arg = self.args[-1]

        if not watch_url.lower().startswith('https://'):
            raise CommandException(
                'The application URL must be an https:// URL.')

        bucket_url = StorageUrlFromString(bucket_arg)
        # This check subsumes the plain IsBucket() test, so the previously
        # duplicated (and unreachable) "URL must name a bucket" check has been
        # removed.
        if not (bucket_url.IsBucket() and bucket_url.scheme == 'gs'):
            raise CommandException(
                'The %s command can only be used with gs:// bucket URLs.' %
                self.command_name)

        self.logger.info('Watching bucket %s with application URL %s ...',
                         bucket_url, watch_url)

        try:
            channel = self.gsutil_api.WatchBucket(bucket_url.bucket_name,
                                                  watch_url,
                                                  identifier,
                                                  token=client_token,
                                                  provider=bucket_url.scheme)
        # "except X as e" parses on both Python 2.6+ and Python 3, unlike the
        # old comma form.
        except AccessDeniedException as e:
            self.logger.warning(
                NOTIFICATION_AUTHORIZATION_FAILED_MESSAGE.format(
                    watch_error=str(e), watch_url=watch_url))
            raise

        channel_id = channel.id
        resource_id = channel.resourceId
        client_token = channel.token
        self.logger.info('Successfully created watch notification channel.')
        self.logger.info('Watch channel identifier: %s', channel_id)
        self.logger.info('Canonicalized resource identifier: %s', resource_id)
        self.logger.info('Client state token: %s', client_token)

        return 0
# Esempio n. 34 ("Example no. 34" — scraper artifact, not part of the code)
# 0
class LsCommand(Command):
    """Implementation of gsutil ls command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'ls',
        command_name_aliases=['dir', 'list'],
        usage_synopsis=_SYNOPSIS,
        min_args=0,
        max_args=NO_MAX,
        supported_sub_args='aeblLhp:rR',
        file_url_ok=False,
        provider_url_ok=True,
        urls_start_arg=0,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments=[CommandArgument.MakeZeroOrMoreCloudURLsArgument()])
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='ls',
        help_name_aliases=['dir', 'list'],
        help_type='command_help',
        help_one_line_summary='List providers, buckets, or objects',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={},
    )

    def _PrintBucketInfo(self, bucket_blr, listing_style):
        """Prints listing info for the given bucket.

        Args:
          bucket_blr: BucketListingReference for the bucket being listed.
          listing_style: ListingStyle enum describing type of output desired.

        Returns:
          None. SHORT/LONG styles print only the bucket URL; LONG_LONG prints
          the bucket's full configuration summary.
        """
        if (listing_style == ListingStyle.SHORT
                or listing_style == ListingStyle.LONG):
            # Single-argument print() parses identically under Python 2 and 3,
            # and matches the print-function style used elsewhere in this file.
            print(bucket_blr)
            return
        # listing_style == ListingStyle.LONG_LONG:
        # We're guaranteed by the caller that the root object is populated.
        bucket = bucket_blr.root_object
        location_constraint = bucket.location
        storage_class = bucket.storageClass
        fields = {
            'bucket': bucket_blr.url_string,
            'storage_class': storage_class,
            'location_constraint': location_constraint,
            'acl': AclTranslation.JsonFromMessage(bucket.acl),
            'default_acl':
            AclTranslation.JsonFromMessage(bucket.defaultObjectAcl)
        }

        fields['versioning'] = bucket.versioning and bucket.versioning.enabled
        fields['website_config'] = 'Present' if bucket.website else 'None'
        fields['logging_config'] = 'Present' if bucket.logging else 'None'
        fields['cors_config'] = 'Present' if bucket.cors else 'None'
        fields['lifecycle_config'] = 'Present' if bucket.lifecycle else 'None'

        # For field values that are multiline, add indenting to make it look
        # prettier.
        for key in fields:
            previous_value = fields[key]
            if (not isinstance(previous_value, basestring)
                    or '\n' not in previous_value):
                continue
            new_value = previous_value.replace('\n', '\n\t  ')
            # Start multiline values on a new line if they aren't already.
            if not new_value.startswith('\n'):
                new_value = '\n\t  ' + new_value
            fields[key] = new_value

        print(
            '{bucket} :\n'
            '\tStorage class:\t\t\t{storage_class}\n'
            '\tLocation constraint:\t\t{location_constraint}\n'
            '\tVersioning enabled:\t\t{versioning}\n'
            '\tLogging configuration:\t\t{logging_config}\n'
            '\tWebsite configuration:\t\t{website_config}\n'
            '\tCORS configuration: \t\t{cors_config}\n'
            '\tLifecycle configuration:\t{lifecycle_config}\n'
            '\tACL:\t\t\t\t{acl}\n'
            '\tDefault ACL:\t\t\t{default_acl}'.format(**fields))
        if bucket_blr.storage_url.scheme == 's3':
            print(
                'Note: this is an S3 bucket so configuration values may be '
                'blank. To retrieve bucket configuration values, use '
                'individual configuration commands such as gsutil acl get '
                '<bucket>.')

    def _PrintLongListing(self, bucket_listing_ref):
        """Prints an object with ListingStyle.LONG.

        Args:
          bucket_listing_ref: BucketListingReference for the object to print.

        Returns:
          (num_objs, num_bytes) tuple counting this entry toward the totals
          (0, 0 for S3 delete markers).
        """
        obj = bucket_listing_ref.root_object
        url_str = bucket_listing_ref.url_string
        if (obj.metadata and S3_DELETE_MARKER_GUID
                in obj.metadata.additionalProperties):
            size_string = '0'
            num_bytes = 0
            num_objs = 0
            url_str += '<DeleteMarker>'
        else:
            size_string = (MakeHumanReadable(obj.size)
                           if self.human_readable else str(obj.size))
            num_bytes = obj.size
            num_objs = 1

        timestamp = JSON_TIMESTAMP_RE.sub(
            r'\1T\2Z',
            str(obj.updated).decode(UTF8).encode('ascii'))
        printstr = '%(size)10s  %(timestamp)s  %(url)s'
        encoded_etag = None
        encoded_metagen = None
        if self.all_versions:
            printstr += '  metageneration=%(metageneration)s'
            encoded_metagen = str(obj.metageneration).encode(UTF8)
        if self.include_etag:
            printstr += '  etag=%(etag)s'
            encoded_etag = obj.etag.encode(UTF8)
        format_args = {
            'size': size_string,
            'timestamp': timestamp,
            'url': url_str.encode(UTF8),
            'metageneration': encoded_metagen,
            'etag': encoded_etag
        }
        # Was a Python 2 print statement; parenthesized for consistency with
        # the print-function style used elsewhere in this file.
        print(printstr % format_args)
        return (num_objs, num_bytes)

    def RunCommand(self):
        """Command entry point for the ls command."""
        got_nomatch_errors = False
        got_bucket_nomatch_errors = False
        listing_style = ListingStyle.SHORT
        get_bucket_info = False
        self.recursion_requested = False
        self.all_versions = False
        self.include_etag = False
        self.human_readable = False
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-a':
                    self.all_versions = True
                elif o == '-e':
                    self.include_etag = True
                elif o == '-b':
                    get_bucket_info = True
                elif o == '-h':
                    self.human_readable = True
                elif o == '-l':
                    listing_style = ListingStyle.LONG
                elif o == '-L':
                    listing_style = ListingStyle.LONG_LONG
                elif o == '-p':
                    self.project_id = a
                elif o == '-r' or o == '-R':
                    self.recursion_requested = True

        if not self.args:
            # default to listing all gs buckets
            self.args = ['gs://']

        total_objs = 0
        total_bytes = 0

        def MaybePrintBucketHeader(blr):
            # Only emit the per-bucket header when listing multiple URLs.
            if len(self.args) > 1:
                print('%s:' % blr.url_string.encode(UTF8))

        print_bucket_header = MaybePrintBucketHeader

        for url_str in self.args:
            storage_url = StorageUrlFromString(url_str)
            if storage_url.IsFileUrl():
                raise CommandException('Only cloud URLs are supported for %s' %
                                       self.command_name)
            bucket_fields = None
            if (listing_style == ListingStyle.SHORT
                    or listing_style == ListingStyle.LONG):
                bucket_fields = ['id']
            elif listing_style == ListingStyle.LONG_LONG:
                bucket_fields = [
                    'location', 'storageClass', 'versioning', 'acl',
                    'defaultObjectAcl', 'website', 'logging', 'cors',
                    'lifecycle'
                ]
            if storage_url.IsProvider():
                # Provider URL: use bucket wildcard to list buckets.
                for blr in self.WildcardIterator(
                        '%s://*' % storage_url.scheme).IterBuckets(
                            bucket_fields=bucket_fields):
                    self._PrintBucketInfo(blr, listing_style)
            elif storage_url.IsBucket() and get_bucket_info:
                # ls -b bucket listing request: List info about bucket(s).
                total_buckets = 0
                for blr in self.WildcardIterator(url_str).IterBuckets(
                        bucket_fields=bucket_fields):
                    if not ContainsWildcard(url_str) and not blr.root_object:
                        # Iterator does not make an HTTP call for non-wildcarded
                        # listings with fields=='id'. Ensure the bucket exists by calling
                        # GetBucket.
                        self.gsutil_api.GetBucket(blr.storage_url.bucket_name,
                                                  fields=['id'],
                                                  provider=storage_url.scheme)
                    self._PrintBucketInfo(blr, listing_style)
                    total_buckets += 1
                if not ContainsWildcard(url_str) and not total_buckets:
                    got_bucket_nomatch_errors = True
            else:
                # URL names a bucket, object, or object subdir ->
                # list matching object(s) / subdirs.
                def _PrintPrefixLong(blr):
                    print('%-33s%s' % ('', blr.url_string.encode(UTF8)))

                if listing_style == ListingStyle.SHORT:
                    # ls helper by default readies us for a short listing.
                    ls_helper = LsHelper(
                        self.WildcardIterator,
                        self.logger,
                        all_versions=self.all_versions,
                        print_bucket_header_func=print_bucket_header,
                        should_recurse=self.recursion_requested)
                elif listing_style == ListingStyle.LONG:
                    bucket_listing_fields = ['name', 'updated', 'size']
                    if self.all_versions:
                        bucket_listing_fields.extend(
                            ['generation', 'metageneration'])
                    if self.include_etag:
                        bucket_listing_fields.append('etag')

                    ls_helper = LsHelper(
                        self.WildcardIterator,
                        self.logger,
                        print_object_func=self._PrintLongListing,
                        print_dir_func=_PrintPrefixLong,
                        print_bucket_header_func=print_bucket_header,
                        all_versions=self.all_versions,
                        should_recurse=self.recursion_requested,
                        fields=bucket_listing_fields)

                elif listing_style == ListingStyle.LONG_LONG:
                    # List all fields
                    bucket_listing_fields = None
                    ls_helper = LsHelper(
                        self.WildcardIterator,
                        self.logger,
                        print_object_func=PrintFullInfoAboutObject,
                        print_dir_func=_PrintPrefixLong,
                        print_bucket_header_func=print_bucket_header,
                        all_versions=self.all_versions,
                        should_recurse=self.recursion_requested,
                        fields=bucket_listing_fields)
                else:
                    raise CommandException('Unknown listing style: %s' %
                                           listing_style)

                exp_dirs, exp_objs, exp_bytes = ls_helper.ExpandUrlAndPrint(
                    storage_url)
                if storage_url.IsObject() and exp_objs == 0 and exp_dirs == 0:
                    got_nomatch_errors = True
                total_bytes += exp_bytes
                total_objs += exp_objs

        if total_objs and listing_style != ListingStyle.SHORT:
            print('TOTAL: %d objects, %d bytes (%s)' %
                  (total_objs, total_bytes,
                   MakeHumanReadable(float(total_bytes))))
        if got_nomatch_errors:
            raise CommandException('One or more URLs matched no objects.')
        if got_bucket_nomatch_errors:
            raise NotFoundException(
                'One or more bucket URLs matched no buckets.')

        return 0
# Esempio n. 35 ("Example no. 35" — scraper artifact, not part of the code)
# 0
class StatCommand(Command):
    """Implementation of gsutil stat command.

    Prints selected metadata (size, hashes, content type, generation, etc.)
    for each object URL argument. Exits non-zero if any argument matched no
    objects.
    """

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'stat',
        command_name_aliases=[],
        usage_synopsis=_SYNOPSIS,
        min_args=1,
        max_args=NO_MAX,
        supported_sub_args='',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=0,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments=[CommandArgument.MakeZeroOrMoreCloudURLsArgument()])
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='stat',
        help_name_aliases=[],
        help_type='command_help',
        help_one_line_summary='Display object status',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={},
    )

    def RunCommand(self):
        """Command entry point for stat command.

        Returns:
            0 if every URL argument matched at least one object; 1 if any
            argument matched nothing.

        Raises:
            CommandException: If an argument is not an object URL.
            InvalidUrlError: Propagated unchanged from URL expansion/lookup.
        """
        # Restrict the metadata fields we request so that listings and
        # GetObjectMetadata calls return only what gets printed.
        stat_fields = [
            'updated', 'cacheControl', 'contentDisposition', 'contentEncoding',
            'contentLanguage', 'size', 'contentType', 'componentCount',
            'metadata', 'crc32c', 'md5Hash', 'etag', 'generation',
            'metageneration'
        ]
        found_nonmatching_arg = False
        for url_str in self.args:
            arg_matches = 0
            url = StorageUrlFromString(url_str)
            if not url.IsObject():
                raise CommandException(
                    'The stat command only works with object URLs')
            try:
                if ContainsWildcard(url_str):
                    blr_iter = self.WildcardIterator(url_str).IterObjects(
                        bucket_listing_fields=stat_fields)
                else:
                    # Non-wildcard URL: fetch the single object's metadata
                    # directly instead of issuing a listing request.
                    single_obj = self.gsutil_api.GetObjectMetadata(
                        url.bucket_name,
                        url.object_name,
                        generation=url.generation,
                        provider=url.scheme,
                        fields=stat_fields)
                    blr_iter = [
                        BucketListingObject(url, root_object=single_obj)
                    ]
                for blr in blr_iter:
                    if blr.IsObject():
                        arg_matches += 1
                        # Only do the (potentially expensive) formatting when
                        # INFO-level output would actually be shown.
                        if logging.getLogger().isEnabledFor(logging.INFO):
                            PrintFullInfoAboutObject(blr, incl_acl=False)
            except AccessDeniedException:
                # Best-effort: report the inaccessible URL and keep going.
                # Fixed: was a Python 2 print statement (syntax error on
                # Python 3); the rest of this file uses the print function.
                print('You aren\'t authorized to read %s - skipping' % url_str)
            except InvalidUrlError:
                raise
            except NotFoundException:
                # A missing object is reported below via arg_matches == 0.
                pass
            if not arg_matches:
                if logging.getLogger().isEnabledFor(logging.INFO):
                    print('No URLs matched %s' % url_str)
                found_nonmatching_arg = True
        if found_nonmatching_arg:
            return 1
        return 0