class FakeCommandWithCompleters(Command):
  """Command with various completer types."""

  command_spec = Command.CreateCommandSpec(
      'fake2',
      argparse_arguments=[
          CommandArgument.MakeZeroOrMoreCloudURLsArgument(),
          CommandArgument.MakeZeroOrMoreFileURLsArgument(),
          CommandArgument.MakeZeroOrMoreCloudOrFileURLsArgument(),
          CommandArgument.MakeFreeTextArgument(),
          CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument(),
          CommandArgument.MakeFileURLOrCannedACLArgument(),
      ]
  )

  help_spec = Command.HelpSpec(
      help_name='fake2',
      help_name_aliases=[],
      help_type='command_help',
      help_one_line_summary='fake command for tests',
      help_text='fake command for tests',
      subcommand_help_text={}
  )

  def __init__(self):
    pass
Example #2
class CatCommand(Command):
    """Implementation of gsutil cat command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'cat',
        command_name_aliases=[],
        usage_synopsis=_SYNOPSIS,
        min_args=1,
        max_args=NO_MAX,
        supported_sub_args='hr:',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=0,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments=[CommandArgument.MakeZeroOrMoreCloudURLsArgument()])
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='cat',
        help_name_aliases=[],
        help_type='command_help',
        help_one_line_summary='Concatenate object content to stdout',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={},
    )

    # Command entry point.
    def RunCommand(self):
        """Command entry point for the cat command."""
        show_header = False
        request_range = None
        start_byte = 0
        end_byte = None
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-h':
                    show_header = True
                elif o == '-r':
                    request_range = a.strip()
                    range_matcher = re.compile(
                        '^(?P<start>[0-9]+)-(?P<end>[0-9]*)$|^(?P<endslice>-[0-9]+)$'
                    )
                    range_match = range_matcher.match(request_range)
                    if not range_match:
                        raise CommandException('Invalid range (%s)' %
                                               request_range)
                    if range_match.group('start'):
                        start_byte = int(range_match.group('start'))
                    if range_match.group('end'):
                        end_byte = int(range_match.group('end'))
                    if range_match.group('endslice'):
                        start_byte = int(range_match.group('endslice'))
                else:
                    self.RaiseInvalidArgumentException()

        return CatHelper(self).CatUrlStrings(self.args,
                                             show_header=show_header,
                                             start_byte=start_byte,
                                             end_byte=end_byte)
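
The -r flag accepts three range forms: 'start-end', 'start-' (to the end of the object), and '-N' (the last N bytes). A minimal standalone sketch of the same parsing, reusing the regex from the command above (parse_range is an illustrative name, not a gsutil function):

import re

# Same pattern as in CatCommand.RunCommand above.
RANGE_RE = re.compile(
    r'^(?P<start>[0-9]+)-(?P<end>[0-9]*)$|^(?P<endslice>-[0-9]+)$')


def parse_range(spec):
    """Returns (start_byte, end_byte); an end_byte of None means 'to the end'.

    A negative start_byte counts back from the end of the object, matching
    the endslice handling above.
    """
    match = RANGE_RE.match(spec.strip())
    if not match:
        raise ValueError('Invalid range (%s)' % spec)
    start_byte, end_byte = 0, None
    if match.group('start'):
        start_byte = int(match.group('start'))
    if match.group('end'):
        end_byte = int(match.group('end'))
    if match.group('endslice'):
        start_byte = int(match.group('endslice'))
    return start_byte, end_byte


assert parse_range('256-5939') == (256, 5939)
assert parse_range('256-') == (256, None)
assert parse_range('-5') == (-5, None)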
Example #3
class FakeCommand(Command):
  """Fake command class for overriding command instance state."""
  command_spec = Command.CreateCommandSpec(
      'fake',
      command_name_aliases=[],
  )
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='fake',
      help_name_aliases=[],
      help_type='command_help',
      help_one_line_summary='Something to take up space.',
      help_text='Something else to take up space.',
      subcommand_help_text={},
  )

  def __init__(self, do_parallel):
    self.bucket_storage_uri_class = BucketStorageUri
    support_map = {
        'gs': ['JSON'],
        's3': ['XML']
    }
    default_map = {
        'gs': 'JSON',
        's3': 'XML'
    }
    self.gsutil_api_map = cs_api_map.GsutilApiMapFactory.GetApiMap(
        cs_api_map.GsutilApiClassMapFactory, support_map, default_map)
    self.logger = CreateGsutilLogger('FakeCommand')
    self.parallel_operations = do_parallel
    self.failure_count = 0
    self.multiprocessing_is_available = (
        CheckMultiprocessingAvailableAndInit().is_available)
    self.debug = 0
Example #4
class MvCommand(Command):
    """Implementation of gsutil mv command.

     Note that there is no atomic rename operation - this command is simply
     a shorthand for 'cp' followed by 'rm'.
  """

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'mv',
        command_name_aliases=['move', 'ren', 'rename'],
        min_args=2,
        max_args=NO_MAX,
        # Flags for mv are passed through to cp.
        supported_sub_args=CP_SUB_ARGS,
        file_url_ok=True,
        provider_url_ok=False,
        urls_start_arg=0,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='mv',
        help_name_aliases=['move', 'rename'],
        help_type='command_help',
        help_one_line_summary='Move/rename objects and/or subdirectories',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={},
    )

    def RunCommand(self):
        """Command entry point for the mv command."""
        # Check each source arg up front, refusing to delete a bucket src URL
        # (force users to explicitly do that as a separate operation).
        for arg_to_check in self.args[0:-1]:
            url = StorageUrlFromString(arg_to_check)
            if url.IsCloudUrl() and (url.IsBucket() or url.IsProvider()):
                raise CommandException(
                    'You cannot move a source bucket using the mv '
                    'command. If you meant to move\nall objects in '
                    'the bucket, you can use a command like:\n'
                    '\tgsutil mv %s/* %s' % (arg_to_check, self.args[-1]))

        # Insert command-line opts in front of args so they'll be picked up by cp
        # and rm commands (e.g., for -p option). Use undocumented (internal
        # use-only) cp -M option, which causes each original object to be deleted
        # after successfully copying to its destination, and also causes naming
        # behavior consistent with Unix mv naming behavior (see comments in
        # ConstructDstUrl).
        unparsed_args = ['-M']
        if self.recursion_requested:
            unparsed_args.append('-R')
        unparsed_args.extend(self.unparsed_args)
        self.command_runner.RunNamedCommand('cp', unparsed_args, self.headers,
                                            self.debug,
                                            self.parallel_operations)

        return 0
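
Because mv is just cp -M plus recursion handling, the argument forwarding above can be shown in isolation. A small sketch under that reading (build_cp_args_for_mv is an illustrative helper, not part of gsutil):

def build_cp_args_for_mv(unparsed_args, recursion_requested):
    """Builds the argument list that mv forwards to cp.

    '-M' is cp's internal move mode: each source object is deleted after a
    successful copy, with Unix-mv-style naming.
    """
    forwarded = ['-M']
    if recursion_requested:
        forwarded.append('-R')
    forwarded.extend(unparsed_args)
    return forwarded


# 'gsutil mv -p gs://b/old gs://b/new' is forwarded to cp as:
assert build_cp_args_for_mv(['-p', 'gs://b/old', 'gs://b/new'], False) == [
    '-M', '-p', 'gs://b/old', 'gs://b/new']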
Example #5
class AclCommand(Command):
    """Implementation of gsutil acl command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'acl',
        command_name_aliases=['getacl', 'setacl', 'chacl'],
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='afRrg:u:d:',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='acl',
        help_name_aliases=['getacl', 'setacl', 'chmod', 'chacl'],
        help_type='command_help',
        help_one_line_summary='Get, set, or change bucket and/or object ACLs',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'get': _get_help_text,
            'set': _set_help_text,
            'ch': _ch_help_text
        },
    )

    def _CalculateUrlsStartArg(self):
        if not self.args:
            self._RaiseWrongNumberOfArgumentsException()
        if (self.args[0].lower() == 'set') or (self.command_alias_used
                                               == 'setacl'):
            return 1
        else:
            return 0

    def _SetAcl(self):
        """Parses options and sets ACLs on the specified buckets/objects."""
        self.continue_on_error = False
        if self.sub_opts:
            for o, unused_a in self.sub_opts:
                if o == '-a':
                    self.all_versions = True
                elif o == '-f':
                    self.continue_on_error = True
                elif o == '-r' or o == '-R':
                    self.recursion_requested = True
        try:
            self.SetAclCommandHelper(SetAclFuncWrapper, SetAclExceptionHandler)
        except AccessDeniedException:
            self._WarnServiceAccounts()
            raise
        if not self.everything_set_okay:
            raise CommandException('ACLs for some objects could not be set.')
Example #6
class ConfigCommand(Command):
    """Implementation of gsutil config command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'config',
        command_name_aliases=['cfg', 'conf', 'configure'],
        usage_synopsis=_SYNOPSIS,
        min_args=0,
        max_args=0,
        supported_sub_args='habefwrs:o:',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=0,
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='config',
        help_name_aliases=['cfg', 'conf', 'configure', 'proxy', 'aws', 's3'],
        help_type='command_help',
        help_one_line_summary=(
            'Obtain credentials and create configuration file'),
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={},
    )

    def _OpenConfigFile(self, file_path):
        """Creates and opens a configuration file for writing.

    The file is created with mode 0600, and attempts to open existing files will
    fail (the latter is important to prevent symlink attacks).

    It is the caller's responsibility to close the file.

    Args:
      file_path: Path of the file to be created.

    Returns:
      A writable file object for the opened file.

    Raises:
      CommandException: if an error occurred when opening the file (including
          when the file already exists).
    """
        flags = os.O_RDWR | os.O_CREAT | os.O_EXCL
        # Accommodate Windows; copied from python2.6/tempfile.py.
        if hasattr(os, 'O_NOINHERIT'):
            flags |= os.O_NOINHERIT
        try:
            fd = os.open(file_path, flags, 0o600)
        except (OSError, IOError) as e:
            raise CommandException('Failed to open %s for writing: %s' %
                                   (file_path, e))
        return os.fdopen(fd, 'w')
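
The O_CREAT | O_EXCL combination is what defeats symlink attacks: creation is atomic and fails if anything (including a planted symlink) already exists at the path. The same pattern as a self-contained sketch (open_private_file is an illustrative name):

import os


def open_private_file(path):
    """Creates path with mode 0o600; raises OSError if it already exists."""
    flags = os.O_RDWR | os.O_CREAT | os.O_EXCL
    if hasattr(os, 'O_NOINHERIT'):
        # Windows: keep the handle from being inherited by child processes.
        flags |= os.O_NOINHERIT
    fd = os.open(path, flags, 0o600)
    return os.fdopen(fd, 'w')


# Usage: a second call on the same path raises FileExistsError.
# open_private_file('/tmp/example.boto').close()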
Example #7
class FakeCommandWithInvalidCompleter(Command):
  """Command with an invalid completer on an argument."""

  command_spec = Command.CreateCommandSpec(
      'fake1', argparse_arguments=[CommandArgument('arg', completer='BAD')])

  help_spec = Command.HelpSpec(help_name='fake1',
                               help_name_aliases=[],
                               help_type='command_help',
                               help_one_line_summary='fake command for tests',
                               help_text='fake command for tests',
                               subcommand_help_text={})

  def __init__(self):
    pass
Example #8
class MbCommand(Command):
  """Implementation of gsutil mb command."""

  # Command specification. See base class for documentation.
  command_spec = Command.CreateCommandSpec(
      'mb',
      command_name_aliases=['makebucket', 'createbucket', 'md', 'mkdir'],
      usage_synopsis=_SYNOPSIS,
      min_args=1,
      max_args=NO_MAX,
      supported_sub_args='c:l:p:s:',
      file_url_ok=False,
      provider_url_ok=False,
      urls_start_arg=0,
      gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
      gs_default_api=ApiSelector.JSON,
      argparse_arguments=[
          CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
      ]
  )
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='mb',
      help_name_aliases=[
          'createbucket', 'makebucket', 'md', 'mkdir', 'location', 'dra',
          'dras', 'reduced_availability', 'durable_reduced_availability', 'rr',
          'reduced_redundancy', 'standard', 'storage class', 'nearline', 'nl'],
      help_type='command_help',
      help_one_line_summary='Make buckets',
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={},
  )

  def RunCommand(self):
    """Command entry point for the mb command."""
    location = None
    storage_class = None
    if self.sub_opts:
      for o, a in self.sub_opts:
        if o == '-l':
          location = a
        elif o == '-p':
          # Project IDs are sent as header values when using gs and s3 XML APIs.
          InsistAscii(a, 'Invalid non-ASCII character found in project ID')
          self.project_id = a
        elif o == '-c' or o == '-s':
          storage_class = NormalizeStorageClass(a)

    bucket_metadata = apitools_messages.Bucket(location=location,
                                               storageClass=storage_class)

    for bucket_url_str in self.args:
      bucket_url = StorageUrlFromString(bucket_url_str)
      if not bucket_url.IsBucket():
        raise CommandException('The mb command requires a URL that specifies a '
                               'bucket.\n"%s" is not valid.' % bucket_url)
      if (not BUCKET_NAME_RE.match(bucket_url.bucket_name) or
          TOO_LONG_DNS_NAME_COMP.search(bucket_url.bucket_name)):
        raise InvalidUrlError(
            'Invalid bucket name in URL "%s"' % bucket_url.bucket_name)

      self.logger.info('Creating %s...', bucket_url)
      # Pass storage_class param only if this is a GCS bucket. (In S3 the
      # storage class is specified on the key object.)
      try:
        self.gsutil_api.CreateBucket(
            bucket_url.bucket_name, project_id=self.project_id,
            metadata=bucket_metadata, provider=bucket_url.scheme)
      except BadRequestException as e:
        if (e.status == 400 and e.reason == 'DotfulBucketNameNotUnderTld' and
            bucket_url.scheme == 'gs'):
          bucket_name = bucket_url.bucket_name
          final_comp = bucket_name[bucket_name.rfind('.')+1:]
          raise CommandException('\n'.join(textwrap.wrap(
              'Buckets with "." in the name must be valid DNS names. The bucket'
              ' you are attempting to create (%s) is not a valid DNS name,'
              ' because the final component (%s) is not currently a valid part'
              ' of the top-level DNS tree.' % (bucket_name, final_comp))))
        else:
          raise

    return 0
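
BUCKET_NAME_RE and TOO_LONG_DNS_NAME_COMP are defined elsewhere in gslib; the patterns below are rough stand-ins to illustrate the two checks above (overall name shape, plus no dotted component longer than DNS allows), not gsutil's actual regexes:

import re

# Illustrative approximations only.
BUCKET_NAME_RE = re.compile(r'^[a-z0-9][a-z0-9._-]*[a-z0-9]$')
TOO_LONG_DNS_NAME_COMP = re.compile(r'[-_a-z0-9]{64,}')  # DNS labels max out at 63 chars


def bucket_name_is_valid(name):
    return (bool(BUCKET_NAME_RE.match(name)) and
            not TOO_LONG_DNS_NAME_COMP.search(name))


assert bucket_name_is_valid('my-bucket')
assert not bucket_name_is_valid('Invalid_Upper')   # uppercase rejected
assert not bucket_name_is_valid('a' * 64)          # component too long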
Example #9
class LoggingCommand(Command):
  """Implementation of gsutil logging command."""

  # Command specification. See base class for documentation.
  command_spec = Command.CreateCommandSpec(
      'logging',
      command_name_aliases=['disablelogging', 'enablelogging', 'getlogging'],
      usage_synopsis=_SYNOPSIS,
      min_args=2,
      max_args=NO_MAX,
      supported_sub_args='b:o:',
      file_url_ok=False,
      provider_url_ok=False,
      urls_start_arg=0,
      gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
      gs_default_api=ApiSelector.JSON,
      argparse_arguments=[
          CommandArgument('mode', choices=['on', 'off']),
          CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
      ]
  )
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='logging',
      help_name_aliases=['loggingconfig', 'logs', 'log', 'getlogging',
                         'enablelogging', 'disablelogging'],
      help_type='command_help',
      help_one_line_summary='Configure or retrieve logging on buckets',
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={'get': _get_help_text, 'set': _set_help_text},
  )

  def _Get(self):
    """Gets logging configuration for a bucket."""
    bucket_url, bucket_metadata = self.GetSingleBucketUrlFromArg(
        self.args[0], bucket_fields=['logging'])

    if bucket_url.scheme == 's3':
      sys.stdout.write(self.gsutil_api.XmlPassThroughGetLogging(
          bucket_url, provider=bucket_url.scheme))
    else:
      if (bucket_metadata.logging and bucket_metadata.logging.logBucket and
          bucket_metadata.logging.logObjectPrefix):
        sys.stdout.write(str(encoding.MessageToJson(
            bucket_metadata.logging)) + '\n')
      else:
        sys.stdout.write('%s has no logging configuration.\n' % bucket_url)
    return 0

  def _Enable(self):
    """Enables logging configuration for a bucket."""
    # Disallow multi-provider 'logging set on' calls, because the schemas
    # differ.
    if not UrlsAreForSingleProvider(self.args):
      raise CommandException('"logging set on" command spanning providers not '
                             'allowed.')
    target_bucket_url = None
    target_prefix = None
    for opt, opt_arg in self.sub_opts:
      if opt == '-b':
        target_bucket_url = StorageUrlFromString(opt_arg)
      if opt == '-o':
        target_prefix = opt_arg

    if not target_bucket_url:
      raise CommandException('"logging set on" requires \'-b <log_bucket>\' '
                             'option')
    if not target_bucket_url.IsBucket():
      raise CommandException('-b option must specify a bucket URL.')

    # Iterate over URLs, expanding wildcards and setting logging on each.
    some_matched = False
    for url_str in self.args:
      bucket_iter = self.GetBucketUrlIterFromArg(url_str, bucket_fields=['id'])
      for blr in bucket_iter:
        url = blr.storage_url
        some_matched = True
        self.logger.info('Enabling logging on %s...', blr)
        logging = apitools_messages.Bucket.LoggingValue(
            logBucket=target_bucket_url.bucket_name,
            logObjectPrefix=target_prefix or url.bucket_name)

        bucket_metadata = apitools_messages.Bucket(logging=logging)
        self.gsutil_api.PatchBucket(url.bucket_name, bucket_metadata,
                                    provider=url.scheme, fields=['id'])
    if not some_matched:
      raise CommandException(NO_URLS_MATCHED_TARGET % list(self.args))
    return 0

  def _Disable(self):
    """Disables logging configuration for a bucket."""
    # Iterate over URLs, expanding wildcards, and disabling logging on each.
    some_matched = False
    for url_str in self.args:
      bucket_iter = self.GetBucketUrlIterFromArg(url_str, bucket_fields=['id'])
      for blr in bucket_iter:
        url = blr.storage_url
        some_matched = True
        self.logger.info('Disabling logging on %s...', blr)
        logging = apitools_messages.Bucket.LoggingValue()

        bucket_metadata = apitools_messages.Bucket(logging=logging)
        self.gsutil_api.PatchBucket(url.bucket_name, bucket_metadata,
                                    provider=url.scheme, fields=['id'])
    if not some_matched:
      raise CommandException(NO_URLS_MATCHED_TARGET % list(self.args))
    return 0

  def RunCommand(self):
    """Command entry point for the logging command."""
    # Parse the subcommand and alias for the new logging command.
    action_subcommand = self.args.pop(0)
    if action_subcommand == 'get':
      func = self._Get
    elif action_subcommand == 'set':
      state_subcommand = self.args.pop(0)
      if not self.args:
        self.RaiseWrongNumberOfArgumentsException()
      if state_subcommand == 'on':
        func = self._Enable
      elif state_subcommand == 'off':
        func = self._Disable
      else:
        raise CommandException((
            'Invalid subcommand "%s" for the "%s %s" command.\n'
            'See "gsutil help logging".') % (
                state_subcommand, self.command_name, action_subcommand))
    else:
      raise CommandException(('Invalid subcommand "%s" for the %s command.\n'
                              'See "gsutil help logging".') %
                             (action_subcommand, self.command_name))
    self.ParseSubOpts(check_args=True)
    func()
    return 0
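
RunCommand above fans 'get' and 'set on|off' out to three handlers. The parse step in isolation (resolve_logging_action is an illustrative helper, not part of gsutil):

def resolve_logging_action(args):
    """Maps ['get', ...], ['set', 'on', ...], ['set', 'off', ...] to an action."""
    action = args.pop(0)
    if action == 'get':
        return 'get'
    if action == 'set':
        state = args.pop(0)
        if state == 'on':
            return 'enable'
        if state == 'off':
            return 'disable'
        raise ValueError('Invalid subcommand "%s" for "logging set".' % state)
    raise ValueError('Invalid subcommand "%s" for logging.' % action)


assert resolve_logging_action(['set', 'on', 'gs://b']) == 'enable'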
Example #10
class VersionCommand(Command):
    """Implementation of gsutil version command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'version',
        command_name_aliases=['ver'],
        usage_synopsis=_SYNOPSIS,
        min_args=0,
        max_args=0,
        supported_sub_args='l',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=0,
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='version',
        help_name_aliases=['ver'],
        help_type='command_help',
        help_one_line_summary='Print version info about gsutil',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={},
    )

    def RunCommand(self):
        """Command entry point for the version command."""
        long_form = False
        if self.sub_opts:
            for o, _ in self.sub_opts:
                if o == '-l':
                    long_form = True

        if GetConfigFilePaths():
            config_paths = ', '.join(GetConfigFilePaths())
        else:
            config_paths = 'no config found'

        shipped_checksum = gslib.CHECKSUM
        try:
            cur_checksum = self._ComputeCodeChecksum()
        except IOError:
            cur_checksum = 'MISSING FILES'
        if shipped_checksum == cur_checksum:
            checksum_ok_str = 'OK'
        else:
            checksum_ok_str = '!= %s' % shipped_checksum

        sys.stdout.write('gsutil version: %s\n' % gslib.VERSION)

        if long_form:

            long_form_output = (
                'checksum: {checksum} ({checksum_ok})\n'
                'boto version: {boto_version}\n'
                'python version: {python_version}\n'
                'OS: {os_version}\n'
                'multiprocessing available: {multiprocessing_available}\n'
                'using cloud sdk: {cloud_sdk}\n'
                'pass cloud sdk credentials to gsutil: {cloud_sdk_credentials}\n'
                'config path(s): {config_paths}\n'
                'gsutil path: {gsutil_path}\n'
                'compiled crcmod: {compiled_crcmod}\n'
                'installed via package manager: {is_package_install}\n'
                'editable install: {is_editable_install}\n')

            sys.stdout.write(
                long_form_output.format(
                    checksum=cur_checksum,
                    checksum_ok=checksum_ok_str,
                    boto_version=boto.__version__,
                    python_version=sys.version.replace('\n', ''),
                    os_version='%s %s' %
                    (platform.system(), platform.release()),
                    multiprocessing_available=(
                        CheckMultiprocessingAvailableAndInit().is_available),
                    cloud_sdk=(os.environ.get('CLOUDSDK_WRAPPER') == '1'),
                    cloud_sdk_credentials=(os.environ.get(
                        'CLOUDSDK_CORE_PASS_CREDENTIALS_TO_GSUTIL') == '1'),
                    config_paths=config_paths,
                    gsutil_path=gslib.GSUTIL_PATH,
                    compiled_crcmod=UsingCrcmodExtension(crcmod),
                    is_package_install=gslib.IS_PACKAGE_INSTALL,
                    is_editable_install=gslib.IS_EDITABLE_INSTALL,
                ))

        return 0

    def _ComputeCodeChecksum(self):
        """Computes a checksum of gsutil code.

    This checksum can be used to determine if users locally modified
    gsutil when requesting support. (It's fine for users to make local mods,
    but when users ask for support we ask them to run a stock version of
    gsutil so we can reduce possible variables.)

    Returns:
      MD5 checksum of gsutil code.
    """
        if gslib.IS_PACKAGE_INSTALL:
            return 'PACKAGED_GSUTIL_INSTALLS_DO_NOT_HAVE_CHECKSUMS'
        m = md5()
        # Checksum gsutil and all .py files under gslib directory.
        files_to_checksum = [gslib.GSUTIL_PATH]
        for root, _, files in os.walk(gslib.GSLIB_DIR):
            for filepath in files:
                if filepath.endswith('.py'):
                    files_to_checksum.append(os.path.join(root, filepath))
        # Sort to ensure consistent checksum build, no matter how os.walk
        # orders the list.
        for filepath in sorted(files_to_checksum):
            with open(filepath, 'r') as f:
                content = f.read()
            content = re.sub(r'(\r\n|\r|\n)', '\n', content)
            m.update(content.encode('utf-8'))
        return m.hexdigest()
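
Two details above make the checksum reproducible: newline normalization (so Windows and Unix checkouts agree) and sorting (so os.walk order is irrelevant). The same idea standalone, using hashlib directly (checksum_tree is an illustrative name):

import hashlib
import os
import re


def checksum_tree(root):
    """MD5 over all .py files under root, stable across OS and walk order."""
    digest = hashlib.md5()
    paths = []
    for dirpath, _, files in os.walk(root):
        paths.extend(
            os.path.join(dirpath, f) for f in files if f.endswith('.py'))
    for path in sorted(paths):  # fixed order -> stable digest
        with open(path, 'r') as f:
            content = re.sub(r'(\r\n|\r|\n)', '\n', f.read())
        digest.update(content.encode('utf-8'))  # md5 wants bytes
    return digest.hexdigest()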
Example #11
class LsCommand(Command):
    """Implementation of gsutil ls command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'ls',
        command_name_aliases=[
            'dir',
            'list',
        ],
        usage_synopsis=_SYNOPSIS,
        min_args=0,
        max_args=NO_MAX,
        supported_sub_args='aebdlLhp:rR',
        file_url_ok=False,
        provider_url_ok=True,
        urls_start_arg=0,
        gs_api_support=[
            ApiSelector.XML,
            ApiSelector.JSON,
        ],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments=[
            CommandArgument.MakeZeroOrMoreCloudURLsArgument(),
        ],
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='ls',
        help_name_aliases=[
            'dir',
            'list',
        ],
        help_type='command_help',
        help_one_line_summary='List providers, buckets, or objects',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={},
    )

    def _PrintBucketInfo(self, bucket_blr, listing_style):
        """Print listing info for given bucket.

    Args:
      bucket_blr: BucketListingReference for the bucket being listed
      listing_style: ListingStyle enum describing type of output desired.

    Returns:
      Tuple (total objects, total bytes) in the bucket.
    """
        if (listing_style == ListingStyle.SHORT
                or listing_style == ListingStyle.LONG):
            text_util.print_to_fd(bucket_blr)
            return
        # listing_style == ListingStyle.LONG_LONG:
        # We're guaranteed by the caller that the root object is populated.
        bucket = bucket_blr.root_object
        location_constraint = bucket.location
        storage_class = bucket.storageClass
        fields = {
            'bucket': bucket_blr.url_string,
            'storage_class': storage_class,
            'location_constraint': location_constraint,
            'acl': AclTranslation.JsonFromMessage(bucket.acl),
            'default_acl':
            AclTranslation.JsonFromMessage(bucket.defaultObjectAcl),
            'versioning': bucket.versioning and bucket.versioning.enabled,
            'website_config': 'Present' if bucket.website else 'None',
            'logging_config': 'Present' if bucket.logging else 'None',
            'cors_config': 'Present' if bucket.cors else 'None',
            'lifecycle_config': 'Present' if bucket.lifecycle else 'None',
            'requester_pays': bucket.billing and bucket.billing.requesterPays
        }
        if bucket.retentionPolicy:
            fields['retention_policy'] = 'Present'
        if bucket.labels:
            fields['labels'] = LabelTranslation.JsonFromMessage(
                bucket.labels, pretty_print=True)
        else:
            fields['labels'] = 'None'
        if bucket.encryption and bucket.encryption.defaultKmsKeyName:
            fields['default_kms_key'] = bucket.encryption.defaultKmsKeyName
        else:
            fields['default_kms_key'] = 'None'
        fields[
            'encryption_config'] = 'Present' if bucket.encryption else 'None'
        # Fields not available in all APIs (e.g. the XML API)
        if bucket.locationType:
            fields['location_type'] = bucket.locationType
        if bucket.metageneration:
            fields['metageneration'] = bucket.metageneration
        if bucket.timeCreated:
            fields['time_created'] = bucket.timeCreated.strftime(
                '%a, %d %b %Y %H:%M:%S GMT')
        if bucket.updated:
            fields['updated'] = bucket.updated.strftime(
                '%a, %d %b %Y %H:%M:%S GMT')
        if bucket.defaultEventBasedHold:
            fields['default_eventbased_hold'] = bucket.defaultEventBasedHold
        if bucket.iamConfiguration and bucket.iamConfiguration.bucketPolicyOnly:
            enabled = bucket.iamConfiguration.bucketPolicyOnly.enabled
            fields['bucket_policy_only_enabled'] = enabled

        # For field values that are multiline, add indenting to make it look
        # prettier.
        for key in fields:
            previous_value = fields[key]
            if (not isinstance(previous_value, six.string_types)
                    or '\n' not in previous_value):
                continue
            new_value = previous_value.replace('\n', '\n\t  ')
            # Start multiline values on a new line if they aren't already.
            if not new_value.startswith('\n'):
                new_value = '\n\t  ' + new_value
            fields[key] = new_value

        # Only display certain properties if the given API returned them (JSON API
        # returns many fields that the XML API does not).
        location_type_line = ''
        metageneration_line = ''
        time_created_line = ''
        time_updated_line = ''
        default_eventbased_hold_line = ''
        retention_policy_line = ''
        bucket_policy_only_enabled_line = ''
        if 'location_type' in fields:
            location_type_line = '\tLocation type:\t\t\t{location_type}\n'
        if 'metageneration' in fields:
            metageneration_line = '\tMetageneration:\t\t\t{metageneration}\n'
        if 'time_created' in fields:
            time_created_line = '\tTime created:\t\t\t{time_created}\n'
        if 'updated' in fields:
            time_updated_line = '\tTime updated:\t\t\t{updated}\n'
        if 'default_eventbased_hold' in fields:
            default_eventbased_hold_line = (
                '\tDefault Event-Based Hold:\t{default_eventbased_hold}\n')
        if 'retention_policy' in fields:
            retention_policy_line = '\tRetention Policy:\t\t{retention_policy}\n'
        if 'bucket_policy_only_enabled' in fields:
            bucket_policy_only_enabled_line = (
                '\tBucket Policy Only enabled:\t'
                '{bucket_policy_only_enabled}\n')

        text_util.print_to_fd(
            ('{bucket} :\n'
             '\tStorage class:\t\t\t{storage_class}\n' + location_type_line +
             '\tLocation constraint:\t\t{location_constraint}\n'
             '\tVersioning enabled:\t\t{versioning}\n'
             '\tLogging configuration:\t\t{logging_config}\n'
             '\tWebsite configuration:\t\t{website_config}\n'
             '\tCORS configuration: \t\t{cors_config}\n'
             '\tLifecycle configuration:\t{lifecycle_config}\n'
             '\tRequester Pays enabled:\t\t{requester_pays}\n' +
             retention_policy_line + default_eventbased_hold_line +
             '\tLabels:\t\t\t\t{labels}\n' +
             '\tDefault KMS key:\t\t{default_kms_key}\n' + time_created_line +
             time_updated_line + metageneration_line +
             bucket_policy_only_enabled_line + '\tACL:\t\t\t\t{acl}\n'
             '\tDefault ACL:\t\t\t{default_acl}').format(**fields))
        if bucket_blr.storage_url.scheme == 's3':
            text_util.print_to_fd(
                'Note: this is an S3 bucket so configuration values may be '
                'blank. To retrieve bucket configuration values, use '
                'individual configuration commands such as gsutil acl get '
                '<bucket>.')

    def _PrintLongListing(self, bucket_listing_ref):
        """Prints an object with ListingStyle.LONG."""
        obj = bucket_listing_ref.root_object
        url_str = bucket_listing_ref.url_string
        if (obj.metadata and S3_DELETE_MARKER_GUID
                in obj.metadata.additionalProperties):
            size_string = '0'
            num_bytes = 0
            num_objs = 0
            url_str += '<DeleteMarker>'
        else:
            size_string = (MakeHumanReadable(obj.size)
                           if self.human_readable else str(obj.size))
            num_bytes = obj.size
            num_objs = 1

        timestamp = JSON_TIMESTAMP_RE.sub(r'\1T\2Z', str(obj.timeCreated))
        printstr = '%(size)10s  %(timestamp)s  %(url)s'
        encoded_etag = None
        encoded_metagen = None
        if self.all_versions:
            printstr += '  metageneration=%(metageneration)s'
            encoded_metagen = str(obj.metageneration)
        if self.include_etag:
            printstr += '  etag=%(etag)s'
            encoded_etag = obj.etag
        format_args = {
            'size': size_string,
            'timestamp': timestamp,
            'url': url_str,
            'metageneration': encoded_metagen,
            'etag': encoded_etag
        }
        text_util.print_to_fd(printstr % format_args)
        return (num_objs, num_bytes)

    def RunCommand(self):
        """Command entry point for the ls command."""
        got_nomatch_errors = False
        got_bucket_nomatch_errors = False
        listing_style = ListingStyle.SHORT
        get_bucket_info = False
        self.recursion_requested = False
        self.all_versions = False
        self.include_etag = False
        self.human_readable = False
        self.list_subdir_contents = True
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-a':
                    self.all_versions = True
                elif o == '-e':
                    self.include_etag = True
                elif o == '-b':
                    get_bucket_info = True
                elif o == '-h':
                    self.human_readable = True
                elif o == '-l':
                    listing_style = ListingStyle.LONG
                elif o == '-L':
                    listing_style = ListingStyle.LONG_LONG
                elif o == '-p':
                    # Project IDs are sent as header values when using gs and s3 XML APIs.
                    InsistAscii(
                        a, 'Invalid non-ASCII character found in project ID')
                    self.project_id = a
                elif o == '-r' or o == '-R':
                    self.recursion_requested = True
                elif o == '-d':
                    self.list_subdir_contents = False

        if not self.args:
            # default to listing all gs buckets
            self.args = ['gs://']

        total_objs = 0
        total_bytes = 0

        def MaybePrintBucketHeader(blr):
            if len(self.args) > 1:
                text_util.print_to_fd('%s:' % six.ensure_text(blr.url_string))

        print_bucket_header = MaybePrintBucketHeader

        for url_str in self.args:
            storage_url = StorageUrlFromString(url_str)
            if storage_url.IsFileUrl():
                raise CommandException('Only cloud URLs are supported for %s' %
                                       self.command_name)
            bucket_fields = None
            if (listing_style == ListingStyle.SHORT
                    or listing_style == ListingStyle.LONG):
                bucket_fields = ['id']
            elif listing_style == ListingStyle.LONG_LONG:
                bucket_fields = [
                    'acl',
                    'billing',
                    'cors',
                    'defaultObjectAcl',
                    'encryption',
                    'iamConfiguration',
                    'labels',
                    'location',
                    'locationType',
                    'logging',
                    'lifecycle',
                    'metageneration',
                    'retentionPolicy',
                    'defaultEventBasedHold',
                    'storageClass',
                    'timeCreated',
                    'updated',
                    'versioning',
                    'website',
                ]
            if storage_url.IsProvider():
                # Provider URL: use bucket wildcard to list buckets.
                for blr in self.WildcardIterator(
                        '%s://*' % storage_url.scheme).IterBuckets(
                            bucket_fields=bucket_fields):
                    self._PrintBucketInfo(blr, listing_style)
            elif storage_url.IsBucket() and get_bucket_info:
                # ls -b bucket listing request: List info about bucket(s).
                total_buckets = 0
                for blr in self.WildcardIterator(url_str).IterBuckets(
                        bucket_fields=bucket_fields):
                    if not ContainsWildcard(url_str) and not blr.root_object:
                        # Iterator does not make an HTTP call for non-wildcarded
                        # listings with fields=='id'. Ensure the bucket exists by calling
                        # GetBucket.
                        self.gsutil_api.GetBucket(blr.storage_url.bucket_name,
                                                  fields=['id'],
                                                  provider=storage_url.scheme)
                    self._PrintBucketInfo(blr, listing_style)
                    total_buckets += 1
                if not ContainsWildcard(url_str) and not total_buckets:
                    got_bucket_nomatch_errors = True
            else:
                # URL names a bucket, object, or object subdir ->
                # list matching object(s) / subdirs.
                def _PrintPrefixLong(blr):
                    text_util.print_to_fd(
                        '%-33s%s' % ('', six.ensure_text(blr.url_string)))

                if listing_style == ListingStyle.SHORT:
                    # ls helper by default readies us for a short listing.
                    listing_helper = LsHelper(
                        self.WildcardIterator,
                        self.logger,
                        all_versions=self.all_versions,
                        print_bucket_header_func=print_bucket_header,
                        should_recurse=self.recursion_requested,
                        list_subdir_contents=self.list_subdir_contents)
                elif listing_style == ListingStyle.LONG:
                    bucket_listing_fields = [
                        'name',
                        'size',
                        'timeCreated',
                        'updated',
                    ]
                    if self.all_versions:
                        bucket_listing_fields.extend([
                            'generation',
                            'metageneration',
                        ])
                    if self.include_etag:
                        bucket_listing_fields.append('etag')

                    listing_helper = LsHelper(
                        self.WildcardIterator,
                        self.logger,
                        print_object_func=self._PrintLongListing,
                        print_dir_func=_PrintPrefixLong,
                        print_bucket_header_func=print_bucket_header,
                        all_versions=self.all_versions,
                        should_recurse=self.recursion_requested,
                        fields=bucket_listing_fields,
                        list_subdir_contents=self.list_subdir_contents)

                elif listing_style == ListingStyle.LONG_LONG:
                    # List all fields
                    bucket_listing_fields = (UNENCRYPTED_FULL_LISTING_FIELDS +
                                             ENCRYPTED_FIELDS)
                    listing_helper = LsHelper(
                        self.WildcardIterator,
                        self.logger,
                        print_object_func=PrintFullInfoAboutObject,
                        print_dir_func=_PrintPrefixLong,
                        print_bucket_header_func=print_bucket_header,
                        all_versions=self.all_versions,
                        should_recurse=self.recursion_requested,
                        fields=bucket_listing_fields,
                        list_subdir_contents=self.list_subdir_contents)
                else:
                    raise CommandException('Unknown listing style: %s' %
                                           listing_style)

                exp_dirs, exp_objs, exp_bytes = (
                    listing_helper.ExpandUrlAndPrint(storage_url))
                if storage_url.IsObject() and exp_objs == 0 and exp_dirs == 0:
                    got_nomatch_errors = True
                total_bytes += exp_bytes
                total_objs += exp_objs

        if total_objs and listing_style != ListingStyle.SHORT:
            text_util.print_to_fd('TOTAL: %d objects, %d bytes (%s)' %
                                  (total_objs, total_bytes,
                                   MakeHumanReadable(float(total_bytes))))
        if got_nomatch_errors:
            raise CommandException('One or more URLs matched no objects.')
        if got_bucket_nomatch_errors:
            raise NotFoundException(
                'One or more bucket URLs matched no buckets.')

        return 0
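
MakeHumanReadable comes from gslib's util module; a rough illustrative equivalent of what the -h flag and the TOTAL line rely on (the real formatting may differ):

def make_human_readable(num_bytes):
    """Approximates gslib's MakeHumanReadable; formatting is illustrative."""
    size = float(num_bytes)
    for unit in ('B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB'):
        if size < 1024 or unit == 'PiB':
            return '%.2f %s' % (size, unit)
        size /= 1024.0


assert make_human_readable(2048) == '2.00 KiB'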
Example #12
class HmacCommand(Command):
    """Implementation of gsutil hmac command."""
    command_spec = Command.CreateCommandSpec(
        'hmac',
        min_args=1,
        max_args=8,
        supported_sub_args='ae:lp:s:u:',
        file_url_ok=True,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        usage_synopsis=_SYNOPSIS,
        argparse_arguments={
            'create':
            [CommandArgument.MakeZeroOrMoreCloudOrFileURLsArgument()],
            'delete':
            [CommandArgument.MakeZeroOrMoreCloudOrFileURLsArgument()],
            'get': [CommandArgument.MakeZeroOrMoreCloudOrFileURLsArgument()],
            'list': [CommandArgument.MakeZeroOrMoreCloudOrFileURLsArgument()],
            'update':
            [CommandArgument.MakeZeroOrMoreCloudOrFileURLsArgument()],
        },
    )

    help_spec = Command.HelpSpec(
        help_name='hmac',
        help_name_aliases=[],
        help_type='command_help',
        help_one_line_summary=(
            'CRUD operations on service account HMAC keys.'),
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'create': _create_help_text,
            'delete': _delete_help_text,
            'get': _get_help_text,
            'list': _list_help_text,
            'update': _update_help_text,
        })

    def _CreateHmacKey(self, thread_state=None):
        """Creates HMAC key for a service account."""
        if self.args:
            self.service_account_email = self.args[0]
        else:
            err_msg = (
                '%s %s requires a service account to be specified as the '
                'last argument.\n%s')
            raise CommandException(
                err_msg %
                (self.command_name, self.action_subcommand, _CREATE_SYNOPSIS))

        gsutil_api = GetCloudApiInstance(self, thread_state=thread_state)

        response = gsutil_api.CreateHmacKey(self.project_id,
                                            self.service_account_email,
                                            provider='gs')

        print('%-12s %s' % ('Access ID:', response.metadata.accessId))
        print('%-12s %s' % ('Secret:', response.secret))

    def _DeleteHmacKey(self, thread_state=None):
        """Deletes an HMAC key."""
        if self.args:
            access_id = self.args[0]
        else:
            raise _AccessIdException(self.command_name, self.action_subcommand,
                                     _DELETE_SYNOPSIS)

        gsutil_api = GetCloudApiInstance(self, thread_state=thread_state)

        gsutil_api.DeleteHmacKey(self.project_id, access_id, provider='gs')

    def _GetHmacKey(self, thread_state=None):
        """Gets HMAC key from its Access Id."""
        if self.args:
            access_id = self.args[0]
        else:
            raise _AccessIdException(self.command_name, self.action_subcommand,
                                     _GET_SYNOPSIS)

        gsutil_api = GetCloudApiInstance(self, thread_state=thread_state)

        response = gsutil_api.GetHmacKey(self.project_id,
                                         access_id,
                                         provider='gs')

        print(_KeyMetadataOutput(response))

    def _ListHmacKeys(self, thread_state=None):
        """Lists HMAC keys for a project or service account."""
        if self.args:
            raise CommandException(
                '%s %s received unexpected arguments.\n%s' %
                (self.command_name, self.action_subcommand, _LIST_SYNOPSIS))

        gsutil_api = GetCloudApiInstance(self, thread_state=thread_state)

        response = gsutil_api.ListHmacKeys(self.project_id,
                                           self.service_account_email,
                                           self.show_all,
                                           provider='gs')

        short_list_format = '%s\t%-12s %s'
        if self.long_list:
            for item in response:
                print(_KeyMetadataOutput(item))
                print()
        else:
            for item in response:
                print(short_list_format %
                      (item.accessId, item.state, item.serviceAccountEmail))

    def _UpdateHmacKey(self, thread_state=None):
        """Update an HMAC key's state."""
        if not self.state:
            raise CommandException(
                'A state flag must be supplied for %s %s\n%s' %
                (self.command_name, self.action_subcommand, _UPDATE_SYNOPSIS))
        elif self.state not in _VALID_UPDATE_STATES:
            raise CommandException('The state flag value must be one of %s' %
                                   ', '.join(_VALID_UPDATE_STATES))
        if self.args:
            access_id = self.args[0]
        else:
            raise _AccessIdException(self.command_name, self.action_subcommand,
                                     _UPDATE_SYNOPSIS)

        gsutil_api = GetCloudApiInstance(self, thread_state=thread_state)

        response = gsutil_api.UpdateHmacKey(self.project_id,
                                            access_id,
                                            self.state,
                                            self.etag,
                                            provider='gs')

        print(_KeyMetadataOutput(response))

    def RunCommand(self):
        """Command entry point for the hmac command."""

        if self.gsutil_api.GetApiSelector(provider='gs') != ApiSelector.JSON:
            raise CommandException(
                'The "hmac" command can only be used with the GCS JSON API')

        self.action_subcommand = self.args.pop(0)
        self.ParseSubOpts(check_args=True)
        # Commands with both suboptions and subcommands need to reparse for
        # suboptions, so we log again.
        LogCommandParams(sub_opts=self.sub_opts)

        self.service_account_email = None
        self.state = None
        self.show_all = False
        self.long_list = False
        self.etag = None

        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-u':
                    self.service_account_email = a
                elif o == '-p':
                    # Project IDs are sent as header values when using gs and s3 XML APIs.
                    InsistAscii(
                        a, 'Invalid non-ASCII character found in project ID')
                    self.project_id = a
                elif o == '-s':
                    self.state = a
                elif o == '-a':
                    self.show_all = True
                elif o == '-l':
                    self.long_list = True
                elif o == '-e':
                    self.etag = a

        if not self.project_id:
            self.project_id = PopulateProjectId(None)

        method_for_arg = {
            'create': self._CreateHmacKey,
            'delete': self._DeleteHmacKey,
            'get': self._GetHmacKey,
            'list': self._ListHmacKeys,
            'update': self._UpdateHmacKey,
        }
        if self.action_subcommand not in method_for_arg:
            raise CommandException(
                'Invalid subcommand "%s" for the %s command.\n'
                'See "gsutil help hmac".' %
                (self.action_subcommand, self.command_name))

        LogCommandParams(subcommands=[self.action_subcommand])
        method_for_arg[self.action_subcommand]()

        return 0
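
method_for_arg above is a plain dispatch table: a membership check for validation, then a call. The pattern in isolation (dispatch is an illustrative helper, not part of gsutil):

def dispatch(subcommand, handlers):
    """Runs handlers[subcommand], mirroring method_for_arg above."""
    if subcommand not in handlers:
        raise ValueError('Invalid subcommand "%s".' % subcommand)
    return handlers[subcommand]()


# Usage:
# dispatch('list', {'list': list_keys, 'create': create_key})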
Example #13
class LabelCommand(Command):
    """Implementation of gsutil label command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'label',
        usage_synopsis=_SYNOPSIS,
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='l:d:',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'set': [
                CommandArgument.MakeNFileURLsArgument(1),
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument(),
            ],
            'get': [
                CommandArgument.MakeNCloudURLsArgument(1),
            ],
            'ch': [
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument(),
            ],
        },
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='label',
        help_name_aliases=[],
        help_type='command_help',
        help_one_line_summary=(
            'Get, set, or change the label configuration of a bucket.'),
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'get': _get_help_text,
            'set': _set_help_text,
            'ch': _ch_help_text,
        },
    )

    def _CalculateUrlsStartArg(self):
        if not self.args:
            self.RaiseWrongNumberOfArgumentsException()
        if self.args[0].lower() == 'set':
            return 2  # Filename comes before bucket arg(s).
        return 1

    def _SetLabel(self):
        """Parses options and sets labels on the specified buckets."""
        # At this point, "set" has been popped off the front of self.args.
        if len(self.args) < 2:
            self.RaiseWrongNumberOfArgumentsException()

        label_filename = self.args[0]
        if not os.path.isfile(label_filename):
            raise CommandException('Could not find the file "%s".' %
                                   label_filename)
        with codecs.open(label_filename, 'r', UTF8) as label_file:
            label_text = label_file.read()

        @Retry(PreconditionException, tries=3, timeout_secs=1)
        def _SetLabelForBucket(blr):
            url = blr.storage_url
            self.logger.info('Setting label configuration on %s...', blr)

            if url.scheme == 's3':  # Uses only XML.
                self.gsutil_api.XmlPassThroughSetTagging(label_text,
                                                         url,
                                                         provider=url.scheme)
            else:  # Must be a 'gs://' bucket.
                labels_message = None
                # When performing a read-modify-write cycle, include metageneration to
                # avoid race conditions (supported for GS buckets only).
                metageneration = None
                new_label_json = json.loads(label_text)
                if (self.gsutil_api.GetApiSelector(
                        url.scheme) == ApiSelector.JSON):
                    # Perform a read-modify-write so that we can specify which
                    # existing labels need to be deleted.
                    _, bucket_metadata = self.GetSingleBucketUrlFromArg(
                        url.url_string,
                        bucket_fields=['labels', 'metageneration'])
                    metageneration = bucket_metadata.metageneration
                    label_json = {}
                    if bucket_metadata.labels:
                        label_json = json.loads(
                            LabelTranslation.JsonFromMessage(
                                bucket_metadata.labels))
                    # Set all old keys' values to None; this will delete each key that
                    # is not included in the new set of labels.
                    merged_labels = dict(
                        (key, None) for key, _ in six.iteritems(label_json))
                    merged_labels.update(new_label_json)
                    labels_message = LabelTranslation.DictToMessage(
                        merged_labels)
                else:  # ApiSelector.XML
                    # No need to read-modify-write with the XML API.
                    labels_message = LabelTranslation.DictToMessage(
                        new_label_json)

                preconditions = Preconditions(meta_gen_match=metageneration)
                bucket_metadata = apitools_messages.Bucket(
                    labels=labels_message)
                self.gsutil_api.PatchBucket(url.bucket_name,
                                            bucket_metadata,
                                            preconditions=preconditions,
                                            provider=url.scheme,
                                            fields=['id'])

        some_matched = False
        url_args = self.args[1:]
        for url_str in url_args:
            # Throws a CommandException if the argument is not a bucket.
            bucket_iter = self.GetBucketUrlIterFromArg(url_str,
                                                       bucket_fields=['id'])
            for bucket_listing_ref in bucket_iter:
                some_matched = True
                _SetLabelForBucket(bucket_listing_ref)

        if not some_matched:
            raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))

    def _ChLabel(self):
        """Parses options and changes labels on the specified buckets."""
        self.label_changes = {}
        self.num_deletions = 0

        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-l':
                    label_split = a.split(':')
                    if len(label_split) != 2:
                        raise CommandException(
                            'Found incorrectly formatted option for "gsutil label ch": '
                            '"%s". To add a label, please use the form <key>:<value>.'
                            % a)
                    self.label_changes[label_split[0]] = label_split[1]
                elif o == '-d':
                    # Ensure only the key is supplied; stop if key:value was given.
                    val_split = a.split(':')
                    if len(val_split) != 1:
                        raise CommandException(
                            'Found incorrectly formatted option for "gsutil label ch": '
                            '"%s". To delete a label, provide only its key.' %
                            a)
                    self.label_changes[a] = None
                    self.num_deletions += 1
                else:
                    self.RaiseInvalidArgumentException()
        if not self.label_changes:
            raise CommandException(
                'Please specify at least one label change with the -l or -d flags.'
            )

        @Retry(PreconditionException, tries=3, timeout_secs=1)
        def _ChLabelForBucket(blr):
            url = blr.storage_url
            self.logger.info('Setting label configuration on %s...', blr)

            labels_message = None
            # When performing a read-modify-write cycle, include metageneration to
            # avoid race conditions (supported for GS buckets only).
            metageneration = None
            if (self.gsutil_api.GetApiSelector(
                    url.scheme) == ApiSelector.JSON):
                # The JSON API's PATCH semantics allow us to skip read-modify-write,
                # with the exception of one edge case - attempting to delete a
                # nonexistent label returns an error iff no labels previously existed.
                corrected_changes = self.label_changes
                if self.num_deletions:
                    (_, bucket_metadata) = self.GetSingleBucketUrlFromArg(
                        url.url_string,
                        bucket_fields=['labels', 'metageneration'])
                    if not bucket_metadata.labels:
                        metageneration = bucket_metadata.metageneration
                        # Remove each change that would try to delete a nonexistent key.
                        corrected_changes = dict(
                            (k, v)
                            for k, v in six.iteritems(self.label_changes) if v)
                labels_message = LabelTranslation.DictToMessage(
                    corrected_changes)
            else:  # ApiSelector.XML
                # Perform a read-modify-write cycle so that we can specify which
                # existing labels need to be deleted.
                (_, bucket_metadata) = self.GetSingleBucketUrlFromArg(
                    url.url_string, bucket_fields=['labels', 'metageneration'])
                metageneration = bucket_metadata.metageneration

                label_json = {}
                if bucket_metadata.labels:
                    label_json = json.loads(
                        LabelTranslation.JsonFromMessage(
                            bucket_metadata.labels))
                # Modify label_json such that all specified labels are added
                # (overwriting old labels if necessary) and all specified deletions
                # are removed from label_json if already present.
                for key, value in six.iteritems(self.label_changes):
                    if not value and key in label_json:
                        del label_json[key]
                    else:
                        label_json[key] = value
                labels_message = LabelTranslation.DictToMessage(label_json)

            preconditions = Preconditions(meta_gen_match=metageneration)
            bucket_metadata = apitools_messages.Bucket(labels=labels_message)
            self.gsutil_api.PatchBucket(url.bucket_name,
                                        bucket_metadata,
                                        preconditions=preconditions,
                                        provider=url.scheme,
                                        fields=['id'])

        some_matched = False
        url_args = self.args
        if not url_args:
            self.RaiseWrongNumberOfArgumentsException()
        for url_str in url_args:
            # Throws a CommandException if the argument is not a bucket.
            bucket_iter = self.GetBucketUrlIterFromArg(url_str)
            for bucket_listing_ref in bucket_iter:
                some_matched = True
                _ChLabelForBucket(bucket_listing_ref)

        if not some_matched:
            raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))

    def _GetAndPrintLabel(self, bucket_arg):
        """Gets and prints the labels for a cloud bucket."""
        bucket_url, bucket_metadata = self.GetSingleBucketUrlFromArg(
            bucket_arg, bucket_fields=['labels'])
        if bucket_url.scheme == 's3':
            print(self.gsutil_api.XmlPassThroughGetTagging(
                bucket_url, provider=bucket_url.scheme))
        else:
            if bucket_metadata.labels:
                print(LabelTranslation.JsonFromMessage(
                    bucket_metadata.labels, pretty_print=True))
            else:
                print('%s has no label configuration.' % bucket_url)

    def RunCommand(self):
        """Command entry point for the label command."""
        action_subcommand = self.args.pop(0)
        self.ParseSubOpts(check_args=True)

        # Commands with both suboptions and subcommands need to reparse for
        # suboptions, so we log again.
        metrics.LogCommandParams(sub_opts=self.sub_opts)
        if action_subcommand == 'get':
            metrics.LogCommandParams(subcommands=[action_subcommand])
            self._GetAndPrintLabel(self.args[0])
        elif action_subcommand == 'set':
            metrics.LogCommandParams(subcommands=[action_subcommand])
            self._SetLabel()
        elif action_subcommand == 'ch':
            metrics.LogCommandParams(subcommands=[action_subcommand])
            self._ChLabel()
        else:
            raise CommandException(
                'Invalid subcommand "%s" for the %s command.\nSee "gsutil help %s".'
                % (action_subcommand, self.command_name, self.command_name))
        return 0
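A note on the merge rule used by the "ch" handler above: label changes are
carried in a single dict where a value of None marks a deletion. A minimal,
self-contained sketch of that rule (merge_label_changes is an illustrative
name, not a gsutil helper):

def merge_label_changes(existing, changes):
    """Applies label changes; a value of None means 'delete this key'."""
    merged = dict(existing)
    for key, value in changes.items():
        if value is None:
            merged.pop(key, None)  # Deleting an absent key is a no-op here.
        else:
            merged[key] = value
    return merged

# Example: overwrite 'env', add 'team', delete 'tmp'.
print(merge_label_changes(
    {'env': 'dev', 'tmp': 'x'},
    {'env': 'prod', 'team': 'infra', 'tmp': None}))
# -> {'env': 'prod', 'team': 'infra'}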
Example #14
class UpdateCommand(Command):
    """Implementation of gsutil update command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'update',
        command_name_aliases=['refresh'],
        usage_synopsis=_SYNOPSIS,
        min_args=0,
        max_args=1,
        supported_sub_args='fn',
        file_url_ok=True,
        provider_url_ok=False,
        urls_start_arg=0,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='update',
        help_name_aliases=['refresh'],
        help_type='command_help',
        help_one_line_summary='Update to the latest gsutil release',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={},
    )

    def _DisallowUpdateIfDataInGsutilDir(self):
        """Disallows the update command if files not in the gsutil distro are found.

    This prevents users from losing data if they are in the habit of running
    gsutil from the gsutil directory and leaving data in that directory.

    This will also detect someone attempting to run gsutil update from a git
    repo, since the top-level directory will contain git files and dirs (like
    .git) that are not distributed with gsutil.

    Raises:
      CommandException: if files other than those distributed with gsutil found.
    """
        # The manifest includes recursive-includes of gslib. Directly add
        # those to the list here so we will skip them in the os.listdir() loop
        # without having to build deeper handling of the MANIFEST file. Also include
        # 'third_party', which isn't present in manifest but gets added to the
        # gsutil distro by the gsutil submodule configuration; and the MANIFEST.in
        # and CHANGES.md files.
        manifest_lines = ['gslib', 'third_party', 'MANIFEST.in', 'CHANGES.md']

        try:
            with open(os.path.join(gslib.GSUTIL_DIR, 'MANIFEST.in'),
                      'r') as fp:
                for line in fp:
                    if line.startswith('include '):
                        manifest_lines.append(line.split()[-1])
        except IOError:
            self.logger.warn(
                'MANIFEST.in not found in %s.\nSkipping user data '
                'check.\n', gslib.GSUTIL_DIR)
            return

        # Look just at top-level directory. We don't try to catch data dropped into
        # subdirs (like gslib) because that would require deeper parsing of
        # MANIFEST.in, and most users who drop data into the gsutil dir do so at the top
        # level directory.
        for filename in os.listdir(gslib.GSUTIL_DIR):
            if filename.endswith('.pyc'):
                # Ignore compiled code.
                continue
            if filename not in manifest_lines:
                raise CommandException('\n'.join(
                    textwrap.wrap(
                        'A file (%s) that is not distributed with gsutil was found in '
                        'the gsutil directory. The update command cannot run with user '
                        'data in the gsutil directory.' %
                        os.path.join(gslib.GSUTIL_DIR, filename))))

    def _ExplainIfSudoNeeded(self, tf, dirs_to_remove):
        """Explains what to do if sudo needed to update gsutil software.

    Happens if gsutil was previously installed by a different user (typically if
    someone originally installed in a shared file system location, using sudo).

    Args:
      tf: Opened TarFile.
      dirs_to_remove: List of directories to remove.

    Raises:
      CommandException: if errors encountered.
    """
        # If running under Windows or Cygwin we don't need (or have) sudo.
        if IS_CYGWIN or IS_WINDOWS:
            return

        user_id = os.getuid()
        if os.stat(gslib.GSUTIL_DIR).st_uid == user_id:
            return

        # Won't fail - this command runs after main startup code that insists on
        # having a config file.
        config_file_list = GetBotoConfigFileList()
        config_files = ' '.join(config_file_list)
        self._CleanUpUpdateCommand(tf, dirs_to_remove)

        # Record the current protection of each boto config file so the printed
        # command restores it (rather than fixing it at 600), supporting use cases
        # like GCE installing a service account with an /etc/boto.cfg file
        # protected to 644.
        chmod_cmds = []
        for config_file in config_file_list:
            mode = oct(stat.S_IMODE((os.stat(config_file)[stat.ST_MODE])))
            chmod_cmds.append('\n\tsudo chmod %s %s' % (mode, config_file))

        raise CommandException('\n'.join(
            textwrap.wrap(
                'Since it was installed by a different user previously, you will need '
                'to update using the following commands. You will be prompted for your '
                'password, and the install will run as "root". If you\'re unsure what '
                'this means please ask your system administrator for help:')
        ) + ('\n\tsudo chmod 0644 %s\n\tsudo env BOTO_CONFIG="%s" %s update'
             '%s') % (config_files, config_files, self.gsutil_path,
                      ' '.join(chmod_cmds)),
                               informational=True)

    # This list is checked during gsutil update by doing a lowercased
    # slash-left-stripped check. For example "/Dev" would match the "dev" entry.
    unsafe_update_dirs = [
        'applications',
        'auto',
        'bin',
        'boot',
        'desktop',
        'dev',
        'documents and settings',
        'etc',
        'export',
        'home',
        'kernel',
        'lib',
        'lib32',
        'library',
        'lost+found',
        'mach_kernel',
        'media',
        'mnt',
        'net',
        'null',
        'network',
        'opt',
        'private',
        'proc',
        'program files',
        'python',
        'root',
        'sbin',
        'scripts',
        'srv',
        'sys',
        'system',
        'tmp',
        'users',
        'usr',
        'var',
        'volumes',
        'win',
        'win32',
        'windows',
        'winnt',
    ]

    def _EnsureDirsSafeForUpdate(self, dirs):
        """Raises Exception if any of dirs is known to be unsafe for gsutil update.

    This provides a fail-safe check to ensure we don't try to overwrite
    or delete any important directories. (That shouldn't happen given the
    way we construct tmp dirs, etc., but since the gsutil update cleanup
    uses shutil.rmtree() it's prudent to add extra checks.)

    Args:
      dirs: List of directories to check.

    Raises:
      CommandException: If unsafe directory encountered.
    """
        for d in dirs:
            if not d:
                d = 'null'
            if d.lstrip(os.sep).lower() in self.unsafe_update_dirs:
                raise CommandException(
                    'EnsureDirsSafeForUpdate: encountered unsafe '
                    'directory (%s); aborting update' % d)

    def _CleanUpUpdateCommand(self, tf, dirs_to_remove):
        """Cleans up temp files etc. from running update command.

    Args:
      tf: Opened TarFile, or None if none currently open.
      dirs_to_remove: List of directories to remove.

    """
        if tf:
            tf.close()
        self._EnsureDirsSafeForUpdate(dirs_to_remove)
        for directory in dirs_to_remove:
            try:
                shutil.rmtree(directory)
            except OSError:
                # Ignore errors while attempting to remove old dirs under Windows. They
                # happen because of Windows exclusive file locking, and the update
                # actually succeeds but just leaves the old versions around in the
                # user's temp dir.
                if not IS_WINDOWS:
                    raise

    def RunCommand(self):
        """Command entry point for the update command."""

        if gslib.IS_PACKAGE_INSTALL:
            raise CommandException(
                'The update command is only available for gsutil installed from a '
                'tarball. If you installed gsutil via another method, use the same '
                'method to update it.')

        if os.environ.get('CLOUDSDK_WRAPPER') == '1':
            raise CommandException(
                'The update command is disabled for Cloud SDK installs. Please run '
                '"gcloud components update" to update it. Note: the Cloud SDK '
                'incorporates updates to the underlying tools approximately every 2 '
                'weeks, so if you are attempting to update to a recently created '
                'release / pre-release of gsutil it may not yet be available via '
                'the Cloud SDK.')

        https_validate_certificates = CERTIFICATE_VALIDATION_ENABLED
        if not https_validate_certificates:
            raise CommandException(
                'Your boto configuration has https_validate_certificates = False.\n'
                'The update command cannot be run this way, for security reasons.'
            )

        self._DisallowUpdateIfDataInGsutilDir()

        force_update = False
        no_prompt = False
        if self.sub_opts:
            for o, unused_a in self.sub_opts:
                if o == '-f':
                    force_update = True
                if o == '-n':
                    no_prompt = True

        dirs_to_remove = []
        tmp_dir = tempfile.mkdtemp()
        dirs_to_remove.append(tmp_dir)
        os.chdir(tmp_dir)

        if not no_prompt:
            self.logger.info('Checking for software update...')
        if self.args:
            update_from_url_str = self.args[0]
            if not update_from_url_str.endswith('.tar.gz'):
                raise CommandException(
                    'The update command only works with tar.gz files.')
            for i, result in enumerate(
                    self.WildcardIterator(update_from_url_str)):
                if i > 0:
                    raise CommandException(
                        'Invalid update URL. Must name a single .tar.gz file.')
                storage_url = result.storage_url
                if storage_url.IsFileUrl() and not storage_url.IsDirectory():
                    if not force_update:
                        raise CommandException((
                            '"update" command does not support "file://" URLs without the '
                            '-f option.'))
                elif not (storage_url.IsCloudUrl() and storage_url.IsObject()):
                    raise CommandException(
                        'Invalid update object URL. Must name a single .tar.gz file.'
                    )
        else:
            update_from_url_str = GSUTIL_PUB_TARBALL

        # Try to retrieve version info from tarball metadata; failing that, download
        # the tarball and extract the VERSION file. The version lookup will fail
        # when running the update system test, because it retrieves the tarball from
        # a temp file rather than a cloud URL (files lack the version metadata).
        tarball_version = LookUpGsutilVersion(self.gsutil_api,
                                              update_from_url_str)
        if tarball_version:
            tf = None
        else:
            tf = self._FetchAndOpenGsutilTarball(update_from_url_str)
            tf.extractall()
            with open(os.path.join('gsutil', 'VERSION'), 'r') as ver_file:
                tarball_version = ver_file.read().strip()

        if not force_update and gslib.VERSION == tarball_version:
            self._CleanUpUpdateCommand(tf, dirs_to_remove)
            if self.args:
                raise CommandException('You already have %s installed.' %
                                       update_from_url_str,
                                       informational=True)
            else:
                raise CommandException(
                    'You already have the latest gsutil release '
                    'installed.',
                    informational=True)

        if not no_prompt:
            (_, major) = CompareVersions(tarball_version, gslib.VERSION)
            if major:
                print('\n'.join(
                    textwrap.wrap(
                        'This command will update to the "%s" version of gsutil at %s. '
                        'NOTE: This is a major new version, so it is strongly recommended '
                        'that you review the release note details at %s before updating to '
                        'this version, especially if you use gsutil in scripts.'
                        % (tarball_version, gslib.GSUTIL_DIR,
                           RELEASE_NOTES_URL))))
            else:
                print(
                    'This command will update to the "%s" version of\ngsutil at %s'
                    % (tarball_version, gslib.GSUTIL_DIR))
        self._ExplainIfSudoNeeded(tf, dirs_to_remove)

        if no_prompt:
            answer = 'y'
        else:
            answer = input('Proceed? [y/N] ')
        if not answer or answer.lower()[0] != 'y':
            self._CleanUpUpdateCommand(tf, dirs_to_remove)
            raise CommandException('Not running update.', informational=True)

        if not tf:
            tf = self._FetchAndOpenGsutilTarball(update_from_url_str)

        # Ignore keyboard interrupts during the update to reduce the chance someone
        # hitting ^C leaves gsutil in a broken state.
        RegisterSignalHandler(signal.SIGINT, signal.SIG_IGN)

        # gslib.GSUTIL_DIR lists the path where the code should end up (like
        # /usr/local/gsutil), which is one level down from the relative path in the
        # tarball (since the latter creates files in ./gsutil). So, we need to
        # extract at the parent directory level.
        gsutil_bin_parent_dir = os.path.normpath(
            os.path.join(gslib.GSUTIL_DIR, '..'))

        # Extract tarball to a temporary directory in a sibling to GSUTIL_DIR.
        old_dir = tempfile.mkdtemp(dir=gsutil_bin_parent_dir)
        new_dir = tempfile.mkdtemp(dir=gsutil_bin_parent_dir)
        dirs_to_remove.append(old_dir)
        dirs_to_remove.append(new_dir)
        self._EnsureDirsSafeForUpdate(dirs_to_remove)
        try:
            tf.extractall(path=new_dir)
        except Exception as e:
            self._CleanUpUpdateCommand(tf, dirs_to_remove)
            raise CommandException('Update failed: %s.' % e)

        # For enterprise mode (shared/central) installation, users with
        # different user/group than the installation user/group must be
        # able to run gsutil so we need to do some permissions adjustments
        # here. Since enterprise mode is not supported for Windows
        # users, we can skip this step when running on Windows, which
        # avoids the problem that Windows has no find or xargs command.
        if not IS_WINDOWS:
            # Make all files in the updated area owner-RW and group/world-R, and
            # make all directories owner-RWX and group/world-RX.
            for dirname, subdirs, filenames in os.walk(new_dir):
                for filename in filenames:
                    fd = os.open(os.path.join(dirname, filename), os.O_RDONLY)
                    os.fchmod(
                        fd, stat.S_IWRITE | stat.S_IRUSR | stat.S_IRGRP
                        | stat.S_IROTH)
                    os.close(fd)
                for subdir in subdirs:
                    fd = os.open(os.path.join(dirname, subdir), os.O_RDONLY)
                    os.fchmod(
                        fd, stat.S_IRWXU | stat.S_IXGRP | stat.S_IXOTH
                        | stat.S_IRGRP | stat.S_IROTH)
                    os.close(fd)

            # Make the main gsutil script owner-RWX and group/world-RX.
            fd = os.open(os.path.join(new_dir, 'gsutil', 'gsutil'),
                         os.O_RDONLY)
            os.fchmod(
                fd, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH
                | stat.S_IXOTH)
            os.close(fd)

        # Move old installation aside and new into place.
        os.rename(gslib.GSUTIL_DIR, os.path.join(old_dir, 'old'))
        os.rename(os.path.join(new_dir, 'gsutil'), gslib.GSUTIL_DIR)
        self._CleanUpUpdateCommand(tf, dirs_to_remove)
        RegisterSignalHandler(signal.SIGINT, signal.SIG_DFL)
        self.logger.info('Update complete.')
        return 0
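The deny list above is consulted after normalizing each path: leading
separators are stripped and the result is lowercased, so "/Dev" matches the
"dev" entry. A standalone sketch of that check (is_safe_for_rmtree is an
illustrative name; the deny list is truncated and POSIX separators assumed):

import os

UNSAFE_UPDATE_DIRS = ['bin', 'dev', 'etc', 'home', 'null', 'tmp', 'usr', 'var']

def is_safe_for_rmtree(path):
    """Returns False for paths that normalize to a well-known system dir."""
    name = (path or 'null').lstrip(os.sep).lower()  # Empty paths map to 'null'.
    return name not in UNSAFE_UPDATE_DIRS

print(is_safe_for_rmtree('/Dev'))           # False: normalizes to 'dev'
print(is_safe_for_rmtree(''))               # False: empty maps to 'null'
print(is_safe_for_rmtree('/tmp/gsutil-x'))  # True: not a bare system dir name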
Example #15
class StatCommand(Command):
    """Implementation of gsutil stat command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'stat',
        command_name_aliases=[],
        usage_synopsis=_SYNOPSIS,
        min_args=1,
        max_args=NO_MAX,
        supported_sub_args='',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=0,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments=[CommandArgument.MakeZeroOrMoreCloudURLsArgument()])
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='stat',
        help_name_aliases=[],
        help_type='command_help',
        help_one_line_summary='Display object status',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={},
    )

    def RunCommand(self):
        """Command entry point for stat command."""
        # List of fields we'll print for stat objects.
        stat_fields = [
            'updated', 'cacheControl', 'contentDisposition', 'contentEncoding',
            'contentLanguage', 'size', 'contentType', 'componentCount',
            'metadata', 'crc32c', 'md5Hash', 'etag', 'generation',
            'metageneration'
        ]
        found_nonmatching_arg = False
        for url_str in self.args:
            arg_matches = 0
            url = StorageUrlFromString(url_str)
            if not url.IsObject():
                raise CommandException(
                    'The stat command only works with object URLs')
            try:
                if ContainsWildcard(url_str):
                    blr_iter = self.WildcardIterator(url_str).IterObjects(
                        bucket_listing_fields=stat_fields)
                else:
                    single_obj = self.gsutil_api.GetObjectMetadata(
                        url.bucket_name,
                        url.object_name,
                        generation=url.generation,
                        provider=url.scheme,
                        fields=stat_fields)
                    blr_iter = [
                        BucketListingObject(url, root_object=single_obj)
                    ]
                for blr in blr_iter:
                    if blr.IsObject():
                        arg_matches += 1
                        if logging.getLogger().isEnabledFor(logging.INFO):
                            PrintFullInfoAboutObject(blr, incl_acl=False)
            except AccessDeniedException:
                print('You aren\'t authorized to read %s - skipping' % url_str)
            except InvalidUrlError:
                raise
            except NotFoundException:
                pass
            if not arg_matches:
                if logging.getLogger().isEnabledFor(logging.INFO):
                    print('No URLs matched %s' % url_str)
                found_nonmatching_arg = True
        if found_nonmatching_arg:
            return 1
        return 0
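Note the exit-code contract in RunCommand above: stat keeps iterating after
authorization errors and missing URLs, then returns 1 if any argument matched
nothing. Because the per-URL output is printed only when INFO logging is
enabled, running under gsutil's global -q flag turns stat into a pure
existence test driven by the exit status. A trivial sketch of the
aggregation rule:

def aggregate_exit_code(match_counts):
    """Returns 1 if any argument matched no URLs, else 0."""
    return 1 if any(count == 0 for count in match_counts) else 0

print(aggregate_exit_code([2, 1]))  # 0: every argument matched something
print(aggregate_exit_code([3, 0]))  # 1: one argument matched nothing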
Example #16
class NotificationCommand(Command):
    """Implementation of gsutil notification command."""

    # Notification names might look like one of these:
    #  canonical form:  projects/_/buckets/bucket/notificationConfigs/3
    #  JSON API form:   b/bucket/notificationConfigs/5
    # Either of the above might start with a / if a user is copying & pasting.
    def _GetNotificationPathRegex(self):
        if not NotificationCommand._notification_path_regex:
            NotificationCommand._notification_path_regex = re.compile(
                ('/?(projects/[^/]+/)?b(uckets)?/(?P<bucket>[^/]+)/'
                 'notificationConfigs/(?P<notification>[0-9]+)'))
        return NotificationCommand._notification_path_regex

    _notification_path_regex = None

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'notification',
        command_name_aliases=[
            'notify', 'notifyconfig', 'notifications', 'notif'
        ],
        usage_synopsis=_SYNOPSIS,
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='i:t:m:t:o:f:e:p:s',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'watchbucket': [
                CommandArgument.MakeFreeTextArgument(),
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
            ],
            'stopchannel': [],
            'list': [CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()],
            'delete': [
                # Takes a list of one of the following:
                #   notification: projects/_/buckets/bla/notificationConfigs/5,
                #   bucket: gs://foobar
                CommandArgument.MakeZeroOrMoreCloudURLsArgument()
            ],
            'create': [
                CommandArgument.MakeFreeTextArgument(),  # Cloud Pub/Sub topic
                CommandArgument.MakeNCloudBucketURLsArgument(1)
            ]
        })
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='notification',
        help_name_aliases=['watchbucket', 'stopchannel', 'notifyconfig'],
        help_type='command_help',
        help_one_line_summary='Configure object change notification',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'create': _create_help_text,
            'list': _list_help_text,
            'delete': _delete_help_text,
            'watchbucket': _watchbucket_help_text,
            'stopchannel': _stopchannel_help_text
        },
    )

    def _WatchBucket(self):
        """Creates a watch on a bucket given in self.args."""
        self.CheckArguments()
        identifier = None
        client_token = None
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-i':
                    identifier = a
                if o == '-t':
                    client_token = a

        identifier = identifier or str(uuid.uuid4())
        watch_url = self.args[0]
        bucket_arg = self.args[-1]

        if not watch_url.lower().startswith('https://'):
            raise CommandException(
                'The application URL must be an https:// URL.')

        bucket_url = StorageUrlFromString(bucket_arg)
        if not (bucket_url.IsBucket() and bucket_url.scheme == 'gs'):
            raise CommandException(
                'The %s command can only be used with gs:// bucket URLs.' %
                self.command_name)
        if not bucket_url.IsBucket():
            raise CommandException(
                'URL must name a bucket for the %s command.' %
                self.command_name)

        self.logger.info('Watching bucket %s with application URL %s ...',
                         bucket_url, watch_url)

        try:
            channel = self.gsutil_api.WatchBucket(bucket_url.bucket_name,
                                                  watch_url,
                                                  identifier,
                                                  token=client_token,
                                                  provider=bucket_url.scheme)
        except AccessDeniedException as e:
            self.logger.warn(
                NOTIFICATION_AUTHORIZATION_FAILED_MESSAGE.format(
                    watch_error=str(e), watch_url=watch_url))
            raise

        channel_id = channel.id
        resource_id = channel.resourceId
        client_token = channel.token
        self.logger.info('Successfully created watch notification channel.')
        self.logger.info('Watch channel identifier: %s', channel_id)
        self.logger.info('Canonicalized resource identifier: %s', resource_id)
        self.logger.info('Client state token: %s', client_token)

        return 0
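The lazily compiled path regex above accepts both spellings described in the
comment on NotificationCommand, with or without a leading slash. A quick
check against the two example forms (the bucket and config ID values are
illustrative):

import re

notification_path_regex = re.compile(
    '/?(projects/[^/]+/)?b(uckets)?/(?P<bucket>[^/]+)/'
    'notificationConfigs/(?P<notification>[0-9]+)')

for path in ('projects/_/buckets/bucket/notificationConfigs/3',
             '/b/bucket/notificationConfigs/5'):
    match = notification_path_regex.match(path)
    print(match.group('bucket'), match.group('notification'))
# -> bucket 3
# -> bucket 5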
Example #17
class PapCommand(Command):
    """Implements the gsutil pap command."""

    command_spec = Command.CreateCommandSpec(
        'pap',
        command_name_aliases=['publicaccessprevention'],
        usage_synopsis=_SYNOPSIS,
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=2,
        gs_api_support=[ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'get': [
                CommandArgument.MakeNCloudURLsArgument(1),
            ],
            'set': [
                CommandArgument('mode', choices=['enforced', 'inherited']),
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
            ],
        })
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='pap',
        help_name_aliases=['publicaccessprevention'],
        help_type='command_help',
        help_one_line_summary='Configure public access prevention',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'get': _get_help_text,
            'set': _set_help_text,
        },
    )

    def _ValidateBucketListingRefAndReturnBucketName(self, blr):
        if blr.storage_url.scheme != 'gs':
            raise CommandException(
                'The %s command can only be used with gs:// bucket URLs.' %
                self.command_name)

    def _GetPublicAccessPrevention(self, blr):
        """Gets the public access prevention setting for a bucket."""
        bucket_url = blr.storage_url

        bucket_metadata = self.gsutil_api.GetBucket(
            bucket_url.bucket_name,
            fields=['iamConfiguration'],
            provider=bucket_url.scheme)
        iam_config = bucket_metadata.iamConfiguration
        public_access_prevention = iam_config.publicAccessPrevention or 'inherited'
        bucket = str(bucket_url).rstrip('/')
        print('%s: %s' % (bucket, public_access_prevention))

    def _SetPublicAccessPrevention(self, blr, setting_arg):
        """Sets the Public Access Prevention setting for a bucket enforced or inherited."""
        bucket_url = blr.storage_url

        iam_config = IamConfigurationValue()
        iam_config.publicAccessPrevention = setting_arg

        bucket_metadata = apitools_messages.Bucket(iamConfiguration=iam_config)

        print('Setting Public Access Prevention %s for %s' %
              (setting_arg, str(bucket_url).rstrip('/')))

        self.gsutil_api.PatchBucket(bucket_url.bucket_name,
                                    bucket_metadata,
                                    fields=['iamConfiguration'],
                                    provider=bucket_url.scheme)
        return 0

    def _Pap(self):
        """Handles pap command on Cloud Storage buckets."""
        subcommand = self.args.pop(0)

        if subcommand not in ('get', 'set'):
            raise CommandException('pap only supports get|set')

        subcommand_func = None
        subcommand_args = []
        setting_arg = None

        if subcommand == 'get':
            subcommand_func = self._GetPublicAccessPrevention
        elif subcommand == 'set':
            subcommand_func = self._SetPublicAccessPrevention
            setting_arg = self.args.pop(0)
            subcommand_args.append(setting_arg)

        if self.gsutil_api.GetApiSelector('gs') != ApiSelector.JSON:
            raise CommandException('\n'.join(
                textwrap.wrap(
                    ('The "%s" command can only be with the Cloud Storage '
                     'JSON API.') % self.command_name)))

        # Iterate over bucket args, performing the specified subcommand.
        some_matched = False
        url_args = self.args
        if not url_args:
            self.RaiseWrongNumberOfArgumentsException()
        for url_str in url_args:
            # Throws a CommandException if the argument is not a bucket.
            bucket_iter = self.GetBucketUrlIterFromArg(url_str)
            for bucket_listing_ref in bucket_iter:
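                # The JSON-only check above covered the default 'gs' provider;
                # this re-checks each matched URL in case its scheme resolves
                # to a different API.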
                if self.gsutil_api.GetApiSelector(
                        bucket_listing_ref.storage_url.scheme
                ) != ApiSelector.JSON:
                    raise CommandException('\n'.join(
                        textwrap.wrap(
                            ('The "%s" command can only be used for GCS '
                             'Buckets.') % self.command_name)))

                some_matched = True
                subcommand_func(bucket_listing_ref, *subcommand_args)

        if not some_matched:
            raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))
        return 0

    def RunCommand(self):
        """Command entry point for the pap command."""
        action_subcommand = self.args[0]
        self.ParseSubOpts(check_args=True)

        if action_subcommand == 'get' or action_subcommand == 'set':
            metrics.LogCommandParams(sub_opts=self.sub_opts)
            metrics.LogCommandParams(subcommands=[action_subcommand])
            self._Pap()
        else:
            raise CommandException(
                'Invalid subcommand "%s", use get|set instead.' %
                action_subcommand)
        return 0
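The fallback in _GetPublicAccessPrevention above means an unset
publicAccessPrevention field reads back as "inherited"; for example,
"gsutil pap get gs://my-bucket" (bucket name illustrative) would print
"gs://my-bucket: inherited" for a bucket the setting has never been
written on. A sketch of the fallback:

def effective_pap(public_access_prevention):
    """Maps an unset publicAccessPrevention field to its effective value."""
    return public_access_prevention or 'inherited'

print(effective_pap(None))        # inherited (field unset on the bucket)
print(effective_pap('enforced'))  # enforced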
Example #18
class RetentionCommand(Command):
  """Implementation of gsutil retention command."""

  # Command specification. See base class for documentation.
  command_spec = Command.CreateCommandSpec(
      'retention',
      command_name_aliases=[],
      usage_synopsis=_SYNOPSIS,
      min_args=2,
      max_args=NO_MAX,
      file_url_ok=False,
      provider_url_ok=False,
      urls_start_arg=1,
      gs_api_support=[ApiSelector.JSON],
      gs_default_api=ApiSelector.JSON,
      argparse_arguments={
          'set': [CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()],
          'clear': [CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()],
          'get': [CommandArgument.MakeNCloudBucketURLsArgument(1)],
          'lock': [CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()],
          'event-default': {
              'set': [CommandArgument.MakeZeroOrMoreCloudURLsArgument()],
              'release': [CommandArgument.MakeZeroOrMoreCloudURLsArgument()]
          },
          'event': {
              'set': [CommandArgument.MakeZeroOrMoreCloudURLsArgument()],
              'release': [CommandArgument.MakeZeroOrMoreCloudURLsArgument()]
          },
          'temp': {
              'set': [CommandArgument.MakeZeroOrMoreCloudURLsArgument()],
              'release': [CommandArgument.MakeZeroOrMoreCloudURLsArgument()]
          },
      })

  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='retention',
      help_name_aliases=[],
      help_type='command_help',
      help_one_line_summary=(
          'Provides utilities to interact with the Retention Policy feature.'),
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={
          'get': _get_help_text,
          'set': _set_help_text,
          'clear': _clear_help_text,
          'lock': _lock_help_text,
          'event-default': _event_default_help_text,
          'event': _event_help_text,
          'temp': _temp_help_text
      },
  )

  def RunCommand(self):
    """Command entry point for the retention command."""
    # If the only credential type the user supplies in their boto file is HMAC,
    # GetApiSelector logic will force us to use the XML API, which bucket lock
    # does not support at the moment.
    if self.gsutil_api.GetApiSelector('gs') != ApiSelector.JSON:
      raise CommandException(('The {} command can only be used with the GCS '
                              'JSON API. If you have only supplied hmac '
                              'credentials in your boto file, please instead '
                              'supply a credential type that can be used with '
                              'the JSON API.').format(self.command_name))

    self.preconditions = PreconditionsFromHeaders(self.headers)

    action_subcommand = self.args.pop(0)
    self.ParseSubOpts(check_args=True)
    if action_subcommand == 'set':
      func = self._SetRetention
    elif action_subcommand == 'clear':
      func = self._ClearRetention
    elif action_subcommand == 'get':
      func = self._GetRetention
    elif action_subcommand == 'lock':
      func = self._LockRetention
    elif action_subcommand == 'event-default':
      func = self._DefaultEventHold
    elif action_subcommand == 'event':
      func = self._EventHold
    elif action_subcommand == 'temp':
      func = self._TempHold
    else:
      raise CommandException(
          ('Invalid subcommand "{}" for the {} command.\n'
           'See "gsutil help retention".').format(action_subcommand,
                                                  self.command_name))

    # Commands with both suboptions and subcommands need to reparse for
    # suboptions, so we log again.
    metrics.LogCommandParams(subcommands=[action_subcommand],
                             sub_opts=self.sub_opts)
    return func()

  def BucketUpdateFunc(self, url_args, bucket_metadata_update, fields,
                       log_msg_template):
    preconditions = Preconditions(
        meta_gen_match=self.preconditions.meta_gen_match)

    # Iterate over URLs, expanding wildcards and setting the new bucket metadata
    # on each bucket.
    some_matched = False
    for url_str in url_args:
      bucket_iter = self.GetBucketUrlIterFromArg(url_str, bucket_fields=['id'])
      for blr in bucket_iter:
        url = blr.storage_url
        some_matched = True
        self.logger.info(log_msg_template, blr)
        self.gsutil_api.PatchBucket(url.bucket_name,
                                    bucket_metadata_update,
                                    preconditions=preconditions,
                                    provider=url.scheme,
                                    fields=fields)
    if not some_matched:
      raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))

  def ObjectUpdateMetadataFunc(self,
                               patch_obj_metadata,
                               log_template,
                               name_expansion_result,
                               thread_state=None):
    """Updates metadata on an object using PatchObjectMetadata.

    Args:
      patch_obj_metadata: Metadata changes that should be applied to the
                          existing object.
      log_template: The log template that should be printed for each object.
      name_expansion_result: NameExpansionResult describing target object.
      thread_state: gsutil Cloud API instance to use for the operation.
    """
    gsutil_api = GetCloudApiInstance(self, thread_state=thread_state)

    exp_src_url = name_expansion_result.expanded_storage_url
    self.logger.info(log_template, exp_src_url)

    cloud_obj_metadata = encoding.JsonToMessage(
        apitools_messages.Object, name_expansion_result.expanded_result)

    preconditions = Preconditions(
        gen_match=self.preconditions.gen_match,
        meta_gen_match=self.preconditions.meta_gen_match)
    if preconditions.gen_match is None:
      preconditions.gen_match = cloud_obj_metadata.generation
    if preconditions.meta_gen_match is None:
      preconditions.meta_gen_match = cloud_obj_metadata.metageneration

    gsutil_api.PatchObjectMetadata(exp_src_url.bucket_name,
                                   exp_src_url.object_name,
                                   patch_obj_metadata,
                                   generation=exp_src_url.generation,
                                   preconditions=preconditions,
                                   provider=exp_src_url.scheme,
                                   fields=['id'])
    PutToQueueWithTimeout(gsutil_api.status_queue,
                          MetadataMessage(message_time=time.time()))

  def _GetObjectNameExpansionIterator(self, url_args):
    return NameExpansionIterator(
        self.command_name,
        self.debug,
        self.logger,
        self.gsutil_api,
        url_args,
        self.recursion_requested,
        all_versions=self.all_versions,
        continue_on_error=self.parallel_operations,
        bucket_listing_fields=['generation', 'metageneration'])

  def _GetSeekAheadNameExpansionIterator(self, url_args):
    return SeekAheadNameExpansionIterator(self.command_name,
                                          self.debug,
                                          self.GetSeekAheadGsutilApi(),
                                          url_args,
                                          self.recursion_requested,
                                          all_versions=self.all_versions,
                                          project_id=self.project_id)

  def _SetRetention(self):
    """Set retention retention_period on one or more buckets."""

    seconds = RetentionInSeconds(self.args[0])
    retention_policy = (apitools_messages.Bucket.RetentionPolicyValue(
        retentionPeriod=seconds))

    log_msg_template = 'Setting Retention Policy on %s...'
    bucket_metadata_update = apitools_messages.Bucket(
        retentionPolicy=retention_policy)
    url_args = self.args[1:]
    self.BucketUpdateFunc(url_args,
                          bucket_metadata_update,
                          fields=['id', 'retentionPolicy'],
                          log_msg_template=log_msg_template)
    return 0

  def _ClearRetention(self):
    """Clear retention retention_period on one or more buckets."""
    retention_policy = (apitools_messages.Bucket.RetentionPolicyValue(
        retentionPeriod=None))
    log_msg_template = 'Clearing Retention Policy on %s...'
    bucket_metadata_update = apitools_messages.Bucket(
        retentionPolicy=retention_policy)
    url_args = self.args
    self.BucketUpdateFunc(url_args,
                          bucket_metadata_update,
                          fields=['id', 'retentionPolicy'],
                          log_msg_template=log_msg_template)
    return 0

  def _GetRetention(self):
    """Get Retention Policy for a single bucket."""
    bucket_url, bucket_metadata = self.GetSingleBucketUrlFromArg(
        self.args[0], bucket_fields=['retentionPolicy'])
    print(RetentionPolicyToString(bucket_metadata.retentionPolicy, bucket_url))
    return 0

  def _LockRetention(self):
    """Lock Retention Policy on one or more buckets."""
    url_args = self.args
    # Iterate over URLs, expanding wildcards and setting the Retention Policy
    # configuration on each.
    some_matched = False
    for url_str in url_args:
      bucket_iter = self.GetBucketUrlIterFromArg(url_str, bucket_fields=['id'])
      for blr in bucket_iter:
        url = blr.storage_url
        some_matched = True
        # Get bucket metadata to provide a precondition.
        bucket_metadata = self.gsutil_api.GetBucket(
            url.bucket_name,
            provider=url.scheme,
            fields=['id', 'metageneration', 'retentionPolicy'])
        if (not (bucket_metadata.retentionPolicy and
                 bucket_metadata.retentionPolicy.retentionPeriod)):
          # TODO: implement '-c' flag to continue_on_error
          raise CommandException(
              'Bucket "{}" does not have an Unlocked Retention Policy.'.format(
                  url.bucket_name))
        elif bucket_metadata.retentionPolicy.isLocked is True:
          self.logger.error('Retention Policy on "%s" is already locked.', blr)
        elif ConfirmLockRequest(url.bucket_name,
                                bucket_metadata.retentionPolicy):
          self.logger.info('Locking Retention Policy on %s...', blr)
          self.gsutil_api.LockRetentionPolicy(url.bucket_name,
                                              bucket_metadata.metageneration,
                                              provider=url.scheme)
        else:
          self.logger.error(
              '  Aborting Retention Policy lock on {}'.format(blr))
    if not some_matched:
      raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))
    return 0

  def _DefaultEventHold(self):
    """Sets default value for Event-Based Hold on one or more buckets."""
    hold = None
    if self.args:
      if self.args[0].lower() == 'set':
        hold = True
      elif self.args[0].lower() == 'release':
        hold = False
      else:
        raise CommandException(
            ('Invalid subcommand "{}" for the "retention event-default"'
             ' command.\nSee "gsutil help retention event".').format(
                 self.args[0]))

    verb = 'Setting' if hold else 'Releasing'
    log_msg_template = '{} default Event-Based Hold on %s...'.format(verb)
    bucket_metadata_update = apitools_messages.Bucket(
        defaultEventBasedHold=hold)
    url_args = self.args[1:]
    self.BucketUpdateFunc(url_args,
                          bucket_metadata_update,
                          fields=['id', 'defaultEventBasedHold'],
                          log_msg_template=log_msg_template)
    return 0

  def _EventHold(self):
    """Sets or unsets Event-Based Hold on one or more objects."""
    sub_command_name = 'event'
    sub_command_full_name = 'Event-Based'
    hold = self._ProcessHoldArgs(sub_command_name)
    url_args = self.args[1:]
    obj_metadata_update_wrapper = (SetEventHoldFuncWrapper
                                   if hold else ReleaseEventHoldFuncWrapper)
    self._SetHold(obj_metadata_update_wrapper, url_args, sub_command_full_name)
    return 0

  def _TempHold(self):
    """Sets or unsets Temporary Hold on one or more objects."""
    sub_command_name = 'temp'
    sub_command_full_name = 'Temporary'
    hold = self._ProcessHoldArgs(sub_command_name)
    url_args = self.args[1:]
    obj_metadata_update_wrapper = (SetTempHoldFuncWrapper
                                   if hold else ReleaseTempHoldFuncWrapper)
    self._SetHold(obj_metadata_update_wrapper, url_args, sub_command_full_name)
    return 0

  def _ProcessHoldArgs(self, sub_command_name):
    """Processes command args for Temporary and Event-Based Hold sub-command.

    Args:
      sub_command_name: The name of the subcommand: "temp" / "event"

    Returns:
      Returns a boolean value indicating whether to set (True) or
      release (False) the hold.
    """
    hold = None
    if self.args[0].lower() == 'set':
      hold = True
    elif self.args[0].lower() == 'release':
      hold = False
    else:
      raise CommandException(
          ('Invalid subcommand "{}" for the "retention {}" command.\n'
           'See "gsutil help retention {}".').format(self.args[0],
                                                     sub_command_name,
                                                     sub_command_name))
    return hold

  def _SetHold(self, obj_metadata_update_wrapper, url_args,
               sub_command_full_name):
    """Common logic to set or unset Event-Based/Temporary Hold on objects.

    Args:
      obj_metadata_update_wrapper: The function for updating related fields in
                                   Object metadata.
      url_args: List of object URIs.
      sub_command_full_name: The full name for sub-command:
                             "Temporary" / "Event-Based"
    """
    if len(url_args) == 1 and not self.recursion_requested:
      url = StorageUrlFromString(url_args[0])
      if not (url.IsCloudUrl() and url.IsObject()):
        raise CommandException('URL ({}) must name an object'.format(
            url_args[0]))

    name_expansion_iterator = self._GetObjectNameExpansionIterator(url_args)
    seek_ahead_iterator = self._GetSeekAheadNameExpansionIterator(url_args)

    # Used to track if any objects' metadata failed to be set.
    self.everything_set_okay = True

    try:
      # TODO: implement '-c' flag to continue_on_error

      # Perform requests in parallel (-m) mode, if requested, using
      # configured number of parallel processes and threads. Otherwise,
      # perform requests with sequential function calls in current process.
      self.Apply(obj_metadata_update_wrapper,
                 name_expansion_iterator,
                 UpdateObjectMetadataExceptionHandler,
                 fail_on_error=True,
                 seek_ahead_iterator=seek_ahead_iterator)

    except AccessDeniedException as e:
      if e.status == 403:
        self._WarnServiceAccounts()
      raise

    if not self.everything_set_okay:
      raise CommandException(
          '{} Hold for some objects could not be set.'.format(
              sub_command_full_name))
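RunCommand above dispatches through an if/elif chain; an equivalent
table-driven body (a sketch only, reusing the same handler names inside the
method) keeps the mapping in one place and makes the valid subcommand set
easy to introspect:

    handlers = {
        'set': self._SetRetention,
        'clear': self._ClearRetention,
        'get': self._GetRetention,
        'lock': self._LockRetention,
        'event-default': self._DefaultEventHold,
        'event': self._EventHold,
        'temp': self._TempHold,
    }
    func = handlers.get(action_subcommand)
    if func is None:
      raise CommandException(
          ('Invalid subcommand "{}" for the {} command.\n'
           'See "gsutil help retention".').format(action_subcommand,
                                                  self.command_name))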
Example #19
class IamCommand(Command):
    """Implementation of gsutil iam command."""
    command_spec = Command.CreateCommandSpec(
        'iam',
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='afRrd:e:',
        file_url_ok=True,
        provider_url_ok=False,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'get': [CommandArgument.MakeNCloudURLsArgument(1)],
            'set': [
                CommandArgument.MakeNFileURLsArgument(1),
                CommandArgument.MakeZeroOrMoreCloudURLsArgument()
            ],
            'ch': [
                CommandArgument.MakeOneOrMoreBindingsArgument(),
                CommandArgument.MakeZeroOrMoreCloudURLsArgument()
            ],
        },
    )

    help_spec = Command.HelpSpec(
        help_name='iam',
        help_name_aliases=[],
        help_type='command_help',
        help_one_line_summary=('Get, set, or change'
                               ' bucket and/or object IAM permissions.'),
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'get': _get_help_text,
            'set': _set_help_text,
            'ch': _ch_help_text,
        })

    def GetIamHelper(self, storage_url, thread_state=None):
        """Gets an IAM policy for a single, resolved bucket / object URL.

    Args:
      storage_url: A CloudUrl instance with no wildcards, pointing to a
                   specific bucket or object.
      thread_state: CloudApiDelegator instance which is passed from
                    command.WorkerThread.__init__() if the global -m flag is
                    specified. Will use self.gsutil_api if thread_state is set
                    to None.

    Returns:
      Policy instance.
    """

        gsutil_api = GetCloudApiInstance(self, thread_state=thread_state)

        if storage_url.IsBucket():
            policy = gsutil_api.GetBucketIamPolicy(
                storage_url.bucket_name,
                provider=storage_url.scheme,
                fields=['bindings', 'etag'],
            )
        else:
            policy = gsutil_api.GetObjectIamPolicy(
                storage_url.bucket_name,
                storage_url.object_name,
                generation=storage_url.generation,
                provider=storage_url.scheme,
                fields=['bindings', 'etag'],
            )
        return policy

    def _GetIam(self, thread_state=None):
        """Gets IAM policy for single bucket or object."""

        pattern = self.args[0]

        matches = PluralityCheckableIterator(
            self.WildcardIterator(pattern).IterAll(
                bucket_listing_fields=['name']))
        if matches.IsEmpty():
            raise CommandException('%s matched no URLs' % pattern)
        if matches.HasPlurality():
            raise CommandException(
                '%s matched more than one URL, which is not allowed by the %s '
                'command' % (pattern, self.command_name))

        storage_url = StorageUrlFromString(list(matches)[0].url_string)
        policy = self.GetIamHelper(storage_url, thread_state=thread_state)
        print(json.dumps(json.loads(protojson.encode_message(policy)),
                         sort_keys=True,
                         indent=2))

    def _SetIamHelperInternal(self, storage_url, policy, thread_state=None):
        """Sets IAM policy for a single, resolved bucket / object URL.

    Args:
      storage_url: A CloudUrl instance with no wildcards, pointing to a
                   specific bucket or object.
      policy: A Policy object to set on the bucket / object.
      thread_state: CloudApiDelegator instance which is passed from
                    command.WorkerThread.__init__() if the -m flag is
                    specified. Will use self.gsutil_api if thread_state is set
                    to None.

    Raises:
      ServiceException passed from the API call if an HTTP error was returned.
    """

        # SetIamHelper may be called by a command.WorkerThread. In the
        # single-threaded case, WorkerThread will not pass the CloudApiDelegator
        # instance to thread_state. GetCloudInstance is called to resolve the
        # edge case.
        gsutil_api = GetCloudApiInstance(self, thread_state=thread_state)

        if storage_url.IsBucket():
            gsutil_api.SetBucketIamPolicy(storage_url.bucket_name,
                                          policy,
                                          provider=storage_url.scheme)
        else:
            gsutil_api.SetObjectIamPolicy(storage_url.bucket_name,
                                          storage_url.object_name,
                                          policy,
                                          generation=storage_url.generation,
                                          provider=storage_url.scheme)

    def SetIamHelper(self, storage_url, policy, thread_state=None):
        """Handles the potential exception raised by the internal set function."""
        try:
            self._SetIamHelperInternal(storage_url,
                                       policy,
                                       thread_state=thread_state)
        except ServiceException:
            if self.continue_on_error:
                self.everything_set_okay = False
            else:
                raise

    def PatchIamHelper(self, storage_url, bindings_tuples, thread_state=None):
        """Patches an IAM policy for a single, resolved bucket / object URL.

    The patch is applied by altering the policy from an IAM get request, and
    setting the new IAM with the specified etag. Because concurrent IAM set
    requests may alter the etag, we may need to retry this operation several
    times before success.

    Args:
      storage_url: A CloudUrl instance with no wildcards, pointing to a
                   specific bucket or object.
      bindings_tuples: A list of BindingsTuple instances.
      thread_state: CloudApiDelegator instance which is passed from
                    command.WorkerThread.__init__() if the -m flag is
                    specified. Will use self.gsutil_api if thread_state is set
                    to None.
    """
        try:
            self._PatchIamHelperInternal(storage_url,
                                         bindings_tuples,
                                         thread_state=thread_state)
        except ServiceException:
            if self.continue_on_error:
                self.everything_set_okay = False
            else:
                raise
        except IamChOnResourceWithConditionsException as e:
            if self.continue_on_error:
                self.everything_set_okay = False
                self.tried_ch_on_resource_with_conditions = True
                self.logger.debug(e.message)
            else:
                raise CommandException(e.message)

    @Retry(PreconditionException, tries=3, timeout_secs=1.0)
    def _PatchIamHelperInternal(self,
                                storage_url,
                                bindings_tuples,
                                thread_state=None):
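        """Patches the IAM policy for a bucket / object, retrying on etag mismatch.

        Reads the current policy and etag, applies the binding diffs, and sets
        the patched policy with that etag; a PreconditionException caused by a
        concurrent etag change is retried via the @Retry decorator above.
        """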

        policy = self.GetIamHelper(storage_url, thread_state=thread_state)
        (etag, bindings) = (policy.etag, policy.bindings)

        # If any of the bindings have conditions present, raise an exception.
        # See the docstring for the IamChOnResourceWithConditionsException class
        # for more details on why we raise this exception.
        for binding in bindings:
            if binding.condition:
                message = 'Could not patch IAM policy for %s.' % storage_url
                message += '\n'
                message += '\n'.join(
                    textwrap.wrap(
                        'The resource had conditions present in its IAM policy bindings, '
                        'which is not supported by "iam ch". %s' %
                        IAM_CH_CONDITIONS_WORKAROUND_MSG))
                raise IamChOnResourceWithConditionsException(message)

        # Create a backup which is untainted by any references to the original
        # bindings.
        orig_bindings = list(bindings)

        for (is_grant, diff) in bindings_tuples:
            bindings = PatchBindings(bindings, BindingsTuple(is_grant, diff))

        if IsEqualBindings(bindings, orig_bindings):
            self.logger.info('No changes made to %s', storage_url)
            return

        policy = apitools_messages.Policy(bindings=bindings, etag=etag)

        # We explicitly wish for etag mismatches to raise an error and allow this
        # function to error out, so we are bypassing the exception handling offered
        # by IamCommand.SetIamHelper in lieu of our own handling (@Retry).
        self._SetIamHelperInternal(storage_url,
                                   policy,
                                   thread_state=thread_state)

    def _PatchIam(self):
        self.continue_on_error = False
        self.recursion_requested = False

        patch_bindings_tuples = []

        if self.sub_opts:
            for o, a in self.sub_opts:
                if o in ['-r', '-R']:
                    self.recursion_requested = True
                elif o == '-f':
                    self.continue_on_error = True
                elif o == '-d':
                    patch_bindings_tuples.append(BindingStringToTuple(
                        False, a))

        patterns = []

        # N.B.: self.sub_opts stops taking in options at the first non-flagged
        # token. The rest of the tokens are sent to self.args. Thus, in order to
        # handle input of the form "-d <binding> <binding> <url>", we will have to
        # parse self.args for a mix of both bindings and CloudUrls. We are not
        # expecting to come across the -r, -f flags here.
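        # For example, a hypothetical invocation such as
        #   gsutil iam ch user:bob@example.com:objectAdmin \
        #       -d user:alice@example.com:objectViewer gs://bucket
        # reaches this loop with a grant binding, a "-d" removal binding, and a
        # URL, in that order.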
        it = iter(self.args)
        for token in it:
            if STORAGE_URI_REGEX.match(token):
                patterns.append(token)
                break
            if token == '-d':
                patch_bindings_tuples.append(
                    BindingStringToTuple(False, next(it)))
            else:
                patch_bindings_tuples.append(BindingStringToTuple(True, token))
        if not patch_bindings_tuples:
            raise CommandException('Must specify at least one binding.')

        # All following arguments are urls.
        for token in it:
            patterns.append(token)

        self.everything_set_okay = True
        self.tried_ch_on_resource_with_conditions = False
        threaded_wildcards = []
        for pattern in patterns:
            surl = StorageUrlFromString(pattern)
            try:
                if surl.IsBucket():
                    if self.recursion_requested:
                        surl.object_name = '*'
                        threaded_wildcards.append(surl.url_string)
                    else:
                        self.PatchIamHelper(surl, patch_bindings_tuples)
                else:
                    threaded_wildcards.append(surl.url_string)
            except AttributeError:
                error_msg = 'Invalid Cloud URL "%s".' % surl.object_name
                if set(surl.object_name).issubset(set('-Rrf')):
                    error_msg += (
                        ' This resource handle looks like a flag, which must appear '
                        'before all bindings. See "gsutil help iam ch" for more details.'
                    )
                raise CommandException(error_msg)

        if threaded_wildcards:
            name_expansion_iterator = NameExpansionIterator(
                self.command_name,
                self.debug,
                self.logger,
                self.gsutil_api,
                threaded_wildcards,
                self.recursion_requested,
                all_versions=self.all_versions,
                continue_on_error=self.continue_on_error
                or self.parallel_operations,
                bucket_listing_fields=['name'])

            seek_ahead_iterator = SeekAheadNameExpansionIterator(
                self.command_name,
                self.debug,
                self.GetSeekAheadGsutilApi(),
                threaded_wildcards,
                self.recursion_requested,
                all_versions=self.all_versions)

            serialized_bindings_tuples_it = itertools.repeat(
                [SerializeBindingsTuple(t) for t in patch_bindings_tuples])
            self.Apply(_PatchIamWrapper,
                       zip(serialized_bindings_tuples_it,
                           name_expansion_iterator),
                       _PatchIamExceptionHandler,
                       fail_on_error=not self.continue_on_error,
                       seek_ahead_iterator=seek_ahead_iterator)

            self.everything_set_okay &= not GetFailureCount() > 0

        # TODO: Add an error counter for files and objects.
        if not self.everything_set_okay:
            msg = 'Some IAM policies could not be patched.'
            if self.tried_ch_on_resource_with_conditions:
                msg += '\n'
                msg += '\n'.join(
                    textwrap.wrap(
                        'Some resources had conditions present in their IAM policy '
                        'bindings, which is not supported by "iam ch". %s' %
                        (IAM_CH_CONDITIONS_WORKAROUND_MSG)))
            raise CommandException(msg)

    # TODO(iam-beta): Add an optional flag to specify etag and edit the policy
    # accordingly to be passed into the helper functions.
    def _SetIam(self):
        """Set IAM policy for given wildcards on the command line."""

        self.continue_on_error = False
        self.recursion_requested = False
        self.all_versions = False
        force_etag = False
        etag = ''
        if self.sub_opts:
            for o, arg in self.sub_opts:
                if o in ['-r', '-R']:
                    self.recursion_requested = True
                elif o == '-f':
                    self.continue_on_error = True
                elif o == '-a':
                    self.all_versions = True
                elif o == '-e':
                    etag = str(arg)
                    force_etag = True
                else:
                    self.RaiseInvalidArgumentException()

        file_url = self.args[0]
        patterns = self.args[1:]

        # Load the IAM policy file and raise error if the file is invalid JSON or
        # does not exist.
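        # A minimal policy file (illustrative) has the shape:
        #   {
        #     "bindings": [
        #       {"role": "roles/storage.objectViewer", "members": ["allUsers"]}
        #     ],
        #     "etag": "CAE="
        #   }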
        try:
            with open(file_url, 'r') as fp:
                policy = json.loads(fp.read())
        except IOError:
            raise ArgumentException(
                'Specified IAM policy file "%s" does not exist.' % file_url)
        except ValueError as e:
            self.logger.debug('Invalid IAM policy file, ValueError:\n%s', e)
            raise ArgumentException('Invalid IAM policy file "%s".' % file_url)

        bindings = policy.get('bindings', [])
        if not force_etag:
            etag = policy.get('etag', '')

        policy_json = json.dumps({'bindings': bindings, 'etag': etag})
        try:
            policy = protojson.decode_message(apitools_messages.Policy,
                                              policy_json)
        except DecodeError:
            raise ArgumentException(
                'Invalid IAM policy file "%s" or etag "%s".' %
                (file_url, etag))

        self.everything_set_okay = True

        # This list of wildcard strings will be handled by NameExpansionIterator.
        threaded_wildcards = []

        for pattern in patterns:
            surl = StorageUrlFromString(pattern)
            if surl.IsBucket():
                if self.recursion_requested:
                    surl.object_name = '*'
                    threaded_wildcards.append(surl.url_string)
                else:
                    self.SetIamHelper(surl, policy)
            else:
                threaded_wildcards.append(surl.url_string)

        # N.B.: If threaded_wildcards contains a non-existent bucket
        # (e.g. ["gs://non-existent", "gs://existent"]), NameExpansionIterator
        # will raise an exception in iter.next. This halts all iteration, even
        # when -f is set. This behavior is also evident in acl set. This behavior
        # also appears for any exception that will be raised when iterating over
        # wildcard expansions (access denied if bucket cannot be listed, etc.).
        if threaded_wildcards:
            name_expansion_iterator = NameExpansionIterator(
                self.command_name,
                self.debug,
                self.logger,
                self.gsutil_api,
                threaded_wildcards,
                self.recursion_requested,
                all_versions=self.all_versions,
                continue_on_error=self.continue_on_error
                or self.parallel_operations,
                bucket_listing_fields=['name'])

            seek_ahead_iterator = SeekAheadNameExpansionIterator(
                self.command_name,
                self.debug,
                self.GetSeekAheadGsutilApi(),
                threaded_wildcards,
                self.recursion_requested,
                all_versions=self.all_versions)

            policy_it = itertools.repeat(protojson.encode_message(policy))
            self.Apply(_SetIamWrapper,
                       zip(policy_it, name_expansion_iterator),
                       _SetIamExceptionHandler,
                       fail_on_error=not self.continue_on_error,
                       seek_ahead_iterator=seek_ahead_iterator)

            self.everything_set_okay &= not GetFailureCount() > 0

        # TODO: Add an error counter for files and objects.
        if not self.everything_set_okay:
            raise CommandException('Some IAM policies could not be set.')

    def RunCommand(self):
        """Command entry point for the acl command."""
        action_subcommand = self.args.pop(0)
        self.ParseSubOpts(check_args=True)
        # Commands with both suboptions and subcommands need to reparse for
        # suboptions, so we log again.
        LogCommandParams(sub_opts=self.sub_opts)
        self.def_acl = False
        if action_subcommand == 'get':
            LogCommandParams(subcommands=[action_subcommand])
            self._GetIam()
        elif action_subcommand == 'set':
            LogCommandParams(subcommands=[action_subcommand])
            self._SetIam()
        elif action_subcommand == 'ch':
            LogCommandParams(subcommands=[action_subcommand])
            self._PatchIam()
        else:
            raise CommandException(
                'Invalid subcommand "%s" for the %s command.\n'
                'See "gsutil help iam".' %
                (action_subcommand, self.command_name))

        return 0
Example #20
class BucketPolicyOnlyCommand(Command):
  """Implements the gsutil bucketpolicyonly command."""

  command_spec = Command.CreateCommandSpec(
      'bucketpolicyonly',
      usage_synopsis=_SYNOPSIS,
      min_args=2,
      max_args=NO_MAX,
      supported_sub_args='',
      file_url_ok=False,
      provider_url_ok=False,
      urls_start_arg=2,
      gs_api_support=[ApiSelector.JSON],
      gs_default_api=ApiSelector.JSON,
      argparse_arguments={
          'get': [CommandArgument.MakeNCloudURLsArgument(1),],
          'set': [
              CommandArgument('mode', choices=['on', 'off']),
              CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
          ],
      })
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='bucketpolicyonly',
      help_name_aliases=[],
      help_type='command_help',
      help_one_line_summary='Configure uniform bucket-level access',
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={
          'get': _get_help_text,
          'set': _set_help_text,
      },
  )

  def _ValidateBucketListingRefAndReturnBucketName(self, blr):
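    """Raises CommandException if the listing ref is not a gs:// bucket URL."""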
    if blr.storage_url.scheme != 'gs':
      raise CommandException(
          'The %s command can only be used with gs:// bucket URLs.' %
          self.command_name)

  def _GetBucketPolicyOnly(self, blr):
    """Gets the Bucket Policy Only setting for a bucket."""
    self._ValidateBucketListingRefAndReturnBucketName(blr)
    bucket_url = blr.storage_url

    bucket_metadata = self.gsutil_api.GetBucket(bucket_url.bucket_name,
                                                fields=['iamConfiguration'],
                                                provider=bucket_url.scheme)
    iam_config = bucket_metadata.iamConfiguration
    bucket_policy_only = iam_config.bucketPolicyOnly

    fields = {
        'bucket': str(bucket_url).rstrip('/'),
        'enabled': bucket_policy_only.enabled
    }

    locked_time_line = ''
    if bucket_policy_only.lockedTime:
      fields['locked_time'] = bucket_policy_only.lockedTime
      locked_time_line = '  LockedTime: {locked_time}\n'
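    # The printed output looks like (illustrative):
    #   Bucket Policy Only setting for gs://my-bucket:
    #     Enabled: True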

    if bucket_policy_only:
      print(('Bucket Policy Only setting for {bucket}:\n'
             '  Enabled: {enabled}\n' + locked_time_line).format(**fields))

  def _SetBucketPolicyOnly(self, blr, setting_arg):
    """Sets the Bucket Policy Only setting for a bucket on or off."""
    self._ValidateBucketListingRefAndReturnBucketName(blr)
    bucket_url = blr.storage_url

    iam_config = IamConfigurationValue()
    iam_config.bucketPolicyOnly = BucketPolicyOnlyValue()
    iam_config.bucketPolicyOnly.enabled = (setting_arg == 'on')

    bucket_metadata = apitools_messages.Bucket(iamConfiguration=iam_config)

    setting_verb = 'Enabling' if setting_arg == 'on' else 'Disabling'
    print('%s Bucket Policy Only for %s...' %
          (setting_verb, str(bucket_url).rstrip('/')))

    self.gsutil_api.PatchBucket(bucket_url.bucket_name,
                                bucket_metadata,
                                fields=['iamConfiguration'],
                                provider=bucket_url.scheme)
    return 0

  def _BucketPolicyOnly(self):
    """Handles bucketpolicyonly command on a Cloud Storage bucket."""
    subcommand = self.args.pop(0)

    if subcommand not in ('get', 'set'):
      raise CommandException('bucketpolicyonly only supports get|set')

    subcommand_func = None
    subcommand_args = []
    setting_arg = None

    if subcommand == 'get':
      subcommand_func = self._GetBucketPolicyOnly
    elif subcommand == 'set':
      subcommand_func = self._SetBucketPolicyOnly
      setting_arg = self.args.pop(0)
      InsistOnOrOff(setting_arg,
                    'Only on and off values allowed for set option')
      subcommand_args.append(setting_arg)

    # Iterate over bucket args, performing the specified subsubcommand.
    some_matched = False
    url_args = self.args
    if not url_args:
      self.RaiseWrongNumberOfArgumentsException()
    for url_str in url_args:
      # Throws a CommandException if the argument is not a bucket.
      bucket_iter = self.GetBucketUrlIterFromArg(url_str)
      for bucket_listing_ref in bucket_iter:
        some_matched = True
        subcommand_func(bucket_listing_ref, *subcommand_args)

    if not some_matched:
      raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))
    return 0

  def RunCommand(self):
    """Command entry point for the bucketpolicyonly command."""
    if self.gsutil_api.GetApiSelector(provider='gs') != ApiSelector.JSON:
      raise CommandException('\n'.join(
          textwrap.wrap(
              'The "%s" command can only be used with the Cloud Storage JSON API.'
              % self.command_name)))

    action_subcommand = self.args[0]
    self.ParseSubOpts(check_args=True)

    if action_subcommand == 'get' or action_subcommand == 'set':
      metrics.LogCommandParams(sub_opts=self.sub_opts)
      metrics.LogCommandParams(subcommands=[action_subcommand])
      self._BucketPolicyOnly()
    else:
      raise CommandException('Invalid subcommand "%s", use get|set instead.' %
                             action_subcommand)
Example #21
class WebCommand(Command):
    """Implementation of gsutil web command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'web',
        command_name_aliases=['setwebcfg', 'getwebcfg'],
        usage_synopsis=_SYNOPSIS,
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='m:e:',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'set': [CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()],
            'get': [CommandArgument.MakeNCloudBucketURLsArgument(1)]
        })
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='web',
        help_name_aliases=['getwebcfg', 'setwebcfg'],
        help_type='command_help',
        help_one_line_summary=(
            'Set a main page and/or error page for one or more buckets'),
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'get': _get_help_text,
            'set': _set_help_text
        },
    )

    def _GetWeb(self):
        """Gets website configuration for a bucket."""
        bucket_url, bucket_metadata = self.GetSingleBucketUrlFromArg(
            self.args[0], bucket_fields=['website'])

        if bucket_url.scheme == 's3':
            sys.stdout.write(
                self.gsutil_api.XmlPassThroughGetWebsite(
                    bucket_url, provider=bucket_url.scheme))
        else:
            if bucket_metadata.website and (
                    bucket_metadata.website.mainPageSuffix
                    or bucket_metadata.website.notFoundPage):
                sys.stdout.write(
                    str(encoding.MessageToJson(bucket_metadata.website)) +
                    '\n')
            else:
                sys.stdout.write('%s has no website configuration.\n' %
                                 bucket_url)

        return 0

    def _SetWeb(self):
        """Sets website configuration for a bucket."""
        main_page_suffix = None
        error_page = None
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-m':
                    main_page_suffix = a
                elif o == '-e':
                    error_page = a

        url_args = self.args

        website = apitools_messages.Bucket.WebsiteValue(
            mainPageSuffix=main_page_suffix, notFoundPage=error_page)

        # Iterate over URLs, expanding wildcards and setting the website
        # configuration on each.
        some_matched = False
        for url_str in url_args:
            bucket_iter = self.GetBucketUrlIterFromArg(url_str,
                                                       bucket_fields=['id'])
            for blr in bucket_iter:
                url = blr.storage_url
                some_matched = True
                self.logger.info('Setting website configuration on %s...', blr)
                bucket_metadata = apitools_messages.Bucket(website=website)
                self.gsutil_api.PatchBucket(url.bucket_name,
                                            bucket_metadata,
                                            provider=url.scheme,
                                            fields=['id'])
        if not some_matched:
            raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))
        return 0

    def RunCommand(self):
        """Command entry point for the web command."""
        action_subcommand = self.args.pop(0)
        self.ParseSubOpts(check_args=True)
        if action_subcommand == 'get':
            func = self._GetWeb
        elif action_subcommand == 'set':
            func = self._SetWeb
        else:
            raise CommandException(
                ('Invalid subcommand "%s" for the %s command.\n'
                 'See "gsutil help web".') %
                (action_subcommand, self.command_name))

        # Commands with both suboptions and subcommands need to reparse for
        # suboptions, so we log again.
        metrics.LogCommandParams(subcommands=[action_subcommand],
                                 sub_opts=self.sub_opts)
        return func()
Example #22
class HashCommand(Command):
  """Implementation of gsutil hash command."""

  # Command specification. See base class for documentation.
  command_spec = Command.CreateCommandSpec(
      'hash',
      command_name_aliases=[],
      usage_synopsis=_SYNOPSIS,
      min_args=1,
      max_args=constants.NO_MAX,
      supported_sub_args='chm',
      file_url_ok=True,
      provider_url_ok=False,
      urls_start_arg=0,
      gs_api_support=[ApiSelector.JSON],
      gs_default_api=ApiSelector.JSON,
      argparse_arguments=[CommandArgument.MakeZeroOrMoreFileURLsArgument()])
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='hash',
      help_name_aliases=['checksum'],
      help_type='command_help',
      help_one_line_summary='Calculate file hashes',
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={},
  )

  @classmethod
  def _ParseOpts(cls, sub_opts, logger):
    """Returns behavior variables based on input options.

    Args:
      sub_opts: getopt sub-arguments for the command.
      logger: logging.Logger for the command.

    Returns:
      Tuple of
      calc_crc32c: Boolean, if True, command should calculate a CRC32c checksum.
      calc_md5: Boolean, if True, command should calculate an MD5 hash.
      format_func: Function used for formatting the hash in the desired format.
      cloud_format_func: Function used for formatting the hash in the desired
                         format.
      output_format: String describing the hash output format.
    """
    calc_crc32c = False
    calc_md5 = False
    format_func = (
        lambda digest: hashing_helper.Base64EncodeHash(digest.hexdigest()))
    cloud_format_func = lambda digest: digest
    found_hash_option = False
    output_format = 'base64'
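    # Illustrative: for the MD5 of b'foo', base64 output is
    # 'rL0Y20zC+Fzt72VPzMSk2A==' while hex output (-h) is
    # 'acbd18db4cc2f85cedef654fccc4a4d8'.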

    if sub_opts:
      for o, unused_a in sub_opts:
        if o == '-c':
          calc_crc32c = True
          found_hash_option = True
        elif o == '-h':
          output_format = 'hex'
          format_func = lambda digest: digest.hexdigest()
          cloud_format_func = lambda digest: (
              hashing_helper.Base64ToHexHash(digest).decode('ascii')
          )  # yapf: disable
        elif o == '-m':
          calc_md5 = True
          found_hash_option = True

    if not found_hash_option:
      calc_crc32c = True
      calc_md5 = True

    if calc_crc32c and not boto_util.UsingCrcmodExtension():
      logger.warn(hashing_helper.SLOW_CRCMOD_WARNING)

    return calc_crc32c, calc_md5, format_func, cloud_format_func, output_format

  def _GetHashClassesFromArgs(self, calc_crc32c, calc_md5):
    """Constructs the dictionary of hashes to compute based on the arguments.

    Args:
      calc_crc32c: If True, CRC32c should be included.
      calc_md5: If True, MD5 should be included.

    Returns:
      Dictionary of {string: hash digester}, where string the name of the
          digester algorithm.
    """
    hash_dict = {}
    if calc_crc32c:
      hash_dict['crc32c'] = crcmod.predefined.Crc('crc-32c')
    if calc_md5:
      hash_dict['md5'] = hashing_helper.GetMd5()
    return hash_dict

  def RunCommand(self):
    """Command entry point for the hash command."""
    (calc_crc32c, calc_md5, format_func, cloud_format_func,
     output_format) = (self._ParseOpts(self.sub_opts, self.logger))

    matched_one = False
    for url_str in self.args:
      for file_ref in self.WildcardIterator(url_str).IterObjects(
          bucket_listing_fields=[
              'crc32c',
              'customerEncryption',
              'md5Hash',
              'size',
          ]):
        matched_one = True
        url = StorageUrlFromString(url_str)
        file_name = file_ref.storage_url.object_name
        if StorageUrlFromString(url_str).IsFileUrl():
          file_size = os.path.getsize(file_name)
          self.gsutil_api.status_queue.put(
              FileMessage(url,
                          None,
                          time.time(),
                          size=file_size,
                          finished=False,
                          message_type=FileMessage.FILE_HASH))
          callback_processor = ProgressCallbackWithTimeout(
              file_size,
              FileProgressCallbackHandler(self.gsutil_api.status_queue,
                                          src_url=StorageUrlFromString(url_str),
                                          operation_name='Hashing').call)
          hash_dict = self._GetHashClassesFromArgs(calc_crc32c, calc_md5)
          with open(file_name, 'rb') as fp:
            hashing_helper.CalculateHashesFromContents(
                fp, hash_dict, callback_processor=callback_processor)
          self.gsutil_api.status_queue.put(
              FileMessage(url,
                          None,
                          time.time(),
                          size=file_size,
                          finished=True,
                          message_type=FileMessage.FILE_HASH))
        else:
          hash_dict = {}
          obj_metadata = file_ref.root_object
          file_size = obj_metadata.size
          md5_present = obj_metadata.md5Hash is not None
          crc32c_present = obj_metadata.crc32c is not None
          if not md5_present and not crc32c_present:
            logging.getLogger().warn('No hashes present for %s', url_str)
            continue
          if md5_present:
            hash_dict['md5'] = obj_metadata.md5Hash
          if crc32c_present:
            hash_dict['crc32c'] = obj_metadata.crc32c
        print('Hashes [%s] for %s:' % (output_format, file_name))
        for name, digest in six.iteritems(hash_dict):
          print('\tHash (%s):\t\t%s' % (name,
                                        (format_func(digest) if url.IsFileUrl()
                                         else cloud_format_func(digest))))

    if not matched_one:
      raise CommandException('No files matched')
    _PutToQueueWithTimeout(self.gsutil_api.status_queue,
                           FinalMessage(time.time()))
    return 0
Example #23
class RmCommand(Command):
    """Implementation of gsutil rm command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'rm',
        command_name_aliases=['del', 'delete', 'remove'],
        usage_synopsis=_SYNOPSIS,
        min_args=0,
        max_args=NO_MAX,
        supported_sub_args='afIrR',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=0,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments=[CommandArgument.MakeZeroOrMoreCloudURLsArgument()])
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='rm',
        help_name_aliases=['del', 'delete', 'remove'],
        help_type='command_help',
        help_one_line_summary='Remove objects',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={},
    )

    def RunCommand(self):
        """Command entry point for the rm command."""
        # self.recursion_requested is initialized in command.py (so it can be
        # checked in parent class for all commands).
        self.continue_on_error = self.parallel_operations
        self.read_args_from_stdin = False
        self.all_versions = False
        if self.sub_opts:
            for o, unused_a in self.sub_opts:
                if o == '-a':
                    self.all_versions = True
                elif o == '-f':
                    self.continue_on_error = True
                elif o == '-I':
                    self.read_args_from_stdin = True
                elif o == '-r' or o == '-R':
                    self.recursion_requested = True
                    self.all_versions = True

        if self.read_args_from_stdin:
            if self.args:
                raise CommandException(
                    'No arguments allowed with the -I flag.')
            url_strs = StdinIterator()
        else:
            if not self.args:
                raise CommandException(
                    'The rm command (without -I) expects at '
                    'least one URL.')
            url_strs = self.args

        # Tracks number of object deletes that failed.
        self.op_failure_count = 0

        # Tracks if any buckets were missing.
        self.bucket_not_found_count = 0

        # Tracks buckets that are slated for recursive deletion.
        bucket_urls_to_delete = []
        self.bucket_strings_to_delete = []

        if self.recursion_requested:
            bucket_fields = ['id']
            for url_str in url_strs:
                url = StorageUrlFromString(url_str)
                if url.IsBucket() or url.IsProvider():
                    for blr in self.WildcardIterator(url_str).IterBuckets(
                            bucket_fields=bucket_fields):
                        bucket_urls_to_delete.append(blr.storage_url)
                        self.bucket_strings_to_delete.append(url_str)

        self.preconditions = PreconditionsFromHeaders(self.headers or {})

        try:
            # Expand wildcards, dirs, buckets, and bucket subdirs in URLs.
            name_expansion_iterator = NameExpansionIterator(
                self.command_name,
                self.debug,
                self.logger,
                self.gsutil_api,
                url_strs,
                self.recursion_requested,
                project_id=self.project_id,
                all_versions=self.all_versions,
                continue_on_error=self.continue_on_error
                or self.parallel_operations)

            seek_ahead_iterator = None
            # Cannot seek ahead with stdin args, since we can only iterate them
            # once without buffering in memory.
            if not self.read_args_from_stdin:
                seek_ahead_iterator = SeekAheadNameExpansionIterator(
                    self.command_name,
                    self.debug,
                    self.GetSeekAheadGsutilApi(),
                    url_strs,
                    self.recursion_requested,
                    all_versions=self.all_versions,
                    project_id=self.project_id)

            # Perform remove requests in parallel (-m) mode, if requested, using
            # configured number of parallel processes and threads. Otherwise,
            # perform requests with sequential function calls in current process.
            self.Apply(
                _RemoveFuncWrapper,
                name_expansion_iterator,
                _RemoveExceptionHandler,
                fail_on_error=(not self.continue_on_error),
                shared_attrs=['op_failure_count', 'bucket_not_found_count'],
                seek_ahead_iterator=seek_ahead_iterator)

        # Assuming the bucket has versioning enabled, URLs that don't map to
        # objects should throw an error even with all_versions, since the prior
        # round of deletes only sends objects to a history table.
        # This assumption that rm -a is only called for versioned buckets should be
        # corrected, but the fix is non-trivial.
        except CommandException as e:
            # Don't raise if there are buckets to delete -- it's valid to say:
            #   gsutil rm -r gs://some_bucket
            # if the bucket is empty.
            if _ExceptionMatchesBucketToDelete(self.bucket_strings_to_delete,
                                               e):
                DecrementFailureCount()
            else:
                raise
        except ServiceException:
            if not self.continue_on_error:
                raise

        if self.bucket_not_found_count:
            raise CommandException(
                'Encountered non-existent bucket during listing')

        if self.op_failure_count and not self.continue_on_error:
            raise CommandException('Some files could not be removed.')

        # If this was a gsutil rm -r command covering any bucket subdirs,
        # remove any dir_$folder$ objects (which are created by various web UI
        # tools to simulate folders).
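        # E.g. (illustrative): for gs://bucket/dir, the wildcard built below is
        # gs://bucket/dir**_$folder$, which matches a zero-byte "dir_$folder$"
        # placeholder object if one exists.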
        if self.recursion_requested:
            folder_object_wildcards = []
            for url_str in url_strs:
                url = StorageUrlFromString(url_str)
                if url.IsObject():
                    folder_object_wildcards.append('%s**_$folder$' % url_str)
            if folder_object_wildcards:
                self.continue_on_error = True
                try:
                    name_expansion_iterator = NameExpansionIterator(
                        self.command_name,
                        self.debug,
                        self.logger,
                        self.gsutil_api,
                        folder_object_wildcards,
                        self.recursion_requested,
                        project_id=self.project_id,
                        all_versions=self.all_versions)
                    # When we're removing folder objects, always continue on error
                    self.Apply(_RemoveFuncWrapper,
                               name_expansion_iterator,
                               _RemoveFoldersExceptionHandler,
                               fail_on_error=False)
                except CommandException as e:
                    # Ignore exception from name expansion due to an absent folder file.
                    if not e.reason.startswith(NO_URLS_MATCHED_GENERIC):
                        raise

        # Now that all data has been deleted, delete any bucket URLs.
        for url in bucket_urls_to_delete:
            self.logger.info('Removing %s...', url)

            @Retry(NotEmptyException, tries=3, timeout_secs=1)
            def BucketDeleteWithRetry():
                self.gsutil_api.DeleteBucket(url.bucket_name,
                                             provider=url.scheme)

            BucketDeleteWithRetry()

        if self.op_failure_count:
            plural_str = 's' if self.op_failure_count > 1 else ''
            raise CommandException(
                '%d file%s/object%s could not be removed.' %
                (self.op_failure_count, plural_str, plural_str))

        return 0
Example #24
class HelpCommand(Command):
  """Implementation of gsutil help command."""

  # Command specification. See base class for documentation.
  command_spec = Command.CreateCommandSpec(
      'help',
      command_name_aliases=['?', 'man'],
      usage_synopsis=_SYNOPSIS,
      min_args=0,
      max_args=2,
      supported_sub_args='',
      file_url_ok=True,
      provider_url_ok=False,
      urls_start_arg=0,
  )
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='help',
      help_name_aliases=['?'],
      help_type='command_help',
      help_one_line_summary='Get help about commands and topics',
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={},
  )

  def RunCommand(self):
    """Command entry point for the help command."""
    (help_type_map, help_name_map) = self._LoadHelpMaps()
    output = []
    if not self.args:
      output.append('%s\nAvailable commands:\n' % top_level_usage_string)
      format_str = '  %-' + str(MAX_HELP_NAME_LEN) + 's%s\n'
      for help_prov in sorted(help_type_map['command_help'],
                              key=lambda hp: hp.help_spec.help_name):
        output.append(format_str % (help_prov.help_spec.help_name,
                                    help_prov.help_spec.help_one_line_summary))
      output.append('\nAdditional help topics:\n')
      for help_prov in sorted(help_type_map['additional_help'],
                              key=lambda hp: hp.help_spec.help_name):
        output.append(format_str % (help_prov.help_spec.help_name,
                                    help_prov.help_spec.help_one_line_summary))
      output.append('\nUse gsutil help <command or topic> for detailed help.')
    else:
      invalid_subcommand = False
      arg = self.args[0]
      if arg not in help_name_map:
        output.append('No help available for "%s"' % arg)
      else:
        help_prov = help_name_map[arg]
        help_name = None
        if len(self.args) > 1:  # We also have a subcommand argument.
          subcommand_map = help_prov.help_spec.subcommand_help_text
          if subcommand_map and self.args[1] in subcommand_map:
            help_name = arg + ' ' + self.args[1]
            help_text = subcommand_map[self.args[1]]
          else:
            invalid_subcommand = True
            if not subcommand_map:
              output.append(
                  ('The "%s" command has no subcommands. You can ask for the '
                   'full help by running:\n\n\tgsutil help %s\n') % (arg, arg))
            else:
              subcommand_examples = []
              for subcommand in subcommand_map:
                subcommand_examples.append('\tgsutil help %s %s' %
                                           (arg, subcommand))
              output.append(
                  ('Subcommand "%s" does not exist for command "%s".\n'
                   'You can either ask for the full help about the command by '
                   'running:\n\n\tgsutil help %s\n\n'
                   'Or you can ask for help about one of the subcommands:\n\n%s'
                  ) % (self.args[1], arg, arg, '\n'.join(subcommand_examples)))
        if not invalid_subcommand:
          if not help_name:  # No subcommand or invalid subcommand.
            help_name = help_prov.help_spec.help_name
            help_text = help_prov.help_spec.help_text

          output.append('<B>NAME</B>\n')
          output.append('  %s - %s\n' %
                        (help_name, help_prov.help_spec.help_one_line_summary))
          output.append('\n\n')
          output.append(help_text.strip('\n'))
          new_alias = OLD_ALIAS_MAP.get(arg, [None])[0]
          if new_alias:
            deprecation_warning = """
  The "%s" alias is deprecated, and will eventually be removed completely.
  Please use the "%s" command instead.""" % (arg, new_alias)

            output.append('\n\n\n<B>DEPRECATION WARNING</B>\n')
            output.append(deprecation_warning)
    self._OutputHelp(''.join(output))
    return 0

  def _OutputHelp(self, help_str):
    """Outputs simply formatted string.

    This function paginates if the string is too long, PAGER is defined, and
    the output is a tty.

    Args:
      help_str: String to format.
    """
    # Remove <B> and </B> tags and replace them with ANSI control codes if
    # writing to a compatible tty.
    if IS_WINDOWS or not IsRunningInteractively():
      help_str = re.sub('<B>', '', help_str)
      help_str = re.sub('</B>', '', help_str)
      text_util.print_to_fd(help_str)
      return
    help_str = re.sub('<B>', '\033[1m', help_str)
    help_str = re.sub('</B>', '\033[0;0m', help_str)
    num_lines = len(help_str.split('\n'))
    if 'PAGER' in os.environ and num_lines >= GetTermLines():
      # Use -r option for less to make bolding work right.
      pager = os.environ['PAGER'].split(' ')
      if pager[0].endswith('less'):
        pager.append('-r')
      try:
        Popen(pager, stdin=PIPE,
              universal_newlines=True).communicate(input=help_str)
      except OSError as e:
        raise CommandException('Unable to open pager (%s): %s' %
                               (' '.join(pager), e))
    else:
      text_util.print_to_fd(help_str)

  def _LoadHelpMaps(self):
    """Returns tuple of help type and help name.

    help type is a dict with key: help type
                             value: list of HelpProviders
    help name is a dict with key: help command name or alias
                             value: HelpProvider

    Returns:
      (help type, help name)
    """

    # Import all gslib.commands submodules.
    for _, module_name, _ in pkgutil.iter_modules(gslib.commands.__path__):
      __import__('gslib.commands.%s' % module_name)
    # Import all gslib.addlhelp submodules.
    for _, module_name, _ in pkgutil.iter_modules(gslib.addlhelp.__path__):
      __import__('gslib.addlhelp.%s' % module_name)

    help_type_map = {}
    help_name_map = {}
    for s in gslib.help_provider.ALL_HELP_TYPES:
      help_type_map[s] = []
    # Only include HelpProvider subclasses in the dict.
    for help_prov in itertools.chain(HelpProvider.__subclasses__(),
                                     Command.__subclasses__()):
      if help_prov is Command:
        # Skip the Command base class itself; we just want its subclasses,
        # where the help command text lives (in addition to non-Command
        # HelpProviders, like naming.py).
        continue
      gslib.help_provider.SanityCheck(help_prov, help_name_map)
      help_name_map[help_prov.help_spec.help_name] = help_prov
      for help_name_aliases in help_prov.help_spec.help_name_aliases:
        help_name_map[help_name_aliases] = help_prov
      help_type_map[help_prov.help_spec.help_type].append(help_prov)
    return (help_type_map, help_name_map)
Example #25
class StatCommand(Command):
  """Implementation of gsutil stat command."""

  # Command specification. See base class for documentation.
  command_spec = Command.CreateCommandSpec(
      'stat',
      command_name_aliases=[],
      usage_synopsis=_SYNOPSIS,
      min_args=1,
      max_args=NO_MAX,
      supported_sub_args='',
      file_url_ok=False,
      provider_url_ok=False,
      urls_start_arg=0,
      gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
      gs_default_api=ApiSelector.JSON,
      argparse_arguments=[
          CommandArgument.MakeZeroOrMoreCloudURLsArgument()
      ]
  )
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='stat',
      help_name_aliases=[],
      help_type='command_help',
      help_one_line_summary='Display object status',
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={},
  )

  def RunCommand(self):
    """Command entry point for stat command."""
    stat_fields = ENCRYPTED_FIELDS + UNENCRYPTED_FULL_LISTING_FIELDS
    found_nonmatching_arg = False
    for url_str in self.args:
      arg_matches = 0
      url = StorageUrlFromString(url_str)
      if not url.IsObject():
        raise CommandException('The stat command only works with object URLs')
      try:
        if ContainsWildcard(url_str):
          blr_iter = self.WildcardIterator(url_str).IterObjects(
              bucket_listing_fields=stat_fields)
        else:
          try:
            single_obj = self.gsutil_api.GetObjectMetadata(
                url.bucket_name, url.object_name, generation=url.generation,
                provider=url.scheme, fields=stat_fields)
          except EncryptionException:
            # Retry without requesting hashes.
            single_obj = self.gsutil_api.GetObjectMetadata(
                url.bucket_name, url.object_name, generation=url.generation,
                provider=url.scheme, fields=UNENCRYPTED_FULL_LISTING_FIELDS)
          blr_iter = [BucketListingObject(url, root_object=single_obj)]
        for blr in blr_iter:
          if blr.IsObject():
            arg_matches += 1
            # TODO: Request fewer fields if we're not printing the object.
            if logging.getLogger().isEnabledFor(logging.INFO):
              PrintFullInfoAboutObject(blr, incl_acl=False)
      except AccessDeniedException:
        if logging.getLogger().isEnabledFor(logging.INFO):
          sys.stderr.write('You aren\'t authorized to read %s - skipping' %
                           url_str)
      except InvalidUrlError:
        raise
      except NotFoundException:
        pass
      if not arg_matches:
        if logging.getLogger().isEnabledFor(logging.INFO):
          sys.stderr.write(NO_URLS_MATCHED_TARGET % url_str)
        found_nonmatching_arg = True
    if found_nonmatching_arg:
      return 1
    return 0
Example #26
class DefStorageClassCommand(Command):
    """Implementation of gsutil defstorageclass command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'defstorageclass',
        usage_synopsis=_SYNOPSIS,
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=2,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'set': [
                # FreeTextArgument allows for using storage class abbreviations.
                CommandArgument.MakeFreeTextArgument(),
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument(),
            ],
            'get': [
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument(),
            ],
        },
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='defstorageclass',
        help_name_aliases=['defaultstorageclass'],
        help_type='command_help',
        help_one_line_summary='Get or set the default storage class on buckets',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'get': _get_help_text,
            'set': _set_help_text,
        },
    )

    def _CheckIsGsUrl(self, url_str):
        if not url_str.startswith('gs://'):
            raise CommandException(
                '"%s" does not support the URL "%s". Did you mean to use a gs:// '
                'URL?' % (self.command_name, url_str))

    def _CalculateUrlsStartArg(self):
        if not self.args:
            self.RaiseWrongNumberOfArgumentsException()
        if self.args[0].lower() == 'set':
            return 2
        else:
            return 1

    def _SetDefStorageClass(self):
        """Sets the default storage class for a bucket."""
        # At this point, "set" has been popped off the front of self.args.
        normalized_storage_class = NormalizeStorageClass(self.args[0])
        url_args = self.args[1:]
        if not url_args:
            self.RaiseWrongNumberOfArgumentsException()

        some_matched = False
        for url_str in url_args:
            self._CheckIsGsUrl(url_str)
            # Throws a CommandException if the argument is not a bucket.
            bucket_iter = self.GetBucketUrlIterFromArg(url_str,
                                                       bucket_fields=['id'])
            for blr in bucket_iter:
                some_matched = True
                bucket_metadata = apitools_messages.Bucket()
                self.logger.info(
                    'Setting default storage class to "%s" for bucket %s' %
                    (normalized_storage_class, blr.url_string.rstrip('/')))
                bucket_metadata.storageClass = normalized_storage_class
                self.gsutil_api.PatchBucket(blr.storage_url.bucket_name,
                                            bucket_metadata,
                                            provider=blr.storage_url.scheme,
                                            fields=['id'])
        if not some_matched:
            raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))

    def _GetDefStorageClass(self):
        """Gets the default storage class for a bucket."""
        # At this point, "get" has been popped off the front of self.args.
        url_args = self.args
        some_matched = False
        for url_str in url_args:
            self._CheckIsGsUrl(url_str)
            bucket_iter = self.GetBucketUrlIterFromArg(
                url_str, bucket_fields=['storageClass'])
            for blr in bucket_iter:
                some_matched = True
                print(
                    '%s: %s' %
                    (blr.url_string.rstrip('/'), blr.root_object.storageClass))
        if not some_matched:
            raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))

    def RunCommand(self):
        """Command entry point for the defstorageclass command."""
        action_subcommand = self.args.pop(0)
        subcommand_args = [action_subcommand]
        if action_subcommand == 'get':
            func = self._GetDefStorageClass
        elif action_subcommand == 'set':
            func = self._SetDefStorageClass
            normalized_storage_class = NormalizeStorageClass(self.args[0])
            subcommand_args.append(normalized_storage_class)
        else:
            raise CommandException(
                ('Invalid subcommand "%s" for the %s command.\n'
                 'See "gsutil help %s".') %
                (action_subcommand, self.command_name, self.command_name))
        metrics.LogCommandParams(subcommands=subcommand_args)
        func()
        return 0
Example #27
class KmsCommand(Command):
  """Implements of gsutil kms command."""

  command_spec = Command.CreateCommandSpec(
      'kms',
      usage_synopsis=_SYNOPSIS,
      min_args=1,
      max_args=NO_MAX,
      supported_sub_args='dk:p:w',
      file_url_ok=False,
      provider_url_ok=False,
      urls_start_arg=1,
      gs_api_support=[ApiSelector.JSON],
      gs_default_api=ApiSelector.JSON,
      argparse_arguments={
          'authorize': [],
          'encryption': [CommandArgument.MakeNCloudBucketURLsArgument(1)],
          'serviceaccount': [],
      })
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='kms',
      help_name_aliases=[],
      help_type='command_help',
      help_one_line_summary='Configure Cloud KMS encryption',
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={
          'authorize': _authorize_help_text,
          'encryption': _encryption_help_text,
          'serviceaccount': _serviceaccount_help_text
      },
  )

  def _GatherSubOptions(self, subcommand_name):
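    """Parses flags shared by the kms subcommands.

    Sets self.kms_key (-k), self.clear_kms_key (-d),
    self.warn_on_key_authorize_failure (-w), and self.project_id (-p, falling
    back to the default in the user's Boto config).
    """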
    self.CheckArguments()
    self.clear_kms_key = False
    self.kms_key = None
    self.warn_on_key_authorize_failure = False

    if self.sub_opts:
      for o, a in self.sub_opts:
        if o == '-p':
          self.project_id = a
        elif o == '-k':
          self.kms_key = a
          ValidateCMEK(self.kms_key)
        elif o == '-d':
          self.clear_kms_key = True
        elif o == '-w':
          self.warn_on_key_authorize_failure = True

    if self.warn_on_key_authorize_failure and (
        self.subcommand_name != 'encryption' or not self.kms_key):
      raise CommandException('\n'.join(
          textwrap.wrap(
              'The "-w" option should only be specified for the "encryption" '
              'subcommand and must be used with the "-k" option.')))
    # Determine the project (used in the serviceaccount and authorize
    # subcommands), either from the "-p" option's value or the default specified
    # in the user's Boto config file.
    if not self.project_id:
      self.project_id = PopulateProjectId(None)

  def _AuthorizeProject(self, project_id, kms_key):
    """Authorizes a project's service account to be used with a KMS key.

    Authorizes the Cloud Storage-owned service account for project_id to be used
    with kms_key.

    Args:
      project_id: (str) Project id string (not number).
      kms_key: (str) Fully qualified resource name for the KMS key.

    Returns:
      (str, bool) A 2-tuple consisting of:
      1) The email address for the service account associated with the project,
         which is authorized to encrypt/decrypt with the specified key.
      2) A bool value - True if we had to grant the service account permission
         to encrypt/decrypt with the given key; False if the required permission
         was already present.
    """
    # Request the Cloud Storage-owned service account for project_id, creating
    # it if it does not exist.
    service_account = self.gsutil_api.GetProjectServiceAccount(
        project_id, provider='gs').email_address

    kms_api = KmsApi(logger=self.logger)
    self.logger.debug('Getting IAM policy for %s', kms_key)
    try:
      policy = kms_api.GetKeyIamPolicy(kms_key)
      self.logger.debug('Current policy is %s', policy)

      # Check if the required binding is already present; if not, add it and
      # update the key's IAM policy.
      added_new_binding = False
      binding = Binding(role='roles/cloudkms.cryptoKeyEncrypterDecrypter',
                        members=['serviceAccount:%s' % service_account])
      if binding not in policy.bindings:
        policy.bindings.append(binding)
        kms_api.SetKeyIamPolicy(kms_key, policy)
        added_new_binding = True
      return (service_account, added_new_binding)
    except AccessDeniedException:
      if self.warn_on_key_authorize_failure:
        text_util.print_to_fd('\n'.join(
            textwrap.wrap(
                'Warning: Check that your Cloud Platform project\'s service '
                'account has the "cloudkms.cryptoKeyEncrypterDecrypter" role '
                'for the specified key. Without this role, you may not be '
                'able to encrypt or decrypt objects using the key which will '
                'prevent you from uploading or downloading objects.')))
        return (service_account, False)
      else:
        raise

  def _Authorize(self):
    self._GatherSubOptions('authorize')
    if not self.kms_key:
      raise CommandException('%s %s requires a key to be specified with -k' %
                             (self.command_name, self.subcommand_name))

    _, newly_authorized = self._AuthorizeProject(self.project_id, self.kms_key)
    if newly_authorized:
      print('Authorized project %s to encrypt and decrypt with key:\n%s' %
            (self.project_id, self.kms_key))
    else:
      print('Project %s was already authorized to encrypt and decrypt with '
            'key:\n%s.' % (self.project_id, self.kms_key))
    return 0

  def _EncryptionClearKey(self, bucket_metadata, bucket_url):
    """Clears the defaultKmsKeyName on a Cloud Storage bucket.

    Args:
      bucket_metadata: (apitools_messages.Bucket) Metadata for the given bucket.
      bucket_url: (gslib.storage_url.StorageUrl) StorageUrl of the given bucket.
    """
    bucket_metadata.encryption = apitools_messages.Bucket.EncryptionValue()
    print('Clearing default encryption key for %s...' %
          str(bucket_url).rstrip('/'))
    self.gsutil_api.PatchBucket(bucket_url.bucket_name,
                                bucket_metadata,
                                fields=['encryption'],
                                provider=bucket_url.scheme)

  def _EncryptionSetKey(self, bucket_metadata, bucket_url,
                        svc_acct_for_project_num):
    """Sets defaultKmsKeyName on a Cloud Storage bucket.

    Args:
      bucket_metadata: (apitools_messages.Bucket) Metadata for the given bucket.
      bucket_url: (gslib.storage_url.StorageUrl) StorageUrl of the given bucket.
      svc_acct_for_project_num: (Dict[int, str]) Mapping of project numbers to
          their corresponding service account.
    """
    bucket_project_number = bucket_metadata.projectNumber
    try:
      # newly_authorized will always be False if the project number is in our
      # cache dict, since we've already called _AuthorizeProject on it.
      service_account, newly_authorized = (
          svc_acct_for_project_num[bucket_project_number], False)
    except KeyError:
      service_account, newly_authorized = self._AuthorizeProject(
          bucket_project_number, self.kms_key)
      svc_acct_for_project_num[bucket_project_number] = service_account
    if newly_authorized:
      text_util.print_to_fd('Authorized service account %s to use key:\n%s' %
                            (service_account, self.kms_key))

    bucket_metadata.encryption = apitools_messages.Bucket.EncryptionValue(
        defaultKmsKeyName=self.kms_key)
    print('Setting default KMS key for bucket %s...' %
          str(bucket_url).rstrip('/'))
    self.gsutil_api.PatchBucket(bucket_url.bucket_name,
                                bucket_metadata,
                                fields=['encryption'],
                                provider=bucket_url.scheme)

  def _Encryption(self):
    self._GatherSubOptions('encryption')
    # For each project, we should only make one API call to look up its
    # associated Cloud Storage-owned service account; subsequent lookups can be
    # pulled from this cache dict.
    svc_acct_for_project_num = {}

    def _EncryptionForBucket(blr):
      """Set, clear, or get the defaultKmsKeyName for a bucket."""
      bucket_url = blr.storage_url

      if bucket_url.scheme != 'gs':
        raise CommandException(
            'The %s command can only be used with gs:// bucket URLs.' %
            self.command_name)

      # Determine the project from the provided bucket.
      bucket_metadata = self.gsutil_api.GetBucket(
          bucket_url.bucket_name,
          fields=['encryption', 'projectNumber'],
          provider=bucket_url.scheme)

      # "-d" flag was specified, so clear the default KMS key and return.
      if self.clear_kms_key:
        self._EncryptionClearKey(bucket_metadata, bucket_url)
        return 0
      # "-k" flag was specified, so set the default KMS key and return.
      if self.kms_key:
        self._EncryptionSetKey(bucket_metadata, bucket_url,
                               svc_acct_for_project_num)
        return 0
      # Neither "-d" nor "-k" was specified, so emit the default KMS key and
      # return.
      bucket_url_string = str(bucket_url).rstrip('/')
      if (bucket_metadata.encryption and
          bucket_metadata.encryption.defaultKmsKeyName):
        print('Default encryption key for %s:\n%s' %
              (bucket_url_string, bucket_metadata.encryption.defaultKmsKeyName))
      else:
        print('Bucket %s has no default encryption key' % bucket_url_string)
      return 0

    # Iterate over bucket args, performing the specified encryption operation
    # for each.
    some_matched = False
    url_args = self.args
    if not url_args:
      self.RaiseWrongNumberOfArgumentsException()
    for url_str in url_args:
      # Throws a CommandException if the argument is not a bucket.
      bucket_iter = self.GetBucketUrlIterFromArg(url_str)
      for bucket_listing_ref in bucket_iter:
        some_matched = True
        _EncryptionForBucket(bucket_listing_ref)

    if not some_matched:
      raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))
    return 0

  def _ServiceAccount(self):
    self.CheckArguments()
    if not self.args:
      self.args = ['gs://']
    if self.sub_opts:
      for o, a in self.sub_opts:
        if o == '-p':
          self.project_id = a

    if not self.project_id:
      self.project_id = PopulateProjectId(None)

    # Request the service account for that project; this might create the
    # service account if it doesn't already exist.
    self.logger.debug('Checking service account for project %s',
                      self.project_id)

    service_account = self.gsutil_api.GetProjectServiceAccount(
        self.project_id, provider='gs').email_address

    print(service_account)

    return 0

  def _RunSubCommand(self, func):
    try:
      self.sub_opts, self.args = getopt.getopt(
          self.args, self.command_spec.supported_sub_args)
      # Commands with both suboptions and subcommands need to reparse for
      # suboptions, so we log again.
      metrics.LogCommandParams(sub_opts=self.sub_opts)
      return func(self)
    except getopt.GetoptError:
      self.RaiseInvalidArgumentException()

  def RunCommand(self):
    """Command entry point for the kms command."""
    # If the only credential type the user supplies in their boto file is hmac,
    # GetApiSelector logic will force us to use the XML API. As the XML API does
    # not support all the operations needed for kms subcommands, fail early.
    if self.gsutil_api.GetApiSelector(provider='gs') != ApiSelector.JSON:
      raise CommandException('\n'.join(
          textwrap.wrap(
              'The "%s" command can only be used with the GCS JSON API, which '
              'cannot use HMAC credentials. Please supply a credential '
              'type that is compatible with the JSON API (e.g. OAuth2) in your '
              'boto config file.' % self.command_name)))

    method_for_subcommand = {
        'authorize': KmsCommand._Authorize,
        'encryption': KmsCommand._Encryption,
        'serviceaccount': KmsCommand._ServiceAccount
    }
    self.subcommand_name = self.args.pop(0)
    if self.subcommand_name in method_for_subcommand:
      metrics.LogCommandParams(subcommands=[self.subcommand_name])
      return self._RunSubCommand(method_for_subcommand[self.subcommand_name])
    else:
      raise CommandException('Invalid subcommand "%s" for the %s command.' %
                             (self.subcommand_name, self.command_name))
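
# Editor's sketch (not gsutil code): a minimal, runnable illustration of the
# subcommand-dispatch pattern KmsCommand uses above -- pop the subcommand name,
# look it up in a dict of handlers, then reparse the remaining args with getopt
# so the handler sees only its own suboptions. All names here are illustrative.
import getopt


class MiniCommand(object):
  """Toy stand-in for a gsutil-style command with subcommands."""

  supported_sub_args = 'k:p:'

  def __init__(self, args):
    self.args = list(args)
    self.sub_opts = []

  def _Authorize(self):
    opts = dict(self.sub_opts)
    print('authorize: key=%s, project=%s' % (opts.get('-k'), opts.get('-p')))
    return 0

  def _RunSubCommand(self, func):
    # Reparse so the handler sees only its own suboptions.
    self.sub_opts, self.args = getopt.getopt(self.args,
                                             self.supported_sub_args)
    return func(self)

  def RunCommand(self):
    method_for_subcommand = {'authorize': MiniCommand._Authorize}
    subcommand_name = self.args.pop(0)
    if subcommand_name not in method_for_subcommand:
      raise ValueError('Invalid subcommand "%s"' % subcommand_name)
    return self._RunSubCommand(method_for_subcommand[subcommand_name])


MiniCommand(['authorize', '-k', 'my-key', '-p', 'my-project']).RunCommand()
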
Example #28
class CorsCommand(Command):
    """Implementation of gsutil cors command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'cors',
        command_name_aliases=['getcors', 'setcors'],
        usage_synopsis=_SYNOPSIS,
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'set': [
                CommandArgument.MakeNFileURLsArgument(1),
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
            ],
            'get': [CommandArgument.MakeNCloudBucketURLsArgument(1)]
        })
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='cors',
        help_name_aliases=['getcors', 'setcors', 'cross-origin'],
        help_type='command_help',
        help_one_line_summary=(
            'Set a CORS JSON document for one or more buckets'),
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'get': _get_help_text,
            'set': _set_help_text
        },
    )

    def _CalculateUrlsStartArg(self):
        if not self.args:
            self.RaiseWrongNumberOfArgumentsException()
        if self.args[0].lower() == 'set':
            return 2
        else:
            return 1

    def _SetCors(self):
        """Sets CORS configuration on a Google Cloud Storage bucket."""
        cors_arg = self.args[0]
        url_args = self.args[1:]
        # Disallow multi-provider 'cors set' requests.
        if not UrlsAreForSingleProvider(url_args):
            raise CommandException(
                '"%s" command spanning providers not allowed.' %
                self.command_name)

        # Open and read the file containing the CORS JSON document.
        with open(cors_arg, 'r') as cors_file:
            cors_txt = cors_file.read()

        self.api = self.gsutil_api.GetApiSelector(
            StorageUrlFromString(url_args[0]).scheme)

        # Iterate over URLs, expanding wildcards and setting the CORS on each.
        some_matched = False
        for url_str in url_args:
            bucket_iter = self.GetBucketUrlIterFromArg(url_str,
                                                       bucket_fields=['id'])
            for blr in bucket_iter:
                url = blr.storage_url
                some_matched = True
                self.logger.info('Setting CORS on %s...', blr)
                if url.scheme == 's3':
                    self.gsutil_api.XmlPassThroughSetCors(cors_txt,
                                                          url,
                                                          provider=url.scheme)
                else:
                    cors = CorsTranslation.JsonCorsToMessageEntries(cors_txt)
                    if not cors:
                        cors = REMOVE_CORS_CONFIG
                    bucket_metadata = apitools_messages.Bucket(cors=cors)
                    self.gsutil_api.PatchBucket(url.bucket_name,
                                                bucket_metadata,
                                                provider=url.scheme,
                                                fields=['id'])
        if not some_matched:
            raise CommandException('No URLs matched')
        return 0

    def _GetCors(self):
        """Gets CORS configuration for a Google Cloud Storage bucket."""
        bucket_url, bucket_metadata = self.GetSingleBucketUrlFromArg(
            self.args[0], bucket_fields=['cors'])

        if bucket_url.scheme == 's3':
            sys.stdout.write(
                self.gsutil_api.XmlPassThroughGetCors(
                    bucket_url, provider=bucket_url.scheme))
        else:
            if bucket_metadata.cors:
                sys.stdout.write(
                    CorsTranslation.MessageEntriesToJson(bucket_metadata.cors))
            else:
                sys.stdout.write('%s has no CORS configuration.\n' %
                                 bucket_url)
        return 0

    def RunCommand(self):
        """Command entry point for the cors command."""
        action_subcommand = self.args.pop(0)
        if action_subcommand == 'get':
            func = self._GetCors
        elif action_subcommand == 'set':
            func = self._SetCors
        else:
            raise CommandException(
                ('Invalid subcommand "%s" for the %s command.\n'
                 'See "gsutil help cors".') %
                (action_subcommand, self.command_name))
        return func()
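
# Editor's sketch (not gsutil code): `cors set` reads a JSON document that is
# a list of CORS rules. This writes a minimal example of such a document; the
# file name and rule values are illustrative.
import json

cors_rules = [{
    'origin': ['https://example.com'],
    'method': ['GET', 'HEAD'],
    'responseHeader': ['Content-Type'],
    'maxAgeSeconds': 3600,
}]

with open('cors.json', 'w') as cors_file:
    json.dump(cors_rules, cors_file, indent=2)
# cors.json could then be passed as the file argument of the set subcommand.
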
Example #29
class LsCommand(Command):
    """Implementation of gsutil ls command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'ls',
        command_name_aliases=['dir', 'list'],
        usage_synopsis=_SYNOPSIS,
        min_args=0,
        max_args=NO_MAX,
        supported_sub_args='aeblLhp:rR',
        file_url_ok=False,
        provider_url_ok=True,
        urls_start_arg=0,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments=[CommandArgument.MakeZeroOrMoreCloudURLsArgument()])
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='ls',
        help_name_aliases=['dir', 'list'],
        help_type='command_help',
        help_one_line_summary='List providers, buckets, or objects',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={},
    )

    def _PrintBucketInfo(self, bucket_blr, listing_style):
        """Print listing info for given bucket.

    Args:
      bucket_blr: BucketListingReference for the bucket being listed
      listing_style: ListingStyle enum describing type of output desired.
    """
        if (listing_style == ListingStyle.SHORT
                or listing_style == ListingStyle.LONG):
            print(bucket_blr)
            return
        # listing_style == ListingStyle.LONG_LONG:
        # We're guaranteed by the caller that the root object is populated.
        bucket = bucket_blr.root_object
        location_constraint = bucket.location
        storage_class = bucket.storageClass
        fields = {
            'bucket': bucket_blr.url_string,
            'storage_class': storage_class,
            'location_constraint': location_constraint,
            'acl': AclTranslation.JsonFromMessage(bucket.acl),
            'default_acl':
            AclTranslation.JsonFromMessage(bucket.defaultObjectAcl)
        }

        fields['versioning'] = bucket.versioning and bucket.versioning.enabled
        fields['website_config'] = 'Present' if bucket.website else 'None'
        fields['logging_config'] = 'Present' if bucket.logging else 'None'
        fields['cors_config'] = 'Present' if bucket.cors else 'None'
        fields['lifecycle_config'] = 'Present' if bucket.lifecycle else 'None'

        # For field values that are multiline, add indenting to make it look
        # prettier.
        for key in fields:
            previous_value = fields[key]
            if (not isinstance(previous_value, str)
                    or '\n' not in previous_value):
                continue
            new_value = previous_value.replace('\n', '\n\t  ')
            # Start multiline values on a new line if they aren't already.
            if not new_value.startswith('\n'):
                new_value = '\n\t  ' + new_value
            fields[key] = new_value

        print(
            '{bucket} :\n'
            '\tStorage class:\t\t\t{storage_class}\n'
            '\tLocation constraint:\t\t{location_constraint}\n'
            '\tVersioning enabled:\t\t{versioning}\n'
            '\tLogging configuration:\t\t{logging_config}\n'
            '\tWebsite configuration:\t\t{website_config}\n'
            '\tCORS configuration:\t\t{cors_config}\n'
            '\tLifecycle configuration:\t{lifecycle_config}\n'
            '\tACL:\t\t\t\t{acl}\n'
            '\tDefault ACL:\t\t\t{default_acl}'.format(**fields))
        if bucket_blr.storage_url.scheme == 's3':
            print(
                'Note: this is an S3 bucket so configuration values may be '
                'blank. To retrieve bucket configuration values, use '
                'individual configuration commands such as gsutil acl get '
                '<bucket>.')

    def _PrintLongListing(self, bucket_listing_ref):
        """Prints an object with ListingStyle.LONG."""
        obj = bucket_listing_ref.root_object
        url_str = bucket_listing_ref.url_string
        if (obj.metadata and S3_DELETE_MARKER_GUID
                in obj.metadata.additionalProperties):
            size_string = '0'
            num_bytes = 0
            num_objs = 0
            url_str += '<DeleteMarker>'
        else:
            size_string = (MakeHumanReadable(obj.size)
                           if self.human_readable else str(obj.size))
            num_bytes = obj.size
            num_objs = 1

        timestamp = JSON_TIMESTAMP_RE.sub(r'\1T\2Z', str(obj.updated))
        printstr = '%(size)10s  %(timestamp)s  %(url)s'
        etag = None
        metageneration = None
        if self.all_versions:
            printstr += '  metageneration=%(metageneration)s'
            metageneration = str(obj.metageneration)
        if self.include_etag:
            printstr += '  etag=%(etag)s'
            etag = obj.etag
        format_args = {
            'size': size_string,
            'timestamp': timestamp,
            'url': url_str,
            'metageneration': metageneration,
            'etag': etag
        }
        print(printstr % format_args)
        return (num_objs, num_bytes)

    def RunCommand(self):
        """Command entry point for the ls command."""
        got_nomatch_errors = False
        got_bucket_nomatch_errors = False
        listing_style = ListingStyle.SHORT
        get_bucket_info = False
        self.recursion_requested = False
        self.all_versions = False
        self.include_etag = False
        self.human_readable = False
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-a':
                    self.all_versions = True
                elif o == '-e':
                    self.include_etag = True
                elif o == '-b':
                    get_bucket_info = True
                elif o == '-h':
                    self.human_readable = True
                elif o == '-l':
                    listing_style = ListingStyle.LONG
                elif o == '-L':
                    listing_style = ListingStyle.LONG_LONG
                elif o == '-p':
                    self.project_id = a
                elif o == '-r' or o == '-R':
                    self.recursion_requested = True

        if not self.args:
            # default to listing all gs buckets
            self.args = ['gs://']

        total_objs = 0
        total_bytes = 0

        def MaybePrintBucketHeader(blr):
            if len(self.args) > 1:
                print('%s:' % blr.url_string)

        print_bucket_header = MaybePrintBucketHeader

        for url_str in self.args:
            storage_url = StorageUrlFromString(url_str)
            if storage_url.IsFileUrl():
                raise CommandException('Only cloud URLs are supported for %s' %
                                       self.command_name)
            bucket_fields = None
            if (listing_style == ListingStyle.SHORT
                    or listing_style == ListingStyle.LONG):
                bucket_fields = ['id']
            elif listing_style == ListingStyle.LONG_LONG:
                bucket_fields = [
                    'location', 'storageClass', 'versioning', 'acl',
                    'defaultObjectAcl', 'website', 'logging', 'cors',
                    'lifecycle'
                ]
            if storage_url.IsProvider():
                # Provider URL: use bucket wildcard to list buckets.
                for blr in self.WildcardIterator(
                        '%s://*' % storage_url.scheme).IterBuckets(
                            bucket_fields=bucket_fields):
                    self._PrintBucketInfo(blr, listing_style)
            elif storage_url.IsBucket() and get_bucket_info:
                # ls -b bucket listing request: List info about bucket(s).
                total_buckets = 0
                for blr in self.WildcardIterator(url_str).IterBuckets(
                        bucket_fields=bucket_fields):
                    if not ContainsWildcard(url_str) and not blr.root_object:
                        # Iterator does not make an HTTP call for non-wildcarded
                        # listings with fields=='id'. Ensure the bucket exists by calling
                        # GetBucket.
                        self.gsutil_api.GetBucket(blr.storage_url.bucket_name,
                                                  fields=['id'],
                                                  provider=storage_url.scheme)
                    self._PrintBucketInfo(blr, listing_style)
                    total_buckets += 1
                if not ContainsWildcard(url_str) and not total_buckets:
                    got_bucket_nomatch_errors = True
            else:
                # URL names a bucket, object, or object subdir ->
                # list matching object(s) / subdirs.
                def _PrintPrefixLong(blr):
                    print('%-33s%s' % ('', blr.url_string))

                if listing_style == ListingStyle.SHORT:
                    # ls helper by default readies us for a short listing.
                    ls_helper = LsHelper(
                        self.WildcardIterator,
                        self.logger,
                        all_versions=self.all_versions,
                        print_bucket_header_func=print_bucket_header,
                        should_recurse=self.recursion_requested)
                elif listing_style == ListingStyle.LONG:
                    bucket_listing_fields = ['name', 'updated', 'size']
                    if self.all_versions:
                        bucket_listing_fields.extend(
                            ['generation', 'metageneration'])
                    if self.include_etag:
                        bucket_listing_fields.append('etag')

                    ls_helper = LsHelper(
                        self.WildcardIterator,
                        self.logger,
                        print_object_func=self._PrintLongListing,
                        print_dir_func=_PrintPrefixLong,
                        print_bucket_header_func=print_bucket_header,
                        all_versions=self.all_versions,
                        should_recurse=self.recursion_requested,
                        fields=bucket_listing_fields)

                elif listing_style == ListingStyle.LONG_LONG:
                    # List all fields
                    bucket_listing_fields = None
                    ls_helper = LsHelper(
                        self.WildcardIterator,
                        self.logger,
                        print_object_func=PrintFullInfoAboutObject,
                        print_dir_func=_PrintPrefixLong,
                        print_bucket_header_func=print_bucket_header,
                        all_versions=self.all_versions,
                        should_recurse=self.recursion_requested,
                        fields=bucket_listing_fields)
                else:
                    raise CommandException('Unknown listing style: %s' %
                                           listing_style)

                exp_dirs, exp_objs, exp_bytes = ls_helper.ExpandUrlAndPrint(
                    storage_url)
                if storage_url.IsObject() and exp_objs == 0 and exp_dirs == 0:
                    got_nomatch_errors = True
                total_bytes += exp_bytes
                total_objs += exp_objs

        if total_objs and listing_style != ListingStyle.SHORT:
            print('TOTAL: %d objects, %d bytes (%s)' %
                  (total_objs, total_bytes,
                   MakeHumanReadable(float(total_bytes))))
        if got_nomatch_errors:
            raise CommandException('One or more URLs matched no objects.')
        if got_bucket_nomatch_errors:
            raise NotFoundException(
                'One or more bucket URLs matched no buckets.')

        return 0
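
# Editor's sketch (not gsutil code): with -l and -h, the ls command above
# routes sizes through MakeHumanReadable. This is a rough stand-in for such a
# helper; the real implementation's rounding and suffixes may differ.
def make_human_readable(num_bytes):
    """Converts a byte count to a string, e.g. 1536 -> '1.50 KiB'."""
    value = float(num_bytes)
    for suffix in ('B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB'):
        if value < 1024.0 or suffix == 'PiB':
            return '%.2f %s' % (value, suffix)
        value /= 1024.0


print(make_human_readable(1536))     # 1.50 KiB
print(make_human_readable(10 ** 9))  # 953.67 MiB
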
Example #30
class NotificationCommand(Command):
    """Implementation of gsutil notification command."""

    # Notification names might look like one of these:
    #  canonical form:  projects/_/buckets/bucket/notificationConfigs/3
    #  JSON API form:   b/bucket/notificationConfigs/5
    # Either of the above might start with a / if a user is copying & pasting.
    def _GetNotificationPathRegex(self):
        if not NotificationCommand._notification_path_regex:
            NotificationCommand._notification_path_regex = re.compile(
                ('/?(projects/[^/]+/)?b(uckets)?/(?P<bucket>[^/]+)/'
                 'notificationConfigs/(?P<notification>[0-9]+)'))
        return NotificationCommand._notification_path_regex

    _notification_path_regex = None

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'notification',
        command_name_aliases=[
            'notify',
            'notifyconfig',
            'notifications',
            'notif',
        ],
        usage_synopsis=_SYNOPSIS,
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='i:t:m:of:e:p:s',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'watchbucket': [
                CommandArgument.MakeFreeTextArgument(),
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument(),
            ],
            'stopchannel': [],
            'list': [
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument(),
            ],
            'delete': [
                # Takes a list of one of the following:
                #   notification: projects/_/buckets/bla/notificationConfigs/5,
                #   bucket: gs://foobar
                CommandArgument.MakeZeroOrMoreCloudURLsArgument(),
            ],
            'create': [
                CommandArgument.MakeFreeTextArgument(),  # Cloud Pub/Sub topic
                CommandArgument.MakeNCloudBucketURLsArgument(1),
            ]
        },
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='notification',
        help_name_aliases=[
            'watchbucket',
            'stopchannel',
            'notifyconfig',
        ],
        help_type='command_help',
        help_one_line_summary='Configure object change notification',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'create': _create_help_text,
            'list': _list_help_text,
            'delete': _delete_help_text,
            'watchbucket': _watchbucket_help_text,
            'stopchannel': _stopchannel_help_text,
        },
    )

    def _WatchBucket(self):
        """Creates a watch on a bucket given in self.args."""
        self.CheckArguments()
        identifier = None
        client_token = None
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-i':
                    identifier = a
                if o == '-t':
                    client_token = a

        identifier = identifier or str(uuid.uuid4())
        watch_url = self.args[0]
        bucket_arg = self.args[-1]

        if not watch_url.lower().startswith('https://'):
            raise CommandException(
                'The application URL must be an https:// URL.')

        bucket_url = StorageUrlFromString(bucket_arg)
        if not (bucket_url.IsBucket() and bucket_url.scheme == 'gs'):
            raise CommandException(
                'The %s command can only be used with gs:// bucket URLs.' %
                self.command_name)

        self.logger.info('Watching bucket %s with application URL %s ...',
                         bucket_url, watch_url)

        try:
            channel = self.gsutil_api.WatchBucket(bucket_url.bucket_name,
                                                  watch_url,
                                                  identifier,
                                                  token=client_token,
                                                  provider=bucket_url.scheme)
        except AccessDeniedException as e:
            self.logger.warn(
                NOTIFICATION_AUTHORIZATION_FAILED_MESSAGE.format(
                    watch_error=str(e), watch_url=watch_url))
            raise

        channel_id = channel.id
        resource_id = channel.resourceId
        client_token = channel.token
        self.logger.info('Successfully created watch notification channel.')
        self.logger.info('Watch channel identifier: %s', channel_id)
        self.logger.info('Canonicalized resource identifier: %s', resource_id)
        self.logger.info('Client state token: %s', client_token)

        return 0

    def _StopChannel(self):
        channel_id = self.args[0]
        resource_id = self.args[1]

        self.logger.info('Removing channel %s with resource identifier %s ...',
                         channel_id, resource_id)
        self.gsutil_api.StopChannel(channel_id, resource_id, provider='gs')
        self.logger.info('Successfully removed channel.')

        return 0

    def _ListChannels(self, bucket_arg):
        """Lists active channel watches on a bucket given in self.args."""
        bucket_url = StorageUrlFromString(bucket_arg)
        if not (bucket_url.IsBucket() and bucket_url.scheme == 'gs'):
            raise CommandException(
                'The %s command can only be used with gs:// bucket URLs.' %
                self.command_name)
        channels = self.gsutil_api.ListChannels(bucket_url.bucket_name,
                                                provider='gs').items
        self.logger.info(
            'Bucket %s has the following active Object Change Notifications:',
            bucket_url.bucket_name)
        for idx, channel in enumerate(channels):
            self.logger.info('\tNotification channel %d:', idx + 1)
            self.logger.info('\t\tChannel identifier: %s', channel.channel_id)
            self.logger.info('\t\tResource identifier: %s',
                             channel.resource_id)
            self.logger.info('\t\tApplication URL: %s', channel.push_url)
            self.logger.info('\t\tCreated by: %s', channel.subscriber_email)
            self.logger.info(
                '\t\tCreation time: %s',
                str(datetime.fromtimestamp(channel.creation_time_ms / 1000)))

        return 0

    def _Create(self):
        self.CheckArguments()

        # User-specified options
        pubsub_topic = None
        payload_format = None
        custom_attributes = {}
        event_types = []
        object_name_prefix = None
        should_setup_topic = True

        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-e':
                    event_types.append(a)
                elif o == '-f':
                    payload_format = a
                elif o == '-m':
                    if ':' not in a:
                        raise CommandException(
                            'Custom attributes specified with -m should be of the form '
                            'key:value')
                    # Split on the first ':' only, so values may contain colons.
                    key, value = a.split(':', 1)
                    custom_attributes[key] = value
                elif o == '-p':
                    object_name_prefix = a
                elif o == '-s':
                    should_setup_topic = False
                elif o == '-t':
                    pubsub_topic = a

        if payload_format not in PAYLOAD_FORMAT_MAP:
            raise CommandException(
                "Must provide a payload format with -f of either 'json' or 'none'"
            )
        payload_format = PAYLOAD_FORMAT_MAP[payload_format]

        bucket_arg = self.args[-1]

        bucket_url = StorageUrlFromString(bucket_arg)
        if not bucket_url.IsCloudUrl() or not bucket_url.IsBucket():
            raise CommandException(
                "%s %s requires a GCS bucket name, but got '%s'" %
                (self.command_name, self.subcommand_name, bucket_arg))
        if bucket_url.scheme != 'gs':
            raise CommandException(
                'The %s command can only be used with gs:// bucket URLs.' %
                self.command_name)
        bucket_name = bucket_url.bucket_name
        self.logger.debug('Creating notification for bucket %s', bucket_url)

        # Find the project this bucket belongs to
        bucket_metadata = self.gsutil_api.GetBucket(bucket_name,
                                                    fields=['projectNumber'],
                                                    provider=bucket_url.scheme)
        bucket_project_number = bucket_metadata.projectNumber

        # If not specified, choose a sensible default for the Cloud Pub/Sub topic
        # name.
        if not pubsub_topic:
            pubsub_topic = 'projects/%s/topics/%s' % (PopulateProjectId(None),
                                                      bucket_name)
        if not pubsub_topic.startswith('projects/'):
            # If a user picks a topic ID (mytopic) but doesn't pass the whole
            # name (projects/my-project/topics/mytopic), pick a default project.
            pubsub_topic = 'projects/%s/topics/%s' % (PopulateProjectId(None),
                                                      pubsub_topic)
        self.logger.debug('Using Cloud Pub/Sub topic %s', pubsub_topic)

        just_modified_topic_permissions = False
        if should_setup_topic:
            # Ask GCS for the email address that represents GCS's permission to
            # publish to a Cloud Pub/Sub topic from this project.
            service_account = self.gsutil_api.GetProjectServiceAccount(
                bucket_project_number,
                provider=bucket_url.scheme).email_address
            self.logger.debug('Service account for project %d: %s',
                              bucket_project_number, service_account)
            just_modified_topic_permissions = self._CreateTopic(
                pubsub_topic, service_account)

        for attempt_number in range(0, 2):
            try:
                create_response = self.gsutil_api.CreateNotificationConfig(
                    bucket_name,
                    pubsub_topic=pubsub_topic,
                    payload_format=payload_format,
                    custom_attributes=custom_attributes,
                    event_types=event_types if event_types else None,
                    object_name_prefix=object_name_prefix,
                    provider=bucket_url.scheme)
                break
            except PublishPermissionDeniedException:
                if attempt_number == 0 and just_modified_topic_permissions:
                    # If we have just set the IAM policy, it may take up to 10 seconds to
                    # take effect.
                    self.logger.info(
                        'Retrying create notification in 10 seconds '
                        '(new permissions may take up to 10 seconds to take '
                        'effect).')
                    time.sleep(10)
                else:
                    raise

        notification_name = 'projects/_/buckets/%s/notificationConfigs/%s' % (
            bucket_name, create_response.id)
        self.logger.info('Created notification config %s', notification_name)

        return 0

    def _CreateTopic(self, pubsub_topic, service_account):
        """Assures that a topic exists, creating it if necessary.

    Also adds GCS as a publisher on that bucket, if necessary.

    Args:
      pubsub_topic: name of the Cloud Pub/Sub topic to use/create.
      service_account: the GCS service account that needs publish permission.

    Returns:
      true if we modified IAM permissions, otherwise false.
    """

        pubsub_api = PubsubApi(logger=self.logger)

        # Verify that the Pub/Sub topic exists. If it does not, create it.
        try:
            pubsub_api.GetTopic(topic_name=pubsub_topic)
            self.logger.debug('Topic %s already exists', pubsub_topic)
        except NotFoundException:
            self.logger.debug('Creating topic %s', pubsub_topic)
            pubsub_api.CreateTopic(topic_name=pubsub_topic)
            self.logger.info('Created Cloud Pub/Sub topic %s', pubsub_topic)

        # Verify that the service account is in the IAM policy.
        policy = pubsub_api.GetTopicIamPolicy(topic_name=pubsub_topic)
        binding = Binding(role='roles/pubsub.publisher',
                          members=['serviceAccount:%s' % service_account])

        # This could be more extensive. We could, for instance, check for roles
        # that are stronger than pubsub.publisher, like owner. We could also
        # recurse up the hierarchy looking to see if there are project-level
        # permissions. This can get very complex very quickly, as the caller
        # may not necessarily have access to the project-level IAM policy.
        # There's no danger in double-granting permission just to make sure it's
        # there, though.
        if binding not in policy.bindings:
            policy.bindings.append(binding)
            # Transactional safety via the etag field.
            pubsub_api.SetTopicIamPolicy(topic_name=pubsub_topic,
                                         policy=policy)
            return True
        else:
            self.logger.debug(
                'GCS already has publish permission to topic %s.',
                pubsub_topic)
            return False

    def _EnumerateNotificationsFromArgs(self,
                                        accept_notification_configs=True):
        """Yields bucket/notification tuples from command-line args.

    Given a list of strings that are bucket names (gs://foo) or notification
    config IDs, yield tuples of bucket names and their associated notifications.

    Args:
      accept_notification_configs: whether notification configs are valid args.
    Yields:
      Tuples of the form (bucket_name, Notification)
    """
        path_regex = self._GetNotificationPathRegex()

        for list_entry in self.args:
            match = path_regex.match(list_entry)
            if match:
                if not accept_notification_configs:
                    raise CommandException(
                        '%s %s accepts only bucket names, but you provided %s'
                        %
                        (self.command_name, self.subcommand_name, list_entry))
                bucket_name = match.group('bucket')
                notification_id = match.group('notification')
                found = False
                for notification in self.gsutil_api.ListNotificationConfigs(
                        bucket_name, provider='gs'):
                    if notification.id == notification_id:
                        yield (bucket_name, notification)
                        found = True
                        break
                if not found:
                    raise NotFoundException('Could not find notification %s' %
                                            list_entry)
            else:
                storage_url = StorageUrlFromString(list_entry)
                if not storage_url.IsCloudUrl():
                    raise CommandException(
                        'The %s command must be used on cloud buckets or notification '
                        'config names.' % self.command_name)
                if storage_url.scheme != 'gs':
                    raise CommandException(
                        'The %s command only works on gs:// buckets.' %
                        self.command_name)
                path = None
                if storage_url.IsProvider():
                    path = 'gs://*'
                elif storage_url.IsBucket():
                    path = list_entry
                if not path:
                    raise CommandException(
                        'The %s command cannot be used on cloud objects, only buckets'
                        % self.command_name)
                for blr in self.WildcardIterator(path).IterBuckets(
                        bucket_fields=['id']):
                    for notification in self.gsutil_api.ListNotificationConfigs(
                            blr.storage_url.bucket_name, provider='gs'):
                        yield (blr.storage_url.bucket_name, notification)

    def _List(self):
        self.CheckArguments()
        if self.sub_opts:
            if '-o' in dict(self.sub_opts):
                for bucket_name in self.args:
                    self._ListChannels(bucket_name)
        else:
            for bucket_name, notification in self._EnumerateNotificationsFromArgs(
                    accept_notification_configs=False):
                self._PrintNotificationDetails(bucket_name, notification)
        return 0

    def _PrintNotificationDetails(self, bucket, notification):
        print(
            'projects/_/buckets/{bucket}/notificationConfigs/{notification}\n'
            '\tCloud Pub/Sub topic: {topic}'.format(
                bucket=bucket,
                notification=notification.id,
                topic=notification.topic[len('//pubsub.googleapis.com/'):]))
        if notification.custom_attributes:
            print('\tCustom attributes:')
            for attr in notification.custom_attributes.additionalProperties:
                print('\t\t%s: %s' % (attr.key, attr.value))
        filters = []
        if notification.event_types:
            filters.append('\t\tEvent Types: %s' %
                           ', '.join(notification.event_types))
        if notification.object_name_prefix:
            filters.append("\t\tObject name prefix: '%s'" %
                           notification.object_name_prefix)
        if filters:
            print('\tFilters:')
            for line in filters:
                print(line)
        self.logger.info('')

    def _Delete(self):
        for bucket_name, notification in self._EnumerateNotificationsFromArgs(
        ):
            self._DeleteNotification(bucket_name, notification.id)
        return 0

    def _DeleteNotification(self, bucket_name, notification_id):
        self.gsutil_api.DeleteNotificationConfig(bucket_name,
                                                 notification=notification_id,
                                                 provider='gs')
        return 0

    def _RunSubCommand(self, func):
        try:
            (self.sub_opts,
             self.args) = getopt.getopt(self.args,
                                        self.command_spec.supported_sub_args)
            # Commands with both suboptions and subcommands need to reparse for
            # suboptions, so we log again.
            metrics.LogCommandParams(sub_opts=self.sub_opts)
            return func(self)
        except getopt.GetoptError:
            self.RaiseInvalidArgumentException()

    SUBCOMMANDS = {
        'create': _Create,
        'list': _List,
        'delete': _Delete,
        'watchbucket': _WatchBucket,
        'stopchannel': _StopChannel
    }

    def RunCommand(self):
        """Command entry point for the notification command."""
        self.subcommand_name = self.args.pop(0)
        if self.subcommand_name in NotificationCommand.SUBCOMMANDS:
            metrics.LogCommandParams(subcommands=[self.subcommand_name])
            return self._RunSubCommand(
                NotificationCommand.SUBCOMMANDS[self.subcommand_name])
        else:
            raise CommandException(
                'Invalid subcommand "%s" for the %s command.' %
                (self.subcommand_name, self.command_name))
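
# Editor's sketch (not gsutil code): a standalone check that the notification
# path regex defined at the top of NotificationCommand accepts both name forms
# described in the class comment, with or without a leading slash.
import re

path_regex = re.compile('/?(projects/[^/]+/)?b(uckets)?/(?P<bucket>[^/]+)/'
                        'notificationConfigs/(?P<notification>[0-9]+)')

for name in ('projects/_/buckets/bucket/notificationConfigs/3',
             'b/bucket/notificationConfigs/5',
             '/b/bucket/notificationConfigs/5'):
    match = path_regex.match(name)
    print('%s -> bucket=%s, notification=%s' %
          (name, match.group('bucket'), match.group('notification')))
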