class FakeCommandWithCompleters(Command):
  """Command with various completer types."""

  command_spec = Command.CreateCommandSpec(
      'fake2',
      argparse_arguments=[
          CommandArgument.MakeZeroOrMoreCloudURLsArgument(),
          CommandArgument.MakeZeroOrMoreFileURLsArgument(),
          CommandArgument.MakeZeroOrMoreCloudOrFileURLsArgument(),
          CommandArgument.MakeFreeTextArgument(),
          CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument(),
          CommandArgument.MakeFileURLOrCannedACLArgument(),
      ]
  )

  help_spec = Command.HelpSpec(
      help_name='fake2',
      help_name_aliases=[],
      help_type='command_help',
      help_one_line_summary='fake command for tests',
      help_text='fake command for tests',
      subcommand_help_text={}
  )

  def __init__(self):
    pass
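# Note: the class above is a minimal test stub; its command_spec lists one of
# each CommandArgument factory so that the corresponding tab-completion
# completers (cloud URL, file URL, cloud-or-file URL, free text, cloud bucket
# URL, and file-URL-or-canned-ACL) can each be exercised. The surrounding test
# harness is not shown here.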
Example #2
class NotificationCommand(Command):
    """Implementation of gsutil notification command."""

    # Notification names might look like one of these:
    #  canonical form:  projects/_/buckets/bucket/notificationConfigs/3
    #  JSON API form:   b/bucket/notificationConfigs/5
    # Either of the above might start with a / if a user is copying & pasting.
    def _GetNotificationPathRegex(self):
        if not NotificationCommand._notification_path_regex:
            NotificationCommand._notification_path_regex = re.compile(
                ('/?(projects/[^/]+/)?b(uckets)?/(?P<bucket>[^/]+)/'
                 'notificationConfigs/(?P<notification>[0-9]+)'))
        return NotificationCommand._notification_path_regex

    _notification_path_regex = None
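    # Illustrative check of the pattern compiled above (bucket name and config
    # ID are hypothetical):
    #   >>> regex = re.compile('/?(projects/[^/]+/)?b(uckets)?/(?P<bucket>[^/]+)/'
    #   ...                    'notificationConfigs/(?P<notification>[0-9]+)')
    #   >>> regex.match('projects/_/buckets/mybucket/notificationConfigs/3').groupdict()
    #   {'bucket': 'mybucket', 'notification': '3'}
    #   >>> regex.match('/b/mybucket/notificationConfigs/5').group('bucket')
    #   'mybucket'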

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'notification',
        command_name_aliases=[
            'notify',
            'notifyconfig',
            'notifications',
            'notif',
        ],
        usage_synopsis=_SYNOPSIS,
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='i:t:m:t:of:e:p:s',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'watchbucket': [
                CommandArgument.MakeFreeTextArgument(),
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument(),
            ],
            'stopchannel': [],
            'list': [
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument(),
            ],
            'delete': [
                # Takes a list of one of the following:
                #   notification: projects/_/buckets/bla/notificationConfigs/5,
                #   bucket: gs://foobar
                CommandArgument.MakeZeroOrMoreCloudURLsArgument(),
            ],
            'create': [
                CommandArgument.MakeFreeTextArgument(),  # Cloud Pub/Sub topic
                CommandArgument.MakeNCloudBucketURLsArgument(1),
            ]
        },
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='notification',
        help_name_aliases=[
            'watchbucket',
            'stopchannel',
            'notifyconfig',
        ],
        help_type='command_help',
        help_one_line_summary='Configure object change notification',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'create': _create_help_text,
            'list': _list_help_text,
            'delete': _delete_help_text,
            'watchbucket': _watchbucket_help_text,
            'stopchannel': _stopchannel_help_text,
        },
    )

    def _WatchBucket(self):
        """Creates a watch on a bucket given in self.args."""
        self.CheckArguments()
        identifier = None
        client_token = None
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-i':
                    identifier = a
                if o == '-t':
                    client_token = a

        identifier = identifier or str(uuid.uuid4())
        watch_url = self.args[0]
        bucket_arg = self.args[-1]

        if not watch_url.lower().startswith('https://'):
            raise CommandException(
                'The application URL must be an https:// URL.')

        bucket_url = StorageUrlFromString(bucket_arg)
        if not (bucket_url.IsBucket() and bucket_url.scheme == 'gs'):
            raise CommandException(
                'The %s command can only be used with gs:// bucket URLs.' %
                self.command_name)
        if not bucket_url.IsBucket():
            raise CommandException(
                'URL must name a bucket for the %s command.' %
                self.command_name)

        self.logger.info('Watching bucket %s with application URL %s ...',
                         bucket_url, watch_url)

        try:
            channel = self.gsutil_api.WatchBucket(bucket_url.bucket_name,
                                                  watch_url,
                                                  identifier,
                                                  token=client_token,
                                                  provider=bucket_url.scheme)
        except AccessDeniedException as e:
            self.logger.warn(
                NOTIFICATION_AUTHORIZATION_FAILED_MESSAGE.format(
                    watch_error=str(e), watch_url=watch_url))
            raise

        channel_id = channel.id
        resource_id = channel.resourceId
        client_token = channel.token
        self.logger.info('Successfully created watch notification channel.')
        self.logger.info('Watch channel identifier: %s', channel_id)
        self.logger.info('Canonicalized resource identifier: %s', resource_id)
        self.logger.info('Client state token: %s', client_token)

        return 0

    def _StopChannel(self):
        channel_id = self.args[0]
        resource_id = self.args[1]

        self.logger.info('Removing channel %s with resource identifier %s ...',
                         channel_id, resource_id)
        self.gsutil_api.StopChannel(channel_id, resource_id, provider='gs')
        self.logger.info('Successfully removed channel.')

        return 0

    def _ListChannels(self, bucket_arg):
        """Lists active channel watches on a bucket given in self.args."""
        bucket_url = StorageUrlFromString(bucket_arg)
        if not (bucket_url.IsBucket() and bucket_url.scheme == 'gs'):
            raise CommandException(
                'The %s command can only be used with gs:// bucket URLs.' %
                self.command_name)
        if not bucket_url.IsBucket():
            raise CommandException(
                'URL must name a bucket for the %s command.' %
                self.command_name)
        channels = self.gsutil_api.ListChannels(bucket_url.bucket_name,
                                                provider='gs').items
        self.logger.info(
            'Bucket %s has the following active Object Change Notifications:',
            bucket_url.bucket_name)
        for idx, channel in enumerate(channels):
            self.logger.info('\tNotification channel %d:', idx + 1)
            self.logger.info('\t\tChannel identifier: %s', channel.channel_id)
            self.logger.info('\t\tResource identifier: %s',
                             channel.resource_id)
            self.logger.info('\t\tApplication URL: %s', channel.push_url)
            self.logger.info('\t\tCreated by: %s', channel.subscriber_email)
            self.logger.info(
                '\t\tCreation time: %s',
                str(datetime.fromtimestamp(channel.creation_time_ms / 1000)))

        return 0

    def _Create(self):
        self.CheckArguments()

        # User-specified options
        pubsub_topic = None
        payload_format = None
        custom_attributes = {}
        event_types = []
        object_name_prefix = None
        should_setup_topic = True

        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-e':
                    event_types.append(a)
                elif o == '-f':
                    payload_format = a
                elif o == '-m':
                    if ':' not in a:
                        raise CommandException(
                            'Custom attributes specified with -m should be of the form '
                            'key:value')
                    key, value = a.split(':')
                    custom_attributes[key] = value
                elif o == '-p':
                    object_name_prefix = a
                elif o == '-s':
                    should_setup_topic = False
                elif o == '-t':
                    pubsub_topic = a

        if payload_format not in PAYLOAD_FORMAT_MAP:
            raise CommandException(
                "Must provide a payload format with -f of either 'json' or 'none'"
            )
        payload_format = PAYLOAD_FORMAT_MAP[payload_format]

        bucket_arg = self.args[-1]

        bucket_url = StorageUrlFromString(bucket_arg)
        if not bucket_url.IsCloudUrl() or not bucket_url.IsBucket():
            raise CommandException(
                "%s %s requires a GCS bucket name, but got '%s'" %
                (self.command_name, self.subcommand_name, bucket_arg))
        if bucket_url.scheme != 'gs':
            raise CommandException(
                'The %s command can only be used with gs:// bucket URLs.' %
                self.command_name)
        bucket_name = bucket_url.bucket_name
        self.logger.debug('Creating notification for bucket %s', bucket_url)

        # Find the project this bucket belongs to
        bucket_metadata = self.gsutil_api.GetBucket(bucket_name,
                                                    fields=['projectNumber'],
                                                    provider=bucket_url.scheme)
        bucket_project_number = bucket_metadata.projectNumber

        # If not specified, choose a sensible default for the Cloud Pub/Sub topic
        # name.
        if not pubsub_topic:
            pubsub_topic = 'projects/%s/topics/%s' % (PopulateProjectId(None),
                                                      bucket_name)
        if not pubsub_topic.startswith('projects/'):
            # If a user picks a topic ID (mytopic) but doesn't pass the whole name (
            # projects/my-project/topics/mytopic ), pick a default project.
            pubsub_topic = 'projects/%s/topics/%s' % (PopulateProjectId(None),
                                                      pubsub_topic)
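            # For example (hypothetical project and topic names): passing
            # "-t mytopic" while the default project is "my-project" yields
            # pubsub_topic == 'projects/my-project/topics/mytopic'.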
        self.logger.debug('Using Cloud Pub/Sub topic %s', pubsub_topic)

        just_modified_topic_permissions = False
        if should_setup_topic:
            # Ask GCS for the email address that represents GCS's permission to
            # publish to a Cloud Pub/Sub topic from this project.
            service_account = self.gsutil_api.GetProjectServiceAccount(
                bucket_project_number,
                provider=bucket_url.scheme).email_address
            self.logger.debug('Service account for project %d: %s',
                              bucket_project_number, service_account)
            just_modified_topic_permissions = self._CreateTopic(
                pubsub_topic, service_account)

        for attempt_number in range(0, 2):
            try:
                create_response = self.gsutil_api.CreateNotificationConfig(
                    bucket_name,
                    pubsub_topic=pubsub_topic,
                    payload_format=payload_format,
                    custom_attributes=custom_attributes,
                    event_types=event_types if event_types else None,
                    object_name_prefix=object_name_prefix,
                    provider=bucket_url.scheme)
                break
            except PublishPermissionDeniedException:
                if attempt_number == 0 and just_modified_topic_permissions:
                    # If we have just set the IAM policy, it may take up to 10 seconds to
                    # take effect.
                    self.logger.info(
                        'Retrying notification creation in 10 seconds '
                        '(new permissions may take up to 10 seconds to take '
                        'effect).')
                    time.sleep(10)
                else:
                    raise

        notification_name = 'projects/_/buckets/%s/notificationConfigs/%s' % (
            bucket_name, create_response.id)
        self.logger.info('Created notification config %s', notification_name)

        return 0

    def _CreateTopic(self, pubsub_topic, service_account):
        """Assures that a topic exists, creating it if necessary.

    Also adds GCS as a publisher on that topic, if necessary.

    Args:
      pubsub_topic: name of the Cloud Pub/Sub topic to use/create.
      service_account: the GCS service account that needs publish permission.

    Returns:
      True if we modified IAM permissions, otherwise False.
    """

        pubsub_api = PubsubApi(logger=self.logger)

        # Verify that the Pub/Sub topic exists. If it does not, create it.
        try:
            pubsub_api.GetTopic(topic_name=pubsub_topic)
            self.logger.debug('Topic %s already exists', pubsub_topic)
        except NotFoundException:
            self.logger.debug('Creating topic %s', pubsub_topic)
            pubsub_api.CreateTopic(topic_name=pubsub_topic)
            self.logger.info('Created Cloud Pub/Sub topic %s', pubsub_topic)

        # Verify that the service account is in the IAM policy.
        policy = pubsub_api.GetTopicIamPolicy(topic_name=pubsub_topic)
        binding = Binding(role='roles/pubsub.publisher',
                          members=['serviceAccount:%s' % service_account])

        # This could be more extensive. We could, for instance, check for roles
        # that are stronger than pubsub.publisher, like owner. We could also
        # recurse up the hierarchy looking to see if there are project-level
        # permissions. This can get very complex very quickly, as the caller
        # may not necessarily have access to the project-level IAM policy.
        # There's no danger in double-granting permission just to make sure it's
        # there, though.
        if binding not in policy.bindings:
            policy.bindings.append(binding)
            # transactional safety via etag field.
            pubsub_api.SetTopicIamPolicy(topic_name=pubsub_topic,
                                         policy=policy)
            return True
        else:
            self.logger.debug(
                'GCS already has publish permission to topic %s.',
                pubsub_topic)
            return False

    def _EnumerateNotificationsFromArgs(self,
                                        accept_notification_configs=True):
        """Yields bucket/notification tuples from command-line args.

    Given a list of strings that are bucket names (gs://foo) or notification
    config IDs, yield tuples of bucket names and their associated notifications.

    Args:
      accept_notification_configs: whether notification configs are valid args.
    Yields:
      Tuples of the form (bucket_name, Notification)
    """
        path_regex = self._GetNotificationPathRegex()

        for list_entry in self.args:
            match = path_regex.match(list_entry)
            if match:
                if not accept_notification_configs:
                    raise CommandException(
                        '%s %s accepts only bucket names, but you provided %s'
                        %
                        (self.command_name, self.subcommand_name, list_entry))
                bucket_name = match.group('bucket')
                notification_id = match.group('notification')
                found = False
                for notification in self.gsutil_api.ListNotificationConfigs(
                        bucket_name, provider='gs'):
                    if notification.id == notification_id:
                        yield (bucket_name, notification)
                        found = True
                        break
                if not found:
                    raise NotFoundException('Could not find notification %s' %
                                            list_entry)
            else:
                storage_url = StorageUrlFromString(list_entry)
                if not storage_url.IsCloudUrl():
                    raise CommandException(
                        'The %s command must be used on cloud buckets or notification '
                        'config names.' % self.command_name)
                if storage_url.scheme != 'gs':
                    raise CommandException(
                        'The %s command only works on gs:// buckets.' %
                        self.command_name)
                path = None
                if storage_url.IsProvider():
                    path = 'gs://*'
                elif storage_url.IsBucket():
                    path = list_entry
                if not path:
                    raise CommandException(
                        'The %s command cannot be used on cloud objects, only buckets'
                        % self.command_name)
                for blr in self.WildcardIterator(path).IterBuckets(
                        bucket_fields=['id']):
                    for notification in self.gsutil_api.ListNotificationConfigs(
                            blr.storage_url.bucket_name, provider='gs'):
                        yield (blr.storage_url.bucket_name, notification)

    def _List(self):
        self.CheckArguments()
        if self.sub_opts:
            if '-o' in dict(self.sub_opts):
                for bucket_name in self.args:
                    self._ListChannels(bucket_name)
        else:
            for bucket_name, notification in self._EnumerateNotificationsFromArgs(
                    accept_notification_configs=False):
                self._PrintNotificationDetails(bucket_name, notification)
        return 0

    def _PrintNotificationDetails(self, bucket, notification):
        print(
            'projects/_/buckets/{bucket}/notificationConfigs/{notification}\n'
            '\tCloud Pub/Sub topic: {topic}'.format(
                bucket=bucket,
                notification=notification.id,
                topic=notification.topic[len('//pubsub.googleapis.com/'):]))
        if notification.custom_attributes:
            print('\tCustom attributes:')
            for attr in notification.custom_attributes.additionalProperties:
                print('\t\t%s: %s' % (attr.key, attr.value))
        filters = []
        if notification.event_types:
            filters.append('\t\tEvent Types: %s' %
                           ', '.join(notification.event_types))
        if notification.object_name_prefix:
            filters.append("\t\tObject name prefix: '%s'" %
                           notification.object_name_prefix)
        if filters:
            print('\tFilters:')
            for line in filters:
                print(line)
        self.logger.info('')

    def _Delete(self):
        for bucket_name, notification in self._EnumerateNotificationsFromArgs(
        ):
            self._DeleteNotification(bucket_name, notification.id)
        return 0

    def _DeleteNotification(self, bucket_name, notification_id):
        self.gsutil_api.DeleteNotificationConfig(bucket_name,
                                                 notification=notification_id,
                                                 provider='gs')
        return 0

    def _RunSubCommand(self, func):
        try:
            (self.sub_opts,
             self.args) = getopt.getopt(self.args,
                                        self.command_spec.supported_sub_args)
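            # Illustrative example (hypothetical argv): for "notification create",
            # getopt.getopt(['-f', 'json', '-t', 'mytopic', 'gs://my-bucket'],
            # 'i:t:m:t:of:e:p:s') returns
            # ([('-f', 'json'), ('-t', 'mytopic')], ['gs://my-bucket']).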
            # Commands with both suboptions and subcommands need to reparse for
            # suboptions, so we log again.
            metrics.LogCommandParams(sub_opts=self.sub_opts)
            return func(self)
        except getopt.GetoptError:
            self.RaiseInvalidArgumentException()

    SUBCOMMANDS = {
        'create': _Create,
        'list': _List,
        'delete': _Delete,
        'watchbucket': _WatchBucket,
        'stopchannel': _StopChannel
    }

    def RunCommand(self):
        """Command entry point for the notification command."""
        self.subcommand_name = self.args.pop(0)
        if self.subcommand_name in NotificationCommand.SUBCOMMANDS:
            metrics.LogCommandParams(subcommands=[self.subcommand_name])
            return self._RunSubCommand(
                NotificationCommand.SUBCOMMANDS[self.subcommand_name])
        else:
            raise CommandException(
                'Invalid subcommand "%s" for the %s command.' %
                (self.subcommand_name, self.command_name))
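# Illustrative command-line invocations for the subcommands above (topic,
# bucket, channel, and URL values are hypothetical):
#   gsutil notification create -t mytopic -f json gs://my-bucket
#   gsutil notification list gs://my-bucket
#   gsutil notification delete projects/_/buckets/my-bucket/notificationConfigs/5
#   gsutil notification watchbucket https://example.com/notify gs://my-bucket
#   gsutil notification stopchannel channel_id resource_id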
Example #3
class CorsCommand(Command):
    """Implementation of gsutil cors command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'cors',
        command_name_aliases=['getcors', 'setcors'],
        usage_synopsis=_SYNOPSIS,
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'set': [
                CommandArgument.MakeNFileURLsArgument(1),
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
            ],
            'get': [CommandArgument.MakeNCloudBucketURLsArgument(1)]
        })
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='cors',
        help_name_aliases=['getcors', 'setcors', 'cross-origin'],
        help_type='command_help',
        help_one_line_summary=(
            'Set a CORS JSON document for one or more buckets'),
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'get': _get_help_text,
            'set': _set_help_text
        },
    )

    def _CalculateUrlsStartArg(self):
        if not self.args:
            self.RaiseWrongNumberOfArgumentsException()
        if self.args[0].lower() == 'set':
            return 2
        else:
            return 1

    def _SetCors(self):
        """Sets CORS configuration on a Google Cloud Storage bucket."""
        cors_arg = self.args[0]
        url_args = self.args[1:]
        # Disallow multi-provider 'cors set' requests.
        if not UrlsAreForSingleProvider(url_args):
            raise CommandException(
                '"%s" command spanning providers not allowed.' %
                self.command_name)

        # Open, read and parse file containing JSON document.
        with open(cors_arg, 'r') as cors_file:
            cors_txt = cors_file.read()

        self.api = self.gsutil_api.GetApiSelector(
            StorageUrlFromString(url_args[0]).scheme)

        # Iterate over URLs, expanding wildcards and setting the CORS on each.
        some_matched = False
        for url_str in url_args:
            bucket_iter = self.GetBucketUrlIterFromArg(url_str,
                                                       bucket_fields=['id'])
            for blr in bucket_iter:
                url = blr.storage_url
                some_matched = True
                self.logger.info('Setting CORS on %s...', blr)
                if url.scheme == 's3':
                    self.gsutil_api.XmlPassThroughSetCors(cors_txt,
                                                          url,
                                                          provider=url.scheme)
                else:
                    cors = CorsTranslation.JsonCorsToMessageEntries(cors_txt)
                    if not cors:
                        cors = REMOVE_CORS_CONFIG
                    bucket_metadata = apitools_messages.Bucket(cors=cors)
                    self.gsutil_api.PatchBucket(url.bucket_name,
                                                bucket_metadata,
                                                provider=url.scheme,
                                                fields=['id'])
        if not some_matched:
            raise CommandException('No URLs matched')
        return 0

    def _GetCors(self):
        """Gets CORS configuration for a Google Cloud Storage bucket."""
        bucket_url, bucket_metadata = self.GetSingleBucketUrlFromArg(
            self.args[0], bucket_fields=['cors'])

        if bucket_url.scheme == 's3':
            sys.stdout.write(
                self.gsutil_api.XmlPassThroughGetCors(
                    bucket_url, provider=bucket_url.scheme))
        else:
            if bucket_metadata.cors:
                sys.stdout.write(
                    CorsTranslation.MessageEntriesToJson(bucket_metadata.cors))
            else:
                sys.stdout.write('%s has no CORS configuration.\n' %
                                 bucket_url)
        return 0

    def RunCommand(self):
        """Command entry point for the cors command."""
        action_subcommand = self.args.pop(0)
        if action_subcommand == 'get':
            func = self._GetCors
        elif action_subcommand == 'set':
            func = self._SetCors
        else:
            raise CommandException(
                ('Invalid subcommand "%s" for the %s command.\n'
                 'See "gsutil help cors".') %
                (action_subcommand, self.command_name))
        return func()
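# Illustrative invocations (file and bucket names are hypothetical):
#   gsutil cors set cors-config.json gs://my-bucket
#   gsutil cors get gs://my-bucket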
Example #4
class DefStorageClassCommand(Command):
    """Implementation of gsutil defstorageclass command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'defstorageclass',
        usage_synopsis=_SYNOPSIS,
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=2,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'set': [
                # FreeTextArgument allows for using storage class abbreviations.
                CommandArgument.MakeFreeTextArgument(),
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument(),
            ],
            'get': [
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument(),
            ],
        },
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='defstorageclass',
        help_name_aliases=['defaultstorageclass'],
        help_type='command_help',
        help_one_line_summary='Get or set the default storage class on buckets',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'get': _get_help_text,
            'set': _set_help_text,
        },
    )

    def _CheckIsGsUrl(self, url_str):
        if not url_str.startswith('gs://'):
            raise CommandException(
                '"%s" does not support the URL "%s". Did you mean to use a gs:// '
                'URL?' % (self.command_name, url_str))

    def _CalculateUrlsStartArg(self):
        if not self.args:
            self.RaiseWrongNumberOfArgumentsException()
        if self.args[0].lower() == 'set':
            return 2
        else:
            return 1

    def _SetDefStorageClass(self):
        """Sets the default storage class for a bucket."""
        # At this point, "set" has been popped off the front of self.args.
        normalized_storage_class = NormalizeStorageClass(self.args[0])
        url_args = self.args[1:]
        if not url_args:
            self.RaiseWrongNumberOfArgumentsException()

        some_matched = False
        for url_str in url_args:
            self._CheckIsGsUrl(url_str)
            # Throws a CommandException if the argument is not a bucket.
            bucket_iter = self.GetBucketUrlIterFromArg(url_str,
                                                       bucket_fields=['id'])
            for blr in bucket_iter:
                some_matched = True
                bucket_metadata = apitools_messages.Bucket()
                self.logger.info(
                    'Setting default storage class to "%s" for bucket %s' %
                    (normalized_storage_class, blr.url_string.rstrip('/')))
                bucket_metadata.storageClass = normalized_storage_class
                self.gsutil_api.PatchBucket(blr.storage_url.bucket_name,
                                            bucket_metadata,
                                            provider=blr.storage_url.scheme,
                                            fields=['id'])
        if not some_matched:
            raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))

    def _GetDefStorageClass(self):
        """Gets the default storage class for a bucket."""
        # At this point, "get" has been popped off the front of self.args.
        url_args = self.args
        some_matched = False
        for url_str in url_args:
            self._CheckIsGsUrl(url_str)
            bucket_iter = self.GetBucketUrlIterFromArg(
                url_str, bucket_fields=['storageClass'])
            for blr in bucket_iter:
                some_matched = True
                print(
                    '%s: %s' %
                    (blr.url_string.rstrip('/'), blr.root_object.storageClass))
        if not some_matched:
            raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))

    def RunCommand(self):
        """Command entry point for the defstorageclass command."""
        action_subcommand = self.args.pop(0)
        subcommand_args = [action_subcommand]
        if action_subcommand == 'get':
            func = self._GetDefStorageClass
        elif action_subcommand == 'set':
            func = self._SetDefStorageClass
            normalized_storage_class = NormalizeStorageClass(self.args[0])
            subcommand_args.append(normalized_storage_class)
        else:
            raise CommandException(
                ('Invalid subcommand "%s" for the %s command.\n'
                 'See "gsutil help %s".') %
                (action_subcommand, self.command_name, self.command_name))
        metrics.LogCommandParams(subcommands=subcommand_args)
        func()
        return 0
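# Illustrative invocations (the storage class and bucket name are hypothetical):
#   gsutil defstorageclass set nearline gs://my-bucket
#   gsutil defstorageclass get gs://my-bucket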
Example #5
class VersioningCommand(Command):
  """Implementation of gsutil versioning command."""

  # Command specification. See base class for documentation.
  command_spec = Command.CreateCommandSpec(
      'versioning',
      command_name_aliases=['setversioning', 'getversioning'],
      usage_synopsis=_SYNOPSIS,
      min_args=2,
      max_args=NO_MAX,
      supported_sub_args='',
      file_url_ok=False,
      provider_url_ok=False,
      urls_start_arg=2,
      gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
      gs_default_api=ApiSelector.JSON,
      argparse_arguments={
          'set': [
              CommandArgument('mode', choices=['on', 'off']),
              CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
          ],
          'get': [
              CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
          ]
      }
  )
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='versioning',
      help_name_aliases=['getversioning', 'setversioning'],
      help_type='command_help',
      help_one_line_summary=(
          'Enable or suspend versioning for one or more buckets'),
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={'get': _get_help_text, 'set': _set_help_text},
  )

  def _CalculateUrlsStartArg(self):
    if not self.args:
      self.RaiseWrongNumberOfArgumentsException()
    if self.args[0].lower() == 'set':
      return 2
    else:
      return 1

  def _SetVersioning(self):
    """Sets the versioning configuration for one or more buckets."""
    versioning_arg = self.args[0].lower()
    if versioning_arg not in ('on', 'off'):
      raise CommandException('Argument to "%s set" must be either <on|off>'
                             % (self.command_name))
    url_args = self.args[1:]
    if not url_args:
      self.RaiseWrongNumberOfArgumentsException()

    # Iterate over URLs, expanding wildcards and set the versioning
    # configuration on each.
    some_matched = False
    for url_str in url_args:
      bucket_iter = self.GetBucketUrlIterFromArg(url_str, bucket_fields=['id'])
      for blr in bucket_iter:
        url = blr.storage_url
        some_matched = True
        bucket_metadata = apitools_messages.Bucket(
            versioning=apitools_messages.Bucket.VersioningValue())
        if versioning_arg == 'on':
          self.logger.info('Enabling versioning for %s...', url)
          bucket_metadata.versioning.enabled = True
        else:
          self.logger.info('Suspending versioning for %s...', url)
          bucket_metadata.versioning.enabled = False
        self.gsutil_api.PatchBucket(url.bucket_name, bucket_metadata,
                                    provider=url.scheme, fields=['id'])
    if not some_matched:
      raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))

  def _GetVersioning(self):
    """Gets versioning configuration for one or more buckets."""
    url_args = self.args

    # Iterate over URLs, expanding wildcards and getting the versioning
    # configuration on each.
    some_matched = False
    for url_str in url_args:
      bucket_iter = self.GetBucketUrlIterFromArg(url_str,
                                                 bucket_fields=['versioning'])
      for blr in bucket_iter:
        some_matched = True
        if blr.root_object.versioning and blr.root_object.versioning.enabled:
          print('%s: Enabled' % blr.url_string.rstrip('/'))
        else:
          print('%s: Suspended' % blr.url_string.rstrip('/'))
    if not some_matched:
      raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))

  def RunCommand(self):
    """Command entry point for the versioning command."""
    action_subcommand = self.args.pop(0)
    if action_subcommand == 'get':
      func = self._GetVersioning
      metrics.LogCommandParams(subcommands=[action_subcommand])
    elif action_subcommand == 'set':
      func = self._SetVersioning
      versioning_arg = self.args[0].lower()
      if versioning_arg in ('on', 'off'):
        metrics.LogCommandParams(
            subcommands=[action_subcommand, versioning_arg])
    else:
      raise CommandException((
          'Invalid subcommand "%s" for the %s command.\n'
          'See "gsutil help %s".') % (
              action_subcommand, self.command_name, self.command_name))
    func()
    return 0
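# Illustrative invocations (bucket name is hypothetical):
#   gsutil versioning set on gs://my-bucket
#   gsutil versioning get gs://my-bucket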
Example #6
class NotificationCommand(Command):
    """Implementation of gsutil notification command."""

    # Notification names might look like one of these:
    #  canonical form:  projects/_/buckets/bucket/notificationConfigs/3
    #  JSON API form:   b/bucket/notificationConfigs/5
    # Either of the above might start with a / if a user is copying & pasting.
    def _GetNotificationPathRegex(self):
        if not NotificationCommand._notification_path_regex:
            NotificationCommand._notification_path_regex = re.compile(
                ('/?(projects/[^/]+/)?b(uckets)?/(?P<bucket>[^/]+)/'
                 'notificationConfigs/(?P<notification>[0-9]+)'))
        return NotificationCommand._notification_path_regex

    _notification_path_regex = None

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'notification',
        command_name_aliases=[
            'notify', 'notifyconfig', 'notifications', 'notif'
        ],
        usage_synopsis=_SYNOPSIS,
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='i:t:m:t:o:f:e:p:s',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'watchbucket': [
                CommandArgument.MakeFreeTextArgument(),
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
            ],
            'stopchannel': [],
            'list': [CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()],
            'delete': [
                # Takes a list of one of the following:
                #   notification: projects/_/buckets/bla/notificationConfigs/5,
                #   bucket: gs://foobar
                CommandArgument.MakeZeroOrMoreCloudURLsArgument()
            ],
            'create': [
                CommandArgument.MakeFreeTextArgument(),  # Cloud Pub/Sub topic
                CommandArgument.MakeNCloudBucketURLsArgument(1)
            ]
        })
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='notification',
        help_name_aliases=['watchbucket', 'stopchannel', 'notifyconfig'],
        help_type='command_help',
        help_one_line_summary='Configure object change notification',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'create': _create_help_text,
            'list': _list_help_text,
            'delete': _delete_help_text,
            'watchbucket': _watchbucket_help_text,
            'stopchannel': _stopchannel_help_text
        },
    )

    def _WatchBucket(self):
        """Creates a watch on a bucket given in self.args."""
        self.CheckArguments()
        identifier = None
        client_token = None
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-i':
                    identifier = a
                if o == '-t':
                    client_token = a

        identifier = identifier or str(uuid.uuid4())
        watch_url = self.args[0]
        bucket_arg = self.args[-1]

        if not watch_url.lower().startswith('https://'):
            raise CommandException(
                'The application URL must be an https:// URL.')

        bucket_url = StorageUrlFromString(bucket_arg)
        if not (bucket_url.IsBucket() and bucket_url.scheme == 'gs'):
            raise CommandException(
                'The %s command can only be used with gs:// bucket URLs.' %
                self.command_name)
        if not bucket_url.IsBucket():
            raise CommandException(
                'URL must name a bucket for the %s command.' %
                self.command_name)

        self.logger.info('Watching bucket %s with application URL %s ...',
                         bucket_url, watch_url)

        try:
            channel = self.gsutil_api.WatchBucket(bucket_url.bucket_name,
                                                  watch_url,
                                                  identifier,
                                                  token=client_token,
                                                  provider=bucket_url.scheme)
        except AccessDeniedException as e:
            self.logger.warn(
                NOTIFICATION_AUTHORIZATION_FAILED_MESSAGE.format(
                    watch_error=str(e), watch_url=watch_url))
            raise

        channel_id = channel.id
        resource_id = channel.resourceId
        client_token = channel.token
        self.logger.info('Successfully created watch notification channel.')
        self.logger.info('Watch channel identifier: %s', channel_id)
        self.logger.info('Canonicalized resource identifier: %s', resource_id)
        self.logger.info('Client state token: %s', client_token)

        return 0
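# Illustrative invocation of the watchbucket subcommand above (channel ID,
# token, URL, and bucket are hypothetical):
#   gsutil notification watchbucket -i my-channel-id -t my-client-token \
#       https://example.com/notify gs://my-bucket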
Example #7
class LabelCommand(Command):
    """Implementation of gsutil label command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'label',
        usage_synopsis=_SYNOPSIS,
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='l:d:',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'set': [
                CommandArgument.MakeNFileURLsArgument(1),
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument(),
            ],
            'get': [
                CommandArgument.MakeNCloudURLsArgument(1),
            ],
            'ch': [
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument(),
            ],
        },
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='label',
        help_name_aliases=[],
        help_type='command_help',
        help_one_line_summary=(
            'Get, set, or change the label configuration of a bucket.'),
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'get': _get_help_text,
            'set': _set_help_text,
            'ch': _ch_help_text,
        },
    )

    def _CalculateUrlsStartArg(self):
        if not self.args:
            self.RaiseWrongNumberOfArgumentsException()
        if self.args[0].lower() == 'set':
            return 2  # Filename comes before bucket arg(s).
        return 1

    def _SetLabel(self):
        """Parses options and sets labels on the specified buckets."""
        # At this point, "set" has been popped off the front of self.args.
        if len(self.args) < 2:
            self.RaiseWrongNumberOfArgumentsException()

        label_filename = self.args[0]
        if not os.path.isfile(label_filename):
            raise CommandException('Could not find the file "%s".' %
                                   label_filename)
        with codecs.open(label_filename, 'r', UTF8) as label_file:
            label_text = label_file.read()

        @Retry(PreconditionException, tries=3, timeout_secs=1)
        def _SetLabelForBucket(blr):
            url = blr.storage_url
            self.logger.info('Setting label configuration on %s...', blr)

            if url.scheme == 's3':  # Uses only XML.
                self.gsutil_api.XmlPassThroughSetTagging(label_text,
                                                         url,
                                                         provider=url.scheme)
            else:  # Must be a 'gs://' bucket.
                labels_message = None
                # When performing a read-modify-write cycle, include metageneration to
                # avoid race conditions (supported for GS buckets only).
                metageneration = None
                new_label_json = json.loads(label_text)
                if (self.gsutil_api.GetApiSelector(
                        url.scheme) == ApiSelector.JSON):
                    # Perform a read-modify-write so that we can specify which
                    # existing labels need to be deleted.
                    _, bucket_metadata = self.GetSingleBucketUrlFromArg(
                        url.url_string,
                        bucket_fields=['labels', 'metageneration'])
                    metageneration = bucket_metadata.metageneration
                    label_json = {}
                    if bucket_metadata.labels:
                        label_json = json.loads(
                            LabelTranslation.JsonFromMessage(
                                bucket_metadata.labels))
                    # Set all old keys' values to None; this will delete each key that
                    # is not included in the new set of labels.
                    merged_labels = dict(
                        (key, None) for key, _ in six.iteritems(label_json))
                    merged_labels.update(new_label_json)
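                    # E.g. (hypothetical labels): label_json == {'team': 'a',
                    # 'env': 'dev'} and new_label_json == {'env': 'prod'} give
                    # merged_labels == {'team': None, 'env': 'prod'}, so 'team'
                    # is deleted and 'env' is updated.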
                    labels_message = LabelTranslation.DictToMessage(
                        merged_labels)
                else:  # ApiSelector.XML
                    # No need to read-modify-write with the XML API.
                    labels_message = LabelTranslation.DictToMessage(
                        new_label_json)

                preconditions = Preconditions(meta_gen_match=metageneration)
                bucket_metadata = apitools_messages.Bucket(
                    labels=labels_message)
                self.gsutil_api.PatchBucket(url.bucket_name,
                                            bucket_metadata,
                                            preconditions=preconditions,
                                            provider=url.scheme,
                                            fields=['id'])

        some_matched = False
        url_args = self.args[1:]
        for url_str in url_args:
            # Throws a CommandException if the argument is not a bucket.
            bucket_iter = self.GetBucketUrlIterFromArg(url_str,
                                                       bucket_fields=['id'])
            for bucket_listing_ref in bucket_iter:
                some_matched = True
                _SetLabelForBucket(bucket_listing_ref)

        if not some_matched:
            raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))

    def _ChLabel(self):
        """Parses options and changes labels on the specified buckets."""
        self.label_changes = {}
        self.num_deletions = 0

        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-l':
                    label_split = a.split(':')
                    if len(label_split) != 2:
                        raise CommandException(
                            'Found incorrectly formatted option for "gsutil label ch": '
                            '"%s". To add a label, please use the form <key>:<value>.'
                            % a)
                    self.label_changes[label_split[0]] = label_split[1]
                elif o == '-d':
                    # Ensure only the key is supplied; stop if key:value was given.
                    val_split = a.split(':')
                    if len(val_split) != 1:
                        raise CommandException(
                            'Found incorrectly formatted option for "gsutil label ch": '
                            '"%s". To delete a label, provide only its key.' %
                            a)
                    self.label_changes[a] = None
                    self.num_deletions += 1
                else:
                    self.RaiseInvalidArgumentException()
        if not self.label_changes:
            raise CommandException(
                'Please specify at least one label change with the -l or -d flags.'
            )

        @Retry(PreconditionException, tries=3, timeout_secs=1)
        def _ChLabelForBucket(blr):
            url = blr.storage_url
            self.logger.info('Setting label configuration on %s...', blr)

            labels_message = None
            # When performing a read-modify-write cycle, include metageneration to
            # avoid race conditions (supported for GS buckets only).
            metageneration = None
            if (self.gsutil_api.GetApiSelector(
                    url.scheme) == ApiSelector.JSON):
                # The JSON API's PATCH semantics allow us to skip read-modify-write,
                # with the exception of one edge case - attempting to delete a
                # nonexistent label returns an error iff no labels previously existed
                corrected_changes = self.label_changes
                if self.num_deletions:
                    (_, bucket_metadata) = self.GetSingleBucketUrlFromArg(
                        url.url_string,
                        bucket_fields=['labels', 'metageneration'])
                    if not bucket_metadata.labels:
                        metageneration = bucket_metadata.metageneration
                        # Remove each change that would try to delete a nonexistent key.
                        corrected_changes = dict(
                            (k, v)
                            for k, v in six.iteritems(self.label_changes) if v)
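                        # E.g. (hypothetical changes): {'keep': 'x', 'drop': None}
                        # becomes {'keep': 'x'}, since deleting a key that was
                        # never set would otherwise fail on a label-less bucket.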
                labels_message = LabelTranslation.DictToMessage(
                    corrected_changes)
            else:  # ApiSelector.XML
                # Perform a read-modify-write cycle so that we can specify which
                # existing labels need to be deleted.
                (_, bucket_metadata) = self.GetSingleBucketUrlFromArg(
                    url.url_string, bucket_fields=['labels', 'metageneration'])
                metageneration = bucket_metadata.metageneration

                label_json = {}
                if bucket_metadata.labels:
                    label_json = json.loads(
                        LabelTranslation.JsonFromMessage(
                            bucket_metadata.labels))
                # Modify label_json such that all specified labels are added
                # (overwriting old labels if necessary) and all specified deletions
                # are removed from label_json if already present.
                for key, value in six.iteritems(self.label_changes):
                    if not value and key in label_json:
                        del label_json[key]
                    else:
                        label_json[key] = value
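                # E.g. (hypothetical labels): label_json == {'a': '1', 'b': '2'}
                # with label_changes == {'b': None, 'c': '3'} yields
                # {'a': '1', 'c': '3'}: 'b' is deleted and 'c' is added.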
                labels_message = LabelTranslation.DictToMessage(label_json)

            preconditions = Preconditions(meta_gen_match=metageneration)
            bucket_metadata = apitools_messages.Bucket(labels=labels_message)
            self.gsutil_api.PatchBucket(url.bucket_name,
                                        bucket_metadata,
                                        preconditions=preconditions,
                                        provider=url.scheme,
                                        fields=['id'])

        some_matched = False
        url_args = self.args
        if not url_args:
            self.RaiseWrongNumberOfArgumentsException()
        for url_str in url_args:
            # Throws a CommandException if the argument is not a bucket.
            bucket_iter = self.GetBucketUrlIterFromArg(url_str)
            for bucket_listing_ref in bucket_iter:
                some_matched = True
                _ChLabelForBucket(bucket_listing_ref)

        if not some_matched:
            raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))

    def _GetAndPrintLabel(self, bucket_arg):
        """Gets and prints the labels for a cloud bucket."""
        bucket_url, bucket_metadata = self.GetSingleBucketUrlFromArg(
            bucket_arg, bucket_fields=['labels'])
        if bucket_url.scheme == 's3':
            print(self.gsutil_api.XmlPassThroughGetTagging(
                bucket_url, provider=bucket_url.scheme))
        else:
            if bucket_metadata.labels:
                print(LabelTranslation.JsonFromMessage(bucket_metadata.labels,
                                                       pretty_print=True))
            else:
                print('%s has no label configuration.' % bucket_url)

    def RunCommand(self):
        """Command entry point for the label command."""
        action_subcommand = self.args.pop(0)
        self.ParseSubOpts(check_args=True)

        # Commands with both suboptions and subcommands need to reparse for
        # suboptions, so we log again.
        metrics.LogCommandParams(sub_opts=self.sub_opts)
        if action_subcommand == 'get':
            metrics.LogCommandParams(subcommands=[action_subcommand])
            self._GetAndPrintLabel(self.args[0])
        elif action_subcommand == 'set':
            metrics.LogCommandParams(subcommands=[action_subcommand])
            self._SetLabel()
        elif action_subcommand == 'ch':
            metrics.LogCommandParams(subcommands=[action_subcommand])
            self._ChLabel()
        else:
            raise CommandException(
                'Invalid subcommand "%s" for the %s command.\nSee "gsutil help %s".'
                % (action_subcommand, self.command_name, self.command_name))
        return 0
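# Illustrative invocations (file, key, and bucket names are hypothetical):
#   gsutil label set labels.json gs://my-bucket
#   gsutil label get gs://my-bucket
#   gsutil label ch -l environment:prod -d old_key gs://my-bucket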
Example #8
class BucketPolicyOnlyCommand(Command):
  """Implements the gsutil bucketpolicyonly command."""

  command_spec = Command.CreateCommandSpec(
      'bucketpolicyonly',
      usage_synopsis=_SYNOPSIS,
      min_args=2,
      max_args=NO_MAX,
      supported_sub_args='',
      file_url_ok=False,
      provider_url_ok=False,
      urls_start_arg=2,
      gs_api_support=[ApiSelector.JSON],
      gs_default_api=ApiSelector.JSON,
      argparse_arguments={
          'get': [CommandArgument.MakeNCloudURLsArgument(1),],
          'set': [
              CommandArgument('mode', choices=['on', 'off']),
              CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
          ],
      })
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='bucketpolicyonly',
      help_name_aliases=[],
      help_type='command_help',
      help_one_line_summary='Configure uniform bucket-level access',
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={
          'get': _get_help_text,
          'set': _set_help_text,
      },
  )

  def _ValidateBucketListingRefAndReturnBucketName(self, blr):
    if blr.storage_url.scheme != 'gs':
      raise CommandException(
          'The %s command can only be used with gs:// bucket URLs.' %
          self.command_name)

  def _GetBucketPolicyOnly(self, blr):
    """Gets the Bucket Policy Only setting for a bucket."""
    self._ValidateBucketListingRefAndReturnBucketName(blr)
    bucket_url = blr.storage_url

    bucket_metadata = self.gsutil_api.GetBucket(bucket_url.bucket_name,
                                                fields=['iamConfiguration'],
                                                provider=bucket_url.scheme)
    iam_config = bucket_metadata.iamConfiguration
    bucket_policy_only = iam_config.bucketPolicyOnly

    fields = {
        'bucket': str(bucket_url).rstrip('/'),
        'enabled': bucket_policy_only.enabled
    }

    locked_time_line = ''
    if bucket_policy_only.lockedTime:
      fields['locked_time'] = bucket_policy_only.lockedTime
      locked_time_line = '  LockedTime: {locked_time}\n'

    if bucket_policy_only:
      print(('Bucket Policy Only setting for {bucket}:\n'
             '  Enabled: {enabled}\n' + locked_time_line).format(**fields))

  def _SetBucketPolicyOnly(self, blr, setting_arg):
    """Sets the Bucket Policy Only setting for a bucket on or off."""
    self._ValidateBucketListingRefAndReturnBucketName(blr)
    bucket_url = blr.storage_url

    iam_config = IamConfigurationValue()
    iam_config.bucketPolicyOnly = BucketPolicyOnlyValue()
    iam_config.bucketPolicyOnly.enabled = (setting_arg == 'on')

    bucket_metadata = apitools_messages.Bucket(iamConfiguration=iam_config)

    setting_verb = 'Enabling' if setting_arg == 'on' else 'Disabling'
    print('%s Bucket Policy Only for %s...' %
          (setting_verb, str(bucket_url).rstrip('/')))

    self.gsutil_api.PatchBucket(bucket_url.bucket_name,
                                bucket_metadata,
                                fields=['iamConfiguration'],
                                provider=bucket_url.scheme)
    return 0

  def _BucketPolicyOnly(self):
    """Handles bucketpolicyonly command on a Cloud Storage bucket."""
    subcommand = self.args.pop(0)

    if subcommand not in ('get', 'set'):
      raise CommandException('bucketpolicyonly only supports get|set')

    subcommand_func = None
    subcommand_args = []
    setting_arg = None

    if subcommand == 'get':
      subcommand_func = self._GetBucketPolicyOnly
    elif subcommand == 'set':
      subcommand_func = self._SetBucketPolicyOnly
      setting_arg = self.args.pop(0)
      InsistOnOrOff(setting_arg,
                    'Only on and off values allowed for set option')
      subcommand_args.append(setting_arg)

    # Iterate over bucket args, performing the specified subsubcommand.
    some_matched = False
    url_args = self.args
    if not url_args:
      self.RaiseWrongNumberOfArgumentsException()
    for url_str in url_args:
      # Throws a CommandException if the argument is not a bucket.
      bucket_iter = self.GetBucketUrlIterFromArg(url_str)
      for bucket_listing_ref in bucket_iter:
        some_matched = True
        subcommand_func(bucket_listing_ref, *subcommand_args)

    if not some_matched:
      raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))
    return 0

  def RunCommand(self):
    """Command entry point for the bucketpolicyonly command."""
    if self.gsutil_api.GetApiSelector(provider='gs') != ApiSelector.JSON:
      raise CommandException('\n'.join(
          textwrap.wrap(
              'The "%s" command can only be used with the Cloud Storage JSON API.'
              % self.command_name)))

    action_subcommand = self.args[0]
    self.ParseSubOpts(check_args=True)

    if action_subcommand == 'get' or action_subcommand == 'set':
      metrics.LogCommandParams(sub_opts=self.sub_opts)
      metrics.LogCommandParams(subcommands=[action_subcommand])
      self._BucketPolicyOnly()
    else:
      raise CommandException('Invalid subcommand "%s", use get|set instead.' %
                             action_subcommand)
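_GetBucketPolicyOnly above assembles its output template conditionally, appending a LockedTime line only when that field is present. A minimal standalone sketch of that pattern (names and values are illustrative, not gsutil code):

def format_bucket_policy_only(bucket, enabled, locked_time=None):
  """Formats the Bucket Policy Only report, adding LockedTime when present."""
  fields = {'bucket': bucket, 'enabled': enabled}
  locked_time_line = ''
  if locked_time:
    fields['locked_time'] = locked_time
    locked_time_line = '  LockedTime: {locked_time}\n'
  return ('Bucket Policy Only setting for {bucket}:\n'
          '  Enabled: {enabled}\n' + locked_time_line).format(**fields)

# Example:
# print(format_bucket_policy_only('gs://example-bucket', True,
#                                 locked_time='2030-01-01T00:00:00Z'))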
Example #9
class LifecycleCommand(Command):
  """Implementation of gsutil lifecycle command."""

  # Command specification. See base class for documentation.
  command_spec = Command.CreateCommandSpec(
      'lifecycle',
      command_name_aliases=['lifecycleconfig'],
      usage_synopsis=_SYNOPSIS,
      min_args=2,
      max_args=NO_MAX,
      supported_sub_args='',
      file_url_ok=True,
      provider_url_ok=False,
      urls_start_arg=1,
      gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
      gs_default_api=ApiSelector.JSON,
      argparse_arguments={
          'set': [
              CommandArgument.MakeNFileURLsArgument(1),
              CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
          ],
          'get': [
              CommandArgument.MakeNCloudBucketURLsArgument(1)
          ]
      }
  )
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='lifecycle',
      help_name_aliases=['getlifecycle', 'setlifecycle'],
      help_type='command_help',
      help_one_line_summary=(
          'Get or set lifecycle configuration for a bucket'),
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={'get': _get_help_text, 'set': _set_help_text},
  )

  def _SetLifecycleConfig(self):
    """Sets lifecycle configuration for a Google Cloud Storage bucket."""
    lifecycle_arg = self.args[0]
    url_args = self.args[1:]
    # Disallow multi-provider 'lifecycle set' requests.
    if not UrlsAreForSingleProvider(url_args):
      raise CommandException('"%s" command spanning providers not allowed.' %
                             self.command_name)

    # Open, read and parse file containing JSON document.
    with open(lifecycle_arg, 'r') as lifecycle_file:
      lifecycle_txt = lifecycle_file.read()

    # Iterate over URLs, expanding wildcards and setting the lifecycle on each.
    some_matched = False
    for url_str in url_args:
      bucket_iter = self.GetBucketUrlIterFromArg(url_str,
                                                 bucket_fields=['lifecycle'])
      for blr in bucket_iter:
        url = blr.storage_url
        some_matched = True
        self.logger.info('Setting lifecycle configuration on %s...', blr)
        if url.scheme == 's3':
          self.gsutil_api.XmlPassThroughSetLifecycle(
              lifecycle_txt, url, provider=url.scheme)
        else:
          lifecycle = LifecycleTranslation.JsonLifecycleToMessage(lifecycle_txt)
          bucket_metadata = apitools_messages.Bucket(lifecycle=lifecycle)
          self.gsutil_api.PatchBucket(url.bucket_name, bucket_metadata,
                                      provider=url.scheme, fields=['id'])
    if not some_matched:
      raise CommandException('No URLs matched')
    return 0

  def _GetLifecycleConfig(self):
    """Gets lifecycle configuration for a Google Cloud Storage bucket."""
    bucket_url, bucket_metadata = self.GetSingleBucketUrlFromArg(
        self.args[0], bucket_fields=['lifecycle'])

    if bucket_url.scheme == 's3':
      sys.stdout.write(self.gsutil_api.XmlPassThroughGetLifecycle(
          bucket_url, provider=bucket_url.scheme))
    else:
      if bucket_metadata.lifecycle and bucket_metadata.lifecycle.rule:
        sys.stdout.write(LifecycleTranslation.JsonLifecycleFromMessage(
            bucket_metadata.lifecycle))
      else:
        sys.stdout.write('%s has no lifecycle configuration.\n' % bucket_url)

    return 0

  def RunCommand(self):
    """Command entry point for the lifecycle command."""
    subcommand = self.args.pop(0)
    if subcommand == 'get':
      return self._GetLifecycleConfig()
    elif subcommand == 'set':
      return self._SetLifecycleConfig()
    else:
      raise CommandException('Invalid subcommand "%s" for the %s command.' %
                             (subcommand, self.command_name))
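_SetLifecycleConfig above reads the lifecycle document from a local file before applying it to each bucket. A small self-contained sketch of that read step with an added JSON sanity check (the file name is hypothetical, and gsutil itself converts the text with LifecycleTranslation.JsonLifecycleToMessage rather than json.loads):

import json

def load_lifecycle_config(path):
  """Reads a lifecycle JSON document and fails fast if it is not valid JSON."""
  with open(path, 'r') as lifecycle_file:
    lifecycle_txt = lifecycle_file.read()
  json.loads(lifecycle_txt)  # Raises ValueError on malformed JSON.
  return lifecycle_txt

# Example: lifecycle_txt = load_lifecycle_config('lifecycle.json')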
Example #10
class RbCommand(Command):
  """Implementation of gsutil rb command."""

  # Command specification. See base class for documentation.
  command_spec = Command.CreateCommandSpec(
      'rb',
      command_name_aliases=[
          'deletebucket',
          'removebucket',
          'removebuckets',
          'rmdir',
      ],
      usage_synopsis=_SYNOPSIS,
      min_args=1,
      max_args=NO_MAX,
      supported_sub_args='f',
      file_url_ok=False,
      provider_url_ok=False,
      urls_start_arg=0,
      gs_api_support=[
          ApiSelector.XML,
          ApiSelector.JSON,
      ],
      gs_default_api=ApiSelector.JSON,
      argparse_arguments=[
          CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument(),
      ],
  )
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='rb',
      help_name_aliases=[
          'deletebucket',
          'removebucket',
          'removebuckets',
          'rmdir',
      ],
      help_type='command_help',
      help_one_line_summary='Remove buckets',
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={},
  )

  def RunCommand(self):
    """Command entry point for the rb command."""
    self.continue_on_error = False
    if self.sub_opts:
      for o, unused_a in self.sub_opts:
        if o == '-f':
          self.continue_on_error = True

    did_some_work = False
    some_failed = False
    for url_str in self.args:
      wildcard_url = StorageUrlFromString(url_str)
      if wildcard_url.IsObject():
        raise CommandException('"rb" command requires a provider or '
                               'bucket URL')
      # Wrap the WildcardIterator call in try/except so that, with the -f
      # option, we can avoid printing errors when a non-existent URL is listed,
      # permission is denied while listing, etc.
      try:
        # Materialize iterator results into a list to catch exceptions.
        # Since this is listing buckets this list shouldn't be too large to fit
        # in memory at once.
        # Also, avoid listing all fields to avoid performing unnecessary bucket
        # metadata GETs. These would also be problematic when billing is
        # disabled, as deletes are allowed but GetBucket is not.
        blrs = list(
            self.WildcardIterator(url_str).IterBuckets(bucket_fields=['id']))
      except:  # pylint: disable=bare-except
        some_failed = True
        if self.continue_on_error:
          continue
        else:
          raise
      for blr in blrs:
        url = blr.storage_url
        self.logger.info('Removing %s...', url)
        try:
          self.gsutil_api.DeleteBucket(url.bucket_name, provider=url.scheme)
        except NotEmptyException as e:
          some_failed = True
          if self.continue_on_error:
            continue
          elif 'VersionedBucketNotEmpty' in e.reason:
            raise CommandException('Bucket is not empty. Note: this is a '
                                   'versioned bucket, so to delete all '
                                   'objects\nyou need to use:'
                                   '\n\tgsutil rm -r %s' % url)
          else:
            raise
        except:  # pylint: disable=bare-except
          some_failed = True
          if not self.continue_on_error:
            raise
        did_some_work = True
    if not did_some_work:
      raise CommandException(NO_URLS_MATCHED_TARGET % list(self.args))
    return 1 if some_failed else 0
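RunCommand above implements the -f (continue-on-error) behavior: failures are recorded instead of aborting, and the exit code reflects whether anything failed. A minimal standalone sketch of that pattern, where delete_bucket is a hypothetical stand-in for self.gsutil_api.DeleteBucket:

def remove_buckets(bucket_names, delete_bucket, continue_on_error=False):
  """Deletes buckets, optionally continuing past failures; returns exit code."""
  some_failed = False
  did_some_work = False
  for name in bucket_names:
    try:
      delete_bucket(name)
      did_some_work = True
    except Exception:  # Broad on purpose, mirroring the bare except above.
      some_failed = True
      if not continue_on_error:
        raise
  if not did_some_work:
    raise ValueError('No URLs matched')
  return 1 if some_failed else 0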
Example #11
class DefAclCommand(Command):
    """Implementation of gsutil defacl command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'defacl',
        command_name_aliases=['setdefacl', 'getdefacl', 'chdefacl'],
        usage_synopsis=_SYNOPSIS,
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='fg:u:d:p:',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'set': [
                CommandArgument.MakeFileURLOrCannedACLArgument(),
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
            ],
            'get': [CommandArgument.MakeNCloudBucketURLsArgument(1)],
            'ch': [CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()],
        })
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='defacl',
        help_name_aliases=[
            'default acl', 'setdefacl', 'getdefacl', 'chdefacl'
        ],
        help_type='command_help',
        help_one_line_summary='Get, set, or change default ACL on buckets',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'get': _get_help_text,
            'set': _set_help_text,
            'ch': _ch_help_text
        },
    )

    def _CalculateUrlsStartArg(self):
        if not self.args:
            self.RaiseWrongNumberOfArgumentsException()
        if (self.args[0].lower() == 'set'
                or self.command_alias_used == 'setdefacl'):
            return 1
        else:
            return 0

    def _SetDefAcl(self):
        if not StorageUrlFromString(self.args[-1]).IsBucket():
            raise CommandException(
                'URL must name a bucket for the %s command' %
                self.command_name)
        try:
            self.SetAclCommandHelper(SetAclFuncWrapper, SetAclExceptionHandler)
        except AccessDeniedException:
            self._WarnServiceAccounts()
            raise

    def _GetDefAcl(self):
        if not StorageUrlFromString(self.args[0]).IsBucket():
            raise CommandException(
                'URL must name a bucket for the %s command' %
                self.command_name)
        self.GetAndPrintAcl(self.args[0])

    def _ChDefAcl(self):
        """Parses options and changes default object ACLs on specified buckets."""
        self.parse_versions = True
        self.changes = []

        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-g':
                    self.changes.append(
                        aclhelpers.AclChange(
                            a, scope_type=aclhelpers.ChangeType.GROUP))
                if o == '-u':
                    self.changes.append(
                        aclhelpers.AclChange(
                            a, scope_type=aclhelpers.ChangeType.USER))
                if o == '-p':
                    self.changes.append(
                        aclhelpers.AclChange(
                            a, scope_type=aclhelpers.ChangeType.PROJECT))
                if o == '-d':
                    self.changes.append(aclhelpers.AclDel(a))

        if not self.changes:
            raise CommandException('Please specify at least one access change '
                                   'with the -g, -u, -p, or -d flags')

        if (not UrlsAreForSingleProvider(self.args)
                or StorageUrlFromString(self.args[0]).scheme != 'gs'):
            raise CommandException(
                'The "{0}" command can only be used with gs:// URLs'.format(
                    self.command_name))

        bucket_urls = set()
        for url_arg in self.args:
            for result in self.WildcardIterator(url_arg):
                if not result.storage_url.IsBucket():
                    raise CommandException(
                        'The defacl ch command can only be applied to buckets.'
                    )
                bucket_urls.add(result.storage_url)

        for storage_url in bucket_urls:
            self.ApplyAclChanges(storage_url)

    @Retry(ServiceException, tries=3, timeout_secs=1)
    def ApplyAclChanges(self, url):
        """Applies the changes in self.changes to the provided URL."""
        bucket = self.gsutil_api.GetBucket(
            url.bucket_name,
            provider=url.scheme,
            fields=['defaultObjectAcl', 'metageneration'])

        # Default object ACLs can be blank if the ACL was set to private, or
        # if the user doesn't have permission. We warn about this with defacl get,
        # so just try the modification here and if the user doesn't have
        # permission they'll get an AccessDeniedException.
        current_acl = bucket.defaultObjectAcl

        modification_count = 0
        for change in self.changes:
            modification_count += change.Execute(url, current_acl, 'defacl',
                                                 self.logger)
        if modification_count == 0:
            self.logger.info('No changes to %s', url)
            return

        if not current_acl:
            # Use a sentinel value to indicate a private (no entries) default
            # object ACL.
            current_acl.append(PRIVATE_DEFAULT_OBJ_ACL)

        try:
            preconditions = Preconditions(meta_gen_match=bucket.metageneration)
            bucket_metadata = apitools_messages.Bucket(
                defaultObjectAcl=current_acl)
            self.gsutil_api.PatchBucket(url.bucket_name,
                                        bucket_metadata,
                                        preconditions=preconditions,
                                        provider=url.scheme,
                                        fields=['id'])
        except BadRequestException as e:
            # Don't retry on bad requests, e.g. invalid email address.
            raise CommandException('Received bad request from server: %s' %
                                   str(e))
        except AccessDeniedException:
            self._WarnServiceAccounts()
            raise CommandException(
                'Failed to set acl for %s. Please ensure you have '
                'OWNER-role access to this resource.' % url)

        self.logger.info('Updated default ACL on %s', url)

    def RunCommand(self):
        """Command entry point for the defacl command."""
        action_subcommand = self.args.pop(0)
        self.ParseSubOpts(check_args=True)
        self.def_acl = True
        self.continue_on_error = False
        if action_subcommand == 'get':
            func = self._GetDefAcl
        elif action_subcommand == 'set':
            func = self._SetDefAcl
        elif action_subcommand in ('ch', 'change'):
            func = self._ChDefAcl
        else:
            raise CommandException(
                ('Invalid subcommand "%s" for the %s command.\n'
                 'See "gsutil help defacl".') %
                (action_subcommand, self.command_name))
        func()
        return 0
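ApplyAclChanges above is wrapped in @Retry(ServiceException, tries=3, timeout_secs=1), which retries transient service errors. A simplified standalone sketch of such a decorator (gsutil's real one comes from its bundled retry_decorator library; this is only an illustration):

import functools
import time

def retry(exc_type, tries=3, timeout_secs=1):
  """Retries the wrapped function on exc_type, sleeping between attempts."""
  def decorator(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
      for attempt in range(tries):
        try:
          return func(*args, **kwargs)
        except exc_type:
          if attempt == tries - 1:
            raise
          time.sleep(timeout_secs)
    return wrapper
  return decorator

# Example: @retry(ConnectionError, tries=3, timeout_secs=1) above a flaky call.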
Example #12
class NotificationCommand(Command):
    """Implementation of gsutil notification command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'notification',
        command_name_aliases=[
            'notify', 'notifyconfig', 'notifications', 'notif'
        ],
        usage_synopsis=_SYNOPSIS,
        min_args=3,
        max_args=NO_MAX,
        supported_sub_args='i:t:',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'watchbucket': [
                CommandArgument.MakeFreeTextArgument(),
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
            ],
            'stopchannel': []
        })
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='notification',
        help_name_aliases=['watchbucket', 'stopchannel', 'notifyconfig'],
        help_type='command_help',
        help_one_line_summary='Configure object change notification',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'watchbucket': _watchbucket_help_text,
            'stopchannel': _stopchannel_help_text
        },
    )

    def _WatchBucket(self):
        """Creates a watch on a bucket given in self.args."""
        self.CheckArguments()
        identifier = None
        client_token = None
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-i':
                    identifier = a
                if o == '-t':
                    client_token = a

        identifier = identifier or str(uuid.uuid4())
        watch_url = self.args[0]
        bucket_arg = self.args[-1]

        if not watch_url.lower().startswith('https://'):
            raise CommandException(
                'The application URL must be an https:// URL.')

        bucket_url = StorageUrlFromString(bucket_arg)
        if not (bucket_url.IsBucket() and bucket_url.scheme == 'gs'):
            raise CommandException(
                'The %s command can only be used with gs:// bucket URLs.' %
                self.command_name)
        if not bucket_url.IsBucket():
            raise CommandException(
                'URL must name a bucket for the %s command.' %
                self.command_name)

        self.logger.info('Watching bucket %s with application URL %s ...',
                         bucket_url, watch_url)

        try:
            channel = self.gsutil_api.WatchBucket(bucket_url.bucket_name,
                                                  watch_url,
                                                  identifier,
                                                  token=client_token,
                                                  provider=bucket_url.scheme)
        except AccessDeniedException as e:
            self.logger.warn(
                NOTIFICATION_AUTHORIZATION_FAILED_MESSAGE.format(
                    watch_error=str(e), watch_url=watch_url))
            raise

        channel_id = channel.id
        resource_id = channel.resourceId
        client_token = channel.token
        self.logger.info('Successfully created watch notification channel.')
        self.logger.info('Watch channel identifier: %s', channel_id)
        self.logger.info('Canonicalized resource identifier: %s', resource_id)
        self.logger.info('Client state token: %s', client_token)

        return 0
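_WatchBucket above requires an https:// application URL and falls back to a random UUID for the channel identifier. A minimal standalone sketch of those two checks (the function name is illustrative and the gsutil WatchBucket API call itself is omitted):

import uuid

def prepare_watch_request(watch_url, identifier=None):
  """Validates the application URL and picks a channel identifier."""
  if not watch_url.lower().startswith('https://'):
    raise ValueError('The application URL must be an https:// URL.')
  return watch_url, identifier or str(uuid.uuid4())

# Example: prepare_watch_request('https://example.com/notify')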
Example #13
class MbCommand(Command):
    """Implementation of gsutil mb command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'mb',
        command_name_aliases=['makebucket', 'createbucket', 'md', 'mkdir'],
        usage_synopsis=_SYNOPSIS,
        min_args=1,
        max_args=NO_MAX,
        supported_sub_args='b:c:l:p:s:k:',
        supported_private_args=['autoclass', 'retention=', 'pap=', 'rpo='],
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=0,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments=[
            CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
        ],
    )
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='mb',
        help_name_aliases=[
            'createbucket',
            'makebucket',
            'md',
            'mkdir',
            'location',
            'dra',
            'dras',
            'reduced_availability',
            'durable_reduced_availability',
            'rr',
            'reduced_redundancy',
            'standard',
            'storage class',
            'nearline',
            'nl',
        ],
        help_type='command_help',
        help_one_line_summary='Make buckets',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={},
    )

    def RunCommand(self):
        """Command entry point for the mb command."""
        autoclass = False
        bucket_policy_only = None
        kms_key = None
        location = None
        storage_class = None
        seconds = None
        public_access_prevention = None
        rpo = None
        json_only_flags_in_command = []
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '--autoclass':
                    autoclass = True
                    json_only_flags_in_command.append(o)
                elif o == '-k':
                    kms_key = a
                    ValidateCMEK(kms_key)
                    json_only_flags_in_command.append(o)
                elif o == '-l':
                    location = a
                elif o == '-p':
                    # Project IDs are sent as header values when using gs and s3 XML APIs.
                    InsistAscii(
                        a, 'Invalid non-ASCII character found in project ID')
                    self.project_id = a
                elif o == '-c' or o == '-s':
                    storage_class = NormalizeStorageClass(a)
                elif o == '--retention':
                    seconds = RetentionInSeconds(a)
                elif o == '--rpo':
                    rpo = a.strip()
                    if rpo not in VALID_RPO_VALUES:
                        raise CommandException(
                            'Invalid value for --rpo. Must be one of: {},'
                            ' provided: {}'.format(VALID_RPO_VALUES_STRING, a))
                    json_only_flags_in_command.append(o)
                elif o == '-b':
                    InsistOnOrOff(
                        a, 'Only on and off values allowed for -b option')
                    bucket_policy_only = (a == 'on')
                    json_only_flags_in_command.append(o)
                elif o == '--pap':
                    public_access_prevention = a
                    json_only_flags_in_command.append(o)

        bucket_metadata = apitools_messages.Bucket(location=location,
                                                   rpo=rpo,
                                                   storageClass=storage_class)
        if autoclass:
            bucket_metadata.autoclass = apitools_messages.Bucket.AutoclassValue(
                enabled=autoclass)
        if bucket_policy_only or public_access_prevention:
            bucket_metadata.iamConfiguration = IamConfigurationValue()
            iam_config = bucket_metadata.iamConfiguration
            if bucket_policy_only:
                iam_config.bucketPolicyOnly = BucketPolicyOnlyValue()
                iam_config.bucketPolicyOnly.enabled = bucket_policy_only
            if public_access_prevention:
                iam_config.publicAccessPrevention = public_access_prevention

        if kms_key:
            encryption = apitools_messages.Bucket.EncryptionValue()
            encryption.defaultKmsKeyName = kms_key
            bucket_metadata.encryption = encryption

        for bucket_url_str in self.args:
            bucket_url = StorageUrlFromString(bucket_url_str)
            if seconds is not None:
                if bucket_url.scheme != 'gs':
                    raise CommandException(
                        'Retention policy can only be specified for '
                        'GCS buckets.')
                retention_policy = (
                    apitools_messages.Bucket.RetentionPolicyValue(
                        retentionPeriod=seconds))
                bucket_metadata.retentionPolicy = retention_policy

            if json_only_flags_in_command and self.gsutil_api.GetApiSelector(
                    bucket_url.scheme) != ApiSelector.JSON:
                raise CommandException(
                    'The {} option(s) can only be used for GCS'
                    ' Buckets with the JSON API'.format(
                        ', '.join(json_only_flags_in_command)))

            if not bucket_url.IsBucket():
                raise CommandException(
                    'The mb command requires a URL that specifies a '
                    'bucket.\n"%s" is not valid.' % bucket_url)
            if (not BUCKET_NAME_RE.match(bucket_url.bucket_name)
                    or TOO_LONG_DNS_NAME_COMP.search(bucket_url.bucket_name)):
                raise InvalidUrlError('Invalid bucket name in URL "%s"' %
                                      bucket_url.bucket_name)

            self.logger.info('Creating %s...', bucket_url)
            # Pass storage_class param only if this is a GCS bucket. (In S3 the
            # storage class is specified on the key object.)
            try:
                self.gsutil_api.CreateBucket(bucket_url.bucket_name,
                                             project_id=self.project_id,
                                             metadata=bucket_metadata,
                                             provider=bucket_url.scheme)
            except AccessDeniedException as e:
                message = e.reason
                if 'key' in message:
                    # This will print the error reason and append the following as a
                    # suggested next step:
                    #
                    # To authorize, run:
                    #   gsutil kms authorize \
                    #     -k <kms_key> \
                    #     -p <project_id>
                    message += ' To authorize, run:\n  gsutil kms authorize'
                    message += ' \\\n    -k %s' % kms_key
                    if (self.project_id):
                        message += ' \\\n    -p %s' % self.project_id
                    raise CommandException(message)
                else:
                    raise

            except BadRequestException as e:
                if (e.status == 400
                        and e.reason == 'DotfulBucketNameNotUnderTld'
                        and bucket_url.scheme == 'gs'):
                    bucket_name = bucket_url.bucket_name
                    final_comp = bucket_name[bucket_name.rfind('.') + 1:]
                    raise CommandException('\n'.join(
                        textwrap.wrap(
                            'Buckets with "." in the name must be valid DNS names. The bucket'
                            ' you are attempting to create (%s) is not a valid DNS name,'
                            ' because the final component (%s) is not currently a valid part'
                            ' of the top-level DNS tree.' %
                            (bucket_name, final_comp))))
                else:
                    raise

        return 0
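The json_only_flags_in_command bookkeeping in RunCommand above collects options that only exist in the JSON API so they can be rejected in a single message. A minimal standalone sketch of that guard (function and parameter names are illustrative, not gsutil's):

def check_json_only_flags(json_only_flags, api_selector):
  """Rejects JSON-only options when the selected API is not JSON."""
  if json_only_flags and api_selector != 'JSON':
    raise ValueError('The {} option(s) can only be used for GCS Buckets with '
                     'the JSON API'.format(', '.join(json_only_flags)))

# Example: check_json_only_flags(['-b', '--pap'], 'XML') raises ValueError.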
Example #14
class LoggingCommand(Command):
  """Implementation of gsutil logging command."""

  # Command specification. See base class for documentation.
  command_spec = Command.CreateCommandSpec(
      'logging',
      command_name_aliases=['disablelogging', 'enablelogging', 'getlogging'],
      usage_synopsis=_SYNOPSIS,
      min_args=2,
      max_args=NO_MAX,
      supported_sub_args='b:o:',
      file_url_ok=False,
      provider_url_ok=False,
      urls_start_arg=0,
      gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
      gs_default_api=ApiSelector.JSON,
      argparse_arguments=[
          CommandArgument('mode', choices=['on', 'off']),
          CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
      ]
  )
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='logging',
      help_name_aliases=['loggingconfig', 'logs', 'log', 'getlogging',
                         'enablelogging', 'disablelogging'],
      help_type='command_help',
      help_one_line_summary='Configure or retrieve logging on buckets',
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={'get': _get_help_text, 'set': _set_help_text},
  )

  def _Get(self):
    """Gets logging configuration for a bucket."""
    bucket_url, bucket_metadata = self.GetSingleBucketUrlFromArg(
        self.args[0], bucket_fields=['logging'])

    if bucket_url.scheme == 's3':
      sys.stdout.write(self.gsutil_api.XmlPassThroughGetLogging(
          bucket_url, provider=bucket_url.scheme))
    else:
      if (bucket_metadata.logging and bucket_metadata.logging.logBucket and
          bucket_metadata.logging.logObjectPrefix):
        sys.stdout.write(str(encoding.MessageToJson(
            bucket_metadata.logging)) + '\n')
      else:
        sys.stdout.write('%s has no logging configuration.\n' % bucket_url)
    return 0

  def _Enable(self):
    """Enables logging configuration for a bucket."""
    # Disallow multi-provider 'logging set on' calls, because the schemas
    # differ.
    if not UrlsAreForSingleProvider(self.args):
      raise CommandException('"logging set on" command spanning providers not '
                             'allowed.')
    target_bucket_url = None
    target_prefix = None
    for opt, opt_arg in self.sub_opts:
      if opt == '-b':
        target_bucket_url = StorageUrlFromString(opt_arg)
      if opt == '-o':
        target_prefix = opt_arg

    if not target_bucket_url:
      raise CommandException('"logging set on" requires \'-b <log_bucket>\' '
                             'option')
    if not target_bucket_url.IsBucket():
      raise CommandException('-b option must specify a bucket URL.')

    # Iterate over URLs, expanding wildcards and setting logging on each.
    some_matched = False
    for url_str in self.args:
      bucket_iter = self.GetBucketUrlIterFromArg(url_str, bucket_fields=['id'])
      for blr in bucket_iter:
        url = blr.storage_url
        some_matched = True
        self.logger.info('Enabling logging on %s...', blr)
        logging = apitools_messages.Bucket.LoggingValue(
            logBucket=target_bucket_url.bucket_name,
            logObjectPrefix=target_prefix or url.bucket_name)

        bucket_metadata = apitools_messages.Bucket(logging=logging)
        self.gsutil_api.PatchBucket(url.bucket_name, bucket_metadata,
                                    provider=url.scheme, fields=['id'])
    if not some_matched:
      raise CommandException(NO_URLS_MATCHED_TARGET % list(self.args))
    return 0

  def _Disable(self):
    """Disables logging configuration for a bucket."""
    # Iterate over URLs, expanding wildcards, and disabling logging on each.
    some_matched = False
    for url_str in self.args:
      bucket_iter = self.GetBucketUrlIterFromArg(url_str, bucket_fields=['id'])
      for blr in bucket_iter:
        url = blr.storage_url
        some_matched = True
        self.logger.info('Disabling logging on %s...', blr)
        logging = apitools_messages.Bucket.LoggingValue()

        bucket_metadata = apitools_messages.Bucket(logging=logging)
        self.gsutil_api.PatchBucket(url.bucket_name, bucket_metadata,
                                    provider=url.scheme, fields=['id'])
    if not some_matched:
      raise CommandException(NO_URLS_MATCHED_TARGET % list(self.args))
    return 0

  def RunCommand(self):
    """Command entry point for the logging command."""
    # Parse the subcommand and alias for the new logging command.
    action_subcommand = self.args.pop(0)
    if action_subcommand == 'get':
      func = self._Get
    elif action_subcommand == 'set':
      state_subcommand = self.args.pop(0)
      if not self.args:
        self.RaiseWrongNumberOfArgumentsException()
      if state_subcommand == 'on':
        func = self._Enable
      elif state_subcommand == 'off':
        func = self._Disable
      else:
        raise CommandException((
            'Invalid subcommand "%s" for the "%s %s" command.\n'
            'See "gsutil help logging".') % (
                state_subcommand, self.command_name, action_subcommand))
    else:
      raise CommandException(('Invalid subcommand "%s" for the %s command.\n'
                              'See "gsutil help logging".') %
                             (action_subcommand, self.command_name))
    self.ParseSubOpts(check_args=True)
    func()
    return 0
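RunCommand above dispatches on a two-level subcommand: 'get' maps directly to a handler, while 'set' consumes a second token ('on' or 'off'). A minimal standalone sketch of that dispatch shape (handler arguments are placeholders):

def dispatch_logging(args, get_handler, enable_handler, disable_handler):
  """Picks a handler for 'get', 'set on', or 'set off'."""
  action = args.pop(0)
  if action == 'get':
    return get_handler
  if action == 'set':
    state = args.pop(0)
    if state == 'on':
      return enable_handler
    if state == 'off':
      return disable_handler
    raise ValueError('Invalid subcommand "%s" for "logging set"' % state)
  raise ValueError('Invalid subcommand "%s" for the logging command' % action)

# Example: dispatch_logging(['set', 'on'], 'get', 'enable', 'disable') -> 'enable'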
Example #15
File: retention.py  Project: LevonAr/gsutil
class RetentionCommand(Command):
  """Implementation of gsutil retention command."""

  # Command specification. See base class for documentation.
  command_spec = Command.CreateCommandSpec(
      'retention',
      command_name_aliases=[],
      usage_synopsis=_SYNOPSIS,
      min_args=2,
      max_args=NO_MAX,
      file_url_ok=False,
      provider_url_ok=False,
      urls_start_arg=1,
      gs_api_support=[ApiSelector.JSON],
      gs_default_api=ApiSelector.JSON,
      argparse_arguments={
          'set': [CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()],
          'clear': [CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()],
          'get': [CommandArgument.MakeNCloudBucketURLsArgument(1)],
          'lock': [CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()],
          'event-default': {
              'set': [CommandArgument.MakeZeroOrMoreCloudURLsArgument()],
              'release': [CommandArgument.MakeZeroOrMoreCloudURLsArgument()]
          },
          'event': {
              'set': [CommandArgument.MakeZeroOrMoreCloudURLsArgument()],
              'release': [CommandArgument.MakeZeroOrMoreCloudURLsArgument()]
          },
          'temp': {
              'set': [CommandArgument.MakeZeroOrMoreCloudURLsArgument()],
              'release': [CommandArgument.MakeZeroOrMoreCloudURLsArgument()]
          },
      })

  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='retention',
      help_name_aliases=[],
      help_type='command_help',
      help_one_line_summary=(
          'Provides utilities to interact with the Retention Policy feature.'),
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={
          'get': _get_help_text,
          'set': _set_help_text,
          'clear': _clear_help_text,
          'lock': _lock_help_text,
          'event-default': _event_default_help_text,
          'event': _event_help_text,
          'temp': _temp_help_text
      },
  )

  def RunCommand(self):
    """Command entry point for the retention command."""
    # If the only credential type the user supplies in their boto file is HMAC,
    # GetApiSelector logic will force us to use the XML API, which bucket lock
    # does not support at the moment.
    if self.gsutil_api.GetApiSelector('gs') != ApiSelector.JSON:
      raise CommandException(('The {} command can only be used with the GCS '
                              'JSON API. If you have only supplied hmac '
                              'credentials in your boto file, please instead '
                              'supply a credential type that can be used with '
                              'the JSON API.').format(self.command_name))

    self.preconditions = PreconditionsFromHeaders(self.headers)

    action_subcommand = self.args.pop(0)
    self.ParseSubOpts(check_args=True)
    if action_subcommand == 'set':
      func = self._SetRetention
    elif action_subcommand == 'clear':
      func = self._ClearRetention
    elif action_subcommand == 'get':
      func = self._GetRetention
    elif action_subcommand == 'lock':
      func = self._LockRetention
    elif action_subcommand == 'event-default':
      func = self._DefaultEventHold
    elif action_subcommand == 'event':
      func = self._EventHold
    elif action_subcommand == 'temp':
      func = self._TempHold
    else:
      raise CommandException(
          ('Invalid subcommand "{}" for the {} command.\n'
           'See "gsutil help retention".').format(action_subcommand,
                                                  self.command_name))

    # Commands with both suboptions and subcommands need to reparse for
    # suboptions, so we log again.
    metrics.LogCommandParams(subcommands=[action_subcommand],
                             sub_opts=self.sub_opts)
    return func()

  def BucketUpdateFunc(self, url_args, bucket_metadata_update, fields,
                       log_msg_template):
    preconditions = Preconditions(
        meta_gen_match=self.preconditions.meta_gen_match)

    # Iterate over URLs, expanding wildcards and setting the new bucket metadata
    # on each bucket.
    some_matched = False
    for url_str in url_args:
      bucket_iter = self.GetBucketUrlIterFromArg(url_str, bucket_fields=['id'])
      for blr in bucket_iter:
        url = blr.storage_url
        some_matched = True
        self.logger.info(log_msg_template, blr)
        self.gsutil_api.PatchBucket(url.bucket_name,
                                    bucket_metadata_update,
                                    preconditions=preconditions,
                                    provider=url.scheme,
                                    fields=fields)
    if not some_matched:
      raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))

  def ObjectUpdateMetadataFunc(self,
                               patch_obj_metadata,
                               log_template,
                               name_expansion_result,
                               thread_state=None):
    """Updates metadata on an object using PatchObjectMetadata.

    Args:
      patch_obj_metadata: Metadata changes that should be applied to the
                          existing object.
      log_template: The log template that should be printed for each object.
      name_expansion_result: NameExpansionResult describing target object.
      thread_state: gsutil Cloud API instance to use for the operation.
    """
    gsutil_api = GetCloudApiInstance(self, thread_state=thread_state)

    exp_src_url = name_expansion_result.expanded_storage_url
    self.logger.info(log_template, exp_src_url)

    cloud_obj_metadata = encoding.JsonToMessage(
        apitools_messages.Object, name_expansion_result.expanded_result)

    preconditions = Preconditions(
        gen_match=self.preconditions.gen_match,
        meta_gen_match=self.preconditions.meta_gen_match)
    if preconditions.gen_match is None:
      preconditions.gen_match = cloud_obj_metadata.generation
    if preconditions.meta_gen_match is None:
      preconditions.meta_gen_match = cloud_obj_metadata.metageneration

    gsutil_api.PatchObjectMetadata(exp_src_url.bucket_name,
                                   exp_src_url.object_name,
                                   patch_obj_metadata,
                                   generation=exp_src_url.generation,
                                   preconditions=preconditions,
                                   provider=exp_src_url.scheme,
                                   fields=['id'])
    PutToQueueWithTimeout(gsutil_api.status_queue,
                          MetadataMessage(message_time=time.time()))

  def _GetObjectNameExpansionIterator(self, url_args):
    return NameExpansionIterator(
        self.command_name,
        self.debug,
        self.logger,
        self.gsutil_api,
        url_args,
        self.recursion_requested,
        all_versions=self.all_versions,
        continue_on_error=self.parallel_operations,
        bucket_listing_fields=['generation', 'metageneration'])

  def _GetSeekAheadNameExpansionIterator(self, url_args):
    return SeekAheadNameExpansionIterator(self.command_name,
                                          self.debug,
                                          self.GetSeekAheadGsutilApi(),
                                          url_args,
                                          self.recursion_requested,
                                          all_versions=self.all_versions,
                                          project_id=self.project_id)

  def _SetRetention(self):
    """Set retention retention_period on one or more buckets."""

    seconds = RetentionInSeconds(self.args[0])
    retention_policy = (apitools_messages.Bucket.RetentionPolicyValue(
        retentionPeriod=seconds))

    log_msg_template = 'Setting Retention Policy on %s...'
    bucket_metadata_update = apitools_messages.Bucket(
        retentionPolicy=retention_policy)
    url_args = self.args[1:]
    self.BucketUpdateFunc(url_args,
                          bucket_metadata_update,
                          fields=['id', 'retentionPolicy'],
                          log_msg_template=log_msg_template)
    return 0

  def _ClearRetention(self):
    """Clear retention retention_period on one or more buckets."""
    retention_policy = (apitools_messages.Bucket.RetentionPolicyValue(
        retentionPeriod=None))
    log_msg_template = 'Clearing Retention Policy on %s...'
    bucket_metadata_update = apitools_messages.Bucket(
        retentionPolicy=retention_policy)
    url_args = self.args
    self.BucketUpdateFunc(url_args,
                          bucket_metadata_update,
                          fields=['id', 'retentionPolicy'],
                          log_msg_template=log_msg_template)
    return 0

  def _GetRetention(self):
    """Get Retention Policy for a single bucket."""
    bucket_url, bucket_metadata = self.GetSingleBucketUrlFromArg(
        self.args[0], bucket_fields=['retentionPolicy'])
    print(RetentionPolicyToString(bucket_metadata.retentionPolicy, bucket_url))
    return 0

  def _LockRetention(self):
    """Lock Retention Policy on one or more buckets."""
    url_args = self.args
    # Iterate over URLs, expanding wildcards and setting the Retention Policy
    # configuration on each.
    some_matched = False
    for url_str in url_args:
      bucket_iter = self.GetBucketUrlIterFromArg(url_str, bucket_fields=['id'])
      for blr in bucket_iter:
        url = blr.storage_url
        some_matched = True
        # Get bucket metadata to provide a precondition.
        bucket_metadata = self.gsutil_api.GetBucket(
            url.bucket_name,
            provider=url.scheme,
            fields=['id', 'metageneration', 'retentionPolicy'])
        if (not (bucket_metadata.retentionPolicy and
                 bucket_metadata.retentionPolicy.retentionPeriod)):
          # TODO: implement '-c' flag to continue_on_error
          raise CommandException(
              'Bucket "{}" does not have an Unlocked Retention Policy.'.format(
                  url.bucket_name))
        elif bucket_metadata.retentionPolicy.isLocked is True:
          self.logger.error('Retention Policy on "%s" is already locked.', blr)
        elif ConfirmLockRequest(url.bucket_name,
                                bucket_metadata.retentionPolicy):
          self.logger.info('Locking Retention Policy on %s...', blr)
          self.gsutil_api.LockRetentionPolicy(url.bucket_name,
                                              bucket_metadata.metageneration,
                                              provider=url.scheme)
        else:
          self.logger.error(
              '  Abort Locking Retention Policy on {}'.format(blr))
    if not some_matched:
      raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))
    return 0

  def _DefaultEventHold(self):
    """Sets default value for Event-Based Hold on one or more buckets."""
    hold = None
    if self.args:
      if self.args[0].lower() == 'set':
        hold = True
      elif self.args[0].lower() == 'release':
        hold = False
      else:
        raise CommandException(
            ('Invalid subcommand "{}" for the "retention event-default"'
             ' command.\nSee "gsutil help retention event".').format(
                 self.args[0]))

    verb = 'Setting' if hold else 'Releasing'
    log_msg_template = '{} default Event-Based Hold on %s...'.format(verb)
    bucket_metadata_update = apitools_messages.Bucket(
        defaultEventBasedHold=hold)
    url_args = self.args[1:]
    self.BucketUpdateFunc(url_args,
                          bucket_metadata_update,
                          fields=['id', 'defaultEventBasedHold'],
                          log_msg_template=log_msg_template)
    return 0

  def _EventHold(self):
    """Sets or unsets Event-Based Hold on one or more objects."""
    sub_command_name = 'event'
    sub_command_full_name = 'Event-Based'
    hold = self._ProcessHoldArgs(sub_command_name)
    url_args = self.args[1:]
    obj_metadata_update_wrapper = (SetEventHoldFuncWrapper
                                   if hold else ReleaseEventHoldFuncWrapper)
    self._SetHold(obj_metadata_update_wrapper, url_args, sub_command_full_name)
    return 0

  def _TempHold(self):
    """Sets or unsets Temporary Hold on one or more objects."""
    sub_command_name = 'temp'
    sub_command_full_name = 'Temporary'
    hold = self._ProcessHoldArgs(sub_command_name)
    url_args = self.args[1:]
    obj_metadata_update_wrapper = (SetTempHoldFuncWrapper
                                   if hold else ReleaseTempHoldFuncWrapper)
    self._SetHold(obj_metadata_update_wrapper, url_args, sub_command_full_name)
    return 0

  def _ProcessHoldArgs(self, sub_command_name):
    """Processes command args for Temporary and Event-Based Hold sub-command.

    Args:
      sub_command_name: The name of the subcommand: "temp" / "event"

    Returns:
      A boolean value indicating whether to set (True) or release (False)
      the Hold.
    """
    hold = None
    if self.args[0].lower() == 'set':
      hold = True
    elif self.args[0].lower() == 'release':
      hold = False
    else:
      raise CommandException(
          ('Invalid subcommand "{}" for the "retention {}" command.\n'
           'See "gsutil help retention {}".').format(self.args[0],
                                                     sub_command_name,
                                                     sub_command_name))
    return hold

  def _SetHold(self, obj_metadata_update_wrapper, url_args,
               sub_command_full_name):
    """Common logic to set or unset Event-Based/Temporary Hold on objects.

    Args:
      obj_metadata_update_wrapper: The function for updating related fields in
                                   Object metadata.
      url_args: List of object URIs.
      sub_command_full_name: The full name for sub-command:
                             "Temporary" / "Event-Based"
    """
    if len(url_args) == 1 and not self.recursion_requested:
      url = StorageUrlFromString(url_args[0])
      if not (url.IsCloudUrl() and url.IsObject()):
        raise CommandException('URL ({}) must name an object'.format(
            url_args[0]))

    name_expansion_iterator = self._GetObjectNameExpansionIterator(url_args)
    seek_ahead_iterator = self._GetSeekAheadNameExpansionIterator(url_args)

    # Used to track if any objects' metadata failed to be set.
    self.everything_set_okay = True

    try:
      # TODO: implement '-c' flag to continue_on_error

      # Perform requests in parallel (-m) mode, if requested, using
      # configured number of parallel processes and threads. Otherwise,
      # perform requests with sequential function calls in current process.
      self.Apply(obj_metadata_update_wrapper,
                 name_expansion_iterator,
                 UpdateObjectMetadataExceptionHandler,
                 fail_on_error=True,
                 seek_ahead_iterator=seek_ahead_iterator)

    except AccessDeniedException as e:
      if e.status == 403:
        self._WarnServiceAccounts()
      raise

    if not self.everything_set_okay:
      raise CommandException(
          '{} Hold for some objects could not be set.'.format(
              sub_command_full_name))
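ObjectUpdateMetadataFunc above defaults missing generation/metageneration preconditions to the object's current values, so the patch fails if the object changes concurrently. A minimal sketch of that defaulting using plain values instead of gsutil's Preconditions object:

def resolve_preconditions(user_gen, user_metagen, current_gen, current_metagen):
  """Returns (gen_match, meta_gen_match), preferring user-supplied values."""
  gen_match = user_gen if user_gen is not None else current_gen
  meta_gen_match = user_metagen if user_metagen is not None else current_metagen
  return gen_match, meta_gen_match

# Example: resolve_preconditions(None, None, 5, 2) -> (5, 2)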
Example #16
class MbCommand(Command):
  """Implementation of gsutil mb command."""

  # Command specification. See base class for documentation.
  command_spec = Command.CreateCommandSpec(
      'mb',
      command_name_aliases=['makebucket', 'createbucket', 'md', 'mkdir'],
      usage_synopsis=_SYNOPSIS,
      min_args=1,
      max_args=NO_MAX,
      supported_sub_args='c:l:p:s:',
      file_url_ok=False,
      provider_url_ok=False,
      urls_start_arg=0,
      gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
      gs_default_api=ApiSelector.JSON,
      argparse_arguments=[
          CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
      ]
  )
  # Help specification. See help_provider.py for documentation.
  help_spec = Command.HelpSpec(
      help_name='mb',
      help_name_aliases=[
          'createbucket', 'makebucket', 'md', 'mkdir', 'location', 'dra',
          'dras', 'reduced_availability', 'durable_reduced_availability', 'rr',
          'reduced_redundancy', 'standard', 'storage class', 'nearline', 'nl'],
      help_type='command_help',
      help_one_line_summary='Make buckets',
      help_text=_DETAILED_HELP_TEXT,
      subcommand_help_text={},
  )

  def RunCommand(self):
    """Command entry point for the mb command."""
    location = None
    storage_class = None
    if self.sub_opts:
      for o, a in self.sub_opts:
        if o == '-l':
          location = a
        elif o == '-p':
          # Project IDs are sent as header values when using gs and s3 XML APIs.
          InsistAscii(a, 'Invalid non-ASCII character found in project ID')
          self.project_id = a
        elif o == '-c' or o == '-s':
          storage_class = NormalizeStorageClass(a)

    bucket_metadata = apitools_messages.Bucket(location=location,
                                               storageClass=storage_class)

    for bucket_url_str in self.args:
      bucket_url = StorageUrlFromString(bucket_url_str)
      if not bucket_url.IsBucket():
        raise CommandException('The mb command requires a URL that specifies a '
                               'bucket.\n"%s" is not valid.' % bucket_url)
      if (not BUCKET_NAME_RE.match(bucket_url.bucket_name) or
          TOO_LONG_DNS_NAME_COMP.search(bucket_url.bucket_name)):
        raise InvalidUrlError(
            'Invalid bucket name in URL "%s"' % bucket_url.bucket_name)

      self.logger.info('Creating %s...', bucket_url)
      # Pass storage_class param only if this is a GCS bucket. (In S3 the
      # storage class is specified on the key object.)
      try:
        self.gsutil_api.CreateBucket(
            bucket_url.bucket_name, project_id=self.project_id,
            metadata=bucket_metadata, provider=bucket_url.scheme)
      except BadRequestException as e:
        if (e.status == 400 and e.reason == 'DotfulBucketNameNotUnderTld' and
            bucket_url.scheme == 'gs'):
          bucket_name = bucket_url.bucket_name
          final_comp = bucket_name[bucket_name.rfind('.')+1:]
          raise CommandException('\n'.join(textwrap.wrap(
              'Buckets with "." in the name must be valid DNS names. The bucket'
              ' you are attempting to create (%s) is not a valid DNS name,'
              ' because the final component (%s) is not currently a valid part'
              ' of the top-level DNS tree.' % (bucket_name, final_comp))))
        else:
          raise

    return 0
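The BUCKET_NAME_RE and TOO_LONG_DNS_NAME_COMP checks above come from gsutil's utility module. A hedged standalone sketch of that validation using simplified stand-in patterns (the regexes below are assumptions for illustration only, not gsutil's real ones):

import re

# Simplified stand-ins: roughly DNS-style names with no component over 63 chars.
_BUCKET_NAME_RE = re.compile(r'^[a-z0-9][a-z0-9._-]{1,220}[a-z0-9]$')
_TOO_LONG_DNS_NAME_COMP = re.compile(r'[-_a-z0-9]{64,}')

def looks_like_valid_bucket_name(name):
  """Returns True if the name passes the simplified bucket-name checks."""
  return (bool(_BUCKET_NAME_RE.match(name)) and
          not _TOO_LONG_DNS_NAME_COMP.search(name))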
Example #17
class PapCommand(Command):
    """Implements the gsutil pap command."""

    command_spec = Command.CreateCommandSpec(
        'pap',
        command_name_aliases=['publicaccessprevention'],
        usage_synopsis=_SYNOPSIS,
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=2,
        gs_api_support=[ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'get': [
                CommandArgument.MakeNCloudURLsArgument(1),
            ],
            'set': [
                CommandArgument('mode', choices=['enforced', 'inherited']),
                CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
            ],
        })
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='pap',
        help_name_aliases=['publicaccessprevention'],
        help_type='command_help',
        help_one_line_summary='Configure public access prevention',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'get': _get_help_text,
            'set': _set_help_text,
        },
    )

    def _ValidateBucketListingRefAndReturnBucketName(self, blr):
        if blr.storage_url.scheme != 'gs':
            raise CommandException(
                'The %s command can only be used with gs:// bucket URLs.' %
                self.command_name)

    def _GetPublicAccessPrevention(self, blr):
        """Gets the public access prevention setting for a bucket."""
        bucket_url = blr.storage_url

        bucket_metadata = self.gsutil_api.GetBucket(
            bucket_url.bucket_name,
            fields=['iamConfiguration'],
            provider=bucket_url.scheme)
        iam_config = bucket_metadata.iamConfiguration
        public_access_prevention = iam_config.publicAccessPrevention or 'inherited'
        bucket = str(bucket_url).rstrip('/')
        print('%s: %s' % (bucket, public_access_prevention))

    def _SetPublicAccessPrevention(self, blr, setting_arg):
        """Sets the Public Access Prevention setting for a bucket enforced or inherited."""
        bucket_url = blr.storage_url

        iam_config = IamConfigurationValue()
        iam_config.publicAccessPrevention = setting_arg

        bucket_metadata = apitools_messages.Bucket(iamConfiguration=iam_config)

        print('Setting Public Access Prevention %s for %s' %
              (setting_arg, str(bucket_url).rstrip('/')))

        self.gsutil_api.PatchBucket(bucket_url.bucket_name,
                                    bucket_metadata,
                                    fields=['iamConfiguration'],
                                    provider=bucket_url.scheme)
        return 0

    def _Pap(self):
        """Handles pap command on Cloud Storage buckets."""
        subcommand = self.args.pop(0)

        if subcommand not in ('get', 'set'):
            raise CommandException('pap only supports get|set')

        subcommand_func = None
        subcommand_args = []
        setting_arg = None

        if subcommand == 'get':
            subcommand_func = self._GetPublicAccessPrevention
        elif subcommand == 'set':
            subcommand_func = self._SetPublicAccessPrevention
            setting_arg = self.args.pop(0)
            subcommand_args.append(setting_arg)

        if self.gsutil_api.GetApiSelector('gs') != ApiSelector.JSON:
            raise CommandException('\n'.join(
                textwrap.wrap(
                    ('The "%s" command can only be with the Cloud Storage '
                     'JSON API.') % self.command_name)))

        # Iterate over bucket args, performing the specified subsubcommand.
        some_matched = False
        url_args = self.args
        if not url_args:
            self.RaiseWrongNumberOfArgumentsException()
        for url_str in url_args:
            # Throws a CommandException if the argument is not a bucket.
            bucket_iter = self.GetBucketUrlIterFromArg(url_str)
            for bucket_listing_ref in bucket_iter:
                if self.gsutil_api.GetApiSelector(
                        bucket_listing_ref.storage_url.scheme
                ) != ApiSelector.JSON:
                    raise CommandException('\n'.join(
                        textwrap.wrap(
                            ('The "%s" command can only be used for GCS '
                             'Buckets.') % self.command_name)))

                some_matched = True
                subcommand_func(bucket_listing_ref, *subcommand_args)

        if not some_matched:
            raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))
        return 0

    def RunCommand(self):
        """Command entry point for the pap command."""
        action_subcommand = self.args[0]
        self.ParseSubOpts(check_args=True)

        if action_subcommand == 'get' or action_subcommand == 'set':
            metrics.LogCommandParams(sub_opts=self.sub_opts)
            metrics.LogCommandParams(subcommands=[action_subcommand])
            self._Pap()
        else:
            raise CommandException(
                'Invalid subcommand "%s", use get|set instead.' %
                action_subcommand)
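_GetPublicAccessPrevention above reports buckets with no explicit setting as 'inherited'. A tiny standalone sketch of that defaulting (names are illustrative):

def describe_public_access_prevention(bucket_name, setting=None):
  """Formats a bucket's public access prevention, defaulting to 'inherited'."""
  return '%s: %s' % (bucket_name, setting or 'inherited')

# Example: describe_public_access_prevention('gs://example-bucket')
# -> 'gs://example-bucket: inherited'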
Example #18
class WebCommand(Command):
    """Implementation of gsutil web command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'web',
        command_name_aliases=['setwebcfg', 'getwebcfg'],
        usage_synopsis=_SYNOPSIS,
        min_args=2,
        max_args=NO_MAX,
        supported_sub_args='m:e:',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=1,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments={
            'set': [CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()],
            'get': [CommandArgument.MakeNCloudBucketURLsArgument(1)]
        })
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='web',
        help_name_aliases=['getwebcfg', 'setwebcfg'],
        help_type='command_help',
        help_one_line_summary=(
            'Set a main page and/or error page for one or more buckets'),
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={
            'get': _get_help_text,
            'set': _set_help_text
        },
    )

    def _GetWeb(self):
        """Gets website configuration for a bucket."""
        bucket_url, bucket_metadata = self.GetSingleBucketUrlFromArg(
            self.args[0], bucket_fields=['website'])

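        # For S3 buckets the website configuration is retrieved via the XML API
        # pass-through; for GCS buckets the JSON website metadata is printed.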
        if bucket_url.scheme == 's3':
            sys.stdout.write(
                self.gsutil_api.XmlPassThroughGetWebsite(
                    bucket_url, provider=bucket_url.scheme))
        else:
            if bucket_metadata.website and (
                    bucket_metadata.website.mainPageSuffix
                    or bucket_metadata.website.notFoundPage):
                sys.stdout.write(
                    str(encoding.MessageToJson(bucket_metadata.website)) +
                    '\n')
            else:
                sys.stdout.write('%s has no website configuration.\n' %
                                 bucket_url)

        return 0

    def _SetWeb(self):
        """Sets website configuration for a bucket."""
        main_page_suffix = None
        error_page = None
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-m':
                    main_page_suffix = a
                elif o == '-e':
                    error_page = a

        url_args = self.args

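        # Build the website configuration to apply. If neither -m nor -e was
        # supplied, both fields are None, which effectively removes any
        # existing website configuration.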
        website = apitools_messages.Bucket.WebsiteValue(
            mainPageSuffix=main_page_suffix, notFoundPage=error_page)

        # Iterate over URLs, expanding wildcards and setting the website
        # configuration on each.
        some_matched = False
        for url_str in url_args:
            bucket_iter = self.GetBucketUrlIterFromArg(url_str,
                                                       bucket_fields=['id'])
            for blr in bucket_iter:
                url = blr.storage_url
                some_matched = True
                self.logger.info('Setting website configuration on %s...', blr)
                bucket_metadata = apitools_messages.Bucket(website=website)
                self.gsutil_api.PatchBucket(url.bucket_name,
                                            bucket_metadata,
                                            provider=url.scheme,
                                            fields=['id'])
        if not some_matched:
            raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))
        return 0

    def RunCommand(self):
        """Command entry point for the web command."""
        action_subcommand = self.args.pop(0)
        self.ParseSubOpts(check_args=True)
        if action_subcommand == 'get':
            func = self._GetWeb
        elif action_subcommand == 'set':
            func = self._SetWeb
        else:
            raise CommandException(
                ('Invalid subcommand "%s" for the %s command.\n'
                 'See "gsutil help web".') %
                (action_subcommand, self.command_name))

        # Commands with both suboptions and subcommands need to reparse for
        # suboptions, so we log again.
        metrics.LogCommandParams(subcommands=[action_subcommand],
                                 sub_opts=self.sub_opts)
        return func()
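
# Illustrative usage only (not part of the original example): a minimal sketch
# of how the web subcommands above are typically invoked through the gsutil
# CLI. The bucket and page names are placeholders; -m and -e correspond to
# supported_sub_args above.
import subprocess

subprocess.run(['gsutil', 'web', 'set', '-m', 'index.html', '-e', '404.html',
                'gs://example-bucket'], check=True)
subprocess.run(['gsutil', 'web', 'get', 'gs://example-bucket'], check=True)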
Example #19
class MbCommand(Command):
    """Implementation of gsutil mb command."""

    # Command specification. See base class for documentation.
    command_spec = Command.CreateCommandSpec(
        'mb',
        command_name_aliases=['makebucket', 'createbucket', 'md', 'mkdir'],
        usage_synopsis=_SYNOPSIS,
        min_args=1,
        max_args=NO_MAX,
        supported_sub_args='c:l:p:',
        file_url_ok=False,
        provider_url_ok=False,
        urls_start_arg=0,
        gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
        gs_default_api=ApiSelector.JSON,
        argparse_arguments=[
            CommandArgument.MakeZeroOrMoreCloudBucketURLsArgument()
        ])
    # Help specification. See help_provider.py for documentation.
    help_spec = Command.HelpSpec(
        help_name='mb',
        help_name_aliases=[
            'createbucket', 'makebucket', 'md', 'mkdir', 'location', 'dra',
            'dras', 'reduced_availability', 'durable_reduced_availability',
            'rr', 'reduced_redundancy', 'standard', 'storage class'
        ],
        help_type='command_help',
        help_one_line_summary='Make buckets',
        help_text=_DETAILED_HELP_TEXT,
        subcommand_help_text={},
    )

    def RunCommand(self):
        """Command entry point for the mb command."""
        location = None
        storage_class = None
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-l':
                    location = a
                elif o == '-p':
                    self.project_id = a
                elif o == '-c':
                    storage_class = self._Normalize_Storage_Class(a)

        bucket_metadata = apitools_messages.Bucket(location=location,
                                                   storageClass=storage_class)

        for bucket_uri_str in self.args:
            bucket_uri = StorageUrlFromString(bucket_uri_str)
            if not bucket_uri.IsBucket():
                raise CommandException(
                    'The mb command requires a URI that specifies a '
                    'bucket.\n"%s" is not valid.' % bucket_uri)

            self.logger.info('Creating %s...', bucket_uri)
            # Pass storage_class param only if this is a GCS bucket. (In S3 the
            # storage class is specified on the key object.)
            try:
                self.gsutil_api.CreateBucket(bucket_uri.bucket_name,
                                             project_id=self.project_id,
                                             metadata=bucket_metadata,
                                             provider=bucket_uri.scheme)
            except BadRequestException as e:
                if (e.status == 400
                        and e.reason == 'DotfulBucketNameNotUnderTld'
                        and bucket_uri.scheme == 'gs'):
                    bucket_name = bucket_uri.bucket_name
                    final_comp = bucket_name[bucket_name.rfind('.') + 1:]
                    raise CommandException('\n'.join(
                        textwrap.wrap(
                            'Buckets with "." in the name must be valid DNS names. The bucket'
                            ' you are attempting to create (%s) is not a valid DNS name,'
                            ' because the final component (%s) is not currently a valid part'
                            ' of the top-level DNS tree.' %
                            (bucket_name, final_comp))))
                else:
                    raise

        return 0

    def _Normalize_Storage_Class(self, sc):
        sc = sc.upper()
        if sc in ('DRA', 'DURABLE_REDUCED_AVAILABILITY'):
            return 'DURABLE_REDUCED_AVAILABILITY'
        if sc in ('S', 'STD', 'STANDARD'):
            return 'STANDARD'
        return sc
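
# Illustrative usage only (not part of the original example): a minimal sketch
# of how the mb command above is typically invoked through the gsutil CLI. The
# bucket, location, and project names are placeholders; the -c value is
# normalized by _Normalize_Storage_Class, so "std" becomes "STANDARD".
import subprocess

subprocess.run(['gsutil', 'mb', '-l', 'US', '-c', 'std', '-p', 'my-project',
                'gs://example-new-bucket'], check=True)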