Example #1
    def RunCommand(self):
        """Command entry point for the hmac command."""

        if self.gsutil_api.GetApiSelector(provider='gs') != ApiSelector.JSON:
            raise CommandException(
                'The "hmac" command can only be used with the GCS JSON API')

        self.action_subcommand = self.args.pop(0)
        self.ParseSubOpts(check_args=True)
        # Commands with both suboptions and subcommands need to reparse for
        # suboptions, so we log again.
        LogCommandParams(sub_opts=self.sub_opts)

        self.service_account_email = None
        self.state = None
        self.show_all = False
        self.long_list = False
        self.etag = None

        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-u':
                    self.service_account_email = a
                elif o == '-p':
                    # Project IDs are sent as header values when using gs and s3 XML APIs.
                    InsistAscii(
                        a, 'Invalid non-ASCII character found in project ID')
                    self.project_id = a
                elif o == '-s':
                    self.state = a
                elif o == '-a':
                    self.show_all = True
                elif o == '-l':
                    self.long_list = True
                elif o == '-e':
                    self.etag = a

        if not self.project_id:
            self.project_id = PopulateProjectId(None)

        method_for_arg = {
            'create': self._CreateHmacKey,
            'delete': self._DeleteHmacKey,
            'get': self._GetHmacKey,
            'list': self._ListHmacKeys,
            'update': self._UpdateHmacKey,
        }
        if self.action_subcommand not in method_for_arg:
            raise CommandException(
                'Invalid subcommand "%s" for the %s command.\n'
                'See "gsutil help hmac".' %
                (self.action_subcommand, self.command_name))

        LogCommandParams(subcommands=[self.action_subcommand])
        method_for_arg[self.action_subcommand]()

        return 0
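
The hmac command above dispatches its subcommand through a dict of bound methods rather than a long if/elif chain. A minimal, standalone sketch of that dispatch pattern follows; the class and handler names are illustrative only, not gsutil internals:

# Minimal sketch of the subcommand dispatch-table pattern used above.
# HmacLikeCommand, _Create, and _List are hypothetical names for illustration.
class HmacLikeCommand(object):

    def __init__(self, args):
        self.args = list(args)

    def _Create(self):
        print('create called')

    def _List(self):
        print('list called')

    def Run(self):
        subcommand = self.args.pop(0) if self.args else None
        handlers = {
            'create': self._Create,
            'list': self._List,
        }
        if subcommand not in handlers:
            raise ValueError('Invalid subcommand "%s"' % subcommand)
        handlers[subcommand]()  # Dispatch without an if/elif branch per name.
        return 0


if __name__ == '__main__':
    HmacLikeCommand(['list']).Run()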
Example #2
    def RunCommand(self):
        """Command entry point for the mb command."""
        location = None
        storage_class = None
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-l':
                    location = a
                elif o == '-p':
                    # Project IDs are sent as header values when using gs and s3 XML APIs.
                    InsistAscii(
                        a, 'Invalid non-ASCII character found in project ID')
                    self.project_id = a
                elif o == '-c' or o == '-s':
                    storage_class = NormalizeStorageClass(a)

        bucket_metadata = apitools_messages.Bucket(location=location,
                                                   storageClass=storage_class)

        for bucket_url_str in self.args:
            bucket_url = StorageUrlFromString(bucket_url_str)
            if not bucket_url.IsBucket():
                raise CommandException(
                    'The mb command requires a URL that specifies a '
                    'bucket.\n"%s" is not valid.' % bucket_url)
            if (not BUCKET_NAME_RE.match(bucket_url.bucket_name)
                    or TOO_LONG_DNS_NAME_COMP.search(bucket_url.bucket_name)):
                raise InvalidUrlError('Invalid bucket name in URL "%s"' %
                                      bucket_url.bucket_name)

            self.logger.info('Creating %s...', bucket_url)
            # Pass storage_class param only if this is a GCS bucket. (In S3 the
            # storage class is specified on the key object.)
            try:
                self.gsutil_api.CreateBucket(bucket_url.bucket_name,
                                             project_id=self.project_id,
                                             metadata=bucket_metadata,
                                             provider=bucket_url.scheme)
            except BadRequestException as e:
                if (e.status == 400
                        and e.reason == 'DotfulBucketNameNotUnderTld'
                        and bucket_url.scheme == 'gs'):
                    bucket_name = bucket_url.bucket_name
                    final_comp = bucket_name[bucket_name.rfind('.') + 1:]
                    raise CommandException('\n'.join(
                        textwrap.wrap(
                            'Buckets with "." in the name must be valid DNS names. The bucket'
                            ' you are attempting to create (%s) is not a valid DNS name,'
                            ' because the final component (%s) is not currently a valid part'
                            ' of the top-level DNS tree.' %
                            (bucket_name, final_comp))))
                else:
                    raise

        return 0
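
The mb command above rejects bucket names that fail BUCKET_NAME_RE or contain an over-long DNS name component. A rough sketch of that kind of check, using simplified stand-in patterns (gsutil's real regexes may differ):

import re

# Simplified stand-ins for gsutil's BUCKET_NAME_RE and TOO_LONG_DNS_NAME_COMP.
BUCKET_NAME_RE = re.compile(r'^[a-z0-9][a-z0-9._-]{1,253}[a-z0-9]$')
# A dot-separated component longer than 63 characters is not a valid DNS label.
TOO_LONG_DNS_NAME_COMP = re.compile(r'[-_a-z0-9]{64}')


def looks_like_valid_bucket_name(name):
    return (bool(BUCKET_NAME_RE.match(name))
            and not TOO_LONG_DNS_NAME_COMP.search(name))


print(looks_like_valid_bucket_name('my-bucket'))  # True
print(looks_like_valid_bucket_name('x' * 100))    # False: component too long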
Example #3
    def test_valid_header_coding(self):
        headers = {
            'content-type': 'text/plain',
            'x-goog-meta-foo': 'bãr',
        }
        HandleHeaderCoding(headers)
        # Custom metadata header values should be decoded to unicode; others
        # should not be decoded, but should contain only ASCII characters.
        self.assertTrue(isinstance(headers['x-goog-meta-foo'], six.text_type))
        InsistAscii(
            headers['content-type'],
            'Value of non-custom-metadata header contained non-ASCII characters')
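
A sketch of the behavior this test exercises: custom x-goog-meta-* header values get decoded to text, while all other headers must stay ASCII. The helpers below are stand-ins written for illustration, not gsutil's actual HandleHeaderCoding/InsistAscii implementations:

def insist_ascii(value, message):
    # Stand-in for gsutil's InsistAscii: reject any non-ASCII character.
    if not all(ord(ch) < 128 for ch in value):
        raise ValueError(message)


def handle_header_coding(headers):
    # Decode only x-goog-meta-* values to text; insist other headers are ASCII.
    for key, value in list(headers.items()):
        if key.lower().startswith('x-goog-meta-'):
            if isinstance(value, bytes):
                headers[key] = value.decode('utf-8')
        else:
            insist_ascii(
                value,
                'Non-ASCII value in non-custom-metadata header %r' % key)


headers = {'content-type': 'text/plain',
           'x-goog-meta-foo': u'bãr'.encode('utf-8')}
handle_header_coding(headers)
assert isinstance(headers['x-goog-meta-foo'], str)  # decoded on Python 3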
Example #4
    def RunCommand(self):
        """Command entry point for the ls command."""
        got_nomatch_errors = False
        got_bucket_nomatch_errors = False
        listing_style = ListingStyle.SHORT
        get_bucket_info = False
        self.recursion_requested = False
        self.all_versions = False
        self.include_etag = False
        self.human_readable = False
        self.list_subdir_contents = True
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-a':
                    self.all_versions = True
                elif o == '-e':
                    self.include_etag = True
                elif o == '-b':
                    get_bucket_info = True
                elif o == '-h':
                    self.human_readable = True
                elif o == '-l':
                    listing_style = ListingStyle.LONG
                elif o == '-L':
                    listing_style = ListingStyle.LONG_LONG
                elif o == '-p':
                    # Project IDs are sent as header values when using gs and s3 XML APIs.
                    InsistAscii(
                        a, 'Invalid non-ASCII character found in project ID')
                    self.project_id = a
                elif o == '-r' or o == '-R':
                    self.recursion_requested = True
                elif o == '-d':
                    self.list_subdir_contents = False

        if not self.args:
            # default to listing all gs buckets
            self.args = ['gs://']

        total_objs = 0
        total_bytes = 0

        def MaybePrintBucketHeader(blr):
            if len(self.args) > 1:
                text_util.print_to_fd('%s:' % six.ensure_text(blr.url_string))

        print_bucket_header = MaybePrintBucketHeader

        for url_str in self.args:
            storage_url = StorageUrlFromString(url_str)
            if storage_url.IsFileUrl():
                raise CommandException('Only cloud URLs are supported for %s' %
                                       self.command_name)
            bucket_fields = None
            if (listing_style == ListingStyle.SHORT
                    or listing_style == ListingStyle.LONG):
                bucket_fields = ['id']
            elif listing_style == ListingStyle.LONG_LONG:
                bucket_fields = [
                    'acl',
                    'billing',
                    'cors',
                    'defaultObjectAcl',
                    'encryption',
                    'iamConfiguration',
                    'labels',
                    'location',
                    'locationType',
                    'logging',
                    'lifecycle',
                    'metageneration',
                    'retentionPolicy',
                    'defaultEventBasedHold',
                    'storageClass',
                    'timeCreated',
                    'updated',
                    'versioning',
                    'website',
                ]
            if storage_url.IsProvider():
                # Provider URL: use bucket wildcard to list buckets.
                for blr in self.WildcardIterator(
                        '%s://*' % storage_url.scheme).IterBuckets(
                            bucket_fields=bucket_fields):
                    self._PrintBucketInfo(blr, listing_style)
            elif storage_url.IsBucket() and get_bucket_info:
                # ls -b bucket listing request: List info about bucket(s).
                total_buckets = 0
                for blr in self.WildcardIterator(url_str).IterBuckets(
                        bucket_fields=bucket_fields):
                    if not ContainsWildcard(url_str) and not blr.root_object:
                        # Iterator does not make an HTTP call for non-wildcarded
                        # listings with fields=='id'. Ensure the bucket exists by calling
                        # GetBucket.
                        self.gsutil_api.GetBucket(blr.storage_url.bucket_name,
                                                  fields=['id'],
                                                  provider=storage_url.scheme)
                    self._PrintBucketInfo(blr, listing_style)
                    total_buckets += 1
                if not ContainsWildcard(url_str) and not total_buckets:
                    got_bucket_nomatch_errors = True
            else:
                # URL names a bucket, object, or object subdir ->
                # list matching object(s) / subdirs.
                def _PrintPrefixLong(blr):
                    text_util.print_to_fd(
                        '%-33s%s' % ('', six.ensure_text(blr.url_string)))

                if listing_style == ListingStyle.SHORT:
                    # ls helper by default readies us for a short listing.
                    listing_helper = LsHelper(
                        self.WildcardIterator,
                        self.logger,
                        all_versions=self.all_versions,
                        print_bucket_header_func=print_bucket_header,
                        should_recurse=self.recursion_requested,
                        list_subdir_contents=self.list_subdir_contents)
                elif listing_style == ListingStyle.LONG:
                    bucket_listing_fields = [
                        'name',
                        'size',
                        'timeCreated',
                        'updated',
                    ]
                    if self.all_versions:
                        bucket_listing_fields.extend([
                            'generation',
                            'metageneration',
                        ])
                    if self.include_etag:
                        bucket_listing_fields.append('etag')

                    listing_helper = LsHelper(
                        self.WildcardIterator,
                        self.logger,
                        print_object_func=self._PrintLongListing,
                        print_dir_func=_PrintPrefixLong,
                        print_bucket_header_func=print_bucket_header,
                        all_versions=self.all_versions,
                        should_recurse=self.recursion_requested,
                        fields=bucket_listing_fields,
                        list_subdir_contents=self.list_subdir_contents)

                elif listing_style == ListingStyle.LONG_LONG:
                    # List all fields
                    bucket_listing_fields = (UNENCRYPTED_FULL_LISTING_FIELDS +
                                             ENCRYPTED_FIELDS)
                    listing_helper = LsHelper(
                        self.WildcardIterator,
                        self.logger,
                        print_object_func=PrintFullInfoAboutObject,
                        print_dir_func=_PrintPrefixLong,
                        print_bucket_header_func=print_bucket_header,
                        all_versions=self.all_versions,
                        should_recurse=self.recursion_requested,
                        fields=bucket_listing_fields,
                        list_subdir_contents=self.list_subdir_contents)
                else:
                    raise CommandException('Unknown listing style: %s' %
                                           listing_style)

                exp_dirs, exp_objs, exp_bytes = (
                    listing_helper.ExpandUrlAndPrint(storage_url))
                if storage_url.IsObject() and exp_objs == 0 and exp_dirs == 0:
                    got_nomatch_errors = True
                total_bytes += exp_bytes
                total_objs += exp_objs

        if total_objs and listing_style != ListingStyle.SHORT:
            text_util.print_to_fd('TOTAL: %d objects, %d bytes (%s)' %
                                  (total_objs, total_bytes,
                                   MakeHumanReadable(float(total_bytes))))
        if got_nomatch_errors:
            raise CommandException('One or more URLs matched no objects.')
        if got_bucket_nomatch_errors:
            raise NotFoundException(
                'One or more bucket URLs matched no buckets.')

        return 0
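
The TOTAL line above formats the byte count with MakeHumanReadable. A simplified stand-in showing the kind of conversion involved (gsutil's rounding and suffix choices may differ):

def make_human_readable(num_bytes):
    # Simplified stand-in for gsutil's MakeHumanReadable.
    value = float(num_bytes)
    for suffix in ('B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB'):
        if value < 1024.0 or suffix == 'PiB':
            return '%.2f %s' % (value, suffix)
        value /= 1024.0


print('TOTAL: %d objects, %d bytes (%s)' %
      (3, 1536000, make_human_readable(1536000)))
# TOTAL: 3 objects, 1536000 bytes (1.46 MiB)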
Example #5
    def RunCommand(self):
        """Command entry point for the mb command."""
        bucket_policy_only = None
        location = None
        storage_class = None
        seconds = None
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-l':
                    location = a
                elif o == '-p':
                    # Project IDs are sent as header values when using gs and s3 XML APIs.
                    InsistAscii(
                        a, 'Invalid non-ASCII character found in project ID')
                    self.project_id = a
                elif o == '-c' or o == '-s':
                    storage_class = NormalizeStorageClass(a)
                elif o == '--retention':
                    seconds = RetentionInSeconds(a)
                elif o == '-b':
                    if self.gsutil_api.GetApiSelector(
                            'gs') != ApiSelector.JSON:
                        raise CommandException(
                            'The -b <on|off> option '
                            'can only be used with the JSON API')
                    InsistOnOrOff(
                        a, 'Only on and off values allowed for -b option')
                    bucket_policy_only = (a == 'on')

        bucket_metadata = apitools_messages.Bucket(location=location,
                                                   storageClass=storage_class)
        if bucket_policy_only:
            bucket_metadata.iamConfiguration = IamConfigurationValue()
            iam_config = bucket_metadata.iamConfiguration
            iam_config.bucketPolicyOnly = BucketPolicyOnlyValue()
            iam_config.bucketPolicyOnly.enabled = bucket_policy_only

        for bucket_url_str in self.args:
            bucket_url = StorageUrlFromString(bucket_url_str)
            if seconds is not None:
                if bucket_url.scheme != 'gs':
                    raise CommandException(
                        'Retention policy can only be specified for '
                        'GCS buckets.')
                retention_policy = (
                    apitools_messages.Bucket.RetentionPolicyValue(
                        retentionPeriod=seconds))
                bucket_metadata.retentionPolicy = retention_policy

            if not bucket_url.IsBucket():
                raise CommandException(
                    'The mb command requires a URL that specifies a '
                    'bucket.\n"%s" is not valid.' % bucket_url)
            if (not BUCKET_NAME_RE.match(bucket_url.bucket_name)
                    or TOO_LONG_DNS_NAME_COMP.search(bucket_url.bucket_name)):
                raise InvalidUrlError('Invalid bucket name in URL "%s"' %
                                      bucket_url.bucket_name)

            self.logger.info('Creating %s...', bucket_url)
            # Pass storage_class param only if this is a GCS bucket. (In S3 the
            # storage class is specified on the key object.)
            try:
                self.gsutil_api.CreateBucket(bucket_url.bucket_name,
                                             project_id=self.project_id,
                                             metadata=bucket_metadata,
                                             provider=bucket_url.scheme)
            except BadRequestException as e:
                if (e.status == 400
                        and e.reason == 'DotfulBucketNameNotUnderTld'
                        and bucket_url.scheme == 'gs'):
                    bucket_name = bucket_url.bucket_name
                    final_comp = bucket_name[bucket_name.rfind('.') + 1:]
                    raise CommandException('\n'.join(
                        textwrap.wrap(
                            'Buckets with "." in the name must be valid DNS names. The bucket'
                            ' you are attempting to create (%s) is not a valid DNS name,'
                            ' because the final component (%s) is not currently a valid part'
                            ' of the top-level DNS tree.' %
                            (bucket_name, final_comp))))
                else:
                    raise

        return 0
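
The --retention flag above is converted to a number of seconds via RetentionInSeconds before being placed in RetentionPolicyValue. A hypothetical, simplified parser illustrating the idea; the suffixes gsutil actually accepts are broader than this subset:

import re

# Hypothetical, simplified stand-in for RetentionInSeconds: accept a bare
# number of seconds, or a number with an 's' (seconds) or 'd' (days) suffix.
_RETENTION_RE = re.compile(r'^(\d+)([sd]?)$')


def retention_in_seconds(value):
    match = _RETENTION_RE.match(value.strip().lower())
    if not match:
        raise ValueError('Invalid retention period: %r' % value)
    number, unit = int(match.group(1)), match.group(2)
    return number * (86400 if unit == 'd' else 1)


print(retention_in_seconds('600'))  # 600
print(retention_in_seconds('30d'))  # 2592000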
Example #6
    def RunCommand(self):
        """Command entry point for the mb command."""
        autoclass = False
        bucket_policy_only = None
        kms_key = None
        location = None
        storage_class = None
        seconds = None
        public_access_prevention = None
        rpo = None
        json_only_flags_in_command = []
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '--autoclass':
                    autoclass = True
                    json_only_flags_in_command.append(o)
                elif o == '-k':
                    kms_key = a
                    ValidateCMEK(kms_key)
                    json_only_flags_in_command.append(o)
                elif o == '-l':
                    location = a
                elif o == '-p':
                    # Project IDs are sent as header values when using gs and s3 XML APIs.
                    InsistAscii(
                        a, 'Invalid non-ASCII character found in project ID')
                    self.project_id = a
                elif o == '-c' or o == '-s':
                    storage_class = NormalizeStorageClass(a)
                elif o == '--retention':
                    seconds = RetentionInSeconds(a)
                elif o == '--rpo':
                    rpo = a.strip()
                    if rpo not in VALID_RPO_VALUES:
                        raise CommandException(
                            'Invalid value for --rpo. Must be one of: {},'
                            ' provided: {}'.format(VALID_RPO_VALUES_STRING, a))
                    json_only_flags_in_command.append(o)
                elif o == '-b':
                    InsistOnOrOff(
                        a, 'Only on and off values allowed for -b option')
                    bucket_policy_only = (a == 'on')
                    json_only_flags_in_command.append(o)
                elif o == '--pap':
                    public_access_prevention = a
                    json_only_flags_in_command.append(o)

        bucket_metadata = apitools_messages.Bucket(location=location,
                                                   rpo=rpo,
                                                   storageClass=storage_class)
        if autoclass:
            bucket_metadata.autoclass = apitools_messages.Bucket.AutoclassValue(
                enabled=autoclass)
        if bucket_policy_only or public_access_prevention:
            bucket_metadata.iamConfiguration = IamConfigurationValue()
            iam_config = bucket_metadata.iamConfiguration
            if bucket_policy_only:
                iam_config.bucketPolicyOnly = BucketPolicyOnlyValue()
                iam_config.bucketPolicyOnly.enabled = bucket_policy_only
            if public_access_prevention:
                iam_config.publicAccessPrevention = public_access_prevention

        if kms_key:
            encryption = apitools_messages.Bucket.EncryptionValue()
            encryption.defaultKmsKeyName = kms_key
            bucket_metadata.encryption = encryption

        for bucket_url_str in self.args:
            bucket_url = StorageUrlFromString(bucket_url_str)
            if seconds is not None:
                if bucket_url.scheme != 'gs':
                    raise CommandException(
                        'Retention policy can only be specified for '
                        'GCS buckets.')
                retention_policy = (
                    apitools_messages.Bucket.RetentionPolicyValue(
                        retentionPeriod=seconds))
                bucket_metadata.retentionPolicy = retention_policy

            if json_only_flags_in_command and self.gsutil_api.GetApiSelector(
                    bucket_url.scheme) != ApiSelector.JSON:
                raise CommandException(
                    'The {} option(s) can only be used for GCS'
                    ' Buckets with the JSON API'.format(
                        ', '.join(json_only_flags_in_command)))

            if not bucket_url.IsBucket():
                raise CommandException(
                    'The mb command requires a URL that specifies a '
                    'bucket.\n"%s" is not valid.' % bucket_url)
            if (not BUCKET_NAME_RE.match(bucket_url.bucket_name)
                    or TOO_LONG_DNS_NAME_COMP.search(bucket_url.bucket_name)):
                raise InvalidUrlError('Invalid bucket name in URL "%s"' %
                                      bucket_url.bucket_name)

            self.logger.info('Creating %s...', bucket_url)
            # Pass storage_class param only if this is a GCS bucket. (In S3 the
            # storage class is specified on the key object.)
            try:
                self.gsutil_api.CreateBucket(bucket_url.bucket_name,
                                             project_id=self.project_id,
                                             metadata=bucket_metadata,
                                             provider=bucket_url.scheme)
            except AccessDeniedException as e:
                message = e.reason
                if 'key' in message:
                    # This will print the error reason and append the following as a
                    # suggested next step:
                    #
                    # To authorize, run:
                    #   gsutil kms authorize \
                    #     -k <kms_key> \
                    #     -p <project_id>
                    message += ' To authorize, run:\n  gsutil kms authorize'
                    message += ' \\\n    -k %s' % kms_key
                    if (self.project_id):
                        message += ' \\\n    -p %s' % self.project_id
                    raise CommandException(message)
                else:
                    raise

            except BadRequestException as e:
                if (e.status == 400
                        and e.reason == 'DotfulBucketNameNotUnderTld'
                        and bucket_url.scheme == 'gs'):
                    bucket_name = bucket_url.bucket_name
                    final_comp = bucket_name[bucket_name.rfind('.') + 1:]
                    raise CommandException('\n'.join(
                        textwrap.wrap(
                            'Buckets with "." in the name must be valid DNS names. The bucket'
                            ' you are attempting to create (%s) is not a valid DNS name,'
                            ' because the final component (%s) is not currently a valid part'
                            ' of the top-level DNS tree.' %
                            (bucket_name, final_comp))))
                else:
                    raise

        return 0
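
The -k flag's value above is checked with ValidateCMEK before being set as defaultKmsKeyName. A simplified stand-in check, assuming the usual Cloud KMS resource-name shape; gsutil's actual validation may be stricter or more permissive:

import re

# Simplified stand-in for gsutil's ValidateCMEK: a Cloud KMS key resource name
# generally has the form projects/P/locations/L/keyRings/R/cryptoKeys/K.
_CMEK_RE = re.compile(
    r'^projects/[^/]+/locations/[^/]+/keyRings/[^/]+/cryptoKeys/[^/]+$')


def validate_cmek(key_name):
    if not _CMEK_RE.match(key_name):
        raise ValueError('Invalid KMS key name: %r' % key_name)
    return key_name


validate_cmek('projects/my-proj/locations/us/keyRings/ring/cryptoKeys/key')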