Example #1
 def __init__(self, url_string):
     self.scheme = None
     self.bucket_name = None
     self.object_name = None
     self.generation = None
     self.delim = '/'
     provider_match = PROVIDER_REGEX.match(url_string)
     bucket_match = BUCKET_REGEX.match(url_string)
     if provider_match:
         self.scheme = provider_match.group('provider')
     elif bucket_match:
         self.scheme = bucket_match.group('provider')
         self.bucket_name = bucket_match.group('bucket')
     else:
         object_match = OBJECT_REGEX.match(url_string)
         if object_match:
             self.scheme = object_match.group('provider')
             self.bucket_name = object_match.group('bucket')
             self.object_name = object_match.group('object')
             if self.scheme == 'gs':
                 generation_match = GS_GENERATION_REGEX.match(
                     self.object_name)
                 if generation_match:
                     self.object_name = generation_match.group('object')
                     self.generation = generation_match.group('generation')
             elif self.scheme == 's3':
                 version_match = S3_VERSION_REGEX.match(self.object_name)
                 if version_match:
                     self.object_name = version_match.group('object')
                     self.generation = version_match.group('version_id')
         else:
             raise InvalidUrlError(
                 'CloudUrl: URL string %s did not match URL regex' %
                 url_string)
Example #2
    def __init__(self, url_string):
        self.scheme = None
        self.delim = '/'
        self.bucket_name = None
        self.object_name = None
        self.generation = None
        provider_match = PROVIDER_REGEX.match(url_string)
        bucket_match = BUCKET_REGEX.match(url_string)
        if provider_match:
            self.scheme = provider_match.group('provider')
        elif bucket_match:
            self.scheme = bucket_match.group('provider')
            self.bucket_name = bucket_match.group('bucket')
        else:
            object_match = OBJECT_REGEX.match(url_string)
            if object_match:
                self.scheme = object_match.group('provider')
                self.bucket_name = object_match.group('bucket')
                self.object_name = object_match.group('object')
                if self.object_name == '.' or self.object_name == '..':
                    raise InvalidUrlError(
                        '%s is an invalid root-level object name' %
                        self.object_name)
                if self.scheme == 'gs':
                    generation_match = GS_GENERATION_REGEX.match(
                        self.object_name)
                    if generation_match:
                        self.object_name = generation_match.group('object')
                        self.generation = generation_match.group('generation')
                elif self.scheme == 's3':
                    version_match = S3_VERSION_REGEX.match(self.object_name)
                    if version_match:
                        self.object_name = version_match.group('object')
                        self.generation = version_match.group('version_id')
            else:
                raise InvalidUrlError(
                    'CloudUrl: URL string %s did not match URL regex' %
                    url_string)

        if url_string[(len(self.scheme) + len('://')):].startswith(self.delim):
            raise InvalidUrlError(
                'Cloud URL scheme should be followed by colon and two slashes: "://".'
                ' Found: "{}"'.format(url_string))

        self._WarnIfUnsupportedDoubleWildcard()
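Note: both constructor variants above depend on module-level regexes (PROVIDER_REGEX, BUCKET_REGEX, OBJECT_REGEX, GS_GENERATION_REGEX, S3_VERSION_REGEX) that this listing does not show. The following is a minimal sketch of compatible patterns, written here for illustration rather than copied from gsutil's storage_url module:

import re

# A bare provider, e.g. "gs://".
PROVIDER_REGEX = re.compile(r'(?P<provider>[^:]*)://$')
# Provider plus bucket, e.g. "gs://my-bucket" or "gs://my-bucket/".
BUCKET_REGEX = re.compile(r'(?P<provider>[^:]*)://(?P<bucket>[^/]*)/?$')
# Provider, bucket and object, e.g. "gs://my-bucket/path/to/obj".
OBJECT_REGEX = re.compile(
    r'(?P<provider>[^:]*)://(?P<bucket>[^/]*)/(?P<object>.*)')
# Trailing "#<generation>" (GCS) or "#<version_id>" (S3) on an object name.
GS_GENERATION_REGEX = re.compile(r'(?P<object>.+)#(?P<generation>[0-9]+)$')
S3_VERSION_REGEX = re.compile(r'(?P<object>.+)#(?P<version_id>.+)$')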
Example #3
File: mb.py  Project: barber223/AudioApp
    def RunCommand(self):
        """Command entry point for the mb command."""
        location = None
        storage_class = None
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-l':
                    location = a
                elif o == '-p':
                    # Project IDs are sent as header values when using gs and s3 XML APIs.
                    InsistAscii(
                        a, 'Invalid non-ASCII character found in project ID')
                    self.project_id = a
                elif o == '-c' or o == '-s':
                    storage_class = NormalizeStorageClass(a)

        bucket_metadata = apitools_messages.Bucket(location=location,
                                                   storageClass=storage_class)

        for bucket_url_str in self.args:
            bucket_url = StorageUrlFromString(bucket_url_str)
            if not bucket_url.IsBucket():
                raise CommandException(
                    'The mb command requires a URL that specifies a '
                    'bucket.\n"%s" is not valid.' % bucket_url)
            if (not BUCKET_NAME_RE.match(bucket_url.bucket_name)
                    or TOO_LONG_DNS_NAME_COMP.search(bucket_url.bucket_name)):
                raise InvalidUrlError('Invalid bucket name in URL "%s"' %
                                      bucket_url.bucket_name)

            self.logger.info('Creating %s...', bucket_url)
            # Pass storage_class param only if this is a GCS bucket. (In S3 the
            # storage class is specified on the key object.)
            try:
                self.gsutil_api.CreateBucket(bucket_url.bucket_name,
                                             project_id=self.project_id,
                                             metadata=bucket_metadata,
                                             provider=bucket_url.scheme)
            except BadRequestException as e:
                if (e.status == 400
                        and e.reason == 'DotfulBucketNameNotUnderTld'
                        and bucket_url.scheme == 'gs'):
                    bucket_name = bucket_url.bucket_name
                    final_comp = bucket_name[bucket_name.rfind('.') + 1:]
                    raise CommandException('\n'.join(
                        textwrap.wrap(
                            'Buckets with "." in the name must be valid DNS names. The bucket'
                            ' you are attempting to create (%s) is not a valid DNS name,'
                            ' because the final component (%s) is not currently a valid part'
                            ' of the top-level DNS tree.' %
                            (bucket_name, final_comp))))
                else:
                    raise

        return 0
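BUCKET_NAME_RE and TOO_LONG_DNS_NAME_COMP are constants defined elsewhere in mb.py and are not part of this snippet. A rough, illustrative approximation of the checks they encode (not gsutil's exact patterns) could be:

import re

# Illustrative only: a name made of letters, digits, dots, dashes and
# underscores that starts and ends with a letter or digit...
BUCKET_NAME_RE = re.compile(r'^[a-z0-9][a-z0-9._-]*[a-z0-9]$', re.IGNORECASE)
# ...and with no single DNS component of 64 characters or more.
TOO_LONG_DNS_NAME_COMP = re.compile(r'[-_a-z0-9]{64}')

def looks_like_valid_bucket_name(name):
    """Mirrors the validation used in RunCommand above."""
    return (bool(BUCKET_NAME_RE.match(name))
            and not TOO_LONG_DNS_NAME_COMP.search(name))

print(looks_like_valid_bucket_name('my-bucket'))            # True
print(looks_like_valid_bucket_name('x' * 64 + '.example'))  # False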
Example #4
def StorageUrlFromString(url_str):
    """Static factory function for creating a StorageUrl from a string."""

    scheme = _GetSchemeFromUrlString(url_str)

    if scheme not in ('file', 's3', 'gs'):
        raise InvalidUrlError('Unrecognized scheme "%s"' % scheme)
    if scheme == 'file':
        path = _GetPathFromUrlString(url_str)
        is_stream = (path == '-')
        return _FileUrl(url_str, is_stream=is_stream)
    return _CloudUrl(url_str)
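As a usage sketch (the import paths below are assumptions based on gsutil's layout; adjust them to the actual module names in your tree):

# Hypothetical imports for illustration; the real locations may differ.
from gslib.storage_url import StorageUrlFromString
from gslib.exception import InvalidUrlError

url = StorageUrlFromString('gs://my-bucket/path/to/object')
print(url.scheme, url.bucket_name, url.object_name)  # gs my-bucket path/to/object

local = StorageUrlFromString('/tmp/data.txt')  # no "://" -> treated as a file URL
stream = StorageUrlFromString('-')             # "-" -> stdin/stdout stream

try:
    StorageUrlFromString('ftp://host/file')
except InvalidUrlError as e:
    print(e)  # Unrecognized scheme "ftp"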
Example #5
 def __init__(self, url_string):
     self.scheme = None
     self.bucket_name = None
     self.object_name = None
     self.generation = None
     self.delim = '/'
     provider_match = PROVIDER_REGEX.match(url_string)
     bucket_match = BUCKET_REGEX.match(url_string)
     if provider_match:
         self.scheme = provider_match.group('provider')
     elif bucket_match:
         self.scheme = bucket_match.group('provider')
         self.bucket_name = bucket_match.group('bucket')
         if (not ContainsWildcard(self.bucket_name)
                 and (not BUCKET_NAME_RE.match(self.bucket_name)
                      or TOO_LONG_DNS_NAME_COMP.search(self.bucket_name))):
             raise InvalidUrlError('Invalid bucket name in URL "%s"' %
                                   url_string)
     else:
         object_match = OBJECT_REGEX.match(url_string)
         if object_match:
             self.scheme = object_match.group('provider')
             self.bucket_name = object_match.group('bucket')
             self.object_name = object_match.group('object')
             if self.scheme == 'gs':
                 generation_match = GS_GENERATION_REGEX.match(
                     self.object_name)
                 if generation_match:
                     self.object_name = generation_match.group('object')
                     self.generation = generation_match.group('generation')
             elif self.scheme == 's3':
                 version_match = S3_VERSION_REGEX.match(self.object_name)
                 if version_match:
                     self.object_name = version_match.group('object')
                     self.generation = version_match.group('version_id')
         else:
             raise InvalidUrlError(
                 'CloudUrl: URL string %s did not match URL regex' %
                 url_string)
Example #6
def StorageUrlFromString(url_str):
    """Static factory function for creating a StorageUrl from a string."""

    scheme = GetSchemeFromUrlString(url_str)

    if not IsKnownUrlScheme(scheme):
        raise InvalidUrlError('Unrecognized scheme "%s"' % scheme)
    if scheme == 'file':
        path = _GetPathFromUrlString(url_str)
        is_stream = (path == '-')
        is_fifo = False
        try:
            is_fifo = stat.S_ISFIFO(os.stat(path).st_mode)
        except OSError:
            pass
        return _FileUrl(url_str, is_stream=is_stream, is_fifo=is_fifo)
    return _CloudUrl(url_str)
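This variant additionally flags named pipes via os.stat. A small standalone illustration of the same S_ISFIFO check (POSIX-only):

import os
import stat
import tempfile

# Create a named pipe and run the same test the factory uses.
fifo_path = os.path.join(tempfile.mkdtemp(), 'demo_fifo')
os.mkfifo(fifo_path)  # not available on Windows
print(stat.S_ISFIFO(os.stat(fifo_path).st_mode))               # True
print(stat.S_ISFIFO(os.stat(tempfile.gettempdir()).st_mode))   # False (a directory)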
Example #7
def StorageUrlFromString(url_str):
    """Static factory function for creating a StorageUrl from a string."""

    end_scheme_idx = url_str.find('://')
    if end_scheme_idx == -1:
        # File is the default scheme.
        scheme = 'file'
        path = url_str
    else:
        scheme = url_str[0:end_scheme_idx].lower()
        path = url_str[end_scheme_idx + 3:]

    if scheme not in ('file', 's3', 'gs'):
        raise InvalidUrlError('Unrecognized scheme "%s"' % scheme)
    if scheme == 'file':
        is_stream = (path == '-')
        return _FileUrl(url_str, is_stream=is_stream)
    return _CloudUrl(url_str)
Example #8
    def _SetUploadUrl(self, url):
        """Saves URL and resets upload state.

        Called when we start a new resumable upload or get a new tracker
        URL for the upload.

        Args:
          url: URL string for the upload.

        Raises InvalidUrlError if URL is syntactically invalid.
        """
        parse_result = urllib.parse.urlparse(url)
        if (parse_result.scheme.lower() not in ['http', 'https']
                or not parse_result.netloc):
            raise InvalidUrlError('Invalid upload URL (%s)' % url)
        self.upload_url = url
        self.upload_url_host = parse_result.netloc
        self.upload_url_path = '%s?%s' % (parse_result.path,
                                          parse_result.query)
        self.service_has_bytes = 0
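The validation in _SetUploadUrl uses only the standard library. A minimal standalone sketch of the same check:

import urllib.parse

def is_valid_upload_url(url):
    """Accepts only absolute http(s) URLs with a host, as _SetUploadUrl does."""
    parts = urllib.parse.urlparse(url)
    return parts.scheme.lower() in ('http', 'https') and bool(parts.netloc)

print(is_valid_upload_url('https://storage.googleapis.com/upload?upload_id=abc'))  # True
print(is_valid_upload_url('storage.googleapis.com/upload'))  # False: no scheme or host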
Example #9
    def RunCommand(self):
        """Command entry point for the mb command."""
        bucket_policy_only = None
        location = None
        storage_class = None
        seconds = None
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-l':
                    location = a
                elif o == '-p':
                    # Project IDs are sent as header values when using gs and s3 XML APIs.
                    InsistAscii(
                        a, 'Invalid non-ASCII character found in project ID')
                    self.project_id = a
                elif o == '-c' or o == '-s':
                    storage_class = NormalizeStorageClass(a)
                elif o == '--retention':
                    seconds = RetentionInSeconds(a)
                elif o == '-b':
                    if self.gsutil_api.GetApiSelector(
                            'gs') != ApiSelector.JSON:
                        raise CommandException(
                            'The -b <on|off> option '
                            'can only be used with the JSON API')
                    InsistOnOrOff(
                        a, 'Only on and off values allowed for -b option')
                    bucket_policy_only = (a == 'on')

        bucket_metadata = apitools_messages.Bucket(location=location,
                                                   storageClass=storage_class)
        if bucket_policy_only:
            bucket_metadata.iamConfiguration = IamConfigurationValue()
            iam_config = bucket_metadata.iamConfiguration
            iam_config.bucketPolicyOnly = BucketPolicyOnlyValue()
            iam_config.bucketPolicyOnly.enabled = bucket_policy_only

        for bucket_url_str in self.args:
            bucket_url = StorageUrlFromString(bucket_url_str)
            if seconds is not None:
                if bucket_url.scheme != 'gs':
                    raise CommandException(
                        'Retention policy can only be specified for '
                        'GCS buckets.')
                retention_policy = (
                    apitools_messages.Bucket.RetentionPolicyValue(
                        retentionPeriod=seconds))
                bucket_metadata.retentionPolicy = retention_policy

            if not bucket_url.IsBucket():
                raise CommandException(
                    'The mb command requires a URL that specifies a '
                    'bucket.\n"%s" is not valid.' % bucket_url)
            if (not BUCKET_NAME_RE.match(bucket_url.bucket_name)
                    or TOO_LONG_DNS_NAME_COMP.search(bucket_url.bucket_name)):
                raise InvalidUrlError('Invalid bucket name in URL "%s"' %
                                      bucket_url.bucket_name)

            self.logger.info('Creating %s...', bucket_url)
            # Pass storage_class param only if this is a GCS bucket. (In S3 the
            # storage class is specified on the key object.)
            try:
                self.gsutil_api.CreateBucket(bucket_url.bucket_name,
                                             project_id=self.project_id,
                                             metadata=bucket_metadata,
                                             provider=bucket_url.scheme)
            except BadRequestException as e:
                if (e.status == 400
                        and e.reason == 'DotfulBucketNameNotUnderTld'
                        and bucket_url.scheme == 'gs'):
                    bucket_name = bucket_url.bucket_name
                    final_comp = bucket_name[bucket_name.rfind('.') + 1:]
                    raise CommandException('\n'.join(
                        textwrap.wrap(
                            'Buckets with "." in the name must be valid DNS names. The bucket'
                            ' you are attempting to create (%s) is not a valid DNS name,'
                            ' because the final component (%s) is not currently a valid part'
                            ' of the top-level DNS tree.' %
                            (bucket_name, final_comp))))
                else:
                    raise

        return 0
Example #10
    def RunCommand(self):
        """Command entry point for the mb command."""
        autoclass = False
        bucket_policy_only = None
        kms_key = None
        location = None
        storage_class = None
        seconds = None
        public_access_prevention = None
        rpo = None
        json_only_flags_in_command = []
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '--autoclass':
                    autoclass = True
                    json_only_flags_in_command.append(o)
                elif o == '-k':
                    kms_key = a
                    ValidateCMEK(kms_key)
                    json_only_flags_in_command.append(o)
                elif o == '-l':
                    location = a
                elif o == '-p':
                    # Project IDs are sent as header values when using gs and s3 XML APIs.
                    InsistAscii(
                        a, 'Invalid non-ASCII character found in project ID')
                    self.project_id = a
                elif o == '-c' or o == '-s':
                    storage_class = NormalizeStorageClass(a)
                elif o == '--retention':
                    seconds = RetentionInSeconds(a)
                elif o == '--rpo':
                    rpo = a.strip()
                    if rpo not in VALID_RPO_VALUES:
                        raise CommandException(
                            'Invalid value for --rpo. Must be one of: {},'
                            ' provided: {}'.format(VALID_RPO_VALUES_STRING, a))
                    json_only_flags_in_command.append(o)
                elif o == '-b':
                    InsistOnOrOff(
                        a, 'Only on and off values allowed for -b option')
                    bucket_policy_only = (a == 'on')
                    json_only_flags_in_command.append(o)
                elif o == '--pap':
                    public_access_prevention = a
                    json_only_flags_in_command.append(o)

        bucket_metadata = apitools_messages.Bucket(location=location,
                                                   rpo=rpo,
                                                   storageClass=storage_class)
        if autoclass:
            bucket_metadata.autoclass = apitools_messages.Bucket.AutoclassValue(
                enabled=autoclass)
        if bucket_policy_only or public_access_prevention:
            bucket_metadata.iamConfiguration = IamConfigurationValue()
            iam_config = bucket_metadata.iamConfiguration
            if bucket_policy_only:
                iam_config.bucketPolicyOnly = BucketPolicyOnlyValue()
                iam_config.bucketPolicyOnly.enabled = bucket_policy_only
            if public_access_prevention:
                iam_config.publicAccessPrevention = public_access_prevention

        if kms_key:
            encryption = apitools_messages.Bucket.EncryptionValue()
            encryption.defaultKmsKeyName = kms_key
            bucket_metadata.encryption = encryption

        for bucket_url_str in self.args:
            bucket_url = StorageUrlFromString(bucket_url_str)
            if seconds is not None:
                if bucket_url.scheme != 'gs':
                    raise CommandException(
                        'Retention policy can only be specified for '
                        'GCS buckets.')
                retention_policy = (
                    apitools_messages.Bucket.RetentionPolicyValue(
                        retentionPeriod=seconds))
                bucket_metadata.retentionPolicy = retention_policy

            if json_only_flags_in_command and self.gsutil_api.GetApiSelector(
                    bucket_url.scheme) != ApiSelector.JSON:
                raise CommandException(
                    'The {} option(s) can only be used for GCS'
                    ' Buckets with the JSON API'.format(
                        ', '.join(json_only_flags_in_command)))

            if not bucket_url.IsBucket():
                raise CommandException(
                    'The mb command requires a URL that specifies a '
                    'bucket.\n"%s" is not valid.' % bucket_url)
            if (not BUCKET_NAME_RE.match(bucket_url.bucket_name)
                    or TOO_LONG_DNS_NAME_COMP.search(bucket_url.bucket_name)):
                raise InvalidUrlError('Invalid bucket name in URL "%s"' %
                                      bucket_url.bucket_name)

            self.logger.info('Creating %s...', bucket_url)
            # Pass storage_class param only if this is a GCS bucket. (In S3 the
            # storage class is specified on the key object.)
            try:
                self.gsutil_api.CreateBucket(bucket_url.bucket_name,
                                             project_id=self.project_id,
                                             metadata=bucket_metadata,
                                             provider=bucket_url.scheme)
            except AccessDeniedException as e:
                message = e.reason
                if 'key' in message:
                    # This will print the error reason and append the following as a
                    # suggested next step:
                    #
                    # To authorize, run:
                    #   gsutil kms authorize \
                    #     -k <kms_key> \
                    #     -p <project_id>
                    message += ' To authorize, run:\n  gsutil kms authorize'
                    message += ' \\\n    -k %s' % kms_key
                    if self.project_id:
                        message += ' \\\n    -p %s' % self.project_id
                    raise CommandException(message)
                else:
                    raise

            except BadRequestException as e:
                if (e.status == 400
                        and e.reason == 'DotfulBucketNameNotUnderTld'
                        and bucket_url.scheme == 'gs'):
                    bucket_name = bucket_url.bucket_name
                    final_comp = bucket_name[bucket_name.rfind('.') + 1:]
                    raise CommandException('\n'.join(
                        textwrap.wrap(
                            'Buckets with "." in the name must be valid DNS names. The bucket'
                            ' you are attempting to create (%s) is not a valid DNS name,'
                            ' because the final component (%s) is not currently a valid part'
                            ' of the top-level DNS tree.' %
                            (bucket_name, final_comp))))
                else:
                    raise

        return 0
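ValidateCMEK comes from gsutil's KMS helpers and is not shown in this listing. As a hedged sketch, it presumably enforces the full Cloud KMS resource-name format, roughly:

import re

# Cloud KMS keys are addressed as:
#   projects/<project>/locations/<location>/keyRings/<ring>/cryptoKeys/<key>
# This pattern is an illustrative approximation, not gsutil's ValidateCMEK.
CMEK_NAME_RE = re.compile(
    r'^projects/[^/]+/locations/[^/]+/keyRings/[^/]+/cryptoKeys/[^/]+$')

def validate_cmek_sketch(key_name):
    if not CMEK_NAME_RE.match(key_name):
        raise ValueError('Invalid KMS key resource name: %s' % key_name)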