  def test_valid_header_coding(self):
    headers = {
        'content-type': 'text/plain',
        'x-goog-meta-foo': 'bãr',
    }
    HandleHeaderCoding(headers)
    # Custom metadata header values should be decoded to unicode; others should
    # not be decoded, but should contain only ASCII characters.
    self.assertIs(type(headers['x-goog-meta-foo']), unicode)
    InsistAscii(
        headers['content-type'],
        'Value of non-custom-metadata header contained non-ASCII characters')
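For reference, here is a minimal sketch of the two helpers this test exercises. The bodies below are assumptions for illustration, not gsutil's actual implementations: InsistAscii is assumed to raise CommandException on any non-ASCII character, and HandleHeaderCoding is assumed to decode x-goog-meta-* header values to unicode in place while requiring all other header values to stay ASCII.

from gslib.exception import CommandException


def InsistAscii(string, message):
  # Assumed behavior: reject any character outside the 7-bit ASCII range.
  if not all(ord(c) < 128 for c in string):
    raise CommandException(message)


def HandleHeaderCoding(headers):
  # Assumed behavior: custom metadata (x-goog-meta-*) values may carry UTF-8
  # and are decoded to unicode in place; all other header values must be
  # plain ASCII.
  for name, value in headers.items():
    if name.lower().startswith('x-goog-meta-'):
      if isinstance(value, str):
        headers[name] = value.decode('utf-8')
    else:
      InsistAscii(value, 'Value of non-custom-metadata header contained '
                  'non-ASCII characters')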
Example #2
  def RunCommand(self):
    """Command entry point for the mb command."""
    location = None
    storage_class = None
    if self.sub_opts:
      for o, a in self.sub_opts:
        if o == '-l':
          location = a
        elif o == '-p':
          # Project IDs are sent as header values when using gs and s3 XML APIs.
          InsistAscii(a, 'Invalid non-ASCII character found in project ID')
          self.project_id = a
        elif o == '-c' or o == '-s':
          storage_class = NormalizeStorageClass(a)

    bucket_metadata = apitools_messages.Bucket(location=location,
                                               storageClass=storage_class)

    for bucket_url_str in self.args:
      bucket_url = StorageUrlFromString(bucket_url_str)
      if not bucket_url.IsBucket():
        raise CommandException('The mb command requires a URL that specifies a '
                               'bucket.\n"%s" is not valid.' % bucket_url)
      if (not BUCKET_NAME_RE.match(bucket_url.bucket_name) or
          TOO_LONG_DNS_NAME_COMP.search(bucket_url.bucket_name)):
        raise InvalidUrlError(
            'Invalid bucket name in URL "%s"' % bucket_url.bucket_name)

      self.logger.info('Creating %s...', bucket_url)
      # Pass storage_class param only if this is a GCS bucket. (In S3 the
      # storage class is specified on the key object.)
      try:
        self.gsutil_api.CreateBucket(
            bucket_url.bucket_name, project_id=self.project_id,
            metadata=bucket_metadata, provider=bucket_url.scheme)
      except BadRequestException as e:
        if (e.status == 400 and e.reason == 'DotfulBucketNameNotUnderTld' and
            bucket_url.scheme == 'gs'):
          bucket_name = bucket_url.bucket_name
          final_comp = bucket_name[bucket_name.rfind('.')+1:]
          raise CommandException('\n'.join(textwrap.wrap(
              'Buckets with "." in the name must be valid DNS names. The bucket'
              ' you are attempting to create (%s) is not a valid DNS name,'
              ' because the final component (%s) is not currently a valid part'
              ' of the top-level DNS tree.' % (bucket_name, final_comp))))
        else:
          raise

    return 0
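The bucket-name checks above rely on two module-level regexes that are not shown in this listing. The definitions below are a sketch consistent with GCS bucket-naming rules (lowercase letters, digits, dots, dashes and underscores, 3-255 characters overall, and no dot-separated component longer than 63 characters); the exact patterns in gsutil may differ.

import re

# Assumed pattern: overall bucket-name shape and length.
BUCKET_NAME_RE = re.compile(r'^[a-z0-9][a-z0-9\._-]{1,253}[a-z0-9]$')
# Assumed pattern: flags any dot-separated component of 64+ characters,
# which would not be a valid DNS label.
TOO_LONG_DNS_NAME_COMP = re.compile(r'[-_a-z0-9]{64,}')

assert BUCKET_NAME_RE.match('my-bucket')
assert not BUCKET_NAME_RE.match('My_Bucket')  # uppercase is rejected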
Example #3
  def RunCommand(self):
    """Command entry point for the ls command."""
    got_nomatch_errors = False
    got_bucket_nomatch_errors = False
    listing_style = ListingStyle.SHORT
    get_bucket_info = False
    self.recursion_requested = False
    self.all_versions = False
    self.include_etag = False
    self.human_readable = False
    self.list_subdir_contents = True
    if self.sub_opts:
      for o, a in self.sub_opts:
        if o == '-a':
          self.all_versions = True
        elif o == '-e':
          self.include_etag = True
        elif o == '-b':
          get_bucket_info = True
        elif o == '-h':
          self.human_readable = True
        elif o == '-l':
          listing_style = ListingStyle.LONG
        elif o == '-L':
          listing_style = ListingStyle.LONG_LONG
        elif o == '-p':
          # Project IDs are sent as header values when using gs and s3 XML APIs.
          InsistAscii(a, 'Invalid non-ASCII character found in project ID')
          self.project_id = a
        elif o == '-r' or o == '-R':
          self.recursion_requested = True
        elif o == '-d':
          self.list_subdir_contents = False

    if not self.args:
      # default to listing all gs buckets
      self.args = ['gs://']

    total_objs = 0
    total_bytes = 0

    def MaybePrintBucketHeader(blr):
      if len(self.args) > 1:
        print '%s:' % blr.url_string.encode(UTF8)
    print_bucket_header = MaybePrintBucketHeader

    for url_str in self.args:
      storage_url = StorageUrlFromString(url_str)
      if storage_url.IsFileUrl():
        raise CommandException('Only cloud URLs are supported for %s'
                               % self.command_name)
      bucket_fields = None
      if (listing_style == ListingStyle.SHORT or
          listing_style == ListingStyle.LONG):
        bucket_fields = ['id']
      elif listing_style == ListingStyle.LONG_LONG:
        bucket_fields = ['acl',
                         'cors',
                         'defaultObjectAcl',
                         'labels',
                         'location',
                         'logging',
                         'lifecycle',
                         'metageneration',
                         'storageClass',
                         'timeCreated',
                         'updated',
                         'versioning',
                         'website']
      if storage_url.IsProvider():
        # Provider URL: use bucket wildcard to list buckets.
        for blr in self.WildcardIterator(
            '%s://*' % storage_url.scheme).IterBuckets(
                bucket_fields=bucket_fields):
          self._PrintBucketInfo(blr, listing_style)
      elif storage_url.IsBucket() and get_bucket_info:
        # ls -b bucket listing request: List info about bucket(s).
        total_buckets = 0
        for blr in self.WildcardIterator(url_str).IterBuckets(
            bucket_fields=bucket_fields):
          if not ContainsWildcard(url_str) and not blr.root_object:
            # Iterator does not make an HTTP call for non-wildcarded
            # listings with fields=='id'. Ensure the bucket exists by calling
            # GetBucket.
            self.gsutil_api.GetBucket(
                blr.storage_url.bucket_name,
                fields=['id'], provider=storage_url.scheme)
          self._PrintBucketInfo(blr, listing_style)
          total_buckets += 1
        if not ContainsWildcard(url_str) and not total_buckets:
          got_bucket_nomatch_errors = True
      else:
        # URL names a bucket, object, or object subdir ->
        # list matching object(s) / subdirs.
        def _PrintPrefixLong(blr):
          print '%-33s%s' % ('', blr.url_string.encode(UTF8))

        if listing_style == ListingStyle.SHORT:
          # ls helper by default readies us for a short listing.
          ls_helper = LsHelper(self.WildcardIterator, self.logger,
                               all_versions=self.all_versions,
                               print_bucket_header_func=print_bucket_header,
                               should_recurse=self.recursion_requested,
                               list_subdir_contents=self.list_subdir_contents)
        elif listing_style == ListingStyle.LONG:
          bucket_listing_fields = ['name', 'timeCreated', 'updated', 'size']
          if self.all_versions:
            bucket_listing_fields.extend(['generation', 'metageneration'])
          if self.include_etag:
            bucket_listing_fields.append('etag')

          ls_helper = LsHelper(self.WildcardIterator, self.logger,
                               print_object_func=self._PrintLongListing,
                               print_dir_func=_PrintPrefixLong,
                               print_bucket_header_func=print_bucket_header,
                               all_versions=self.all_versions,
                               should_recurse=self.recursion_requested,
                               fields=bucket_listing_fields,
                               list_subdir_contents=self.list_subdir_contents)

        elif listing_style == ListingStyle.LONG_LONG:
          # List all fields
          bucket_listing_fields = (UNENCRYPTED_FULL_LISTING_FIELDS +
                                   ENCRYPTED_FIELDS)
          ls_helper = LsHelper(self.WildcardIterator, self.logger,
                               print_object_func=PrintFullInfoAboutObject,
                               print_dir_func=_PrintPrefixLong,
                               print_bucket_header_func=print_bucket_header,
                               all_versions=self.all_versions,
                               should_recurse=self.recursion_requested,
                               fields=bucket_listing_fields,
                               list_subdir_contents=self.list_subdir_contents)
        else:
          raise CommandException('Unknown listing style: %s' % listing_style)

        exp_dirs, exp_objs, exp_bytes = ls_helper.ExpandUrlAndPrint(storage_url)
        if storage_url.IsObject() and exp_objs == 0 and exp_dirs == 0:
          got_nomatch_errors = True
        total_bytes += exp_bytes
        total_objs += exp_objs

    if total_objs and listing_style != ListingStyle.SHORT:
      print ('TOTAL: %d objects, %d bytes (%s)' %
             (total_objs, total_bytes, MakeHumanReadable(float(total_bytes))))
    if got_nomatch_errors:
      raise CommandException('One or more URLs matched no objects.')
    if got_bucket_nomatch_errors:
      raise NotFoundException('One or more bucket URLs matched no buckets.')

    return 0
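Two helpers referenced by this command are worth sketching, since they do not appear in the listing: ListingStyle supplies the constants used to pick an output format, and MakeHumanReadable scales a byte count for the TOTAL summary line. These are illustrative sketches based on the behavior implied above, not gsutil's source.

class ListingStyle(object):
  # Assumed to be simple named constants selecting the output format.
  SHORT = 'SHORT'          # default: object names only
  LONG = 'LONG'            # ls -l: size, date, name
  LONG_LONG = 'LONG_LONG'  # ls -L: full per-object metadata


def MakeHumanReadable(num):
  """Sketch: scales a byte count into a string such as '1.13 GiB'."""
  suffixes = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB']
  i = 0
  num = float(num)
  while num >= 1024.0 and i < len(suffixes) - 1:
    num /= 1024.0
    i += 1
  return '%.2f %s' % (num, suffixes[i])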