Example #1
  def _RecurseExpandUrlAndPrint(self, url_str, print_initial_newline=True):
    """Iterates over the given URL string and calls print functions.

    Args:
      url_str: String describing StorageUrl to iterate over.
               Must be of depth one or higher.
      print_initial_newline: If true, print a newline before recursively
                             expanded prefixes.

    Returns:
      (num_dirs, num_objects, num_bytes) total number of directories,
      objects, and bytes iterated.
    """
    num_objects = 0
    num_dirs = 0
    num_bytes = 0
    for blr in self._iterator_func(
        '%s' % url_str, all_versions=self.all_versions).IterAll(
            expand_top_level_buckets=True,
            bucket_listing_fields=self.bucket_listing_fields):
      if self._MatchesExcludedPattern(blr):
        continue

      if blr.IsObject():
        nd = 0
        no, nb = self._print_object_func(blr)
      elif blr.IsPrefix():
        if self.should_recurse:
          if print_initial_newline:
            self._print_newline_func()
          else:
            print_initial_newline = True
          self._print_dir_header_func(blr)
          expansion_url_str = StorageUrlFromString(
              blr.url_string).CreatePrefixUrl(wildcard_suffix='*')

          nd, no, nb = self._RecurseExpandUrlAndPrint(expansion_url_str)
          self._print_dir_summary_func(nb, blr)
        else:
          nd, no, nb = 1, 0, 0
          self._print_dir_func(blr)
      else:
        # We handle all buckets at the top level, so this should never happen.
        raise CommandException(
            'Sub-level iterator returned a bucketListingRef of type Bucket')
      num_dirs += nd
      num_objects += no
      num_bytes += nb

    return num_dirs, num_objects, num_bytes
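
The triple returned here is accumulated bottom-up: each recursive call reports its own totals and the caller adds them in. A minimal filesystem analogue of that accumulation pattern, assuming only the standard library (none of gsutil's iterator machinery), looks like this:

import os

def count_tree(path):
  """Return (num_dirs, num_objects, num_bytes) for a directory tree."""
  num_dirs = num_objects = num_bytes = 0
  for entry in os.scandir(path):
    if entry.is_dir(follow_symlinks=False):
      nd, no, nb = count_tree(entry.path)
      nd += 1  # count this directory itself
    else:
      nd, no, nb = 0, 1, entry.stat().st_size
    num_dirs += nd
    num_objects += no
    num_bytes += nb
  return num_dirs, num_objects, num_bytes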
Example #2
  def __iter__(self):
    for blr in self.blr_iter:
      if blr.IsPrefix():
        # This is a bucket subdirectory, list objects according to the wildcard.
        prefix_url = StorageUrlFromString(blr.url_string).CreatePrefixUrl(
            wildcard_suffix=self.subdir_exp_wildcard)
        implicit_subdir_iterator = PluralityCheckableIterator(
            self.name_exp_instance.WildcardIterator(prefix_url).IterAll(
                bucket_listing_fields=self.bucket_listing_fields))
        if not implicit_subdir_iterator.IsEmpty():
          for exp_blr in implicit_subdir_iterator:
            yield (True, exp_blr)
        else:
          # Prefix that contains no objects, for example in the $folder$ case
          # or an empty filesystem directory.
          yield (False, blr)
      elif blr.IsObject():
        yield (False, blr)
      else:
        raise CommandException(
            '_ImplicitBucketSubdirIterator got a bucket reference %s' % blr)
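
PluralityCheckableIterator is what lets this code call IsEmpty() without losing the first element. A simplified stand-in, written from scratch under the assumption that only IsEmpty()/HasPlurality() plus normal iteration are needed (the class name and internals here are ours, not gslib's), might look like this:

class BufferedPeekIterator(object):
  """Buffers up to two items so emptiness/plurality checks are non-destructive."""

  def __init__(self, it):
    self._it = iter(it)
    self._buf = []

  def _Fill(self, n):
    # Pull items into the buffer until we have n or the source is exhausted.
    while len(self._buf) < n:
      try:
        self._buf.append(next(self._it))
      except StopIteration:
        break

  def IsEmpty(self):
    self._Fill(1)
    return not self._buf

  def HasPlurality(self):
    self._Fill(2)
    return len(self._buf) > 1

  def __iter__(self):
    # Yield buffered items first, then drain the underlying iterator.
    while True:
      self._Fill(1)
      if not self._buf:
        return
      yield self._buf.pop(0)

For example, BufferedPeekIterator(iter(['a'])).HasPlurality() is False, and the single element is still yielded by a subsequent loop.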
Example #3
    def __iter__(self):
        """Iterates over all source URLs passed to the iterator.

    For each src url, expands wildcards, object-less bucket names,
    subdir bucket names, and directory names, and generates a flat listing of
    all the matching objects/files.

    You should instantiate this object using the static factory function
    NameExpansionIterator, because consumers of this iterator need the
    PluralityCheckableIterator wrapper built by that function.

    Yields:
      gslib.name_expansion.NameExpansionResult.

    Raises:
      CommandException: if errors encountered.
    """
        for url_str in self.url_strs:
            storage_url = StorageUrlFromString(url_str)

            if storage_url.IsFileUrl() and storage_url.IsStream():
                if self.url_strs.has_plurality:
                    raise CommandException(
                        'Multiple URL strings are not supported '
                        'with streaming ("-") URLs.')
                yield NameExpansionResult(storage_url, False, False,
                                          storage_url)
                continue

            # Step 1: Expand any explicitly specified wildcards. The output from this
            # step is an iterator of BucketListingRef.
            # Starting with gs://buck*/abc* this step would expand to gs://bucket/abcd

            src_names_bucket = False
            if (storage_url.IsCloudUrl() and storage_url.IsBucket()
                    and not self.recursion_requested):
                # UNIX commands like rm and cp will omit directory references.
                # If url_str refers only to buckets and we are not recursing,
                # then produce references of type BUCKET, because they are guaranteed
                # to pass through Step 2 and be omitted in Step 3.
                post_step1_iter = PluralityCheckableIterator(
                    self.WildcardIterator(url_str).IterBuckets(
                        bucket_fields=['id']))
            else:
                # Get a list of objects and prefixes, expanding the top level for
                # any listed buckets.  If our source is a bucket, however, we need
                # to treat all of the top level expansions as names_container=True.
                post_step1_iter = PluralityCheckableIterator(
                    self.WildcardIterator(url_str).IterAll(
                        bucket_listing_fields=['name'],
                        expand_top_level_buckets=True))
                if storage_url.IsCloudUrl() and storage_url.IsBucket():
                    src_names_bucket = True

            # Step 2: Expand bucket subdirs. The output from this
            # step is an iterator of (names_container, BucketListingRef).
            # Starting with gs://bucket/abcd this step would expand to:
            #   iter([(True, abcd/o1.txt), (True, abcd/o2.txt)]).
            subdir_exp_wildcard = self._flatness_wildcard[
                self.recursion_requested]
            if self.recursion_requested:
                post_step2_iter = _ImplicitBucketSubdirIterator(
                    self, post_step1_iter, subdir_exp_wildcard)
            else:
                post_step2_iter = _NonContainerTuplifyIterator(post_step1_iter)
            post_step2_iter = PluralityCheckableIterator(post_step2_iter)

            # Because we actually perform and check object listings here, this will
            # raise if url_args includes a non-existent object.  However,
            # plurality_checkable_iterator will buffer the exception for us, not
            # raising it until the iterator is actually asked to yield the first
            # result.
            if post_step2_iter.IsEmpty():
                if self.continue_on_error:
                    try:
                        raise CommandException('No URLs matched: %s' % url_str)
                    except CommandException as e:
                        # Yield a specialized tuple of (exception, stack_trace) to
                        # the wrapping PluralityCheckableIterator.
                        yield (e, sys.exc_info()[2])
                else:
                    raise CommandException('No URLs matched: %s' % url_str)

            # Step 3. Omit any directories, buckets, or bucket subdirectories for
            # non-recursive expansions.
            post_step3_iter = PluralityCheckableIterator(
                _OmitNonRecursiveIterator(post_step2_iter,
                                          self.recursion_requested,
                                          self.command_name,
                                          self.cmd_supports_recursion,
                                          self.logger))

            src_url_expands_to_multi = post_step3_iter.HasPlurality()
            is_multi_source_request = (self.url_strs.has_plurality
                                       or src_url_expands_to_multi)

            # Step 4. Expand directories and buckets. This step yields the iterated
            # values. Starting with gs://bucket this step would expand to:
            #  [abcd/o1.txt, abcd/o2.txt, xyz/o1.txt, xyz/o2.txt]
            # Starting with file://dir this step would expand to:
            #  [dir/a.txt, dir/b.txt, dir/c/]
            for (names_container, blr) in post_step3_iter:
                src_names_container = src_names_bucket or names_container

                if blr.IsObject():
                    yield NameExpansionResult(storage_url,
                                              is_multi_source_request,
                                              src_names_container,
                                              blr.storage_url)
                else:
                    # Use implicit wildcarding to do the enumeration.
                    # At this point we are guaranteed that:
                    # - Recursion has been requested, because otherwise
                    #   non-object entries are filtered out in Step 3.
                    # - This is a prefix or bucket subdirectory, because only
                    #   non-recursive iterations produce bucket references.
                    expanded_url = StorageUrlFromString(blr.url_string)
                    if expanded_url.IsFileUrl():
                        # Convert dir to implicit recursive wildcard.
                        url_to_iterate = '%s%s%s' % (blr, os.sep,
                                                     subdir_exp_wildcard)
                    else:
                        # Convert subdir to implicit recursive wildcard.
                        url_to_iterate = expanded_url.CreatePrefixUrl(
                            wildcard_suffix=subdir_exp_wildcard)

                    wc_iter = PluralityCheckableIterator(
                        self.WildcardIterator(url_to_iterate).IterObjects(
                            bucket_listing_fields=['name']))
                    src_url_expands_to_multi = (src_url_expands_to_multi
                                                or wc_iter.HasPlurality())
                    is_multi_source_request = (self.url_strs.has_plurality
                                               or src_url_expands_to_multi)
                    # This will be a flattened listing of all underlying objects in the
                    # subdir.
                    for blr in wc_iter:
                        yield NameExpansionResult(storage_url,
                                                  is_multi_source_request,
                                                  True, blr.storage_url)
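
Under continue_on_error the iterator does not abort; it yields an (exception, stack_trace) tuple and lets the wrapping iterator decide what to do with it. A minimal sketch of a consumer for that convention (the names and logging here are illustrative assumptions, not gslib's actual wrapper):

import sys

def DrainResults(results, log_error=print):
  """Yield normal results; log buffered (exception, traceback) tuples."""
  for item in results:
    if isinstance(item, tuple) and isinstance(item[0], Exception):
      e, _tb = item
      log_error('Error: %s' % e)  # log and keep iterating
      continue
    yield item

def _Gen():
  yield 'gs://bucket/a.txt'
  try:
    raise ValueError('No URLs matched: gs://bucket/missing*')
  except ValueError as e:
    yield (e, sys.exc_info()[2])

print(list(DrainResults(_Gen())))  # logs the error, keeps 'gs://bucket/a.txt'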
Example #4
    def ExpandUrlAndPrint(self, url):
        """Iterates over the given URL and calls print functions.

    Args:
      url: StorageUrl to iterate over.

    Returns:
      (num_objects, num_bytes) total number of objects and bytes iterated.
    """
        num_objects = 0
        num_dirs = 0
        num_bytes = 0
        print_newline = False

        if url.IsBucket() or self.should_recurse:
            # IsBucket() implies a top-level listing.
            if url.IsBucket():
                self._print_bucket_header_func(url)
            return self._RecurseExpandUrlAndPrint(url.url_string,
                                                  print_initial_newline=False)
        else:
            # User provided a prefix or object URL, but it's impossible to tell
            # which until we do a listing and see what matches.
            top_level_iterator = PluralityCheckableIterator(
                self._iterator_func(
                    url.CreatePrefixUrl(wildcard_suffix=None),
                    all_versions=self.all_versions).IterAll(
                        expand_top_level_buckets=True,
                        bucket_listing_fields=self.bucket_listing_fields))
            plurality = top_level_iterator.HasPlurality()

            for blr in top_level_iterator:
                if self._MatchesExcludedPattern(blr):
                    continue
                if blr.IsObject():
                    nd = 0
                    no, nb = self._print_object_func(blr)
                    print_newline = True
                elif blr.IsPrefix():
                    if print_newline:
                        self._print_newline_func()
                    else:
                        print_newline = True
                    if plurality and self.list_subdir_contents:
                        self._print_dir_header_func(blr)
                    elif plurality and not self.list_subdir_contents:
                        print_newline = False
                    expansion_url_str = StorageUrlFromString(
                        blr.url_string).CreatePrefixUrl(
                            wildcard_suffix='*'
                            if self.list_subdir_contents else None)
                    nd, no, nb = self._RecurseExpandUrlAndPrint(
                        expansion_url_str)
                    self._print_dir_summary_func(nb, blr)
                else:
                    # We handle all buckets at the top level, so this should never happen.
                    raise CommandException(
                        'Sub-level iterator returned a CsBucketListingRef of type Bucket'
                    )
                num_objects += no
                num_dirs += nd
                num_bytes += nb
            return num_dirs, num_objects, num_bytes
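
Whether HasPlurality() returned true is what decides if directory headers get printed at the top level. A toy rendering of that decision, using made-up entry tuples instead of BucketListingRefs:

def PrintListing(entries, out=print):
  """entries: ('object', name) or ('prefix', name, child_names) tuples."""
  plurality = len(entries) > 1
  for entry in entries:
    if entry[0] == 'object':
      out(entry[1])
    else:
      if plurality:
        out('%s:' % entry[1])  # dir header, like _print_dir_header_func
      for child in entry[2]:
        out(child)

PrintListing([('prefix', 'gs://bucket/dir', ['gs://bucket/dir/a.txt'])])
# With a single match, no "gs://bucket/dir:" header is printed.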
Example #5
    def CatUrlStrings(self,
                      url_strings,
                      show_header=False,
                      start_byte=0,
                      end_byte=None,
                      cat_out_fd=None):
        """Prints each of the url strings to stdout.

    Args:
      url_strings: String iterable.
      show_header: If true, print a header per file.
      start_byte: Starting byte of the file to print, used for constructing
                  range requests.
      end_byte: Ending byte of the file to print; used for constructing range
                requests. If this is negative, the start_byte is ignored and
                and end range is sent over HTTP (such as range: bytes -9)
      cat_out_fd: File descriptor to which output should be written. Defaults to
                 stdout if no file descriptor is supplied.
    Returns:
      0 on success.

    Raises:
      CommandException if no URLs can be found.
    """
        printed_one = False
        # This should refer to whatever sys.stdin refers to when this method is
        # run, not when this method is defined, so we do the initialization here
        # rather than define sys.stdin as the cat_out_fd parameter's default value.
        if cat_out_fd is None:
            cat_out_fd = sys.stdout
        # We redirect stdout so that all data other than the object contents
        # goes to stderr.
        old_stdout = sys.stdout
        sys.stdout = sys.stderr
        try:
            if url_strings and url_strings[0] in ('-', 'file://-'):
                self._WriteBytesBufferedFileToFile(sys.stdin, cat_out_fd)
            else:
                for url_str in url_strings:
                    did_some_work = False
                    # TODO: Get only the needed fields here.
                    for blr in self.command_obj.WildcardIterator(
                            url_str
                    ).IterObjects(
                            bucket_listing_fields=_CAT_BUCKET_LISTING_FIELDS):
                        decryption_keywrapper = None
                        if (blr.root_object
                                and blr.root_object.customerEncryption and
                                blr.root_object.customerEncryption.keySha256):
                            decryption_key = FindMatchingCSEKInBotoConfig(
                                blr.root_object.customerEncryption.keySha256,
                                config)
                            if not decryption_key:
                                raise EncryptionException(
                                    'Missing decryption key with SHA256 hash %s. No decryption '
                                    'key matches object %s' %
                                    (blr.root_object.customerEncryption.
                                     keySha256, blr.url_string))
                            decryption_keywrapper = CryptoKeyWrapperFromKey(
                                decryption_key)

                        did_some_work = True
                        if show_header:
                            if printed_one:
                                print()
                            print('==> %s <==' % blr)
                            printed_one = True
                        cat_object = blr.root_object
                        storage_url = StorageUrlFromString(blr.url_string)
                        if storage_url.IsCloudUrl():
                            compressed_encoding = ObjectIsGzipEncoded(
                                cat_object)
                            self.command_obj.gsutil_api.GetObjectMedia(
                                cat_object.bucket,
                                cat_object.name,
                                cat_out_fd,
                                compressed_encoding=compressed_encoding,
                                start_byte=start_byte,
                                end_byte=end_byte,
                                object_size=cat_object.size,
                                generation=storage_url.generation,
                                decryption_tuple=decryption_keywrapper,
                                provider=storage_url.scheme)
                        else:
                            with open(storage_url.object_name, 'rb') as f:
                                self._WriteBytesBufferedFileToFile(
                                    f, cat_out_fd)
                    if not did_some_work:
                        raise CommandException(NO_URLS_MATCHED_TARGET %
                                               url_str)
        finally:
            sys.stdout = old_stdout

        return 0
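
The save/redirect/restore dance around sys.stdout can also be packaged as a context manager; a sketch using only the standard library (gsutil itself keeps the inline try/finally):

import contextlib
import sys

@contextlib.contextmanager
def StdoutToStderr():
  """Route stray prints to stderr, handing the caller the real stdout."""
  old_stdout = sys.stdout
  sys.stdout = sys.stderr
  try:
    yield old_stdout  # write object contents here
  finally:
    sys.stdout = old_stdout

Usage: with StdoutToStderr() as out_fd: out_fd.write('object bytes\n').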
Example #6
  def CatUrlStrings(self, url_strings, show_header=False, start_byte=0,
                    end_byte=None):
    """Prints each of the url strings to stdout.

    Args:
      url_strings: String iterable.
      show_header: If true, print a header per file.
      start_byte: Starting byte of the file to print, used for constructing
                  range requests.
      end_byte: Ending byte of the file to print; used for constructing range
                requests. If this is negative, start_byte is ignored and a
                suffix range is sent over HTTP (such as Range: bytes=-9).

    Returns:
      0 on success.

    Raises:
      CommandException: if no URLs can be found.
    """
    printed_one = False
    # We redirect stdout so that all data other than the object contents
    # goes to stderr.
    cat_outfd = sys.stdout
    sys.stdout = sys.stderr
    try:
      for url_str in url_strings:
        did_some_work = False
        # TODO: Get only the needed fields here.
        for blr in self.command_obj.WildcardIterator(url_str).IterObjects():

          decryption_tuple = None
          if (blr.root_object and
              blr.root_object.customerEncryption and
              blr.root_object.customerEncryption.keySha256):
            decryption_key = FindMatchingCryptoKey(
                blr.root_object.customerEncryption.keySha256)
            if not decryption_key:
              raise EncryptionException(
                  'Missing decryption key with SHA256 hash %s. No decryption '
                  'key matches object %s'
                  % (blr.root_object.customerEncryption.keySha256,
                     blr.url_string))
            decryption_tuple = CryptoTupleFromKey(decryption_key)

          did_some_work = True
          if show_header:
            if printed_one:
              print()
            print('==> %s <==' % blr)
            printed_one = True
          cat_object = blr.root_object
          storage_url = StorageUrlFromString(blr.url_string)
          if storage_url.IsCloudUrl():
            compressed_encoding = ObjectIsGzipEncoded(cat_object)
            self.command_obj.gsutil_api.GetObjectMedia(
                cat_object.bucket, cat_object.name, cat_outfd,
                compressed_encoding=compressed_encoding,
                start_byte=start_byte, end_byte=end_byte,
                object_size=cat_object.size, generation=storage_url.generation,
                decryption_tuple=decryption_tuple, provider=storage_url.scheme)
          else:
            with open(storage_url.object_name, 'rb') as f:
              cat_outfd.write(f.read())
        if not did_some_work:
          raise CommandException(NO_URLS_MATCHED_TARGET % url_str)
    finally:
      sys.stdout = cat_outfd

    return 0
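
The negative end_byte described in the docstring maps onto an HTTP suffix range. A hedged sketch of the header such a value implies (the helper is ours; gsutil builds ranges inside its API layer):

def MakeRangeHeader(start_byte=0, end_byte=None):
  """Return an HTTP Range header for the given byte bounds."""
  if end_byte is not None and end_byte < 0:
    return 'Range: bytes=%d' % end_byte  # suffix range, e.g. bytes=-9
  if end_byte is not None:
    return 'Range: bytes=%d-%d' % (start_byte, end_byte)
  return 'Range: bytes=%d-' % start_byte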
Example #7
def PrintFullInfoAboutObject(bucket_listing_ref, incl_acl=True):
    """Print full info for given object (like what displays for gsutil ls -L).

  Args:
    bucket_listing_ref: BucketListingRef being listed.
                        Must have ref_type OBJECT and a populated root_object
                        with the desired fields.
    incl_acl: True if ACL info should be output.

  Returns:
    Tuple (number of objects, object_length)

  Raises:
    Exception: if calling bug encountered.
  """
    url_str = bucket_listing_ref.url_string
    storage_url = StorageUrlFromString(url_str)
    obj = bucket_listing_ref.root_object

    if (obj.metadata
            and S3_DELETE_MARKER_GUID in obj.metadata.additionalProperties):
        num_bytes = 0
        num_objs = 0
        url_str += '<DeleteMarker>'
    else:
        num_bytes = obj.size
        num_objs = 1

    text_util.print_to_fd('{}:'.format(url_str))
    if obj.timeCreated:
        text_util.print_to_fd(
            MakeMetadataLine(
                'Creation time',
                obj.timeCreated.strftime('%a, %d %b %Y %H:%M:%S GMT')))
    if obj.updated:
        text_util.print_to_fd(
            MakeMetadataLine(
                'Update time',
                obj.updated.strftime('%a, %d %b %Y %H:%M:%S GMT')))
    if (obj.timeStorageClassUpdated
            and obj.timeStorageClassUpdated != obj.timeCreated):
        text_util.print_to_fd(
            MakeMetadataLine(
                'Storage class update time',
                obj.timeStorageClassUpdated.strftime(
                    '%a, %d %b %Y %H:%M:%S GMT')))
    if obj.storageClass:
        text_util.print_to_fd(
            MakeMetadataLine('Storage class', obj.storageClass))
    if obj.temporaryHold:
        text_util.print_to_fd(MakeMetadataLine('Temporary Hold', 'Enabled'))
    if obj.eventBasedHold:
        text_util.print_to_fd(MakeMetadataLine('Event-Based Hold', 'Enabled'))
    if obj.retentionExpirationTime:
        text_util.print_to_fd(
            MakeMetadataLine(
                'Retention Expiration',
                obj.retentionExpirationTime.strftime(
                    '%a, %d %b %Y %H:%M:%S GMT')))
    if obj.kmsKeyName:
        text_util.print_to_fd(MakeMetadataLine('KMS key', obj.kmsKeyName))
    if obj.cacheControl:
        text_util.print_to_fd(
            MakeMetadataLine('Cache-Control', obj.cacheControl))
    if obj.contentDisposition:
        text_util.print_to_fd(
            MakeMetadataLine('Content-Disposition', obj.contentDisposition))
    if obj.contentEncoding:
        text_util.print_to_fd(
            MakeMetadataLine('Content-Encoding', obj.contentEncoding))
    if obj.contentLanguage:
        text_util.print_to_fd(
            MakeMetadataLine('Content-Language', obj.contentLanguage))
    text_util.print_to_fd(MakeMetadataLine('Content-Length', obj.size))
    text_util.print_to_fd(MakeMetadataLine('Content-Type', obj.contentType))
    if obj.componentCount:
        text_util.print_to_fd(
            MakeMetadataLine('Component-Count', obj.componentCount))
    if obj.timeDeleted:
        text_util.print_to_fd(
            MakeMetadataLine(
                'Archived time',
                obj.timeDeleted.strftime('%a, %d %b %Y %H:%M:%S GMT')))
    marker_props = {}
    if obj.metadata and obj.metadata.additionalProperties:
        non_marker_props = []
        for add_prop in obj.metadata.additionalProperties:
            if add_prop.key not in S3_MARKER_GUIDS:
                non_marker_props.append(add_prop)
            else:
                marker_props[add_prop.key] = add_prop.value
        if non_marker_props:
            text_util.print_to_fd(MakeMetadataLine('Metadata', ''))
            for ap in non_marker_props:
                ap_key = '{}'.format(ap.key)
                ap_value = '{}'.format(ap.value)
                meta_data_line = MakeMetadataLine(ap_key, ap_value, indent=2)
                text_util.print_to_fd(meta_data_line)
    if obj.customerEncryption:
        if not obj.crc32c:
            text_util.print_to_fd(
                MakeMetadataLine('Hash (crc32c)', 'encrypted'))
        if not obj.md5Hash:
            text_util.print_to_fd(MakeMetadataLine('Hash (md5)', 'encrypted'))
        text_util.print_to_fd(
            MakeMetadataLine('Encryption algorithm',
                             obj.customerEncryption.encryptionAlgorithm))
        text_util.print_to_fd(
            MakeMetadataLine('Encryption key SHA256',
                             obj.customerEncryption.keySha256))
    if obj.crc32c:
        text_util.print_to_fd(MakeMetadataLine('Hash (crc32c)', obj.crc32c))
    if obj.md5Hash:
        text_util.print_to_fd(MakeMetadataLine('Hash (md5)', obj.md5Hash))
    text_util.print_to_fd(MakeMetadataLine('ETag', obj.etag.strip('"\'')))
    if obj.generation:
        generation_str = GenerationFromUrlAndString(storage_url,
                                                    obj.generation)
        text_util.print_to_fd(MakeMetadataLine('Generation', generation_str))
    if obj.metageneration:
        text_util.print_to_fd(
            MakeMetadataLine('Metageneration', obj.metageneration))
    if incl_acl:
        # JSON API won't return acls as part of the response unless we have
        # full control scope
        if obj.acl:
            text_util.print_to_fd(
                MakeMetadataLine('ACL',
                                 AclTranslation.JsonFromMessage(obj.acl)))
        elif S3_ACL_MARKER_GUID in marker_props:
            text_util.print_to_fd(
                MakeMetadataLine('ACL', marker_props[S3_ACL_MARKER_GUID]))
        else:
            # Empty ACLs are possible with Bucket Policy Only and no longer
            # imply ACCESS DENIED.
            text_util.print_to_fd(MakeMetadataLine('ACL', '[]'))

    return (num_objs, num_bytes)
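
MakeMetadataLine itself is not shown in these examples; judging only from the call sites above (label, value, optional indent), a plausible stand-in might look like this, with the exact spacing being a guess:

def MakeMetadataLine(label, value, indent=1):
  """Indent four spaces per level and align values into a rough column."""
  return '%s%s%s' % (' ' * (4 * indent), ('%s:' % label).ljust(24), value)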
Example #8
def PrintFullInfoAboutObject(bucket_listing_ref, incl_acl=True):
    """Print full info for given object (like what displays for gsutil ls -L).

  Args:
    bucket_listing_ref: BucketListingRef being listed.
                        Must have ref_type OBJECT and a populated root_object
                        with the desired fields.
    incl_acl: True if ACL info should be output.

  Returns:
    Tuple (number of objects, object_length)

  Raises:
    Exception: if calling bug encountered.
  """
    url_str = bucket_listing_ref.url_string
    storage_url = StorageUrlFromString(url_str)
    obj = bucket_listing_ref.root_object

    if (obj.metadata
            and S3_DELETE_MARKER_GUID in obj.metadata.additionalProperties):
        num_bytes = 0
        num_objs = 0
        url_str += '<DeleteMarker>'
    else:
        num_bytes = obj.size
        num_objs = 1

    print('%s:' % url_str.encode(UTF8))
    if obj.timeCreated:
        print(
            MakeMetadataLine(
                'Creation time',
                obj.timeCreated.strftime('%a, %d %b %Y %H:%M:%S GMT')))
    if obj.updated:
        print(
            MakeMetadataLine(
                'Update time',
                obj.updated.strftime('%a, %d %b %Y %H:%M:%S GMT')))
    if (obj.timeStorageClassUpdated
            and obj.timeStorageClassUpdated != obj.timeCreated):
        print(
            MakeMetadataLine(
                'Storage class update time',
                obj.timeStorageClassUpdated.strftime(
                    '%a, %d %b %Y %H:%M:%S GMT')))
    if obj.storageClass:
        print(MakeMetadataLine('Storage class', obj.storageClass))
    if obj.kmsKeyName:
        print(MakeMetadataLine('KMS key', obj.kmsKeyName))
    if obj.cacheControl:
        print(MakeMetadataLine('Cache-Control', obj.cacheControl))
    if obj.contentDisposition:
        print(MakeMetadataLine('Content-Disposition', obj.contentDisposition))
    if obj.contentEncoding:
        print(MakeMetadataLine('Content-Encoding', obj.contentEncoding))
    if obj.contentLanguage:
        print(MakeMetadataLine('Content-Language', obj.contentLanguage))
    print(MakeMetadataLine('Content-Length', obj.size))
    print(MakeMetadataLine('Content-Type', obj.contentType))
    if obj.componentCount:
        print(MakeMetadataLine('Component-Count', obj.componentCount))
    if obj.timeDeleted:
        print(
            MakeMetadataLine(
                'Archived time',
                obj.timeDeleted.strftime('%a, %d %b %Y %H:%M:%S GMT')))
    marker_props = {}
    if obj.metadata and obj.metadata.additionalProperties:
        non_marker_props = []
        for add_prop in obj.metadata.additionalProperties:
            if add_prop.key not in S3_MARKER_GUIDS:
                non_marker_props.append(add_prop)
            else:
                marker_props[add_prop.key] = add_prop.value
        if non_marker_props:
            print(MakeMetadataLine('Metadata', ''))
            for ap in non_marker_props:
                print(
                    MakeMetadataLine(('%s' % ap.key).encode(UTF8),
                                     ('%s' % ap.value).encode(UTF8),
                                     indent=2))
    if obj.customerEncryption:
        if not obj.crc32c:
            print(MakeMetadataLine('Hash (crc32c)', 'encrypted'))
        if not obj.md5Hash:
            print(MakeMetadataLine('Hash (md5)', 'encrypted'))
        print(
            MakeMetadataLine('Encryption algorithm',
                             obj.customerEncryption.encryptionAlgorithm))
        print(
            MakeMetadataLine('Encryption key SHA256',
                             obj.customerEncryption.keySha256))
    if obj.crc32c:
        print(MakeMetadataLine('Hash (crc32c)', obj.crc32c))
    if obj.md5Hash:
        print(MakeMetadataLine('Hash (md5)', obj.md5Hash))
    print(MakeMetadataLine('ETag', obj.etag.strip('"\'')))
    if obj.generation:
        generation_str = GenerationFromUrlAndString(storage_url,
                                                    obj.generation)
        print(MakeMetadataLine('Generation', generation_str))
    if obj.metageneration:
        print(MakeMetadataLine('Metageneration', obj.metageneration))
    if incl_acl:
        # JSON API won't return acls as part of the response unless we have
        # full control scope
        if obj.acl:
            print(
                MakeMetadataLine('ACL',
                                 AclTranslation.JsonFromMessage(obj.acl)))
        elif S3_ACL_MARKER_GUID in marker_props:
            print(MakeMetadataLine('ACL', marker_props[S3_ACL_MARKER_GUID]))
        else:
            print(MakeMetadataLine('ACL', 'ACCESS DENIED'))
            print(
                MakeMetadataLine(
                    'Note',
                    'You need OWNER permission on the object to read its ACL',
                    2))
    return (num_objs, num_bytes)
Example #9
  def ExpandUrlAndPrint(self, url):
    """Iterates over the given URL and calls print functions.

    Args:
      url: StorageUrl to iterate over.

    Returns:
      (num_dirs, num_objects, num_bytes) total number of directories,
      objects, and bytes iterated.
    """
    num_objects = 0
    num_dirs = 0
    num_bytes = 0
    print_newline = False

    if url.IsBucket() or self.should_recurse:
      # IsBucket() implies a top-level listing.
      if url.IsBucket():
        self._print_bucket_header_func(url)
      return self._RecurseExpandUrlAndPrint(url.url_string,
                                            print_initial_newline=False)
    else:
      # User provided a prefix or object URL, but we can't tell which unless
      # there is a generation or we do a listing to see what matches.
      if url.HasGeneration():
        iteration_url = url.url_string
      else:
        iteration_url = url.CreatePrefixUrl()
      top_level_iterator = PluralityCheckableIterator(
          self._iterator_func(
              iteration_url, all_versions=self.all_versions).IterAll(
                  expand_top_level_buckets=True,
                  bucket_listing_fields=self.bucket_listing_fields))
      plurality = top_level_iterator.HasPlurality()

      try:
        top_level_iterator.PeekException()
      except EncryptionException:
        # Detailed listing on a single object can perform a GetObjectMetadata
        # call, which raises if a matching encryption key isn't found.
        # Re-iterate without requesting encrypted fields.
        top_level_iterator = PluralityCheckableIterator(
            self._iterator_func(
                url.CreatePrefixUrl(wildcard_suffix=None),
                all_versions=self.all_versions).IterAll(
                    expand_top_level_buckets=True,
                    bucket_listing_fields=UNENCRYPTED_FULL_LISTING_FIELDS))
        plurality = top_level_iterator.HasPlurality()

      for blr in top_level_iterator:
        if self._MatchesExcludedPattern(blr):
          continue
        if blr.IsObject():
          nd = 0
          no, nb = self._print_object_func(blr)
          print_newline = True
        elif blr.IsPrefix():
          if print_newline:
            self._print_newline_func()
          else:
            print_newline = True
          if plurality and self.list_subdir_contents:
            self._print_dir_header_func(blr)
          elif plurality and not self.list_subdir_contents:
            print_newline = False
          expansion_url_str = StorageUrlFromString(
              blr.url_string).CreatePrefixUrl(
                  wildcard_suffix='*' if self.list_subdir_contents else None)
          nd, no, nb = self._RecurseExpandUrlAndPrint(expansion_url_str)
          self._print_dir_summary_func(nb, blr)
        else:
          # We handle all buckets at the top level, so this should never happen.
          raise CommandException(
              'Sub-level iterator returned a CsBucketListingRef of type Bucket')
        num_objects += no
        num_dirs += nd
        num_bytes += nb
      return num_dirs, num_objects, num_bytes