Example #1
 def _GetAndPrintLabel(self, bucket_arg):
   """Gets and prints the labels for a cloud bucket."""
   (bucket_url, bucket_metadata) = self.GetSingleBucketUrlFromArg(
       bucket_arg, bucket_fields=['labels'])
   if bucket_url.scheme == 's3':
     print(self.gsutil_api.XmlPassThroughGetTagging(
         bucket_url, provider=bucket_url.scheme))
   else:
     if bucket_metadata.labels:
       print(LabelTranslation.JsonFromMessage(
           bucket_metadata.labels, pretty_print=True))
     else:
       print('%s has no label configuration.' % bucket_url)
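For reference, a minimal standalone sketch (not taken from the gsutil source above) of the LabelTranslation round trip this example relies on; the import path is an assumption and differs between gsutil releases.

# Hypothetical usage sketch: convert a plain dict to the apitools labels
# message and back to pretty-printed JSON, as _GetAndPrintLabel does above.
# The module path below is an assumption; it varies across gsutil versions.
from gslib.translation_helper import LabelTranslation

labels_message = LabelTranslation.DictToMessage({'env': 'prod', 'team': 'data'})
print(LabelTranslation.JsonFromMessage(labels_message, pretty_print=True))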
Example #2
    def _ChLabelForBucket(blr):
      url = blr.storage_url
      self.logger.info('Setting label configuration on %s...', blr)

      labels_message = None
      # When performing a read-modify-write cycle, include metageneration to
      # avoid race conditions (supported for GS buckets only).
      metageneration = None
      if (self.gsutil_api.GetApiSelector(url.scheme) ==
          ApiSelector.JSON):
        # The JSON API's PATCH semantics allow us to skip read-modify-write,
        # with the exception of one edge case - attempting to delete a
        # nonexistent label returns an error iff no labels previously existed
        corrected_changes = self.label_changes
        if self.num_deletions:
          (_, bucket_metadata) = self.GetSingleBucketUrlFromArg(
              url.url_string, bucket_fields=['labels', 'metageneration'])
          if not bucket_metadata.labels:
            metageneration = bucket_metadata.metageneration
            # Remove each change that would try to delete a nonexistent key.
            corrected_changes = dict(
                (k, v) for k, v in self.label_changes.iteritems() if v)
        labels_message = LabelTranslation.DictToMessage(corrected_changes)
      else:  # ApiSelector.XML
        # Perform a read-modify-write cycle so that we can specify which
        # existing labels need to be deleted.
        (_, bucket_metadata) = self.GetSingleBucketUrlFromArg(
            url.url_string, bucket_fields=['labels', 'metageneration'])
        metageneration = bucket_metadata.metageneration

        label_json = {}
        if bucket_metadata.labels:
          label_json = json.loads(
              LabelTranslation.JsonFromMessage(bucket_metadata.labels))
        # Modify label_json such that all specified labels are added
        # (overwriting old labels if necessary) and all specified deletions
        # are removed from label_json if already present.
        for key, value in self.label_changes.iteritems():
          if not value and key in label_json:
            del label_json[key]
          else:
            label_json[key] = value
        labels_message = LabelTranslation.DictToMessage(label_json)

      preconditions = Preconditions(meta_gen_match=metageneration)
      bucket_metadata = apitools_messages.Bucket(labels=labels_message)
      self.gsutil_api.PatchBucket(url.bucket_name,
                                  bucket_metadata,
                                  preconditions=preconditions,
                                  provider=url.scheme,
                                  fields=['id'])
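A small self-contained illustration (made-up data, not part of the gsutil source) of the merge rule the XML branch above applies: an empty value in label_changes deletes the key, any other value adds or overwrites it.

# Hypothetical data, standard Python only; .items() is used instead of the
# Python 2 .iteritems() seen above so the snippet runs under either version.
label_json = {'team': 'data', 'env': 'prod'}
label_changes = {'env': '', 'owner': 'alice'}  # '' means "delete this key"
for key, value in label_changes.items():
  if not value and key in label_json:
    del label_json[key]
  else:
    label_json[key] = value
assert label_json == {'team': 'data', 'owner': 'alice'}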
Example #3
    def _SetLabelForBucket(blr):
      url = blr.storage_url
      self.logger.info('Setting label configuration on %s...', blr)

      if url.scheme == 's3':  # Uses only XML.
        self.gsutil_api.XmlPassThroughSetTagging(
            label_text, url, provider=url.scheme)
      else:  # Must be a 'gs://' bucket.
        labels_message = None
        # When performing a read-modify-write cycle, include metageneration to
        # avoid race conditions (supported for GS buckets only).
        metageneration = None
        new_label_json = json.loads(label_text)
        if (self.gsutil_api.GetApiSelector(url.scheme) ==
            ApiSelector.JSON):
          # Perform a read-modify-write so that we can specify which
          # existing labels need to be deleted.
          (_, bucket_metadata) = self.GetSingleBucketUrlFromArg(
              url.url_string, bucket_fields=['labels', 'metageneration'])
          metageneration = bucket_metadata.metageneration
          label_json = {}
          if bucket_metadata.labels:
            label_json = json.loads(
                LabelTranslation.JsonFromMessage(bucket_metadata.labels))
          # Set all old keys' values to None; this will delete each key that
          # is not included in the new set of labels.
          merged_labels = dict(
              (key, None) for key, _ in label_json.iteritems())
          merged_labels.update(new_label_json)
          labels_message = LabelTranslation.DictToMessage(merged_labels)
        else:  # ApiSelector.XML
          # No need to read-modify-write with the XML API.
          labels_message = LabelTranslation.DictToMessage(new_label_json)

        preconditions = Preconditions(meta_gen_match=metageneration)
        bucket_metadata = apitools_messages.Bucket(labels=labels_message)
        self.gsutil_api.PatchBucket(url.bucket_name,
                                    bucket_metadata,
                                    preconditions=preconditions,
                                    provider=url.scheme,
                                    fields=['id'])
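A comparable sketch (again with made-up data) of the replace-style merge in the JSON branch above: every existing key defaults to None so it is deleted unless the new label set mentions it.

# Hypothetical data illustrating the merge used by _SetLabelForBucket above.
label_json = {'team': 'data', 'env': 'prod'}   # labels currently on the bucket
new_label_json = {'env': 'staging'}            # labels supplied by the user
merged_labels = dict((key, None) for key in label_json)
merged_labels.update(new_label_json)
assert merged_labels == {'team': None, 'env': 'staging'}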
Example #4
    def _PrintBucketInfo(self, bucket_blr, listing_style):
        """Print listing info for given bucket.

    Args:
      bucket_blr: BucketListingReference for the bucket being listed
      listing_style: ListingStyle enum describing type of output desired.

    Returns:
      Tuple (total objects, total bytes) in the bucket.
    """
        if (listing_style == ListingStyle.SHORT
                or listing_style == ListingStyle.LONG):
            print(bucket_blr)
            return
        # listing_style == ListingStyle.LONG_LONG:
        # We're guaranteed by the caller that the root object is populated.
        bucket = bucket_blr.root_object
        location_constraint = bucket.location
        storage_class = bucket.storageClass
        fields = {
            'bucket': bucket_blr.url_string,
            'storage_class': storage_class,
            'location_constraint': location_constraint,
            'acl': AclTranslation.JsonFromMessage(bucket.acl),
            'default_acl':
            AclTranslation.JsonFromMessage(bucket.defaultObjectAcl)
        }

        fields['versioning'] = bucket.versioning and bucket.versioning.enabled
        fields['website_config'] = 'Present' if bucket.website else 'None'
        fields['logging_config'] = 'Present' if bucket.logging else 'None'
        fields['cors_config'] = 'Present' if bucket.cors else 'None'
        fields['lifecycle_config'] = 'Present' if bucket.lifecycle else 'None'
        if bucket.labels:
            fields['labels'] = LabelTranslation.JsonFromMessage(
                bucket.labels, pretty_print=True)
        else:
            fields['labels'] = 'None'
        if bucket.timeCreated:
            fields['time_created'] = bucket.timeCreated.strftime(
                '%a, %d %b %Y %H:%M:%S GMT')
        if bucket.updated:
            fields['updated'] = bucket.updated.strftime(
                '%a, %d %b %Y %H:%M:%S GMT')

        # For field values that are multiline, add indenting to make it look
        # prettier.
        for key in fields:
            previous_value = fields[key]
            if (not isinstance(previous_value, basestring)
                    or '\n' not in previous_value):
                continue
            new_value = previous_value.replace('\n', '\n\t  ')
            # Start multiline values on a new line if they aren't already.
            if not new_value.startswith('\n'):
                new_value = '\n\t  ' + new_value
            fields[key] = new_value

        # Only display time-related properties if the given API returned them.
        time_created_line = ''
        time_updated_line = ''
        if 'time_created' in fields:
            time_created_line = '\tTime created:\t\t\t{time_created}\n'
        if 'updated' in fields:
            time_updated_line = '\tTime updated:\t\t\t{updated}\n'

        print(('{bucket} :\n'
               '\tStorage class:\t\t\t{storage_class}\n'
               '\tLocation constraint:\t\t{location_constraint}\n'
               '\tVersioning enabled:\t\t{versioning}\n'
               '\tLogging configuration:\t\t{logging_config}\n'
               '\tWebsite configuration:\t\t{website_config}\n'
               '\tCORS configuration: \t\t{cors_config}\n'
               '\tLifecycle configuration:\t{lifecycle_config}\n'
               '\tLabels:\t\t\t\t{labels}\n' + time_created_line +
               time_updated_line + '\tACL:\t\t\t\t{acl}\n'
               '\tDefault ACL:\t\t\t{default_acl}').format(**fields))
        if bucket_blr.storage_url.scheme == 's3':
            print(
                'Note: this is an S3 bucket so configuration values may be '
                'blank. To retrieve bucket configuration values, use '
                'individual configuration commands such as gsutil acl get '
                '<bucket>.')