Example #1
  def ApplyAclChanges(self, url):
    """Applies the changes in self.changes to the provided URL."""
    bucket = self.gsutil_api.GetBucket(
        url.bucket_name, provider=url.scheme,
        fields=['defaultObjectAcl', 'metageneration'])
    current_acl = bucket.defaultObjectAcl
    if not current_acl:
      self._WarnServiceAccounts()
      self.logger.warning('Failed to set acl for %s. Please ensure you have '
                          'OWNER-role access to this resource.', url)
      return

    modification_count = 0
    for change in self.changes:
      modification_count += change.Execute(
          url, current_acl, 'defacl', self.logger)
    if modification_count == 0:
      self.logger.info('No changes to %s', url)
      return

    try:
      preconditions = Preconditions(meta_gen_match=bucket.metageneration)
      bucket_metadata = apitools_messages.Bucket(defaultObjectAcl=current_acl)
      self.gsutil_api.PatchBucket(url.bucket_name, bucket_metadata,
                                  preconditions=preconditions,
                                  provider=url.scheme, fields=['id'])
    except BadRequestException as e:
      # Don't retry on bad requests, e.g. invalid email address.
      raise CommandException('Received bad request from server: %s' % str(e))

    self.logger.info('Updated default ACL on %s', url)
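The pattern worth copying here is the metageneration guard: the bucket is read together with its metageneration, and the PATCH carries that value as a precondition, so a concurrent change makes the write fail instead of being silently overwritten. A minimal sketch of the same read-modify-write guard, assuming gsutil's own import paths and an already constructed Cloud API client named gsutil_api, as in the examples (bucket name illustrative):

from gslib.cloud_api import Preconditions
from gslib.third_party.storage_apitools import storage_v1_messages as apitools_messages

# Read: fetch only the fields needed for the modify-write cycle.
bucket = gsutil_api.GetBucket('my-bucket', provider='gs',
                              fields=['defaultObjectAcl', 'metageneration'])
# ... mutate bucket.defaultObjectAcl in place ...
# Write: the server rejects the patch if the metageneration moved on.
gsutil_api.PatchBucket(
    'my-bucket',
    apitools_messages.Bucket(defaultObjectAcl=bucket.defaultObjectAcl),
    preconditions=Preconditions(meta_gen_match=bucket.metageneration),
    provider='gs', fields=['id'])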
Example #2
    def _SetWeb(self):
        """Sets website configuration for a bucket."""
        main_page_suffix = None
        error_page = None
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-m':
                    main_page_suffix = a
                elif o == '-e':
                    error_page = a

        url_args = self.args

        website = apitools_messages.Bucket.WebsiteValue(
            mainPageSuffix=main_page_suffix, notFoundPage=error_page)

        # Iterate over URLs, expanding wildcards and setting the website
        # configuration on each.
        some_matched = False
        for url_str in url_args:
            bucket_iter = self.GetBucketUrlIterFromArg(url_str,
                                                       bucket_fields=['id'])
            for blr in bucket_iter:
                url = blr.storage_url
                some_matched = True
                self.logger.info('Setting website configuration on %s...', blr)
                bucket_metadata = apitools_messages.Bucket(website=website)
                self.gsutil_api.PatchBucket(url.bucket_name,
                                            bucket_metadata,
                                            provider=url.scheme,
                                            fields=['id'])
        if not some_matched:
            raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))
        return 0
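Bucket.WebsiteValue is a small nested message, and apitools omits fields left as None from the request, so passing only -m or only -e patches just that half of the website configuration. A standalone sketch (object names illustrative; the import is the one gsutil itself uses):

from gslib.third_party.storage_apitools import storage_v1_messages as apitools_messages

website = apitools_messages.Bucket.WebsiteValue(
    mainPageSuffix='index.html',  # served for requests to the bucket root
    notFoundPage='404.html')      # served when no object matches the request
bucket_metadata = apitools_messages.Bucket(website=website)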
Example #3
  def _SetLifecycleConfig(self):
    """Sets lifecycle configuration for a Google Cloud Storage bucket."""
    lifecycle_arg = self.args[0]
    url_args = self.args[1:]
    # Disallow multi-provider 'lifecycle set' requests.
    if not UrlsAreForSingleProvider(url_args):
      raise CommandException('"%s" command spanning providers not allowed.' %
                             self.command_name)

    # Open, read and parse file containing JSON document.
    lifecycle_file = open(lifecycle_arg, 'r')
    lifecycle_txt = lifecycle_file.read()
    lifecycle_file.close()

    # Iterate over URLs, expanding wildcards and setting the lifecycle on each.
    some_matched = False
    for url_str in url_args:
      bucket_iter = self.GetBucketUrlIterFromArg(url_str,
                                                 bucket_fields=['lifecycle'])
      for blr in bucket_iter:
        url = blr.storage_url
        some_matched = True
        self.logger.info('Setting lifecycle configuration on %s...', blr)
        if url.scheme == 's3':
          self.gsutil_api.XmlPassThroughSetLifecycle(
              lifecycle_txt, url, provider=url.scheme)
        else:
          lifecycle = LifecycleTranslation.JsonLifecycleToMessage(lifecycle_txt)
          bucket_metadata = apitools_messages.Bucket(lifecycle=lifecycle)
          self.gsutil_api.PatchBucket(url.bucket_name, bucket_metadata,
                                      provider=url.scheme, fields=['id'])
    if not some_matched:
      raise CommandException('No URLs matched')
    return 0
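A with block is the safer way to read the lifecycle file, since the bare open/read/close above leaks the handle if read() raises; the JSON-to-message conversion is unchanged. A sketch, assuming LifecycleTranslation lives in gsutil's translation_helper module (the exact path varies across gsutil versions):

from gslib.utils.translation_helper import LifecycleTranslation

with open('lifecycle.json', 'r') as lifecycle_file:  # closed even on error
  lifecycle_txt = lifecycle_file.read()
lifecycle = LifecycleTranslation.JsonLifecycleToMessage(lifecycle_txt)
bucket_metadata = apitools_messages.Bucket(lifecycle=lifecycle)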
Example #4
  def __init__(self, bucket_name, versioned=False):
    self.root_object = apitools_messages.Bucket(
        name=bucket_name,
        versioning=apitools_messages.Bucket.VersioningValue(enabled=versioned))
    # Dict of object_name: (dict of 'live': MockObject
    #                               'versioned': ordered list of MockObject).
    self.objects = {}
Example #5
  def _SetVersioning(self):
    """Gets versioning configuration for a bucket."""
    versioning_arg = self.args[0].lower()
    if versioning_arg not in ('on', 'off'):
      raise CommandException('Argument to "%s set" must be either <on|off>'
                             % (self.command_name))
    url_args = self.args[1:]
    if not url_args:
      self.RaiseWrongNumberOfArgumentsException()

    # Iterate over URLs, expanding wildcards and set the versioning
    # configuration on each.
    some_matched = False
    for url_str in url_args:
      bucket_iter = self.GetBucketUrlIterFromArg(url_str, bucket_fields=['id'])
      for blr in bucket_iter:
        url = blr.storage_url
        some_matched = True
        bucket_metadata = apitools_messages.Bucket(
            versioning=apitools_messages.Bucket.VersioningValue())
        if versioning_arg == 'on':
          self.logger.info('Enabling versioning for %s...', url)
          bucket_metadata.versioning.enabled = True
        else:
          self.logger.info('Suspending versioning for %s...', url)
          bucket_metadata.versioning.enabled = False
        self.gsutil_api.PatchBucket(url.bucket_name, bucket_metadata,
                                    provider=url.scheme, fields=['id'])
    if not some_matched:
      raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))
Example #6
  def ApplyAclChanges(self, url):
    """Applies the changes in self.changes to the provided URL."""
    bucket = self.gsutil_api.GetBucket(
        url.bucket_name, provider=url.scheme,
        fields=['defaultObjectAcl', 'metageneration'])

    # Default object ACLs can be blank if the ACL was set to private, or
    # if the user doesn't have permission. We warn about this with defacl get,
    # so just try the modification here and if the user doesn't have
    # permission they'll get an AccessDeniedException.
    current_acl = bucket.defaultObjectAcl

    modification_count = 0
    for change in self.changes:
      modification_count += change.Execute(
          url, current_acl, 'defacl', self.logger)
    if modification_count == 0:
      self.logger.info('No changes to %s', url)
      return

    try:
      preconditions = Preconditions(meta_gen_match=bucket.metageneration)
      bucket_metadata = apitools_messages.Bucket(defaultObjectAcl=current_acl)
      self.gsutil_api.PatchBucket(url.bucket_name, bucket_metadata,
                                  preconditions=preconditions,
                                  provider=url.scheme, fields=['id'])
    except BadRequestException as e:
      # Don't retry on bad requests, e.g. invalid email address.
      raise CommandException('Received bad request from server: %s' % str(e))
    except AccessDeniedException:
      self._WarnServiceAccounts()
      raise CommandException('Failed to set acl for %s. Please ensure you have '
                             'OWNER-role access to this resource.' % url)

    self.logger.info('Updated default ACL on %s', url)
Example #7
  def _DefaultEventHold(self):
    """Sets default value for Event-Based Hold on one or more buckets."""
    hold = None
    if self.args:
      if self.args[0].lower() == 'set':
        hold = True
      elif self.args[0].lower() == 'release':
        hold = False
      else:
        raise CommandException(
            ('Invalid subcommand "{}" for the "retention event-default"'
             ' command.\nSee "gsutil help retention event".'
            ).format(self.args[0]))

    verb = 'Setting' if hold else 'Releasing'
    log_msg_template = '{} default Event-Based Hold on %s...'.format(verb)
    bucket_metadata_update = apitools_messages.Bucket(
        defaultEventBasedHold=hold)
    url_args = self.args[1:]
    self.BucketUpdateFunc(
        url_args,
        bucket_metadata_update,
        fields=['id', 'defaultEventBasedHold'],
        log_msg_template=log_msg_template)
    return 0
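BucketUpdateFunc presumably issues an ordinary bucket patch; a hedged sketch of the payload it would send for either subcommand (bucket name illustrative, gsutil_api as in the other examples):

hold = True  # 'set'; use False for 'release'
bucket_metadata_update = apitools_messages.Bucket(defaultEventBasedHold=hold)
gsutil_api.PatchBucket('my-bucket', bucket_metadata_update, provider='gs',
                       fields=['id', 'defaultEventBasedHold'])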
Example #8
    def _SetDefStorageClass(self):
        """Sets the default storage class for a bucket."""
        # At this point, "set" has been popped off the front of self.args.
        normalized_storage_class = NormalizeStorageClass(self.args[0])
        url_args = self.args[1:]
        if not url_args:
            self.RaiseWrongNumberOfArgumentsException()

        some_matched = False
        for url_str in url_args:
            self._CheckIsGsUrl(url_str)
            # Throws a CommandException if the argument is not a bucket.
            bucket_iter = self.GetBucketUrlIterFromArg(url_str,
                                                       bucket_fields=['id'])
            for blr in bucket_iter:
                some_matched = True
                bucket_metadata = apitools_messages.Bucket()
                self.logger.info(
                    'Setting default storage class to "%s" for bucket %s' %
                    (normalized_storage_class, blr.url_string.rstrip('/')))
                bucket_metadata.storageClass = normalized_storage_class
                self.gsutil_api.PatchBucket(blr.storage_url.bucket_name,
                                            bucket_metadata,
                                            provider=blr.storage_url.scheme,
                                            fields=['id'])
        if not some_matched:
            raise CommandException(NO_URLS_MATCHED_TARGET % list(url_args))
Example #9
  def CreateBucketJson(self, bucket_name=None, test_objects=0,
                       storage_class=None, location=None,
                       versioning_enabled=False,
                       retention_policy=None,
                       bucket_policy_only=False):
    """Creates a test bucket using the JSON API.

    The bucket and all of its contents will be deleted after the test.

    Args:
      bucket_name: Create the bucket with this name. If not provided, a
                   temporary test bucket name is constructed.
      test_objects: The number of objects that should be placed in the bucket.
                    Defaults to 0.
      storage_class: Storage class to use. If not provided we use standard.
      location: Location to use.
      versioning_enabled: If True, set the bucket's versioning attribute to
          True.
      retention_policy: Retention policy to be used on the bucket.
      bucket_policy_only: If True, set the bucket's iamConfiguration's
          bucketPolicyOnly attribute to True.

    Returns:
      Apitools Bucket for the created bucket.
    """
    bucket_name = util.MakeBucketNameValid(
        bucket_name or self.MakeTempName('bucket'))
    bucket_metadata = apitools_messages.Bucket(name=bucket_name.lower())
    if storage_class:
      bucket_metadata.storageClass = storage_class
    if location:
      bucket_metadata.location = location
    if versioning_enabled:
      bucket_metadata.versioning = (
          apitools_messages.Bucket.VersioningValue(enabled=True))
    if retention_policy:
      bucket_metadata.retentionPolicy = retention_policy
    if bucket_policy_only:
      iam_config = apitools_messages.Bucket.IamConfigurationValue()
      iam_config.bucketPolicyOnly = iam_config.BucketPolicyOnlyValue()
      iam_config.bucketPolicyOnly.enabled = True
      bucket_metadata.iamConfiguration = iam_config

    # TODO: Add retry and exponential backoff.
    bucket = self.json_api.CreateBucket(bucket_name,
                                        metadata=bucket_metadata)
    # Add bucket to list of buckets to be cleaned up.
    # TODO: Clean up JSON buckets using JSON API.
    self.bucket_uris.append(
        boto.storage_uri('gs://%s' % bucket_name,
                         suppress_consec_slashes=False))
    for i in range(test_objects):
      self.CreateObjectJson(bucket_name=bucket_name,
                            object_name=self.MakeTempName('obj'),
                            contents='test %d' % i)
    return bucket
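The IAM settings are doubly nested: Bucket.IamConfigurationValue carries a BucketPolicyOnlyValue, which carries the boolean. The same construction as above, written out with the fully qualified nested classes the example's attribute access implies:

iam_config = apitools_messages.Bucket.IamConfigurationValue()
iam_config.bucketPolicyOnly = (
    apitools_messages.Bucket.IamConfigurationValue.BucketPolicyOnlyValue(
        enabled=True))
bucket_metadata = apitools_messages.Bucket(iamConfiguration=iam_config)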
Example #10
  def ApplyAclChanges(self, name_expansion_result, thread_state=None):
    """Applies the changes in self.changes to the provided URL.

    Args:
      name_expansion_result: NameExpansionResult describing the target object.
      thread_state: If present, gsutil Cloud API instance to apply the changes.
    """
    if thread_state:
      gsutil_api = thread_state
    else:
      gsutil_api = self.gsutil_api

    url = name_expansion_result.expanded_storage_url

    if url.IsBucket():
      bucket = gsutil_api.GetBucket(url.bucket_name, provider=url.scheme,
                                    fields=['acl', 'metageneration'])
      current_acl = bucket.acl
    elif url.IsObject():
      gcs_object = encoding.JsonToMessage(apitools_messages.Object,
                                          name_expansion_result.expanded_result)
      current_acl = gcs_object.acl
    if not current_acl:
      self._RaiseForAccessDenied(url)

    modification_count = 0
    for change in self.changes:
      modification_count += change.Execute(url, current_acl, 'acl', self.logger)
    if modification_count == 0:
      self.logger.info('No changes to %s', url)
      return

    try:
      if url.IsBucket():
        preconditions = Preconditions(meta_gen_match=bucket.metageneration)
        bucket_metadata = apitools_messages.Bucket(acl=current_acl)
        gsutil_api.PatchBucket(url.bucket_name, bucket_metadata,
                               preconditions=preconditions,
                               provider=url.scheme, fields=['id'])
      else:  # Object
        preconditions = Preconditions(gen_match=gcs_object.generation,
                                      meta_gen_match=gcs_object.metageneration)

        object_metadata = apitools_messages.Object(acl=current_acl)
        gsutil_api.PatchObjectMetadata(
            url.bucket_name, url.object_name, object_metadata,
            preconditions=preconditions, provider=url.scheme,
            generation=url.generation, fields=['id'])
    except BadRequestException as e:
      # Don't retry on bad requests, e.g. invalid email address.
      raise CommandException('Received bad request from server: %s' % str(e))
    except AccessDeniedException:
      self._RaiseForAccessDenied(url)

    self.logger.info('Updated ACL on %s', url)
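Note the asymmetry in the guards: a bucket patch can pin only the metageneration, while an object patch pins both the data generation and its metadata revision. A minimal sketch of the two Preconditions, reusing the bucket and gcs_object values fetched in the example above:

from gslib.cloud_api import Preconditions

# Bucket ACLs live in metadata only, so the metageneration suffices.
bucket_preconditions = Preconditions(meta_gen_match=bucket.metageneration)
# Object ACLs must also target the exact data generation that was read.
object_preconditions = Preconditions(gen_match=gcs_object.generation,
                                     meta_gen_match=gcs_object.metageneration)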
Example #11
    def RunCommand(self):
        """Command entry point for the mb command."""
        location = None
        storage_class = None
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-l':
                    location = a
                elif o == '-p':
                    # Project IDs are sent as header values when using gs and s3 XML APIs.
                    InsistAscii(
                        a, 'Invalid non-ASCII character found in project ID')
                    self.project_id = a
                elif o == '-c' or o == '-s':
                    storage_class = NormalizeStorageClass(a)

        bucket_metadata = apitools_messages.Bucket(location=location,
                                                   storageClass=storage_class)

        for bucket_url_str in self.args:
            bucket_url = StorageUrlFromString(bucket_url_str)
            if not bucket_url.IsBucket():
                raise CommandException(
                    'The mb command requires a URL that specifies a '
                    'bucket.\n"%s" is not valid.' % bucket_url)
            if (not BUCKET_NAME_RE.match(bucket_url.bucket_name)
                    or TOO_LONG_DNS_NAME_COMP.search(bucket_url.bucket_name)):
                raise InvalidUrlError('Invalid bucket name in URL "%s"' %
                                      bucket_url.bucket_name)

            self.logger.info('Creating %s...', bucket_url)
            # Pass storage_class param only if this is a GCS bucket. (In S3 the
            # storage class is specified on the key object.)
            try:
                self.gsutil_api.CreateBucket(bucket_url.bucket_name,
                                             project_id=self.project_id,
                                             metadata=bucket_metadata,
                                             provider=bucket_url.scheme)
            except BadRequestException as e:
                if (e.status == 400
                        and e.reason == 'DotfulBucketNameNotUnderTld'
                        and bucket_url.scheme == 'gs'):
                    bucket_name = bucket_url.bucket_name
                    final_comp = bucket_name[bucket_name.rfind('.') + 1:]
                    raise CommandException('\n'.join(
                        textwrap.wrap(
                            'Buckets with "." in the name must be valid DNS names. The bucket'
                            ' you are attempting to create (%s) is not a valid DNS name,'
                            ' because the final component (%s) is not currently a valid part'
                            ' of the top-level DNS tree.' %
                            (bucket_name, final_comp))))
                else:
                    raise

        return 0
Example #12
        def _ChLabelForBucket(blr):
            url = blr.storage_url
            self.logger.info('Setting label configuration on %s...', blr)

            labels_message = None
            # When performing a read-modify-write cycle, include metageneration to
            # avoid race conditions (supported for GS buckets only).
            metageneration = None
            if (self.gsutil_api.GetApiSelector(
                    url.scheme) == ApiSelector.JSON):
                # The JSON API's PATCH semantics allow us to skip read-modify-write,
                # with the exception of one edge case - attempting to delete a
                # nonexistent label returns an error iff no labels previously existed
                corrected_changes = self.label_changes
                if self.num_deletions:
                    (_, bucket_metadata) = self.GetSingleBucketUrlFromArg(
                        url.url_string,
                        bucket_fields=['labels', 'metageneration'])
                    if not bucket_metadata.labels:
                        metageneration = bucket_metadata.metageneration
                        # Remove each change that would try to delete a nonexistent key.
                        corrected_changes = dict(
                            (k, v)
                            for k, v in six.iteritems(self.label_changes) if v)
                labels_message = LabelTranslation.DictToMessage(
                    corrected_changes)
            else:  # ApiSelector.XML
                # Perform a read-modify-write cycle so that we can specify which
                # existing labels need to be deleted.
                (_, bucket_metadata) = self.GetSingleBucketUrlFromArg(
                    url.url_string, bucket_fields=['labels', 'metageneration'])
                metageneration = bucket_metadata.metageneration

                label_json = {}
                if bucket_metadata.labels:
                    label_json = json.loads(
                        LabelTranslation.JsonFromMessage(
                            bucket_metadata.labels))
                # Modify label_json such that all specified labels are added
                # (overwriting old labels if necessary) and all specified deletions
                # are removed from label_json if already present.
                for key, value in six.iteritems(self.label_changes):
                    if not value and key in label_json:
                        del label_json[key]
                    else:
                        label_json[key] = value
                labels_message = LabelTranslation.DictToMessage(label_json)

            preconditions = Preconditions(meta_gen_match=metageneration)
            bucket_metadata = apitools_messages.Bucket(labels=labels_message)
            self.gsutil_api.PatchBucket(url.bucket_name,
                                        bucket_metadata,
                                        preconditions=preconditions,
                                        provider=url.scheme,
                                        fields=['id'])
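Under the JSON API's PATCH semantics, deletions ride along in the same request: a label key mapped to None is removed server-side, which is why no prior read is needed outside the nonexistent-key edge case handled above. A hedged sketch of building the labels message (import path varies across gsutil versions):

from gslib.utils.translation_helper import LabelTranslation

label_changes = {'env': 'prod',    # added, or overwritten if present
                 'old-key': None}  # None deletes the key under JSON PATCH
labels_message = LabelTranslation.DictToMessage(label_changes)
bucket_metadata = apitools_messages.Bucket(labels=labels_message)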
Example #13
  def _ClearRetention(self):
    """Clears the retention period on one or more buckets."""
    retention_policy = (apitools_messages.Bucket.RetentionPolicyValue(
        retentionPeriod=None))
    log_msg_template = 'Clearing Retention Policy on %s...'
    bucket_metadata_update = apitools_messages.Bucket(
        retentionPolicy=retention_policy)
    url_args = self.args
    self.BucketUpdateFunc(url_args,
                          bucket_metadata_update,
                          fields=['id', 'retentionPolicy'],
                          log_msg_template=log_msg_template)
    return 0
Example #14
  def test_satisfies_pzs_is_displayed_if_present(self, mock_wildcard):
    bucket_uri = self.CreateBucket(bucket_name='foo')
    bucket_metadata = apitools_messages.Bucket(name='foo', satisfiesPZS=True)
    bucket_uri.root_object = bucket_metadata
    bucket_uri.url_string = 'foo'
    bucket_uri.storage_url = mock.Mock()

    mock_wildcard.return_value.IterBuckets.return_value = [bucket_uri]
    # MockKey doesn't support hash_algs, so the MD5 will not match.
    with SetBotoConfigForTest([('GSUtil', 'check_hashes', 'never')]):
      stdout = self.RunCommand('ls', ['-Lb', suri(bucket_uri)],
                               return_stdout=True)
    self.assertRegex(stdout, 'Satisfies PZS:\t\t\tTrue')
Example #15
    def RunCommand(self):
        """Command entry point for the mb command."""
        location = None
        storage_class = None
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-l':
                    location = a
                elif o == '-p':
                    self.project_id = a
                elif o == '-c':
                    storage_class = self._Normalize_Storage_Class(a)

        bucket_metadata = apitools_messages.Bucket(location=location,
                                                   storageClass=storage_class)

        for bucket_uri_str in self.args:
            bucket_uri = StorageUrlFromString(bucket_uri_str)
            if not bucket_uri.IsBucket():
                raise CommandException(
                    'The mb command requires a URI that specifies a '
                    'bucket.\n"%s" is not valid.' % bucket_uri)

            self.logger.info('Creating %s...', bucket_uri)
            # Pass storage_class param only if this is a GCS bucket. (In S3 the
            # storage class is specified on the key object.)
            try:
                self.gsutil_api.CreateBucket(bucket_uri.bucket_name,
                                             project_id=self.project_id,
                                             metadata=bucket_metadata,
                                             provider=bucket_uri.scheme)
            except BadRequestException as e:
                if (e.status == 400
                        and e.reason == 'DotfulBucketNameNotUnderTld'
                        and bucket_uri.scheme == 'gs'):
                    bucket_name = bucket_uri.bucket_name
                    final_comp = bucket_name[bucket_name.rfind('.') + 1:]
                    raise CommandException('\n'.join(
                        textwrap.wrap(
                            'Buckets with "." in the name must be valid DNS names. The bucket'
                            ' you are attempting to create (%s) is not a valid DNS name,'
                            ' because the final component (%s) is not currently a valid part'
                            ' of the top-level DNS tree.' %
                            (bucket_name, final_comp))))
                else:
                    raise

        return 0
Example #16
  def _SetRetention(self):
    """Set retention retention_period on one or more buckets."""

    seconds = RetentionInSeconds(self.args[0])
    retention_policy = (apitools_messages.Bucket.RetentionPolicyValue(
        retentionPeriod=seconds))

    log_msg_template = 'Setting Retention Policy on %s...'
    bucket_metadata_update = apitools_messages.Bucket(
        retentionPolicy=retention_policy)
    url_args = self.args[1:]
    self.BucketUpdateFunc(url_args,
                          bucket_metadata_update,
                          fields=['id', 'retentionPolicy'],
                          log_msg_template=log_msg_template)
    return 0
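Setting and clearing (Example #13) are the same patch with different payloads: a populated RetentionPolicyValue installs the policy, and one whose retentionPeriod is None removes it. Side by side:

set_policy = apitools_messages.Bucket.RetentionPolicyValue(
    retentionPeriod=3600)  # period in seconds
clear_policy = apitools_messages.Bucket.RetentionPolicyValue(
    retentionPeriod=None)  # explicit empty policy clears retention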
Example #17
        def _SetLabelForBucket(blr):
            url = blr.storage_url
            self.logger.info('Setting label configuration on %s...', blr)

            if url.scheme == 's3':  # Uses only XML.
                self.gsutil_api.XmlPassThroughSetTagging(label_text,
                                                         url,
                                                         provider=url.scheme)
            else:  # Must be a 'gs://' bucket.
                labels_message = None
                # When performing a read-modify-write cycle, include metageneration to
                # avoid race conditions (supported for GS buckets only).
                metageneration = None
                new_label_json = json.loads(label_text)
                if (self.gsutil_api.GetApiSelector(
                        url.scheme) == ApiSelector.JSON):
                    # Perform a read-modify-write so that we can specify which
                    # existing labels need to be deleted.
                    _, bucket_metadata = self.GetSingleBucketUrlFromArg(
                        url.url_string,
                        bucket_fields=['labels', 'metageneration'])
                    metageneration = bucket_metadata.metageneration
                    label_json = {}
                    if bucket_metadata.labels:
                        label_json = json.loads(
                            LabelTranslation.JsonFromMessage(
                                bucket_metadata.labels))
                    # Set all old keys' values to None; this will delete each key that
                    # is not included in the new set of labels.
                    merged_labels = dict(
                        (key, None) for key, _ in six.iteritems(label_json))
                    merged_labels.update(new_label_json)
                    labels_message = LabelTranslation.DictToMessage(
                        merged_labels)
                else:  # ApiSelector.XML
                    # No need to read-modify-write with the XML API.
                    labels_message = LabelTranslation.DictToMessage(
                        new_label_json)

                preconditions = Preconditions(meta_gen_match=metageneration)
                bucket_metadata = apitools_messages.Bucket(
                    labels=labels_message)
                self.gsutil_api.PatchBucket(url.bucket_name,
                                            bucket_metadata,
                                            preconditions=preconditions,
                                            provider=url.scheme,
                                            fields=['id'])
Example #18
    def _SetPublicAccessPrevention(self, blr, setting_arg):
        """Sets the Public Access Prevention setting for a bucket enforced or inherited."""
        bucket_url = blr.storage_url

        iam_config = IamConfigurationValue()
        iam_config.publicAccessPrevention = setting_arg

        bucket_metadata = apitools_messages.Bucket(iamConfiguration=iam_config)

        print('Setting Public Access Prevention %s for %s' %
              (setting_arg, str(bucket_url).rstrip('/')))

        self.gsutil_api.PatchBucket(bucket_url.bucket_name,
                                    bucket_metadata,
                                    fields=['iamConfiguration'],
                                    provider=bucket_url.scheme)
        return 0
Example #19
  def _Disable(self):
    """Disables logging configuration for a bucket."""
    # Iterate over URLs, expanding wildcards, and disabling logging on each.
    some_matched = False
    for url_str in self.args:
      bucket_iter = self.GetBucketUrlIterFromArg(url_str, bucket_fields=['id'])
      for blr in bucket_iter:
        url = blr.storage_url
        some_matched = True
        self.logger.info('Disabling logging on %s...', blr)
        logging = apitools_messages.Bucket.LoggingValue()

        bucket_metadata = apitools_messages.Bucket(logging=logging)
        self.gsutil_api.PatchBucket(url.bucket_name, bucket_metadata,
                                    provider=url.scheme, fields=['id'])
    if not some_matched:
      raise CommandException(NO_URLS_MATCHED_TARGET % list(self.args))
    return 0
Example #20
    def _Enable(self):
        """Enables logging configuration for a bucket."""
        # Disallow multi-provider 'logging set on' calls, because the schemas
        # differ.
        if not UrlsAreForSingleProvider(self.args):
            raise CommandException(
                '"logging set on" command spanning providers not '
                'allowed.')
        target_bucket_url = None
        target_prefix = None
        for opt, opt_arg in self.sub_opts:
            if opt == '-b':
                target_bucket_url = StorageUrlFromString(opt_arg)
            if opt == '-o':
                target_prefix = opt_arg

        if not target_bucket_url:
            raise CommandException(
                '"logging set on" requires \'-b <log_bucket>\' '
                'option')
        if not target_bucket_url.IsBucket():
            raise CommandException('-b option must specify a bucket URL.')

        # Iterate over URLs, expanding wildcards and setting logging on each.
        some_matched = False
        for url_str in self.args:
            bucket_iter = self.GetBucketUrlIterFromArg(url_str,
                                                       bucket_fields=['id'])
            for blr in bucket_iter:
                url = blr.storage_url
                some_matched = True
                self.logger.info('Enabling logging on %s...', blr)
                logging = apitools_messages.Bucket.LoggingValue(
                    logBucket=target_bucket_url.bucket_name,
                    logObjectPrefix=target_prefix or url.bucket_name)

                bucket_metadata = apitools_messages.Bucket(logging=logging)
                self.gsutil_api.PatchBucket(url.bucket_name,
                                            bucket_metadata,
                                            provider=url.scheme,
                                            fields=['id'])
        if not some_matched:
            raise CommandException(NO_URLS_MATCHED_TARGET % list(self.args))
        return 0
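Enabling (Example #20) and disabling (Example #19) differ only in the LoggingValue payload: a populated message turns logging on, an empty one clears the configuration. A standalone sketch (bucket names illustrative):

enable_logging = apitools_messages.Bucket.LoggingValue(
    logBucket='my-log-bucket',           # where log objects are written
    logObjectPrefix='my-source-bucket')  # prefix for log object names
disable_logging = apitools_messages.Bucket.LoggingValue()  # empty value disables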
Example #21
    def _SetCors(self):
        """Sets CORS configuration on a Google Cloud Storage bucket."""
        cors_arg = self.args[0]
        url_args = self.args[1:]
        # Disallow multi-provider 'cors set' requests.
        if not UrlsAreForSingleProvider(url_args):
            raise CommandException(
                '"%s" command spanning providers not allowed.' %
                self.command_name)

        # Open, read and parse file containing JSON document.
        cors_file = open(cors_arg, 'r')
        cors_txt = cors_file.read()
        cors_file.close()

        self.api = self.gsutil_api.GetApiSelector(
            StorageUrlFromString(url_args[0]).scheme)

        cors = CorsTranslation.JsonCorsToMessageEntries(cors_txt)
        if not cors:
            cors = REMOVE_CORS_CONFIG

        # Iterate over URLs, expanding wildcards and setting the CORS on each.
        some_matched = False
        for url_str in url_args:
            bucket_iter = self.GetBucketUrlIterFromArg(url_str,
                                                       bucket_fields=['id'])
            for blr in bucket_iter:
                url = blr.storage_url
                some_matched = True
                self.logger.info('Setting CORS on %s...', blr)
                if url.scheme == 's3':
                    self.gsutil_api.XmlPassThroughSetCors(cors_txt,
                                                          url,
                                                          provider=url.scheme)
                else:
                    bucket_metadata = apitools_messages.Bucket(cors=cors)
                    self.gsutil_api.PatchBucket(url.bucket_name,
                                                bucket_metadata,
                                                provider=url.scheme,
                                                fields=['id'])
        if not some_matched:
            raise CommandException('No URLs matched')
        return 0
Example #22
  def _SetBucketPolicyOnly(self, blr, setting_arg):
    """Sets the Bucket Policy Only setting for a bucket on or off."""
    self._ValidateBucketListingRefAndReturnBucketName(blr)
    bucket_url = blr.storage_url

    iam_config = IamConfigurationValue()
    iam_config.bucketPolicyOnly = BucketPolicyOnlyValue()
    iam_config.bucketPolicyOnly.enabled = (setting_arg == 'on')

    bucket_metadata = apitools_messages.Bucket(iamConfiguration=iam_config)

    setting_verb = 'Enabling' if setting_arg == 'on' else 'Disabling'
    print('%s Bucket Policy Only for %s...' %
          (setting_verb, str(bucket_url).rstrip('/')))

    self.gsutil_api.PatchBucket(bucket_url.bucket_name,
                                bucket_metadata,
                                fields=['iamConfiguration'],
                                provider=bucket_url.scheme)
    return 0
Example #23
    def CreateBucketJson(self,
                         bucket_name=None,
                         test_objects=0,
                         storage_class=None,
                         location=None):
        """Creates a test bucket using the JSON API.

        The bucket and all of its contents will be deleted after the test.

        Args:
          bucket_name: Create the bucket with this name. If not provided, a
                       temporary test bucket name is constructed.
          test_objects: The number of objects that should be placed in the bucket.
                        Defaults to 0.
          storage_class: Storage class to use. If not provided we use standard.
          location: Location to use.

        Returns:
          Apitools Bucket for the created bucket.
        """
        bucket_name = bucket_name or self.MakeTempName('bucket')
        bucket_metadata = apitools_messages.Bucket(name=bucket_name.lower())
        if storage_class:
            bucket_metadata.storageClass = storage_class
        if location:
            bucket_metadata.location = location

        # TODO: Add retry and exponential backoff.
        bucket = self.json_api.CreateBucket(bucket_name.lower(),
                                            metadata=bucket_metadata)
        # Add bucket to list of buckets to be cleaned up.
        # TODO: Clean up JSON buckets using JSON API.
        self.bucket_uris.append(
            boto.storage_uri('gs://%s' % (bucket_name.lower()),
                             suppress_consec_slashes=False))
        for i in range(test_objects):
            self.CreateObjectJson(bucket_name=bucket_name,
                                  object_name=self.MakeTempName('obj'),
                                  contents='test %d' % i)
        return bucket
Example #24
    def _SetUbla(self, blr, setting_arg):
        """Sets the Uniform bucket-level access setting for a bucket on or off."""
        self._ValidateBucketListingRefAndReturnBucketName(blr)
        bucket_url = blr.storage_url

        iam_config = IamConfigurationValue()
        # TODO(mynameisrafe): Replace bucketPolicyOnly with uniformBucketLevelAccess
        # when the property is live.
        iam_config.bucketPolicyOnly = uniformBucketLevelAccessValue()
        iam_config.bucketPolicyOnly.enabled = (setting_arg == 'on')

        bucket_metadata = apitools_messages.Bucket(iamConfiguration=iam_config)

        setting_verb = 'Enabling' if setting_arg == 'on' else 'Disabling'
        print('%s Uniform bucket-level access for %s...' %
              (setting_verb, str(bucket_url).rstrip('/')))

        self.gsutil_api.PatchBucket(bucket_url.bucket_name,
                                    bucket_metadata,
                                    fields=['iamConfiguration'],
                                    provider=bucket_url.scheme)
        return 0
Example #25
    def RunCommand(self):
        """Command entry point for the mb command."""
        bucket_policy_only = None
        location = None
        storage_class = None
        seconds = None
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '-l':
                    location = a
                elif o == '-p':
                    # Project IDs are sent as header values when using gs and s3 XML APIs.
                    InsistAscii(
                        a, 'Invalid non-ASCII character found in project ID')
                    self.project_id = a
                elif o == '-c' or o == '-s':
                    storage_class = NormalizeStorageClass(a)
                elif o == '--retention':
                    seconds = RetentionInSeconds(a)
                elif o == '-b':
                    if self.gsutil_api.GetApiSelector(
                            'gs') != ApiSelector.JSON:
                        raise CommandException(
                            'The -b <on|off> option '
                            'can only be used with the JSON API')
                    InsistOnOrOff(
                        a, 'Only on and off values allowed for -b option')
                    bucket_policy_only = (a == 'on')

        bucket_metadata = apitools_messages.Bucket(location=location,
                                                   storageClass=storage_class)
        if bucket_policy_only:
            bucket_metadata.iamConfiguration = IamConfigurationValue()
            iam_config = bucket_metadata.iamConfiguration
            iam_config.bucketPolicyOnly = BucketPolicyOnlyValue()
            iam_config.bucketPolicyOnly.enabled = bucket_policy_only

        for bucket_url_str in self.args:
            bucket_url = StorageUrlFromString(bucket_url_str)
            if seconds is not None:
                if bucket_url.scheme != 'gs':
                    raise CommandException(
                        'Retention policy can only be specified for '
                        'GCS buckets.')
                retention_policy = (
                    apitools_messages.Bucket.RetentionPolicyValue(
                        retentionPeriod=seconds))
                bucket_metadata.retentionPolicy = retention_policy

            if not bucket_url.IsBucket():
                raise CommandException(
                    'The mb command requires a URL that specifies a '
                    'bucket.\n"%s" is not valid.' % bucket_url)
            if (not BUCKET_NAME_RE.match(bucket_url.bucket_name)
                    or TOO_LONG_DNS_NAME_COMP.search(bucket_url.bucket_name)):
                raise InvalidUrlError('Invalid bucket name in URL "%s"' %
                                      bucket_url.bucket_name)

            self.logger.info('Creating %s...', bucket_url)
            # Pass storage_class param only if this is a GCS bucket. (In S3 the
            # storage class is specified on the key object.)
            try:
                self.gsutil_api.CreateBucket(bucket_url.bucket_name,
                                             project_id=self.project_id,
                                             metadata=bucket_metadata,
                                             provider=bucket_url.scheme)
            except BadRequestException as e:
                if (e.status == 400
                        and e.reason == 'DotfulBucketNameNotUnderTld'
                        and bucket_url.scheme == 'gs'):
                    bucket_name = bucket_url.bucket_name
                    final_comp = bucket_name[bucket_name.rfind('.') + 1:]
                    raise CommandException('\n'.join(
                        textwrap.wrap(
                            'Buckets with "." in the name must be valid DNS names. The bucket'
                            ' you are attempting to create (%s) is not a valid DNS name,'
                            ' because the final component (%s) is not currently a valid part'
                            ' of the top-level DNS tree.' %
                            (bucket_name, final_comp))))
                else:
                    raise

        return 0
Example #26
  def ApplyAclChanges(self, name_expansion_result, thread_state=None):
    """Applies the changes in self.changes to the provided URL.

    Args:
      name_expansion_result: NameExpansionResult describing the target object.
      thread_state: If present, gsutil Cloud API instance to apply the changes.
    """
    if thread_state:
      gsutil_api = thread_state
    else:
      gsutil_api = self.gsutil_api

    url = name_expansion_result.expanded_storage_url
    if url.IsBucket():
      bucket = gsutil_api.GetBucket(url.bucket_name, provider=url.scheme,
                                    fields=['acl', 'metageneration'])
      current_acl = bucket.acl
    elif url.IsObject():
      gcs_object = encoding.JsonToMessage(apitools_messages.Object,
                                          name_expansion_result.expanded_result)
      current_acl = gcs_object.acl

    if not current_acl:
      self._RaiseForAccessDenied(url)
    if self._ApplyAclChangesAndReturnChangeCount(url, current_acl) == 0:
      self.logger.info('No changes to %s', url)
      return

    try:
      if url.IsBucket():
        preconditions = Preconditions(meta_gen_match=bucket.metageneration)
        bucket_metadata = apitools_messages.Bucket(acl=current_acl)
        gsutil_api.PatchBucket(url.bucket_name, bucket_metadata,
                               preconditions=preconditions,
                               provider=url.scheme, fields=['id'])
      else:  # Object
        preconditions = Preconditions(gen_match=gcs_object.generation,
                                      meta_gen_match=gcs_object.metageneration)
        object_metadata = apitools_messages.Object(acl=current_acl)
        try:
          gsutil_api.PatchObjectMetadata(
              url.bucket_name, url.object_name, object_metadata,
              preconditions=preconditions, provider=url.scheme,
              generation=url.generation, fields=['id'])
        except PreconditionException as e:
          # Special retry case where we want to do an additional step, the read
          # of the read-modify-write cycle, to fetch the correct object
          # metadata before reattempting ACL changes.
          self._RefetchObjectMetadataAndApplyAclChanges(url, gsutil_api)

      self.logger.info('Updated ACL on %s', url)
    except BadRequestException as e:
      # Don't retry on bad requests, e.g. invalid email address.
      raise CommandException('Received bad request from server: %s' % str(e))
    except AccessDeniedException:
      self._RaiseForAccessDenied(url)
    except PreconditionException as e:
      # For objects, retry attempts should have already been handled.
      if url.IsObject():
        raise CommandException(str(e))
      # For buckets, raise PreconditionException and continue to next retry.
      raise e
Example #27
    def RunCommand(self):
        """Command entry point for the mb command."""
        autoclass = False
        bucket_policy_only = None
        kms_key = None
        location = None
        storage_class = None
        seconds = None
        public_access_prevention = None
        rpo = None
        json_only_flags_in_command = []
        if self.sub_opts:
            for o, a in self.sub_opts:
                if o == '--autoclass':
                    autoclass = True
                    json_only_flags_in_command.append(o)
                elif o == '-k':
                    kms_key = a
                    ValidateCMEK(kms_key)
                    json_only_flags_in_command.append(o)
                elif o == '-l':
                    location = a
                elif o == '-p':
                    # Project IDs are sent as header values when using gs and s3 XML APIs.
                    InsistAscii(
                        a, 'Invalid non-ASCII character found in project ID')
                    self.project_id = a
                elif o == '-c' or o == '-s':
                    storage_class = NormalizeStorageClass(a)
                elif o == '--retention':
                    seconds = RetentionInSeconds(a)
                elif o == '--rpo':
                    rpo = a.strip()
                    if rpo not in VALID_RPO_VALUES:
                        raise CommandException(
                            'Invalid value for --rpo. Must be one of: {},'
                            ' provided: {}'.format(VALID_RPO_VALUES_STRING, a))
                    json_only_flags_in_command.append(o)
                elif o == '-b':
                    InsistOnOrOff(
                        a, 'Only on and off values allowed for -b option')
                    bucket_policy_only = (a == 'on')
                    json_only_flags_in_command.append(o)
                elif o == '--pap':
                    public_access_prevention = a
                    json_only_flags_in_command.append(o)

        bucket_metadata = apitools_messages.Bucket(location=location,
                                                   rpo=rpo,
                                                   storageClass=storage_class)
        if autoclass:
            bucket_metadata.autoclass = apitools_messages.Bucket.AutoclassValue(
                enabled=autoclass)
        if bucket_policy_only or public_access_prevention:
            bucket_metadata.iamConfiguration = IamConfigurationValue()
            iam_config = bucket_metadata.iamConfiguration
            if bucket_policy_only:
                iam_config.bucketPolicyOnly = BucketPolicyOnlyValue()
                iam_config.bucketPolicyOnly.enabled = bucket_policy_only
            if public_access_prevention:
                iam_config.publicAccessPrevention = public_access_prevention

        if kms_key:
            encryption = apitools_messages.Bucket.EncryptionValue()
            encryption.defaultKmsKeyName = kms_key
            bucket_metadata.encryption = encryption

        for bucket_url_str in self.args:
            bucket_url = StorageUrlFromString(bucket_url_str)
            if seconds is not None:
                if bucket_url.scheme != 'gs':
                    raise CommandException(
                        'Retention policy can only be specified for '
                        'GCS buckets.')
                retention_policy = (
                    apitools_messages.Bucket.RetentionPolicyValue(
                        retentionPeriod=seconds))
                bucket_metadata.retentionPolicy = retention_policy

            if json_only_flags_in_command and self.gsutil_api.GetApiSelector(
                    bucket_url.scheme) != ApiSelector.JSON:
                raise CommandException(
                    'The {} option(s) can only be used for GCS'
                    ' Buckets with the JSON API'.format(
                        ', '.join(json_only_flags_in_command)))

            if not bucket_url.IsBucket():
                raise CommandException(
                    'The mb command requires a URL that specifies a '
                    'bucket.\n"%s" is not valid.' % bucket_url)
            if (not BUCKET_NAME_RE.match(bucket_url.bucket_name)
                    or TOO_LONG_DNS_NAME_COMP.search(bucket_url.bucket_name)):
                raise InvalidUrlError('Invalid bucket name in URL "%s"' %
                                      bucket_url.bucket_name)

            self.logger.info('Creating %s...', bucket_url)
            # Pass storage_class param only if this is a GCS bucket. (In S3 the
            # storage class is specified on the key object.)
            try:
                self.gsutil_api.CreateBucket(bucket_url.bucket_name,
                                             project_id=self.project_id,
                                             metadata=bucket_metadata,
                                             provider=bucket_url.scheme)
            except AccessDeniedException as e:
                message = e.reason
                if 'key' in message:
                    # This will print the error reason and append the following as a
                    # suggested next step:
                    #
                    # To authorize, run:
                    #   gsutil kms authorize \
                    #     -k <kms_key> \
                    #     -p <project_id>
                    message += ' To authorize, run:\n  gsutil kms authorize'
                    message += ' \\\n    -k %s' % kms_key
                    if (self.project_id):
                        message += ' \\\n    -p %s' % self.project_id
                    raise CommandException(message)
                else:
                    raise

            except BadRequestException as e:
                if (e.status == 400
                        and e.reason == 'DotfulBucketNameNotUnderTld'
                        and bucket_url.scheme == 'gs'):
                    bucket_name = bucket_url.bucket_name
                    final_comp = bucket_name[bucket_name.rfind('.') + 1:]
                    raise CommandException('\n'.join(
                        textwrap.wrap(
                            'Buckets with "." in the name must be valid DNS names. The bucket'
                            ' you are attempting to create (%s) is not a valid DNS name,'
                            ' because the final component (%s) is not currently a valid part'
                            ' of the top-level DNS tree.' %
                            (bucket_name, final_comp))))
                else:
                    raise

        return 0
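The CMEK default used above is one more single-field nested message; a standalone sketch with an illustrative key name:

encryption = apitools_messages.Bucket.EncryptionValue()
encryption.defaultKmsKeyName = (
    'projects/my-project/locations/us/keyRings/my-ring/cryptoKeys/my-key')
bucket_metadata = apitools_messages.Bucket(encryption=encryption)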