Example #1
0
    def _SetIamHelperInternal(self, storage_url, policy, thread_state=None):
        """Applies an IAM policy to one fully-resolved bucket or object URL.

        Args:
          storage_url: CloudUrl without wildcards naming the target bucket
              or object.
          policy: Policy object to set on the bucket / object.
          thread_state: CloudApiDelegator passed from
              command.WorkerThread.__init__() when the -m flag is used;
              None means self.gsutil_api is used instead.

        Raises:
          ServiceException: propagated from the API call on HTTP errors.
        """
        # In the single-threaded case command.WorkerThread does not populate
        # thread_state, so resolve the Cloud API instance explicitly here.
        api = GetCloudApiInstance(self, thread_state=thread_state)

        if not storage_url.IsBucket():
            api.SetObjectIamPolicy(storage_url.bucket_name,
                                   storage_url.object_name,
                                   policy,
                                   generation=storage_url.generation,
                                   provider=storage_url.scheme)
        else:
            api.SetBucketIamPolicy(storage_url.bucket_name,
                                   policy,
                                   provider=storage_url.scheme)
Example #2
0
    def GetIamHelper(self, storage_url, thread_state=None):
        """Fetches the IAM policy of one resolved bucket or object URL.

        Args:
          storage_url: CloudUrl without wildcards naming the target bucket
              or object.
          thread_state: CloudApiDelegator passed from
              command.WorkerThread.__init__() when the global -m flag is
              used; None means self.gsutil_api is used instead.

        Returns:
          Policy instance.
        """
        api = GetCloudApiInstance(self, thread_state=thread_state)

        if not storage_url.IsBucket():
            return api.GetObjectIamPolicy(
                storage_url.bucket_name,
                storage_url.object_name,
                generation=storage_url.generation,
                provider=storage_url.scheme,
                fields=['bindings', 'etag'],
            )
        return api.GetBucketIamPolicy(
            storage_url.bucket_name,
            provider=storage_url.scheme,
            fields=['bindings', 'etag'],
        )
Example #3
0
    def _DeleteHmacKey(self, thread_state=None):
        """Deletes the HMAC key named by the access ID in self.args."""
        # The access ID is required as the sole positional argument.
        if not self.args:
            raise _AccessIdException(self.command_name, self.action_subcommand,
                                     _DELETE_SYNOPSIS)
        access_id = self.args[0]

        api = GetCloudApiInstance(self, thread_state=thread_state)
        api.DeleteHmacKey(self.project_id, access_id, provider='gs')
Example #4
0
    def RemoveFunc(self, name_expansion_result, thread_state=None):
        """Deletes one expanded object URL and reports a status message."""
        api = GetCloudApiInstance(self, thread_state=thread_state)

        target_url = name_expansion_result.expanded_storage_url
        self.logger.info('Removing %s...', target_url)
        api.DeleteObject(target_url.bucket_name,
                         target_url.object_name,
                         preconditions=self.preconditions,
                         generation=target_url.generation,
                         provider=target_url.scheme)
        # Notify the status queue so progress reporting stays accurate.
        _PutToQueueWithTimeout(api.status_queue,
                               MetadataMessage(message_time=time.time()))
Example #5
0
  def _GetHmacKey(self, thread_state=None):
    """Fetches and prints HMAC key metadata for the given access ID."""
    # The access ID is required as the sole positional argument.
    if not self.args:
      raise _AccessIdException(self.command_name, self.action_subcommand,
                               _GET_SYNOPSIS)
    access_id = self.args[0]

    api = GetCloudApiInstance(self, thread_state=thread_state)
    response = api.GetHmacKey(self.project_id, access_id, provider='gs')
    print(_KeyMetadataOutput(response))
Example #6
0
    def SetMetadataFunc(self, name_expansion_result, thread_state=None):
        """Patches the requested metadata changes onto one object.

        Args:
          name_expansion_result: NameExpansionResult describing target object.
          thread_state: gsutil Cloud API instance to use for the operation.
        """
        gsutil_api = GetCloudApiInstance(self, thread_state=thread_state)

        target_url = name_expansion_result.expanded_storage_url
        self.logger.info('Setting metadata on %s...', target_url)

        existing_metadata = encoding.JsonToMessage(
            apitools_messages.Object, name_expansion_result.expanded_result)

        # Default unspecified preconditions to the object's current
        # generation/metageneration so the patch is race-safe.
        preconditions = Preconditions(
            gen_match=self.preconditions.gen_match,
            meta_gen_match=self.preconditions.meta_gen_match)
        if preconditions.gen_match is None:
            preconditions.gen_match = existing_metadata.generation
        if preconditions.meta_gen_match is None:
            preconditions.meta_gen_match = existing_metadata.metageneration

        # Patch handles the patch semantics for most metadata, but we need to
        # merge the custom metadata field manually.
        patch_obj_metadata = ObjectMetadataFromHeaders(self.metadata_change)

        api = gsutil_api.GetApiSelector(provider=target_url.scheme)
        if api == ApiSelector.JSON:
            # JSON needs the complete custom-metadata set; XML only patches
            # through the custom metadata that has changed.
            CopyObjectMetadata(patch_obj_metadata,
                               existing_metadata,
                               override=True)
            patch_obj_metadata = existing_metadata
            # The patch body must not carry generation or metageneration.
            patch_obj_metadata.generation = None
            patch_obj_metadata.metageneration = None

        gsutil_api.PatchObjectMetadata(target_url.bucket_name,
                                       target_url.object_name,
                                       patch_obj_metadata,
                                       generation=target_url.generation,
                                       preconditions=preconditions,
                                       provider=target_url.scheme,
                                       fields=['id'])
        _PutToQueueWithTimeout(gsutil_api.status_queue,
                               MetadataMessage(message_time=time.time()))
Example #7
0
  def _CreateHmacKey(self, thread_state=None):
    """Creates an HMAC key for the service account given in self.args."""
    # The service account email is required as the last positional argument.
    if not self.args:
      raise CommandException(
          ('%s %s requires a service account to be specified as the '
           'last argument.\n%s') %
          (self.command_name, self.action_subcommand, _CREATE_SYNOPSIS))
    self.service_account_email = self.args[0]

    api = GetCloudApiInstance(self, thread_state=thread_state)

    response = api.CreateHmacKey(self.project_id,
                                 self.service_account_email,
                                 provider='gs')

    # The secret is only available at creation time, so print it now.
    print('%-12s %s' % ('Access ID:', response.metadata.accessId))
    print('%-12s %s' % ('Secret:', response.secret))
Example #8
0
  def ObjectUpdateMetadataFunc(self,
                               patch_obj_metadata,
                               log_template,
                               name_expansion_result,
                               thread_state=None):
    """Applies a metadata patch to one object via PatchObjectMetadata.

    Args:
      patch_obj_metadata: Metadata changes that should be applied to the
                          existing object.
      log_template: The log template that should be printed for each object.
      name_expansion_result: NameExpansionResult describing target object.
      thread_state: gsutil Cloud API instance to use for the operation.
    """
    api = GetCloudApiInstance(self, thread_state=thread_state)

    target_url = name_expansion_result.expanded_storage_url
    self.logger.info(log_template, target_url)

    current_metadata = encoding.JsonToMessage(
        apitools_messages.Object, name_expansion_result.expanded_result)

    # Default unspecified preconditions to the object's current
    # generation/metageneration so the patch is race-safe.
    preconditions = Preconditions(
        gen_match=self.preconditions.gen_match,
        meta_gen_match=self.preconditions.meta_gen_match)
    if preconditions.gen_match is None:
      preconditions.gen_match = current_metadata.generation
    if preconditions.meta_gen_match is None:
      preconditions.meta_gen_match = current_metadata.metageneration

    api.PatchObjectMetadata(target_url.bucket_name,
                            target_url.object_name,
                            patch_obj_metadata,
                            generation=target_url.generation,
                            preconditions=preconditions,
                            provider=target_url.scheme,
                            fields=['id'])
    PutToQueueWithTimeout(api.status_queue,
                          MetadataMessage(message_time=time.time()))
    def RemoveFunc(self, name_expansion_result, thread_state=None):
        """Deletes one expanded object, tolerating a harmless retry 404.

        Args:
          name_expansion_result: NameExpansionResult describing the object
              to delete.
          thread_state: gsutil Cloud API instance to use for the operation,
              or None to use self.gsutil_api.
        """
        gsutil_api = GetCloudApiInstance(self, thread_state=thread_state)

        exp_src_url = name_expansion_result.expanded_storage_url
        self.logger.info('Removing %s...', exp_src_url)
        try:
            gsutil_api.DeleteObject(exp_src_url.bucket_name,
                                    exp_src_url.object_name,
                                    preconditions=self.preconditions,
                                    generation=exp_src_url.generation,
                                    provider=exp_src_url.scheme)
        # The exception object itself is not needed, so no "as e" binding.
        except NotFoundException:
            # DeleteObject will sometimes return a 504 (DEADLINE_EXCEEDED) when
            # the operation was in fact successful. When a retry is attempted in
            # these cases, it will fail with a (harmless) 404. The 404 is harmless
            # since it really just means the file was already deleted, which is
            # what we want anyway. Here we simply downgrade the message to info
            # rather than error and correct the command-level failure total.
            self.logger.info('Cannot find %s', exp_src_url)
            DecrementFailureCount()
        _PutToQueueWithTimeout(gsutil_api.status_queue,
                               MetadataMessage(message_time=time.time()))
Example #10
0
    def _ListHmacKeys(self, thread_state=None):
        """Prints HMAC keys for a project or service account."""
        # This subcommand takes no positional arguments.
        if self.args:
            raise CommandException(
                '%s %s received unexpected arguments.\n%s' %
                (self.command_name, self.action_subcommand, _LIST_SYNOPSIS))

        api = GetCloudApiInstance(self, thread_state=thread_state)

        keys = api.ListHmacKeys(self.project_id,
                                self.service_account_email,
                                self.show_all,
                                provider='gs')

        if self.long_list:
            for key in keys:
                print(_KeyMetadataOutput(key))
                print()
        else:
            for key in keys:
                print('%s\t%-12s %s' %
                      (key.accessId, key.state, key.serviceAccountEmail))
Example #11
0
    def _UpdateHmacKey(self, thread_state=None):
        """Updates the state of the HMAC key named by the given access ID."""
        # Validate the state flag before touching the API.
        if not self.state:
            raise CommandException(
                'A state flag must be supplied for %s %s\n%s' %
                (self.command_name, self.action_subcommand, _UPDATE_SYNOPSIS))
        if self.state not in _VALID_UPDATE_STATES:
            raise CommandException('The state flag value must be one of %s' %
                                   ', '.join(_VALID_UPDATE_STATES))
        # The access ID is required as the sole positional argument.
        if not self.args:
            raise _AccessIdException(self.command_name, self.action_subcommand,
                                     _UPDATE_SYNOPSIS)
        access_id = self.args[0]

        api = GetCloudApiInstance(self, thread_state=thread_state)

        response = api.UpdateHmacKey(self.project_id,
                                     access_id,
                                     self.state,
                                     self.etag,
                                     provider='gs')

        print(_KeyMetadataOutput(response))
Example #12
0
  def RewriteFunc(self, name_expansion_result, thread_state=None):
    """Rewrites one object in place, applying the requested transformations.

    Handles storage-class changes and encryption-key rotation (CSEK/CMEK),
    skipping the server-side rewrite entirely when every requested transform
    would be a no-op.

    Args:
      name_expansion_result: NameExpansionResult describing the target object.
      thread_state: gsutil Cloud API instance to use for the operation, or
          None to use self.gsutil_api.

    Raises:
      CommandException: if the source object has no visible ACL and ACL
          preservation was requested.
      EncryptionException: if the boto encryption_key does not match the
          source object's key and -k was not passed, or if the decryption
          key for the source is missing.
    """
    gsutil_api = GetCloudApiInstance(self, thread_state=thread_state)
    transform_url = name_expansion_result.expanded_storage_url

    self.CheckProvider(transform_url)

    # Get all fields so that we can ensure that the target metadata is
    # specified correctly.
    src_metadata = gsutil_api.GetObjectMetadata(
        transform_url.bucket_name,
        transform_url.object_name,
        generation=transform_url.generation,
        provider=transform_url.scheme)

    if self.no_preserve_acl:
      # Leave ACL unchanged.
      src_metadata.acl = []
    elif not src_metadata.acl:
      raise CommandException(
          'No OWNER permission found for object %s. OWNER permission is '
          'required for rewriting objects, (otherwise their ACLs would be '
          'reset).' % transform_url)

    # Note: If other transform types are added, they must ensure that the
    # encryption key configuration matches the boto configuration, because
    # gsutil maintains an invariant that all objects it writes use the
    # encryption_key value (including decrypting if no key is present).

    # Store metadata about src encryption to make logic below easier to read.
    src_encryption_kms_key = (src_metadata.kmsKeyName
                              if src_metadata.kmsKeyName else None)

    src_encryption_sha256 = None
    if (src_metadata.customerEncryption and
        src_metadata.customerEncryption.keySha256):
      src_encryption_sha256 = src_metadata.customerEncryption.keySha256
      # In python3, hashes are bytes, use ascii since it should be ascii
      src_encryption_sha256 = src_encryption_sha256.encode('ascii')

    src_was_encrypted = (src_encryption_sha256 is not None or
                         src_encryption_kms_key is not None)

    # Also store metadata about dest encryption.
    dest_encryption_kms_key = None
    if (self.boto_file_encryption_keywrapper is not None and
        self.boto_file_encryption_keywrapper.crypto_type == CryptoKeyType.CMEK):
      dest_encryption_kms_key = self.boto_file_encryption_keywrapper.crypto_key

    dest_encryption_sha256 = None
    if (self.boto_file_encryption_keywrapper is not None and
        self.boto_file_encryption_keywrapper.crypto_type == CryptoKeyType.CSEK):
      dest_encryption_sha256 = (
          self.boto_file_encryption_keywrapper.crypto_key_sha256)

    should_encrypt_dest = self.boto_file_encryption_keywrapper is not None

    # True when neither the CSEK hash nor the KMS key differs between
    # source and destination.
    encryption_unchanged = (src_encryption_sha256 == dest_encryption_sha256 and
                            src_encryption_kms_key == dest_encryption_kms_key)

    # Prevent accidental key rotation.
    if (_TransformTypes.CRYPTO_KEY not in self.transform_types and
        not encryption_unchanged):
      raise EncryptionException(
          'The "-k" flag was not passed to the rewrite command, but the '
          'encryption_key value in your boto config file did not match the key '
          'used to encrypt the object "%s" (hash: %s). To encrypt the object '
          'using a different key, you must specify the "-k" flag.' %
          (transform_url, src_encryption_sha256))

    # Determine if we can skip this rewrite operation (this should only be done
    # when ALL of the specified transformations are redundant).
    redundant_transforms = []

    # STORAGE_CLASS transform is redundant if the target storage class matches
    # the existing storage class.
    if (_TransformTypes.STORAGE_CLASS in self.transform_types and
        self.dest_storage_class == NormalizeStorageClass(
            src_metadata.storageClass)):
      redundant_transforms.append('storage class')

    # CRYPTO_KEY transform is redundant if we're using the same encryption
    # key that was used to encrypt the source. However, if no encryption key was
    # specified, we should still perform the rewrite. This results in the
    # rewritten object either being encrypted with its bucket's default KMS key
    # or having no CSEK/CMEK encryption applied. While we could attempt fetching
    # the bucket's metadata and checking its default KMS key before performing
    # the rewrite (in the case where we appear to be transitioning from
    # no key to no key), that is vulnerable to the race condition where the
    # default KMS key is changed between when we check it and when we rewrite
    # the object.
    if (_TransformTypes.CRYPTO_KEY in self.transform_types and
        should_encrypt_dest and encryption_unchanged):
      redundant_transforms.append('encryption key')

    if len(redundant_transforms) == len(self.transform_types):
      self.logger.info('Skipping %s, all transformations were redundant: %s' %
                       (transform_url, redundant_transforms))
      return

    # First make a deep copy of the source metadata, then overwrite any
    # requested attributes (e.g. if a storage class change was specified).
    dest_metadata = encoding.PyValueToMessage(
        apitools_messages.Object, encoding.MessageToPyValue(src_metadata))

    # Remove some unnecessary/invalid fields.
    dest_metadata.generation = None
    # Service has problems if we supply an ID, but it is responsible for
    # generating one, so it is not necessary to include it here.
    dest_metadata.id = None
    # Ensure we don't copy over the KMS key name or CSEK key info from the
    # source object; those should only come from the boto config's
    # encryption_key value.
    dest_metadata.customerEncryption = None
    dest_metadata.kmsKeyName = None

    # Both a storage class change and CMEK encryption should be set as part of
    # the dest object's metadata. CSEK encryption, if specified, is added to the
    # request later via headers obtained from the keywrapper value passed to
    # encryption_tuple.
    if _TransformTypes.STORAGE_CLASS in self.transform_types:
      dest_metadata.storageClass = self.dest_storage_class
    if dest_encryption_kms_key is not None:
      dest_metadata.kmsKeyName = dest_encryption_kms_key

    # Make sure we have the CSEK key necessary to decrypt.
    decryption_keywrapper = None
    if src_encryption_sha256 is not None:
      if src_encryption_sha256 in self.csek_hash_to_keywrapper:
        decryption_keywrapper = (
            self.csek_hash_to_keywrapper[src_encryption_sha256])
      else:
        raise EncryptionException(
            'Missing decryption key with SHA256 hash %s. No decryption key '
            'matches object %s' % (src_encryption_sha256, transform_url))

    # Pick the verb for user-facing progress text based on how the
    # encryption state transitions.
    operation_name = 'Rewriting'
    if _TransformTypes.CRYPTO_KEY in self.transform_types:
      if src_was_encrypted and should_encrypt_dest:
        if not encryption_unchanged:
          operation_name = 'Rotating'
        # Else, keep "Rewriting". This might occur when -k was specified and was
        # redundant, but we're performing the operation anyway because some
        # other transformation was not redundant.
      elif src_was_encrypted and not should_encrypt_dest:
        operation_name = 'Decrypting'
      elif not src_was_encrypted and should_encrypt_dest:
        operation_name = 'Encrypting'

    # TODO: Remove this call (used to verify tests) and make it processed by
    # the UIThread.
    sys.stderr.write(
        _ConstructAnnounceText(operation_name, transform_url.url_string))
    sys.stderr.flush()

    # Message indicating beginning of operation.
    gsutil_api.status_queue.put(
        FileMessage(transform_url,
                    None,
                    time.time(),
                    finished=False,
                    size=src_metadata.size,
                    message_type=FileMessage.FILE_REWRITE))

    progress_callback = FileProgressCallbackHandler(
        gsutil_api.status_queue,
        src_url=transform_url,
        operation_name=operation_name).call

    gsutil_api.CopyObject(src_metadata,
                          dest_metadata,
                          src_generation=transform_url.generation,
                          preconditions=self.preconditions,
                          progress_callback=progress_callback,
                          decryption_tuple=decryption_keywrapper,
                          encryption_tuple=self.boto_file_encryption_keywrapper,
                          provider=transform_url.scheme,
                          fields=[])

    # Message indicating end of operation.
    gsutil_api.status_queue.put(
        FileMessage(transform_url,
                    None,
                    time.time(),
                    finished=True,
                    size=src_metadata.size,
                    message_type=FileMessage.FILE_REWRITE))