Exemplo n.º 1
0
    def upload_object(self,
                      source_stream,
                      destination_resource,
                      progress_callback=None,
                      request_config=None,
                      serialization_data=None,
                      tracker_callback=None,
                      upload_strategy=cloud_api.UploadStrategy.SIMPLE):
        """See CloudApi class for function doc strings."""
        del progress_callback  # Unused.
        # Lazily create and cache the upload transport on first use.
        if self._upload_http_client is None:
            self._upload_http_client = transports.GetApitoolsTransport()

        request_config_validated = cloud_api.get_provider_request_config(
            request_config, GcsRequestConfig)

        if upload_strategy == cloud_api.UploadStrategy.RESUMABLE:
            upload = gcs_upload.ResumableUpload(
                self, self._upload_http_client, source_stream,
                DEFAULT_CONTENT_TYPE, destination_resource,
                request_config_validated, serialization_data, tracker_callback)
        elif upload_strategy == cloud_api.UploadStrategy.SIMPLE:
            upload = gcs_upload.SimpleUpload(
                self, self._upload_http_client, source_stream,
                DEFAULT_CONTENT_TYPE, destination_resource,
                request_config_validated)
        else:
            raise command_errors.Error('Invalid upload strategy: {}.'.format(
                upload_strategy.value))

        metadata = upload.run()
        return gcs_metadata_util.get_object_resource_from_metadata(metadata)
Exemplo n.º 2
0
def GenerateIdToken(service_account_id, audience, include_email=False):
    """Generates an id token for the given service account.

    Args:
      service_account_id: The service account to generate the token for.
      audience: The audience claim to embed in the token.
      include_email: bool, whether to include the email claim in the token.

    Returns:
      str, the generated id token.
    """
    service_account_ref = resources.REGISTRY.Parse(
        service_account_id,
        params={
            'projectsId': '-',
            'serviceAccountsId': service_account_id
        },
        collection='iamcredentials.serviceAccounts')

    # Resource quota and impersonation are disabled so the request is made
    # directly as the currently authenticated account.
    transport_client = transports.GetApitoolsTransport(
        response_encoding=transport.ENCODING,
        enable_resource_quota=False,
        allow_account_impersonation=False)
    # pylint: disable=protected-access
    iam_client = apis_internal._GetClientInstance(
        'iamcredentials', 'v1', http_client=transport_client)

    messages = iam_client.MESSAGES_MODULE
    request = (
        messages.IamcredentialsProjectsServiceAccountsGenerateIdTokenRequest(
            name=service_account_ref.RelativeName(),
            generateIdTokenRequest=messages.GenerateIdTokenRequest(
                audience=audience, includeEmail=include_email)))
    response = iam_client.projects_serviceAccounts.GenerateIdToken(request)
    return response.token
Exemplo n.º 3
0
    def ReadObject(self, object_ref):
        """Read a file from the given Cloud Storage bucket.

    Args:
      object_ref: storage_util.ObjectReference, The object to read from.

    Raises:
      BadFileException: if the file read is not successful.

    Returns:
      file-like object containing the data read, positioned at offset 0.
    """
        buffer = io.BytesIO()
        download = transfer.Download.FromStream(
            buffer, chunksize=self._GetChunkSize())
        # Raw object bytes must not be decoded by the transport.
        download.bytes_http = transports.GetApitoolsTransport(
            response_encoding=None)
        request = self.messages.StorageObjectsGetRequest(
            bucket=object_ref.bucket, object=object_ref.object)

        log.info('Reading [%s]', object_ref)
        try:
            self.client.objects.Get(request, download=download)
        except api_exceptions.HttpError as err:
            raise exceptions.BadFileException(
                'Could not read [{object_}]. Please retry: {err}'.format(
                    object_=object_ref, err=http_exc.HttpException(err)))

        # Rewind so callers can read the downloaded contents from the start.
        buffer.seek(0)
        return buffer
Exemplo n.º 4
0
 def __init__(self):
     """Creates storage API clients and a streaming download transport."""
     self.client = core_apis.GetClientInstance('storage', 'v1')
     self.messages = core_apis.GetMessagesModule('storage', 'v1')
     # Shared handler that processes response bytes for downloads.
     stream_handler = _StorageStreamResponseHandler()
     self._stream_response_handler = stream_handler
     self._download_http_client = transports.GetApitoolsTransport(
         response_handler=stream_handler, response_encoding=None)
Exemplo n.º 5
0
 def HttpClient(self):
     """Returns an apitools transport for this (active) environment."""
     assert self.active
     # Deferred import: transports depends on credential infrastructure
     # that is not needed in all cases.
     from googlecloudsdk.core.credentials import transports  # pylint: disable=g-import-not-at-top
     return transports.GetApitoolsTransport(
         response_encoding=transport.ENCODING, ca_certs=self.ca_certs)
Exemplo n.º 6
0
    def upload_object(self,
                      source_stream,
                      destination_resource,
                      request_config,
                      source_resource=None,
                      serialization_data=None,
                      tracker_callback=None,
                      upload_strategy=cloud_api.UploadStrategy.SIMPLE):
        """See CloudApi class for function doc strings."""
        # Lazily create and cache the upload transport on first use. The
        # request body (object data) is redacted from HTTP logging.
        if self._upload_http_client is None:
            self._upload_http_client = transports.GetApitoolsTransport(
                redact_request_body_reason=(
                    'Object data is not displayed to keep the log output clean.'
                    ' Set log_http_show_request_body property to True to print the'
                    ' body of this request.'))

        source_path = (source_resource.storage_url.versionless_url_string
                       if source_resource else None)
        should_gzip_in_flight = gzip_util.should_gzip_in_flight(
            request_config.gzip_settings, source_path)
        if should_gzip_in_flight:
            log.info('Using compressed transport encoding for {}.'.format(
                source_path))

        if upload_strategy == cloud_api.UploadStrategy.RESUMABLE:
            upload = gcs_upload.ResumableUpload(
                self, self._upload_http_client, source_stream,
                destination_resource, should_gzip_in_flight, request_config,
                source_resource, serialization_data, tracker_callback)
        elif upload_strategy == cloud_api.UploadStrategy.SIMPLE:
            upload = gcs_upload.SimpleUpload(
                self, self._upload_http_client, source_stream,
                destination_resource, should_gzip_in_flight, request_config,
                source_resource)
        else:
            raise command_errors.Error('Invalid upload strategy: {}.'.format(
                upload_strategy.value))

        encryption_key = getattr(request_config.resource_args,
                                 'encryption_key', None)
        try:
            with self._encryption_headers_context(encryption_key):
                metadata = upload.run()
        except (apitools_exceptions.StreamExhausted,
                apitools_exceptions.TransferError) as error:
            # Surface stream/transfer failures as an abort so callers can
            # clean up and restart the upload from scratch.
            raise cloud_errors.ResumableUploadAbortError(
                '{}\n This likely occurred because the file being uploaded changed '
                'size between resumable upload attempts. If this error persists, try '
                'deleting the tracker files present in {}'.format(
                    str(error),
                    properties.VALUES.storage.tracker_files_directory.Get()))

        return gcs_metadata_util.get_object_resource_from_metadata(metadata)
Exemplo n.º 7
0
  def download_object(self,
                      bucket_name,
                      object_name,
                      download_stream,
                      compressed_encoding=False,
                      decryption_wrapper=None,
                      digesters=None,
                      download_strategy=cloud_api.DownloadStrategy.ONE_SHOT,
                      generation=None,
                      object_size=None,
                      progress_callback=None,
                      serialization_data=None,
                      start_byte=0,
                      end_byte=None):
    """See super class."""
    # S3 requires a string, but GCS uses an int for generation.
    if generation:
      generation = int(generation)

    if serialization_data:
      # TODO(b/161437901): Handle resumed download.
      # Fail loudly instead of falling through: the original fall-through
      # left apitools_download unbound and crashed with UnboundLocalError
      # further below.
      raise NotImplementedError(
          'Resuming a download from serialization data is not supported.')

    # New download. The transport must not decode the raw object bytes.
    apitools_download = apitools_transfer.Download.FromStream(
        download_stream,
        auto_transfer=False,
        total_size=object_size,
        num_retries=DEFAULT_NUM_RETRIES)
    apitools_download.bytes_http = transports.GetApitoolsTransport(
        response_encoding=None)

    # TODO(b/161460749) Handle download retries.
    request = self.messages.StorageObjectsGetRequest(
        bucket=bucket_name,
        object=object_name,
        generation=generation)

    if download_strategy != cloud_api.DownloadStrategy.ONE_SHOT:
      # TODO(b/161437901): Handle resumable download.
      # Fail loudly instead of silently returning None as before.
      raise NotImplementedError(
          'Download strategy {} is not supported.'.format(download_strategy))

    return self._download_object(
        bucket_name,
        object_name,
        download_stream,
        apitools_download,
        request,
        compressed_encoding=compressed_encoding,
        decryption_wrapper=decryption_wrapper,
        generation=generation,
        serialization_data=serialization_data,
        start_byte=start_byte,
        end_byte=end_byte)
Exemplo n.º 8
0
 def HttpClient(self):
     """Returns an apitools transport, using mTLS when a cert domain is set."""
     assert self.active
     if not self.client_key and self.client_cert and self.client_cert_domain:
         raise ValueError(
             'Kubeconfig authentication requires a client certificate '
             'authentication method.')
     if not self.client_cert_domain:
         # Import http only when needed, as it depends on credential
         # infrastructure which is not needed in all cases.
         from googlecloudsdk.core.credentials import transports  # pylint: disable=g-import-not-at-top
         return transports.GetApitoolsTransport(
             response_encoding=transport.ENCODING, ca_certs=self.ca_certs)
     # Client-certificate path: note this deliberately uses the core
     # transports module rather than core.credentials.transports.
     from googlecloudsdk.core import transports  # pylint: disable=g-import-not-at-top
     return transports.GetApitoolsTransport(
         response_encoding=transport.ENCODING,
         ca_certs=self.ca_certs,
         client_certificate=self.client_cert,
         client_key=self.client_key,
         client_cert_domain=self.client_cert_domain)
Exemplo n.º 9
0
    def CopyFileFromGCS(self, source_obj_ref, local_path, overwrite=False):
        """Download a file from the given Cloud Storage bucket.

    Args:
      source_obj_ref: storage_util.ObjectReference, the path of the file on GCS
        to download.
      local_path: str, the path of the file to download to. Path must be on the
        local filesystem.
      overwrite: bool, whether or not to overwrite local_path if it already
        exists.

    Raises:
      BadFileException: if the file download is not successful or the
        downloaded size does not match the object's reported size.
    """
        download = transfer.Download.FromFile(
            local_path, chunksize=self._GetChunkSize(), overwrite=overwrite)
        # Raw object bytes must not be decoded by the transport.
        download.bytes_http = transports.GetApitoolsTransport(
            response_encoding=None)
        get_req = self.messages.StorageObjectsGetRequest(
            bucket=source_obj_ref.bucket, object=source_obj_ref.object)

        gcs_path = '{bucket}/{object_path}'.format(
            bucket=source_obj_ref.bucket,
            object_path=source_obj_ref.object,
        )

        log.info('Downloading [{gcs}] to [{local_file}]'.format(
            local_file=local_path, gcs=gcs_path))
        try:
            self.client.objects.Get(get_req, download=download)
            # When there's a download, Get() returns None so we Get() again to
            # check the file size.
            response = self.client.objects.Get(get_req)
        except api_exceptions.HttpError as err:
            raise exceptions.BadFileException(
                'Could not copy [{gcs}] to [{local_file}]. Please retry: {err}'
                .format(local_file=local_path,
                        gcs=gcs_path,
                        err=http_exc.HttpException(err)))
        finally:
            # Close the stream to release the file handle so we can check its
            # contents.
            download.stream.close()

        file_size = _GetFileSize(local_path)
        if response.size != file_size:
            log.debug('Download size: {0} bytes, but expected size is {1} '
                      'bytes.'.format(file_size, response.size))
            raise exceptions.BadFileException(
                'Cloud Storage download failure. Downloaded file [{0}] does not '
                'match Cloud Storage object. Please retry.'.format(local_path))
Exemplo n.º 10
0
def _GetClientInstance(version='v1'):
  """Get a client instance for service usage."""
  # pylint:disable=protected-access
  # Resource quota is disabled for service management unless the user has
  # explicitly set a quota project: this API is used to turn on other APIs,
  # which the user may not yet have enabled, so the shared project is used
  # to bootstrap enabling the appropriate APIs.
  use_explicit_quota = (
      properties.VALUES.billing.quota_project.IsExplicitlySet())
  transport_client = transports.GetApitoolsTransport(
      enable_resource_quota=use_explicit_quota,
      response_encoding=transport.ENCODING)
  return apis_internal._GetClientInstance(
      'serviceusage', version, http_client=transport_client)
Exemplo n.º 11
0
def _GetClientInstance(api_name,
                       api_version,
                       no_http=False,
                       http_client=None,
                       check_response_func=None):
    """Returns an instance of the API client specified in the args.

  Args:
    api_name: str, The API name (or the command surface name, if different).
    api_version: str, The version of the API.
    no_http: bool, True to not create an http object for this client.
    http_client: bring your own http client to use.
      Incompatible with no_http=True.
    check_response_func: error handling callback to give to apitools.

  Returns:
    base_api.BaseApiClient, An instance of the specified API client.
  """
    # pylint: disable=g-import-not-at-top
    if no_http:
        assert http_client is None
    elif http_client is None:
        # Normal gcloud authentication. Import http only when needed, as it
        # depends on credential infrastructure not needed in all cases.
        from googlecloudsdk.core.credentials import transports
        http_client = transports.GetApitoolsTransport(
            response_encoding=transport.ENCODING)

    client_class = _GetClientClass(api_name, api_version)
    endpoint = _GetEffectiveApiEndpoint(api_name, api_version, client_class)
    client_instance = client_class(
        url=endpoint, get_credentials=False, http=http_client)
    if check_response_func is not None:
        client_instance.check_response_func = check_response_func
    api_key = properties.VALUES.core.api_key.Get()
    if api_key:
        client_instance.AddGlobalParam('key', api_key)
        client_instance.additional_http_headers[
            'X-Google-Project-Override'] = 'apikey'
    return client_instance
Exemplo n.º 12
0
def GenerateAccessToken(service_account_id, scopes):
    """Generates an access token for the given service account.

    Args:
      service_account_id: The service account to impersonate.
      scopes: The OAuth scopes requested for the generated token.

    Returns:
      The GenerateAccessToken response message.

    Raises:
      exceptions.HttpException: on any HTTP error from the API; a detailed
        message is produced for 403 (Forbidden) impersonation failures.
    """
    service_account_ref = resources.REGISTRY.Parse(
        service_account_id,
        params={
            'projectsId': '-',
            'serviceAccountsId': service_account_id
        },
        collection='iamcredentials.serviceAccounts')

    # Resource quota and impersonation are disabled so the request is made
    # directly as the currently authenticated account.
    transport_client = transports.GetApitoolsTransport(
        response_encoding=transport.ENCODING,
        enable_resource_quota=False,
        allow_account_impersonation=False)
    # pylint: disable=protected-access
    iam_client = apis_internal._GetClientInstance(
        'iamcredentials', 'v1', http_client=transport_client)

    messages = iam_client.MESSAGES_MODULE
    request = (
        messages
        .IamcredentialsProjectsServiceAccountsGenerateAccessTokenRequest(
            name=service_account_ref.RelativeName(),
            generateAccessTokenRequest=messages.GenerateAccessTokenRequest(
                scope=scopes)))
    try:
        return iam_client.projects_serviceAccounts.GenerateAccessToken(request)
    except apitools_exceptions.HttpForbiddenError as e:
        raise exceptions.HttpException(
            e,
            error_format='Error {code} (Forbidden) - failed to impersonate '
            '[{service_acc}]. Make sure the account that\'s trying '
            'to impersonate it has access to the service account '
            'itself and the "roles/iam.serviceAccountTokenCreator" '
            'role.'.format(code=e.status_code, service_acc=service_account_id))
    except apitools_exceptions.HttpError as e:
        raise exceptions.HttpException(e)
Exemplo n.º 13
0
    def download_object(self,
                        cloud_resource,
                        download_stream,
                        compressed_encoding=False,
                        decryption_wrapper=None,
                        digesters=None,
                        download_strategy=cloud_api.DownloadStrategy.RESUMABLE,
                        progress_callback=None,
                        start_byte=0,
                        end_byte=None):
        """See super class."""
        # S3 requires a string, but GCS uses an int for generation.
        if cloud_resource.generation:
            generation = int(cloud_resource.generation)
        else:
            generation = None

        resuming = (start_byte
                    and download_strategy == cloud_api.DownloadStrategy.RESUMABLE)
        if resuming:
            # Resuming download.
            serialization_data = get_download_serialization_data(
                cloud_resource, start_byte)
            apitools_download = apitools_transfer.Download.FromData(
                download_stream,
                serialization_data,
                num_retries=properties.VALUES.storage.max_retries.GetInt(),
                client=self.client)
        else:
            # New download.
            serialization_data = None
            apitools_download = apitools_transfer.Download.FromStream(
                download_stream,
                auto_transfer=False,
                total_size=cloud_resource.size,
                num_retries=properties.VALUES.storage.max_retries.GetInt())

        # Point the shared response handler at this download's destination.
        self._stream_response_handler.update_destination_info(
            stream=download_stream,
            digesters=digesters,
            processed_bytes=start_byte,
            progress_callback=progress_callback)

        # Lazily create and cache the download transport on first use.
        if self._download_http_client is None:
            self._download_http_client = transports.GetApitoolsTransport(
                response_encoding=None,
                response_handler=self._stream_response_handler)
        apitools_download.bytes_http = self._download_http_client

        # TODO(b/161460749) Handle download retries.
        request = self.messages.StorageObjectsGetRequest(
            bucket=cloud_resource.bucket,
            object=cloud_resource.name,
            generation=generation)

        if download_strategy == cloud_api.DownloadStrategy.ONE_SHOT:
            perform_download = self._download_object
        else:
            perform_download = self._download_object_resumable
        return perform_download(
            cloud_resource,
            download_stream,
            apitools_download,
            request,
            compressed_encoding=compressed_encoding,
            decryption_wrapper=decryption_wrapper,
            generation=generation,
            serialization_data=serialization_data,
            start_byte=start_byte,
            end_byte=end_byte)
Exemplo n.º 14
0
    def download_object(self,
                        cloud_resource,
                        download_stream,
                        request_config,
                        digesters=None,
                        do_not_decompress=False,
                        download_strategy=cloud_api.DownloadStrategy.RESUMABLE,
                        progress_callback=None,
                        start_byte=0,
                        end_byte=None):
        """See super class."""
        # Derive POSIX attributes from the object's custom metadata and
        # validate them before any bytes are transferred, so an invalid
        # permission setting aborts the download up front.
        if request_config.system_posix_data:
            if cloud_resource.metadata and cloud_resource.metadata.metadata:
                custom_metadata_dict = encoding_helper.MessageToDict(
                    cloud_resource.metadata.metadata)
            else:
                custom_metadata_dict = {}

            posix_attributes_to_set = (
                posix_util.get_posix_attributes_from_custom_metadata_dict(
                    cloud_resource.storage_url.url_string,
                    custom_metadata_dict))
            if not posix_util.are_file_permissions_valid(
                    cloud_resource.storage_url.url_string,
                    request_config.system_posix_data, posix_attributes_to_set):
                raise posix_util.SETTING_INVALID_POSIX_ERROR
        else:
            posix_attributes_to_set = None

        # Short-circuit when there is nothing to download (helper also
        # reports progress for the empty transfer).
        if download_util.return_and_report_if_nothing_to_download(
                cloud_resource, progress_callback):
            return cloud_api.DownloadApiClientReturnValue(
                posix_attributes=posix_attributes_to_set,
                server_reported_encoding=None)

        serialization_data = get_download_serialization_data(
            cloud_resource, start_byte)
        apitools_download = apitools_transfer.Download.FromData(
            download_stream,
            serialization_data,
            num_retries=properties.VALUES.storage.max_retries.GetInt(),
            client=self.client)

        # Point the shared streaming response handler at this download's
        # destination before issuing the request.
        self._stream_response_handler.update_destination_info(
            stream=download_stream,
            digesters=digesters,
            processed_bytes=start_byte,
            progress_callback=progress_callback)

        # Lazily create and cache the download transport on first use; raw
        # object bytes must not be decoded by the transport.
        if self._download_http_client is None:
            self._download_http_client = transports.GetApitoolsTransport(
                response_encoding=None,
                response_handler=self._stream_response_handler)
        apitools_download.bytes_http = self._download_http_client

        additional_headers = {}
        if do_not_decompress:
            # TODO(b/161453101): Optimize handling of gzip-encoded downloads.
            additional_headers['accept-encoding'] = 'gzip'

        # Customer-supplied decryption keys are passed as request headers.
        decryption_key = getattr(request_config.resource_args,
                                 'decryption_key', None)
        additional_headers.update(_get_encryption_headers(decryption_key))

        if download_strategy == cloud_api.DownloadStrategy.ONE_SHOT:
            server_reported_encoding = gcs_download.launch(
                apitools_download,
                start_byte=start_byte,
                end_byte=end_byte,
                additional_headers=additional_headers)
        else:
            # Retriable launch, used for the RESUMABLE strategy.
            server_reported_encoding = gcs_download.launch_retriable(
                download_stream,
                apitools_download,
                start_byte=start_byte,
                end_byte=end_byte,
                additional_headers=additional_headers)

        return cloud_api.DownloadApiClientReturnValue(
            posix_attributes=posix_attributes_to_set,
            server_reported_encoding=server_reported_encoding)