  def _UploadObject(self, upload_stream, object_metadata, canned_acl=None,
                    size=None, preconditions=None, provider=None, fields=None,
                    serialization_data=None, tracker_callback=None,
                    progress_callback=None, apitools_strategy='simple'):
    """Upload implementation; takes gsutil Cloud API args plus apitools_strategy."""
    ValidateDstObjectMetadata(object_metadata)
    assert not canned_acl, 'Canned ACLs not supported by JSON API.'

    bytes_uploaded_container = BytesUploadedContainer()

    callback_per_bytes = CALLBACK_PER_X_BYTES
    total_size = 0
    if progress_callback and size:
      total_size = size
      progress_callback(0, size)

    callback_class_factory = UploadCallbackConnectionClassFactory(
        bytes_uploaded_container, total_size=total_size,
        callback_per_bytes=callback_per_bytes,
        progress_callback=progress_callback)

    upload_http = GetNewHttp()
    upload_http_class = callback_class_factory.GetConnectionClass()
    upload_http.connections = {'http': upload_http_class,
                               'https': upload_http_class}

    # Disable apitools' default print callbacks.
    def _NoopCallback(unused_response, unused_upload_object):
      pass

    authorized_upload_http = self.credentials.authorize(upload_http)
    WrapUploadHttpRequest(authorized_upload_http)
    # Since bytes_http is created in this function, we don't get the
    # user-agent header from api_client's http automatically.
    additional_headers = {
        'user-agent': self.api_client.user_agent
    }

    try:
      # Bind these names up front so they exist even when resuming with
      # serialization_data, since only new uploads populate them below.
      content_type = None
      apitools_request = None
      global_params = None
      if not serialization_data:
        # This is a new upload, set up initial upload state.
        content_type = object_metadata.contentType
        if not content_type:
          content_type = DEFAULT_CONTENT_TYPE

        if not preconditions:
          preconditions = Preconditions()

        apitools_request = apitools_messages.StorageObjectsInsertRequest(
            bucket=object_metadata.bucket, object=object_metadata,
            ifGenerationMatch=preconditions.gen_match,
            ifMetagenerationMatch=preconditions.meta_gen_match)

        global_params = apitools_messages.StandardQueryParameters()
        if fields:
          global_params.fields = ','.join(set(fields))

      if apitools_strategy == 'simple':  # One-shot upload.
        apitools_upload = apitools_transfer.Upload(
            upload_stream, content_type, total_size=size, auto_transfer=True)
        apitools_upload.strategy = apitools_strategy
        apitools_upload.bytes_http = authorized_upload_http

        return self.api_client.objects.Insert(
            apitools_request,
            upload=apitools_upload,
            global_params=global_params)
      else:  # Resumable upload.
        try:
          if serialization_data:
            # Resuming an existing upload.
            apitools_upload = apitools_transfer.Upload.FromData(
                upload_stream, serialization_data, self.api_client.http)
            apitools_upload.chunksize = _ResumableChunkSize()
            apitools_upload.bytes_http = authorized_upload_http
          else:
            # New resumable upload.
            apitools_upload = apitools_transfer.Upload(
                upload_stream, content_type, total_size=size,
                chunksize=_ResumableChunkSize(), auto_transfer=False)
            apitools_upload.strategy = apitools_strategy
            apitools_upload.bytes_http = authorized_upload_http
            self.api_client.objects.Insert(
                apitools_request,
                upload=apitools_upload,
                global_params=global_params)

          # If we're resuming an upload, apitools has at this point received
          # from the server how many bytes it already has. Update our
          # callback class with this information.
          bytes_uploaded_container.bytes_uploaded = apitools_upload.progress
          if tracker_callback:
            tracker_callback(json.dumps(apitools_upload.serialization_data))

          http_response = apitools_upload.StreamInChunks(
              callback=_NoopCallback, finish_callback=_NoopCallback,
              additional_headers=additional_headers)
          return self.api_client.objects.ProcessHttpResponse(
              self.api_client.objects.GetMethodConfig('Insert'), http_response)
        except TRANSLATABLE_APITOOLS_EXCEPTIONS as e:
          resumable_ex = self._TranslateApitoolsResumableUploadException(e)
          if resumable_ex:
            raise resumable_ex
          else:
            raise
    except TRANSLATABLE_APITOOLS_EXCEPTIONS as e:
      self._TranslateExceptionAndRaise(e, bucket_name=object_metadata.bucket,
                                       object_name=object_metadata.name)
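
A minimal caller-side sketch for the method above, not part of the original listing: the gsutil_api instance (an object exposing _UploadObject as defined here), the bucket and file names, and the tracker-persistence helper are illustrative assumptions.

# Hypothetical usage sketch; gsutil_api and the literals below are assumptions.
import os

object_metadata = apitools_messages.Object(bucket='my-bucket', name='large.bin')

def _SaveTrackerData(serialization_data):
  # Persist apitools serialization data so a later call can resume the upload
  # by passing it back in via the serialization_data argument.
  with open('upload.tracker', 'w') as tracker_file:
    tracker_file.write(serialization_data)

upload_size = os.path.getsize('large.bin')
with open('large.bin', 'rb') as upload_stream:
  created_object = gsutil_api._UploadObject(
      upload_stream, object_metadata, size=upload_size,
      tracker_callback=_SaveTrackerData,
      progress_callback=lambda transferred, total: None,
      apitools_strategy='resumable')

A 'simple' strategy call has the same shape but omits tracker_callback and serialization_data, since the one-shot path never produces resumable state.
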
Example #2
  def _UploadObject(self, upload_stream, object_metadata, canned_acl=None,
                    size=None, preconditions=None, provider=None, fields=None,
                    serialization_data=None, tracker_callback=None,
                    progress_callback=None, apitools_strategy='simple'):
    """Upload implementation; takes gsutil Cloud API args plus apitools_strategy."""
    ValidateDstObjectMetadata(object_metadata)
    assert not canned_acl, 'Canned ACLs not supported by JSON API.'

    bytes_uploaded_container = BytesTransferredContainer()

    total_size = 0
    if progress_callback and size:
      total_size = size
      progress_callback(0, size)

    callback_class_factory = UploadCallbackConnectionClassFactory(
        bytes_uploaded_container, total_size=total_size,
        progress_callback=progress_callback)

    upload_http = GetNewHttp()
    upload_http_class = callback_class_factory.GetConnectionClass()
    upload_http.connections = {'http': upload_http_class,
                               'https': upload_http_class}

    # Disable apitools' default print callbacks.
    def _NoopCallback(unused_response, unused_upload_object):
      pass

    authorized_upload_http = self.credentials.authorize(upload_http)
    WrapUploadHttpRequest(authorized_upload_http)
    # Since bytes_http is created in this function, we don't get the
    # user-agent header from api_client's http automatically.
    additional_headers = {
        'user-agent': self.api_client.user_agent
    }

    try:
      content_type = None
      apitools_request = None
      global_params = None
      if not serialization_data:
        # This is a new upload, set up initial upload state.
        content_type = object_metadata.contentType
        if not content_type:
          content_type = DEFAULT_CONTENT_TYPE

        if not preconditions:
          preconditions = Preconditions()

        apitools_request = apitools_messages.StorageObjectsInsertRequest(
            bucket=object_metadata.bucket, object=object_metadata,
            ifGenerationMatch=preconditions.gen_match,
            ifMetagenerationMatch=preconditions.meta_gen_match)

        global_params = apitools_messages.StandardQueryParameters()
        if fields:
          global_params.fields = ','.join(set(fields))

      if apitools_strategy == 'simple':  # One-shot upload.
        apitools_upload = apitools_transfer.Upload(
            upload_stream, content_type, total_size=size, auto_transfer=True)
        apitools_upload.strategy = apitools_strategy
        apitools_upload.bytes_http = authorized_upload_http

        return self.api_client.objects.Insert(
            apitools_request,
            upload=apitools_upload,
            global_params=global_params)
      else:  # Resumable upload.
        return self._PerformResumableUpload(
            upload_stream, authorized_upload_http, content_type, size,
            serialization_data, apitools_strategy, apitools_request,
            global_params, bytes_uploaded_container, tracker_callback,
            _NoopCallback, additional_headers)
    except TRANSLATABLE_APITOOLS_EXCEPTIONS as e:
      self._TranslateExceptionAndRaise(e, bucket_name=object_metadata.bucket,
                                       object_name=object_metadata.name)
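
Example #2 delegates the resumable path to a _PerformResumableUpload helper whose body is not shown on this page. The sketch below reconstructs a plausible body from the inline resumable logic in Example #1; the parameter names, the bytes_transferred attribute, and the overall structure are assumptions rather than the actual gsutil implementation.

  def _PerformResumableUpload(
      self, upload_stream, authorized_upload_http, content_type, size,
      serialization_data, apitools_strategy, apitools_request, global_params,
      bytes_uploaded_container, tracker_callback, noop_callback,
      additional_headers):
    # Hypothetical reconstruction based on Example #1's inline resumable code.
    try:
      if serialization_data:
        # Resuming an existing upload.
        apitools_upload = apitools_transfer.Upload.FromData(
            upload_stream, serialization_data, self.api_client.http)
        apitools_upload.chunksize = _ResumableChunkSize()
        apitools_upload.bytes_http = authorized_upload_http
      else:
        # New resumable upload: register it with the service first.
        apitools_upload = apitools_transfer.Upload(
            upload_stream, content_type, total_size=size,
            chunksize=_ResumableChunkSize(), auto_transfer=False)
        apitools_upload.strategy = apitools_strategy
        apitools_upload.bytes_http = authorized_upload_http
        self.api_client.objects.Insert(
            apitools_request,
            upload=apitools_upload,
            global_params=global_params)

      # When resuming, apitools has already learned from the server how many
      # bytes it holds; surface that to the progress-callback machinery.
      # (Attribute name assumed from BytesTransferredContainer.)
      bytes_uploaded_container.bytes_transferred = apitools_upload.progress
      if tracker_callback:
        tracker_callback(json.dumps(apitools_upload.serialization_data))

      http_response = apitools_upload.StreamInChunks(
          callback=noop_callback, finish_callback=noop_callback,
          additional_headers=additional_headers)
      return self.api_client.objects.ProcessHttpResponse(
          self.api_client.objects.GetMethodConfig('Insert'), http_response)
    except TRANSLATABLE_APITOOLS_EXCEPTIONS as e:
      resumable_ex = self._TranslateApitoolsResumableUploadException(e)
      if resumable_ex:
        raise resumable_ex
      else:
        raise

The last two positional arguments here correspond to the _NoopCallback and the user-agent headers built in _UploadObject above.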