def _create_upload(self, client, file_obj=None, size=None,
                   content_type=None, chunk_size=None, strategy=None,
                   extra_headers=None):
    """Helper for upload methods.

    Creates a :class:`google.cloud.core.streaming.Upload` object to
    handle the details of uploading a file to Cloud Storage.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: Optional. The client to use. If not passed, falls back
                   to the ``client`` stored on the blob's bucket.

    :type file_obj: file
    :param file_obj: A file handle open for reading.

    :type size: int
    :param size: The size of the upload, in bytes.

    :type content_type: str
    :param content_type: Optional type of content being uploaded.

    :type chunk_size: int
    :param chunk_size: The size of each chunk when doing resumable and
                       media uploads.

    :type strategy: str
    :param strategy: Either
                     :attr:`google.cloud.core.streaming.transfer.SIMPLE_UPLOAD`
                     or
                     :attr:`google.cloud.core.streaming.transfer.RESUMABLE_UPLOAD`.

    :type extra_headers: dict
    :param extra_headers: Additional headers to be sent with the upload
                          initiation request.

    :rtype: Tuple[google.cloud.core.streaming.Upload,
                  google.cloud.core.streaming.Request,
                  google.cloud.core.streaming.Response]
    :returns: The Upload object, the upload HTTP request, and the upload
              initiation response.
    """
    client = self._require_client(client)
    # Use ``_base_connection`` rather than ``_connection`` since the
    # current connection may be a batch. A batch wraps a client's
    # connection, but does not store the ``http`` object. The rest
    # (API_BASE_URL and build_api_url) are also defined on the Batch
    # class, but we just use the wrapped connection since it has all
    # three (http, API_BASE_URL and build_api_url).
    connection = client._base_connection

    # Fall back to the blob's stored content type, then to a generic
    # binary type, when the caller does not specify one.
    content_type = (content_type or
                    self._properties.get('contentType') or
                    'application/octet-stream')

    headers = {
        'Accept': 'application/json',
        'Accept-Encoding': 'gzip, deflate',
        'User-Agent': connection.USER_AGENT,
    }
    if extra_headers:
        headers.update(extra_headers)
    # Customer-supplied encryption key headers (no-op dict when the blob
    # has no encryption key).
    headers.update(_get_encryption_headers(self._encryption_key))

    # Use apitools' Upload functionality. ``auto_transfer=False`` so we
    # control when bytes are actually sent.
    upload = Upload(file_obj, content_type, total_size=size,
                    auto_transfer=False)

    if chunk_size is not None:
        upload.chunksize = chunk_size

    if strategy is not None:
        # Bug fix: honor the caller-supplied strategy. The previous code
        # unconditionally assigned RESUMABLE_UPLOAD here, so passing
        # SIMPLE_UPLOAD was silently ignored.
        upload.strategy = strategy

    url_builder = _UrlBuilder(bucket_name=self.bucket.name,
                              object_name=self.name)
    upload_config = _UploadConfig()

    # Temporary URL until the strategy is determined.
    base_url = connection.API_BASE_URL + '/upload'
    upload_url = connection.build_api_url(api_base_url=base_url,
                                          path=self.bucket.path + '/o')

    # Configure the upload request parameters.
    request = Request(upload_url, 'POST', headers)
    if self._properties:
        # Send the blob's metadata as the JSON body of the initiation
        # request.
        headers['content-type'] = 'application/json'
        request.body = json.dumps(self._properties)

    upload.configure_request(upload_config, request, url_builder)

    # Configure the final URL, now including the query parameters chosen
    # by ``configure_request`` (e.g. uploadType).
    query_params = url_builder.query_params
    base_url = connection.API_BASE_URL + '/upload'
    request.url = connection.build_api_url(api_base_url=base_url,
                                           path=self.bucket.path + '/o',
                                           query_params=query_params)

    # Start the upload session.
    response = upload.initialize_upload(request, connection.http)

    return upload, request, response