def test_multipart_upload_with_bad_checksum(authorized_transport, checksum, bucket):
    """A corrupted checksum digest must make the multipart upload fail.

    Patches the digest helper to return a digest computed over the wrong
    bytes, expects the server to reject the upload with a checksum
    mismatch error, and verifies the upload object is tombstoned.
    """
    with open(ICO_FILE, u"rb") as file_obj:
        actual_contents = file_obj.read()

    blob_name = os.path.basename(ICO_FILE)
    check_does_not_exist(authorized_transport, blob_name)

    # Create the actual upload object.
    upload = resumable_requests.MultipartUpload(
        utils.MULTIPART_UPLOAD, checksum=checksum
    )

    # Transmit the resource.
    metadata = {u"name": blob_name, u"metadata": {u"color": u"yellow"}}

    # Build a digest over deliberately wrong bytes.
    corrupt = _helpers._get_checksum_object(checksum)
    corrupt.update(b"bad data")
    bad_digest = _helpers.prepare_checksum_digest(corrupt.digest())

    patcher = mock.patch.object(
        _helpers, "prepare_checksum_digest", return_value=bad_digest
    )
    with patcher:
        with pytest.raises(common.InvalidResponse) as exc_info:
            upload.transmit(
                authorized_transport, actual_contents, metadata, ICO_CONTENT_TYPE
            )

    error_message = exc_info.value.response.json()["error"]["message"]
    # Attempt to verify that this is a checksum mismatch error.
    assert checksum.upper() in error_message
    assert bad_digest in error_message

    # Make sure the upload is tombstoned.
    check_tombstoned(
        upload, authorized_transport, actual_contents, metadata, ICO_CONTENT_TYPE
    )
async def test_resumable_upload_with_bad_checksum(
    authorized_transport, img_stream, bucket, cleanup, checksum
):
    """A mismatched digest must surface as ``DataCorruption``.

    Forces the digest helper to report a digest of the wrong bytes and
    asserts the resumable upload raises with the standard checksum
    mismatch message, including both the bad and the expected digest.
    """
    # Digest computed over deliberately wrong bytes.
    corrupt = _helpers._get_checksum_object(checksum)
    corrupt.update(b"bad data")
    bad_digest = _helpers.prepare_checksum_digest(corrupt.digest())

    patcher = mock.patch.object(
        _helpers, "prepare_checksum_digest", return_value=bad_digest
    )
    with patcher:
        with pytest.raises(common.DataCorruption) as exc_info:
            await _resumable_upload_helper(
                authorized_transport, img_stream, cleanup, checksum=checksum
            )

    # Known-good digests of the image fixture, per checksum type.
    valid_digests = {
        "md5": "1bsd83IYNug8hd+V1ING3Q==",
        "crc32c": "YQGPxA==",
    }
    template = _async_resumable_media._upload._UPLOAD_CHECKSUM_MISMATCH_MESSAGE
    expected_message = template.format(
        checksum.upper(), bad_digest, valid_digests[checksum]
    )
    assert exc_info.value.args[0] == expected_message
def test__get_checksum_object(checksum):
    """The helper returns an object of the type matching the checksum name."""
    expected_type = {
        "md5": type(hashlib.md5()),
        "crc32c": type(_helpers._get_crc32c_object()),
        None: type(None),
    }[checksum]
    assert isinstance(_helpers._get_checksum_object(checksum), expected_type)
def _prepare_request(self, data, metadata, content_type):
    """Assemble the multipart POST request without performing any I/O.

    Everything here is pure computation, following the `sans-I/O`_
    philosophy, so the caller can drive the actual transport.

    .. note:
        Intended to be used only once; ``self._headers`` is mutated by
        adding the multipart content-type entry.

    Args:
        data (bytes): The resource content to be uploaded.
        metadata (Mapping[str, str]): The resource metadata, such as an
            ACL list.
        content_type (str): The content type of the resource, e.g. a JPEG
            image has content type ``image/jpeg``.

    Returns:
        Tuple[str, str, bytes, Mapping[str, str]]: The quadruple of

          * HTTP verb for the request (always POST)
          * the URL for the request
          * the body of the request
          * headers for the request

    Raises:
        ValueError: If the current upload has already finished.
        TypeError: If ``data`` isn't bytes.

    .. _sans-I/O: https://sans-io.readthedocs.io/
    """
    if self.finished:
        raise ValueError("An upload can only be used once.")
    if not isinstance(data, bytes):
        raise TypeError("`data` must be bytes, received", type(data))

    # When a checksum type is configured, stamp the digest of the payload
    # into the metadata under the checksum-specific key.
    checksum_object = sync_helpers._get_checksum_object(self._checksum_type)
    if checksum_object is not None:
        checksum_object.update(data)
        digest = sync_helpers.prepare_checksum_digest(checksum_object.digest())
        metadata[sync_helpers._get_metadata_key(self._checksum_type)] = digest

    body, boundary = construct_multipart_request(data, metadata, content_type)
    # Content type carries the boundary; close the quoted value.
    self._headers[_CONTENT_TYPE_HEADER] = _RELATED_HEADER + boundary + b'"'
    return _POST, self.upload_url, body, self._headers
def _update_checksum(self, start_byte, payload):
    """Feed ``payload`` into the running checksum, skipping re-sent bytes.

    Error recovery can retransmit a byte range more than once, so
    ``self._bytes_checksummed`` records how far the digest has already
    advanced; any overlapping prefix of ``payload`` is dropped before
    updating so bytes are never double-counted.
    """
    if not self._checksum_type:
        return
    if not self._checksum_object:
        # Lazily create the digest object on first use.
        self._checksum_object = _helpers._get_checksum_object(self._checksum_type)

    # Length of the payload prefix that was already checksummed.
    already_seen = self._bytes_checksummed - start_byte
    fresh = payload[already_seen:] if already_seen > 0 else payload
    self._checksum_object.update(fresh)
    self._bytes_checksummed += len(fresh)
def test__get_checksum_object_invalid():
    """An unsupported checksum name is rejected with ``ValueError``."""
    bogus_name = "invalid"
    with pytest.raises(ValueError):
        _helpers._get_checksum_object(bogus_name)