def test_storage(self):
    """Round-trip save/read/delete through S3Storage against a stubbed S3 client.

    The botocore Stubber queues canned responses for put_object,
    get_object and delete_object, so no network calls are made.
    """
    region = 'ap-southeast-1'
    bucket_name = 'mock-bucket-' + uuid.uuid4().hex
    s3 = boto3.resource('s3', region_name=region)
    stubber = Stubber(s3.meta.client)
    storage = S3Storage(S3Bucket(region, s3.Bucket(bucket_name)))

    key = 'mock-key'
    mock_content = b''
    # Fake StreamingBody whose read()/close() are mocked so get_object
    # can hand back mock_content without a real HTTP stream.
    body = StreamingBody(None, len(mock_content))
    body.read = Mock(return_value=mock_content)
    body.close = Mock()

    # Queue stub responses in the order the storage will issue calls.
    for operation, response in (
        ('put_object', {}),
        ('get_object', {'Body': body}),
        ('delete_object', {}),
    ):
        stubber.add_response(operation, response)
    stubber.activate()

    stored = storage.save(key, mock_content)
    self.assertEqual(stored.content, mock_content)
    stored.delete()
def upload_next_chunk(self, chunk: StreamingBody):
    """Upload the next part of a multipart upload from *chunk*.

    Only acts while the upload is in ``Status.ACTIVE``. On success the
    part is recorded in ``self.__uploaded_parts`` and the part counter
    advances.

    Returns:
        The ``upload_part`` response dict on success, the ``ClientError``
        on failure (after flipping status to ``Status.ERROR``), or
        ``None`` when the upload is not active or the source reported
        ``StopIteration`` (all parts already uploaded).
    """
    if self.__status != Status.ACTIVE:
        # BUG FIX: the original fell through to `return response` with
        # `response` never assigned, raising UnboundLocalError here.
        return None
    try:
        logging.debug("%s: uploading part %s" % (self.__key, self.__iterator))
        response = self.__s3.upload_part(
            Bucket=self.__bucket,
            Key=self.__key,
            PartNumber=self.__iterator,
            UploadId=self.__upload_id,
            Body=chunk.read())
        self.__uploaded_parts.append({
            'PartNumber': self.__iterator,
            'ETag': response.get('ETag')
        })
    except StopIteration:
        # Source exhausted — nothing left to upload.
        # BUG FIX: previously fell through to `return response` with
        # `response` unbound; return None explicitly instead.
        logging.debug("%s: all parts were uploaded" % self.__key)
        return None
    except ClientError as e:
        self.__status = Status.ERROR
        logging.error(
            "upload of a part of a file ended up with an error: %s" % e)
        return e
    else:
        logging.debug("%s: part %s uploaded" % (self.__key, self.__iterator))
        self.__iterator = self.__iterator + 1
        return response
def decode_streaming_body_payload(streaming_body: StreamingBody) -> Dict[str, Any]:
    """Drain *streaming_body*, decode it as UTF-8 and parse it as JSON."""
    raw = streaming_body.read()
    return json.loads(raw.decode("utf-8"))
def __s3_object_with_body(self, i: int) -> dict:
    """Build a stubbed get_object-style response whose Body yields the
    contents for object *i* (read() is mocked to return them)."""
    payload = self.__s3_object_contents(i)
    stream = StreamingBody(raw_stream=MagicMock(), content_length=100)
    stream.read = MagicMock(return_value=payload)
    return {"Body": stream}