Example No. 1
    def test_make_request_with_sse(self):
        key_bytes = os.urandom(32)
        # Obviously a bad key here, but we just want to ensure we can use
        # a str/unicode type as a key.
        key_str = 'abcd' * 8

        # Put two objects with an SSE-C key, one with random bytes,
        # one with str/unicode.  Then verify we can GetObject() both
        # objects.
        self.client.put_object(
            Bucket=self.bucket_name, Key='foo.txt',
            Body=six.BytesIO(b'mycontents'), SSECustomerAlgorithm='AES256',
            SSECustomerKey=key_bytes)
        self.addCleanup(self.client.delete_object,
                        Bucket=self.bucket_name, Key='foo.txt')
        self.client.put_object(
            Bucket=self.bucket_name, Key='foo2.txt',
            Body=six.BytesIO(b'mycontents2'), SSECustomerAlgorithm='AES256',
            SSECustomerKey=key_str)
        self.addCleanup(self.client.delete_object,
                        Bucket=self.bucket_name, Key='foo2.txt')

        self.assertEqual(
            self.client.get_object(Bucket=self.bucket_name,
                                   Key='foo.txt',
                                   SSECustomerAlgorithm='AES256',
                                   SSECustomerKey=key_bytes)['Body'].read(),
            b'mycontents')
        self.assertEqual(
            self.client.get_object(Bucket=self.bucket_name,
                                   Key='foo2.txt',
                                   SSECustomerAlgorithm='AES256',
                                   SSECustomerKey=key_str)['Body'].read(),
            b'mycontents2')
Example No. 2
 def test_streaming_line_iterator_keepends(self):
     body = six.BytesIO(b'1234567890\n1234567890\n12345')
     stream = response.StreamingBody(body, content_length=27)
     self.assert_lines(
         stream.iter_lines(keepends=True),
         [b'1234567890\n', b'1234567890\n', b'12345'],
     )
Example No. 3
    def test_upload_fileobj(self):
        fileobj = six.BytesIO(b'foo')
        self.client.upload_fileobj(
            Fileobj=fileobj, Bucket=self.bucket_name, Key='foo')
        self.addCleanup(self.delete_object, 'foo')

        self.assertTrue(self.object_exists('foo'))
Example No. 4
def add_glacier_checksums(params, **kwargs):
    """Add glacier checksums to the http request.

    This will add two headers to the http request:

        * x-amz-content-sha256
        * x-amz-sha256-tree-hash

    These values will only be added if they are not present
    in the HTTP request.

    """
    request_dict = params
    headers = request_dict['headers']
    body = request_dict['body']
    if isinstance(body, six.binary_type):
        # If the user provided a bytes type instead of a file-like
        # object, we temporarily create a BytesIO object so we can
        # use the util functions to calculate the checksums, which
        # assume file-like objects.  Note that we're not actually
        # changing the body in the request_dict.
        body = six.BytesIO(body)
    starting_position = body.tell()
    if 'x-amz-content-sha256' not in headers:
        headers['x-amz-content-sha256'] = utils.calculate_sha256(body,
                                                                 as_hex=True)
    body.seek(starting_position)
    if 'x-amz-sha256-tree-hash' not in headers:
        headers['x-amz-sha256-tree-hash'] = utils.calculate_tree_hash(body)
    body.seek(starting_position)
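
The handler expects a request dict with 'headers' and 'body' keys, which is also how the test in Example No. 25 below drives it. A minimal usage sketch, assuming botocore's handlers module exposes add_glacier_checksums as shown above:

import six
from botocore import handlers

# Both checksum headers get filled in; any header that is already
# present would be left untouched.
request_dict = {'headers': {}, 'body': six.BytesIO(b'hello world')}
handlers.add_glacier_checksums(request_dict)
print(request_dict['headers']['x-amz-content-sha256'])
print(request_dict['headers']['x-amz-sha256-tree-hash'])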
Example No. 5
    def test_make_request_with_sse_copy_source(self):
        encrypt_key = 'a' * 32
        other_encrypt_key = 'b' * 32

        # Upload the object using one encrypt key
        self.client.put_object(
            Bucket=self.bucket_name, Key='foo.txt',
            Body=six.BytesIO(b'mycontents'), SSECustomerAlgorithm='AES256',
            SSECustomerKey=encrypt_key)
        self.addCleanup(self.client.delete_object,
                        Bucket=self.bucket_name, Key='foo.txt')

        # Copy the object using the original encryption key as the copy source
        # and encrypt with a new encryption key.
        self.client.copy_object(
            Bucket=self.bucket_name,
            CopySource=self.bucket_name+'/foo.txt',
            Key='bar.txt', CopySourceSSECustomerAlgorithm='AES256',
            CopySourceSSECustomerKey=encrypt_key,
            SSECustomerAlgorithm='AES256',
            SSECustomerKey=other_encrypt_key
        )
        self.addCleanup(self.client.delete_object,
                        Bucket=self.bucket_name, Key='bar.txt')

        # Download the object using the new encryption key.
        # The content should not have changed.
        self.assertEqual(
            self.client.get_object(
                Bucket=self.bucket_name, Key='bar.txt',
                SSECustomerAlgorithm='AES256',
                SSECustomerKey=other_encrypt_key)['Body'].read(),
            b'mycontents')
Example No. 6
 def test_streaming_line_iterator_ends_newline(self):
     body = six.BytesIO(b'1234567890\n1234567890\n12345\n')
     stream = response.StreamingBody(body, content_length=28)
     self.assert_lines(
         stream.iter_lines(),
         [b'1234567890', b'1234567890', b'12345'],
     )
Example No. 7
    def test_upload_fileobj_progress(self):
        # This has to be an integration test because the fileobj will never
        # actually be read from when using the stubber and therefore the
        # progress callbacks will not be invoked.
        chunksize = 5 * (1024**2)
        config = boto3.s3.transfer.TransferConfig(
            multipart_chunksize=chunksize,
            multipart_threshold=chunksize,
            max_concurrency=1,
        )
        fileobj = six.BytesIO(b'0' * (chunksize * 3))

        def progress_callback(amount):
            self.progress += amount

        self.client.upload_fileobj(
            Fileobj=fileobj,
            Bucket=self.bucket_name,
            Key='foo',
            Config=config,
            Callback=progress_callback,
        )
        self.addCleanup(self.delete_object, 'foo')

        self.assertTrue(self.object_exists('foo'))
        self.assertEqual(self.progress, chunksize * 3)
Example No. 8
    def test_transfer_methods_do_not_use_threads(self):
        # This is just a smoke test to make sure that setting
        # use_threads to False has no issues transferring files, as the
        # non-threaded implementation is run under the same integration
        # and functional tests in s3transfer as the normal threaded
        # implementation.
        #
        # The methods used are arbitrary, other than that one of them
        # uses ``boto3_wasabi.s3.transfer.S3Transfer`` and the other
        # uses ``s3transfer.manager.TransferManager`` directly.
        content = b'my content'
        filename = self.files.create_file('myfile', content.decode('utf-8'))
        key = 'foo'
        config = boto3_wasabi.s3.transfer.TransferConfig(use_threads=False)

        self.client.upload_file(
            Bucket=self.bucket_name, Key=key, Filename=filename,
            Config=config)
        self.addCleanup(self.delete_object, key)
        self.assertTrue(self.object_exists(key))

        fileobj = six.BytesIO()
        self.client.download_fileobj(
            Bucket=self.bucket_name, Key='foo', Fileobj=fileobj, Config=config)
        self.assertEqual(fileobj.getvalue(), content)
Example No. 9
    def test_get_response_streaming_ng(self):
        http_response = Response()
        http_response.headers = {
            'content-type': 'application/xml',
            'date': 'Sat, 08 Mar 2014 12:05:44 GMT',
            'server': 'AmazonS3',
            'transfer-encoding': 'chunked',
            'x-amz-id-2': 'AAAAAAAAAAAAAAAAAAA',
            'x-amz-request-id': 'XXXXXXXXXXXXXXXX'
        }
        http_response.raw = six.BytesIO(XMLBODY1)
        http_response.status_code = 403
        http_response.reason = 'Forbidden'

        session = botocore.session.get_session()
        s3 = session.get_service('s3')
        operation = s3.get_operation('GetObject')  # streaming operation

        self.assertEqual(
            response.get_response(operation.model, http_response)[1], {
                'Error': {
                    'Message': 'Access Denied',
                    'Code': 'AccessDenied',
                },
                'ResponseMetadata': {
                    'HostId': 'AAAAAAAAAAAAAAAAAAA',
                    'RequestId': 'XXXXXXXXXXXXXXXX',
                    'HTTPStatusCode': 403
                },
            })
Example No. 10
    def test_get_response_nonstreaming_ok(self):
        http_response = Response()
        http_response.headers = {
            'content-type': 'application/xml',
            'date': 'Sun, 09 Mar 2014 02:55:43 GMT',
            'server': 'AmazonS3',
            'transfer-encoding': 'chunked',
            'x-amz-id-2': 'AAAAAAAAAAAAAAAAAAA',
            'x-amz-request-id': 'XXXXXXXXXXXXXXXX'
        }
        http_response.raw = six.BytesIO(XMLBODY1)
        http_response.status_code = 403
        http_response.reason = 'Forbidden'
        http_response.request = Request()

        session = botocore.session.get_session()
        service_model = session.get_service_model('s3')
        operation_model = service_model.operation_model('ListObjects')

        self.assert_response_with_subset_metadata(
            response.get_response(operation_model, http_response)[1], {
                'ResponseMetadata': {
                    'RequestId': 'XXXXXXXXXXXXXXXX',
                    'HostId': 'AAAAAAAAAAAAAAAAAAA',
                    'HTTPStatusCode': 403
                },
                'Error': {
                    'Message': 'Access Denied',
                    'Code': 'AccessDenied'
                }
            })
Example No. 11
 def test_streaming_line_iter_chunk_sizes(self):
     for chunk_size in range(1, 30):
         body = six.BytesIO(b'1234567890\n1234567890\n12345')
         stream = response.StreamingBody(body, content_length=27)
         self.assert_lines(
             stream.iter_lines(chunk_size),
             [b'1234567890', b'1234567890', b'12345'],
         )
Example No. 12
 def __init__(self, raw_request):
     if isinstance(raw_request, six.text_type):
         raw_request = raw_request.encode('utf-8')
     self.rfile = six.BytesIO(raw_request)
     self.raw_requestline = self.rfile.readline()
     self.error_code = None
     self.error_message = None
     self.parse_request()
Example No. 13
 def test_streaming_line_abstruse_newline_standard(self):
     for chunk_size in range(1, 30):
         body = six.BytesIO(b'1234567890\r\n1234567890\r\n12345\r\n')
         stream = response.StreamingBody(body, content_length=31)
         self.assert_lines(
             stream.iter_lines(chunk_size),
             [b'1234567890', b'1234567890', b'12345'],
         )
Example No. 14
 def test_payload_is_binary_file(self):
     request = AWSRequest()
     request.data = six.BytesIO(u'\u2713'.encode('utf-8'))
     auth = self.create_signer()
     payload = auth.payload(request)
     self.assertEqual(
         payload,
         '1dabba21cdad44541f6b15796f8d22978fc7ea10c46aeceeeeb66c23b3ac7604')
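
The expected string is just the hex-encoded SHA-256 of the request body, which is what SigV4 uses as the payload hash when the body is read as bytes. A quick sketch to reproduce it with only the standard library (an illustration, not part of the test suite):

import hashlib

# Hex SHA-256 of the UTF-8 encoding of the check-mark character; this is
# the literal the assertion above compares payload() against.
print(hashlib.sha256(u'\u2713'.encode('utf-8')).hexdigest())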
Example No. 15
 def test_streaming_body_is_an_iterator(self):
     body = six.BytesIO(b'a' * 1024 + b'b' * 1024 + b'c' * 2)
     stream = response.StreamingBody(body, content_length=2050)
     self.assertEqual(b'a' * 1024, next(stream))
     self.assertEqual(b'b' * 1024, next(stream))
     self.assertEqual(b'c' * 2, next(stream))
     with self.assertRaises(StopIteration):
         next(stream)
Example No. 16
 def test_streaming_body_with_invalid_length(self):
     body = six.BytesIO(b'123456789')
     stream = response.StreamingBody(body, content_length=10)
     with self.assertRaises(IncompleteReadError):
         self.assertEqual(stream.read(9), b'123456789')
         # The next read will return nothing and raise an
         # IncompleteReadError because we expected 10 bytes, not 9.
         stream.read()
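
For contrast, a body whose length matches content_length reads through without error; a small sketch under the same assumed imports as the tests above:

import six
from botocore import response

# Content length agrees with the data, so read() returns everything and
# no IncompleteReadError is raised.
ok = response.StreamingBody(six.BytesIO(b'123456789'), content_length=9)
assert ok.read() == b'123456789'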
Example No. 17
 def test_request_retried_for_sigv4(self):
     body = six.BytesIO(b"Hello world!")
     exception = ConnectionClosedError(endpoint_url='')
     self.http_stubber.responses.append(exception)
     self.http_stubber.responses.append(None)
     with self.http_stubber:
         response = self.client.put_object(Bucket=self.bucket_name,
                                           Key='foo.txt', Body=body)
         self.assert_status_code(response, 200)
Example No. 18
 def test_add_md5_with_file_like_body(self):
     request_dict = {
         'body': six.BytesIO(b'foobar'),
         'headers': {}
     }
     self.md5_digest.return_value = b'8X\xf6"0\xac<\x91_0\x0cfC\x12\xc6?'
     handlers.calculate_md5(request_dict)
     self.assertEqual(request_dict['headers']['Content-MD5'],
                      'OFj2IjCsPJFfMAxmQxLGPw==')
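
The mocked digest is simply the MD5 of b'foobar', and the Content-MD5 header is its base64 encoding. A sketch that reproduces the expected header value with only the standard library:

import base64
import hashlib

digest = hashlib.md5(b'foobar').digest()
# digest == b'8X\xf6"0\xac<\x91_0\x0cfC\x12\xc6?'
# base64-encoding it yields the Content-MD5 value asserted above.
print(base64.b64encode(digest).decode('ascii'))  # OFj2IjCsPJFfMAxmQxLGPw==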
Example No. 19
    def test_validates_file_like_object(self):
        value = six.BytesIO(b'foo')

        errors = self.get_validation_error_message(
            given_shapes=self.shapes,
            input_params={'Blob': value},
        )
        error_msg = errors.generate_report()
        self.assertEqual(error_msg, '')
Example No. 20
 def setUp(self):
     self.credentials = botocore.credentials.Credentials(access_key='foo',
                                                         secret_key='bar',
                                                         token='baz')
     self.auth = botocore.auth.S3SigV4Auth(self.credentials, 'ec2',
                                           'eu-central-1')
     self.request = AWSRequest(data=six.BytesIO(b"foo bar baz"))
     self.request.method = 'PUT'
     self.request.url = 'https://s3.eu-central-1.amazonaws.com/'
Example No. 21
 def test_can_set_utf_8_headers(self):
     bucket_name = self.create_bucket(self.region)
     body = six.BytesIO(b"Hello world!")
     response = self.client.put_object(
         Bucket=bucket_name, Key="foo.txt", Body=body,
         ContentDisposition="attachment; filename=5小時接力起跑.jpg;")
     self.assert_status_code(response, 200)
     self.addCleanup(self.client.delete_object,
                     Bucket=bucket_name, Key="foo.txt")
Example No. 22
 def test_content_sha256_set_if_payload_signing_disabled(self):
     request = AWSRequest()
     request.data = six.BytesIO(u'\u2713'.encode('utf-8'))
     request.url = 'https://amazonaws.com'
     request.context['payload_signing_enabled'] = False
     request.method = 'PUT'
     auth = self.create_signer()
     auth.add_auth(request)
     sha_header = request.headers['X-Amz-Content-SHA256']
     self.assertEqual(sha_header, 'UNSIGNED-PAYLOAD')
Example No. 23
 def test_streaming_json_upload(self):
     stream = six.BytesIO(b'{"fakejson": true}')
     service = self.session.get_service('cloudsearchdomain')
     operation = service.get_operation('UploadDocuments')
     built = operation.build_parameters(contentType='application/json',
                                        documents=stream)
     endpoint = service.get_endpoint(region_name='us-east-1',
                                     endpoint_url='http://example.com')
     request = endpoint.create_request(built, signer=None)
     self.assertEqual(request.body, stream)
Example No. 24
 def test_download_fileobj(self):
     obj = six.BytesIO()
     inject.bucket_download_fileobj(self.bucket, Key='key', Fileobj=obj)
     self.bucket.meta.client.download_fileobj.assert_called_with(
         Bucket=self.bucket.name,
         Key='key',
         Fileobj=obj,
         ExtraArgs=None,
         Callback=None,
         Config=None)
Example No. 25
 def test_checksum_added_only_if_not_exists(self):
     request_dict = {
         'headers': {
             'x-amz-content-sha256': 'pre-exists',
         },
         'body': six.BytesIO(b'hello world'),
     }
     handlers.add_glacier_checksums(request_dict)
     self.assertEqual(request_dict['headers']['x-amz-content-sha256'],
                      'pre-exists')
Example No. 26
 def test_download_fileobj(self):
     fileobj = six.BytesIO()
     inject.object_download_fileobj(self.obj, Fileobj=fileobj)
     self.obj.meta.client.download_fileobj.assert_called_with(
         Bucket=self.obj.bucket_name,
         Key=self.obj.key,
         Fileobj=fileobj,
         ExtraArgs=None,
         Callback=None,
         Config=None)
Example No. 27
    def test_download_fileobj(self):
        fileobj = six.BytesIO()
        self.client.put_object(
            Bucket=self.bucket_name, Key='foo', Body=b'beach')
        self.addCleanup(self.delete_object, 'foo')

        self.client.download_fileobj(
            Bucket=self.bucket_name, Key='foo', Fileobj=fileobj)

        self.assertEqual(fileobj.getvalue(), b'beach')
Example No. 28
 def test_upload_fileobj(self):
     fileobj = six.BytesIO(b'foo')
     inject.bucket_upload_fileobj(self.bucket, Key='key', Fileobj=fileobj)
     self.bucket.meta.client.upload_fileobj.assert_called_with(
         Bucket=self.bucket.name,
         Fileobj=fileobj,
         Key='key',
         ExtraArgs=None,
         Callback=None,
         Config=None)
Example No. 29
    def test_get_response_nonstreaming_ng(self):
        http_response = Response()
        http_response.headers = {
            'content-type': 'application/xml',
            'date': 'Sat, 08 Mar 2014 12:05:44 GMT',
            'server': 'AmazonS3',
            'transfer-encoding': 'chunked',
            'x-amz-id-2': 'AAAAAAAAAAAAAAAAAAA',
            'x-amz-request-id': 'XXXXXXXXXXXXXXXX'
        }
        http_response.raw = six.BytesIO(XMLBODY2)
        http_response.status_code = 200
        http_response.reason = 'ok'
        http_response.request = Request()

        session = botocore.session.get_session()
        s3 = session.get_service('s3')
        operation = s3.get_operation('ListObjects')  # non-streaming operation

        self.assertEqual(
            response.get_response(operation.model, http_response)[1], {
                u'Contents': [{
                    u'ETag': '"00000000000000000000000000000000"',
                    u'Key': 'test.png',
                    u'LastModified': datetime.datetime(
                        2014, 3, 1, 17, 6, 40, tzinfo=tzutc()),
                    u'Owner': {
                        u'DisplayName': 'dummy',
                        u'ID': 'AAAAAAAAAAAAAAAAAAA'
                    },
                    u'Size': 6702,
                    u'StorageClass': 'STANDARD'
                }],
                u'IsTruncated': False,
                u'Marker': "",
                u'MaxKeys': 1000,
                u'Name': 'mybucket',
                u'Prefix': "",
                'ResponseMetadata': {
                    'RequestId': 'XXXXXXXXXXXXXXXX',
                    'HostId': 'AAAAAAAAAAAAAAAAAAA',
                    'HTTPStatusCode': 200,
                }
            })
Example No. 30
    def stub_get_object(self, full_contents, start_byte=0, end_byte=None):
        """
        Stubs out the get_object operation.

        :param full_contents: The FULL contents of the object
        :param start_byte: The first byte to grab.
        :param end_byte: The last byte to grab.
        """
        get_object_response = {}
        expected_params = {}
        contents = full_contents
        end_byte_range = end_byte

        # If the start byte is set and the end byte is not, the end byte is
        # the last byte.
        if start_byte != 0 and end_byte is None:
            end_byte = len(full_contents) - 1

        # A ranged get where the end byte is the last byte of the object
        # should set the input range as open-ended, e.g. Range='bytes=3-'.
        if end_byte == len(full_contents) - 1:
            end_byte_range = ''

        # If this is a ranged get, ContentRange needs to be returned,
        # the contents need to be pruned, and Range needs to be an
        # expected param.
        if end_byte is not None:
            contents = full_contents[start_byte:end_byte + 1]
            part_range = 'bytes=%s-%s' % (start_byte, end_byte_range)
            content_range = 'bytes=%s-%s/%s' % (
                start_byte, end_byte, len(full_contents))
            get_object_response['ContentRange'] = content_range
            expected_params['Range'] = part_range

        get_object_response.update({
            "AcceptRanges": "bytes",
            "ETag": self.etag,
            "ContentLength": len(contents),
            "ContentType": "binary/octet-stream",
            "Body": six.BytesIO(contents),
            "ResponseMetadata": {
                "HTTPStatusCode": 200
            }
        })
        expected_params.update({
            "Bucket": self.bucket,
            "Key": self.key
        })

        self.stubber.add_response(
            method='get_object', service_response=get_object_response,
            expected_params=expected_params)
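
A sketch of how the helper might be exercised in a test, assuming the class also holds the stubbed client as self.client (a hypothetical name) and that self.stubber, self.bucket, self.key, and self.etag are set up in setUp:

    def test_ranged_get_is_stubbed(self):
        # Hypothetical test using the helper above: a start byte with no
        # end byte turns into an open-ended range, Range='bytes=3-'.
        full_contents = b'0123456789'
        self.stub_get_object(full_contents, start_byte=3)
        with self.stubber:
            body = self.client.get_object(
                Bucket=self.bucket, Key=self.key,
                Range='bytes=3-')['Body'].read()
        self.assertEqual(body, b'3456789')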