def test_put_object_copy_error(self):
    """Copy-object (PUT with x-amz-copy-source) error responses."""
    obj = 'object'
    self.conn.make_request('PUT', self.bucket, obj)
    dst_bucket = 'dst-bucket'
    self.conn.make_request('PUT', dst_bucket)
    dst_obj = 'dst_object'

    # Bad signature -> SignatureDoesNotMatch
    headers = {'x-amz-copy-source': '/%s/%s' % (self.bucket, obj)}
    auth_error_conn = Connection(aws_secret_key='invalid')
    status, headers, body = \
        auth_error_conn.make_request('PUT', dst_bucket, dst_obj, headers)
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
    self.assertEqual(headers['content-type'], 'application/xml')

    # /src/nothing -> /dst/dst
    headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, 'nothing')}
    status, headers, body = \
        self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
    self.assertEqual(get_error_code(body), 'NoSuchKey')
    self.assertEqual(headers['content-type'], 'application/xml')

    # /nothing/src -> /dst/dst
    headers = {'X-Amz-Copy-Source': '/%s/%s' % ('nothing', obj)}
    status, headers, body = \
        self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
    # TODO: source bucket is not check.
    # self.assertEqual(get_error_code(body), 'NoSuchBucket')

    # /src/src -> /nothing/dst
    headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj)}
    status, headers, body = \
        self.conn.make_request('PUT', 'nothing', dst_obj, headers)
    self.assertEqual(get_error_code(body), 'NoSuchBucket')
    self.assertEqual(headers['content-type'], 'application/xml')
def test_upload_part_error(self):
    """Upload-part (PUT with partNumber/uploadId) error responses."""
    bucket = 'bucket'
    self.conn.make_request('PUT', bucket)
    query = 'uploads'
    key = 'obj'
    status, headers, body = \
        self.conn.make_request('POST', bucket, key, query=query)
    elem = fromstring(body, 'InitiateMultipartUploadResult')
    upload_id = elem.find('UploadId').text

    # Bad signature.
    query = 'partNumber=%s&uploadId=%s' % (1, upload_id)
    auth_error_conn = Connection(aws_secret_key='invalid')
    status, headers, body = \
        auth_error_conn.make_request('PUT', bucket, key, query=query)
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

    # Unknown bucket.
    status, headers, body = \
        self.conn.make_request('PUT', 'nothing', key, query=query)
    self.assertEqual(get_error_code(body), 'NoSuchBucket')

    # Unknown upload id.
    query = 'partNumber=%s&uploadId=%s' % (1, 'nothing')
    status, headers, body = \
        self.conn.make_request('PUT', bucket, key, query=query)
    self.assertEqual(get_error_code(body), 'NoSuchUpload')

    # Part number 0 is out of the valid range.
    query = 'partNumber=%s&uploadId=%s' % (0, upload_id)
    status, headers, body = \
        self.conn.make_request('PUT', bucket, key, query=query)
    self.assertEqual(get_error_code(body), 'InvalidArgument')
    err_msg = 'Part number must be an integer between 1 and'
    self.assertTrue(err_msg in get_error_msg(body))
def test_list_parts_error(self):
    """List-parts (GET with uploadId) error responses."""
    bucket = 'bucket'
    key = 'obj'
    self.conn.make_request('PUT', bucket)
    status, headers, body = self.conn.make_request(
        'POST', bucket, key, query='uploads')
    upload_id = fromstring(
        body, 'InitiateMultipartUploadResult').find('UploadId').text

    # A bad signature must be rejected.
    query = 'uploadId=%s' % upload_id
    bad_conn = Connection(tf.config['s3_access_key'], 'invalid')
    status, headers, body = bad_conn.make_request(
        'GET', bucket, key, query=query)
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

    # Listing against an unknown bucket.
    status, headers, body = self.conn.make_request(
        'GET', 'nothing', key, query=query)
    self.assertEqual(get_error_code(body), 'NoSuchBucket')

    # Listing against an unknown upload id.
    status, headers, body = self.conn.make_request(
        'GET', bucket, key, query='uploadId=%s' % 'nothing')
    self.assertEqual(get_error_code(body), 'NoSuchUpload')
def test_abort_multi_upload_error(self):
    """Abort-multipart-upload (DELETE with uploadId) error responses."""
    bucket = 'bucket'
    self.conn.make_request('PUT', bucket)
    key = 'obj'
    query = 'uploads'
    status, headers, body = \
        self.conn.make_request('POST', bucket, key, query=query)
    elem = fromstring(body, 'InitiateMultipartUploadResult')
    upload_id = elem.find('UploadId').text
    self._upload_part(bucket, key, upload_id)

    # Bad signature.
    query = 'uploadId=%s' % upload_id
    auth_error_conn = Connection(aws_secret_key='invalid')
    status, headers, body = \
        auth_error_conn.make_request('DELETE', bucket, key, query=query)
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

    # Unknown bucket.
    status, headers, body = \
        self.conn.make_request('DELETE', 'nothing', key, query=query)
    self.assertEqual(get_error_code(body), 'NoSuchBucket')

    # Unknown key.
    status, headers, body = \
        self.conn.make_request('DELETE', bucket, 'nothing', query=query)
    self.assertEqual(get_error_code(body), 'NoSuchUpload')

    # Unknown upload id.
    query = 'uploadId=%s' % 'nothing'
    status, headers, body = \
        self.conn.make_request('DELETE', bucket, key, query=query)
    self.assertEqual(get_error_code(body), 'NoSuchUpload')
class S3ApiBase(unittest2.TestCase):
    """Base class for s3api functional tests: connection setup plus a
    shared assertion helper for common S3 response headers."""

    def __init__(self, method_name):
        super(S3ApiBase, self).__init__(method_name)
        # Remembered so setUp() failure messages can name the test.
        self.method_name = method_name

    def setUp(self):
        # These tests only make sense when the s3api middleware is on.
        if 's3api' not in tf.cluster_info:
            raise tf.SkipTest('s3api middleware is not enabled')
        try:
            self.conn = Connection()
            self.conn.reset()
        except Exception:
            message = '%s got an error during initialize process.\n\n%s' % \
                (self.method_name, traceback.format_exc())
            # TODO: Find a way to make this go to FAIL instead of Error
            self.fail(message)

    def assertCommonResponseHeaders(self, headers, etag=None):
        """
        asserting common response headers with args
        :param headers: a dict of response headers
        :param etag: a string of md5(content).hexdigest() if not given,
                     this won't assert anything about etag. (e.g. DELETE obj)
        """
        self.assertTrue(headers['x-amz-id-2'] is not None)
        self.assertTrue(headers['x-amz-request-id'] is not None)
        self.assertTrue(headers['date'] is not None)
        # TODO; requires consideration
        # self.assertTrue(headers['server'] is not None)
        if etag is not None:
            self.assertTrue('etag' in headers)  # sanity
            self.assertEqual(etag, headers['etag'].strip('"'))
def test_put_object_error(self):
    """PUT object error responses: bad signature, then missing bucket."""
    # A request signed with the wrong secret is rejected.
    bad_sig_conn = Connection(aws_secret_key='invalid')
    status, headers, body = bad_sig_conn.make_request(
        'PUT', self.bucket, 'object')
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
    self.assertEqual(headers['content-type'], 'application/xml')

    # Uploading into a bucket that was never created.
    status, headers, body = self.conn.make_request(
        'PUT', 'bucket2', 'object')
    self.assertEqual(get_error_code(body), 'NoSuchBucket')
    self.assertEqual(headers['content-type'], 'application/xml')
def setUp(self):
    """Skip unless the s3api middleware is enabled, then connect."""
    if 's3api' not in tf.cluster_info:
        raise tf.SkipTest('s3api middleware is not enabled')
    try:
        self.conn = Connection()
        self.conn.reset()
    except Exception:
        # TODO: Find a way to make this go to FAIL instead of Error
        self.fail('%s got an error during initialize process.\n\n%s'
                  % (self.method_name, traceback.format_exc()))
def test_put_bucket_error(self):
    """PUT bucket error responses."""
    # '+' is not allowed in a bucket name.
    status, headers, body = self.conn.make_request(
        'PUT', 'bucket+invalid')
    self.assertEqual(get_error_code(body), 'InvalidBucketName')

    # A request signed with the wrong secret is rejected.
    bad_conn = Connection(aws_secret_key='invalid')
    status, headers, body = bad_conn.make_request('PUT', 'bucket')
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

    # Re-creating an existing bucket conflicts.
    self.conn.make_request('PUT', 'bucket')
    status, headers, body = self.conn.make_request('PUT', 'bucket')
    self.assertEqual(get_error_code(body), 'BucketAlreadyExists')
def test_delete_bucket_error(self):
    """DELETE bucket error responses."""
    # '+' is not allowed in a bucket name.
    status, headers, body = self.conn.make_request(
        'DELETE', 'bucket+invalid')
    self.assertEqual(get_error_code(body), 'InvalidBucketName')

    # A request signed with the wrong secret is rejected.
    bad_conn = Connection(aws_secret_key='invalid')
    status, headers, body = bad_conn.make_request('DELETE', 'bucket')
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

    # The bucket was never created.
    status, headers, body = self.conn.make_request('DELETE', 'bucket')
    self.assertEqual(get_error_code(body), 'NoSuchBucket')
def setUp(self):
    """Create the shared bucket and a second-account connection (conn2)."""
    super(TestS3Acl, self).setUp()
    self.bucket = 'bucket'
    self.obj = 'object'
    if 's3_access_key2' not in tf.config or \
            's3_secret_key2' not in tf.config:
        raise tf.SkipTest(
            'TestS3Acl requires s3_access_key2 and s3_secret_key2 setting')
    self.conn.make_request('PUT', self.bucket)
    access_key2 = tf.config['s3_access_key2']
    secret_key2 = tf.config['s3_secret_key2']
    # Third arg presumably the user id (same value as the access key) --
    # TODO confirm against the Connection signature.
    self.conn2 = Connection(access_key2, secret_key2, access_key2)
def test_list_multi_uploads_error(self):
    """List-multipart-uploads error responses."""
    bucket = 'bucket'
    self.conn.make_request('PUT', bucket)

    # A request signed with the wrong secret is rejected.
    bad_conn = Connection(aws_secret_key='invalid')
    status, headers, body = bad_conn.make_request(
        'GET', bucket, query='uploads')
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

    # Listing uploads on an unknown bucket.
    status, headers, body = self.conn.make_request(
        'GET', 'nothing', query='uploads')
    self.assertEqual(get_error_code(body), 'NoSuchBucket')
def test_get_bucket_acl_error(self):
    """GET bucket?acl error responses."""
    # A request signed with the wrong secret is rejected.
    bad_conn = Connection(aws_secret_key='invalid')
    status, headers, body = bad_conn.make_request(
        'GET', self.bucket, query='acl')
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

    # Reading the ACL of an unknown bucket.
    status, headers, body = self.conn.make_request(
        'GET', 'nothing', query='acl')
    self.assertEqual(get_error_code(body), 'NoSuchBucket')

    # The second account has no grant on this bucket.
    status, headers, body = self.conn2.make_request(
        'GET', self.bucket, query='acl')
    self.assertEqual(get_error_code(body), 'AccessDenied')
def setUp(self):
    """Create the shared bucket and a reduced-access connection (conn3)."""
    super(TestS3Acl, self).setUp()
    self.bucket = 'bucket'
    self.obj = 'object'
    if 's3_access_key3' not in tf.config or \
            's3_secret_key3' not in tf.config:
        raise tf.SkipTest(
            'TestS3Acl requires s3_access_key3 and s3_secret_key3 '
            'configured for reduced-access user')
    self.conn.make_request('PUT', self.bucket)
    access_key3 = tf.config['s3_access_key3']
    secret_key3 = tf.config['s3_secret_key3']
    # Third arg presumably the user id (same value as the access key) --
    # TODO confirm against the Connection signature.
    self.conn3 = Connection(access_key3, secret_key3, access_key3)
def test_initiate_multi_upload_error(self):
    """Initiate-multipart-upload (POST ?uploads) error responses."""
    bucket = 'bucket'
    key = 'obj'
    self.conn.make_request('PUT', bucket)

    # A request signed with the wrong secret is rejected.
    bad_conn = Connection(tf.config['s3_access_key'], 'invalid')
    status, headers, body = bad_conn.make_request(
        'POST', bucket, key, query='uploads')
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

    # Initiating against an unknown bucket.
    status, resp_headers, body = self.conn.make_request(
        'POST', 'nothing', key, query='uploads')
    self.assertEqual(get_error_code(body), 'NoSuchBucket')
def test_delete_object_error(self):
    """DELETE object error responses."""
    obj = 'object'
    self.conn.make_request('PUT', self.bucket, obj)

    # A request signed with the wrong secret is rejected.
    bad_conn = Connection(tf.config['s3_access_key'], 'invalid')
    status, headers, body = bad_conn.make_request(
        'DELETE', self.bucket, obj)
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
    self.assertEqual(headers['content-type'], 'application/xml')

    # Deleting from an unknown bucket.
    status, headers, body = self.conn.make_request(
        'DELETE', 'invalid', obj)
    self.assertEqual(get_error_code(body), 'NoSuchBucket')
    self.assertEqual(headers['content-type'], 'application/xml')
def test_put_bucket_error_key3(self):
    """A reduced-access (key3) user gets AccessDenied on PUT bucket."""
    if 's3_access_key3' not in tf.config or \
            's3_secret_key3' not in tf.config:
        raise tf.SkipTest('Cannot test for AccessDenied; need '
                          's3_access_key3 and s3_secret_key3 configured')
    self.conn.make_request('PUT', 'bucket')
    # If the user can't create buckets, they shouldn't even know
    # whether the bucket exists.
    conn3 = Connection(tf.config['s3_access_key3'],
                       tf.config['s3_secret_key3'],
                       tf.config['s3_access_key3'])
    status, headers, body = conn3.make_request('PUT', 'bucket')
    self.assertEqual(status, 403)
    self.assertEqual(get_error_code(body), 'AccessDenied')
def test_get_object_acl_error(self):
    """GET object?acl error responses."""
    self.conn.make_request('PUT', self.bucket, self.obj)

    # Bad signature.
    aws_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
    status, headers, body = \
        aws_error_conn.make_request('GET', self.bucket, self.obj,
                                    query='acl')
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

    # Unknown key.
    status, headers, body = \
        self.conn.make_request('GET', self.bucket, 'nothing', query='acl')
    self.assertEqual(get_error_code(body), 'NoSuchKey')

    # conn3 is the reduced-access user created in setUp().
    status, headers, body = \
        self.conn3.make_request('GET', self.bucket, self.obj, query='acl')
    self.assertEqual(get_error_code(body), 'AccessDenied')
def test_put_bucket_error(self):
    """PUT bucket error responses (variant tolerant of s3_acl setting)."""
    status, headers, body = \
        self.conn.make_request('PUT', 'bucket+invalid')
    self.assertEqual(get_error_code(body), 'InvalidBucketName')

    # Bad signature.
    auth_error_conn = Connection(aws_secret_key='invalid')
    status, headers, body = auth_error_conn.make_request('PUT', 'bucket')
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

    # Creating the same bucket twice.
    self.conn.make_request('PUT', 'bucket')
    status, headers, body = self.conn.make_request('PUT', 'bucket')
    # If the user can't create buckets, they shouldn't even know
    # whether the bucket exists. For some reason, though, when s3_acl
    # is disabled, we translate 403 -> BucketAlreadyExists??
    self.assertIn(get_error_code(body),
                  ('AccessDenied', 'BucketAlreadyExists'))
class S3ApiBase(unittest.TestCase):
    """Base class for s3api functional tests: builds a Connection from the
    test configuration and provides shared response-header assertions."""

    def __init__(self, method_name):
        super(S3ApiBase, self).__init__(method_name)
        # Remembered so setUp() failure messages can name the test.
        self.method_name = method_name

    @contextmanager
    def quiet_boto_logging(self):
        # Temporarily raise boto's log level to INFO; restore DEBUG after.
        try:
            logging.getLogger('boto').setLevel(logging.INFO)
            yield
        finally:
            logging.getLogger('boto').setLevel(logging.DEBUG)

    def setUp(self):
        # These tests only make sense when the s3api middleware is on.
        if 's3api' not in tf.cluster_info:
            raise tf.SkipTest('s3api middleware is not enabled')
        # user_id is 'account:username' when an account is configured.
        if tf.config.get('account'):
            user_id = '%s:%s' % (tf.config['account'], tf.config['username'])
        else:
            user_id = tf.config['username']
        try:
            self.conn = Connection(
                tf.config['s3_access_key'], tf.config['s3_secret_key'],
                user_id=user_id)
            self.conn.reset()
        except Exception:
            message = '%s got an error during initialize process.\n\n%s' % \
                (self.method_name, traceback.format_exc())
            # TODO: Find a way to make this go to FAIL instead of Error
            self.fail(message)

    def assertCommonResponseHeaders(self, headers, etag=None):
        """
        asserting common response headers with args
        :param headers: a dict of response headers
        :param etag: a string of md5(content).hexdigest() if not given,
                     this won't assert anything about etag. (e.g. DELETE obj)
        """
        self.assertTrue(headers['x-amz-id-2'] is not None)
        self.assertTrue(headers['x-amz-request-id'] is not None)
        self.assertTrue(headers['date'] is not None)
        # TODO; requires consideration
        # self.assertTrue(headers['server'] is not None)
        if etag is not None:
            self.assertTrue('etag' in headers)  # sanity
            self.assertEqual(etag, headers['etag'].strip('"'))
def test_put_bucket_error_key2(self):
    """With s3_acl enabled, a second same-account user also gets 409."""
    # NOTE(review): nesting reconstructed from collapsed source -- the
    # whole body appears to be guarded by the s3_acl setting; confirm.
    if config_true_value(tf.cluster_info['s3api'].get('s3_acl')):
        if 's3_access_key2' not in tf.config or \
                's3_secret_key2' not in tf.config:
            raise tf.SkipTest(
                'Cannot test for BucketAlreadyExists with second user; '
                'need s3_access_key2 and s3_secret_key2 configured')
        self.conn.make_request('PUT', 'bucket')
        # Other users of the same account get the same 409 error
        conn2 = Connection(tf.config['s3_access_key2'],
                           tf.config['s3_secret_key2'],
                           tf.config['s3_access_key2'])
        status, headers, body = conn2.make_request('PUT', 'bucket')
        self.assertEqual(status, 409)
        self.assertEqual(get_error_code(body), 'BucketAlreadyExists')
def test_head_bucket_error(self):
    """HEAD bucket errors carry a status code but an empty body."""
    self.conn.make_request('PUT', 'bucket')

    # Invalid bucket name.
    status, headers, body = self.conn.make_request(
        'HEAD', 'bucket+invalid')
    self.assertEqual(status, 400)
    self.assertEqual(body, '')  # sanity

    # Bad signature.
    bad_conn = Connection(aws_secret_key='invalid')
    status, headers, body = bad_conn.make_request('HEAD', 'bucket')
    self.assertEqual(status, 403)
    self.assertEqual(body, '')  # sanity

    # Unknown bucket.
    status, headers, body = self.conn.make_request('HEAD', 'nothing')
    self.assertEqual(status, 404)
    self.assertEqual(body, '')  # sanity
def test_upload_part_copy_error(self):
    """Upload-part-copy (PUT with X-Amz-Copy-Source) error responses."""
    src_bucket = 'src'
    src_obj = 'src'
    self.conn.make_request('PUT', src_bucket)
    self.conn.make_request('PUT', src_bucket, src_obj)
    src_path = '%s/%s' % (src_bucket, src_obj)
    bucket = 'bucket'
    self.conn.make_request('PUT', bucket)
    key = 'obj'
    query = 'uploads'
    status, headers, body = \
        self.conn.make_request('POST', bucket, key, query=query)
    elem = fromstring(body, 'InitiateMultipartUploadResult')
    upload_id = elem.find('UploadId').text

    # Bad signature.
    query = 'partNumber=%s&uploadId=%s' % (1, upload_id)
    auth_error_conn = Connection(aws_secret_key='invalid')
    status, headers, body = \
        auth_error_conn.make_request('PUT', bucket, key, headers={
            'X-Amz-Copy-Source': src_path
        }, query=query)
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

    # Unknown destination bucket.
    status, headers, body = \
        self.conn.make_request('PUT', 'nothing', key,
                               headers={'X-Amz-Copy-Source': src_path},
                               query=query)
    self.assertEqual(get_error_code(body), 'NoSuchBucket')

    # Unknown upload id.
    query = 'partNumber=%s&uploadId=%s' % (1, 'nothing')
    status, headers, body = \
        self.conn.make_request('PUT', bucket, key,
                               headers={'X-Amz-Copy-Source': src_path},
                               query=query)
    self.assertEqual(get_error_code(body), 'NoSuchUpload')

    # Unknown source object.
    src_path = '%s/%s' % (src_bucket, 'nothing')
    query = 'partNumber=%s&uploadId=%s' % (1, upload_id)
    status, headers, body = \
        self.conn.make_request('PUT', bucket, key,
                               headers={'X-Amz-Copy-Source': src_path},
                               query=query)
    self.assertEqual(get_error_code(body), 'NoSuchKey')
def test_put_bucket_acl_error(self):
    """PUT bucket?acl error responses."""
    acl_headers = {'x-amz-acl': 'public-read'}

    # A request signed with the wrong secret is rejected.
    bad_conn = Connection(aws_secret_key='invalid')
    status, headers, body = bad_conn.make_request(
        'PUT', self.bucket, headers=acl_headers, query='acl')
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

    # Setting the ACL on an unknown bucket.
    status, headers, body = self.conn.make_request(
        'PUT', 'nothing', headers=acl_headers, query='acl')
    self.assertEqual(get_error_code(body), 'NoSuchBucket')

    # The non-owner account may not change the ACL.
    status, headers, body = self.conn2.make_request(
        'PUT', self.bucket, headers=acl_headers, query='acl')
    self.assertEqual(get_error_code(body), 'AccessDenied')
def setUp(self):
    """Skip unless s3api is enabled; connect with the configured user."""
    if 's3api' not in tf.cluster_info:
        raise tf.SkipTest('s3api middleware is not enabled')
    # user_id is 'account:username' when an account is configured.
    if tf.config.get('account'):
        user_id = '%s:%s' % (tf.config['account'], tf.config['username'])
    else:
        user_id = tf.config['username']
    try:
        self.conn = Connection(
            tf.config['s3_access_key'], tf.config['s3_secret_key'],
            user_id=user_id)
        self.conn.reset()
    except Exception:
        message = '%s got an error during initialize process.\n\n%s' % \
            (self.method_name, traceback.format_exc())
        # TODO: Find a way to make this go to FAIL instead of Error
        self.fail(message)
def test_initiate_multi_upload_error(self):
    """Initiate-multipart-upload error responses, incl. over-long key."""
    bucket = 'bucket'
    key = 'obj'
    self.conn.make_request('PUT', bucket)
    query = 'uploads'

    # Bad signature.
    auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
    status, headers, body = \
        auth_error_conn.make_request('POST', bucket, key, query=query)
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

    # Unknown bucket.
    status, resp_headers, body = \
        self.conn.make_request('POST', 'nothing', key, query=query)
    self.assertEqual(get_error_code(body), 'NoSuchBucket')

    # Key one character longer than the cluster's max object name length.
    status, resp_headers, body = self.conn.make_request(
        'POST', bucket,
        'x' * (tf.cluster_info['swift']['max_object_name_length'] + 1),
        query=query)
    self.assertEqual(get_error_code(body), 'KeyTooLongError')
def test_head_object_error(self):
    """HEAD object errors carry a status code but an empty body."""
    obj = 'object'
    self.conn.make_request('PUT', self.bucket, obj)

    # Bad signature.
    bad_conn = Connection(aws_secret_key='invalid')
    status, headers, body = bad_conn.make_request(
        'HEAD', self.bucket, obj)
    self.assertEqual(status, 403)
    self.assertEqual(body, '')  # sanity
    self.assertEqual(headers['content-type'], 'application/xml')

    # Unknown key.
    status, headers, body = self.conn.make_request(
        'HEAD', self.bucket, 'invalid')
    self.assertEqual(status, 404)
    self.assertEqual(body, '')  # sanity
    self.assertEqual(headers['content-type'], 'application/xml')

    # Unknown bucket.
    status, headers, body = self.conn.make_request(
        'HEAD', 'invalid', obj)
    self.assertEqual(status, 404)
    self.assertEqual(body, '')  # sanity
    self.assertEqual(headers['content-type'], 'application/xml')
def test_head_object_error(self):
    """HEAD object errors carry a status code but an empty (bytes) body."""
    obj = 'object'
    self.conn.make_request('PUT', self.bucket, obj)

    # Bad signature.
    bad_conn = Connection(aws_secret_key='invalid')
    status, headers, body = bad_conn.make_request(
        'HEAD', self.bucket, obj)
    self.assertEqual(status, 403)
    self.assertEqual(body, b'')  # sanity
    self.assertEqual(headers['content-type'], 'application/xml')

    # Unknown key.
    status, headers, body = self.conn.make_request(
        'HEAD', self.bucket, 'invalid')
    self.assertEqual(status, 404)
    self.assertEqual(body, b'')  # sanity
    self.assertEqual(headers['content-type'], 'application/xml')

    # Unknown bucket.
    status, headers, body = self.conn.make_request(
        'HEAD', 'invalid', obj)
    self.assertEqual(status, 404)
    self.assertEqual(body, b'')  # sanity
    self.assertEqual(headers['content-type'], 'application/xml')
def test_put_bucket_error(self):
    """PUT bucket error responses, including second/third-user cases."""
    status, headers, body = \
        self.conn.make_request('PUT', 'bucket+invalid')
    self.assertEqual(get_error_code(body), 'InvalidBucketName')

    # Bad signature.
    auth_error_conn = Connection(aws_secret_key='invalid')
    status, headers, body = auth_error_conn.make_request('PUT', 'bucket')
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

    # Creating the same bucket twice conflicts.
    self.conn.make_request('PUT', 'bucket')
    status, headers, body = self.conn.make_request('PUT', 'bucket')
    self.assertEqual(status, 409)
    self.assertEqual(get_error_code(body), 'BucketAlreadyExists')

    if 's3_access_key2' not in tf.config or \
            's3_secret_key2' not in tf.config:
        raise tf.SkipTest(
            'Cannot test for BucketAlreadyExists with second user; need '
            's3_access_key2 and s3_secret_key2 configured')
    # Other users of the same account get the same error
    conn2 = Connection(tf.config['s3_access_key2'],
                       tf.config['s3_secret_key2'],
                       tf.config['s3_access_key2'])
    status, headers, body = conn2.make_request('PUT', 'bucket')
    self.assertEqual(status, 409)
    self.assertEqual(get_error_code(body), 'BucketAlreadyExists')

    if 's3_access_key3' not in tf.config or \
            's3_secret_key3' not in tf.config:
        raise tf.SkipTest('Cannot test for AccessDenied; need '
                          's3_access_key3 and s3_secret_key3 configured')
    # If the user can't create buckets, they shouldn't even know
    # whether the bucket exists.
    conn3 = Connection(tf.config['s3_access_key3'],
                       tf.config['s3_secret_key3'],
                       tf.config['s3_access_key3'])
    status, headers, body = conn3.make_request('PUT', 'bucket')
    self.assertEqual(status, 403)
    self.assertEqual(get_error_code(body), 'AccessDenied')
def test_service_error_signature_not_match(self):
    """GET service with a bad signature yields SignatureDoesNotMatch."""
    bad_conn = Connection(aws_secret_key='invalid')
    status, headers, body = bad_conn.make_request('GET')
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
    self.assertEqual(headers['content-type'], 'application/xml')
class TestS3Acl(S3ApiBase):
    """Functional tests for bucket/object ACL operations."""

    def setUp(self):
        # Create the shared bucket and a second-account connection.
        super(TestS3Acl, self).setUp()
        self.bucket = 'bucket'
        self.obj = 'object'
        if 's3_access_key2' not in tf.config or \
                's3_secret_key2' not in tf.config:
            raise tf.SkipTest(
                'TestS3Acl requires s3_access_key2 and s3_secret_key2 setting')
        self.conn.make_request('PUT', self.bucket)
        access_key2 = tf.config['s3_access_key2']
        secret_key2 = tf.config['s3_secret_key2']
        # Third arg presumably the user id -- TODO confirm against
        # the Connection signature.
        self.conn2 = Connection(access_key2, secret_key2, access_key2)

    def test_acl(self):
        """Round-trip: set a bucket ACL, then read bucket and object ACLs."""
        self.conn.make_request('PUT', self.bucket, self.obj)
        query = 'acl'

        # PUT Bucket ACL
        headers = {'x-amz-acl': 'public-read'}
        status, headers, body = \
            self.conn.make_request('PUT', self.bucket, headers=headers,
                                   query=query)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertEqual(headers['content-length'], '0')

        # GET Bucket ACL
        status, headers, body = \
            self.conn.make_request('GET', self.bucket, query=query)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        # TODO: Fix the response that last-modified must be in the response.
        # self.assertTrue(headers['last-modified'] is not None)
        self.assertEqual(headers['content-length'], str(len(body)))
        self.assertTrue(headers['content-type'] is not None)
        elem = fromstring(body, 'AccessControlPolicy')
        owner = elem.find('Owner')
        self.assertEqual(owner.find('ID').text, self.conn.user_id)
        self.assertEqual(owner.find('DisplayName').text, self.conn.user_id)
        acl = elem.find('AccessControlList')
        self.assertTrue(acl.find('Grant') is not None)

        # GET Object ACL
        status, headers, body = \
            self.conn.make_request('GET', self.bucket, self.obj, query=query)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        # TODO: Fix the response that last-modified must be in the response.
        # self.assertTrue(headers['last-modified'] is not None)
        self.assertEqual(headers['content-length'], str(len(body)))
        self.assertTrue(headers['content-type'] is not None)
        elem = fromstring(body, 'AccessControlPolicy')
        owner = elem.find('Owner')
        self.assertEqual(owner.find('ID').text, self.conn.user_id)
        self.assertEqual(owner.find('DisplayName').text, self.conn.user_id)
        acl = elem.find('AccessControlList')
        self.assertTrue(acl.find('Grant') is not None)

    def test_put_bucket_acl_error(self):
        """PUT bucket?acl error responses."""
        req_headers = {'x-amz-acl': 'public-read'}
        # Bad signature.
        aws_error_conn = Connection(aws_secret_key='invalid')
        status, headers, body = \
            aws_error_conn.make_request('PUT', self.bucket,
                                        headers=req_headers, query='acl')
        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

        # Unknown bucket.
        status, headers, body = \
            self.conn.make_request('PUT', 'nothing',
                                   headers=req_headers, query='acl')
        self.assertEqual(get_error_code(body), 'NoSuchBucket')

        # Non-owner account.
        status, headers, body = \
            self.conn2.make_request('PUT', self.bucket,
                                    headers=req_headers, query='acl')
        self.assertEqual(get_error_code(body), 'AccessDenied')

    def test_get_bucket_acl_error(self):
        """GET bucket?acl error responses."""
        # Bad signature.
        aws_error_conn = Connection(aws_secret_key='invalid')
        status, headers, body = \
            aws_error_conn.make_request('GET', self.bucket, query='acl')
        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

        # Unknown bucket.
        status, headers, body = \
            self.conn.make_request('GET', 'nothing', query='acl')
        self.assertEqual(get_error_code(body), 'NoSuchBucket')

        # Non-owner account.
        status, headers, body = \
            self.conn2.make_request('GET', self.bucket, query='acl')
        self.assertEqual(get_error_code(body), 'AccessDenied')

    def test_get_object_acl_error(self):
        """GET object?acl error responses."""
        self.conn.make_request('PUT', self.bucket, self.obj)
        # Bad signature.
        aws_error_conn = Connection(aws_secret_key='invalid')
        status, headers, body = \
            aws_error_conn.make_request('GET', self.bucket, self.obj,
                                        query='acl')
        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

        # Unknown key.
        status, headers, body = \
            self.conn.make_request('GET', self.bucket, 'nothing', query='acl')
        self.assertEqual(get_error_code(body), 'NoSuchKey')

        # Non-owner account.
        status, headers, body = \
            self.conn2.make_request('GET', self.bucket, self.obj, query='acl')
        self.assertEqual(get_error_code(body), 'AccessDenied')
def test_complete_multi_upload_error(self):
    """Complete-multipart-upload (POST with uploadId) error responses."""
    bucket = 'bucket'
    keys = ['obj', 'obj2']
    self.conn.make_request('PUT', bucket)
    query = 'uploads'
    status, headers, body = \
        self.conn.make_request('POST', bucket, keys[0], query=query)
    elem = fromstring(body, 'InitiateMultipartUploadResult')
    upload_id = elem.find('UploadId').text
    # Upload two (empty) parts and remember their etags.
    etags = []
    for i in xrange(1, 3):
        query = 'partNumber=%s&uploadId=%s' % (i, upload_id)
        status, headers, body = \
            self.conn.make_request('PUT', bucket, keys[0], query=query)
        etags.append(headers['etag'])
    xml = self._gen_comp_xml(etags)

    # part 1 too small
    query = 'uploadId=%s' % upload_id
    status, headers, body = \
        self.conn.make_request('POST', bucket, keys[0], body=xml,
                               query=query)
    self.assertEqual(get_error_code(body), 'EntityTooSmall')

    # invalid credentials
    auth_error_conn = Connection(aws_secret_key='invalid')
    status, headers, body = \
        auth_error_conn.make_request('POST', bucket, keys[0], body=xml,
                                     query=query)
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

    # wrong/missing bucket
    status, headers, body = \
        self.conn.make_request('POST', 'nothing', keys[0], query=query)
    self.assertEqual(get_error_code(body), 'NoSuchBucket')

    # wrong upload ID
    query = 'uploadId=%s' % 'nothing'
    status, headers, body = \
        self.conn.make_request('POST', bucket, keys[0], body=xml,
                               query=query)
    self.assertEqual(get_error_code(body), 'NoSuchUpload')

    # without Part tag in xml
    query = 'uploadId=%s' % upload_id
    xml = self._gen_comp_xml([])
    status, headers, body = \
        self.conn.make_request('POST', bucket, keys[0], body=xml,
                               query=query)
    self.assertEqual(get_error_code(body), 'MalformedXML')

    # with invalid etag in xml
    invalid_etag = 'invalid'
    xml = self._gen_comp_xml([invalid_etag])
    status, headers, body = \
        self.conn.make_request('POST', bucket, keys[0], body=xml,
                               query=query)
    self.assertEqual(get_error_code(body), 'InvalidPart')

    # without part in Swift
    query = 'uploads'
    status, headers, body = \
        self.conn.make_request('POST', bucket, keys[1], query=query)
    elem = fromstring(body, 'InitiateMultipartUploadResult')
    upload_id = elem.find('UploadId').text
    query = 'uploadId=%s' % upload_id
    xml = self._gen_comp_xml([etags[0]])
    status, headers, body = \
        self.conn.make_request('POST', bucket, keys[1], body=xml,
                               query=query)
    self.assertEqual(get_error_code(body), 'InvalidPart')
def test_complete_multi_upload_error(self):
    """Complete-multipart-upload (POST with uploadId) error responses."""
    bucket = 'bucket'
    keys = ['obj', 'obj2']
    self.conn.make_request('PUT', bucket)
    query = 'uploads'
    status, headers, body = \
        self.conn.make_request('POST', bucket, keys[0], query=query)
    elem = fromstring(body, 'InitiateMultipartUploadResult')
    upload_id = elem.find('UploadId').text
    # Upload two (empty) parts and remember their etags.
    etags = []
    for i in range(1, 3):
        query = 'partNumber=%s&uploadId=%s' % (i, upload_id)
        status, headers, body = \
            self.conn.make_request('PUT', bucket, keys[0], query=query)
        etags.append(headers['etag'])
    xml = self._gen_comp_xml(etags)

    # part 1 too small
    query = 'uploadId=%s' % upload_id
    status, headers, body = \
        self.conn.make_request('POST', bucket, keys[0], body=xml,
                               query=query)
    self.assertEqual(get_error_code(body), 'EntityTooSmall')

    # invalid credentials
    auth_error_conn = Connection(aws_secret_key='invalid')
    status, headers, body = \
        auth_error_conn.make_request('POST', bucket, keys[0], body=xml,
                                     query=query)
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

    # wrong/missing bucket
    status, headers, body = \
        self.conn.make_request('POST', 'nothing', keys[0], query=query)
    self.assertEqual(get_error_code(body), 'NoSuchBucket')

    # wrong upload ID
    query = 'uploadId=%s' % 'nothing'
    status, headers, body = \
        self.conn.make_request('POST', bucket, keys[0], body=xml,
                               query=query)
    self.assertEqual(get_error_code(body), 'NoSuchUpload')

    # without Part tag in xml
    query = 'uploadId=%s' % upload_id
    xml = self._gen_comp_xml([])
    status, headers, body = \
        self.conn.make_request('POST', bucket, keys[0], body=xml,
                               query=query)
    self.assertEqual(get_error_code(body), 'MalformedXML')

    # with invalid etag in xml
    invalid_etag = 'invalid'
    xml = self._gen_comp_xml([invalid_etag])
    status, headers, body = \
        self.conn.make_request('POST', bucket, keys[0], body=xml,
                               query=query)
    self.assertEqual(get_error_code(body), 'InvalidPart')

    # without part in Swift
    query = 'uploads'
    status, headers, body = \
        self.conn.make_request('POST', bucket, keys[1], query=query)
    elem = fromstring(body, 'InitiateMultipartUploadResult')
    upload_id = elem.find('UploadId').text
    query = 'uploadId=%s' % upload_id
    xml = self._gen_comp_xml([etags[0]])
    status, headers, body = \
        self.conn.make_request('POST', bucket, keys[1], body=xml,
                               query=query)
    self.assertEqual(get_error_code(body), 'InvalidPart')
def test_delete_multi_objects_error(self):
    """Verify the error responses of Multi-Object Delete (POST ?delete).

    Covers bad credentials, missing bucket, malformed request XML, a
    Key tag without a value, too many objects, and an oversized body.

    NOTE(review): this method name is defined again later in this file;
    Python keeps only the last definition, so the duplicates should be
    reconciled — confirm which version is intended.
    """
    bucket = 'bucket'
    put_objects = ['obj']
    self._prepare_test_delete_multi_objects(bucket, put_objects)
    xml = self._gen_multi_delete_xml(put_objects)
    content_md5 = calculate_md5(xml)
    query = 'delete'

    # invalid credentials
    auth_error_conn = Connection(aws_secret_key='invalid')
    status, headers, body = \
        auth_error_conn.make_request('POST', bucket, body=xml,
                                     headers={'Content-MD5': content_md5},
                                     query=query)
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

    # nonexistent bucket
    status, headers, body = \
        self.conn.make_request('POST', 'nothing', body=xml,
                               headers={'Content-MD5': content_md5},
                               query=query)
    self.assertEqual(get_error_code(body), 'NoSuchBucket')

    # without Object tag
    xml = self._gen_invalid_multi_delete_xml()
    content_md5 = calculate_md5(xml)
    status, headers, body = \
        self.conn.make_request('POST', bucket, body=xml,
                               headers={'Content-MD5': content_md5},
                               query=query)
    self.assertEqual(get_error_code(body), 'MalformedXML')

    # without value of Key tag
    xml = self._gen_invalid_multi_delete_xml(hasObjectTag=True)
    content_md5 = calculate_md5(xml)
    status, headers, body = \
        self.conn.make_request('POST', bucket, body=xml,
                               headers={'Content-MD5': content_md5},
                               query=query)
    self.assertEqual(get_error_code(body), 'UserKeyMustBeSpecified')

    # specified number of objects are over max_multi_delete_objects
    # (Default 1000), but xml size is smaller than 61365 bytes.
    # Fix: format the placeholder ('obj%s' % var) so the request names
    # 1001 *distinct* objects; the original left '%s' unformatted, which
    # produced 1001 copies of the same literal name. Also use range()
    # instead of Python-2-only xrange() for py2/py3 compatibility.
    req_objects = ['obj%s' % var for var in range(1001)]
    xml = self._gen_multi_delete_xml(req_objects)
    self.assertTrue(len(xml.encode('utf-8')) <= MAX_MULTI_DELETE_BODY_SIZE)
    content_md5 = calculate_md5(xml)
    status, headers, body = \
        self.conn.make_request('POST', bucket, body=xml,
                               headers={'Content-MD5': content_md5},
                               query=query)
    self.assertEqual(get_error_code(body), 'MalformedXML')

    # specified xml size is over 61365 bytes, but number of objects are
    # smaller than max_multi_delete_objects.
    obj = 'a' * 1024
    req_objects = [obj + str(var) for var in range(999)]
    xml = self._gen_multi_delete_xml(req_objects)
    self.assertTrue(len(xml.encode('utf-8')) > MAX_MULTI_DELETE_BODY_SIZE)
    content_md5 = calculate_md5(xml)
    status, headers, body = \
        self.conn.make_request('POST', bucket, body=xml,
                               headers={'Content-MD5': content_md5},
                               query=query)
    self.assertEqual(get_error_code(body), 'MalformedXML')
def test_delete_multi_objects_error(self):
    """Verify the error responses of Multi-Object Delete (POST ?delete).

    Covers bad credentials, missing bucket, malformed request XML, a
    Key tag without a value, too many objects (cluster-configured
    limit), and an oversized request body.

    NOTE(review): this method name is defined again later in this file;
    Python keeps only the last definition, so the duplicates should be
    reconciled — confirm which version is intended.
    """
    bucket = 'bucket'
    put_objects = ['obj']
    self._prepare_test_delete_multi_objects(bucket, put_objects)
    xml = self._gen_multi_delete_xml(put_objects)
    content_md5 = calculate_md5(xml)
    query = 'delete'

    # invalid credentials
    auth_error_conn = Connection(aws_secret_key='invalid')
    status, headers, body = \
        auth_error_conn.make_request('POST', bucket, body=xml,
                                     headers={'Content-MD5': content_md5},
                                     query=query)
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

    # nonexistent bucket
    status, headers, body = \
        self.conn.make_request('POST', 'nothing', body=xml,
                               headers={'Content-MD5': content_md5},
                               query=query)
    self.assertEqual(get_error_code(body), 'NoSuchBucket')

    # without Object tag
    xml = self._gen_invalid_multi_delete_xml()
    content_md5 = calculate_md5(xml)
    status, headers, body = \
        self.conn.make_request('POST', bucket, body=xml,
                               headers={'Content-MD5': content_md5},
                               query=query)
    self.assertEqual(get_error_code(body), 'MalformedXML')

    # without value of Key tag
    xml = self._gen_invalid_multi_delete_xml(hasObjectTag=True)
    content_md5 = calculate_md5(xml)
    status, headers, body = \
        self.conn.make_request('POST', bucket, body=xml,
                               headers={'Content-MD5': content_md5},
                               query=query)
    self.assertEqual(get_error_code(body), 'UserKeyMustBeSpecified')

    # Read the deployed limit so the test tracks cluster configuration.
    max_deletes = tf.cluster_info.get('s3api', {}).get(
        'max_multi_delete_objects', 1000)

    # specified number of objects are over max_multi_delete_objects
    # (Default 1000), but xml size is relatively small.
    # Fix: format the placeholder ('obj%s' % var) so the request names
    # max_deletes + 1 *distinct* objects; the original left '%s'
    # unformatted, producing identical copies of the literal name.
    req_objects = ['obj%s' % var for var in range(max_deletes + 1)]
    xml = self._gen_multi_delete_xml(req_objects)
    content_md5 = calculate_md5(xml)
    status, headers, body = \
        self.conn.make_request('POST', bucket, body=xml,
                               headers={'Content-MD5': content_md5},
                               query=query)
    self.assertEqual(get_error_code(body), 'MalformedXML')

    # specified xml size is large, but number of objects are
    # smaller than max_multi_delete_objects.
    obj = 'a' * 102400
    req_objects = [obj + str(var) for var in range(max_deletes - 1)]
    xml = self._gen_multi_delete_xml(req_objects)
    content_md5 = calculate_md5(xml)
    status, headers, body = \
        self.conn.make_request('POST', bucket, body=xml,
                               headers={'Content-MD5': content_md5},
                               query=query)
    self.assertEqual(get_error_code(body), 'MalformedXML')
def test_delete_multi_objects_error(self):
    """Verify the error responses of Multi-Object Delete (POST ?delete).

    Covers bad credentials, missing bucket, malformed request XML, a
    Key tag without a value, too many objects (cluster-configured
    limit), and an oversized request body.

    NOTE(review): earlier, near-identical definitions of this method
    exist in this file; Python keeps only the last one, so the
    duplicates should be reconciled — confirm which version is intended.
    """
    bucket = 'bucket'
    put_objects = ['obj']
    self._prepare_test_delete_multi_objects(bucket, put_objects)
    xml = self._gen_multi_delete_xml(put_objects)
    content_md5 = calculate_md5(xml)
    query = 'delete'

    # invalid credentials
    auth_error_conn = Connection(aws_secret_key='invalid')
    status, headers, body = \
        auth_error_conn.make_request('POST', bucket, body=xml,
                                     headers={'Content-MD5': content_md5},
                                     query=query)
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

    # nonexistent bucket
    status, headers, body = \
        self.conn.make_request('POST', 'nothing', body=xml,
                               headers={'Content-MD5': content_md5},
                               query=query)
    self.assertEqual(get_error_code(body), 'NoSuchBucket')

    # without Object tag
    xml = self._gen_invalid_multi_delete_xml()
    content_md5 = calculate_md5(xml)
    status, headers, body = \
        self.conn.make_request('POST', bucket, body=xml,
                               headers={'Content-MD5': content_md5},
                               query=query)
    self.assertEqual(get_error_code(body), 'MalformedXML')

    # without value of Key tag
    xml = self._gen_invalid_multi_delete_xml(hasObjectTag=True)
    content_md5 = calculate_md5(xml)
    status, headers, body = \
        self.conn.make_request('POST', bucket, body=xml,
                               headers={'Content-MD5': content_md5},
                               query=query)
    self.assertEqual(get_error_code(body), 'UserKeyMustBeSpecified')

    # Read the deployed limit so the test tracks cluster configuration.
    max_deletes = tf.cluster_info.get('s3api', {}).get(
        'max_multi_delete_objects', 1000)

    # specified number of objects are over max_multi_delete_objects
    # (Default 1000), but xml size is relatively small.
    # Fix: format the placeholder ('obj%s' % var) so the request names
    # max_deletes + 1 *distinct* objects; the original left '%s'
    # unformatted. Also use range() instead of Python-2-only xrange()
    # for consistency with the sibling version of this test.
    req_objects = ['obj%s' % var for var in range(max_deletes + 1)]
    xml = self._gen_multi_delete_xml(req_objects)
    content_md5 = calculate_md5(xml)
    status, headers, body = \
        self.conn.make_request('POST', bucket, body=xml,
                               headers={'Content-MD5': content_md5},
                               query=query)
    self.assertEqual(get_error_code(body), 'MalformedXML')

    # specified xml size is large, but number of objects are
    # smaller than max_multi_delete_objects.
    obj = 'a' * 102400
    req_objects = [obj + str(var) for var in range(max_deletes - 1)]
    xml = self._gen_multi_delete_xml(req_objects)
    content_md5 = calculate_md5(xml)
    status, headers, body = \
        self.conn.make_request('POST', bucket, body=xml,
                               headers={'Content-MD5': content_md5},
                               query=query)
    self.assertEqual(get_error_code(body), 'MalformedXML')