def test_head_object_if_modified_since(self):
    size = 1024 * 256
    self.assert_head_bucket_result(result=self.s3.head_bucket(
        Bucket=env.BUCKET))
    key = KEY_PREFIX + random_string(16)
    body = random_bytes(size)
    expect_md5 = compute_md5(body)
    self.assert_put_object_result(result=self.s3.put_object(
        Bucket=env.BUCKET, Key=key, Body=body), etag=expect_md5)
    self.assert_head_object_result(result=self.s3.head_object(
        Bucket=env.BUCKET,
        Key=key,
        IfModifiedSince=datetime.datetime(1946, 2, 14)),
        etag=expect_md5, content_length=size)
    try:
        self.s3.head_object(Bucket=env.BUCKET, Key=key,
                            IfModifiedSince=datetime.datetime.now())
        self.fail()  # Not raising an exception here is a test failure.
    except Exception as e:
        # Error code 304 (Not Modified) is the expected outcome.
        self.assert_client_error(error=e, expect_status_code=304)
    self.assert_delete_object_result(
        result=self.s3.delete_object(Bucket=env.BUCKET, Key=key))
    self.assert_delete_object_result(
        result=self.s3.delete_object(Bucket=env.BUCKET, Key=KEY_PREFIX))
def test_head_object_if_match(self):
    size = 1024 * 256
    self.assert_head_bucket_result(result=self.s3.head_bucket(
        Bucket=env.BUCKET))
    key = KEY_PREFIX + random_string(16)
    body = random_bytes(size)
    expect_md5 = compute_md5(body)
    self.assert_put_object_result(result=self.s3.put_object(
        Bucket=env.BUCKET, Key=key, Body=body), etag=expect_md5)
    self.assert_head_object_result(result=self.s3.head_object(
        Bucket=env.BUCKET, Key=key, IfMatch=expect_md5),
        etag=expect_md5, content_length=size)
    try:
        fake_etag = '1b2cf535f27731c974343645a3985328'
        self.s3.head_object(Bucket=env.BUCKET, Key=key, IfMatch=fake_etag)
        self.fail()  # Not raising an exception here is a test failure.
    except Exception as e:
        # Error code 412 (Precondition Failed) is the expected outcome.
        self.assert_client_error(error=e, expect_status_code=412)
    self.assert_delete_object_result(
        result=self.s3.delete_object(Bucket=env.BUCKET, Key=key))
    self.assert_delete_object_result(
        result=self.s3.delete_object(Bucket=env.BUCKET, Key=KEY_PREFIX))
def test_list_object_v2_etag(self):
    file_num = 40
    files = {}  # key -> etag
    for _ in range(file_num):
        key = KEY_PREFIX + random_string(16)
        result = self.s3.put_object(Bucket=env.BUCKET, Key=key,
                                    Body=random_bytes(16))
        self.assert_put_object_result(result=result)
        files[key] = result['ETag'].strip('"')
    # Validate the list result page by page.
    contents = []
    continuation_token = ''
    truncated = True
    while truncated:
        result = self.s3.list_objects_v2(
            Bucket=env.BUCKET,
            Prefix=KEY_PREFIX,
            ContinuationToken=continuation_token,
            MaxKeys=30)
        self.assertEqual(result['ResponseMetadata']['HTTPStatusCode'], 200)
        if 'Contents' in result:
            result_contents = result['Contents']
            self.assertIsInstance(result_contents, list)
            contents = contents + result_contents
        if 'NextContinuationToken' in result:
            next_token = result['NextContinuationToken']
            if next_token != '':
                continuation_token = next_token
        if 'IsTruncated' in result:
            truncated = bool(result['IsTruncated'])
        else:
            truncated = False
    for content in contents:
        key = content['Key']
        etag = content['ETag'].strip('"')
        if not key.endswith('/'):
            # Validate the ETag against the source data.
            self.assertEqual(etag, files[key])
        # Validate the ETag against the head_object result.
        self.assert_head_object_result(self.s3.head_object(
            Bucket=env.BUCKET, Key=key), etag=etag)
    # Clean up test data.
    objects = []
    for content in contents:
        objects.append({'Key': content['Key']})
    self.s3.delete_objects(Bucket=env.BUCKET, Delete={'Objects': objects})
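# A minimal sketch (assumption, not part of the original suite): the manual
# ContinuationToken loop in test_list_object_v2_etag could also be written
# with boto3's built-in paginator, which follows NextContinuationToken
# internally. The method name and PageSize value are illustrative only.
def _list_all_keys_v2_sketch(self, prefix):
    keys = []
    paginator = self.s3.get_paginator('list_objects_v2')
    for page in paginator.paginate(Bucket=env.BUCKET, Prefix=prefix,
                                   PaginationConfig={'PageSize': 30}):
        for content in page.get('Contents', []):
            keys.append(content['Key'])
    return keys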
def _init_object(self):
    file_keys = []
    result = self.s3.list_objects(Bucket=BUCKET, Prefix=KEY_PREFIX)
    if 'Contents' in result:
        contents = result['Contents']
        for content in contents:
            file_keys.append({'Key': content.get('Key')})
    if len(file_keys) > 0:
        self.s3.delete_objects(
            Bucket=BUCKET,
            Delete={'Objects': file_keys}
        )
    self.s3.put_object(Bucket=BUCKET, Key=self.file_key,
                       Body=random_bytes(self.file_size))
def test_cors_options(self):
    # Put object
    key = 'test-options-object'
    size = 1024 * 256
    body = random_bytes(size)
    expect_md5 = compute_md5(body)
    self.assert_put_object_result(result=self.s3.put_object(
        Bucket=env.BUCKET, Key=key, Body=body), etag=expect_md5)
    # Put bucket cors
    self.assert_result_status_code(result=self.s3.put_bucket_cors(
        Bucket=env.BUCKET, CORSConfiguration=CORS_CONFIG))
    options_url = '{bucket_url}/{key}'.format(bucket_url=BUCKET_URL, key=key)
    # Send an OPTIONS request while the CORS configuration is in effect
    self.assert_cors_request_result(
        result=requests.options(
            url=options_url,
            headers={
                'Origin': PUT_ORIGIN,
                'Access-Control-Request-Method': 'GET'
            }),
        response_code=200,
        response_origin=PUT_ORIGIN,
        response_method='GET')
    # Delete bucket cors
    self.assert_result_status_code(
        result=self.s3.delete_bucket_cors(Bucket=env.BUCKET),
        status_code=204)
    # Send the OPTIONS request again; no CORS headers are expected
    self.assert_cors_request_result(
        result=requests.options(
            url=options_url,
            headers={
                'Origin': PUT_ORIGIN,
                'Access-Control-Request-Method': 'GET'
            }),
        response_code=200,
        response_origin=None,
        response_method=None)
    # Delete object
    self.assert_delete_object_result(
        result=self.s3.delete_object(Bucket=env.BUCKET, Key=key))
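# The CORS_CONFIG constant used above is defined elsewhere in this suite.
# A plausible value (assumption, shown only so the test reads in isolation)
# would follow the standard put_bucket_cors CORSConfiguration structure:
#
# CORS_CONFIG = {
#     'CORSRules': [
#         {
#             'AllowedOrigins': [PUT_ORIGIN],
#             'AllowedMethods': ['GET', 'PUT'],
#             'AllowedHeaders': ['*'],
#             'MaxAgeSeconds': 3600,
#         }
#     ]
# }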
def __do_test_put_object(self, file_name, file_size):
    key = file_name
    body = random_bytes(file_size)
    expect_etag = compute_md5(body)
    # put object
    self.assert_put_object_result(
        result=self.s3.put_object(Bucket=BUCKET, Key=key, Body=body),
        etag=expect_etag)
    # head object
    self.assert_head_object_result(
        result=self.s3.head_object(Bucket=BUCKET, Key=key),
        etag=expect_etag,
        content_length=file_size)
    # get object
    self.assert_get_object_result(
        result=self.s3.get_object(Bucket=BUCKET, Key=key),
        etag=expect_etag,
        content_length=file_size,
        body_md5=expect_etag)
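# A hypothetical driver (assumption, not part of the original suite) showing
# how __do_test_put_object could be exercised across several object sizes,
# including a zero-byte object; the method name and size list are
# illustrative only.
def test_put_object_various_sizes_sketch(self):
    for file_size in (0, 1, 1024, 1024 * 256):
        file_name = KEY_PREFIX + random_string(16)
        self.__do_test_put_object(file_name=file_name, file_size=file_size)
        # Clean up the object written by the helper.
        self.s3.delete_object(Bucket=BUCKET, Key=file_name)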
def test_head_object(self):
    size = 1024 * 256
    self.assert_head_bucket_result(self.s3.head_bucket(Bucket=env.BUCKET))
    key = KEY_PREFIX + random_string(16)
    body = random_bytes(size)
    expect_md5 = compute_md5(body)
    self.assert_put_object_result(result=self.s3.put_object(
        Bucket=env.BUCKET, Key=key, Body=body), etag=expect_md5)
    self.assert_head_object_result(result=self.s3.head_object(
        Bucket=env.BUCKET, Key=key), etag=expect_md5, content_length=size)
    self.assert_delete_object_result(
        result=self.s3.delete_object(Bucket=env.BUCKET, Key=key))
    self.assert_delete_object_result(
        result=self.s3.delete_object(Bucket=env.BUCKET, Key=KEY_PREFIX))
    try:
        self.s3.head_object(Bucket=env.BUCKET, Key=key)
        self.fail()  # Not raising an exception here is a test failure.
    except Exception as e:
        # Error code 404 (Not Found) is the expected outcome for the
        # deleted key.
        self.assert_client_error(e, expect_status_code=404)
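# The expected-404 check above could equivalently be expressed (a sketch,
# assuming botocore is importable in this suite) with assertRaises and
# botocore's ClientError instead of a manual try/except:
#
#     from botocore.exceptions import ClientError
#
#     with self.assertRaises(ClientError) as ctx:
#         self.s3.head_object(Bucket=env.BUCKET, Key=key)
#     self.assertEqual(
#         ctx.exception.response['ResponseMetadata']['HTTPStatusCode'], 404)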
def test_list_object_page_v1(self):
    '''Test paging through list_objects (v1) with Marker/NextMarker.'''
    files = {}  # key -> etag
    self.clear_data()
    time.sleep(5)
    file_keys = []
    prefix_keys = {}
    for test_file in self.test_files:
        for _ in range(test_file.get('file_num', 1)):
            prefix = test_file['prefix']
            key = prefix + test_file['name'] + random_string(16)
            if not prefix_keys.get(prefix):
                prefix_keys[prefix] = []
            prefix_keys.get(prefix).append(key)
            test_file['key'] = key
            file_keys.append({'Key': key})
            result = self.s3.put_object(Bucket=env.BUCKET, Key=key,
                                        Body=random_bytes(16))
            self.assert_put_object_result(result=result)
            files[key] = result['ETag'].strip('"')
    file_keys = sorted(file_keys, key=lambda f: f['Key'])
    last_key = file_keys[-1].get('Key')

    prefixes = []
    for test_file in self.test_files:
        if test_file.get('prefix') not in prefixes:
            prefixes.append(test_file.get('prefix'))

    for prefix in prefixes:
        prefix_keys = [f['Key'] for f in file_keys
                       if f['Key'].startswith(prefix)]
        marker = ''
        # Validate the list result page by page.
        contents = []
        truncated = True
        last_marker = ''
        while truncated:
            result_contents = []
            result = self.s3.list_objects(Bucket=env.BUCKET,
                                          Prefix=prefix,
                                          Marker=marker,
                                          MaxKeys=self.max_page_size)
            self.assertEqual(
                result['ResponseMetadata']['HTTPStatusCode'], 200)
            if 'Contents' in result:
                result_contents = result['Contents']
                contents = contents + result_contents
            if 'NextMarker' in result:
                next_marker = result['NextMarker']
                if next_marker != '':
                    last_marker = marker
                    marker = next_marker
            if 'IsTruncated' in result:
                truncated = bool(result['IsTruncated'])
            else:
                truncated = False
            if truncated and last_marker != '':
                # The first key of the current page must equal the marker
                # used to request it.
                self.assertEqual(last_marker, result_contents[0]['Key'])
        # The last listed key must be the last key under this prefix.
        self.assertEqual(prefix_keys[-1], contents[-1]['Key'])

    # List without a prefix and count all non-directory keys.
    truncated = True
    prefix = ""
    marker = ""
    file_count = 0
    while truncated:
        result = self.s3.list_objects(Bucket=env.BUCKET,
                                      Prefix=prefix,
                                      Marker=marker,
                                      MaxKeys=self.max_page_size)
        truncated = bool(result.get('IsTruncated', False))
        marker = result.get('NextMarker', "")
        for content in result.get('Contents', []):
            if not content.get('Key', "").endswith("/"):
                file_count += 1
    self.assertEqual(file_count, len(file_keys))

    self.s3.delete_objects(
        Bucket=env.BUCKET,
        Delete={'Objects': file_keys}
    )
def test_list_object_page_v2(self):
    '''Test paging through list_objects_v2 with ContinuationToken.'''
    self.clear_data()
    time.sleep(5)
    files = {}  # key -> etag
    file_keys = []
    prefix_keys = {}
    for test_file in self.test_files:
        for _ in range(test_file.get('file_num', 1)):
            prefix = test_file['prefix']
            key = prefix + test_file['name'] + random_string(16)
            if not prefix_keys.get(prefix):
                prefix_keys[prefix] = []
            prefix_keys.get(prefix).append(key)
            test_file['key'] = key
            file_keys.append({'Key': key})
            result = self.s3.put_object(Bucket=env.BUCKET, Key=key,
                                        Body=random_bytes(16))
            self.assert_put_object_result(result=result)
            files[key] = result['ETag'].strip('"')
    file_keys = sorted(file_keys, key=lambda f: f['Key'])
    last_key = file_keys[-1].get('Key')

    prefixes = []
    for test_file in self.test_files:
        if test_file.get('prefix') not in prefixes:
            prefixes.append(test_file.get('prefix'))

    # Validate the list result per prefix, page by page.
    for prefix in prefixes:
        prefix_keys = [f['Key'] for f in file_keys
                       if f['Key'].startswith(prefix)]
        contents = []
        continuation_token = ''
        last_marker = ''
        truncated = True
        while truncated:
            result = self.s3.list_objects_v2(
                Bucket=env.BUCKET,
                Prefix=prefix,
                ContinuationToken=continuation_token,
                MaxKeys=self.max_page_size)
            self.assertEqual(
                result['ResponseMetadata']['HTTPStatusCode'], 200)
            if 'Contents' in result:
                result_contents = result['Contents']
                self.assertIsInstance(result_contents, list)
                contents = contents + result_contents
            if 'NextContinuationToken' in result:
                next_token = result['NextContinuationToken']
                if next_token != '':
                    last_marker = continuation_token
                    continuation_token = next_token
            if 'IsTruncated' in result:
                truncated = bool(result['IsTruncated'])
            else:
                truncated = False
            if truncated and last_marker != '':
                # The first key of the current page must equal the token
                # used to request it.
                self.assertEqual(last_marker, result_contents[0]['Key'])
        # The last listed key must be the last key under this prefix.
        self.assertEqual(prefix_keys[-1], contents[-1]['Key'])

    # List without a prefix and count all non-directory keys.
    truncated = True
    prefix = ""
    continuation_token = ""
    file_count = 0
    while truncated:
        result = self.s3.list_objects_v2(
            Bucket=env.BUCKET,
            Prefix=prefix,
            ContinuationToken=continuation_token,
            MaxKeys=self.max_page_size)
        truncated = bool(result.get('IsTruncated', False))
        continuation_token = result.get('NextContinuationToken', "")
        for content in result.get('Contents', []):
            if not content.get('Key', "").endswith("/"):
                file_count += 1
    self.assertEqual(file_count, len(file_keys))

    self.s3.delete_objects(
        Bucket=env.BUCKET,
        Delete={'Objects': file_keys}
    )