def __init__(self, app, conf, *args, **kwargs):
    self.app = app
    self.conf = Config()

    # Set default values if they are not configured
    self.conf.allow_no_owner = config_true_value(
        conf.get('allow_no_owner', False))
    self.conf.location = conf.get('location', 'US')
    self.conf.dns_compliant_bucket_names = config_true_value(
        conf.get('dns_compliant_bucket_names', True))
    self.conf.max_bucket_listing = config_positive_int_value(
        conf.get('max_bucket_listing', 1000))
    self.conf.max_parts_listing = config_positive_int_value(
        conf.get('max_parts_listing', 1000))
    self.conf.max_multi_delete_objects = config_positive_int_value(
        conf.get('max_multi_delete_objects', 1000))
    self.conf.s3_acl = config_true_value(conf.get('s3_acl', False))
    self.conf.storage_domain = conf.get('storage_domain', '')
    self.conf.auth_pipeline_check = config_true_value(
        conf.get('auth_pipeline_check', True))
    self.conf.max_upload_part_num = config_positive_int_value(
        conf.get('max_upload_part_num', 1000))
    self.conf.check_bucket_owner = config_true_value(
        conf.get('check_bucket_owner', False))
    self.conf.force_swift_request_proxy_log = config_true_value(
        conf.get('force_swift_request_proxy_log', False))
    self.conf.allow_multipart_uploads = config_true_value(
        conf.get('allow_multipart_uploads', True))
    self.conf.min_segment_size = config_positive_int_value(
        conf.get('min_segment_size', 5242880))

    self.logger = get_logger(
        conf, log_route=conf.get('log_name', 's3api'))
    self.slo_enabled = self.conf.allow_multipart_uploads
    self.check_pipeline(self.conf)
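# A minimal, hedged demonstration of the two coercion helpers the constructor
# above relies on. This sketch assumes the documented behavior of
# swift.common.utils.config_true_value (truthy-string parsing) and
# config_positive_int_value (positive-integer validation); it is not part of
# the middleware module itself.
from swift.common.utils import config_true_value, config_positive_int_value

assert config_true_value('yes')
assert not config_true_value('false')
assert config_positive_int_value('1000') == 1000
try:
    config_positive_int_value('0')
except ValueError:
    pass  # values that are not positive integers are rejected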
def test_config(self):
    conf = Config({
        'a': 'str',
        'b': 10,
        'c': True,
    })

    conf.update({
        'a': 'str2',
        'b': '100',
        'c': 'false',
    })

    self.assertEqual(conf['a'], 'str2')
    self.assertEqual(conf['b'], 100)
    self.assertEqual(conf['c'], False)
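# The test above relies on Config.update() coercing incoming strings to the
# type of the value already stored ('b' stays an int, 'c' stays a bool). A
# minimal sketch of that behavior follows; CoercingConfig is a hypothetical
# stand-in for illustration, not the actual s3api Config class.
from swift.common.utils import config_true_value


class CoercingConfig(dict):
    def update(self, other):
        for key, value in other.items():
            self[key] = value

    def __setitem__(self, key, value):
        if key in self and isinstance(value, str):
            current = dict.__getitem__(self, key)
            if isinstance(current, bool):
                # check bool before int: bool is an int subclass
                value = config_true_value(value)
            elif isinstance(current, int):
                value = int(value)
        dict.__setitem__(self, key, value)


conf = CoercingConfig({'a': 'str', 'b': 10, 'c': True})
conf.update({'a': 'str2', 'b': '100', 'c': 'false'})
assert (conf['a'], conf['b'], conf['c']) == ('str2', 100, False)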
def setUp(self):
    # set up default config
    self.conf = Config({
        'allow_no_owner': False,
        'location': 'us-east-1',
        'dns_compliant_bucket_names': True,
        'max_bucket_listing': 1000,
        'max_parts_listing': 1000,
        'max_multi_delete_objects': 1000,
        's3_acl': False,
        'storage_domain': 'localhost',
        'auth_pipeline_check': True,
        'max_upload_part_num': 1000,
        'check_bucket_owner': False,
        'force_swift_request_proxy_log': False,
        'allow_multipart_uploads': True,
        'min_segment_size': 5242880,
        'log_s3api_command': False,
    })
    # those 2 settings existed in the original test setup
    self.conf.log_level = 'debug'

    self.app = FakeApp()
    self.swift = self.app.swift
    self.s3api = filter_factory({}, **self.conf)(self.app)
    self.logger = self.s3api.logger = self.swift.logger = debug_logger()

    # stub backend Swift responses for the account, bucket and object paths
    self.swift.register('HEAD', '/v1/AUTH_test',
                        swob.HTTPOk, {}, None)
    self.swift.register('HEAD', '/v1/AUTH_test/bucket',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('PUT', '/v1/AUTH_test/bucket',
                        swob.HTTPCreated, {}, None)
    self.swift.register('POST', '/v1/AUTH_test/bucket',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('DELETE', '/v1/AUTH_test/bucket',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('GET', '/v1/AUTH_test/bucket/object',
                        swob.HTTPOk, {'etag': 'object etag'}, "")
    self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
                        swob.HTTPCreated, {'etag': 'object etag'}, None)
    self.swift.register('DELETE', '/v1/AUTH_test/bucket/object',
                        swob.HTTPNoContent, {}, None)

    # patch get_swift_info in the controllers so they see object versioning
    # as available
    self.mock_get_swift_info_result = {'object_versioning': {}}
    for s3api_path in (
            'controllers.obj', 'controllers.bucket',
            'controllers.multi_delete', 'controllers.versioning',
    ):
        patcher = mock.patch(
            'swift.common.middleware.s3api.%s.get_swift_info' % s3api_path,
            return_value=self.mock_get_swift_info_result)
        patcher.start()
        self.addCleanup(patcher.stop)
def test_config(self):
    conf = Config(
        {
            'a': 'str',
            'b': 10,
            'c': True,
        }
    )

    conf.update(
        {
            'a': 'str2',
            'b': '100',
            'c': 'false',
        }
    )

    self.assertEqual(conf['a'], 'str2')
    self.assertEqual(conf['b'], 100)
    self.assertEqual(conf['c'], False)
def __init__(self, app, wsgi_conf, *args, **kwargs):
    self.app = app
    self.conf = Config()

    # Set default values if they are not configured
    self.conf.allow_no_owner = config_true_value(
        wsgi_conf.get('allow_no_owner', False))
    self.conf.location = wsgi_conf.get('location', 'us-east-1')
    self.conf.dns_compliant_bucket_names = config_true_value(
        wsgi_conf.get('dns_compliant_bucket_names', True))
    self.conf.max_bucket_listing = config_positive_int_value(
        wsgi_conf.get('max_bucket_listing', 1000))
    self.conf.max_parts_listing = config_positive_int_value(
        wsgi_conf.get('max_parts_listing', 1000))
    self.conf.max_multi_delete_objects = config_positive_int_value(
        wsgi_conf.get('max_multi_delete_objects', 1000))
    self.conf.multi_delete_concurrency = config_positive_int_value(
        wsgi_conf.get('multi_delete_concurrency', 2))
    self.conf.s3_acl = config_true_value(wsgi_conf.get('s3_acl', False))
    self.conf.storage_domain = wsgi_conf.get('storage_domain', '')
    self.conf.auth_pipeline_check = config_true_value(
        wsgi_conf.get('auth_pipeline_check', True))
    self.conf.max_upload_part_num = config_positive_int_value(
        wsgi_conf.get('max_upload_part_num', 1000))
    self.conf.check_bucket_owner = config_true_value(
        wsgi_conf.get('check_bucket_owner', False))
    self.conf.force_swift_request_proxy_log = config_true_value(
        wsgi_conf.get('force_swift_request_proxy_log', False))
    self.conf.allow_multipart_uploads = config_true_value(
        wsgi_conf.get('allow_multipart_uploads', True))
    self.conf.min_segment_size = config_positive_int_value(
        wsgi_conf.get('min_segment_size', 5242880))
    self.conf.allowable_clock_skew = config_positive_int_value(
        wsgi_conf.get('allowable_clock_skew', 15 * 60))
    self.conf.cors_preflight_allow_origin = list_from_csv(
        wsgi_conf.get('cors_preflight_allow_origin', ''))
    if '*' in self.conf.cors_preflight_allow_origin and \
            len(self.conf.cors_preflight_allow_origin) > 1:
        raise ValueError('if cors_preflight_allow_origin should include '
                         'all domains, * must be the only entry')
    self.conf.ratelimit_as_client_error = config_true_value(
        wsgi_conf.get('ratelimit_as_client_error', False))

    self.logger = get_logger(
        wsgi_conf, log_route=wsgi_conf.get('log_name', 's3api'))
    self.check_pipeline(wsgi_conf)
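# A hedged usage sketch of the cors_preflight_allow_origin validation in the
# constructor above: a lone wildcard is accepted, while a wildcard mixed with
# other origins is rejected. validate_cors_origins() is illustrative only; it
# restates the check inline rather than calling the middleware.
from swift.common.utils import list_from_csv


def validate_cors_origins(csv_value):
    origins = list_from_csv(csv_value)
    if '*' in origins and len(origins) > 1:
        raise ValueError('if cors_preflight_allow_origin should include '
                         'all domains, * must be the only entry')
    return origins


assert validate_cors_origins('') == []
assert validate_cors_origins('*') == ['*']
assert validate_cors_origins('http://a.example, http://b.example') == [
    'http://a.example', 'http://b.example']
try:
    validate_cors_origins('*, http://a.example')
except ValueError:
    pass  # expected: the wildcard must be the only entry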
def setUp(self):
    # set up default config
    self.conf = Config({
        'allow_no_owner': False,
        'location': 'us-east-1',
        'dns_compliant_bucket_names': True,
        'max_bucket_listing': 1000,
        'max_parts_listing': 1000,
        'max_multi_delete_objects': 1000,
        's3_acl': False,
        'storage_domain': 'localhost',
        'auth_pipeline_check': True,
        'max_upload_part_num': 1000,
        'check_bucket_owner': False,
        'force_swift_request_proxy_log': False,
        'allow_multipart_uploads': True,
        'min_segment_size': 5242880,
    })
    # those 2 settings existed in the original test setup
    self.conf.log_level = 'debug'

    self.app = FakeApp()
    self.swift = self.app.swift
    self.s3api = filter_factory({}, **self.conf)(self.app)
    self.s3api.logger = debug_logger()

    # stub backend Swift responses for the account, bucket and object paths
    self.swift.register('HEAD', '/v1/AUTH_test',
                        swob.HTTPOk, {}, None)
    self.swift.register('HEAD', '/v1/AUTH_test/bucket',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('PUT', '/v1/AUTH_test/bucket',
                        swob.HTTPCreated, {}, None)
    self.swift.register('POST', '/v1/AUTH_test/bucket',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('DELETE', '/v1/AUTH_test/bucket',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('GET', '/v1/AUTH_test/bucket/object',
                        swob.HTTPOk, {'etag': 'object etag'}, "")
    self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
                        swob.HTTPCreated, {'etag': 'object etag'}, None)
    self.swift.register('DELETE', '/v1/AUTH_test/bucket/object',
                        swob.HTTPNoContent, {}, None)