def setUp(self):
    """Wire a proxy app to a real ObjectController through fake connections.

    Replaces the proxy's base/obj ``http_connect`` with fakes (200s for
    account/container, a real in-process object server for objects); the
    originals are saved so tearDown can restore them.
    """
    skip_if_no_xattrs()
    self.app = proxy.Application(None, FakeMemcache(),
                                 logger=debug_logger('proxy-ut'),
                                 account_ring=FakeRing(replicas=1),
                                 container_ring=FakeRing(replicas=1))
    self.copy_app = ServerSideCopyMiddleware(self.app, {})
    monkey_patch_mimetools()
    self.tmpdir = mkdtemp()
    self.testdir = os.path.join(self.tmpdir,
                                'tmp_test_object_server_ObjectController')
    mkdirs(os.path.join(self.testdir, 'sda', 'tmp'))
    conf = {'devices': self.testdir, 'mount_check': 'false'}
    self.obj_ctlr = object_server.ObjectController(
        conf, logger=debug_logger('obj-ut'))

    # account/container GETs return canned 200s; object requests are
    # served by the real controller above
    http_connect = get_http_connect(fake_http_connect(200),
                                    fake_http_connect(200),
                                    FakeServerConnection(self.obj_ctlr))

    # save originals so tearDown can undo the monkey-patching
    self.orig_base_http_connect = swift.proxy.controllers.base.http_connect
    self.orig_obj_http_connect = swift.proxy.controllers.obj.http_connect
    swift.proxy.controllers.base.http_connect = http_connect
    swift.proxy.controllers.obj.http_connect = http_connect
def test_policy_index(self):
    """Policy index (log field 20) is picked up from request or response."""
    # Policy index can be specified by X-Backend-Storage-Policy-Index
    # in the request header for object API
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(policy_idx='1'), {})
    app.access_logger = debug_logger()
    req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'})
    resp = app(req.environ, start_response)
    b''.join(resp)  # exhaust the response iterator so the log line fires
    log_parts = self._log_parts(app)
    self.assertEqual(log_parts[20], '1')

    # Policy index can be specified by X-Backend-Storage-Policy-Index
    # in the response header for container API
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
    app.access_logger = debug_logger()
    req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': 'GET'})

    def fake_call(app, env, start_response):
        # emit the policy index as a response header instead
        start_response(app.response_str,
                       [('Content-Type', 'text/plain'),
                        ('Content-Length', str(sum(map(len, app.body)))),
                        ('X-Backend-Storage-Policy-Index', '1')])
        while env['wsgi.input'].read(5):
            pass
        return app.body

    with mock.patch.object(FakeApp, '__call__', fake_call):
        resp = app(req.environ, start_response)
        b''.join(resp)
        log_parts = self._log_parts(app)
        self.assertEqual(log_parts[20], '1')
def test_proxy_client_logging(self):
    """Client IP comes from forwarding headers; remote addr is kept too."""
    # first hop of X-Forwarded-For is logged as the client ip
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
    app.access_logger = debug_logger()
    req = Request.blank('/', environ={
        'REQUEST_METHOD': 'GET',
        'REMOTE_ADDR': '1.2.3.4',
        'HTTP_X_FORWARDED_FOR': '4.5.6.7,8.9.10.11'})
    resp = app(req.environ, start_response)
    # exhaust generator
    [x for x in resp]
    log_parts = self._log_parts(app)
    self.assertEqual(log_parts[0], '4.5.6.7')  # client ip
    self.assertEqual(log_parts[1], '1.2.3.4')  # remote addr

    # X-Cluster-Client-Ip is honored the same way
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
    app.access_logger = debug_logger()
    req = Request.blank('/', environ={
        'REQUEST_METHOD': 'GET',
        'REMOTE_ADDR': '1.2.3.4',
        'HTTP_X_CLUSTER_CLIENT_IP': '4.5.6.7'})
    resp = app(req.environ, start_response)
    # exhaust generator
    [x for x in resp]
    log_parts = self._log_parts(app)
    self.assertEqual(log_parts[0], '4.5.6.7')  # client ip
    self.assertEqual(log_parts[1], '1.2.3.4')  # remote addr
def test_dual_logging_middlewares_w_inner(self):
    """With stacked proxy-logging middlewares, the innermost one logs an
    internal subrequest's response while the outermost logs the final
    (different) response."""

    class FakeMiddleware(object):
        """
        Fake middleware to make a separate internal request, but construct
        the response with different data.
        """
        def __init__(self, app, conf):
            self.app = app
            self.conf = conf

        def GET(self, req):
            # Make the internal request
            ireq = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
            resp = self.app(ireq.environ, start_response)
            resp_body = b''.join(resp)
            if resp_body != b'FAKE APP':
                return Response(request=req,
                                body=b"FAKE APP WAS NOT RETURNED",
                                content_type="text/plain")
            # But our response is different
            return Response(request=req, body=b"FAKE MIDDLEWARE",
                            content_type="text/plain")

        def __call__(self, env, start_response):
            req = Request(env)
            return self.GET(req)(env, start_response)

    # Since an internal request is being made, inner most proxy logging
    # middleware, log0, should have performed the logging.
    app = FakeApp()
    flg0 = debug_logger()
    env = {}
    log0 = proxy_logging.ProxyLoggingMiddleware(app, env, logger=flg0)
    fake = FakeMiddleware(log0, env)
    flg1 = debug_logger()
    log1 = proxy_logging.ProxyLoggingMiddleware(fake, env, logger=flg1)

    req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
    resp = log1(req.environ, start_response)
    resp_body = b''.join(resp)

    # Inner most logger should have logged the app's response
    log_parts = self._log_parts(log0)
    self.assertEqual(log_parts[3], 'GET')
    self.assertEqual(log_parts[4], '/')
    self.assertEqual(log_parts[5], 'HTTP/1.0')
    self.assertEqual(log_parts[6], '200')
    self.assertEqual(log_parts[11], str(len('FAKE APP')))

    # Outer most logger should have logged the other middleware's response
    log_parts = self._log_parts(log1)
    self.assertEqual(log_parts[3], 'GET')
    self.assertEqual(log_parts[4], '/')
    self.assertEqual(log_parts[5], 'HTTP/1.0')
    self.assertEqual(log_parts[6], '200')
    self.assertEqual(resp_body, b'FAKE MIDDLEWARE')
    self.assertEqual(log_parts[11], str(len(resp_body)))
def test_log_info_none(self):
    """Log field 17 (swift.log_info) is '-' when unset or an empty list."""
    for log_info in (None, []):
        middleware = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
        middleware.access_logger = debug_logger()
        request = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        if log_info is not None:
            request.environ['swift.log_info'] = log_info
        # drain the response iterator so the access line is emitted
        list(middleware(request.environ, start_response))
        self.assertEqual(self._log_parts(middleware)[17], '-')
def test_log_msg_template_s3api(self):
    """A custom log template resolves account/container/object from the
    s3api backend path, and stats use the backend path's policy."""
    # Access logs configuration should override the default one
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {
        'log_msg_template': (
            '{protocol} {path} {method} '
            '{account} {container} {object}')})
    app.access_logger = debug_logger()
    req = Request.blank('/bucket/path/to/key', environ={
        'REQUEST_METHOD': 'GET',
        # This would actually get set in the app, but w/e
        'swift.backend_path': '/v1/AUTH_test/bucket/path/to/key'})
    with mock.patch("time.time", side_effect=[
            18.0, 18.5, 20.71828182846]):
        resp = app(req.environ, start_response)
        # exhaust generator
        resp_body = b''.join(resp)
    log_parts = self._log_parts(app)
    self.assertEqual(log_parts, [
        'HTTP/1.0',
        '/bucket/path/to/key',
        'GET',
        'AUTH_test',
        'bucket',
        'path/to/key',
    ])
    self.assertEqual(resp_body, b'FAKE APP')
    # timing covers the full 18.0 -> 20.71828182846 span, in ms
    self.assertTiming('object.policy.0.GET.200.timing',
                      app, exp_timing=2.71828182846 * 1000)
    self.assertUpdateStats([('object.GET.200.xfer', 8),
                            ('object.policy.0.GET.200.xfer', 8)],
                           app)
def test_log_msg_template(self):
    """A custom template with anonymized/derived fields renders correctly."""
    # Access logs configuration should override the default one
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {
        'log_anonymization_salt': 'secret_salt',
        'log_msg_template': (
            'template which can be edited in config: '
            '{protocol} {path} {method} '
            '{path.anonymized} {container.anonymized} '
            '{request_time} {start_time.datetime} {end_time} {ttfb}')})
    app.access_logger = debug_logger()
    req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
    # start / first-byte / end timestamps
    with mock.patch('time.time', mock.MagicMock(
            side_effect=[10000000.0, 10000000.5, 10000001.0])):
        resp = app(req.environ, start_response)
        # exhaust generator
        resp_body = b''.join(resp)
    log_parts = self._log_parts(app)
    self.assertEqual(log_parts[0], 'template')
    self.assertEqual(log_parts[7], 'HTTP/1.0')
    self.assertEqual(log_parts[8], '/')
    self.assertEqual(log_parts[9], 'GET')
    # salted-MD5 anonymization of the path
    self.assertEqual(log_parts[10],
                     '{SMD5}c65475e457fea0951fbb9ec9596b2177')
    self.assertEqual(log_parts[11], '-')  # no container in '/'
    self.assertEqual(log_parts[13], '26/Apr/1970/17/46/40')
    self.assertEqual(log_parts[14], '10000001.000000000')
    self.assertEqual(log_parts[15], '0.5')  # ttfb
    self.assertEqual(resp_body, b'FAKE APP')
def test_log_request_stat_method_filtering_custom(self):
    """Only configured HTTP methods keep their own statsd bucket; all
    other methods are lumped under BAD_METHOD."""
    method_map = {
        'foo': 'BAD_METHOD',
        '': 'BAD_METHOD',
        'PUTT': 'BAD_METHOD',
        'SPECIAL': 'SPECIAL',  # will be configured
        'GET': 'GET',
        'PUT': 'PUT',
        'COPY': 'BAD_METHOD',  # prove no one's special
    }
    # this conf var supports optional leading access_
    for conf_key in ['access_log_statsd_valid_http_methods',
                     'log_statsd_valid_http_methods']:
        for method, exp_method in method_map.items():
            app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {
                conf_key: 'SPECIAL, GET,PUT ',  # crazy spaces ok
            })
            app.access_logger = debug_logger()
            req = Request.blank('/v1/a/c',
                                environ={'REQUEST_METHOD': method})
            now = 10000.0
            app.log_request(req, 911, 4, 43, now, now + 1.01)
            self.assertTiming('container.%s.911.timing' % exp_method,
                              app, exp_timing=1.01 * 1000)
            self.assertUpdateStats(
                [('container.%s.911.xfer' % exp_method, 4 + 43)], app)
def test_incr_failed_connection_mid_request(self):
    """An empty read mid-incr raises MemcacheConnectionError; the error
    is logged exactly once and without a traceback."""
    memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'])
    # renamed from `mock` so the local doesn't shadow the mock library
    mock_cache = MockMemcached()
    memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool(
        [(mock_cache, mock_cache)] * 2)
    self.assertEqual(memcache_client.incr('some_key', delta=5), 5)
    self.assertEqual(memcache_client.get('some_key'), b'5')
    self.assertEqual(memcache_client.incr('some_key', delta=5), 10)
    self.assertEqual(memcache_client.get('some_key'), b'10')

    # Now lets return an empty string, and make sure we aren't logging
    # the error.
    fake_stdout = six.StringIO()
    # force the logging through the DebugLogger instead of the nose
    # handler. This will use stdout, so we can assert that no stack trace
    # is logged.
    logger = debug_logger()
    with patch("sys.stdout", fake_stdout), \
            patch('swift.common.memcached.logging', logger):
        mock_cache.read_return_empty_str = True
        self.assertRaises(memcached.MemcacheConnectionError,
                          memcache_client.incr, 'some_key', delta=1)
    log_lines = logger.get_lines_for_level('error')
    self.assertIn('Error talking to memcached', log_lines[0])
    self.assertFalse(log_lines[1:])
    self.assertNotIn('Traceback', fake_stdout.getvalue())
def setUp(self):
    """Write a 3-replica container ring plus a single device directory."""
    utils.HASH_PATH_SUFFIX = 'endcap'
    utils.HASH_PATH_PREFIX = ''
    self.testdir = mkdtemp()
    ring_file = os.path.join(self.testdir, 'container.ring.gz')
    # pickle a RingData with 3 devices on the same ip/port/device
    with closing(GzipFile(ring_file, 'wb')) as f:
        pickle.dump(
            RingData([[0, 1, 2, 0, 1, 2],
                      [1, 2, 0, 1, 2, 0],
                      [2, 3, 1, 2, 3, 1]],
                     [{'id': 0, 'ip': '127.0.0.1', 'port': 1,
                       'device': 'sda1', 'zone': 0},
                      {'id': 1, 'ip': '127.0.0.1', 'port': 1,
                       'device': 'sda1', 'zone': 2},
                      {'id': 2, 'ip': '127.0.0.1', 'port': 1,
                       'device': 'sda1', 'zone': 4}], 30),
            f)
    self.devices_dir = os.path.join(self.testdir, 'devices')
    os.mkdir(self.devices_dir)
    self.sda1 = os.path.join(self.devices_dir, 'sda1')
    os.mkdir(self.sda1)
    # one tmp dir per storage policy
    for policy in POLICIES:
        os.mkdir(os.path.join(self.sda1, get_tmp_dir(policy)))
    self.logger = debug_logger()
def test_db_migration(self, tempdir, mock_recon):
    """Auditing a pre-storage-policy container db migrates its schema.

    A db is created with the old (pre-SPI) broker, shown to lack the
    storage_policy_index column, then audited; afterwards the current
    broker can read a fully-populated info dict from it.
    """
    db_path = os.path.join(tempdir, 'sda', 'containers', '0', '0', '0',
                           'test.db')
    with test_backend.TestContainerBrokerBeforeSPI.old_broker() as \
            old_ContainerBroker:
        broker = old_ContainerBroker(db_path, account='a', container='c')
        broker.initialize(normalize_timestamp(0), -1)
        with broker.get() as conn:
            try:
                conn.execute('SELECT storage_policy_index '
                             'FROM container_stat')
            except Exception as err:
                # assertIn gives a clearer failure message than
                # assertTrue('...' in str(err))
                self.assertIn('no such column: storage_policy_index',
                              str(err))
            else:
                self.fail('TestContainerBrokerBeforeSPI broker class '
                          'was already migrated')

    conf = {'devices': tempdir, 'mount_check': False}
    test_auditor = auditor.ContainerAuditor(conf, logger=debug_logger())
    test_auditor.run_once()

    broker = auditor.ContainerBroker(db_path, account='a', container='c')
    info = broker.get_info()
    expected = {
        'account': 'a',
        'container': 'c',
        'object_count': 0,
        'bytes_used': 0,
        'storage_policy_index': 0,
    }
    for k, v in expected.items():
        self.assertEqual(info[k], v)
def test_reap_object(self):
    """reap_object issues one direct DELETE per container node, with
    container-update headers and the policy index, for every policy.

    Removes the dead ``ring = unit.FakeRing()`` local that was
    immediately shadowed by ``r.get_object_ring(...)`` in the loop.
    """
    conf = {
        'mount_check': 'false',
    }
    r = reaper.AccountReaper(conf, logger=unit.debug_logger())
    mock_path = 'swift.account.reaper.direct_delete_object'
    for policy in POLICIES:
        r.reset_stats()
        with patch(mock_path) as fake_direct_delete:
            r.reap_object('a', 'c', 'partition', cont_nodes, 'o',
                          policy.idx)
            for i, call_args in enumerate(
                    fake_direct_delete.call_args_list):
                cnode = cont_nodes[i]
                host = '%(ip)s:%(port)s' % cnode
                device = cnode['device']
                # container-update headers the object server needs
                headers = {
                    'X-Container-Host': host,
                    'X-Container-Partition': 'partition',
                    'X-Container-Device': device,
                    'X-Backend-Storage-Policy-Index': policy.idx
                }
                ring = r.get_object_ring(policy.idx)
                expected = call(ring.devs[i], 1, 'a', 'c', 'o',
                                headers=headers,
                                conn_timeout=0.5, response_timeout=10)
                self.assertEqual(call_args, expected)
    self.assertEqual(r.stats_objects_deleted, 3)
def test_reap_object(self):
    """Each container node gets a direct DELETE carrying an X-Timestamp
    frozen to the mocked reap time; one call per object-ring replica."""
    conf = {
        'mount_check': 'false',
    }
    r = reaper.AccountReaper(conf, logger=unit.debug_logger())
    mock_path = 'swift.account.reaper.direct_delete_object'
    for policy in POLICIES:
        r.reset_stats()
        with patch(mock_path) as fake_direct_delete:
            with patch('swift.account.reaper.time') as mock_time:
                # freeze time so X-Timestamp is deterministic
                mock_time.return_value = 1429117638.86767
                r.reap_object('a', 'c', 'partition', cont_nodes, 'o',
                              policy.idx)
                mock_time.assert_called_once_with()
            for i, call_args in enumerate(
                    fake_direct_delete.call_args_list):
                # container nodes are cycled if there are more replicas
                # than container nodes
                cnode = cont_nodes[i % len(cont_nodes)]
                host = '%(ip)s:%(port)s' % cnode
                device = cnode['device']
                headers = {
                    'X-Container-Host': host,
                    'X-Container-Partition': 'partition',
                    'X-Container-Device': device,
                    'X-Backend-Storage-Policy-Index': policy.idx,
                    'X-Timestamp': '1429117638.86767'
                }
                ring = r.get_object_ring(policy.idx)
                expected = call(dict(ring.devs[i], index=i), 0,
                                'a', 'c', 'o',
                                headers=headers, conn_timeout=0.5,
                                response_timeout=10)
                self.assertEqual(call_args, expected)
            self.assertEqual(policy.object_ring.replicas - 1, i)
    self.assertEqual(r.stats_objects_deleted,
                     policy.object_ring.replicas)
def test_retry_client_exception(self):
    """retry() re-raises ClientException after exhausting retries,
    sleeping with linear backoff and logging each failed attempt."""
    logger = debug_logger('direct-client-test')

    with mock.patch('swift.common.direct_client.sleep') as mock_sleep, \
            mocked_http_conn(500) as conn:
        with self.assertRaises(direct_client.ClientException) as err_ctx:
            direct_client.retry(direct_client.direct_delete_object,
                                self.node, self.part,
                                self.account, self.container, self.obj,
                                retries=2, error_log=logger.error)
        self.assertEqual('DELETE', conn.method)
        self.assertEqual(err_ctx.exception.http_status, 500)
        # the exception message describes the failing request
        self.assertIn('DELETE', err_ctx.exception.message)
        self.assertIn(quote('/%s/%s/%s/%s/%s'
                            % (self.node['device'], self.part,
                               self.account, self.container, self.obj)),
                      err_ctx.exception.message)
        self.assertIn(self.node['ip'], err_ctx.exception.message)
        self.assertIn(self.node['port'], err_ctx.exception.message)
        self.assertEqual(self.node['ip'], err_ctx.exception.http_host)
        self.assertEqual(self.node['port'], err_ctx.exception.http_port)
        self.assertEqual(self.node['device'],
                         err_ctx.exception.http_device)
        self.assertEqual(500, err_ctx.exception.http_status)
        # retries=2 -> 3 attempts with sleeps of 1s then 2s between them
        self.assertEqual([mock.call(1), mock.call(2)],
                         mock_sleep.call_args_list)
    error_lines = logger.get_lines_for_level('error')
    self.assertEqual(3, len(error_lines))
    for line in error_lines:
        self.assertIn('500 Internal Error', line)
def setUp(self):
    """Build a proxy app whose ContainerController always sees the canned
    ``self.account_info`` instead of doing a real account HEAD."""
    TestRingBase.setUp(self)
    self.logger = debug_logger()
    self.container_ring = FakeRing(replicas=self.CONTAINER_REPLICAS,
                                   max_more_nodes=9)
    self.app = proxy_server.Application(
        None, FakeMemcache(), logger=self.logger,
        account_ring=FakeRing(),
        container_ring=self.container_ring
    )

    self.account_info = {
        "status": 200,
        "container_count": "10",
        "total_object_count": "100",
        "bytes": "1000",
        "meta": {},
        "sysmeta": {},
    }

    class FakeAccountInfoContainerController(
            proxy_server.ContainerController):

        def account_info(controller, *args, **kwargs):
            # patch the account-info lookup to return our canned dict
            patch_path = "swift.proxy.controllers.base.get_account_info"
            with mock.patch(patch_path) as mock_get_info:
                mock_get_info.return_value = dict(self.account_info)
                return super(FakeAccountInfoContainerController,
                             controller).account_info(*args, **kwargs)

    _orig_get_controller = self.app.get_controller

    def wrapped_get_controller(*args, **kwargs):
        # substitute our fake controller class for the duration of
        # controller resolution
        with mock.patch("swift.proxy.server.ContainerController",
                        new=FakeAccountInfoContainerController):
            return _orig_get_controller(*args, **kwargs)

    self.app.get_controller = wrapped_get_controller
def setUp(self):
    """Write a 3-replica container ring plus a single device directory."""
    utils.HASH_PATH_SUFFIX = "endcap"
    utils.HASH_PATH_PREFIX = ""
    self.testdir = mkdtemp()
    ring_file = os.path.join(self.testdir, "container.ring.gz")
    # pickle a RingData with 3 devices on the same ip/port/device
    with closing(GzipFile(ring_file, "wb")) as f:
        pickle.dump(
            RingData(
                [[0, 1, 2, 0, 1, 2],
                 [1, 2, 0, 1, 2, 0],
                 [2, 3, 1, 2, 3, 1]],
                [
                    {"id": 0, "ip": "127.0.0.1", "port": 1,
                     "device": "sda1", "zone": 0},
                    {"id": 1, "ip": "127.0.0.1", "port": 1,
                     "device": "sda1", "zone": 2},
                    {"id": 2, "ip": "127.0.0.1", "port": 1,
                     "device": "sda1", "zone": 4},
                ],
                30,
            ),
            f,
        )
    self.devices_dir = os.path.join(self.testdir, "devices")
    os.mkdir(self.devices_dir)
    self.sda1 = os.path.join(self.devices_dir, "sda1")
    os.mkdir(self.sda1)
    # one tmp dir per storage policy
    for policy in POLICIES:
        os.mkdir(os.path.join(self.sda1, get_tmp_dir(int(policy))))
    self.logger = debug_logger()
def test_reap_object(self):
    """reap_object issues one direct DELETE per container node, with
    container-update headers and the policy index, for every policy.

    Removes the dead ``ring = unit.FakeRing()`` local that was
    immediately shadowed by ``r.get_object_ring(...)`` in the loop.
    """
    conf = {
        'mount_check': 'false',
    }
    r = reaper.AccountReaper(conf, logger=unit.debug_logger())
    mock_path = 'swift.account.reaper.direct_delete_object'
    for policy in POLICIES:
        r.reset_stats()
        with patch(mock_path) as fake_direct_delete:
            r.reap_object('a', 'c', 'partition', cont_nodes, 'o',
                          policy.idx)
            for i, call_args in enumerate(
                    fake_direct_delete.call_args_list):
                cnode = cont_nodes[i]
                host = '%(ip)s:%(port)s' % cnode
                device = cnode['device']
                # container-update headers the object server needs
                headers = {
                    'X-Container-Host': host,
                    'X-Container-Partition': 'partition',
                    'X-Container-Device': device,
                    'X-Backend-Storage-Policy-Index': policy.idx
                }
                ring = r.get_object_ring(policy.idx)
                expected = call(ring.devs[i], 0, 'a', 'c', 'o',
                                headers=headers,
                                conn_timeout=0.5, response_timeout=10)
                self.assertEqual(call_args, expected)
    self.assertEqual(r.stats_objects_deleted, 3)
def test_db_migration(self, tempdir):
    """Auditing a pre-storage-policy container db migrates its schema.

    Replaces the long-deprecated ``self.assert_`` alias (removed in
    modern Python) with ``assertIn``, which also gives a clearer
    failure message.
    """
    db_path = os.path.join(tempdir, 'sda', 'containers', '0', '0', '0',
                           'test.db')
    with test_backend.TestContainerBrokerBeforeSPI.old_broker() as \
            old_ContainerBroker:
        broker = old_ContainerBroker(db_path, account='a', container='c')
        broker.initialize(normalize_timestamp(0), -1)
        with broker.get() as conn:
            try:
                conn.execute('SELECT storage_policy_index '
                             'FROM container_stat')
            except Exception as err:
                self.assertIn('no such column: storage_policy_index',
                              str(err))
            else:
                self.fail('TestContainerBrokerBeforeSPI broker class '
                          'was already migrated')

    conf = {'devices': tempdir, 'mount_check': False}
    test_auditor = auditor.ContainerAuditor(conf, logger=debug_logger())
    test_auditor.run_once()

    broker = auditor.ContainerBroker(db_path, account='a', container='c')
    info = broker.get_info()
    expected = {
        'account': 'a',
        'container': 'c',
        'object_count': 0,
        'bytes_used': 0,
        'storage_policy_index': 0,
    }
    for k, v in expected.items():
        self.assertEqual(info[k], v)
def setUp(self):
    """Write a 2-replica account ring and one device dir for the
    container updater tests."""
    utils.HASH_PATH_SUFFIX = 'endcap'
    utils.HASH_PATH_PREFIX = 'startcap'
    self.testdir = os.path.join(mkdtemp(), 'tmp_test_container_updater')
    rmtree(self.testdir, ignore_errors=1)
    os.mkdir(self.testdir)
    # pickle a 2-device account ring
    ring_file = os.path.join(self.testdir, 'account.ring.gz')
    with closing(GzipFile(ring_file, 'wb')) as f:
        pickle.dump(
            RingData([[0, 1, 0, 1], [1, 0, 1, 0]],
                     [{'id': 0, 'ip': '127.0.0.1', 'port': 12345,
                       'device': 'sda1', 'zone': 0},
                      {'id': 1, 'ip': '127.0.0.1', 'port': 12345,
                       'device': 'sda1', 'zone': 2}], 30),
            f)
    self.devices_dir = os.path.join(self.testdir, 'devices')
    os.mkdir(self.devices_dir)
    self.sda1 = os.path.join(self.devices_dir, 'sda1')
    os.mkdir(self.sda1)
    self.logger = debug_logger('test')
def setUp(self):
    """Patch all policies onto one fake object ring and route container
    info through a controller that returns ``self.container_info``."""
    # setup fake rings with handoffs
    self.obj_ring = FakeRing(max_more_nodes=3)
    for policy in POLICIES:
        policy.object_ring = self.obj_ring

    logger = debug_logger('proxy-server')
    logger.thread_locals = ('txn1', '127.0.0.2')
    self.app = PatchedObjControllerApp(None, FakeMemcache(),
                                       account_ring=FakeRing(),
                                       container_ring=FakeRing(),
                                       logger=logger)

    class FakeContainerInfoObjController(proxy_server.ObjectController):

        def container_info(controller, *args, **kwargs):
            patch_path = 'swift.proxy.controllers.base.get_info'
            with mock.patch(patch_path) as mock_get_info:
                mock_get_info.return_value = dict(self.container_info)
                return super(FakeContainerInfoObjController,
                             controller).container_info(*args, **kwargs)

    # this is taking advantage of the fact that self.app is a
    # PachedObjControllerApp, so handle_response will route into an
    # instance of our FakeContainerInfoObjController just by
    # overriding the class attribute for object_controller
    self.app.object_controller = FakeContainerInfoObjController
def setUp(self):
    # SOF
    """Save and override hash path config, then build a proxy app whose
    ContainerController sees the canned ``self.account_info``."""
    self._orig_hash_suffix = utils.HASH_PATH_SUFFIX
    self._orig_hash_prefix = utils.HASH_PATH_PREFIX
    utils.HASH_PATH_SUFFIX = "endcap"
    utils.HASH_PATH_PREFIX = ""
    self.logger = debug_logger()
    self.container_ring = FakeRing(max_more_nodes=9)
    self.app = proxy_server.Application(
        None, FakeMemcache(), logger=self.logger,
        account_ring=FakeRing(),
        container_ring=self.container_ring
    )

    self.account_info = {
        "status": 200,
        "container_count": "10",
        "total_object_count": "100",
        "bytes": "1000",
        "meta": {},
        "sysmeta": {},
    }

    class FakeAccountInfoContainerController(
            proxy_server.ContainerController):

        def account_info(controller, *args, **kwargs):
            # patch the info lookup to return our canned dict
            patch_path = "swift.proxy.controllers.base.get_info"
            with mock.patch(patch_path) as mock_get_info:
                mock_get_info.return_value = dict(self.account_info)
                return super(FakeAccountInfoContainerController,
                             controller).account_info(*args, **kwargs)

    _orig_get_controller = self.app.get_controller

    def wrapped_get_controller(*args, **kwargs):
        # substitute our fake controller class during resolution
        with mock.patch("swift.proxy.server.ContainerController",
                        new=FakeAccountInfoContainerController):
            return _orig_get_controller(*args, **kwargs)

    self.app.get_controller = wrapped_get_controller
def setUp(self):
    """Write a 4-device container ring, a device dir per policy, and a
    monotonically increasing timestamp iterator."""
    utils.HASH_PATH_SUFFIX = 'endcap'
    utils.HASH_PATH_PREFIX = ''
    self.testdir = mkdtemp()
    ring_file = os.path.join(self.testdir, 'container.ring.gz')
    with closing(GzipFile(ring_file, 'wb')) as f:
        pickle.dump(
            RingData([[0, 1, 2, 0, 1, 2],
                      [1, 2, 0, 1, 2, 0],
                      [2, 3, 1, 2, 3, 1]],
                     [{'id': 0, 'ip': '127.0.0.1', 'port': 1,
                       'device': 'sda1', 'zone': 0},
                      {'id': 1, 'ip': '127.0.0.1', 'port': 1,
                       'device': 'sda1', 'zone': 2},
                      {'id': 2, 'ip': '127.0.0.1', 'port': 1,
                       'device': 'sda1', 'zone': 4},
                      {'id': 3, 'ip': '127.0.0.1', 'port': 1,
                       'device': 'sda1', 'zone': 6}], 30),
            f)
    self.devices_dir = os.path.join(self.testdir, 'devices')
    os.mkdir(self.devices_dir)
    self.sda1 = os.path.join(self.devices_dir, 'sda1')
    os.mkdir(self.sda1)
    for policy in POLICIES:
        os.mkdir(os.path.join(self.sda1, get_tmp_dir(policy)))
    self.logger = debug_logger()
    self.ts_iter = make_timestamp_iter()
def test_incr_failed_connection_mid_request(self):
    """An empty read mid-incr raises MemcacheConnectionError; the error
    is logged exactly once and without a traceback."""
    memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'])
    # renamed from `mock` so the local doesn't shadow the mock library
    mock_cache = MockMemcached()
    memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool(
        [(mock_cache, mock_cache)] * 2)
    self.assertEqual(memcache_client.incr('some_key', delta=5), 5)
    self.assertEqual(memcache_client.get('some_key'), '5')
    self.assertEqual(memcache_client.incr('some_key', delta=5), 10)
    self.assertEqual(memcache_client.get('some_key'), '10')

    # Now lets return an empty string, and make sure we aren't logging
    # the error.
    fake_stdout = six.StringIO()
    # force the logging through the DebugLogger instead of the nose
    # handler. This will use stdout, so we can assert that no stack trace
    # is logged.
    logger = debug_logger()
    with patch("sys.stdout", fake_stdout), \
            patch('swift.common.memcached.logging', logger):
        mock_cache.read_return_empty_str = True
        self.assertRaises(memcached.MemcacheConnectionError,
                          memcache_client.incr, 'some_key', delta=1)
    log_lines = logger.get_lines_for_level('error')
    self.assertIn('Error talking to memcached', log_lines[0])
    self.assertFalse(log_lines[1:])
    self.assertNotIn('Traceback', fake_stdout.getvalue())
def test_ensure_fields(self):
    """The default access-log line always has all 21 fields, with '-'
    for anything unknown."""
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
    app.access_logger = debug_logger()
    req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
    # start / first-byte / end timestamps
    with mock.patch('time.time', mock.MagicMock(
            side_effect=[10000000.0, 10000000.5, 10000001.0])):
        resp = app(req.environ, start_response)
        resp_body = b''.join(resp)
    log_parts = self._log_parts(app)
    self.assertEqual(len(log_parts), 21)
    self.assertEqual(log_parts[0], '-')  # client ip
    self.assertEqual(log_parts[1], '-')  # remote addr
    self.assertEqual(log_parts[2], '26/Apr/1970/17/46/41')  # end time
    self.assertEqual(log_parts[3], 'GET')
    self.assertEqual(log_parts[4], '/')
    self.assertEqual(log_parts[5], 'HTTP/1.0')
    self.assertEqual(log_parts[6], '200')
    self.assertEqual(log_parts[7], '-')  # referer
    self.assertEqual(log_parts[8], '-')  # user agent
    self.assertEqual(log_parts[9], '-')  # auth token
    self.assertEqual(log_parts[10], '-')  # bytes received
    self.assertEqual(resp_body, b'FAKE APP')
    self.assertEqual(log_parts[11], str(len(resp_body)))  # bytes sent
    self.assertEqual(log_parts[12], '-')  # etag
    self.assertEqual(log_parts[13], '-')  # transaction id
    self.assertEqual(log_parts[14], '-')  # headers
    self.assertEqual(log_parts[15], '1.0000')  # request time
    self.assertEqual(log_parts[16], '-')  # source
    self.assertEqual(log_parts[17], '-')  # log info
    self.assertEqual(log_parts[18], '10000000.000000000')  # start time
    self.assertEqual(log_parts[19], '10000001.000000000')  # end time
    self.assertEqual(log_parts[20], '-')  # policy index
def test_run_once(self):
    """run_once reaps mounted devices, skips unmounted ones, and logs
    (rather than raises) when the devices path is bad.

    Uses assertTrue(foo.called) instead of assertEqual(foo.called, 1):
    ``Mock.called`` is a bool, and the old form only passed because
    True == 1.
    """
    def prepare_data_dir():
        devices_path = tempfile.mkdtemp()
        # will be deleted by teardown
        self.to_delete.append(devices_path)
        path = os.path.join(devices_path, 'sda1', DATADIR)
        os.makedirs(path)
        return devices_path

    def init_reaper(devices):
        r = reaper.AccountReaper({'devices': devices})
        return r

    devices = prepare_data_dir()
    r = init_reaper(devices)

    # mounted device: reap_device is invoked
    with patch('swift.account.reaper.AccountReaper.reap_device') as foo, \
            unit.mock_check_drive(ismount=True):
        r.run_once()
    self.assertTrue(foo.called)

    # unmounted device: reap_device is skipped
    with patch('swift.account.reaper.AccountReaper.reap_device') as foo, \
            unit.mock_check_drive(ismount=False):
        r.run_once()
    self.assertFalse(foo.called)

    # bad devices dir: the exception is caught and logged
    with patch('swift.account.reaper.AccountReaper.reap_device') as foo:
        r.logger = unit.debug_logger('test-reaper')
        r.devices = 'thisdeviceisbad'
        r.run_once()
    self.assertTrue(r.logger.get_lines_for_level(
        'error')[-1].startswith('Exception in top-level account reaper'))
def setUp(self):
    """Patch all policies onto one fake object ring and route container
    info through a controller that returns ``self.container_info``."""
    # setup fake rings with handoffs
    self.obj_ring = FakeRing(max_more_nodes=3)
    for policy in POLICIES:
        policy.object_ring = self.obj_ring

    logger = debug_logger('proxy-server')
    logger.thread_locals = ('txn1', '127.0.0.2')
    self.app = PatchedObjControllerApp(
        None, FakeMemcache(), account_ring=FakeRing(),
        container_ring=FakeRing(), logger=logger)

    class FakeContainerInfoObjController(proxy_server.ObjectController):

        def container_info(controller, *args, **kwargs):
            patch_path = 'swift.proxy.controllers.base.get_info'
            with mock.patch(patch_path) as mock_get_info:
                mock_get_info.return_value = dict(self.container_info)
                return super(FakeContainerInfoObjController,
                             controller).container_info(*args, **kwargs)

    # this is taking advantage of the fact that self.app is a
    # PachedObjControllerApp, so handle_response will route into an
    # instance of our FakeContainerInfoObjController just by
    # overriding the class attribute for object_controller
    self.app.object_controller = FakeContainerInfoObjController
def test_run_once(self):
    """run_once reaps mounted devices, skips unmounted ones, and logs
    (rather than raises) when the devices path is bad.

    Uses assertTrue(foo.called) instead of assertEqual(foo.called, 1):
    ``Mock.called`` is a bool, and the old form only passed because
    True == 1.
    """
    def prepare_data_dir():
        devices_path = tempfile.mkdtemp()
        # will be deleted by teardown
        self.to_delete.append(devices_path)
        path = os.path.join(devices_path, 'sda1', DATADIR)
        os.makedirs(path)
        return devices_path

    def init_reaper(devices):
        r = reaper.AccountReaper({'devices': devices})
        return r

    devices = prepare_data_dir()
    r = init_reaper(devices)

    # mounted device: reap_device is invoked
    with patch('swift.account.reaper.AccountReaper.reap_device') as foo, \
            unit.mock_check_drive(ismount=True):
        r.run_once()
    self.assertTrue(foo.called)

    # unmounted device: reap_device is skipped
    with patch('swift.account.reaper.AccountReaper.reap_device') as foo, \
            unit.mock_check_drive(ismount=False):
        r.run_once()
    self.assertFalse(foo.called)

    # bad devices dir: the exception is caught and logged
    with patch('swift.account.reaper.AccountReaper.reap_device') as foo:
        r.logger = unit.debug_logger('test-reaper')
        r.devices = 'thisdeviceisbad'
        r.run_once()
    self.assertTrue(
        r.logger.get_lines_for_level('error')[-1].startswith(
            'Exception in top-level account reaper'))
def setUp(self):
    """Create dummy container db files across devices, partitions,
    suffixes and hashes for the sync-store tests."""
    self.logger = debug_logger('test-container-sync-store')
    self.logger.level = logging.DEBUG
    self.test_dir_prefix = tempfile.mkdtemp()
    self.devices_dir = os.path.join(self.test_dir_prefix, 'srv/node/')
    os.makedirs(self.devices_dir)
    # Create dummy container dbs
    self.devices = ['sdax', 'sdb', 'sdc']
    self.partitions = ['21765', '38965', '13234']
    self.suffixes = ['312', '435']
    self.hashes = ['f19ed', '53ef', '0ab5', '9c3a']
    for device in self.devices:
        data_dir_path = os.path.join(self.devices_dir, device, DATADIR)
        os.makedirs(data_dir_path)
        for part in self.partitions:
            for suffix in self.suffixes:
                for hsh in self.hashes:
                    db_dir = os.path.join(data_dir_path, part,
                                          suffix, hsh)
                    os.makedirs(db_dir)
                    db_file = os.path.join(db_dir, '%s.db' % hsh)
                    # each db file just contains its own path
                    with open(db_file, 'w') as outfile:
                        outfile.write('%s' % db_file)
def setUp(self):
    """Build a proxy app whose ContainerController always sees the canned
    ``self.account_info`` instead of doing a real account lookup."""
    TestRingBase.setUp(self)
    self.logger = debug_logger()
    self.container_ring = FakeRing(max_more_nodes=9)
    self.app = proxy_server.Application(None, FakeMemcache(),
                                        logger=self.logger,
                                        account_ring=FakeRing(),
                                        container_ring=self.container_ring)

    self.account_info = {
        'status': 200,
        'container_count': '10',
        'total_object_count': '100',
        'bytes': '1000',
        'meta': {},
        'sysmeta': {},
    }

    class FakeAccountInfoContainerController(
            proxy_server.ContainerController):

        def account_info(controller, *args, **kwargs):
            # patch the info lookup to return our canned dict
            patch_path = 'swift.proxy.controllers.base.get_info'
            with mock.patch(patch_path) as mock_get_info:
                mock_get_info.return_value = dict(self.account_info)
                return super(
                    FakeAccountInfoContainerController,
                    controller).account_info(
                        *args, **kwargs)

    _orig_get_controller = self.app.get_controller

    def wrapped_get_controller(*args, **kwargs):
        # substitute our fake controller class during resolution
        with mock.patch('swift.proxy.server.ContainerController',
                        new=FakeAccountInfoContainerController):
            return _orig_get_controller(*args, **kwargs)

    self.app.get_controller = wrapped_get_controller
def setUp(self):
    """Build an ObjectExpirer against a fake internal client whose task
    queue has ready tasks (past_time), skipped tasks (future_time) and
    unicode object names."""
    global not_sleep

    self.old_loadapp = internal_client.loadapp
    self.old_sleep = internal_client.sleep

    # neutralize real app loading and sleeping inside internal_client
    internal_client.loadapp = lambda *a, **kw: None
    internal_client.sleep = not_sleep

    self.rcache = mkdtemp()
    self.conf = {'recon_cache_path': self.rcache}
    self.logger = debug_logger('test-expirer')

    self.ts = make_timestamp_iter()
    self.past_time = str(int(time() - 86400))
    self.future_time = str(int(time() + 86400))
    # Dummy task queue for test
    self.fake_swift = FakeInternalClient({
        '.expiring_objects': {
            # this task container will be checked
            self.past_time: [
                # tasks ready for execution
                self.past_time + '-a0/c0/o0',
                self.past_time + '-a1/c1/o1',
                self.past_time + '-a2/c2/o2',
                self.past_time + '-a3/c3/o3',
                self.past_time + '-a4/c4/o4',
                self.past_time + '-a5/c5/o5',
                self.past_time + '-a6/c6/o6',
                self.past_time + '-a7/c7/o7',
                # task objects for unicode test
                self.past_time + u'-a8/c8/o8\u2661',
                self.past_time + u'-a9/c9/o9\xf8',
                # this task will be skipped
                self.future_time + '-a10/c10/o10'],
            # this task container will be skipped
            self.future_time: [
                self.future_time + '-a11/c11/o11']}
    })
    self.expirer = expirer.ObjectExpirer(self.conf, logger=self.logger,
                                         swift=self.fake_swift)

    # target object paths which should be expirerd now
    self.expired_target_path_list = [
        swob.wsgi_to_str(tgt) for tgt in (
            'a0/c0/o0', 'a1/c1/o1', 'a2/c2/o2', 'a3/c3/o3',
            'a4/c4/o4', 'a5/c5/o5', 'a6/c6/o6', 'a7/c7/o7',
            'a8/c8/o8\xe2\x99\xa1', 'a9/c9/o9\xc3\xb8',
        )
    ]
def setUp(self):
    """Build a proxy app over fake rings, with node shuffling disabled
    and an effectively unlimited node-request budget."""
    self.app = proxy_server.Application(
        None, FakeMemcache(),
        account_ring=FakeRing(), container_ring=FakeRing(),
        logger=debug_logger())
    self.app.request_node_count = lambda ring: 10000000
    # keep primaries in ring order so tests are deterministic
    self.app.sort_nodes = lambda nodes: nodes
def setUp(self):
    """Create a scratch dir seeded with dbs named to pass or fail the
    container audit."""
    self.testdir = os.path.join(mkdtemp(), "tmp_test_container_auditor")
    self.logger = debug_logger()
    rmtree(self.testdir, ignore_errors=1)
    os.mkdir(self.testdir)
    for name in ("true1.db", "true2.db", "true3.db",
                 "fail1.db", "fail2.db"):
        with open(os.path.join(self.testdir, name), "w+") as fp:
            fp.write(" ")
def setUp(self):
    """Create a scratch dir seeded with dbs named to pass or fail the
    account audit."""
    self.logger = debug_logger()
    self.testdir = os.path.join(mkdtemp(), 'tmp_test_account_auditor')
    rmtree(self.testdir, ignore_errors=1)
    os.mkdir(self.testdir)
    db_names = ['true1.db', 'true2.db', 'true3.db',
                'fail1.db', 'fail2.db']
    for db_name in db_names:
        db_path = os.path.join(self.testdir, db_name)
        with open(db_path, 'w+') as handle:
            handle.write(' ')
def setUp(self):
    """Build an s3api middleware stack over a fake Swift backend.

    Registers canned backend responses for the account, bucket and
    object paths exercised by these tests, and patches get_swift_info
    in the relevant controllers so object versioning looks enabled.
    """
    # setup default config dict
    self.conf = {
        'allow_no_owner': False,
        'location': 'us-east-1',
        'dns_compliant_bucket_names': True,
        'max_bucket_listing': 1000,
        'max_parts_listing': 1000,
        'max_multi_delete_objects': 1000,
        's3_acl': False,
        'storage_domain': 'localhost',
        'auth_pipeline_check': True,
        'max_upload_part_num': 1000,
        'check_bucket_owner': False,
        'force_swift_request_proxy_log': False,
        'allow_multipart_uploads': True,
        'min_segment_size': 5242880,
        'log_level': 'debug'
    }
    self.app = FakeApp()
    self.swift = self.app.swift
    # note: self.conf has no __file__ key so check_pipeline will be skipped
    # when constructing self.s3api
    self.s3api = filter_factory({}, **self.conf)(self.app)
    # share one logger across the middleware and the fake backend
    self.logger = self.s3api.logger = self.swift.logger = debug_logger()
    self.swift.register('HEAD', '/v1/AUTH_test',
                        swob.HTTPOk, {}, None)
    self.swift.register('HEAD', '/v1/AUTH_test/bucket',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('PUT', '/v1/AUTH_test/bucket',
                        swob.HTTPCreated, {}, None)
    self.swift.register('POST', '/v1/AUTH_test/bucket',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('DELETE', '/v1/AUTH_test/bucket',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('GET', '/v1/AUTH_test/bucket/object',
                        swob.HTTPOk, {'etag': 'object etag'}, "")
    self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
                        swob.HTTPCreated, {'etag': 'object etag'}, None)
    self.swift.register('DELETE', '/v1/AUTH_test/bucket/object',
                        swob.HTTPNoContent, {}, None)

    # Make get_swift_info report object versioning support in every
    # controller module that queries it; undone via addCleanup.
    self.mock_get_swift_info_result = {'object_versioning': {}}
    for s3api_path in (
        'controllers.obj', 'controllers.bucket',
        'controllers.multi_delete', 'controllers.versioning',
    ):
        patcher = mock.patch(
            'swift.common.middleware.s3api.%s.get_swift_info' % s3api_path,
            return_value=self.mock_get_swift_info_result)
        patcher.start()
        self.addCleanup(patcher.stop)
def test_log_request_stat_type_bad(self):
    """Paths that cannot be classified must emit no timing or
    update_stats metrics."""
    bad_paths = ('', '/', '/bad', '/baddy/mc_badderson', '/v1', '/v1/')
    for bad_path in bad_paths:
        app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
        app.access_logger = debug_logger()
        req = Request.blank(bad_path, environ={'REQUEST_METHOD': 'GET'})
        now = 10000.0
        app.log_request(req, 123, 7, 13, now, now + 2.71828182846)
        self.assertEqual([], app.access_logger.log_dict['timing'])
        self.assertEqual([], app.access_logger.log_dict['update_stats'])
def test_log_query_string(self):
    """The logged request path must include the query string."""
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
    app.access_logger = debug_logger()
    req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
                                      'QUERY_STRING': 'x=3'})
    resp = app(req.environ, start_response)
    # exhaust the response iterator so the access line is emitted;
    # b''.join matches the sibling tests and avoids a throwaway list
    b''.join(resp)
    log_parts = self._log_parts(app)
    self.assertEqual(unquote(log_parts[4]), '/?x=3')
def test_log_request_statsd_invalid_stats_types(self):
    """Unclassifiable URLs produce no statsd timing/update_stats."""
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
    app.access_logger = debug_logger()
    for url in ('/', '/foo', '/foo/bar', '/v1'):
        req = Request.blank(url, environ={'REQUEST_METHOD': 'GET'})
        resp = app(req.environ, start_response)
        # drain the body so logging happens
        b''.join(resp)
        self.assertEqual([], app.access_logger.log_dict['timing'])
        self.assertEqual([], app.access_logger.log_dict['update_stats'])
def test_basic_req_second_time(self):
    """A request already marked as logged must not be logged again."""
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
    app.access_logger = debug_logger()
    environ = {'swift.proxy_access_log_made': True,
               'REQUEST_METHOD': 'GET'}
    req = Request.blank('/', environ=environ)
    resp = app(req.environ, start_response)
    body = b''.join(resp)
    # no access line expected, but the app body still flows through
    self._log_parts(app, should_be_empty=True)
    self.assertEqual(body, b'FAKE APP')
def setUp(self):
    """Wire a proxy Application to a real ObjectController via fake
    http_connect functions.

    Fixes two fixture leaks: the mkdtemp() root is now kept on self so
    a tearDown can remove the whole tree, and the original http_connect
    functions are saved before monkey-patching so they can be restored
    (matching the sibling setUp that already does both).
    """
    self.app = proxy.Application(None, FakeMemcache(),
                                 logger=debug_logger('proxy-ut'),
                                 account_ring=FakeRing(replicas=1),
                                 container_ring=FakeRing(replicas=1))
    monkey_patch_mimetools()
    # keep the temp root so it can be cleaned up later
    self.tmpdir = mkdtemp()
    self.testdir = os.path.join(
        self.tmpdir, 'tmp_test_object_server_ObjectController')
    mkdirs(os.path.join(self.testdir, 'sda1', 'tmp'))
    conf = {'devices': self.testdir, 'mount_check': 'false'}
    self.obj_ctlr = object_server.ObjectController(
        conf, logger=debug_logger('obj-ut'))
    http_connect = get_http_connect(fake_http_connect(200),
                                    fake_http_connect(200),
                                    FakeServerConnection(self.obj_ctlr))
    # save the real connect functions so tearDown can restore them
    self.orig_base_http_connect = swift.proxy.controllers.base.http_connect
    self.orig_obj_http_connect = swift.proxy.controllers.obj.http_connect
    swift.proxy.controllers.base.http_connect = http_connect
    swift.proxy.controllers.obj.http_connect = http_connect
def __init__(self, testdir, policy=None):
    """Fake daemon config for ssync sender tests.

    :param testdir: devices root for the diskfile router
    :param policy: storage policy; defaults to POLICIES.default
    """
    self.logger = debug_logger('test-ssync-sender')
    # timeouts and chunk sizes mirroring object-server daemon settings
    self.conn_timeout = 1
    self.node_timeout = 2
    self.http_timeout = 3
    self.network_chunk_size = 65536
    self.disk_chunk_size = 4096
    conf = {'devices': testdir, 'mount_check': 'false'}
    if policy is None:
        policy = POLICIES.default
    self._diskfile_router = diskfile.DiskFileRouter(conf, self.logger)
    self._diskfile_mgr = self._diskfile_router[policy]
def init_reaper(self, conf=None, myips=None, fakelogger=False):
    """Build an AccountReaper for tests.

    :param conf: reaper config dict (defaults to empty)
    :param myips: ip list the reaper claims as local
    :param fakelogger: install a capturing debug logger when True
    :returns: the configured AccountReaper
    """
    conf = {} if conf is None else conf
    myips = ['10.10.10.1'] if myips is None else myips
    acct_reaper = reaper.AccountReaper(conf)
    acct_reaper.myips = myips
    if fakelogger:
        acct_reaper.logger = unit.debug_logger('test-reaper')
    return acct_reaper
def setUp(self):
    """Stub out internal_client's loadapp/sleep and prepare a recon
    cache directory plus a capturing logger."""
    global not_sleep
    # remember the originals so they can be restored later
    self.old_loadapp = internal_client.loadapp
    self.old_sleep = internal_client.sleep
    internal_client.loadapp = lambda *args, **kwargs: None
    internal_client.sleep = not_sleep
    self.rcache = mkdtemp()
    self.conf = {'recon_cache_path': self.rcache}
    self.logger = debug_logger('test-recon')
def setUp(self):
    """Create an ObjectController on a temp devices tree and make
    tpool.execute run its work synchronously."""
    self.tmpdir = mkdtemp()
    self.testdir = os.path.join(self.tmpdir,
                                'tmp_test_sof_TestObjectController')
    conf = {'devices': self.testdir, 'mount_check': 'false'}
    self.object_controller = object_server.ObjectController(
        conf, logger=debug_logger())
    # sync aggressively so writes hit disk immediately
    self.object_controller.bytes_per_sync = 1

    def _run_inline(func, *args, **kwargs):
        # execute tpool jobs in the calling thread
        return func(*args, **kwargs)

    self._orig_tpool_exc = tpool.execute
    tpool.execute = _run_inline
    self.df_mgr = diskfile.DiskFileManager(
        conf, self.object_controller.logger)
def test_app_exception(self):
    """An exception in the wrapped app still produces a 500 access
    log line with a '-' read length.

    Uses assertRaises instead of try/except-pass so the test fails if
    the wrapped app ever stops raising.
    """
    app = proxy_logging.ProxyLoggingMiddleware(
        FakeAppThatExcepts(), {})
    app.access_logger = debug_logger()
    req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
    with self.assertRaises(Exception):
        app(req.environ, start_response)
    log_parts = self._log_parts(app)
    self.assertEqual(log_parts[6], '500')
    self.assertEqual(log_parts[10], '-')  # read length
def setUp(self):
    """Wire a proxy Application (wrapped in copy middleware) to a real
    ObjectController via fake http_connect functions.

    Requires xattr support on the test filesystem; the temp root and
    the original connect functions are stashed on self — presumably
    cleaned up/restored in tearDown (verify).
    """
    skip_if_no_xattrs()
    self.app = proxy.Application(None, FakeMemcache(),
                                 logger=debug_logger('proxy-ut'),
                                 account_ring=FakeRing(replicas=1),
                                 container_ring=FakeRing(replicas=1))
    self.copy_app = ServerSideCopyMiddleware(self.app, {})
    self.tmpdir = mkdtemp()
    self.testdir = os.path.join(
        self.tmpdir, 'tmp_test_object_server_ObjectController')
    mkdirs(os.path.join(self.testdir, 'sda', 'tmp'))
    conf = {'devices': self.testdir, 'mount_check': 'false'}
    self.obj_ctlr = object_server.ObjectController(
        conf, logger=debug_logger('obj-ut'))
    # GET/PUT to the proxy get routed to the real object controller
    http_connect = get_http_connect(fake_http_connect(200),
                                    fake_http_connect(200),
                                    FakeServerConnection(self.obj_ctlr))
    # save the real connect functions so they can be restored
    self.orig_base_http_connect = swift.proxy.controllers.base.http_connect
    self.orig_obj_http_connect = swift.proxy.controllers.obj.http_connect
    swift.proxy.controllers.base.http_connect = http_connect
    swift.proxy.controllers.obj.http_connect = http_connect
def setUp(self):
    """Build an ObjectExpirer over a fake task queue with stubbed
    internal_client loadapp/sleep (originals kept on self)."""
    global not_sleep
    self.old_loadapp = internal_client.loadapp
    self.old_sleep = internal_client.sleep
    internal_client.loadapp = lambda *a, **kw: None
    internal_client.sleep = not_sleep
    self.rcache = mkdtemp()
    self.conf = {'recon_cache_path': self.rcache}
    self.logger = debug_logger('test-expirer')
    self.ts = make_timestamp_iter()
    # one day in the past (eligible) vs one day in the future (skipped)
    self.past_time = str(int(time() - 86400))
    self.future_time = str(int(time() + 86400))
    # Dummy task queue for test
    self.fake_swift = FakeInternalClient({
        '.expiring_objects': {
            # this task container will be checked
            self.past_time: [
                # tasks ready for execution
                self.past_time + '-a0/c0/o0', self.past_time + '-a1/c1/o1',
                self.past_time + '-a2/c2/o2', self.past_time + '-a3/c3/o3',
                self.past_time + '-a4/c4/o4', self.past_time + '-a5/c5/o5',
                self.past_time + '-a6/c6/o6', self.past_time + '-a7/c7/o7',
                # task objects for unicode test
                self.past_time + u'-a8/c8/o8\u2661',
                self.past_time + u'-a9/c9/o9\xf8',
                # this task will be skipped
                self.future_time + '-a10/c10/o10'],
            # this task container will be skipped
            self.future_time: [
                self.future_time + '-a11/c11/o11']}
    })
    self.expirer = expirer.ObjectExpirer(self.conf, logger=self.logger,
                                         swift=self.fake_swift)

    # target object paths which should be expired now
    self.expired_target_path_list = [
        swob.wsgi_to_str(tgt) for tgt in (
            'a0/c0/o0', 'a1/c1/o1', 'a2/c2/o2', 'a3/c3/o3',
            'a4/c4/o4', 'a5/c5/o5', 'a6/c6/o6', 'a7/c7/o7',
            'a8/c8/o8\xe2\x99\xa1', 'a9/c9/o9\xc3\xb8',
        )
    ]
def setUp(self):
    """Wire a proxy Application to a real ObjectController via fake
    http_connect functions; originals are stashed on self for
    restoration (presumably in tearDown — verify)."""
    self.app = proxy.Application(
        None, FakeMemcache(),
        logger=debug_logger("proxy-ut"),
        account_ring=FakeRing(replicas=1),
        container_ring=FakeRing(replicas=1),
    )
    monkey_patch_mimetools()
    self.tmpdir = mkdtemp()
    self.testdir = os.path.join(self.tmpdir,
                                "tmp_test_object_server_ObjectController")
    mkdirs(os.path.join(self.testdir, "sda", "tmp"))
    conf = {"devices": self.testdir, "mount_check": "false"}
    self.obj_ctlr = object_server.ObjectController(conf,
                                                   logger=debug_logger("obj-ut"))
    # account/container requests get canned 200s; object requests hit
    # the real controller through FakeServerConnection
    http_connect = get_http_connect(
        fake_http_connect(200), fake_http_connect(200),
        FakeServerConnection(self.obj_ctlr)
    )
    self.orig_base_http_connect = swift.proxy.controllers.base.http_connect
    self.orig_obj_http_connect = swift.proxy.controllers.obj.http_connect
    swift.proxy.controllers.base.http_connect = http_connect
    swift.proxy.controllers.obj.http_connect = http_connect
def setUp(self):
    """Start kinetic simulators and connect a client to each port.

    If anything fails mid-setup, tearDown is invoked to stop whatever
    did start, then the original exception is re-raised.  The bare
    ``raise`` replaces the Python 2-only ``raise type, value, tb``
    form, which is a SyntaxError on Python 3; inside an except block it
    re-raises the active exception with its traceback intact.
    """
    self.logger = debug_logger()
    self.test_dir = tempfile.mkdtemp()
    self.ports = self.PORTS
    self._sim_map = {}
    try:
        self._sim_map = start_simulators(self.test_dir, *self.ports)
        self.client_map = {}
        for port in self.ports:
            self.client_map[port] = KineticSwiftClient(
                self.logger, 'localhost', port)
    except Exception:
        # best-effort cleanup before propagating the original error
        self.tearDown()
        raise
def init_reaper(self, conf=None, myips=None, fakelogger=False):
    """Build an AccountReaper for tests with all stats zeroed.

    :param conf: reaper config dict (defaults to empty)
    :param myips: ip list the reaper claims as local
    :param fakelogger: install a capturing debug logger when True
    :returns: the configured AccountReaper
    """
    if conf is None:
        conf = {}
    if myips is None:
        myips = ['10.10.10.1']
    r = reaper.AccountReaper(conf)
    # reset every running stat the reaper accumulates
    r.stats_return_codes = {}
    for counter in ('stats_containers_deleted',
                    'stats_containers_remaining',
                    'stats_containers_possibly_remaining',
                    'stats_objects_deleted',
                    'stats_objects_remaining',
                    'stats_objects_possibly_remaining'):
        setattr(r, counter, 0)
    r.myips = myips
    if fakelogger:
        r.logger = unit.debug_logger('test-reaper')
    return r
def test_retry_http_exception(self):
    """retry() must back off between attempts, re-raise the final
    HTTPException and log every failed attempt."""
    logger = debug_logger('direct-client-test')
    with mock.patch('swift.common.direct_client.sleep') as mock_sleep, \
            mocked_http_conn(HTTPException('Kaboom!')) as conn:
        with self.assertRaises(HTTPException) as err_ctx:
            direct_client.retry(direct_client.direct_delete_object,
                                self.node, self.part,
                                self.account, self.container,
                                self.obj, retries=2,
                                error_log=logger.error)
        self.assertEqual('DELETE', conn.method)
        self.assertEqual('Kaboom!', str(err_ctx.exception))
        # retries=2 -> two sleeps, with backoff arguments 1 then 2
        self.assertEqual([mock.call(1), mock.call(2)],
                         mock_sleep.call_args_list)
    error_lines = logger.get_lines_for_level('error')
    # the initial attempt plus both retries each log the failure
    self.assertEqual(3, len(error_lines))
    for line in error_lines:
        self.assertIn('Kaboom!', line)
def setUp(self):
    """Build an s3api middleware stack (Config-object variant) over a
    fake Swift backend with canned account/bucket/object responses."""
    # setup default config
    self.conf = Config({
        'allow_no_owner': False,
        'location': 'us-east-1',
        'dns_compliant_bucket_names': True,
        'max_bucket_listing': 1000,
        'max_parts_listing': 1000,
        'max_multi_delete_objects': 1000,
        's3_acl': False,
        'storage_domain': 'localhost',
        'auth_pipeline_check': True,
        'max_upload_part_num': 1000,
        'check_bucket_owner': False,
        'force_swift_request_proxy_log': False,
        'allow_multipart_uploads': True,
        'min_segment_size': 5242880,
    })
    # setting carried over from the original test setup
    self.conf.log_level = 'debug'
    self.app = FakeApp()
    self.swift = self.app.swift
    self.s3api = filter_factory({}, **self.conf)(self.app)
    self.s3api.logger = debug_logger()
    self.swift.register('HEAD', '/v1/AUTH_test',
                        swob.HTTPOk, {}, None)
    self.swift.register('HEAD', '/v1/AUTH_test/bucket',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('PUT', '/v1/AUTH_test/bucket',
                        swob.HTTPCreated, {}, None)
    self.swift.register('POST', '/v1/AUTH_test/bucket',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('DELETE', '/v1/AUTH_test/bucket',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('GET', '/v1/AUTH_test/bucket/object',
                        swob.HTTPOk, {'etag': 'object etag'}, "")
    self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
                        swob.HTTPCreated, {'etag': 'object etag'}, None)
    self.swift.register('DELETE', '/v1/AUTH_test/bucket/object',
                        swob.HTTPNoContent, {}, None)
def setUp(self):
    """Create an ObjectController whose container_info is a MagicMock
    returning three object nodes and no ACL/sync/versioning data."""
    logger = debug_logger('proxy-server')
    logger.thread_locals = ('txn1', '127.0.0.2')
    self.app = proxy_server.Application(
        None, FakeMemcache(),
        account_ring=FakeRing(),
        container_ring=FakeRing(),
        object_ring=FakeRing(),
        logger=logger)
    self.controller = proxy_server.ObjectController(
        self.app, 'a', 'c', 'o')
    # three nodes on distinct ports, same ip/device
    nodes = [{'ip': '127.0.0.1', 'port': str(n), 'device': 'sda'}
             for n in (1, 2, 3)]
    self.controller.container_info = mock.MagicMock(return_value={
        'partition': 1,
        'nodes': nodes,
        'write_acl': None,
        'read_acl': None,
        'sync_key': None,
        'versions': None})
def setUp(self):
    """Create a devices tree and a pickled two-device account ring for
    the container updater tests.

    NOTE(review): mutates module-level hash path config on utils —
    presumably restored elsewhere; verify.
    """
    utils.HASH_PATH_SUFFIX = 'endcap'
    utils.HASH_PATH_PREFIX = 'startcap'
    self.testdir = os.path.join(mkdtemp(), 'tmp_test_container_updater')
    rmtree(self.testdir, ignore_errors=1)
    os.mkdir(self.testdir)
    # write a gzip-pickled RingData: two replica rows over two devices
    # in zones 0 and 2, partition power 30
    ring_file = os.path.join(self.testdir, 'account.ring.gz')
    with closing(GzipFile(ring_file, 'wb')) as f:
        pickle.dump(
            RingData([[0, 1, 0, 1], [1, 0, 1, 0]],
                     [{'id': 0, 'ip': '127.0.0.1', 'port': 12345,
                       'device': 'sda1', 'zone': 0},
                      {'id': 1, 'ip': '127.0.0.1', 'port': 12345,
                       'device': 'sda1', 'zone': 2}], 30),
            f)
    self.devices_dir = os.path.join(self.testdir, 'devices')
    os.mkdir(self.devices_dir)
    self.sda1 = os.path.join(self.devices_dir, 'sda1')
    os.mkdir(self.sda1)
    self.logger = debug_logger('test')
def setUp(self):
    """Build a patched proxy app whose ObjectController always sees
    the test's (mutable) self.container_info as the container info."""
    # setup fake rings with handoffs
    self.obj_ring = FakeRing(max_more_nodes=3)
    for policy in POLICIES:
        policy.object_ring = self.obj_ring

    logger = debug_logger('proxy-server')
    logger.thread_locals = ('txn1', '127.0.0.2')
    self.app = PatchedObjControllerApp(
        None, FakeMemcache(), account_ring=FakeRing(),
        container_ring=FakeRing(), logger=logger)

    class FakeContainerInfoObjController(proxy_server.ObjectController):
        # Note the first parameter is named ``controller`` (not self)
        # so the closure's ``self`` still refers to the test case.
        def container_info(controller, *args, **kwargs):
            patch_path = 'swift.proxy.controllers.base.get_info'
            with mock.patch(patch_path) as mock_get_info:
                # hand back a copy of the test's current container_info
                mock_get_info.return_value = dict(self.container_info)
                return super(FakeContainerInfoObjController,
                             controller).container_info(*args, **kwargs)

    self.app.object_controller = FakeContainerInfoObjController