Example #1
    def test_account_info_in_response_env(self):
        controller = proxy_server.AccountController(self.app, 'AUTH_bob')
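        # HEAD the account; a single mocked 200 backend response satisfies
        # the request and populates swift.infocache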
        with mocked_http_conn(200) as mock_conn:
            req = Request.blank('/v1/AUTH_bob')
            resp = controller.HEAD(req)
        self.assertEqual(2, resp.status_int // 100)
        self.assertEqual(
            ['/AUTH_bob'],
            # requests are like /sdX/0/..
            [r['path'][6:] for r in mock_conn.requests])
        info_cache = resp.environ['swift.infocache']
        self.assertIn('account/AUTH_bob', info_cache)
        header_info = headers_to_account_info(resp.headers)
        self.assertEqual(header_info, info_cache['account/AUTH_bob'])

        # The failure doesn't lead to cache eviction
        errors = [500] * self.ACCOUNT_REPLICAS
        with mocked_http_conn(*errors) as mock_conn:
            req = Request.blank('/v1/AUTH_bob', {
                'PATH_INFO': '/v1/AUTH_bob',
                'swift.infocache': info_cache
            })
            resp = controller.HEAD(req)
        self.assertEqual(5, resp.status_int // 100)
        self.assertEqual(
            ['/AUTH_bob'] * self.ACCOUNT_REPLICAS,
            # requests are like /sdX/0/..
            [r['path'][6:] for r in mock_conn.requests])
        self.assertIs(info_cache, resp.environ['swift.infocache'])
        # The *old* header info is all still there
        self.assertIn('account/AUTH_bob', info_cache)
        self.assertEqual(header_info, info_cache['account/AUTH_bob'])
Example #2
    def test_obj_put_async_root_update_redirected_previous_success(self):
        policies = list(POLICIES)
        random.shuffle(policies)
        # setup updater
        conf = {
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
        }
        daemon = object_updater.ObjectUpdater(conf, logger=self.logger)
        async_dir = os.path.join(self.sda1, get_async_dir(policies[0]))
        os.mkdir(async_dir)
        dfmanager = DiskFileManager(conf, daemon.logger)

        ts_obj = next(self.ts_iter)
        self._write_async_update(dfmanager, ts_obj, policies[0])
        orig_async_path, orig_async_data = self._check_async_file(async_dir)

        # run once
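        # two 507s and one 200: only one container replica update succeeds,
        # so the async pending file is rewritten with that success recorded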
        with mocked_http_conn(
                507, 200, 507) as conn:
            with mock.patch('swift.obj.updater.dump_recon_cache'):
                daemon.run_once()

        self._check_update_requests(conn.requests, ts_obj, policies[0])
        self.assertEqual(['/sda1/0/a/c/o'] * 3,
                         [req['path'] for req in conn.requests])
        self.assertEqual(
            {'failures': 1, 'async_pendings': 1},
            daemon.logger.get_increment_counts())
        async_path, async_data = self._check_async_file(async_dir)
        self.assertEqual(dict(orig_async_data, successes=[1]), async_data)

        # run again - expect 3 redirected updates despite previous success
        ts_redirect = next(self.ts_iter)
        resp_headers_1 = {'Location': '/.shards_a/c_shard_1/o',
                          'X-Backend-Redirect-Timestamp': ts_redirect.internal}
        fake_responses = (
            # 1st round: the two nodes that failed last time get redirected;
            # 2nd round: all 3 updates to the shard container succeed
            [(301, resp_headers_1)] * 2 + [(200, {})] * 3)
        fake_status_codes, fake_headers = zip(*fake_responses)
        with mocked_http_conn(
                *fake_status_codes, headers=fake_headers) as conn:
            with mock.patch('swift.obj.updater.dump_recon_cache'):
                daemon.run_once()

        self._check_update_requests(conn.requests[:2], ts_obj, policies[0])
        self._check_update_requests(conn.requests[2:], ts_obj, policies[0])
        root_part = daemon.container_ring.get_part('a/c')
        shard_1_part = daemon.container_ring.get_part('.shards_a/c_shard_1')
        self.assertEqual(
            ['/sda1/%s/a/c/o' % root_part] * 2 +
            ['/sda1/%s/.shards_a/c_shard_1/o' % shard_1_part] * 3,
            [req['path'] for req in conn.requests])
        self.assertEqual(
            {'redirects': 1, 'successes': 1, 'failures': 1, 'unlinks': 1,
             'async_pendings': 1},
            daemon.logger.get_increment_counts())
        self.assertFalse(os.listdir(async_dir))  # no async file
Example #3
    def test_get_shard_ranges_for_object_put(self):
        ts_iter = make_timestamp_iter()
        shard_ranges = [dict(ShardRange(
            '.sharded_a/sr%d' % i, next(ts_iter), '%d_lower' % i,
            '%d_upper' % i, object_count=i, bytes_used=1024 * i,
            meta_timestamp=next(ts_iter)))
            for i in range(3)]
        base = Controller(self.app)
        req = Request.blank('/v1/a/c/o', method='PUT')
        resp_headers = {'X-Backend-Record-Type': 'shard'}
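        # expect an account HEAD followed by a container GET asking for the
        # shard ranges that include the given object name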
        with mocked_http_conn(
            200, 200,
            body_iter=iter([b'',
                            json.dumps(shard_ranges[1:2]).encode('ascii')]),
            headers=resp_headers
        ) as fake_conn:
            actual = base._get_shard_ranges(req, 'a', 'c', '1_test')

        # account info
        captured = fake_conn.requests
        self.assertEqual('HEAD', captured[0]['method'])
        self.assertEqual('a', captured[0]['path'][7:])
        # container GET
        self.assertEqual('GET', captured[1]['method'])
        self.assertEqual('a/c', captured[1]['path'][7:])
        params = sorted(captured[1]['qs'].split('&'))
        self.assertEqual(
            ['format=json', 'includes=1_test'], params)
        self.assertEqual(
            'shard', captured[1]['headers'].get('X-Backend-Record-Type'))
        self.assertEqual(shard_ranges[1:2], [dict(pr) for pr in actual])
        self.assertFalse(self.app.logger.get_lines_for_level('error'))
Example #4
        def do_test(headers_out, expected, container_path=None):
            # write an async
            dfmanager = DiskFileManager(conf, daemon.logger)
            self._write_async_update(dfmanager, next(ts_iter), policies[0],
                                     headers=headers_out,
                                     container_path=container_path)
            request_log = []

            def capture(*args, **kwargs):
                request_log.append((args, kwargs))

            # run once
            fake_status_codes = [
                200,  # object update success
                200,  # object update success
                200,  # object update success
            ]
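            # give_connect captures each backend request's args for inspection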
            with mocked_http_conn(*fake_status_codes, give_connect=capture):
                daemon.run_once()
            self.assertEqual(len(fake_status_codes), len(request_log))
            for request_args, request_kwargs in request_log:
                ip, part, method, path, headers, qs, ssl = request_args
                self.assertEqual(method, 'PUT')
                self.assertDictEqual(expected, headers)
            self.assertEqual(
                daemon.logger.get_increment_counts(),
                {'successes': 1, 'unlinks': 1, 'async_pendings': 1})
            self.assertFalse(os.listdir(async_dir))
            daemon.logger.clear()
Example #5
    def test_run_once_unicode_problem(self):
        class InternalClient(object):

            container_ring = FakeRing()

            def get_account_info(*a, **kw):
                return 1, 2

            def iter_containers(*a, **kw):
                return [{'name': u'1234'}]

            def iter_objects(*a, **kw):
                return [{'name': u'1234-troms\xf8'}]

            def make_request(*a, **kw):
                pass

            def delete_container(*a, **kw):
                pass

        x = expirer.ObjectExpirer(self.conf, logger=self.logger)
        x.swift = InternalClient()
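        # a single expiring object: pop_queue should issue 3 container
        # replica DELETEs, one per mocked 200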

        requests = []

        def capture_requests(ipaddr, port, method, path, *args, **kwargs):
            requests.append((method, path))

        with mocked_http_conn(
                200, 200, 200, give_connect=capture_requests):
            x.run_once()
        self.assertEqual(len(requests), 3)
Example #7
 def test_response_codes_for_GET(self):
     nodes = self.app.container_ring.replicas
     handoffs = self.app.request_node_count(nodes) - nodes
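     # each case: (backend responses in order, expected proxy status)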
     GET_TEST_CASES = [
         ([], 503),
         ([200], 200),
         ([404, 200], 200),
         ([404] * nodes + [200], 200),
         ([Timeout()] * nodes + [404] * handoffs, 404),
         ([Timeout()] * (nodes + handoffs), 503),
         ([Timeout()] * (nodes + handoffs - 1) + [404], 404),
         ([503, 200], 200),
         ([507, 200], 200),
     ]
     failures = []
     for case, expected in GET_TEST_CASES:
         try:
             with mocked_http_conn(*case):
                 req = Request.blank('/v1/a/c')
                 resp = req.get_response(self.app)
                 try:
                     self.assertEqual(resp.status_int, expected)
                 except AssertionError:
                     msg = '%r => %s (expected %s)' % (
                         case, resp.status_int, expected)
                     failures.append(msg)
         except AssertionError as e:
             # mocked_http_conn raises on exit if canned statuses went unused
             msg = '%r => %s' % (case, e)
             failures.append(msg)
     if failures:
         self.fail('Some requests did not have expected response:\n' +
                   '\n'.join(failures))
Example #8
        def do_test(headers_out, expected, container_path=None):
            # write an async
            dfmanager = DiskFileManager(conf, daemon.logger)
            self._write_async_update(dfmanager,
                                     next(ts_iter),
                                     policies[0],
                                     headers=headers_out,
                                     container_path=container_path)
            request_log = []

            def capture(*args, **kwargs):
                request_log.append((args, kwargs))

            # run once
            fake_status_codes = [
                200,  # object update success
                200,  # object update success
                200,  # object update success
            ]
            with mocked_http_conn(*fake_status_codes, give_connect=capture):
                daemon.run_once()
            self.assertEqual(len(fake_status_codes), len(request_log))
            for request_args, request_kwargs in request_log:
                ip, part, method, path, headers, qs, ssl = request_args
                self.assertEqual(method, 'PUT')
                self.assertDictEqual(expected, headers)
            self.assertEqual(daemon.logger.get_increment_counts(), {
                'successes': 1,
                'unlinks': 1,
                'async_pendings': 1
            })
            self.assertFalse(os.listdir(async_dir))
            daemon.logger.clear()
Example #9
 def test_status_map(statuses, expected):
     self.app._error_limiting = {}
     req = Request.blank('/v1/a/c', method=method)
     with mocked_http_conn(*statuses) as fake_conn:
         resp = req.get_response(self.app)
     self.assertEqual(resp.status_int, expected)
     for req in fake_conn.requests:
         self.assertEqual(req['method'], method)
         self.assertTrue(req['path'].endswith('/a/c'))
Example #10
 def test_status_map(statuses, expected):
     self.app._error_limiting = {}
     req = Request.blank("/v1/a/c", method=method)
     with mocked_http_conn(*statuses) as fake_conn:
         print "a" * 50
         resp = req.get_response(self.app)
     self.assertEqual(resp.status_int, expected)
     for req in fake_conn.requests:
         self.assertEqual(req["method"], method)
         self.assert_(req["path"].endswith("/a/c"))
Example #12
    def test_obj_put_async_root_update_redirected(self):
        policies = list(POLICIES)
        random.shuffle(policies)
        # setup updater
        conf = {
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
        }
        daemon = object_updater.ObjectUpdater(conf, logger=self.logger)
        async_dir = os.path.join(self.sda1, get_async_dir(policies[0]))
        os.mkdir(async_dir)
        dfmanager = DiskFileManager(conf, daemon.logger)

        ts_obj = next(self.ts_iter)
        self._write_async_update(dfmanager, ts_obj, policies[0])

        # run once
        ts_redirect_1 = next(self.ts_iter)
        ts_redirect_2 = next(self.ts_iter)
        fake_responses = [
            # first round of update attempts, newest redirect should be chosen
            (200, {}),
            (301, {
                'Location': '/.shards_a/c_shard_new/o',
                'X-Backend-Redirect-Timestamp': ts_redirect_2.internal
            }),
            (301, {
                'Location': '/.shards_a/c_shard_old/o',
                'X-Backend-Redirect-Timestamp': ts_redirect_1.internal
            }),
            # second round of update attempts
            (200, {}),
            (200, {}),
            (200, {}),
        ]
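        # unzip into parallel status and headers args for mocked_http_conn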
        fake_status_codes, fake_headers = zip(*fake_responses)
        with mocked_http_conn(*fake_status_codes,
                              headers=fake_headers) as conn:
            with mock.patch('swift.obj.updater.dump_recon_cache'):
                daemon.run_once()

        self._check_update_requests(conn.requests[:3], ts_obj, policies[0])
        self._check_update_requests(conn.requests[3:], ts_obj, policies[0])
        self.assertEqual(['/sda1/0/a/c/o'] * 3 +
                         ['/sda1/0/.shards_a/c_shard_new/o'] * 3,
                         [req['path'] for req in conn.requests])
        self.assertEqual(
            {
                'redirects': 1,
                'successes': 1,
                'unlinks': 1,
                'async_pendings': 1
            }, daemon.logger.get_increment_counts())
        self.assertFalse(os.listdir(async_dir))  # no async file
Example #13
    def test_obj_put_legacy_updates(self):
        ts = (normalize_timestamp(t) for t in itertools.count(int(time())))
        policy = POLICIES.get_by_index(0)
        # setup updater
        conf = {
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
        }
        async_dir = os.path.join(self.sda1, get_async_dir(policy))
        os.mkdir(async_dir)

        account, container, obj = 'a', 'c', 'o'
        # write an async
        for op in ('PUT', 'DELETE'):
            self.logger._clear()
            daemon = object_updater.ObjectUpdater(conf, logger=self.logger)
            dfmanager = DiskFileManager(conf, daemon.logger)
            # don't include storage-policy-index in headers_out pickle
            headers_out = HeaderKeyDict({
                'x-size': 0,
                'x-content-type': 'text/plain',
                'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                'x-timestamp': next(ts),
            })
            data = {
                'op': op,
                'account': account,
                'container': container,
                'obj': obj,
                'headers': headers_out
            }
            dfmanager.pickle_async_update(self.sda1, account, container, obj,
                                          data, next(ts), policy)
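            # even though no storage policy index was pickled, each update is
            # expected to carry an X-Backend-Storage-Policy-Index header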

            request_log = []

            def capture(*args, **kwargs):
                request_log.append((args, kwargs))

            # run once
            fake_status_codes = [200, 200, 200]
            with mocked_http_conn(*fake_status_codes, give_connect=capture):
                daemon.run_once()
            self.assertEqual(len(fake_status_codes), len(request_log))
            for request_args, request_kwargs in request_log:
                ip, part, method, path, headers, qs, ssl = request_args
                self.assertEqual(method, op)
                self.assertEqual(headers['X-Backend-Storage-Policy-Index'],
                                 str(int(policy)))
            self.assertEqual(daemon.logger.get_increment_counts(), {
                'successes': 1,
                'unlinks': 1,
                'async_pendings': 1
            })
Example #14
 def _check_get_shard_ranges_bad_data(self, body):
     base = Controller(self.app)
     req = Request.blank('/v1/a/c/o', method='PUT')
     # account HEAD gets an empty body; the container GET returns the bad body
     headers = {'X-Backend-Record-Type': 'shard'}
     with mocked_http_conn(200, 200, body_iter=iter([b'', body]),
                           headers=headers):
         actual = base._get_shard_ranges(req, 'a', 'c', '1_test')
     self.assertIsNone(actual)
     lines = self.app.logger.get_lines_for_level('error')
     return lines
Example #15
    def test_run_once_unicode_problem(self):
        requests = []

        def capture_requests(ipaddr, port, method, path, *args, **kwargs):
            requests.append((method, path))

        # pop_queue issues 3 DELETEs for each of the 10 executed task objects
        code_list = [200] * 3 * 10
        with mocked_http_conn(*code_list, give_connect=capture_requests):
            self.expirer.run_once()
        self.assertEqual(len(requests), 30)
Example #17
 def test_get_shard_ranges_request_failed(self):
     base = Controller(self.app)
     req = Request.blank('/v1/a/c/o', method='PUT')
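     # account HEAD succeeds but every container GET returns 404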
     with mocked_http_conn(200, 404, 404, 404):
         actual = base._get_shard_ranges(req, 'a', 'c', '1_test')
     self.assertIsNone(actual)
     self.assertFalse(self.app.logger.get_lines_for_level('error'))
     warning_lines = self.app.logger.get_lines_for_level('warning')
     self.assertIn('Failed to get container listing', warning_lines[0])
     self.assertIn('/a/c', warning_lines[0])
     self.assertFalse(warning_lines[1:])
Example #18
    def test_obj_put_async_updates(self):
        ts = (normalize_timestamp(t) for t in
              itertools.count(int(time())))
        policy = random.choice(list(POLICIES))
        # setup updater
        conf = {
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
        }
        daemon = object_updater.ObjectUpdater(conf, logger=self.logger)
        async_dir = os.path.join(self.sda1, get_async_dir(policy))
        os.mkdir(async_dir)

        # write an async
        dfmanager = DiskFileManager(conf, daemon.logger)
        account, container, obj = 'a', 'c', 'o'
        op = 'PUT'
        headers_out = swob.HeaderKeyDict({
            'x-size': 0,
            'x-content-type': 'text/plain',
            'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
            'x-timestamp': next(ts),
            'X-Backend-Storage-Policy-Index': int(policy),
        })
        data = {'op': op, 'account': account, 'container': container,
                'obj': obj, 'headers': headers_out}
        dfmanager.pickle_async_update(self.sda1, account, container, obj,
                                      data, next(ts), policy)

        request_log = []

        def capture(*args, **kwargs):
            request_log.append((args, kwargs))

        # run once
        fake_status_codes = [
            200,  # object update success
            200,  # object update success
            200,  # object update success
        ]
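        # one mocked response per container replica update request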
        with mocked_http_conn(*fake_status_codes, give_connect=capture):
            daemon.run_once()
        self.assertEqual(len(fake_status_codes), len(request_log))
        for request_args, request_kwargs in request_log:
            ip, part, method, path, headers, qs, ssl = request_args
            self.assertEqual(method, 'PUT')
            self.assertEqual(headers['X-Backend-Storage-Policy-Index'],
                             str(int(policy)))
        self.assertEqual(daemon.logger.get_increment_counts(),
                         {'successes': 1, 'unlinks': 1, 'async_pendings': 1})
Example #19
 def test_get_shard_ranges_missing_record_type(self):
     base = Controller(self.app)
     req = Request.blank('/v1/a/c/o', method='PUT')
     sr = ShardRange('a/c', Timestamp.now())
     body = json.dumps([dict(sr)]).encode('ascii')
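     # the response lacks an X-Backend-Record-Type: shard header, so the
     # listing is rejected as an unexpected record type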
     with mocked_http_conn(
             200, 200, body_iter=iter([b'', body])):
         actual = base._get_shard_ranges(req, 'a', 'c', '1_test')
     self.assertIsNone(actual)
     error_lines = self.app.logger.get_lines_for_level('error')
     self.assertIn('Failed to get shard ranges', error_lines[0])
     self.assertIn('unexpected record type', error_lines[0])
     self.assertIn('/a/c', error_lines[0])
     self.assertFalse(error_lines[1:])
Example #20
    def test_run_once_unicode_problem(self):
        fake_swift = FakeInternalClient({
            '.expiring_objects': {u'1234': [u'1234-troms\xf8']}
        })
        x = expirer.ObjectExpirer(self.conf, logger=self.logger,
                                  swift=fake_swift)

        requests = []

        def capture_requests(ipaddr, port, method, path, *args, **kwargs):
            requests.append((method, path))

        with mocked_http_conn(200, 200, 200, give_connect=capture_requests):
            x.run_once()
        self.assertEqual(len(requests), 3)
Example #21
    def test_obj_put_async_root_update_redirected(self):
        policies = list(POLICIES)
        random.shuffle(policies)
        # setup updater
        conf = {
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
        }
        daemon = object_updater.ObjectUpdater(conf, logger=self.logger)
        async_dir = os.path.join(self.sda1, get_async_dir(policies[0]))
        os.mkdir(async_dir)
        dfmanager = DiskFileManager(conf, daemon.logger)

        ts_obj = next(self.ts_iter)
        self._write_async_update(dfmanager, ts_obj, policies[0])

        # run once
        ts_redirect_1 = next(self.ts_iter)
        ts_redirect_2 = next(self.ts_iter)
        fake_responses = [
            # first round of update attempts, newest redirect should be chosen
            (200, {}),
            (301, {'Location': '/.shards_a/c_shard_new/o',
                   'X-Backend-Redirect-Timestamp': ts_redirect_2.internal}),
            (301, {'Location': '/.shards_a/c_shard_old/o',
                   'X-Backend-Redirect-Timestamp': ts_redirect_1.internal}),
            # second round of update attempts
            (200, {}),
            (200, {}),
            (200, {}),
        ]
        fake_status_codes, fake_headers = zip(*fake_responses)
        with mocked_http_conn(
                *fake_status_codes, headers=fake_headers) as conn:
            with mock.patch('swift.obj.updater.dump_recon_cache'):
                daemon.run_once()

        self._check_update_requests(conn.requests[:3], ts_obj, policies[0])
        self._check_update_requests(conn.requests[3:], ts_obj, policies[0])
        self.assertEqual(['/sda1/0/a/c/o'] * 3 +
                         ['/sda1/0/.shards_a/c_shard_new/o'] * 3,
                         [req['path'] for req in conn.requests])
        self.assertEqual(
            {'redirects': 1, 'successes': 1,
             'unlinks': 1, 'async_pendings': 1},
            daemon.logger.get_increment_counts())
        self.assertFalse(os.listdir(async_dir))  # no async file
Example #22
    def test_pop_queue(self):
        x = expirer.ObjectExpirer({}, logger=self.logger,
                                  swift=FakeInternalClient({}))
        requests = []

        def capture_requests(ipaddr, port, method, path, *args, **kwargs):
            requests.append((method, path))
        with mocked_http_conn(
                200, 200, 200, give_connect=capture_requests) as fake_conn:
            x.pop_queue('c', 'o')
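            # exhausting code_iter proves all three mocked responses were used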
            self.assertRaises(StopIteration, fake_conn.code_iter.next)
        for method, path in requests:
            self.assertEqual(method, 'DELETE')
            device, part, account, container, obj = utils.split_path(
                path, 5, 5, True)
            self.assertEqual(account, '.expiring_objects')
            self.assertEqual(container, 'c')
            self.assertEqual(obj, 'o')
Example #23
    def test_pop_queue(self):
        x = expirer.ObjectExpirer({}, logger=self.logger,
                                  swift=FakeInternalClient({}))
        requests = []

        def capture_requests(ipaddr, port, method, path, *args, **kwargs):
            requests.append((method, path))
        with mocked_http_conn(
                200, 200, 200, give_connect=capture_requests) as fake_conn:
            x.pop_queue('a', 'c', 'o')
            self.assertRaises(StopIteration, fake_conn.code_iter.next)
        for method, path in requests:
            self.assertEqual(method, 'DELETE')
            device, part, account, container, obj = utils.split_path(
                path, 5, 5, True)
            self.assertEqual(account, 'a')
            self.assertEqual(container, 'c')
            self.assertEqual(obj, 'o')
Example #24
    def test_pop_queue(self):
        class InternalClient(object):
            container_ring = FakeRing()

        x = expirer.ObjectExpirer({}, logger=self.logger, swift=InternalClient())
        requests = []

        def capture_requests(ipaddr, port, method, path, *args, **kwargs):
            requests.append((method, path))

        with mocked_http_conn(200, 200, 200, give_connect=capture_requests) as fake_conn:
            x.pop_queue("c", "o")
            self.assertRaises(StopIteration, fake_conn.code_iter.next)
        for method, path in requests:
            self.assertEqual(method, "DELETE")
            device, part, account, container, obj = utils.split_path(path, 5, 5, True)
            self.assertEqual(account, ".expiring_objects")
            self.assertEqual(container, "c")
            self.assertEqual(obj, "o")
Example #25
    def test_response_codes_for_GET(self):
        nodes = self.app.container_ring.replicas
        handoffs = self.app.request_node_count(nodes) - nodes
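        # each tuple pairs a backend response sequence with the expected
        # proxy status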
        GET_TEST_CASES = [
            ([socket.error()] * (nodes + handoffs), 503),
            ([500] * (nodes + handoffs), 503),
            ([200], 200),
            ([404, 200], 200),
            ([404] * nodes + [200], 200),
            ([Timeout()] * nodes + [404] * handoffs, 404),
            ([Timeout()] * (nodes + handoffs), 503),
            ([Timeout()] * (nodes + handoffs - 1) + [404], 404),
            ([503, 200], 200),
            ([507, 200], 200),
        ]
        failures = []
        for case, expected in GET_TEST_CASES:
            try:
                with mocked_http_conn(*case):
                    req = Request.blank('/v1/a/c')
                    resp = req.get_response(self.app)
                    try:
                        self.assertEqual(resp.status_int, expected)
                    except AssertionError:
                        msg = '%r => %s (expected %s)' % (
                            case, resp.status_int, expected)
                        failures.append(msg)
            except AssertionError as e:
                # mocked_http_conn raises on exit if canned statuses went unused
                msg = '%r => %s' % (case, e)
                failures.append(msg)
        if failures:
            self.fail('Some requests did not have expected response:\n' +
                      '\n'.join(failures))

        # One more test, simulating all nodes being error-limited
        with mocked_http_conn(), mock.patch.object(self.app,
                                                   'iter_nodes',
                                                   return_value=[]):
            req = Request.blank('/v1/a/c')
            resp = req.get_response(self.app)
            self.assertEqual(resp.status_int, 503)
Example #26
    def _check_obj_put_async_update_bad_redirect_headers(self, headers):
        policies = list(POLICIES)
        random.shuffle(policies)
        # setup updater
        conf = {
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
        }
        daemon = object_updater.ObjectUpdater(conf, logger=self.logger)
        async_dir = os.path.join(self.sda1, get_async_dir(policies[0]))
        os.mkdir(async_dir)
        dfmanager = DiskFileManager(conf, daemon.logger)

        ts_obj = next(self.ts_iter)
        self._write_async_update(dfmanager, ts_obj, policies[0])
        orig_async_path, orig_async_data = self._check_async_file(async_dir)

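        # all three container replicas redirect but with unusable headers,
        # so the update fails and the async file is left untouched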
        fake_responses = [
            (301, headers),
            (301, headers),
            (301, headers),
        ]
        fake_status_codes, fake_headers = zip(*fake_responses)
        with mocked_http_conn(*fake_status_codes,
                              headers=fake_headers) as conn:
            with mock.patch('swift.obj.updater.dump_recon_cache'):
                daemon.run_once()

        self._check_update_requests(conn.requests, ts_obj, policies[0])
        self.assertEqual(['/sda1/0/a/c/o'] * 3,
                         [req['path'] for req in conn.requests])
        self.assertEqual({
            'failures': 1,
            'async_pendings': 1
        }, daemon.logger.get_increment_counts())
        # async file still intact
        async_path, async_data = self._check_async_file(async_dir)
        self.assertEqual(orig_async_path, async_path)
        self.assertEqual(orig_async_data, async_data)
        return daemon
Example #27
    def test_obj_put_legacy_updates(self):
        ts = (normalize_timestamp(t) for t in itertools.count(int(time())))
        policy = POLICIES.get_by_index(0)
        # setup updater
        conf = {"devices": self.devices_dir, "mount_check": "false", "swift_dir": self.testdir}
        async_dir = os.path.join(self.sda1, get_async_dir(policy.idx))
        os.mkdir(async_dir)

        account, container, obj = "a", "c", "o"
        # write an async
        for op in ("PUT", "DELETE"):
            self.logger._clear()
            daemon = object_updater.ObjectUpdater(conf, logger=self.logger)
            dfmanager = DiskFileManager(conf, daemon.logger)
            # don't include storage-policy-index in headers_out pickle
            headers_out = swob.HeaderKeyDict(
                {
                    "x-size": 0,
                    "x-content-type": "text/plain",
                    "x-etag": "d41d8cd98f00b204e9800998ecf8427e",
                    "x-timestamp": ts.next(),
                }
            )
            data = {"op": op, "account": account, "container": container, "obj": obj, "headers": headers_out}
            dfmanager.pickle_async_update(self.sda1, account, container, obj, data, ts.next(), policy.idx)

            request_log = []

            def capture(*args, **kwargs):
                request_log.append((args, kwargs))

            # run once
            fake_status_codes = [200, 200, 200]
            with mocked_http_conn(*fake_status_codes, give_connect=capture):
                daemon.run_once()
            self.assertEqual(len(fake_status_codes), len(request_log))
            for request_args, request_kwargs in request_log:
                ip, part, method, path, headers, qs, ssl = request_args
                self.assertEqual(method, op)
                self.assertEqual(headers["X-Backend-Storage-Policy-Index"], str(policy.idx))
            self.assertEqual(daemon.logger.get_increment_counts(), {"successes": 1, "unlinks": 1, "async_pendings": 1})
Example #28
    def _check_obj_put_async_update_bad_redirect_headers(self, headers):
        policies = list(POLICIES)
        random.shuffle(policies)
        # setup updater
        conf = {
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
        }
        daemon = object_updater.ObjectUpdater(conf, logger=self.logger)
        async_dir = os.path.join(self.sda1, get_async_dir(policies[0]))
        os.mkdir(async_dir)
        dfmanager = DiskFileManager(conf, daemon.logger)

        ts_obj = next(self.ts_iter)
        self._write_async_update(dfmanager, ts_obj, policies[0])
        orig_async_path, orig_async_data = self._check_async_file(async_dir)

        fake_responses = [
            (301, headers),
            (301, headers),
            (301, headers),
        ]
        fake_status_codes, fake_headers = zip(*fake_responses)
        with mocked_http_conn(
                *fake_status_codes, headers=fake_headers) as conn:
            with mock.patch('swift.obj.updater.dump_recon_cache'):
                daemon.run_once()

        self._check_update_requests(conn.requests, ts_obj, policies[0])
        self.assertEqual(['/sda1/0/a/c/o'] * 3,
                         [req['path'] for req in conn.requests])
        self.assertEqual(
            {'failures': 1, 'async_pendings': 1},
            daemon.logger.get_increment_counts())
        # async file still intact
        async_path, async_data = self._check_async_file(async_dir)
        self.assertEqual(orig_async_path, async_path)
        self.assertEqual(orig_async_data, async_data)
        return daemon
Example #29
    def test_obj_put_async_update_redirection_loop(self):
        policies = list(POLICIES)
        random.shuffle(policies)
        # setup updater
        conf = {
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
        }
        daemon = object_updater.ObjectUpdater(conf, logger=self.logger)
        async_dir = os.path.join(self.sda1, get_async_dir(policies[0]))
        os.mkdir(async_dir)
        dfmanager = DiskFileManager(conf, daemon.logger)

        ts_obj = next(self.ts_iter)
        self._write_async_update(dfmanager, ts_obj, policies[0])
        orig_async_path, orig_async_data = self._check_async_file(async_dir)

        # run once
        ts_redirect = next(self.ts_iter)

        resp_headers_1 = {'Location': '/.shards_a/c_shard_1/o',
                          'X-Backend-Redirect-Timestamp': ts_redirect.internal}
        resp_headers_2 = {'Location': '/.shards_a/c_shard_2/o',
                          'X-Backend-Redirect-Timestamp': ts_redirect.internal}
        fake_responses = (
            # 1st round of redirects, 2nd round of redirects
            [(301, resp_headers_1)] * 3 + [(301, resp_headers_2)] * 3)
        fake_status_codes, fake_headers = zip(*fake_responses)
        with mocked_http_conn(
                *fake_status_codes, headers=fake_headers) as conn:
            with mock.patch('swift.obj.updater.dump_recon_cache'):
                daemon.run_once()
        self._check_update_requests(conn.requests[:3], ts_obj, policies[0])
        self._check_update_requests(conn.requests[3:], ts_obj, policies[0])
        # only *one* set of redirected requests is attempted per cycle
        root_part = daemon.container_ring.get_part('a/c')
        shard_1_part = daemon.container_ring.get_part('.shards_a/c_shard_1')
        shard_2_part = daemon.container_ring.get_part('.shards_a/c_shard_2')
        shard_3_part = daemon.container_ring.get_part('.shards_a/c_shard_3')
        self.assertEqual(['/sda1/%s/a/c/o' % root_part] * 3 +
                         ['/sda1/%s/.shards_a/c_shard_1/o' % shard_1_part] * 3,
                         [req['path'] for req in conn.requests])
        self.assertEqual(
            {'redirects': 2, 'async_pendings': 1},
            daemon.logger.get_increment_counts())
        # update failed, we still have pending file with most recent redirect
        # response Location header value added to data
        async_path, async_data = self._check_async_file(async_dir)
        self.assertEqual(orig_async_path, async_path)
        self.assertEqual(
            dict(orig_async_data, container_path='.shards_a/c_shard_2',
                 redirect_history=['.shards_a/c_shard_1',
                                   '.shards_a/c_shard_2']),
            async_data)

        # next cycle, more redirects! first is to previously visited location
        resp_headers_3 = {'Location': '/.shards_a/c_shard_3/o',
                          'X-Backend-Redirect-Timestamp': ts_redirect.internal}
        fake_responses = (
            # 1st round of redirects, 2nd round of redirects
            [(301, resp_headers_1)] * 3 + [(301, resp_headers_3)] * 3)
        fake_status_codes, fake_headers = zip(*fake_responses)
        with mocked_http_conn(
                *fake_status_codes, headers=fake_headers) as conn:
            with mock.patch('swift.obj.updater.dump_recon_cache'):
                daemon.run_once()
        self._check_update_requests(conn.requests[:3], ts_obj, policies[0])
        self._check_update_requests(conn.requests[3:], ts_obj, policies[0])
        # first try the previously persisted container path, response to that
        # creates a loop so ignore and send to root
        self.assertEqual(
            ['/sda1/%s/.shards_a/c_shard_2/o' % shard_2_part] * 3 +
            ['/sda1/%s/a/c/o' % root_part] * 3,
            [req['path'] for req in conn.requests])
        self.assertEqual(
            {'redirects': 4, 'async_pendings': 1},
            daemon.logger.get_increment_counts())
        # update failed, we still have pending file with most recent redirect
        # response Location header value from root added to persisted data
        async_path, async_data = self._check_async_file(async_dir)
        self.assertEqual(orig_async_path, async_path)
        # note: redirect_history was reset when falling back to root
        self.assertEqual(
            dict(orig_async_data, container_path='.shards_a/c_shard_3',
                 redirect_history=['.shards_a/c_shard_3']),
            async_data)

        # next cycle, more redirects! first is to a location visited previously
        # but not since last fall back to root, so that location IS tried;
        # second is to a location visited since last fall back to root so that
        # location is NOT tried
        fake_responses = (
            # 1st round of redirects, 2nd round of redirects
            [(301, resp_headers_1)] * 3 + [(301, resp_headers_3)] * 3)
        fake_status_codes, fake_headers = zip(*fake_responses)
        with mocked_http_conn(
                *fake_status_codes, headers=fake_headers) as conn:
            with mock.patch('swift.obj.updater.dump_recon_cache'):
                daemon.run_once()
        self._check_update_requests(conn.requests, ts_obj, policies[0])
        self.assertEqual(
            ['/sda1/%s/.shards_a/c_shard_3/o' % shard_3_part] * 3 +
            ['/sda1/%s/.shards_a/c_shard_1/o' % shard_1_part] * 3,
            [req['path'] for req in conn.requests])
        self.assertEqual(
            {'redirects': 6, 'async_pendings': 1},
            daemon.logger.get_increment_counts())
        # update failed, we still have pending file, but container_path is None
        # because most recent redirect location was a repeat
        async_path, async_data = self._check_async_file(async_dir)
        self.assertEqual(orig_async_path, async_path)
        self.assertEqual(
            dict(orig_async_data, container_path=None,
                 redirect_history=[]),
            async_data)

        # next cycle, persisted container path is None so update should go to
        # root, this time it succeeds
        fake_responses = [(200, {})] * 3
        fake_status_codes, fake_headers = zip(*fake_responses)
        with mocked_http_conn(
                *fake_status_codes, headers=fake_headers) as conn:
            with mock.patch('swift.obj.updater.dump_recon_cache'):
                daemon.run_once()
        self._check_update_requests(conn.requests, ts_obj, policies[0])
        self.assertEqual(['/sda1/%s/a/c/o' % root_part] * 3,
                         [req['path'] for req in conn.requests])
        self.assertEqual(
            {'redirects': 6, 'successes': 1, 'unlinks': 1,
             'async_pendings': 1},
            daemon.logger.get_increment_counts())
        self.assertFalse(os.listdir(async_dir))  # no async file
Example #30
    def test_obj_put_async_shard_update_redirected_twice(self):
        policies = list(POLICIES)
        random.shuffle(policies)
        # setup updater
        conf = {
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
        }
        daemon = object_updater.ObjectUpdater(conf, logger=self.logger)
        async_dir = os.path.join(self.sda1, get_async_dir(policies[0]))
        os.mkdir(async_dir)
        dfmanager = DiskFileManager(conf, daemon.logger)

        ts_obj = next(self.ts_iter)
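        # the async pending already targets an older shard container, so the
        # first round of updates is sent there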
        self._write_async_update(dfmanager,
                                 ts_obj,
                                 policies[0],
                                 container_path='.shards_a/c_shard_older')
        orig_async_path, orig_async_data = self._check_async_file(async_dir)

        # run once
        ts_redirect_1 = next(self.ts_iter)
        ts_redirect_2 = next(self.ts_iter)
        ts_redirect_3 = next(self.ts_iter)
        fake_responses = [
            # 1st round of redirects, newest redirect should be chosen
            (301, {
                'Location': '/.shards_a/c_shard_old/o',
                'X-Backend-Redirect-Timestamp': ts_redirect_1.internal
            }),
            (301, {
                'Location': '/.shards_a/c_shard_new/o',
                'X-Backend-Redirect-Timestamp': ts_redirect_2.internal
            }),
            (301, {
                'Location': '/.shards_a/c_shard_old/o',
                'X-Backend-Redirect-Timestamp': ts_redirect_1.internal
            }),
            # 2nd round of redirects
            (301, {
                'Location': '/.shards_a/c_shard_newer/o',
                'X-Backend-Redirect-Timestamp': ts_redirect_3.internal
            }),
            (301, {
                'Location': '/.shards_a/c_shard_newer/o',
                'X-Backend-Redirect-Timestamp': ts_redirect_3.internal
            }),
            (301, {
                'Location': '/.shards_a/c_shard_newer/o',
                'X-Backend-Redirect-Timestamp': ts_redirect_3.internal
            }),
        ]
        fake_status_codes, fake_headers = zip(*fake_responses)
        with mocked_http_conn(*fake_status_codes,
                              headers=fake_headers) as conn:
            with mock.patch('swift.obj.updater.dump_recon_cache'):
                daemon.run_once()

        self._check_update_requests(conn.requests, ts_obj, policies[0])
        # only *one* set of redirected requests is attempted per cycle
        older_part = daemon.container_ring.get_part('.shards_a/c_shard_older')
        new_part = daemon.container_ring.get_part('.shards_a/c_shard_new')
        newer_part = daemon.container_ring.get_part('.shards_a/c_shard_newer')
        self.assertEqual(
            ['/sda1/%s/.shards_a/c_shard_older/o' % older_part] * 3 +
            ['/sda1/%s/.shards_a/c_shard_new/o' % new_part] * 3,
            [req['path'] for req in conn.requests])
        self.assertEqual({
            'redirects': 2,
            'async_pendings': 1
        }, daemon.logger.get_increment_counts())
        # update failed, we still have pending file with most recent redirect
        # response Location header value added to data
        async_path, async_data = self._check_async_file(async_dir)
        self.assertEqual(orig_async_path, async_path)
        self.assertEqual(
            dict(orig_async_data,
                 container_path='.shards_a/c_shard_newer',
                 redirect_history=[
                     '.shards_a/c_shard_new', '.shards_a/c_shard_newer'
                 ]), async_data)

        # next cycle, should get latest redirect from pickled async update
        fake_responses = [(200, {})] * 3
        fake_status_codes, fake_headers = zip(*fake_responses)
        with mocked_http_conn(*fake_status_codes,
                              headers=fake_headers) as conn:
            with mock.patch('swift.obj.updater.dump_recon_cache'):
                daemon.run_once()

        self._check_update_requests(conn.requests, ts_obj, policies[0])
        self.assertEqual(['/sda1/%s/.shards_a/c_shard_newer/o' % newer_part] *
                         3, [req['path'] for req in conn.requests])
        self.assertEqual(
            {
                'redirects': 2,
                'successes': 1,
                'unlinks': 1,
                'async_pendings': 1
            }, daemon.logger.get_increment_counts())
        self.assertFalse(os.listdir(async_dir))  # no async file
Example #31
    def test_obj_put_async_shard_update_redirected_twice(self):
        policies = list(POLICIES)
        random.shuffle(policies)
        # setup updater
        conf = {
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
        }
        daemon = object_updater.ObjectUpdater(conf, logger=self.logger)
        async_dir = os.path.join(self.sda1, get_async_dir(policies[0]))
        os.mkdir(async_dir)
        dfmanager = DiskFileManager(conf, daemon.logger)

        ts_obj = next(self.ts_iter)
        self._write_async_update(dfmanager, ts_obj, policies[0],
                                 container_path='.shards_a/c_shard_older')
        orig_async_path, orig_async_data = self._check_async_file(async_dir)

        # run once
        ts_redirect_1 = next(self.ts_iter)
        ts_redirect_2 = next(self.ts_iter)
        ts_redirect_3 = next(self.ts_iter)
        fake_responses = [
            # 1st round of redirects, newest redirect should be chosen
            (301, {'Location': '/.shards_a/c_shard_old/o',
                   'X-Backend-Redirect-Timestamp': ts_redirect_1.internal}),
            (301, {'Location': '/.shards_a/c_shard_new/o',
                   'X-Backend-Redirect-Timestamp': ts_redirect_2.internal}),
            (301, {'Location': '/.shards_a/c_shard_old/o',
                   'X-Backend-Redirect-Timestamp': ts_redirect_1.internal}),
            # 2nd round of redirects
            (301, {'Location': '/.shards_a/c_shard_newer/o',
                   'X-Backend-Redirect-Timestamp': ts_redirect_3.internal}),
            (301, {'Location': '/.shards_a/c_shard_newer/o',
                   'X-Backend-Redirect-Timestamp': ts_redirect_3.internal}),
            (301, {'Location': '/.shards_a/c_shard_newer/o',
                   'X-Backend-Redirect-Timestamp': ts_redirect_3.internal}),
        ]
        fake_status_codes, fake_headers = zip(*fake_responses)
        with mocked_http_conn(
                *fake_status_codes, headers=fake_headers) as conn:
            with mock.patch('swift.obj.updater.dump_recon_cache'):
                daemon.run_once()

        self._check_update_requests(conn.requests, ts_obj, policies[0])
        # only *one* set of redirected requests is attempted per cycle
        older_part = daemon.container_ring.get_part('.shards_a/c_shard_older')
        new_part = daemon.container_ring.get_part('.shards_a/c_shard_new')
        newer_part = daemon.container_ring.get_part('.shards_a/c_shard_newer')
        self.assertEqual(
            ['/sda1/%s/.shards_a/c_shard_older/o' % older_part] * 3 +
            ['/sda1/%s/.shards_a/c_shard_new/o' % new_part] * 3,
            [req['path'] for req in conn.requests])
        self.assertEqual(
            {'redirects': 2, 'async_pendings': 1},
            daemon.logger.get_increment_counts())
        # update failed, we still have pending file with most recent redirect
        # response Location header value added to data
        async_path, async_data = self._check_async_file(async_dir)
        self.assertEqual(orig_async_path, async_path)
        self.assertEqual(
            dict(orig_async_data, container_path='.shards_a/c_shard_newer',
                 redirect_history=['.shards_a/c_shard_new',
                                   '.shards_a/c_shard_newer']),
            async_data)

        # next cycle, should get latest redirect from pickled async update
        fake_responses = [(200, {})] * 3
        fake_status_codes, fake_headers = zip(*fake_responses)
        with mocked_http_conn(
                *fake_status_codes, headers=fake_headers) as conn:
            with mock.patch('swift.obj.updater.dump_recon_cache'):
                daemon.run_once()

        self._check_update_requests(conn.requests, ts_obj, policies[0])
        self.assertEqual(
            ['/sda1/%s/.shards_a/c_shard_newer/o' % newer_part] * 3,
            [req['path'] for req in conn.requests])
        self.assertEqual(
            {'redirects': 2, 'successes': 1, 'unlinks': 1,
             'async_pendings': 1},
            daemon.logger.get_increment_counts())
        self.assertFalse(os.listdir(async_dir))  # no async file
Example #32
    def test_obj_put_async_update_redirection_loop(self):
        policies = list(POLICIES)
        random.shuffle(policies)
        # setup updater
        conf = {
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
        }
        daemon = object_updater.ObjectUpdater(conf, logger=self.logger)
        async_dir = os.path.join(self.sda1, get_async_dir(policies[0]))
        os.mkdir(async_dir)
        dfmanager = DiskFileManager(conf, daemon.logger)

        ts_obj = next(self.ts_iter)
        self._write_async_update(dfmanager, ts_obj, policies[0])
        orig_async_path, orig_async_data = self._check_async_file(async_dir)

        # run once
        ts_redirect = next(self.ts_iter)

        resp_headers_1 = {
            'Location': '/.shards_a/c_shard_1/o',
            'X-Backend-Redirect-Timestamp': ts_redirect.internal
        }
        resp_headers_2 = {
            'Location': '/.shards_a/c_shard_2/o',
            'X-Backend-Redirect-Timestamp': ts_redirect.internal
        }
        fake_responses = (
            # 1st round of redirects, 2nd round of redirects
            [(301, resp_headers_1)] * 3 + [(301, resp_headers_2)] * 3)
        fake_status_codes, fake_headers = zip(*fake_responses)
        with mocked_http_conn(*fake_status_codes,
                              headers=fake_headers) as conn:
            with mock.patch('swift.obj.updater.dump_recon_cache'):
                daemon.run_once()
        self._check_update_requests(conn.requests[:3], ts_obj, policies[0])
        self._check_update_requests(conn.requests[3:], ts_obj, policies[0])
        # only *one* set of redirected requests is attempted per cycle
        root_part = daemon.container_ring.get_part('a/c')
        shard_1_part = daemon.container_ring.get_part('.shards_a/c_shard_1')
        shard_2_part = daemon.container_ring.get_part('.shards_a/c_shard_2')
        shard_3_part = daemon.container_ring.get_part('.shards_a/c_shard_3')
        self.assertEqual(['/sda1/%s/a/c/o' % root_part] * 3 +
                         ['/sda1/%s/.shards_a/c_shard_1/o' % shard_1_part] * 3,
                         [req['path'] for req in conn.requests])
        self.assertEqual({
            'redirects': 2,
            'async_pendings': 1
        }, daemon.logger.get_increment_counts())
        # update failed, we still have pending file with most recent redirect
        # response Location header value added to data
        async_path, async_data = self._check_async_file(async_dir)
        self.assertEqual(orig_async_path, async_path)
        self.assertEqual(
            dict(orig_async_data,
                 container_path='.shards_a/c_shard_2',
                 redirect_history=[
                     '.shards_a/c_shard_1', '.shards_a/c_shard_2'
                 ]), async_data)

        # next cycle, more redirects! first is to previously visited location
        resp_headers_3 = {
            'Location': '/.shards_a/c_shard_3/o',
            'X-Backend-Redirect-Timestamp': ts_redirect.internal
        }
        fake_responses = (
            # 1st round of redirects, 2nd round of redirects
            [(301, resp_headers_1)] * 3 + [(301, resp_headers_3)] * 3)
        fake_status_codes, fake_headers = zip(*fake_responses)
        with mocked_http_conn(*fake_status_codes,
                              headers=fake_headers) as conn:
            with mock.patch('swift.obj.updater.dump_recon_cache'):
                daemon.run_once()
        self._check_update_requests(conn.requests[:3], ts_obj, policies[0])
        self._check_update_requests(conn.requests[3:], ts_obj, policies[0])
        # first try the previously persisted container path, response to that
        # creates a loop so ignore and send to root
        self.assertEqual(
            ['/sda1/%s/.shards_a/c_shard_2/o' % shard_2_part] * 3 +
            ['/sda1/%s/a/c/o' % root_part] * 3,
            [req['path'] for req in conn.requests])
        self.assertEqual({
            'redirects': 4,
            'async_pendings': 1
        }, daemon.logger.get_increment_counts())
        # update failed, we still have pending file with most recent redirect
        # response Location header value from root added to persisted data
        async_path, async_data = self._check_async_file(async_dir)
        self.assertEqual(orig_async_path, async_path)
        # note: redirect_history was reset when falling back to root
        self.assertEqual(
            dict(orig_async_data,
                 container_path='.shards_a/c_shard_3',
                 redirect_history=['.shards_a/c_shard_3']), async_data)

        # next cycle, more redirects! first is to a location visited previously
        # but not since last fall back to root, so that location IS tried;
        # second is to a location visited since last fall back to root so that
        # location is NOT tried
        fake_responses = (
            # 1st round of redirects, 2nd round of redirects
            [(301, resp_headers_1)] * 3 + [(301, resp_headers_3)] * 3)
        fake_status_codes, fake_headers = zip(*fake_responses)
        with mocked_http_conn(*fake_status_codes,
                              headers=fake_headers) as conn:
            with mock.patch('swift.obj.updater.dump_recon_cache'):
                daemon.run_once()
        self._check_update_requests(conn.requests, ts_obj, policies[0])
        self.assertEqual(
            ['/sda1/%s/.shards_a/c_shard_3/o' % shard_3_part] * 3 +
            ['/sda1/%s/.shards_a/c_shard_1/o' % shard_1_part] * 3,
            [req['path'] for req in conn.requests])
        self.assertEqual({
            'redirects': 6,
            'async_pendings': 1
        }, daemon.logger.get_increment_counts())
        # update failed, we still have pending file, but container_path is None
        # because most recent redirect location was a repeat
        async_path, async_data = self._check_async_file(async_dir)
        self.assertEqual(orig_async_path, async_path)
        self.assertEqual(
            dict(orig_async_data, container_path=None, redirect_history=[]),
            async_data)

        # next cycle, persisted container path is None so update should go to
        # root, this time it succeeds
        fake_responses = [(200, {})] * 3
        fake_status_codes, fake_headers = zip(*fake_responses)
        with mocked_http_conn(*fake_status_codes,
                              headers=fake_headers) as conn:
            with mock.patch('swift.obj.updater.dump_recon_cache'):
                daemon.run_once()
        self._check_update_requests(conn.requests, ts_obj, policies[0])
        self.assertEqual(['/sda1/%s/a/c/o' % root_part] * 3,
                         [req['path'] for req in conn.requests])
        self.assertEqual(
            {
                'redirects': 6,
                'successes': 1,
                'unlinks': 1,
                'async_pendings': 1
            }, daemon.logger.get_increment_counts())
        self.assertFalse(os.listdir(async_dir))  # no async file
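The examples above all follow the same pattern. The sketch below distills it as a minimal illustration; it is not taken verbatim from any one test. The import paths and the example_usage/app names are assumptions, while the context-manager behaviour and the structure of fake_conn.requests are inferred from the assertions in the examples rather than from a verified API reference.

    # Hedged sketch: assumes mocked_http_conn comes from the Swift test
    # helpers (test/unit/__init__.py), as the tests above appear to use it.
    from swift.common.swob import Request
    from test.unit import mocked_http_conn


    def example_usage(app):
        # each positional argument is one canned backend response status,
        # consumed in the order the app opens backend connections
        with mocked_http_conn(200, 404, 404) as fake_conn:
            req = Request.blank('/v1/a/c')
            resp = req.get_response(app)
        # captured backend requests are dicts with 'method', 'path',
        # 'headers' and 'qs' keys, which the assertions above rely on
        for backend_req in fake_conn.requests:
            print(backend_req['method'], backend_req['path'])
        return resp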