Example #1
0
    def test_get_shard_ranges_for_object_put(self):
        """An object PUT triggers an account HEAD then a container GET with
        ``includes=<object>``; the matching shard range is returned."""
        ts = make_timestamp_iter()
        shard_dicts = [
            dict(ShardRange('.sharded_a/sr%d' % i, next(ts),
                            '%d_lower' % i, '%d_upper' % i,
                            object_count=i, bytes_used=1024 * i,
                            meta_timestamp=next(ts)))
            for i in range(3)]
        controller = Controller(self.app)
        req = Request.blank('/v1/a/c/o', method='PUT')
        bodies = [b'', json.dumps(shard_dicts[1:2]).encode('ascii')]
        with mocked_http_conn(
                200, 200, body_iter=iter(bodies),
                headers={'X-Backend-Record-Type': 'shard'}) as fake_conn:
            actual = controller._get_shard_ranges(req, 'a', 'c', '1_test')

        captured = fake_conn.requests
        # first backend request fetches account info
        self.assertEqual('HEAD', captured[0]['method'])
        self.assertEqual('a', captured[0]['path'][7:])
        # second backend request is the container GET for shard records
        self.assertEqual('GET', captured[1]['method'])
        self.assertEqual('a/c', captured[1]['path'][7:])
        self.assertEqual(['format=json', 'includes=1_test'],
                         sorted(captured[1]['qs'].split('&')))
        self.assertEqual(
            'shard', captured[1]['headers'].get('X-Backend-Record-Type'))
        self.assertEqual(shard_dicts[1:2], [dict(sr) for sr in actual])
        self.assertFalse(self.app.logger.get_lines_for_level('error'))
Example #2
0
 def setUp(self):
     """Build a temp dir holding a container ring and one device (sda1)."""
     utils.HASH_PATH_SUFFIX = 'endcap'
     utils.HASH_PATH_PREFIX = ''
     self.testdir = mkdtemp()
     ring_path = os.path.join(self.testdir, 'container.ring.gz')
     replica2part2dev = [[0, 1, 2, 0, 1, 2],
                         [1, 2, 0, 1, 2, 0],
                         [2, 3, 1, 2, 3, 1]]
     # four devices on the same ip/port/disk, zones 0, 2, 4, 6
     devs = [{'id': dev_id, 'ip': '127.0.0.1', 'port': 1,
              'device': 'sda1', 'zone': 2 * dev_id}
             for dev_id in range(4)]
     with closing(GzipFile(ring_path, 'wb')) as fp:
         pickle.dump(RingData(replica2part2dev, devs, 30), fp)
     self.devices_dir = os.path.join(self.testdir, 'devices')
     os.mkdir(self.devices_dir)
     self.sda1 = os.path.join(self.devices_dir, 'sda1')
     os.mkdir(self.sda1)
     # each storage policy needs its tmp dir on the device
     for policy in POLICIES:
         os.mkdir(os.path.join(self.sda1, get_tmp_dir(policy)))
     self.logger = debug_logger()
     self.ts_iter = make_timestamp_iter()
Example #3
0
 def setUp(self):
     """Register backend responses: Key1 exists, Key2 does not."""
     super(TestS3ApiMultiDelete, self).setUp()
     for key, resp in (('Key1', swob.HTTPOk),
                       ('Key2', swob.HTTPNotFound)):
         self.swift.register('HEAD', '/v1/AUTH_test/bucket/' + key,
                             resp, {}, None)
     self.ts = make_timestamp_iter()
Example #4
0
    def setUp(self):
        """Stub out internal_client, seed a fake task queue, and build an
        ObjectExpirer against it."""
        global not_sleep

        self.old_loadapp = internal_client.loadapp
        self.old_sleep = internal_client.sleep

        internal_client.loadapp = lambda *a, **kw: None
        internal_client.sleep = not_sleep

        self.rcache = mkdtemp()
        self.conf = {'recon_cache_path': self.rcache}
        self.logger = debug_logger('test-expirer')

        self.ts = make_timestamp_iter()
        self.past_time = str(int(time() - 86400))
        self.future_time = str(int(time() + 86400))
        # tasks stamped in the past are ready for execution...
        ready_tasks = ['%s-a%d/c%d/o%d' % (self.past_time, i, i, i)
                       for i in range(8)]
        # ...including two task objects for the unicode test
        ready_tasks.append(self.past_time + u'-a8/c8/o8\u2661')
        ready_tasks.append(self.past_time + u'-a9/c9/o9\xf8')
        # a future-stamped task in the checked container must be skipped
        ready_tasks.append(self.future_time + '-a10/c10/o10')
        # Dummy task queue for test
        self.fake_swift = FakeInternalClient({
            '.expiring_objects': {
                # this task container will be checked
                self.past_time: ready_tasks,
                # this task container will be skipped
                self.future_time: [self.future_time + '-a11/c11/o11']
            }
        })
        self.expirer = expirer.ObjectExpirer(self.conf,
                                             logger=self.logger,
                                             swift=self.fake_swift)

        # target object paths which should be expired now
        wsgi_targets = ['a%d/c%d/o%d' % (i, i, i) for i in range(8)]
        wsgi_targets.append('a8/c8/o8\xe2\x99\xa1')
        wsgi_targets.append('a9/c9/o9\xc3\xb8')
        self.expired_target_path_list = [
            swob.wsgi_to_str(tgt) for tgt in wsgi_targets
        ]
Example #5
0
 def test_with_only_tombstone(self):
     """Sanity check: the auditor leaves a solitary tombstone untouched."""
     ts_iter = make_timestamp_iter()
     self.setup_bad_zero_byte(timestamp=next(ts_iter))
     self.disk_file.delete(next(ts_iter))
     listing_before = os.listdir(self.disk_file._datadir)
     self.assertEqual(1, len(listing_before))
     self.assertTrue(listing_before[0].endswith('ts'))
     self.auditor.run_audit(mode='once')
     listing_after = os.listdir(self.disk_file._datadir)
     self.assertEqual(listing_before, listing_after)
Example #6
0
 def test_with_only_tombstone(self):
     # sanity: a lone tombstone should survive an audit pass unchanged
     timestamps = make_timestamp_iter()
     self.setup_bad_zero_byte(timestamp=next(timestamps))
     self.disk_file.delete(next(timestamps))
     before = os.listdir(self.disk_file._datadir)
     self.assertEqual(len(before), 1)
     self.assertTrue(before[0].endswith('ts'))
     self.auditor.run_audit(mode='once')
     after = os.listdir(self.disk_file._datadir)
     self.assertEqual(before, after)
Example #7
0
    def setUp(self):
        """Patch internal_client, populate a dummy expirer task queue, and
        create the ObjectExpirer under test."""
        global not_sleep

        self.old_loadapp = internal_client.loadapp
        self.old_sleep = internal_client.sleep

        internal_client.loadapp = lambda *a, **kw: None
        internal_client.sleep = not_sleep

        self.rcache = mkdtemp()
        self.conf = {'recon_cache_path': self.rcache}
        self.logger = debug_logger('test-expirer')

        self.ts = make_timestamp_iter()
        self.past_time = str(int(time() - 86400))
        self.future_time = str(int(time() + 86400))
        # Dummy task queue for test
        self.fake_swift = FakeInternalClient({
            '.expiring_objects': {
                # only the past-stamped container is eligible for processing
                self.past_time: (
                    # tasks ready for execution
                    ['%s-a%d/c%d/o%d' % (self.past_time, i, i, i)
                     for i in range(8)] +
                    # task objects for unicode test
                    [self.past_time + u'-a8/c8/o8\u2661',
                     self.past_time + u'-a9/c9/o9\xf8',
                     # stamped in the future, so it will be skipped
                     self.future_time + '-a10/c10/o10']),
                # this whole task container will be skipped
                self.future_time: [
                    self.future_time + '-a11/c11/o11']}
        })
        self.expirer = expirer.ObjectExpirer(self.conf, logger=self.logger,
                                             swift=self.fake_swift)

        # target object paths which should be expired now (WSGI strings)
        wsgi_targets = ['a%d/c%d/o%d' % (i, i, i) for i in range(8)]
        wsgi_targets += ['a8/c8/o8\xe2\x99\xa1', 'a9/c9/o9\xc3\xb8']
        self.expired_target_path_list = [
            swob.wsgi_to_str(tgt) for tgt in wsgi_targets]
Example #8
0
 def test_with_tombstone_and_data(self):
     # rsync replication could leave a tombstone and data file in object
     # dir - verify they are both removed during audit
     ts_iter = make_timestamp_iter()
     # use the builtin next(); iterator.next() is Python-2-only and raises
     # AttributeError on py3
     ts_tomb = next(ts_iter)
     ts_data = next(ts_iter)
     self.setup_bad_zero_byte(timestamp=ts_data)
     tomb_file_path = os.path.join(self.disk_file._datadir,
                                   '%s.ts' % ts_tomb.internal)
     with open(tomb_file_path, 'wb') as fd:
         write_metadata(fd, {'X-Timestamp': ts_tomb.internal})
     # sanity: both the data file and the tombstone are present
     files = os.listdir(self.disk_file._datadir)
     self.assertEqual(2, len(files))
     self.assertTrue(os.path.basename(tomb_file_path) in files, files)
     kwargs = {'mode': 'once'}
     self.auditor.run_audit(**kwargs)
     # the audit should have removed the whole object dir
     self.assertFalse(os.path.exists(self.disk_file._datadir))
Example #9
0
 def test_with_tombstone_and_data(self):
     """Audit removes the whole object dir when a tombstone sits beside a
     data file (as rsync replication can leave behind)."""
     timestamps = make_timestamp_iter()
     ts_tomb, ts_data = next(timestamps), next(timestamps)
     self.setup_bad_zero_byte(timestamp=ts_data)
     tomb_name = '%s.ts' % ts_tomb.internal
     tomb_file_path = os.path.join(self.disk_file._datadir, tomb_name)
     with open(tomb_file_path, 'wb') as fd:
         write_metadata(fd, {'X-Timestamp': ts_tomb.internal})
     listing = os.listdir(self.disk_file._datadir)
     self.assertEqual(2, len(listing))
     self.assertTrue(tomb_name in listing, listing)
     self.auditor.run_audit(mode='once')
     self.assertFalse(os.path.exists(self.disk_file._datadir))
 def test_sync_remote_with_timings(self):
     """Replicating an already-in-sync container emits one rpc timing
     debug line per expected metric."""
     ts_iter = make_timestamp_iter()
     # setup a local container
     broker = self._get_broker('a', 'c', node_index=0)
     # builtin next() instead of py2-only iterator.next()
     put_timestamp = next(ts_iter)
     broker.initialize(put_timestamp.internal, POLICIES.default.idx)
     broker.update_metadata(
         {'x-container-meta-test': ('foo', put_timestamp.internal)})
     # setup remote container
     remote_broker = self._get_broker('a', 'c', node_index=1)
     remote_broker.initialize(next(ts_iter).internal, POLICIES.default.idx)
     timestamp = next(ts_iter)
     for db in (broker, remote_broker):
         db.put_object('/a/c/o',
                       timestamp.internal,
                       0,
                       'content-type',
                       'etag',
                       storage_policy_index=db.storage_policy_index)
     # replicate
     daemon = replicator.ContainerReplicator({})
     part, node = self._get_broker_part_node(remote_broker)
     info = broker.get_replication_info()
     with mock.patch.object(db_replicator, 'DEBUG_TIMINGS_THRESHOLD', -1):
         success = daemon._repl_to_node(node, broker, part, info)
     # nothing to do
     self.assertTrue(success)
     self.assertEqual(1, daemon.stats['no_change'])
     expected_timings = ('info', 'update_metadata', 'merge_timestamps',
                         'get_sync', 'merge_syncs')
     debug_lines = self.rpc.logger.logger.get_lines_for_level('debug')
     self.assertEqual(
         len(expected_timings), len(debug_lines),
         'Expected %s debug lines but only got %s: %s' %
         (len(expected_timings), len(debug_lines), debug_lines))
     for metric in expected_timings:
         expected = 'replicator-rpc-sync time for %s:' % metric
         # assertTrue: assert_ is a deprecated alias removed in py3.12
         self.assertTrue(
             any(expected in line for line in debug_lines),
             'debug timing %r was not in %r' % (expected, debug_lines))
Example #11
0
 def test_sync_remote_with_timings(self):
     """No-change replication should log an rpc timing debug line for
     each expected metric."""
     ts_iter = make_timestamp_iter()
     # setup a local container
     broker = self._get_broker('a', 'c', node_index=0)
     # builtin next() instead of py2-only iterator.next()
     put_timestamp = next(ts_iter)
     broker.initialize(put_timestamp.internal, POLICIES.default.idx)
     broker.update_metadata(
         {'x-container-meta-test': ('foo', put_timestamp.internal)})
     # setup remote container
     remote_broker = self._get_broker('a', 'c', node_index=1)
     remote_broker.initialize(next(ts_iter).internal, POLICIES.default.idx)
     timestamp = next(ts_iter)
     for db in (broker, remote_broker):
         db.put_object(
             '/a/c/o', timestamp.internal, 0, 'content-type', 'etag',
             storage_policy_index=db.storage_policy_index)
     # replicate
     daemon = replicator.ContainerReplicator({})
     part, node = self._get_broker_part_node(remote_broker)
     info = broker.get_replication_info()
     with mock.patch.object(db_replicator, 'DEBUG_TIMINGS_THRESHOLD', -1):
         success = daemon._repl_to_node(node, broker, part, info)
     # nothing to do
     self.assertTrue(success)
     self.assertEqual(1, daemon.stats['no_change'])
     expected_timings = ('info', 'update_metadata', 'merge_timestamps',
                         'get_sync', 'merge_syncs')
     debug_lines = self.rpc.logger.logger.get_lines_for_level('debug')
     self.assertEqual(len(expected_timings), len(debug_lines),
                      'Expected %s debug lines but only got %s: %s' %
                      (len(expected_timings), len(debug_lines),
                       debug_lines))
     for metric in expected_timings:
         expected = 'replicator-rpc-sync time for %s:' % metric
         # assertTrue: assert_ is a deprecated alias removed in py3.12
         self.assertTrue(any(expected in line for line in debug_lines),
                         'debug timing %r was not in %r' % (
                             expected, debug_lines))
Example #12
0
    def test_obj_put_async_updates(self):
        """Drain async_pending updates and verify the container PUT headers.

        For each scenario, one async update is written, the updater runs
        once against a mocked container connection, and every captured PUT
        must carry exactly the ``expected`` headers.
        """
        ts_iter = make_timestamp_iter()
        policies = list(POLICIES)
        # shuffle so the test does not depend on a particular policy order
        random.shuffle(policies)

        # setup updater
        conf = {
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
        }
        daemon = object_updater.ObjectUpdater(conf, logger=self.logger)
        async_dir = os.path.join(self.sda1, get_async_dir(policies[0]))
        os.mkdir(async_dir)

        def do_test(headers_out, expected, container_path=None):
            # Write one async_pending with headers_out, run the updater
            # once, and assert each captured PUT sent ``expected`` headers.
            # write an async
            dfmanager = DiskFileManager(conf, daemon.logger)
            self._write_async_update(dfmanager,
                                     next(ts_iter),
                                     policies[0],
                                     headers=headers_out,
                                     container_path=container_path)
            request_log = []

            def capture(*args, **kwargs):
                # record every backend request the updater makes
                request_log.append((args, kwargs))

            # run once
            fake_status_codes = [
                200,  # object update success
                200,  # object update success
                200,  # object update success — NOTE(review): comment
                      # previously said 'conflict' but the status is 200
            ]
            with mocked_http_conn(*fake_status_codes, give_connect=capture):
                daemon.run_once()
            self.assertEqual(len(fake_status_codes), len(request_log))
            for request_args, request_kwargs in request_log:
                ip, part, method, path, headers, qs, ssl = request_args
                self.assertEqual(method, 'PUT')
                self.assertDictEqual(expected, headers)
            # one async found, sent successfully, then unlinked
            self.assertEqual(daemon.logger.get_increment_counts(), {
                'successes': 1,
                'unlinks': 1,
                'async_pendings': 1
            })
            self.assertFalse(os.listdir(async_dir))
            daemon.logger.clear()

        ts = next(ts_iter)
        # use a dict rather than HeaderKeyDict so we can vary the case of the
        # pickled headers
        headers_out = {
            'x-size': 0,
            'x-content-type': 'text/plain',
            'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
            'x-timestamp': ts.normal,
            'X-Backend-Storage-Policy-Index': int(policies[0]),
            'User-Agent': 'object-server %s' % os.getpid()
        }
        # headers the updater must send: canonical case, stringified
        # values, its own User-Agent, plus the accept-redirect flag
        expected = {
            'X-Size': '0',
            'X-Content-Type': 'text/plain',
            'X-Etag': 'd41d8cd98f00b204e9800998ecf8427e',
            'X-Timestamp': ts.normal,
            'X-Backend-Storage-Policy-Index': str(int(policies[0])),
            'User-Agent': 'object-updater %s' % os.getpid(),
            'X-Backend-Accept-Redirect': 'true',
        }
        # always expect X-Backend-Accept-Redirect to be true
        do_test(headers_out, expected, container_path='.shards_a/shard_c')
        do_test(headers_out, expected)

        # ...unless X-Backend-Accept-Redirect is already set
        expected['X-Backend-Accept-Redirect'] = 'false'
        headers_out_2 = dict(headers_out)
        headers_out_2['X-Backend-Accept-Redirect'] = 'false'
        do_test(headers_out_2, expected)

        # updater should add policy header if missing
        expected['X-Backend-Accept-Redirect'] = 'true'
        headers_out['X-Backend-Storage-Policy-Index'] = None
        do_test(headers_out, expected)

        # updater should not overwrite a mismatched policy header
        headers_out['X-Backend-Storage-Policy-Index'] = int(policies[1])
        expected['X-Backend-Storage-Policy-Index'] = str(int(policies[1]))
        do_test(headers_out, expected)

        # check for case insensitivity
        headers_out['user-agent'] = headers_out.pop('User-Agent')
        headers_out['x-backend-storage-policy-index'] = headers_out.pop(
            'X-Backend-Storage-Policy-Index')
        do_test(headers_out, expected)
Example #13
0
    def test_obj_put_async_updates(self):
        """Check the headers the object updater sends when draining asyncs.

        Each scenario writes a single async_pending, runs the updater once
        with a mocked backend, and asserts the exact PUT headers.
        """
        ts_iter = make_timestamp_iter()
        policies = list(POLICIES)
        # randomize policy order so the test is policy-order independent
        random.shuffle(policies)

        # setup updater
        conf = {
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
        }
        daemon = object_updater.ObjectUpdater(conf, logger=self.logger)
        async_dir = os.path.join(self.sda1, get_async_dir(policies[0]))
        os.mkdir(async_dir)

        def do_test(headers_out, expected, container_path=None):
            # Write one async with headers_out, run the updater once, and
            # require that every captured PUT carried ``expected`` headers.
            # write an async
            dfmanager = DiskFileManager(conf, daemon.logger)
            self._write_async_update(dfmanager, next(ts_iter), policies[0],
                                     headers=headers_out,
                                     container_path=container_path)
            request_log = []

            def capture(*args, **kwargs):
                # collect every backend request made during run_once
                request_log.append((args, kwargs))

            # run once
            fake_status_codes = [
                200,  # object update success
                200,  # object update success
                200,  # object update success — NOTE(review): original
                      # comment said 'conflict', yet the status is 200
            ]
            with mocked_http_conn(*fake_status_codes, give_connect=capture):
                daemon.run_once()
            self.assertEqual(len(fake_status_codes), len(request_log))
            for request_args, request_kwargs in request_log:
                ip, part, method, path, headers, qs, ssl = request_args
                self.assertEqual(method, 'PUT')
                self.assertDictEqual(expected, headers)
            # the single async was processed, counted, and unlinked
            self.assertEqual(
                daemon.logger.get_increment_counts(),
                {'successes': 1, 'unlinks': 1, 'async_pendings': 1})
            self.assertFalse(os.listdir(async_dir))
            daemon.logger.clear()

        ts = next(ts_iter)
        # use a dict rather than HeaderKeyDict so we can vary the case of the
        # pickled headers
        headers_out = {
            'x-size': 0,
            'x-content-type': 'text/plain',
            'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
            'x-timestamp': ts.normal,
            'X-Backend-Storage-Policy-Index': int(policies[0]),
            'User-Agent': 'object-server %s' % os.getpid()
        }
        # the updater is expected to normalize header case, stringify
        # values, substitute its own User-Agent, and add accept-redirect
        expected = {
            'X-Size': '0',
            'X-Content-Type': 'text/plain',
            'X-Etag': 'd41d8cd98f00b204e9800998ecf8427e',
            'X-Timestamp': ts.normal,
            'X-Backend-Storage-Policy-Index': str(int(policies[0])),
            'User-Agent': 'object-updater %s' % os.getpid(),
            'X-Backend-Accept-Redirect': 'true',
        }
        # always expect X-Backend-Accept-Redirect to be true
        do_test(headers_out, expected, container_path='.shards_a/shard_c')
        do_test(headers_out, expected)

        # ...unless X-Backend-Accept-Redirect is already set
        expected['X-Backend-Accept-Redirect'] = 'false'
        headers_out_2 = dict(headers_out)
        headers_out_2['X-Backend-Accept-Redirect'] = 'false'
        do_test(headers_out_2, expected)

        # updater should add policy header if missing
        expected['X-Backend-Accept-Redirect'] = 'true'
        headers_out['X-Backend-Storage-Policy-Index'] = None
        do_test(headers_out, expected)

        # updater should not overwrite a mismatched policy header
        headers_out['X-Backend-Storage-Policy-Index'] = int(policies[1])
        expected['X-Backend-Storage-Policy-Index'] = str(int(policies[1]))
        do_test(headers_out, expected)

        # check for case insensitivity
        headers_out['user-agent'] = headers_out.pop('User-Agent')
        headers_out['x-backend-storage-policy-index'] = headers_out.pop(
            'X-Backend-Storage-Policy-Index')
        do_test(headers_out, expected)
Example #14
0
 def setUp(self):
     # give every test a fresh source of monotonically increasing
     # timestamps
     self.ts = make_timestamp_iter()