예제 #1
0
    def test_two_realms_and_change_a_default(self):
        fname = 'container-sync-realms.conf'
        fcontents = '''
[DEFAULT]
mtime_check_interval = 60

[US]
key = 9ff3b71c849749dbaec4ccdd3cbab62b
cluster_dfw1 = http://dfw1.host/v1/

[UK]
key = e9569809dc8b4951accc1487aa788012
key2 = f6351bd1cc36413baa43f7ba1b45e51d
cluster_lon3 = http://lon3.host/v1/
'''
        with temptree([fname], [fcontents]) as tempdir:
            logger = FakeLogger()
            fpath = os.path.join(tempdir, fname)
            csr = ContainerSyncRealms(fpath, logger)
            self.assertEqual(logger.all_log_lines(), {})
            self.assertEqual(csr.mtime_check_interval, 60)
            self.assertEqual(sorted(csr.realms()), ['UK', 'US'])
            self.assertEqual(csr.key('US'), '9ff3b71c849749dbaec4ccdd3cbab62b')
            self.assertEqual(csr.key2('US'), None)
            self.assertEqual(csr.clusters('US'), ['DFW1'])
            self.assertEqual(
                csr.endpoint('US', 'DFW1'), 'http://dfw1.host/v1/')
            self.assertEqual(csr.key('UK'), 'e9569809dc8b4951accc1487aa788012')
            self.assertEqual(
                csr.key2('UK'), 'f6351bd1cc36413baa43f7ba1b45e51d')
            self.assertEqual(csr.clusters('UK'), ['LON3'])
            self.assertEqual(
                csr.endpoint('UK', 'LON3'), 'http://lon3.host/v1/')
예제 #2
0
 def test_empty(self):
     """An empty config yields defaults: no realms, 300s check interval."""
     conf_name = 'container-sync-realms.conf'
     with temptree([conf_name], ['']) as tdir:
         log = FakeLogger()
         conf_path = os.path.join(tdir, conf_name)
         realms = ContainerSyncRealms(conf_path, log)
         # Nothing configured, nothing to complain about.
         self.assertEqual(log.all_log_lines(), {})
         self.assertEqual(realms.mtime_check_interval, 300)
         self.assertEqual(realms.realms(), [])
예제 #3
0
 def test_no_file_there(self):
     """A missing config file is reported at debug level; defaults apply."""
     missing = uuid.uuid4().hex
     log = FakeLogger()
     realms = ContainerSyncRealms(missing, log)
     # The failed load is only a debug message, not an error.
     self.assertEqual(
         log.all_log_lines(),
         {'debug': [
             "Could not load '%s': [Errno 2] No such file or directory: "
             "'%s'" % (missing, missing)]})
     self.assertEqual(realms.mtime_check_interval, 300)
     self.assertEqual(realms.realms(), [])
예제 #4
0
    def test_object_run_logging(self):
        """Audit-run banner lines must name the audit mode and device.

        The original assertions used ``str.index``, which returns 0 (falsy)
        when the substring sits at the start of the line and raises
        ValueError when it is absent -- so ``assertTrue`` could fail
        spuriously and never gave a useful failure message.  ``assertIn``
        states the intent directly.
        """
        logger = FakeLogger()
        auditor_worker = auditor.AuditorWorker(self.conf, logger, self.rcache,
                                               self.devices)
        auditor_worker.audit_all_objects(device_dirs=["sda"])
        log_lines = logger.get_lines_for_level("info")
        self.assertTrue(len(log_lines) > 0)
        self.assertIn("ALL - parallel, sda", log_lines[0])

        # Zero-byte-only mode announces its own distinct banner.
        logger = FakeLogger()
        auditor_worker = auditor.AuditorWorker(self.conf, logger, self.rcache,
                                               self.devices,
                                               zero_byte_only_at_fps=50)
        auditor_worker.audit_all_objects(device_dirs=["sda"])
        log_lines = logger.get_lines_for_level("info")
        self.assertTrue(len(log_lines) > 0)
        self.assertIn("ZBF - sda", log_lines[0])
예제 #5
0
 def test_error_parsing(self):
     """Unparseable config contents are reported at the error log level."""
     conf_name = 'container-sync-realms.conf'
     with temptree([conf_name], ['invalid']) as tdir:
         log = FakeLogger()
         conf_path = os.path.join(tdir, conf_name)
         realms = ContainerSyncRealms(conf_path, log)
         self.assertEqual(
             log.all_log_lines(),
             {'error': [
                 "Could not load '%s': File contains no section headers.\n"
                 "file: %s, line: 1\n"
                 "'invalid'" % (conf_path, conf_path)]})
         # A bad config falls back to the defaults.
         self.assertEqual(realms.mtime_check_interval, 300)
         self.assertEqual(realms.realms(), [])
예제 #6
0
    def test_sweep_logs_multiple_policies(self):
        """One object_sweep over async pendings from several storage policies
        must emit exactly one "sweep complete" info line with summed stats.
        """
        # Lay down three async-pending pickles per mocked policy, all under
        # the same 'abc' suffix prefix.
        for policy in _mocked_policies:
            asyncdir = os.path.join(self.sda1, get_async_dir(policy.idx))
            prefix_dir = os.path.join(asyncdir, 'abc')
            mkpath(prefix_dir)

            for o, t in [('abc', 123), ('def', 234), ('ghi', 345)]:
                ohash = hash_path('account', 'container%d' % policy.idx, o)
                o_path = os.path.join(prefix_dir, ohash + '-' +
                                      normalize_timestamp(t))
                write_pickle({}, o_path)

        class MockObjectUpdater(object_updater.ObjectUpdater):
            # Treat every update as an immediate success instead of talking
            # to real container servers.
            def process_object_update(self, update_path, device, policy):
                os.unlink(update_path)
                self.stats.successes += 1
                self.stats.unlinks += 1

        logger = FakeLogger()
        ou = MockObjectUpdater({
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
            'interval': '1',
            'concurrency': '1',
            'report_interval': '10.0',
            'node_timeout': '5'}, logger=logger)

        # Monotonically advancing fake clock keeps elapsed-time math stable.
        now = [time()]

        def mock_time():
            rv = now[0]
            now[0] += 0.01
            return rv

        with mock.patch('swift.obj.updater.time',
                        mock.MagicMock(time=mock_time)):
            ou.object_sweep(self.sda1)

        completion_lines = [l for l in logger.get_lines_for_level('info')
                            if "sweep complete" in l]

        # A single summary line for the whole sweep, not one per policy;
        # 2 policies x 3 objects = 6 successes and 6 unlinks.
        self.assertEqual(len(completion_lines), 1)
        self.assertIn("sweep complete", completion_lines[0])
        self.assertIn(
            "6 successes, 0 failures, 0 quarantines, 6 unlinks, 0 errors, "
            "0 redirects",
            completion_lines[0])
예제 #7
0
    def test_bad_mtime_check_interval(self):
        fname = 'container-sync-realms.conf'
        fcontents = '''
[DEFAULT]
mtime_check_interval = invalid
'''
        with temptree([fname], [fcontents]) as tempdir:
            logger = FakeLogger()
            fpath = os.path.join(tempdir, fname)
            csr = ContainerSyncRealms(fpath, logger)
            self.assertEqual(
                logger.all_log_lines(),
                {'error': [
                    "Error in '%s' with mtime_check_interval: invalid literal "
                    "for int() with base 10: 'invalid'" % fpath]})
            self.assertEqual(csr.mtime_check_interval, 300)
예제 #8
0
    def test_empty_realm(self):
        fname = 'container-sync-realms.conf'
        fcontents = '''
[US]
'''
        with temptree([fname], [fcontents]) as tempdir:
            logger = FakeLogger()
            fpath = os.path.join(tempdir, fname)
            csr = ContainerSyncRealms(fpath, logger)
            self.assertEqual(logger.all_log_lines(), {})
            self.assertEqual(csr.mtime_check_interval, 300)
            self.assertEqual(csr.realms(), ['US'])
            self.assertEqual(csr.key('US'), None)
            self.assertEqual(csr.key2('US'), None)
            self.assertEqual(csr.clusters('US'), [])
            self.assertEqual(csr.endpoint('US', 'JUST_TESTING'), None)
예제 #9
0
    def setUp(self):
        """Build a CryptoWSGIContext for 'object' requests around a stub filter."""
        class StubFilter(object):
            # Minimal stand-in for the crypto filter middleware.
            app = None
            crypto = Crypto({})

        self.fake_logger = FakeLogger()
        self.crypto_context = CryptoWSGIContext(
            StubFilter(), 'object', self.fake_logger)
예제 #10
0
 def test_os_error(self):
     """A permission error while loading the config is logged as an error.

     Uses ``0o700`` (octal syntax valid on Python 2.6+ and 3.x) instead of
     the Python-2-only ``0700`` literal when restoring the directory mode.
     """
     fname = 'container-sync-realms.conf'
     fcontents = ''
     with temptree([fname], [fcontents]) as tempdir:
         logger = FakeLogger()
         fpath = os.path.join(tempdir, fname)
         # Revoke all permissions so the subsequent load raises EACCES.
         # NOTE(review): assumes the tests do not run as root, for whom
         # chmod(0) does not block access.
         os.chmod(tempdir, 0)
         csr = ContainerSyncRealms(fpath, logger)
         try:
             self.assertEqual(
                 logger.all_log_lines(),
                 {'error': [
                     "Could not load '%s': [Errno 13] Permission denied: "
                     "'%s'" % (fpath, fpath)]})
             self.assertEqual(csr.mtime_check_interval, 300)
             self.assertEqual(csr.realms(), [])
         finally:
             # Restore the mode so temptree can clean up.
             os.chmod(tempdir, 0o700)
예제 #11
0
    def test_one_realm(self):
        fname = 'container-sync-realms.conf'
        fcontents = '''
[US]
key = 9ff3b71c849749dbaec4ccdd3cbab62b
cluster_dfw1 = http://dfw1.host/v1/
'''
        with temptree([fname], [fcontents]) as tempdir:
            logger = FakeLogger()
            fpath = os.path.join(tempdir, fname)
            csr = ContainerSyncRealms(fpath, logger)
            self.assertEqual(logger.all_log_lines(), {})
            self.assertEqual(csr.mtime_check_interval, 300)
            self.assertEqual(csr.realms(), ['US'])
            self.assertEqual(csr.key('US'), '9ff3b71c849749dbaec4ccdd3cbab62b')
            self.assertEqual(csr.key2('US'), None)
            self.assertEqual(csr.clusters('US'), ['DFW1'])
            self.assertEqual(
                csr.endpoint('US', 'DFW1'), 'http://dfw1.host/v1/')
예제 #12
0
File: test_expirer.py  Project: 701/swift
    def setUp(self):
        """Neutralize internal_client's loadapp/sleep and create scratch state."""
        global not_sleep

        self.rcache = mkdtemp()
        self.logger = FakeLogger()

        # Save the real functions so tearDown can restore them, then stub
        # them out: no real app gets loaded and nothing actually sleeps.
        self.old_loadapp = internal_client.loadapp
        self.old_sleep = internal_client.sleep
        internal_client.loadapp = lambda *a, **kw: None
        internal_client.sleep = not_sleep
    def test_os_error(self):
        """An OSError from os.path.getmtime surfaces as a logged load error."""
        fname = 'container-sync-realms.conf'
        with temptree([fname], ['']) as tempdir:
            logger = FakeLogger()
            fpath = os.path.join(tempdir, fname)

            def _raise_eacces(path):
                # Simulate a permission failure on the mtime check.
                raise OSError(errno.EACCES,
                              os.strerror(errno.EACCES) +
                              ": '%s'" % (fpath))

            with patch('os.path.getmtime', _raise_eacces):
                csr = ContainerSyncRealms(fpath, logger)

            self.assertEqual(
                logger.all_log_lines(),
                {'error': [
                    "Could not load '%s': [Errno 13] Permission denied: "
                    "'%s'" % (fpath, fpath)]})
            # Defaults apply when the config could not be read.
            self.assertEqual(csr.mtime_check_interval, 300)
            self.assertEqual(csr.realms(), [])
예제 #14
0
 def test_upload_line(self):
     """POST through a readline()-consuming app logs bytes read and sent.

     Replaces the deprecated ``assertEquals`` alias with ``assertEqual``.
     """
     app = proxy_logging.ProxyLoggingMiddleware(FakeAppReadline(),
                                                {'log_headers': 'yes'})
     app.access_logger = FakeLogger()
     req = Request.blank(
         '/v1/a/c',
         environ={
             'REQUEST_METHOD': 'POST',
             'wsgi.input':
             StringIO.StringIO('some stuff\nsome other stuff\n')
         })
     resp = app(req.environ, start_response)
     # Drain the response iterator so the access line gets emitted.
     exhaust_generator = [x for x in resp]
     log_parts = self._log_parts(app)
     self.assertEqual(log_parts[11], str(len('FAKE APP')))  # bytes sent
     # Only the first line is read by the app, so only it counts as received.
     self.assertEqual(log_parts[10], str(len('some stuff\n')))
     self.assertUpdateStats('container.POST.200.xfer',
                            len('some stuff\n') + len('FAKE APP'), app)
예제 #15
0
    def test_run_forever_catches_usual_exceptions(self):
        """run_forever() swallows ordinary exceptions but lets SystemExit out.

        Fixes two defects in the original: the patched ``expirer.sleep`` was
        never restored (``orig_sleep`` was saved and then leaked to later
        tests), and the caught SystemExit was never checked.  Also uses the
        ``except ... as`` syntax instead of the Python-2-only comma form.
        """
        raises = [0]

        def raise_exceptions():
            # First call: a plain Exception that run_forever must swallow.
            # Second call: SystemExit, which must propagate.
            raises[0] += 1
            if raises[0] < 2:
                raise Exception('exception %d' % raises[0])
            raise SystemExit('exiting exception %d' % raises[0])

        x = expirer.ObjectExpirer({})
        x.logger = FakeLogger()
        orig_sleep = expirer.sleep
        try:
            expirer.sleep = not_sleep
            x.run_once = raise_exceptions
            x.run_forever()
        except SystemExit as err:
            # The second, uncaught exception is the one that escaped.
            self.assertEqual(str(err), 'exiting exception 2')
        finally:
            # Always undo the monkeypatch so other tests get the real sleep.
            expirer.sleep = orig_sleep
예제 #16
0
 def test_upload_size_no_policy(self):
     """PUT without a storage policy still logs transfer sizes and stats."""
     app = proxy_logging.ProxyLoggingMiddleware(FakeApp(policy_idx=None),
                                                {'log_headers': 'yes'})
     app.access_logger = FakeLogger()
     req = Request.blank('/v1/a/c/o/foo',
                         environ={
                             'REQUEST_METHOD': 'PUT',
                             'wsgi.input': BytesIO(b'some stuff')
                         })
     resp = app(req.environ, start_response)
     # Drain the response iterator so the access line gets emitted.
     for _ in resp:
         pass
     log_parts = self._log_parts(app)
     self.assertEqual(log_parts[11], str(len('FAKE APP')))  # bytes sent
     self.assertEqual(log_parts[10], str(len('some stuff')))  # bytes read
     self.assertUpdateStats(
         [('object.PUT.200.xfer', len('some stuff') + len('FAKE APP'))],
         app)
예제 #17
0
 def test_disconnect_on_read(self):
     """A client disconnect mid-read logs a 499 with an unknown read length.

     Replaces the deprecated ``assertEquals`` alias with ``assertEqual``.
     """
     app = proxy_logging.ProxyLoggingMiddleware(FakeApp(['some', 'stuff']),
                                                {})
     app.access_logger = FakeLogger()
     req = Request.blank('/',
                         environ={
                             'REQUEST_METHOD': 'GET',
                             'wsgi.input': FileLikeExceptor()
                         })
     try:
         resp = app(req.environ, start_response)
         # read body
         ''.join(resp)
     except IOError:
         pass
     log_parts = self._log_parts(app)
     self.assertEqual(log_parts[6], '499')  # client-disconnect status
     self.assertEqual(log_parts[10], '-')   # read length unknown
예제 #18
0
 def test_no_content_length_no_transfer_encoding_with_str_body(self):
     """A body with neither content-length nor transfer-encoding must raise.

     Replaces the deprecated ``assertEquals``/``assert_`` aliases with
     ``assertEqual``/``self.fail`` (the latter also explains the failure).
     """
     app = proxy_logging.ProxyLoggingMiddleware(
         FakeAppNoContentLengthNoTransferEncoding(
             body='line1\nline2\n',
         ), {})
     app.access_logger = FakeLogger()
     req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
     resp = app(req.environ, start_response)
     # Python 2.7 can have assertRaises act as a context manager, but python
     # 2.6 can't.  So there's this.
     try:
         resp_body = ''.join(resp)
     except Exception as e:
         self.assertEqual(
             "WSGI [proxy-logging]: No content-length or transfer-encoding "
             "header sent and there is content! 'l'", str(e))
     else:
         self.fail('expected an exception while reading the body')
 def test_os_error(self):
     """A permission-denied config load is recorded in logger.lines_dict.

     Uses ``0o700`` (octal syntax valid on Python 2.6+ and 3.x) instead of
     the Python-2-only ``0700`` literal when restoring the directory mode.
     """
     fname = 'container-sync-realms.conf'
     fcontents = ''
     with temptree([fname], [fcontents]) as tempdir:
         logger = FakeLogger()
         fpath = os.path.join(tempdir, fname)
         # Revoke all permissions so the load raises EACCES (assumes the
         # tests do not run as root).
         os.chmod(tempdir, 0)
         csr = ContainerSyncRealms(fpath, logger)
         try:
             self.assertEqual(
                 logger.lines_dict,
                 {'error': [
                     "Could not load '%s': [Errno 13] Permission denied: "
                     "'%s'" % (fpath, fpath)]})
             self.assertEqual(csr.mtime_check_interval, 300)
             self.assertEqual(csr.realms(), [])
         finally:
             # Restore the mode so temptree can clean up.
             os.chmod(tempdir, 0o700)
예제 #20
0
    def test_global_read_only_on_account_off(self):
        """Per-account read-only=false overrides the global read_only flag."""
        ro = read_only.filter_factory({'read_only': 'true'})(FakeApp())
        ro.logger = FakeLogger()

        def fake_get_info(*args, **kwargs):
            # The account explicitly opts back in to writes.
            return {'sysmeta': {'read-only': 'false'}}

        with mock.patch('swift.common.middleware.read_only.get_info',
                        fake_get_info):
            # Every verb -- reads and writes alike -- must pass through.
            for method in read_methods + write_methods:
                req = Request.blank('/v/a')
                req.method = method
                resp = ro(req.environ, start_response)
                self.assertTrue(resp[0].startswith('204'))
예제 #21
0
 def test_get_hashes(self):
     """get_hashes() hashes suffixes, honoring do_listdir and recalculate.

     Replaces the deprecated ``assertEquals``/``assert_`` aliases with
     ``assertEqual``/``assertIn``.
     """
     df = diskfile.DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o',
                            FakeLogger())
     mkdirs(df.datadir)
     with open(
             os.path.join(df.datadir,
                          normalize_timestamp(time()) + '.ts'), 'wb') as f:
         f.write('1234567890')
     part = os.path.join(self.objects, '0')
     hashed, hashes = diskfile.get_hashes(part)
     self.assertEqual(hashed, 1)
     self.assertIn('a83', hashes)
     # Second pass hits the cached hashes file: nothing gets rehashed.
     hashed, hashes = diskfile.get_hashes(part, do_listdir=True)
     self.assertEqual(hashed, 0)
     self.assertIn('a83', hashes)
     # Forcing recalculation of 'a83' rehashes exactly that one suffix.
     hashed, hashes = diskfile.get_hashes(part, recalculate=['a83'])
     self.assertEqual(hashed, 1)
     self.assertIn('a83', hashes)
예제 #22
0
    def test_global_read_only_off_destination_account_on_on_copy(self):
        """COPY into a read-only destination account must be rejected."""
        ro = read_only.filter_factory({})(FakeApp())
        ro.logger = FakeLogger()

        def fake_get_info(*args, **kwargs):
            # Only account 'b' -- the COPY destination -- reports read-only.
            if 'b' in args:
                return {'sysmeta': {'read-only': 'true'}}
            return {}

        with mock.patch('swift.common.middleware.read_only.get_info',
                        fake_get_info):
            req = Request.blank('/v/a',
                                headers={'Destination-Account': 'b'})
            req.method = "COPY"
            resp = ro(req.environ, start_response)
            self.assertEqual(ro_resp, resp)
예제 #23
0
    def test_delete_actual_object_does_not_get_unicode(self):
        """Object names from queue listings must reach delete_actual_object
        as plain strings, never unicode.

        Replaces the deprecated ``assertEquals`` alias with ``assertEqual``.
        """
        class InternalClient(object):
            # Canned swift client: one day-old queue container holding one
            # unicode-named entry.
            def __init__(self, containers, objects):
                self.containers = containers
                self.objects = objects

            def get_account_info(*a, **kw):
                return 1, 2

            def iter_containers(self, *a, **kw):
                return self.containers

            def delete_container(*a, **kw):
                pass

            def delete_object(*a, **kw):
                pass

            def iter_objects(self, *a, **kw):
                return self.objects

        got_unicode = [False]

        def delete_actual_object_test_for_unicode(actual_obj, timestamp):
            # Flag (rather than fail here) so the run completes either way.
            if isinstance(actual_obj, unicode):
                got_unicode[0] = True

        x = expirer.ObjectExpirer({})
        x.logger = FakeLogger()
        x.delete_actual_object = delete_actual_object_test_for_unicode
        self.assertEqual(x.report_objects, 0)
        x.swift = InternalClient([{
            'name': str(int(time() - 86400))
        }], [{
            'name': u'%d-actual-obj' % int(time() - 86400)
        }])
        x.run_once()
        self.assertEqual(x.report_objects, 1)
        self.assertEqual(
            x.logger.log_dict['info'],
            [(('Pass beginning; 1 possible containers; '
               '2 possible objects', ), {}),
             (('Pass completed in 0s; 1 objects expired', ), {})])
        self.assertFalse(got_unicode[0])
예제 #24
0
 def test_run_once_recover_from_failure(self):
     """After replication moves partition 1's data away, its hash file is
     gone while partitions 0/2/3 keep theirs.

     Replaces the deprecated ``assertEquals`` alias with ``assertEqual``
     and fixes the 'partitoins' comment typo.
     """
     replicator = object_replicator.ObjectReplicator(
         dict(swift_dir=self.testdir, devices=self.devices,
              mount_check='false', timeout='300', stats_interval='1'))
     was_connector = object_replicator.http_connect
     try:
         object_replicator.http_connect = mock_http_connect(200)
         # Write some files into '1' and run replicate- they should be moved
         # to the other partitions and then node should get deleted.
         cur_part = '1'
         df = diskfile.DiskFile(
             self.devices, 'sda', cur_part, 'a', 'c', 'o', FakeLogger())
         mkdirs(df.datadir)
         f = open(os.path.join(df.datadir,
                               normalize_timestamp(time.time()) + '.data'),
                  'wb')
         f.write('1234567890')
         f.close()
         ohash = hash_path('a', 'c', 'o')
         data_dir = ohash[-3:]
         whole_path_from = os.path.join(self.objects, cur_part, data_dir)
         # Expect one rsync per remote node for the partition being moved.
         process_arg_checker = []
         nodes = [node for node in
                  self.ring.get_part_nodes(int(cur_part))
                  if node['ip'] not in _ips()]
         for node in nodes:
             rsync_mod = '%s::object/sda/objects/%s' % (node['ip'],
                                                        cur_part)
             process_arg_checker.append(
                 (0, '', ['rsync', whole_path_from, rsync_mod]))
         # Sanity: the data is present before replication runs.
         self.assertTrue(os.access(os.path.join(self.objects,
                                                '1', data_dir, ohash),
                                   os.F_OK))
         with _mock_process(process_arg_checker):
             replicator.run_once()
         self.assertFalse(process_errors)
         for i, result in [('0', True), ('1', False),
                           ('2', True), ('3', True)]:
             self.assertEqual(os.access(
                 os.path.join(self.objects,
                              i, diskfile.HASH_FILE),
                 os.F_OK), result)
     finally:
         object_replicator.http_connect = was_connector
예제 #25
0
    def test_ratelimit_blacklist(self):
        """Blacklisted accounts get an immediate rejection for every request.

        Replaces the deprecated ``assertEquals`` alias with ``assertEqual``.
        """
        global time_ticker
        current_rate = 2
        conf_dict = {
            'account_ratelimit': current_rate,
            'max_sleep_time_seconds': 2,
            'account_whitelist': 'a',
            'account_blacklist': 'b'
        }
        self.test_ratelimit = ratelimit.filter_factory(conf_dict)(FakeApp())
        self.test_ratelimit.logger = FakeLogger()
        # No artificial delay for blacklisted requests in this test.
        self.test_ratelimit.BLACK_LIST_SLEEP = 0
        ratelimit.http_connect = mock_http_connect(204)
        req = Request.blank('/v/b/c')
        req.environ['swift.cache'] = FakeMemcache()

        class rate_caller(Thread):
            def __init__(self, parent):
                Thread.__init__(self)
                self.parent = parent

            def run(self):
                self.result = self.parent.test_ratelimit(
                    req.environ, start_response)

        def get_fake_ratelimit(*args, **kwargs):
            return {'sysmeta': {'global-write-ratelimit': 'BLACKLIST'}}

        with mock.patch('swift.common.middleware.ratelimit.get_account_info',
                        get_fake_ratelimit):
            nt = 5
            threads = []
            for i in range(nt):
                rc = rate_caller(self)
                rc.start()
                threads.append(rc)
            for thread in threads:
                thread.join()
            # All five concurrent callers must be rejected without sleeping.
            the_497s = [
                t for t in threads
                if ''.join(t.result).startswith('Your account')
            ]
            self.assertEqual(len(the_497s), 5)
            self.assertEqual(time_ticker, 0)
예제 #26
0
    def test_global_read_only_off_src_acct_on_dest_acct_off_on_copy(self):
        """COPY out of a read-only source into a writable account succeeds."""
        ro = read_only.filter_factory({})(FakeApp())
        ro.logger = FakeLogger()

        def fake_account_read_only(self, req, account):
            # Only the source account 'a' is marked read-only.
            return 'on' if account == 'a' else ''

        with mock.patch(
                'swift.common.middleware.read_only.ReadOnlyMiddleware.' +
                'account_read_only', fake_account_read_only):
            req = Request.blank('/v/a',
                                headers={'Destination-Account': 'b'})
            req.method = "COPY"
            resp = ro(req.environ, start_response)
            self.assertTrue(resp[0].startswith('204'))
예제 #27
0
    def __init__(self, conf):
        """Build the middleware pipeline under test.

        Stacks proxy_logging / copy / FakeFilter / proxy_logging around a
        FakeSwift backend, pointing every layer's (access) logger at one
        shared FakeLogger.  The loop variable is named ``factory`` rather
        than ``filter`` to avoid shadowing the builtin.
        """
        self.fake_logger = FakeLogger()
        self.fake_swift = self.app = FakeSwift()
        self.register = self.fake_swift.register
        # Wrap inside-out: the last factory listed ends up innermost.
        for factory in reversed([
                proxy_logging.filter_factory, copy.filter_factory,
                lambda conf: lambda app: FakeFilter(app, conf, self.register),
                proxy_logging.filter_factory
        ]):
            self.app = factory(conf)(self.app)
            self.app.logger = self.fake_logger
            if hasattr(self.app, 'access_logger'):
                self.app.access_logger = self.fake_logger

        # Pre-register the canned backend response for the subrequest type.
        if conf['subrequest_type'] == 'GET':
            self.register(conf['subrequest_type'], SUB_GET_PATH, HTTPOk, {})
        else:
            self.register(conf['subrequest_type'], SUB_PUT_POST_PATH, HTTPOk,
                          {})
예제 #28
0
    def test_account_read_only_on(self):
        """With account sysmeta read-only=true, reads pass, writes are rejected."""
        ro = read_only.filter_factory({})(FakeApp())
        ro.logger = FakeLogger()

        with mock.patch('swift.common.middleware.read_only.get_info',
                        return_value={'sysmeta': {'read-only': 'true'}}):
            # Read verbs are always allowed through to the app.
            for method in read_methods:
                req = Request.blank('/v/a')
                req.method = method
                resp = ro(req.environ, start_response)
                self.assertEqual(resp, [b'Some Content'])

            # Write verbs get the read-only rejection response.
            for method in write_methods:
                req = Request.blank('/v/a')
                req.method = method
                resp = ro(req.environ, start_response)
                self.assertEqual(ro_resp, resp)
    def test_one_realm(self):
        fname = 'container-sync-realms.conf'
        fcontents = '''
[US]
key = 9ff3b71c849749dbaec4ccdd3cbab62b
cluster_dfw1 = http://dfw1.host/v1/
'''
        with temptree([fname], [fcontents]) as tempdir:
            logger = FakeLogger()
            fpath = os.path.join(tempdir, fname)
            csr = ContainerSyncRealms(fpath, logger)
            self.assertEqual(logger.lines_dict, {})
            self.assertEqual(csr.mtime_check_interval, 300)
            self.assertEqual(csr.realms(), ['US'])
            self.assertEqual(csr.key('US'), '9ff3b71c849749dbaec4ccdd3cbab62b')
            self.assertEqual(csr.key2('US'), None)
            self.assertEqual(csr.clusters('US'), ['DFW1'])
            self.assertEqual(
                csr.endpoint('US', 'DFW1'), 'http://dfw1.host/v1/')
예제 #30
0
    def test_call_invalid_path(self):
        """A malformed path bypasses ratelimiting and reaches the wrapped app.

        Replaces the deprecated ``assert_`` alias with ``assertEqual``,
        which also gives a useful failure message.
        """
        env = {'REQUEST_METHOD': 'GET',
               'SCRIPT_NAME': '',
               'PATH_INFO': '//v1/AUTH_1234567890',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '80',
               'swift.cache': FakeMemcache(),
               'SERVER_PROTOCOL': 'HTTP/1.0'}

        app = lambda *args, **kwargs: ['fake_app']
        rate_mid = ratelimit.RateLimitMiddleware(app, {},
                                                 logger=FakeLogger())

        class a_callable(object):

            def __call__(self, *args, **kwargs):
                pass
        resp = rate_mid.__call__(env, a_callable())
        self.assertEqual(resp[0], 'fake_app')
예제 #31
0
 def test_listdir_with_exception(self, mock_listdir):
     """A listdir failure yields no paths and logs the underlying error."""
     mock_listdir.side_effect = OSError('permission_denied')
     cu = container_updater.ContainerUpdater({
         'devices': self.devices_dir,
         'mount_check': 'false',
         'swift_dir': self.testdir,
         'interval': '1',
         'concurrency': '1',
         'node_timeout': '15',
         'account_suppression_time': 0
     })
     cu.logger = FakeLogger()
     # No paths can be discovered when listdir blows up.
     self.assertEqual(cu.get_paths(), [])
     expected = ('ERROR:  Failed to get paths to drive partitions: '
                 'permission_denied')
     self.assertEqual(cu.logger.get_lines_for_level('error')[0], expected)
예제 #32
0
 def test_get_hashes_unmodified_and_zero_bytes(self):
     """A zero-byte hashes file forces a rewrite without consulting mtime.

     Replaces the deprecated ``assertEquals`` alias with ``assertEqual``.
     """
     df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
     mkdirs(df.datadir)
     part = os.path.join(self.objects, '0')
     open(os.path.join(part, object_replicator.HASH_FILE), 'w')
     # Now the hash file is zero bytes.
     i = [0]

     def getmtime(filename):
         i[0] += 1
         return 1

     with mock({'os.path.getmtime': getmtime}):
         hashed, hashes = object_replicator.get_hashes(
             part, recalculate=[])
     # getmtime will actually not get called.  Initially, the pickle.load
     # will raise an exception first and later, force_rewrite will
     # short-circuit the if clause to determine whether to write out a fresh
     # hashes_file.
     self.assertEqual(i[0], 0)
     self.assertTrue('a83' in hashes)
예제 #33
0
 def test_multi_segment_resp(self):
     """A multi-chunk GET logs the full joined body length once.

     Replaces the deprecated ``assertEquals`` alias with ``assertEqual``.
     """
     app = proxy_logging.ProxyLoggingMiddleware(
         FakeApp(['some', 'chunks', 'of data']), {})
     app.access_logger = FakeLogger()
     req = Request.blank('/',
                         environ={
                             'REQUEST_METHOD': 'GET',
                             'swift.source': 'SOS'
                         })
     resp = app(req.environ, start_response)
     resp_body = ''.join(resp)
     log_parts = self._log_parts(app)
     self.assertEqual(log_parts[3], 'GET')
     self.assertEqual(log_parts[4], '/')
     self.assertEqual(log_parts[5], 'HTTP/1.0')
     self.assertEqual(log_parts[6], '200')
     self.assertEqual(resp_body, 'somechunksof data')
     self.assertEqual(log_parts[11], str(len(resp_body)))
     # swift.source determines the stats prefix.
     self.assertUpdateStats('SOS.GET.200.xfer', len(resp_body), app)
예제 #34
0
 def test_access_log_headers_only(self):
     """Only headers named in access_log_headers_only appear in the log.

     Uses ``assertIn``/``assertNotIn`` for clearer failure messages than
     ``assertTrue(x in y)``.
     """
     app = proxy_logging.ProxyLoggingMiddleware(
         FakeApp(), {'log_headers': 'yes',
                     'access_log_headers_only': 'FIRST, seCond'})
     app.access_logger = FakeLogger()
     req = Request.blank('/',
                         environ={'REQUEST_METHOD': 'GET'},
                         headers={'First': '1',
                                  'Second': '2',
                                  'Third': '3'})
     resp = app(req.environ, start_response)
     # exhaust generator
     [x for x in resp]
     log_parts = self._log_parts(app)
     headers = unquote(log_parts[14]).split('\n')
     # The filter list is matched case-insensitively.
     self.assertIn('First: 1', headers)
     self.assertIn('Second: 2', headers)
     self.assertNotIn('Third: 3', headers)
     self.assertNotIn('Host: localhost:80', headers)
예제 #35
0
    def test_get_hashes_unmodified(self):
        """Recalculating one suffix consults getmtime exactly twice.

        Replaces the deprecated ``assertEquals`` alias with ``assertEqual``.
        """
        df = diskfile.DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o',
                               FakeLogger())
        mkdirs(df.datadir)
        with open(
                os.path.join(df.datadir,
                             normalize_timestamp(time()) + '.ts'), 'wb') as f:
            f.write('1234567890')
        part = os.path.join(self.objects, '0')
        hashed, hashes = diskfile.get_hashes(part)
        i = [0]

        def _getmtime(filename):
            # Count mtime lookups; the returned value just needs to be stable.
            i[0] += 1
            return 1

        with unit_mock({'swift.obj.diskfile.getmtime': _getmtime}):
            hashed, hashes = diskfile.get_hashes(part, recalculate=['a83'])
        self.assertEqual(i[0], 2)
예제 #36
0
    def test_run_once_with_device_unmounted(self, mock_sweep, mock_ismount):
        """run_once() must sweep when mount_check is off, and skip the device
        (logging a warning) when mount_check is on and ismount reports False.
        """

        mock_ismount.return_value = False
        cu = container_updater.ContainerUpdater({
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
            'interval': '1',
            'concurrency': '1',
            'node_timeout': '15',
            'account_suppression_time': 0
        })
        containers_dir = os.path.join(self.sda1, DATADIR)
        os.mkdir(containers_dir)
        partition_dir = os.path.join(containers_dir, "a")
        os.mkdir(partition_dir)

        cu.run_once()
        self.assertTrue(os.path.exists(containers_dir))  # sanity check

        # only called if a partition dir exists
        self.assertTrue(mock_sweep.called)

        mock_sweep.reset_mock()

        # Same updater but with mount_check enabled; ismount still False.
        cu = container_updater.ContainerUpdater({
            'devices': self.devices_dir,
            'mount_check': 'true',
            'swift_dir': self.testdir,
            'interval': '1',
            'concurrency': '1',
            'node_timeout': '15',
            'account_suppression_time': 0
        })
        cu.logger = FakeLogger()
        cu.run_once()
        log_lines = cu.logger.get_lines_for_level('warning')
        self.assertTrue(len(log_lines) > 0)
        msg = 'sda1 is not mounted'
        self.assertEqual(log_lines[0], msg)
        # Ensure that the container_sweep did not run
        self.assertFalse(mock_sweep.called)
예제 #37
0
    def test_delete_object(self):
        """delete_object() must both remove the queue entry (via the swift
        client) and delete the actual object (via delete_actual_object).
        """
        class InternalClient(object):
            # Records that delete_object was called with the expected
            # account/container/obj triple.
            def __init__(self, test, account, container, obj):
                self.test = test
                self.account = account
                self.container = container
                self.obj = obj
                self.delete_object_called = False

            def delete_object(self, account, container, obj):
                self.test.assertEqual(self.account, account)
                self.test.assertEqual(self.container, container)
                self.test.assertEqual(self.obj, obj)
                self.delete_object_called = True

        class DeleteActualObject(object):
            # Callable stub asserting the actual-object delete arguments.
            def __init__(self, test, actual_obj, timestamp):
                self.test = test
                self.actual_obj = actual_obj
                self.timestamp = timestamp
                self.called = False

            def __call__(self, actual_obj, timestamp):
                self.test.assertEqual(self.actual_obj, actual_obj)
                self.test.assertEqual(self.timestamp, timestamp)
                self.called = True

        container = 'container'
        obj = 'obj'
        actual_obj = 'actual_obj'
        timestamp = 'timestamp'

        x = expirer.ObjectExpirer({})
        x.logger = FakeLogger()
        x.swift = \
            InternalClient(self, x.expiring_objects_account, container, obj)
        x.delete_actual_object = \
            DeleteActualObject(self, actual_obj, timestamp)

        x.delete_object(actual_obj, timestamp, container, obj)
        # Both collaborators must have been exercised.
        self.assertTrue(x.swift.delete_object_called)
        self.assertTrue(x.delete_actual_object.called)
예제 #38
0
 def test_get_maxrate(self):
     """get_maxrate() maps a container size onto the configured
     per-size ratelimit tiers (None below the smallest tier)."""
     conf_dict = {
         'container_ratelimit_10': 200,
         'container_ratelimit_50': 100,
         'container_ratelimit_75': 30
     }
     test_ratelimit = ratelimit.filter_factory(conf_dict)(FakeApp())
     test_ratelimit.logger = FakeLogger()
     # Below the smallest configured size there is no limit at all.
     self.assertIsNone(
         ratelimit.get_maxrate(test_ratelimit.container_ratelimits, 0))
     self.assertIsNone(
         ratelimit.get_maxrate(test_ratelimit.container_ratelimits, 5))
     # Exactly at a tier boundary: that tier's rate.
     self.assertEqual(
         ratelimit.get_maxrate(test_ratelimit.container_ratelimits, 10),
         200)
     # Between tiers (50->100, 75->30): 60 yields 72, i.e. a value
     # between the surrounding tiers' rates.
     self.assertEqual(
         ratelimit.get_maxrate(test_ratelimit.container_ratelimits, 60), 72)
     # Past the largest tier the last rate applies.
     self.assertEqual(
         ratelimit.get_maxrate(test_ratelimit.container_ratelimits, 160),
         30)
예제 #39
0
    def test_hash_suffix_multi_file_one(self):
        """hash_suffix() should clean obsolete files from a hash dir so
        that only a single file remains (the tombstone, per the assert).
        """
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        # Seed the datadir with .meta/.data/.ts files at four ages
        # (1..500 seconds old); 12 files total before cleanup.
        for tdiff in [1, 50, 100, 500]:
            for suff in ['.meta', '.data', '.ts']:
                f = open(os.path.join(df.datadir,
                        normalize_timestamp(int(time.time()) - tdiff) + suff),
                         'wb')
                f.write('1234567890')
                f.close()

        # Locate the on-disk hash dir for a/c/o under partition 0.
        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]
        whole_path_from = os.path.join(self.objects, '0', data_dir)
        hsh_path = os.listdir(whole_path_from)[0]
        whole_hsh_path = os.path.join(whole_path_from, hsh_path)

        object_replicator.hash_suffix(whole_path_from, 99)
        # only the tombstone should be left
        self.assertEquals(len(os.listdir(whole_hsh_path)), 1)
예제 #40
0
    def test_hash_suffix_hash_dir_is_file_quarantine(self):
        """If the hash 'directory' is actually a file, hash_suffix()
        must quarantine it via quarantine_renamer."""
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(os.path.dirname(df.datadir))
        # Create a plain file where the hash directory should be.
        open(df.datadir, 'wb').close()
        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]
        whole_path_from = os.path.join(self.objects, '0', data_dir)
        orig_quarantine_renamer = object_replicator.quarantine_renamer
        called = [False]

        # Wrap the real renamer so we can observe the call while still
        # performing the actual quarantine.
        def wrapped(*args, **kwargs):
            called[0] = True
            return orig_quarantine_renamer(*args, **kwargs)

        try:
            object_replicator.quarantine_renamer = wrapped
            object_replicator.hash_suffix(whole_path_from, 101)
        finally:
            # Always restore the module-level function we patched.
            object_replicator.quarantine_renamer = orig_quarantine_renamer
        self.assertTrue(called[0])
예제 #41
0
 def setUp(self):
     """Patch tpool, xattr metadata helpers and fsync with in-process
     fakes, then build a DiskFileManager over a fresh temp dir.
     Originals are saved on self for restoration later.
     """
     self._orig_tpool_exc = tpool.execute
     # Run tpool work synchronously so tests stay deterministic.
     tpool.execute = lambda f, *args, **kwargs: f(*args, **kwargs)
     self.lg = FakeLogger()
     _initxattr()
     _mock_clear_metadata()
     # Swap both the diskfile-level and utils-level metadata I/O for
     # the in-memory mocks (they are distinct module attributes).
     self._saved_df_wm = swiftonfile.swift.obj.diskfile.write_metadata
     self._saved_df_rm = swiftonfile.swift.obj.diskfile.read_metadata
     swiftonfile.swift.obj.diskfile.write_metadata = _mock_write_metadata
     swiftonfile.swift.obj.diskfile.read_metadata = _mock_read_metadata
     self._saved_ut_wm = swiftonfile.swift.common.utils.write_metadata
     self._saved_ut_rm = swiftonfile.swift.common.utils.read_metadata
     swiftonfile.swift.common.utils.write_metadata = _mock_write_metadata
     swiftonfile.swift.common.utils.read_metadata = _mock_read_metadata
     self._saved_do_fsync = swiftonfile.swift.obj.diskfile.do_fsync
     swiftonfile.swift.obj.diskfile.do_fsync = _mock_do_fsync
     self.td = tempfile.mkdtemp()
     self.conf = dict(devices=self.td, mb_per_sync=2,
                      keep_cache_size=(1024 * 1024), mount_check=False)
     self.mgr = DiskFileManager(self.conf, self.lg)
예제 #42
0
 def test_get_ratelimitable_key_tuples(self):
     """get_ratelimitable_key_tuples() should yield exactly one key
     tuple for container-modifying requests (PUT/DELETE of a container,
     PUT of an object) and none for reads or account-level DELETEs.

     Uses assertEqual: assertEquals is a deprecated alias (removed from
     unittest in modern Python).
     """
     current_rate = 13
     conf_dict = {'account_ratelimit': current_rate,
                  'container_ratelimit_3': 200}
     fake_memcache = FakeMemcache()
     # Pre-seed the container count so container ratelimits apply.
     fake_memcache.store[get_container_memcache_key('a', 'c')] = \
         {'count': 5}
     the_app = ratelimit.RateLimitMiddleware(None, conf_dict,
                                             logger=FakeLogger())
     the_app.memcache_client = fake_memcache
     # Account-level DELETE: nothing to rate-limit.
     self.assertEqual(len(the_app.get_ratelimitable_key_tuples(
         'DELETE', 'a', None, None)), 0)
     # Container create/delete: one key each.
     self.assertEqual(len(the_app.get_ratelimitable_key_tuples(
         'PUT', 'a', 'c', None)), 1)
     self.assertEqual(len(the_app.get_ratelimitable_key_tuples(
         'DELETE', 'a', 'c', None)), 1)
     # Object GET is not limited; object PUT is.
     self.assertEqual(len(the_app.get_ratelimitable_key_tuples(
         'GET', 'a', 'c', 'o')), 0)
     self.assertEqual(len(the_app.get_ratelimitable_key_tuples(
         'PUT', 'a', 'c', 'o')), 1)
예제 #43
0
    def setUp(self):
        """Build a two-device (sda/sdb) object tree with four empty
        partitions under a temp dir, plus one DiskFile for a/c/o."""
        self.testdir = os.path.join(mkdtemp(), 'tmp_test_object_auditor')
        self.devices = os.path.join(self.testdir, 'node')
        self.logger = FakeLogger()
        rmtree(self.testdir, ignore_errors=1)
        mkdirs(os.path.join(self.devices, 'sda'))
        self.objects = os.path.join(self.devices, 'sda', 'objects')

        # Second device so multi-device iteration can be exercised.
        os.mkdir(os.path.join(self.devices, 'sdb'))
        self.objects_2 = os.path.join(self.devices, 'sdb', 'objects')

        os.mkdir(self.objects)
        self.parts = {}
        for part in ['0', '1', '2', '3']:
            self.parts[part] = os.path.join(self.objects, part)
            os.mkdir(os.path.join(self.objects, part))

        self.conf = dict(devices=self.devices, mount_check='false')
        self.disk_file = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o',
                                  self.logger)
예제 #44
0
    def setUp(self):
        """Create a 6-device ring and one on-disk object for a/c/o, and
        precompute where that object would live at the next partition
        power (next_part uses one more bit of the hash digest)."""
        skip_if_no_xattrs()
        self.logger = FakeLogger()
        self.testdir = tempfile.mkdtemp()
        self.devices = os.path.join(self.testdir, 'node')
        shutil.rmtree(self.testdir, ignore_errors=1)
        os.mkdir(self.testdir)
        os.mkdir(self.devices)

        self.rb = ring.RingBuilder(8, 6.0, 1)

        for i in range(6):
            ip = "127.0.0.%s" % i
            self.rb.add_dev({'id': i, 'region': 0, 'zone': 0, 'weight': 1,
                             'ip': ip, 'port': 10000, 'device': 'sda1'})
        self.rb.rebalance(seed=1)

        self.existing_device = 'sda1'
        os.mkdir(os.path.join(self.devices, self.existing_device))
        self.objects = os.path.join(self.devices, self.existing_device,
                                    'objects')
        os.mkdir(self.objects)
        # Partition = top 8 bits of the hash digest (part power 8);
        # next_part keeps 9 bits, i.e. part power + 1 -- presumably for
        # partition-power-increase (relinker) testing; confirm caller.
        self._hash = utils.hash_path('a/c/o')
        digest = binascii.unhexlify(self._hash)
        part = struct.unpack_from('>I', digest)[0] >> 24
        self.next_part = struct.unpack_from('>I', digest)[0] >> 23
        self.objdir = os.path.join(
            self.objects, str(part), self._hash[-3:], self._hash)
        os.makedirs(self.objdir)
        self.object_fname = "1278553064.00000.data"
        self.objname = os.path.join(self.objdir, self.object_fname)
        with open(self.objname, "wb") as dummy:
            dummy.write(b"Hello World!")
            write_metadata(dummy, {'name': '/a/c/o', 'Content-Length': '12'})

        test_policies = [StoragePolicy(0, 'platin', True)]
        storage_policy._POLICIES = StoragePolicyCollection(test_policies)

        # Where the same object lands once the part power is bumped.
        self.expected_dir = os.path.join(
            self.objects, str(self.next_part), self._hash[-3:], self._hash)
        self.expected_file = os.path.join(self.expected_dir, self.object_fname)
예제 #45
0
class TestObjectExpirer(TestCase):
    maxDiff = None

    def setUp(self):
        """Stub out internal_client's loadapp/sleep (saving the
        originals on self for later restoration) and create a temp
        recon-cache dir plus a fresh FakeLogger."""
        global not_sleep

        self.old_loadapp = internal_client.loadapp
        self.old_sleep = internal_client.sleep

        # No real WSGI app and no real sleeping during tests.
        internal_client.loadapp = lambda *a, **kw: None
        internal_client.sleep = not_sleep

        self.rcache = mkdtemp()
        self.logger = FakeLogger()

    def tearDown(self):
        """Remove the temp recon-cache dir and restore the
        internal_client functions monkey-patched in setUp().

        Renamed from ``teardown``: unittest only invokes the camelCase
        ``tearDown`` hook, so the lowercase spelling never ran, leaking
        the temp dir and the patches across tests.
        """
        rmtree(self.rcache)
        internal_client.sleep = self.old_sleep
        # Was ``self.loadapp`` -- an attribute that is never set; the
        # original saved in setUp() is ``self.old_loadapp``.
        internal_client.loadapp = self.old_loadapp

    # Backward-compatible alias for any caller using the old name.
    teardown = tearDown

    def test_get_process_values_from_kwargs(self):
        """Explicit kwargs supply processes/process when config is empty."""
        x = expirer.ObjectExpirer({})
        self.assertEqual(
            (5, 1), x.get_process_values({"processes": 5, "process": 1}))

    def test_get_process_values_from_config(self):
        """Values fall back to the expirer's own configuration."""
        x = expirer.ObjectExpirer({"processes": 5, "process": 1})
        self.assertEqual((5, 1), x.get_process_values({}))

    def test_get_process_values_negative_process(self):
        """A negative ``process`` is rejected from either source."""
        bad = {"processes": 5, "process": -1}
        # from config
        self.assertRaises(
            ValueError, expirer.ObjectExpirer(bad).get_process_values, {})
        # from kwargs
        self.assertRaises(
            ValueError, expirer.ObjectExpirer({}).get_process_values, bad)

    def test_get_process_values_negative_processes(self):
        """A negative ``processes`` is rejected from either source."""
        bad = {"processes": -5, "process": 1}
        # from config
        self.assertRaises(
            ValueError, expirer.ObjectExpirer(bad).get_process_values, {})
        # from kwargs
        self.assertRaises(
            ValueError, expirer.ObjectExpirer({}).get_process_values, bad)

    def test_get_process_values_process_greater_than_processes(self):
        """``process`` must be smaller than ``processes``."""
        bad = {"processes": 5, "process": 7}
        # from config
        self.assertRaises(
            ValueError, expirer.ObjectExpirer(bad).get_process_values, {})
        # from kwargs
        self.assertRaises(
            ValueError, expirer.ObjectExpirer({}).get_process_values, bad)

    def test_init_concurrency_too_small(self):
        """Constructing with concurrency < 1 raises ValueError."""
        for bad_concurrency in (0, -1):
            self.assertRaises(ValueError, expirer.ObjectExpirer,
                              {"concurrency": bad_concurrency})

    def test_process_based_concurrency(self):
        """With processes=3, each ``process`` index must handle a
        different (non-empty) slice of the work; the union over all
        three runs must cover every container/object exactly.
        NOTE(review): py2-only constructs here (``xrange``, ``u""``
        literals, ``.decode("utf8")``).
        """

        class ObjectExpirer(expirer.ObjectExpirer):
            def __init__(self, conf):
                super(ObjectExpirer, self).__init__(conf)
                self.processes = 3
                self.deleted_objects = {}

            # Record deletions instead of performing them.
            def delete_object(self, actual_obj, timestamp, container, obj):
                if container not in self.deleted_objects:
                    self.deleted_objects[container] = set()
                self.deleted_objects[container].add(obj)

        class InternalClient(object):
            def __init__(self, containers):
                self.containers = containers

            def get_account_info(self, *a, **kw):
                return len(self.containers.keys()), sum([len(self.containers[x]) for x in self.containers])

            def iter_containers(self, *a, **kw):
                return [{"name": x} for x in self.containers.keys()]

            def iter_objects(self, account, container):
                return [{"name": x} for x in self.containers[container]]

            def delete_container(*a, **kw):
                pass

        # One unicode container key/object to cover non-ASCII handling.
        ukey = u"3"
        containers = {
            0: set("1-one 2-two 3-three".split()),
            1: set("2-two 3-three 4-four".split()),
            2: set("5-five 6-six".split()),
            ukey: set(u"7-seven\u2661".split()),
        }
        x = ObjectExpirer({})
        x.swift = InternalClient(containers)

        deleted_objects = {}
        for i in xrange(3):
            x.process = i
            x.run_once()
            # Each process index must add work the previous ones missed.
            self.assertNotEqual(deleted_objects, x.deleted_objects)
            deleted_objects = deepcopy(x.deleted_objects)
        self.assertEqual(containers[ukey].pop(), deleted_objects[ukey].pop().decode("utf8"))
        self.assertEqual(containers, deleted_objects)

    def test_delete_object(self):
        """delete_object() must pop the queue entry (via pop_queue) and
        delete the actual object (via delete_actual_object)."""

        class InternalClient(object):

            container_ring = None

            def __init__(self, test, account, container, obj):
                self.test = test
                self.account = account
                self.container = container
                self.obj = obj
                self.delete_object_called = False

        class DeleteActualObject(object):
            # Stand-in for delete_actual_object; verifies its args.
            def __init__(self, test, actual_obj, timestamp):
                self.test = test
                self.actual_obj = actual_obj
                self.timestamp = timestamp
                self.called = False

            def __call__(self, actual_obj, timestamp):
                self.test.assertEqual(self.actual_obj, actual_obj)
                self.test.assertEqual(self.timestamp, timestamp)
                self.called = True

        container = "container"
        obj = "obj"
        actual_obj = "actual_obj"
        timestamp = "timestamp"

        x = expirer.ObjectExpirer({}, logger=self.logger)
        x.swift = InternalClient(self, x.expiring_objects_account, container, obj)
        x.delete_actual_object = DeleteActualObject(self, actual_obj, timestamp)

        delete_object_called = []

        # Queue-pop hook: must receive the same container/obj pair.
        def pop_queue(c, o):
            self.assertEqual(container, c)
            self.assertEqual(obj, o)
            delete_object_called[:] = [True]

        x.pop_queue = pop_queue

        x.delete_object(actual_obj, timestamp, container, obj)
        self.assertTrue(delete_object_called)
        self.assertTrue(x.delete_actual_object.called)

    def test_report(self):
        """report() logs nothing until a final or interval-elapsed
        report; 'completed' appears only on final, 'so far' only on
        interval reports."""
        x = expirer.ObjectExpirer({}, logger=self.logger)

        # No final flag and interval not elapsed: silent.
        x.report()
        self.assertEqual(x.logger.log_dict["info"], [])

        x.logger._clear()
        x.report(final=True)
        self.assertTrue("completed" in x.logger.log_dict["info"][-1][0][0], x.logger.log_dict["info"])
        self.assertTrue("so far" not in x.logger.log_dict["info"][-1][0][0], x.logger.log_dict["info"])

        # Force the report interval to have elapsed.
        x.logger._clear()
        x.report_last_time = time() - x.report_interval
        x.report()
        self.assertTrue("completed" not in x.logger.log_dict["info"][-1][0][0], x.logger.log_dict["info"])
        self.assertTrue("so far" in x.logger.log_dict["info"][-1][0][0], x.logger.log_dict["info"])

    def test_run_once_nothing_to_do(self):
        """run_once() must catch and log unexpected exceptions instead
        of letting them propagate."""
        x = expirer.ObjectExpirer({}, logger=self.logger)
        # A string client forces an AttributeError inside run_once().
        x.swift = "throw error because a string does not have needed methods"
        x.run_once()
        self.assertEqual(
            x.logger.log_dict["exception"],
            [(("Unhandled exception",), {}, "'str' object has no attribute " "'get_account_info'")],
        )

    def test_run_once_calls_report(self):
        """run_once() should emit the begin/complete report lines even
        when there are no containers to process."""

        class InternalClient(object):
            def get_account_info(*a, **kw):
                return 1, 2

            def iter_containers(*a, **kw):
                return []

        x = expirer.ObjectExpirer({}, logger=self.logger)
        x.swift = InternalClient()
        x.run_once()
        self.assertEqual(
            x.logger.log_dict["info"],
            [
                (("Pass beginning; 1 possible containers; " "2 possible objects",), {}),
                (("Pass completed in 0s; 0 objects expired",), {}),
            ],
        )

    def test_container_timestamp_break(self):
        """A container whose name-timestamp is in the future must not
        be processed (iter_objects never called); a past timestamp
        must be."""

        class InternalClient(object):
            def __init__(self, containers):
                self.containers = containers

            def get_account_info(*a, **kw):
                return 1, 2

            def iter_containers(self, *a, **kw):
                return self.containers

            # Sentinel: raises if the expirer descends into objects.
            def iter_objects(*a, **kw):
                raise Exception("This should not have been called")

        # Future container name: must be skipped without touching objects.
        x = expirer.ObjectExpirer({"recon_cache_path": self.rcache}, logger=self.logger)
        x.swift = InternalClient([{"name": str(int(time() + 86400))}])
        x.run_once()
        for exccall in x.logger.log_dict["exception"]:
            self.assertTrue("This should not have been called" not in exccall[0][0])
        self.assertEqual(
            x.logger.log_dict["info"],
            [
                (("Pass beginning; 1 possible containers; " "2 possible objects",), {}),
                (("Pass completed in 0s; 0 objects expired",), {}),
            ],
        )

        # Reverse test to be sure it still would blow up the way expected.
        fake_swift = InternalClient([{"name": str(int(time() - 86400))}])
        x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
        x.run_once()
        self.assertEqual(
            x.logger.log_dict["exception"],
            [(("Unhandled exception",), {}, str(Exception("This should not have been called")))],
        )

    def test_object_timestamp_break(self):
        """An object whose name-timestamp is in the future must not be
        deleted; a past one must reach delete_actual_object."""

        class InternalClient(object):
            def __init__(self, containers, objects):
                self.containers = containers
                self.objects = objects

            def get_account_info(*a, **kw):
                return 1, 2

            def iter_containers(self, *a, **kw):
                return self.containers

            def delete_container(*a, **kw):
                pass

            def iter_objects(self, *a, **kw):
                return self.objects

        # Sentinel for "delete must not happen".
        def should_not_be_called(*a, **kw):
            raise Exception("This should not have been called")

        # Past container, future object: nothing may be expired.
        fake_swift = InternalClient(
            [{"name": str(int(time() - 86400))}], [{"name": "%d-actual-obj" % int(time() + 86400)}]
        )
        x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
        x.run_once()
        for exccall in x.logger.log_dict["exception"]:
            self.assertTrue("This should not have been called" not in exccall[0][0])
        self.assertEqual(
            x.logger.log_dict["info"],
            [
                (("Pass beginning; 1 possible containers; " "2 possible objects",), {}),
                (("Pass completed in 0s; 0 objects expired",), {}),
            ],
        )

        # Reverse test to be sure it still would blow up the way expected.
        ts = int(time() - 86400)
        fake_swift = InternalClient([{"name": str(int(time() - 86400))}], [{"name": "%d-actual-obj" % ts}])
        x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
        x.delete_actual_object = should_not_be_called
        x.run_once()
        excswhiledeleting = []
        for exccall in x.logger.log_dict["exception"]:
            if exccall[0][0].startswith("Exception while deleting "):
                excswhiledeleting.append(exccall[0][0])
        self.assertEqual(
            excswhiledeleting,
            ["Exception while deleting object %d %d-actual-obj " "This should not have been called" % (ts, ts)],
        )

    def test_failed_delete_keeps_entry(self):
        """If deleting the actual object fails, the queue entry must
        NOT be popped (pop_queue must never run); if the delete
        succeeds, pop_queue IS reached (verified by its sentinel)."""

        class InternalClient(object):

            container_ring = None

            def __init__(self, containers, objects):
                self.containers = containers
                self.objects = objects

            def get_account_info(*a, **kw):
                return 1, 2

            def iter_containers(self, *a, **kw):
                return self.containers

            def delete_container(*a, **kw):
                pass

            def iter_objects(self, *a, **kw):
                return self.objects

        def deliberately_blow_up(actual_obj, timestamp):
            raise Exception("failed to delete actual object")

        # Sentinel: popping the queue entry is the failure condition.
        def should_not_get_called(container, obj):
            raise Exception("This should not have been called")

        ts = int(time() - 86400)
        fake_swift = InternalClient([{"name": str(int(time() - 86400))}], [{"name": "%d-actual-obj" % ts}])
        x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
        x.iter_containers = lambda: [str(int(time() - 86400))]
        x.delete_actual_object = deliberately_blow_up
        x.pop_queue = should_not_get_called
        x.run_once()
        excswhiledeleting = []
        for exccall in x.logger.log_dict["exception"]:
            if exccall[0][0].startswith("Exception while deleting "):
                excswhiledeleting.append(exccall[0][0])
        # Only the deliberate failure is logged; the sentinel never fired.
        self.assertEqual(
            excswhiledeleting,
            ["Exception while deleting object %d %d-actual-obj " "failed to delete actual object" % (ts, ts)],
        )
        self.assertEqual(
            x.logger.log_dict["info"],
            [
                (("Pass beginning; 1 possible containers; " "2 possible objects",), {}),
                (("Pass completed in 0s; 0 objects expired",), {}),
            ],
        )

        # Reverse test to be sure it still would blow up the way expected.
        ts = int(time() - 86400)
        fake_swift = InternalClient([{"name": str(int(time() - 86400))}], [{"name": "%d-actual-obj" % ts}])
        self.logger._clear()
        x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
        x.delete_actual_object = lambda o, t: None
        x.pop_queue = should_not_get_called
        x.run_once()
        excswhiledeleting = []
        for exccall in x.logger.log_dict["exception"]:
            if exccall[0][0].startswith("Exception while deleting "):
                excswhiledeleting.append(exccall[0][0])
        self.assertEqual(
            excswhiledeleting,
            ["Exception while deleting object %d %d-actual-obj This should " "not have been called" % (ts, ts)],
        )

    def test_success_gets_counted(self):
        """A successful expiration must increment report_objects and be
        reflected in the final report line."""

        class InternalClient(object):

            container_ring = None

            def __init__(self, containers, objects):
                self.containers = containers
                self.objects = objects

            def get_account_info(*a, **kw):
                return 1, 2

            def iter_containers(self, *a, **kw):
                return self.containers

            def delete_container(*a, **kw):
                pass

            def delete_object(*a, **kw):
                pass

            def iter_objects(self, *a, **kw):
                return self.objects

        # One past container with one past object: exactly one expiry.
        fake_swift = InternalClient(
            [{"name": str(int(time() - 86400))}], [{"name": "%d-actual-obj" % int(time() - 86400)}]
        )
        x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
        x.delete_actual_object = lambda o, t: None
        x.pop_queue = lambda c, o: None
        self.assertEqual(x.report_objects, 0)
        x.run_once()
        self.assertEqual(x.report_objects, 1)
        self.assertEqual(
            x.logger.log_dict["info"],
            [
                (("Pass beginning; 1 possible containers; " "2 possible objects",), {}),
                (("Pass completed in 0s; 1 objects expired",), {}),
            ],
        )

    def test_delete_actual_object_does_not_get_unicode(self):
        """Even when the queue entry name is a unicode string, the
        actual-object name handed to delete_actual_object must not be
        unicode. NOTE(review): ``unicode`` builtin is py2-only."""

        class InternalClient(object):

            container_ring = None

            def __init__(self, containers, objects):
                self.containers = containers
                self.objects = objects

            def get_account_info(*a, **kw):
                return 1, 2

            def iter_containers(self, *a, **kw):
                return self.containers

            def delete_container(*a, **kw):
                pass

            def delete_object(*a, **kw):
                pass

            def iter_objects(self, *a, **kw):
                return self.objects

        got_unicode = [False]

        # Probe: record whether a unicode object name leaks through.
        def delete_actual_object_test_for_unicode(actual_obj, timestamp):
            if isinstance(actual_obj, unicode):
                got_unicode[0] = True

        fake_swift = InternalClient(
            [{"name": str(int(time() - 86400))}], [{"name": u"%d-actual-obj" % int(time() - 86400)}]
        )
        x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
        x.delete_actual_object = delete_actual_object_test_for_unicode
        x.pop_queue = lambda c, o: None
        self.assertEqual(x.report_objects, 0)
        x.run_once()
        self.assertEqual(x.report_objects, 1)
        self.assertEqual(
            x.logger.log_dict["info"],
            [
                (("Pass beginning; 1 possible containers; " "2 possible objects",), {}),
                (("Pass completed in 0s; 1 objects expired",), {}),
            ],
        )
        self.assertFalse(got_unicode[0])

    def test_failed_delete_continues_on(self):
        """Failures deleting one object or container must not abort the
        pass: every container/object combination is still attempted and
        each failure is logged individually."""

        class InternalClient(object):

            container_ring = None

            def __init__(self, containers, objects):
                self.containers = containers
                self.objects = objects

            def get_account_info(*a, **kw):
                return 1, 2

            def iter_containers(self, *a, **kw):
                return self.containers

            # Container deletion always fails in this scenario.
            def delete_container(*a, **kw):
                raise Exception("failed to delete container")

            def delete_object(*a, **kw):
                pass

            def iter_objects(self, *a, **kw):
                return self.objects

        # Actual-object deletion always fails too.
        def fail_delete_actual_object(actual_obj, timestamp):
            raise Exception("failed to delete actual object")

        x = expirer.ObjectExpirer({}, logger=self.logger)

        cts = int(time() - 86400)
        ots = int(time() - 86400)

        containers = [{"name": str(cts)}, {"name": str(cts + 1)}]

        objects = [{"name": "%d-actual-obj" % ots}, {"name": "%d-next-obj" % ots}]

        x.swift = InternalClient(containers, objects)
        x.delete_actual_object = fail_delete_actual_object
        x.run_once()
        excswhiledeleting = []
        for exccall in x.logger.log_dict["exception"]:
            if exccall[0][0].startswith("Exception while deleting "):
                excswhiledeleting.append(exccall[0][0])
        # 2 containers x 2 objects object failures + 2 container failures.
        self.assertEqual(
            sorted(excswhiledeleting),
            sorted(
                [
                    "Exception while deleting object %d %d-actual-obj failed to " "delete actual object" % (cts, ots),
                    "Exception while deleting object %d %d-next-obj failed to " "delete actual object" % (cts, ots),
                    "Exception while deleting object %d %d-actual-obj failed to "
                    "delete actual object" % (cts + 1, ots),
                    "Exception while deleting object %d %d-next-obj failed to " "delete actual object" % (cts + 1, ots),
                    "Exception while deleting container %d failed to delete " "container" % (cts,),
                    "Exception while deleting container %d failed to delete " "container" % (cts + 1,),
                ]
            ),
        )
        self.assertEqual(
            x.logger.log_dict["info"],
            [
                (("Pass beginning; 1 possible containers; " "2 possible objects",), {}),
                (("Pass completed in 0s; 0 objects expired",), {}),
            ],
        )

    def test_run_forever_initial_sleep_random(self):
        """run_forever() must sleep a randomized fraction of the
        configured interval before the first pass (not_random pins the
        factor so the sleep is exactly 0.5 * interval)."""
        global last_not_sleep

        def raise_system_exit():
            raise SystemExit("test_run_forever")

        interval = 1234
        x = expirer.ObjectExpirer({"__file__": "unit_test", "interval": interval})
        orig_random = expirer.random
        orig_sleep = expirer.sleep
        try:
            expirer.random = not_random
            expirer.sleep = not_sleep
            x.run_once = raise_system_exit
            x.run_forever()
        except SystemExit as err:
            pass
        finally:
            # Always restore the patched module functions.
            expirer.random = orig_random
            expirer.sleep = orig_sleep
        # NOTE(review): reading ``err`` after the except block is
        # py2-only; Python 3 unbinds the ``as`` name at block exit.
        self.assertEqual(str(err), "test_run_forever")
        self.assertEqual(last_not_sleep, 0.5 * interval)

    def test_run_forever_catches_usual_exceptions(self):
        """run_forever() must log ordinary exceptions from run_once()
        and keep looping, while letting SystemExit propagate."""
        raises = [0]

        # First call raises a plain Exception (must be caught/logged);
        # second raises SystemExit (must escape the loop).
        def raise_exceptions():
            raises[0] += 1
            if raises[0] < 2:
                raise Exception("exception %d" % raises[0])
            raise SystemExit("exiting exception %d" % raises[0])

        x = expirer.ObjectExpirer({}, logger=self.logger)
        orig_sleep = expirer.sleep
        try:
            expirer.sleep = not_sleep
            x.run_once = raise_exceptions
            x.run_forever()
        except SystemExit as err:
            pass
        finally:
            expirer.sleep = orig_sleep
        # NOTE(review): reading ``err`` after the except block is
        # py2-only; Python 3 unbinds the ``as`` name at block exit.
        self.assertEqual(str(err), "exiting exception 2")
        self.assertEqual(x.logger.log_dict["exception"], [(("Unhandled exception",), {}, "exception 1")])

    def test_delete_actual_object(self):
        """delete_actual_object() must issue a request carrying the
        X-If-Delete-At header with the queue timestamp."""
        got_env = [None]

        # Fake WSGI app capturing the request environ; 204 = success.
        def fake_app(env, start_response):
            got_env[0] = env
            start_response("204 No Content", [("Content-Length", "0")])
            return []

        internal_client.loadapp = lambda *a, **kw: fake_app

        x = expirer.ObjectExpirer({})
        ts = "1234"
        x.delete_actual_object("/path/to/object", ts)
        self.assertEqual(got_env[0]["HTTP_X_IF_DELETE_AT"], ts)

    def test_delete_actual_object_nourlquoting(self):
        # delete_actual_object should not do its own url quoting because
        # internal client's make_request handles that.
        got_env = [None]

        # Fake WSGI app capturing the request environ; 204 = success.
        def fake_app(env, start_response):
            got_env[0] = env
            start_response("204 No Content", [("Content-Length", "0")])
            return []

        internal_client.loadapp = lambda *a, **kw: fake_app

        x = expirer.ObjectExpirer({})
        ts = "1234"
        x.delete_actual_object("/path/to/object name", ts)
        self.assertEqual(got_env[0]["HTTP_X_IF_DELETE_AT"], ts)
        # The space survives unquoted in PATH_INFO.
        self.assertEqual(got_env[0]["PATH_INFO"], "/v1/path/to/object name")

    def test_delete_actual_object_raises_404(self):
        """A 404 from the backend must raise UnexpectedResponse."""

        def fake_app(env, start_response):
            start_response("404 Not Found", [("Content-Length", "0")])
            return []

        internal_client.loadapp = lambda *a, **kw: fake_app

        x = expirer.ObjectExpirer({})
        self.assertRaises(internal_client.UnexpectedResponse, x.delete_actual_object, "/path/to/object", "1234")

    def test_delete_actual_object_handles_412(self):
        """A 412 (precondition failed, e.g. delete-at changed) must be
        tolerated silently -- no exception expected."""

        def fake_app(env, start_response):
            start_response("412 Precondition Failed", [("Content-Length", "0")])
            return []

        internal_client.loadapp = lambda *a, **kw: fake_app

        x = expirer.ObjectExpirer({})
        x.delete_actual_object("/path/to/object", "1234")

    def test_delete_actual_object_does_not_handle_odd_stuff(self):
        """Unexpected backend statuses (here 503) must propagate as an
        exception carrying the response, not be swallowed."""

        def fake_app(env, start_response):
            start_response("503 Internal Server Error", [("Content-Length", "0")])
            return []

        internal_client.loadapp = lambda *a, **kw: fake_app

        x = expirer.ObjectExpirer({})
        exc = None
        try:
            x.delete_actual_object("/path/to/object", "1234")
        except Exception as err:
            exc = err
        # Fail with a clear message if nothing was raised, instead of
        # an AttributeError on None; the old ``finally: pass`` was
        # dead code and has been dropped.
        self.assertIsNotNone(exc)
        self.assertEqual(503, exc.resp.status_int)

    def test_delete_actual_object_quotes(self):
        """The object name must be url-quoted when building the
        /v1/... request path."""
        name = "this name should get quoted"
        timestamp = "1366063156.863045"
        x = expirer.ObjectExpirer({})
        x.swift.make_request = mock.MagicMock()
        x.delete_actual_object(name, timestamp)
        # assert_called_once() is silently a no-op on mock versions
        # that predate it (any assert_* attribute is auto-created);
        # count the calls explicitly so the check really asserts.
        self.assertEqual(1, x.swift.make_request.call_count)
        self.assertEqual(x.swift.make_request.call_args[0][1], "/v1/" + urllib.quote(name))

    def test_pop_queue(self):
        """pop_queue() must issue DELETEs for the queue entry against
        the .expiring_objects account on the container ring nodes.
        NOTE(review): ``code_iter.next`` is py2 iterator protocol."""

        class InternalClient(object):
            container_ring = FakeRing()

        x = expirer.ObjectExpirer({}, logger=self.logger, swift=InternalClient())
        requests = []

        # Capture every (method, path) the expirer sends.
        def capture_requests(ipaddr, port, method, path, *args, **kwargs):
            requests.append((method, path))

        with mocked_http_conn(200, 200, 200, give_connect=capture_requests) as fake_conn:
            x.pop_queue("c", "o")
            # All three canned responses must have been consumed.
            self.assertRaises(StopIteration, fake_conn.code_iter.next)
        for method, path in requests:
            self.assertEqual(method, "DELETE")
            device, part, account, container, obj = utils.split_path(path, 5, 5, True)
            self.assertEqual(account, ".expiring_objects")
            self.assertEqual(container, "c")
            self.assertEqual(obj, "o")
예제 #46
0
class TestCryptoWsgiContext(unittest.TestCase):
    """Tests for CryptoWSGIContext key retrieval.

    Each test supplies a keys callback through the CRYPTO_KEY_CALLBACK
    env entry and verifies either the returned keys dict, or the 500
    error raised and logged when the callback is missing, broken, or
    returns bad/missing keys.
    """

    def setUp(self):
        # Minimal stand-in for the crypto middleware filter that owns
        # the context; only its .app and .crypto attributes are read.
        class FakeFilter(object):
            app = None
            crypto = Crypto({})

        self.fake_logger = FakeLogger()
        self.crypto_context = CryptoWSGIContext(
            FakeFilter(), 'object', self.fake_logger)

    def test_get_keys(self):
        """Callback's dict is returned when the required keys are present."""
        # ok
        env = {CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        keys = self.crypto_context.get_keys(env)
        self.assertDictEqual(fetch_crypto_keys(), keys)

        # only default required keys are checked
        subset_keys = {'object': fetch_crypto_keys()['object']}
        env = {CRYPTO_KEY_CALLBACK: lambda *args, **kwargs: subset_keys}
        keys = self.crypto_context.get_keys(env)
        self.assertDictEqual(subset_keys, keys)

        # only specified required keys are checked
        subset_keys = {'container': fetch_crypto_keys()['container']}
        env = {CRYPTO_KEY_CALLBACK: lambda *args, **kwargs: subset_keys}
        keys = self.crypto_context.get_keys(env, required=['container'])
        self.assertDictEqual(subset_keys, keys)

        subset_keys = {'object': fetch_crypto_keys()['object'],
                       'container': fetch_crypto_keys()['container']}
        env = {CRYPTO_KEY_CALLBACK: lambda *args, **kwargs: subset_keys}
        keys = self.crypto_context.get_keys(
            env, required=['object', 'container'])
        self.assertDictEqual(subset_keys, keys)

    def test_get_keys_with_crypto_meta(self):
        """The key_id argument is forwarded verbatim to the callback."""
        # verify that key_id from crypto_meta is passed to fetch_crypto_keys
        keys = fetch_crypto_keys()
        mock_fetch_crypto_keys = mock.MagicMock(return_value=keys)
        env = {CRYPTO_KEY_CALLBACK: mock_fetch_crypto_keys}
        key_id = {'secret_id': '123'}
        keys = self.crypto_context.get_keys(env, key_id=key_id)
        self.assertDictEqual(fetch_crypto_keys(), keys)
        mock_fetch_crypto_keys.assert_called_with(key_id={'secret_id': '123'})

        # but it's ok for there to be no crypto_meta
        keys = self.crypto_context.get_keys(env, key_id={})
        self.assertDictEqual(fetch_crypto_keys(), keys)
        mock_fetch_crypto_keys.assert_called_with(key_id={})
        keys = self.crypto_context.get_keys(env)
        self.assertDictEqual(fetch_crypto_keys(), keys)
        mock_fetch_crypto_keys.assert_called_with(key_id=None)

    def test_get_keys_missing_callback(self):
        """A missing callback is logged and mapped to a 500 response."""
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys({})
        self.assertIn('500 Internal Error', cm.exception.status)
        self.assertIn('missing callback',
                      self.fake_logger.get_lines_for_level('error')[0])
        self.assertIn(b'Unable to retrieve encryption keys.',
                      cm.exception.body)

    def test_get_keys_callback_exception(self):
        """An exception inside the callback is logged and mapped to 500."""
        def callback(*args, **kwargs):
            raise Exception('boom')
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys({CRYPTO_KEY_CALLBACK: callback})
        self.assertIn('500 Internal Error', cm.exception.status)
        self.assertIn('from callback: boom',
                      self.fake_logger.get_lines_for_level('error')[0])
        self.assertIn(b'Unable to retrieve encryption keys.',
                      cm.exception.body)

    def test_get_keys_missing_key_for_default_required_list(self):
        """Missing 'object' key with the default required list -> 500."""
        bad_keys = dict(fetch_crypto_keys())
        bad_keys.pop('object')
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys(
                {CRYPTO_KEY_CALLBACK: lambda *args, **kwargs: bad_keys})
        self.assertIn('500 Internal Error', cm.exception.status)
        self.assertIn("Missing key for 'object'",
                      self.fake_logger.get_lines_for_level('error')[0])
        self.assertIn(b'Unable to retrieve encryption keys.',
                      cm.exception.body)

    def test_get_keys_missing_object_key_for_specified_required_list(self):
        """Missing 'object' key from an explicit required list -> 500."""
        bad_keys = dict(fetch_crypto_keys())
        bad_keys.pop('object')
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys(
                {CRYPTO_KEY_CALLBACK: lambda *args, **kwargs: bad_keys},
                required=['object', 'container'])
        self.assertIn('500 Internal Error', cm.exception.status)
        self.assertIn("Missing key for 'object'",
                      self.fake_logger.get_lines_for_level('error')[0])
        self.assertIn(b'Unable to retrieve encryption keys.',
                      cm.exception.body)

    def test_get_keys_missing_container_key_for_specified_required_list(self):
        """Missing 'container' key from an explicit required list -> 500."""
        bad_keys = dict(fetch_crypto_keys())
        bad_keys.pop('container')
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys(
                {CRYPTO_KEY_CALLBACK: lambda *args, **kwargs: bad_keys},
                required=['object', 'container'])
        self.assertIn('500 Internal Error', cm.exception.status)
        self.assertIn("Missing key for 'container'",
                      self.fake_logger.get_lines_for_level('error')[0])
        self.assertIn(b'Unable to retrieve encryption keys.',
                      cm.exception.body)

    def test_bad_object_key_for_default_required_list(self):
        """An invalid 'object' key value -> 500."""
        bad_keys = dict(fetch_crypto_keys())
        bad_keys['object'] = b'the minor key'
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys(
                {CRYPTO_KEY_CALLBACK: lambda *args, **kwargs: bad_keys})
        self.assertIn('500 Internal Error', cm.exception.status)
        self.assertIn("Bad key for 'object'",
                      self.fake_logger.get_lines_for_level('error')[0])
        self.assertIn(b'Unable to retrieve encryption keys.',
                      cm.exception.body)

    def test_bad_container_key_for_default_required_list(self):
        """An invalid 'container' key value -> 500."""
        bad_keys = dict(fetch_crypto_keys())
        bad_keys['container'] = b'the major key'
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys(
                {CRYPTO_KEY_CALLBACK: lambda *args, **kwargs: bad_keys},
                required=['object', 'container'])
        self.assertIn('500 Internal Error', cm.exception.status)
        self.assertIn("Bad key for 'container'",
                      self.fake_logger.get_lines_for_level('error')[0])
        self.assertIn(b'Unable to retrieve encryption keys.',
                      cm.exception.body)

    def test_get_keys_not_a_dict(self):
        """A callback returning something other than a dict -> 500."""
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys(
                {CRYPTO_KEY_CALLBACK:
                    lambda *args, **kwargs: ['key', 'quay', 'qui']})
        self.assertEqual('500 Internal Error', cm.exception.status)
        self.assertIn("Did not get a keys dict",
                      self.fake_logger.get_lines_for_level('error')[0])
        self.assertIn(b'Unable to retrieve encryption keys.',
                      cm.exception.body)

    def test_get_multiple_keys(self):
        """get_multiple_keys returns one keys dict per known secret id."""
        env = {CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        mutliple_keys = self.crypto_context.get_multiple_keys(env)
        self.assertEqual(
            [fetch_crypto_keys(),
             fetch_crypto_keys(key_id={'secret_id': 'myid'})],
            mutliple_keys)
예제 #47
0
class TestRelinker(unittest.TestCase):
    """Tests for the relinker's relink/cleanup phases of a partition
    power increase, using a real on-disk object tree under a tempdir.
    """

    def setUp(self):
        skip_if_no_xattrs()
        self.logger = FakeLogger()
        self.testdir = tempfile.mkdtemp()
        self.devices = os.path.join(self.testdir, 'node')
        shutil.rmtree(self.testdir, ignore_errors=1)
        os.mkdir(self.testdir)
        os.mkdir(self.devices)

        # part power 8 ring with six single-device nodes
        self.rb = ring.RingBuilder(8, 6.0, 1)

        for i in range(6):
            ip = "127.0.0.%s" % i
            self.rb.add_dev({'id': i, 'region': 0, 'zone': 0, 'weight': 1,
                             'ip': ip, 'port': 10000, 'device': 'sda1'})
        self.rb.rebalance(seed=1)

        self.existing_device = 'sda1'
        os.mkdir(os.path.join(self.devices, self.existing_device))
        self.objects = os.path.join(self.devices, self.existing_device,
                                    'objects')
        os.mkdir(self.objects)
        self._hash = utils.hash_path('a/c/o')
        digest = binascii.unhexlify(self._hash)
        # current partition = top 8 bits of the hash; next_part = top 9
        # bits, i.e. the partition after the power increase
        part = struct.unpack_from('>I', digest)[0] >> 24
        self.next_part = struct.unpack_from('>I', digest)[0] >> 23
        self.objdir = os.path.join(
            self.objects, str(part), self._hash[-3:], self._hash)
        os.makedirs(self.objdir)
        self.object_fname = "1278553064.00000.data"
        self.objname = os.path.join(self.objdir, self.object_fname)
        with open(self.objname, "wb") as dummy:
            dummy.write(b"Hello World!")
            write_metadata(dummy, {'name': '/a/c/o', 'Content-Length': '12'})

        test_policies = [StoragePolicy(0, 'platin', True)]
        storage_policy._POLICIES = StoragePolicyCollection(test_policies)

        # where the object must live after the partition power increase
        self.expected_dir = os.path.join(
            self.objects, str(self.next_part), self._hash[-3:], self._hash)
        self.expected_file = os.path.join(self.expected_dir, self.object_fname)

    def _save_ring(self):
        # Serialize the current builder state for every policy and drop
        # the cached object ring so the relinker reloads it from disk.
        rd = self.rb.get_ring()
        for policy in POLICIES:
            rd.save(os.path.join(
                self.testdir, '%s.ring.gz' % policy.ring_name))
            # Enforce ring reloading in relinker
            policy.object_ring = None

    def tearDown(self):
        shutil.rmtree(self.testdir, ignore_errors=1)
        storage_policy.reload_storage_policies()

    def test_relink(self):
        """relink() hardlinks the object into its new partition dir."""
        self.rb.prepare_increase_partition_power()
        self._save_ring()
        relinker.relink(self.testdir, self.devices, True)

        self.assertTrue(os.path.isdir(self.expected_dir))
        self.assertTrue(os.path.isfile(self.expected_file))

        # same inode proves it is a hardlink, not a copy
        stat_old = os.stat(os.path.join(self.objdir, self.object_fname))
        stat_new = os.stat(self.expected_file)
        self.assertEqual(stat_old.st_ino, stat_new.st_ino)

    def _common_test_cleanup(self, relink=True):
        # Create a ring that has prev_part_power set
        self.rb.prepare_increase_partition_power()
        self.rb.increase_partition_power()
        self._save_ring()

        os.makedirs(self.expected_dir)

        if relink:
            # Create a hardlink to the original object name. This is expected
            # after a normal relinker run
            os.link(os.path.join(self.objdir, self.object_fname),
                    self.expected_file)

    def test_cleanup(self):
        """cleanup() removes the old location once the new link exists."""
        self._common_test_cleanup()
        self.assertEqual(0, relinker.cleanup(self.testdir, self.devices, True))

        # Old objectname should be removed, new should still exist
        self.assertTrue(os.path.isdir(self.expected_dir))
        self.assertTrue(os.path.isfile(self.expected_file))
        self.assertFalse(os.path.isfile(
            os.path.join(self.objdir, self.object_fname)))

    def test_cleanup_not_yet_relinked(self):
        """cleanup() fails and keeps the old file if relink never ran."""
        self._common_test_cleanup(relink=False)
        self.assertEqual(1, relinker.cleanup(self.testdir, self.devices, True))

        self.assertTrue(os.path.isfile(
            os.path.join(self.objdir, self.object_fname)))

    def test_cleanup_deleted(self):
        """A tombstone in the new location is acceptable for cleanup()."""
        self._common_test_cleanup()

        # Pretend the object got deleted inbetween and there is a tombstone
        fname_ts = self.expected_file[:-4] + "ts"
        os.rename(self.expected_file, fname_ts)

        self.assertEqual(0, relinker.cleanup(self.testdir, self.devices, True))

    def test_cleanup_doesnotexist(self):
        """cleanup() warns and fails when the new file disappeared."""
        self._common_test_cleanup()

        # Pretend the file in the new place got deleted inbetween
        os.remove(self.expected_file)

        self.assertEqual(
            1, relinker.cleanup(self.testdir, self.devices, True, self.logger))
        self.assertEqual(self.logger.get_lines_for_level('warning'),
                         ['Error cleaning up %s: %s' % (self.objname,
                          repr(exceptions.DiskFileNotExist()))])

    def test_cleanup_non_durable_fragment(self):
        """Non-durable EC fragments are tolerated when the counterpart
        exists in the new location."""
        self._common_test_cleanup()

        # Actually all fragments are non-durable and raise and DiskFileNotExist
        # in EC in this test. However, if the counterpart exists in the new
        # location, this is ok - it will be fixed by the reconstructor later on
        storage_policy._POLICIES[0].policy_type = 'erasure_coding'

        self.assertEqual(
            0, relinker.cleanup(self.testdir, self.devices, True, self.logger))
        self.assertEqual(self.logger.get_lines_for_level('warning'), [])

    def test_cleanup_quarantined(self):
        """A corrupt object in the new location gets quarantined."""
        self._common_test_cleanup()
        # Pretend the object in the new place got corrupted
        with open(self.expected_file, "wb") as obj:
            obj.write(b'trash')

        self.assertEqual(
            1, relinker.cleanup(self.testdir, self.devices, True, self.logger))

        self.assertIn('failed audit and was quarantined',
                      self.logger.get_lines_for_level('warning')[0])
예제 #48
0
파일: test_expirer.py 프로젝트: 701/swift
class TestObjectExpirer(TestCase):
    maxDiff = None

    def setUp(self):
        """Patch internal_client's loadapp/sleep (saving the originals)
        and create a fresh recon-cache tempdir and FakeLogger."""
        global not_sleep

        self.old_loadapp = internal_client.loadapp
        self.old_sleep = internal_client.sleep

        # avoid building a real WSGI pipeline or actually sleeping
        internal_client.loadapp = lambda *a, **kw: None
        internal_client.sleep = not_sleep

        self.rcache = mkdtemp()
        self.logger = FakeLogger()

    def tearDown(self):
        """Remove the recon cache dir and undo the setUp monkeypatches.

        Fixes two defects: the method was named ``teardown`` (lower
        case), which unittest never calls, so the tempdir and patches
        leaked between tests; and it restored ``self.loadapp``, an
        attribute that does not exist (setUp saves ``self.old_loadapp``).
        """
        rmtree(self.rcache)
        internal_client.sleep = self.old_sleep
        internal_client.loadapp = self.old_loadapp

    def test_get_process_values_from_kwargs(self):
        """Values supplied via kwargs are used when the config is empty."""
        expirer_obj = expirer.ObjectExpirer({})
        kwargs = {
            'process': 1,
            'processes': 5,
        }
        self.assertEqual((5, 1), expirer_obj.get_process_values(kwargs))

    def test_get_process_values_from_config(self):
        """Values from the config are used when no kwargs are given."""
        conf = {
            'process': 1,
            'processes': 5,
        }
        expirer_obj = expirer.ObjectExpirer(conf)
        self.assertEqual((5, 1), expirer_obj.get_process_values({}))

    def test_get_process_values_negative_process(self):
        """A negative 'process' raises ValueError from either source."""
        bad_vals = {
            'process': -1,
            'processes': 5,
        }
        # from config
        self.assertRaises(
            ValueError, expirer.ObjectExpirer(bad_vals).get_process_values, {})
        # from kwargs
        self.assertRaises(
            ValueError, expirer.ObjectExpirer({}).get_process_values, bad_vals)

    def test_get_process_values_negative_processes(self):
        """A negative 'processes' raises ValueError from either source."""
        bad_vals = {
            'process': 1,
            'processes': -5,
        }
        # from config
        self.assertRaises(
            ValueError, expirer.ObjectExpirer(bad_vals).get_process_values, {})
        # from kwargs
        self.assertRaises(
            ValueError, expirer.ObjectExpirer({}).get_process_values, bad_vals)

    def test_get_process_values_process_greater_than_processes(self):
        """'process' >= 'processes' raises ValueError from either source."""
        bad_vals = {
            'process': 7,
            'processes': 5,
        }
        # from config
        self.assertRaises(
            ValueError, expirer.ObjectExpirer(bad_vals).get_process_values, {})
        # from kwargs
        self.assertRaises(
            ValueError, expirer.ObjectExpirer({}).get_process_values, bad_vals)

    def test_init_concurrency_too_small(self):
        """A concurrency of zero or less is rejected at init time."""
        for bad_concurrency in (0, -1):
            self.assertRaises(ValueError, expirer.ObjectExpirer,
                              {'concurrency': bad_concurrency})

    def test_process_based_concurrency(self):
        """With processes=3, each process number deletes a distinct,
        non-empty share of the work, and the union covers everything."""

        class ObjectExpirer(expirer.ObjectExpirer):
            # records deletions instead of performing them

            def __init__(self, conf):
                super(ObjectExpirer, self).__init__(conf)
                self.processes = 3
                self.deleted_objects = {}

            def delete_object(self, actual_obj, timestamp, container, obj):
                if container not in self.deleted_objects:
                    self.deleted_objects[container] = set()
                self.deleted_objects[container].add(obj)

        class InternalClient(object):
            # serves a canned account/container/object listing

            def __init__(self, containers):
                self.containers = containers

            def get_account_info(self, *a, **kw):
                return len(self.containers.keys()), \
                    sum([len(self.containers[x]) for x in self.containers])

            def iter_containers(self, *a, **kw):
                return [{'name': x} for x in self.containers.keys()]

            def iter_objects(self, account, container):
                return [{'name': x} for x in self.containers[container]]

            def delete_container(*a, **kw):
                pass

        containers = {
            0: set('1-one 2-two 3-three'.split()),
            1: set('2-two 3-three 4-four'.split()),
            2: set('5-five 6-six'.split()),
            3: set('7-seven'.split()),
        }
        x = ObjectExpirer({})
        x.swift = InternalClient(containers)

        deleted_objects = {}
        # NOTE(review): xrange is Python 2 only
        for i in xrange(3):
            x.process = i
            x.run_once()
            # each process must add something the previous ones did not
            self.assertNotEqual(deleted_objects, x.deleted_objects)
            deleted_objects = deepcopy(x.deleted_objects)
        self.assertEqual(containers, deleted_objects)

    def test_delete_object(self):
        """delete_object must delete the actual object and then pop the
        corresponding queue entry."""
        class InternalClient(object):
            # stub swift client; only the attributes the expirer reads

            container_ring = None

            def __init__(self, test, account, container, obj):
                self.test = test
                self.account = account
                self.container = container
                self.obj = obj
                self.delete_object_called = False

        class DeleteActualObject(object):
            # callable stub asserting the args delete_actual_object gets

            def __init__(self, test, actual_obj, timestamp):
                self.test = test
                self.actual_obj = actual_obj
                self.timestamp = timestamp
                self.called = False

            def __call__(self, actual_obj, timestamp):
                self.test.assertEqual(self.actual_obj, actual_obj)
                self.test.assertEqual(self.timestamp, timestamp)
                self.called = True

        container = 'container'
        obj = 'obj'
        actual_obj = 'actual_obj'
        timestamp = 'timestamp'

        x = expirer.ObjectExpirer({}, logger=self.logger)
        x.swift = \
            InternalClient(self, x.expiring_objects_account, container, obj)
        x.delete_actual_object = \
            DeleteActualObject(self, actual_obj, timestamp)

        delete_object_called = []

        def pop_queue(c, o):
            # must be invoked with the queue entry's container/object
            self.assertEqual(container, c)
            self.assertEqual(obj, o)
            delete_object_called[:] = [True]

        x.pop_queue = pop_queue

        x.delete_object(actual_obj, timestamp, container, obj)
        self.assertTrue(delete_object_called)
        self.assertTrue(x.delete_actual_object.called)

    def test_report(self):
        """report() stays quiet mid-pass, logs 'completed' when final,
        and 'so far' once the report interval has elapsed."""
        x = expirer.ObjectExpirer({}, logger=self.logger)

        # fresh expirer, interval not elapsed: nothing logged
        x.report()
        self.assertEqual(x.logger.log_dict['info'], [])

        x.logger._clear()
        x.report(final=True)
        self.assertTrue('completed' in x.logger.log_dict['info'][-1][0][0],
                        x.logger.log_dict['info'])
        self.assertTrue('so far' not in x.logger.log_dict['info'][-1][0][0],
                        x.logger.log_dict['info'])

        x.logger._clear()
        # pretend a full report interval has already elapsed
        x.report_last_time = time() - x.report_interval
        x.report()
        self.assertTrue('completed' not in x.logger.log_dict['info'][-1][0][0],
                        x.logger.log_dict['info'])
        self.assertTrue('so far' in x.logger.log_dict['info'][-1][0][0],
                        x.logger.log_dict['info'])

    def test_run_once_nothing_to_do(self):
        """run_once must log (not raise) when the swift client is unusable."""
        x = expirer.ObjectExpirer({}, logger=self.logger)
        # a plain string lacks every InternalClient method
        x.swift = 'throw error because a string does not have needed methods'
        x.run_once()
        self.assertEqual(
            [(("Unhandled exception",), {},
              "'str' object has no attribute "
              "'get_account_info'")],
            x.logger.log_dict['exception'])

    def test_run_once_calls_report(self):
        """A pass over an empty account still logs begin/end reports."""
        class InternalClient(object):
            def get_account_info(*a, **kw):
                return 1, 2

            def iter_containers(*a, **kw):
                return []

        x = expirer.ObjectExpirer({}, logger=self.logger)
        x.swift = InternalClient()
        x.run_once()
        expected_info = [
            (('Pass beginning; 1 possible containers; '
              '2 possible objects',), {}),
            (('Pass completed in 0s; 0 objects expired',), {}),
        ]
        self.assertEqual(expected_info, x.logger.log_dict['info'])

    def test_container_timestamp_break(self):
        """Containers whose name (a timestamp) is in the future must be
        skipped without listing their objects."""
        class InternalClient(object):
            def __init__(self, containers):
                self.containers = containers

            def get_account_info(*a, **kw):
                return 1, 2

            def iter_containers(self, *a, **kw):
                return self.containers

            def iter_objects(*a, **kw):
                # only reachable if the expirer wrongly descends into a
                # not-yet-due container
                raise Exception('This should not have been called')

        x = expirer.ObjectExpirer({'recon_cache_path': self.rcache},
                                  logger=self.logger)
        x.swift = InternalClient([{'name': str(int(time() + 86400))}])
        x.run_once()
        for exccall in x.logger.log_dict['exception']:
            self.assertTrue(
                'This should not have been called' not in exccall[0][0])
        self.assertEqual(
            x.logger.log_dict['info'],
            [(('Pass beginning; 1 possible containers; '
               '2 possible objects',), {}),
             (('Pass completed in 0s; 0 objects expired',), {})])

        # Reverse test to be sure it still would blow up the way expected.
        fake_swift = InternalClient([{'name': str(int(time() - 86400))}])
        x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
        x.run_once()
        self.assertEqual(
            x.logger.log_dict['exception'],
            [(('Unhandled exception',), {},
              str(Exception('This should not have been called')))])

    def test_object_timestamp_break(self):
        """Objects whose name timestamp is in the future must not be
        deleted; past-due objects must reach delete_actual_object."""
        class InternalClient(object):
            def __init__(self, containers, objects):
                self.containers = containers
                self.objects = objects

            def get_account_info(*a, **kw):
                return 1, 2

            def iter_containers(self, *a, **kw):
                return self.containers

            def delete_container(*a, **kw):
                pass

            def iter_objects(self, *a, **kw):
                return self.objects

        def should_not_be_called(*a, **kw):
            raise Exception('This should not have been called')

        # object not yet due: nothing may be deleted
        fake_swift = InternalClient(
            [{'name': str(int(time() - 86400))}],
            [{'name': '%d-actual-obj' % int(time() + 86400)}])
        x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
        x.run_once()
        for exccall in x.logger.log_dict['exception']:
            self.assertTrue(
                'This should not have been called' not in exccall[0][0])
        self.assertEqual(
            x.logger.log_dict['info'],
            [(('Pass beginning; 1 possible containers; '
               '2 possible objects',), {}),
             (('Pass completed in 0s; 0 objects expired',), {})])

        # Reverse test to be sure it still would blow up the way expected.
        ts = int(time() - 86400)
        fake_swift = InternalClient(
            [{'name': str(int(time() - 86400))}],
            [{'name': '%d-actual-obj' % ts}])
        x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
        x.delete_actual_object = should_not_be_called
        x.run_once()
        excswhiledeleting = []
        for exccall in x.logger.log_dict['exception']:
            if exccall[0][0].startswith('Exception while deleting '):
                excswhiledeleting.append(exccall[0][0])
        self.assertEqual(
            excswhiledeleting,
            ['Exception while deleting object %d %d-actual-obj '
             'This should not have been called' % (ts, ts)])

    def test_failed_delete_keeps_entry(self):
        """If deleting the actual object fails, the queue entry must not
        be popped (so the work is retried on a later pass)."""
        class InternalClient(object):

            container_ring = None

            def __init__(self, containers, objects):
                self.containers = containers
                self.objects = objects

            def get_account_info(*a, **kw):
                return 1, 2

            def iter_containers(self, *a, **kw):
                return self.containers

            def delete_container(*a, **kw):
                pass

            def iter_objects(self, *a, **kw):
                return self.objects

        def deliberately_blow_up(actual_obj, timestamp):
            raise Exception('failed to delete actual object')

        def should_not_get_called(container, obj):
            # popping the queue entry after a failed delete is a bug
            raise Exception('This should not have been called')

        ts = int(time() - 86400)
        fake_swift = InternalClient(
            [{'name': str(int(time() - 86400))}],
            [{'name': '%d-actual-obj' % ts}])
        x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
        x.iter_containers = lambda: [str(int(time() - 86400))]
        x.delete_actual_object = deliberately_blow_up
        x.pop_queue = should_not_get_called
        x.run_once()
        excswhiledeleting = []
        for exccall in x.logger.log_dict['exception']:
            if exccall[0][0].startswith('Exception while deleting '):
                excswhiledeleting.append(exccall[0][0])
        self.assertEqual(
            excswhiledeleting,
            ['Exception while deleting object %d %d-actual-obj '
             'failed to delete actual object' % (ts, ts)])
        self.assertEqual(
            x.logger.log_dict['info'],
            [(('Pass beginning; 1 possible containers; '
               '2 possible objects',), {}),
             (('Pass completed in 0s; 0 objects expired',), {})])

        # Reverse test to be sure it still would blow up the way expected.
        ts = int(time() - 86400)
        fake_swift = InternalClient(
            [{'name': str(int(time() - 86400))}],
            [{'name': '%d-actual-obj' % ts}])
        self.logger._clear()
        x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
        x.delete_actual_object = lambda o, t: None
        x.pop_queue = should_not_get_called
        x.run_once()
        excswhiledeleting = []
        for exccall in x.logger.log_dict['exception']:
            if exccall[0][0].startswith('Exception while deleting '):
                excswhiledeleting.append(exccall[0][0])
        self.assertEqual(
            excswhiledeleting,
            ['Exception while deleting object %d %d-actual-obj This should '
             'not have been called' % (ts, ts)])

    def test_success_gets_counted(self):
        """A successful delete must increment report_objects and show up
        in the completed-pass log line."""
        class InternalClient(object):

            container_ring = None

            def __init__(self, containers, objects):
                self.containers = containers
                self.objects = objects

            def get_account_info(*a, **kw):
                return 1, 2

            def iter_containers(self, *a, **kw):
                return self.containers

            def delete_container(*a, **kw):
                pass

            def delete_object(*a, **kw):
                pass

            def iter_objects(self, *a, **kw):
                return self.objects

        fake_swift = InternalClient(
            [{'name': str(int(time() - 86400))}],
            [{'name': '%d-actual-obj' % int(time() - 86400)}])
        x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
        x.delete_actual_object = lambda o, t: None
        x.pop_queue = lambda c, o: None
        self.assertEqual(x.report_objects, 0)
        x.run_once()
        self.assertEqual(x.report_objects, 1)
        self.assertEqual(
            x.logger.log_dict['info'],
            [(('Pass beginning; 1 possible containers; '
               '2 possible objects',), {}),
             (('Pass completed in 0s; 1 objects expired',), {})])

    def test_delete_actual_object_does_not_get_unicode(self):
        """Even when the queue lists a unicode object name, the name
        passed to delete_actual_object must not be unicode."""
        class InternalClient(object):

            container_ring = None

            def __init__(self, containers, objects):
                self.containers = containers
                self.objects = objects

            def get_account_info(*a, **kw):
                return 1, 2

            def iter_containers(self, *a, **kw):
                return self.containers

            def delete_container(*a, **kw):
                pass

            def delete_object(*a, **kw):
                pass

            def iter_objects(self, *a, **kw):
                return self.objects

        got_unicode = [False]

        def delete_actual_object_test_for_unicode(actual_obj, timestamp):
            # NOTE(review): the `unicode` builtin is Python 2 only
            if isinstance(actual_obj, unicode):
                got_unicode[0] = True

        fake_swift = InternalClient(
            [{'name': str(int(time() - 86400))}],
            [{'name': u'%d-actual-obj' % int(time() - 86400)}])
        x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
        x.delete_actual_object = delete_actual_object_test_for_unicode
        x.pop_queue = lambda c, o: None
        self.assertEqual(x.report_objects, 0)
        x.run_once()
        self.assertEqual(x.report_objects, 1)
        self.assertEqual(
            x.logger.log_dict['info'],
            [(('Pass beginning; 1 possible containers; '
               '2 possible objects',), {}),
             (('Pass completed in 0s; 1 objects expired',), {})])
        self.assertFalse(got_unicode[0])

    def test_failed_delete_continues_on(self):
        """Failures deleting one object or container must not stop the
        pass; every container/object pair is still attempted."""
        class InternalClient(object):

            container_ring = None

            def __init__(self, containers, objects):
                self.containers = containers
                self.objects = objects

            def get_account_info(*a, **kw):
                return 1, 2

            def iter_containers(self, *a, **kw):
                return self.containers

            def delete_container(*a, **kw):
                raise Exception('failed to delete container')

            def delete_object(*a, **kw):
                pass

            def iter_objects(self, *a, **kw):
                return self.objects

        def fail_delete_actual_object(actual_obj, timestamp):
            raise Exception('failed to delete actual object')

        x = expirer.ObjectExpirer({}, logger=self.logger)

        cts = int(time() - 86400)
        ots = int(time() - 86400)

        containers = [
            {'name': str(cts)},
            {'name': str(cts + 1)},
        ]

        objects = [
            {'name': '%d-actual-obj' % ots},
            {'name': '%d-next-obj' % ots}
        ]

        x.swift = InternalClient(containers, objects)
        x.delete_actual_object = fail_delete_actual_object
        x.run_once()
        excswhiledeleting = []
        for exccall in x.logger.log_dict['exception']:
            if exccall[0][0].startswith('Exception while deleting '):
                excswhiledeleting.append(exccall[0][0])
        # 2 containers x 2 objects plus both container deletions failed,
        # yet all six attempts were made
        self.assertEqual(sorted(excswhiledeleting), sorted([
            'Exception while deleting object %d %d-actual-obj failed to '
            'delete actual object' % (cts, ots),
            'Exception while deleting object %d %d-next-obj failed to '
            'delete actual object' % (cts, ots),
            'Exception while deleting object %d %d-actual-obj failed to '
            'delete actual object' % (cts + 1, ots),
            'Exception while deleting object %d %d-next-obj failed to '
            'delete actual object' % (cts + 1, ots),
            'Exception while deleting container %d failed to delete '
            'container' % (cts,),
            'Exception while deleting container %d failed to delete '
            'container' % (cts + 1,)]))
        self.assertEqual(
            x.logger.log_dict['info'],
            [(('Pass beginning; 1 possible containers; '
               '2 possible objects',), {}),
             (('Pass completed in 0s; 0 objects expired',), {})])

    def test_run_forever_initial_sleep_random(self):
        # run_forever() sleeps some random fraction of the configured
        # interval before its first pass; with random() stubbed to a fixed
        # value we can assert the exact length of that initial sleep.
        global last_not_sleep

        def explode():
            raise SystemExit('test_run_forever')

        interval = 1234
        x = expirer.ObjectExpirer({'__file__': 'unit_test',
                                   'interval': interval})
        saved_random, saved_sleep = expirer.random, expirer.sleep
        try:
            expirer.random, expirer.sleep = not_random, not_sleep
            x.run_once = explode
            x.run_forever()
        except SystemExit as err:
            pass
        finally:
            expirer.random, expirer.sleep = saved_random, saved_sleep
        self.assertEqual('test_run_forever', str(err))
        self.assertEqual(0.5 * interval, last_not_sleep)

    def test_run_forever_catches_usual_exceptions(self):
        # run_forever() must log and swallow ordinary exceptions raised by
        # run_once(), while still letting SystemExit propagate.
        call_count = [0]

        def flaky_run_once():
            call_count[0] += 1
            if call_count[0] < 2:
                raise Exception('exception %d' % call_count[0])
            raise SystemExit('exiting exception %d' % call_count[0])

        x = expirer.ObjectExpirer({}, logger=self.logger)
        saved_sleep = expirer.sleep
        try:
            expirer.sleep = not_sleep
            x.run_once = flaky_run_once
            x.run_forever()
        except SystemExit as err:
            pass
        finally:
            expirer.sleep = saved_sleep
        self.assertEqual('exiting exception 2', str(err))
        self.assertEqual([(('Unhandled exception',), {}, 'exception 1')],
                         x.logger.log_dict['exception'])

    def test_delete_actual_object(self):
        # delete_actual_object() should forward the timestamp as the
        # X-If-Delete-At header on the request it issues.
        captured_env = [None]

        def fake_app(env, start_response):
            captured_env[0] = env
            start_response('204 No Content', [('Content-Length', '0')])
            return []

        internal_client.loadapp = lambda *a, **kw: fake_app

        x = expirer.ObjectExpirer({})
        ts = '1234'
        x.delete_actual_object('/path/to/object', ts)
        self.assertEqual(ts, captured_env[0]['HTTP_X_IF_DELETE_AT'])

    def test_delete_actual_object_nourlquoting(self):
        # delete_actual_object should not do its own url quoting because
        # internal client's make_request handles that.
        captured_env = [None]

        def fake_app(env, start_response):
            captured_env[0] = env
            start_response('204 No Content', [('Content-Length', '0')])
            return []

        internal_client.loadapp = lambda *a, **kw: fake_app

        x = expirer.ObjectExpirer({})
        ts = '1234'
        x.delete_actual_object('/path/to/object name', ts)
        self.assertEqual(ts, captured_env[0]['HTTP_X_IF_DELETE_AT'])
        self.assertEqual('/v1/path/to/object name',
                         captured_env[0]['PATH_INFO'])

    def test_delete_actual_object_raises_404(self):
        # A 404 from the backend must surface as UnexpectedResponse
        # rather than being swallowed.
        def fake_app(env, start_response):
            start_response('404 Not Found', [('Content-Length', '0')])
            return []

        internal_client.loadapp = lambda *a, **kw: fake_app
        x = expirer.ObjectExpirer({})
        self.assertRaises(internal_client.UnexpectedResponse,
                          x.delete_actual_object, '/path/to/object', '1234')

    def test_delete_actual_object_handles_412(self):
        # A 412 Precondition Failed response is tolerated: no exception
        # should escape delete_actual_object().
        def fake_app(env, start_response):
            headers = [('Content-Length', '0')]
            start_response('412 Precondition Failed', headers)
            return []

        internal_client.loadapp = lambda *a, **kw: fake_app
        x = expirer.ObjectExpirer({})
        x.delete_actual_object('/path/to/object', '1234')

    def test_delete_actual_object_does_not_handle_odd_stuff(self):
        """
        An unexpected backend response (here a 503) must propagate out of
        delete_actual_object() so callers can log and retry later.
        """

        def fake_app(env, start_response):
            start_response(
                '503 Internal Server Error',
                [('Content-Length', '0')])
            return []

        internal_client.loadapp = lambda *a, **kw: fake_app

        x = expirer.ObjectExpirer({})
        exc = None
        try:
            x.delete_actual_object('/path/to/object', '1234')
        except Exception as err:
            exc = err
        # Fail with a clear message if nothing was raised, instead of
        # tripping a confusing AttributeError on exc.resp below.  (The
        # original also carried a pointless ``finally: pass``.)
        self.assertIsNotNone(exc, 'expected delete_actual_object to raise')
        self.assertEqual(503, exc.resp.status_int)

    def test_delete_actual_object_quotes(self):
        """
        The object name must be url-quoted in the path passed to
        internal client's make_request.
        """
        name = 'this name should get quoted'
        timestamp = '1366063156.863045'
        x = expirer.ObjectExpirer({})
        x.swift.make_request = mock.MagicMock()
        x.delete_actual_object(name, timestamp)
        # NOTE: on mock versions before 3.0, assert_called_once() is not a
        # real assertion -- attribute access just creates a child mock and
        # the call always passes silently.  Check call_count explicitly.
        self.assertEqual(1, x.swift.make_request.call_count)
        self.assertEqual(x.swift.make_request.call_args[0][1],
                         '/v1/' + urllib.quote(name))

    def test_pop_queue(self):
        # pop_queue() should issue a DELETE for the queue entry against
        # every replica of the hidden .expiring_objects account.
        class InternalClient(object):
            container_ring = FakeRing()

        x = expirer.ObjectExpirer({}, logger=self.logger,
                                  swift=InternalClient())
        seen = []

        def record(ipaddr, port, method, path, *args, **kwargs):
            seen.append((method, path))

        with mocked_http_conn(
                200, 200, 200, give_connect=record) as fake_conn:
            x.pop_queue('c', 'o')
            # all three canned responses must have been consumed
            self.assertRaises(StopIteration, fake_conn.code_iter.next)
        for method, path in seen:
            self.assertEqual('DELETE', method)
            device, part, account, container, obj = utils.split_path(
                path, 5, 5, True)
            self.assertEqual('.expiring_objects', account)
            self.assertEqual('c', container)
            self.assertEqual('o', obj)
# ---- Example #49 ----
    def test_sweep_logs(self):
        # object_sweep() should emit a "sweep starting" line, periodic
        # "sweep progress" lines, and a final "sweep complete" summary.
        asyncdir = os.path.join(self.sda1, ASYNCDIR_BASE)
        prefix_dir = os.path.join(asyncdir, 'abc')
        mkpath(prefix_dir)

        for obj_name, ts in [('abc', 123), ('def', 234), ('ghi', 345),
                             ('jkl', 456), ('mno', 567)]:
            ohash = hash_path('account', 'container', obj_name)
            fname = ohash + '-' + normalize_timestamp(ts)
            write_pickle({}, os.path.join(prefix_dir, fname))

        class MockObjectUpdater(object_updater.ObjectUpdater):
            def process_object_update(self, update_path, device, policy):
                # pretend every update succeeds and gets cleaned up
                os.unlink(update_path)
                self.stats.successes += 1
                self.stats.unlinks += 1

        logger = FakeLogger()
        ou = MockObjectUpdater({
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
            'interval': '1',
            'concurrency': '1',
            'report_interval': '10.0',
            'node_timeout': '5'}, logger=logger)

        fake_clock = [time()]

        def mock_time_function():
            rv = fake_clock[0]
            fake_clock[0] += 5
            return rv

        # With 10s between updates, time() advancing 5s every time we look,
        # and 5 async_pendings on disk, we should get at least two progress
        # lines.
        with mock.patch('swift.obj.updater.time',
                        mock.MagicMock(time=mock_time_function)), \
                mock.patch.object(object_updater, 'ContextPool', MockPool):
            ou.object_sweep(self.sda1)

        info_lines = logger.get_lines_for_level('info')
        self.assertEqual(4, len(info_lines))

        self.assertIn("sweep starting", info_lines[0])
        self.assertIn(self.sda1, info_lines[0])

        # expected (successes, unlinks) counts for each subsequent line
        expected = [(1, "sweep progress", 2),
                    (2, "sweep progress", 4),
                    (3, "sweep complete", 5)]
        for idx, label, count in expected:
            self.assertIn(label, info_lines[idx])
            self.assertIn(
                "%d successes, 0 failures, 0 quarantines, %d unlinks, "
                "0 errors, 0 redirects" % (count, count),
                info_lines[idx])
            self.assertIn(self.sda1, info_lines[idx])
class TestCryptoWsgiContext(unittest.TestCase):
    def setUp(self):
        class FakeFilter(object):
            # just enough of the crypto filter for CryptoWSGIContext
            app = None
            crypto = Crypto({})

        self.fake_logger = FakeLogger()
        fake_filter = FakeFilter()
        self.crypto_context = CryptoWSGIContext(
            fake_filter, 'object', self.fake_logger)

    def test_get_keys(self):
        # happy path: callback supplies the full keys dict
        env = {CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        self.assertDictEqual(fetch_crypto_keys(),
                             self.crypto_context.get_keys(env))

        # only default required keys are checked
        subset_keys = {'object': fetch_crypto_keys()['object']}
        env = {CRYPTO_KEY_CALLBACK: lambda: subset_keys}
        self.assertDictEqual(subset_keys,
                             self.crypto_context.get_keys(env))

        # only specified required keys are checked
        subset_keys = {'container': fetch_crypto_keys()['container']}
        env = {CRYPTO_KEY_CALLBACK: lambda: subset_keys}
        self.assertDictEqual(
            subset_keys,
            self.crypto_context.get_keys(env, required=['container']))

        # multiple keys may be required at once
        subset_keys = {'object': fetch_crypto_keys()['object'],
                       'container': fetch_crypto_keys()['container']}
        env = {CRYPTO_KEY_CALLBACK: lambda: subset_keys}
        self.assertDictEqual(
            subset_keys,
            self.crypto_context.get_keys(
                env, required=['object', 'container']))

    def test_get_keys_missing_callback(self):
        # no callback in the WSGI environ -> 500 with a generic body;
        # the detail goes to the error log only
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys({})
        err = cm.exception
        self.assertIn('500 Internal Error', err.message)
        error_lines = self.fake_logger.get_lines_for_level('error')
        self.assertIn('missing callback', error_lines[0])
        self.assertIn('Unable to retrieve encryption keys.', err.body)

    def test_get_keys_callback_exception(self):
        # a callback that raises -> 500; the exception text is logged
        # but not leaked to the client body
        def broken_callback():
            raise Exception('boom')

        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys(
                {CRYPTO_KEY_CALLBACK: broken_callback})
        err = cm.exception
        self.assertIn('500 Internal Error', err.message)
        error_lines = self.fake_logger.get_lines_for_level('error')
        self.assertIn('from callback: boom', error_lines[0])
        self.assertIn('Unable to retrieve encryption keys.', err.body)

    def test_get_keys_missing_key_for_default_required_list(self):
        # the default required list includes 'object'; dropping it from
        # the callback's dict must trigger a 500
        incomplete_keys = dict(fetch_crypto_keys())
        del incomplete_keys['object']
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys(
                {CRYPTO_KEY_CALLBACK: lambda: incomplete_keys})
        err = cm.exception
        self.assertIn('500 Internal Error', err.message)
        error_lines = self.fake_logger.get_lines_for_level('error')
        self.assertIn("Missing key for 'object'", error_lines[0])
        self.assertIn('Unable to retrieve encryption keys.', err.body)

    def test_get_keys_missing_object_key_for_specified_required_list(self):
        # 'object' is explicitly required but absent from the keys -> 500
        incomplete_keys = dict(fetch_crypto_keys())
        del incomplete_keys['object']
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys(
                {CRYPTO_KEY_CALLBACK: lambda: incomplete_keys},
                required=['object', 'container'])
        err = cm.exception
        self.assertIn('500 Internal Error', err.message)
        error_lines = self.fake_logger.get_lines_for_level('error')
        self.assertIn("Missing key for 'object'", error_lines[0])
        self.assertIn('Unable to retrieve encryption keys.', err.body)

    def test_get_keys_missing_container_key_for_specified_required_list(self):
        # 'container' is explicitly required but absent from the keys -> 500
        incomplete_keys = dict(fetch_crypto_keys())
        del incomplete_keys['container']
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys(
                {CRYPTO_KEY_CALLBACK: lambda: incomplete_keys},
                required=['object', 'container'])
        err = cm.exception
        self.assertIn('500 Internal Error', err.message)
        error_lines = self.fake_logger.get_lines_for_level('error')
        self.assertIn("Missing key for 'container'", error_lines[0])
        self.assertIn('Unable to retrieve encryption keys.', err.body)

    def test_bad_object_key_for_default_required_list(self):
        # an invalid key value for 'object' -> 500
        invalid_keys = dict(fetch_crypto_keys())
        invalid_keys['object'] = 'the minor key'
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys(
                {CRYPTO_KEY_CALLBACK: lambda: invalid_keys})
        err = cm.exception
        self.assertIn('500 Internal Error', err.message)
        error_lines = self.fake_logger.get_lines_for_level('error')
        self.assertIn("Bad key for 'object'", error_lines[0])
        self.assertIn('Unable to retrieve encryption keys.', err.body)

    def test_bad_container_key_for_default_required_list(self):
        # an invalid key value for 'container' -> 500
        # NOTE(review): despite the name, this passes an explicit required
        # list rather than relying on the default -- confirm intent.
        invalid_keys = dict(fetch_crypto_keys())
        invalid_keys['container'] = 'the major key'
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys(
                {CRYPTO_KEY_CALLBACK: lambda: invalid_keys},
                required=['object', 'container'])
        err = cm.exception
        self.assertIn('500 Internal Error', err.message)
        error_lines = self.fake_logger.get_lines_for_level('error')
        self.assertIn("Bad key for 'container'", error_lines[0])
        self.assertIn('Unable to retrieve encryption keys.', err.body)

    def test_get_keys_not_a_dict(self):
        # callback returning something other than a dict -> 500
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys(
                {CRYPTO_KEY_CALLBACK: lambda: ['key', 'quay', 'qui']})
        err = cm.exception
        self.assertIn('500 Internal Error', err.message)
        error_lines = self.fake_logger.get_lines_for_level('error')
        self.assertIn("Did not get a keys dict", error_lines[0])
        self.assertIn('Unable to retrieve encryption keys.', err.body)