def test_two_realms_and_change_a_default(self):
    """Two realms parse correctly and [DEFAULT] overrides the interval."""
    fname = 'container-sync-realms.conf'
    fcontents = '''
[DEFAULT]
mtime_check_interval = 60

[US]
key = 9ff3b71c849749dbaec4ccdd3cbab62b
cluster_dfw1 = http://dfw1.host/v1/

[UK]
key = e9569809dc8b4951accc1487aa788012
key2 = f6351bd1cc36413baa43f7ba1b45e51d
cluster_lon3 = http://lon3.host/v1/
'''
    with temptree([fname], [fcontents]) as tempdir:
        logger = debug_logger()
        fpath = os.path.join(tempdir, fname)
        csr = ContainerSyncRealms(fpath, logger)
        # A clean parse must emit no log lines at any level.
        self.assertEqual(logger.all_log_lines(), {})
        self.assertEqual(csr.mtime_check_interval, 60)
        self.assertEqual(sorted(csr.realms()), ['UK', 'US'])
        self.assertEqual(csr.key('US'),
                         '9ff3b71c849749dbaec4ccdd3cbab62b')
        self.assertIsNone(csr.key2('US'))
        self.assertEqual(csr.clusters('US'), ['DFW1'])
        self.assertEqual(csr.endpoint('US', 'DFW1'), 'http://dfw1.host/v1/')
        self.assertEqual(csr.key('UK'),
                         'e9569809dc8b4951accc1487aa788012')
        self.assertEqual(csr.key2('UK'),
                         'f6351bd1cc36413baa43f7ba1b45e51d')
        self.assertEqual(csr.clusters('UK'), ['LON3'])
        self.assertEqual(csr.endpoint('UK', 'LON3'), 'http://lon3.host/v1/')
def test_reap_account(self):
    """reap_account visits every container shard and logs per-pass stats.

    Four containers across the ring nodes should yield four
    fake_reap_container calls and ten info lines (a begin/stats pair
    per pass).
    """
    containers = ('c1', 'c2', 'c3', 'c4')
    broker = FakeAccountBroker(containers, debug_logger())
    self.called_amount = 0
    self.r = r = self.init_reaper({}, fakelogger=True)
    r.start_time = time.time()
    with patch('swift.account.reaper.AccountReaper.reap_container',
               self.fake_reap_container), \
            patch('swift.account.reaper.AccountReaper.get_account_ring',
                  self.fake_account_ring):
        nodes = r.get_account_ring().get_part_nodes()
        for container_shard, node in enumerate(nodes):
            self.assertTrue(
                r.reap_account(broker, 'partition', nodes,
                               container_shard=container_shard))
    self.assertEqual(self.called_amount, 4)
    info_lines = r.logger.get_lines_for_level('info')
    self.assertEqual(len(info_lines), 10)
    for start_line, stat_line in zip(*[iter(info_lines)] * 2):
        self.assertEqual(start_line, 'Beginning pass on account a')
        # BUG FIX: str.find() returns -1 (truthy) when the substring is
        # absent, so the old assertTrue(stat_line.find(...)) assertions
        # could never fail.  assertIn actually checks the substring.
        self.assertIn('1 containers deleted', stat_line)
        self.assertIn('1 objects deleted', stat_line)
        self.assertIn('1 containers remaining', stat_line)
        self.assertIn('1 objects remaining', stat_line)
        self.assertIn('1 containers possibly remaining', stat_line)
        self.assertIn('1 objects possibly remaining', stat_line)
        self.assertIn('return codes: 2 2xxs', stat_line)
def test_retry_client_exception(self):
    """retry() re-raises the final ClientException and logs each attempt.

    With retries=2 there are three attempts total, so three error lines
    and two sleeps (backoff of 1s then 2s).
    """
    logger = debug_logger('direct-client-test')
    with mock.patch('swift.common.direct_client.sleep') as mock_sleep, \
            mocked_http_conn(500) as conn:
        with self.assertRaises(direct_client.ClientException) as err_ctx:
            direct_client.retry(direct_client.direct_delete_object,
                                self.node, self.part,
                                self.account, self.container, self.obj,
                                retries=2, error_log=logger.error)
    self.assertEqual('DELETE', conn.method)
    self.assertEqual(err_ctx.exception.http_status, 500)
    self.assertIn('DELETE', err_ctx.exception.args[0])
    self.assertIn(self.obj_path, err_ctx.exception.args[0])
    self.assertIn(self.node['ip'], err_ctx.exception.args[0])
    # BUG FIX: node['port'] may be an int; `assertIn(int, str)` raises
    # TypeError rather than testing membership.  Compare the string form
    # (a no-op if the port is already a string).
    self.assertIn(str(self.node['port']), err_ctx.exception.args[0])
    self.assertEqual(self.node['ip'], err_ctx.exception.http_host)
    self.assertEqual(self.node['port'], err_ctx.exception.http_port)
    self.assertEqual(self.node['device'], err_ctx.exception.http_device)
    self.assertEqual(500, err_ctx.exception.http_status)
    self.assertEqual([mock.call(1), mock.call(2)],
                     mock_sleep.call_args_list)
    error_lines = logger.get_lines_for_level('error')
    self.assertEqual(3, len(error_lines))
    for line in error_lines:
        self.assertIn('500 Internal Error', line)
def test_db_migration(self, tempdir, mock_recon):
    """A pre-SPI container DB gains storage_policy_index after an audit.

    Initializes a broker from the legacy schema (no
    storage_policy_index column), runs the auditor once, then verifies
    the migrated DB reports the expected info including policy index 0.
    """
    db_path = os.path.join(tempdir, 'sda', 'containers', '0', '0', '0',
                           'test.db')
    with test_backend.TestContainerBrokerBeforeSPI.old_broker() as \
            old_ContainerBroker:
        broker = old_ContainerBroker(db_path, account='a', container='c')
        broker.initialize(normalize_timestamp(0), -1)
    with broker.get() as conn:
        try:
            conn.execute('SELECT storage_policy_index '
                         'FROM container_stat')
        except Exception as err:
            # Idiom fix: assertIn gives a useful failure message,
            # unlike assertTrue('...' in str(err)).
            self.assertIn('no such column: storage_policy_index',
                          str(err))
        else:
            self.fail('TestContainerBrokerBeforeSPI broker class '
                      'was already migrated')
    conf = {'devices': tempdir, 'mount_check': False}
    test_auditor = auditor.ContainerAuditor(conf, logger=debug_logger())
    test_auditor.run_once()
    broker = auditor.ContainerBroker(db_path, account='a', container='c')
    info = broker.get_info()
    expected = {
        'account': 'a',
        'container': 'c',
        'object_count': 0,
        'bytes_used': 0,
        'storage_policy_index': 0,
    }
    for k, v in expected.items():
        self.assertEqual(info[k], v)
def test_run_once(self):
    """run_once reaps mounted devices, skips unmounted, and logs errors.

    Three scenarios: mounted device -> reap_device called; unmounted
    device -> not called; bogus devices path -> top-level error logged.
    """
    def prepare_data_dir():
        # Temp tree is registered in self.to_delete for teardown cleanup.
        devices_path = tempfile.mkdtemp()
        self.to_delete.append(devices_path)
        path = os.path.join(devices_path, 'sda1', DATADIR)
        os.makedirs(path)
        return devices_path

    def init_reaper(devices):
        r = reaper.AccountReaper({'devices': devices})
        return r

    devices = prepare_data_dir()
    r = init_reaper(devices)

    with patch('swift.account.reaper.AccountReaper.reap_device') as foo, \
            unit.mock_check_drive(ismount=True):
        r.run_once()
    # Idiom fix: Mock.called is a bool; assert truthiness directly
    # rather than comparing it to the integer 1.
    self.assertTrue(foo.called)

    with patch('swift.account.reaper.AccountReaper.reap_device') as foo, \
            unit.mock_check_drive(ismount=False):
        r.run_once()
    self.assertFalse(foo.called)

    with patch('swift.account.reaper.AccountReaper.reap_device') as foo:
        r.logger = debug_logger('test-reaper')
        r.devices = 'thisdeviceisbad'
        r.run_once()
    self.assertTrue(r.logger.get_lines_for_level(
        'error')[-1].startswith('Exception in top-level account reaper'))
def test_current_invalid(self):
    """A malformed 'current' setting is rejected and logged as an error."""
    self.conf = {'swift_dir': self.tempdir, 'current': 'foo'}
    self.sync = container_sync.ContainerSync(self.app, self.conf,
                                             logger=debug_logger())
    # Invalid //REALM/CLUSTER means neither realm nor cluster is set.
    self.assertIsNone(self.sync.realm)
    self.assertIsNone(self.sync.cluster)

    captured = {}

    def capture_swift_info(key, **options):
        captured[key] = options

    with mock.patch(
            'swift.common.middleware.container_sync.register_swift_info',
            new=capture_swift_info):
        self.sync.register_info()

    # No cluster anywhere should be advertised as 'current'.
    for realm_info in captured['container_sync']['realms'].values():
        for options in realm_info['clusters'].values():
            self.assertEqual(options.get('current', False), False)

    error_lines = self.sync.logger.get_lines_for_level('error')
    self.assertEqual(error_lines, ['Invalid current '
                                   '//REALM/CLUSTER (foo)'])
def setUp(self):
    """Build a temp test tree with a 2-replica account ring and sda1."""
    utils.HASH_PATH_SUFFIX = b'endcap'
    utils.HASH_PATH_PREFIX = b'startcap'
    self.testdir = os.path.join(mkdtemp(), 'tmp_test_container_updater')
    rmtree(self.testdir, ignore_errors=1)
    os.mkdir(self.testdir)
    ring_file = os.path.join(self.testdir, 'account.ring.gz')
    # Serialize a minimal two-device ring for the account server.
    ring_devs = [
        {'id': 0, 'ip': '127.0.0.1', 'port': 12345,
         'device': 'sda1', 'zone': 0},
        {'id': 1, 'ip': '127.0.0.1', 'port': 12345,
         'device': 'sda1', 'zone': 2},
    ]
    with closing(GzipFile(ring_file, 'wb')) as f:
        pickle.dump(
            RingData([[0, 1, 0, 1], [1, 0, 1, 0]], ring_devs, 30), f)
    self.devices_dir = os.path.join(self.testdir, 'devices')
    os.mkdir(self.devices_dir)
    self.sda1 = os.path.join(self.devices_dir, 'sda1')
    os.mkdir(self.sda1)
    self.logger = debug_logger('test')
def setUp(self):
    """Stub internal_client, and seed a fake task queue for the expirer.

    Past-timestamp tasks (including two unicode names) are expected to
    be processed; future-timestamp tasks and containers are skipped.
    """
    global not_sleep

    self.old_loadapp = internal_client.loadapp
    self.old_sleep = internal_client.sleep
    internal_client.loadapp = lambda *a, **kw: None
    internal_client.sleep = not_sleep

    self.rcache = mkdtemp()
    self.conf = {'recon_cache_path': self.rcache}
    self.logger = debug_logger('test-expirer')

    self.ts = make_timestamp_iter()
    self.past_time = str(int(time() - 86400))
    self.future_time = str(int(time() + 86400))

    ready_tasks = [
        # tasks ready for execution
        self.past_time + '-a0/c0/o0',
        self.past_time + '-a1/c1/o1',
        self.past_time + '-a2/c2/o2',
        self.past_time + '-a3/c3/o3',
        self.past_time + '-a4/c4/o4',
        self.past_time + '-a5/c5/o5',
        self.past_time + '-a6/c6/o6',
        self.past_time + '-a7/c7/o7',
        # task objects for unicode test
        self.past_time + u'-a8/c8/o8\u2661',
        self.past_time + u'-a9/c9/o9\xf8',
        # this task will be skipped
        self.future_time + '-a10/c10/o10',
    ]
    # Dummy task queue for test
    self.fake_swift = FakeInternalClient({
        '.expiring_objects': {
            # this task container will be checked
            self.past_time: ready_tasks,
            # this task container will be skipped
            self.future_time: [self.future_time + '-a11/c11/o11'],
        }
    })
    self.expirer = expirer.ObjectExpirer(self.conf, logger=self.logger,
                                         swift=self.fake_swift)

    # target object paths which should be expirerd now
    self.expired_target_path_list = [
        swob.wsgi_to_str(tgt) for tgt in (
            'a0/c0/o0', 'a1/c1/o1', 'a2/c2/o2', 'a3/c3/o3',
            'a4/c4/o4', 'a5/c5/o5', 'a6/c6/o6', 'a7/c7/o7',
            'a8/c8/o8\xe2\x99\xa1', 'a9/c9/o9\xc3\xb8',
        )
    ]
def run_custom_daemon(self, klass, conf_section, conf_index,
                      custom_conf, **kwargs):
    """Instantiate *klass* from a probe config plus overrides and run it.

    Loads the conf file registered under (conf_section, conf_index),
    merges in *custom_conf*, runs the daemon's run_once(**kwargs), and
    returns the daemon for further inspection.
    """
    conf_file = self.configs[conf_section][conf_index]
    conf = utils.readconf(conf_file, conf_section)
    conf.update(custom_conf)
    daemon = klass(conf, debug_logger('probe'))
    daemon.run_once(**kwargs)
    return daemon
def setUp(self):
    """Create a CryptoWSGIContext around a minimal fake filter."""
    class FakeFilter(object):
        # Only the attributes CryptoWSGIContext reads.
        app = None
        crypto = Crypto({})

    self.fake_logger = debug_logger()
    self.crypto_context = CryptoWSGIContext(
        FakeFilter(), 'object', self.fake_logger)
def setUp(self):
    """Populate a temp dir with dummy .db files for the auditor."""
    self.testdir = os.path.join(mkdtemp(), 'tmp_test_database_auditor')
    self.logger = debug_logger()
    rmtree(self.testdir, ignore_errors=1)
    os.mkdir(self.testdir)
    # Names encode the expected audit outcome (true* pass, fail* fail).
    for db_name in ('true1.db', 'true2.db', 'true3.db',
                    'fail1.db', 'fail2.db'):
        with open(os.path.join(self.testdir, db_name), 'w+') as f:
            f.write(' ')
def setUp(self):
    """Build an s3api middleware stack over FakeApp with stock backends.

    Registers canned Swift responses for the account, bucket and object
    paths the tests exercise, and patches get_swift_info in every
    controller module that consults it.
    """
    # setup default config dict
    self.conf = {
        'allow_no_owner': False,
        'location': 'us-east-1',
        'dns_compliant_bucket_names': True,
        'max_bucket_listing': 1000,
        'max_parts_listing': 1000,
        'max_multi_delete_objects': 1000,
        's3_acl': False,
        'storage_domain': 'localhost',
        'auth_pipeline_check': True,
        'max_upload_part_num': 1000,
        'check_bucket_owner': False,
        'force_swift_request_proxy_log': False,
        'allow_multipart_uploads': True,
        'min_segment_size': 5242880,
        'log_level': 'debug'
    }
    self.app = FakeApp()
    self.swift = self.app.swift
    # note: self.conf has no __file__ key so check_pipeline will be skipped
    # when constructing self.s3api
    self.s3api = filter_factory({}, **self.conf)(self.app)
    self.logger = self.s3api.logger = self.swift.logger = debug_logger()

    registrations = [
        ('HEAD', '/v1/AUTH_test', swob.HTTPOk, {}, None),
        ('HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent, {}, None),
        ('PUT', '/v1/AUTH_test/bucket', swob.HTTPCreated, {}, None),
        ('POST', '/v1/AUTH_test/bucket', swob.HTTPNoContent, {}, None),
        ('DELETE', '/v1/AUTH_test/bucket', swob.HTTPNoContent, {}, None),
        ('GET', '/v1/AUTH_test/bucket/object', swob.HTTPOk,
         {'etag': 'object etag'}, ""),
        ('PUT', '/v1/AUTH_test/bucket/object', swob.HTTPCreated,
         {'etag': 'object etag'}, None),
        ('DELETE', '/v1/AUTH_test/bucket/object', swob.HTTPNoContent,
         {}, None),
    ]
    for method, path, resp_class, headers, body in registrations:
        self.swift.register(method, path, resp_class, headers, body)

    self.mock_get_swift_info_result = {'object_versioning': {}}
    for s3api_path in (
            'controllers.obj',
            'controllers.bucket',
            'controllers.multi_delete',
            'controllers.versioning',
    ):
        patcher = mock.patch(
            'swift.common.middleware.s3api.%s.get_swift_info' % s3api_path,
            return_value=self.mock_get_swift_info_result)
        patcher.start()
        self.addCleanup(patcher.stop)
def test_empty(self):
    """An empty conf file yields defaults and no realms, without logging."""
    fname = 'container-sync-realms.conf'
    fcontents = ''
    with temptree([fname], [fcontents]) as tempdir:
        logger = debug_logger()
        fpath = os.path.join(tempdir, fname)
        csr = ContainerSyncRealms(fpath, logger)
        self.assertEqual(logger.all_log_lines(), {})
        # 300s is the documented default mtime check interval.
        self.assertEqual(csr.mtime_check_interval, 300)
        self.assertEqual(csr.realms(), [])
def init_reaper(self, conf=None, myips=None, fakelogger=False):
    """Build an AccountReaper for tests.

    :param conf: reaper config dict (defaults to empty)
    :param myips: IPs the reaper considers local (defaults to one fake)
    :param fakelogger: when True, attach a capturing debug logger
    :returns: the configured AccountReaper instance
    """
    # None sentinels avoid the shared-mutable-default pitfall.
    reaper_conf = {} if conf is None else conf
    local_ips = ['10.10.10.1'] if myips is None else myips
    r = reaper.AccountReaper(reaper_conf)
    r.myips = local_ips
    if fakelogger:
        r.logger = debug_logger('test-reaper')
    return r
def test_get_sig(self):
    """get_sig produces the known HMAC for fixed request parameters."""
    fname = 'container-sync-realms.conf'
    fcontents = ''
    with temptree([fname], [fcontents]) as tempdir:
        logger = debug_logger()
        fpath = os.path.join(tempdir, fname)
        csr = ContainerSyncRealms(fpath, logger)
        expected_sig = '5a6eb486eb7b44ae1b1f014187a94529c3f9c8f9'
        self.assertEqual(
            csr.get_sig('GET', '/some/path', '1387212345.67890',
                        'my_nonce', 'realm_key', 'user_key'),
            expected_sig)
def test_sweep_logs_multiple_policies(self):
    """object_sweep emits exactly one 'sweep complete' summary line.

    Seeds three async pendings per mocked policy, runs a sweep with a
    mocked updater that just unlinks and counts, and verifies the
    summary reports 6 successes / 6 unlinks and nothing else.
    """
    for policy in _mocked_policies:
        asyncdir = os.path.join(self.sda1, get_async_dir(policy.idx))
        prefix_dir = os.path.join(asyncdir, 'abc')
        mkpath(prefix_dir)
        for o, t in [('abc', 123), ('def', 234), ('ghi', 345)]:
            ohash = hash_path('account', 'container%d' % policy.idx, o)
            o_path = os.path.join(prefix_dir,
                                  ohash + '-' + normalize_timestamp(t))
            write_pickle({}, o_path)

    class MockObjectUpdater(object_updater.ObjectUpdater):
        # Count each update as an immediate success; no network I/O.
        def process_object_update(self, update_path, device, policy):
            os.unlink(update_path)
            self.stats.successes += 1
            self.stats.unlinks += 1

    logger = debug_logger()
    ou = MockObjectUpdater(
        {
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
            'interval': '1',
            'concurrency': '1',
            'report_interval': '10.0',
            'node_timeout': '5'
        },
        logger=logger)

    now = [time()]

    def mock_time():
        rv = now[0]
        now[0] += 0.01
        return rv

    with mock.patch('swift.obj.updater.time',
                    mock.MagicMock(time=mock_time)):
        ou.object_sweep(self.sda1)

    # Lint fix: 'l' (E741 ambiguous name) renamed to 'line'.
    completion_lines = [
        line for line in logger.get_lines_for_level('info')
        if "sweep complete" in line
    ]

    self.assertEqual(len(completion_lines), 1)
    self.assertIn("sweep complete", completion_lines[0])
    self.assertIn(
        "6 successes, 0 failures, 0 quarantines, 6 unlinks, 0 errors, "
        "0 redirects", completion_lines[0])
def test_no_file_there(self):
    """A missing conf file logs a debug line and falls back to defaults."""
    missing_path = uuid.uuid4().hex
    logger = debug_logger()
    csr = ContainerSyncRealms(missing_path, logger)
    expected_debug = (
        "Could not load '%s': [Errno 2] No such file or directory: "
        "'%s'" % (missing_path, missing_path))
    self.assertEqual(logger.all_log_lines(), {'debug': [expected_debug]})
    self.assertEqual(csr.mtime_check_interval, 300)
    self.assertEqual(csr.realms(), [])
def setUp(self):
    """Freeze time, build the s3token middleware, and mock HTTP requests."""
    super(S3TokenMiddlewareTestBase, self).setUp()
    self.logger = debug_logger()
    # Pin time.time() so token expiry math is deterministic.
    self.time_patcher = mock.patch.object(time, 'time', lambda: 1234)
    self.time_patcher.start()

    self.app = FakeApp()
    self.conf = {
        'auth_uri': self.TEST_AUTH_URI,
    }
    self.middleware = self.make_middleware(self.conf)

    self.requests_mock = rm_fixture.Fixture()
    self.requests_mock.setUp()
def setUp(self):
    """Wire a proxy app + copy middleware to a real ObjectController.

    Account/container requests get canned 200s; object requests are
    served by an actual object server backed by a temp directory.
    """
    skip_if_no_xattrs()
    self.app = proxy.Application(None,
                                 logger=debug_logger('proxy-ut'),
                                 account_ring=FakeRing(replicas=1),
                                 container_ring=FakeRing(replicas=1))
    self.copy_app = ServerSideCopyMiddleware(self.app, {})
    self.tmpdir = mkdtemp()
    self.testdir = os.path.join(self.tmpdir,
                                'tmp_test_object_server_ObjectController')
    mkdirs(os.path.join(self.testdir, 'sda', 'tmp'))
    conf = {'devices': self.testdir, 'mount_check': 'false'}
    self.obj_ctlr = object_server.ObjectController(
        conf, logger=debug_logger('obj-ut'))

    http_connect = get_http_connect(fake_http_connect(200),
                                    fake_http_connect(200),
                                    FakeServerConnection(self.obj_ctlr))

    # Save originals so tearDown can restore the module-level hooks.
    self.orig_base_http_connect = swift.proxy.controllers.base.http_connect
    self.orig_obj_http_connect = swift.proxy.controllers.obj.http_connect
    swift.proxy.controllers.base.http_connect = http_connect
    swift.proxy.controllers.obj.http_connect = http_connect
def test_reap_account_no_container(self):
    """An account with no containers completes a pass without reaping."""
    broker = FakeAccountBroker(tuple(), debug_logger())
    self.r = r = self.init_reaper({}, fakelogger=True)
    self.called_amount = 0
    r.start_time = time.time()
    with patch('swift.account.reaper.AccountReaper.reap_container',
               self.fake_reap_container), \
            patch('swift.account.reaper.AccountReaper.get_account_ring',
                  self.fake_account_ring):
        nodes = r.get_account_ring().get_part_nodes()
        self.assertTrue(r.reap_account(broker, 'partition', nodes))
    last_info_line = r.logger.get_lines_for_level('info')[-1]
    self.assertTrue(last_info_line.startswith('Completed pass'))
    # reap_container must never have been invoked.
    self.assertEqual(self.called_amount, 0)
def __init__(self):
    """Initialize empty call/response bookkeeping and fake rings."""
    # Per-request records, in arrival order.
    self._calls = []
    self.req_bodies = []
    self._unclosed_req_keys = defaultdict(int)
    self._unread_req_paths = defaultdict(int)
    self.req_method_paths = []
    self.swift_sources = []
    self.txn_ids = []
    self.uploaded = {}
    # mapping of (method, path) --> (response class, headers, body)
    self._responses = {}
    self.logger = debug_logger('fake-swift')
    self.account_ring = FakeRing()
    self.container_ring = FakeRing()
    self.get_object_ring = lambda policy_index: FakeRing()
def test_get_maxrate(self):
    """get_maxrate interpolates container ratelimits between thresholds."""
    conf_dict = {'container_ratelimit_10': 200,
                 'container_ratelimit_50': 100,
                 'container_ratelimit_75': 30}
    test_ratelimit = ratelimit.filter_factory(conf_dict)(FakeApp())
    test_ratelimit.logger = debug_logger()
    limits = test_ratelimit.container_ratelimits
    # Below the first threshold there is no limit at all.
    self.assertIsNone(ratelimit.get_maxrate(limits, 0))
    self.assertIsNone(ratelimit.get_maxrate(limits, 5))
    # Exact thresholds and interpolated/clamped values.
    self.assertEqual(ratelimit.get_maxrate(limits, 10), 200)
    self.assertEqual(ratelimit.get_maxrate(limits, 60), 72)
    self.assertEqual(ratelimit.get_maxrate(limits, 160), 30)
def test_empty_realm(self):
    """A realm section with no options yields empty keys and clusters."""
    fname = 'container-sync-realms.conf'
    fcontents = '''
[US]
'''
    with temptree([fname], [fcontents]) as tempdir:
        logger = debug_logger()
        fpath = os.path.join(tempdir, fname)
        csr = ContainerSyncRealms(fpath, logger)
        self.assertEqual(logger.all_log_lines(), {})
        self.assertEqual(csr.mtime_check_interval, 300)
        self.assertEqual(csr.realms(), ['US'])
        # No keys or clusters configured for the realm.
        self.assertIsNone(csr.key('US'))
        self.assertIsNone(csr.key2('US'))
        self.assertEqual(csr.clusters('US'), [])
        self.assertIsNone(csr.endpoint('US', 'JUST_TESTING'))
def test_bad_mtime_check_interval(self):
    """A non-numeric mtime_check_interval logs an error and uses 300."""
    fname = 'container-sync-realms.conf'
    fcontents = '''
[DEFAULT]
mtime_check_interval = invalid
'''
    with temptree([fname], [fcontents]) as tempdir:
        logger = debug_logger()
        fpath = os.path.join(tempdir, fname)
        csr = ContainerSyncRealms(fpath, logger)
        logs = logger.all_log_lines()
        # Exactly one error line; its tail (the float() message) varies
        # across Python versions, so only the prefix is matched.
        self.assertEqual(logs, {'error': [ANY]})
        line = logs['error'][0]
        self.assertIn(
            "Error in '%s' with mtime_check_interval: "
            "could not convert string to float:" % fpath, line)
        self.assertEqual(csr.mtime_check_interval, 300)
def test_bad_mtime_check_interval(self):
    """A non-integer mtime_check_interval logs the int() error, keeps 300."""
    fname = 'container-sync-realms.conf'
    fcontents = '''
[DEFAULT]
mtime_check_interval = invalid
'''
    with temptree([fname], [fcontents]) as tempdir:
        logger = debug_logger()
        fpath = os.path.join(tempdir, fname)
        csr = ContainerSyncRealms(fpath, logger)
        expected_error = (
            "Error in '%s' with mtime_check_interval: invalid literal "
            "for int() with base 10: 'invalid'" % fpath)
        self.assertEqual(logger.all_log_lines(),
                         {'error': [expected_error]})
        self.assertEqual(csr.mtime_check_interval, 300)
def __init__(self, conf):
    """Assemble a proxy-logging / copy / fake-filter / proxy-logging stack.

    Wraps FakeSwift with the middleware pipeline (applied in reverse so
    the first factory listed is outermost), shares one debug logger
    across all layers, and registers the canned subrequest response for
    the configured subrequest type.
    """
    self.fake_logger = debug_logger()
    self.fake_swift = self.app = FakeSwift()
    self.register = self.fake_swift.register
    # Lint fix: the loop variable shadowed the builtin 'filter'.
    for factory in reversed([
            proxy_logging.filter_factory,
            copy.filter_factory,
            lambda conf: lambda app: FakeFilter(app, conf, self.register),
            proxy_logging.filter_factory]):
        self.app = factory(conf)(self.app)
        self.app.logger = self.fake_logger
        if hasattr(self.app, 'access_logger'):
            self.app.access_logger = self.fake_logger
    if conf['subrequest_type'] == 'GET':
        self.register(conf['subrequest_type'], SUB_GET_PATH, HTTPOk, {})
    else:
        self.register(conf['subrequest_type'], SUB_PUT_POST_PATH,
                      HTTPOk, {})
def test_one_realm(self):
    """A single realm with one key and one cluster parses cleanly."""
    fname = 'container-sync-realms.conf'
    fcontents = '''
[US]
key = 9ff3b71c849749dbaec4ccdd3cbab62b
cluster_dfw1 = http://dfw1.host/v1/
'''
    with temptree([fname], [fcontents]) as tempdir:
        logger = debug_logger()
        fpath = os.path.join(tempdir, fname)
        csr = ContainerSyncRealms(fpath, logger)
        self.assertEqual(logger.all_log_lines(), {})
        self.assertEqual(csr.mtime_check_interval, 300)
        self.assertEqual(csr.realms(), ['US'])
        self.assertEqual(csr.key('US'),
                         '9ff3b71c849749dbaec4ccdd3cbab62b')
        # key2 was not configured.
        self.assertIsNone(csr.key2('US'))
        self.assertEqual(csr.clusters('US'), ['DFW1'])
        self.assertEqual(csr.endpoint('US', 'DFW1'), 'http://dfw1.host/v1/')
def setUp(self):
    """Build a temp device tree with a 4-device container ring.

    Also creates per-policy tmp dirs under sda1 so the object updater
    has somewhere to stage async pendings.
    """
    utils.HASH_PATH_SUFFIX = b'endcap'
    utils.HASH_PATH_PREFIX = b''
    self.testdir = mkdtemp()
    ring_file = os.path.join(self.testdir, 'container.ring.gz')
    ring_devs = [
        {'id': 0, 'ip': '127.0.0.1', 'port': 1,
         'device': 'sda1', 'zone': 0},
        {'id': 1, 'ip': '127.0.0.1', 'port': 1,
         'device': 'sda1', 'zone': 2},
        {'id': 2, 'ip': '127.0.0.1', 'port': 1,
         'device': 'sda1', 'zone': 4},
        {'id': 3, 'ip': '127.0.0.1', 'port': 1,
         'device': 'sda1', 'zone': 6},
    ]
    with closing(GzipFile(ring_file, 'wb')) as f:
        pickle.dump(
            RingData([[0, 1, 2, 0, 1, 2],
                      [1, 2, 0, 1, 2, 0],
                      [2, 3, 1, 2, 3, 1]], ring_devs, 30),
            f)
    self.devices_dir = os.path.join(self.testdir, 'devices')
    os.mkdir(self.devices_dir)
    self.sda1 = os.path.join(self.devices_dir, 'sda1')
    os.mkdir(self.sda1)
    for policy in POLICIES:
        os.mkdir(os.path.join(self.sda1, get_tmp_dir(policy)))
    self.logger = debug_logger()
    self.ts_iter = make_timestamp_iter()
def test_ratelimit_blacklist(self):
    """Blacklisted accounts get 497 responses without consuming sleep time."""
    global time_ticker
    current_rate = 2
    conf_dict = {'account_ratelimit': current_rate,
                 'max_sleep_time_seconds': 2,
                 'account_whitelist': 'a',
                 'account_blacklist': 'b'}
    self.test_ratelimit = ratelimit.filter_factory(conf_dict)(FakeApp())
    self.test_ratelimit.logger = debug_logger()
    # Keep blacklist handling instantaneous for the test.
    self.test_ratelimit.BLACK_LIST_SLEEP = 0
    req = Request.blank('/v1/b/c')
    req.environ['swift.cache'] = FakeMemcache()

    class rate_caller(threading.Thread):
        def __init__(self, parent):
            threading.Thread.__init__(self)
            self.parent = parent

        def run(self):
            self.result = self.parent.test_ratelimit(req.environ.copy(),
                                                     start_response)

    def get_fake_ratelimit(*args, **kwargs):
        return {'sysmeta': {'global-write-ratelimit': 'BLACKLIST'}}

    with mock.patch('swift.common.middleware.ratelimit.get_account_info',
                    get_fake_ratelimit):
        thread_count = 5
        threads = []
        for _ in range(thread_count):
            rc = rate_caller(self)
            rc.start()
            threads.append(rc)
        for thread in threads:
            thread.join()

        # Every caller should have been rejected with the 497 body.
        the_497s = [
            t for t in threads
            if b''.join(t.result).startswith(b'Your account')]
        self.assertEqual(len(the_497s), 5)
        self.assertEqual(time_ticker, 0)
def test_error_parsing(self):
    """Unparseable conf content logs the parser error and yields defaults."""
    fname = 'container-sync-realms.conf'
    fcontents = 'invalid'
    with temptree([fname], [fcontents]) as tempdir:
        logger = debug_logger()
        fpath = os.path.join(tempdir, fname)
        csr = ContainerSyncRealms(fpath, logger)
        # configparser quotes the file name differently on py2 vs py3.
        if six.PY2:
            fmt = "Could not load '%s': " \
                  "File contains no section headers.\n" \
                  "file: %s, line: 1\n" \
                  "'invalid'"
        else:
            fmt = "Could not load '%s': " \
                  "File contains no section headers.\n" \
                  "file: '%s', line: 1\n" \
                  "'invalid'"
        self.assertEqual(logger.all_log_lines(),
                         {'error': [fmt % (fpath, fpath)]})
        self.assertEqual(csr.mtime_check_interval, 300)
        self.assertEqual(csr.realms(), [])