async def test_frontend_to_backend(backend, web_server, client, create_msg):
    """Client filter changes are reflected in the synced server state."""
    change_queue = aio.Queue()
    client.register_change_cb(lambda: change_queue.put_nowait(None))

    # substring filter installed before any message is registered
    client.set_filter(common.Filter(msg='message no 1'))
    await change_queue.get()
    assert_client_vs_server_state(client)

    # register ten messages; exactly two contain the filtered substring
    for _ in range(10):
        await backend.register(ts_now(), create_msg())
        await change_queue.get()
    entries = client.server_state['entries']
    assert len(entries) == 2
    assert all('message no 1' in e.msg.msg for e in entries)

    # an empty filter matches every registered message
    client.set_filter(common.Filter())
    await change_queue.get()
    assert len(client.server_state['entries']) == 10
    assert change_queue.empty()

    # a filter that matches nothing empties the synced entries
    client.set_filter(common.Filter(msg='bla bla'))
    await change_queue.get()
    assert len(client.server_state['entries']) == 0
    assert change_queue.empty()

    # severity filter selects only ERROR messages
    client.set_filter(common.Filter(severity=common.Severity.ERROR))
    await change_queue.get()
    entries = client.server_state['entries']
    assert len(entries) == 2
    assert all(e.msg.severity == common.Severity.ERROR for e in entries)
async def test_archive(create_backend, create_msg, timestamp, db_path,
                       short_register_delay, enable_archive):
    """Backend archives the oldest entries once high_size is exceeded."""
    low_size = 50
    high_size = 100
    change_queue = aio.Queue()
    backend = await create_backend(low_size=low_size,
                                   high_size=high_size,
                                   enable_archive=enable_archive)
    backend.register_change_cb(change_queue.put_nowait)

    # fill the backend exactly up to high_size, keeping newest first
    entries = []
    for _ in range(high_size):
        await backend.register(timestamp, create_msg())
        entries = await change_queue.get() + entries

    # wait for possible background db cleanup
    await asyncio.sleep(0.1)

    # at exactly high_size nothing is cleaned up or archived yet
    assert backend.last_id == high_size
    assert backend.first_id == 1
    result = await backend.query(common.Filter())
    assert len(result) == high_size
    archive_count = len(list(db_path.parent.glob(f'{db_path.name}.*')))
    assert archive_count == 0

    # one more entry pushes the db over high_size ...
    await backend.register(timestamp, create_msg())
    entries = await change_queue.get() + entries

    # wait for expected background db cleanup
    await asyncio.sleep(0.1)

    # ... which shrinks the active db down to low_size entries
    assert backend.first_id == backend.last_id - low_size + 1
    assert backend.last_id == high_size + 1
    result = await backend.query(common.Filter())
    assert len(result) == low_size
    archive_count = len(list(db_path.parent.glob(f'{db_path.name}.*')))
    assert archive_count == (1 if enable_archive else 0)

    await backend.async_close()
    assert backend.is_closed

    if enable_archive:
        # the archived remainder plus the active db must equal everything
        archive_path = util.first(db_path.parent.glob('*.*'),
                                  lambda i: i.name == f'{db_path.name}.1')
        backend = await create_backend(path=archive_path,
                                       high_size=high_size,
                                       low_size=low_size)
        assert not backend.is_closed
        entries_archived = await backend.query(common.Filter())
        assert len(entries_archived) == (high_size - low_size + 1)
        assert result + entries_archived == entries
        await backend.async_close()
        assert backend.is_closed
async def test_persistence(create_backend, create_msg, timestamp):
    """Entries registered before a close must survive a backend restart."""
    size = 100

    backend = await create_backend()
    for _ in range(size):
        await backend.register(timestamp, create_msg())

    # registration may complete in the background - poll until every
    # entry becomes queryable
    result_before = []
    while len(result_before) != size:
        result_before = await backend.query(common.Filter())
    await backend.async_close()

    # reopen the same database and expect the identical query result
    backend = await create_backend()
    result_after = await backend.query(common.Filter())
    assert result_after == result_before
    await backend.async_close()
def _sanitize_filter(filter_json):
    """Return a filter JSON dict whose max_results never exceeds
    max_results_limit.

    A falsy ``filter_json`` is replaced by a default filter capped at the
    limit.  The input dict is never mutated: when the cap has to be
    applied, a shallow copy with the adjusted ``max_results`` is returned.
    """
    if not filter_json:
        filter_json = common.filter_to_json(
            common.Filter(max_results=max_results_limit))
    # `.get` treats a missing 'max_results' key the same as an explicit
    # None (no limit requested), instead of raising KeyError
    max_results = filter_json.get('max_results')
    if max_results is None or max_results > max_results_limit:
        filter_json = dict(filter_json, max_results=max_results_limit)
    return filter_json
async def test_query(create_backend, create_msg, timestamp):
    """Query honours ordering, max_results, last_id and field filters."""
    change_queue = aio.Queue()
    backend = await create_backend()
    backend.register_change_cb(change_queue.put_nowait)

    facilities = list(common.Facility)
    severities = list(common.Severity)
    hosts = [f'h{i}' for i in range(1, 6)]
    apps = [f'app{i}' for i in range(1, 6)]
    procids = [f'{i:04}' for i in range(1, 6)]

    # register three rounds of messages, newest kept first in `msgs`
    msgs = []
    for _ in range(3):
        for facility, severity, host, app, procid in zip(
                facilities, severities, hosts, apps, procids):
            msg = create_msg(facility=facility,
                             severity=severity,
                             hostname=host,
                             app_name=app,
                             procid=procid)
            msgs.insert(0, msg)
            await backend.register(timestamp, msg)
            await change_queue.get()

    # unfiltered query returns everything, newest first, with unique ids
    query_res = await backend.query(common.Filter())
    assert [e.msg for e in query_res] == msgs
    assert len(set(e.id for e in query_res)) == len(msgs)

    # max_results truncates to the newest entries
    query_res = await backend.query(common.Filter(max_results=3))
    assert [e.msg for e in query_res] == msgs[:3]

    # last_id limits results to entries with id <= last_id
    query_res = await backend.query(common.Filter(last_id=10))
    assert [e.id for e in query_res] == [i for i in reversed(range(1, 11))]
    assert [e.msg for e in query_res] == msgs[-10:]

    # each exact-match field filter selects one message per round
    for facility, severity, hostname, app_name, procid in zip(
            facilities, severities, hosts, apps, procids):
        query_res = await backend.query(common.Filter(facility=facility,
                                                      severity=severity,
                                                      hostname=hostname,
                                                      app_name=app_name,
                                                      procid=procid))
        assert len(query_res) == 3
        assert all(e.msg.facility == facility for e in query_res)
        assert all(e.msg.severity == severity for e in query_res)
        assert all(e.msg.hostname == hostname for e in query_res)
        assert all(e.msg.app_name == app_name for e in query_res)
        assert all(e.msg.procid == procid for e in query_res)

    # msg is a substring match: '' matches all, 'xyz' matches none
    query_res = await backend.query(common.Filter(msgid='msgid'))
    assert len(query_res) == len(msgs)
    query_res = await backend.query(common.Filter(msg=''))
    assert len(query_res) == len(msgs)
    query_res = await backend.query(common.Filter(msg='xyz'))
    assert len(query_res) == 0

    await backend.async_close()
async def _archive_db(self, first_id):
    """Copy entries older than `first_id` into a newly created archive db.

    Entries with id <= first_id - 1 (or every entry, when `first_id` is
    None) are read from the active database and written to a fresh
    archive database.  The archive database is always closed, even on
    failure or cancellation.
    """
    # obtain the next free archive file path
    # (presumably `_ext_get_new_archive_path` runs in `self._executor`
    # via the async group -- TODO confirm spawn/executor contract)
    archive_path = await self._async_group.spawn(
        self._executor, _ext_get_new_archive_path, self._path)
    archive = await database.create_database(
        archive_path, self._disable_journal)
    try:
        # a `last_id` filter of first_id - 1 selects everything strictly
        # older than `first_id`; None selects all entries
        entries = await self._db.query(common.Filter(
            last_id=first_id - 1 if first_id is not None else None))
        await archive.add_entries(entries)
    finally:
        # uncancellable close so the archive db file is left in a
        # consistent state even if this task is cancelled mid-archive
        await aio.uncancellable(archive.async_close())
def server_state(self):
    """Return the server-synced state as a dict, or None when the
    connection has no remote data yet.

    The result contains the active `filter` (or None), the decoded
    `entries` and the server-side `first_id`/`last_id` markers.
    """
    remote = self._conn.remote_data
    if not remote:
        return
    filter_json = remote['filter']
    return {'filter': (common.Filter(**filter_json)
                       if filter_json else None),
            'entries': [common.entry_from_json(i)
                        for i in remote['entries']],
            'first_id': remote['first_id'],
            'last_id': remote['last_id']}
async def test_query_on_timestamp(create_backend, create_msg, timestamp,
                                  time_filter, exp_ts_ind):
    """Timestamp range filters select exactly the expected entries."""
    change_queue = aio.Queue()
    backend = await create_backend()
    backend.register_change_cb(change_queue.put_nowait)

    # five messages at each of three distinct timestamps
    tss = [timestamp - 20, timestamp - 10, timestamp]
    msgs = []
    for ts in tss:
        for _ in range(5):
            msg = create_msg()
            msgs.insert(0, msg)
            await backend.register(ts, msg)
            await change_queue.get()

    # build entry_timestamp_from/entry_timestamp_to from the
    # parametrized `time_filter` index mapping
    entry_filter = common.Filter(
        **{'entry_timestamp_' + key: tss[idx]
           for key, idx in time_filter.items()})
    query_res = await backend.query(entry_filter)

    expected_tss = [tss[i] for i in exp_ts_ind]
    assert len(query_res) == len(exp_ts_ind) * 5
    assert all(e.timestamp in expected_tss for e in query_res)

    await backend.async_close()
async def test_max_size(backend, web_server, client, create_msg,
                        small_max_results):
    """Server never syncs more than the configured max_results entries."""
    change_queue = aio.Queue()
    client.register_change_cb(lambda: change_queue.put_nowait(None))
    await change_queue.get()
    assert_client_vs_server_state(client)

    # register 40 entries; only the newest 20 are synced to the client
    for _ in range(40):
        await backend.register(ts_now(), create_msg())
        await change_queue.get()
    assert len(client.server_state['entries']) == 20
    expected_ids = list(reversed(range(21, 41)))
    assert [e.id for e in client.server_state['entries']] == expected_ids

    # asking for more than the server-side limit changes nothing,
    # so no state change notification may arrive
    client.set_filter(common.Filter(max_results=35))
    with pytest.raises(asyncio.TimeoutError):
        await asyncio.wait_for(change_queue.get(), 0.1)
    assert_client_vs_server_state(client)
    assert len(client.server_state['entries']) == 20
    assert [e.id for e in client.server_state['entries']] == expected_ids
version=None, timestamp=None, hostname=None, app_name=None, procid=None, msgid=None, data=None, msg=None)] filters = [ common.Filter( max_results=10, last_id=None, entry_timestamp_from=None, entry_timestamp_to=datetime.datetime.utcnow().timestamp(), facility=common.Facility.KERNEL, severity=common.Severity.CRITICAL, hostname='host', app_name='app1', procid='1234', msgid='msg.id', msg='this is message'), common.Filter( max_results=None, last_id=None, entry_timestamp_from=None, entry_timestamp_to=None, facility=common.Facility.KERNEL, severity=common.Severity.ERROR, hostname=None, app_name=None, procid=None,
def state(self):
    """Return the locally-set filter reconstructed from the
    connection's local juggler data."""
    local_data = self._conn.local_data
    return common.Filter(**local_data)
async def create_client(port):
    """Connect a new juggler Client to the local web server on `port`."""
    address = f'ws://127.0.0.1:{port}/ws'
    client = Client()
    # autoflush_delay=0 flushes every local-data change immediately
    client._conn = await juggler.connect(address, autoflush_delay=0)
    client._conn.set_local_data(common.Filter()._asdict())
    return client