def sbs_repo():
    """Build a repository combining chatter's schemas with a test module."""
    test_repo = sbs.Repository("""
        module Test

        Data = Integer
    """)
    return sbs.Repository(chatter.sbs_repo, test_repo)
def test_serialization(encode_serializer, decode_serializer, schema, t, v):
    """Encode with one serializer and decode with another: values round-trip."""
    writer = sbs.Repository(schema, serializer=encode_serializer)
    reader = sbs.Repository(schema, serializer=decode_serializer)
    payload = writer.encode('Module', t, v)
    assert reader.decode('Module', t, payload) == v
def test_repository_initialization_with_repository(serializer):
    """A repository built from another repository encodes identically."""
    base = sbs.Repository("""
        module M

        T = Integer
    """, serializer=serializer)
    derived = sbs.Repository(base, serializer=serializer)
    assert base.encode('M', 'T', 1) == derived.encode('M', 'T', 1)
async def test_example_docs():
    """Documentation example: one message exchanged over a tcp+sbs link."""
    from hat import aio
    from hat import chatter
    from hat import sbs
    from hat import util

    sbs_repo = sbs.Repository(
        chatter.sbs_repo,
        r"""
        module Example

        Msg = Integer
        """)

    port = util.get_unused_tcp_port()
    address = f'tcp+sbs://127.0.0.1:{port}'

    # Server pushes every accepted connection into a queue so the test
    # can pick up the peer of the client connection.
    server_conns = aio.Queue()
    server = await chatter.listen(sbs_repo, address,
                                  server_conns.put_nowait)
    client_conn = await chatter.connect(sbs_repo, address)
    server_conn = await server_conns.get()

    data = chatter.Data('Example', 'Msg', 123)
    client_conn.send(data)
    msg = await server_conn.receive()
    assert msg.data == data

    await server.async_close()
    await client_conn.wait_closed()
    await server_conn.wait_closed()
def test_invalid_serialization(serializer, schema, t, v):
    """Encoding/decoding an invalid value must fail at some stage.

    Either encode raises, decode raises, or the round-trip silently
    succeeds with a matching value — in which case we raise ourselves
    so ``pytest.raises`` is always satisfied by a genuine failure.
    """
    repo = sbs.Repository(schema, serializer=serializer)
    with pytest.raises(Exception):
        payload = repo.encode('Module', t, v)
        restored = repo.decode('Module', t, payload)
        if restored == v:
            raise Exception()
def test_loading_schema_file(tmp_path, serializer):
    """Repository can be initialized from a schema file path.

    Writes a minimal schema to a temporary file and verifies that a
    value round-trips through a repository loaded from that file.
    """
    path = tmp_path / 'schema.sbs'
    # pathlib's write_text replaces the manual open/write/close dance
    path.write_text("module M T = Integer", encoding='utf-8')
    repo = sbs.Repository(path, serializer=serializer)
    value = 123
    encoded_value = repo.encode('M', 'T', value)
    decoded_value = repo.decode('M', 'T', encoded_value)
    assert value == decoded_value
def test_multiple_modules(serializer):
    """One module may reference a type declared in another module."""
    repo = sbs.Repository("""
        module M1

        T = Integer
    """, """
        module M2

        T = M1.T
    """, serializer=serializer)
    original = 1
    payload = repo.encode('M2', 'T', original)
    restored = repo.decode('M2', 'T', payload)
    assert original == restored
def test_parametrized_types(serializer):
    """Parametrized types cannot be used directly for encoding/decoding."""
    repo = sbs.Repository("""
        module M

        T1(x) = Integer
    """, serializer=serializer)
    payload = repo.encode(None, 'Integer', 1)
    with pytest.raises(Exception):
        repo.encode('M', 'T1', 1)
    with pytest.raises(Exception):
        repo.decode('M', 'T1', payload)
def test_example():
    """Round-trip an array of optional key/value entries."""
    repo = sbs.Repository('''
        module Module

        Entry(K, V) = Tuple {
            key: K
            value: V
        }

        T = Array(Maybe(Entry(String, Integer)))
    ''')
    data = [
        ('Nothing', None),
        ('Just', {
            'key': 'abc',
            'value': 123
        })
    ]
    payload = repo.encode('Module', 'T', data)
    restored = repo.decode('Module', 'T', payload)
    assert data == restored
def test_event_encoding_duration(duration, serializer, event_count,
                                 bulk_encoding):
    """Benchmark encoding/decoding of HatEvent MsgRegisterReq messages.

    Measures encode and decode wall time via the ``duration`` fixture,
    either as one bulk message containing all events or as one message
    per event, depending on ``bulk_encoding``.
    """
    sbs_repo = sbs.Repository(hat.event.common.sbs_repo,
                              serializer=serializer)
    # Synthetic events with distinct ids/types and a 10-key JSON payload.
    events = [
        hat.event.common.event_to_sbs(
            hat.event.common.Event(
                event_id=hat.event.common.EventId(server=0, instance=i),
                event_type=['some', 'event', 'type', str(i)],
                timestamp=hat.event.common.now(),
                source_timestamp=None,
                payload=hat.event.common.EventPayload(
                    type=hat.event.common.EventPayloadType.JSON,
                    data={f'key{j}': f'value{j}' for j in range(10)})))
        for i in range(event_count)]
    # bulk: single message with all events; otherwise one message each
    if bulk_encoding:
        data = [events]
    else:
        data = [[event] for event in events]
    results = collections.deque()
    with duration(f'{serializer.__name__} encode - '
                  f'event_count: {event_count}; '
                  f'bulk_encoding: {bulk_encoding}'):
        for i in data:
            result = sbs_repo.encode('HatEvent', 'MsgRegisterReq', i)
            results.append(result)
    with duration(f'{serializer.__name__} decode - '
                  f'event_count: {event_count}; '
                  f'bulk_encoding: {bulk_encoding}'):
        for i in results:
            sbs_repo.decode('HatEvent', 'MsgRegisterReq', i)
from pathlib import Path
import asyncio
import contextlib

from hat import aio
from hat import chatter
from hat import json
from hat import sbs
import hat.event.server.common


# Schema resources are looked up next to this module file.
package_path = Path(__file__).parent

json_schema_id = "test://modules/remote.yaml#"
json_schema_repo = json.SchemaRepository(package_path / 'remote.yaml')

sbs_repo = sbs.Repository(chatter.sbs_repo, package_path / 'remote.sbs')


async def create(conf, engine):
    """Create a RemoteModule connected to the address in ``conf``.

    Builds the module's subscription from ``conf['subscriptions']``,
    opens a chatter connection to ``conf['address']``, registers a
    close callback and sends an initial ``ModuleCreate`` message.
    """
    module = RemoteModule()
    module._subscription = hat.event.server.common.Subscription(
        conf['subscriptions'])
    module._async_group = aio.Group()
    module._conn = await chatter.connect(sbs_repo, conf['address'])
    # _on_close runs when the group is cancelled/closed
    module._async_group.spawn(aio.call_on_cancel, module._on_close)
    module._send('ModuleCreate', None)
    return module


# NOTE(review): class body continues beyond this chunk — only the header
# and the start of a property are visible here.
class RemoteModule(hat.event.server.common.Module):

    @property
import struct
import typing

from hat import chatter
from hat import sbs
from hat import util
from hat.util import json
import hat.monitor.common


# NOTE(review): `Path` and `enum` are referenced below but not imported in
# this chunk — either they are imported elsewhere in the file (not visible
# here) or these imports are missing. TODO confirm.
package_path = Path(__file__).parent

# Layered JSON schema repository: base schemas + monitor schemas + the
# pre-built repository shipped next to this module.
json_schema_repo = json.SchemaRepository(
    json.json_schema_repo,
    hat.monitor.common.json_schema_repo,
    json.SchemaRepository.from_json(package_path / 'json_schema_repo.json'))

# SBS repository combining chatter's schemas with the pre-built one.
sbs_repo = sbs.Repository(
    chatter.sbs_repo,
    sbs.Repository.from_json(package_path / 'sbs_repo.json'))

Order = util.extend_enum_doc(enum.Enum('Order', ['DESCENDING', 'ASCENDING']))

OrderBy = util.extend_enum_doc(
    enum.Enum('OrderBy', ['TIMESTAMP', 'SOURCE_TIMESTAMP']))

EventPayloadType = util.extend_enum_doc(
    enum.Enum('EventPayloadType', ['BINARY', 'JSON', 'SBS']))

EventId = util.namedtuple('EventId',
                          ['server', 'int: server identifier'],
                          ['instance', 'int: event instance identifier'])

EventType = typing.List[str]

# NOTE(review): definition truncated in this chunk — the remainder of this
# namedtuple declaration is outside the visible range.
EventPayload = util.namedtuple(
def generate():
    """Compile the source schemas and dump the repository to every target."""
    repository = sbs.Repository(*src_paths)
    serialized = repository.to_json()
    for target in dst_paths:
        json.encode_file(serialized, target, indent=None)
def test_invalid_repository_initialization_argument_type():
    """``None`` is not a valid repository initialization argument."""
    with pytest.raises(Exception):
        sbs.Repository(None)
def test_invalid_schema(schema):
    """Malformed schemas are rejected at repository construction time."""
    with pytest.raises(Exception):
        sbs.Repository(schema)