def main():
    """Main"""
    args = _create_parser().parse_args()

    json_schema_repo = json.SchemaRepository(
        json.json_schema_repo,
        *args.additional_json_schemas_paths)

    # collect translators from built-in modules and from modules named on
    # the command line
    translators = []
    for module in itertools.chain(builtin_translators, args.module_names):
        translators += importlib.import_module(module).translators

    format = {'yaml': json.Format.YAML,
              'json': json.Format.JSON}[args.format]

    if args.action == 'list':
        output = [_translator_to_json(trans) for trans in translators]

    elif args.action == 'translate':
        # last registered translator with matching input/output types wins
        trans = util.first(
            translators[::-1],
            lambda i: (i.input_type == args.input_type and
                       i.output_type == args.output_type))
        if not trans:
            raise Exception('translator not found')
        input_conf = json.decode(sys.stdin.read(), format=format)
        if trans.input_schema:
            json_schema_repo.validate(trans.input_schema, input_conf)
        output = trans.translate(input_conf)
        if trans.output_schema:
            json_schema_repo.validate(trans.output_schema, output)

    else:
        raise NotImplementedError()

    print(json.encode(output, format=format, indent=4))
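# A minimal sketch (not from the source) of a translator plugin module as
# main() consumes it: importlib loads the named module and reads its
# `translators` list, where each entry exposes the input_type, output_type,
# input_schema, output_schema and translate attributes used above. The
# SimpleNamespace entry and the identity translate below are illustrative
# only.
import types


def _identity_translate(conf):
    # pass the input configuration through unchanged (illustration only)
    return conf


translators = [types.SimpleNamespace(input_type='example',
                                     output_type='example',
                                     input_schema=None,
                                     output_schema=None,
                                     translate=_identity_translate)]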
def run_translator(action, args, data=None, decode=True):
    # `format` is a json.Format value provided by the enclosing module scope
    data_str = (json.encode(data, format=format)
                if data is not None else None)
    p = subprocess.run(['python', '-m', 'hat.translator',
                        action,
                        '--format', format.name.lower(),
                        '--module', 'test_sys.test_translator.test_main',
                        '--additional-json-schemas-path',
                        str(Path(__file__).with_suffix('.yaml'))] + args,
                       input=data_str,
                       stdout=subprocess.PIPE,
                       stderr=subprocess.DEVNULL,
                       check=True,
                       universal_newlines=True)
    return json.decode(p.stdout, format=format) if decode else p.stdout
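# Hedged usage sketch (assumed, not part of the source tests): driving the
# helper above for the 'list' action and checking the decoded output shape.
def test_run_translator_list_sketch():
    listed = run_translator('list', [])
    assert isinstance(listed, list)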
def event_payload_from_sbs(data):
    """Create new EventPayload based on SBS data

    Args:
        data (hat.sbs.Data): SBS data

    Returns:
        EventPayload

    """
    return {
        'binary': lambda: EventPayload(type=EventPayloadType.BINARY,
                                       data=data[1]),
        'json': lambda: EventPayload(type=EventPayloadType.JSON,
                                     data=json.decode(data[1])),
        'sbs': lambda: EventPayload(type=EventPayloadType.SBS,
                                    data=_sbs_data_from_sbs(data[1]))
    }[data[0]]()
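# Illustrative round trip (assumption, not from the source): the SBS union
# data is a (label, value) pair, so a JSON payload arrives as its serialized
# string form and is decoded back into JSON data.
payload = event_payload_from_sbs(
    ('json', json.encode({'value': 1}, format=json.Format.JSON)))
assert payload.type == EventPayloadType.JSON
assert payload.data == {'value': 1}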
async def _receive_loop(self):
    try:
        while True:
            msg_ws = await self._ws.receive()
            if self._ws.closed or msg_ws.type == aiohttp.WSMsgType.CLOSING:
                break
            if msg_ws.type != aiohttp.WSMsgType.TEXT:
                raise Exception('unsupported message type')
            msg = json.decode(msg_ws.data)
            process_payload = {
                'DATA': self._process_juggler_data,
                'MESSAGE': self._process_juggler_message
            }.get(msg['type'])
            process_payload(msg['payload'])
    finally:
        self._async_group.close()
        await aio.uncancellable(self._ws.close(), raise_cancel=False)
        if self._session:
            await self._session.close()
        self._message_queue.close()
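# Wire-format sketch (inferred from the handling above, not a documented
# guarantee): each text frame decodes to a JSON object with 'type' and
# 'payload' keys, dispatched to the matching _process_juggler_* handler.
example_data_msg = {'type': 'DATA', 'payload': {'key': 'value'}}
example_notification_msg = {'type': 'MESSAGE', 'payload': 'hello'}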
def test_schema_repository_init_duplicate_id():
    schema = json.decode("id: 'xyz://abc'", format=json.Format.YAML)
    repo = json.SchemaRepository(schema)
    assert repo.to_json() == json.SchemaRepository(repo, repo).to_json()
    with pytest.raises(Exception):
        json.SchemaRepository(schema, schema)
def test_schema_repository_init():
    schema = json.decode("id: 'xyz://abc'", format=json.Format.YAML)
    repo = json.SchemaRepository(schema)
    assert repo.to_json()
def test_json_schema_repository_validate_invalid(schemas, schema_id, data):
    repo = json.SchemaRepository(*[json.decode(i, format=json.Format.YAML)
                                   for i in schemas])
    with pytest.raises(Exception):
        repo.validate(schema_id, data)
def test_json_schema_repository_validate(schemas, schema_id, data):
    repo = json.SchemaRepository(*[json.decode(i, format=json.Format.YAML)
                                   for i in schemas])
    repo.validate(schema_id, data)
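# Concrete sketch (assumed schema/data values, not the source parametrization)
# of the valid case exercised above: one YAML schema plus data conforming to
# it.
def test_json_schema_repository_validate_sketch():
    repo = json.SchemaRepository(
        json.decode("id: 'xyz://abc'\ntype: integer",
                    format=json.Format.YAML))
    repo.validate('xyz://abc', 42)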