def test_schema_repository_init_paths(tmp_path):
    """Building a repository from a directory yields the same result as
    building it from the individual schema files inside it."""
    schemas_dir = tmp_path / 'repo'
    schemas_dir.mkdir()
    json_file = schemas_dir / 'schema.json'
    yaml_file = schemas_dir / 'schema.yaml'
    json_file.write_text('{"id": "xyz1://abc1"}', encoding='utf-8')
    yaml_file.write_text("id: 'xyz2://abc2'", encoding='utf-8')
    from_dir = json.SchemaRepository(schemas_dir)
    from_files = json.SchemaRepository(json_file, yaml_file)
    assert from_dir.to_json() == from_files.to_json()
def main():
    """Translator CLI entry point.

    Builds a schema repository from the builtin schemas plus any
    additional user-supplied schema paths, collects translators from the
    builtin and user-specified modules, then either lists the available
    translators or runs a single translation: input is read from stdin,
    output is printed to stdout.
    """
    args = _create_parser().parse_args()
    json_schema_repo = json.SchemaRepository(
        json.json_schema_repo, *args.additional_json_schemas_paths)
    translators = []
    for module in itertools.chain(builtin_translators, args.module_names):
        translators += importlib.import_module(module).translators
    # renamed from `format` to avoid shadowing the builtin
    fmt = {'yaml': json.Format.YAML, 'json': json.Format.JSON}[args.format]
    if args.action == 'list':
        output = [_translator_to_json(trans) for trans in translators]
    elif args.action == 'translate':
        # search in reverse so translators registered later take
        # precedence over earlier ones
        trans = util.first(translators[::-1],
                           lambda i: (i.input_type == args.input_type and
                                      i.output_type == args.output_type))
        if not trans:
            raise Exception('translator not found')
        input_conf = json.decode(sys.stdin.read(), format=fmt)
        # schemas are optional on both sides of the translation
        if trans.input_schema:
            json_schema_repo.validate(trans.input_schema, input_conf)
        output = trans.translate(input_conf)
        if trans.output_schema:
            json_schema_repo.validate(trans.output_schema, output)
    else:
        raise NotImplementedError()
    print(json.encode(output, format=fmt, indent=4))
async def factory(conf, schema_repo=None):
    # Create a view manager for `conf`, defaulting to an empty schema
    # repository when none is supplied.
    if schema_repo is None:
        schema_repo = json.SchemaRepository()
    manager = await hat.gui.view.create_view_manager(conf, schema_repo)
    # Track the manager (in an enclosing-scope list) so it can be closed
    # during cleanup.
    managers.append(manager)
    return manager
    # NOTE(review): unreachable - this statement sits after `return`. In
    # the original (whitespace-collapsed) source it most likely belonged
    # to the enclosing fixture's teardown, closing each tracked manager;
    # confirm intended placement.
    await manager.async_close()
def main():
    """GUI server entry point: load and validate configuration, then run
    the asyncio main loop."""
    aio.init_asyncio()
    args = create_parser().parse_args()
    conf = json.decode_file(args.conf)
    schema_repo = json.SchemaRepository(
        hat.gui.common.json_schema_repo,
        *args.additional_json_schemas_paths)
    schema_repo.validate('hat://gui/main.yaml#', conf)
    # each adapter module may publish its own configuration schema
    for adapter_conf in conf['adapters']:
        adapter_module = importlib.import_module(adapter_conf['module'])
        schema_id = adapter_module.json_schema_id
        if schema_id:
            schema_repo.validate(schema_id, adapter_conf)
    logging.config.dictConfig(conf['log'])
    # CancelledError on shutdown is expected - exit quietly
    with contextlib.suppress(asyncio.CancelledError):
        aio.run_asyncio(async_main(conf, args.ui_path, schema_repo))
def main():
    """Event server entry point: load configuration, validate it and the
    configuration of every component, then run the asyncio main loop."""
    aio.init_asyncio()
    args = _create_parser().parse_args()
    conf = json.decode_file(args.conf)
    schema_repo = json.SchemaRepository(
        hat.event.common.json_schema_repo,
        *args.additional_json_schemas_paths)
    schema_repo.validate('hat://event/main.yaml#', conf)
    # the backend plus every module may declare its own schema
    component_confs = [conf['backend_engine']['backend'],
                       *conf['module_engine']['modules']]
    for component_conf in component_confs:
        component_module = importlib.import_module(component_conf['module'])
        schema_id = component_module.json_schema_id
        if schema_id:
            schema_repo.validate(schema_id, component_conf)
    logging.config.dictConfig(conf['log'])
    # CancelledError on shutdown is expected - exit quietly
    with contextlib.suppress(asyncio.CancelledError):
        aio.run_asyncio(async_main(conf))
def _ext_get_view(view, json_schema_repo):
    """Load all files of a view into memory, validate the optional view
    configuration against an embedded schema (if present) and return a
    View instance."""

    def load_file(file_path):
        # decode by extension: utf-8 text, structured data, vt markup,
        # or base64-encoded binary for everything else
        suffix = file_path.suffix
        if suffix in {'.js', '.css', '.txt'}:
            with open(file_path, encoding='utf-8') as stream:
                return stream.read()
        if suffix in {'.json', '.yaml', '.yml'}:
            return json.decode_file(file_path)
        if suffix in {'.xml', '.svg'}:
            with open(file_path, encoding='utf-8') as stream:
                return hat.gui.vt.parse(stream)
        with open(file_path, 'rb') as stream:
            raw = stream.read()
        return base64.b64encode(raw).decode('utf-8')

    data = {}
    view_path = util.parse_env_path(view['view_path'])
    try:
        for file_path in view_path.rglob('*'):
            if file_path.is_dir():
                continue
            rel_name = file_path.relative_to(view_path).as_posix()
            data[rel_name] = load_file(file_path)
    except Exception as e:
        mlog.error('error loading view data %s', e, exc_info=e)
        raise
    conf = None
    if view['conf_path'] is not None:
        conf = json.decode_file(util.parse_env_path(view['conf_path']))
    # a schema file bundled with the view (any supported extension)
    # validates the view configuration
    schema = util.first(v for k, v in data.items()
                        if k in {'schema.json', 'schema.yaml', 'schema.yml'})
    if schema:
        repo = json.SchemaRepository(json_schema_repo, schema)
        repo.validate(schema['id'], conf)
    return View(name=view['name'], conf=conf, data=data)
def main():
    """Application main entry point"""
    args = _create_parser().parse_args()
    # repository combines the bundled default schemas with the
    # user-supplied schemas file
    json_schema_repo = json.SchemaRepository(json.default_schemas_json_path,
                                             args.schemas_json_path)
    # logging is configured only when an explicit log configuration file
    # is provided
    if args.log_conf_path:
        log_conf = json.decode_file(args.log_conf_path)
        json_schema_repo.validate('hat://logging.yaml#', log_conf)
        logging.config.dictConfig(log_conf)
    # dispatch on the requested action; anything other than
    # 'calculate'/'output' falls through to running the server
    if args.action == 'calculate':
        # NOTE(review): args.output_path is passed twice (5th and last
        # argument) - verify against calculate()'s signature; one of
        # these likely should be a different path argument
        calculate(json_schema_repo, args.params_path, args.method,
                  args.result_path, args.output_path, args.output_type,
                  args.output_panel_id, args.output_path)
    elif args.action == 'output':
        # NOTE(review): args.output_path is passed twice here as well -
        # confirm against output()'s signature
        output(json_schema_repo, args.output_path, args.result_path,
               args.output_type, args.output_panel_id, args.output_path)
    else:
        server(json_schema_repo, args.addr, args.pem_path, args.ui_path)
from pathlib import Path
import datetime
import enum
import struct
import typing

from hat import chatter
from hat import sbs
from hat import util
from hat.util import json
import hat.monitor.common

# directory containing this module's bundled resource files
package_path = Path(__file__).parent

# combined JSON schema repository: base hat schemas, monitor schemas and
# this package's pre-built repository file
json_schema_repo = json.SchemaRepository(
    json.json_schema_repo,
    hat.monitor.common.json_schema_repo,
    json.SchemaRepository.from_json(package_path / 'json_schema_repo.json'))

# combined SBS repository: chatter definitions plus this package's
# pre-built repository file
sbs_repo = sbs.Repository(
    chatter.sbs_repo,
    sbs.Repository.from_json(package_path / 'sbs_repo.json'))

# sort direction used when querying events
Order = util.extend_enum_doc(enum.Enum('Order', ['DESCENDING', 'ASCENDING']))

# timestamp field used for ordering query results
OrderBy = util.extend_enum_doc(
    enum.Enum('OrderBy', ['TIMESTAMP', 'SOURCE_TIMESTAMP']))

# wire representation of an event payload
EventPayloadType = util.extend_enum_doc(
    enum.Enum('EventPayloadType', ['BINARY', 'JSON', 'SBS']))

# event identifier: (server id, per-server instance id)
EventId = util.namedtuple(
    'EventId',
    ['server', 'int: server identifier'],
    ['instance', 'int: event instance identifier'])
def test_schema_repository_init_duplicate_id():
    """Merging a repository with itself is a no-op, while feeding the
    same raw schema twice must be rejected."""
    schema = json.decode("id: 'xyz://abc'", format=json.Format.YAML)
    base_repo = json.SchemaRepository(schema)
    merged_repo = json.SchemaRepository(base_repo, base_repo)
    assert base_repo.to_json() == merged_repo.to_json()
    with pytest.raises(Exception):
        json.SchemaRepository(schema, schema)
def test_schema_repository_init():
    """A repository built from a single decoded schema serializes to a
    non-empty JSON representation."""
    decoded_schema = json.decode("id: 'xyz://abc'", format=json.Format.YAML)
    assert json.SchemaRepository(decoded_schema).to_json()
def test_schema_repository_init_empty():
    """A repository created without any schemas serializes to an empty
    (falsy) JSON representation."""
    empty_repo = json.SchemaRepository()
    assert not empty_repo.to_json()
def test_json_schema_repository_validate_invalid(schemas, schema_id, data):
    """Validating data that does not conform to the schema raises."""
    decoded = (json.decode(schema, format=json.Format.YAML)
               for schema in schemas)
    repo = json.SchemaRepository(*decoded)
    with pytest.raises(Exception):
        repo.validate(schema_id, data)
def test_json_schema_repository_validate(schemas, schema_id, data):
    """Validating conforming data succeeds without raising."""
    decoded = (json.decode(schema, format=json.Format.YAML)
               for schema in schemas)
    repo = json.SchemaRepository(*decoded)
    repo.validate(schema_id, data)
def generate():
    """Serialize the schema repository built from *src_paths* and write
    the result to every path in *dst_paths*."""
    repo_json = json.SchemaRepository(*src_paths).to_json()
    for target in dst_paths:
        json.encode_file(repo_json, target, indent=None)