def test_dataclass_model_conversions_selection():
    """Per-class conversion selection maps field ``a`` to the ``b`` alias."""
    cases = [(Simple, SimpleModel2), (Lazy, lazy_model2)]
    for cls, model in cases:
        conversions = {cls: model}
        assert deserialize(cls, {"b": 0}, conversions=conversions) == cls(0)
        assert serialize(cls(0), conversions=conversions) == {"b": 0}
def test_flattened_serialized():
    """A flattened serialized field produces the same schema and output as Base."""
    flattened_schema = serialization_schema(WithFlattened)
    assert serialization_schema(Base) == flattened_schema == base_schema
    assert (
        serialize(Base, Base())
        == serialize(WithFlattened, WithFlattened())
        == {"serialized": 0}
    )
def test_inherited_serialized():
    """Serialized properties are inherited; overriding only changes the value."""
    for cls in (Base, Inherited, InheritedOverriden):
        assert serialization_schema(cls) == base_schema
    assert serialize(Base, Base()) == {"serialized": 0}
    assert serialize(Inherited, Inherited()) == {"serialized": 0}
    assert serialize(InheritedOverriden, InheritedOverriden()) == {"serialized": 1}
def test_flattened_converted_error():
    """Every (de)serialization/schema operation on Data3 must raise TypeError."""
    operations = [
        lambda: deserialize(Data3, {"attr": 0}),
        lambda: serialize(Data3, Data3(Field2(0))),
        lambda: deserialization_schema(Data3),
        lambda: serialization_schema(Data3),
        lambda: graphql_schema(query=[get_data3]),
    ]
    for operation in operations:
        with raises(TypeError):
            operation()
def annotate_whatrec(self, ioc: LoadedIoc, what: WhatRecord) -> WhatRecord:
    """
    Annotate WhatRecord instances with things ServerState knows about.

    Mutates record/group metadata in-place (gateway matches, per-plugin
    metadata) and returns the same ``what`` object.
    """
    # Either entry may be None: no V3 record match and/or no PVA group.
    matches = [
        (what.record.instance if what.record else None),
        what.pva_group
    ]
    for instance in matches:
        if instance is None:
            continue
        if not instance.is_pva:
            # For now, V3 only
            # NOTE(review): original nesting was lost; it is assumed that both
            # the gateway annotation and the shell-state annotation apply only
            # to non-PVA (V3) records -- TODO confirm against upstream.
            instance.metadata["gateway"] = apischema.serialize(
                self.get_gateway_matches(instance.name)
            )
            ioc.shell_state.annotate_record(instance)
        # Plugins may attach extra per-record metadata, keyed by plugin name.
        for plugin in self.plugins:
            if not plugin.results:
                continue
            info = list(plugin.results.find_record_metadata(instance.name))
            if info:
                plugin_key = StringWithContext(plugin.name, context=())
                instance.metadata[plugin_key] = info
    return what
def main(
    filename: AnyPath,
    dbd: Optional[str] = None,
    standin_directory: Optional[List[str]] = None,
    macros: Optional[str] = None,
    friendly: bool = False,
    use_gdb: bool = False,
    format: Optional[str] = None,
    expand: bool = False,
    friendly_format: str = "console",
    v3: bool = False,
):
    """Parse a database file per the CLI arguments and print the result.

    Output is either a friendly rendered form or indented JSON.
    """
    result = parse_from_cli_args(
        filename=filename,
        dbd=dbd,
        standin_directory=standin_directory,
        macros=macros,
        use_gdb=use_gdb,
        format=format,
        expand=expand,
        v3=v3,
    )
    if not friendly:
        # TODO: JSON -> obj -> JSON round tripping
        json_info = apischema.serialize(result)
        print(json.dumps(json_info, indent=4))
    else:
        fmt = FormatContext()
        print(fmt.render_object(result, friendly_format))
def _cli_main():
    """Entry point: parse CLI arguments, run ``main``, and dump JSON results."""
    args = _get_argparser().parse_args()
    serialized = apischema.serialize(main(**vars(args)))
    # Pretty-print only when requested.
    indent_options = {"indent": 4} if args.pretty else {}
    print(json.dumps(serialized, sort_keys=True, **indent_options))
def save_to_cache(self) -> bool:
    """Write the apischema-serialized form of ``self`` to its cache file.

    Returns False without writing when caching is disabled via settings.
    """
    if not settings.CACHE_PATH:
        return False
    serialized = apischema.serialize(self)
    with open(self.cache_filename, "wt") as fp:
        json.dump(serialized, fp=fp)
    return True
def annotate_record(self, record: RecordInstance) -> Optional[Dict[str, Any]]:
    """Annotate record with access security information.

    Returns the serialized access security group, or None when no
    configuration is loaded or no group matches the record.
    """
    if self.config is None:
        return None
    asg = self.config.get_group_from_record(record)
    if asg is None:
        return None
    return apischema.serialize(asg)
def test_simple():
    """A two-field ai record parses into the expected RecordInstance."""
    source = """\
record(ai, "rec:X") {
    field(A, "test")
    field(B, test)
}
"""
    db = Database.from_string(source, version=3)
    # Both quoted and unquoted field values parse to the same string.
    expected_fields = {
        name: RecordField(
            context=(LoadContext("None", line), ),
            name=name,
            dtype="",
            value="test",
        )
        for name, line in [("A", 2), ("B", 3)]
    }
    assert db.records["rec:X"] == RecordInstance(
        context=(LoadContext("None", 1), ),
        record_type="ai",
        name="rec:X",
        is_pva=False,
        fields=expected_fields,
    )
    # The whole database must survive an apischema round-trip.
    apischema.deserialize(Database, apischema.serialize(db))
def test_typed_dict():
    """TD3 schema generation, aliasing, missing-required errors, TD1 output."""
    expected_schema = {
        "type": "object",
        "properties": {
            "key1": {"type": "string"},
            "key2": {"type": "integer"},
            "key3": {"type": "boolean"},
        },
        "required": ["key2"],
        "additionalProperties": False,
        "$schema": "http://json-schema.org/draft/2019-09/schema#",
    }
    assert deserialization_schema(TD3) == expected_schema
    assert serialization_schema(TD3) == expected_schema
    # Aliased keys are capitalized on the wire, plain in the result.
    aliased = deserialize(TD3, {"Key2": 0, "Key3": True}, aliaser=str.capitalize)
    assert aliased == {"key2": 0, "key3": True}
    with raises(ValidationError):
        deserialize(TD3, {})
    assert serialize(TD1, {"key1": ""}) == {"key1": ""}
def _cli_main():
    """Entry point: parse CLI arguments, run ``main``, and print JSON output."""
    kwargs = vars(_get_argparser().parse_args())
    json_results = apischema.serialize(main(**kwargs))
    # Pretty-print only when requested on the command line.
    options = {"indent": 4} if kwargs["pretty"] else {}
    print(json.dumps(json_results, sort_keys=True, **options))
def main(
    path: AnyPath,
    friendly: bool = False,
    no_recurse: bool = False,
    keep_os_env: bool = False,
    graph: bool = False,
    graph_output: Optional[str] = None,
    file=sys.stdout,
):
    """Report a makefile's dependency information, or render it as a graph."""
    makefile_path = Makefile.find_makefile(path)
    makefile = Makefile.from_file(makefile_path, keep_os_env=keep_os_env)
    info = DependencyGroup.from_makefile(
        makefile,
        recurse=not no_recurse,
        keep_os_env=keep_os_env,
    )
    if graph:
        # An alternative to 'whatrecord graph'; both should have the same
        # result in the end.
        group_graph = DependencyGroupGraph(info)
        render_graph_to_file(group_graph.to_digraph(), filename=graph_output)
        return
    if friendly:
        fmt = FormatContext()
        print(fmt.render_object(info, "console"), file=file)
    else:
        # JSON always goes to stdout, regardless of ``file``.
        print(json.dumps(apischema.serialize(info), indent=4))
def serialize_tree(self, tree: Tree) -> Optional[dict]:
    """
    Serialize the ConfigurationFile held by the tree's bridge.

    Returns
    -------
    dict or None
        The apischema-serialized configuration, or None when serialization
        fails (the error is logged, not raised).
    """
    # Fix: the original annotated the return as ``dict`` even though the
    # error path falls through and yields None; callers must handle None.
    try:
        return serialize(
            ConfigurationFile,
            tree.bridge.data,
        )
    except Exception:
        # Best-effort: GUI callers get None rather than a crash.
        logger.exception('Error serializing file')
        return None
async def _cli_main():
    """Async entry point: run ``main`` and print pytmc plugin results as JSON."""
    args = _get_argparser().parse_args()
    results = await main(**vars(args))
    plugin_results = PytmcPluginResults.from_metadata_items(results)
    serialized = apischema.serialize(plugin_results)
    dump_kwargs = {"indent": 4} if args.pretty else {}
    print(json.dumps(serialized, sort_keys=True, **dump_kwargs))
def test_serialize(cls):
    """Instances of ``cls`` must round-trip through apischema (unless skipped)."""
    instance = try_to_instantiate(cls)
    serialized = apischema.serialize(instance)
    print(cls)
    print("Serialized:")
    print(serialized)
    deserialized = apischema.deserialize(cls, serialized)
    print("Deserialized:")
    print(deserialized)
    if cls in SKIP_DESERIALIZATION:
        # Round-trip equality is known not to hold for these classes.
        return
    assert deserialized == instance
async def async_load_ioc(
    identifier: Union[int, str],
    md: IocMetadata,
    standin_directories,
    use_gdb: bool = True,
    use_cache: bool = True,
) -> IocLoadResult:
    """
    Helper function for loading an IOC in a subprocess and relying on the
    cache.

    Parameters
    ----------
    identifier : int or str
        Caller-provided token echoed back in the returned IocLoadResult.
    md : IocMetadata
        Metadata of the IOC to load; its standin directories are updated
        in-place from ``standin_directories``.
    standin_directories :
        Replacement directories merged into ``md.standin_directories``.
    use_gdb : bool
        When True, gather binary information (via GDB) after loading.
    use_cache : bool
        Use and populate the on-disk cache; forced off when no cache path
        is configured in settings.

    Returns
    -------
    IocLoadResult
        ``result`` is the token "use_cache" on a cache hit or after saving
        to the cache, the serialized LoadedIoc otherwise, or an
        IocLoadFailure when any exception occurred during loading.
    """
    if not settings.CACHE_PATH:
        use_cache = False
    with time_context() as ctx:
        try:
            md.standin_directories.update(standin_directories)
            if use_cache:
                cached_ioc = load_cached_ioc(md)
                if cached_ioc:
                    # Cache hit: tell the server to reload from cache itself.
                    return IocLoadResult(identifier=identifier,
                                         load_time=ctx(),
                                         cache_hit=True,
                                         result="use_cache")
            loaded = LoadedIoc.from_metadata(md)
            if use_gdb:
                await md.get_binary_information()
            if use_cache:
                loaded.metadata.save_to_cache()
                loaded.save_to_cache()
                # Avoid pickling massive JSON blob; instruct server to load
                # from cache with token 'use_cache'
                serialized = "use_cache"
            else:
                serialized = apischema.serialize(loaded)
        except Exception as ex:
            # Any load failure is reported in-band, never raised.
            return IocLoadResult(
                identifier=identifier,
                load_time=ctx(),
                cache_hit=False,
                result=IocLoadFailure(
                    ex_class=type(ex).__name__,
                    ex_message=str(ex),
                    traceback=traceback.format_exc(),
                ),
            )
    # NOTE(review): assumes ctx() remains callable after the ``with`` block
    # exits -- confirm the time_context contract.
    return IocLoadResult(
        identifier=identifier,
        load_time=ctx(),
        cache_hit=False,
        result=serialized,
    )
def _cli_main():
    """Entry point: map the scope name to its ldap constant, run, print JSON."""
    kwargs = vars(_get_argparser().parse_args())
    scope_by_name = {
        "subtree": ldap.SCOPE_SUBTREE,
        "base": ldap.SCOPE_BASE,
        "onelevel": ldap.SCOPE_ONELEVEL,
    }
    kwargs["scope"] = scope_by_name[kwargs["scope"]]
    serialized = apischema.serialize(main(**kwargs))
    dump_kwargs = {"indent": 4} if kwargs["pretty"] else {}
    print(json.dumps(serialized, sort_keys=True, **dump_kwargs))
def test_msi_base_examples(substitution, template, include, macros, expected, all_global):
    """
    Exercise msi-style substitution expansion against expected output.

    Exactly one of ``substitution`` (a .substitutions file body) or
    ``template`` (expanded through a single Substitution) is provided by
    the parametrization; ``include`` is unused here but part of the
    shared parameter set.
    """
    macros = macros or {}
    if substitution:
        # Expand macros in the substitution text itself before parsing.
        macro_ctx = MacroContext()
        macro_ctx.define(**macros)
        sub = TemplateSubstitution.from_string(
            macro_ctx.expand(substitution),
            msi_format=True,
            filename="test.substitutions",
            all_global_scope=all_global,
        )
        result = []
        for single_sub in sub.substitutions:
            result.extend(single_sub.expand(template).splitlines())
        # Compare ignoring blank lines on both sides.
        result = "\n".join(line for line in result if line.strip())
        expected = "\n".join(
            line for line in expected.splitlines() if line.strip()
        )
        print("result")
        print("------")
        print(result)
        print("------")
        print("expected")
        print("------")
        print(expected)
        print("------")
        assert result == expected
    elif template:
        sub = Substitution(
            context=[LoadContext("None", 1)],
            filename=None,
            macros=macros,
        )
        result = sub.expand(template, search_paths=[conftest.MODULE_PATH])
        print("macros", macros)
        print("result")
        print("------")
        print(result)
        print("------")
        print("expected")
        print("------")
        print(expected)
        print("------")
        assert result.strip() == expected.strip()
    else:
        raise ValueError("Invalid test params?")
    # Whichever substitution object was built must survive a round-trip.
    serialized = apischema.serialize(sub)
    pprint.pprint(serialized)
    apischema.deserialize(type(sub), serialized)
async def main(records, as_json=False, file=sys.stdout):
    """Fetch info for the given records and print it as JSON or friendly text."""
    info = await get_record_info(*records)
    if as_json:
        # TODO: JSON -> obj -> JSON round tripping
        print(json.dumps(apischema.serialize(info), indent=4))
        return
    fmt = FormatContext()
    for pv, pv_get_info in info.items():
        # Underlined PV-name header, then the rendered details.
        print(pv, file=file)
        print("-" * len(pv), file=file)
        print(file=file)
        print(fmt.render_object(pv_get_info, "console"), file=file)
def test_lazy_dataclass_model():
    """Lazy's default model round-trips field ``a`` and generates its schema."""
    assert deserialize(Lazy, {"a": 0}) == Lazy(0)
    assert serialize(Lazy(0)) == {"a": 0}
    expected_schema = {
        "$schema": "http://json-schema.org/draft/2019-09/schema#",
        "type": "object",
        "properties": {"a": {"type": "integer"}},
        "required": ["a"],
        "additionalProperties": False,
    }
    assert deserialization_schema(Lazy) == expected_schema
def named_tuple(
    self,
    cls: Type[Tuple],
    types: Mapping[str, AnyType],
    defaults: Mapping[str, Any],
) -> Thunk[graphql.GraphQLType]:
    """Build the GraphQL object type for a NamedTuple-like class.

    Defaults that fail to serialize are left as ``graphql.Undefined``.
    """
    object_fields = []
    for name, tp in types.items():
        field_default = graphql.Undefined
        if name in defaults:
            # Best-effort: an unserializable default simply stays Undefined.
            with suppress(Exception):
                field_default = serialize(defaults[name])
        object_fields.append(ObjectField(name, tp, default=field_default))
    return self.object(cls, object_fields)
def test_simple_dataclass_model(d_conv, s_conv, alias):
    """Data round-trips through the given conversions under the aliased key."""
    assert deserialize(Data, {alias: 0}, conversion=d_conv) == Data(0)
    assert serialize(Data, Data(0), conversion=s_conv) == {alias: 0}
    expected_schema = {
        "$schema": "http://json-schema.org/draft/2019-09/schema#",
        "type": "object",
        "properties": {alias: {"type": "integer"}},
        "required": [alias],
        "additionalProperties": False,
    }
    assert deserialization_schema(Data, conversion=d_conv) == expected_schema
    assert serialization_schema(Data, conversion=s_conv) == expected_schema
def test_field_generic_conversion():
    """The field conversion turns Foo's dict into a list under ``values``."""
    assert serialize(Foo[str], Foo({1: "a", 0: "b"})) == {"values": ["b", "a"]}
    expected_schema = {
        "type": "object",
        "properties": {
            "values": {"type": "array", "items": {"type": "string"}},
        },
        "required": ["values"],
        "additionalProperties": False,
        "$schema": "http://json-schema.org/draft/2019-09/schema#",
    }
    assert serialization_schema(Foo[str]) == expected_schema
def test_breaktable(version):
    """A breaktable parses with all of its values kept as strings."""
    source = """\
breaktable(typeAttenLength) {
    0.8 0.18
    0.9 0.25
    8.0 150.13
    8.5 174.81
    9.0 204.32
}
"""
    db = Database.from_string(source, version=version)
    expected_values = (
        '0.8', '0.18',
        '0.9', '0.25',
        '8.0', '150.13',
        '8.5', '174.81',
        '9.0', '204.32',
    )
    assert db.breaktables["typeAttenLength"] == DatabaseBreakTable(
        name='typeAttenLength',
        values=expected_values,
    )
    # The database must survive an apischema round-trip.
    apischema.deserialize(Database, apischema.serialize(db))
def test_dbd_menus(version):
    """Menus parse into DatabaseMenu objects with their choice mappings."""
    source = """\
menu(stringoutPOST) {
    choice(stringoutPOST_OnChange, "On Change")
    choice(stringoutPOST_Always, "Always")
}
menu(menuScan) {
    choice(menuScanPassive, "Passive")
    choice(menuScanEvent, "Event")
    choice(menuScanI_O_Intr, "I/O Intr")
    choice(menuScan10_second, "10 second")
    choice(menuScan5_second, "5 second")
    choice(menuScan2_second, "2 second")
    choice(menuScan1_second, "1 second")
    choice(menuScan_5_second, ".5 second")
    choice(menuScan_2_second, ".2 second")
    choice(menuScan_1_second, ".1 second")
}
"""
    db = Database.from_string(source, version=version)
    assert db.menus["stringoutPOST"] == DatabaseMenu(
        context=(LoadContext("None", 1), ),
        name="stringoutPOST",
        choices={
            "stringoutPOST_OnChange": "On Change",
            "stringoutPOST_Always": "Always",
        },
    )
    scan_choices = {
        "menuScanPassive": "Passive",
        "menuScanEvent": "Event",
        "menuScanI_O_Intr": "I/O Intr",
        "menuScan10_second": "10 second",
        "menuScan5_second": "5 second",
        "menuScan2_second": "2 second",
        "menuScan1_second": "1 second",
        "menuScan_5_second": ".5 second",
        "menuScan_2_second": ".2 second",
        "menuScan_1_second": ".1 second",
    }
    assert db.menus["menuScan"] == DatabaseMenu(
        context=(LoadContext("None", 5), ),
        name="menuScan",
        choices=scan_choices,
    )
    apischema.deserialize(Database, apischema.serialize(db))
def _object_field(self, field: Field, field_type: AnyType) -> ObjectField:
    """Convert a dataclass field into a GraphQL ObjectField."""
    field_type, field_conversions, _ = get_field_conversion(
        field, field_type, self.operation
    )
    default: Any = graphql.Undefined
    if not is_required(field):
        # Best-effort: an unserializable default stays Undefined.
        with suppress(Exception):
            default = serialize(get_default(field), conversions=field_conversions)
    return ObjectField(
        field.name,
        field_type,
        alias=get_alias(field),
        conversions=field_conversions,
        default=default,
        required=is_required(field),
        schema=field.metadata.get(SCHEMA_METADATA),
    )
def check_serialization(
    obj, deserialize: bool = True, require_same_source: bool = True
):
    """
    Round-trip a dataclass object with the serialization library.

    Skipped (returning None) when ``obj`` is None, apischema is not
    available, or APISCHEMA_SKIP is set.

    Checks:
    * ``obj`` can be serialized to JSON
    * Serialized JSON can be deserialized back into an equivalent ``obj``
    * Deserialized object has the same source code representation
    """
    if obj is None or apischema is None or APISCHEMA_SKIP:
        return
    try:
        serialized = apischema.serialize(
            obj,
            exclude_defaults=True,
            no_copy=True,
        )
    except Exception:
        # Dump the raw dataclass contents to aid debugging, then re-raise.
        print(json.dumps(dataclasses.asdict(obj), indent=2))
        raise
    print(f"Serialized {type(obj)} to:")
    print(json.dumps(serialized, indent=2))
    print()
    if not deserialize:
        return serialized, None
    deserialized = apischema.deserialize(type(obj), serialized, no_copy=True)
    print(f"Deserialized {type(obj)} back to:")
    print(repr(deserialized))
    print("Or:")
    print(deserialized)
    if require_same_source:
        assert str(obj) == str(deserialized), \
            "Deserialized object does not produce identical source code"
    return serialized, deserialized
def test_dbd_recordtype(version):
    """Record type fields parse with their modifiers into RecordTypeField."""
    source = """\
recordtype(stringin) {
    field(NAME, DBF_STRING) {
        size(61)
        special(SPC_NOMOD)
        prompt("Record Name")
    }
    field(PINI, DBF_MENU) {
        menu(menuPini)
        interest(1)
        promptgroup("20 - Scan")
        prompt("Process at iocInit")
    }
}
"""
    db = Database.from_string(source, version=version)
    name_field = RecordTypeField(
        context=(LoadContext("None", 2), ),
        name="NAME",
        type="DBF_STRING",
        special="SPC_NOMOD",
        prompt="Record Name",
        size="61",
        body={},
    )
    pini_field = RecordTypeField(
        context=(LoadContext("None", 7), ),
        name="PINI",
        type="DBF_MENU",
        menu="menuPini",
        interest="1",
        promptgroup="20 - Scan",
        prompt="Process at iocInit",
        body={},
    )
    assert db.record_types["stringin"] == RecordType(
        context=(LoadContext("None", 1), ),
        name="stringin",
        cdefs=[],
        fields={"NAME": name_field, "PINI": pini_field},
    )
    apischema.deserialize(Database, apischema.serialize(db))
def test_parse(snl_program: SequencerProgram):
    """Parsed SNL programs serialize, deserialize, and render back to source."""
    print(snl_program)
    serialized = apischema.serialize(snl_program)
    # Deserialization must succeed; equality with the original is not
    # asserted here.
    apischema.deserialize(SequencerProgram, serialized)
    round_trip = str(snl_program)
    print("Round-tripped to")
    print(round_trip)
    # Internal parser objects must not leak into the rendered source.
    for leak_marker in ("(context=(", "(Tree=(", "(Token=("):
        assert leak_marker not in round_trip