def test_basic():
    """Deserializing panel payloads yields the panel subclass matching "type"."""
    row_payload = {
        "collapsed": False,
        "gridPos": {"h": 1, "w": 24, "x": 0, "y": 0},
        "id": 2,
        "panels": [],
        "title": "LFE Vacuum",
        "type": "row",
    }
    row_panel = apischema.deserialize(grafana.AnyPanel, row_payload)
    assert isinstance(row_panel, grafana.RowPanel)

    # A minimal payload with only "type" still selects the right subclass.
    gauge_panel = apischema.deserialize(grafana.AnyPanel, {"type": "bargauge"})
    assert isinstance(gauge_panel, grafana.BarGaugePanel)
def test_raw():
    """A raw deserializer built from ``sfx_version`` combines version and suffix.

    Fix: the original registered the same raw deserializer twice; a single
    registration is sufficient and the duplicate call was redundant.
    """
    deserializer(to_raw_deserializer(sfx_version))
    # Suffix is optional and defaults away.
    assert deserialize(SuffixedVersion, {"version": 42}) == "42"
    assert deserialize(SuffixedVersion, {
        "version": 42,
        "suffix": "ok"
    }) == "42ok"
    # A string version must be rejected (no coercion here).
    with raises(ValidationError):
        deserialize(SuffixedVersion, {"version": "42"})
def test_flattened_converted_error():
    """Every (de)serialization entry point must reject Data3 with TypeError."""
    failing_calls = (
        lambda: deserialize(Data3, {"attr": 0}),
        lambda: serialize(Data3, Data3(Field2(0))),
        lambda: deserialization_schema(Data3),
        lambda: serialization_schema(Data3),
        lambda: graphql_schema(query=[get_data3]),
    )
    for call in failing_calls:
        with raises(TypeError):
            call()
def test_generic_object_deserialization():
    """repeat_conv expands an {"item", "number"} object into a repeated list."""
    expanded = deserialize(
        Collection[int], {"item": 0, "number": 3}, conversion=repeat_conv
    )
    assert expanded == [0, 0, 0]
    # An int item does not satisfy Collection[str].
    with raises(ValidationError):
        deserialize(
            Collection[str], {"item": 0, "number": 3}, conversion=repeat_conv
        )
def _print_comparison(result, expected):
    """Print both sides of a comparison so pytest output shows them on failure."""
    for label, text in (("result", result), ("expected", expected)):
        print(label)
        print("------")
        print(text)
        print("------")


def test_msi_base_examples(substitution, template, include, macros, expected, all_global):
    """Expand msi-style substitutions (or a bare template) and compare output.

    Parameters come from the test fixture/parametrization:

    * ``substitution`` -- msi-format substitution source (takes precedence)
    * ``template`` -- template text to expand directly when no substitution
    * ``include`` -- fixture parameter, unused here (TODO: confirm intent)
    * ``macros`` -- macro definitions applied before expansion
    * ``expected`` -- expected expansion output
    * ``all_global`` -- whether macro scope is global across substitutions

    Fix: the duplicated print scaffolding is factored into ``_print_comparison``.
    """
    macros = macros or {}
    if substitution:
        macro_ctx = MacroContext()
        macro_ctx.define(**macros)
        sub = TemplateSubstitution.from_string(
            macro_ctx.expand(substitution),
            msi_format=True,
            filename="test.substitutions",
            all_global_scope=all_global,
        )
        result = []
        for single_sub in sub.substitutions:
            result.extend(single_sub.expand(template).splitlines())
        # Blank lines are not significant in the comparison.
        result = "\n".join(line for line in result if line.strip())
        expected = "\n".join(
            line for line in expected.splitlines() if line.strip()
        )
        _print_comparison(result, expected)
        assert result == expected
    elif template:
        sub = Substitution(
            context=[LoadContext("None", 1)],
            filename=None,
            macros=macros,
        )
        result = sub.expand(template, search_paths=[conftest.MODULE_PATH])
        print("macros", macros)
        _print_comparison(result, expected)
        assert result.strip() == expected.strip()
    else:
        raise ValueError("Invalid test params?")

    # Whatever was built must also survive a serialization round trip.
    serialized = apischema.serialize(sub)
    pprint.pprint(serialized)
    apischema.deserialize(type(sub), serialized)
def main(filename: str, filtered_devices: Optional[Sequence[str]] = None, verbose: int = 0, *, cleanup: bool = True):
    """Load a configuration file, run its device checks, and report via rich.

    Parameters
    ----------
    filename : str
        Path to the YAML configuration file.
    filtered_devices : sequence of str, optional
        If given, only check these devices.
    verbose : int, optional
        Verbosity level forwarded to the result logger.
    cleanup : bool, keyword-only
        Run ophyd cleanup on exit (default True).

    Fix: the config file handle was opened without being closed; use ``with``.
    """
    with open(filename) as fp:
        serialized_config = yaml.safe_load(fp)
    config = apischema.deserialize(ConfigurationFile, serialized_config)
    console = rich.console.Console()
    try:
        with console.status("[bold green] Performing checks..."):
            for info in get_configurations_from_file(
                    config, filtered_devices=filtered_devices):
                # Per-device load failures are reported, not fatal.
                if isinstance(info, ConfigFileHappiError):
                    console.print("Failed to load", info.dev_name)
                    continue
                if isinstance(info, Exception):
                    console.print("Failed to load", info)
                    continue
                severity, results = check_device(
                    info.device, info.dev_config.checks
                )
                log_results_rich(
                    console,
                    device=info.device,
                    config=info.dev_config,
                    severity=severity,
                    results=results,
                    verbose=verbose,
                )
    finally:
        if cleanup:
            ophyd_cleanup()
def from_cache(cls: Type[T], key: CacheKey) -> Optional[T]:
    """Load the object based on its key from the whatrecord cache."""
    if cls is Cached:
        raise RuntimeError(f"Class {cls} is not intended to be saved/loaded")
    if cls._cache_path_ is None:
        # Caching is disabled entirely.
        return None

    cache_file = cls._get_cache_filename(key)
    try:
        with open(cache_file, "rb") as fp:
            raw = json.load(fp)
    except FileNotFoundError:
        # No cache entry exists for this key.
        return None
    except json.JSONDecodeError:
        logger.debug("Failed to deserialize %s %s", cls, key, exc_info=True)
        return None

    try:
        return apischema.deserialize(cls, raw)
    except Exception:
        # A stale/incompatible cache entry is treated as a miss.
        logger.debug("Failed to deserialize %s %s", cls, key, exc_info=True)
        return None
def test_breaktable(version):
    """A breaktable block parses into a DatabaseBreakTable and round-trips."""
    db = Database.from_string("""\
breaktable(typeAttenLength) {
    0.8 0.18
    0.9 0.25
    8.0 150.13
    8.5 174.81
    9.0 204.32
}
""", version=version)
    expected = DatabaseBreakTable(
        name='typeAttenLength',
        values=(
            '0.8', '0.18',
            '0.9', '0.25',
            '8.0', '150.13',
            '8.5', '174.81',
            '9.0', '204.32',
        ),
    )
    assert db.breaktables["typeAttenLength"] == expected
    # Serialization round trip must not raise.
    apischema.deserialize(Database, apischema.serialize(db))
def open_file(self, *args, filename: Optional[str] = None, **kwargs):
    """
    Open an existing file and create a new tab containing it.

    The parameters are open as to accept inputs from any signal.

    Parameters
    ----------
    filename : str, optional
        Path of the config file to open.  If omitted, a dialog will
        appear to prompt the user for a filepath.
    """
    if filename is None:
        filename, _ = QFileDialog.getOpenFileName(
            parent=self,
            caption='Select a config',
            filter='Json Files (*.json)',
        )
    if not filename:
        # User cancelled the dialog (or an empty path was given).
        return
    with open(filename, 'r') as fd:
        contents = json.load(fd)
    config_file = deserialize(ConfigurationFile, contents)
    tree = Tree(config_file=config_file, full_path=filename)
    self.tab_widget.addTab(tree, self.get_tab_name(filename))
    # Focus the newly-created tab.
    self.tab_widget.setCurrentIndex(self.tab_widget.count() - 1)
def test_dbd_menus(version):
    """dbd menu blocks parse into DatabaseMenu entries and round-trip."""
    db = Database.from_string("""\
menu(stringoutPOST) {
    choice(stringoutPOST_OnChange, "On Change")
    choice(stringoutPOST_Always, "Always")
}
menu(menuScan) {
    choice(menuScanPassive, "Passive")
    choice(menuScanEvent, "Event")
    choice(menuScanI_O_Intr, "I/O Intr")
    choice(menuScan10_second, "10 second")
    choice(menuScan5_second, "5 second")
    choice(menuScan2_second, "2 second")
    choice(menuScan1_second, "1 second")
    choice(menuScan_5_second, ".5 second")
    choice(menuScan_2_second, ".2 second")
    choice(menuScan_1_second, ".1 second")
}
""", version=version)
    scan_choices = {
        "menuScanPassive": "Passive",
        "menuScanEvent": "Event",
        "menuScanI_O_Intr": "I/O Intr",
        "menuScan10_second": "10 second",
        "menuScan5_second": "5 second",
        "menuScan2_second": "2 second",
        "menuScan1_second": "1 second",
        "menuScan_5_second": ".5 second",
        "menuScan_2_second": ".2 second",
        "menuScan_1_second": ".1 second",
    }
    # Context line numbers match the menu headers in the source above.
    assert db.menus["stringoutPOST"] == DatabaseMenu(
        context=(LoadContext("None", 1), ),
        name="stringoutPOST",
        choices={
            "stringoutPOST_OnChange": "On Change",
            "stringoutPOST_Always": "Always",
        },
    )
    assert db.menus["menuScan"] == DatabaseMenu(
        context=(LoadContext("None", 5), ),
        name="menuScan",
        choices=scan_choices,
    )
    apischema.deserialize(Database, apischema.serialize(db))
async def get_iocs(pattern: str = "*", server: Optional[str] = None,
                   regex: bool = False) -> IocGetMatchesResponse:
    """Query the server for IOCs whose names match *pattern*."""
    query_params = dict(pattern=pattern, regex=str(regex))
    raw = await make_query(
        "/api/ioc/matches", server=server, params=query_params
    )
    return apischema.deserialize(IocGetMatchesResponse, raw)
async def get_record_info(*records, server: Optional[str] = None
                          ) -> Dict[str, PVGetInfo]:
    """Get record information from the server."""
    raw = await make_query(
        "/api/pv/info", server=server, params=dict(pv=list(records))
    )
    # The response maps each PV name to its info structure.
    return apischema.deserialize(Dict[str, PVGetInfo], raw)
def _parse_rpc_response(
        data: dict) -> Union[SubscriptionNotification, Error, Ok]:
    """Decode a JSON-RPC payload into a notification or an Ok/Error result."""
    if "params" not in data:
        # Plain response: let the generic parser classify Ok vs Error.
        return cast(Union[Ok, Error], parse(data))
    request = create_request(data)
    notification_type = _NOTIFICATION_MAP[request.method]
    notification: SubscriptionNotification = deserialize(
        notification_type, request.params
    )
    return notification
def test_dbd_recordtype(version):
    """A recordtype with field bodies parses into RecordType and round-trips."""
    db = Database.from_string("""\
recordtype(stringin) {
    field(NAME, DBF_STRING) {
        size(61)
        special(SPC_NOMOD)
        prompt("Record Name")
    }
    field(PINI, DBF_MENU) {
        menu(menuPini)
        interest(1)
        promptgroup("20 - Scan")
        prompt("Process at iocInit")
    }
}
""", version=version)
    # Field contexts point at the field() lines in the source above.
    expected_fields = {
        "NAME": RecordTypeField(
            context=(LoadContext("None", 2), ),
            name="NAME",
            type="DBF_STRING",
            special="SPC_NOMOD",
            prompt="Record Name",
            size="61",
            body={},
        ),
        "PINI": RecordTypeField(
            context=(LoadContext("None", 7), ),
            name="PINI",
            type="DBF_MENU",
            menu="menuPini",
            interest="1",
            promptgroup="20 - Scan",
            prompt="Process at iocInit",
            body={},
        ),
    }
    assert db.record_types["stringin"] == RecordType(
        context=(LoadContext("None", 1), ),
        name="stringin",
        cdefs=[],
        fields=expected_fields,
    )
    apischema.deserialize(Database, apischema.serialize(db))
async def update(self) -> Optional[PluginResults]:
    """Call the plugin and get new information, storing it in results."""
    script_output = await util.run_script_with_json_output(self.script)
    self.results_json = script_output or {}
    # Start from the raw JSON's file list; replaced below once the payload
    # deserializes successfully.
    self.files_to_monitor = self.results_json.get("files_to_monitor", {})
    self.results = apischema.deserialize(PluginResults, self.results_json)
    if self.results:
        self.files_to_monitor = self.results.files_to_monitor
    return self.results
def id_from_global(cls: Type[Node_], global_id: GlobalId[Node_]) -> Id:
    """Return the typed id embedded in *global_id*, validating its node type."""
    if global_id.node_type != cls:
        raise ValueError(
            f"Expected {cls.__name__} global id,"
            f" found {global_id.node_type.__name__} global id"
        )
    declared_id_type = getattr(cls, ID_TYPE_ATTR)
    # Coercion lets integer ids deserialize into the declared id type.
    typed_id = deserialize(declared_id_type, global_id.id, coerce=True)
    return cast(Id, typed_id)
def test_parse(snl_program: SequencerProgram):
    """The SNL program serializes cleanly and str() leaves no parser artifacts."""
    print(snl_program)
    serialized = apischema.serialize(snl_program)
    apischema.deserialize(SequencerProgram, serialized)

    round_trip = str(snl_program)
    print("Round-tripped to")
    print(round_trip)
    # Internal lark/parser representations must not leak into rendered source.
    for forbidden in ("(context=(", "(Tree=(", "(Token=("):
        assert forbidden not in round_trip
def test_serialize(cls):
    """Each dataclass instantiates and survives a serialization round trip."""
    obj = try_to_instantiate(cls)
    as_json = apischema.serialize(obj)
    print(cls)
    print("Serialized:")
    print(as_json)
    restored = apischema.deserialize(cls, as_json)
    print("Deserialized:")
    print(restored)
    # Some classes are known not to round-trip exactly; skip the equality check.
    if cls not in SKIP_DESERIALIZATION:
        assert restored == obj
def test_dbd_cdef(version):
    """C declaration (%) lines inside a recordtype are collected as cdefs."""
    db = Database.from_string("""\
recordtype(stringin) {
%#include "test.h"
%#include "test1.h"
%#include "test2.h"
}
""", version=version)
    # The leading '%' is stripped; the C text is kept verbatim.
    headers = [
        '#include "test.h"',
        '#include "test1.h"',
        '#include "test2.h"',
    ]
    assert db.record_types["stringin"] == RecordType(
        context=(LoadContext("None", 1), ),
        name="stringin",
        cdefs=headers,
        fields={},
    )
    apischema.deserialize(Database, apischema.serialize(db))
def test_parse(substitution_file, expanded_file):
    """Substitution files parse, round-trip, and expand to the expected text."""
    sub = TemplateSubstitution.from_file(substitution_file)

    serialized = apischema.serialize(sub)
    pprint.pprint(serialized)
    apischema.deserialize(TemplateSubstitution, serialized)

    result = sub.expand_files()
    # Only compare when a reference expansion exists on disk.
    if expanded_file.exists():
        with open(expanded_file, "rt") as fp:
            expected = fp.read()
        for label, text in (("result", result), ("expected", expected)):
            print(label)
            print("------")
            print(text)
            print("------")
        assert result.rstrip() == expected.rstrip()
def from_cache(self) -> Optional[IocMetadata]:
    """Load this IOC's metadata from the on-disk cache, if present and valid."""
    if not settings.CACHE_PATH:
        # Caching disabled.
        return
    try:
        with open(self.cache_filename, "rb") as fp:
            return apischema.deserialize(type(self), json.load(fp))
    except FileNotFoundError:
        # No cache entry yet.
        pass
    except json.decoder.JSONDecodeError:
        # Truncated output file, perhaps
        pass
def test_tab_in_field():
    """A literal tab character inside a field value is preserved by the parser."""
    db = Database.from_string("""\
record(ai, "rec:X") {
    field(A, "test\tvalue")
}
""", version=3)
    expected = RecordInstance(
        context=(LoadContext("None", 1), ),
        record_type="ai",
        name="rec:X",
        is_pva=False,
        fields={
            "A": RecordField(
                context=(LoadContext("None", 2), ),
                name="A",
                dtype="",
                value="test\tvalue",
            ),
        },
    )
    assert db.records["rec:X"] == expected
    apischema.deserialize(Database, apischema.serialize(db))
def test_lazy_dataclass_model():
    """The lazy model deserializes, serializes, and exposes the right schema."""
    assert deserialize(Lazy, {"a": 0}) == Lazy(0)
    assert serialize(Lazy(0)) == {"a": 0}
    expected_schema = {
        "$schema": "http://json-schema.org/draft/2019-09/schema#",
        "type": "object",
        "properties": {"a": {"type": "integer"}},
        "required": ["a"],
        "additionalProperties": False,
    }
    assert deserialization_schema(Lazy) == expected_schema
def test_alias_and_standalone_alias():
    """Inline and standalone aliases both resolve and attach to the record."""
    db = Database.from_string("""\
record(ai, "rec:X") {
    alias("rec:Y")
    field(A, "test")
    field(B, test)
}
alias("rec:X", "rec:Z")
""", version=3)
    # Both alias styles map back to the record name.
    for alias_name in ("rec:Y", "rec:Z"):
        assert db.aliases[alias_name] == "rec:X"
    # Only the alias outside the record body is "standalone".
    assert db.standalone_aliases["rec:Z"] == "rec:X"
    assert db.records["rec:X"] == RecordInstance(
        context=(LoadContext("None", 1), ),
        record_type="ai",
        name="rec:X",
        is_pva=False,
        aliases=["rec:Y", "rec:Z"],
        fields={
            field_name: RecordField(
                context=(LoadContext("None", line), ),
                name=field_name,
                dtype="",
                value="test",
            )
            for field_name, line in (("A", 3), ("B", 4))
        },
    )
    apischema.deserialize(Database, apischema.serialize(db))
def test_simple_dataclass_model(d_conv, s_conv, alias):
    """Both conversions map Data to/from a single aliased integer field."""
    assert deserialize(Data, {alias: 0}, conversion=d_conv) == Data(0)
    assert serialize(Data, Data(0), conversion=s_conv) == {alias: 0}
    # Deserialization and serialization schemas agree.
    expected_schema = {
        "$schema": "http://json-schema.org/draft/2019-09/schema#",
        "type": "object",
        "properties": {alias: {"type": "integer"}},
        "required": [alias],
        "additionalProperties": False,
    }
    assert deserialization_schema(Data, conversion=d_conv) == expected_schema
    assert serialization_schema(Data, conversion=s_conv) == expected_schema
def get_metadata(book_path: Path) -> Optional[Book]:
    """Create and persist a Book from the raw metadata found in *book_path*.

    Returns None when no raw metadata (or no "metadata" section) is available.

    Fix: the original assumed ``content["metadata"]`` was always present; a
    payload without it would crash on ``metadata.get(...)``.
    """
    content = get_raw_metadata(book_path)
    if not content:
        return None
    metadata = content.get("metadata")
    if metadata is None:
        # Guard: without the "metadata" section every lookup below would fail.
        return None

    from bd_bagarre.database import session

    # Publisher: reuse an existing row or create one.
    publisher = None
    publisher_line = metadata.get("dc:publisher")
    if publisher_line:
        publisher = session.query(Publisher).filter_by(
            name=publisher_line).first()
        if not publisher:
            publisher = Publisher(name=publisher_line)
            session.add(publisher)
            session.flush()

    # Authors: dc:creator may be a single entry or a list.
    authors = []
    lines = metadata.get("dc:creator", [])
    lines = lines if isinstance(lines, list) else [lines]
    for line in lines:
        author = session.query(Author).filter_by(name=line["#text"]).first()
        if not author:
            author = Author(name=line["#text"])
            session.add(author)
        authors.append(author)

    book = deserialize(
        Book,
        dict(
            title=metadata.get("dc:title", ""),
            summary=metadata.get("dc:description", ""),
            cover_path=str(book_path / "cover.jpg"),
        ),
    )
    session.add(book)
    session.flush()

    book.authors = authors
    book.publisher_obj = publisher
    book.language = get_language(metadata)
    book.tags = metadata.get("dc:subject", [])
    book.files = get_files(book_path, book)
    book.identifiers = get_identifiers(metadata)
    session.flush()
    return book
def check_serialization(
    obj, deserialize: bool = True, require_same_source: bool = True
):
    """
    Round-trip a dataclass object with the serialization library.

    Requires apischema and APISCHEMA_SKIP to be False.

    Checks:
    * ``obj`` can be serialized to JSON
    * Serialized JSON can be deserialized back into an equivalent ``obj``
    * Deserialized object has the same source code representation
    """
    if obj is None or apischema is None or APISCHEMA_SKIP:
        return

    try:
        serialized = apischema.serialize(obj, exclude_defaults=True, no_copy=True)
    except Exception:
        # Dump the raw dataclass contents to aid debugging, then re-raise.
        print(json.dumps(dataclasses.asdict(obj), indent=2))
        raise

    print(f"Serialized {type(obj)} to:")
    print(json.dumps(serialized, indent=2))
    print()

    if not deserialize:
        return serialized, None

    deserialized = apischema.deserialize(type(obj), serialized, no_copy=True)
    print(f"Deserialized {type(obj)} back to:")
    print(repr(deserialized))
    print("Or:")
    print(deserialized)

    if require_same_source:
        assert str(obj) == str(deserialized), \
            "Deserialized object does not produce identical source code"

    return serialized, deserialized
def test_flattened_converted():
    """Flattened converted field round-trips and matches JSON/GraphQL schemas."""
    data2 = deserialize(Data2, {"attr": 0})
    # The flattened "attr" key lands inside the converted Field2 member.
    assert isinstance(data2.data_field2, Field2) and data2.data_field2.attr == 0
    assert serialize(Data2, data2) == {"attr": 0}
    # NOTE(review): this schema assertion targets ``Data`` while the rest of
    # the test exercises ``Data2`` -- confirm ``Data`` is intentional here.
    assert (deserialization_schema(Data) == serialization_schema(Data) == {
        "$schema": "http://json-schema.org/draft/2019-09/schema#",
        "type": "object",
        "allOf": [
            {
                "type": "object",
                "additionalProperties": False
            },
            {
                "type": "object",
                "properties": {
                    "attr": {
                        "type": "integer"
                    }
                },
                "required": ["attr"],
                "additionalProperties": False,
            },
        ],
        "unevaluatedProperties": False,
    })
    schema = graphql_schema(query=[get_data2])
    # The GraphQL resolver exposes the flattened attribute directly.
    assert graphql_sync(schema, "{getData2{attr}}").data == {
        "getData2": {
            "attr": 0
        }
    }
    assert (print_schema(schema) == """\
type Query {
  getData2: Data2!
}

type Data2 {
  attr: Int!
}
""")
def main():
    """Parse CLI arguments, load units.json, and generate a card per unit.

    Fixes: the units.json file handle was never closed (now a ``with`` block),
    and ``raise e`` is replaced by a bare ``raise`` to keep the traceback.
    """
    global BACKGROUND_PICTURE_FOLDER
    logging.basicConfig(level=logging.WARNING)
    parser = ArgumentParser(description="Generate cards from info")
    parser.add_argument(
        "folder",
        type=Path,
        help="folder containing the background pictures and the JSON units.json",
    )
    parser.add_argument("-rotate",
                        action="store_true",
                        default=False,
                        help="Rotate the card by 90°")
    parser.add_argument(
        "-no-bleeding-margins",
        action="store_true",
        default=False,
        help="Do not add bleeding margin around the cards",
    )
    args = parser.parse_args()

    BACKGROUND_PICTURE_FOLDER = args.folder
    (BACKGROUND_PICTURE_FOLDER / "generated").mkdir(exist_ok=True)

    json_file = args.folder / "units.json"
    with json_file.open(encoding="utf-8") as fp:
        unit_list = json.load(fp)

    deserialized_units = []
    for unit in unit_list:
        try:
            deserialized_units.append(deserialize(Unit, unit))
        except ValidationError:
            # Identify the offending unit, then re-raise with the original
            # traceback intact.
            logging.error("Error on %s", unit)
            raise

    for unit in deserialized_units:
        generate_card(unit, args.rotate, not args.no_bleeding_margins)
# Fix: ``NewType``, ``dataclass`` and ``field`` were used without being
# imported in this snippet; the stdlib imports are added below.
from dataclasses import dataclass, field
from typing import NewType

from pytest import raises

from apischema import ValidationError, deserialize, schema

# Tag is a constrained string: at least 3 characters, word characters only.
Tag = NewType("Tag", str)
schema(min_len=3, pattern=r"^\w*$", examples=["available", "EMEA"])(Tag)


@dataclass
class Resource:
    id: int
    tags: list[Tag] = field(
        default_factory=list,
        metadata=schema(
            description="regroup multiple resources", max_items=3, unique=True
        ),
    )


with raises(ValidationError) as err:  # pytest check exception is raised
    deserialize(
        Resource, {"id": 42, "tags": ["tag", "duplicate", "duplicate", "bad&", "_"]}
    )
# Each constraint violation is reported with its location.
assert err.value.errors == [
    {"loc": ["tags"], "msg": "item count greater than 3 (maxItems)"},
    {"loc": ["tags"], "msg": "duplicate items (uniqueItems)"},
    {"loc": ["tags", 3], "msg": "not matching '^\\w*$' (pattern)"},
    {"loc": ["tags", 4], "msg": "string length lower than 3 (minLength)"},
]