def read(cls, path: Path) -> "GeneratorConfig":
    """Load a GeneratorConfig from the XML file at *path*.

    Element/attribute names are mapped through pascal/camel case
    generators, and unknown XML properties are ignored instead of raising.
    """
    naming_context = XmlContext(
        element_name_generator=text.pascal_case,
        attribute_name_generator=text.camel_case,
    )
    xml_parser = XmlParser(
        context=naming_context,
        config=ParserConfig(fail_on_unknown_properties=False),
    )
    return xml_parser.from_path(path, cls)
def __init__(self, *, config: Configuration) -> None:
    """Store configuration and prepare the XML (de)serialization machinery."""
    self.config = config
    serializer_config = SerializerConfig(pretty_print=False)
    self.serializer = XmlSerializer(config=serializer_config)
    self.parser = XmlParser(context=XmlContext())
    # Pending response futures keyed by reference; weak values let
    # abandoned futures be collected automatically.
    self.requests: WeakValueDictionary[str, Future] = WeakValueDictionary({})
    super().__init__()
def serialize_xml_from_file(xml_file_path: pathlib.Path, serialize_clazz: Optional[Type[T]]):
    """Deserialize the XML file at *xml_file_path* into *serialize_clazz*.

    :param xml_file_path: Path object pointing at the targeted XML file.
    :param serialize_clazz: Dataclass the XML content is bound to.
    :return: The bound object instance.
    """
    xml_parser = XmlParser(context=XmlContext())
    return xml_parser.from_path(xml_file_path, serialize_clazz)
def assert_bindings(
    schema: str,
    instance: str,
    class_name: str,
    version: str,
    mode: str,
    save_output: bool,
    output_format: str,
    structure_style: str,
):
    """Generate bindings for a W3C test case and round-trip the instance.

    Depending on *mode* this stops after code generation ("build") or
    parses *instance* with the generated class and asserts JSON/XML
    round-trip equality.
    """
    __tracebackhide__ = True

    # The package path mirrors the on-disk location of the input file.
    if mode == "xml":
        instance_path = Path(instance)
        pck_arr = list(map(text.snake_case, instance_path.parts))
        package = f"output.xml_models.{'.'.join(pck_arr)}"
        instance_path = w3c.joinpath(instance_path)
        source = str(instance_path)
    else:
        schema_path = Path(schema)
        pck_arr = list(map(text.snake_case, schema_path.parts))
        package = f"output.models.{'.'.join(pck_arr)}"
        schema_path = w3c.joinpath(schema)
        source = str(schema_path)

    clazz = generate_models(source, package, class_name, output_format, structure_style)
    if mode == "build":
        return

    if isinstance(clazz, Exception):
        raise clazz

    instance_path = w3c.joinpath(instance)
    schema_path = w3c.joinpath(schema)
    context = XmlContext(class_type=output_format)
    parser = XmlParser(context=context)
    # FIX: dropped the original ``try/except Exception as e: raise e``
    # wrapper around this call — it was a no-op; parse errors propagate
    # unchanged either way.
    obj = parser.from_path(instance_path, clazz)

    save_path = None
    if save_output:
        save_path = output.joinpath(instance)
        save_path.parent.mkdir(parents=True, exist_ok=True)

    if mode == "json":
        assert_json_bindings(context, obj, save_path)
    else:
        assert_xml_bindings(
            context, obj, parser.ns_map, schema_path, instance_path, save_path, version
        )
def fetch_test_cases() -> Iterator[TestCase]:
    """Yield every test case referenced by the W3C suite manifest."""
    suite = XmlParser().from_path(w3c.joinpath("suite.xml"), TestSuite)
    parser = XmlParser()
    for ref in suite.test_set_ref:
        ref_path = w3c.joinpath(ref.href)
        test_set = parser.from_path(ref_path, TestSet)
        # Groups are walked back-to-front, matching the original ordering.
        for group in reversed(test_set.test_group):
            yield from make_test_cases(ref_path.parent, group)
def validate_bindings(schema: Path, clazz: Type, output_format: str):
    """Round-trip a chapter sample through JSON/XML and validate the XML."""
    __tracebackhide__ = True

    chapter = schema.stem.replace("chapter", "")
    sample = here.joinpath(f"samples/chapter{chapter}.xml")
    output_xml = here.joinpath(f"output/chapter{chapter}.xsdata.xml")
    output_json = here.joinpath(f"output/chapter{chapter}.xsdata.json")

    context = XmlContext(class_type=output_format)
    instance = XmlParser(context=context).from_path(sample, clazz)

    ser_config = SerializerConfig(pretty_print=True)
    actual_json = JsonSerializer(context=context, config=ser_config).render(instance)
    actual_xml = XmlSerializer(context=context, config=ser_config).render(instance)

    # Compare against committed expectations, or (re)create them.
    if not output_json.exists() or chapter == "13":
        output_json.write_text(actual_json, encoding="utf-8")
    else:
        assert output_json.read_text() == actual_json
        assert instance == JsonParser(context=context).from_string(
            actual_json, clazz)

    if output_xml.exists():
        assert output_xml.read_text() == actual_xml
    else:
        output_xml.write_text(actual_xml, encoding="utf-8")

    validator = etree.XMLSchema(etree.parse(str(schema)))
    validator.assertValid(etree.fromstring(actual_xml.encode()))
def fetch():
    """Download today's official BNM exchange rates and bind them."""
    today = datetime.datetime.now().strftime("%d.%m.%Y")
    url = f"https://bnm.md/en/official_exchange_rates?get_xml=1&date={today}"
    response = requests.get(url)
    response.raise_for_status()
    parser = XmlParser()
    return parser.from_string(response.text, Currencies)
def deserialize(resource: Union[str, Path, bytes], target_class: Optional[Type[T]] = None) -> Optional[T]:
    """Bind *resource* (a file path, XML string, or XML bytes) to *target_class*.

    A string naming an existing file is promoted to a Path and read from
    disk; otherwise strings and bytes are parsed as raw XML content.
    """
    parser = XmlParser(context=XmlContext())
    if os.path.isfile(resource):
        resource = Path(resource)

    obj = None
    if isinstance(resource, str):
        obj = parser.from_string(resource, target_class)
    elif isinstance(resource, Path):
        obj = parser.from_path(resource, target_class)
    elif isinstance(resource, bytes):
        obj = parser.from_bytes(resource, target_class)

    # Attach deserialized "complemento" payloads when present.
    if obj and getattr(obj, 'complemento', None):
        obj.complemento.any_element = __deserialize_complementos(obj)
    return obj
class TraktorNmlMixin(ABC):
    """Mixin tying a Traktor NML document to its on-disk location."""

    parser = XmlParser()

    def __init__(self, path, nml):
        self.path = path
        self.nml = nml

    def save(self):
        """Write the NML tree back to ``self.path`` without indentation."""
        with self.path.open(mode="w") as file_obj:
            rendered = XmlSerializer(pretty_print=True).render(self.nml)
            # Keep the pretty-printer's line breaks but drop its indent.
            flattened = "\n".join(line.lstrip() for line in rendered.split("\n"))
            file_obj.write(flattened)
def test_xml_documents():
    """Round-trip three artist fixtures through freshly generated bindings."""
    filepath = fixtures_dir.joinpath("artists")
    runner = CliRunner()
    result = runner.invoke(cli, [str(filepath), "--package", "tests.fixtures.artists"])
    if result.exception:
        raise result.exception

    clazz = load_class(result.output, "Metadata")
    parser = XmlParser()
    serializer = XmlSerializer(writer=XmlEventWriter)
    serializer.config.pretty_print = True
    serializer.config.xml_declaration = False

    ns_map = {None: "http://musicbrainz.org/ns/mmd-2.0#"}
    for index in range(1, 4):
        doc_path = filepath.joinpath(f"art00{index}.xml")
        rendered = serializer.render(parser.from_path(doc_path, clazz), ns_map)
        # Line-by-line comparison sidesteps trailing-newline differences.
        assert doc_path.read_bytes().splitlines() == rendered.encode().splitlines()
def validate_bindings(schema: Path, clazz: Type):
    """Round-trip the XML sample next to *schema* and validate the output."""
    __tracebackhide__ = True

    instance = XmlParser().from_path(schema.with_suffix(".xml"), clazz)
    actual = JsonSerializer(indent=4).render(instance)

    expected = schema.with_suffix(".json")
    if not expected.exists():
        expected.write_text(actual)
    else:
        assert expected.read_text() == actual
        assert instance == JsonParser().from_string(actual, clazz)

    xml = XmlSerializer(pretty_print=True).render(instance)
    validator = etree.XMLSchema(etree.parse(str(schema)))
    assert validator.validate(
        etree.fromstring(xml.encode())), validator.error_log
    expected.with_suffix(".xsdata.xml").write_text(xml)
def gen_bindings(source_dir: Path, out_dir: Path, ns_prefix="sdformat"):
    """Generate namespaced XSD files for the SDFormat templates in *source_dir*.

    Copies the special ``types.xsd`` definitions (adding namespace
    declarations), converts each ``*.sdf`` template into its own XSD file,
    and rewrites nested ``<include>`` references between templates as
    cross-schema type references.

    Parameters
    ----------
    source_dir : Path
        Directory containing the ``*.sdf`` template files.
    out_dir : Path
        Directory into which the generated ``*.xsd`` files are written.
    ns_prefix : str
        Prefix used to build the target namespaces (default: "sdformat").
    """
    # copy the special type definitions, but add the namespace declarations
    ElementTree.register_namespace("xs", xs)
    types_file = source_dir / "schema" / "types.xsd"
    if not types_file.exists():
        types_file = Path(__file__).parent / "fallback_types.xsd"
    types_xsd = ElementTree.parse(types_file)
    types_xsd.getroot().attrib.update({
        "xmlns": f"{ns_prefix}/types.xsd",
        "targetNamespace": f"{ns_prefix}/types.xsd"
    })
    types_string = ElementTree.tostring(types_xsd.getroot())
    types_xsd = etree.fromstring(types_string)
    # Re-qualify restriction bases with the canonical "xs" prefix.
    for el in types_xsd.findall(".//xs:restriction", namespaces={"xs": xs}):
        _, attrib_type = el.attrib["base"].split(":")
        el.attrib["base"] = "xs:" + attrib_type
    with open(out_dir / "types.xsd", "wb") as out_file:
        etree.ElementTree(types_xsd).write(out_file)

    # use a fixed set of namespaces that matches the menu structure of the SDF
    # spec. This leads to nicer bindings and schema locations.
    full_filename_map = {
        "root.sdf": "sdf.xsd",
        "world.sdf": "world.xsd",
        "scene.sdf": "scene.xsd",
        "state.sdf": "state.xsd",
        "physics.sdf": "physics.xsd",
        "light.sdf": "light.xsd",
        "actor.sdf": "actor.xsd",
        "model.sdf": "model.xsd",
        "link.sdf": "link.xsd",
        "sensor.sdf": "sensor.xsd",
        "joint.sdf": "joint.xsd",
        "collision.sdf": "collision.xsd",
        "visual.sdf": "visual.xsd",
        "material.sdf": "material.xsd",
        "geometry.sdf": "geometry.xsd",
    }
    filename_map = {
        key: value
        for key, value in full_filename_map.items()
        if (source_dir / key).exists()
    }
    namespaces = {
        "types": f"{ns_prefix}/types.xsd",
        "xs": "http://www.w3.org/2001/XMLSchema",
        "sdf": f"{ns_prefix}/sdf.xsd",
        "world": f"{ns_prefix}/world.xsd",
        "scene": f"{ns_prefix}/scene.xsd",
        "state": f"{ns_prefix}/state.xsd",
        "physics": f"{ns_prefix}/physics.xsd",
        "light": f"{ns_prefix}/light.xsd",
        "actor": f"{ns_prefix}/actor.xsd",
        "model": f"{ns_prefix}/model.xsd",
        "link": f"{ns_prefix}/link.xsd",
        "sensor": f"{ns_prefix}/sensor.xsd",
        "joint": f"{ns_prefix}/joint.xsd",
        "collision": f"{ns_prefix}/collision.xsd",
        "visual": f"{ns_prefix}/visual.xsd",
        "material": f"{ns_prefix}/material.xsd",
        "geometry": f"{ns_prefix}/geometry.xsd",
    }

    xml_ctx = XmlContext()
    sdf_parser = XmlParser(context=xml_ctx)

    def _parse_sane(in_file: Path) -> Element:
        # Flatten markup inside <description> tags into plain text so the
        # dataclass parser does not choke on the nested elements.
        root: etree.Element = etree.parse(str(in_file)).getroot()
        for description in root.findall(".//description"):
            if description.text is None:
                description.text = ""
            text: str = description.text
            for child in [x for x in description]:
                text += etree.tostring(child).decode("UTF-8")
                description.remove(child)
            # FIX: unescape entity-escaped angle brackets. The previous code
            # performed no-op replacements (``replace("<", "<")``), most
            # likely the result of the "&lt;"/"&gt;" literals being mangled.
            text = text.replace("&lt;", "<").replace("&gt;", ">")
            if text == "":
                description.text = None
            else:
                description.text = text
        xml_string = etree.tostring(root).decode("UTF-8")
        return sdf_parser.from_string(xml_string, Element)

    for in_file, out_file in filename_map.items():
        sdf_root: Element = _parse_sane(source_dir / in_file)

        # Breadth-first expansion of <include> references.
        queue = [sdf_root]
        while queue:
            el = queue.pop(0)
            for include in el.include:
                included_el = _parse_sane(source_dir / include.filename)
                if include.description:
                    included_el.description = include.description
                if include.filename in filename_map.keys():
                    # this element will be converted in its own file:
                    # nullify children and set appropriate namespace ref
                    included_el.maxOccurs = include.maxOccurs
                    # FIX: was ``included_el.minOccurs = included_el.minOccurs``
                    # (a self-assignment); mirror the maxOccurs line above and
                    # propagate the occurrence constraint from the include tag.
                    included_el.minOccurs = include.minOccurs
                    included_el.type = included_el.name + ":" + included_el.name
                    included_el.element = list()
                    included_el.include = list()
                    included_el.attribute = list()
                el.element.append(included_el)
            queue.extend(el.element)

        # Promote this file's namespace to the default one.
        local_ns = namespaces.copy()
        local_ns[None] = local_ns.pop(sdf_root.name)
        xsd_root = etree.Element(f"{{{xs}}}schema", nsmap=local_ns)
        xsd_root.set("targetNamespace", local_ns[None])
        xsd_root.set("version", "1.1")

        # convert the full element and then pop the outer layer
        # as we are only interested in the nested complex type
        xsd_element = sdf_root.to_xsd()
        xsd_type: etree.Element = xsd_element.find(f"./{{{xs}}}complexType")
        name = xsd_element.attrib["name"]
        xsd_type.set("name", name)
        xsd_root.append(xsd_type)

        # convert ref to typedef, potentially pulling out a type when needed
        for ref_el in xsd_type.findall(".//*[@ref]"):
            ref_name = ref_el.attrib["ref"]
            if ref_name == name:
                ref_el.attrib.pop("ref")
                ref_el.set("type", name)
            else:
                for candidate in xsd_type.findall(
                        f".//{{{xs}}}element[@name='{ref_name}']"):
                    all_children = candidate.findall(".//*")
                    if ref_el not in all_children:
                        continue
                    # promote nested type to schema level
                    referred_type = candidate.find(f"./{{{xs}}}complexType")
                    referred_type.set("name", ref_name)
                    xsd_root.append(referred_type)
                    candidate.set("type", ref_name)
                    ref_el.attrib.pop("ref")
                    ref_el.set("type", ref_name)
                    break
                else:
                    raise RuntimeError("Could not find referred element.")

        # Only keep namespace prefixes that are actually referenced.
        keep_ns = list()
        for type_el in xsd_root.findall(".//*[@type]"):
            element_type = type_el.attrib["type"]
            if ":" in element_type:
                prefix, _ = element_type.split(":")
                keep_ns.append(prefix)
        for type_el in xsd_root.findall(".//*[@base]"):
            element_type = type_el.attrib["base"]
            if ":" in element_type:
                prefix, _ = element_type.split(":")
                keep_ns.append(prefix)
        etree.cleanup_namespaces(xsd_root, keep_ns_prefixes=list(set(keep_ns)))

        # Emit an <xs:import> for every remaining (non-xs) namespace.
        used_ns = xsd_root.nsmap
        used_ns.pop("xs")
        for _, uri in used_ns.items():
            import_el = etree.Element(f"{{{xs}}}import")
            import_el.set("namespace", uri)
            file_name = Path(uri).stem + ".xsd"
            import_el.set("schemaLocation", f"./{file_name}")
            xsd_root.insert(0, import_el)

        if not out_dir.exists():
            out_dir.mkdir(exist_ok=True, parents=True)

        # write type file
        with open(out_dir / out_file, "wb") as out:
            etree.ElementTree(xsd_root).write(out, pretty_print=True)
def parse(source, handler):
    """Parse *source* bytes into a Books instance using *handler*."""
    XmlParser(context=context, handler=handler).from_bytes(source, Books)
import requests import traceback from xsdata.formats.dataclass.parsers import XmlParser from xsdata.formats.dataclass.parsers.config import ParserConfig config = ParserConfig(fail_on_unknown_properties=True) parser = XmlParser(config=config) from lxml import etree from lxml import objectify xmlschema_doc = etree.parse('btlx_11.xsd') xmlschema = etree.XMLSchema(xmlschema_doc) from btlx.btlx_11 import Btlx fname = 'btlx_filelist.txt' with open(fname, 'r') as fd: for url in fd.readlines(): try: if url.startswith('#'): continue url = url.strip() r = requests.get(url, allow_redirects=True) # Upgrade to BTLx 1.1 btlx_str = r.text.replace('Version="1.0"', 'Version="1.1"') # Validate BTLx against XSD
from pathlib import Path

from tests.fixtures.primer import PurchaseOrder
from xsdata.formats.dataclass.parsers import XmlParser

# Bind the sample purchase order document to the generated dataclass.
parser = XmlParser()
filepath = Path("tests/fixtures/primer/order.xml")
order = parser.from_path(filepath, PurchaseOrder)
# Touch a bound field; the value itself is discarded (demo/smoke check).
order.bill_to
def __init__(self, str_xml):
    """Build a Document from a MusicXML string and capture its tempo."""
    Document.__init__(self)
    self.score = XmlParser().from_string(str_xml, ScorePartwise)
    self.tempo = _find_tempo(self.score)
def xml_parser(xml_context):
    """Provide an XmlParser bound to the shared XML context."""
    parser = XmlParser(context=xml_context)
    return parser
def open_model(self, filename: Path):
    """Parse *filename* into a Model and broadcast it via modelOpened."""
    model = XmlParser(context=XmlContext()).from_path(filename, Model)
    self.modelOpened.emit(model)
<CharCode>EUR</CharCode> <Nominal>1</Nominal> <Name>Euro</Name> <Value>19.2743</Value> </Valute> <Valute ID="44"> <NumCode>840</NumCode> <CharCode>USD</CharCode> <Nominal>1</Nominal> <Name>US Dollar</Name> <Value>17.7177</Value> </Valute> </ValCurs> """ result = XmlParser().from_string(xml, Currencies) assert result == Currencies( date="19.04.2020", name="Official exchange rate", values=[ Currency( id=47, name="Euro", num_code=978, iso_code="EUR", nominal=1, value=Decimal("19.2743"), ), Currency( id=44, name="US Dollar",
def loads(
    sdf: str,
    *,
    version: str = None,
    custom_constructor: Dict[Type[T], Callable] = None,
    handler: str = None,
):
    """Convert an XML string into a sdformat.models tree.

    Parameters
    ----------
    sdf : str
        The SDFormat XML to be parsed.
    version : str
        The SDFormat version to use while parsing. If None (default) it will
        automatically determine the version from the <sdf> element. If
        specified the given version will be used instead.
    custom_constructor : Dict[Type[T], Callable]
        Overwrite the default constructor for a certain model class with
        callable. This is useful for doing pre- or post-initialization of
        bound classes or to replace them entirely.
    handler : str
        The handler that the parser should use when traversing the XML. If
        unspecified the default xsData parser will be used (lxml if it is
        installed, otherwise xml.etree). Possible values are:

        "XmlEventHandler"
            A xml.etree event-based handler.
        "LxmlEventHandler"
            A lxml.etree event-based handler.

    Returns
    -------
    SdfRoot : object
        An instance of ``skbot.ignition.models.vXX.Sdf`` where XX corresponds
        to the version of the SDFormat XML.

    Notes
    -----
    ``custom_constructor`` is currently disabled and has no effect. It will
    become available with xsData v21.8.

    Examples
    --------
    .. minigallery:: skbot.ignition.sdformat.loads
    """
    if custom_constructor is None:
        custom_constructor = dict()

    # Route instantiation of bound classes through any user overrides.
    def custom_class_factory(clazz, params):
        if clazz in custom_constructor:
            return custom_constructor[clazz](**params)
        return clazz(**params)

    if version is None:
        version = get_version(sdf)

    # SAX handlers were removed upstream; transparently map them to the
    # equivalent event-based handlers.
    if handler in ["XmlSaxHandler", "LxmlSaxHandler"]:
        warnings.warn(
            "SAX handlers have been deprecated in xsData >= 21.9;"
            " falling back to EventHandler. If you need the SAX handler, please open an issue."
            " To make this warning dissapear change `handler` to the corresponding EventHandler.",
            DeprecationWarning,
        )
        if handler == "XmlSaxHandler":
            handler = "XmlEventHandler"
        elif handler == "LxmlSaxHandler":
            handler = "LxmlEventHandler"

    # None selects whatever default handler this xsData build prefers.
    handler_class = {
        None: handlers.default_handler(),
        "XmlEventHandler": handlers.XmlEventHandler,
        "LxmlEventHandler": handlers.LxmlEventHandler,
    }[handler]

    # Import the version-specific binding models lazily.
    binding_location = _parser_roots[version]
    bindings = importlib.import_module(binding_location, __name__)

    sdf_parser = XmlParser(
        ParserConfig(class_factory=custom_class_factory),
        context=xml_ctx,
        handler=handler_class,
    )

    try:
        root_el = sdf_parser.from_string(sdf, bindings.Sdf)
    except XSDataParserError as e:
        raise ParseError("Invalid SDFormat XML.") from e

    return root_el
from dataclasses import dataclass, field
from datetime import datetime
from typing import Any

from xsdata.formats.converter import Converter, converter
from xsdata.formats.dataclass.parsers import XmlParser


@dataclass
class Root:
    # Mapped to the XML attribute of the same (snake-cased) name.
    updated_at: datetime = field(metadata={"type": "Attribute"})


class DatetimeConverter(Converter):
    """(De)serialize datetimes using ISO-8601 with a space separator."""

    def deserialize(self, value: str, **kwargs: Any) -> datetime:
        return datetime.fromisoformat(value)

    def serialize(self, value: datetime, **kwargs: Any) -> str:
        return value.isoformat(sep=" ")


converter.register_converter(datetime, DatetimeConverter())

# FIX: the sample document must be well-formed XML; the original used an
# unterminated ``<root ...>`` tag, which fails to parse.
root = XmlParser().from_string('<root updated_at="2011-11-04T00:05:23"/>', Root)
assert root == Root(updated_at=datetime(2011, 11, 4, 0, 5, 23))
def timing(description: str) -> Any:
    """Time the enclosed block and print ``description: seconds``.

    NOTE(review): used below via ``with``, so this presumably carries a
    ``@contextmanager`` decorator defined just above this block — confirm.
    """
    start = time.time()
    yield
    print(f"{description}: {time.time() - start}")


with timing("importing module"):
    from netex.models import *

sample = str(Path(__file__).parent.joinpath("NeTEx_HTM__2020-10-12.tar.xz"))
with timing("decompress sample"):
    subprocess.run(["tar", "-xf", sample, "-C", "/tmp"])

sample = "/tmp/NeTEx_HTM__2020-10-12.xml"
context = XmlContext()
config = ParserConfig(fail_on_unknown_properties=False)

# Build the class metadata up front so parse timings exclude warmup cost.
with timing("XmlContext warmup"):
    context.build_recursive(PublicationDelivery)
    print(f"Context cache size: {len(context.cache)}")

parser = XmlParser(context=context, config=config, handler=LxmlEventHandler)
with timing("Parse[LxmlEventHandler]"):
    parser.parse(sample, PublicationDelivery)

parser = XmlParser(context=context, config=config, handler=XmlEventHandler)
with timing("Parse[EventHandler]"):
    parser.parse(sample, PublicationDelivery)
def CreateFromDocument(document):
    """Parse an ISMRMRD header from an XML string or bytes payload."""
    parser = XmlParser(config=ParserConfig(fail_on_unknown_properties=True))
    parse = parser.from_string if isinstance(document, str) else parser.from_bytes
    return parse(document, ismrmrdHeader)
# Test-module globals: fixture locations and shared (de)serializers.
here = Path(__file__).parent
fixtures = here.joinpath("fixtures")
is_travis = "TRAVIS" in os.environ


@dataclass
class Documentation:
    # Human-readable title of the documented chapter.
    title: str
    # Reason shown when the chapter is skipped.
    skip_message: str
    # Source location of the example.
    source: str
    # Target location for generated output.
    target: str


xml_parser = XmlParser()
json_parser = JsonParser()
xml_serializer = XmlSerializer(pretty_print=True)
json_serializer = JsonSerializer(indent=4)

# All chapter samples, excluding previously generated *.xsdata.xml output.
xmls = sorted([
    xsd
    for xsd in fixtures.glob("defxmlschema/*/chapter*.xml")
    if not str(xsd).endswith("xsdata.xml")
])

total = 0
skipped = 0


@pytest.mark.parametrize("fixture", xmls, ids=lambda x: x.name)
def test_binding(fixture: Path):
def open_template(self, filename: Path):
    """Parse *filename* into a Template and broadcast it via templateOpened."""
    template = XmlParser(context=XmlContext()).from_path(filename, Template)
    self.templateOpened.emit(template)
def __init_parser(self):
    """Create the internal XML parser, strict about unknown properties."""
    self.parser = XmlParser(
        config=ParserConfig(fail_on_unknown_properties=True),
    )
import sys
from pathlib import Path

from xsdata.formats.dataclass.parsers import XmlParser
from xsdata.formats.dataclass.serializers import XmlSerializer
from xsdata.formats.dataclass.serializers.config import SerializerConfig

from reqif.models import ReqIf

here = Path(__file__).parent
xml_fixture = here.joinpath("sample.xml")

parser = XmlParser()
config = SerializerConfig(pretty_print=True, encoding="ascii")
# Share the parser's context so the serializer reuses the same class metadata.
serializer = XmlSerializer(context=parser.context, config=config)

obj = parser.from_path(xml_fixture, ReqIf)
# Default (unprefixed) namespace plus the xhtml prefix used in rich text.
ns_map = {
    None: "http://www.omg.org/spec/ReqIF/20110401/reqif.xsd",
    "xhtml": "http://www.w3.org/1999/xhtml",
}
serializer.write(sys.stdout, obj, ns_map=ns_map)
class Application(SIPApplication):
    """SIP application that sends SCAIP requests via SIP MESSAGE and
    correlates asynchronous responses back to awaiting callers.
    """

    def __init__(self, *, config: Configuration) -> None:
        self.config = config
        self.serializer = XmlSerializer(config=SerializerConfig(
            pretty_print=False))
        self.parser = XmlParser(context=XmlContext())
        # Pending response futures keyed by SCAIP reference; weak values so
        # abandoned futures do not accumulate.
        self.requests: WeakValueDictionary[str, Future] = WeakValueDictionary({})
        super().__init__()

    def start(self):
        """Subscribe to SIP engine notifications, then start the application."""
        notification_center = NotificationCenter()
        notification_center.add_observer(self, name='SIPEngineGotMessage')
        notification_center.add_observer(self, name='SIPMessageDidSucceed')
        notification_center.add_observer(self, name='SIPMessageDidFail')
        notification_center.add_observer(self, name='SIPApplicationDidStart')
        super().start(MemoryStorage())

    async def send_request(self, arc_name: str, scaip_request: ScaipRequest):
        """Send *scaip_request* to the ARC named *arc_name*, await the response.

        :raises ValueError: when no configuration exists for *arc_name*.
        """
        logger.info(f"send_request to {arc_name}: {scaip_request}")
        config = self.config
        arc_config = config.get_arc_config(arc_name)
        if not arc_config:
            raise ValueError(f"no configuration found for ARC {arc_name}")
        xml_model = scaip_request.to_xml_model()
        xml_str = self.serializer.render(xml_model)
        # Register the future before sending so a fast reply cannot race us.
        result = self.new_result_future(scaip_request.reference)
        # Use the request's caller id as sender if it is a usable SIP URI,
        # otherwise fall back to our own user agent URI.
        if scaip_request.caller_id.startswith(
                "sip") and scaip_request.caller_id != "sip:":
            caller_id = URI(scaip_request.caller_id)
            sender = SIPURI(user=caller_id.user,
                            host=caller_id.host,
                            port=caller_id.port)
        else:
            sender = self.get_user_agent_uri()
        receiver = SIPURI(user=arc_config.username,
                          host=arc_config.hostname,
                          port=arc_config.port)
        message = Message(FromHeader(sender), ToHeader(receiver),
                          RouteHeader(receiver), 'application/scaip+xml',
                          xml_str)
        message.send()
        logger.info(f"sent message: {xml_str}")
        scaip_response = await result
        logger.info(f"received response: {scaip_response}")
        return scaip_response

    def new_result_future(self, reference: str) -> Future:
        """Create and register the future that resolves the response for *reference*."""
        loop = asyncio.get_running_loop()
        result = loop.create_future()
        self.requests[reference] = result
        return result

    def get_user_agent_uri(self) -> SIPURI:
        """Build the fallback sender URI from the local SIP configuration."""
        sip_config = self.config.sip
        return SIPURI(user=sip_config.username,
                      host=sip_config.hostname,
                      port=sip_config.port)

    def _NH_SIPApplicationDidStart(self, notification):
        logger.info("SIPApplicationDidStart")

    def _NH_SIPMessageDidSucceed(self, notification):
        logger.info("SIPMessageDidSucceed")

    def _NH_SIPMessageDidFail(self, notification):
        """Resolve the matching request future with an error on send failure."""
        logger.info("SIPMessageDidFail")
        message = notification.sender
        # Failure notifications carry the original Mrq body; use its ref to
        # locate the awaiting future.
        xml_model = self.parser.from_bytes(message.body, Mrq)
        result = self.requests.get(xml_model.ref, None)
        if result:
            # TODO: return proper error
            result.set_exception(
                HTTPException(status_code=500, detail="SIPMessageDidFail"))

    def _NH_SIPEngineGotMessage(self, notification):
        """Parse an incoming Mrs response and resolve the matching future."""
        logger.info("SIPEngineGotMessage")
        logger.info(f"got XML: {notification.data.body}")
        xml_model = self.parser.from_bytes(notification.data.body, Mrs)
        scaip_response = ScaipResponse.from_xml_model(xml_model)
        result = self.requests.get(xml_model.ref, None)
        if result:
            result.set_result(scaip_response)