def loader_test(self, filename: str, model: Type[YAMLRoot], loader) -> None:
    """
    Test the various permutations of the supplied loader using the input file 'filename' -- both load and loads

    :param filename: un-pathed file name to load
    :param model: model to load the file name into
    :param loader: package that contains 'load' and 'loads' operations
    """
    metadata = FileInfo()
    name, typ = filename.rsplit('.', 1)
    # Expected output lives next to the input as "<name>_<ext>.yaml"
    expected_yaml = self.env.expected_path(name + '_' + typ + ".yaml")

    python_obj: YAMLRoot = loader.load(filename, self.env.indir, model, metadata)
    self.env.eval_single_file(expected_yaml, yaml_dumper.dumps(python_obj))

    # Make sure metadata gets filled out properly
    rel_path = os.path.abspath(os.path.join(test_base.env.cwd, '..'))
    self.assertEqual('tests/test_loaders_dumpers/input', os.path.relpath(metadata.base_path, rel_path))
    # BUG FIX: the expected source path must interpolate the actual input file name;
    # the original compared against a hard-coded '(unknown)' placeholder, which could
    # never match metadata.source_file (cf. the hbread cross-check just below, which
    # reads the same `filename` from self.env.indir).
    self.assertEqual(f'tests/test_loaders_dumpers/input/{filename}', os.path.relpath(metadata.source_file, rel_path))
    fileinfo = FileInfo()
    hbread(filename, fileinfo, self.env.indir)
    self.assertEqual(fileinfo, metadata)

    # Load from a string
    expected = hbread(filename, base_path=self.env.indir)
    python_obj: YAMLRoot = loader.loads(expected, model, metadata.clear())
    self.env.eval_single_file(expected_yaml, yaml_dumper.dumps(python_obj))
def load_source(
        source: Union[str, dict, TextIO],
        loader: Callable[[Union[str, Dict], FileInfo], Optional[Dict]],
        target_class: Type[YAMLRoot],
        accept_header: Optional[str] = "text/plain, application/yaml;q=0.9",
        metadata: Optional[FileInfo] = None) -> Optional[YAMLRoot]:
    """
    Base loader - convert a file, url, string, open file handle or dictionary into an instance of target_class

    :param source: URL, file name, block of text, Existing Object or open file handle
    :param loader: Take a stringified image or a dictionary and return a loadable dictionary
    :param target_class: Destination class
    :param accept_header: Accept header to use if doing a request
    :param metadata: Metadata about the source.  Filled in as we go along
    :return: Instance of the target class if loader worked
    """
    # Guarantee a metadata carrier even when the caller didn't supply one --
    # downstream code can then fill it in unconditionally
    if metadata is None:
        metadata = FileInfo()

    # A dict is already "read"; anything else (path / URL / text / handle)
    # goes through hbread, which also populates metadata as a side effect
    if isinstance(source, dict):
        raw = source
    else:
        raw = hbread(source, metadata, metadata.base_path, accept_header)

    parsed = loader(raw, metadata)
    if parsed is None:
        return None
    return target_class(**parsed)
def as_rdf_graph(element: YAMLRoot, contexts: CONTEXTS_PARAM_TYPE, namespaces: CONTEXT_TYPE = None) -> Graph:
    """
    Convert element into an RDF graph guided by the context(s) in contexts

    :param element: element to represent in RDF
    :param contexts: JSON-LD context(s) in the form of a file name, URL, JSON string,
           dict, JSON object, or a list containing elements of any type named above
    :param namespaces: A file name, URL, JSON String, dict or JSON object that includes
           the set of namespaces to be bound to the return graph.  If absent, contexts
           get used
    :return: rdflib Graph containing element
    """
    # Resolve every supplied context into parsed JSON before expansion
    if isinstance(contexts, list):
        expand_context = [json.loads(hbread(ctxt)) for ctxt in contexts]
    else:
        expand_context = json.loads(hbread(contexts))

    jsonld = expand(json_dumper.dumps(element), options=dict(expandContext=expand_context))
    graph = rdflib_graph_from_pyld_jsonld(jsonld)

    # Namespace bindings come from the explicit `namespaces` argument when given,
    # otherwise fall back to the (already parsed) contexts
    ns_source = json.loads(hbread(namespaces)) if namespaces is not None else expand_context

    # TODO: make a utility out of this or add it to prefixcommons
    if not ns_source or '@context' not in ns_source:
        return graph
    ctxt_entries = ns_source['@context']
    if isinstance(ctxt_entries, dict):
        ctxt_entries = [ctxt_entries]
    for entry in ctxt_entries:
        if not isinstance(entry, dict):
            continue
        for prefix, target in entry.items():
            if isinstance(target, dict):
                # An expanded term definition only yields a binding when it is
                # explicitly flagged with a truthy '@prefix' and carries an '@id'
                if '@id' in target and target.get('@prefix', False):
                    target = target['@id']
                else:
                    continue
            # JSON-LD keywords ('@vocab', '@base', ...) are not prefixes
            if not prefix.startswith('@'):
                graph.bind(prefix, target)
    return graph