def test_rdf_shex(self):
    """Generate ShEx and RDF for the metamodel and verify that the RDF is a valid instance.

    Pipeline: serialize the model as JSON-LD, a JSON-LD context, Turtle RDF, and a ShEx
    schema into a scratch directory, then (optionally) validate the RDF against the ShEx.
    The scratch directory is deliberately left in place when validation fails so the
    artifacts can be inspected.
    """
    test_dir = os.path.join(self.tmpdir_path, 'meta_conformance_test')
    # Wipe/recreate the parent scratch area first, then the per-test subdirectory.
    make_and_clear_directory(self.tmpdir_path)
    make_and_clear_directory(test_dir)

    # JSON-LD rendering of the model.
    json_file = os.path.join(test_dir, 'meta.jsonld')
    json_str = JSONLDGenerator(source_yaml_path, importmap=BIOLINK_IMPORT_MAP).serialize()
    with open(json_file, 'w') as f:
        f.write(json_str)

    # JSON-LD context used to interpret the RDF serialization below.
    context_file = os.path.join(test_dir, 'metacontext.jsonld')
    ContextGenerator(source_yaml_path, importmap=BIOLINK_IMPORT_MAP).serialize(output=context_file)
    self.assertTrue(os.path.exists(context_file))

    # Turtle RDF rendering, generated against the context just emitted.
    rdf_file = os.path.join(test_dir, 'meta.ttl')
    RDFGenerator(source_yaml_path, importmap=BIOLINK_IMPORT_MAP).serialize(
        output=rdf_file, context=context_file)
    self.assertTrue(os.path.exists(rdf_file))

    # ShEx schema for the model (collections=False: no RDF list constructs).
    shex_file = os.path.join(test_dir, 'meta.shex')
    ShExGenerator(source_yaml_path, importmap=BIOLINK_IMPORT_MAP).serialize(
        output=shex_file, collections=False)
    self.assertTrue(os.path.exists(shex_file))

    if DO_SHEX_VALIDATION:
        g = Graph()
        g.load(rdf_file, format='ttl')
        # Validate the metamodel node against the SchemaDefinition shape.
        focus = METAMODEL_NAMESPACE.metamodel
        start = METAMODEL_NAMESPACE.SchemaDefinition
        results = ShExEvaluator(g, shex_file, focus, start).evaluate(debug=False)
        success = all(r.result for r in results)
        if not success:
            # Emit every failure reason; the scratch directory is kept for debugging.
            for r in results:
                if not r.result:
                    print(r.reason)
        else:
            # Only clean up on success so failures leave evidence behind.
            make_and_clear_directory(test_dir)
        self.assertTrue(success)
    else:
        print(
            "*** ShEX validation step was skipped. Set: tests.__init__.DO_SHEX_VALIDATION to run it"
        )
def test_rdf_shex(self):
    """Generate ShEx and RDF for the model and verify that the RDF represents a valid instance.

    Emits JSON-LD, a JSON-LD context, Turtle, and a ShEx schema for the metamodel into a
    scratch directory, then (unless skipped) evaluates the Turtle graph against the ShEx.
    """
    out_dir = self.temp_file_path('meta_conformance_test', is_dir=True)

    # JSON-LD rendering of the metamodel.
    jsonld_path = os.path.join(out_dir, 'meta.jsonld')
    jsonld_text = JSONLDGenerator(env.meta_yaml, importmap=env.import_map).serialize()
    with open(jsonld_path, 'w') as fh:
        fh.write(jsonld_text)

    # JSON-LD context for the RDF serialization.
    context_path = os.path.join(out_dir, 'metacontext.jsonld')
    ContextGenerator(env.meta_yaml, importmap=env.import_map).serialize(output=context_path)
    self.assertTrue(os.path.exists(context_path))

    # Turtle rendering, produced with the freshly generated context.
    ttl_path = os.path.join(out_dir, 'meta.ttl')
    RDFGenerator(env.meta_yaml, importmap=env.import_map).serialize(
        output=ttl_path, context=context_path)
    self.assertTrue(os.path.exists(ttl_path))

    # ShEx schema (collections=False: no RDF list constructs).
    shex_path = os.path.join(out_dir, 'meta.shex')
    shexgen.ShExGenerator(env.meta_yaml, importmap=env.import_map).serialize(
        output=shex_path, collections=False)
    self.assertTrue(os.path.exists(shex_path))

    if not SKIP_SHEX_VALIDATION:
        graph = Graph()
        graph.load(ttl_path, format='ttl')
        # Validate the metamodel node against the SchemaDefinition shape.
        focus_node = METAMODEL_NAMESPACE.metamodel
        start_shape = METAMODEL_NAMESPACE.SchemaDefinition
        eval_results = ShExEvaluator(graph, shex_path, focus_node, start_shape).evaluate(debug=False)
        all_passed = all(res.result for res in eval_results)
        if all_passed:
            # Remove the scratch directory only when validation succeeded.
            make_and_clear_directory(out_dir)
        else:
            # Print every failure reason; artifacts are kept for inspection.
            for res in eval_results:
                if not res.result:
                    print(res.reason)
        self.assertTrue(all_passed)
    else:
        print(
            f"tests/test_scripts/test_gen_shex.py: {SKIP_SHEX_VALIDATION_REASON}"
        )
def test_meta_output(self):
    """Generate a context AND a jsonld for the metamodel and make sure it parses as RDF."""
    tmp_jsonld_path = self.temp_file_path('metajson.jsonld')
    tmp_rdf_path = self.temp_file_path('metardf.ttl')
    tmp_meta_context_path = self.temp_file_path('metacontext.jsonld')

    # Build the context generator and derive the model's base URI from the schema id/name.
    context_gen = ContextGenerator(env.meta_yaml, importmap=env.import_map)
    model_uri = context_gen.schema.id
    if model_uri[-1] not in '/#':
        model_uri = model_uri + '/'
    model_uri = model_uri + context_gen.schema.name

    # Emit the JSON-LD context.
    with open(tmp_meta_context_path, 'w') as out:
        out.write(context_gen.serialize())

    # Emit the JSON-LD document, bound to the context written above.
    with open(tmp_jsonld_path, 'w') as out:
        jsonld_text = jsonldgen.JSONLDGenerator(
            env.meta_yaml,
            fmt=jsonldgen.JSONLDGenerator.valid_formats[0],
            importmap=env.import_map).serialize(context=tmp_meta_context_path)
        out.write(jsonld_text)

    # Load the JSON-LD as RDF and write a Turtle copy.
    graph = Graph()
    graph.load(tmp_jsonld_path, format="json-ld")
    graph.serialize(tmp_rdf_path, format="ttl")
    graph.bind('meta', METAMODEL_NAMESPACE)
    turtle_text = graph.serialize(format="turtle").decode()

    # Round-trip sanity check: the emitted Turtle should parse back into an equivalent
    # graph (mostly an rdflib check, but cheap insurance for our tooling).
    reparsed = Graph()
    reparsed.parse(data=turtle_text, format="turtle")

    # Both graphs must match the expected counts (classes, slots, types; "meta" labels errors).
    self.check_size(graph, reparsed, URIRef(model_uri), 15, 112, 13, "meta")
def test_issue_80(self):
    """Make sure that types are generated as part of the output."""
    yaml_fname = os.path.join(sourcedir, 'issue_80.yaml')

    # Generate the Python rendering of the model and execute it in a scratch module.
    python_code = PythonGenerator(yaml_fname).serialize()
    print(self.header("Python"))
    print(python_code)
    compiled = compile(python_code, 'test', 'exec')
    scratch_module = ModuleType('test')
    exec(compiled, scratch_module.__dict__)

    # Instantiate a Person from the generated classes.
    person = scratch_module.Person("http://example.org/person/17", "Fred Jones", 43)

    # JSON Representation
    print(self.header("JSON"))
    print(as_json(person))

    # Generate a context for this particular model
    print(self.header("Context"))
    context = ContextGenerator(yaml_fname).serialize()
    print(context)

    # RDF Representation
    print(self.header("RDF"))
    print(as_rdf(person, contexts=context).serialize(format="turtle").decode())
description: A person known by this person (indicating some level of reciprocated interaction between the parties). range: person slot_uri: foaf:knows multivalued: true """ python_src = PythonGenerator(yaml).serialize() print(python_src) spec = compile(PythonGenerator(yaml).serialize(), 'test', 'exec') module = ModuleType('test') exec(spec, module.__dict__) print(f'<img src="{YumlGenerator(yaml).serialize()}"/>') print(f'\n-----\n{YumlGenerator(yaml).serialize()}\n') cntxt = loads( ContextGenerator(yaml).serialize(base="http://example.org/context/")) print(as_json(cntxt)) shex = ShExGenerator(yaml).serialize(collections=False) print(shex) # Generate a person joe_smith = module.Person(id="42", last_name="smith", first_name=['Joe', 'Bob'], age=43) print(joe_smith) # Add the context and turn it into RDF jsonld = as_json(yaml_to_json(joe_smith, cntxt)) print(jsonld)
def test_mappings_rdf(self):
    """Test the imported mappings in the biolink metamodel.

    Generates JSON-LD, context, JSON, and Turtle artifacts for the metamodel mappings,
    verifies the expected SKOS mapping triples and deprecation-replacement values are
    present in the RDF, and optionally ShEx-validates the mappings graph.
    """
    test_dir = self.env.temp_file_path('mappings_rdf_test', is_dir=True)

    # Create the mappings json file
    json_file = os.path.join(test_dir, 'mappings.jsonld')
    json_str = JSONLDGenerator(env.meta_yaml, importmap=env.import_map).serialize()
    with open(json_file, 'w') as f:
        f.write(json_str)

    # Create the mappings context file
    context_file = os.path.join(test_dir, 'mappings.context.jsonld')
    ContextGenerator(env.meta_yaml, importmap=env.import_map).serialize(output=context_file)
    self.assertTrue(os.path.exists(context_file))

    # Generate context and use it to create the RDF
    # BUG FIX: `msg` was previously used with `+=` below without ever being
    # initialized, which raised NameError before any comparison could run.
    msg = ''
    self.single_file_generator('context.jsonld', ContextGenerator,
                               filtr=ldcontext_metadata_filter, subdir='includes')

    # Generate a copy of the JSON representation of the model, resolved against both
    # the local metamodel context and the freshly generated mappings context.
    context_loc = json_file
    context_args = {
        "context": ['file://' + LOCAL_METAMODEL_LDCONTEXT_FILE, 'file://' + context_loc]
    }
    msg += self.single_file_generator('json', JSONLDGenerator,
                                      serialize_args=context_args,
                                      filtr=json_metadata_context_filter,
                                      fail_if_expected_missing=False)

    # Make a fresh copy of the RDF and validate it as well
    msg += self.single_file_generator('ttl', RDFGenerator,
                                      serialize_args=context_args,
                                      comparator=GeneratorTestCase.rdf_comparator,
                                      fail_if_expected_missing=False)
    if msg:
        self.fail(msg)

    # Load the reference mappings RDF and register the namespaces used in assertions.
    g = Graph()
    rdf_file = os.path.join(sourcedir, 'meta_mappings.ttl')
    g.load(rdf_file, format='turtle')
    ns = PrefixLibrary()
    ns.add_rdf(g)
    ns['FULL'] = "http://example.org/fulluri/"
    ns['EX'] = "http://example.org/mappings/"
    ns['META'] = "https://w3id.org/biolink/biolinkml/meta/"

    # Make sure that the expected triples got added
    # Slot-level mappings: close/exact matches and deprecation replacements.
    self.assertEqual({ns.EX.slot1_close, ns.FULL.slot1_close},
                     set(g.objects(ns.EX.s1, ns.SKOS.closeMatch)))
    self.assertEqual({ns.EX.slot1, ns.FULL.slot1},
                     set(g.objects(ns.EX.s1, ns.SKOS.exactMatch)))
    self.assertEqual(
        ns.EX.s3,
        g.value(ns.EX.s1, ns.META.deprecated_element_has_exact_replacement, any=False))
    self.assertEqual(
        ns.EX.s4,
        g.value(ns.EX.s1, ns.META.deprecated_element_has_possible_replacement, any=False))

    # Class-level mappings: close/exact matches and deprecation replacements.
    self.assertEqual({ns.EX.class1_close, ns.FULL.class1_close},
                     set(g.objects(ns.EX.C1, ns.SKOS.closeMatch)))
    self.assertEqual({ns.EX.class1, ns.FULL.class1},
                     set(g.objects(ns.EX.C1, ns.SKOS.exactMatch)))
    self.assertEqual(
        ns.EX.c2,
        g.value(ns.EX.C1, ns.META.deprecated_element_has_exact_replacement, any=False))
    self.assertEqual(
        ns.EX.c3,
        g.value(ns.EX.C1, ns.META.deprecated_element_has_possible_replacement, any=False))

    if DO_SHEX_VALIDATION:
        # Validate the mappings instance node against the SchemaDefinition shape.
        EX = Namespace("http://example.org/mappings/")
        focus = EX.testMetamodelMappings
        start = METAMODEL_NAMESPACE.SchemaDefinition
        results = ShExEvaluator(g, LOCAL_SHEXJ_FILE_NAME, focus, start).evaluate(debug=False)
        self.assertTrue(self._evaluate_shex_results(results))
    else:
        print(
            "*** RDF Model validation step was skipped. Set: tests.__init__.DO_SHEX_VALIDATION to run it"
        )