Example #1
    def test_meta_output(self):
        """ Generate a context AND a jsonld for the metamodel and make sure it parses as RDF """
        jsonld_path = os.path.join(testscriptstempdir, 'metajson.jsonld')
        rdf_path = os.path.join(testscriptstempdir, 'metardf.ttl')
        meta_context_path = os.path.join(testscriptstempdir,
                                         'metacontext.jsonld')

        # Generate the JSON-LD context for the metamodel and compute its base URI
        gen = ContextGenerator(source_yaml_path)
        base = gen.schema.id
        if base[-1] not in '/#':
            base += '/'
        base += gen.schema.name
        with open(meta_context_path, 'w') as tfile:
            tfile.write(gen.serialize())
        with open(jsonld_path, 'w') as tfile:
            tfile.write(JSONLDGenerator(source_yaml_path, fmt=JSONLDGenerator.valid_formats[0])\
                .serialize(context=meta_context_path))
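        # Load the JSON-LD into an rdflib Graph and round-trip it through Turtle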
        g = Graph()
        g.load(jsonld_path, format="json-ld")
        g.serialize(rdf_path, format="ttl")
        g.bind('meta', METAMODEL_NAMESPACE)
        new_ttl = g.serialize(format="turtle").decode()
        new_g = Graph()
        new_g.parse(data=new_ttl, format="turtle")
        self.check_size(g, new_g, URIRef(base), 11, 79, 11, "meta")
Example #2
 def test_default_vocab(self):
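     # Context for the schema without a default_prefix: check @vocab and the 'name' slot mapping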
     json_ld_text = ContextGenerator(without_default).serialize()
     json_ld = loads(json_ld_text)
     self.assertEqual('http://example.org/sssom/schema/', json_ld['@context']['@vocab'])
     self.assertEqual('http://example.org/sssom/schema/name', json_ld['@context']['name']['@id'])
     json_ld_text2 = ContextGenerator(with_default).serialize()
     json_ld2 = loads(json_ld_text2)
     self.assertEqual('https://w3id.org/sssom/', json_ld2['@context']['@vocab'])
     self.assertNotIn('name', json_ld2['@context'])
Example #3
    def test_issue_368(self):
        """ Make sure that types are generated as part of the output """
        env.generate_single_file(
            'issue_368_imports.py',
            lambda: PythonGenerator(env.input_path('issue_368_imports.yaml'),
                                    mergeimports=False).serialize(),
            comparator=lambda exp, act: compare_python(
                exp, act, self.env.expected_path('issue_368_imports.py')),
            value_is_returned=True)
        env.generate_single_file(
            'issue_368.py',
            lambda: PythonGenerator(env.input_path('issue_368.yaml'),
                                    mergeimports=False).serialize(),
            comparator=lambda exp, act: compare_python(
                exp, act, self.env.expected_path('issue_368.py')),
            value_is_returned=True)

        with open(env.expected_path('issue_368.py')) as f:
            python = f.read()

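        # The generated module should re-import SampleEnum and ParentClass from the imported schema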
        has_imports = False
        for line in python.split("\n"):
            if line.startswith("from . issue_368_imports"):
                imps = line.replace("from . issue_368_imports import ",
                                    "").split(", ")
                assert 'SampleEnum' in imps
                assert 'ParentClass' in imps
                has_imports = True
        assert has_imports
        module = compile_python(env.expected_path('issue_368.py'))

        enum_inst = module.SampleEnum("pva")  # EnumInstanceImpl
        example = module.SampleClass(slot_1="pva")
        assert hasattr(example, "slot_1")
        assert example.slot_1.code.text == enum_inst.code.text
        assert str(example.slot_1) == "pva: PVA description"

        def dump_and_load(dumper: Callable, sfx: str) -> None:
            fname = env.actual_path(f'issue_368_1.{sfx}')
            dumper(example, fname)
            with open(fname) as f:
                print(f'\n----- {sfx} -----')
                print(f.read())

        dump_and_load(json_dumper.dump, 'json')
        dump_and_load(yaml_dumper.dump, 'yaml')

        env.generate_single_file(
            'issue_368.context.jsonld',
            lambda: ContextGenerator(env.input_path('issue_368.yaml'),
                                     emit_metadata=False).serialize(),
            filtr=ldcontext_metadata_filter,
            value_is_returned=True)
        dump_and_load(
            lambda obj, fname: rdf_dumper.dump(
                obj, fname, env.expected_path("issue_368.context.jsonld")),
            'ttl')
Example #4
    def test_mappings_rdf(self):
        """ Test the imported mappings in the biolink metamodel """
        test_dir = self.env.temp_file_path('mappings_rdf_test', is_dir=True)

        # Create the mappings json file
        json_file = os.path.join(test_dir, 'mappings.jsonld')
        json_str = JSONLDGenerator(env.meta_yaml, importmap=env.import_map).serialize()
        with open(json_file, 'w') as f:
            f.write(json_str)

        # Create the mappings context file
        context_file = os.path.join(test_dir, 'mappings.context.jsonld')
        ContextGenerator(env.meta_yaml, importmap=env.import_map).serialize(output=context_file)
        self.assertTrue(os.path.exists(context_file))

        # Generate context and use it to create the RDF
        msg = self.single_file_generator('context.jsonld', ContextGenerator, filtr=ldcontext_metadata_filter, subdir='includes')

        # Generate a copy of the JSON representation of the model
        context_loc = json_file
        context_args = {"context": ['file://' + LOCAL_METAMODEL_LDCONTEXT_FILE, 'file://' + context_loc]}
        msg += self.single_file_generator('json', JSONLDGenerator, serialize_args=context_args,
                                          filtr=json_metadata_context_filter, fail_if_expected_missing=False)

        # Make a fresh copy of the RDF and validate it as well
        msg += self.single_file_generator('ttl', RDFGenerator, serialize_args=context_args,
                                          comparator=GeneratorTestCase.rdf_comparator, fail_if_expected_missing=False)
        if msg:
            self.fail(msg)

        g = Graph()
        rdf_file = os.path.join(sourcedir, 'meta_mappings.ttl')
        g.load(rdf_file, format='turtle')
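        # Register the namespaces used below so the assertions can refer to terms via the prefix library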
        ns = PrefixLibrary()
        ns.add_rdf(g)
        ns['FULL'] = "http://example.org/fulluri/"
        ns['EX'] = "http://example.org/mappings/"
        ns['META'] = "https://w3id.org/biolink/biolinkml/meta/"
        # Make sure that the expected triples got added

        self.assertEqual({ns.EX.slot1_close, ns.FULL.slot1_close}, set(g.objects(ns.EX.s1, ns.SKOS.closeMatch)))
        self.assertEqual({ns.EX.slot1, ns.FULL.slot1}, set(g.objects(ns.EX.s1, ns.SKOS.exactMatch)))
        self.assertEqual(ns.EX.s3, g.value(ns.EX.s1, ns.META.deprecated_element_has_exact_replacement, any=False))
        self.assertEqual(ns.EX.s4, g.value(ns.EX.s1, ns.META.deprecated_element_has_possible_replacement, any=False))

        self.assertEqual({ns.EX.class1_close, ns.FULL.class1_close}, set(g.objects(ns.EX.C1, ns.SKOS.closeMatch)))
        self.assertEqual({ns.EX.class1, ns.FULL.class1}, set(g.objects(ns.EX.C1, ns.SKOS.exactMatch)))
        self.assertEqual(ns.EX.c2, g.value(ns.EX.C1, ns.META.deprecated_element_has_exact_replacement, any=False))
        self.assertEqual(ns.EX.c3, g.value(ns.EX.C1, ns.META.deprecated_element_has_possible_replacement, any=False))
        if DO_SHEX_VALIDATION:
            EX = Namespace("http://example.org/mappings/")
            focus = EX.testMetamodelMappings
            start = METAMODEL_NAMESPACE.SchemaDefinition
            results = ShExEvaluator(g, LOCAL_SHEXJ_FILE_NAME, focus, start).evaluate(debug=False)
            self.assertTrue(self._evaluate_shex_results(results))
        else:
            print("*** RDF Model validation step was skipped. Set: tests.__init__.DO_SHEX_VALIDATION to run it")
Example #5
 def test_issue_344(self):
     """ Test to check if prefixes of CURIEs from granular mappings show up in the json-ld context """
     x = env.generate_single_file(
         'issue_344_context.json',
         lambda: ContextGenerator(env.input_path('issue_344.yaml'),
                                  importmap=env.import_map).serialize(),
         value_is_returned=True)
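     # Every CURIE prefix referenced by the granular mappings should show up in the generated @context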
     with open(os.path.join(env.outdir, 'issue_344_context.json')) as f:
         context = json.load(f)
     self.assertIn('PCO', context['@context'])
     self.assertIn('PATO', context['@context'])
     self.assertIn('GO', context['@context'])
Example #6
    def test_meta_output(self):
        """ Generate a context AND a jsonld for the metamodel and make sure it parses as RDF """
        tmp_jsonld_path = self.temp_file_path('metajson.jsonld')
        tmp_rdf_path = self.temp_file_path('metardf.ttl')
        tmp_meta_context_path = self.temp_file_path('metacontext.jsonld')

        # Build the context generator for the metamodel and compute its base URI
        gen = ContextGenerator(env.meta_yaml, importmap=env.import_map)
        base = gen.schema.id
        if base[-1] not in '/#':
            base += '/'
        base += gen.schema.name

        # Generate context
        with open(tmp_meta_context_path, 'w') as tfile:
            tfile.write(gen.serialize())

        # Generate JSON
        with open(tmp_jsonld_path, 'w') as tfile:
            tfile.write(
                jsonldgen.JSONLDGenerator(
                    env.meta_yaml,
                    fmt=jsonldgen.JSONLDGenerator.valid_formats[0],
                    importmap=env.import_map).serialize(
                        context=tmp_meta_context_path))

        # Convert JSON to TTL
        g = Graph()
        g.load(tmp_jsonld_path, format="json-ld")
        g.serialize(tmp_rdf_path, format="ttl")
        g.bind('meta', METAMODEL_NAMESPACE)
        new_ttl = g.serialize(format="turtle").decode()

        # Make sure that the generated TTL matches the JSON-LD (probably not really needed, as this is more
        # a test of rdflib than of our tooling, but it doesn't hurt)
        new_g = Graph()
        new_g.parse(data=new_ttl, format="turtle")

        # Make sure that both match the expected size (classes, slots, types, and model name for error reporting)
        self.check_size(g, new_g, URIRef(base), 15, 112, 13, "meta")
Example #7
 def _do_test(self, tfn):
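     # Round-trip the schema through the YAML generator, then emit its JSON-LD context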
     env.generate_single_file(
         f'{tfn}.yaml',
         lambda: YAMLGenerator(env.input_path(f'{tfn}.yaml'),
                               log_level=INFO).serialize(),
         filtr=yaml_filter,
         value_is_returned=True)
     env.generate_single_file(
         f'{tfn}.context.jsonld',
         lambda: ContextGenerator(env.input_path(f'{tfn}.yaml')).serialize(),
         filtr=ldcontext_metadata_filter,
         value_is_returned=True)
Example #8
 def output_generator(dirname) -> None:
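     # Write the example instance as JSON, generate the schema's context, and use it to render the instance as Turtle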
     with open(os.path.join(dirname, 'issue_80.json'), 'w') as f:
         f.write(as_json(example))
     context = os.path.join(dirname, 'issue_80.context.jsonld')
     with open(context, 'w') as f:
         f.write(
             ContextGenerator(
                 env.input_path('issue_80.yaml')).serialize())
     with open(os.path.join(dirname, 'issue_80.ttl'), 'w') as f:
         f.write(
             as_rdf(
                 example,
                 contexts=context).serialize(format="turtle").decode())
Example #9
    def test_context(self):
        """ Verify that the root context.jsonld is current """
        new_context = ContextGenerator(LOCAL_YAML_PATH).serialize(
            base=META_BASE_URI)
        target = os.path.join(targetdir, 'context.jsonld')
        with open(target, 'w') as f:
            f.write(new_context)

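        # Compare against the checked-in context, ignoring generation metadata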
        with open(LOCAL_CONTEXT_PATH) as f:
            old_context = f.read()
        self.assertEqual(
            self._strip_meta(old_context), self._strip_meta(new_context),
            f'\n{LOCAL_CONTEXT_PATH} does not match output -- new file is in test/target'
        )
Example #10
 def _gen_context_file(self, fname: str, metauris: bool = False) -> str:
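     # Serialize the metamodel context and strip generation metadata so the comparison is stable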
     cntxt_txt = ldcontext_metadata_filter(
         ContextGenerator(env.meta_yaml,
                          useuris=not metauris,
                          importmap=env.import_map).serialize())
     cntxt_file_path = self.expected_file_path(fname)
     if os.path.exists(cntxt_file_path):
         with open(cntxt_file_path) as f:
             expected = ldcontext_metadata_filter(f.read())
     else:
         expected = ''
     if expected != cntxt_txt:
         with open(cntxt_file_path, 'w') as f:
             f.write(cntxt_txt)
     return urljoin('file:', cntxt_file_path)
Example #11
    def test_rdf_shex(self):
        """ Generate ShEx and RDF for the model and verify that the RDF represents a valid instance """
        test_dir = os.path.join(self.tmpdir_path, 'meta_conformance_test')
        make_and_clear_directory(self.tmpdir_path)
        make_and_clear_directory(test_dir)

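        # Emit the JSON-LD rendering of the model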
        json_file = os.path.join(test_dir, 'meta.jsonld')
        json_str = JSONLDGenerator(source_yaml_path,
                                   importmap=BIOLINK_IMPORT_MAP).serialize()
        with open(json_file, 'w') as f:
            f.write(json_str)

        context_file = os.path.join(test_dir, 'metacontext.jsonld')
        ContextGenerator(
            source_yaml_path,
            importmap=BIOLINK_IMPORT_MAP).serialize(output=context_file)
        self.assertTrue(os.path.exists(context_file))

        rdf_file = os.path.join(test_dir, 'meta.ttl')
        RDFGenerator(source_yaml_path, importmap=BIOLINK_IMPORT_MAP).serialize(
            output=rdf_file, context=context_file)
        self.assertTrue(os.path.exists(rdf_file))

        shex_file = os.path.join(test_dir, 'meta.shex')
        ShExGenerator(source_yaml_path,
                      importmap=BIOLINK_IMPORT_MAP).serialize(
                          output=shex_file, collections=False)
        self.assertTrue(os.path.exists(shex_file))

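        # Validate the generated RDF against the generated ShEx schema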
        if DO_SHEX_VALIDATION:
            g = Graph()
            g.load(rdf_file, format='ttl')
            focus = METAMODEL_NAMESPACE.metamodel
            start = METAMODEL_NAMESPACE.SchemaDefinition
            results = ShExEvaluator(g, shex_file, focus,
                                    start).evaluate(debug=False)
            success = all(r.result for r in results)
            if not success:
                for r in results:
                    if not r.result:
                        print(r.reason)
            else:
                make_and_clear_directory(test_dir)
            self.assertTrue(success)
        else:
            print(
                "*** ShEX validation step was skipped. Set: tests.__init__.DO_SHEX_VALIDATION to run it"
            )
Example #12
    def test_rdf_shex(self):
        """ Generate ShEx and RDF for the model and verify that the RDF represents a valid instance """
        test_dir = self.temp_file_path('meta_conformance_test', is_dir=True)

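        # Generate the JSON-LD, context, RDF, and ShEx artifacts for the metamodel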
        json_file = os.path.join(test_dir, 'meta.jsonld')
        json_str = JSONLDGenerator(env.meta_yaml,
                                   importmap=env.import_map).serialize()
        with open(json_file, 'w') as f:
            f.write(json_str)

        context_file = os.path.join(test_dir, 'metacontext.jsonld')
        ContextGenerator(
            env.meta_yaml,
            importmap=env.import_map).serialize(output=context_file)
        self.assertTrue(os.path.exists(context_file))

        rdf_file = os.path.join(test_dir, 'meta.ttl')
        RDFGenerator(env.meta_yaml,
                     importmap=env.import_map).serialize(output=rdf_file,
                                                         context=context_file)
        self.assertTrue(os.path.exists(rdf_file))

        shex_file = os.path.join(test_dir, 'meta.shex')
        shexgen.ShExGenerator(env.meta_yaml,
                              importmap=env.import_map).serialize(
                                  output=shex_file, collections=False)
        self.assertTrue(os.path.exists(shex_file))

        if SKIP_SHEX_VALIDATION:
            print(
                f"tests/test_scripts/test_gen_shex.py: {SKIP_SHEX_VALIDATION_REASON}"
            )
        else:
            g = Graph()
            g.load(rdf_file, format='ttl')
            focus = METAMODEL_NAMESPACE.metamodel
            start = METAMODEL_NAMESPACE.SchemaDefinition
            results = ShExEvaluator(g, shex_file, focus,
                                    start).evaluate(debug=False)
            success = all(r.result for r in results)
            if not success:
                for r in results:
                    if not r.result:
                        print(r.reason)
            else:
                make_and_clear_directory(test_dir)
            self.assertTrue(success)
Example #13
    def test_issue_80(self):
        """ Make sure that types are generated as part of the output """
        yaml_fname = os.path.join(sourcedir, 'issue_80.yaml')
        python = PythonGenerator(yaml_fname).serialize()
        print(self.header("Python"))
        print(python)
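        # Compile and execute the generated Python so the model classes can be instantiated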
        spec = compile(python, 'test', 'exec')
        module = ModuleType('test')
        exec(spec, module.__dict__)
        example = module.Person("http://example.org/person/17", "Fred Jones", 43)

        # JSON Representation
        print(self.header("JSON"))
        print(as_json(example))

        # Generate a context for this particular model
        print(self.header("Context"))
        context = ContextGenerator(yaml_fname).serialize()
        print(context)

        # RDF Representation
        print(self.header("RDF"))
        print(as_rdf(example, contexts=context).serialize(format="turtle").decode())
Example #14
        description: A person known by this person (indicating some level of reciprocated interaction between the parties).
        range: person
        slot_uri: foaf:knows
        multivalued: true
"""
python_src = PythonGenerator(yaml).serialize()
print(python_src)
spec = compile(python_src, 'test', 'exec')
module = ModuleType('test')
exec(spec, module.__dict__)

print(f'<img src="{YumlGenerator(yaml).serialize()}"/>')
print(f'\n-----\n{YumlGenerator(yaml).serialize()}\n')

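# Build the JSON-LD context with an explicit base URI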
cntxt = loads(
    ContextGenerator(yaml).serialize(base="http://example.org/context/"))
print(as_json(cntxt))

shex = ShExGenerator(yaml).serialize(collections=False)
print(shex)

# Generate a person
joe_smith = module.Person(id="42",
                          last_name="smith",
                          first_name=['Joe', 'Bob'],
                          age=43)
print(joe_smith)

# Add the context and turn it into RDF
jsonld = as_json(yaml_to_json(joe_smith, cntxt))
print(jsonld)