Beispiel #1
0
    def test_no_property_context(self):
        """A model whose context declares no property term must still type-check writes."""
        self.store.create_model("MyClass", NO_PROPERTY_CONTEXT_DICT)
        manager = ClientResourceManager(self.store)
        manager.import_store_models()
        my_class_model = manager.get_model("MyClass")

        resource = my_class_model.new(test_hasX=2)

        # A non-numeric assignment must be rejected by the attribute type check.
        with self.assertRaises(OMAttributeTypeCheckError):
            resource.test_hasX = "not a number"
Beispiel #2
0
    def test_no_property_context(self):
        """Attribute type-checking still applies when the context has no property entry."""
        self.store.create_model("MyClass", NO_PROPERTY_CONTEXT_DICT)
        rsc_manager = ClientResourceManager(self.store)
        rsc_manager.import_store_models()
        mdl = rsc_manager.get_model("MyClass")
        instance = mdl.new(test_hasX=2)

        # Writing a string where a number is expected must raise.
        with self.assertRaises(OMAttributeTypeCheckError):
            instance.test_hasX = "not a number"
Beispiel #3
0
    def test_no_datatype_context(self):
        """A property term without an explicit @type still enforces the schema datatype."""
        ctx = deepcopy(NO_PROPERTY_CONTEXT_DICT)
        # Map the term to an IRI only (no "@type" entry).
        ctx["@context"]["hasX"] = "test:hasX"
        self.store.create_model("MyClass", ctx)
        manager = ClientResourceManager(self.store)
        manager.import_store_models()
        my_model = manager.get_model("MyClass")

        resource = my_model.new(hasX=2)
        with self.assertRaises(OMAttributeTypeCheckError):
            resource.hasX = "not a number"
Beispiel #4
0
    def test_complete_context(self):
        """A fully-specified term (@id + @type) enforces the declared xsd:int datatype."""
        ctx = deepcopy(NO_PROPERTY_CONTEXT_DICT)
        ctx["@context"]["hasX"] = {"@id": "test:hasX", "@type": "xsd:int"}
        self.store.create_model("MyClass", ctx)
        manager = ClientResourceManager(self.store)
        manager.import_store_models()
        my_model = manager.get_model("MyClass")

        resource = my_model.new(hasX=2)
        with self.assertRaises(OMAttributeTypeCheckError):
            resource.hasX = "not a number"
Beispiel #5
0
def disclaim2(self):
    """Overloading variant of the "disclaim" method, attached to ParentClass below.

    Returns the module-level ``new_disclaim`` value (defined elsewhere in this
    file); registered via ``client_manager.declare_method`` further down.
    """
    return new_disclaim


# Register the three class models on the SPARQL-backed data store.
data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph)
# ChildClass is generated before its ancestors!!
child_prefix = "http://localhost/children/"
uri_fragment = "this"
# incremental_iri=True: IRIs are minted with an incrementing counter under child_prefix.
data_store.create_model("ChildClass", context, iri_prefix=child_prefix, iri_fragment=uri_fragment, incremental_iri=True)
data_store.create_model("GrandParentClass", context, iri_prefix="http://localhost/ancestors/",
                        iri_fragment=uri_fragment)
data_store.create_model("ParentClass", context, iri_prefix="http://localhost/parents/")


# Client-side manager re-uses the store's models.
client_manager = ClientResourceManager(data_store)
client_manager.import_store_models()
# Methods
client_manager.declare_method(square_value, "square_value", EXAMPLE + "GrandParentClass")
client_manager.declare_method(print_new_value, "print_new_value", EXAMPLE + "ChildClass")
# Method overloading
# The same method name is declared on an ancestor and a descendant class;
# presumably the most specific declaration wins — verify against OldMan docs.
client_manager.declare_method(disclaim1, "disclaim", EXAMPLE + "GrandParentClass")
client_manager.declare_method(disclaim2, "disclaim", EXAMPLE + "ParentClass")

child_model = client_manager.get_model("ChildClass")
grand_parent_model = client_manager.get_model("GrandParentClass")
parent_model = client_manager.get_model("ParentClass")

class InstanceTest(TestCase):
Beispiel #6
0
        },
        "secret": {
            "@id": "ex:secret",
            "@type": "xsd:string"
        },
        "bad_property": {
            "@id": "ex:badProperty",
            "@type": "xsd:string"
        }
    }
}

# Store + model setup shared by the tests below.
data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph)
data_store.create_model("LocalClass", context, iri_prefix="http://localhost/objects/")

client_manager = ClientResourceManager(data_store)
client_manager.import_store_models()
lc_model = client_manager.get_model("LocalClass")


class PropertyTest(TestCase):

    def tearDown(self):
        """ Clears the data graph """
        # SPARQL Update: drops every triple from the default graph between tests.
        data_graph.update("CLEAR DEFAULT")

    def test_read_and_write_only(self):
        # Model creation must fail with OMPropertyDefError for "BadClass".
        # NOTE(review): presumably the shared context's "bad_property" entry is the
        # contradictory definition that triggers this — schema not visible here.
        with self.assertRaises(OMPropertyDefError):
            data_store.create_model("BadClass", context, data_graph)

    def test_write_only(self):
Beispiel #7
0
# Minimal JSON-LD context: only the class term, no property terms.
context = {
    "@context": {
        "ex": EXAMPLE,
        "id": "@id",
        "type": "@type",
        "MyClass": "ex:MyClass",
    }
}

data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph)
# IRIs are minted as a UUID fragment appended to a hash-less base IRI.
data_store.create_model("MyClass",
                        context,
                        iri_generator=UUIDFragmentIriGenerator())

client_manager = ClientResourceManager(data_store)
client_manager.import_store_models()
crud_controller = HashLessCRUDer(client_manager)
model = client_manager.get_model("MyClass")


class DatatypeTest(TestCase):
    """Checks IRI generation for models using the UUID-fragment generator."""

    def tearDown(self):
        """ Clears the data graph """
        # SPARQL Update: drops every triple from the default graph between tests.
        data_graph.update("CLEAR DEFAULT")

    def test_generation(self):
        """A new resource keeps its hash-less IRI and embeds it in the full id."""
        hashless_iri = "http://example.org/doc1"
        obj1 = model.new(hashless_iri=hashless_iri)
        # assertEqual/assertIn replace the deprecated assertEquals and the
        # weaker assertTrue(x in y) — same checks, clearer failure messages.
        self.assertEqual(obj1.hashless_iri, hashless_iri)
        self.assertIn(hashless_iri, obj1.id)
Beispiel #8
0
parse_graph_safely(schema_graph, schema_url, format="turtle")

ctx_iri = "https://raw.githubusercontent.com/oldm/OldMan/master/examples/quickstart_context.jsonld"

data_graph = Graph()
data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph)
# Only for SPARQL data stores
data_store.extract_prefixes(schema_graph)

# LocalPerson model
data_store.create_model(
    "LocalPerson", ctx_iri, iri_prefix="http://localhost/persons/", iri_fragment="me", incremental_iri=True
)

# Client resource manager
client_manager = ClientResourceManager(data_store)
client_manager.import_store_models()

lp_model = client_manager.get_model("LocalPerson")

alice = lp_model.create(name="Alice", emails={"*****@*****.**"}, short_bio_en="I am ...")
bob = lp_model.new(
    name="Bob",
    # blog="http://blog.example.com/",
    short_bio_fr=u"J'ai grandi en ... .",
)

# print() with a single argument is valid in both Python 2 and Python 3,
# unlike the bare `print x` statement the original used.
# Presumably False until the required email is set — verify against the schema.
print(bob.is_valid())
bob.emails = {"bob@localhost", "*****@*****.**"}
print(bob.is_valid())
bob.save()
Beispiel #9
0
schema_url = "https://raw.githubusercontent.com/oldm/OldMan/master/examples/quickstart_schema.ttl"
parse_graph_safely(schema_graph, schema_url, format="turtle")

ctx_iri = "https://raw.githubusercontent.com/oldm/OldMan/master/examples/quickstart_context.jsonld"

data_graph = Graph()
data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph)
# Only for SPARQL data stores
data_store.extract_prefixes(schema_graph)

#LocalPerson model
data_store.create_model("LocalPerson", ctx_iri, iri_prefix="http://localhost/persons/",
                        iri_fragment="me", incremental_iri=True)

#Client resource manager
client_manager = ClientResourceManager(data_store)
client_manager.import_store_models()

lp_model = client_manager.get_model("LocalPerson")

alice = lp_model.create(name="Alice", emails={"*****@*****.**"},
                        short_bio_en="I am ...")
bob = lp_model.new(name="Bob",
                   #blog="http://blog.example.com/",
                   short_bio_fr=u"J'ai grandi en ... .")

# print() with a single argument works in both Python 2 and Python 3,
# unlike the bare `print x` statement the original used.
print(bob.is_valid())
bob.emails = {"bob@localhost", "*****@*****.**"}
print(bob.is_valid())
bob.save()
Beispiel #10
0
import unittest

# Load the controller schema from a file sitting next to this module.
schema_graph = Graph()
schema_file = path.join(path.dirname(__file__), "controller-schema.ttl")
schema_graph = parse_graph_safely(schema_graph, schema_file, format="turtle")

context_file = "file://" + path.join(path.dirname(__file__), "controller-context.jsonld")

data_graph = Graph()
data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph)

# Two models with incrementally-minted IRIs under distinct prefixes.
data_store.create_model("Collection", context_file, iri_prefix="http://localhost/collections/",
                        incremental_iri=True)
data_store.create_model("Item", context_file, iri_prefix="http://localhost/items/", incremental_iri=True)

client_manager = ClientResourceManager(data_store)
client_manager.import_store_models()

collection_model = client_manager.get_model("Collection")
item_model = client_manager.get_model("Item")

# Persisted immediately; used as fixture by the tests below.
collection1 = collection_model.create()

controller = HTTPController(client_manager)


class ControllerTest(unittest.TestCase):

    def test_operation(self):
        """TODO: remove """
        operation = collection1.get_operation("POST")
Beispiel #11
0
    data_graph = Graph(
        SPARQLStore("http://dbpedia.org/sparql", context_aware=False))

    cache_region = make_region().configure('dogpile.cache.memory_pickle')

    # Datastore: SPARQL-aware triple store, with two models
    data_store = SPARQLDataStore(data_graph,
                                 schema_graph=schema_graph,
                                 cache_region=cache_region)
    data_store.create_model("http://dbpedia.org/ontology/Film", context_url)
    # JSON-LD terms can be used instead of IRIs
    data_store.create_model("Person", context_url)

    # Client resource manager
    client_manager = ClientResourceManager(data_store)
    # Re-uses the models of the data store
    client_manager.import_store_models()
    film_model = client_manager.get_model("http://dbpedia.org/ontology/Film")
    actor_model = client_manager.get_model("Person")

    print "10 first French films found on DBPedia (with OldMan)"
    print "----------------------------------------------------"
    q1_start_time = time.time()
    for film in film_model.filter(
            subjects=["http://dbpedia.org/resource/Category:French_films"],
            limit=10,
            eager=True,
            pre_cache_properties=["http://dbpedia.org/ontology/starring"]):
        title = extract_title(film)
        if film.actors is None:
# Vocabulary namespaces used throughout this example.
EXAMPLE = "http://localhost/vocab#"
HYDRA = "http://www.w3.org/ns/hydra/core#"

# Named schema and data graphs inside one conjunctive graph.
default_graph = ConjunctiveGraph()
schema_graph = default_graph.get_context(URIRef("http://localhost/schema"))
data_graph = default_graph.get_context(URIRef("http://localhost/data"))

# Declaration (no attribute)
schema_graph.add((URIRef(EXAMPLE + "MyClass"), RDF.type, URIRef(HYDRA + "Class")))

context = {"@context": {"ex": EXAMPLE, "id": "@id", "type": "@type", "MyClass": "ex:MyClass"}}

data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph)
# IRIs are minted as a UUID fragment appended to a hash-less base IRI.
data_store.create_model("MyClass", context, iri_generator=UUIDFragmentIriGenerator())

client_manager = ClientResourceManager(data_store)
client_manager.import_store_models()
crud_controller = HashLessCRUDer(client_manager)
model = client_manager.get_model("MyClass")


class DatatypeTest(TestCase):
    """Checks IRI generation for models using the UUID-fragment generator."""

    def tearDown(self):
        """ Clears the data graph """
        # SPARQL Update: drops every triple from the default graph between tests.
        data_graph.update("CLEAR DEFAULT")

    def test_generation(self):
        """A new resource keeps its hash-less IRI and embeds it in the full id."""
        hashless_iri = "http://example.org/doc1"
        obj1 = model.new(hashless_iri=hashless_iri)
        # assertEqual/assertIn replace the deprecated assertEquals and the
        # weaker assertTrue(x in y) — same checks, clearer failure messages.
        self.assertEqual(obj1.hashless_iri, hashless_iri)
        self.assertIn(hashless_iri, obj1.id)
Beispiel #13
0
# JSON-LD context loaded from a file next to this module.
context_file = "file://" + path.join(path.dirname(__file__),
                                     "controller-context.jsonld")

data_graph = Graph()
data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph)

# Two models with incrementally-minted IRIs under distinct prefixes.
data_store.create_model("Collection",
                        context_file,
                        iri_prefix="http://localhost/collections/",
                        incremental_iri=True)
data_store.create_model("Item",
                        context_file,
                        iri_prefix="http://localhost/items/",
                        incremental_iri=True)

client_manager = ClientResourceManager(data_store)
client_manager.import_store_models()

collection_model = client_manager.get_model("Collection")
item_model = client_manager.get_model("Item")

# Persisted immediately; used as fixture by the tests below.
collection1 = collection_model.create()

controller = HTTPController(client_manager)


class ControllerTest(unittest.TestCase):
    """Smoke test for the HTTP controller's operation lookup."""

    def test_operation(self):
        """TODO: remove """
        operation = collection1.get_operation("POST")
        # assertIsNotNone gives a clearer failure message than
        # assertTrue(operation is not None); same check otherwise.
        self.assertIsNotNone(operation)
Beispiel #14
0
# Cache-backed SPARQL data store.
data_store = SPARQLDataStore(data_graph,
                             schema_graph=schema_graph,
                             cache_region=cache_region)
# Takes the prefixes from the schema graph
data_store.extract_prefixes(schema_graph)

#lp_name_or_iri = "LocalPerson"
# A full IRI is used instead of the shorter JSON-LD term (commented above).
lp_name_or_iri = MY_VOC + "LocalPerson"
data_store.create_model(lp_name_or_iri,
                        context,
                        iri_prefix="http://localhost/persons/",
                        iri_fragment="me")
data_store.create_model("LocalRSAPublicKey", context)
data_store.create_model("LocalGPGPublicKey", context)

client_manager = ClientResourceManager(data_store)
client_manager.import_store_models()

lp_model = client_manager.get_model(lp_name_or_iri)
rsa_model = client_manager.get_model("LocalRSAPublicKey")
gpg_model = client_manager.get_model("LocalGPGPublicKey")

crud_controller = HashLessCRUDer(client_manager)

# Fixture values shared by the tests below.
bob_name = "Bob"
bob_blog = "http://blog.example.com/"
bob_email1 = "bob@localhost"
bob_email2 = "*****@*****.**"
bob_emails = {bob_email1, bob_email2}
bob_bio_en = "I grow up in ... ."
bob_bio_fr = u"J'ai grandi en ... ."
Beispiel #15
0
from os import path
from unittest import TestCase
from rdflib import Graph
from oldman import HttpDataStore, ClientResourceManager, parse_graph_safely

# Schema and context files live next to this module.
directory = path.dirname(__file__)
schema_graph = parse_graph_safely(Graph(), path.join(directory, 'api_schema.ttl'), format="turtle")
schema_graph.namespace_manager.bind("hydra", "http://www.w3.org/ns/hydra/core#")

context_uri = path.join(directory, 'api_documentation.json')

# HTTP-backed store: resources are fetched over the web, not from a local graph.
data_store = HttpDataStore(schema_graph=schema_graph)
data_store.create_model('ApiDocumentation', context_uri)

manager = ClientResourceManager(data_store)
manager.import_store_models()

doc_model = manager.get_model('ApiDocumentation')


class HttpStoreTest(TestCase):
    def test_get(self):
        iri = u"http://www.markus-lanthaler.com/hydra/api-demo/vocab"
        doc = doc_model.get(iri)
        self.assertTrue(doc is not None)
        self.assertEquals(doc.id, iri)
        expected_classes = {u'http://www.markus-lanthaler.com/hydra/api-demo/vocab#User',
                            u'http://www.w3.org/ns/hydra/core#Collection',
                            u'http://www.w3.org/ns/hydra/core#Resource',
                            u'http://www.markus-lanthaler.com/hydra/api-demo/vocab#Comment',
                            u'http://www.markus-lanthaler.com/hydra/api-demo/vocab#EntryPoint',
Beispiel #16
0
# Cache
#cache_region = None
# dogpile.cache in-memory backend with pickling (so cached values are copies).
cache_region = make_region().configure('dogpile.cache.memory_pickle')

data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph, cache_region=cache_region)
# Takes the prefixes from the schema graph
data_store.extract_prefixes(schema_graph)

#lp_name_or_iri = "LocalPerson"
# A full IRI is used instead of the shorter JSON-LD term (commented above).
lp_name_or_iri = MY_VOC + "LocalPerson"
data_store.create_model(lp_name_or_iri, context, iri_prefix="http://localhost/persons/", iri_fragment="me")
data_store.create_model("LocalRSAPublicKey", context)
data_store.create_model("LocalGPGPublicKey", context)

client_manager = ClientResourceManager(data_store)
client_manager.import_store_models()

lp_model = client_manager.get_model(lp_name_or_iri)
rsa_model = client_manager.get_model("LocalRSAPublicKey")
gpg_model = client_manager.get_model("LocalGPGPublicKey")


crud_controller = HashLessCRUDer(client_manager)

# Fixture values shared by the tests below.
bob_name = "Bob"
bob_blog = "http://blog.example.com/"
bob_email1 = "bob@localhost"
bob_email2 = "*****@*****.**"
bob_emails = {bob_email1, bob_email2}
bob_bio_en = "I grow up in ... ."
Beispiel #17
0
    #context_url = "https://raw.githubusercontent.com/oldm/OldMan/master/examples/dbpedia_film_context.jsonld"
    context_url = path.join(path.dirname(__file__), "dbpedia_film_context.jsonld")

    data_graph = Graph(SPARQLStore("http://dbpedia.org/sparql", context_aware=False))

    cache_region = make_region().configure('dogpile.cache.memory_pickle')

    # Datastore: SPARQL-aware triple store, with two models
    data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph, cache_region=cache_region)
    data_store.create_model("http://dbpedia.org/ontology/Film", context_url)
    # JSON-LD terms can be used instead of IRIs
    data_store.create_model("Person", context_url)

    # Client resource manager
    client_manager = ClientResourceManager(data_store)
    # Re-uses the models of the data store
    client_manager.import_store_models()
    film_model = client_manager.get_model("http://dbpedia.org/ontology/Film")
    actor_model = client_manager.get_model("Person")

    print "10 first French films found on DBPedia (with OldMan)"
    print "----------------------------------------------------"
    q1_start_time = time.time()
    for film in film_model.filter(subjects=["http://dbpedia.org/resource/Category:French_films"],
                                  limit=10
                                  , eager=True, pre_cache_properties=["http://dbpedia.org/ontology/starring"]
                                  ):
        title = extract_title(film)
        if film.actors is None:
            print "   %s %s (no actor declared)" % (title, film.id)