class RangeTest(TestCase): def setUp(self): self.schema_graph = Graph().parse(join(dirname(__file__), "vocab.jsonld"), format="json-ld") print self.schema_graph.serialize(format="turtle") self.store = SPARQLDataStore(Graph(), schema_graph=self.schema_graph) def test_no_property_context(self): self.store.create_model("MyClass", NO_PROPERTY_CONTEXT_DICT) client = ClientResourceManager(self.store) client.import_store_models() model = client.get_model("MyClass") obj = model.new(test_hasX=2) with self.assertRaises(OMAttributeTypeCheckError): obj.test_hasX = "not a number" def test_no_datatype_context(self): context = deepcopy(NO_PROPERTY_CONTEXT_DICT) context["@context"]["hasX"] = "test:hasX" self.store.create_model("MyClass", context) client = ClientResourceManager(self.store) client.import_store_models() model = client.get_model("MyClass") obj = model.new(hasX=2) with self.assertRaises(OMAttributeTypeCheckError): obj.hasX = "not a number" def test_conflicting_datatype_context(self): context = deepcopy(NO_PROPERTY_CONTEXT_DICT) context["@context"]["hasX"] = { "@id": "test:hasX", # Not an int "@type": "xsd:string" } with self.assertRaises(OMAlreadyDeclaredDatatypeError): self.store.create_model("MyClass", context) def test_complete_context(self): context = deepcopy(NO_PROPERTY_CONTEXT_DICT) context["@context"]["hasX"] = { "@id": "test:hasX", "@type": "xsd:int" } self.store.create_model("MyClass", context) client = ClientResourceManager(self.store) client.import_store_models() model = client.get_model("MyClass") obj = model.new(hasX=2) with self.assertRaises(OMAttributeTypeCheckError): obj.hasX = "not a number"
class RangeTest(TestCase): def setUp(self): self.schema_graph = Graph().parse(join(dirname(__file__), "vocab.jsonld"), format="json-ld") print self.schema_graph.serialize(format="turtle") self.store = SPARQLDataStore(Graph(), schema_graph=self.schema_graph) def test_no_property_context(self): self.store.create_model("MyClass", NO_PROPERTY_CONTEXT_DICT) client = ClientResourceManager(self.store) client.import_store_models() model = client.get_model("MyClass") obj = model.new(test_hasX=2) with self.assertRaises(OMAttributeTypeCheckError): obj.test_hasX = "not a number" def test_no_datatype_context(self): context = deepcopy(NO_PROPERTY_CONTEXT_DICT) context["@context"]["hasX"] = "test:hasX" self.store.create_model("MyClass", context) client = ClientResourceManager(self.store) client.import_store_models() model = client.get_model("MyClass") obj = model.new(hasX=2) with self.assertRaises(OMAttributeTypeCheckError): obj.hasX = "not a number" def test_conflicting_datatype_context(self): context = deepcopy(NO_PROPERTY_CONTEXT_DICT) context["@context"]["hasX"] = { "@id": "test:hasX", # Not an int "@type": "xsd:string" } with self.assertRaises(OMAlreadyDeclaredDatatypeError): self.store.create_model("MyClass", context) def test_complete_context(self): context = deepcopy(NO_PROPERTY_CONTEXT_DICT) context["@context"]["hasX"] = {"@id": "test:hasX", "@type": "xsd:int"} self.store.create_model("MyClass", context) client = ClientResourceManager(self.store) client.import_store_models() model = client.get_model("MyClass") obj = model.new(hasX=2) with self.assertRaises(OMAttributeTypeCheckError): obj.hasX = "not a number"
def test(self): hydra_adapter = HydraSchemaAdapter() self.schema_graph = hydra_adapter.update_schema_graph( self.schema_graph) print self.schema_graph.serialize(format="turtle") self.assertTrue( self.schema_graph.query("""ASK { ?c rdfs:subClassOf <http://www.w3.org/ns/hydra/core#Collection> ; <http://www.w3.org/ns/hydra/core#supportedOperation> ?op . }""")) self.store = SPARQLDataStore(Graph(), schema_graph=self.schema_graph)
data_graph = default_graph.get_context(URIRef("http://localhost/data"))

# Declaration (no attribute)
schema_graph.add((URIRef(EXAMPLE + "MyClass"), RDF.type, URIRef(HYDRA + "Class")))

context = {
    "@context": {
        "ex": EXAMPLE,
        "id": "@id",
        "type": "@type",
        "MyClass": "ex:MyClass",
    }
}

data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph)
# IRIs are minted from a UUID placed in the fragment part
data_store.create_model("MyClass", context, iri_generator=UUIDFragmentIriGenerator())

client_manager = ClientResourceManager(data_store)
client_manager.import_store_models()

crud_controller = HashLessCRUDer(client_manager)
model = client_manager.get_model("MyClass")


class DatatypeTest(TestCase):

    def tearDown(self):
        """Clears the data graph between tests."""
        data_graph.update("CLEAR DEFAULT")
"ro_property": { "@id": "ex:roProperty", "@type": "xsd:string" }, "secret": { "@id": "ex:secret", "@type": "xsd:string" }, "bad_property": { "@id": "ex:badProperty", "@type": "xsd:string" } } } data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph) data_store.create_model("LocalClass", context, iri_prefix="http://localhost/objects/") client_manager = ClientResourceManager(data_store) client_manager.import_store_models() lc_model = client_manager.get_model("LocalClass") class PropertyTest(TestCase): def tearDown(self): """ Clears the data graph """ data_graph.update("CLEAR DEFAULT") def test_read_and_write_only(self):
# NOTE(review): fragment — the endpoint credentials below were redacted
# ("*****:*****") by a secret-masking pass, and the text between the
# commented-out SPARQLUpdateStore setup and the trailing
# "short_bio_en=..." call argument appears to have been lost with them.
# Kept byte-identical; reconstruct from the original file before use.
# from rdflib.plugins.stores.sparqlstore import SPARQLUpdateStore # store = SPARQLUpdateStore(queryEndpoint="http://*****:*****@example.org"}, short_bio_en="I am ...")
# NOTE(review): fragment — this chunk ends on the bare "class ControllerTest"
# header; the class body lies outside this view.
from rdflib import Graph
from oldman import SPARQLDataStore, ClientResourceManager, parse_graph_safely
from oldman.rest.controller import HTTPController
from os import path
import unittest

# Schema graph parsed from a local Turtle file
schema_graph = Graph()
schema_file = path.join(path.dirname(__file__), "controller-schema.ttl")
schema_graph = parse_graph_safely(schema_graph, schema_file, format="turtle")

# JSON-LD context referenced through a file:// IRI
context_file = "file://" + path.join(path.dirname(__file__), "controller-context.jsonld")

data_graph = Graph()
data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph)
data_store.create_model("Collection", context_file,
                        iri_prefix="http://localhost/collections/",
                        incremental_iri=True)
data_store.create_model("Item", context_file,
                        iri_prefix="http://localhost/items/",
                        incremental_iri=True)

client_manager = ClientResourceManager(data_store)
client_manager.import_store_models()

collection_model = client_manager.get_model("Collection")
item_model = client_manager.get_model("Item")

# Shared fixtures: a pre-created collection and the HTTP controller under test
collection1 = collection_model.create()
controller = HTTPController(client_manager)


class ControllerTest(unittest.TestCase):
    # NOTE(review): test methods continue beyond this chunk
"@context": { "hydra": "http://www.w3.org/ns/hydra/core#", }, "@id": "urn:test:vocab:MyClass", "@type": "hydra:Class", "hydra:supportedProperty": [{ "hydra:property": "urn:test:vocab:isWorking" }] } parse_graph_safely(schema_graph, data=json.dumps(my_class_def), format="json-ld") context_file_path = path.join(path.dirname(__file__), "basic_context.jsonld") context_iri = "/contexts/context.jsonld" store = SPARQLDataStore(Graph(), schema_graph=schema_graph) store.create_model("MyClass", context_iri, context_file_path=context_file_path) client_manager = ClientResourceManager(store) client_manager.import_store_models() model = client_manager.get_model("MyClass") class ContextUriTest(TestCase): def test_context_uri(self): obj = model.new(is_working=True) self.assertEquals(obj.context, context_iri) self.assertTrue(obj.is_working) print obj.to_rdf()
"@id": "wot:fingerprint", "@type": "xsd:hexBinary" }, "hex_id": { "@id": "wot:hex_id", "@type": "xsd:hexBinary" } } } # Cache #cache_region = None cache_region = make_region().configure('dogpile.cache.memory_pickle') data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph, cache_region=cache_region) # Takes the prefixes from the schema graph data_store.extract_prefixes(schema_graph) #lp_name_or_iri = "LocalPerson" lp_name_or_iri = MY_VOC + "LocalPerson" data_store.create_model(lp_name_or_iri, context, iri_prefix="http://localhost/persons/", iri_fragment="me") data_store.create_model("LocalRSAPublicKey", context) data_store.create_model("LocalGPGPublicKey", context) client_manager = ClientResourceManager(data_store) client_manager.import_store_models()
# --- Module-level fixture for the HTTP controller tests -------------------
import unittest
from os import path

from rdflib import Graph

from oldman import SPARQLDataStore, ClientResourceManager, parse_graph_safely
from oldman.rest.controller import HTTPController

# Schema loaded from the Turtle file shipped next to this test module
schema_graph = Graph()
schema_file = path.join(path.dirname(__file__), "controller-schema.ttl")
schema_graph = parse_graph_safely(schema_graph, schema_file, format="turtle")

# JSON-LD context referenced through a file:// IRI
context_file = "file://" + path.join(path.dirname(__file__), "controller-context.jsonld")

data_graph = Graph()
data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph)

data_store.create_model("Collection", context_file,
                        iri_prefix="http://localhost/collections/",
                        incremental_iri=True)
data_store.create_model("Item", context_file,
                        iri_prefix="http://localhost/items/",
                        incremental_iri=True)

client_manager = ClientResourceManager(data_store)
client_manager.import_store_models()

collection_model = client_manager.get_model("Collection")
item_model = client_manager.get_model("Item")
def setUp(self): self.schema_graph = Graph().parse(join(dirname(__file__), "vocab.jsonld"), format="json-ld") print self.schema_graph.serialize(format="turtle") self.store = SPARQLDataStore(Graph(), schema_graph=self.schema_graph)
#schema_url = "https://raw.githubusercontent.com/oldm/OldMan/master/examples/dbpedia_film_schema.ttl" schema_url = path.join(path.dirname(__file__), "dbpedia_film_schema.ttl") schema_graph = Graph().parse(schema_url, format="turtle") #context_url = "https://raw.githubusercontent.com/oldm/OldMan/master/examples/dbpedia_film_context.jsonld" context_url = path.join(path.dirname(__file__), "dbpedia_film_context.jsonld") data_graph = Graph( SPARQLStore("http://dbpedia.org/sparql", context_aware=False)) cache_region = make_region().configure('dogpile.cache.memory_pickle') # Datastore: SPARQL-aware triple store, with two models data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph, cache_region=cache_region) data_store.create_model("http://dbpedia.org/ontology/Film", context_url) # JSON-LD terms can be used instead of IRIs data_store.create_model("Person", context_url) # Client resource manager client_manager = ClientResourceManager(data_store) # Re-uses the models of the data store client_manager.import_store_models() film_model = client_manager.get_model("http://dbpedia.org/ontology/Film") actor_model = client_manager.get_model("Person") print "10 first French films found on DBPedia (with OldMan)" print "----------------------------------------------------" q1_start_time = time.time()
# NOTE(review): fragment — "sh" and "logger" are configured before this chunk,
# and the final film_model.filter(...) call is cut off mid-argument-list.
sh.setLevel(logging.DEBUG)
logger.addHandler(sh)

#schema_url = "https://raw.githubusercontent.com/oldm/OldMan/master/examples/dbpedia_film_schema.ttl"
schema_url = path.join(path.dirname(__file__), "dbpedia_film_schema.ttl")
schema_graph = Graph().parse(schema_url, format="turtle")

#context_url = "https://raw.githubusercontent.com/oldm/OldMan/master/examples/dbpedia_film_context.jsonld"
context_url = path.join(path.dirname(__file__), "dbpedia_film_context.jsonld")

data_graph = Graph(SPARQLStore("http://dbpedia.org/sparql", context_aware=False))
cache_region = make_region().configure('dogpile.cache.memory_pickle')

# Datastore: SPARQL-aware triple store, with two models
data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph, cache_region=cache_region)
data_store.create_model("http://dbpedia.org/ontology/Film", context_url)
# JSON-LD terms can be used instead of IRIs
data_store.create_model("Person", context_url)

# Client resource manager
client_manager = ClientResourceManager(data_store)
# Re-uses the models of the data store
client_manager.import_store_models()

film_model = client_manager.get_model("http://dbpedia.org/ontology/Film")
actor_model = client_manager.get_model("Person")

print "10 first French films found on DBPedia (with OldMan)"
print "----------------------------------------------------"

q1_start_time = time.time()
# NOTE(review): call continues beyond this chunk
for film in film_model.filter(subjects=["http://dbpedia.org/resource/Category:French_films"],
"ro_property": { "@id": "ex:roProperty", "@type": "xsd:string" }, "secret": { "@id": "ex:secret", "@type": "xsd:string" }, "bad_property": { "@id": "ex:badProperty", "@type": "xsd:string" } } } data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph) data_store.create_model("LocalClass", context, iri_prefix="http://localhost/objects/") client_manager = ClientResourceManager(data_store) client_manager.import_store_models() lc_model = client_manager.get_model("LocalClass") class PropertyTest(TestCase): def tearDown(self): """ Clears the data graph """ data_graph.update("CLEAR DEFAULT") def test_read_and_write_only(self): with self.assertRaises(OMPropertyDefError):
# NOTE(review): fragment — the "def" line of the first method (the one
# returning old_number_value squared, presumably square_value given the
# declare_method call below — confirm) lies outside this chunk.
    return self.old_number_value**2


def print_new_value(self):
    print self.new_value


def disclaim1(self):
    return old_disclaim


def disclaim2(self):
    return new_disclaim


data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph)

# ChildClass is generated before its ancestors!!
child_prefix = "http://localhost/children/"
uri_fragment = "this"
data_store.create_model("ChildClass", context, iri_prefix=child_prefix,
                        iri_fragment=uri_fragment, incremental_iri=True)
data_store.create_model("GrandParentClass", context,
                        iri_prefix="http://localhost/ancestors/",
                        iri_fragment=uri_fragment)
data_store.create_model("ParentClass", context, iri_prefix="http://localhost/parents/")

client_manager = ClientResourceManager(data_store)
client_manager.import_store_models()

# Methods
client_manager.declare_method(square_value, "square_value", EXAMPLE + "GrandParentClass")
client_manager.declare_method(print_new_value, "print_new_value", EXAMPLE + "ChildClass")

# Method overloading
# NOTE(review): fragment — the last test method is cut off mid-body at the end
# of this chunk.
from oldman.rest.crud import HashLessCRUDer

EXAMPLE = "http://localhost/vocab#"
HYDRA = "http://www.w3.org/ns/hydra/core#"

# One named graph for the schema, one for the data, in a shared dataset
default_graph = ConjunctiveGraph()
schema_graph = default_graph.get_context(URIRef("http://localhost/schema"))
data_graph = default_graph.get_context(URIRef("http://localhost/data"))

# Declaration (no attribute)
schema_graph.add((URIRef(EXAMPLE + "MyClass"), RDF.type, URIRef(HYDRA + "Class")))

context = {"@context": {"ex": EXAMPLE,
                        "id": "@id",
                        "type": "@type",
                        "MyClass": "ex:MyClass"}}

data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph)
data_store.create_model("MyClass", context, iri_generator=UUIDFragmentIriGenerator())

client_manager = ClientResourceManager(data_store)
client_manager.import_store_models()

crud_controller = HashLessCRUDer(client_manager)
model = client_manager.get_model("MyClass")


class DatatypeTest(TestCase):

    def tearDown(self):
        """ Clears the data graph """
        data_graph.update("CLEAR DEFAULT")

    # NOTE(review): body continues beyond this chunk
    def test_generation(self):
        hashless_iri = "http://example.org/doc1"
"fingerprint": { "@id": "wot:fingerprint", "@type": "xsd:hexBinary" }, "hex_id": { "@id": "wot:hex_id", "@type": "xsd:hexBinary" } } } # Cache #cache_region = None cache_region = make_region().configure('dogpile.cache.memory_pickle') data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph, cache_region=cache_region) # Takes the prefixes from the schema graph data_store.extract_prefixes(schema_graph) #lp_name_or_iri = "LocalPerson" lp_name_or_iri = MY_VOC + "LocalPerson" data_store.create_model(lp_name_or_iri, context, iri_prefix="http://localhost/persons/", iri_fragment="me") data_store.create_model("LocalRSAPublicKey", context) data_store.create_model("LocalGPGPublicKey", context) client_manager = ClientResourceManager(data_store) client_manager.import_store_models() lp_model = client_manager.get_model(lp_name_or_iri) rsa_model = client_manager.get_model("LocalRSAPublicKey") gpg_model = client_manager.get_model("LocalGPGPublicKey")
}, "@id": "urn:test:vocab:MyClass", "@type": "hydra:Class", "hydra:supportedProperty": [ { "hydra:property": "urn:test:vocab:isWorking" } ] } parse_graph_safely(schema_graph, data=json.dumps(my_class_def), format="json-ld") context_file_path = path.join(path.dirname(__file__), "basic_context.jsonld") context_iri = "/contexts/context.jsonld" store = SPARQLDataStore(Graph(), schema_graph=schema_graph) store.create_model("MyClass", context_iri, context_file_path=context_file_path) client_manager = ClientResourceManager(store) client_manager.import_store_models() model = client_manager.get_model("MyClass") class ContextUriTest(TestCase): def test_context_uri(self): obj = model.new(is_working=True) self.assertEquals(obj.context, context_iri) self.assertTrue(obj.is_working) print obj.to_rdf()