def test_compute_qname(self):
    """Test sequential assignment of unknown prefixes"""
    g = Graph()
    # First unknown namespace gets ns1 ...
    self.assertEqual(
        g.compute_qname(URIRef("http://foo/bar/baz")),
        ("ns1", URIRef("http://foo/bar/"), "baz"),
    )
    # ... the next distinct one gets ns2.
    self.assertEqual(
        g.compute_qname(URIRef("http://foo/bar#baz")),
        ("ns2", URIRef("http://foo/bar#"), "baz"),
    )
    # should skip to ns4 when ns3 is already assigned
    g.bind("ns3", URIRef("http://example.org/"))
    self.assertEqual(
        g.compute_qname(URIRef("http://blip/blop")),
        ("ns4", URIRef("http://blip/"), "blop"),
    )
    # should return empty qnames correctly (namespace already known -> ns1)
    self.assertEqual(
        g.compute_qname(URIRef("http://foo/bar/")),
        ("ns1", URIRef("http://foo/bar/"), ""),
    )
    # should compute qnames of URNs correctly as well
    self.assertEqual(
        g.compute_qname(URIRef("urn:ISSN:0167-6423")),
        ("ns5", URIRef("urn:ISSN:"), "0167-6423"),
    )
    # same URN namespace -> same ns5 prefix, empty local part
    self.assertEqual(
        g.compute_qname(URIRef("urn:ISSN:")),
        ("ns5", URIRef("urn:ISSN:"), ""),
    )
def setupGraph():
    """Create an empty Graph with the ms/gmm namespace prefixes bound."""
    graph = Graph()
    for prefix, namespace in (('ms', MS), ('gmm', GMM)):
        graph.bind(prefix, namespace)
    return graph
class NegatedDisjunctTest(unittest.TestCase):
    """Stratified evaluation of a class defined by a negated disjunction."""

    def setUp(self):
        # Fresh ontology graph per test; Individual instances attach to it.
        self.ontGraph = Graph()
        self.ontGraph.bind('ex', EX_NS)
        self.ontGraph.bind('owl', OWL_NS)
        Individual.factoryGraph = self.ontGraph

    def testStratified(self):
        # ex:Foo == ex:Omega AND NOT (ex:Bar OR ex:Baz)
        bar = EX.Bar
        baz = EX.Baz
        noBarOrBaz = ~(bar | baz)
        omega = EX.Omega
        foo = omega & noBarOrBaz
        foo.identifier = EX_NS.Foo
        ruleStore, ruleGraph, network = SetupRuleStore(makeNetwork=True)
        individual = BNode()
        omega.extent = [individual]
        # Push negations inward / normalize before stratification.
        NormalFormReduction(self.ontGraph)
        self.assertEqual(
            repr(foo),
            'ex:Omega THAT ( NOT ex:Bar ) AND ( NOT ex:Baz )')
        posRules, negRules = CalculateStratifiedModel(network,
                                                      self.ontGraph,
                                                      [EX_NS.Foo])
        foo.graph = network.inferredFacts
        self.failUnless(not posRules,
                        "There should be no rules in the 0 strata.")
        self.assertEqual(
            repr(negRules[0]),
            "Forall ?X ( ex:Foo(?X) :- And( ex:Omega(?X) not ex:Bar(?X) not ex:Baz(?X) ) )"
        )
        self.failUnless(
            len(negRules) == 1,
            "There should only be one negative rule in a higher strata")
        # The Omega member is neither Bar nor Baz, so it must be a Foo.
        self.failUnless(individual in foo.extent,
                        "%s should be a member of ex:Foo" % individual)
class EARLPlugin(Plugin): """ Activate the EARL plugin to generate a report of the test results using EARL. """ name = 'EARL' def begin(self): self.graph = Graph() self.graph.bind("earl", EARL.uri) def finalize(self, result): # TODO: add plugin options for specifying where to send # output. self.graph.serialize("file:results-%s.rdf" % date_time(), format="pretty-xml") def addDeprecated(self, test): print "Deprecated: %s" % test def addError(self, test, err, capt): print "Error: %s" % test def addFailure(self, test, err, capt, tb_info): print "Failure: %s" % test def addSkip(self, test): print "Skip: %s" % test def addSuccess(self, test, capt): result = BNode() # TODO: coin URIRef self.graph.add((result, RDFS.label, Literal(test))) self.graph.add((result, RDFS.comment, Literal(type(test)))) self.graph.add((result, RDF.type, EARL.TestResult)) self.graph.add((result, EARL.outcome, EARL["pass"]))
class NegationOfAtomicConcept(unittest.TestCase):
    """Stratified evaluation of a class defined as the negation of an atom."""

    def setUp(self):
        self.ontGraph = Graph()
        self.ontGraph.bind('ex', EX_NS)
        self.ontGraph.bind('owl', OWL_NS)
        Individual.factoryGraph = self.ontGraph

    def testAtomicNegation(self):
        # ex:Baz == NOT ex:Bar
        bar = EX.Bar
        baz = ~bar
        baz.identifier = EX_NS.Baz
        ruleStore, ruleGraph, network = SetupRuleStore(makeNetwork=True)
        individual = BNode()
        individual2 = BNode()
        (EX.OtherClass).extent = [individual]
        bar.extent = [individual2]
        NormalFormReduction(self.ontGraph)
        # Atomic negation renders as a DisjointWith axiom.
        self.assertEqual(repr(baz),
                         "Class: ex:Baz DisjointWith ex:Bar\n")
        posRules, negRules = CalculateStratifiedModel(network,
                                                      self.ontGraph,
                                                      [EX_NS.Foo])
        self.failUnless(not posRules,
                        "There should be no rules in the 0 strata.")
        self.failUnless(
            len(negRules) == 1,
            "There should only be one negative rule in a higher strata")
        self.assertEqual(repr(negRules[0]),
                         "Forall ?X ( ex:Baz(?X) :- not ex:Bar(?X) )")
        baz.graph = network.inferredFacts
        # Non-Bar individual is inferred into Baz; the Bar member is not.
        self.failUnless(individual in baz.extent,
                        "%s should be a member of ex:Baz" % individual)
        self.failUnless(individual2 not in baz.extent,
                        "%s should *not* be a member of ex:Baz" % individual2)
def convert(self): store = Graph() store.bind("dc", "http://purl.org/dc/elements/1.1/") store.bind("data", "http://data.rpi.edu/vocab/") DC = Namespace("http://purl.org/dc/elements/1.1/") DATA = Namespace("http://data.rpi.edu/vocab/") RDFS = Namespace("http://www.w3.org/2000/01/rdf-schema#") FOAF = Namespace("http://xmlns.com/foaf/0.1/") header = self.reader.next() #Skip header minSize = len(header) #print header for row in self.reader: if len(row) != minSize: print "Number of columns different than header ({0} vs. {1}). Skipping".format(len(row), minSize) continue store.add((row[8], DC['identifier'], Literal(row[0]))) names = row[2].split(", ") creator=URIRef("http://data.rpi.edu/people/"+names[0].capitalize()+names[1].capitalize()) store.add((row[8], DC['creator'], creator)) store.add((creator, FOAF['firstName'], names[0])) store.add((creator, DC['family_name'], names[1])) store.add((row[8], DC['dateAccepted'], Literal(row[5]))) store.add((row[8], RDFS['comments'], Literal(row[6]))) store.add((row[8], DC['description'], Literal(row[6]))) store.add((row[8], DC['bibliographicCitation'], Literal(row[7]))) store.add((row[8], DC['title'], Literal(row[10]))) store.add((row[8], RDFS['label'], Literal(row[10]))) store.add((row[8], DC['subject'], URIRef(DATA+re.sub("\s", "_", row[9])))) print(store.serialize(format="pretty-xml"))
def transform(self, graph: Graph, instances: bool = True,
              expression: bool = True) -> RdfGraph:
    """Translate a parsed expression graph into an OWL RDF graph.

    :param graph: parse graph exposing ``.text`` and ``.root``.
    :param instances: if True, also label the main individual.
    :param expression: if True, emit the OWL class-expression axioms.
    :return: the populated rdflib graph.
    """
    g = RdfGraph()
    g.bind("pl", self.pl_ns)
    g.bind("sct", self.sct_ns)
    g.bind("umls", self.umls_ns)
    g.bind("owl", OWL)
    g.bind("skos", SKOS)
    text = graph.text
    # Derive the main class URI from the expression's text.
    main_class = self.pl_ns[self._convert(text)]
    class_expression, main_individual = self._node_to_class(
        graph.root, graph, g, instances, expression)
    if expression:
        g.add((main_class, RDF.type, OWL.Class))
        g.add((main_class, OWL.equivalentClass, class_expression))
        g.add((main_class, RDFS.label, Literal(graph.text)))
    if instances:
        g.add((main_individual, RDFS.label, Literal(graph.text)))
    return g
def testQueryMemoization(self):
    # NOTE(review): the unconditional SkipTest raise on the first line makes
    # everything below unreachable dead code; kept for reference.
    raise SkipTest("SKIPFAIL testQueryMemoization, see test/testBFPQueryMemoization.py")
    topDownStore = TopDownSPARQLEntailingStore(
        self.owlGraph.store,
        self.owlGraph,
        idb=self.program,
        DEBUG=False,
        nsBindings=nsMap,
        decisionProcedure=BFP_METHOD,
        identifyHybridPredicates=True)
    targetGraph = Graph(topDownStore)
    for pref, nsUri in nsMap.items():
        targetGraph.bind(pref, nsUri)
    goal = (Variable('SUBJECT'), RDF.type, EX.C)
    queryLiteral = EDBQuery([BuildUnitermFromTuple(goal)],
                            self.owlGraph,
                            [Variable('SUBJECT')])
    query = queryLiteral.asSPARQL()
    # rt=targetGraph.query(query,initNs=nsMap)
    # if len(topDownStore.edbQueries) == len(set(topDownStore.edbQueries)):
    #     pprint(topDownStore.edbQueries)
    print("Queries dispatched against EDB")
    for query in self.owlGraph.queriesDispatched:
        print(query)
    # Would verify the EDB was queried exactly 4 times (no duplicates).
    self.failUnlessEqual(
        len(self.owlGraph.queriesDispatched), 4, "Duplicate query")
def getGraph(self):
    """Merge every changeset into one graph with rdf/cs prefixes bound."""
    merged = Graph()
    merged.bind("rdf", RDF)
    merged.bind("cs", CS)
    for changeset in self.changesets.values():
        merged += changeset
    return merged
class UniversalRestrictionTest(unittest.TestCase):
    """Reduction of universal restrictions into stratifiable negations."""

    def setUp(self):
        self.ontGraph = Graph()
        self.ontGraph.bind('ex', EX_NS)
        self.ontGraph.bind('owl', OWL_NS)
        Individual.factoryGraph = self.ontGraph

    def testNegatedDisjunctionTest(self):
        contains = Property(EX_NS.contains)
        omega = EX.Omega
        alpha = EX.Alpha
        innerDisjunct = omega | alpha
        foo = EX.foo
        # ex:Bar == ex:foo AND (contains ONLY NOT (Omega OR Alpha))
        testClass1 = foo & (contains | only | ~innerDisjunct)
        testClass1.identifier = EX_NS.Bar
        self.assertEqual(repr(testClass1),
                         "ex:foo that ( ex:contains only ( not ( ex:Omega or ex:Alpha ) ) )")
        # Reduction rewrites 'only not X' into 'not (some X)'.
        NormalFormReduction(self.ontGraph)
        self.assertEqual(repr(testClass1),
                         "ex:foo that ( not ( ex:contains some ( ex:Omega or ex:Alpha ) ) )")
        individual1 = BNode()
        individual2 = BNode()
        foo.extent = [individual1]
        contains.extent = [(individual1, individual2)]
        (EX.Baz).extent = [individual2]
        ruleStore, ruleGraph, network = SetupRuleStore(makeNetwork=True)
        posRules, ignored = CalculateStratifiedModel(network,
                                                     self.ontGraph,
                                                     [EX_NS.Bar])
        self.failUnless(not posRules,
                        "There should be no rules in the 0 strata!")
        self.assertEqual(len(ignored), 2,
                         "There should be 2 'negative' rules")
        testClass1.graph = network.inferredFacts
        self.failUnless(individual1 in testClass1.extent,
                        "%s should be in ex:Bar's extent" % individual1)

    def testNominalPartition(self):
        partition = EnumeratedClass(EX_NS.part,
                                    members=[EX_NS.individual1,
                                             EX_NS.individual2,
                                             EX_NS.individual3])
        subPartition = EnumeratedClass(members=[EX_NS.individual1])
        partitionProp = Property(EX_NS.propFoo,
                                 range=partition.identifier)
        # ex:Foo == ex:Bar AND (propFoo ONLY {individual1})
        self.testClass = (EX.Bar) & (partitionProp | only | subPartition)
        self.testClass.identifier = EX_NS.Foo
        self.assertEqual(repr(self.testClass),
                         "ex:Bar that ( ex:propFoo only { ex:individual1 } )")
        self.assertEqual(repr(self.testClass.identifier),
                         "rdflib.term.URIRef('http://example.com/Foo')")
        # Reduction excludes the other partition members via negated
        # value restrictions.
        NormalFormReduction(self.ontGraph)
        self.assertEqual(repr(self.testClass),
                         "ex:Bar that ( not ( ex:propFoo value ex:individual2 ) ) and ( not ( ex:propFoo value ex:individual3 ) )")
        ruleStore, ruleGraph, network = SetupRuleStore(makeNetwork=True)
        ex = BNode()
        (EX.Bar).extent = [ex]
        self.ontGraph.add((ex, EX_NS.propFoo, EX_NS.individual1))
        CalculateStratifiedModel(network, self.ontGraph, [EX_NS.Foo])
        self.failUnless((ex, RDF.type, EX_NS.Foo) in network.inferredFacts,
                        "Missing level 1 predicate (ex:Foo)")
def team_index(request, format=None):
    """List updated teams as HTML, RDF/XML or N-Triples.

    When *format* is not supplied it is content-negotiated from the
    request's HTTP_ACCEPT header.
    """
    logging.info("Format: %s" % format)
    if format is None:
        # Fix: the list previously offered 'application/rdf+n3', which could
        # never match the 'application/rdf+nt' comparison below, so the
        # N-Triples branch was unreachable via content negotiation.
        best_match = mimeparse.best_match(
            ['application/rdf+xml', 'application/rdf+nt', 'text/html'],
            request.META['HTTP_ACCEPT'])
        if best_match == 'application/rdf+xml':
            format = 'rdf+xml'
        elif best_match == 'application/rdf+nt':
            format = 'rdf+nt'
        else:
            format = 'html'
    team_list = College.objects.filter(updated=True).order_by('name')
    if (format != 'html'):
        store = Graph()
        store.bind("cfb", "http://www.cfbreference.com/cfb/0.1/")
        CFB = Namespace("http://www.cfbreference.com/cfb/0.1/")
        for current_team in team_list:
            team = BNode()
            store.add((team, RDF.type, CFB["Team"]))
            store.add((team, CFB["name"], Literal(current_team.name)))
            store.add((team, CFB["link"],
                       Literal(current_team.get_absolute_url())))
        if (format == 'rdf+xml'):
            return HttpResponse(store.serialize(format="pretty-xml"),
                                mimetype='application/rdf+xml')
        if (format == 'rdf+nt'):
            return HttpResponse(store.serialize(format="nt"),
                                mimetype='application/rdf+nt')
    return render_to_response('college/teams.html', {'team_list': team_list})
class NegatedDisjunctTest(unittest.TestCase):
    """Stratified evaluation of a class defined by a negated disjunction."""

    def setUp(self):
        self.ontGraph = Graph()
        self.ontGraph.bind('ex', EX_NS)
        self.ontGraph.bind('owl', OWL_NS)
        Individual.factoryGraph = self.ontGraph

    def testStratified(self):
        # ex:Foo == ex:Omega AND NOT (ex:Bar OR ex:Baz)
        bar = EX.Bar
        baz = EX.Baz
        noBarOrBaz = ~(bar | baz)
        omega = EX.Omega
        foo = omega & noBarOrBaz
        foo.identifier = EX_NS.Foo
        ruleStore, ruleGraph, network = SetupRuleStore(makeNetwork=True)
        individual = BNode()
        omega.extent = [individual]
        NormalFormReduction(self.ontGraph)
        self.assertEqual(repr(foo),
                         "ex:Omega that ( not ex:Bar ) and ( not ex:Baz )")
        posRules, negRules = CalculateStratifiedModel(network,
                                                     self.ontGraph,
                                                     [EX_NS.Foo])
        foo.graph = network.inferredFacts
        self.failUnless(not posRules,
                        "There should be no rules in the 0 strata!")
        self.assertEqual(repr(negRules[0]),
                         "Forall ?X ( ex:Foo(?X) :- And( ex:Omega(?X) not ex:Bar(?X) not ex:Baz(?X) ) )")
        self.failUnless(len(negRules) == 1,
                        "There should only be one negative rule in a higher strata")
        # The Omega member is neither Bar nor Baz, so it must be a Foo.
        self.failUnless(individual in foo.extent,
                        "%s should be a member of ex:Foo" % individual)
def testQueryMemoization(self):
    # NOTE(review): the unconditional SkipTest raise makes everything below
    # unreachable dead code; kept for reference.
    raise SkipTest(
        "SKIPFAIL testQueryMemoization, see test/testBFPQueryMemoization.py"
    )
    topDownStore = TopDownSPARQLEntailingStore(
        self.owlGraph.store,
        self.owlGraph,
        idb=self.program,
        DEBUG=False,
        nsBindings=nsMap,
        decisionProcedure=BFP_METHOD,
        identifyHybridPredicates=True)
    targetGraph = Graph(topDownStore)
    for pref, nsUri in nsMap.items():
        targetGraph.bind(pref, nsUri)
    goal = (Variable('SUBJECT'), RDF.type, EX.C)
    queryLiteral = EDBQuery([BuildUnitermFromTuple(goal)],
                            self.owlGraph,
                            [Variable('SUBJECT')])
    query = queryLiteral.asSPARQL()
    # rt=targetGraph.query(query,initNs=nsMap)
    # if len(topDownStore.edbQueries) == len(set(topDownStore.edbQueries)):
    #     pprint(topDownStore.edbQueries)
    print("Queries dispatched against EDB")
    for query in self.owlGraph.queriesDispatched:
        print(query)
    # Would verify the EDB was queried exactly 4 times (no duplicates).
    self.failUnlessEqual(len(self.owlGraph.queriesDispatched), 4,
                         "Duplicate query")
class NegationOfAtomicConcept(unittest.TestCase):
    """Stratified evaluation of a class defined as the negation of an atom."""

    def setUp(self):
        self.ontGraph = Graph()
        self.ontGraph.bind('ex', EX_NS)
        self.ontGraph.bind('owl', OWL_NS)
        Individual.factoryGraph = self.ontGraph

    def testAtomicNegation(self):
        # ex:Baz == NOT ex:Bar
        bar = EX.Bar
        baz = ~bar
        baz.identifier = EX_NS.Baz
        ruleStore, ruleGraph, network = SetupRuleStore(makeNetwork=True)
        individual = BNode()
        individual2 = BNode()
        (EX.OtherClass).extent = [individual]
        bar.extent = [individual2]
        NormalFormReduction(self.ontGraph)
        # Atomic negation renders as a DisjointWith axiom.
        self.assertEqual(repr(baz),
                         "Class: ex:Baz DisjointWith ex:Bar\n")
        posRules, negRules = CalculateStratifiedModel(network,
                                                     self.ontGraph,
                                                     [EX_NS.Foo])
        self.failUnless(not posRules,
                        "There should be no rules in the 0 strata!")
        self.failUnless(len(negRules) == 1,
                        "There should only be one negative rule in a higher strata")
        self.assertEqual(repr(negRules[0]),
                         "Forall ?X ( ex:Baz(?X) :- not ex:Bar(?X) )")
        baz.graph = network.inferredFacts
        # Non-Bar individual inferred into Baz; the Bar member is excluded.
        self.failUnless(individual in baz.extent,
                        "%s should be a member of ex:Baz" % individual)
        self.failUnless(individual2 not in baz.extent,
                        "%s should *not* be a member of ex:Baz" % individual2)
def createTestOntGraph():
    """Build a small anatomy test ontology (knee/leg part-of hierarchy).

    ex:KneeJoint == Joint AND (isPartOf SOME Knee); ex:LegStructure ==
    Structure AND (isPartOf SOME Leg); isPartOf is transitive and
    hasLocation is a subproperty of it.  Returns the populated graph.
    """
    graph = Graph()
    graph.bind('ex', EX_NS, True)
    Individual.factoryGraph = graph
    kneeJoint = EX_CL.KneeJoint
    joint = EX_CL.Joint
    knee = EX_CL.Knee
    isPartOf = Property(EX_NS.isPartOf)
    graph.add((isPartOf.identifier, RDF.type, OWL_NS.TransitiveProperty))
    structure = EX_CL.Structure
    leg = EX_CL.Leg
    hasLocation = Property(EX_NS.hasLocation, subPropertyOf=[isPartOf])
    # graph.add((hasLocation.identifier,RDFS.subPropertyOf,isPartOf.identifier))
    kneeJoint.equivalentClass = [joint & (isPartOf | some | knee)]
    legStructure = EX_CL.LegStructure
    legStructure.equivalentClass = [structure & (isPartOf | some | leg)]
    structure += leg
    structure += joint
    # Knee is asserted to be located in some leg.
    locatedInLeg = hasLocation | some | leg
    locatedInLeg += knee

    # NOTE(review): the block below is a retained debugging experiment for
    # StructuralTransformation; left commented out as found.
    # print graph.serialize(format='n3')
    # newGraph = Graph()
    # newGraph.bind('ex',EX_NS,True)
    # newGraph,conceptMap = StructuralTransformation(graph,newGraph)
    # revDict = dict([(v,k) for k,v in conceptMap.items()])
    # Individual.factoryGraph = newGraph
    # for oldConceptId ,newConceptId in conceptMap.items():
    #     if isinstance(oldConceptId,BNode):
    #         oldConceptRepr = repr(Class(oldConceptId,graph=graph))
    #         if oldConceptRepr.strip() == 'Some Class':
    #             oldConceptRepr = manchesterSyntax(
    #                 oldConceptId,
    #                 graph)
    #         print "%s -> %s"%(
    #             oldConceptRepr,
    #             newConceptId
    #         )
    #     else:
    #         print "%s -> %s"%(
    #             oldConceptId,
    #             newConceptId
    #         )
    #
    # for c in AllClasses(newGraph):
    #     if isinstance(c.identifier,BNode) and c.identifier in conceptMap.values():
    #         print "## %s ##"%c.identifier
    #     else:
    #         print "##" * 10
    #     print c.__repr__(True)
    #     print "################################"
    return graph
class NegatedExistentialRestrictionTest(unittest.TestCase):
    """Stratified reasoning over negated existential restrictions."""

    def setUp(self):
        self.ontGraph = Graph()
        self.ontGraph.bind('ex', EX_NS)
        self.ontGraph.bind('owl', OWL_NS)
        Individual.factoryGraph = self.ontGraph

    def testInConjunct(self):
        contains = Property(EX_NS.contains)
        # Operation AND NOT (contains SOME ...Exclusion)
        #           AND (contains SOME CABG)
        testCase2 = EX.Operation & ~ (
            contains | some | EX.IsolatedCABGConcomitantExclusion) & \
            (contains | some | EX.CoronaryArteryBypassGrafting)
        testCase2.identifier = EX_NS.IsolatedCABGOperation
        NormalFormReduction(self.ontGraph)
        self.assertEqual(
            repr(testCase2),
            'ex:Operation THAT ( ex:contains SOME ex:CoronaryArteryBypassGrafting ) AND ( NOT ( ex:contains SOME ex:IsolatedCABGConcomitantExclusion ) )'
        )
        ruleStore, ruleGraph, network = SetupRuleStore(makeNetwork=True)
        op = BNode()
        (EX.Operation).extent = [op]
        grafting = BNode()
        (EX.CoronaryArteryBypassGrafting).extent = [grafting]
        testCase2.graph.add((op, EX_NS.contains, grafting))
        CalculateStratifiedModel(network,
                                 testCase2.graph,
                                 [EX_NS.Foo, EX_NS.IsolatedCABGOperation])
        testCase2.graph = network.inferredFacts
        # op contains a CABG and no exclusion, so it qualifies.
        self.failUnless(
            op in testCase2.extent,
            "%s should be in ex:IsolatedCABGOperation's extent" % op)

    def testGeneralConceptInclusion(self):
        # Some Class
        # ## Primitive Type  ##
        # SubClassOf: Class: ex:NoExclusion .
        # DisjointWith ( ex:contains some ex:IsolatedCABGConcomitantExclusion )
        contains = Property(EX_NS.contains)
        testClass = ~(contains | some | EX.Exclusion)
        testClass2 = EX.NoExclusion
        testClass2 += testClass
        NormalFormReduction(self.ontGraph)
        individual1 = BNode()
        individual2 = BNode()
        contains.extent = [(individual1, individual2)]
        ruleStore, ruleGraph, network = SetupRuleStore(makeNetwork=True)
        posRules, negRules = CalculateStratifiedModel(network,
                                                      self.ontGraph,
                                                      [EX_NS.NoExclusion])
        self.failUnless(not posRules,
                        "There should be no rules in the 0 strata.")
        self.assertEqual(len(negRules), 2,
                         "There should be 2 'negative' rules")
        Individual.factoryGraph = network.inferredFacts
        targetClass = Class(EX_NS.NoExclusion,
                            skipOWLClassMembership=False)
        self.failUnless(
            individual1 in targetClass.extent,
            "There is a BNode that bears the contains relation with another individual that is not a member of Exclusion."
        )
        self.assertEquals(len(list(targetClass.extent)), 1,
                          "There should only be one member in NoExclusion")
def rdf_profile(request, username):
    '''Profile information comparable to the human-readable content
    returned by :meth:`profile`, but in RDF format.'''

    # retrieve user & publications - same logic as profile above
    user, userprofile = _get_profile_user(username)
    articles = userprofile.recent_articles(limit=10)

    # build an rdf graph with information author & publications
    rdf = RdfGraph()
    for prefix, ns in ns_prefixes.iteritems():
        rdf.bind(prefix, ns)
    author_node = BNode()
    profile_uri = URIRef(request.build_absolute_uri(
        reverse('accounts:profile', kwargs={'username': username})))
    profile_data_uri = URIRef(request.build_absolute_uri(
        reverse('accounts:profile-data', kwargs={'username': username})))

    # author information
    rdf.add((profile_uri, FOAF.primaryTopic, author_node))
    rdf.add((author_node, RDF.type, FOAF.Person))
    rdf.add((author_node, FOAF.nick, Literal(user.username)))
    rdf.add((author_node, FOAF.publications, profile_uri))
    # Prefer the ESD directory name; fall back to the Django account name.
    try:
        esd_data = userprofile.esd_data()
    except EsdPerson.DoesNotExist:
        esd_data = None
    if esd_data:
        rdf.add((author_node, FOAF.name, Literal(esd_data.directory_name)))
    else:
        rdf.add((author_node, FOAF.name, Literal(user.get_full_name())))
    if esd_data and not userprofile.suppress_esd_data:
        # Publish a hash of the email rather than the address itself.
        mbox_sha1sum = hashlib.sha1(esd_data.email).hexdigest()
        rdf.add((author_node, FOAF.mbox_sha1sum, Literal(mbox_sha1sum)))
        # NOTE(review): phone nested under the non-suppressed branch since it
        # reads esd_data — confirm against the original layout.
        if esd_data.phone:
            rdf.add((author_node, FOAF.phone,
                     URIRef('tel:' + esd_data.phone)))
    # TODO: use ESD profile data where appropriate
    # (and honor internet/directory suppressed, suppression override)

    # article information
    repo = Repository(request=request)
    for record in articles:
        obj = repo.get_object(record['pid'], type=Publication)
        obj_node = BNode()  # info:fedora/ uri is not public
        # relate to author
        rdf.add((author_node, FRBR.creatorOf, obj_node))
        rdf.add((author_node, FOAF.made, obj_node))
        # add object rdf
        rdf += obj.as_rdf(node=obj_node)
    response = HttpResponse(rdf.serialize(),
                            content_type='application/rdf+xml')
    response['Content-Location'] = profile_data_uri
    return response
class ReductionTestA(unittest.TestCase):
    """Step-wise reduction of a universal restriction over a nominal range."""

    def setUp(self):
        self.ontGraph = Graph()
        self.ontGraph.bind('ex', EX_NS)
        self.ontGraph.bind('owl', OWL_NS)
        Individual.factoryGraph = self.ontGraph
        # propFoo ranges over a 3-member nominal partition; foo is
        # constrained to ONLY the singleton sub-partition {individual1}.
        partition = EnumeratedClass(EX_NS.part,
                                    members=[EX_NS.individual1,
                                             EX_NS.individual2,
                                             EX_NS.individual3])
        subPartition = EnumeratedClass(
            EX_NS.partition, members=[EX_NS.individual1])
        partitionProp = Property(EX_NS.propFoo,
                                 range=partition)
        self.foo = EX.foo
        self.foo.subClassOf = [
            partitionProp | only | subPartition]

    def testUnivInversion(self):
        # Step 1: invert the universal over the nominal into an existential
        # over the complementary partition members.
        UniversalNominalRangeTransformer().transform(self.ontGraph)
        self.failUnlessEqual(len(list(self.foo.subClassOf)), 1,
                             "There should still be one subsumed restriction")
        subC = CastClass(first(self.foo.subClassOf))
        self.failUnless(not isinstance(subC, Restriction),
                        "subclass of a restriction")
        self.failUnless(
            subC.complementOf is not None, "Should be a complement!")
        innerC = CastClass(subC.complementOf)
        self.failUnless(isinstance(innerC, Restriction),
                        "complement of a restriction, not %r" % innerC)
        self.failUnlessEqual(innerC.onProperty, EX_NS.propFoo,
                             "restriction on propFoo")
        self.failUnless(
            innerC.someValuesFrom,
            "converted to an existential restriction not %r" % innerC)
        invertedC = CastClass(innerC.someValuesFrom)
        self.failUnless(
            isinstance(invertedC, EnumeratedClass),
            "existencial restriction on enumerated class")
        self.assertEqual(
            len(invertedC), 2,
            "existencial restriction on enumerated class of length 2")
        self.assertEqual(repr(invertedC),
                         "{ ex:individual2 ex:individual3 }",
                         "The negated partition should exclude individual1")
        # Steps 2-3: expand nominals into value restrictions, then push the
        # negation inward (De Morgan).
        NominalRangeTransformer().transform(self.ontGraph)
        DemorganTransformer().transform(self.ontGraph)
        subC = CastClass(first(self.foo.subClassOf))
        self.assertEqual(
            repr(subC),
            "( ( not ( ex:propFoo value ex:individual2 ) ) " +
            "and ( not ( ex:propFoo value ex:individual3 ) ) )")
def bfp(defaultDerivedPreds, options, factGraph, ruleSet, network,
        hybridPredicates):
    """Answer options.why via top-down SPARQL entailment (BFP strategy).

    Builds a TopDownSPARQLEntailingStore over the fact graph and rule set,
    runs the goal query, and prints timing plus dispatched-EDB-query and
    conflict-set diagnostics.
    """
    topDownDPreds = defaultDerivedPreds
    if options.builtinTemplates:
        # Optional N3 graph mapping builtin predicates to filter templates.
        builtinTemplateGraph = Graph().parse(options.builtinTemplates,
                                             format='n3')
        builtinDict = dict([
            (pred, template)
            for pred, _ignore, template in builtinTemplateGraph.triples(
                (None, TEMPLATES.filterTemplate, None))])
    else:
        builtinDict = None
    topDownStore = TopDownSPARQLEntailingStore(
        factGraph.store,
        factGraph,
        idb=ruleSet,
        DEBUG=options.debug,
        derivedPredicates=topDownDPreds,
        templateMap=builtinDict,
        nsBindings=network.nsMap,
        identifyHybridPredicates=options.hybrid,
        hybridPredicates=hybridPredicates)
    targetGraph = Graph(topDownStore)
    for pref, nsUri in list(network.nsMap.items()):
        targetGraph.bind(pref, nsUri)
    start = time.time()
    sTime = time.time() - start
    result = targetGraph.query(options.why, initNs=network.nsMap)
    if result.askAnswer:
        # ASK query: time-to-answer for the ground goal.
        sTime = time.time() - start
        if sTime > 1:
            sTimeStr = "%s seconds" % sTime
        else:
            sTime = sTime * 1000
            sTimeStr = "%s milli seconds" % sTime
        print("Time to reach answer ground goal answer of %s: %s" % (
            result.askAnswer[0], sTimeStr))
    else:
        # SELECT query: report per-row timing, optionally stopping at the
        # first answer.
        for rt in result:
            sTime = time.time() - start
            if sTime > 1:
                sTimeStr = "%s seconds" % sTime
            else:
                sTime = sTime * 1000
                sTimeStr = "%s milli seconds" % sTime
            if options.firstAnswer:
                break
            print("Time to reach answer %s via top-down SPARQL"
                  " sip strategy: %s" % (rt, sTimeStr))
    if options.output == 'conflict' and options.method == 'bfp':
        for _network, _goal in topDownStore.queryNetworks:
            print(network, _goal)
            _network.reportConflictSet(options.debug)
        for query in topDownStore.edbQueries:
            print(query.asSPARQL())
def serialize_as_rdf(media_resources):
    """Serialize a list of media resources as RDF triples.

    Args:
        media_resources (list): the list of media resources.

    Returns:
        string: RDF/XML serialization of the media resources.
    """
    g = Graph('IOMemory')
    g.bind('ma', URIRef('http://www.w3.org/ns/ma-ont#'))
    g.bind('foaf', URIRef('http://xmlns.com/foaf/0.1/'))
    ma = Namespace('http://www.w3.org/ns/ma-ont#')
    foaf = Namespace('http://xmlns.com/foaf/0.1/')
    for media in media_resources:
        # Mint an id for resources that were never persisted.
        if not media.id:
            media.id = str(uuid.uuid4()).replace("-", "")
        media.uri = URIRef('http://production.sti2.org/lsi/media/' + media.id)
        g.add((media.uri, ma.title, Literal(media.title)))
        if media.description:
            g.add((media.uri, ma.description, Literal(media.description)))
        g.add((media.uri, ma.locator,
               Literal(media.locator, datatype=XSD.anyURI)))
        # Optional attributes are guarded with hasattr + truthiness.
        if hasattr(media, 'width') and media.width:
            g.add((media.uri, ma.width,
                   Literal(media.width, datatype=XSD.integer)))
        if hasattr(media, 'height') and media.height:
            g.add((media.uri, ma.height,
                   Literal(media.height, datatype=XSD.integer)))
        if hasattr(media, 'author_uri') and media.author_uri:
            author_uri_ref = URIRef(media.author_uri)
            g.add((media.uri, ma.contributor, author_uri_ref))
            g.add((author_uri_ref, RDF.type, ma.Agent))
            # NOTE(review): author_name nested here because author_uri_ref is
            # only defined in this branch — confirm against original layout.
            if hasattr(media, 'author_name') and media.author_name:
                g.add((author_uri_ref, RDFS.label,
                       Literal(media.author_name)))
        if hasattr(media, 'created') and media.created:
            # Space -> 'T' converts "YYYY-MM-DD hh:mm:ss" to xsd:dateTime form.
            g.add((media.uri, ma.creationDate,
                   Literal(str(media.created).replace(' ', 'T'),
                           datatype=XSD.dateTime)))
        if hasattr(media, 'published') and media.published:
            g.add((media.uri, ma.releaseDate,
                   Literal(str(media.published).replace(' ', 'T'),
                           datatype=XSD.dateTime)))
        if hasattr(media, 'latitude') and media.latitude:
            g.add((media.uri, ma.locationLatitude,
                   Literal(media.latitude, datatype=XSD.double)))
        if hasattr(media, 'longitude') and media.longitude:
            g.add((media.uri, ma.locationLongitude,
                   Literal(media.longitude, datatype=XSD.double)))
        if hasattr(media, 'location_name') and media.location_name:
            g.add((media.uri, ma.locationName, Literal(media.location_name)))
        for keyword in media.keywords:
            g.add((media.uri, ma.hasKeyword, URIRef(keyword)))
        if isinstance(media, model.VideoTrack):
            g.add((media.uri, RDF.type, ma.MediaResource))
            g.add((media.uri, foaf.thumbnail, URIRef(media.thumbnail)))
            g.add((media.uri, ma.duration,
                   Literal(media.duration, datatype=XSD.integer)))
        elif isinstance(media, model.Image):
            g.add((media.uri, RDF.type, ma.Image))
        # NOTE(review): bare except silently skips unknown publishers —
        # presumably deliberate best-effort; consider narrowing the clause.
        try:
            g.add((media.uri, ma.publisher,
                   URIRef(publisher_uri_dict.get(media.source))))
        except:
            pass
    return g.serialize(format='xml')
class EARLPlugin(Plugin):
    """
    Activate the EARL plugin to generate a report of the test
    results using EARL.
    """
    name = 'EARL'

    def begin(self):
        # One run-wide graph plus a node describing the tool under test.
        self.graph = Graph()
        self.graph.bind("earl", EARL.uri)
        tool = BNode('rdflib')
        self.graph.add((tool, RDF.type, EARL.TestTool))
        self.graph.add((tool, RDFS.label, Literal('rdflib.net')))
        self.graph.add((tool, RDFS.comment, Literal('nosetests')))

    def finalize(self, result):
        # TODO: add plugin options for specifying where to send
        # output.
        # Colons are stripped from the timestamp so the filename is valid
        # on filesystems that forbid ':'.
        self.graph.serialize("file:results-%s.rdf" % \
                             date_time().replace(':', '-'),
                             format="pretty-xml")

    def addDeprecated(self, test):
        print "Deprecated: %s" % test

    def addError(self, test, err, capt):
        print "Error: %s" % test

    def addFailure(self, test, err, capt, tb_info):
        print("FAILED")
        result = BNode()  # TODO: coin URIRef
        self.graph.add((result, RDFS.label, Literal(test)))
        self.graph.add((result, RDFS.comment, Literal(type(test))))
        self.graph.add((result, RDF.type, EARL.TestResult))
        self.graph.add((result, EARL.outcome, EARL["fail"]))

    def addSkip(self, test):
        print("SKIPPED")
        result = BNode()  # TODO: coin URIRef
        self.graph.add((result, RDFS.label, Literal(test)))
        self.graph.add((result, RDFS.comment, Literal(type(test))))
        self.graph.add((result, RDF.type, EARL.TestResult))
        self.graph.add((result, EARL.outcome, EARL["untested"]))

    def addSuccess(self, test, capt):
        print("PASSED")
        result = BNode()  # TODO: coin URIRef
        self.graph.add((result, RDFS.label, Literal(test)))
        self.graph.add((result, RDFS.comment, Literal(type(test))))
        self.graph.add((result, RDF.type, EARL.TestResult))
        self.graph.add((result, EARL.outcome, EARL["pass"]))
def graph():
    """Return an empty graph with common namespaces defined."""
    bindings = (
        ("cc", "http://creativecommons.org/ns#"),
        ("dc", "http://purl.org/dc/elements/1.1/"),
        ("dcq", "http://purl.org/dc/terms/"),
        ("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"),
        ("foaf", "http://xmlns.com/foaf/0.1/"),
    )
    result = Graph()
    for prefix, uri in bindings:
        result.bind(prefix, uri)
    return result
def testBindingNamespaces(self):
    """
    Binding custom namespaces just works.
    """
    graph = Graph(store=self.store)
    binding = ("prefix", URIRef("http://example.com/prefix"))
    # Not present before binding ...
    self.assertNotIn(binding, list(graph.namespaces()))
    graph.bind(binding[0], binding[1])
    # ... present after.
    self.assertIn(binding, list(graph.namespaces()))
class NegatedExistentialRestrictionTest(unittest.TestCase):
    """Stratified reasoning over negated existential restrictions."""

    def setUp(self):
        self.ontGraph = Graph()
        self.ontGraph.bind('ex', EX_NS)
        self.ontGraph.bind('owl', OWL_NS)
        Individual.factoryGraph = self.ontGraph

    def testInConjunct(self):
        contains = Property(EX_NS.contains)
        # Operation AND NOT (contains SOME ...Exclusion)
        #           AND (contains SOME CABG)
        testCase2 = EX.Operation & ~ (
            contains | some | EX.IsolatedCABGConcomitantExclusion) & \
            (contains | some | EX.CoronaryArteryBypassGrafting)
        testCase2.identifier = EX_NS.IsolatedCABGOperation
        NormalFormReduction(self.ontGraph)
        self.assertEqual(repr(testCase2),
                         'ex:Operation THAT ( ex:contains SOME ex:CoronaryArteryBypassGrafting ) AND ( NOT ( ex:contains SOME ex:IsolatedCABGConcomitantExclusion ) )')
        ruleStore, ruleGraph, network = SetupRuleStore(makeNetwork=True)
        op = BNode()
        (EX.Operation).extent = [op]
        grafting = BNode()
        (EX.CoronaryArteryBypassGrafting).extent = [grafting]
        testCase2.graph.add((op, EX_NS.contains, grafting))
        CalculateStratifiedModel(
            network,
            testCase2.graph,
            [EX_NS.Foo, EX_NS.IsolatedCABGOperation])
        testCase2.graph = network.inferredFacts
        # op contains a CABG and no exclusion, so it qualifies.
        self.failUnless(op in testCase2.extent,
                        "%s should be in ex:IsolatedCABGOperation's extent" % op)

    def testGeneralConceptInclusion(self):
        # Some Class
        # ## Primitive Type  ##
        # SubClassOf: Class: ex:NoExclusion .
        # DisjointWith ( ex:contains some ex:IsolatedCABGConcomitantExclusion )
        contains = Property(EX_NS.contains)
        testClass = ~(contains | some | EX.Exclusion)
        testClass2 = EX.NoExclusion
        testClass2 += testClass
        NormalFormReduction(self.ontGraph)
        individual1 = BNode()
        individual2 = BNode()
        contains.extent = [(individual1, individual2)]
        ruleStore, ruleGraph, network = SetupRuleStore(makeNetwork=True)
        posRules, negRules = CalculateStratifiedModel(
            network, self.ontGraph, [EX_NS.NoExclusion])
        self.failUnless(
            not posRules, "There should be no rules in the 0 strata.")
        self.assertEqual(
            len(negRules), 2, "There should be 2 'negative' rules")
        Individual.factoryGraph = network.inferredFacts
        targetClass = Class(EX_NS.NoExclusion,
                            skipOWLClassMembership=False)
        self.failUnless(individual1 in targetClass.extent,
                        "There is a BNode that bears the contains relation with another individual that is not a member of Exclusion.")
        self.assertEquals(len(list(targetClass.extent)), 1,
                          "There should only be one member in NoExclusion")
def testCannotUpdateDefaultNamespaces(self):
    """
    Binding the prefix OR the URI of a default namespaces is a no-op.
    """
    g = Graph(store=self.store)
    self.assertIn(XML_NAMESPACE, list(g.namespaces()))
    # Re-binding the default URI under a new prefix must not displace it.
    g.bind("hello-world", XML_NAMESPACE[1])
    self.assertIn(XML_NAMESPACE, list(g.namespaces()))
    # Re-binding the default *prefix* to a new URI must not displace it
    # either.  Fix: the original passed the whole (prefix, uri) tuple as
    # bind()'s prefix argument, so the prefix-collision path was never
    # exercised.
    g.bind(XML_NAMESPACE[0], URIRef("http://example.com/xml"))
    self.assertIn(XML_NAMESPACE, list(g.namespaces()))
def test_compute_qname(self):
    """Test sequential assignment of unknown prefixes"""
    graph = Graph()
    # Unknown namespaces are assigned ns1, ns2, ... in encounter order.
    expectations = [
        ("http://foo/bar/baz", ("ns1", "http://foo/bar/", "baz")),
        ("http://foo/bar#baz", ("ns2", "http://foo/bar#", "baz")),
    ]
    for uri, (prefix, namespace, local) in expectations:
        self.assertEqual(graph.compute_qname(URIRef(uri)),
                         (prefix, URIRef(namespace), local))
    # should skip to ns4 when ns3 is already assigned
    graph.bind("ns3", URIRef("http://example.org/"))
    self.assertEqual(graph.compute_qname(URIRef("http://blip/blop")),
                     ("ns4", URIRef("http://blip/"), "blop"))
def process_file(fileName):
    """Upload an IATI data file plus its CKAN metadata to a local 4store.

    Requires a sibling '<name>.meta.json' file; without it the upload is
    refused.  The data graph is named after the metadata's ckan_url, and
    a small DC metadata graph is appended to it afterwards.

    NOTE(review): the nested bare excepts swallow all errors (including
    KeyboardInterrupt) and the file handles are never closed — presumably
    deliberate best-effort batch behaviour; worth confirming.
    """
    store = HTTP4Store('http://localhost:8080')
    fileHandle = open(fileName)
    data = fileHandle.read()
    metaFileName, metaFileExtension = os.path.splitext(fileName)
    try:
        metaFile = open(metaFileName + '.meta.json', 'r')
        metaData = json.loads(metaFile.read())
        try:
            graph_name = metaData['ckan_url']
            try:
                print "Storing data " + metaData['name']
                response = store.add_graph(graph_name, data, "xml")
                print "Operation complete. Response status " + str(response.status)
                try:
                    # Build the DC metadata graph for the uploaded data.
                    metaRDF = Graph()
                    metaRDF.bind("dc", "http://purl.org/dc/elements/1.1/")
                    DC = Namespace("http://purl.org/dc/elements/1.1/")
                    metaGraph = URIRef("http://iatiregistry.org/")
                    metaRDF.add((URIRef(graph_name), DC['license'],
                                 Literal(metaData['license'])))
                    metaRDF.add((URIRef(graph_name), DC['title'],
                                 Literal(metaData['title'])))
                    metaRDF.add((URIRef(graph_name), DC['creator'],
                                 Literal(metaData['author_email'])))
                    # Publisher (first group) is optional.
                    try:
                        metaRDF.add((URIRef(graph_name), DC['publisher'],
                                     Literal(metaData['groups'][0])))
                    except:
                        pass
                    try:
                        print "Storing meta data for " + metaData['name']
                        response = store.append_graph(graph_name,
                                                      metaRDF.serialize(),
                                                      "xml")
                        print "Metadata operation complete. Response status " + str(response.status)
                        print
                    except:
                        print "Error storing meta data graph."
                except Exception:
                    print "Trouble generating metadata graph"
            except:
                print "Trouble storing data"
        except:
            print "Trouble reading meta data"
    except:
        print "No meta-data found. Linked data upload currently requires meta-data."
class UniversalComplementXFormTest(unittest.TestCase):
    """NormalFormReduction should rewrite a universal restriction over a
    complement into the complement of an existential restriction."""

    def setUp(self):
        self.ontGraph = Graph()
        self.ontGraph.bind('ex', EX_NS)
        self.ontGraph.bind('owl', OWL_NS)
        Individual.factoryGraph = self.ontGraph

    def testUniversalInversion(self):
        # ex:omega AND (ex:someProp ONLY (NOT ex:gamma))
        testClass1 = EX.omega & (Property(EX_NS.someProp) | only | ~EX.gamma)
        testClass1.identifier = EX_NS.Foo
        # assertEquals is a deprecated alias, removed in Python 3.12;
        # use the canonical assertEqual.
        self.assertEqual(repr(testClass1),
                         'ex:omega THAT ( ex:someProp ONLY ( NOT ex:gamma ) )')
        NormalFormReduction(self.ontGraph)
        self.assertEqual(repr(testClass1),
                         'ex:omega THAT ( NOT ( ex:someProp SOME ex:gamma ) )')
class FlatteningTest(unittest.TestCase):
    """ConjunctionFlattener should merge a nested conjunction into its
    enclosing conjunction."""

    def setUp(self):
        self.ontGraph = Graph()
        self.ontGraph.bind('ex', EX_NS)
        self.ontGraph.bind('owl', OWL_NS)
        Individual.factoryGraph = self.ontGraph
        nestedConjunct = EX.omega & EX.gamma
        self.topLevelConjunct = EX.alpha & nestedConjunct

    def testFlattening(self):
        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(repr(self.topLevelConjunct),
                         'ex:alpha THAT ( ex:omega AND ex:gamma )')
        ConjunctionFlattener().transform(self.ontGraph)
        self.assertEqual(repr(self.topLevelConjunct),
                         '( ex:alpha AND ex:omega AND ex:gamma )')
def create_rdf_map(self, shape, destination):
    """Write a turtle mapping for *shape* to *destination*.

    Builds a graph sharing this object's namespace manager, anchors every
    property on a single placeholder node, and serializes the result.
    """
    graph = Graph()
    graph.namespace_manager = self.g.namespace_manager
    graph.bind('sh', SHACL)
    node = Literal('placeholder node_uri')
    # The node associated with all the data entered.
    graph.add((node, RDF.type, shape['target_class']))
    # Properties may live inside a group or directly on the shape.
    grouped_props = (prop
                     for group in shape['groups']
                     for prop in group['properties'])
    for prop in grouped_props:
        self.add_property_to_map(graph, prop, node)
    for prop in shape['properties']:
        self.add_property_to_map(graph, prop, node)
    graph.serialize(destination=destination, format='turtle')
class FlatteningTest(unittest.TestCase):
    """ConjunctionFlattener should merge a nested conjunction into its
    enclosing conjunction (lowercase-repr variant of this suite)."""

    def setUp(self):
        self.ontGraph = Graph()
        self.ontGraph.bind('ex', EX_NS)
        self.ontGraph.bind('owl', OWL_NS)
        Individual.factoryGraph = self.ontGraph
        nestedConjunct = EX.omega & EX.gamma
        self.topLevelConjunct = EX.alpha & nestedConjunct

    # NOTE: method name keeps the historical "Flatenning" typo so any
    # external references to the test id keep working.
    def testFlatenning(self):
        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(repr(self.topLevelConjunct),
                         "ex:alpha that ( ex:omega and ex:gamma )")
        ConjunctionFlattener().transform(self.ontGraph)
        self.assertEqual(repr(self.topLevelConjunct),
                         "( ex:alpha and ex:omega and ex:gamma )")
class ReductionTestA(unittest.TestCase):
    """A universal restriction over an enumerated sub-partition should be
    rewritten into negated value restrictions over the excluded members."""

    def setUp(self):
        self.ontGraph = Graph()
        self.ontGraph.bind('ex', EX_NS)
        self.ontGraph.bind('owl', OWL_NS)
        Individual.factoryGraph = self.ontGraph
        # Three-member partition used as range of ex:propFoo, and a
        # one-member sub-partition that ex:foo is restricted to.
        partition = EnumeratedClass(
            EX_NS.part,
            members=[EX_NS.individual1,
                     EX_NS.individual2,
                     EX_NS.individual3])
        subPartition = EnumeratedClass(EX_NS.partition,
                                       members=[EX_NS.individual1])
        partitionProp = Property(EX_NS.propFoo, range=partition)
        self.foo = EX.foo
        self.foo.subClassOf = [partitionProp | only | subPartition]

    def testUnivInversion(self):
        # failUnless/failUnlessEqual are deprecated aliases removed in
        # Python 3.12; replaced with assertTrue/assertEqual throughout.
        UniversalNominalRangeTransformer().transform(self.ontGraph)
        self.assertEqual(len(list(self.foo.subClassOf)), 1,
                         "There should still be one subsumed restriction")
        subC = CastClass(first(self.foo.subClassOf))
        self.assertTrue(not isinstance(subC, Restriction),
                        "subclass of a restriction")
        self.assertTrue(subC.complementOf is not None,
                        "Should be a complement.")
        innerC = CastClass(subC.complementOf)
        self.assertTrue(isinstance(innerC, Restriction),
                        "complement of a restriction, not %r" % innerC)
        self.assertEqual(innerC.onProperty, EX_NS.propFoo,
                         "restriction on propFoo")
        self.assertTrue(
            innerC.someValuesFrom,
            "converted to an existential restriction not %r" % innerC)
        invertedC = CastClass(innerC.someValuesFrom)
        self.assertTrue(isinstance(invertedC, EnumeratedClass),
                        "existential restriction on enumerated class")
        self.assertEqual(
            len(invertedC), 2,
            "existencial restriction on enumerated class of length 2")
        self.assertEqual(repr(invertedC),
                         "{ ex:individual2 ex:individual3 }",
                         "The negated partition should exclude individual1")
        NominalRangeTransformer().transform(self.ontGraph)
        DemorganTransformer().transform(self.ontGraph)
        subC = CastClass(first(self.foo.subClassOf))
        self.assertEqual(
            repr(subC),
            "( ( not ( ex:propFoo value ex:individual2 ) ) and ( not ( ex:propFoo value ex:individual3 ) ) )"
        )
def jsondict2graph(json_dict):
    """Convert a SPARQL JSON-results dict into an rdflib Graph.

    Each binding's s/p/o terms are rebuilt as URIRef/Literal/BNode based on
    the reported term type.  Namespace prefixes from ``ns_store`` are bound
    onto the new graph.
    """
    g = Graph()
    for prefix_ns in ns_store.items():
        g.bind(*prefix_ns)
    for triple in json_dict['results']['bindings']:
        ts = triple['s'].get('type', None)
        vs = triple['s']['value']
        if ts == 'uri':
            s = URIRef(vs)
        elif ts == 'literal':
            s = Literal(vs)
        elif ts == 'bnode':
            s = BNode(vs)
        p = URIRef(triple['p']['value'])
        to = triple['o'].get('type', None)
        vo = triple['o']['value']
        dto = triple['o'].get('datatype', None)
        if to == 'uri':
            o = URIRef(vo)
        elif to == 'literal':
            # rdflib Literals are immutable: the datatype must be supplied
            # at construction time (the old "o.datatype = ..." assignment
            # does not work on modern rdflib).
            o = Literal(vo, datatype=URIRef(dto) if dto else None)
        elif to == 'bnode':
            # BUG FIX: the original tested ts (the *subject* type) here, so
            # blank-node objects were never constructed from this branch.
            o = BNode(vo)
        g.add((s, p, o))
    logging.debug(g.serialize(format='turtle'))
    return g
def format_members_to_rdf(members):
    """Build an rdflib Graph describing *members* as lt:Person resources.

    Each member dict must supply a "uri" key; every key of
    REF_DATA_EXTRACTOR is copied onto the person as an lt:<key> literal.
    """
    from rdflib.graph import Graph
    from rdflib import Literal, Namespace, URIRef
    from rdflib import RDF
    g = Graph()
    g.bind("lt", NAMESPACE)
    LT = Namespace(NAMESPACE)
    # Add triples using store's add method.
    for user in members:
        person = URIRef(user.get("uri"))
        g.add((person, RDF.type, LT["Person"]))
        # Only the keys are used; the extractor callables were never
        # consulted here, so iterate keys directly instead of .items().
        for key in REF_DATA_EXTRACTOR:
            g.add((person, LT[key], Literal(user.get(key))))
    return g
def format_members_to_rdf(members):
    """Build an rdflib Graph describing *members* as lt:Person resources.

    Each member dict must supply a 'uri' key; every key of
    REF_DATA_EXTRACTOR is copied onto the person as an lt:<key> literal.
    """
    from rdflib.graph import Graph
    from rdflib import Literal, Namespace, URIRef
    from rdflib import RDF
    g = Graph()
    g.bind("lt", NAMESPACE)
    LT = Namespace(NAMESPACE)
    # Add triples using store's add method.
    for user in members:
        person = URIRef(user.get('uri'))
        g.add((person, RDF.type, LT["Person"]))
        # Only the keys are used; the extractor callables were never
        # consulted here, so iterate keys directly instead of .items().
        for key in REF_DATA_EXTRACTOR:
            g.add((person, LT[key], Literal(user.get(key))))
    return g
def xml_to_RDF(xml_string):
    """Extract givenName/sn attributes from a SAML-style XML assertion and
    return a FOAF graph describing the user.

    Once both name parts are known, an rdfs:label with the full name is
    added; every attribute is also stored under its raw attribute-name URI.
    """
    root = ET.fromstring(xml_string)
    store = Graph()
    # Bind a few prefix, namespace pairs.
    # FIX: the dc namespace previously read "http://http://purl.org/..."
    # (doubled scheme), which produced an invalid prefix binding.
    store.bind("dc", "http://purl.org/dc/elements/1.1/")
    store.bind("foaf", "http://xmlns.com/foaf/0.1/")
    # Create a namespace object for the Friend of a friend namespace.
    FOAF = Namespace("http://xmlns.com/foaf/0.1/")
    # Subject for the current user.
    user = URIRef("#me")
    store.add((user, RDF.type, FOAF["Person"]))
    # full_name holds (givenName, surName); the label is emitted as soon as
    # the second half arrives, whichever order the attributes appear in.
    full_name = [None, None]
    for child in root:
        if "AttributeStatement" in child.tag:
            for child1 in child:
                for child2 in child1:
                    child1_name_attrib = child1.attrib.get("Name")
                    if "givenName" in child1_name_attrib:
                        full_name[0] = child2.text
                        if full_name[1] is not None:
                            store.add(
                                (user, URIRef(RDFS.label),
                                 Literal(full_name[0] + ' ' + full_name[1])))
                    elif "sn" in child1_name_attrib:
                        full_name[1] = child2.text
                        if full_name[0] is not None:
                            store.add(
                                (user, URIRef(RDFS.label),
                                 Literal(full_name[0] + ' ' + full_name[1])))
                    store.add((user, URIRef(child1_name_attrib),
                               Literal(child2.text)))
    # The unused serialize() round-trip was dropped: the graph itself is
    # the return value.
    return store
def lod2graph_mapping(file_out, types, properties, relations, names):
    """Write an n3 mapping file declaring each predicate's lod2graph role.

    Every predicate in *types*/*properties*/*relations*/*names* becomes a
    subproperty of the matching lod2graph category and gets a short
    human-readable ns:name literal.
    """
    g = Graph()
    g.bind("lod2graph", ns)
    # The four inputs only differ in which category they map to, so drive
    # the identical triple pattern from one table (same output order).
    for category, preds in (("type", types),
                            ("property", properties),
                            ("relation", relations),
                            ("name", names)):
        for pred in preds:
            g.add((pred, RDFS.subPropertyOf, ns[category]))
            g.add((pred, ns["name"], Literal(uri2short(pred))))
    g.serialize(file_out, format="n3")
    # FIX: typo "writen" -> "written" in the log message.
    logger.info("\nMapping file written to: " + os.path.abspath(file_out))
def read_file(self, path_to_file, format="xml"):
    """Parse the SKOS file at *path_to_file* and return the parsed graph.

    Raises Exception (with the parser failure chained as the cause) when
    the file cannot be parsed.
    """
    rdf_graph = Graph()
    # bind the namespaces
    rdf_graph.bind("arches", ARCHES)
    try:
        rdf = rdf_graph.parse(source=path_to_file, format=format)
    except Exception as e:
        # Narrowed from a bare "except:" (which would also trap
        # KeyboardInterrupt/SystemExit) and chained for debuggability.
        raise Exception("Error occurred while parsing the file %s" %
                        path_to_file) from e
    return rdf
def read_file(self, path_to_file, format='xml'): """ parse the skos file and extract all available data """ rdf_graph = Graph() #bind the namespaces rdf_graph.bind('arches',ARCHES) start = time() try: rdf = rdf_graph.parse(source=path_to_file, format=format) print 'time elapsed to parse rdf graph %s s'%(time()-start) except: raise Exception('Error occurred while parsing the file %s' % path_to_file) return rdf
def concise_bounded_description(graph, uri):
    """
    Given a particular node (the starting node) in a particular RDF graph
    (the source graph), a subgraph of that particular graph, taken to
    comprise a concise bounded description of the resource denoted by the
    starting node, can be identified as follows:

    * Include in the subgraph all statements in the source graph where the
      subject of the statement is the starting node;

    * Recursively, for all statements identified in the subgraph thus far
      having a blank node object, include in the subgraph all statements in
      the source graph where the subject of the statement is the blank node
      in question and which are not already included in the subgraph.

    * Recursively, for all statements included in the subgraph thus far,
      for all reifications of each statement in the source graph, include
      the concise bounded description beginning from the rdf:Statement node
      of each reification.
    """
    subgraph = Graph()
    # Carry prefix bindings over without clobbering existing ones.
    for prefix, namespace in graph.namespaces():
        subgraph.bind(prefix, namespace, override=False)
    blank_nodes = []
    for p, o in graph.predicate_objects(uri):
        subgraph.add((uri, p, o))
        if isinstance(o, BNode):
            blank_nodes.append(o)
    # FIX: track visited bnodes so a cycle of blank nodes cannot loop
    # forever (the original re-enqueued nodes indefinitely).  The leftover
    # Python 2 debug prints were removed as well.
    seen = set()
    while blank_nodes:
        s = blank_nodes.pop()
        if s in seen:
            continue
        seen.add(s)
        for p, o in graph.predicate_objects(s):
            subgraph.add((s, p, o))
            if isinstance(o, BNode):
                blank_nodes.append(o)
    # Pull in reifications of statements about the starting node.
    for s in graph.subjects(RDF.subject, uri):
        for p, o in graph.predicate_objects(s):
            subgraph.add((s, p, o))
    return subgraph
def parsefoaf(self, location, pub, topic, callback):
    """
    Method: parsefoaf(location)
    @param location: Either the location or the foaf profile as a string

    Parses the foaf profile and provides the URI of the person who is
    represented in the FOAF.  Returns the graph converted to triples.

    TODO: Before the foaf triples are sent, need to check whether the
    publisher or the subscriber are already in the rdf store.
    """
    store = Graph()
    # FIX: the dc namespace previously read "http://http://purl.org/..."
    # (doubled scheme), an invalid URI.
    store.bind("dc", "http://purl.org/dc/elements/1.1/")
    store.bind("foaf", "http://xmlns.com/foaf/0.1/")
    foaf = get_private_uri(location, HUB_CERTIFICATE, HUB_KEY)
    store.parse(data=foaf, format="application/rdf+xml")
    # Find the person the profile is primarily about.
    qres = store.query("""SELECT DISTINCT ?a WHERE {
        ?a a <http://xmlns.com/foaf/0.1/Person> .
        ?b <http://xmlns.com/foaf/0.1/primaryTopic> ?a . }""")
    person_URI = ''
    # NOTE(review): if the query yields several rows only the last one is
    # kept — preserved from the original; confirm this is intended.
    for row in qres.result:
        person_URI = row
    # Skip re-sending the profile when it is already stored.  To speed up
    # the execution we could keep a cache of known person_URIs.
    logging.info(
        "Checking whether foaf: %s is already present in the RDF store",
        person_URI)
    if self.triple_store.foaf_exists(person_URI):
        store = Graph()
        logging.info("foaf: %s is already present in the RDF store",
                     person_URI)
    # Add the rest of the required triples to the graph
    store = self.addTriples(store, person_URI, pub, topic, callback)
    # Transform the graph to triples
    triples = self.to_tuples(store, location)
    return triples
def traverse(s, depth=0, graph=None):
    """Copy triples about *s* from ``cg`` into *graph*, recursing into the
    objects of those triples up to *depth* levels deep.

    A fresh graph with the configured namespace bindings is created when
    none is supplied; the (possibly shared) graph is returned.
    """
    if graph is None:
        graph = Graph()
        for prefix, uri in settings.NAMESPACES.items():
            graph.bind(prefix, uri)
    graph += cg.triples((s, None, None))
    if depth > 0:
        # BUG FIX: the original used map(partial(traverse, ...), ...) for
        # its side effects — map() is lazy on Python 3, so the recursion
        # never ran.  An explicit loop works on both Python 2 and 3.
        for obj in set(graph.objects(subject=s)):
            traverse(obj, depth=depth - 1, graph=graph)
    return graph
class ReductionTestB(unittest.TestCase):
    """A disjunction of complements should reduce, via De Morgan, to the
    complement of a conjunction."""

    def setUp(self):
        self.ontGraph = Graph()
        self.ontGraph.bind('ex', EX_NS)
        self.ontGraph.bind('owl', OWL_NS)
        Individual.factoryGraph = self.ontGraph
        disjunct = (~ EX.alpha) | (~ EX.omega)
        self.foo = EX.foo
        disjunct += self.foo

    def testHiddenDemorgan(self):
        # failUnless is a deprecated alias, removed in Python 3.12.
        NormalFormReduction(self.ontGraph)
        self.assertTrue(first(self.foo.subClassOf).complementOf,
                        "should be the negation of a boolean class")
        innerC = CastClass(first(self.foo.subClassOf).complementOf)
        self.assertTrue(isinstance(innerC, BooleanClass) and \
                        innerC._operator == OWL_NS.intersectionOf,
                        "should be the negation of a conjunct")
        self.assertEqual(repr(innerC), "( ex:alpha and ex:omega )")
class ReductionTestB(unittest.TestCase):
    """A disjunction of complements should reduce, via De Morgan, to the
    complement of a conjunction (duplicate variant of this suite)."""

    def setUp(self):
        self.ontGraph = Graph()
        self.ontGraph.bind('ex', EX_NS)
        self.ontGraph.bind('owl', OWL_NS)
        Individual.factoryGraph = self.ontGraph
        disjunct = (~ EX.alpha) | (~ EX.omega)
        self.foo = EX.foo
        disjunct += self.foo

    def testHiddenDemorgan(self):
        # failUnless is a deprecated alias, removed in Python 3.12.
        NormalFormReduction(self.ontGraph)
        self.assertTrue(first(self.foo.subClassOf).complementOf,
                        "should be the negation of a boolean class")
        innerC = CastClass(first(self.foo.subClassOf).complementOf)
        self.assertTrue(isinstance(innerC, BooleanClass) and
                        innerC._operator == OWL_NS.intersectionOf,
                        "should be the negation of a conjunct")
        self.assertEqual(repr(innerC), "( ex:alpha and ex:omega )")
def command(self):
    """Read one mailbox address per line from the input and emit a FOAF
    Group whose members carry only mbox_sha1sum hashes, serialized to the
    output in the configured format.

    "-" selects stdin/stdout for the respective stream.  Fixes a resource
    leak: explicitly opened files are now closed.
    """
    if self.opts.infile == "-":
        infile = sys.stdin
    else:
        infile = open(self.opts.infile)
    if self.opts.outfile == "-":
        outfile = sys.stdout
    else:
        outfile = open(self.opts.outfile, "w+")
    try:
        # Base URI: explicit option first, then the output file name,
        # finally a fixed example namespace.
        if self.opts.base:
            base = self.opts.base
        elif self.opts.outfile != "-":
            base = self.opts.outfile
        else:
            base = "http://example.org/mboxlist"
        if not base.endswith("#"):
            base = base + "#"
        BASE = Namespace(base)
        g = Graph()
        g.bind("foaf", FOAF)
        g.add((BASE["mboxlist"], RDF["type"], FOAF["Group"]))
        if self.opts.maker:
            maker = URIRef(self.opts.maker)
            g.add((BASE["mboxlist"], FOAF["maker"], maker))
        for line in infile:  # iterate lazily instead of readlines()
            mbox = line.strip()
            if not mbox:
                continue
            member = BNode()
            g.add((BASE["mboxlist"], FOAF["member"], member))
            g.add((member, RDF["type"], FOAF["Agent"]))
            # Publish only the hash of the mailto: URI, per FOAF practice.
            sha1sum = Literal(sha1("mailto:" + mbox).hexdigest())
            g.add((member, FOAF["mbox_sha1sum"], sha1sum))
        g.serialize(outfile, format=self.opts.format)
    finally:
        # Close only the handles we opened; leave stdin/stdout alone.
        if infile is not sys.stdin:
            infile.close()
        if outfile is not sys.stdout:
            outfile.close()
def skos_graph():
    """Return a fresh Graph with the SKOS-related prefixes pre-bound."""
    graph = Graph()
    bindings = (
        ('dcterms', DCTERMS),
        ('foaf', FOAF),
        ('skos', SKOS),
        ('owl', OWL),
    )
    for prefix, namespace in bindings:
        graph.bind(prefix, namespace)
    return graph
def team_index(request, format=None):
    """Render the team list as HTML or RDF, content-negotiated via the
    Accept header when *format* is not forced by the caller."""
    logging.info("Format: %s" % format)
    if format is None:  # identity check per PEP 8 (was "== None")
        best_match = mimeparse.best_match(
            ['application/rdf+xml', 'application/rdf+n3', 'text/html'],
            request.META['HTTP_ACCEPT'])
        if best_match == 'application/rdf+xml':
            format = 'rdf+xml'
        elif best_match == 'application/rdf+nt':
            # NOTE(review): best_match can only return values from the
            # supported list above, which offers 'application/rdf+n3', so
            # this branch is currently unreachable — one of the two
            # spellings looks wrong; confirm which was intended.
            format = 'rdf+nt'
        else:
            format = 'html'
    team_list = College.objects.filter(updated=True).order_by('name')
    if format != 'html':
        store = Graph()
        store.bind("cfb", "http://www.cfbreference.com/cfb/0.1/")
        CFB = Namespace("http://www.cfbreference.com/cfb/0.1/")
        for current_team in team_list:
            team = BNode()
            store.add((team, RDF.type, CFB["Team"]))
            store.add((team, CFB["name"], Literal(current_team.name)))
            store.add((team, CFB["link"],
                       Literal(current_team.get_absolute_url())))
        if format == 'rdf+xml':
            return HttpResponse(store.serialize(format="pretty-xml"),
                                mimetype='application/rdf+xml')
        if format == 'rdf+nt':
            return HttpResponse(store.serialize(format="nt"),
                                mimetype='application/rdf+nt')
    return render_to_response('college/teams.html', {'team_list': team_list})
class RDFTestCase(unittest.TestCase):
    """Round-trip a small FOAF graph through pretty-xml serialization and
    check the result is isomorphic to the original store."""

    backend = 'default'  # rdflib store plugin name
    path = 'store'       # location passed to Graph.open()

    def setUp(self):
        self.store = Graph(store=self.backend)
        self.store.open(self.path)
        # FIX: the dc namespace previously read "http://http://purl.org/..."
        # (doubled scheme), an invalid URI.
        self.store.bind("dc", "http://purl.org/dc/elements/1.1/")
        self.store.bind("foaf", "http://xmlns.com/foaf/0.1/")

    def tearDown(self):
        self.store.close()

    def addDonna(self):
        # Helper, not a test: populates the store with one foaf:Person.
        self.donna = donna = BNode()
        self.store.add((donna, RDF.type, FOAF["Person"]))
        self.store.add((donna, FOAF["nick"], Literal("donna")))
        self.store.add((donna, FOAF["name"], Literal("Donna Fales")))

    def testRDFXML(self):
        self.addDonna()
        g = Graph()
        g.parse(data=self.store.serialize(format="pretty-xml"))
        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(self.store.isomorphic(g), True)
class GrafoFOAF:
    """Small wrapper around an rdflib Graph for building FOAF data."""

    def __init__(self):
        # Start from an empty graph and register the FOAF prefix on it.
        self.grafo = Graph()
        self.grafo.bind(qualificador_foaf, namespace_foaf)

    def adicionarPessoa(self, nomeDaPessoa, idPessoa):
        """Add two triples for a person:
        <idPessoa, rdf:type, foaf:Person> and
        <idPessoa, foaf:name, nomeDaPessoa>.

        Returns the (subject, predicate, object) of the name triple.
        """
        # Subject identifier for the person (an MNS resource, not a BNode).
        subject = MNS[idPessoa]
        self.grafo.add((subject, RDF.type, FOAF["Person"]))
        # The name is stored as an MNS resource rather than a Literal,
        # preserving the original module's convention.
        name_triple = (subject, FOAF["name"], MNS[nomeDaPessoa])
        self.grafo.add(name_triple)
        return name_triple

    def s_knows_o(self, s, o):
        """Record that *s* knows *o*: adds the triple <s, foaf:knows, o>."""
        return self.grafo.add((s, FOAF["knows"], o))
def get_graph(self):
    """Parse ``self.file`` (turtle) plus every file in
    ``self.import_files`` into one graph, then re-bind the canonical
    namespace prefixes from ``namespace_names``.

    Cleanup over the original: the commented-out url-vs-path reading code
    and the obsolete owl#-prefix workaround for
    https://github.com/RDFLib/rdflib/issues/379 were removed — a plain
    parse is sufficient with a fixed rdflib.
    """
    owl_graph = Graph()
    owl_graph.parse(self.file, format='turtle')
    if self.import_files:
        for import_file in self.import_files:
            # Each import is parsed into its own graph and merged in.
            import_graph = Graph()
            import_graph.parse(import_file, format='turtle')
            owl_graph = owl_graph + import_graph
    # Overwrite namespaces so serializations use our preferred prefixes.
    for name, namespace in namespace_names.items():
        owl_graph.bind(name, namespace)
    return owl_graph
def write(self, concept_graph, format='pretty-xml'):
    """Serialize *concept_graph* (which must be a ConceptScheme) to SKOS RDF
    in the given rdflib serialization *format*; raises for any other node
    type.  Labels/notes map to SKOS (or DCTERMS for the scheme itself);
    every other value type becomes an arches:<type> literal."""
    #get empty RDF graph
    rdf_graph = Graph()
    #define namespaces
    ARCHES = Namespace('http://www.archesproject.org/')
    #bind the namespaces
    rdf_graph.bind('arches',ARCHES)
    rdf_graph.bind('skos',SKOS)
    rdf_graph.bind('dcterms',DCTERMS)
    """
    #add main concept to the graph
    rdf_graph.add((subject, predicate, object))
    rdf_graph.add((ARCHES[node.id], RDF['type'], SKOS.Concept))
    rdf_graph.add((Arches guid, SKOS.prefLabel, Literal('Stone',lang=en)))
    """
    if concept_graph.nodetype == 'ConceptScheme':
        scheme_id = concept_graph.id

        # Closure applied to every node by concept_graph.traverse(): emits
        # the node's scheme membership, its sub/related links, its values
        # and finally its rdf:type.
        def build_skos(node):
            if node.nodetype == 'Concept':
                # Only plain concepts point back at the scheme.
                rdf_graph.add((ARCHES[node.id], SKOS.inScheme, ARCHES[scheme_id]))
            for subconcept in node.subconcepts:
                rdf_graph.add((ARCHES[node.id], SKOS[subconcept.relationshiptype], ARCHES[subconcept.id]))
            for relatedconcept in node.relatedconcepts:
                rdf_graph.add((ARCHES[node.id], SKOS[relatedconcept.relationshiptype], ARCHES[relatedconcept.id]))
            for value in node.values:
                if value.category == 'label' or value.category == 'note':
                    if node.nodetype == 'ConceptScheme':
                        # Scheme-level labels/notes become Dublin Core terms.
                        if value.type == 'prefLabel':
                            rdf_graph.add((ARCHES[node.id], DCTERMS.title, Literal(value.value, lang = value.language)))
                        elif value.type == 'scopeNote':
                            rdf_graph.add((ARCHES[node.id], DCTERMS.description, Literal(value.value, lang = value.language)))
                    else:
                        rdf_graph.add((ARCHES[node.id], SKOS[value.type], Literal(value.value, lang = value.language)))
                else:
                    # Everything else is emitted under the arches namespace,
                    # with spaces in the type name mapped to underscores.
                    rdf_graph.add((ARCHES[node.id], ARCHES[value.type.replace(' ', '_')], Literal(value.value, lang = value.language)))
            rdf_graph.add((ARCHES[node.id], RDF.type, SKOS[node.nodetype]))
        concept_graph.traverse(build_skos)
        return rdf_graph.serialize(format=format)
    else:
        raise Exception('Only ConceptSchemes can be written to SKOS RDF files.')
def write(self, concept_graphs, format="pretty-xml"):
    """Serialize one or more concept graphs to SKOS RDF.

    Accepts a single graph or a list.  ConceptSchemes/Concepts are emitted
    as a skos hierarchy (a Concept is resolved to its enclosing scheme);
    Collections as skos:Collection trees.  Each value is wrapped in a JSON
    literal carrying both the value and its id.  Any other node type
    raises.  Returns the serialized string in the rdflib *format*."""
    serializer = JSONSerializer()
    # get empty RDF graph
    rdf_graph = Graph()
    # bind the namespaces
    rdf_graph.bind("arches", ARCHES)
    rdf_graph.bind("skos", SKOS)
    rdf_graph.bind("dcterms", DCTERMS)
    """
    #add main concept to the graph
    rdf_graph.add((subject, predicate, object))
    rdf_graph.add((ARCHES[node.id], RDF['type'], SKOS.Concept))
    rdf_graph.add((Arches guid, SKOS.prefLabel, Literal('Stone',lang=en)))
    """
    if not isinstance(concept_graphs, list):
        concept_graphs = [concept_graphs]
    for concept_graph in concept_graphs:
        if (concept_graph.nodetype == "ConceptScheme"
                or concept_graph.nodetype == "Concept"):
            scheme_id = concept_graph.id
            if concept_graph.nodetype == "Concept":
                # A bare Concept is attached to its enclosing scheme, if any.
                scheme = concept_graph.get_scheme()
                scheme_id = scheme.id if scheme is not None else None

            # Closure applied to every node by concept_graph.traverse().
            def build_skos(node):
                if node.nodetype == "Concept":
                    rdf_graph.add((ARCHES[node.id], SKOS.inScheme, ARCHES[scheme_id]))
                for subconcept in node.subconcepts:
                    rdf_graph.add((
                        ARCHES[node.id],
                        SKOS[subconcept.relationshiptype],
                        ARCHES[subconcept.id],
                    ))
                for relatedconcept in node.relatedconcepts:
                    rdf_graph.add((
                        ARCHES[node.id],
                        SKOS[relatedconcept.relationshiptype],
                        ARCHES[relatedconcept.id],
                    ))
                for value in node.values:
                    # Values travel as JSON literals so the value id survives
                    # the round trip.
                    jsonLiteralValue = serializer.serialize({
                        "value": value.value,
                        "id": value.id
                    })
                    if value.category == "label" or value.category == "note":
                        if node.nodetype == "ConceptScheme":
                            if value.type == "prefLabel":
                                # TODO: remove lowercasing of value.language once the pyld module
                                # can accept mixedcase language tags
                                rdf_graph.add((
                                    ARCHES[node.id],
                                    DCTERMS.title,
                                    Literal(
                                        jsonLiteralValue,
                                        lang=value.language.lower(),
                                    ),
                                ))
                            elif value.type == "scopeNote":
                                rdf_graph.add((
                                    ARCHES[node.id],
                                    DCTERMS.description,
                                    Literal(
                                        jsonLiteralValue,
                                        lang=value.language.lower(),
                                    ),
                                ))
                        else:
                            rdf_graph.add((
                                ARCHES[node.id],
                                SKOS[value.type],
                                Literal(
                                    jsonLiteralValue,
                                    lang=value.language.lower(),
                                ),
                            ))
                    elif value.type == "identifier":
                        rdf_graph.add((
                            ARCHES[node.id],
                            DCTERMS.identifier,
                            Literal(jsonLiteralValue,
                                    lang=value.language.lower()),
                        ))
                    else:
                        # Everything else under the arches namespace; spaces in
                        # the type name become underscores.
                        rdf_graph.add((
                            ARCHES[node.id],
                            ARCHES[value.type.replace(" ", "_")],
                            Literal(jsonLiteralValue,
                                    lang=value.language.lower()),
                        ))
                rdf_graph.add(
                    (ARCHES[node.id], RDF.type, SKOS[node.nodetype]))
            concept_graph.traverse(build_skos)
        elif concept_graph.nodetype == "Collection":
            scheme_id = concept_graph.id

            # Closure applied to every node of the collection tree.
            def build_skos(node):
                for subconcept in node.subconcepts:
                    rdf_graph.add((
                        ARCHES[node.id],
                        SKOS[subconcept.relationshiptype],
                        ARCHES[subconcept.id],
                    ))
                rdf_graph.add(
                    (ARCHES[node.id], RDF.type, SKOS[node.nodetype]))
                if node.nodetype == "Collection":
                    # Only the collection root carries labels/notes.
                    for value in node.values:
                        if value.category == "label" or value.category == "note":
                            jsonLiteralValue = serializer.serialize({
                                "value": value.value,
                                "id": value.id
                            })
                            rdf_graph.add((
                                ARCHES[node.id],
                                SKOS[value.type],
                                Literal(
                                    jsonLiteralValue,
                                    lang=value.language.lower(),
                                ),
                            ))
            concept_graph.traverse(build_skos)
        else:
            raise Exception(
                "Only ConceptSchemes and Collections can be written to SKOS RDF files."
            )
    return rdf_graph.serialize(format=format)
def plum_x(self, paper: Paper) -> Graph:
    """Fetch PlumX altmetric counts for *paper* from the Scopus PlumX API
    and return them as an indicator graph.

    Each count category becomes a top-level indicator with an
    IndicatorValue; each of its count_types becomes a sub-indicator plus a
    "<name> source" sub-sub-indicator.  On any failure (HTTP error, bad
    JSON, missing data) the error is logged and the graph containing just
    the document node is returned.
    """
    g = Graph()
    g.bind("sd", ScholalryData)
    g.bind("iont", IOnt)
    g.bind("covid", AltmetricsCOVID)
    doi = paper.get_doi()
    timestamp = paper.get_timestamp()
    paper = URIRef("https://doi.org/" + doi)
    g.add((paper, RDF.type, ScholalryData.Document))
    g.add((paper, DC.created, Literal(timestamp)))
    headers = {
        'X-ELS-Insttoken': self.__insttoken,
        'X-ELS-APIKey': self.__api_key
    }
    endpoint = ScopusAPIClient.PLUMX_ENDPOINT + "/%s"
    params = doi
    request = urllib.request.Request(endpoint % params, headers=headers)
    try:
        response = urllib.request.urlopen(request)
        output = response.read()
        if output is not None:
            # FIX: the payload was previously parsed twice; parse it once.
            js = json.loads(output)
            if "count_categories" in js:
                cats = js["count_categories"]
                for cat in cats:
                    name = cat["name"].lower()
                    total = cat["total"]
                    indicator = URIRef(AltmetricsCOVID + doi + "_" + name)
                    g.add((paper, IOnt.hasIndicator, indicator))
                    g.add((indicator, RDF.type, IOnt.Indicator))
                    g.add((indicator, RDFS.label, Literal(cat["name"])))
                    g.add((indicator, IOnt.hasSource, AltmetricsCOVID.plumx))
                    g.add((indicator, IOnt.basedOnMetric,
                           URIRef(AltmetricsCOVID["name"])))
                    g.add((URIRef(AltmetricsCOVID["name"]), RDF.type,
                           IOnt.Metric))
                    g.add((indicator, IOnt.hasIndicatorValue,
                           URIRef(AltmetricsCOVID + doi + "_" + name +
                                  "_value")))
                    g.add((URIRef(AltmetricsCOVID + doi + "_" + name +
                                  "_value"), RDF.type, IOnt.IndicatorValue))
                    g.add((URIRef(AltmetricsCOVID + doi + "_" + name +
                                  "_value"), IOnt.indicatorValue,
                           Literal(total, datatype=XSD.integer)))
                    if "count_types" in cat:
                        for m in cat["count_types"]:
                            name_2 = m["name"].lower()
                            total_2 = m["total"]
                            level_2_indicator = URIRef(AltmetricsCOVID + doi +
                                                       "_" + name_2)
                            g.add((level_2_indicator, RDF.type,
                                   IOnt.Indicator))
                            g.add((level_2_indicator, RDFS.label,
                                   Literal(m["name"])))
                            # NOTE(review): these source/metric triples are
                            # attached to the *parent* indicator, not to
                            # level_2_indicator — preserved as-is, but it
                            # looks like it may have been intended for the
                            # sub-indicator; confirm.
                            g.add((indicator, IOnt.hasSource,
                                   AltmetricsCOVID.plumx))
                            g.add((indicator, IOnt.hasSubIndicator,
                                   level_2_indicator))
                            g.add((indicator, IOnt.basedOnMetric,
                                   URIRef(AltmetricsCOVID["name"])))
                            g.add((level_2_indicator, IOnt.hasIndicatorValue,
                                   URIRef(AltmetricsCOVID + doi + "_" +
                                          name_2 + "_value")))
                            g.add((URIRef(AltmetricsCOVID + doi + "_" +
                                          name_2 + "_value"), RDF.type,
                                   IOnt.IndicatorValue))
                            g.add((URIRef(AltmetricsCOVID + doi + "_" +
                                          name_2 + "_value"),
                                   IOnt.indicatorValue,
                                   Literal(total_2, datatype=XSD.integer)))
                            level_3_indicator = URIRef(AltmetricsCOVID + doi +
                                                       "_" + name_2 +
                                                       "_source")
                            g.add((level_3_indicator, RDF.type,
                                   IOnt.Indicator))
                            g.add((level_3_indicator, RDFS.label,
                                   Literal(m["name"] + " source")))
                            g.add((indicator, IOnt.hasSource,
                                   AltmetricsCOVID.plumx))
                            g.add((level_2_indicator, IOnt.hasSubIndicator,
                                   level_3_indicator))
                            g.add((indicator, IOnt.basedOnMetric,
                                   URIRef(AltmetricsCOVID["name"])))
                            g.add((level_3_indicator, IOnt.hasIndicatorValue,
                                   URIRef(AltmetricsCOVID + doi + "_" +
                                          name_2 + "_value")))
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
        # propagate; any API/parse failure is only logged.
        log.error("No altmetrics available for paper %s." % doi)
    return g
def citation_count(self, paper: Paper) -> Graph:
    """Fetch the Scopus citation count for *paper* and return it as an
    indicator graph.

    Builds a three-level indicator chain (Citations -> Citations indexes ->
    Scopus citation count) that all share one IndicatorValue.  On any
    failure (HTTP error, bad JSON, missing fields) the error is logged and
    the graph containing just the document node is returned.
    """
    g = Graph()
    g.bind("sd", ScholalryData)
    g.bind("iont", IOnt)
    g.bind("covid", AltmetricsCOVID)
    doi = paper.get_doi()
    timestamp = paper.get_timestamp()
    paper = URIRef("https://doi.org/" + doi)
    g.add((paper, RDF.type, ScholalryData.Document))
    g.add((paper, DC.created, Literal(timestamp)))
    headers = {
        'X-ELS-Insttoken': self.__insttoken,
        'X-ELS-APIKey': self.__api_key
    }
    endpoint = ScopusAPIClient.CITATION_COUNT_ENDPOINT + "?%s"
    params = {'doi': doi}
    params = urllib.parse.urlencode(params)
    request = urllib.request.Request(endpoint % params, headers=headers)
    try:
        response = urllib.request.urlopen(request)
        output = response.read()
        if output is not None:
            js = json.loads(output)
            if "citation-count-response" in js:
                ccr = js["citation-count-response"]
                if "document" in ccr:
                    docu = ccr["document"]
                    if "citation-count" in docu:
                        citation_count = docu["citation-count"]
                        # Level 1: the aggregate "Citations" indicator.
                        indicator = URIRef(AltmetricsCOVID + doi +
                                           "_citations")
                        g.add((paper, IOnt.hasIndicator, indicator))
                        g.add((indicator, RDF.type, IOnt.Indicator))
                        g.add((indicator, RDFS.label, Literal("Citations")))
                        g.add((indicator, IOnt.hasSource,
                               AltmetricsCOVID.scopus))
                        g.add((indicator, IOnt.basedOnMetric,
                               AltmetricsCOVID.citation_count))
                        g.add((AltmetricsCOVID.citation_count, RDF.type,
                               IOnt.Metric))
                        g.add((indicator, IOnt.hasIndicatorValue,
                               URIRef(AltmetricsCOVID + doi +
                                      "_citations_value")))
                        g.add((URIRef(AltmetricsCOVID + doi +
                                      "_citations_value"), RDF.type,
                               IOnt.IndicatorValue))
                        g.add((URIRef(AltmetricsCOVID + doi +
                                      "_citations_value"),
                               IOnt.indicatorValue,
                               Literal(citation_count,
                                       datatype=XSD.integer)))
                        # Level 2: citation indexes grouping.
                        level_2_indicator = URIRef(AltmetricsCOVID + doi +
                                                   "_citation-indexes")
                        g.add((level_2_indicator, RDF.type, IOnt.Indicator))
                        g.add((level_2_indicator, RDFS.label,
                               Literal("Citations indexes")))
                        g.add((level_2_indicator, IOnt.hasSource,
                               AltmetricsCOVID.scopus))
                        g.add((indicator, IOnt.hasSubIndicator,
                               level_2_indicator))
                        g.add((level_2_indicator, IOnt.basedOnMetric,
                               AltmetricsCOVID.citation_count))
                        g.add((AltmetricsCOVID.citation_count, RDF.type,
                               IOnt.Metric))
                        g.add((level_2_indicator, IOnt.hasIndicatorValue,
                               URIRef(AltmetricsCOVID + doi +
                                      "_citations_value")))
                        # Level 3: the concrete Scopus count.
                        level_3_indicator = URIRef(AltmetricsCOVID + doi +
                                                   "_scopus")
                        g.add((level_3_indicator, RDF.type, IOnt.Indicator))
                        g.add((level_3_indicator, RDFS.label,
                               Literal("Scopus citation count")))
                        g.add((level_3_indicator, IOnt.hasSource,
                               AltmetricsCOVID.scopus))
                        g.add((level_2_indicator, IOnt.hasSubIndicator,
                               level_3_indicator))
                        g.add((level_3_indicator, IOnt.basedOnMetric,
                               AltmetricsCOVID.citation_count))
                        g.add((AltmetricsCOVID.citation_count, RDF.type,
                               IOnt.Metric))
                        g.add((level_3_indicator, IOnt.hasIndicatorValue,
                               URIRef(AltmetricsCOVID + doi +
                                      "_citations_value")))
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
        # propagate; any API/parse failure is only logged.
        log.error("No citation count available for paper %s." % doi)
    return g