class NegationOfAtomicConcept(unittest.TestCase):
    """Stratified-model computation for the negation of an atomic OWL class."""

    def setUp(self):
        # Fresh ontology graph with the test prefixes; every Individual
        # created in the test is rooted in this graph.
        self.ontGraph = Graph()
        self.ontGraph.bind('ex', EX_NS)
        self.ontGraph.bind('owl', OWL_NS)
        Individual.factoryGraph = self.ontGraph

    def testAtomicNegation(self):
        """~ex:Bar yields a single negative-strata rule and the right extent."""
        bar = EX.Bar
        baz = ~bar  # complement class of ex:Bar
        baz.identifier = EX_NS.Baz
        ruleStore, ruleGraph, network = SetupRuleStore(makeNetwork=True)
        individual = BNode()
        individual2 = BNode()
        # individual is NOT a Bar; individual2 IS a Bar.
        (EX.OtherClass).extent = [individual]
        bar.extent = [individual2]
        NormalFormReduction(self.ontGraph)
        self.assertEqual(repr(baz), "Class: ex:Baz DisjointWith ex:Bar\n")
        posRules, negRules = CalculateStratifiedModel(network, self.ontGraph, [EX_NS.Foo])
        self.failUnless(not posRules, "There should be no rules in the 0 strata!")
        self.failUnless(len(negRules) == 1,
                        "There should only be one negative rule in a higher strata")
        self.assertEqual(repr(negRules[0]),
                         "Forall ?X ( ex:Baz(?X) :- not ex:Bar(?X) )")
        # Membership is read back from the inferred facts.
        baz.graph = network.inferredFacts
        self.failUnless(individual in baz.extent,
                        "%s should be a member of ex:Baz" % individual)
        self.failUnless(individual2 not in baz.extent,
                        "%s should *not* be a member of ex:Baz" % individual2)
def _parse_rdf(self, file): """ Returns a case from the given file. """ store = Graph() store.parse(file) print len(store)
def _getDataGraph(self):
    """Load self.storeUri as N3 and return the graph.

    If the store file is missing/unreachable, an empty graph is
    returned so the caller can start a new store.
    """
    g = Graph()
    try:
        g.parse(self.storeUri, format="n3")
    except urllib2.URLError:
        # Best-effort: absent store is not an error, just start fresh.
        print "%s file missing- starting a new one" % self.storeUri
    return g
def __init__(self, fname):
    """Read XMP metadata from *fname*, dispatching on its file extension."""
    self.graph = Graph()
    extension = os.path.splitext(fname)[1].lower()
    looks_like_image = extension in IMAGE_EXTENSIONS
    if looks_like_image:
        self.parseImageHeader(fname)
    else:
        self.parseXmp(fname)
    self.xmpName = fname
def setUp(self):
    """Prepare the ontology graph and build a nested conjunction fixture."""
    graph = Graph()
    for prefix, namespace in (('ex', EX_NS), ('owl', OWL_NS)):
        graph.bind(prefix, namespace)
    self.ontGraph = graph
    Individual.factoryGraph = graph
    # alpha AND (omega AND gamma)
    inner = EX.omega & EX.gamma
    self.topLevelConjunct = EX.alpha & inner
def handleImport(self):
    """Process RIF Import directives found in self.graph.

    Returns (additionalFacts, additionalRules): RDF statements pulled in
    from imported documents, plus OWL-2-RL rules when an OWL-Direct
    profile import is embedded and self.owlEmbeddings is enabled.
    """
    additionalRules = set()
    additionalFacts = set()
    for location, profile in self.graph.query(IMPORT_PARTS, initNs=rif_namespaces):
        graph = []
        if profile == ENT.RDF:
            # Plain RDF import: merge the remote statements as facts.
            graph = Graph().parse(location)
            additionalFacts.update(graph)
            if self.debug:
                print "Importing RDF referenced from RIF document"
        if profile == ENT['OWL-Direct'] and self.owlEmbeddings:
            # OWL-Direct import: also compile the ontology into DLP rules.
            rule_store, rule_graph, network = SetupRuleStore(makeNetwork=True)
            graph = Graph().parse(location)
            additionalFacts.update(graph)
            additionalRules.update(network.setupDescriptionLogicProgramming(
                graph, addPDSemantics=False, constructNetwork=False))
            if self.debug:
                print "Embedded %s rules from %s (imported OWL 2 RL)" % (
                    len(additionalRules), location)
                # NOTE(review): original indentation was ambiguous -- this
                # summary print may have been at loop level; confirm.
                print "Added %s RDF statements from RDF Graph" % (len(graph))
    return additionalFacts, additionalRules
class TestGraphGraphPattern(unittest.TestCase):
    """SPARQL GRAPH-pattern tests (spec sections 8.3.1 / 8.3.2) over an
    aggregate of two named FOAF graphs."""

    def setUp(self):
        # Two named graphs, aggregated read-only so ?src can be bound
        # to the graph a solution came from.
        self.graph1 = Graph(
            identifier=URIRef('http://example.org/foaf/aliceFoaf'))
        self.graph1.parse(StringIO(test_graph_a), format="n3")
        self.graph2 = Graph(
            identifier=URIRef('http://example.org/foaf/bobFoaf'))
        self.graph2.parse(StringIO(test_graph_b), format="n3")
        self.unionGraph = ReadOnlyGraphAggregate(
            graphs=[self.graph1, self.graph2])

    def test_8_3_1(self):
        """Each solution binds ?src to one of the two source graphs."""
        rt = self.unionGraph.query(test_query1, DEBUG=False).serialize("python")
        self.failUnless(len(rt) == 2, "Expected 2 item solution set")
        for src, bobNick in rt:
            self.failUnless(
                src in [
                    URIRef('http://example.org/foaf/aliceFoaf'),
                    URIRef('http://example.org/foaf/bobFoaf')
                ],
                "Unexpected ?src binding :\n %s" % src)
            self.failUnless(bobNick in [Literal("Bobby"), Literal("Robert")],
                            "Unexpected ?bobNick binding :\n %s" % bobNick)

    def test_8_3_2(self):
        """Restricting the GRAPH narrows the solution to a single nick."""
        rt = self.unionGraph.query(test_query2, DEBUG=False).serialize("python")
        self.failUnless(len(rt) == 1, "Expected 1 item solution set")
        self.failUnless(rt[0] == Literal("Robert"),
                        "Unexpected ?nick binding :\n %s" % rt[0])
def setUp(self):
    """Build the RETE network and parse the TBox/ABox fixtures."""
    network_parts = SetupRuleStore(makeNetwork=True)
    self.rule_store, self.rule_graph, self.network = network_parts
    self.tBoxGraph = Graph().parse(StringIO(TBOX), format='n3')
    self.aBoxGraph = Graph().parse(StringIO(ABOX), format='n3')
    # Normalise the TBox before any reasoning takes place.
    NormalFormReduction(self.tBoxGraph)
def testAggregateRaw():
    """Exercise ReadOnlyGraphAggregate over three graphs in one store:
    triples(), __len__, __contains__ and triples_choices."""
    memStore = plugin.get('IOMemory', Store)()
    graph1 = Graph(memStore)
    graph2 = Graph(memStore)
    graph3 = Graph(memStore)
    for n3Str, graph in [(testGraph1N3, graph1),
                         (testGraph2N3, graph2),
                         (testGraph3N3, graph3)]:
        graph.parse(StringIO(n3Str), format='n3')
    G = ReadOnlyGraphAggregate([graph1, graph2, graph3])
    # Test triples
    assert len(list(G.triples((None, RDF.type, None)))) == 4
    assert len(list(G.triples((URIRef("http://test/bar"), None, None)))) == 2
    assert len(list(G.triples((None, URIRef("http://test/d"), None)))) == 3
    # Test __len__ -- union of all three component graphs
    assert len(G) == 8
    # Test __contains__
    assert (URIRef("http://test/foo"), RDF.type, RDFS.Resource) in G
    barPredicates = [URIRef("http://test/d"), RDFS.isDefinedBy]
    # triples_choices fans one pattern out over a list of predicates
    assert len(
        list(
            G.triples_choices(
                (URIRef("http://test/bar"), barPredicates, None)))) == 2
def main(inputFileName, outputFileName=None):
    """
    Given an inputfile and optionally outputfile, create a GraphViz file
    of the NDL inputfile. If no outputfile is given, default to
    inputfilename with rdf replaced with dot. If the file exists, ask
    for user confirmation to overwrite it.

    Fixes: the deprecated ``file()`` builtin is replaced with ``open()``
    used as a context manager, so the output handle is closed even if
    the write fails.
    """
    graph = Graph()
    graph.parse(inputFileName)
    internal, external, locations = getConnections(graph)
    dotStr = dotString(internal, external, locations)
    if not outputFileName:
        outputFileName = inputFileName.replace(".rdf", ".dot")
    if os.path.exists(outputFileName):
        while True:
            arg = raw_input("%s already exists. To replace type 'y' or provide different filename: " % outputFileName)
            # Some input should be given, otherwise repeat the question.
            if arg:
                if arg in "yY":
                    # Overwrite the file
                    break
                # A new name was given, store it
                outputFileName = arg
                # If the new file does not exist we can write it;
                # otherwise ask again.
                if not os.path.exists(arg):
                    break
    with open(outputFileName, 'w') as f:
        f.write(dotStr)
def read_input(self, filenames, infmt):
    """Read the given RDF file(s) and return an rdflib Graph object.

    ``filenames`` may include ``'-'`` for stdin.  ``infmt`` forces a
    parser format; otherwise the format is guessed from the extension
    (default ``xml``).

    Fixes: the bare ``except:`` (which also swallowed SystemExit and
    KeyboardInterrupt) is narrowed to ``Exception``, and opened files
    are now closed via ``finally``.
    """
    rdf = Graph()
    for filename in filenames:
        if filename == '-':
            f = sys.stdin
        else:
            f = open(filename, 'r')
        if infmt:
            fmt = infmt
        else:
            # determine format based on file extension
            fmt = 'xml'  # default
            if filename.endswith(('n3', 'ttl')):
                fmt = 'n3'
            elif filename.endswith('nt'):
                fmt = 'nt'
        logging.debug("Parsing input file %s (format: %s)", filename, fmt)
        try:
            rdf.parse(f, format=fmt)
        except Exception:
            logging.critical("Parsing failed. Exception: %s",
                             str(sys.exc_info()[1]))
            sys.exit(1)
        finally:
            if f is not sys.stdin:
                f.close()
    return rdf
def makeOutputGraph():
    """Return a fresh Graph with the prefix bindings used for output."""
    prefix_table = [
        ('pre', 'http://bigasterisk.com/pre/general/'),
        ('local', 'http://bigasterisk.com/pre/drew/'),  # todo
        ('ad', 'http://bigasterisk.com/pre/general/accountDataType/'),
        ('mt', 'http://bigasterisk.com/pre/general/messageType/'),
    ]
    graph = Graph()
    for prefix, uri in prefix_table:
        graph.bind(prefix, uri)
    return graph
def testConjunction(self):
    """A triple added via one context is visible in the conjunctive graph."""
    self.addStuffInMultipleContexts()
    statement = (self.pizza, self.likes, self.pizza)
    # add to context 1
    contextGraph = Graph(self.graph.store, self.c1)
    contextGraph.add(statement)
    self.assertEquals(len(self.graph), len(contextGraph))
def main(inputFileName, outputFileName=None):
    """
    Given an inputfile and optionally outputfile, create a GraphViz file
    of the NDL inputfile. If no outputfile is given, default to
    inputfilename with rdf replaced with dot. If the file exists, ask
    for user confirmation to overwrite it.

    Fixes: the deprecated ``file()`` builtin is replaced with ``open()``
    used as a context manager, so the output handle is closed even if
    the write fails.
    """
    graph = Graph()
    graph.parse(inputFileName)
    internal, external, locations = getConnections(graph)
    dotStr = dotString(internal, external, locations)
    if not outputFileName:
        outputFileName = inputFileName.replace(".rdf", ".dot")
    if os.path.exists(outputFileName):
        while True:
            arg = raw_input(
                "%s already exists. To replace type 'y' or provide different filename: "
                % outputFileName)
            # Some input should be given, otherwise repeat the question.
            if arg:
                if arg in "yY":
                    # Overwrite the file
                    break
                # A new name was given, store it
                outputFileName = arg
                # If the new file does not exist we can write it;
                # otherwise ask again.
                if not os.path.exists(arg):
                    break
    with open(outputFileName, 'w') as f:
        f.write(dotStr)
def process_file(self, dirname, basename, **kw):
    """Scan a ``.doap`` file for its doap:bug-database and attach it to
    the scanned branch; link GNOME Bugzilla components when applicable."""
    if not basename.endswith('.doap'):
        return
    store = Graph()
    g = store.parse(os.path.join(dirname, basename))
    query = """
    PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
    PREFIX doap: <http://usefulinc.com/ns/doap#>
    SELECT $bug
    WHERE { $project rdf:type doap:Project . $project doap:bug-database $bug }"""
    results = list(g.query(query))
    # Only act when the project declares exactly one bug database.
    if len(results) == 1:
        bug_database = URL.from_str(results[0][0])
        self.scanner.branch.bug_database = unicode(bug_database)
        if bug_database.netloc == 'bugzilla.gnome.org':
            # TODO
            product = bug_database['product'][0]
            # Relate the branch to every Bugzilla component of this product.
            components = pulse.db.Component.select(
                pulse.db.Component.ident.like(
                    'comp/bugzilla.gnome.org/%s/%%' % product))
            for comp in components:
                pulse.db.ModuleComponents.set_related(self.scanner.branch, comp)
def retrieveTestCases(base_uri):
    """Query the master test manifest at ``base_uri + 'manifest.ttl'`` and
    return a list of unit-test tuples, sorted by test number:
    (num, title, input_doc_url, sparql_url, classification, expected_results).

    Fixes: ``== None`` replaced with ``is None``; the hand-rolled
    comparator-based sort replaced with an equivalent ``key=`` sort.
    """
    # query the master test manifest
    q = """
    PREFIX test: <http://www.w3.org/2006/03/test-description#>
    PREFIX dc: <http://purl.org/dc/elements/1.1/>
    SELECT ?t ?title ?classification ?expected_results
    FROM <%s>
    WHERE {
        ?t dc:title ?title .
        ?t test:classification ?classification .
        OPTIONAL { ?t test:expectedResults ?expected_results . }
    }""" % (base_uri + "manifest.ttl")
    # Construct the graph from the given RDF and apply the SPARQL filter above
    g = Graph()
    unittests = []
    for tc, title, classification_url, expected_results in g.query(q):
        classification = classification_url.split("#")[-1]
        # Test number is embedded in the test-case URI.
        matches = search(r'(\d+)', tc)
        num = matches.groups(1)[0]
        if expected_results is None:
            expected_results = 'true'
        # Generate the input document URLs
        suffix = "xml"
        if hostLanguage in ["xhtml1", "xhtml5"]:
            suffix = "xhtml"
        elif hostLanguage in ["html4", "html5"]:
            # NOTE(review): "xhtml" for HTML host languages looks odd
            # but matches the original behaviour -- confirm intent.
            suffix = "xhtml"
        elif hostLanguage in ["svgtiny1.2", "svg"]:
            suffix = "svg"
        doc_uri = "%stest-cases/%s." % \
            (base_uri, num)
        unittests.append((int(num), str(title), str(doc_uri + suffix),
                          str(doc_uri + "sparql"), str(classification),
                          str(expected_results)))
    # Sorts the unit tests in unit test number order.
    unittests.sort(key=lambda test: test[0])
    return unittests
class NonEqualityPredicatesTestSuite(unittest.TestCase):
    """Exercise N3 built-in (math:greaterThan) parsing and evaluation in
    the RETE network."""

    def setUp(self):
        from FuXi.Rete.RuleStore import N3RuleStore
        from FuXi.Rete import ReteNetwork
        from FuXi.Rete.Util import generateTokenSet
        # The same N3 source doubles as both rules and facts.
        self.testGraph = Graph()
        self.ruleStore = N3RuleStore()
        self.ruleGraph = Graph(self.ruleStore)
        self.ruleGraph.parse(StringIO(testN3), format='n3')
        self.testGraph.parse(StringIO(testN3), format='n3')
        self.closureDeltaGraph = Graph()
        # Building the network immediately feeds the working memory, so
        # inferences are available to the tests below.
        self.network = ReteNetwork(self.ruleStore,
                                   initialWorkingMemory=generateTokenSet(
                                       self.testGraph),
                                   inferredTarget=self.closureDeltaGraph,
                                   nsMap={})

    def testParseBuiltIns(self):
        """The parsed network contains a math:greaterThan builtin alpha node."""
        from FuXi.Rete.RuleStore import N3Builtin
        from FuXi.Rete.AlphaNode import BuiltInAlphaNode
        self.failUnless(self.ruleStore.rules > 0,
                        "No rules parsed out form N3!")
        for alphaNode in self.network.alphaNodes:
            if isinstance(alphaNode, BuiltInAlphaNode):
                self.failUnless(alphaNode.n3builtin.uri == MATH_NS.greaterThan,
                                "Unable to find math:greaterThan func")

    def testEvaluateBuiltIns(self):
        """Evaluation of the builtin produced the expected inferred triple."""
        from FuXi.Rete.RuleStore import N3Builtin
        from FuXi.Rete.AlphaNode import BuiltInAlphaNode
        self.failUnless(
            first(
                self.closureDeltaGraph.triples(
                    (None, URIRef('http://test/pred1'), Literal(3)))),
            "Missing inferred :pred1 assertions")
class ParserTestCase(unittest.TestCase):
    """RDF/XML parser regression: rdf:about URIs ending in a bare '#'."""

    # Store plugin name and path used for the graph under test.
    backend = 'default'
    path = 'store'

    def setUp(self):
        self.graph = Graph(store=self.backend)
        self.graph.open(self.path)

    def tearDown(self):
        self.graph.close()

    def testNoPathWithHash(self):
        """An about-URI of 'http://example.org#' must round-trip intact."""
        g = self.graph
        g.parse(StringInputSource("""\
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>

<rdf:RDF
  xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
  xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#"
>

  <rdfs:Class rdf:about="http://example.org#">
    <rdfs:label>testing</rdfs:label>
  </rdfs:Class>

</rdf:RDF>
"""), publicID="http://example.org")

        subject = URIRef("http://example.org#")
        label = g.value(subject, RDFS.label)
        self.assertEquals(label, Literal("testing"))
        type = g.value(subject, RDF.type)
        self.assertEquals(type, RDFS.Class)
class NegatedDisjunctTest(unittest.TestCase):
    """Stratified model for a conjunction with a negated disjunction:
    ex:Foo == ex:Omega AND NOT (ex:Bar OR ex:Baz)."""

    def setUp(self):
        self.ontGraph = Graph()
        self.ontGraph.bind("ex", EX_NS)
        self.ontGraph.bind("owl", OWL_NS)
        Individual.factoryGraph = self.ontGraph

    def testStratified(self):
        """The negated disjunction compiles to one higher-strata rule."""
        bar = EX.Bar
        baz = EX.Baz
        noBarOrBaz = ~(bar | baz)
        omega = EX.Omega
        foo = omega & noBarOrBaz
        foo.identifier = EX_NS.Foo
        ruleStore, ruleGraph, network = SetupRuleStore(makeNetwork=True)
        # An Omega individual that is neither Bar nor Baz -> must be Foo.
        individual = BNode()
        omega.extent = [individual]
        NormalFormReduction(self.ontGraph)
        self.assertEqual(repr(foo),
                         "ex:Omega that ( not ex:Bar ) and ( not ex:Baz )")
        posRules, negRules = CalculateStratifiedModel(network, self.ontGraph,
                                                      [EX_NS.Foo])
        foo.graph = network.inferredFacts
        self.failUnless(not posRules,
                        "There should be no rules in the 0 strata!")
        self.assertEqual(
            repr(negRules[0]),
            "Forall ?X ( ex:Foo(?X) :- And( ex:Omega(?X) not ex:Bar(?X) not ex:Baz(?X) ) )"
        )
        self.failUnless(len(negRules) == 1,
                        "There should only be one negative rule in a higher strata")
        self.failUnless(individual in foo.extent,
                        "%s should be a member of ex:Foo" % individual)
def DBPediaAbstract(uri):
    """Return *uri* unchanged after parsing a FOAF document.

    Fix: removed a leftover debugger breakpoint (``import pdb;
    pdb.set_trace()``) that halted every call.

    NOTE(review): despite its name this function does not compute a
    DBPedia abstract, and the parsed graph is discarded -- it looks like
    an unfinished stub; confirm intended behaviour.
    """
    from rdflib.Graph import Graph
    g = Graph()
    g.parse("http://bigasterisk.com/foaf.rdf")
    return uri
def _testPositive(uri, manifest):
    """Run one positive manifest test: load the input document and the
    expected output graph, returning 0 on pass and 1 on failure."""
    if verbose:
        write(u"TESTING: %s" % uri)
    result = 0  # 1=failed, 0=passed
    inDoc = first(manifest.objects(uri, TEST["inputDocument"]))
    outDoc = first(manifest.objects(uri, TEST["outputDocument"]))
    expected = Graph()
    # Pick the parser from the file extension (.nt vs RDF/XML).
    if outDoc[-3:] == ".nt":
        format = "nt"
    else:
        format = "xml"
    expected.load(outDoc, format=format)
    store = TestStore(expected)
    if inDoc[-3:] == ".nt":
        format = "nt"
    else:
        format = "xml"

    try:
        store.load(inDoc, format=format)
    except ParserError, pe:
        write("Failed '")
        write(inDoc)
        write("' failed with")
        raise pe
    # NOTE(review): `pe` is only bound when the except clause above
    # fired (which re-raises), so on the success path this inner try
    # hits a NameError and prints the apology -- looks like dead/odd
    # error-reporting code; confirm before relying on it.
    try:
        write(type(pe))
    except:
        write("sorry could not dump out error.")
        result = 1
def _extract_rdfa(self, filename):
    """Parse *filename* as RDFa 1.0, tidy the resulting graph, return it.

    Fix: the file handle passed to ``parse`` was never closed; it is now
    managed with a ``with`` block.
    """
    g = Graph()
    with open(filename) as source:
        g.parse(source, format="rdfa", rdfa_version="1.0")
    self.__tidy_graph(g)
    return g
class NegationOfAtomicConcept(unittest.TestCase):
    """Stratified-model computation for the negation of an atomic OWL class."""

    def setUp(self):
        # Fresh ontology graph with the test prefixes; Individuals
        # created in the test are rooted here.
        self.ontGraph = Graph()
        self.ontGraph.bind("ex", EX_NS)
        self.ontGraph.bind("owl", OWL_NS)
        Individual.factoryGraph = self.ontGraph

    def testAtomicNegation(self):
        """~ex:Bar yields one negative-strata rule and the right extent."""
        bar = EX.Bar
        baz = ~bar  # complement class of ex:Bar
        baz.identifier = EX_NS.Baz
        ruleStore, ruleGraph, network = SetupRuleStore(makeNetwork=True)
        individual = BNode()
        individual2 = BNode()
        # individual is NOT a Bar; individual2 IS a Bar.
        (EX.OtherClass).extent = [individual]
        bar.extent = [individual2]
        NormalFormReduction(self.ontGraph)
        self.assertEqual(repr(baz),
                         "Class: ex:Baz DisjointWith ex:Bar\n")
        posRules, negRules = CalculateStratifiedModel(network, self.ontGraph,
                                                      [EX_NS.Foo])
        self.failUnless(not posRules,
                        "There should be no rules in the 0 strata!")
        self.failUnless(len(negRules) == 1,
                        "There should only be one negative rule in a higher strata")
        self.assertEqual(repr(negRules[0]),
                         "Forall ?X ( ex:Baz(?X) :- not ex:Bar(?X) )")
        # Membership is read back from the inferred facts.
        baz.graph = network.inferredFacts
        self.failUnless(individual in baz.extent,
                        "%s should be a member of ex:Baz" % individual)
        self.failUnless(individual2 not in baz.extent,
                        "%s should *not* be a member of ex:Baz" % individual2)
def routerEndpoints():
    """Build restkit Resources for each tomato router URL, rewriting the
    public '/tomatoN/' prefixes to their internal proxy equivalents.

    Returns (routers, knownMacAddr).
    """
    # ideally this would all be in the same rdf store, with int and
    # ext versions of urls

    # Raw JS config holds the MAC list and the public router URLs.
    txt = open("/my/site/magma/tomato_config.js").read().replace('\n', '')
    knownMacAddr = jsValue(txt, 'knownMacAddr')
    tomatoUrl = jsValue(txt, 'tomatoUrl')

    # Map each public prefix to its proxied prefix via the access graph.
    from rdflib.Graph import Graph
    g = Graph()
    g.parse("/my/proj/openid_proxy/access.n3", format="n3")
    repl = {'/tomato1/': None, '/tomato2/': None}
    for k in repl:
        rows = list(
            g.query('''
PREFIX p: <http://bigasterisk.com/openid_proxy#>
SELECT ?prefix WHERE { [ p:requestPrefix ?public; p:proxyUrlPrefix ?prefix ] }''',
                    initBindings={Variable("public"): Literal(k)}))
        repl[k] = str(rows[0][0])

    routers = []
    for url in tomatoUrl:
        for k, v in repl.items():
            url = url.replace(k, v)

        routers.append(restkit.Resource(url, timeout=2))
    return routers, knownMacAddr
def __init__(self, case):
    """Initialise the RDF writer for *case* with an empty triple store."""
    super(RDFWriter, self).__init__(case)
    # Map of Bus objects to BNodes.
    self.bus_map = {}
    # Graph the serialised triples are accumulated into.
    self.store = Graph()
class NegatedDisjunctTest(unittest.TestCase):
    """Stratified model for a conjunction with a negated disjunction:
    ex:Foo == ex:Omega AND NOT (ex:Bar OR ex:Baz)."""

    def setUp(self):
        self.ontGraph = Graph()
        self.ontGraph.bind('ex', EX_NS)
        self.ontGraph.bind('owl', OWL_NS)
        Individual.factoryGraph = self.ontGraph

    def testStratified(self):
        """The negated disjunction compiles to one higher-strata rule."""
        bar = EX.Bar
        baz = EX.Baz
        noBarOrBaz = ~(bar | baz)
        omega = EX.Omega
        foo = omega & noBarOrBaz
        foo.identifier = EX_NS.Foo
        ruleStore, ruleGraph, network = SetupRuleStore(makeNetwork=True)
        # An Omega individual that is neither Bar nor Baz -> must be Foo.
        individual = BNode()
        omega.extent = [individual]
        NormalFormReduction(self.ontGraph)
        self.assertEqual(repr(foo),
                         "ex:Omega that ( not ex:Bar ) and ( not ex:Baz )")
        posRules, negRules = CalculateStratifiedModel(network, self.ontGraph,
                                                      [EX_NS.Foo])
        foo.graph = network.inferredFacts
        self.failUnless(not posRules,
                        "There should be no rules in the 0 strata!")
        self.assertEqual(repr(negRules[0]),
                         "Forall ?X ( ex:Foo(?X) :- And( ex:Omega(?X) not ex:Bar(?X) not ex:Baz(?X) ) )")
        self.failUnless(len(negRules) == 1,
                        "There should only be one negative rule in a higher strata")
        self.failUnless(individual in foo.extent,
                        "%s should be a member of ex:Foo" % individual)
def parse_from_soup(self, soup, basefile):
    """Extract an EU treaty ('teu' or 'tfeu') from a BeautifulSoup tree.

    Treaty text is delimited by 50-dash marker lines.  Returns a dict
    with the metadata graph, body, language and URI.
    """
    g = Graph()
    self.log.info("%s: Parsing" % basefile)
    if basefile == "teu":
        # FIXME: Use a better base URI?
        uri = 'http://rinfo.lagrummet.se/extern/celex/12008M'
        # Second marker: start of the TEU text.
        startnode = soup.findAll(text="-" * 50)[1].parent
        g.add((URIRef(uri), DCT['title'],
               Literal("Treaty on European Union")))
    elif basefile == "tfeu":
        uri = 'http://rinfo.lagrummet.se/extern/celex/12008E'
        # Third marker: start of the TFEU text.
        startnode = soup.findAll(text="-" * 50)[2].parent
        g.add((URIRef(uri), DCT['title'],
               Literal("Treaty on the Functioning of the European Union")))

    # Collect paragraph text until the closing marker line.
    lines = deque()
    for p in startnode.findNextSiblings("p"):
        if p.string == "-" * 50:
            self.log.info("found the end")
            break
        else:
            if p.string:
                lines.append(unicode(p.string))

    self.log.info("%s: Found %d lines" % (basefile, len(lines)))
    body = self.make_body(lines)
    self.process_body(body, '', uri)
    # print serialize(body)
    return {'meta': g, 'body': body, 'lang': 'en', 'uri': uri}
def __init__(self, connection, ontology):
    """Create a session over *connection*, caching objects and pending
    graph additions/removals locally until commit."""
    self._connection = connection
    self._ontology = ontology
    # Local caches: wrapped objects plus three working graphs
    # (current view, pending adds, pending removes).
    self._rdfObjects = {}
    self._graph, self._added, self._removed = Graph(), Graph(), Graph()
def __init__(self, graphs):
    """
    Constructor takes a list of URLs that point to RDF/OWL files.

    :type graphs: list
    :param graphs: a list of RDF/OWL files (a single path/URL string is
        also accepted)

    Fixes: ``type(graphs) == str`` replaced with ``isinstance`` (so str
    subclasses work), and the index-based ``range(len(...))`` loop
    replaced with direct iteration.
    """
    self.g = Graph()
    if isinstance(graphs, str):
        self.g.parse(graphs)
    else:
        for source in graphs:
            self.g.parse(source)

    # for testing - todo: load all namespaces from a merged graph in the code below
    self.owl = Namespace("http://www.w3.org/2002/07/owl#")
    self.rdf = Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#")
    self.biblio = Namespace(
        "http://www.linguistics-ontology.org/bibliography/bibliography.owl#"
    )
    self.goldbib = Namespace(
        "http://www.linguistics-ontology.org/bibliography/gold-bibliography.rdf#"
    )
    self.gold = Namespace("http://purl.org/linguistics/gold/")
    self.rdfs = Namespace("http://www.w3.org/2000/01/rdf-schema#")
    self.bibtex = Namespace("http://purl.oclc.org/NET/nknouf/ns/bibtex#")
    self.person = Namespace(
        "http://www.linguistics-ontology.org/bibliography/person.rdf#")

    # get namespaces from graph
    self.namespaces = []
    namespaces = self.g.namespaces()
def routerEndpoints():
    """Build restkit Resources for each tomato router URL, rewriting the
    public '/tomatoN/' prefixes to their internal proxy equivalents.

    Returns (routers, knownMacAddr).
    """
    # ideally this would all be in the same rdf store, with int and
    # ext versions of urls

    # Raw JS config holds the MAC list and the public router URLs.
    txt = open("/my/site/magma/tomato_config.js").read().replace('\n', '')
    knownMacAddr = jsValue(txt, 'knownMacAddr')
    tomatoUrl = jsValue(txt, 'tomatoUrl')

    # Map each public prefix to its proxied prefix via the access graph.
    from rdflib.Graph import Graph
    g = Graph()
    g.parse("/my/proj/openid_proxy/access.n3", format="n3")
    repl = {'/tomato1/' : None, '/tomato2/' : None}
    for k in repl:
        rows = list(g.query('''
PREFIX p: <http://bigasterisk.com/openid_proxy#>
SELECT ?prefix WHERE { [ p:requestPrefix ?public; p:proxyUrlPrefix ?prefix ] }''',
                            initBindings={Variable("public") : Literal(k)}))
        repl[k] = str(rows[0][0])

    routers = []
    for url in tomatoUrl:
        for k, v in repl.items():
            url = url.replace(k, v)

        routers.append(restkit.Resource(url, timeout=2))
    return routers, knownMacAddr
class NonEqualityPredicatesTestSuite(unittest.TestCase):
    """Exercise N3 built-in (math:greaterThan) parsing and evaluation in
    the RETE network."""

    def setUp(self):
        from FuXi.Rete.RuleStore import N3RuleStore
        from FuXi.Rete import ReteNetwork
        from FuXi.Rete.Util import generateTokenSet
        # The same N3 source doubles as both rules and facts.
        self.testGraph = Graph()
        self.ruleStore = N3RuleStore()
        self.ruleGraph = Graph(self.ruleStore)
        self.ruleGraph.parse(StringIO(testN3), format='n3')
        self.testGraph.parse(StringIO(testN3), format='n3')
        self.closureDeltaGraph = Graph()
        # Building the network immediately feeds the working memory, so
        # inferences are available to the tests below.
        self.network = ReteNetwork(self.ruleStore,
                                   initialWorkingMemory=generateTokenSet(self.testGraph),
                                   inferredTarget = self.closureDeltaGraph,
                                   nsMap = {})

    def testParseBuiltIns(self):
        """The parsed network contains a math:greaterThan builtin alpha node."""
        from FuXi.Rete.RuleStore import N3Builtin
        from FuXi.Rete.AlphaNode import BuiltInAlphaNode
        self.failUnless(self.ruleStore.rules > 0,
                        "No rules parsed out form N3!")
        for alphaNode in self.network.alphaNodes:
            if isinstance(alphaNode, BuiltInAlphaNode):
                self.failUnless(alphaNode.n3builtin.uri == MATH_NS.greaterThan,
                                "Unable to find math:greaterThan func")

    def testEvaluateBuiltIns(self):
        """Evaluation of the builtin produced the expected inferred triple."""
        from FuXi.Rete.RuleStore import N3Builtin
        from FuXi.Rete.AlphaNode import BuiltInAlphaNode
        self.failUnless(
            first(self.closureDeltaGraph.triples(
                (None, URIRef('http://test/pred1'), Literal(3)))),
            "Missing inferred :pred1 assertions")
def createTestOntGraph():
    """Build a small anatomy test ontology and return its graph.

    Defines: KneeJoint == Joint AND (isPartOf some Knee);
    LegStructure == Structure AND (isPartOf some Leg); Leg and Joint as
    Structure subclasses; Knee as a subclass of (hasLocation some Leg),
    where hasLocation is a subproperty of the transitive isPartOf.
    (A large block of commented-out StructuralTransformation debugging
    code was removed.)
    """
    graph = Graph()
    graph.bind('ex', EX_NS, True)
    Individual.factoryGraph = graph
    kneeJoint = EX_CL.KneeJoint
    joint = EX_CL.Joint
    knee = EX_CL.Knee
    isPartOf = Property(EX_NS.isPartOf)
    # isPartOf is transitive, so part-of chains compose.
    graph.add((isPartOf.identifier, RDF.type, OWL_NS.TransitiveProperty))
    structure = EX_CL.Structure
    leg = EX_CL.Leg
    hasLocation = Property(EX_NS.hasLocation, subPropertyOf=[isPartOf])
    kneeJoint.equivalentClass = [joint & (isPartOf | some | knee)]
    legStructure = EX_CL.LegStructure
    legStructure.equivalentClass = [structure & (isPartOf | some | leg)]
    # Leg and Joint are kinds of Structure.
    structure += leg
    structure += joint
    # Knee is located in a leg.
    locatedInLeg = hasLocation | some | leg
    locatedInLeg += knee
    return graph
def len_graph(request):
    """Django view: build a demo RDF graph, parse a municipality file and
    render its triple count.

    Fixes: the original created ``store = Graph()`` but then called
    ``graph.add(...)`` on an undefined name (NameError at runtime) --
    unified on ``graph``.  The pointless ``exec`` of a format string is
    replaced by a plain assignment.
    """
    # Declare namespaces to use.
    ns_sn = Namespace("http://www.snee.com/ns/misc#")
    ns_sd = Namespace("http://www.snee.com/docs/")
    ns_dc = Namespace("http://purl.org/dc/elements/1.1/")
    ns_pr = Namespace("http://prismstandard.org/1.0#")

    myfile = '/var/rdf/municipality.rdf'

    # Create storage object for triples.
    graph = Graph()

    # Add triples to store.
    graph.add(
        (ns_sd["d1001"], ns_dc["title"], Literal("Sample Acrobat document")))
    graph.add((ns_sd["d1001"], ns_dc["format"], Literal("PDF")))
    graph.add((ns_sd["d1001"], ns_dc["creator"], Literal("Billy Shears")))
    graph.add(
        (ns_sd["d1001"], ns_pr["publicationTime"], Literal("2002-12-19")))

    graph.add((ns_sd["d1002"], ns_dc["title"], Literal("Sample RTF document")))
    graph.add((ns_sd["d1002"], ns_dc["format"], Literal("RTF")))
    graph.add((ns_sd["d1002"], ns_dc["creator"], Literal("Nanker Phelge")))
    graph.add(
        (ns_sd["d1002"], ns_pr["publicationTime"], Literal("2002-12-15")))

    graph.add(
        (ns_sd["d1003"], ns_dc["title"], Literal("Sample LaTeX document")))
    graph.add((ns_sd["d1003"], ns_dc["format"], Literal("LaTeX")))
    graph.add((ns_sd["d1003"], ns_dc["creator"], Literal("Richard Mutt")))
    graph.add(
        (ns_sd["d1003"], ns_pr["publicationTime"], Literal("2002-12-16")))
    graph.add((ns_sd["d1003"], ns_sn["quality"], Literal("pretty good")))

    rdf_subjects = graph.subjects()
    rdf_predicates = graph.predicates()
    rdf_objects = graph.objects()
    select_predicate_by_subject = graph.predicates(subject=ns_sd["d1001"])
    select_object_by_predicate = graph.objects(predicate=ns_dc["title"])

    g = Graph()
    g.parse(myfile, format="xml")
    html = 'the lenght of the graph is: %s' % len(g)
    context = {'html': html, 'g': select_predicate_by_subject}
    return render_to_response('len_graph.html', context)
def endElementNS(self, name, qname):
    """SAX end-of-element handler for TriX parsing.

    State machine: 0 = document, 1 = inside <TriX>, 2 = inside <graph>,
    3 = reading the graph name, 4 = collecting triple terms.
    """
    if name[0] != TRIXNS:
        self.error("Only elements in the TriX namespace are allowed.")

    if name[1] == "uri":
        if self.state == 3:
            # URI names the current graph.
            self.graph = Graph(store=self.store.store,
                               identifier=URIRef(self.chars.strip()))
            self.state = 2
        elif self.state == 4:
            # URI is a triple term.
            self.triple += [URIRef(self.chars.strip())]
        else:
            self.error(
                "Illegal internal self.state - This should never happen if the SAX parser ensures XML syntax correctness"
            )

    if name[1] == "id":
        if self.state == 3:
            # Blank node names the current graph.
            self.graph = Graph(self.store.store,
                               identifier=self.get_bnode(
                                   self.chars.strip()))
            self.state = 2
        elif self.state == 4:
            self.triple += [self.get_bnode(self.chars.strip())]
        else:
            self.error(
                "Illegal internal self.state - This should never happen if the SAX parser ensures XML syntax correctness"
            )

    if name[1] == "plainLiteral" or name[1] == "typedLiteral":
        if self.state == 4:
            # Literal term; lang/datatype were captured at startElement.
            self.triple += [
                Literal(self.chars, lang=self.lang, datatype=self.datatype)
            ]
        else:
            self.error(
                "This should never happen if the SAX parser ensures XML syntax correctness"
            )

    if name[1] == "triple":
        if self.state == 4:
            if len(self.triple) != 3:
                self.error("Triple has wrong length, got %d elements: %s" %
                           (len(self.triple), self.triple))
            self.graph.add(self.triple)
            #self.store.store.add(self.triple,context=self.graph)
            #self.store.addN([self.triple+[self.graph]])
            self.state = 2
        else:
            self.error(
                "This should never happen if the SAX parser ensures XML syntax correctness"
            )

    if name[1] == "graph":
        self.state = 1

    if name[1] == "TriX":
        self.state = 0
def getURILabel(uri):
    """Dereference *uri*, parse it as RDF, and return the object of the
    first triple whose predicate looks like rdfs:*label.

    Returns the string "No URILABEL Found" when no such triple exists.

    Fix: the regex was recompiled on every loop iteration; it is now
    compiled once before the loop.
    NOTE(review): the pattern embeds the namespace URI unescaped ('.'
    and '#' are regex metacharacters) -- matches the original behaviour,
    but confirm this looseness is intended.
    """
    rdfs = "http://www.w3.org/2000/01/rdf-schema#"
    label_pattern = re.compile(rdfs + ".*label", re.I)
    g = Graph()
    g.parse(uri)
    for s, p, o in g.triples((None, None, None)):
        if label_pattern.search(p):
            return o
    return "No URILABEL Found"
def __init__(self, source):
    """Load project.rdf from a ZipFile, a directory, or a direct path/stream."""
    if isinstance(source, ZipFile):
        # Archive: read the embedded project.rdf into memory.
        rdf_source = StringIO.StringIO(source.read('project.rdf'))
    elif isinstance(source, basestring) and os.path.isdir(source):
        # Directory: look for project.rdf inside it.
        rdf_source = os.path.join(source, 'project.rdf')
    else:
        rdf_source = source
    self.g = Graph()
    self.g.parse(rdf_source)
def setUp(self):
    """Bind prefixes and declare ex:foo as a subclass of
    (not ex:alpha) OR (not ex:omega)."""
    ont = Graph()
    for prefix, namespace in (('ex', EX_NS), ('owl', OWL_NS)):
        ont.bind(prefix, namespace)
    self.ontGraph = ont
    Individual.factoryGraph = ont
    disjunct = (~EX.alpha) | (~EX.omega)
    self.foo = EX.foo
    disjunct += self.foo
class Analyzer(object): '''Analyze the Dickens letter data. !! SPARQL does not support aggregates so we have to do this by hand. ''' # see add_person above dickens = URIRef(base_uri + 'person#mr_charles_dickens') def __init__(self): self.graph = Graph(store=IOMemory()) self.graph.parse(PATH, format='nt') def info(self): print len(self.graph) def simple_search(self): for count, (s, p, o) in enumerate( # self.graph.triples((self.dickens, letter_ns['from'],None)) # self.graph.triples((self.dickens,None,None)) self.graph.triples((None, letter_ns['from'], self.dickens))): if count > 10: break print s, p, o def plot_counts(self): q = '''SELECT ?adate WHERE { ?a letter:date ?adate . ?a letter:from <%s> . }''' % (self.dickens) spar = self.graph.query(q, initNs=dict(letter=letter_ns)) dates = [row[0].toPython() for row in spar] bins = range(min(dates), max(dates) + 1) plt.hist(dates, bins, fc='blue', alpha=0.8) plt.savefig('letter_dates.png') def plot_letter_network(self): q = '''SELECT ?adate ?to WHERE { ?a letter:date ?adate . ?a letter:from <%s> . ?a letter:to ?b . ?b foaf:name ?to . }''' % (self.dickens) spar = self.graph.query(q, initNs=dict(letter=letter_ns, foaf=FOAF)) values = [[row[0].toPython(), unicode(row[1])] for row in spar] names = list(set([x[1] for x in values])) import networkx as nx dgr = nx.Graph() labels = {-1: u'Charles Dickens'} for count, name in enumerate(names): # dgr.add_edge(u'Charles Dickens', name) dgr.add_edge(-1, count) labels[count] = name pos = nx.graphviz_layout(dgr, prog='twopi') fig = plt.figure(1, figsize=(15, 15)) nx.draw(dgr, pos, node_size=15, labels=labels, font_size=10) plt.savefig('dickens_letter_network.png')
def commit(self):
    """
    Commits changes to the remote graph and flushes local caches.
    """
    self._connection.update(add=self._added, remove=self._removed)
    # Everything pushed -- reset all local state to empty.
    self._rdfObjects = {}
    self._graph, self._added, self._removed = Graph(), Graph(), Graph()
def loadAuthRec(self, n3File):
    """Load an N3 graph of authority posts and return {label: uri}."""
    g = Graph()
    g.load(Util.relpath(n3File), format='n3')
    # Invert rdfs:label statements into a label -> subject-URI lookup.
    return dict((unicode(label), unicode(uri))
                for uri, label in g.subject_objects(RDFS.label))
def setUp(self):
    """Parse the two FOAF fixtures as named graphs and aggregate them."""
    alice_id = URIRef('http://example.org/foaf/aliceFoaf')
    bob_id = URIRef('http://example.org/foaf/bobFoaf')
    self.graph1 = Graph(identifier=alice_id)
    self.graph1.parse(StringIO(test_graph_a), format="n3")
    self.graph2 = Graph(identifier=bob_id)
    self.graph2.parse(StringIO(test_graph_b), format="n3")
    # Read-only union over both named graphs.
    self.unionGraph = ReadOnlyGraphAggregate(
        graphs=[self.graph1, self.graph2])
def parse_remote(request):
    """
    Django view: fetch and parse a remote RDF document and render its
    statements - http://code.google.com/p/rdflib/wiki/IntroParsing

    Fix: the ``exec`` of a format string is replaced by a plain
    assignment (no runtime code generation needed), and the manual
    append loop by ``list()``.
    """
    g = Graph()
    g.parse("http://bigasterisk.com/foaf.rdf")  # , format="xml"
    len_g = 'the lenght of the graph is: %s' % len(g)
    graph = list(g)
    context = {'graph': graph, 'len_g': len_g}
    return render_to_response('rdf/parse_remote.html', context)
def reason_func(resource_name):
    """Return the local names of resources related (mtc:relateTo) to
    *resource_name*, computed by top-down SPARQL entailment over the
    ganglia metric ontology and rules."""
    famNs = Namespace('file:///code/ganglia/metric.n3#')
    nsMapping = {'mtc': famNs}
    # Horn rules + base facts for the metric domain.
    rules = HornFromN3('ganglia/metric/metric_rule.n3')
    factGraph = Graph().parse('ganglia/metric/metric.n3', format='n3')
    factGraph.bind('mtc', famNs)
    # relateTo is derived, so the entailing store must expand it.
    dPreds = [famNs.relateTo]
    topDownStore = TopDownSPARQLEntailingStore(factGraph.store,
                                               factGraph,
                                               idb=rules,
                                               derivedPredicates=dPreds,
                                               nsBindings=nsMapping)
    targetGraph = Graph(topDownStore)
    targetGraph.bind('ex', famNs)
    #get list of the related resource
    r_list = list(
        targetGraph.query('SELECT ?RELATETO { mtc:%s mtc:relateTo ?RELATETO}'
                          % resource_name,
                          initNs=nsMapping))
    # Strip each result down to its fragment (local name).
    res_list = []
    for res in r_list:
        res_list.append(str(res).split("#")[1])
    return res_list
def read_nt(request):
    """
    Django view: read a local NT file and render its statements -
    http://code.google.com/p/rdflib/wiki/IntroParsing

    Fix: the ``exec`` of a format string is replaced by a plain
    assignment (no runtime code generation needed), and the manual
    append loop by ``list()``.
    """
    nt_file = '/var/django/transdeco/rdf/demo.nt'
    g = Graph()
    g.parse(nt_file, format="nt")
    len_g = 'the lenght of the graph is: %s' % len(g)
    graph = list(g)
    context = {'len_g': len_g, 'graph': graph}
    return render_to_response('rdf/read_nt.html', context)
def setUp(self):
    """Feed four URI-typed facts through the cmpuri rule network and log
    the inferences (URIs differ only in case/port/host)."""
    ruleStore, ruleGraph, self.network = makeRuleStore([fixture("test_cmpuri.n3")])
    g = Graph()
    g.parse(StringIO("""
<http://example.org/> a _:x .
<http://EXAMPLE.ORG/> a _:x .
<HTTP://example.org:80/> a _:x .
<http://example.com/> a _:x .
"""), format="n3")
    self.network.feedFactsToAdd(generateTokenSet(g))
    logging.debug("Inferred Facts:\n%s"
                  % self.network.inferredFacts.serialize(format="n3"))
def IterItems(self):
    """Generator entry point: fetch self.uri and parse it as RDF/XML.

    On any fetch/parse failure the error is logged and iteration ends.
    """
    self.Produced()
    try:
        loader = bm_uri.URILoader(self.uri)
        graph = Graph()
        graph.parse(StringIO.StringIO(loader.Load()), format="xml")
    except Exception, x:
        Log(exception=True)
        # NOTE(review): raise StopIteration inside a generator is the
        # old Python 2 idiom for ending iteration; in Python 3.7+ this
        # would raise RuntimeError (PEP 479).
        raise StopIteration
def parse(self):
    """
    Parse RDF.

    Cache RDF parse results in the object for the further calls.
    """
    if hasattr(self, "_rdf_parse_data"):
        # Already parsed once for this model object -- reuse the cached graph.
        return
    # Parser cache: its lifetime is the same as for this model object.
    source = StringInputSource(self.data)
    self._rdf_parse_data = Graph().parse(source)
def main(rss_url, blog_uri):
    """Print the title and description of every item in an RSS 1.0 feed.

    :param rss_url: location of the RSS feed to parse
    :param blog_uri: URI of the channel (blog) resource inside that feed
    """
    store, schema_store = Graph(), Graph()
    store.parse(rss_url)
    store.bind('rss', 'http://purl.org/rss/1.0/')
    # The schema graph drives ThingFactory's property resolution.
    schema_store.parse('file:rss_schema.xml')
    Thing = ThingFactory(store, schema_store)
    blog = Thing(URI(blog_uri))
    for item in blog.rss_items:
        print "*", item.rss_title
        print indent(item.rss_description)
class NegatedExistentialRestrictionTest(unittest.TestCase):
    """Tests stratified-model reasoning over negated existential restrictions."""

    def setUp(self):
        # Fresh ontology graph per test; Individual operations write into it.
        self.ontGraph = Graph()
        self.ontGraph.bind("ex", EX_NS)
        self.ontGraph.bind("owl", OWL_NS)
        Individual.factoryGraph = self.ontGraph

    def testInConjunct(self):
        """A negated existential inside a conjunction classifies correctly."""
        contains = Property(EX_NS.contains)
        # Operation AND NOT(contains some IsolatedCABGConcomitantExclusion)
        # AND (contains some CoronaryArteryBypassGrafting)
        testCase2 = (
            EX.Operation
            & ~(contains | some | EX.IsolatedCABGConcomitantExclusion)
            & (contains | some | EX.CoronaryArteryBypassGrafting)
        )
        testCase2.identifier = EX_NS.IsolatedCABGOperation
        NormalFormReduction(self.ontGraph)
        self.assertEqual(
            repr(testCase2),
            "ex:Operation that ( ex:contains some ex:CoronaryArteryBypassGrafting ) and ( not ( ex:contains some ex:IsolatedCABGConcomitantExclusion ) )",
        )
        ruleStore, ruleGraph, network = SetupRuleStore(makeNetwork=True)
        # An operation that contains a grafting (and no exclusion) should be
        # classified as an IsolatedCABGOperation.
        op = BNode()
        (EX.Operation).extent = [op]
        grafting = BNode()
        (EX.CoronaryArteryBypassGrafting).extent = [grafting]
        testCase2.graph.add((op, EX_NS.contains, grafting))
        CalculateStratifiedModel(network, testCase2.graph, [EX_NS.Foo, EX_NS.IsolatedCABGOperation])
        # Check membership against the inferred facts, not the input graph.
        testCase2.graph = network.inferredFacts
        self.failUnless(op in testCase2.extent, "%s should be in ex:IsolatedCABGOperation's extent" % op)

    def testGeneralConceptInclusion(self):
        # Some Class
        # ## Primitive Type ##
        # SubClassOf: Class: ex:NoExclusion .
        # DisjointWith ( ex:contains some ex:IsolatedCABGConcomitantExclusion )
        contains = Property(EX_NS.contains)
        # NoExclusion is axiomatized as NOT(contains some Exclusion).
        testClass = ~(contains | some | EX.Exclusion)
        testClass2 = EX.NoExclusion
        testClass2 += testClass
        NormalFormReduction(self.ontGraph)
        # individual1 contains individual2, which is NOT in Exclusion.
        individual1 = BNode()
        individual2 = BNode()
        contains.extent = [(individual1, individual2)]
        ruleStore, ruleGraph, network = SetupRuleStore(makeNetwork=True)
        posRules, negRules = CalculateStratifiedModel(network, self.ontGraph, [EX_NS.NoExclusion])
        self.failUnless(not posRules, "There should be no rules in the 0 strata!")
        self.assertEqual(len(negRules), 2, "There should be 2 'negative' rules")
        # Evaluate class membership against the inferred facts.
        Individual.factoryGraph = network.inferredFacts
        targetClass = Class(EX_NS.NoExclusion, skipOWLClassMembership=False)
        self.failUnless(
            individual1 in targetClass.extent,
            "There is a BNode that bears the contains relation with another individual that is not a member of Exclusion!",
        )
        self.assertEquals(len(list(targetClass.extent)), 1, "There should only be one member in NoExclusion")
def load_authority_rec(self, n3file):
    """Load an RDF graph containing authority records in N3 format.

    :param n3file: path to the N3 file (resolved via Util.relpath)
    :return: dict mapping each record's rdfs:label text to its URI
    """
    path = Util.relpath(n3file)
    graph = Graph()
    graph.load(path, format='n3')
    # Invert the (uri, label) pairs so records can be looked up by label.
    return dict((unicode(label), unicode(uri))
                for uri, label in graph.subject_objects(RDFS.label))
def construct(dictionary):
    """Build an RDF graph from *dictionary* and pass it to construct_from_graph.

    The "type" key becomes an rdf:type triple (mapped through `types`); every
    other key is mapped through `predicate` with its value stored as a Literal.
    """
    # Step 1: massage the data into an rdflib graph rooted at one blank node.
    graph = Graph()
    bnode = BNode()
    for key, value in dictionary.items():
        if key == "type":
            graph.add((bnode, RDF.type, URIRef(types[value])))
        else:
            graph.add((bnode, predicate[key], Literal(value)))
    return construct_from_graph(graph)
def __init__(self, rdf, root_node):
    '''
    constructor
    @param rdf: rdf document to work with
    @param root_node: the root node of the RDF tree
    '''
    # Graph.parse returns the populated graph itself, so parse in one step.
    self.rdf = Graph().parse(rdf)
    self.root_node = URIRef(root_node)
    self.nodes = {}
    # Count of *all* nodes; otherwise only nodes with a specified
    # hierarchy link type would be counted.
    self.counterAllNodes = 0
    self._buildTree()
def test_unresolvable_host(self):
    """An unresolvable host must yield a Failure status with HTTP code 0."""
    facts = Graph()
    facts.parse(fixture("cap-uk-payments-2009.rdf"))
    resource = URIRef("http://cap-payments.defra.../2008_All_CAP_Search_Results.xls")
    self.network.feedFactsToAdd(generateTokenSet(facts))
    closureDelta = self.network.inferredFacts
    logging.debug("Inferred Triples:\n%s" % closureDelta.serialize(format="n3"))
    req, resp = self.getreq(closureDelta, resource)
    # Resolution failure: expect curl Failure status and status code "0".
    assert (resp, CURL["status"], CURL["Failure"]) in closureDelta
    assert (resp, HTTP["statusCodeNumber"], Literal("0")) in closureDelta
def test_404(self):
    """A missing resource must yield a Failure status with HTTP code 404."""
    facts = Graph()
    facts.parse(fixture("dbpedia_broken.rdf"))
    resource = URIRef("http://eris.okfn.org/nonexistent")
    self.network.feedFactsToAdd(generateTokenSet(facts))
    closureDelta = self.network.inferredFacts
    logging.debug("Inferred Triples:\n%s" % closureDelta.serialize(format="n3"))
    req, resp = self.getreq(closureDelta, resource)
    # Not found: expect curl Failure status and status code "404".
    assert (resp, CURL["status"], CURL["Failure"]) in closureDelta
    assert (resp, HTTP["statusCodeNumber"], Literal("404")) in closureDelta
def setUp(self):
    """Build the regexp rule network and feed it a title plus a seeAlso link."""
    ruleStore, ruleGraph, self.network = makeRuleStore([fixture("test_regexp.n3")])
    facts = Graph()
    facts.parse(StringIO("""
@prefix dc: <http://purl.org/dc/terms/>.
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
<http://example.org/> dc:title "abc/123" .
<http://example.org/> rdfs:seeAlso <http://example.org/12345.txt>.
"""), format="n3")
    self.network.feedFactsToAdd(generateTokenSet(facts))
    logging.debug("Inferred Facts:\n%s" % self.network.inferredFacts.serialize(format="n3"))