def parse_and_serialize(input_files, input_format, guess, outfile,
                        output_format, ns_bindings, store_conn="", store_type=None):
    """Parse RDF from input_files into one graph and optionally serialize it.

    :param input_files: iterable of file paths ('-' means stdin).
    :param input_format: format string (may carry kw options for the parser).
    :param guess: if true and no explicit format, guess from the file name.
    :param outfile: serialization destination; falsy to skip serializing.
    :param output_format: serializer format string (may carry kw options).
    :param ns_bindings: mapping of prefix -> namespace URI to bind.
    :param store_conn: connection string for the optional backing store.
    :param store_type: rdflib store plugin name; None for in-memory graph.
    """
    if store_type:
        # Back the graph with a named store plugin (e.g. a database).
        store = plugin.get(store_type, Store)()
        store.open(store_conn)
        graph = ConjunctiveGraph(store)
    else:
        store = None
        graph = ConjunctiveGraph()

    # override=False keeps any prefixes the parsers bind themselves.
    for prefix, uri in list(ns_bindings.items()):
        graph.namespace_manager.bind(prefix, uri, override=False)

    for fpath in input_files:
        # _format_and_kws splits "fmt:key=val,..." into (fmt, {key: val}).
        use_format, kws = _format_and_kws(input_format)
        if fpath == '-':
            fpath = sys.stdin
        elif not input_format and guess:
            use_format = guess_format(fpath) or DEFAULT_INPUT_FORMAT
        graph.parse(fpath, format=use_format, **kws)

    if outfile:
        output_format, kws = _format_and_kws(output_format)
        kws.setdefault('base', None)
        graph.serialize(destination=outfile, format=output_format, **kws)

    # Roll back rather than commit: this tool must not mutate the store.
    if store:
        store.rollback()
class Db(_shared):
    """Comment store backed by flat n3/nt files, reloaded on mtime change."""

    def __init__(self):
        # mtimes of the source files at the last successful load.
        self.lastTimes = []

    def getGraph(self):
        """Return the combined comments graph, reloading from disk if any
        source file's modification time has changed since the last load."""
        t1 = time.time()
        mtimes = []
        for f in (["/my/proj/openid_proxy/access.n3"] +
                  glob.glob("commentstore/*.nt")):
            mtimes.append(os.path.getmtime(f))

        if mtimes == self.lastTimes and hasattr(self, 'currentGraph'):
            return self.currentGraph
        self.lastTimes = mtimes

        # Concatenate all sources into one temp file and parse it in one go.
        # NOTE(review): os.system with a fixed shell string; paths are
        # hard-coded so no injection risk, but subprocess would be cleaner.
        tf = tempfile.NamedTemporaryFile()
        os.system("cat /my/proj/openid_proxy/access.n3 commentstore/*.nt > %s"
                  % tf.name)
        self.currentGraph = ConjunctiveGraph()
        self.currentGraph.parse(tf.name, format="n3")

        log.info("reloaded comments from disk in %f sec" % (time.time() - t1))
        return self.currentGraph

    def writeFile(self, stmts, ctx, fileWords):
        """Write the given statements as a new comment file."""
        outfile = "commentstore/post-%s.nt" % ("-".join(fileWords))
        graph = ConjunctiveGraph()
        # presumably stmts is a sequence of triples added under context ctx
        # (old rdflib add(triple, context=...) signature) -- TODO confirm.
        graph.add(*stmts, **{'context': ctx})
        # BUG FIX: the original called `graph.graph.serialize(...)`, which
        # raises AttributeError (ConjunctiveGraph has no `.graph`). Also
        # serialize as 'nt' to match the .nt extension (getGraph re-parses
        # these files, and ntriples output is guaranteed to round-trip).
        graph.serialize(outfile, format='nt')
        log.info("wrote new comment to %s", outfile)
def main():
    """Integration test entry point: verify the OMIA ttl output contains
    at least EXPECTED_PAIRS model_of predicates."""
    arg_parser = argparse.ArgumentParser(
        description='OMIA integration test',
        formatter_class=argparse.RawTextHelpFormatter)
    arg_parser.add_argument(
        '--input', '-i', type=str, required=True,
        help='Location of input ttl file')
    opts = arg_parser.parse_args()

    graph = ConjunctiveGraph()
    graph.parse(opts.input, format=rdflib_util.guess_format(opts.input))

    model_of = URIRef('http://purl.obolibrary.org/obo/RO_0003301')
    pair_count = len(list(graph.subject_objects(model_of)))

    # Guard clause: fail fast when the graph is missing expected pairs.
    if pair_count < EXPECTED_PAIRS:
        logger.error("Not enough model_of predicates in graph:"
                     " {} expected {} check omia log for"
                     " warnings".format(pair_count, EXPECTED_PAIRS))
        exit(1)
    logger.info("PASSED")
def check(kws):
    """Parse a graph with the given keyword arguments and assert that every
    context carries an Identifier as its graph id."""
    graph = ConjunctiveGraph()
    graph.parse(**kws)
    for context in graph.contexts():
        assert isinstance(context.identifier, Identifier)
def test4_DAWG_DATASET_COMPLIANCE_is_True(self):
    # Unconditionally skipped; the code below is intentionally unreachable
    # and kept so it can be re-enabled once the SPARQL issue is fixed.
    raise SkipTest("known DAWG_DATATSET_COMPLIANCE SPARQL issue")
    graph = Graph()
    graph.parse(data=test4data, format='n3')
    res = graph.query(test4query, dSCompliance=True)
    # print("json", res.serialize(format='json'))
    assert len(res) == 2
def get_sparql(self, current_ontology=None, destination_ontology=None,
               current_version=None, destination_version=None,
               origen=None, insert=None):
    """ Make sparql statements to be executed """
    # NOTE(review): Python 2 code (`except BadSyntax, e`, `unicode`).
    query_up = ""
    query_down = ""
    if insert is None:
        current_graph = ConjunctiveGraph()
        destination_graph = ConjunctiveGraph()
        #if insert is None:
        try:
            # current_ontology may be None on a first migration; the
            # destination ontology is always parsed.
            if current_ontology is not None:
                current_graph.parse(data=current_ontology, format='turtle')
            destination_graph.parse(data=destination_ontology, format='turtle')
        except BadSyntax, e:
            e._str = e._str.decode('utf-8')
            raise MigrationException("Error parsing graph %s" % unicode(e))
        # Forward direction: commands that turn current into destination.
        forward_migration, backward_migration = (
            self._generate_migration_sparql_commands(
                destination_graph, current_graph, self.__virtuoso_graph))
        query_up += forward_migration
        query_down += backward_migration
        # Reverse direction: commands that turn destination back into current.
        forward_migration, backward_migration = (
            self._generate_migration_sparql_commands(
                current_graph, destination_graph, self.__virtuoso_graph))
        query_down += forward_migration
        query_up += backward_migration
def parse_and_serialize(input_files, input_format, guess, outfile,
                        output_format, ns_bindings, store_conn="", store_type=None):
    """Parse RDF from input_files into one graph and optionally serialize it.

    :param input_files: iterable of file paths ('-' means stdin).
    :param input_format: format string (may carry kw options for the parser).
    :param guess: if true and no explicit format, guess from the file name.
    :param outfile: serialization destination; falsy to skip serializing.
    :param output_format: serializer format string (may carry kw options).
    :param ns_bindings: mapping of prefix -> namespace URI to bind.
    :param store_conn: connection string for the optional backing store.
    :param store_type: rdflib store plugin name; None for in-memory graph.
    """
    if store_type:
        # Back the graph with a named store plugin (e.g. a database).
        store = plugin.get(store_type, Store)()
        store.open(store_conn)
        graph = ConjunctiveGraph(store)
    else:
        store = None
        graph = ConjunctiveGraph()

    # override=False keeps any prefixes the parsers bind themselves.
    for prefix, uri in list(ns_bindings.items()):
        graph.namespace_manager.bind(prefix, uri, override=False)

    for fpath in input_files:
        # _format_and_kws splits "fmt:key=val,..." into (fmt, {key: val}).
        use_format, kws = _format_and_kws(input_format)
        if fpath == '-':
            fpath = sys.stdin
        elif not input_format and guess:
            use_format = guess_format(fpath) or DEFAULT_INPUT_FORMAT
        graph.parse(fpath, format=use_format, **kws)

    if outfile:
        output_format, kws = _format_and_kws(output_format)
        kws.setdefault('base', None)
        graph.serialize(destination=outfile, format=output_format, **kws)

    # Roll back rather than commit: this tool must not mutate the store.
    if store:
        store.rollback()
def test_url(self):
    # Remote RDFa extraction smoke test; only runs when html5lib is present.
    if not self.html5lib_installed():
        return
    graph = ConjunctiveGraph()
    graph.parse(location='http://oreilly.com/catalog/9780596516499/',
                format='rdfa', lax=True)
    self.assertTrue(len(graph) > 0)
class TestSparqlJsonResults(unittest.TestCase):
    """Check that SPARQL query results serialize to the expected JSON."""

    def setUp(self):
        self.graph = ConjunctiveGraph()
        self.graph.parse(StringIO(test_data), format="n3")

    def _query_result_contains(self, query, correct):
        # Run the query, round-trip through the JSON serializer, and compare
        # head and bindings against the expected structure.
        results = self.graph.query(query)
        result_json = json.loads(results.serialize(format='json'))
        msg = "Expected:\n %s \n- to contain:\n%s" % (result_json, correct)
        self.failUnless(result_json["head"]==correct["head"], msg)
        # NOTE(review): sorting lists of dicts works on Python 2 only;
        # Python 3 raises TypeError (dicts are unorderable) -- confirm
        # this module still targets Python 2.
        result_bindings = sorted(result_json["results"]["bindings"])
        correct_bindings = sorted(correct["results"]["bindings"])
        msg = "Expected:\n %s \n- to contain:\n%s" % (result_bindings, correct_bindings)
        self.failUnless(result_bindings==correct_bindings, msg)

    # Each test method is generated from a named fixture pair.
    testOptional = make_method('optional')
    testWildcard = make_method('wildcard')
    testUnion = make_method('union')
    testUnion3 = make_method('union3')
    testSelectVars = make_method('select_vars')
    testWildcardVars = make_method('wildcard_vars')
def test_file(self):
    # Local-file RDFa extraction; only runs when html5lib is present.
    if not self.html5lib_installed():
        return
    graph = ConjunctiveGraph()
    graph.parse(location='test/rdfa/oreilly.html', format='rdfa', lax=True)
    self.assertEqual(len(graph), 77)
def test4_DAWG_DATASET_COMPLIANCE_is_True(self):
    # Unconditionally skipped; the code below is intentionally unreachable
    # and kept so it can be re-enabled once the SPARQL issue is fixed.
    raise SkipTest("known DAWG_DATATSET_COMPLIANCE SPARQL issue")
    graph = Graph()
    graph.parse(data=test4data, format='n3')
    res = graph.query(test4query, dSCompliance=True)
    # print("json", res.serialize(format='json'))
    assert len(res) == 2
class Query(unittest.TestCase):
    """Each canned SPARQL query must return the expected number of rows."""

    def setUp(self):
        self.graph = ConjunctiveGraph()
        self.graph.parse(StringIO(test_data), format="n3")

    def _count(self, query):
        # Helper: number of result rows for a query against the fixture graph.
        return len(list(self.graph.query(query)))

    def test1(self):
        self.assertEqual(self._count(test_query1), 1)

    def test2(self):
        self.assertEqual(self._count(test_query2), 1)

    def test3(self):
        self.assertEqual(self._count(test_query3), 1)

    def test4(self):
        self.assertEqual(self._count(test_query4), 1)

    def test5(self):
        self.assertEqual(self._count(test_query5), 0)
def deserialize(
    self,
    stream,
    rdf_format="trig",
    relation_mapper=relation_mapper,
    predicate_mapper=predicate_mapper,
    **kwargs,
):
    """
    Deserialize from the `PROV-O <https://www.w3.org/TR/prov-o/>`_
    representation into a :class:`~prov.model.ProvDocument`.

    :param stream: Input data.
    :param rdf_format: The RDF format of the input data, default: TRiG.
    """
    # Merge the explicit format into a copy of the caller's parse kwargs.
    parse_kwargs = dict(kwargs, format=rdf_format)
    container = ConjunctiveGraph()
    container.parse(stream, **parse_kwargs)

    document = pm.ProvDocument()
    self.document = document
    self.decode_document(
        container,
        document,
        relation_mapper=relation_mapper,
        predicate_mapper=predicate_mapper,
    )
    return document
def testSpec(self):
    # The Nokia example TriX file must parse without error.
    path = os.path.relpath(
        os.path.join(TEST_DIR, 'trix/nokia_example.trix'), os.curdir)
    graph = ConjunctiveGraph()
    graph.parse(path, format="trix")
def testParse(self):
    # Remote n3 parsing smoke test; skipped when the network is unavailable.
    graph = ConjunctiveGraph()
    url = "http://groups.csail.mit.edu/dig/2005/09/rein/examples/troop42-policy.n3"
    try:
        graph.parse(url, format="n3")
    except URLError:
        from nose import SkipTest
        raise SkipTest('No network to retrieve the information, skipping test')
def main():
    """OMIA integration test: check model_of pair count and a known
    breed-to-OMIM triple in the generated ttl file."""
    parser = argparse.ArgumentParser(
        description='OMIA integration test',
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument(
        '--input', '-i', type=str, required=True,
        help='Location of input ttl file')
    args = parser.parse_args()
    graph = ConjunctiveGraph()
    graph.parse(args.input, format=rdflib_util.guess_format(args.input))

    # RO:0003301 = "is model of"
    model_of = URIRef('http://purl.obolibrary.org/obo/RO_0003301')
    models = graph.subject_objects(model_of)
    model_len = len(list(models))
    if model_len < EXPECTED_PAIRS:
        logger.error("Not enough model_of predicates in graph:"
                     " {} expected {} check omia log for"
                     " warnings".format(model_len, EXPECTED_PAIRS))
        exit(1)

    # Spot-check a single known association survives the build.
    omim_diseases = graph.objects(
        subject=URIRef('https://monarchinitiative.org/model/OMIA-breed:18'),
        predicate=model_of
    )
    if list(omim_diseases) != [URIRef('http://purl.obolibrary.org/obo/OMIM_275220')]:
        logger.error("Missing breed to omim triple for {}".format('OMIA-breed:18'))
        exit(1)

    logger.info("PASSED")
class TestSparqlJsonResults(unittest.TestCase):
    """Check that SPARQL query results serialize to the expected JSON."""

    def setUp(self):
        self.graph = ConjunctiveGraph()
        self.graph.parse(StringIO(test_data), format="n3")

    def _query_result_contains(self, query, correct):
        results = self.graph.query(query)
        # serialize() returns bytes; decode before feeding json.loads.
        result_json = json.loads(results.serialize(format='json').decode('utf-8'))
        msg = "Expected:\n %s \n- to contain:\n%s" % (result_json, correct)
        self.assertEqual(sorted(result_json["head"], key=repr),
                         sorted(correct["head"], key=repr), msg)
        # Sort by repr - rather a hack, but currently the best way I can think
        # of to ensure the results are in the same order.
        result_bindings = sorted(result_json["results"]["bindings"], key=repr)
        correct_bindings = sorted(correct["results"]["bindings"], key=repr)
        msg = "Expected:\n %s \n- to contain:\n%s" % (result_bindings, correct_bindings)
        self.failUnless(result_bindings==correct_bindings, msg)

    # Each test method is generated from a named fixture pair.
    testOptional = make_method('optional')
    testWildcard = make_method('wildcard')
    testUnion = make_method('union')
    testUnion3 = make_method('union3')
    testSelectVars = make_method('select_vars')
    testWildcardVars = make_method('wildcard_vars')
class TestSearchAnnotations(unittest.TestCase):
    """Post annotations to each endpoint, then search for every involved URI."""

    def test_search_for_uri(self):
        for url in annotation_urls:
            # Build a resource + annotation graph with random URIs.
            g, target, selector = specific_resource(
                self.canvas,
                res=URIRef(uuid.uuid4()),
                selector=URIRef(uuid.uuid4()))
            g, anno, body, target = annotation(
                g=g,
                anno=URIRef(uuid.uuid4()),
                target=target,
                body=URIRef(uuid.uuid4()))

            # 201: annotation stored.
            response = self.client.post(url, data=g.serialize(),
                                        content_type="text/xml")
            self.assertEqual(response.status_code, 201)

            # Every URI touched by the annotation must be findable.
            for uri in [anno, body, target, selector, self.canvas]:
                response = self.client.get(url, {'uri': uri})
                self.assertEqual(response.status_code, 200)
                validate_return_content(self, response, g)

    def tearDown(self):
        pass

    def setUp(self):
        url = reverse('semantic_store_annotations', kwargs=dict())
        self.client = Client()
        # Load the shared fixture into the project RDF store and pick the
        # first Canvas resource as the test subject.
        fixture_filename = os.path.join(
            os.path.dirname(os.path.realpath(__file__)),
            "semantic_store_test_fixture.xml")
        self.g = ConjunctiveGraph(rdfstore.rdfstore(),
                                  identifier=rdfstore.default_identifier)
        self.g.parse(fixture_filename)
        canvases = self.g.subjects(URIRef(NS.rdf['type']),
                                   URIRef(NS.dms['Canvas']))
        self.canvas = list(canvases)[0]
class TestIssue06(unittest.TestCase):
    """Regression test for issue 6: date-typed FILTER comparisons in UNION."""
    debug = False
    sparql = True

    def setUp(self):
        self.graph = ConjunctiveGraph()
        self.graph.parse(data=testgraph, publicID="testgraph")

    def test_issue_6(self):
        # The query must execute without error; no result assertion is made.
        query = """
        PREFIX ex: <http://temp.example.org/terms/>
        PREFIX loc: <http://simile.mit.edu/2005/05/ontologies/location#>
        PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
        PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
        SELECT * WHERE {
          {?event ex:date ?date .
           FILTER (xsd:date(?date) >= xsd:date("2007-12-31") && xsd:date(?date) <= xsd:date("2008-01-11"))}
          UNION
          {?event ex:starts ?start; ex:finishes ?end .
           FILTER (xsd:date(?start) >= xsd:date("2008-01-02") && xsd:date(?end) <= xsd:date("2008-01-10"))}
        }
        ORDER BY ?event
        """
        self.graph.query(query, DEBUG=False)
def parse_n3(term_n3):
    """Parse a single n3 term by wrapping it in a throwaway statement.

    Quick-and-dirty: builds a one-triple document whose object is the term,
    parses it, and returns the object of the only triple.
    """
    prepstr = ("@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .\n"
               "<urn:no_use> <urn:no_use> %s.\n" % term_n3)
    scratch = ConjunctiveGraph()
    scratch.parse(data=prepstr, format='n3')
    triples = list(scratch.triples((None, None, None)))
    return triples[0][2]
def check(kws):
    """Parse with the supplied keyword arguments; every context's identifier
    must be an rdflib Identifier."""
    cg = ConjunctiveGraph()
    cg.parse(**kws)
    identifiers = [g.identifier for g in cg.contexts()]
    for gid in identifiers:
        assert isinstance(gid, Identifier)
def parse_n3(term_n3):
    """Return the rdflib term for an n3-encoded value (quick hack: embeds the
    value as the object of a dummy triple and parses the result)."""
    document = (
        "@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .\n"
        "<urn:no_use> <urn:no_use> %s.\n" % term_n3
    )
    g = ConjunctiveGraph()
    g.parse(data=document, format='n3')
    # The only triple's object is the parsed term.
    return list(g.triples((None, None, None)))[0][2]
class TestSparqlJsonResults(unittest.TestCase):
    """Check that SPARQL query results serialize to the expected JSON."""

    def setUp(self):
        self.graph = ConjunctiveGraph()
        self.graph.parse(StringIO(test_data), format="n3")

    def _query_result_contains(self, query, correct):
        results = self.graph.query(query)
        # serialize() returns bytes; decode before feeding json.loads.
        result_json = json.loads(
            results.serialize(format='json').decode('utf-8'))
        msg = "Expected:\n %s \n- to contain:\n%s" % (result_json, correct)
        self.assertEqual(sorted(result_json["head"], key=repr),
                         sorted(correct["head"], key=repr), msg)
        # Sort by repr - rather a hack, but currently the best way I can think
        # of to ensure the results are in the same order.
        result_bindings = sorted(result_json["results"]["bindings"], key=repr)
        correct_bindings = sorted(correct["results"]["bindings"], key=repr)
        msg = "Expected:\n %s \n- to contain:\n%s" % (result_bindings,
                                                      correct_bindings)
        self.failUnless(result_bindings == correct_bindings, msg)

    # Each test method is generated from a named fixture pair.
    testOptional = make_method('optional')
    testWildcard = make_method('wildcard')
    testUnion = make_method('union')
    testUnion3 = make_method('union3')
    testSelectVars = make_method('select_vars')
    testWildcardVars = make_method('wildcard_vars')
def has_correct_hash(self, resource):
    """Re-hash the resource's RDF content and compare with its stored hash."""
    fmt = RdfUtils.get_format(resource.get_filename())
    graph = ConjunctiveGraph()
    graph.parse(data=resource.get_content(), format=fmt)
    quads = RdfUtils.get_quads(graph)
    expected = RdfHasher.make_hash(quads, resource.get_hashstr())
    return resource.get_hashstr() == expected
def has_correct_hash(self, resource):
    """True when hashing the resource's quads reproduces its stored hash."""
    rdf_format = RdfUtils.get_format(resource.get_filename())
    parsed = ConjunctiveGraph()
    parsed.parse(data=resource.get_content(), format=rdf_format)
    computed = RdfHasher.make_hash(RdfUtils.get_quads(parsed),
                                   resource.get_hashstr())
    return computed == resource.get_hashstr()
def testNG4j(self):
    # The NG4J test TriX file must parse without error.
    path = os.path.relpath(
        os.path.join(TEST_DIR, 'trix/ng4jtest.trix'), os.curdir)
    graph = ConjunctiveGraph()
    graph.parse(path, format="trix")
def test_turtle_namespace_prefixes(self):
    # Prefixes that start with a digit or underscore-digit (like "_9") are
    # not legal turtle prefixes; the serializer must not emit them.
    g = ConjunctiveGraph()
    n3 = \
        """
    @prefix _9: <http://data.linkedmdb.org/resource/movie/> .
    @prefix p_9: <urn:test:> .
    @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
    p_9:a p_9:b p_9:c .
    <http://data.linkedmdb.org/resource/director/1> a <http://data.linkedmdb.org/resource/movie/director>; rdfs:label "Cecil B. DeMille (Director)"; _9:director_name "Cecil B. DeMille" ."""

    g.parse(data=n3, format='n3')
    turtle = g.serialize(format="turtle")

    # Check round-tripping, just for kicks.
    g = ConjunctiveGraph()
    g.parse(data=turtle, format='turtle')

    # Shouldn't have got to here
    s = g.serialize(format="turtle")
    # The illegal "_9" prefix must not appear in the serialized output.
    self.assertTrue(b('@prefix _9') not in s)
def view(name=None, format=None, view=None):
    """Flask view dispatching on HTTP method: POST uploads files to an
    entity, DELETE removes its file, GET renders or serializes it.

    NOTE(review): closure over `self`; relies on request globals.
    """
    self.db.store.nsBindings = {}
    content_type = None
    if format is not None:
        if format in extensions:
            # Known extension: force the matching content type.
            content_type = extensions[format]
        else:
            # Unknown extension: treat it as part of the resource name.
            name = '.'.join([name, format])
    #argstring = '&'.join(["%s=%s"%(k,v) for k,v in request.args.iteritems(multi=True) if k != 'value'])
    if name is not None:
        #if len(argstring) > 0:
        #    name = name + "?" + argstring
        entity = self.NS.local[name]
    elif 'uri' in request.args:
        entity = URIRef(request.args['uri'])
    else:
        entity = self.NS.local.Home
    #print(request.method, 'view()', entity, view)
    if request.method == 'POST':
        print ("uploading file",entity)
        if len(request.files) == 0:
            flash('No file uploaded')
            return redirect(request.url)
        upload_type = rdflib.URIRef(request.form['upload_type'])
        self.add_files(entity,
                       [y for x, y in request.files.items(multi=True)],
                       upload_type=upload_type)
        # After upload, bounce to the entity's "about" page.
        url = "/about?%s" % urlencode(dict(uri=str(entity), view="view"))
        print ("redirecting to",url)
        return redirect(url)
    elif request.method == 'DELETE':
        self.delete_file(entity)
        return '', 204
    elif request.method == 'GET':
        resource = self.get_resource(entity)

        # 'view' is the default view
        fileid = resource.value(self.NS.whyis.hasFileID)
        if fileid is not None and 'view' not in request.args:
            # Entity is an archived file: serve it directly from the depot.
            print (resource.identifier, fileid)
            f = self.file_depot.get(fileid)
            fsa = FileServeApp(
                f, self.config["file_archive"].get("cache_max_age", 3600*24*7))
            return fsa

        if content_type is None:
            content_type = request.headers['Accept'] if 'Accept' in request.headers else 'text/turtle'
        #print entity
        # Negotiate the best matching data format for the Accept header.
        fmt = sadi.mimeparse.best_match(
            [mt for mt in list(dataFormats.keys()) if mt is not None],
            content_type)
        if 'view' in request.args or fmt in htmls:
            return render_view(resource)
        elif fmt in dataFormats:
            # Serialize a 'describe' view of the resource in the negotiated
            # format (render_view emits JSON-LD, re-serialized below).
            output_graph = ConjunctiveGraph()
            result, status, headers = render_view(resource, view='describe')
            output_graph.parse(data=result, format="json-ld")
            return output_graph.serialize(format=dataFormats[fmt]), 200, {'Content-Type': content_type}
        #elif 'view' in request.args or sadi.mimeparse.best_match(htmls, content_type) in htmls:
        else:
            return render_view(resource)
def testLimit2(self):
    # NOTE(review): Python 2 code (print statement, failUnless).
    graph = ConjunctiveGraph(plugin.get("IOMemory", Store)())
    graph.parse(StringIO(test_data2), format="n3")
    # LIMIT query must return exactly one row despite multiple candidates.
    results = list(graph.query(test_query2, DEBUG=True))
    print graph.query(test_query2).serialize(format="xml")
    self.failUnless(len(results) == 1)
    for title, price in results:
        self.failUnless(title in [Literal("Java Tutorial"),
                                  Literal("COBOL Tutorial")])
class TestEmptyBase(unittest.TestCase):
    """Parsing with an explicit publicID must resolve the empty base ref."""

    def setUp(self):
        self.graph = ConjunctiveGraph()
        self.graph.parse(StringIO(test_data), publicID=baseUri)

    def test_base_ref(self):
        # assertTrue replaces the long-deprecated failUnless alias
        # (removed in newer unittest versions).
        self.assertTrue(len(self.graph) == 1,
                        "There should be at least one statement in the graph")
        self.assertTrue((baseUri, RDF.type, FOAF.Document) in self.graph,
                        "There should be a triple with %s as the subject" % baseUri)
def testLimit2(self):
    # A LIMIT'ed query returns exactly one of the candidate titles.
    store = plugin.get('IOMemory', Store)()
    graph = ConjunctiveGraph(store)
    graph.parse(data=test_data2, format="n3")
    rows = list(graph.query(test_query2, DEBUG=False))
    self.assertEqual(len(rows), 1)
    expected_titles = [Literal("Java Tutorial"), Literal("COBOL Tutorial")]
    for title, price in rows:
        self.assertTrue(title in expected_titles)
def _get_graph():
    """Build an rdflib graph from the current Flask request body, using the
    Content-Type header to pick the parser. Returns an empty graph when the
    content type is not a supported data format."""
    inputGraph = ConjunctiveGraph()
    contentType = request.headers['Content-Type']
    # Default to utf8 when the request does not declare an encoding.
    encoding = 'utf8' if not request.content_encoding else request.content_encoding
    content = str(request.data, encoding)
    # Negotiate the best matching registered format for the content type.
    fmt = sadi.mimeparse.best_match(
        [mt for mt in list(dataFormats.keys()) if mt is not None], contentType)
    if fmt in dataFormats:
        inputGraph.parse(data=content, format=dataFormats[fmt])
    return inputGraph
def testLimit2(self):
    """LIMIT query returns a single row whose title is one of the fixtures."""
    memory_store = plugin.get('IOMemory', Store)()
    graph = ConjunctiveGraph(memory_store)
    graph.parse(data=test_data2, format="n3")

    result_rows = list(graph.query(test_query2, DEBUG=False))
    self.assertEqual(len(result_rows), 1)
    for title, price in result_rows:
        self.assertTrue(
            title in [Literal("Java Tutorial"), Literal("COBOL Tutorial")])
def testParse(self):
    # Remote n3 parsing smoke test; skipped when the network is unavailable.
    g = ConjunctiveGraph()
    try:
        g.parse(
            "http://groups.csail.mit.edu/dig/2005/09/rein/examples/troop42-policy.n3",
            format="n3")
    except URLError:
        from nose import SkipTest
        raise SkipTest(
            'No network to retrieve the information, skipping test')
def testBaseSerialize(self):
    # Serializing with a base URI must emit relative refs, and the output
    # must round-trip back to an identical graph.
    bob = URIRef('http://example.com/people/Bob')
    linda = URIRef('http://example.com/people/Linda')
    g = Graph()
    g.add((bob, URIRef('urn:knows'), linda))

    s = g.serialize(base='http://example.com/', format='n3')
    self.assertTrue(b('<people/Bob>') in s)

    g2 = ConjunctiveGraph()
    g2.parse(data=s, publicID='http://example.com/', format='n3')
    self.assertEqual(list(g), list(g2))
def testOrderBy(self):
    """ORDER BY must return names in alphabetical order."""
    graph = ConjunctiveGraph(plugin.get('IOMemory', Store)())
    graph.parse(StringIO(test_data), format="n3")
    results = graph.query(test_query)
    # assertTrue replaces the long-deprecated failUnless alias
    # (removed in newer unittest versions).
    self.assertTrue(False not in [
        r[0] == a for r, a in zip(results, ['Alice', 'Bob', 'Charlie', 'Dave'])
    ])
def test_html_decoded_entity_xhtml(self):
    # Decoded HTML entities (e.g. &eacute;) must survive RDFa extraction.
    if platform.system() == "Java":
        raise SkipTest('problem with HTML entities for html5lib in Jython')
    g = ConjunctiveGraph()
    g.parse(data=htmlentitydecode(html), format='rdfa')
    self.assertEqual(len(g), 1)
    # The title literal must contain the decoded e-acute character.
    self.assertEqual(g.value(URIRef("http://example.com"),
                             URIRef("http://purl.org/dc/terms/title")
                             ), u"Exampl\xe9")
class TestRelativeBase(unittest.TestCase):
    """Relative references must resolve against the supplied publicID."""

    def setUp(self):
        self.graph = ConjunctiveGraph()
        self.graph.parse(StringIO(test_data2), publicID=baseUri2)

    def test_base_ref(self):
        # assertTrue replaces the long-deprecated failUnless alias
        # (removed in newer unittest versions).
        self.assertTrue(len(self.graph) == 1,
                        "There should be at least one statement in the graph")
        resolvedBase = URIRef('http://example.com/baz')
        self.assertTrue((resolvedBase, RDF.type, FOAF.Document) in self.graph,
                        "There should be a triple with %s as the subject" % resolvedBase)
def testBaseSerialize(self):
    """Base-relative serialization round-trips to an identical graph."""
    source = Graph()
    source.add((URIRef('http://example.com/people/Bob'),
                URIRef('urn:knows'),
                URIRef('http://example.com/people/Linda')))

    serialized = source.serialize(base='http://example.com/', format='n3')
    # The base must have been stripped from subject URIs.
    self.assertTrue(b('<people/Bob>') in serialized)

    round_trip = ConjunctiveGraph()
    round_trip.parse(data=serialized, publicID='http://example.com/',
                     format='n3')
    self.assertEqual(list(source), list(round_trip))
def get_properties_from_input(file, input_format):
    """Return the set of distinct predicate URIs used in the input.

    :param file: path or file-like object to parse.
    :param input_format: rdflib parser format name.
    :return: set of predicate terms.
    """
    input_graph = ConjunctiveGraph()
    input_graph.parse(file, format=input_format)
    # Build the set directly instead of accumulating a list and converting
    # afterwards (same result, no redundant intermediate list).
    return {predicate for predicate in input_graph.predicates()}
def get_properties_from_input(file, input_format):
    """Parse the input and return the distinct predicate terms as a set."""
    graph = ConjunctiveGraph()
    graph.parse(file, format=input_format)
    # Set comprehension collapses duplicate predicates in one pass.
    return {predicate for predicate in graph.predicates()}
def test_url(self):
    """Remote RDFa extraction must yield exactly 77 triples."""
    if self.html5lib_installed():
        try:
            g = ConjunctiveGraph()
            g.parse(location='http://oreilly.com/catalog/9780596516499/',
                    format='rdfa')
            # BUG FIX: assertTrue(len(g), 77) passed 77 as the failure
            # *message*, so the assertion was vacuous for any non-empty
            # graph. assertEqual actually compares the triple count.
            self.assertEqual(len(g), 77)
        except URLError:
            from nose import SkipTest
            raise SkipTest('No networking, test skipped')
def test_url(self):
    """Remote RDFa extraction must yield exactly 77 triples."""
    if self.html5lib_installed():
        try:
            g = ConjunctiveGraph()
            g.parse(location='http://oreilly.com/catalog/9780596516499/',
                    format='rdfa')
            # BUG FIX: assertTrue(len(g), 77) passed 77 as the failure
            # *message*, so the assertion was vacuous for any non-empty
            # graph. assertEqual actually compares the triple count.
            self.assertEqual(len(g), 77)
        except URLError:
            from nose import SkipTest
            raise SkipTest('No networking, test skipped')
def testLimit2(self):
    # NOTE(review): Python 2 code (print statement, failUnless).
    graph = ConjunctiveGraph(plugin.get('IOMemory', Store)())
    graph.parse(StringIO(test_data2), format="n3")
    # LIMIT query must return exactly one row despite multiple candidates.
    results = list(graph.query(test_query2, DEBUG=True))
    print graph.query(test_query2).serialize(format='xml')
    self.failUnless(len(results) == 1)
    for title, price in results:
        self.failUnless(
            title in [Literal("Java Tutorial"), Literal("COBOL Tutorial")])
def testParse(self):
    # Remote n3 parsing smoke test; skipped when the network is down.
    graph = ConjunctiveGraph()
    source = "http://groups.csail.mit.edu/dig/2005/09/rein/examples/troop42-policy.n3"
    try:
        graph.parse(source, format="n3")
    except URLError:
        pytest.skip("No network to retrieve the information, skipping test")
class TestEmptyBase(unittest.TestCase):
    """Parsing with an explicit publicID must resolve the empty base ref."""

    def setUp(self):
        self.graph = ConjunctiveGraph()
        self.graph.parse(StringIO(test_data), publicID=baseUri)

    def test_base_ref(self):
        self.assertTrue(
            len(self.graph) == 1,
            "There should be at least one statement in the graph")
        expected_triple = (baseUri, RDF.type, FOAF.Document)
        self.assertTrue(
            expected_triple in self.graph,
            "There should be a triple with %s as the subject" % baseUri)
class TestRelativeBase(unittest.TestCase):
    """Relative references must resolve against the supplied publicID."""

    def setUp(self):
        self.graph = ConjunctiveGraph()
        self.graph.parse(StringIO(test_data2), publicID=baseUri2)

    def test_base_ref(self):
        self.assertTrue(
            len(self.graph) == 1,
            "There should be at least one statement in the graph")
        resolved = URIRef('http://example.com/baz')
        expected_triple = (resolved, RDF.type, FOAF.Document)
        self.assertTrue(
            expected_triple in self.graph,
            "There should be a triple with %s as the subject" % resolved)
def extract_rdfa(url, outfile=sys.stdout, parser="rdfa", serializer="n3"):
    """
    Extract RDFa from a given URL

    Parsers are listed at https://rdflib.readthedocs.org/en/4.1.0/plugin_parsers.html
    Serializers are listed at https://rdflib.readthedocs.org/en/4.1.0/plugin_serializers.html
    """
    # Removed the unused local `store = None` (assigned and never read).
    graph = ConjunctiveGraph()
    graph.parse(url, format=parser)
    graph.serialize(destination=outfile, format=serializer)
def build_network2(rules):
    """Build a FuXi Rete network from an n3 rule document and seed it with
    the base facts extracted from the same document."""
    graph = ConjunctiveGraph()
    graph.parse(data=rules, publicID="test", format="n3")
    # Rule store with a custom string-startsWith builtin registered.
    rule_store, rule_graph = SetupRuleStore(
        StringIO(rules),
        additionalBuiltins={STRING_NS.startsWith: StringStartsWith}
    )
    # Imported lazily, presumably to avoid a costly/circular module import
    # at load time -- TODO confirm.
    from FuXi.Rete.Network import ReteNetwork
    network = ReteNetwork(rule_store)
    network.feedFactsToAdd(generateTokenSet(extractBaseFacts(graph)))
    return network
class TestSparqlJsonResults(unittest.TestCase):
    """The fixture query's first binding must be the literal 'Alice'."""

    sparql = True

    def setUp(self):
        self.graph = ConjunctiveGraph()
        self.graph.parse(StringIO(test_data), format="n3")

    def test_base_ref(self):
        rt = list(self.graph.query(test_query))
        # assertTrue replaces the long-deprecated failUnless alias
        # (removed in newer unittest versions).
        self.assertTrue(rt[0][0] == Literal("Alice"),
                        "Expected:\n 'Alice' \nGot:\n %s" % rt)
def extract_rdfa(url, outfile=sys.stdout, parser="rdfa", serializer="n3"):
    """
    Extract RDFa from a given URL

    Parsers are listed at https://rdflib.readthedocs.org/en/4.1.0/plugin_parsers.html
    Serializers are listed at https://rdflib.readthedocs.org/en/4.1.0/plugin_serializers.html
    """
    # Removed the unused local `store = None` (assigned and never read).
    graph = ConjunctiveGraph()
    graph.parse(url, format=parser)
    graph.serialize(destination=outfile, format=serializer)
def testModel(self):
    """The n3 fixture must yield 3 quoted-graph subjects and 13 contexts."""
    g = ConjunctiveGraph()
    g.parse(data=test_data, format="n3")
    i = 0
    # Count statements whose subject is itself a (quoted) graph.
    for s, p, o in g:
        if isinstance(s, Graph):
            i += 1
    # assertEqual replaces the long-deprecated assertEquals alias
    # (removed in newer unittest versions).
    self.assertEqual(i, 3)
    self.assertEqual(len(list(g.contexts())), 13)
    g.close()
def transform(args):
    """Parse the RDF file named in args[0], then transform it to files in
    its parent directory using args[1] as the base URI."""
    filename = args[0]
    baseuristr = args[1]
    with open(filename, "r") as f:
        rdfFormat = RdfUtils.get_format(filename)
        cg = ConjunctiveGraph()
        cg.parse(data=f.read(), format=rdfFormat)
        baseuri = URIRef(baseuristr)
        # BUG FIX: the original computed the output directory from
        # `str(file)` -- `file` here is the Python builtin, not the input
        # path -- so outdir never pointed at the input file's parent.
        outdir = os.path.abspath(os.path.join(filename, os.pardir))
        RdfTransformer.transform_to_file(cg, baseuri, outdir, filename)
def testModel(self):
    """The n3 fixture must yield 3 quoted-graph subjects and 13 contexts."""
    g = ConjunctiveGraph()
    g.parse(data=test_data, format="n3")
    i = 0
    # Count statements whose subject is itself a (quoted) graph.
    for s, p, o in g:
        if isinstance(s, Graph):
            i += 1
    # assertEqual replaces the long-deprecated assertEquals alias
    # (removed in newer unittest versions).
    self.assertEqual(i, 3)
    self.assertEqual(len(list(g.contexts())), 13)
    g.close()
def testAperture(self):
    """The aperture TriX fixture must contain 24 triples in 4 contexts."""
    g = ConjunctiveGraph()
    g.parse("test/trix/aperture.trix", format="trix")
    c = list(g.contexts())

    #print list(g.contexts())
    t = sum(map(len, g.contexts()))

    # assertEqual replaces the long-deprecated assertEquals alias
    # (removed in newer unittest versions).
    self.assertEqual(t, 24)
    self.assertEqual(len(c), 4)
def test_html_entity_xhtml(self):
    # Skip on interpreters whose HTML parsing mangles named entities.
    if sys.version_info[0] == 3:
        raise SkipTest('minidom parser strips HTML entities in Python 3.2')
    if platform.system() == "Java":
        raise SkipTest('problem with HTML entities for html5lib in Jython')
    g = ConjunctiveGraph()
    # RDFa parsing emits UserWarnings we do not care about here.
    warnings.simplefilter('ignore', UserWarning)
    g.parse(data=html, format='rdfa')
    self.assertEqual(len(g), 1)
    # The undecoded entity is stripped, leaving just "Exampl".
    self.assertTrue(
        g.value(URIRef("http://example.com"),
                URIRef("http://purl.org/dc/terms/title")).eq(u"Exampl"))
def testBaseSerialize(self):
    """Base-relative n3 output contains relative refs and round-trips."""
    bob = URIRef("http://example.com/people/Bob")
    knows = URIRef("urn:knows")
    linda = URIRef("http://example.com/people/Linda")

    graph = Graph()
    graph.add((bob, knows, linda))
    serialized = graph.serialize(base="http://example.com/", format="n3")
    self.assertTrue("<people/Bob>".encode("latin-1") in serialized)

    round_trip = ConjunctiveGraph()
    round_trip.parse(data=serialized, publicID="http://example.com/",
                     format="n3")
    self.assertEqual(list(graph), list(round_trip))
def test_html_entity_xhtml(self):
    # Skip on interpreters whose HTML parsing mangles named entities
    # (Python 3, pre-2.5 Python 2, and Jython).
    if sys.version_info[0] == 3 or sys.version_info[:2] < (2,5):
        raise SkipTest('minidom parser strips HTML entities in Python 3.2')
    if platform.system() == "Java":
        raise SkipTest('problem with HTML entities for html5lib in Jython')
    g = ConjunctiveGraph()
    # RDFa parsing emits UserWarnings we do not care about here.
    warnings.simplefilter('ignore', UserWarning)
    g.parse(data=html, format='rdfa')
    self.assertEqual(len(g), 1)
    # The undecoded entity is stripped, leaving just "Exampl".
    self.assertTrue(g.value(URIRef("http://example.com"),
                            URIRef("http://purl.org/dc/terms/title")
                            ).eq(
        u"Exampl"))