Example #1
	def sample_query(self, querystring):
	    print "Query enter"
	    processor = plugin.get('sparql', rdflib.query.Processor)(self.graph)
	    result = plugin.get('sparql', rdflib.query.Result)
	    
	    ns = dict(self.graph.namespace_manager.namespaces())
	    return result(processor.query(querystring, initNs=ns))
Example #2
    def query(self, strOrQuery, initBindings={}, initNs={}, DEBUG=False,
              dataSetBase=None,
              processor="sparql",
              extensionFunctions={sparql.DESCRIBE:describe}):
        """
        Executes a SPARQL query (eventually will support Versa queries with same method) against this Graph
        strOrQuery - Is either a string consisting of the SPARQL query or an instance of rdflib.sparql.bison.Query.Query
        initBindings - A mapping from a Variable to an RDFLib term (used as initial bindings for SPARQL query)
        initNs - A mapping from a namespace prefix to an instance of rdflib.Namespace (used for SPARQL query)
        DEBUG - A boolean flag passed on to the SPARQL parser and evaluation engine
        processor - The kind of RDF query (must be 'sparql' until Versa is ported)
        """
        assert processor == 'sparql', "SPARQL is currently the only supported RDF query language"
        p = plugin.get(processor, sparql.Processor)(self)
        return plugin.get('SPARQLQueryResult', QueryResult)(p.query(strOrQuery,
                                                                    initBindings,
                                                                    initNs,
                                                                    DEBUG,
                                                                    dataSetBase,
                                                                    extensionFunctions))
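
A minimal usage sketch for the signature documented above, written against a current rdflib release rather than this project's vendored API; the namespace, triple, and query are illustrative:

from rdflib import Graph, Literal, Namespace, URIRef, Variable

FOAF = Namespace("http://xmlns.com/foaf/0.1/")

g = Graph()
g.add((URIRef("http://example.org/alice"), FOAF.name, Literal("Alice")))

# initNs supplies prefix mappings; initBindings pre-binds query variables
rows = g.query(
    "SELECT ?name WHERE { ?person foaf:name ?name }",
    initNs={"foaf": FOAF},
    initBindings={Variable("person"): URIRef("http://example.org/alice")},
)
for row in rows:
    print(row.name)  # -> Alice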
Example #3
    def query(
        self,
        strOrQuery,
        initBindings={},
        initNs={},
        DEBUG=False,
        PARSE_DEBUG=False,
        dataSetBase=None,
        processor="sparql",
        extensionFunctions={sparql.DESCRIBE: describe},
    ):
        """
        Executes a SPARQL query (eventually will support Versa queries with
        same method) against this Graph.

         - `strOrQuery`: Either a string consisting of the SPARQL query or
         	 an instance of rdflib.sparql.bison.Query.Query
         - `initBindings`: A mapping from a Variable to an RDFLib term (used
         	 as initial bindings for SPARQL query)
         - `initNs`: A mapping from a namespace prefix to an instance of
         	 rdflib.Namespace (used for SPARQL query)
         - `DEBUG`: A boolean flag passed on to the SPARQL parser and
         	 evaluation engine
         - `processor`: The kind of RDF query (must be 'sparql' until Versa
         	 is ported)
         - `PARSE_DEBUG`: A boolean flag indicating whether to emit debug
         	 output from the experimental pyparsing SPARQL parser
        """
        assert processor == "sparql", "SPARQL is currently the only supported RDF query language"
        p = plugin.get(processor, sparql.Processor)(self)
        return plugin.get("SPARQLQueryResult", query.result.QueryResult)(
            p.query(strOrQuery, initBindings, initNs, DEBUG, PARSE_DEBUG, dataSetBase, extensionFunctions)
        )
Example #4
def __query(self, query_object, processor='sparql', result='sparql',
        initBindings={}, initNs={}):
    if not isinstance(processor, query.Processor):
        processor = plugin.get(processor, query.Processor)(self)
    if not isinstance(result, query.Result):
        result = plugin.get(result, query.Result)
    return result(processor.query(query_object, initBindings, initNs))
Example #5
 def query(self, query_object, processor='sparql', result='sparql'):
     """
     """
     if not isinstance(processor, query.Processor):
         processor = plugin.get(processor, query.Processor)(self)
     if not isinstance(result, query.Result):
         result = plugin.get(result, query.Result)
     return result(processor.query(query_object))
Example #6
 def __init__(self, configuration, db, create):
     self.configuration = configuration
     self.create = create
     self.db = db
     if db:
         self.store = plugin.get(self.storeType, store.Store)(db)
     else:
         self.store = plugin.get(self.storeType, store.Store)()
     self.store.open(configuration, create)
Example #7
    def query(self, query_object, processor='sparql', result='sparql', initNs={}, initBindings={}, use_store_provided=True, **kwargs):
        """
        """

        

        if hasattr(self.store, "query") and use_store_provided:
            return self.store.query(self,query_object, initNs, initBindings, **kwargs)

        if not isinstance(result, query.Result):
            result = plugin.get(result, query.Result)
        if not isinstance(processor, query.Processor):
            processor = plugin.get(processor, query.Processor)(self)

        return result(processor.query(query_object, initBindings, initNs, **kwargs))
Example #8
def main():

    inFileTube = 'DATA/tram_validated.csv'
    outFileTube = "DATA/tram_dirty" + ".ttl"

    csvTubeS = readCsv(inFileTube)

    next(csvTubeS, None)

    tubeS_store = plugin.get('IOMemory', Store)()
    tubeS_g = Graph(tubeS_store)

    prefixes = definePrefixes()

    print('Binding Prefixes')
    bindingPrefixes(tubeS_g, prefixes)
    #bindingPrefixes(tubeT_graph,prefixes)

    print('Creating graph-TubeS...')
    for row in csvTubeS:
        lstData = getTubeSData(row)
        createTubeSGraph(lstData, tubeS_g)

    tubeS_g.serialize(outFileTube, format='turtle')

    print('DONE!')
Example #9
def registerplugins():
    """
    If rdfextras is installed with setuptools, all plugins are registered
    through entry_points. This is strongly recommended. 

    If only distutils is available, the plugins must be registed manually
    This method will register all rdfextras plugins

    """
    from rdflib import plugin
    from rdflib.query import Processor

    try:
        x=plugin.get('sparql',Processor)
        return # plugins already registered
    except:
        pass # must register plugins    

    from rdflib.query import ResultParser, ResultSerializer, Result

    plugin.register('sparql', Result,
        'rdfextras.sparql.query', 'SPARQLQueryResult')
    plugin.register('sparql', Processor,
        'rdfextras.sparql.processor', 'Processor')

    plugin.register('html', ResultSerializer,
        'rdfextras.sparql.results.htmlresults', 'HTMLResultSerializer')
    plugin.register('xml', ResultSerializer,
        'rdfextras.sparql.results.xmlresults', 'XMLResultSerializer')
    plugin.register('json', ResultSerializer,
        'rdfextras.sparql.results.jsonresults', 'JSONResultSerializer')
    plugin.register('xml', ResultParser,
        'rdfextras.sparql.results.xmlresults', 'XMLResultParser')
    plugin.register('json', ResultParser,
        'rdfextras.sparql.results.jsonresults', 'JSONResultParser')
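
A sketch of the intended call pattern under a distutils-only install, assuming registerplugins is exposed at package level (as in rdfextras' __init__) and rdflib 3.x is importable:

from rdflib import Graph
import rdfextras

rdfextras.registerplugins()  # no-op if entry_points already registered everything
g = Graph()
g.parse(data="<http://ex/a> <http://ex/b> <http://ex/c> .", format="nt")
for row in g.query("SELECT ?s WHERE { ?s ?p ?o }"):
    print(row)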
Example #10
    def serialize(self, destination=None, format="xml", base=None, encoding=None, **args):
        """Serialize the Graph to destination

        If destination is None serialize method returns the serialization as a
        string. Format defaults to xml (AKA rdf/xml).
        """
        serializer = plugin.get(format, Serializer)(self)
        if destination is None:
            stream = StringIO()
            serializer.serialize(stream, base=base, encoding=encoding, **args)
            return stream.getvalue()
        if hasattr(destination, "write"):
            stream = destination
            serializer.serialize(stream, base=base, encoding=encoding, **args)
        else:
            location = destination
            scheme, netloc, path, params, query, fragment = urlparse(location)
            if netloc!="":
                print "WARNING: not saving as location is not a local file reference"
                return
            name = tempfile.mktemp()
            stream = open(name, 'wb')
            serializer.serialize(stream, base=base, encoding=encoding, **args)
            stream.close()
            if hasattr(shutil,"move"):
                shutil.move(name, path)
            else:
                shutil.copy(name, path)
                os.remove(name)
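
The three branches above map onto three call styles. A short sketch, hedged on rdflib version (older releases return the serialization directly when destination is None; file names are illustrative):

from rdflib import Graph

g = Graph()
g.parse(data="<http://ex/a> <http://ex/b> <http://ex/c> .", format="nt")

blob = g.serialize(format="xml")                 # destination=None: returned directly
with open("out.nt", "wb") as f:
    g.serialize(destination=f, format="nt")      # file-like: written via .write()
g.serialize(destination="out.ttl", format="turtle")  # path: moved into place via a temp file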
Example #11
def registerplugins():
    """
    Register plugins.

    If setuptools is used to install rdflib-sqlalchemy, all the provided
    plugins are registered through entry_points. This is strongly recommended.

    However, if only distutils is available, then the plugins must be
    registered manually.

    This method will register all of the rdflib-sqlalchemy Store plugins.

    """
    from rdflib.store import Store
    from rdflib import plugin

    try:
        x = plugin.get("SQLAlchemy", Store)
        del x
        return  # plugins already registered
    except Exception:
        pass  # must register plugins

    # Register the plugins ...

    plugin.register(
        "SQLAlchemy",
        Store,
        "rdflib_sqlalchemy.store",
        "SQLAlchemy",
    )
Example #12
def test_concurrent2(): 
    dns = Namespace(u"http://www.example.com/")

    store = plugin.get("IOMemory", Store)()
    g1 = Graph(store=store)
    g2 = Graph(store=store)

    g1.add((dns.Name, dns.prop, Literal(u"test")))
    g1.add((dns.Name, dns.prop, Literal(u"test2")))
    g1.add((dns.Name, dns.prop, Literal(u"test3")))

    n = len(g1)
    i = 0

    for t in g1.triples((None, None, None)):
        i+=1
        g2.add(t)
        # next line causes problems because it adds a new Subject that needs
        # to be indexed  in __subjectIndex dictionary in IOMemory Store.
        # which invalidates the iterator used to iterate over g1
        g2.add((dns.Name1, dns.prop1, Literal(u"test")))
        g2.add((dns.Name1, dns.prop, Literal(u"test")))
        g2.add((dns.Name, dns.prop, Literal(u"test4")))

    assert i == n
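
The invalidation described in the comment can be avoided by snapshotting the iteration before mutating the shared store; a defensive sketch (using the modern "Memory" store name, which is "IOMemory" on older rdflib):

from rdflib import Graph, Literal, Namespace, plugin
from rdflib.store import Store

dns = Namespace("http://www.example.com/")
store = plugin.get("Memory", Store)()  # "IOMemory" on older rdflib
g1, g2 = Graph(store=store), Graph(store=store)
g1.add((dns.Name, dns.prop, Literal("test")))

# list() materializes the triples up front, so the add() calls below
# can no longer invalidate the iterator over g1
for t in list(g1.triples((None, None, None))):
    g2.add(t)
    g2.add((dns.Name1, dns.prop, Literal("test")))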
Example #13
 def setUp(self):
     store = plugin.get('MySQL', Store)(identifier="rdflib_test")
     store.destroy(self.path)
     store.open(self.path, create=True)
     self.graph = ConjunctiveGraph(store)
     self.graph.destroy(self.path)
     self.graph.open(self.path, create=self.create)
Example #14
 def setUp(self):
     self.store = plugin.get('IOMemory', Store)()
     self.graph1 = Graph(self.store,identifier=URIRef('http://example.org/foaf/aliceFoaf'))
     self.graph1.parse(data=test_graph_a, format="n3")
     self.graph2 = Graph(self.store,identifier=URIRef('http://example.org/foaf/bobFoaf'))
     self.graph2.parse(data=test_graph_b, format="n3")
     self.unionGraph = ReadOnlyGraphAggregate(graphs=[self.graph1,self.graph2],store=self.store)
Example #15
def parse_and_serialize(input_files,
                        input_format,
                        guess,
                        outfile,
                        output_format,
                        ns_bindings,
                        store_conn=STORE_CONNECTION,
                        store_type=STORE_TYPE):

    store = plugin.get(store_type, Store)()
    store.open(store_conn)
    graph = Graph(store)

    for prefix, uri in ns_bindings.items():
        graph.namespace_manager.bind(prefix, uri, override=False)

    for fpath in input_files:
        use_format, kws = _format_and_kws(input_format)
        if fpath == '-':
            fpath = sys.stdin
        elif not input_format and guess:
            use_format = guess_format(fpath) or DEFAULT_INPUT_FORMAT
        graph.parse(fpath, format=use_format, **kws)

    if outfile:
        output_format, kws = _format_and_kws(output_format)
        graph.serialize(destination=outfile,
                        format=output_format,
                        base=None,
                        **kws)
    store.rollback()
Example #16
def parse_and_serialize(input_files,
                        input_format,
                        guess,
                        outfile,
                        output_format,
                        ns_bindings,
                        store_conn="",
                        store_type=None):

    if store_type:
        store = plugin.get(store_type, Store)()
        store.open(store_conn)
        graph = ConjunctiveGraph(store)
    else:
        store = None
        graph = ConjunctiveGraph()

    for prefix, uri in ns_bindings.items():
        graph.namespace_manager.bind(prefix, uri, override=False)

    for fpath in input_files:
        use_format, kws = _format_and_kws(input_format)
        if fpath == '-':
            fpath = sys.stdin
        elif not input_format and guess:
            use_format = guess_format(fpath) or DEFAULT_INPUT_FORMAT
        graph.parse(fpath, format=use_format, **kws)

    if outfile:
        output_format, kws = _format_and_kws(output_format)
        kws.setdefault('base', None)
        graph.serialize(destination=outfile, format=output_format, **kws)

    if store:
        store.rollback()
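
A hypothetical invocation of the helper above (file names illustrative), merging two inputs with per-file format guessing:

parse_and_serialize(
    input_files=['a.ttl', 'b.ttl'],
    input_format=None,   # let guess_format() decide per file
    guess=True,
    outfile='merged.rdf',
    output_format='xml',
    ns_bindings={'ex': 'http://example.org/'},
)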
Example #17
def get_rdflib_serializer(name, media_type, plugin_name):
    rdflib_serializer = plugin.get(plugin_name, Serializer)
    return type(name,
                (RDFLibSerializer,),
                {'plugin_name': plugin_name,
                 'media_type': media_type,
                 'rdflib_serializer': rdflib_serializer})
Example #18
def main():
   # root = tk.Tk()
    #root.withdraw()
    #inFile = filedialog.askopenfilename()
    pathf="/Users/patrick/3cixty/IN/RM/"
    inFile = pathf+"bus-stops-10-06-15.csv"
    outFile=pathf+"bus.ttl"
    csv=readCsv(inFile)
    next(csv, None)  #FILE WITH HEADERS

    store = plugin.get('IOMemory', Store)()
    g = Graph(store)
    graph = ConjunctiveGraph(store)
    prefixes=definePrefixes()
    print('Binding Prefixes')
    bindingPrefixes(graph,prefixes)
    print('Creating graph...')

    for row in csv:
        lstData = createRDF(row)
        createGraph(lstData, g)
    g.serialize(outFile, format='turtle')
    nzip = pathf+time.strftime("%Y-%m-%d")+'.zip'
    zf = zipfile.ZipFile(nzip, mode='w')
    try:
        print ('Creating zip file...')
        zf.write(outFile)
    finally:
        zf.close()
        print ('DONE!')
Example #19
 def __init__(self):
     from django.conf import settings
     store = plugin.get("SQLAlchemy", Store)(identifier='demo')
     graph = Graph(store, identifier='demo')
     graph.namespace_manager = ns_mgr
     graph.open(Literal("sqlite:///" + settings.BASE_DIR + "demo.db"), create=True)
     self.graph = graph
Example #20
def testAggregateSPARQL():
    memStore = plugin.get('IOMemory', Store)()
    graph1 = Graph(memStore, URIRef("graph1"))
    graph2 = Graph(memStore, URIRef("graph2"))
    graph3 = Graph(memStore, URIRef("graph3"))

    for n3Str, graph in [(testGraph1N3, graph1), (testGraph2N3, graph2),
                         (testGraph3N3, graph3)]:
        graph.parse(StringIO(n3Str), format='n3')
    print('-------------------testAggregateSPARQL()----------------------')
    print(RDFS.RDFSNS)
    print('---------------------------------------------------------------')
    graph4 = Graph(memStore, RDFS.RDFSNS)
    graph4.parse(RDFS.RDFSNS)

    #print graph4.serialize()

    G = ConjunctiveGraph(memStore)
    rt = G.query(sparqlQ)
    print('-------------------G.query(sparqlQ)----------------------')
    #print rt.serialize(format='xml')
    print('---------------------------------------------------------------')

    assert len(rt) > 1
    #print rt.serialize(format='xml')
    LOG_NS = Namespace(u'http://www.w3.org/2000/10/swap/log#')
    rt = G.query(sparqlQ2, initBindings={u'?graph': URIRef("graph3")})

    #print rt.serialize(format='json')
    assert rt.serialize('python')[0] == LOG_NS.N3Document, str(rt)
Example #21
def main(odgifile, ttl, base, syntax):
    plugin.register('OdgiStore', Store, 'spodgi.OdgiStore', 'OdgiStore')
    store = plugin.get('OdgiStore', Store)(base=base)
    spodgi = Graph(store=store)
    spodgi.open(odgifile, create=False)
    res = spodgi.serialize(ttl, syntax)
    spodgi.close()
Example #22
def test_sqlalchemy_obj_language():
    logger.info(f'Python version: {python_version}')
    logger.info(f'RDFLib version: {rdflib_version}')
    logger.info(f'RDFLib-SQLAlchemy version: {rdflib_sqlalchemy_version}')

    identifier = URIRef('local://test_sqlalchemy_obj_language/')

    store = plugin.get(
        'SQLAlchemy',
        Store,
    )(identifier=identifier, )

    graph = ConjunctiveGraph(
        store=store,
        identifier=identifier,
    )

    graph.open('sqlite:///', create=True)

    triple = (
        URIRef('https://foo'),
        RDFS.comment,
        Literal('', lang='en'),
    )

    assert triple in graph
Example #23
    def serialize(
            self, destination=None, encoding="utf-8", format='xml', **args):

        if self.type in ('CONSTRUCT', 'DESCRIBE'):
            return self.graph.serialize(
                destination, encoding=encoding, format=format, **args)

        """stolen wholesale from graph.serialize"""
        from rdflib import plugin
        serializer = plugin.get(format, ResultSerializer)(self)
        if destination is None:
            stream = BytesIO()
            stream2 = EncodeOnlyUnicode(stream)
            serializer.serialize(stream2, encoding=encoding, **args)
            return stream.getvalue()
        if hasattr(destination, "write"):
            stream = destination
            serializer.serialize(stream, encoding=encoding, **args)
        else:
            location = destination
            scheme, netloc, path, params, query, fragment = urlparse(location)
            if netloc != "":
                print("WARNING: not saving as location" +
                      "is not a local file reference")
                return
            fd, name = tempfile.mkstemp()
            stream = os.fdopen(fd, 'wb')
            serializer.serialize(stream, encoding=encoding, **args)
            stream.close()
            if hasattr(shutil, "move"):
                shutil.move(name, path)
            else:
                shutil.copy(name, path)
                os.remove(name)
Example #24
    def serialize(
                self, destination=None, format="xml", 
                base=None, encoding=None, **args):
        """Serialize the Graph to destination

        If destination is None serialize method returns the serialization as a
        string. Format defaults to xml (AKA rdf/xml).

        Format support can be extended with plugins,
        but 'xml', 'n3', 'turtle', 'nt', 'pretty-xml', 'trix' are built in.
        """
        serializer = plugin.get(format, Serializer)(self)
        if destination is None:
            stream = StringIO()
            serializer.serialize(stream, base=base, encoding=encoding, **args)
            return stream.getvalue()
        if hasattr(destination, "write"):
            stream = destination
            serializer.serialize(stream, base=base, encoding=encoding, **args)
        else:
            location = destination
            scheme, netloc, path, params, query, fragment = urlparse(location)
            if netloc!="":
                print("WARNING: not saving as location" + \
                      "is not a local file reference")
                return
            name = tempfile.mktemp()
            stream = open(name, 'wb')
            serializer.serialize(stream, base=base, encoding=encoding, **args)
            stream.close()
            if hasattr(shutil,"move"):
                shutil.move(name, path)
            else:
                shutil.copy(name, path)
                os.remove(name)
Example #25
def make_ktbs(root_uri="ktbs:/", repository=None, create=None):
    """I create a kTBS engine conforming with the `abstract-ktbs-api`:ref:.

    :param root_uri:    the URI to use as the root of this kTBS
                        (defaults to <ktbs:/>)
    :param repository:  where to store kTBS data
    :param create:      whether the data repository should be initialized;
                        (see below)

    Parameter `repository` can be either a path (in which case data will be
    stored in a directory of that name, which will be created if needed), or a
    string of the form ``":store_type:configuration_string"`` where `store_type`
    is a registered store type in :mod:`rdflib`, and `configuration_string` is
    used to initialize this store.

    If `repository` is omitted or None, a volatile in-memory repository will be
    created.

    Parameter `create` defaults to True if `repository` is None or if it is an
    non-existing path; in other cases, it defaults to False.
    """
    if repository is None:
        if create is None:
            create = True
        repository = ":IOMemory:"
    elif repository[0] != ":":
        if create is None:
            create = not exists(repository)
        repository = ":Sleepycat:%s" % repository
    _, store_type, config_str = repository.split(":", 2)
    store = rdflib_plugin.get(store_type, Store)(config_str)
    service = KtbsService(root_uri, store, create)
    ret = service.get(service.root_uri, _rdf_type=KTBS.KtbsRoot)
    assert isinstance(ret, KtbsRoot)
    return ret
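
Per the docstring's ``:store_type:configuration_string`` convention, the three accepted shapes of `repository` look like this (paths and store types illustrative):

make_ktbs()                                       # volatile in-memory store
make_ktbs(repository="/var/lib/ktbs")             # directory -> ":Sleepycat:/var/lib/ktbs"
make_ktbs(repository=":SQLAlchemy:sqlite:////var/lib/ktbs.db")  # explicit store type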
Example #26
 def process_request(self, request):
     request.store = plugin.get(settings.STORE['TYPE'], Store)(
         URIRef(settings.STORE['ID']) if 'ID' in settings.STORE else None,
         Literal(settings.STORE['CONFIG']) if 'CONFIG' in settings.STORE else None)
     return None
Example #27
def main(odgifile, base):
    global spodgi
    plugin.register('OdgiStore', Store, 'spodgi.OdgiStore', 'OdgiStore')
    store = plugin.get('OdgiStore', Store)(base=base)
    spodgi = Graph(store=store)
    spodgi.open(odgifile, create=False)
    app.run(host='0.0.0.0', port=5001)
Example #28
def testAggregateRaw():
    memStore = plugin.get('IOMemory',Store)()
    graph1 = Graph(memStore)
    graph2 = Graph(memStore)
    graph3 = Graph(memStore)

    for n3Str,graph in [(testGraph1N3,graph1),
                        (testGraph2N3,graph2),
                        (testGraph3N3,graph3)]:
        graph.parse(StringIO(n3Str),format='n3')

    G = ReadOnlyGraphAggregate([graph1,graph2,graph3])

    #Test triples
    assert len(list(G.triples((None,RDF.type,None))))                  == 4
    assert len(list(G.triples((URIRef("http://test/bar"),None,None)))) == 2
    assert len(list(G.triples((None,URIRef("http://test/d"),None))))   == 3

    #Test __len__
    assert len(G) == 8

    #Test __contains__
    assert (URIRef("http://test/foo"),RDF.type,RDFS.Resource) in G

    barPredicates = [URIRef("http://test/d"),RDFS.isDefinedBy]
    assert len(list(G.triples_choices((URIRef("http://test/bar"),barPredicates,None)))) == 2
Example #29
 def __init__(self, parser_config: AssocParserConfig, modelstate=None):
     self.config = parser_config
     self.aspector = GoAspector(self.config.ontology)
     self.store = plugin.get('IOMemory', Store)()
     self.errors = GeneErrorSet()  # Errors by gene ID
     self.gpi_entities = self.parse_gpi(parser_config.gpi_authority_path)
     self.modelstate = modelstate
Example #30
	def open_store(self):
		default_graph_uri = "http://rdflib.net/rdfstore"
		# open existing store or create new one
		#store = getStore() # if store does not exist, then new store returned
		
		# RDF store section:
		configString = "/var/tmp/rdfstore"
		
		# Get the Sleepycat plugin.
		store = plugin.get('Sleepycat', Store)('rdfstore')		
		# Open previously created store, or create it if it doesn't exist yet
		path = mkdtemp()
		rt = store.open('rdfstore', create=False)
		#print rt
		#print path
		
		if rt == NO_STORE:
			print("Creating new store")
			# There is no underlying Sleepycat infrastructure, create it
			store.open('rdfstore', create=True)
		else:
			print("store exists")
			#assert rt == VALID_STORE, "The underlying store is corrupt"

		self.graph = Graph(store, identifier=URIRef(default_graph_uri))
		self.build_graph()
Example #31
    def serialize(self, destination=None, format="xml", base=None, encoding=None, **args):
        """Serialize the Graph to destination

        If destination is None serialize method returns the serialization as a
        string. Format defaults to xml (AKA rdf/xml).
        """
        serializer = plugin.get(format, Serializer)(self)
        if destination is None:
            stream = StringIO()
            serializer.serialize(stream, base=base, encoding=encoding, **args)
            return stream.getvalue()
        if hasattr(destination, "write"):
            stream = destination
            serializer.serialize(stream, base=base, encoding=encoding, **args)
        else:
            location = destination
            scheme, netloc, path, params, query, fragment = urlparse(location)
            if netloc!="":
                print "WARNING: not saving as location is not a local file reference"
                return
            path = location
            name = tempfile.mktemp()
            stream = open(name, 'wb')
            serializer.serialize(stream, base=base, encoding=encoding, **args)
            stream.close()
            if hasattr(shutil,"move"):
                shutil.move(name, path)
            else:
                print("Copying to: " + path)
                shutil.copy(name, path)
                os.remove(name)
Example #32
def parse_and_serialize(input_files, input_format, guess,
                        outfile, output_format, ns_bindings,
                        store_conn="", store_type=None):

    if store_type:
        store = plugin.get(store_type, Store)()
        store.open(store_conn)
        graph = ConjunctiveGraph(store)
    else:
        store = None
        graph = ConjunctiveGraph()

    for prefix, uri in list(ns_bindings.items()):
        graph.namespace_manager.bind(prefix, uri, override=False)

    for fpath in input_files:
        use_format, kws = _format_and_kws(input_format)
        if fpath == '-':
            fpath = sys.stdin
        elif not input_format and guess:
            use_format = guess_format(fpath) or DEFAULT_INPUT_FORMAT
        graph.parse(fpath, format=use_format, **kws)

    if outfile:
        output_format, kws = _format_and_kws(output_format)
        kws.setdefault('base', None)
        graph.serialize(destination=outfile, format=output_format, **kws)

    if store:
        store.rollback()
Example #33
 def _load_all_graphs(self, progress, trip_prog):
     import transaction
     from rdflib import plugin
     from rdflib.parser import Parser, create_input_source
     idx_fname = pth_join(self.powdir, 'graphs', 'index')
     triples_read = 0
     if exists(idx_fname):
         dest = self._conf()['rdf.graph']
         with open(idx_fname) as index_file:
             cnt = 0
             for l in index_file:
                 cnt += 1
             index_file.seek(0)
             progress.total = cnt
             with transaction.manager:
                 for l in index_file:
                     fname, ctx = l.strip().split(' ')
                     parser = plugin.get('nt', Parser)()
                     with open(pth_join(self.powdir, 'graphs', fname), 'rb') as f, \
                             _BatchAddGraph(dest.get_context(ctx), batchsize=4000) as g:
                         parser.parse(create_input_source(f), g)
                     progress.update(1)
                     triples_read += g.count
                     trip_prog.update(g.count)
                 progress.write('Finalizing writes to database...')
     progress.write('Loaded {:,} triples'.format(triples_read))
Example #34
def main(fd, store_type=None, store_id=None, graph_id=None, gzipped=False):
    """
    Converts MARC21 data stored in fd to a RDFlib graph.
    """
    from rdflib import plugin

    if store_type:
        msg = "Need a {} identifier for a disk-based store."
        assert store_id, msg.format('store')
        assert graph_id, msg.format('graph')
        store = plugin.get(store_type, Store)(store_id)
    else:
        store = 'default'

    graph = Graph(store=store, identifier=graph_id)

    try:
        records = MARCReader(open(fd, 'rb'))

        for i, triple in enumerate(process_records(records)):
            graph.add(triple)
            if i % 100 == 0:
                graph.commit()
            if i % 10000 == 0:
                print(i)

    finally:
        graph.commit()

    return graph
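
A hypothetical call; per the asserts above, store_id and graph_id become mandatory as soon as store_type names a disk-based store (all values illustrative):

graph = main('records.mrc',
             store_type='Sleepycat',
             store_id='/tmp/marc_store',
             graph_id='http://example.org/marc')
print(len(graph))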
Example #35
 def setUp(self):
     store = plugin.get('MySQL', Store)(identifier="rdflib_test")
     store.destroy(self.path)
     store.open(self.path, create=True)
     self.graph = Graph(store)
     self.graph.destroy(self.path)
     self.graph.open(self.path, create=self.create)
Example #36
def main():

    url="https://api.tfl.gov.uk/BikePoint?app_id=5ee709d5&app_key=1739d498d997e956a2b80c62a8948ff0" #url for bike api
    apiJsonCsv(url) #json to csv conversion
    #inputCsv = pathf + "test.csv"
    inputCsv = pathf + "londonBikes.csv"
    outFile = pathf + "londonBikes.ttl"

    csvBike = readCsv(inputCsv) #create object from the resulting csv file

    next(csvBike) #skips the header

    bike_store = plugin.get('IOMemory', Store)()
    bike_g = Graph(bike_store)
    prefixes = definePrefixes()

    print('Binding Prefixes')
    bindingPrefixes(bike_g, prefixes)

    print('Creating graph-bike...')

    for row in csvBike: #loop through individual rows in the csv file **KEY**
        lstData = getBikeData(row) #activates the getBikeData() function **KEY**
        createBikeGraph(lstData, bike_g)
    bike_g.serialize(outFile, format='turtle')

    print ('Done!!')
Example #37
def open_store(identifier):
    ident = URIRef(identifier)
    store = plugin.get("SQLAlchemy", Store)(identifier=ident)
    graph = Graph(store, identifier=ident)
    uri = Literal(os.environ.get("DATABASE_URL"))
    graph.open(uri, create=False)
    return graph
Example #38
def create_store_with_identifier(identifier):
    ident = URIRef(identifier)
    store = plugin.get("SQLAlchemy", Store)(identifier=ident)
    graph = Graph(store, identifier=ident)
    uri = Literal(os.environ.get("DATABASE_URL"))
    graph.open(uri, create=True)
    graph.parse(join(join(settings.BASE_DIR, 'static'), 'output.xml'))
Example #39
    def serialize(
        self,
        destination: Optional[Union[str, IO]] = None,
        encoding: str = "utf-8",
        format: str = "xml",
        **args,
    ) -> Optional[bytes]:
        """
        Serialize the query result.

        The :code:`format` argument determines the Serializer class to use.

        - csv: :class:`~rdflib.plugins.sparql.results.csvresults.CSVResultSerializer`
        - json: :class:`~rdflib.plugins.sparql.results.jsonresults.JSONResultSerializer`
        - txt: :class:`~rdflib.plugins.sparql.results.txtresults.TXTResultSerializer`
        - xml: :class:`~rdflib.plugins.sparql.results.xmlresults.XMLResultSerializer`

        :param destination: Path of file output or BufferedIOBase object to write the output to.
        :param encoding: Encoding of output.
        :param format: One of ['csv', 'json', 'txt', 'xml']
        :param args:
        :return: bytes
        """
        if self.type in ("CONSTRUCT", "DESCRIBE"):
            return self.graph.serialize(  # type: ignore[return-value]
                destination,
                encoding=encoding,
                format=format,
                **args)
        """stolen wholesale from graph.serialize"""
        from rdflib import plugin

        serializer = plugin.get(format, ResultSerializer)(self)
        if destination is None:
            streamb: BytesIO = BytesIO()
            stream2 = EncodeOnlyUnicode(streamb)
            serializer.serialize(stream2, encoding=encoding,
                                 **args)  # type: ignore
            return streamb.getvalue()
        if hasattr(destination, "write"):
            stream = cast(IO[bytes], destination)
            serializer.serialize(stream, encoding=encoding, **args)
        else:
            location = cast(str, destination)
            scheme, netloc, path, params, query, fragment = urlparse(location)
            if netloc != "":
                print("WARNING: not saving as location" +
                      "is not a local file reference")
                return None
            fd, name = tempfile.mkstemp()
            stream = os.fdopen(fd, "wb")
            serializer.serialize(stream, encoding=encoding, **args)
            stream.close()
            if hasattr(shutil, "move"):
                shutil.move(name, path)
            else:
                shutil.copy(name, path)
                os.remove(name)
        return None
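
A usage sketch for the SELECT branch above (illustrative data; `format` selects one of the ResultSerializer plugins listed in the docstring):

from rdflib import Graph

g = Graph()
g.parse(data="<http://ex/a> <http://ex/b> <http://ex/c> .", format="nt")
result = g.query("SELECT ?s ?p ?o WHERE { ?s ?p ?o }")

print(result.serialize(format="json").decode("utf-8"))    # destination=None -> bytes
result.serialize(destination="result.csv", format="csv")  # path -> written to file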
Example #40
 def testLimit2(self):
     graph = ConjunctiveGraph(plugin.get("IOMemory", Store)())
     graph.parse(StringIO(test_data2), format="n3")
     results = list(graph.query(test_query2, DEBUG=True))
      print(graph.query(test_query2).serialize(format="xml"))
     self.failUnless(len(results) == 1)
     for title, price in results:
         self.failUnless(title in [Literal("Java Tutorial"), Literal("COBOL Tutorial")])
Example #41
 def __init__(self, uri: str, *args, **kwargs):
     store = plugin.get("SQLAlchemy",
                        Store)(identifier="brickschema_persistent_graph")
     super().__init__(store, *args, **kwargs)
     kwargs.update({"create": True})
     super().open(uri, **kwargs)
     self.uri = uri
     super()._graph_init()
Example #42
def get_rdflib_serializer(name, media_type, plugin_name):
    rdflib_serializer = plugin.get(plugin_name, Serializer)
    return type(
        name, (RDFLibSerializer, ), {
            'plugin_name': plugin_name,
            'media_type': media_type,
            'rdflib_serializer': rdflib_serializer
        })
Example #43
 def connect(self, dburi):
     if rdflib is None:
         raise ConnectionError('RDFLib backend is not installed')
     if dburi == 'memory':
         self._store = plugin.get('IOMemory', Store)()
         self._store.graph_aware = False  # fixes context bug in Python 3
     else:
         raise ConnectionError('Unknown database config: %s' % dburi)
Example #44
 def __init__(self):
     from django.conf import settings
     store = plugin.get("SQLAlchemy", Store)(identifier='demo')
     graph = Graph(store, identifier='demo')
     graph.namespace_manager = ns_mgr
     graph.open(Literal("sqlite:///" + settings.BASE_DIR + "demo.db"),
                create=True)
     self.graph = graph
Example #45
 def testLimit2(self):
     graph = ConjunctiveGraph(plugin.get('IOMemory',Store)())
     graph.parse(data=test_data2, format="n3")
     results = list(graph.query(test_query2,DEBUG=False))
     self.assertEqual(len(results), 1)
     for title,price in results:    
         self.assertTrue(title in [Literal("Java Tutorial"),
                                   Literal("COBOL Tutorial")])    
Example #46
 def setUp(self):
     registerplugins()
     store = plugin.get("SQLAlchemy", Store)(identifier=self.ident)
     self.graph = Graph(store, identifier=self.ident)
     self.graph.open(self.uri, create=True)
     g = self.graph
     sid = BNode()
     g.add((sid, RDF.subject, URIRef("http://www.google.com/")))
Example #47
    def __init__(self, classes, service_config=None, init_with=None):
        """I create a local RDF-REST service around the given store.
        """
        if service_config is None:
            service_config = get_service_configuration()

        self.config = service_config
        root_uri = build_service_root_uri(service_config)

        assert urisplit(root_uri)[3:] == (None, None), \
            "Invalid URI <%s>" % root_uri
        self.root_uri = coerce_to_uri(root_uri)

        apply_logging_config(service_config)

        init_repo = False
        repository = service_config.get('rdf_database', 'repository', 1)
        if not repository:
            init_repo = True
            repository = ":IOMemory:"
        elif repository[0] != ":":
            init_repo = not exists(repository)
            repository = ":Sleepycat:%s" % repository

        # Whether we should force data repository initialization
        if service_config.getboolean('rdf_database', 'force-init'):
            init_repo = True

        _, store_type, config_str = repository.split(":", 2)
        store = rdflib_plugin.get(store_type, Store)(config_str)

        self.store = store
        self.class_map = class_map = {}
        for cls in classes:
            assert issubclass(cls, ILocalCore)
            assert cls.RDF_MAIN_TYPE not in class_map, \
                "duplicate RDF_MAIN_TYPE <%s>" % cls.RDF_MAIN_TYPE
            class_map[cls.RDF_MAIN_TYPE] = cls

        # about self._resource_cache: this is not per se a cache,
        # but ensures that we will not generate multiple instances for the
        # same resource.
        self._resource_cache = WeakValueDictionary()
        self._context_level = 0

        metadata_graph = self.get_metadata_graph(root_uri)
        initialized = list(
            metadata_graph.triples(
                (self.root_uri, NS.hasImplementation, None)))
        if not initialized and init_repo:
            assert init_with, \
                "Store is not initialized, and no initializer was provided"
            init_with(self)
            assert (list(
                metadata_graph.triples((self.root_uri, NS.hasImplementation,
                                        None))))  # correctly init'ed

        register_service(self)
Example #48
def get_store(configString):
    store = plugin.get('MySQL', Store)('rdflib')
    rt = store.open(configString, create=False)
    if rt == NO_STORE:
        store.open(configString, create=True)
    else:
        assert rt == VALID_STORE, "The underlying store is not valid"
    print("Opened Store")
    return store
Example #49
def do(action):
    store = plugin.get("SQLAlchemy", Store)(identifier=identifier,
                                            configuration=db_uri)
    graph = ConjunctiveGraph(store)
    action(graph, db_uri)
    try:
        graph.close()
    except Exception:
        pass
Example #50
    def testOrderBy(self):
        graph = ConjunctiveGraph(plugin.get('IOMemory', Store)())
        graph.parse(StringIO(test_data), format="n3")
        results = graph.query(test_query)

        self.failUnless(False not in [
            r[0] == a
            for r, a in zip(results, ['Alice', 'Bob', 'Charlie', 'Dave'])
        ])
Example #51
 def testLimit2(self):
     graph = ConjunctiveGraph(plugin.get('IOMemory', Store)())
     graph.parse(data=test_data2, format="n3")
     results = list(graph.query(test_query2, DEBUG=False))
     self.assertEqual(len(results), 1)
     for title, price in results:
         self.assertTrue(
             title in [Literal("Java Tutorial"),
                       Literal("COBOL Tutorial")])
Example #52
 def testLimit2(self):
     graph = ConjunctiveGraph(plugin.get('IOMemory', Store)())
     graph.parse(StringIO(test_data2), format="n3")
     results = list(graph.query(test_query2, DEBUG=True))
      print(graph.query(test_query2).serialize(format='xml'))
     self.failUnless(len(results) == 1)
     for title, price in results:
         self.failUnless(
             title in [Literal("Java Tutorial"),
                       Literal("COBOL Tutorial")])
Example #53
 def setUp(self):
     self.store = plugin.get('IOMemory', Store)()
     self.graph1 = Graph(
         self.store, identifier=URIRef('http://example.org/foaf/aliceFoaf'))
     self.graph1.parse(data=test_graph_a, format="n3")
     self.graph2 = Graph(
         self.store, identifier=URIRef('http://example.org/foaf/bobFoaf'))
     self.graph2.parse(data=test_graph_b, format="n3")
     self.unionGraph = ReadOnlyGraphAggregate(
         graphs=[self.graph1, self.graph2], store=self.store)
Example #54
def test_count_all():
    plugin.register('OdgiStore', Store, 'spodgi.OdgiStore', 'OdgiStore')
    s = plugin.get('OdgiStore', Store)(base="http://example.org/test/")
    spodgi = Graph(store=s)
    spodgi.open('./test/t.odgi', create=False)
    for r in spodgi.query('SELECT (count(*) as ?count) WHERE {?s ?p ?o}'):
        assert r[0].value > 195

    spodgi.close()
Example #55
def get_graph(configString, default_graph_uri):
    store = plugin.get('MySQL', Store)('rdflib')
    rt = store.open(configString, create=False)
    if rt == NO_STORE:
        store.open(configString, create=True)
    else:
        assert rt == VALID_STORE, "The underlying store is not valid"

    graph = Graph(store, identifier=URIRef(default_graph_uri))
    return graph