Example 1
    def select(self, query, format="sparql"):
        """Run a SPARQL SELECT query against the remote endpoint.

        The query is sent both URL-encoded on the query string and as the
        request body. With format="python" the SPARQL XML result is parsed
        into a list of dicts; any other format returns the raw response
        bytes, with an Accept header matching the requested format.

        :param query: A SPARQL SELECT query with all prefixes defined.
        :param format: "sparql", "json" or "python".
        :raises errors.SparqlError: on an HTTP-level failure.
        """
        flattened = query.replace("\n", " ")
        endpoint = self._endpoint_url()
        endpoint += "?query=" + quote(flattened).replace("/", "%2F")

        # "python" results are requested as SPARQL XML and parsed locally.
        accept = self._contenttype["sparql" if format == "python" else format]
        headers = {'Accept': accept}
        try:
            try:
                response = requests.get(endpoint, headers=headers,
                                        data=flattened)
            except UnicodeEncodeError:
                response = requests.get(endpoint, headers=headers,
                                        data=flattened.encode("utf-8"))
            response.raise_for_status()
            if format == "python":
                return self._sparql_results_to_list(response.content)
            # when using format="json", we should return a json
            # string, not the decoded data structure (c.f. how the
            # RDFLib based backends do it).
            return response.content  # raw bytestring, deliberately not .text
        except requests.exceptions.HTTPError as e:
            raise errors.SparqlError(e.response.text)
Example 2
 def select(self, query, format="sparql"):
     """Run a SPARQL SELECT query against the local RDFLib store.

     :param query: A SPARQL SELECT query with all prefixes defined.
     :param format: "sparql" (XML bytes), "json" (JSON bytes) or
                    anything else for a list of dicts mapping variable
                    names to string values.
     :raises errors.SparqlError: if the query cannot be parsed.
     """
     # FIXME: workaround for the fact that rdflib select uses FROM
     # <%s> differently than Sesame/Fuseki. We remove the 'FROM
     # <%s>' part from the query and instead get a context graph
     # for the same URI.
     graphuri = None
     m = self.re_fromgraph.search(query)
     if m:
         graphuri = m.group("graphuri")
         query = self.re_fromgraph.sub(" ", query)
     try:
         res = self._getcontextgraph(graphuri).query(query)
     except pyparsing.ParseException as e:
         raise errors.SparqlError(e)
     if format == "sparql":
         return res.serialize(format="xml")
     elif format == "json":
         return res.serialize(format="json")
     else:
         # Stringify every binding. A nested comprehension replaces the
         # old `l = []` + append loops: same output, idiomatic form, and
         # no ambiguously-named local.
         return [{str(key): str(val) for key, val in row.items()}
                 for row in res.bindings]
Example 3
 def update(self, query):
     """POST a SPARQL UPDATE query to the triple store's update endpoint.

     :param query: A SPARQL UPDATE query with all prefixes defined.
     :raises errors.TriplestoreError: if the store is unreachable.
     :raises errors.SparqlError: on an HTTP-level failure.
     """
     endpoint = self._update_url()
     try:
         response = requests.post(endpoint, data={'update': query})
         response.raise_for_status()
     except requests.exceptions.ConnectionError as e:
         raise errors.TriplestoreError(
             "Triplestore %s not responding: %s" % (endpoint, e))
     except requests.exceptions.HTTPError as e:
         raise errors.SparqlError(e)
Example 4
    def construct(self, query):
        """Run a SPARQL CONSTRUCT query and return the result as an RDFLib graph.

        :param query: A SPARQL query with all necessary prefixes defined.
        :type query: str
        :raises errors.SparqlError: if the query cannot be parsed.
        """
        try:
            result = self.graph.query(query)
        except pyparsing.ParseException as e:
            raise errors.SparqlError(e)
        return result.graph
Example 5
 def construct(self, query):
     """Run a SPARQL CONSTRUCT query against the remote endpoint.

     Returns the result as an RDFLib Graph. A response that cannot be
     parsed (typically an empty body) yields an empty Graph, which the
     caller treats as a warning condition.

     :param query: A SPARQL query with all necessary prefixes defined.
     :type query: str
     :raises errors.SparqlError: on an HTTP-level failure.
     """
     url = self._endpoint_url()
     url += "?query=" + quote(query)
     format = "turtle"
     headers = {'Accept': self._contenttype[format]}
     try:
         resp = requests.get(url, headers=headers)
         resp.raise_for_status()
         result = Graph()
         result.parse(data=resp.content, format=format)
         return result
     except requests.exceptions.HTTPError as e:
         raise errors.SparqlError(e)
     except SAXParseException:
         # No real error message, most likely an empty string. Return a
         # guaranteed-empty graph -- the old code returned `result`,
         # which could still hold partially parsed triples -- so the
         # caller's empty-graph warning fires reliably.
         return Graph()