def sparql_index():
    """Evaluate a SPARQL query sent to the main /sparql endpoint.

    Accepts either:
      - a GET request carrying 'query' + 'default-graph-uri' parameters
        (or a 'next' link + 'default-graph-uri' to resume a paused query), or
      - a POST request with a JSON body validated by ``SageSparqlQuery``.

    Returns the response produced by ``execute_query``, a 400 Response on
    schema validation errors, a ``sage_http_error`` page on malformed
    requests, or aborts with 500 on unexpected failures.
    """
    # Negotiate the response format from the client's Accept header
    mimetype = request.accept_mimetypes.best_match([
        "application/json", "application/xml",
        "application/sparql-results+json", "application/sparql-results+xml",
        "text/html"
    ])
    try:
        url = secure_url(request.base_url)
        # parse arguments
        if request.method == "GET":
            query = request.args.get("query") or None
            default_graph_uri = request.args.get("default-graph-uri") or None
            next_link = request.args.get("next") or None
            # ensure that (query or next link) AND default-graph-uri are set
            if (query is None or default_graph_uri is None) and (
                    next_link is None or default_graph_uri is None):
                # fixed grammar: "must contains" -> "must contain"
                return sage_http_error(
                    "Invalid request sent to server: a GET request must contain both parameters 'query' and 'default-graph-uri'. See <a href='http://sage.univ-nantes.fr/documentation'>the API documentation</a> for reference."
                )
        elif request.method == "POST" and request.is_json:
            # POST query: validate the JSON payload against the schema
            post_query, err = SageSparqlQuery().load(request.get_json())
            if err is not None and len(err) > 0:
                # TODO better formatting
                return Response(format_marshmallow_errors(err), status=400)
            query = post_query["query"]
            default_graph_uri = post_query["defaultGraph"]
            next_link = post_query["next"] if 'next' in post_query else None
        else:
            # BUGFIX: this branch rejects malformed POST (non-JSON) requests,
            # but the original message wrongly described GET requirements.
            return sage_http_error(
                "Invalid request sent to server: a POST request must contain a valid JSON body. See <a href='http://sage.univ-nantes.fr/documentation'>the API documentation</a> for reference."
            )
        # execute query
        return execute_query(query, default_graph_uri, next_link, dataset,
                             mimetype, url)
    except Exception as e:
        logger.error(e)
        abort(500)
def sparql_query(graph_name):
    """WARNING: old API, deprecated.

    Evaluate a SPARQL query against the graph named *graph_name*.

    A GET (or non-JSON) request returns the dataset homepage with VoID
    descriptions; a JSON POST request executes the query under the graph's
    time quota and returns one page of results, with a 'next' control when
    the evaluation was interrupted. Aborts with 404 if the graph is unknown
    and 500 on unexpected failures. Every response carries a deprecation
    Warning header.
    """
    graph = dataset.get_graph(graph_name)
    if graph is None:
        abort(404)
    logger.debug('[/sparql/] Corresponding dataset found')
    # Negotiate the response format from the client's Accept header
    mimetype = request.accept_mimetypes.best_match([
        "application/json", "application/xml",
        "application/sparql-results+json", "application/sparql-results+xml"
    ])
    url = secure_url(request.url)
    try:
        # A GET request always returns the homepage of the dataset
        if request.method == "GET" or (not request.is_json):
            dinfo = graph.describe(url)
            dinfo['@id'] = url
            void_desc = {
                "nt": VoidDescriptor(url, graph).describe("ntriples"),
                "ttl": VoidDescriptor(url, graph).describe("turtle"),
                "xml": VoidDescriptor(url, graph).describe("xml")
            }
            return render_template("website/sage_dataset.html",
                                   dataset_info=dinfo, void_desc=void_desc)
        engine = SageEngine()
        post_query, err = QueryRequest().load(request.get_json())
        if err is not None and len(err) > 0:
            return Response(format_marshmallow_errors(err), status=400)
        quota = graph.quota / 1000
        max_results = graph.max_results
        # Load next link (saved plan of a previously interrupted execution)
        next_link = None
        if 'next' in post_query:
            next_link = decode_saved_plan(post_query["next"])
        # build physical query plan, then execute it with the given quota
        start = time()
        plan, cardinalities = build_query_plan(post_query["query"], dataset,
                                               graph_name, next_link)
        loading_time = (time() - start) * 1000  # convert in milliseconds
        bindings, saved_plan, is_done = engine.execute(plan, quota,
                                                       max_results)
        # compute controls for the next page
        start = time()
        next_page = None
        if not is_done:
            next_page = encode_saved_plan(saved_plan)
        exportTime = (time() - start) * 1000  # convert in milliseconds
        stats = {
            "cardinalities": cardinalities,
            "import": loading_time,
            "export": exportTime
        }
        # BUGFIX: the XML test below was a plain 'if' in the original, so
        # its 'else' branch overwrote the SPARQL-JSON response built in the
        # first branch with the raw-JSON one. It must be 'elif'.
        if mimetype == "application/sparql-results+json":
            res = Response(responses.w3c_json_streaming(bindings, next_page,
                                                        stats, url),
                           content_type='application/json')
        elif mimetype == "application/xml" or mimetype == "application/sparql-results+xml":
            res = Response(responses.w3c_xml(bindings, next_page, stats),
                           content_type="application/xml")
        else:
            res = Response(responses.raw_json_streaming(bindings, next_page,
                                                        stats, url),
                           content_type='application/json')
        # set deprecation warning in headers (typos fixed: "uppgrading",
        # "fore more details")
        res.headers.add(
            "Warning",
            "199 SaGe/2.0 \"You are using a deprecated API. Consider upgrading to the SaGe SPARQL query API. See http://sage.univ-nantes.fr/documentation for more details.\""
        )
        return res
    except Exception as e:
        logger.error(e)
        abort(500)
def sparql_query(dataset_name):
    """Evaluate a SPARQL query against the dataset named *dataset_name*.

    A GET (or non-JSON) request returns the dataset homepage with VoID
    descriptions; a JSON POST request executes the query under the dataset's
    time quota and returns one page of results, with a 'next' control when
    the evaluation was interrupted. Aborts with 404 if the dataset is
    unknown and 500 on unexpected failures.
    """
    logger.info('[IP: {}] [/sparql/] Querying {}'.format(
        request.environ['REMOTE_ADDR'], dataset_name))
    dataset = datasets.get_dataset(dataset_name)
    if dataset is None:
        abort(404)
    logger.debug('[/sparql/] Corresponding dataset found')
    # Negotiate the response format from the client's Accept header
    mimetype = request.accept_mimetypes.best_match([
        "application/json", "application/xml",
        "application/sparql-results+json", "application/sparql-results+xml"
    ])
    url = secure_url(request.url)
    try:
        # A GET request always returns the homepage of the dataset
        if request.method == "GET" or (not request.is_json):
            dinfo = dataset.describe(url)
            dinfo['@id'] = url
            void_desc = {
                "nt": VoidDescriptor(url, dataset).describe("ntriples"),
                "ttl": VoidDescriptor(url, dataset).describe("turtle"),
                "xml": VoidDescriptor(url, dataset).describe("xml")
            }
            return render_template("sage.html", dataset_info=dinfo,
                                   void_desc=void_desc)
        engine = SageEngine()
        post_query, err = QueryRequest().load(request.get_json())
        if err is not None and len(err) > 0:
            return Response(format_marshmallow_errors(err), status=400)
        logger.info('[IP: {}] [/sparql/] Query={}'.format(
            request.environ['REMOTE_ADDR'], post_query))
        quota = dataset.quota / 1000
        max_results = dataset.max_results
        # Load next link (saved plan of a previously interrupted execution)
        next_link = None
        if 'next' in post_query:
            logger.debug(
                '[/sparql/{}] Saved plan found, decoding "next" link'.format(
                    dataset_name))
            next_link = decode_saved_plan(post_query["next"])
        else:
            logger.debug('[/sparql/{}] Query to evaluate: {}'.format(
                dataset_name, post_query))
        # build physical query plan, then execute it with the given quota
        logger.debug('[/sparql/{}] Starting query evaluation...'.format(
            dataset_name))
        start = time()
        plan, cardinalities = build_query_plan(post_query["query"], dataset,
                                               next_link)
        loading_time = (time() - start) * 1000
        bindings, saved_plan, is_done = engine.execute(plan, quota,
                                                       max_results)
        logger.debug(
            '[/sparql/{}] Query evaluation completed'.format(dataset_name))
        # compute controls for the next page
        start = time()
        next_page = None
        if is_done:
            logger.debug(
                '[/sparql/{}] Query completed under the time quota'.format(
                    dataset_name))
        else:
            logger.debug(
                '[/sparql/{}] The query was not completed under the time quota...'
                .format(dataset_name))
            logger.debug(
                '[/sparql/{}] Saving the execution to plan to generate a "next" link'
                .format(dataset_name))
            next_page = encode_saved_plan(saved_plan)
            logger.debug(
                '[/sparql/{}] "next" link successfully generated'.format(
                    dataset_name))
        exportTime = (time() - start) * 1000
        stats = {
            "cardinalities": cardinalities,
            "import": loading_time,
            "export": exportTime
        }
        if mimetype == "application/sparql-results+json":
            return json.jsonify(responses.json(bindings, next_page, stats))
        if mimetype == "application/xml" or mimetype == "application/sparql-results+xml":
            return Response(responses.xml(bindings, next_page, stats),
                            content_type="application/xml")
        return json.jsonify(responses.raw_json(bindings, next_page, stats))
    except Exception as e:
        # BUGFIX: the original swallowed the exception without logging,
        # unlike the sibling handlers — log it before returning a 500.
        logger.error(e)
        abort(500)