def add_seed(seed_dict):
    """Register a new seed and return the URL of its resource.

    :param seed_dict: mapping with 'uri' and 'type' entries describing the seed.
    :return: URL of the created seed resource.
    :raises APIError: when the seed data is malformed.
    :raises Conflict: when an equivalent seed already exists.
    """
    try:
        new_sid = fountain.add_seed(seed_dict['uri'], seed_dict['type'])
        return server.url_for('get_seed', sid=new_sid)
    except (TypeError, ValueError) as err:
        raise APIError(err.message)
    except DuplicateSeedError as err:
        raise Conflict(err.message)
def get_paths(elm):
    """Return discovery paths for ``elm``.

    The repeated query argument 'force_seed' may list types for which a
    synthetic seed (random URI) is generated and forced into the search.

    :raises APIError: when the fountain rejects the arguments.
    """
    force_seed = request.args.getlist('force_seed', None)
    # Pair every forced type with a throw-away synthetic seed URI; an empty
    # list collapses to None so the fountain uses its default behaviour.
    force_seed = None if not force_seed else map(
        lambda x: ('http://{}'.format(uuid()), x), force_seed)
    try:
        return fountain.get_paths(elm, force_seed=force_seed)
    except TypeError as e:
        # `except X, e` is Python-2-only syntax; `as` works on 2.6+ and 3.
        raise APIError(e.message)
def __make_plan(agp_str, force_seed):
    """Build and skolemize a search plan for the given AGP string.

    :param agp_str: graph pattern, optionally wrapped in '{...}'.
    :param force_seed: forced-seed list forwarded to the planner.
    :return: the plan serialized as Turtle.
    :raises APIError: when the pattern cannot be resolved.
    """
    try:
        agp_str = agp_str.lstrip('{').rstrip('}').strip()
        # Triple patterns are separated by '. '; raw string so the regex
        # escape is explicit. Drop empty fragments and trailing dots.
        tps = re.split(r'\. ', agp_str)
        tps = map(lambda x: x.rstrip('.').strip(),
                  filter(lambda y: y != '', tps))
        agp = AGP(tps, prefixes=planner.fountain.prefixes)
        plan = planner.make_plan(agp, force_seed=force_seed)
        plan = skolemize(plan)
        return plan.serialize(format='turtle')
    except NameError as e:
        raise APIError(e.message)
def gen_queue(status):
    # Consumer generator: stream chunks produced by the companion writer
    # thread until it reports completion AND the queue is drained.
    # NOTE: `stop_event` and `queue` come from the enclosing scope.
    with stop_event:
        while not status['completed'] or not queue.empty():
            status['last'] = datetime.now()
            try:
                # Each queued item is a string; iterating it yields the
                # response piecewise (character by character).
                for chunk in queue.get(timeout=1.0):
                    yield chunk
            except Empty:
                if not status['completed']:
                    # Keep-alive while the producer is still working.
                    yield '\n'
                elif status['exception']:
                    # Producer finished with an error: surface it.
                    raise APIError(status['exception'].message)
def get_seed(sid):
    """Return the description of seed ``sid``.

    :raises APIError: when ``sid`` does not identify a known seed.
    """
    try:
        return fountain.get_seed(sid)
    except InvalidSeedError as e:
        # `except X, e` is Python-2-only syntax; `as` works on 2.6+ and 3.
        raise APIError(e.message)
def update_vocabulary(owl, vid):
    """Replace the vocabulary identified by ``vid`` with ontology ``owl``.

    :raises APIError: when the ontology URI is unknown.
    """
    try:
        fountain.update_vocabulary(vid, owl)
    except VocabularyNotFound as e:
        # `except X, e` is Python-2-only syntax; `as` works on 2.6+ and 3.
        raise APIError('Ontology URI not found: {}'.format(e.message))
def add_vocabulary(owl):
    """Register ontology ``owl`` and return its vocabulary id.

    :raises APIError: when the ontology URI is unknown or the vocabulary
        is otherwise invalid.
    :raises Conflict: when the vocabulary is already registered.
    """
    try:
        return fountain.add_vocabulary(owl)
    except VocabularyNotFound as e:
        raise APIError('Ontology URI not found: {}'.format(e.message))
    # Handle the same fountain failures as the decorated /vocabs handler
    # elsewhere in this module, instead of letting them escape as 500s.
    except DuplicateVocabulary as e:
        raise Conflict(e.message)
    except VocabularyError as e:
        raise APIError(e.message)
def get_paths_force_seeds(req_json):
    """Return discovery paths for a JSON request carrying explicit seeds.

    :param req_json: mapping with 'elm' and 'force_seed' entries.
    :raises APIError: when the fountain rejects the arguments.
    """
    force_seed = list(tuples_force_seed(req_json['force_seed']))
    try:
        return fountain.get_paths(req_json['elm'], force_seed=force_seed)
    except TypeError as e:
        # `except X, e` is Python-2-only syntax; `as` works on 2.6+ and 3.
        raise APIError(e.message)
def get_property(property):
    """Describe ``property`` as known by the fountain.

    :raises NotFound: when the property is unknown.
    """
    try:
        return fountain.get_property(property)
    except TypeError as e:
        raise NotFound(e.message)


@server.post('/vocabs', consume_types=('text/turtle', ))
def add_vocabulary(owl):
    """Register a Turtle-serialized ontology and return its vocabulary id.

    :raises APIError: when the ontology URI is unknown or invalid.
    :raises Conflict: when the vocabulary is already registered.
    """
    try:
        return fountain.add_vocabulary(owl)
    except VocabularyNotFound as e:
        raise APIError('Ontology URI not found: {}'.format(e.message))
    except DuplicateVocabulary as e:
        raise Conflict(e.message)
    except VocabularyError as e:
        raise APIError(e.message)


@server.get('/seeds')
def seeds():
    """Return all seeds known by the fountain."""
    return fountain.seeds


@server.get('/properties')
def properties():
    """Return all properties known by the fountain."""
    return {'properties': fountain.properties}


@server.put('/vocabs/<string:vid>', consume_types=('text/turtle', ))
def update_vocabulary(owl, vid):
    """Replace the vocabulary identified by ``vid`` with ontology ``owl``.

    :raises APIError: when the ontology URI is unknown.
    """
    try:
        fountain.update_vocabulary(vid, owl)
    except VocabularyNotFound as e:
        raise APIError('Ontology URI not found: {}'.format(e.message))
def build(agora, server=None, import_name=__name__, query_function=None):
    # type: (Agora, Server, str, callable) -> AgoraServer
    """Attach the /sparql streaming endpoint to ``server``.

    :param agora: Agora instance whose ``query`` is the default query function.
    :param server: target Server; a new one is created when None.
    :param import_name: import name used when creating a new Server.
    :param query_function: override for ``agora.query``.
    """
    if server is None:
        server = Server(import_name)

    query_function = agora.query if query_function is None else query_function

    @server.get('/sparql', produce_types=('application/sparql-results+json', 'text/html'))
    def query():
        def gen_thread(status):
            # Producer: serialize SPARQL-results JSON rows into the queue.
            first = True
            try:
                for row in gen:
                    if first:
                        queue.put('{\n')
                        queue.put(
                            ' "head": %s,\n "results": {\n "bindings": [\n' % json.dumps(head(row)))
                        first = False
                    else:
                        queue.put(',\n')
                    queue.put(' {}'.format(
                        json.dumps(result(row)).encode('utf-8')))
                if first:
                    # No rows at all: emit an empty result document.
                    queue.put('{\n')
                    queue.put(
                        ' "head": [],\n "results": {\n "bindings": []\n }\n'
                    )
                else:
                    queue.put('\n ]\n }\n')
                queue.put('}')
            except Exception as e:
                # BUG FIX: the error must be stored in the shared status dict
                # (gen_queue reads status['exception']); the old code bound a
                # dead local, so query failures were silently dropped.
                status['exception'] = e
            status['completed'] = True

        def gen_queue(status):
            # Consumer generator: stream queued chunks until the producer
            # reports completion and the queue is drained.
            with stop_event:
                while not status['completed'] or not queue.empty():
                    status['last'] = datetime.now()
                    try:
                        for chunk in queue.get(timeout=1.0):
                            yield chunk
                    except Empty:
                        if not status['completed']:
                            # Keep-alive while the producer is still working.
                            yield '\n'
                        elif status['exception']:
                            raise APIError(status['exception'].message)

        try:
            query = request.args.get('query')
            incremental = json.loads(request.args.get('incremental', 'true'))
            # Remaining query args are forwarded verbatim to the query function.
            kwargs = dict(request.args.items())
            del kwargs['query']
            if 'incremental' in kwargs:
                del kwargs['incremental']
            stop_event = Semaphore()
            gen = query_function(query, incremental=incremental,
                                 stop_event=stop_event, **kwargs)
            queue = Queue()
            request_status = {'completed': False, 'exception': None}
            stream_th = Thread(target=gen_thread, args=(request_status, ))
            stream_th.daemon = False
            stream_th.start()
            return gen_queue(request_status)
        except Exception as e:
            traceback.print_exc()
            raise APIError(e.message)
def get_fragment():
    """Stream a fragment for either a SPARQL query or a raw AGP.

    The response is produced by a writer thread and streamed as Agora quads
    or Turtle depending on the request's best-matching MIME type.

    :raises APIError: on an invalid graph pattern or any collection failure.
    """
    def gen_thread(status):
        # Producer: serialize collected quads/triples into the queue.
        try:
            first = True
            min_quads = '-min' in best_mime
            if best_mime.startswith('application/agora-quad'):
                for c, s, p, o in generator:
                    if min_quads:
                        quad = u'{}·{}·{}·{}\n'.format(
                            c,
                            s.n3(plan.namespace_manager),
                            p.n3(plan.namespace_manager),
                            o.n3(plan.namespace_manager))
                    else:
                        quad = u'{}·{}·{}·{}\n'.format(
                            c, s.n3(), p.n3(), o.n3())
                    queue.put(quad)
            else:
                if first:
                    # Emit the prefix header before any triples.
                    for prefix, uri in prefixes.items():
                        queue.put('@prefix {}: <{}> .\n'.format(prefix, uri))
                    queue.put('\n')
                for c, s, p, o in generator:
                    # Context `c` is intentionally dropped in Turtle output.
                    triple = u'{} {} {} .\n'.format(
                        s.n3(plan.namespace_manager),
                        p.n3(plan.namespace_manager),
                        o.n3(plan.namespace_manager))
                    queue.put(triple)
        except Exception as e:
            status['exception'] = e
        status['completed'] = True

    def gen_queue(status):
        # Consumer generator: stream queued chunks until the producer
        # reports completion and the queue is drained.
        with stop_event:
            while not status['completed'] or not queue.empty():
                status['last'] = datetime.now()
                try:
                    for chunk in queue.get(timeout=1.0):
                        yield chunk
                except Empty:
                    if not status['completed']:
                        # Keep-alive while the producer is still working.
                        yield '\n'
                    elif status['exception']:
                        raise APIError(status['exception'].message)

    try:
        stop_event = Semaphore()
        query = request.args.get('query', None)
        kwargs = dict(request.args.items())
        if query is not None:
            del kwargs['query']
            fragment_dict = fragment_function(query=query,
                                              stop_event=stop_event, **kwargs)
        else:
            tps_str = request.args.get('agp')
            tps_match = re.search(r'\{(.*)\}', tps_str).groups(0)
            if len(tps_match) != 1:
                raise APIError('Invalid graph pattern')
            # FIX: the split pattern literal was broken across two physical
            # lines (a syntax error); restored as a raw-string regex.
            tps = re.split(r'\. ', tps_match[0])
            agp = AGP([tp.strip() for tp in tps],
                      prefixes=agora.planner.fountain.prefixes)
            fragment_dict = fragment_function(agps=[agp], **kwargs)
        plan = fragment_dict['plan']
        generator = fragment_dict['generator']
        prefixes = fragment_dict['prefixes']
        best_mime = request.accept_mimetypes.best
        queue = Queue()
        request_status = {'completed': False, 'exception': None}
        stream_th = Thread(target=gen_thread, args=(request_status, ))
        stream_th.daemon = False
        stream_th.start()
        return gen_queue(request_status)
    except Exception as e:
        traceback.print_exc()
        raise APIError(e.message)