def setUp(self):
    """Build a two-operator engine, expose the whole engine through an
    EngineView, and mount the resulting ReliureAPI on a testing Flask app.
    """
    self.engine = Engine("op1", "op2")
    self.engine.op1.setup(in_name="in")
    self.engine.op2.setup(out_name="out")
    self.engine.op1.set(OptProductEx())
    # second slot gets a "times 12" component with the factor option frozen
    by_twelve = OptProductEx("foisdouze")
    by_twelve.force_option_value("factor", 12)
    self.engine.op2.set(by_twelve, OptProductEx())

    view = EngineView(self.engine)
    view.set_input_type(Numeric(vtype=int, min=-5, max=5))
    view.add_output("out")

    api = ReliureAPI()
    api.register_view(view, url_prefix="egn")

    app = Flask(__name__)
    self.appp = app
    app.config['TESTING'] = True
    app.register_blueprint(api, url_prefix="/api")
    self.app = app.test_client()
def setUp(self):
    """Chain op1 -> op2 through a "middle" channel, expose only op2 as a
    named view, and mount the resulting ReliureAPI on a testing Flask app.
    """
    self.engine = Engine("op1", "op2")
    self.engine.op1.setup(in_name="in", out_name="middle")
    self.engine.op2.setup(in_name="middle", out_name="out")
    self.engine.op1.set(OptProductEx())
    # op2 offers two alternative components with frozen factors
    by_twelve = OptProductEx("foisdouze")
    by_twelve.force_option_value("factor", 12)
    by_four = OptProductEx("foisquatre")
    by_four.force_option_value("factor", 4)
    self.engine.op2.set(by_twelve, by_four)

    view = EngineView(self.engine.op2, name="op2")
    view.set_input_type(Numeric(vtype=int))
    view.add_output("out")

    api = ReliureAPI()
    api.register_view(view)

    app = Flask(__name__)
    app.config['TESTING'] = True
    app.register_blueprint(api, url_prefix="/api")
    self.app = app.test_client()
def clustering_api(engines, api=None, optionables=None, prefix="clustering"):
    """Build (or extend) a ReliureAPI exposing a clustering + labelling engine.

    :param engines: provides the ``edge_subgraph`` graph-builder component
    :param api: existing ReliureAPI to extend; a new one is created when None
    :param optionables: clustering components; defaults to Walktrap + Infomap
    :param prefix: url prefix of the clustering view
    :return: the ReliureAPI with the clustering view registered
    """
    def clustering_engine(optionables):
        """ Return a default engine over a lexical graph """
        # setup
        engine = Engine("gbuilder", "clustering", "labelling")
        engine.gbuilder.setup(in_name="request", out_name="graph", hidden=True)
        engine.clustering.setup(in_name="graph", out_name="clusters")
        engine.labelling.setup(in_name="clusters", out_name="clusters", hidden=True)
        engine.gbuilder.set(engines.edge_subgraph)
        engine.clustering.set(*optionables)
        ## Labelling
        from cello.clustering.labelling.model import Label
        from cello.clustering.labelling.basic import VertexAsLabel, TypeFalseLabel, normalize_score_max
        def _labelling(graph, cluster, vtx):
            # every cluster vertex becomes a label keyed by its uuid
            return Label(vtx["uuid"], score=1, role="default")
        labelling = VertexAsLabel(_labelling) | normalize_score_max
        engine.labelling.set(labelling)
        return engine

    if api is None:
        # NOTE(review): `name` is not defined in this function's scope -- this
        # relies on a module-level binding. TODO confirm, or pass it as a param.
        api = ReliureAPI(name, expose_route=False)

    ## Clustering
    from cello.graphs.transform import EdgeAttr  # NOTE(review): unused here -- confirm before removing
    from cello.clustering.common import Infomap, Walktrap
    # weighted variants of both community-detection algorithms
    walktrap = Walktrap(weighted=True)
    walktrap.name = "Walktrap"
    infomap = Infomap(weighted=True)
    infomap.name = "Infomap"

    DEFAULTS = [walktrap, infomap]
    if optionables is None:  # fixed: identity test instead of `== None`
        optionables = DEFAULTS

    view = EngineView(clustering_engine(optionables))
    view.set_input_type(EdgeList())
    view.add_output("clusters", export_clustering, vertex_id_attr='uuid')
    api.register_view(view, url_prefix=prefix)

    return api
def test_engine_view_init(self):
    """add_output must reject names that are not outputs of the engine."""
    view = EngineView(self.engine)
    with pytest.raises(ValueError):
        view.add_output("existe_pas")
def clustering_api(graphdb, engines, api=None, optionables=None, prefix="clustering"):
    """Build (or extend) a ReliureAPI exposing clustering and cluster-labelling views.

    :param graphdb: graph database used by the cluster-labelling engine
    :param engines: provides the ``edge_subgraph`` graph-builder component
    :param api: existing ReliureAPI to extend; a new one is created when None
    :param optionables: clustering components; defaults to Walktrap + Infomap
    :param prefix: url prefix of the clustering view
    :return: the ReliureAPI with "clustering" and "labels" views registered
    """
    def clustering_engine(optionables):
        """ Return a default engine over a lexical graph """
        # setup
        engine = Engine("gbuilder", "clustering")
        engine.gbuilder.setup(in_name="request", out_name="graph", hidden=True)
        engine.clustering.setup(in_name="graph", out_name="clusters")
        engine.gbuilder.set(engines.edge_subgraph)
        engine.clustering.set(*optionables)
        return engine

    if api is None:
        # NOTE(review): `name` is not defined in this function's scope -- this
        # relies on a module-level binding. TODO confirm, or pass it as a param.
        api = ReliureAPI(name, expose_route=False)

    ## Clustering
    from cello.graphs.transform import EdgeAttr  # NOTE(review): unused here -- confirm before removing
    from cello.clustering.common import Infomap, Walktrap
    # weighted variants of both community-detection algorithms
    walktrap = Walktrap(weighted=True)
    walktrap.name = "Walktrap"
    infomap = Infomap(weighted=True)
    infomap.name = "Infomap"

    DEFAULTS = [walktrap, infomap]
    if optionables is None:  # fixed: identity test instead of `== None`
        optionables = DEFAULTS

    from pdgapi.explor import EdgeList
    view = EngineView(clustering_engine(optionables))
    view.set_input_type(EdgeList())
    view.add_output("clusters", export_clustering, vertex_id_attr='uuid')
    api.register_view(view, url_prefix=prefix)

    # cluster labels
    view = EngineView(clusters_labels_engine(graphdb))
    view.set_input_type(Clusters())
    view.add_output("labels", lambda e: e)
    api.register_view(view, url_prefix="labels")

    return api
def explore_api(engines, graphdb):
    """Build the "xplor" ReliureAPI: pad import/export, istex search, graph
    exploration, prox expansion and additive-node views.
    """
    #explor_api = explor.explore_api("xplor", graphdb, engines)
    api = ReliureAPI("xplor", expose_route=False)

    def _view(engine, input_type):
        # small builder: every view here starts the same way
        v = EngineView(engine)
        v.set_input_type(input_type)
        return v

    # import pad
    pad_import = _view(import_calc_engine(graphdb), AdditiveNodes())
    pad_import.add_output("graph", export_graph, id_attribute='uuid')
    api.register_view(pad_import, url_prefix="import")

    # istex search
    search_view = _view(search_engine(graphdb), ComplexQuery())
    search_view.add_output("request", ComplexQuery())
    search_view.add_output("graph", export_graph, id_attribute='uuid')
    api.register_view(search_view, url_prefix="search")

    # graph exploration, reset global
    graph_view = _view(graph_engine(graphdb), ComplexQuery())
    graph_view.add_output("request", ComplexQuery())
    graph_view.add_output("graph", export_graph, id_attribute='uuid')
    api.register_view(graph_view, url_prefix="graph")

    # prox expand returns [(node,score), ...]
    expand_view = _view(expand_prox_engine(graphdb), NodeExpandQuery())
    expand_view.add_output("scores", lambda scores: scores)
    api.register_view(expand_view, url_prefix="expand_px")

    # additive search
    additive_view = _view(engines.additive_nodes_engine(graphdb), AdditiveNodes())
    additive_view.add_output("graph", export_graph, id_attribute='uuid')
    api.register_view(additive_view, url_prefix="additive_nodes")

    # export pad
    pad_export = _view(export_calc_engine(graphdb), AdditiveNodes())
    pad_export.add_output("url", lambda e: e)
    api.register_view(pad_export, url_prefix="export")

    return api
def layout_api(engines, api=None, optionables=None, prefix="layout"):
    """Build (or extend) a ReliureAPI exposing graph layout engines.

    :param engines: provides the ``edge_subgraph`` graph-builder component
    :param api: existing ReliureAPI to extend; a new one is created when None
    :param optionables: list of ``(name, layout_component)`` pairs; defaults
        to the LAYOUTS catalogue below
    :param prefix: url prefix of the layout view
    :return: the ReliureAPI with the layout view registered
    """
    def export_layout(graph, layout):
        # map every vertex uuid to its computed coordinates
        uuids = graph.vs['uuid']
        coords = {uuid: layout[i] for i, uuid in enumerate(uuids)}
        return {"desc": str(layout), "coords": coords}

    def layout_engine(layouts):
        """ Return a default engine over a lexical graph """
        # setup
        engine = Engine("gbuilder", "layout", "export")
        engine.gbuilder.setup(in_name="request", out_name="graph", hidden=True)
        engine.layout.setup(in_name="graph", out_name="layout")
        engine.export.setup(in_name=["graph", "layout"], out_name="layout", hidden=True)
        engine.gbuilder.set(engines.edge_subgraph)
        # name each component after its catalogue key, then register them all
        for key, component in layouts:
            component.name = key
        engine.layout.set(*[component for _, component in layouts])
        engine.export.set(export_layout)
        return engine

    from cello.layout.simple import KamadaKawaiLayout, GridLayout, FruchtermanReingoldLayout
    from cello.layout.proxlayout import ProxLayoutPCA, ProxLayoutRandomProj, ProxLayoutMDS, ProxMDSSugiyamaLayout
    from cello.layout.transform import Shaker
    from cello.layout.transform import ByConnectedComponent
    from cello.layout.simple import DrlLayout

    LAYOUTS = [
        ("2D_Force_directed", FruchtermanReingoldLayout(dim=2, weighted=True)),
        ("3D_Force_directed", FruchtermanReingoldLayout(dim=3, weighted=True)),
        ("2D_KamadaKawai", KamadaKawaiLayout(dim=2)),
        ("3D_KamadaKawai", KamadaKawaiLayout(dim=3)),
        ("3DMds", ProxLayoutMDS(dim=3) | Shaker(kelastic=.9)),
        ("2DMds", ProxLayoutMDS(dim=2) | Shaker(kelastic=.9)),
        ("3DPca", ProxLayoutPCA(dim=3, ) | Shaker(kelastic=.9)),
        ("3DPcaWeighted", ProxLayoutPCA(dim=3, weighted=True) | Shaker(kelastic=.9)),
        ("2DRandomProj", ProxLayoutRandomProj(dim=2)),
        #("3DRandomProj" , ProxLayoutRandomProj(dim=3) ),
        ("3DOrdered", ProxMDSSugiyamaLayout(dim=3) | Shaker(kelastic=0.9)),
        # 2D
        ("2DPca", ProxLayoutPCA(dim=2) | Shaker(kelastic=1.8)),
        # tree
        ("DrlLayout", DrlLayout(dim=2) | Shaker(kelastic=0.8)),
    ]

    if api is None:
        # NOTE(review): `name` is not defined in this function's scope -- this
        # relies on a module-level binding. TODO confirm, or pass it as a param.
        api = ReliureAPI(name, expose_route=False)

    if optionables is None:  # fixed: identity test instead of `== None`
        optionables = LAYOUTS

    view = EngineView(layout_engine(optionables))
    view.set_input_type(EdgeList())
    view.add_output("layout", lambda x: x)
    api.register_view(view, url_prefix=prefix)

    return api
def explore_api(name, graphdb, engines):
    """ API over tmuse elastic search

    Registers starred / explore / expand / additive engine views, plus raw
    graph dump routes (json, pickle) and debug extraction routes.

    :param name: blueprint name of the ReliureAPI
    :param graphdb: graph database backend
    :param engines: factory object providing the engines used by the views
    :return: the configured ReliureAPI
    """
    api = ReliureAPI(name, expose_route=True)

    # starred
    view = EngineView(engines.starred_engine(graphdb))
    view.set_input_type(ComplexQuery())
    view.add_output("request", ComplexQuery())
    view.add_output("graph", export_graph, id_attribute='uuid')
    api.register_view(view, url_prefix="starred")

    # prox search returns graph only
    view = EngineView(engines.explore_engine(graphdb))
    view.set_input_type(ComplexQuery())
    view.add_output("request", ComplexQuery())
    view.add_output("graph", export_graph, id_attribute='uuid')
    api.register_view(view, url_prefix="explore")

    # prox expand returns [(node,score), ...]
    view = EngineView(engines.expand_prox_engine(graphdb))
    view.set_input_type(NodeExpandQuery())
    view.add_output("scores", lambda x: x)
    api.register_view(view, url_prefix="expand_px")

    # additive search
    view = EngineView(engines.additive_nodes_engine(graphdb))
    view.set_input_type(AdditiveNodes())
    view.add_output("graph", export_graph, id_attribute='uuid')
    api.register_view(view, url_prefix="additive_nodes")

    import random
    import json
    import pickle
    from flask import request, jsonify
    from flask import Response, make_response

    @api.route("/<string:gid>.json", methods=['GET'])
    @api.route("/starred/<string:gid>.json", methods=['GET'])
    def _json_dump(gid):
        dumps = lambda g: json.dumps(export_graph(g, id_attribute='uuid'))
        return stargraph_dump(gid, dumps, 'json')

    @api.route("/<string:gid>.pickle", methods=['GET'])
    @api.route("/starred/<string:gid>.pickle", methods=['GET'])
    def _pickle_dump(gid):
        return stargraph_dump(gid, pickle.dumps, 'pickle')

    def stargraph_dump(gid, dumps, content_type):
        """ returns igraph pickled/jsonified starred graph """
        engine = engines.starred_engine(graphdb)
        meta = graphdb.get_graph_metadata(gid)
        graph = engine.play({'graph': gid})['graph']
        # copy graph-level metadata onto the graph object (Python 2 dicts)
        for k, v in meta.iteritems():
            graph[k] = v
        response = make_response(dumps(graph))
        response.headers['Content-Type'] = 'application/%s' % content_type
        response.headers['Content-Disposition'] = 'inline; filename=%s.%s' % (gid, content_type)
        return response

    @api.route("/<string:gid>/random")
    def random_node(gid):
        return jsonify({'gid': gid})

    # Debug views
    @api.route("/<string:gid>/extraction/<string:text>", methods=['GET'])
    @api.route("/<string:gid>/extraction", methods=['POST'])
    def _extract(gid, text=None):
        """ POST /<string:graph>/extraction
            { gid: graph, uuids : [uuid, uuid] }
        """
        if request.method == "GET":
            labels = text.split(',')
            nodes = [graphdb.get_node_by_name(gid, label) for label in labels]
            p0_uuids = [node['uuid'] for node in nodes]
        elif request.method == "POST":
            # BUGFIX: was `assert graph == request.json.get('gid', None)`;
            # `graph` is undefined here (NameError). The intent is to check
            # that the posted gid matches the URL's gid.
            # NOTE(review): assert is stripped under -O; consider abort(400).
            assert gid == request.json.get('gid', None)
            p0_uuids = request.json.get('uuids')
        prox = graphdb.proxemie(gid, p0_uuids, limit=50, n_step=3)
        return jsonify({'gid': gid, 'nodes': p0_uuids, 'extraction': prox})

    return api
def explore_api(engines, graphdb):
    """Build the "xplor" ReliureAPI: starred / explore / expand / additive
    engine views plus raw graph dump routes (json, pickle).
    """
    #explor_api = explor.explore_api("xplor", graphdb, engines)
    api = ReliureAPI("xplor", expose_route=False)

    # starred
    starred_view = EngineView(starred_engine(graphdb))
    starred_view.set_input_type(ComplexQuery())
    starred_view.add_output("request", ComplexQuery())
    starred_view.add_output("graph", export_graph, id_attribute='uuid')
    api.register_view(starred_view, url_prefix="starred")

    # prox search returns graph only
    explore_view = EngineView(explore_engine(graphdb))
    explore_view.set_input_type(ComplexQuery())
    explore_view.add_output("request", ComplexQuery())
    explore_view.add_output("graph", export_graph, id_attribute='uuid')
    api.register_view(explore_view, url_prefix="explore")

    # prox expand returns [(node,score), ...]
    expand_view = EngineView(expand_prox_engine(graphdb))
    expand_view.set_input_type(NodeExpandQuery())
    expand_view.add_output("scores", lambda scores: scores)
    api.register_view(expand_view, url_prefix="expand_px")

    # additive search
    additive_view = EngineView(engines.additive_nodes_engine(graphdb))
    additive_view.set_input_type(AdditiveNodes())
    additive_view.add_output("graph", export_graph, id_attribute='uuid')
    api.register_view(additive_view, url_prefix="additive_nodes")

    def stargraph_dump(gid, dumps, content_type):
        """ returns igraph pickled/jsonified starred graph """
        engine = explore_engine(graphdb)
        meta = graphdb.get_graph_metadata(gid)
        graph = engine.play({'graph': gid})['graph']
        # copy graph-level metadata onto the graph object (Python 2 dicts)
        for key, value in meta.iteritems():
            graph[key] = value
        response = make_response(dumps(graph))
        response.headers['Content-Type'] = 'application/%s' % content_type
        response.headers['Content-Disposition'] = 'inline; filename=%s.%s' % (gid, content_type)
        return response

    @api.route("/starred/<string:gid>.json", methods=['GET'])
    def g_json_dump(gid):
        graph = graphdb.get_graph(gid)
        pruned = starred(graph, limit=100, prune=True)
        return jsonify(export_graph(pruned, id_attribute='uuid'))

    @api.route("/<string:gid>.json", methods=['GET'])
    def _json_dump(gid):
        as_json = lambda g: json.dumps(export_graph(g, id_attribute='uuid'))
        return stargraph_dump(gid, as_json, 'json')

    @api.route("/<string:gid>.pickle", methods=['GET'])
    def _pickle_dump(gid):
        return stargraph_dump(gid, pickle.dumps, 'pickle')

    return api
"label_key" : "label", "vertices_color": {'fort': (255,150,0), 'bon': (200,255,0), 'faible': (50,50,255), 'mauvais': (255,50,50)}, }, } for gname, config in graph_config.iteritems(): graph = igraph.read(config.pop("path")) graph['vertices_color'] = config.pop("vertices_color") graphs.add(gname) engine = lexical_graph_engine(graph) view = EngineView(engine) view.set_input_type(Text()) view.add_output("query", lambda x : x.encode('utf8')) view.add_output("graph", export_graph) view.add_output("layout", export_layout) view.add_output("clusters", export_clustering) api = ReliureAPI(name=gname ) api.register_view(view, url_prefix="api" ) app.register_blueprint(api, url_prefix="/graph/%s" % (gname) ) # === Routes ===
def explore_api(engines, graphdb):
    """Build the "xplor" ReliureAPI: explore / expand / additive-node views,
    then graft the layout and clustering sub-APIs onto it.
    """
    #explor_api = explor.explore_api("xplor", graphdb, engines)
    api = ReliureAPI("xplor", expose_route=False)

    # prox search returns graph only
    explore_view = EngineView(explore_engine(graphdb))
    explore_view.set_input_type(ComplexQuery())
    explore_view.add_output("request", ComplexQuery())
    explore_view.add_output("graph", graph2dict, id_attribute='uuid')
    api.register_view(explore_view, url_prefix="explore")

    # prox expand returns [(node,score), ...]
    expand_view = EngineView(engines.expand_prox_engine(graphdb))
    expand_view.set_input_type(NodeExpandQuery())
    expand_view.add_output("scores", lambda scores: scores)
    api.register_view(expand_view, url_prefix="expand_px")

    # additive search
    additive_view = EngineView(engines.additive_nodes_engine(graphdb))
    additive_view.set_input_type(AdditiveNodes())
    additive_view.add_output("graph", graph2dict, id_attribute='uuid')
    api.register_view(additive_view, url_prefix="additive_nodes")

    #layout
    api = layout_api(engines, api)
    #clustering
    api = clustering_api(engines, api)

    return api
def TmuseApi(name, host='localhost:9200', index_name='tmuse', doc_type='graph', retry=5): """ API over tmuse elastic search """ esindex = EsIndex(index_name, doc_type=doc_type, host=host) print "# TmuseApi", host, doc_type, index_name # let es start for i in range(retry): if not esindex._es.ping(): print "waiting for es to start" time.sleep(i) assert esindex._es.ping(), "impossible to reach ES server" # build the API from this engine print "api name", name api = ReliureAPI(name) # Main api entry point: tmuse engine (subgraph) view = EngineView(engine(esindex)) view.set_input_type(ComplexQuery()) view.add_output("query", ComplexQuery()) view.add_output("graph", export_graph) view.add_output("layout", export_layout) view.add_output("clusters", export_clustering) # add a simple play route view.play_route("<query>") api.register_view(view, url_prefix="subgraph") # Add auto completion View completion = TmuseEsComplete(index=esindex, size=20) # TODO suggestion rerank # completion |= rerank completion_view = ComponentView(completion) completion_view.add_input("lang", Text(default=u"*")) completion_view.add_input("pos", Text(default=u"*")) completion_view.add_input("form") completion_view.add_output("response") completion_view.play_route("<lang>.<pos>.<form>") api.register_view(completion_view, url_prefix="complete") import random @api.route("/random") @api.route("/random/<string:pos>") def random_node(pos=None, retry=5, count=0): if pos not in ALL_POS: pos = random.sample(ALL_POS, 1)[0] graph = "jdm.%s.flat" % pos docs = tmuse.random_node(esindex, graph) doc = docs[0] if len(docs) else dict() return jsonify({'pos': pos, 'doc': doc}) # Debug views @api.route("/_extract/<string:graph>/<string:text>") def _extract(graph, text): query = QueryUnit(graph=graph, form=text) es_res = tmuse.extract(esindex, query) return jsonify({'res': es_res}) @api.route("/_prox/<string:graph>/<string:text>") def _prox(graph, text): es_res = proxlist(esindex, graph, text, 100) return 
jsonify({'res': es_res}) return api