def attributes():
    """Return the set of searchable attributes visible to the user as JSON."""
    etag_cache_keygen()
    readable_sources = authz.authz_sources('read')  # noqa
    readable_lists = authz.authz_lists('read')  # noqa
    attrs = available_attributes(
        request.args,
        sources=readable_sources,
        lists=readable_lists)
    return jsonify(attrs)
def query():
    """Run a document search for the current request and return paginated
    JSON, including transformed facet aggregations."""
    etag_cache_keygen()
    search = document_query(
        request.args,
        lists=authz.authz_lists('read'),
        sources=authz.authz_sources('read'))
    results = search_documents(search)
    pager = Pager(
        results,
        results_converter=lambda docs: [add_urls(doc) for doc in docs])
    payload = pager.to_dict()
    aggregations = results.result.get('aggregations', {})
    payload['facets'] = transform_facets(aggregations)
    return jsonify(payload)
def export():
    """Stream the current search result set to the client as an XLSX file."""
    attributes = request.args.getlist('attribute')
    query = document_query(
        request.args,
        lists=authz.authz_lists('read'),
        sources=authz.authz_sources('read'))
    # Requested core fields are fetched directly; anything else lives
    # under the nested 'attributes' field, so fetch that instead.
    source_fields = set(query['_source'])
    for attribute in attributes:
        if attribute in CORE_FIELDS:
            source_fields.add(attribute)
        else:
            source_fields.add('attributes')
    query['_source'] = list(source_fields)
    rows = (process_row(row, attributes) for row in raw_iter(query))
    workbook = make_excel(rows, attributes)
    return send_file(
        workbook,
        mimetype=XLSX_MIME,
        as_attachment=True,
        attachment_filename='export.xlsx')
def _query():
    """Shared search implementation for the internal and public APIs.

    Builds a document query from the request arguments — restricted to the
    sources and lists the current user may read, with highlighting enabled —
    runs it, and returns a plain dict with paginated results plus
    transformed facet aggregations (callers decide how to serialize it).
    """
    etag_cache_keygen()
    query = document_query(request.args,
                           lists=authz.authz_lists('read'),
                           sources=authz.authz_sources('read'),
                           highlights=True)
    results = search_documents(query)
    pager = Pager(results,
                  results_converter=lambda ds: [add_urls(d) for d in ds])
    data = pager.to_dict()
    # Commented-out ipdb debugger invocation removed — dead debug code
    # must not ship.
    data['facets'] = transform_facets(results.result.get('aggregations', {}))
    return data
def generate_graph(args):
    """Build a weighted entity co-occurrence graph from matching documents
    and return a paginated view of it."""
    fields = ['id', 'collection', 'entities.id', 'entities.label',
              'entities.category']
    query = document_query(args, fields=fields,
                           sources=authz.authz_sources('read'),
                           lists=authz.authz_lists('read'),
                           facets=False)
    multigraph = nx.MultiGraph()
    for doc in raw_iter(query):
        doc_entities = set()
        for ent in doc.get('_source').get('entities', []):
            ent_id = ent.get('id')
            if not multigraph.has_node(ent_id):
                multigraph.add_node(ent_id,
                                    label=ent.get('label'),
                                    category=ent.get('category'))
            doc_entities.add(ent_id)
        # Every pair of entities appearing in the same document gets an
        # edge; parallel edges are collapsed into weights below.
        for left, right in combinations(doc_entities, 2):
            multigraph.add_edge(left, right, weight=1)
    weighted = multigraph_to_weighted(multigraph)
    return paginate_graph(weighted)
def generate_graph(args):
    """Construct an entity co-occurrence graph over readable documents,
    collapse it to weighted edges, and paginate the result."""
    fields = [
        'id',
        'collection',
        'entities.id',
        'entities.label',
        'entities.category',
    ]
    query = document_query(args,
                           fields=fields,
                           sources=authz.authz_sources('read'),
                           lists=authz.authz_lists('read'),
                           facets=False)
    graph = nx.MultiGraph()
    for document in raw_iter(query):
        ids_in_doc = set()
        for entity in document.get('_source').get('entities', []):
            node_id = entity.get('id')
            if not graph.has_node(node_id):
                graph.add_node(node_id,
                               label=entity.get('label'),
                               category=entity.get('category'))
            ids_in_doc.add(node_id)
        # Link all entities that co-occur within this document.
        for pair in combinations(ids_in_doc, 2):
            graph.add_edge(pair[0], pair[1], weight=1)
    graph = multigraph_to_weighted(graph)
    return paginate_graph(graph)
def attributes():
    """List the attributes available to the current user, as JSON."""
    etag_cache_keygen()
    result = available_attributes(
        request.args,
        sources=authz.authz_sources('read'),  # noqa
        lists=authz.authz_lists('read'))  # noqa
    return jsonify(result)
def suggest(): lists = authz.authz_lists("read") prefix = request.args.get("prefix") results = Entity.suggest_prefix(prefix, lists) return jsonify({"results": results})
def suggest():
    """Suggest entities whose labels start with the requested prefix,
    restricted to readable lists."""
    prefix = request.args.get('prefix')
    readable = authz.authz_lists('read')
    return jsonify({'results': Entity.suggest_prefix(prefix, readable)})