def get(self, term):
    """
    Returns list of matching concepts or entities using lexical search
    """
    args = simple_parser.parse_args()
    q = GolrSearchQuery(term, **args)
    results = q.exec()
    return results
def test_search_go_ontol():
    q = GolrSearchQuery("transport*", category='ontology_class', is_go=True)
    print("Q={}".format(q))
    params = q.solr_params()
    print("PARAMS={}".format(params))
    results = q.exec()
    print("RESULTS={}".format(results))
    docs = results['docs']
    for r in docs:
        print(str(r))
    assert len(docs) > 0
def test_search():
    q = GolrSearchQuery("abnormal")
    print("Q={}".format(q))
    params = q.solr_params()
    print("PARAMS={}".format(params))
    results = q.exec()
    print("RESULTS={}".format(results))
    docs = results['docs']
    for r in docs:
        print(str(r))
    assert len(docs) > 0
def test_search_go_all():
    q = GolrSearchQuery("transport*", is_go=True)
    print("Q={}".format(q))
    params = q.solr_params()
    print("PARAMS={}".format(params))
    results = q.exec()
    print("RESULTS={}".format(results))
    docs = results['docs']
    for r in docs:
        print(str(r))
    assert len(docs) > 0
    print(str(results['facet_counts']))
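# Hedged sketch: a small helper for printing the facet counts inspected at the
# end of test_search_go_all(). The assumed shape of results['facet_counts'],
# a dict keyed by facet field with per-value counts, is an assumption here;
# the test above only requires that the payload be printable.
def print_facets(results):
    for field, counts in results.get('facet_counts', {}).items():
        for value, count in counts.items():
            print("{}/{}: {}".format(field, value, count))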
def test_cursor():
    """
    Tests rows and start parameters.

    First fetch 100 docs, then run the same query but iterate with a cursor
    in increments of ten. The two sets of IDs returned should be identical.
    """
    q = GolrSearchQuery("abnormal", rows=100)
    results = q.exec()
    docs = results['docs']
    ids = set([d['id'] for d in docs])
    print('Init ids={}'.format(ids))
    assert len(ids) == 100
    matches = set()
    for i in range(0, 10):
        q = GolrSearchQuery("abnormal", start=i*10, rows=10)
        docs = q.exec()['docs']
        next_ids = [d['id'] for d in docs]
        assert len(next_ids) == 10
        print('Next ids (from {}) = {}'.format(i*10, next_ids))
        matches.update(next_ids)
    assert len(matches) == 100
    assert len(matches.intersection(ids)) == 100
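# A minimal sketch, assuming only the start/rows behaviour that test_cursor()
# verifies above: walk an arbitrary query in fixed-size pages, yielding each
# document, until a short (or empty) page signals that all results are fetched.
# The generator name and page_size default are illustrative, not part of the API.
def iter_all_docs(term, page_size=10, **kwargs):
    start = 0
    while True:
        page = GolrSearchQuery(term, start=start, rows=page_size, **kwargs).exec()['docs']
        for doc in page:
            yield doc
        if len(page) < page_size:
            break
        start += page_size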
def get_concepts():
    # Parse and validate pagination and keyword query parameters
    keywords = request.args.get('keywords', None)
    semanticGroups = request.args.get('semanticGroups', None)
    pageSize = int(request.args.get('pageSize', 1))
    pageNumber = int(request.args.get('pageNumber', 1))

    validatePagination(pageSize, pageNumber)
    validateKeywords(keywords)

    # Translate the request into a lexical search over the Golr index
    q = GolrSearchQuery(term=keywords,
                        category=build_categories(semanticGroups),
                        rows=pageSize,
                        start=getStartIndex(pageNumber, pageSize))
    results = q.exec()

    concepts = []
    for d in results['docs']:
        concept = parse_concept(d)
        concepts.append(concept)
    return jsonify(concepts)
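# getStartIndex() is referenced in get_concepts() but not shown here; this is
# an assumed, minimal implementation that converts a 1-based pageNumber into
# the 0-based 'start' offset passed to GolrSearchQuery.
def getStartIndex(pageNumber, pageSize):
    return (pageNumber - 1) * pageSize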
def search(term, args):
    q = GolrSearchQuery(term, args)
    return q.exec()