def search_share():
    """Run a SHARE elasticsearch query (or count) and time the call.

    Query parameters:
        count -- if present, return only the hit count.
        raw   -- if present, return raw elasticsearch results.
        v     -- optional index version; selects a versioned index.
        q / from / size / sort -- simple-query parameters (GET only).

    Returns the search (or count) results dict with an added ``time``
    key holding the elapsed seconds, rounded to two decimals.
    """
    started = time.time()
    results = {}
    wants_count = request.args.get('count') is not None
    wants_raw = request.args.get('raw') is not None
    version = request.args.get('v')
    # A version selects the templated index name; otherwise the default.
    index = (
        settings.SHARE_ELASTIC_INDEX_TEMPLATE.format(version)
        if version
        else settings.SHARE_ELASTIC_INDEX
    )
    if request.method == 'POST':
        # POST bodies carry a prebuilt elasticsearch query.
        query = request.get_json()
    elif request.method == 'GET':
        query = build_query(
            request.args.get('q', '*'),
            request.args.get('from', 0),
            request.args.get('size', 10),
            sort=request.args.get('sort'),
        )
    if wants_count:
        results = search.count_share(query, index=index)
    else:
        results = search.search_share(query, wants_raw, index=index)
    results['time'] = round(time.time() - started, 2)
    return results
def search_share():
    """Execute a SHARE search or count against elasticsearch.

    Reads flag parameters ``count`` and ``raw`` (presence-only), an
    optional index version ``v``, and either a JSON body (POST) or the
    simple ``q``/``from``/``size``/``sort`` parameters (GET). The
    elapsed wall-clock time is attached to the result under ``time``.
    """
    begin = time.time()
    results = {}
    count_only = request.args.get('count') is not None
    raw_output = request.args.get('raw') is not None
    version = request.args.get('v')
    if version:
        # Route the query at a specific versioned index.
        index = settings.SHARE_ELASTIC_INDEX_TEMPLATE.format(version)
    else:
        index = settings.SHARE_ELASTIC_INDEX
    if request.method == 'POST':
        query = request.get_json()
    elif request.method == 'GET':
        query = build_query(
            request.args.get('q', '*'),
            request.args.get('from', 0),
            request.args.get('size', 10),
            sort=request.args.get('sort'),
        )
    results = (
        search.count_share(query, index=index)
        if count_only
        else search.search_share(query, raw_output, index=index)
    )
    results['time'] = round(time.time() - begin, 2)
    return results
def search_share_atom(**kwargs):
    """Serve SHARE search results as an Atom feed.

    Builds a paginated query from the ``q``, ``sort`` and ``page``
    request parameters and renders the hits through the SHARE
    Atom serializer. A missing index yields an empty feed; a malformed
    query becomes an HTTP 400.
    """
    search_text = request.args.get('q', '*')
    sort_key = request.args.get('sort', 'dateUpdated')
    # we want the results per page to be constant between pages
    # TODO - move this functionality into build_query in util
    start = util.compute_start(request.args.get('page', 1), RESULTS_PER_PAGE)
    query = build_query(
        search_text, size=RESULTS_PER_PAGE, start=start, sort=sort_key
    )
    try:
        search_results = search.search_share(query)
    except MalformedQueryError:
        raise HTTPError(http.BAD_REQUEST)
    except IndexNotFoundError:
        # No index yet: render an empty feed instead of erroring out.
        search_results = {'count': 0, 'results': []}
    atom_url = api_url_for('search_share_atom', _xml=True, _absolute=True)
    return util.create_atom_feed(
        name='SHARE',
        data=search_results['results'],
        query=search_text,
        size=RESULTS_PER_PAGE,
        start=start,
        url=atom_url,
        to_atom=share_search.to_atom,
    )
def search_share_atom(**kwargs):
    """Return SHARE search results rendered as an Atom feed.

    Pagination is fixed at RESULTS_PER_PAGE per page; results come from
    the ``share_v1`` index. Malformed queries raise HTTP 400; a missing
    index degrades to an empty feed.
    """
    query_text = request.args.get('q', '*')
    order_by = request.args.get('sort', 'dateUpdated')
    # we want the results per page to be constant between pages
    # TODO - move this functionality into build_query in util
    start = util.compute_start(request.args.get('page', 1), RESULTS_PER_PAGE)
    feed_query = build_query(
        query_text, size=RESULTS_PER_PAGE, start=start, sort=order_by
    )
    try:
        search_results = search.search_share(feed_query, index='share_v1')
    except MalformedQueryError:
        raise HTTPError(http.BAD_REQUEST)
    except IndexNotFoundError:
        # Treat an absent index as "no results" rather than an error.
        search_results = {'count': 0, 'results': []}
    atom_url = api_url_for('search_share_atom', _xml=True, _absolute=True)
    return util.create_atom_feed(
        name='SHARE',
        data=search_results['results'],
        query=query_text,
        size=RESULTS_PER_PAGE,
        start=start,
        url=atom_url,
        to_atom=share_search.to_atom,
    )
def search_share_atom(**kwargs):
    """Serve SHARE search results as an Atom feed.

    Accepts either a URL-encoded JSON query in ``jsonQuery`` or the
    simple ``q``/``sort`` parameters; either way pagination is pinned
    to RESULTS_PER_PAGE. Aggregations are stripped from JSON queries
    since the feed never displays them.
    """
    raw_json = request.args.get('jsonQuery')
    start = util.compute_start(request.args.get('page', 1), RESULTS_PER_PAGE)
    if not raw_json:
        q = request.args.get('q', '*')
        sort_key = request.args.get('sort')
        # we want the results per page to be constant between pages
        # TODO - move this functionality into build_query in util
        query = build_query(q, size=RESULTS_PER_PAGE, start=start, sort=sort_key)
    else:
        query = json.loads(unquote(raw_json))
        query['from'] = start
        query['size'] = RESULTS_PER_PAGE
        # Aggregations are expensive, and we really don't want to
        # execute them if they won't be used
        for agg_key in ['aggs', 'aggregations']:
            if query.get(agg_key):
                del query[agg_key]
        q = query  # Do we really want to display this?
    try:
        search_results = search.search_share(query)
    except MalformedQueryError:
        raise HTTPError(http.BAD_REQUEST)
    except IndexNotFoundError:
        # Missing index: serve an empty feed rather than failing.
        search_results = {'count': 0, 'results': []}
    atom_url = api_url_for('search_share_atom', _xml=True, _absolute=True)
    return util.create_atom_feed(
        name='SHARE',
        data=search_results['results'],
        query=q,
        size=RESULTS_PER_PAGE,
        start=start,
        url=atom_url,
        to_atom=share_search.to_atom,
    )
def search_share_atom(**kwargs):
    """Render SHARE search hits as an Atom feed.

    The query comes either from a URL-encoded JSON blob in
    ``jsonQuery`` or from the plain ``q``/``sort`` parameters.
    Paging is forced to RESULTS_PER_PAGE, and any aggregations in a
    JSON query are dropped because the feed cannot show them.
    """
    encoded_query = request.args.get('jsonQuery')
    start = util.compute_start(request.args.get('page', 1), RESULTS_PER_PAGE)
    if not encoded_query:
        q = request.args.get('q', '*')
        ordering = request.args.get('sort')
        # we want the results per page to be constant between pages
        # TODO - move this functionality into build_query in util
        query = build_query(q, size=RESULTS_PER_PAGE, start=start, sort=ordering)
    else:
        query = json.loads(unquote(encoded_query))
        query['from'] = start
        query['size'] = RESULTS_PER_PAGE
        # Aggregations are expensive, and we really don't want to
        # execute them if they won't be used
        for key in ['aggs', 'aggregations']:
            if query.get(key):
                del query[key]
        q = query  # Do we really want to display this?
    try:
        search_results = search.search_share(query)
    except MalformedQueryError:
        raise HTTPError(http.BAD_REQUEST)
    except IndexNotFoundError:
        # An absent index produces an empty feed, not an error.
        search_results = {'count': 0, 'results': []}
    atom_url = api_url_for('search_share_atom', _xml=True, _absolute=True)
    return util.create_atom_feed(
        name='SHARE',
        data=search_results['results'],
        query=q,
        size=RESULTS_PER_PAGE,
        start=start,
        url=atom_url,
        to_atom=share_search.to_atom,
    )
def search_share():
    """Run a SHARE search (or count) and report elapsed time.

    Presence of the ``count`` parameter switches to a count-only
    query; ``raw`` requests raw elasticsearch output. POST bodies
    carry a prebuilt query; GET parameters are assembled via
    ``build_query``. ``from`` and ``size`` are passed through
    unchanged (None when absent).
    """
    t0 = time.time()
    results = {}
    count_only = request.args.get('count') is not None
    raw_output = request.args.get('raw') is not None
    if request.method == 'POST':
        query = request.get_json()
    elif request.method == 'GET':
        query = build_query(
            request.args.get('q', '*'),
            request.args.get('from'),
            request.args.get('size'),
            sort=request.args.get('sort'),
        )
    if count_only:
        results = search.count_share(query)
    else:
        results = search.search_share(query, raw_output)
    results['time'] = round(time.time() - t0, 2)
    return results
def search_share():
    """Execute a SHARE elasticsearch search or count request.

    Flags ``count`` and ``raw`` are presence-only toggles. A POST
    supplies the query as JSON; a GET builds it from ``q``, ``from``,
    ``size`` and ``sort``. The wall-clock duration of the call is
    recorded in the returned dict under ``time``.
    """
    stopwatch = time.time()
    results = {}
    wants_count = request.args.get('count') is not None
    wants_raw = request.args.get('raw') is not None
    if request.method == 'POST':
        query = request.get_json()
    elif request.method == 'GET':
        query = build_query(
            request.args.get('q', '*'),
            request.args.get('from'),
            request.args.get('size'),
            sort=request.args.get('sort'),
        )
    results = (
        search.count_share(query)
        if wants_count
        else search.search_share(query, wants_raw)
    )
    results['time'] = round(time.time() - stopwatch, 2)
    return results