def index():
    """Handle a search request and render the results page.

    The query is read from POST form data or from GET args.  When no
    query is present the plain index page is returned.  Category
    selection precedence: engines parsed out of the query itself, then
    ``category_<name>`` request parameters, then the ``categories``
    cookie, and finally the ``general`` category.

    Supported output formats: html (default) and json.
    """
    # NOTE: `categories` is only read here, so the original
    # `global categories` declaration was a no-op and is dropped.
    if request.method == 'POST':
        request_data = request.form
    else:
        request_data = request.args

    if not request_data.get('q'):
        return render('index.html')

    selected_categories = []
    # parse_query may pull explicit engine selections out of the raw
    # query string; only fall back to category selection when it did not.
    query, selected_engines = parse_query(request_data['q'].encode('utf-8'))

    if not selected_engines:
        # 1) categories chosen via category_<name> request parameters
        for pd_name in request_data:
            if pd_name.startswith('category_'):
                category = pd_name[9:]  # strip the 'category_' prefix
                if category in categories:
                    selected_categories.append(category)
        # 2) fall back to the categories remembered in the cookie
        if not selected_categories:
            cookie_categories = request.cookies.get('categories', '')
            selected_categories = [c for c in cookie_categories.split(',')
                                   if c in categories]
        # 3) final fallback
        if not selected_categories:
            selected_categories = ['general']
        for categ in selected_categories:
            selected_engines.extend({'category': categ, 'name': x.name}
                                    for x in categories[categ])

    results = search(query, request, selected_engines)

    for result in results:
        # Shorten over-long URLs for display purposes only.
        if len(result['url']) > 74:
            result['pretty_url'] = (result['url'][:35] + '[..]'
                                    + result['url'][-35:])
        else:
            result['pretty_url'] = result['url']

    if request_data.get('format') == 'json':
        return Response(json.dumps({'query': query, 'results': results}),
                        mimetype='application/json')

    template = render('results.html',
                      results=results,
                      q=request_data['q'],
                      selected_categories=selected_categories,
                      number_of_results=len(results))
    resp = make_response(template)
    # Remember the effective category selection for future requests.
    resp.set_cookie('categories', ','.join(selected_categories))
    return resp
def index():
    """Handle a search request and render the results page.

    Reads the query from POST form data or GET args and falls back to
    the index page when it is missing.  Categories come from the parsed
    query's engine selection, ``category_<name>`` parameters, the
    ``categories`` cookie, or default to ``general``, in that order.

    Supported output formats: html (default) and json.
    """
    # `categories` is only read, so no `global` statement is required
    # (the original one was a no-op).
    request_data = request.form if request.method == 'POST' else request.args

    if not request_data.get('q'):
        return render('index.html')

    selected_categories = []
    query, selected_engines = parse_query(request_data['q'].encode('utf-8'))

    if not selected_engines:
        # Explicit category_<name> parameters take priority ...
        for pd_name in request_data:
            if pd_name.startswith('category_'):
                category = pd_name[9:]  # drop the 'category_' prefix
                if category in categories:
                    selected_categories.append(category)
        # ... then categories stored in the cookie ...
        if not selected_categories:
            cookie_categories = request.cookies.get('categories', '')
            selected_categories = [c for c in cookie_categories.split(',')
                                   if c in categories]
        # ... and finally the hard-coded default.
        if not selected_categories:
            selected_categories = ['general']
        for categ in selected_categories:
            selected_engines.extend({'category': categ, 'name': x.name}
                                    for x in categories[categ])

    results = search(query, request, selected_engines)

    for result in results:
        # Truncate long URLs for display.
        if len(result['url']) > 74:
            result['pretty_url'] = (result['url'][:35] + '[..]'
                                    + result['url'][-35:])
        else:
            result['pretty_url'] = result['url']

    if request_data.get('format') == 'json':
        return Response(json.dumps({'query': query, 'results': results}),
                        mimetype='application/json')

    template = render('results.html',
                      results=results,
                      q=request_data['q'],
                      selected_categories=selected_categories,
                      number_of_results=len(results))
    resp = make_response(template)
    # Persist the effective category selection.
    resp.set_cookie('categories', ','.join(selected_categories))
    return resp
def index():
    """Handle a search request.

    Supported output formats: html (default), json, csv and rss.
    Html output gets query-term highlighting; machine-readable formats
    get html stripped from content and title.
    """
    # `categories` is only read here; the original no-op
    # `global categories` declaration is dropped.
    if request.method == 'POST':
        request_data = request.form
    else:
        request_data = request.args

    if not request_data.get('q'):
        return render('index.html')

    selected_categories = []
    query, selected_engines = parse_query(request_data['q'].encode('utf-8'))

    if not selected_engines:
        # category_<name> request parameters win ...
        for pd_name in request_data:
            if pd_name.startswith('category_'):
                category = pd_name[9:]  # strip the 'category_' prefix
                if category in categories:
                    selected_categories.append(category)
        # ... then the categories cookie ...
        if not selected_categories:
            cookie_categories = request.cookies.get('categories', '')
            selected_categories = [c for c in cookie_categories.split(',')
                                   if c in categories]
        # ... and finally the 'general' fallback.
        if not selected_categories:
            selected_categories = ['general']
        for categ in selected_categories:
            selected_engines.extend({'category': categ, 'name': x.name}
                                    for x in categories[categ])

    results, suggestions = search(query, request, selected_engines)

    # NOTE(review): never populated anywhere in this function; kept only
    # because the results template receives it.
    featured_results = []

    # Hoisted: the requested format does not change while handling one
    # request, so look it up once instead of per result.
    output_format = request_data.get('format', 'html')

    for result in results:
        if output_format == 'html':
            # Highlight the query terms in html output ...
            if 'content' in result:
                result['content'] = highlight_content(result['content'],
                                                      query)
            result['title'] = highlight_content(result['title'], query)
        else:
            # ... and strip markup for machine-readable formats.
            if 'content' in result:
                result['content'] = html_to_text(result['content']).strip()
            result['title'] = html_to_text(result['title']).strip()

        # Shorten over-long URLs for display.
        if len(result['url']) > 74:
            url_parts = result['url'][:35], result['url'][-35:]
            result['pretty_url'] = '{0}[...]{1}'.format(*url_parts)
        else:
            result['pretty_url'] = result['url']

        # Attach a favicon when one of the contributing engines has one
        # (the last matching engine wins).
        for engine in result['engines']:
            if engine in favicons:
                result['favicon'] = engine

    if output_format == 'json':
        return Response(json.dumps({'query': query, 'results': results}),
                        mimetype='application/json')
    elif output_format == 'csv':
        # Renamed from `csv` to avoid shadowing the stdlib module name.
        csv_writer = UnicodeWriter(cStringIO.StringIO())
        keys = ('title', 'url', 'content', 'host', 'engine', 'score')
        if results:
            csv_writer.writerow(keys)
            for row in results:
                row['host'] = row['parsed_url'].netloc
                csv_writer.writerow([row.get(key, '') for key in keys])
        csv_writer.stream.seek(0)
        response = Response(csv_writer.stream.read(),
                            mimetype='application/csv')
        content_disp = 'attachment;Filename=searx_-_{0}.csv'.format(query)
        response.headers.add('Content-Disposition', content_disp)
        return response
    elif output_format == 'rss':
        response_rss = render(
            'opensearch_response_rss.xml',
            results=results,
            q=request_data['q'],
            number_of_results=len(results),
            base_url=get_base_url()
        )
        return Response(response_rss, mimetype='text/xml')

    return render(
        'results.html',
        results=results,
        q=request_data['q'],
        selected_categories=selected_categories,
        number_of_results=len(results) + len(featured_results),
        featured_results=featured_results,
        suggestions=suggestions
    )
def index():
    """Render index page.

    Supported outputs: html, json, csv, rss.

    Adds paging support: the page number comes from the ``pageno``
    parameter and paging controls are enabled when any contributing
    engine supports paging.  The search language is read from the
    ``language`` cookie when it names a known language code.
    """
    paging = False

    # Search language: cookie value if it is a known code, else 'all'.
    lang = 'all'
    cookie_lang = request.cookies.get('language')
    if cookie_lang and cookie_lang in (x[0] for x in language_codes):
        lang = cookie_lang

    if request.method == 'POST':
        request_data = request.form
    else:
        request_data = request.args

    if not request_data.get('q'):
        return render('index.html')

    # Page number must be a positive integer; anything else falls back
    # to the plain index page.
    pageno_param = request_data.get('pageno', '1')
    if not pageno_param.isdigit() or int(pageno_param) < 1:
        return render('index.html')
    pageno = int(pageno_param)

    selected_categories = []
    query, selected_engines = parse_query(request_data['q'].encode('utf-8'))

    if selected_engines:
        # Derive the category list from the explicitly selected engines.
        selected_categories = list(set(engine['category']
                                       for engine in selected_engines))
    else:
        # category_<name> request parameters ...
        for pd_name in request_data:
            if pd_name.startswith('category_'):
                category = pd_name[9:]  # strip the 'category_' prefix
                if category in categories:
                    selected_categories.append(category)
        # ... then the categories cookie ...
        if not selected_categories:
            cookie_categories = request.cookies.get('categories', '')
            selected_categories = [c for c in cookie_categories.split(',')
                                   if c in categories]
        # ... then the 'general' fallback.
        if not selected_categories:
            selected_categories = ['general']
        for categ in selected_categories:
            selected_engines.extend({'category': categ, 'name': x.name}
                                    for x in categories[categ])

    results, suggestions = search(query, request, selected_engines,
                                  pageno, lang)

    # Hoisted out of the loop: the requested format is constant for the
    # whole request.
    output_format = request_data.get('format', 'html')

    for result in results:
        # Enable pagination as soon as any contributing engine pages.
        if not paging and engines[result['engine']].paging:
            paging = True

        if output_format == 'html':
            # Highlight query terms in html output ...
            if 'content' in result:
                result['content'] = highlight_content(result['content'],
                                                      query)
            result['title'] = highlight_content(result['title'], query)
        else:
            # ... strip markup for machine-readable formats.
            if 'content' in result:
                result['content'] = html_to_text(result['content']).strip()
            # removing html content and whitespace duplications
            result['title'] = ' '.join(html_to_text(result['title'])
                                       .strip().split())

        # Shorten over-long URLs for display.
        if len(result['url']) > 74:
            url_parts = result['url'][:35], result['url'][-35:]
            result['pretty_url'] = '{0}[...]{1}'.format(*url_parts)
        else:
            result['pretty_url'] = result['url']

        # Attach a favicon when one of the contributing engines has one
        # (the last matching engine wins).
        for engine in result['engines']:
            if engine in favicons:
                result['favicon'] = engine

    if output_format == 'json':
        return Response(json.dumps({'query': query, 'results': results}),
                        mimetype='application/json')
    elif output_format == 'csv':
        # Renamed from `csv` to avoid shadowing the stdlib module name.
        csv_writer = UnicodeWriter(cStringIO.StringIO())
        keys = ('title', 'url', 'content', 'host', 'engine', 'score')
        if results:
            csv_writer.writerow(keys)
            for row in results:
                row['host'] = row['parsed_url'].netloc
                csv_writer.writerow([row.get(key, '') for key in keys])
        csv_writer.stream.seek(0)
        response = Response(csv_writer.stream.read(),
                            mimetype='application/csv')
        content_disp = 'attachment;Filename=searx_-_{0}.csv'.format(query)
        response.headers.add('Content-Disposition', content_disp)
        return response
    elif output_format == 'rss':
        response_rss = render(
            'opensearch_response_rss.xml',
            results=results,
            q=request_data['q'],
            number_of_results=len(results),
            base_url=get_base_url()
        )
        return Response(response_rss, mimetype='text/xml')

    return render(
        'results.html',
        results=results,
        q=request_data['q'],
        selected_categories=selected_categories,
        paging=paging,
        pageno=pageno,
        suggestions=suggestions
    )