def export_auditlog(format, query):
    "Export the audit log"
    logger = export_auditlog.get_logger()
    filename = 'auditlog-%s.%s' % (export_auditlog.request.id, format)
    content_type = 'text/csv' if format == 'csv' else 'application/pdf'
    results = dict(id=export_auditlog.request.id,
                   f=None,
                   content_type=content_type,
                   filename=filename,
                   errormsg='')
    try:
        dbquery = Session.query(AuditLog)
        if query:
            conn = SphinxClient()
            conn.SetMatchMode(SPH_MATCH_EXTENDED2)
            conn.SetLimits(0, 500, 500)
            query = clean_sphinx_q(query)
            qresults = conn.Query(query, 'auditlog, auditlog_rt')
            if qresults and qresults['matches']:
                ids = [hit['id'] for hit in qresults['matches']]
                dbquery = dbquery.filter(AuditLog.id.in_(ids))
        dbquery = dbquery.order_by(desc('timestamp')).all()
        if format == 'pdf':
            PS = ParagraphStyle('auditlogp',
                                fontName='Helvetica',
                                fontSize=8,
                                borderPadding=(2, 2, 2, 2))
            rows = [(Paragraph(item.timestamp.strftime('%Y-%m-%d %H:%M'), PS),
                     Paragraph(wrap_string(item.username, 27), PS),
                     Paragraph(wrap_string(item.info, 33), PS),
                     Paragraph(wrap_string(item.hostname, 27), PS),
                     Paragraph(wrap_string(item.remoteip, 15), PS),
                     Paragraph(CATEGORY_MAP[item.category], PS))
                    for item in dbquery]
            pdf = build_pdf(rows)
            results['f'] = pdf
        elif format == 'csv':
            rows = [item.tojson() for item in dbquery]
            keys = ('timestamp', 'username', 'info', 'hostname',
                    'remoteip', 'category')
            results['f'] = build_csv(rows, keys)
        logger.info(_("Audit Log export complete: %s") % results['filename'])
        return results
    except DatabaseError as err:
        results['errormsg'] = str(err)
        logger.info(_("Audit Log export FAILURE: %s") % str(err))
        return results

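export_auditlog looks like a Celery task (it calls get_logger() and reads
export_auditlog.request.id). A minimal sketch of how it might be queued and
consumed, assuming the usual Celery task API; the 'username:admin' query
string and the write-to-disk step are purely illustrative:

    result = export_auditlog.delay('csv', 'username:admin')
    exported = result.get()              # the results dict built above
    if not exported['errormsg']:
        with open(exported['filename'], 'wb') as handle:
            handle.write(exported['f'])
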
def search(self, format=None): "Search for domains" total_found = 0 search_time = 0 num_items = session.get('domains_num_items', 10) qry = request.GET.get('q', '') org = request.GET.get('o', None) page = int(request.GET.get('p', 1)) # if q: kwds = {'presliced_list': True} conn = SphinxClient() sphinxopts = extract_sphinx_opts(config['sphinx.url']) conn.SetServer(sphinxopts.get('host', '127.0.0.1')) conn.SetMatchMode(SPH_MATCH_EXTENDED2) if page == 1: conn.SetLimits(0, num_items, 500) else: offset = (page - 1) * num_items conn.SetLimits(offset, num_items, 500) if org: conn.SetFilter('orgs', [int(org)]) if c.user.is_domain_admin: crcs = get_dom_crcs(Session, c.user) conn.SetFilter('domain_name', crcs) qry = clean_sphinx_q(qry) try: results = conn.Query(qry, 'domains, domains_rt') except (socket.timeout, struct.error): redirect(request.path_qs) qry = restore_sphinx_q(qry) if results and results['matches']: ids = [hit['id'] for hit in results['matches']] domains = Session.query(Domain)\ .options(joinedload('organizations'))\ .filter(Domain.id.in_(ids))\ .all() total_found = results['total_found'] search_time = results['time'] domaincount = total_found else: domains = [] domaincount = 0 c.page = paginate.Page(domains, page=page, items_per_page=num_items, item_count=domaincount, **kwds) c.q = qry c.org = org c.total_found = total_found c.search_time = search_time return self.render('/domains/searchresults.html')
def Query(self, query, index='*', comment=''):
    """Processes the query as Sphinx normally would.

    If specified, parse the query, retrieve the hits and compute the facets.
    """
    # first let's parse the query if possible
    if not isinstance(query, MultiFieldQuery) and hasattr(self, 'query_parser'):
        self.query_parser.Parse(query)
        query = self.query_parser
    self.query = query

    # let's perform a normal query
    results = SphinxClient.Query(self, getattr(query, 'sphinx', query),
                                 getattr(self, 'default_index', index),
                                 comment)

    # let's fetch the hits from the DB if possible
    if hasattr(self, 'db_fetch') and results and results['total_found']:
        self.hits = self.db_fetch.Fetch(results)

    # let's compute the facets if possible
    if hasattr(self, 'facets') and results and results['total_found']:
        self.facets.Compute(query)

    # keep expected return of SphinxClient
    if hasattr(self, 'hits'):
        return self.hits
    else:
        return results

def __init__(self):
    """Creates a sphinx client but with all of fSphinx
    additional functionalities.
    """
    # the possible options
    self.query_parser = None
    self.default_index = '*'
    self.db_fetch = None
    self.cache = None
    # mapping of sort key -> (mode, clause); a dict, so the SetSortMode
    # override below can look options up by key
    self.sort_mode_options = {}

    # the returned results
    self.query = ''
    self.hits = Hits()
    self.facets = FacetGroup()

    SphinxClient.__init__(self)

def Query(self, query, index='', comment=''):
    """Processes the query as Sphinx normally would.

    If specified, parse the query, retrieve the hits and compute the facets.
    """
    # first let's parse the query if possible
    if self.query_parser and isinstance(query, basestring):
        query = self.query_parser.Parse(query)
    self.query = query

    # check the default index
    index = index or self.default_index

    # let's perform a normal query
    results = SphinxClient.Query(self, getattr(query, 'sphinx', query),
                                 index, comment)

    # let's fetch the hits from the DB if possible
    if self.db_fetch and results and results['total_found']:
        self.hits = self.db_fetch.Fetch(results)
    else:
        self.hits = Hits(results)

    # let's compute the facets if possible
    if self.facets and results and results['total_found']:
        self.facets.Compute(query)

    # keep expected return of SphinxClient
    return self.hits

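A minimal usage sketch for the client these Query overrides belong to.
FSphinxClient is the assumed class name, and the searchd address and the
'items' index are placeholders, not values from the source:

    cl = FSphinxClient()
    cl.SetServer('127.0.0.1', 9312)
    cl.default_index = 'items'
    hits = cl.Query('movie madness')   # parsed, fetched and faceted when configured
    print hits
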
def CacheSphinx(cache, cl):
    """Caches the requests of a Sphinx client.
    """
    # there are requests and to-be-computed results
    reqs = [req for req in cl._reqs]
    results = [None] * len(reqs)
    comp_reqs = []
    comp_results = []

    # get results from cache
    for i, req in enumerate(reqs):
        if req in cache:
            results[i] = cache.Get(req)
            results[i]['time'] = 0
        else:
            comp_reqs.append(req)

    # get results that need to be computed
    if comp_reqs:
        cl._reqs = comp_reqs
        comp_results = SphinxClient.RunQueries(cl)
    else:
        cl._reqs = []

    # return None on IO failure
    if comp_results is None:
        return None

    # cache computed results and fill the remaining slots in request order
    # (results.index(None) finds the first slot not served from the cache)
    for req, result in zip(comp_reqs, comp_results):
        if result is not None:
            cache.Set(req, result)
        results[results.index(None)] = result

    return results

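CacheSphinx only assumes the cache object supports membership tests, Get and
Set. A minimal in-memory stand-in for illustration (fSphinx ships its own
cache implementation; this sketch just shows the contract):

    class DictCache(object):
        "Minimal cache: membership test, Get and Set over a plain dict."

        def __init__(self):
            self._store = {}

        def __contains__(self, req):
            return req in self._store

        def Get(self, req):
            # return a copy so callers mutating result['time'] leave the cache intact
            return dict(self._store[req])

        def Set(self, req, result):
            self._store[req] = dict(result)
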
def index(self, page=1, format=None):
    "index page"
    total_found = 0
    search_time = 0
    num_items = session.get('organizations_num_items', 10)
    qry = request.GET.get('q', None)
    kwds = {}
    if qry:
        kwds['presliced_list'] = True
        conn = SphinxClient()
        sphinxopts = extract_sphinx_opts(config['sphinx.url'])
        conn.SetServer(sphinxopts.get('host', '127.0.0.1'))
        conn.SetMatchMode(SPH_MATCH_EXTENDED2)
        if page == 1:
            conn.SetLimits(0, num_items, 500)
        else:
            page = int(page)
            offset = (page - 1) * num_items
            conn.SetLimits(offset, num_items, 500)
        qry = clean_sphinx_q(qry)
        try:
            results = conn.Query(qry, 'organizations, organizations_rt')
        except (socket.timeout, struct.error):
            redirect(request.path_qs)
        qry = restore_sphinx_q(qry)
        if results and results['matches']:
            ids = [hit['id'] for hit in results['matches']]
            orgs = Session.query(Group)\
                          .filter(Group.id.in_(ids))\
                          .all()
            total_found = results['total_found']
            search_time = results['time']
            orgcount = total_found
        else:
            orgs = []
            ocount = 0
            orgcount = 0
    else:
        orgs = Session.query(Group)
        ocount = Session.query(Group.id)
    if 'orgcount' not in locals():
        orgcount = ocount.count()
    items = paginate.Page(orgs,
                          page=int(page),
                          items_per_page=num_items,
                          item_count=orgcount,
                          **kwds)
    if format == 'json':
        response.headers['Content-Type'] = 'application/json'
        data = convert_org_to_json(items)
        return data
    c.page = items
    c.q = qry
    c.total_found = total_found
    c.search_time = search_time
    return self.render('/organizations/index.html')

def search(request):
    from sphinxapi import SphinxClient, SPH_MATCH_EXTENDED, SPH_SORT_RELEVANCE
    term = request.GET.get('term', '')
    category = None
    args = [u'term=%s' % term]
    template_name = 'board/search.html'
    if term:
        sphinx = SphinxClient()
        sphinx.SetServer(settings.SPHINX_SERVER, settings.SPHINX_PORT)
        sphinx.SetMatchMode(SPH_MATCH_EXTENDED)
        sphinx.SetSortMode(SPH_SORT_RELEVANCE)
        cid = request.GET.get('c')
        if cid:
            try:
                # a non-numeric 'c' raises ValueError, not TypeError
                cid = int(cid)
            except (TypeError, ValueError):
                raise Http404
            category = get_object_or_404(Category, pk=cid)
            if category:
                # SetFilter expects integer values, not model instances
                sphinx.SetFilter('category_id', [category.id])
                args.append(u'c=%s' % cid)
        user_settings = get_user_settings(request.user)
        try:
            page = int(request.GET.get('page', '1'))
            if page < 1:
                raise Http404
        except ValueError:
            raise Http404
        #sphinx.SetLimits(page * user_settings.ppp, user_settings.ppp)
        if request.GET.get('adv_submit.x'):
            template_name = 'board/advanced_search.html'
            u = User.objects.filter(username=term)
            if u:
                q = QuerySetPaginator(Post.objects.filter(user__in=u),
                                      user_settings.ppp)
            else:
                q = Paginator([], 1).page(1)
        else:
            result = sphinx.Query(u'@@relaxed %s' % term)
            if 'total_found' not in result:
                template_name = 'board/search_unavailable.html'
            # round up so a partially filled last page is still reachable
            pages = ((result.get('total_found', 0) + user_settings.ppp - 1)
                     // user_settings.ppp)
            if pages > 0 and page > pages:
                raise Http404
            ids = [m['id'] for m in result.get('matches', [])]
            q = QuerySetPaginator(Post.view_manager.filter(id__in=ids),
                                  user_settings.ppp)
            q = get_page(request.GET.get('page', 1), q)
    else:
        q = Paginator([], 1).page(1)
    return render_to_response(template_name, {
        'result': q,
        'term': term,
        'category': category,
        'args': u'&'.join([''] + args),
    }, context_instance=RequestContext(request, processors=extra_processors))

def home(request):
    mysql = DBSession()
    images = None
    sphinx = SphinxClient()
    sphinx.SetServer('127.0.0.1', 9312)
    sphinx.SetMatchMode(sphinxapi.SPH_MATCH_ANY)
    if 'query' in request.GET and len(request.GET['query']) > 0:
        # do search; Query() returns None on failure, so guard before indexing
        results = sphinx.Query(request.GET['query'])
        matches = []
        if results:
            for match in results['matches']:
                matches.append(match['id'])
        if results and results['total'] > 0:
            images = mysql.query(Image.id.label('id'),
                                 Image.filename.label('filename'),
                                 func.count(Keyword.id).label('match_count'))\
                          .join(Image.keywords)\
                          .filter(Keyword.id.in_(matches))\
                          .group_by(Image)\
                          .order_by('match_count DESC')\
                          .distinct()
    else:
        # get some random images
        images = mysql.query(Image).order_by(func.rand()).limit(30).all()
    return {'images': images}

def search(q, index):
    if q:
        s = SphinxClient()
        s.SetServer('127.0.0.1', 9312)
        s.SetLimits(0, 16777215)
        # Status() makes a server round trip, so call it once and reuse it
        status = s.Status()
        print status
        if status:
            # search the index passed in rather than the default '*'
            query_results = s.Query(q, index)
            print query_results

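For reference when reading these snippets: a successful sphinxapi Query()
returns a plain dict, while a failed one returns None and GetLastError()
carries the reason. The result is shaped roughly like this (abridged):

    # {
    #     'matches': [{'id': 123, 'weight': 2, 'attrs': {...}}, ...],
    #     'total': 20,            # matches available in this result window
    #     'total_found': 1340,    # total matches in the index
    #     'time': '0.004',        # query time in seconds
    #     'words': [{'word': 'foo', 'docs': 210, 'hits': 340}, ...],
    # }
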
def search(request, slug):
    forum = get_object_or_404(Forum, slug=slug)
    try:
        try:
            from sphinxapi import SphinxClient, SPH_MATCH_EXTENDED, \
                SPH_SORT_RELEVANCE
        except ImportError:
            raise SearchUnavailable()
        term = request.GET.get('term', '').encode('utf-8')
        if term:
            sphinx = SphinxClient()
            sphinx.SetServer(settings.CICERO_SPHINX_SERVER,
                             settings.CICERO_SPHINX_PORT)
            sphinx.SetMatchMode(SPH_MATCH_EXTENDED)
            sphinx.SetSortMode(SPH_SORT_RELEVANCE)
            sphinx.SetFilter('gid', [forum.id])
            paginator = Paginator(SphinxObjectList(sphinx, term),
                                  settings.CICERO_PAGINATE_BY)
            try:
                page = paginator.page(request.GET.get('page', '1'))
            except InvalidPage:
                raise Http404
        else:
            paginator = Paginator([], 1)
            page = paginator.page(1)
        return response(request, 'cicero/search.html', {
            'page_id': 'search',
            'forum': forum,
            'term': term,
            'paginator': paginator,
            'page_obj': page,
            'query_dict': request.GET,
        })
    except SearchUnavailable:
        # a stray bare `raise` here would make this handler unreachable;
        # fall through to the "search unavailable" page instead
        return response(request, 'cicero/search_unavailable.html', {})

def audit(self, page=1, format=None):
    "Audit log"
    total_found = 0
    search_time = 0
    num_items = session.get('auditlog_num_items', 50)
    q = request.GET.get('q', None)
    kwds = {}
    if q:
        conn = SphinxClient()
        conn.SetMatchMode(SPH_MATCH_EXTENDED2)
        if page == 1:
            conn.SetLimits(0, num_items, 500)
        else:
            page = int(page)
            offset = (page - 1) * num_items
            conn.SetLimits(offset, num_items, 500)
        q = clean_sphinx_q(q)
        results = conn.Query(q, 'auditlog, auditlog_rt')
        q = restore_sphinx_q(q)
        if results and results['matches']:
            ids = [hit['id'] for hit in results['matches']]
            query = Session.query(AuditLog)\
                           .filter(AuditLog.id.in_(ids))\
                           .order_by(desc('timestamp'))\
                           .all()
            total_found = results['total_found']
            search_time = results['time']
            logcount = total_found
            kwds['presliced_list'] = True
        else:
            query = []
            lcount = 0
            logcount = 0
    else:
        query = Session.query(AuditLog)\
                       .order_by(desc('timestamp'))
        lcount = Session.query(AuditLog)\
                        .order_by(desc('timestamp'))
    if 'logcount' not in locals():
        logcount = lcount.count()
    items = paginate.Page(query,
                          page=int(page),
                          items_per_page=num_items,
                          item_count=logcount,
                          **kwds)
    if format == 'json':
        response.headers['Content-Type'] = 'application/json'
        jdict = convert_settings_to_json(items)
        if q:
            encoded = json.loads(jdict)
            encoded['q'] = q
            jdict = json.dumps(encoded)
        return jdict
    c.page = items
    c.q = q
    c.total_found = total_found
    c.search_time = search_time
    return render('/status/audit.html')

def list(self, request, *args, **kwargs):
    client = SphinxClient()
    client.SetServer(settings.SPHINX_HOST, settings.SPHINX_PORT)
    client.SetMatchMode(SPH_MATCH_EXTENDED)
    client.SetRankingMode(SPH_RANK_SPH04)
    query = self.request.query_params.get('q', None)
    if query is not None:
        searchresults = client.Query(query, settings.SPHINX_INDEX)
        return Response(searchresults)
    return Response({"message": "No record found"})

def getpricelist(keywords='', frompageCount=0, limitNum=20, allnum=200000):
    # frompageCount/limitNum default to ints (assumed values), since
    # SetLimits expects an integer offset and limit
    cl = SphinxClient()
    cl.SetServer(sphinxconfig['serverid'], sphinxconfig['port'])
    cl.SetMatchMode(SPH_MATCH_BOOLEAN)
    # cl.SetSortMode(SPH_SORT_ATTR_DESC, 'postdate desc')
    cl.SetLimits(frompageCount, limitNum, allnum)
    if keywords:
        res = cl.Query('@(title) ' + keywords, 'news_pages')
    else:
        res = cl.Query('', 'news_pages')
    listall = []
    count = 0
    if res:
        count = res['total']
        listall = [m['id'] for m in res['matches']]
    return {'listall': listall, 'count': count}

def index(self, page=1, format=None):
    "index page"
    total_found = 0
    search_time = 0
    num_items = session.get('organizations_num_items', 10)
    q = request.GET.get('q', None)
    kwds = {}
    if q:
        kwds['presliced_list'] = True
        conn = SphinxClient()
        conn.SetMatchMode(SPH_MATCH_EXTENDED2)
        if page == 1:
            conn.SetLimits(0, num_items, 500)
        else:
            page = int(page)
            offset = (page - 1) * num_items
            conn.SetLimits(offset, num_items, 500)
        q = clean_sphinx_q(q)
        results = conn.Query(q, 'organizations, organizations_rt')
        q = restore_sphinx_q(q)
        if results and results['matches']:
            ids = [hit['id'] for hit in results['matches']]
            orgs = Session.query(Group)\
                          .filter(Group.id.in_(ids))\
                          .all()
            total_found = results['total_found']
            search_time = results['time']
            orgcount = total_found
        else:
            orgs = []
            ocount = 0
            orgcount = 0
    else:
        orgs = Session.query(Group)
        ocount = Session.query(Group.id)
    if 'orgcount' not in locals():
        orgcount = ocount.count()
    items = paginate.Page(orgs,
                          page=int(page),
                          items_per_page=num_items,
                          item_count=orgcount,
                          **kwds)
    if format == 'json':
        response.headers['Content-Type'] = 'application/json'
        data = convert_org_to_json(items)
        return data
    c.page = items
    c.q = q
    c.total_found = total_found
    c.search_time = search_time
    return render('/organizations/index.html')

def search(request):
    if request.method == 'POST':
        form = SearchForm(request.POST)
        if form.is_valid():
            search_string = form.cleaned_data['search_string']
            s = SphinxClient()
            s.SetServer('192.168.102.2', 9312)
            s.SetLimits(0, 100)
            if s.Status():
                res = s.Query(search_string)
                return render(request, 'search.html',
                              {'items': res, 'form': form})
    form = SearchForm()
    return render(request, 'search.html', {'form': form})

def search(self, format=None): "Search for messages" qry = request.GET.get('q', None) if qry is None: redirect(url(controller='messages', action='listing')) index = 'messages, messagesdelta, messages_rt' action = request.GET.get('a', 'listing') if action not in ['listing', 'quarantine', 'archive']: action = 'listing' if action == 'archive': index = 'archive archivedelta' try: page = int(request.GET.get('page', 1)) except ValueError: page = 1 num_items = session.get('msgs_search_num_results', 50) conn = SphinxClient() sphinxopts = extract_sphinx_opts(config['sphinx.url']) conn.SetServer(sphinxopts.get('host', '127.0.0.1')) conn.SetMatchMode(SPH_MATCH_EXTENDED2) if action == 'quarantine': conn.SetFilter('isquarantined', [ True, ]) if page == 1: conn.SetLimits(0, num_items, 500) else: offset = (page - 1) * num_items conn.SetLimits(offset, num_items, 500) if not c.user.is_superadmin: filter_sphinx(Session, c.user, conn) else: conn.SetSelect('timestamp') qry = clean_sphinx_q(qry) try: results = conn.Query(qry, index) except (socket.timeout, struct.error): redirect(request.path_qs) qry = restore_sphinx_q(qry) if results and results['matches']: ids = [hit['id'] for hit in results['matches']] filters = session.get('filter_by', None) if action == 'archive': messages = get_archived().filter(Archive.id.in_(ids)) query = UserFilter(Session, c.user, messages, True) messages = query.filter() if filters: dynq = DynaQuery(Message, messages, filters) messages = dynq.generate() else: messages = get_messages().filter(Message.id.in_(ids)) query = UserFilter(Session, c.user, messages) messages = query.filter() if filters: dynq = DynaQuery(Message, messages, filters) messages = dynq.generate() total_found = results['total'] search_time = results['time'] messages = messages.order_by(desc('timestamp')) else: messages = [] results = dict(matches=[], total=0) total_found = 0 search_time = 0 pages = paginator( dict(page=page, results_per_page=num_items, total=results['total'], items=len(results['matches']), q=qry)) if format == 'json': response.headers['Content-Type'] = 'application/json' data = dict(action=action, total_found=total_found, search_time=search_time, paginator=pages, items=[jsonify_msg_list(msg) for msg in messages]) return json.dumps(data) c.messages = messages c.action = action c.total_found = total_found c.search_time = search_time c.page = pages return self.render('/messages/searchresults.html')
def search(self, format=None): "Search for accounts" total_found = 0 search_time = 0 num_items = session.get('accounts_num_items', 10) qry = request.GET.get('q', '') doms = request.GET.get('d', None) kwds = {'presliced_list': True} page = int(request.GET.get('p', 1)) conn = SphinxClient() sphinxopts = extract_sphinx_opts(config['sphinx.url']) conn.SetServer(sphinxopts.get('host', '127.0.0.1')) conn.SetMatchMode(SPH_MATCH_EXTENDED2) conn.SetFieldWeights( dict(username=50, email=30, firstname=10, lastname=10)) if page == 1: conn.SetLimits(0, num_items, 500) else: page = int(page) offset = (page - 1) * num_items conn.SetLimits(offset, num_items, 500) if doms: conn.SetFilter('domains', [ int(doms), ]) if c.user.is_domain_admin: # crcs = get_dom_crcs(Session, c.user) domains = Session.query(Domain.id).join(dom_owns, (oas, dom_owns.c.organization_id == oas.c.organization_id))\ .filter(oas.c.user_id == c.user.id) conn.SetFilter('domains', [domain[0] for domain in domains]) qry = clean_sphinx_q(qry) try: results = conn.Query(qry, 'accounts, accounts_rt') except (socket.timeout, struct.error): redirect(request.path_qs) qry = restore_sphinx_q(qry) if results and results['matches']: ids = [hit['id'] for hit in results['matches']] total_found = results['total_found'] search_time = results['time'] users = Session.query(User.id, User.username, User.firstname, User.lastname, User.email, User.active, User.local, User.account_type)\ .filter(User.id.in_(ids))\ .order_by(User.id)\ .all() usercount = total_found else: users = [] usercount = 0 c.q = qry c.d = doms c.total_found = total_found c.search_time = search_time c.page = paginate.Page(users, page=int(page), items_per_page=num_items, item_count=usercount, **kwds) return self.render('/accounts/searchresults.html')
def search(self, format=None): "Search for messages" q = request.GET.get('q', None) if q is None: redirect(url(controller='messages', action='listing')) index = 'messages, messages_rt' action = request.GET.get('a', 'listing') if not action in ['listing', 'quarantine', 'archive']: action = 'listing' if action == 'archive': index = 'archive' try: page = int(request.GET.get('page', 1)) except ValueError: page = 1 num_items = session.get('msgs_search_num_results', 50) conn = SphinxClient() conn.SetMatchMode(SPH_MATCH_EXTENDED2) #conn.SetSortMode(SPH_SORT_EXTENDED, "timestamp DESC") if action == 'quarantine': conn.SetFilter('isquarantined', [True,]) if page == 1: conn.SetLimits(0, num_items, 500) else: offset = (page - 1) * num_items conn.SetLimits(offset, num_items, 500) if not c.user.is_superadmin: filter_sphinx(Session, c.user, conn) else: conn.SetSelect('timestamp') q = clean_sphinx_q(q) results = conn.Query(q, index) q = restore_sphinx_q(q) if results and results['matches']: #import pprint #pprint.pprint(results) ids = [hit['id'] for hit in results['matches']] filters = session.get('filter_by', None) if index == 'archive': messages = self._get_archived().filter( Archive.id.in_(ids)) query = UserFilter(Session, c.user, messages, True) messages = query.filter() if filters: dynq = DynaQuery(Message, messages, filters) messages = dynq.generate() else: messages = self._get_messages().filter( Message.id.in_(ids)) query = UserFilter(Session, c.user, messages) messages = query.filter() if filters: dynq = DynaQuery(Message, messages, filters) messages = dynq.generate() total_found = results['total'] search_time = results['time'] messages = messages.order_by(desc('timestamp')) else: print '=' * 100 print conn.GetLastError() messages = [] results = dict(matches=[], total=0) total_found = 0 search_time = 0 pages = paginator(dict(page=page, results_per_page=num_items, total=results['total'], items=len(results['matches']), q=q)) if format == 'json': response.headers['Content-Type'] = 'application/json' data = dict(action=action, total_found=total_found, search_time=search_time, paginator=pages, items=[jsonify_msg_list(msg) for msg in messages]) return json.dumps(data) c.messages = messages c.action = action c.total_found = total_found c.search_time = search_time c.page = pages return render('/messages/searchresults.html')
def RunQueries(self, caching=None):
    if not self.cache or caching is False:
        return SphinxClient.RunQueries(self)
    else:
        return cache.CacheSphinx(self.cache, self)

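A sketch of the batched path through the RunQueries override above, assuming
the DictCache sketch from earlier and the stock sphinxapi AddQuery signature;
the queries and the 'items' index are placeholders:

    cl = FSphinxClient()
    cl.cache = DictCache()
    cl.AddQuery('first query', 'items')
    cl.AddQuery('second query', 'items')
    results = cl.RunQueries()    # cache hits come back with time == 0
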
def SetSortMode(self, mode, clause=''):
    if mode in self.sort_mode_options:
        sort_mode = self.sort_mode_options[mode]
    else:
        sort_mode = (mode, clause)
    SphinxClient.SetSortMode(self, *sort_mode)

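A usage sketch for the options-aware SetSortMode, assuming sort_mode_options
maps a caller-facing key to a (mode, clause) pair; the keys and attribute
values here are illustrative, while the SPH_SORT_* constants are stock
sphinxapi:

    cl = FSphinxClient()
    cl.sort_mode_options = {
        'newest': (SPH_SORT_ATTR_DESC, 'created_at'),
        'relevance': (SPH_SORT_RELEVANCE, ''),
    }
    cl.SetSortMode('newest')                          # resolved via the options table
    cl.SetSortMode(SPH_SORT_EXTENDED, '@weight DESC') # unknown key: passed through
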
def index(self, list_type=1, direction='dsc', order_by='id',
          page=1, format=None):
    "Page through lists"
    total_found = 0
    search_time = 0
    num_items = session.get('lists_num_items', 10)
    if direction == 'dsc':
        sort = desc(order_by)
    else:
        sort = order_by
    qry = request.GET.get('q', None)
    kwds = {}
    if qry:
        kwds['presliced_list'] = True
        conn = SphinxClient()
        sphinxopts = extract_sphinx_opts(config['sphinx.url'])
        conn.SetServer(sphinxopts.get('host', '127.0.0.1'))
        conn.SetMatchMode(SPH_MATCH_EXTENDED2)
        conn.SetFilter('list_type', [int(list_type), ])
        if not c.user.is_superadmin:
            conn.SetFilter('user_id', [c.user.id, ])
        if page == 1:
            conn.SetLimits(0, num_items, 500)
        else:
            page = int(page)
            offset = (page - 1) * num_items
            conn.SetLimits(offset, num_items, 500)
        try:
            results = conn.Query(qry, 'lists, lists_rt')
        except (socket.timeout, struct.error):
            redirect(request.path_qs)
        if results and results['matches']:
            ids = [hit['id'] for hit in results['matches']]
            total_found = results['total_found']
            search_time = results['time']
            items = Session.query(List)\
                           .filter(List.list_type == list_type)\
                           .filter(List.id.in_(ids))\
                           .order_by(sort)\
                           .all()
            listcount = total_found
        else:
            items = []
            itemcount = 0
            listcount = 0
    else:
        items = Session.query(List)\
                       .filter(List.list_type == list_type)\
                       .order_by(sort)
        itemcount = Session.query(List.id)\
                           .filter(List.list_type == list_type)
        if not c.user.is_superadmin:
            items = items.filter(List.user_id == c.user.id)
            itemcount = itemcount.filter(List.user_id == c.user.id)
        listcount = itemcount.count()
    # pylint: disable-msg=W0142
    records = paginate.Page(items,
                            page=int(page),
                            items_per_page=num_items,
                            item_count=listcount,
                            **kwds)
    if format == 'json':
        response.headers['Content-Type'] = 'application/json'
        data = convert_list_to_json(records, list_type)
        return data
    c.list_type = list_type
    c.page = records
    c.direction = direction
    c.order_by = order_by
    c.q = qry
    c.total_found = total_found
    c.search_time = search_time
    return self.render('/lists/index.html')

def index(self, list_type=1, direction='dsc', order_by='id',
          page=1, format=None):
    "Page through lists"
    total_found = 0
    search_time = 0
    num_items = session.get('lists_num_items', 10)
    if direction == 'dsc':
        sort = desc(order_by)
    else:
        sort = order_by
    q = request.GET.get('q', None)
    kwds = {}
    if q:
        kwds['presliced_list'] = True
        conn = SphinxClient()
        conn.SetMatchMode(SPH_MATCH_EXTENDED2)
        conn.SetFilter('list_type', [int(list_type), ])
        if page == 1:
            conn.SetLimits(0, num_items, 500)
        else:
            page = int(page)
            offset = (page - 1) * num_items
            conn.SetLimits(offset, num_items, 500)
        # Sphinx index names cannot contain hyphens; the RT index is
        # lists_rt, as in the newer version of this controller
        results = conn.Query(q, 'lists, lists_rt')
        if results and results['matches']:
            ids = [hit['id'] for hit in results['matches']]
            total_found = results['total_found']
            search_time = results['time']
            items = Session.query(List)\
                           .filter(List.list_type == list_type)\
                           .filter(List.id.in_(ids))\
                           .order_by(sort)\
                           .all()
            listcount = total_found
        else:
            items = []
            itemcount = 0
            listcount = 0
    else:
        items = Session.query(List)\
                       .filter(List.list_type == list_type)\
                       .order_by(sort)
        itemcount = Session.query(List.id)\
                           .filter(List.list_type == list_type)
    if c.user.account_type != 1 and itemcount:
        items = items.filter(List.user_id == c.user.id)
        itemcount = itemcount.filter(List.user_id == c.user.id)
    if 'listcount' not in locals():
        listcount = itemcount.count()
    records = paginate.Page(items,
                            page=int(page),
                            items_per_page=num_items,
                            item_count=listcount,
                            **kwds)
    if format == 'json':
        response.headers['Content-Type'] = 'application/json'
        data = convert_list_to_json(records, list_type)
        return data
    c.list_type = list_type
    c.page = records
    c.direction = direction
    c.order_by = order_by
    c.q = q
    c.total_found = total_found
    c.search_time = search_time
    return render('/lists/index.html')
