Пример #1
0
def search(request, slug):
    """Full-text search within a single forum via Sphinx.

    Renders the search results page, or the "search unavailable" page
    when the sphinxapi client library is not installed.
    """
    forum = get_object_or_404(Forum, slug=slug)
    try:
        try:
            from sphinxapi import SphinxClient, SPH_MATCH_EXTENDED, SPH_SORT_RELEVANCE
        except ImportError:
            raise SearchUnavailable()
        # Sphinx expects byte strings; GET values are unicode.
        term = request.GET.get('term', '').encode('utf-8')
        if term:
            sphinx = SphinxClient()
            sphinx.SetServer(settings.CICERO_SPHINX_SERVER, settings.CICERO_SPHINX_PORT)
            sphinx.SetMatchMode(SPH_MATCH_EXTENDED)
            sphinx.SetSortMode(SPH_SORT_RELEVANCE)
            # Restrict matches to the current forum ('gid' attribute).
            sphinx.SetFilter('gid', [forum.id])
            paginator = Paginator(SphinxObjectList(sphinx, term), settings.CICERO_PAGINATE_BY)
            try:
                page = paginator.page(request.GET.get('page', '1'))
            except InvalidPage:
                raise Http404
        else:
            # Empty query: render the page with an empty result set.
            paginator = Paginator([], 1)
            page = paginator.page(1)
        return response(request, 'cicero/search.html', {
            'page_id': 'search',
            'forum': forum,
            'term': term,
            'paginator': paginator,
            'page_obj': page,
            'query_dict': request.GET,
        })
    except SearchUnavailable:
        # BUG FIX: a stray bare ``raise`` preceded this return, making it
        # unreachable and re-raising SearchUnavailable to the caller
        # instead of rendering the fallback page.
        return response(request, 'cicero/search_unavailable.html', {})
Пример #2
0
    def index(self, page=1, format=None):
        """Organizations index page.

        Lists organizations, optionally filtered by a Sphinx full-text
        query (``q`` GET parameter), paginated ``num_items`` per page.
        Returns JSON when ``format == 'json'``, otherwise renders the
        index template.
        """
        total_found = 0
        search_time = 0
        num_items = session.get('organizations_num_items', 10)
        qry = request.GET.get('q', None)
        kwds = {}
        if qry:
            # Sphinx hands back exactly one page of matches, so the list
            # given to the paginator is already sliced.
            kwds['presliced_list'] = True
            conn = SphinxClient()
            sphinxopts = extract_sphinx_opts(config['sphinx.url'])
            conn.SetServer(sphinxopts.get('host', '127.0.0.1'))
            conn.SetMatchMode(SPH_MATCH_EXTENDED2)
            # Ask Sphinx for just the requested page (cap 500 matches).
            if page == 1:
                conn.SetLimits(0, num_items, 500)
            else:
                page = int(page)
                offset = (page - 1) * num_items
                conn.SetLimits(offset, num_items, 500)
            qry = clean_sphinx_q(qry)
            try:
                results = conn.Query(qry, 'organizations, organizations_rt')
            except (socket.timeout, struct.error):
                # Sphinx unreachable or protocol error: reload the page.
                redirect(request.path_qs)
            qry = restore_sphinx_q(qry)
            if results and results['matches']:
                # Fetch the matched Group rows by primary key.
                ids = [hit['id'] for hit in results['matches']]
                orgs = Session.query(Group)\
                        .filter(Group.id.in_(ids))\
                        .all()
                total_found = results['total_found']
                search_time = results['time']
                orgcount = total_found
            else:
                orgs = []
                orgcount = 0
        else:
            # No query: page over all organizations.
            # BUG FIX: the total is computed here explicitly; the original
            # relied on a fragile "'orgcount' not in locals()" check and
            # carried a dead "ocount = 0" assignment in the empty-result
            # branch.
            orgs = Session.query(Group)
            orgcount = Session.query(Group.id).count()
        items = paginate.Page(orgs,
                              page=int(page),
                              items_per_page=num_items,
                              item_count=orgcount,
                              **kwds)
        if format == 'json':
            response.headers['Content-Type'] = 'application/json'
            data = convert_org_to_json(items)
            return data

        c.page = items
        c.q = qry
        c.total_found = total_found
        c.search_time = search_time
        return self.render('/organizations/index.html')
Пример #3
0
def search(request):
    """Board search view backed by Sphinx.

    Supports an optional category filter (``c`` GET parameter) and an
    advanced-search mode that looks up posts by exact username.
    Raises Http404 for malformed category ids or out-of-range pages.
    """
    from sphinxapi import SphinxClient, SPH_MATCH_EXTENDED, SPH_SORT_RELEVANCE
    term = request.GET.get('term', '')
    category = None
    # Query-string fragments re-appended to pagination links.
    args = [u'term=%s'%term]
    template_name = 'board/search.html'
    if term:
        sphinx = SphinxClient()
        sphinx.SetServer(settings.SPHINX_SERVER, settings.SPHINX_PORT)
        sphinx.SetMatchMode(SPH_MATCH_EXTENDED)
        sphinx.SetSortMode(SPH_SORT_RELEVANCE)
        cid = request.GET.get('c')
        if cid:
            try:
                cid = int(cid)
            # BUG FIX: int() raises ValueError for non-numeric strings
            # (TypeError only for None), so bad ?c= input previously
            # escaped as a 500 instead of a 404.
            except (TypeError, ValueError):
                raise Http404
            # NOTE(review): cid is passed positionally and the Sphinx
            # filter receives the Category object rather than its id —
            # confirm this matches get_object_or_404 / SetFilter
            # expectations.
            category = get_object_or_404(Category, cid)
            if category:
                sphinx.SetFilter('category_id', [category])
                args.append(u'c=%s'%cid)
        user_settings = get_user_settings(request.user)
        try:
            page = int(request.GET.get('page', '1'))
            if page < 1:
                raise Http404
        except ValueError:
            raise Http404
        #sphinx.SetLimits(page * user_settings.ppp, user_settings.ppp)
        if request.GET.get('adv_submit.x'):
            # Advanced search: treat the term as an exact username.
            template_name='board/advanced_search.html'
            u = User.objects.filter(username=term)
            if u:
                q = QuerySetPaginator(Post.objects.filter(user=u),
                    user_settings.ppp)
            else:
                q = Paginator([], 1).page(1)
        else:
            result = sphinx.Query(u'@@relaxed %s'%term)
            if not result.has_key('total_found'):
                # Sphinx gave no usable answer: show the fallback page.
                template_name = 'board/search_unavailable.html'
            pages = result.get('total_found', 0) / user_settings.ppp
            if pages > 0 and page > pages:
                raise Http404
            ids = [m['id'] for m in result.get('matches', [])]
            q = QuerySetPaginator(Post.view_manager.filter(id__in=ids),
                user_settings.ppp)
            q = get_page(request.GET.get('page', 1), q)
    else:
        # Empty search term: render an empty result page.
        q = Paginator([], 1).page(1)
    return render_to_response(template_name, {
        'result': q,
        'term': term,
        'category': category,
        'args': u'&'.join(['']+args),
    }, context_instance=RequestContext(request, processors=extra_processors))
Пример #4
0
def search(q, index):
    """Run a Sphinx full-text query against ``index``.

    Args:
        q: query string; falsy values are ignored.
        index: Sphinx index name (BUG FIX: previously accepted but never
            passed to Query, which searched every index).

    Returns:
        The Sphinx result dict, or None when the query is empty or the
        server reports no status.
    """
    if not q:
        return None
    s = SphinxClient()
    s.SetServer('127.0.0.1', 9312)
    # 16777215 == 2**24 - 1, effectively "no limit" on returned matches.
    s.SetLimits(0, 16777215)
    # BUG FIX: Status() was called twice (two server round-trips); call
    # once and reuse the value.
    status = s.Status()
    print(status)
    if status:
        query_results = s.Query(q, index)
        print(query_results)
        return query_results
    return None
Пример #5
0
    def list(self, request, *args, **kwargs):
        """Return raw Sphinx results for the ``q`` query parameter.

        Responds with the Sphinx result payload when a query is given,
        otherwise with a "No record found" message.
        """
        sphinx = SphinxClient()
        sphinx.SetServer(settings.SPHINX_HOST, settings.SPHINX_PORT)
        sphinx.SetMatchMode(SPH_MATCH_EXTENDED)
        sphinx.SetRankingMode(SPH_RANK_SPH04)

        term = self.request.query_params.get('q', None)
        if term is None:
            # No query supplied.
            return Response({"message": "No record found"})
        return Response(sphinx.Query(term, settings.SPHINX_INDEX))
Пример #6
0
    def search(self, format=None):
        """Search for domains via the Sphinx indexes.

        Reads ``q`` (query), ``o`` (organization id) and ``p`` (page)
        from the GET parameters and renders a paginated results page.
        ``format`` is accepted by the route but not used here.
        """
        total_found = 0
        search_time = 0
        # Per-session page size, defaulting to 10.
        num_items = session.get('domains_num_items', 10)
        qry = request.GET.get('q', '')
        org = request.GET.get('o', None)
        page = int(request.GET.get('p', 1))
        # if q:
        # Sphinx returns one page of matches, so the list handed to the
        # paginator is already sliced.
        kwds = {'presliced_list': True}
        conn = SphinxClient()
        sphinxopts = extract_sphinx_opts(config['sphinx.url'])
        conn.SetServer(sphinxopts.get('host', '127.0.0.1'))
        conn.SetMatchMode(SPH_MATCH_EXTENDED2)
        # Ask Sphinx for just the requested page (cap 500 matches).
        if page == 1:
            conn.SetLimits(0, num_items, 500)
        else:
            offset = (page - 1) * num_items
            conn.SetLimits(offset, num_items, 500)
        if org:
            # Restrict matches to a single organization.
            conn.SetFilter('orgs', [int(org)])
        if c.user.is_domain_admin:
            # Domain admins only see their own domains; crcs are
            # presumably hashes of the owned domain names — confirm
            # against get_dom_crcs.
            crcs = get_dom_crcs(Session, c.user)
            conn.SetFilter('domain_name', crcs)
        # Normalize/escape the raw query for Sphinx extended syntax.
        qry = clean_sphinx_q(qry)
        try:
            results = conn.Query(qry, 'domains, domains_rt')
        except (socket.timeout, struct.error):
            # Sphinx unreachable or protocol error: reload the same URL
            # (redirect() raises, aborting this request).
            redirect(request.path_qs)
        qry = restore_sphinx_q(qry)
        if results and results['matches']:
            # Fetch the matched Domain rows (with their organizations)
            # by primary key.
            ids = [hit['id'] for hit in results['matches']]
            domains = Session.query(Domain)\
                    .options(joinedload('organizations'))\
                    .filter(Domain.id.in_(ids))\
                    .all()
            total_found = results['total_found']
            search_time = results['time']
            domaincount = total_found
        else:
            domains = []
            domaincount = 0

        c.page = paginate.Page(domains,
                               page=page,
                               items_per_page=num_items,
                               item_count=domaincount,
                               **kwds)
        c.q = qry
        c.org = org
        c.total_found = total_found
        c.search_time = search_time
        return self.render('/domains/searchresults.html')
Пример #7
0
def export_auditlog(format, query):
    """Export the audit log as CSV or PDF.

    Args:
        format: 'csv' or 'pdf'; anything other than 'csv' is exported
            as PDF with the matching content type.
        query: optional Sphinx query string; when given, only matching
            AuditLog rows (max 500) are exported.

    Returns:
        A dict with the task id, the generated file (``f``), content
        type, filename, and an ``errormsg`` set on database failure.
    """
    logger = export_auditlog.get_logger()
    filename = 'auditlog-%s.%s' % (export_auditlog.request.id, format)
    content_type = 'text/csv' if format == 'csv' else 'application/pdf'
    results = dict(id=export_auditlog.request.id,
                   f=None,
                   content_type=content_type,
                   filename=filename,
                   errormsg='')
    try:
        dbquery = Session.query(AuditLog)
        if query:
            # Narrow the export to Sphinx matches (first 500 hits).
            conn = SphinxClient()
            sphinxopts = extract_sphinx_opts(config['sphinx.url'])
            conn.SetServer(sphinxopts.get('host', '127.0.0.1'))
            conn.SetMatchMode(SPH_MATCH_EXTENDED2)
            conn.SetLimits(0, 500, 500)
            query = clean_sphinx_q(query)
            qresults = conn.Query(query, 'auditlog, auditlog_rt')
            if qresults and qresults['matches']:
                ids = [hit['id'] for hit in qresults['matches']]
                dbquery = dbquery.filter(AuditLog.id.in_(ids))

        dbquery = dbquery.order_by(desc('timestamp')).all()
        if format == 'pdf':
            # One wrapped paragraph per column so long values fit cells.
            PS = ParagraphStyle('auditlogp',
                                fontName='Helvetica',
                                fontSize=8,
                                borderPadding=(2, 2, 2, 2))
            rows = [(Paragraph(item.timestamp.strftime('%Y-%m-%d %H:%M'), PS),
                     Paragraph(wrap_string(item.username, 27),
                               PS), Paragraph(wrap_string(item.info, 33), PS),
                     Paragraph(wrap_string(item.hostname, 27), PS),
                     Paragraph(wrap_string(item.remoteip, 15),
                               PS), Paragraph(CATEGORY_MAP[item.category], PS))
                    for item in dbquery]
            pdf = build_pdf(rows)
            results['f'] = pdf
        elif format == 'csv':
            rows = [item.tojson() for item in dbquery]
            keys = ('timestamp', 'username', 'info', 'hostname', 'remoteip',
                    'category')
            results['f'] = build_csv(rows, keys)
        logger.info("Audit Log export complete: %s" % results['filename'])
        return results
    # FIX: use the modern ``as`` binding (the old ``except (X), err``
    # comma syntax is Python-2-only and removed in Python 3).
    except DatabaseError as err:
        results['errormsg'] = str(err)
        logger.info("Audit Log export FAILURE: %s" % str(err))
        return results
Пример #8
0
def getpricelist(keywords='', frompageCount=0, limitNum=20, allnum=200000):
    """Query the 'news_pages' Sphinx index, matching on the title field.

    Args:
        keywords: boolean-mode search terms; empty matches everything.
        frompageCount: result offset (BUG FIX: default was '' — a string
            offset crashes SetLimits, which requires ints).
        limitNum: number of matches per page (BUG FIX: default was '').
        allnum: cap on total matches Sphinx will consider.

    Returns:
        Dict with 'listall' (matched document ids) and 'count' (total
        matches); both empty/zero when the query fails.
    """
    cl = SphinxClient()
    cl.SetServer(sphinxconfig['serverid'], sphinxconfig['port'])
    cl.SetMatchMode(SPH_MATCH_BOOLEAN)
    #		cl.SetSortMode( SPH_SORT_ATTR_DESC ,'postdate desc' )
    cl.SetLimits(frompageCount, limitNum, allnum)
    if keywords:
        # Restrict matching to the title field.
        res = cl.Query('@(title) ' + keywords, 'news_pages')
    else:
        res = cl.Query('', 'news_pages')
    listall = []
    count = 0
    if res:
        count = res['total']
        listall = [m['id'] for m in res['matches']]
    return {'listall': listall, 'count': count}
Пример #9
0
def search(request):
    """Render the search form; run a Sphinx query on valid POST.

    On POST with a valid form and a responsive Sphinx server, renders
    the results; otherwise re-renders the form. BUG FIX: the original
    always rebuilt an unbound SearchForm at the end, discarding
    validation errors from an invalid POST.
    """
    if request.method == 'POST':
        form = SearchForm(request.POST)
        if form.is_valid():
            search_string = form.cleaned_data['search_string']
            s = SphinxClient()
            s.SetServer('192.168.102.2', 9312)
            s.SetLimits(0, 100)
            # Only query when the server answers a status request.
            if s.Status():
                res = s.Query(search_string)
                return render(request, 'search.html', {
                    'items': res,
                    'form': form
                })
    else:
        form = SearchForm()
    # Falls through with the bound form (and its errors) on POST.
    return render(request, 'search.html', {'form': form})
Пример #10
0
def home(request):
    """Home view: keyword image search, or 30 random images.

    Returns a dict with 'images': a query of images ranked by how many
    of the searched keywords they match, or None when a search found
    nothing (or failed).
    """
    mysql = DBSession()
    images = None
    sphinx = SphinxClient()
    sphinx.SetServer('127.0.0.1', 9312)
    sphinx.SetMatchMode(sphinxapi.SPH_MATCH_ANY)

    if 'query' in request.GET and len(request.GET['query']) > 0:
        results = sphinx.Query(request.GET['query'])
        # BUG FIX: Query() returns None when searchd is unreachable or
        # errors; indexing it directly raised TypeError.
        if results and results['total'] > 0:
            matches = [match['id'] for match in results['matches']]
            # Rank images by the number of matched keywords.
            images = mysql.query(
                Image.id.label('id'),
                Image.filename.label('filename'),
                func.count(Keyword.id).label('match_count'),
            ).join(Image.keywords).filter(
                Keyword.id.in_(matches)
            ).group_by(Image).order_by('match_count DESC').distinct()
    else:
        # No query: show some random images.
        images = mysql.query(Image).order_by(func.rand()).limit(30).all()

    return {'images': images}
Пример #11
0
 def search(self, format=None):
     """Search for accounts via the Sphinx indexes.

     Reads ``q`` (query), ``d`` (domain id) and ``p`` (page) from the
     GET parameters and renders a paginated results page. ``format``
     is accepted by the route but not used here.
     """
     total_found = 0
     search_time = 0
     # Per-session page size, defaulting to 10.
     num_items = session.get('accounts_num_items', 10)
     qry = request.GET.get('q', '')
     doms = request.GET.get('d', None)
     # Sphinx returns one page of matches, so the list handed to the
     # paginator is already sliced.
     kwds = {'presliced_list': True}
     page = int(request.GET.get('p', 1))
     conn = SphinxClient()
     sphinxopts = extract_sphinx_opts(config['sphinx.url'])
     conn.SetServer(sphinxopts.get('host', '127.0.0.1'))
     conn.SetMatchMode(SPH_MATCH_EXTENDED2)
     # Prefer username hits, then email, then first/last name.
     conn.SetFieldWeights(
         dict(username=50, email=30, firstname=10, lastname=10))
     # Ask Sphinx for just the requested page (cap 500 matches).
     if page == 1:
         conn.SetLimits(0, num_items, 500)
     else:
         page = int(page)
         offset = (page - 1) * num_items
         conn.SetLimits(offset, num_items, 500)
     if doms:
         # Restrict matches to a single domain.
         conn.SetFilter('domains', [
             int(doms),
         ])
     if c.user.is_domain_admin:
         # Domain admins only see accounts in domains owned by their
         # organizations.
         # crcs = get_dom_crcs(Session, c.user)
         domains = Session.query(Domain.id).join(dom_owns,
                     (oas, dom_owns.c.organization_id ==
                     oas.c.organization_id))\
                     .filter(oas.c.user_id == c.user.id)
         conn.SetFilter('domains', [domain[0] for domain in domains])
     # Normalize/escape the raw query for Sphinx extended syntax.
     qry = clean_sphinx_q(qry)
     try:
         results = conn.Query(qry, 'accounts, accounts_rt')
     except (socket.timeout, struct.error):
         # Sphinx unreachable or protocol error: reload the same URL
         # (redirect() raises, aborting this request).
         redirect(request.path_qs)
     qry = restore_sphinx_q(qry)
     if results and results['matches']:
         # Fetch the matched user columns by primary key.
         ids = [hit['id'] for hit in results['matches']]
         total_found = results['total_found']
         search_time = results['time']
         users = Session.query(User.id,
                                 User.username,
                                 User.firstname,
                                 User.lastname,
                                 User.email,
                                 User.active,
                                 User.local,
                                 User.account_type)\
                             .filter(User.id.in_(ids))\
                             .order_by(User.id)\
                             .all()
         usercount = total_found
     else:
         users = []
         usercount = 0
     c.q = qry
     c.d = doms
     c.total_found = total_found
     c.search_time = search_time
     c.page = paginate.Page(users,
                            page=int(page),
                            items_per_page=num_items,
                            item_count=usercount,
                            **kwds)
     return self.render('/accounts/searchresults.html')
Пример #12
0
    def search(self, format=None):
        """Search for messages via the Sphinx indexes.

        GET parameters: ``q`` (query, required — redirects to the
        listing when absent), ``a`` (action: listing, quarantine or
        archive) and ``page``. Returns JSON when ``format == 'json'``,
        otherwise renders the search results template.
        """
        qry = request.GET.get('q', None)
        if qry is None:
            # No query: bounce back to the plain listing
            # (redirect() raises, aborting this request).
            redirect(url(controller='messages', action='listing'))
        index = 'messages, messagesdelta, messages_rt'
        action = request.GET.get('a', 'listing')
        # Unknown actions fall back to the default listing.
        if action not in ['listing', 'quarantine', 'archive']:
            action = 'listing'
        if action == 'archive':
            # Archived mail lives in separate indexes.
            index = 'archive archivedelta'
        try:
            page = int(request.GET.get('page', 1))
        except ValueError:
            page = 1
        num_items = session.get('msgs_search_num_results', 50)
        conn = SphinxClient()
        sphinxopts = extract_sphinx_opts(config['sphinx.url'])
        conn.SetServer(sphinxopts.get('host', '127.0.0.1'))
        conn.SetMatchMode(SPH_MATCH_EXTENDED2)
        if action == 'quarantine':
            # Only quarantined messages for the quarantine view.
            conn.SetFilter('isquarantined', [
                True,
            ])
        # Ask Sphinx for just the requested page (cap 500 matches).
        if page == 1:
            conn.SetLimits(0, num_items, 500)
        else:
            offset = (page - 1) * num_items
            conn.SetLimits(offset, num_items, 500)
        if not c.user.is_superadmin:
            # Constrain the Sphinx query to messages this user may see.
            filter_sphinx(Session, c.user, conn)
        else:
            conn.SetSelect('timestamp')
        # Normalize/escape the raw query for Sphinx extended syntax.
        qry = clean_sphinx_q(qry)
        try:
            results = conn.Query(qry, index)
        except (socket.timeout, struct.error):
            # Sphinx unreachable or protocol error: reload the same URL.
            redirect(request.path_qs)
        qry = restore_sphinx_q(qry)
        if results and results['matches']:
            ids = [hit['id'] for hit in results['matches']]
            # Optional per-session dynamic column filters.
            filters = session.get('filter_by', None)
            if action == 'archive':
                messages = get_archived().filter(Archive.id.in_(ids))
                query = UserFilter(Session, c.user, messages, True)
                messages = query.filter()
                if filters:
                    dynq = DynaQuery(Message, messages, filters)
                    messages = dynq.generate()
            else:
                messages = get_messages().filter(Message.id.in_(ids))
                query = UserFilter(Session, c.user, messages)
                messages = query.filter()
                if filters:
                    dynq = DynaQuery(Message, messages, filters)
                    messages = dynq.generate()
            total_found = results['total']
            search_time = results['time']
            messages = messages.order_by(desc('timestamp'))
        else:
            messages = []
            # Synthesize an empty result set so the paginator below
            # can read results['total'] / results['matches'].
            results = dict(matches=[], total=0)
            total_found = 0
            search_time = 0

        pages = paginator(
            dict(page=page,
                 results_per_page=num_items,
                 total=results['total'],
                 items=len(results['matches']),
                 q=qry))

        if format == 'json':
            response.headers['Content-Type'] = 'application/json'
            data = dict(action=action,
                        total_found=total_found,
                        search_time=search_time,
                        paginator=pages,
                        items=[jsonify_msg_list(msg) for msg in messages])
            return json.dumps(data)

        c.messages = messages
        c.action = action
        c.total_found = total_found
        c.search_time = search_time
        c.page = pages
        return self.render('/messages/searchresults.html')
Пример #13
0
    def index(self,
              list_type=1,
              direction='dsc',
              order_by='id',
              page=1,
              format=None):
        """Page through lists of the given type.

        Optionally filters by a Sphinx full-text query (``q`` GET
        parameter). Non-superadmins only see their own lists. Returns
        JSON when ``format == 'json'``, otherwise renders the index
        template.
        """
        total_found = 0
        search_time = 0
        num_items = session.get('lists_num_items', 10)
        # 'dsc' sorts descending; anything else sorts ascending.
        if direction == 'dsc':
            sort = desc(order_by)
        else:
            sort = order_by
        qry = request.GET.get('q', None)
        kwds = {}
        if qry:
            # Sphinx returns one page of matches, so the list handed to
            # the paginator is already sliced.
            kwds['presliced_list'] = True
            conn = SphinxClient()
            sphinxopts = extract_sphinx_opts(config['sphinx.url'])
            conn.SetServer(sphinxopts.get('host', '127.0.0.1'))
            conn.SetMatchMode(SPH_MATCH_EXTENDED2)
            conn.SetFilter('list_type', [
                int(list_type),
            ])
            if not c.user.is_superadmin:
                # Restrict matches to the current user's lists.
                conn.SetFilter('user_id', [
                    c.user.id,
                ])
            # Ask Sphinx for just the requested page (cap 500 matches).
            if page == 1:
                conn.SetLimits(0, num_items, 500)
            else:
                page = int(page)
                offset = (page - 1) * num_items
                conn.SetLimits(offset, num_items, 500)

            try:
                results = conn.Query(qry, 'lists, lists_rt')
            except (socket.timeout, struct.error):
                # Sphinx unreachable or protocol error: reload the URL.
                redirect(request.path_qs)

            if results and results['matches']:
                # Fetch the matched List rows by primary key.
                ids = [hit['id'] for hit in results['matches']]
                total_found = results['total_found']
                search_time = results['time']
                items = Session.query(List)\
                        .filter(List.list_type == list_type)\
                        .filter(List.id.in_(ids))\
                        .order_by(sort)\
                        .all()
                listcount = total_found
            else:
                items = []
                # NOTE(review): itemcount is never read in this branch.
                itemcount = 0
                listcount = 0
        else:
            # No query: page over all lists of this type.
            items = Session.query(List)\
                    .filter(List.list_type == list_type)\
                    .order_by(sort)
            itemcount = Session.query(List.id)\
                    .filter(List.list_type == list_type)
            if not c.user.is_superadmin:
                items = items.filter(List.user_id == c.user.id)
                itemcount = itemcount.filter(List.user_id == c.user.id)
            listcount = itemcount.count()

        # pylint: disable-msg=W0142
        records = paginate.Page(items,
                                page=int(page),
                                items_per_page=num_items,
                                item_count=listcount,
                                **kwds)
        if format == 'json':
            response.headers['Content-Type'] = 'application/json'
            data = convert_list_to_json(records, list_type)
            return data

        c.list_type = list_type
        c.page = records
        c.direction = direction
        c.order_by = order_by
        c.q = qry
        c.total_found = total_found
        c.search_time = search_time
        return self.render('/lists/index.html')
Пример #14
0
    def audit(self, page=1, format=None):
        """Audit log page.

        Pages through AuditLog entries, optionally filtered by a Sphinx
        full-text query (``q`` GET parameter). Returns JSON when
        ``format == 'json'``, otherwise renders the audit template.
        """
        total_found = 0
        search_time = 0
        num_items = session.get('auditlog_num_items', 50)
        qry = request.GET.get('q', None)
        kwds = {}
        if qry:
            conn = SphinxClient()
            sphinxopts = extract_sphinx_opts(config['sphinx.url'])
            conn.SetServer(sphinxopts.get('host', '127.0.0.1'))
            conn.SetMatchMode(SPH_MATCH_EXTENDED2)
            # Ask Sphinx for just the requested page (cap 500 matches).
            if page == 1:
                conn.SetLimits(0, num_items, 500)
            else:
                page = int(page)
                offset = (page - 1) * num_items
                conn.SetLimits(offset, num_items, 500)
            qry = clean_sphinx_q(qry)
            results = conn.Query(qry, 'auditlog, auditlog_rt')
            qry = restore_sphinx_q(qry)
            if results and results['matches']:
                # Fetch the matched AuditLog rows, newest first.
                ids = [hit['id'] for hit in results['matches']]
                query = Session.query(AuditLog)\
                        .filter(AuditLog.id.in_(ids))\
                        .order_by(desc('timestamp'))\
                        .all()
                total_found = results['total_found']
                search_time = results['time']
                logcount = total_found
                # Sphinx already sliced this page of results.
                kwds['presliced_list'] = True
            else:
                query = []
                logcount = 0
        else:
            # No query: page over the whole audit log, newest first.
            # BUG FIX: the total is computed here explicitly; the
            # original relied on a fragile "'logcount' not in locals()"
            # check, carried a dead "lcount = 0" assignment, and put a
            # pointless order_by on the count query.
            query = Session.query(AuditLog)\
                    .order_by(desc('timestamp'))
            logcount = Session.query(AuditLog).count()
        items = paginate.Page(query,
                              page=int(page),
                              items_per_page=num_items,
                              item_count=logcount,
                              **kwds)
        if format == 'json':
            response.headers['Content-Type'] = 'application/json'
            jdict = convert_settings_to_json(items)
            if qry:
                # Echo the search query back in the JSON payload.
                encoded = json.loads(jdict)
                encoded['q'] = qry
                jdict = json.dumps(encoded)
            return jdict

        c.page = items
        c.q = qry
        c.total_found = total_found
        c.search_time = search_time
        return self.render('/status/audit.html')