def search_nonrel(request):
    """Search using nonrel-search."""
    from search.core import search

    query = request.GET.get('q')
    if query:
        events = search(Event, query).order_by('-timestamp')
        events = filter(lambda e: e.source_host.has_access(request.user), events)

        hosts = search(Host, query)
        hosts = filter(lambda h: h.has_access(request.user), hosts)

        networks = search(Network, query)
        networks = filter(lambda n: n.has_access(request.user), networks)

        context = {
            'results_events': events,
            'results_hosts': hosts,
            'results_networks': networks,
            'query': query,
            'form': SearchForm(request.GET),
        }
    else:
        context = {'form': SearchForm(request.GET)}
    return direct_to_template(request, extra_context=context,
                              template='search_nonrel.html')
def search_db(request):
    search_str = request.GET.get('q', '')
    # str.replace() returns a new string, so the result must be reassigned
    search_str = search_str.replace('+', ' ')

    taxa_sw = set(search(Taxon, search_str, search_index='startswith_index'))
    taxa_ps = set(search(Taxon, search_str, search_index='porterstemmer_index'))
    sightings_sw = set(search(Sighting, search_str, search_index='startswith_index'))
    sightings_ps = set(search(Sighting, search_str, search_index='porterstemmer_index'))

    return render_to_response('ias/search.html', {
        'taxa': taxa_sw | taxa_ps,
        'sightings': sightings_sw | sightings_ps,
    }, context_instance=RequestContext(request))
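# search_db unions a prefix index ('startswith_index') with a stemmed index
# ('porterstemmer_index') for each model. A small helper capturing that pattern
# might look like the sketch below; it only uses the
# search(model, query, search_index=...) call already shown above, and the
# helper name itself is hypothetical rather than part of this project.
def search_both_indexes(model, query):
    """Union the prefix and Porter-stemmed index hits for one model (sketch)."""
    prefix_hits = set(search(model, query, search_index='startswith_index'))
    stemmed_hits = set(search(model, query, search_index='porterstemmer_index'))
    return prefix_hits | stemmed_hits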
def get_more_posts(GET):
    """Function to retrieve additional posts."""
    from django.template import Context, loader

    page = GET.get('page', None)
    start = int(GET.get('start', 0))
    total = int(GET.get('total', 0))
    if start and total and page and start < total:
        end = start + INITIAL_POSTS if start + INITIAL_POSTS < total else total
        if page == 'home':
            posts = Post.objects.all().order_by(
                '-sticky', '-updated_on')[start:end]
        elif page == 'search':
            from search.core import search
            search_terms = GET['terms']
            raw_posts = search(Post, search_terms).order_by(
                '-sticky', '-updated_on')
            # slicing a search result seems to give empty lists?
            posts = [raw_posts[p] for p in range(start, end)]
        elif page == 'tag':
            tag_name = GET['terms']
            posts = Post.objects.filter(tags__in=[tag_name])[start:end]
        else:
            return None
        t = loader.get_template('post_list.html')
        if len(posts) == 0:
            return '', 0
        else:
            d = {'posts': posts}
            return t.render(Context(d)), start + INITIAL_POSTS
    else:
        return '', 0
def events_search(request):
    search_form = EventSearchForm(request.user, request.GET)
    events = None
    if search_form.is_valid() and search_form.cleaned_data["message"]:
        cleaned_data = search_form.cleaned_data
        search_phrase = cleaned_data.get("message")
        events = search(Event, search_phrase)

        date_after = cleaned_data.get("date_after")
        if date_after:
            events = events.filter(timestamp__gte=date_after)

        date_before = cleaned_data.get("date_before")
        if date_before:
            events = events.filter(timestamp__lte=date_before)

        event_type = cleaned_data.get("event_type")
        if event_type and event_type != "0":
            events = events.filter(event_type__pk=event_type)

        events = events.order_by("-timestamp")

        # filter events by user access
        events = filter(lambda e: user_has_access(e.source_host, request.user),
                        events)
    else:
        if not request.GET.get("message"):
            search_form = EventSearchForm(request.user)

    extra_context = {"adv_search": True}
    return events_list(
        request,
        events,
        search_form=search_form,
        template_name="events/event_search.html",
        extra_context=extra_context,
    )
def network_list(request, page=None):
    search_phrase = request.GET.get('s')
    if search_phrase and search is not None:
        nets = search(Network, search_phrase)
        # TODO: filter search results by user access
    else:
        nets = Network.shared_objects(request.user)

    paginator = Paginator(list(nets), 10)
    page = page or request.GET.get('page', 1)
    try:
        nets = paginator.page(page)
    except PageNotAnInteger:
        nets = paginator.page(1)
    except EmptyPage:
        nets = paginator.page(paginator.num_pages)

    extra_context = {
        'networks': nets,
        'url': '/network/network/list/',
    }
    return direct_to_template(request, 'networks/network_list.html',
                              extra_context=extra_context)
def show(request, wish_id):
    controller = "wishes"
    method = "show"
    w = get_object_or_404(Wishlist, id=wish_id, user=request.user)
    results = search(Bid, w.product)
    return render(request, "system/wishes/show.html", locals())
def search_view(request):
    """Search view, handles searching on posts based on arbitrary strings.

    See search_indexes for details.
    """
    from search.core import search

    search_terms = request.GET.get('q', None)
    if search_terms:
        total_posts = search(Post, search_terms).order_by(
            '-sticky', '-updated_on').count()
        sliced_posts = search(Post, search_terms).order_by(
            '-sticky', '-updated_on')[:INITIAL_POSTS]
    else:
        total_posts = 0
        sliced_posts = None
    return render(request, 'search.html', {'posts': sliced_posts,
                                           'total_posts': total_posts,
                                           'terms': search_terms})
def pp_get_search_items(context, nodelist, *args, **kwargs):
    context.push()
    namespace = get_namespace(context)
    search_key = kwargs.get('search_key', None)
    num = 0
    failed = []
    if search_key is not None:
        results = {}
        registered = {}
        for fd in get_models():
            try:
                mod = fd.get_model()
                if mod not in registered:
                    ctype_pk = ContentType.objects.get_for_model(mod).pk
                    s = search(mod, search_key)
                    results[str(ctype_pk)] = s
                    num += len(s)
                    registered[mod] = True
            except:
                failed.append(mod)

        # search for topics
        s = search(Topic, search_key)
        topics = s
        topic_num = len(s)

        s = search(User, search_key)
        users = s
        user_num = len(s)

        namespace['topics'] = topics
        namespace['results'] = results
        namespace['users'] = users
        namespace['num_results'] = num + topic_num + user_num
        namespace['failed'] = failed

    output = nodelist.render(context)
    context.pop()
    return output
def global_search(request):
    extra_context = {}
    if request.method == 'GET':
        search_phrase = request.GET.get('s')
        if search_phrase:
            events = search(Event, search_phrase)
            events = events.order_by('-timestamp')
            events = filter(lambda e: user_has_access(e.source_host, request.user),
                            events)
            hosts = search(Host, search_phrase).filter(user=request.user)
            networks = search(Network, search_phrase).filter(user=request.user)
            extra_context = {
                'events': events[:RESULTS_LIMIT],
                'hosts': hosts[:RESULTS_LIMIT],
                'networks': networks[:RESULTS_LIMIT],
            }
    return direct_to_template(request, 'global_search.html',
                              extra_context=extra_context)
def user_search(request):
    users = None
    search_phrase = request.GET.get('s')
    if search_phrase:
        users = search(User, search_phrase)
    extra_context = {
        'users': users,
    }
    return direct_to_template(request, 'users/user_search.html',
                              extra_context=extra_context)
def user_search(request):
    if search is not None:
        users = None
        search_phrase = request.GET.get('s')
        if search_phrase:
            users = search(User, search_phrase)
        extra_context = {
            'users': users,
        }
    else:
        extra_context = {}
    return direct_to_template(request, 'users/user_search.html',
                              extra_context=extra_context)
def asyncSearch(request):
    modelname = request.GET['model']
    model = MODEL_NAME_TO_MODEL[modelname]
    searchables = request.GET['searchables']
    search_index = request.GET['search_index']
    kwargs = json.loads(request.GET['kwargs'])
    logging.info("kwargs: " + str(kwargs))

    if modelname.find('_Exact') == -1:
        # expand the decoded kwargs dict into field lookups
        cards = search(model, searchables,
                       search_index=search_index).filter(**kwargs)
    else:
        cards = model.objects.filter(**kwargs)

    if len(cards) > 0:
        logging.info("cards: " + str(cards))
        if modelname != 'Card':
            cards = map(lambda a: a.card, list(cards))
        cards = set(map(lambda a: (a.pk, a.priority), list(cards)))

    logging.info('asyncsearch:' + str(cards))
    return HttpResponse(json.dumps(list(cards)),
                        mimetype='application/javascript')
def events_search(request):
    search_form = EventSearchForm(request.user, request.GET)
    events = None
    if search_form.is_valid() and search_form.cleaned_data['message'] and \
            search is not None:
        cleaned_data = search_form.cleaned_data
        search_phrase = cleaned_data.get('message')
        events = search(Event, search_phrase)

        date_after = cleaned_data.get('date_after')
        if date_after:
            events = events.filter(timestamp__gte=date_after)

        date_before = cleaned_data.get('date_before')
        if date_before:
            events = events.filter(timestamp__lte=date_before)

        event_type = cleaned_data.get('event_type')
        if event_type and event_type != '0':
            events = events.filter(event_type__pk=event_type)

        events = events.order_by('-timestamp')

        # filter events by user access
        events = filter(lambda e: user_has_access(e.source_host, request.user),
                        events)
    else:
        if not request.GET.get('message'):
            search_form = EventSearchForm(request.user)

    extra_context = {
        'adv_search': True,
    }
    return events_list(request, events,
                       search_form=search_form,
                       template_name='events/event_search.html',
                       extra_context=extra_context)
def searchBooks(value):
    book_ids = set()
    result = []

    # Get books from id set (and their corresponding suggestions)
    books = []
    if value is not None and len(value) > 0:
        books = search(Book, value)
    else:
        books = Book.objects.all()

    for b in books:
        book_ids.add(b.pk)
        s = Suggestion.objects.filter(book=b.pk).order_by('-date')
        w = Writer.objects.filter(book=b.pk).order_by('name')
        a = 0
        for sug in s:
            a += sug.amount
        result.append(BookView(book=b, suggestions=s, writers=w, amount=a))
    return result
def host_list(request, page=None):
    search_phrase = request.GET.get('s')
    if search_phrase and search is not None:
        hosts = search(Host, search_phrase)
    else:
        hosts = Host.shared_objects(request.user)

    paginator = Paginator(list(hosts), 10)
    page = page or request.GET.get('page', 1)
    try:
        hosts = paginator.page(page)
    except PageNotAnInteger:
        hosts = paginator.page(1)
    except EmptyPage:
        hosts = paginator.page(paginator.num_pages)

    extra_context = {
        'hosts': hosts,
        'url': reverse('host_list'),
    }
    return direct_to_template(request, 'networks/host_list.html',
                              extra_context=extra_context)
def host_list(request, page=None):
    search_phrase = request.GET.get('s')
    if search_phrase:
        hosts = search(Host, search_phrase)
    else:
        hosts = filter_user_objects(request.user, Host)

    paginator = Paginator(list(hosts), 10)
    page = page or request.GET.get('page', 1)
    try:
        hosts = paginator.page(page)
    except PageNotAnInteger:
        hosts = paginator.page(1)
    except EmptyPage:
        hosts = paginator.page(paginator.num_pages)

    extra_context = {
        'hosts': hosts,
        'url': reverse('host_list'),
    }
    return direct_to_template(request, 'networks/host_list.html',
                              extra_context=extra_context)
def partial_match_search(model, query, query_filter_args=None,
                         primary_rank_by_number_of_matches=True,
                         ranking_field=None, rank_descending=True,
                         exact_match_field=None, exact_match_min_keywords=2,
                         blacklisted_keywords=[], per_query_limit='unlimited',
                         debug=False, search_index='search_index',
                         splitter=default_splitter,
                         language=settings.LANGUAGE_CODE):
    """
    Args & Description:

    Uses nonrel-search (which is normally an AND search) to perform separate
    queries for each keyword and combine the results. The result is an OR
    search where AND matches appear at the top.

    If primary_rank_by_number_of_matches is True, the results will first be
    ranked by the number of keywords they match and then by ranking_field.
    Otherwise, they will just be ranked by ranking_field. If ranking_field is
    None, that part of the ranking will be skipped. rank_descending determines
    whether or not the ranking sort is applied in descending order.

    If exact_match_field is set, any results matching all query keywords in
    the given field (in any order) will be returned as the only results.
    exact_match_min_keywords can be used to tune when this rule is applied.

    blacklisted_keywords in the query are ignored.

    Because of backend limitations, this partial match search is implemented
    using multiple queries (one for the entire query, and one for each
    keyword). The per_query_limit limits the number of results that are
    fetched for each of these queries and therefore also affects the number of
    final (deduplicated) results that are returned. query_filter_args can be
    set to filter each of these queries and thereby restrict the final
    results.

    Setting debug=True will print some info logs as the search results are
    queried and sorted.

    Return:

    Pre-sliced list of objects (not filterable).

    Example:

        query_filter_args['is_deleted'] = False
        catalog_items = partial_match_search(CatalogItem, search_query_string,
                                             query_filter_args=query_filter_args,
                                             ranking_field='search_rank',
                                             rank_descending=True,
                                             exact_match_field='title',
                                             blacklisted_keywords=['com'])
    """
    try:
        keywords = get_keyword_set(query, blacklisted_keywords, splitter,
                                   language, debug)
        if debug:
            logging.info("search keywords: " + ', '.join(keywords))

        query_result_list = []

        # One query for the entire phrase (the normal AND search).
        query_set = search(model, query, language, search_index)
        if query_filter_args:
            query_set = query_set.filter(**query_filter_args)
        if ranking_field:
            query_set = query_set.order_by('-' + ranking_field)
        if per_query_limit == 'unlimited':
            query_results = query_set
        else:
            query_results = query_set[:per_query_limit]
        query_result_list.extend(query_results)

        # One query per keyword (the OR part of the search).
        for keyword in keywords:
            query_set = search(model, keyword, language, search_index)
            if query_filter_args:
                query_set = query_set.filter(**query_filter_args)
            if ranking_field:
                order_by = ranking_field
                if rank_descending:
                    order_by = '-' + ranking_field
                query_set = query_set.order_by(order_by)
            if per_query_limit == 'unlimited':
                query_results = query_set
            else:
                query_results = query_set[:per_query_limit]
            query_result_list.extend(query_results)

        if debug:
            logging.info("Deduplicate and create primary search ranking "
                         "based on how many keywords matched.")

        query_result_set = {}
        dedup_query_result_list = []
        all_keyword_match_dedup_query_result_list = []
        all_keyword_match = False
        for result in query_result_list:
            if exact_match_field and exact_match_min_keywords != None:
                all_keyword_match = False
                if len(keywords) >= exact_match_min_keywords:
                    all_keyword_match = True
                    exact_match_field_keywords = get_keyword_set(
                        getattr(result, exact_match_field),
                        blacklisted_keywords, splitter, language, debug)
                    if len(keywords - exact_match_field_keywords) > 0:
                        all_keyword_match = False
            if result.id in query_result_set:
                query_result_set[result.id].__partial_match_search__primary_rank += 1
                query_result_set[result.id].rank += 1
                if debug:
                    logging.info("number " + repr(query_result_set[
                        result.id].__partial_match_search__primary_rank) +
                        " instance of result: " + repr(result))
            else:
                if debug:
                    logging.info("first instance of result: " + repr(result))
                query_result_set[result.id] = result
                setattr(query_result_set[result.id],
                        '__partial_match_search__primary_rank', 1)
                setattr(query_result_set[result.id], 'rank', 1)
                dedup_query_result_list.append(result)
                if all_keyword_match:
                    all_keyword_match_dedup_query_result_list.append(result)

        if len(all_keyword_match_dedup_query_result_list) > 0:
            if debug:
                logging.info("All keywords matched at least one result "
                             "exact_match_field. Using only these matches: " +
                             repr(len(all_keyword_match_dedup_query_result_list)))
            dedup_query_result_list = all_keyword_match_dedup_query_result_list
        else:
            if debug:
                logging.info("Found " + repr(len(dedup_query_result_list)) +
                             " results.")

        if rank_descending:
            sorted_ranked_query_result_set = sorted(
                dedup_query_result_list,
                key=lambda result: result.__partial_match_search__primary_rank,
                reverse=True)
        else:
            sorted_ranked_query_result_set = sorted(
                dedup_query_result_list,
                key=lambda result: -result.__partial_match_search__primary_rank)

        if debug:
            logging.info('final result ordering:')
            for result in sorted_ranked_query_result_set:
                logging.info(
                    "primary_rank: " +
                    repr(result.__partial_match_search__primary_rank) +
                    ", ranking_field: " + repr(getattr(result, ranking_field)) +
                    ", result: " + repr(result))

        return sorted_ranked_query_result_set
    except Exception:
        logging.exception("Error in partial_match_search")
        return None
def database_search(query):
    # l1 cache
    global SCache
    if SCache.get(query) is not None:
        return SCache.get(query)

    results = []
    querysplit = query.split(' ')

    # try query
    results += search(Person, query, search_index='person_level1')
    results += search(Organization, query, search_index='organization_level1')
    results += search(Crisis, query, search_index='crisis_level1')
    results += search(Person, query, search_index='auto_person_level1')
    results += search(Organization, query, search_index='auto_organization_level1')
    results += search(Crisis, query, search_index='auto_crisis_level1')
    results += search(Person, query, search_index='person_level2')
    results += search(Organization, query, search_index='organization_level2')
    results += search(Crisis, query, search_index='crisis_level2')
    results += search(Person, query, search_index='person_level3')
    results += search(Organization, query, search_index='organization_level3')
    results += search(Crisis, query, search_index='crisis_level3')

    if len(results) < 1:
        for q in querysplit:
            results += search(Person, q, search_index='person_level1')
            results += search(Organization, q, search_index='organization_level1')
            results += search(Crisis, q, search_index='crisis_level1')
    if len(results) < 5:
        for q in querysplit:
            results += search(Person, q, search_index='person_level2')
            results += search(Organization, q, search_index='organization_level2')
            results += search(Crisis, q, search_index='crisis_level2')
    if len(results) < 10:
        for q in querysplit:
            results += search(Person, q, search_index='person_level3')
            results += search(Organization, q, search_index='organization_level3')
            results += search(Crisis, q, search_index='crisis_level3')
    if len(results) < 1:
        for q in querysplit:
            results += search(Person, q, search_index='auto_person_level2')
            results += search(Organization, q, search_index='auto_organization_level2')
            results += search(Crisis, q, search_index='auto_crisis_level2')
            results += search(Person, q, search_index='auto_person_level3')
            results += search(Organization, q, search_index='auto_organization_level3')
            results += search(Crisis, q, search_index='auto_crisis_level3')

    if len(SCache) > 10:
        SCache.pop(SCache.keys()[0])
    SCache.update({query: f7(results)})
    return f7(results)
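# database_search relies on an f7() helper that is not part of this snippet.
# Judging by how it is used (deduplicating the accumulated results before
# caching and returning them), it is most likely the classic order-preserving
# de-duplication recipe; the implementation below is an assumption, not the
# project's actual code.
def f7(seq):
    """Return seq with duplicates removed, keeping first-seen order (sketch)."""
    seen = set()
    deduped = []
    for item in seq:
        if item not in seen:
            seen.add(item)
            deduped.append(item)
    return deduped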
def partial_match_search(model, query, query_filter_args=None,
                         primary_rank_by_number_of_matches=True,
                         ranking_field=None, ranking_field_descending=True,
                         exact_match_field=None, exact_match_min_keywords=2,
                         blacklisted_keywords=[], per_query_limit=40,
                         debug=False, search_index='search_index',
                         splitter=default_splitter,
                         language=settings.LANGUAGE_CODE,
                         final_result_limit=None):
    """
    Args & Description:

    Uses nonrel-search (which is normally an AND search) to perform separate
    queries for each keyword and combine the results. The result is an OR
    search where AND matches appear at the top.

    If primary_rank_by_number_of_matches is True, the results will first be
    ranked by the number of keywords they match and then by ranking_field.
    Otherwise, they will just be ranked by ranking_field. If ranking_field is
    None, that part of the ranking will be skipped. ranking_field_descending
    determines whether or not the ranking sort is applied in descending order.

    If exact_match_field is set, any results matching all query keywords in
    the given field (in any order) will be returned as the only results.
    exact_match_min_keywords can be used to tune when this rule is applied.

    blacklisted_keywords in the query are ignored.

    Because of backend limitations, this partial match search is implemented
    using multiple queries (one for the entire query, and one for each
    keyword). The per_query_limit limits the number of results that are
    fetched for each of these queries and therefore also affects the number of
    final (deduplicated) results that are returned. query_filter_args can be
    set to filter each of these queries and thereby restrict the final
    results.

    Setting debug=True will print some info logs as the search results are
    queried and sorted.

    Return:

    Pre-sliced list of objects (not filterable).

    Simple example:

        results = partial_match_search(Indexed, 'foo bar',
                                       search_index='test_index')

    Production example:

        search_query_string = 'tech news'
        query_filter_args['is_deleted'] = False
        catalog_items = partial_match_search(CatalogItem, search_query_string,
                                             query_filter_args=query_filter_args,
                                             ranking_field='search_rank',
                                             ranking_field_descending=True,
                                             exact_match_field='title',
                                             blacklisted_keywords=['com'])
    """
    try:
        keywords = get_keyword_set(query, blacklisted_keywords, splitter,
                                   language, debug)
        if debug:
            logging.info("search keywords: " + ', '.join(keywords))

        query_result_list = []

        # One query for the entire phrase (the normal AND search).
        query_set = search(model, query, language, search_index)
        if query_filter_args:
            query_set = query_set.filter(**query_filter_args)
        if ranking_field:
            query_set = query_set.order_by('-' + ranking_field)
        query_results = query_set[:per_query_limit]
        query_result_list.extend(query_results)

        # One query per keyword (the OR part of the search).
        for keyword in keywords:
            query_set = search(model, keyword, language, search_index)
            if query_filter_args:
                query_set = query_set.filter(**query_filter_args)
            if ranking_field:
                order_by = ranking_field
                if ranking_field_descending:
                    order_by = '-' + ranking_field
                query_set = query_set.order_by(order_by)
            query_results = query_set[:per_query_limit]
            if debug:
                logging.info("Result of query for " + repr(keyword) + ": " +
                             repr(query_results))
            query_result_list.extend(query_results)

        if debug:
            logging.info("Deduplicate and create primary search ranking "
                         "based on how many keywords matched.")

        query_result_set = {}
        dedup_query_result_list = []
        all_keyword_match_dedup_query_result_list = []
        all_keyword_match = False
        for result in query_result_list:
            if exact_match_field and exact_match_min_keywords != None:
                all_keyword_match = False
                if len(keywords) >= exact_match_min_keywords:
                    all_keyword_match = True
                    exact_match_field_keywords = get_keyword_set(
                        getattr(result, exact_match_field),
                        blacklisted_keywords, splitter, language, debug)
                    if len(keywords - exact_match_field_keywords) > 0:
                        all_keyword_match = False
            if result._get_pk_val() in query_result_set:
                query_result_set[result._get_pk_val()].__partial_match_search__primary_rank += 1
                if debug:
                    logging.info("number " + repr(query_result_set[
                        result._get_pk_val()].__partial_match_search__primary_rank) +
                        " instance of result: " + repr(result))
            else:
                if debug:
                    logging.info("first instance of result: " + repr(result))
                query_result_set[result._get_pk_val()] = result
                setattr(query_result_set[result._get_pk_val()],
                        '__partial_match_search__primary_rank', 1)
                dedup_query_result_list.append(result)
                if all_keyword_match:
                    all_keyword_match_dedup_query_result_list.append(result)

        if len(all_keyword_match_dedup_query_result_list) > 0:
            if debug:
                logging.info("All keywords matched at least one result "
                             "exact_match_field. Using only these matches: " +
                             repr(len(all_keyword_match_dedup_query_result_list)))
            dedup_query_result_list = all_keyword_match_dedup_query_result_list
        else:
            if debug:
                logging.info("Found " + repr(len(dedup_query_result_list)) +
                             " results.")

        if ranking_field:
            if ranking_field_descending:
                sorted_ranked_query_result_set = sorted(
                    dedup_query_result_list,
                    key=lambda result: (result.__partial_match_search__primary_rank,
                                        getattr(result, ranking_field)),
                    reverse=True)
            else:
                sorted_ranked_query_result_set = sorted(
                    dedup_query_result_list,
                    key=lambda result: (-result.__partial_match_search__primary_rank,
                                        getattr(result, ranking_field)))
        else:
            sorted_ranked_query_result_set = sorted(
                dedup_query_result_list,
                key=lambda result: -result.__partial_match_search__primary_rank)

        if final_result_limit:
            sorted_ranked_query_result_set = \
                sorted_ranked_query_result_set[:final_result_limit]

        if debug:
            logging.info('final result ordering:')
            for result in sorted_ranked_query_result_set:
                if ranking_field:
                    logging.info(
                        "primary_rank: " +
                        repr(result.__partial_match_search__primary_rank) +
                        ", ranking_field: " +
                        repr(getattr(result, ranking_field)) +
                        ", result: " + repr(result))
                else:
                    logging.info(
                        "primary_rank: " +
                        repr(result.__partial_match_search__primary_rank) +
                        ", result: " + repr(result))

        return sorted_ranked_query_result_set
    except:
        logging.exception("Error in partial_match_search")
        return None
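# A minimal sketch of how a view might consume partial_match_search, based on
# the docstring above: the helper returns a pre-sliced, de-duplicated list (or
# None on error), so the caller works with a plain Python list. The view name
# and template path below are hypothetical; CatalogItem, 'search_rank' and
# 'title' are taken from the docstring's production example.
def catalog_search_view(request):
    query = request.GET.get('q', '')
    items = partial_match_search(CatalogItem, query,
                                 ranking_field='search_rank',
                                 exact_match_field='title',
                                 final_result_limit=20) or []
    return render(request, 'catalog/search.html',
                  {'items': items, 'query': query})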
def get_queryset(self):
    qs = super(EventListView, self).get_queryset()
    pks_in = [item.pk for item in qs.by_user(self.request.user)]
    if self.request.method == 'POST':
        form = self.get_form(self.get_form_class())
        if form.is_valid():
            if form.cleaned_data['title'] and form.cleaned_data['fulltext']:
                search_title = [
                    item.pk for item in
                    search(self.model, form.cleaned_data['title'],
                           search_index='search_index_name')
                ]
                search_fulltext = [item.pk for item in
                                   search(self.model, form.cleaned_data['fulltext'])]
                result = []
                for pk in search_title:
                    if pk in search_fulltext and pk in pks_in:
                        result.append(pk)
                pks_in = result
            elif form.cleaned_data['title']:
                result = []
                for item in search(self.model, form.cleaned_data['title'],
                                   search_index='search_index_name'):
                    if item.pk in pks_in:
                        result.append(item.pk)
                pks_in = result
            elif form.cleaned_data['fulltext']:
                result = []
                for word in form.cleaned_data['fulltext'].replace(',', ' ').split(' '):
                    for item in search(self.model, word):
                        if item.pk in pks_in:
                            result.append(item.pk)
                pks_in = result
            if form.cleaned_data['users']:
                result = []
                for word in form.cleaned_data['users'].replace(',', ' ').split(' '):
                    for user in search(User, word):
                        events = Event.objects.filter(user__pk=user.pk)
                        for event in events:
                            if event.pk in pks_in:
                                result.append(event.pk)
                        event_relations = EventRelation.objects.filter(
                            relation__pk__in=[item.pk for item in user.relations])
                        for item in event_relations:
                            if item.event.pk in pks_in and item.event.pk not in result:
                                result.append(item.event.pk)
                pks_in = result
            if form.cleaned_data['date_from']:
                qs = qs.filter(term__gte=form.cleaned_data['date_from'])
            if form.cleaned_data['date_to']:
                qs = qs.filter(term__lte=form.cleaned_data['date_to'])
    result = []
    for pk in pks_in:
        if pk not in result:
            result.append(pk)
    qs = qs.filter(pk__in=result)
    return qs
def get_queryset(self):
    qs = super(TaskListView, self).get_queryset()
    pks_in = [item.pk for item in qs.by_user(self.request.user)]
    if self.request.method == "POST":
        form = self.get_form(self.get_form_class())
        if form.is_valid():
            if form.cleaned_data["title"] and form.cleaned_data["fulltext"]:
                search_title = [
                    item.pk for item in
                    search(self.model, form.cleaned_data["title"],
                           search_index="search_index_name")
                ]
                search_fulltext = [item.pk for item in
                                   search(self.model, form.cleaned_data["fulltext"])]
                result = []
                for pk in search_title:
                    if pk in search_fulltext and pk in pks_in:
                        result.append(pk)
                pks_in = result
            elif form.cleaned_data["title"]:
                result = []
                for item in search(self.model, form.cleaned_data["title"],
                                   search_index="search_index_name"):
                    if item.pk in pks_in:
                        result.append(item.pk)
                pks_in = result
            elif form.cleaned_data["fulltext"]:
                result = []
                for word in form.cleaned_data["fulltext"].replace(",", " ").split(" "):
                    for item in search(self.model, word):
                        if item.pk in pks_in:
                            result.append(item.pk)
                pks_in = result
            if form.cleaned_data["users"]:
                result = []
                for word in form.cleaned_data["users"].replace(",", " ").split(" "):
                    for user in search(User, word):
                        tasks = Task.objects.filter(user__pk=user.pk)
                        for task in tasks:
                            if task.pk in pks_in:
                                result.append(task.pk)
                        task_relations = TaskRelation.objects.filter(
                            relation__pk__in=[item.pk for item in user.relations])
                        for item in task_relations:
                            if item.task.pk in pks_in and item.task.pk not in result:
                                result.append(item.task.pk)
                pks_in = result
            if form.cleaned_data["date_from"]:
                qs = qs.filter(term__gte=form.cleaned_data["date_from"])
            if form.cleaned_data["date_to"]:
                qs = qs.filter(term__lte=form.cleaned_data["date_to"])
    result = []
    for pk in pks_in:
        if pk not in result:
            result.append(pk)
    qs = qs.filter(pk__in=result)
    return qs
def count_bids(self):
    if self.product:
        results = search(Bid, self.product)
        return results.count()
def get_queryset(self):
    from search.core import search

    search_terms = self.kwargs.get(self.lookup_url_kwarg)
    # force the evaluation of the search RelationIndexQuery result,
    # as the pagination doesn't seem to like it
    return [q for q in search(Post, search_terms).order_by('-sticky', '-updated_on')]
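# Because the queryset above is materialised into a plain list, it can be
# paginated the same way the host/network list views earlier in this section
# paginate their results. The helper below is an illustrative sketch of that
# pattern; its name and the default page size are assumptions.
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator


def paginate_posts(posts, page_number, per_page=10):
    """Return one page of the already-evaluated search results (sketch)."""
    paginator = Paginator(posts, per_page)
    try:
        return paginator.page(page_number)
    except PageNotAnInteger:
        return paginator.page(1)
    except EmptyPage:
        return paginator.page(paginator.num_pages)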