def search(self):
    """Return a SearchQuerySet filtered by the cleaned form data.

    Falls back to all non-draft results when no free-text query is
    given.  Sorting defaults to best rating ("votes") unless "new" is
    requested.
    """
    # BUG FIX: validate BEFORE touching cleaned_data -- accessing
    # cleaned_data on an unvalidated form raises AttributeError.
    if not self.is_valid():
        return self.no_query_found()

    if self.cleaned_data['q']:
        # First, store the SearchQuerySet received from other processing.
        sqs = super(objectSearchForm, self).search()
    else:
        # No free-text query: show everything except drafts.
        sqs = SearchQuerySet().exclude(draft=True)

    if self.cleaned_data['tags']:
        sqs = sqs.filter(tags=parse_tags(self.cleaned_data['tags']))

    # Date-range filtering is intentionally disabled for now.
    # if 'start_date' in self.cleaned_data:
    #     sqs = sqs.filter(created__gte=self.cleaned_data['start_date'])
    # if 'end_date' in self.cleaned_data:
    #     sqs = sqs.filter(created=self.cleaned_data['end_date'])

    # Sort: default/"votes" = best rating first, "new" = newest first.
    if not self.cleaned_data['sort'] or self.cleaned_data['sort'] == "votes":
        sqs = sqs.order_by('-ratingSortBest')
    elif self.cleaned_data['sort'] == "new":
        sqs = sqs.order_by('-created')

    return sqs
def volunteer_page(request):
    """Render the volunteer landing page with videos relevant to the
    user's comfort languages."""
    # Get the user comfort languages list
    user_langs = get_user_languages_from_request(request)

    # Videos whose primary language OR any of their languages match one of
    # the user's languages, most-requested first.
    relevant = SearchQuerySet().result_class(VideoSearchResult) \
        .models(Video).filter(video_language_exact__in=user_langs) \
        .filter_or(languages_exact__in=user_langs) \
        .order_by('-requests_count')

    # "featured" appears to be a datetime; anything after MINYEAR counts
    # as featured -- TODO confirm against the index definition.
    featured_videos = relevant.filter(
        featured__gt=datetime.datetime(datetime.MINYEAR, 1, 1)) \
        .order_by('-featured')[:5]
    popular_videos = relevant.order_by('-week_views')[:5]
    latest_videos = relevant.order_by('-edited')[:15]
    requested_videos = relevant.filter(requests_exact__in=user_langs)[:5]

    context = {
        'featured_videos': featured_videos,
        'popular_videos': popular_videos,
        'latest_videos': latest_videos,
        'requested_videos': requested_videos,
        'user_langs': user_langs,
    }

    return render_to_response('videos/volunteer.html', context,
                              context_instance=RequestContext(request))
def search_api(request):
    """JSON search endpoint: returns matching rows, facet counts and the
    total hit count for the given query/type/sort parameters."""
    query = request.REQUEST.get("q", "")
    start = int(request.REQUEST.get("start", 0))
    limit = int(request.REQUEST.get(
        "limit", getattr(settings, "HAYSTACK_SEARCH_RESULTS_PER_PAGE", 25)))
    sort = request.REQUEST.get("sort", "relevance")
    # NOTE(review): shadows the `type` builtin; left as-is.
    type = request.REQUEST.get("bytype")

    sqs = SearchQuerySet()

    if type is not None:
        if type in ["map", "layer", "contact", "group"]:
            # Type is one of our Major Types (not a sub type)
            sqs = sqs.narrow("type:%s" % type)
        elif type in ["vector", "raster"]:
            # Type is one of our sub types
            sqs = sqs.narrow("subtype:%s" % type)

    if query:
        sqs = sqs.filter(content=AutoQuery(query))

    # Facet on both type fields so the counts below can be reported.
    sqs = sqs.facet("type").facet("subtype")

    # Sorting; the default ("relevance") adds no explicit order_by.
    if sort.lower() == "newest":
        sqs = sqs.order_by("-date")
    elif sort.lower() == "oldest":
        sqs = sqs.order_by("date")
    elif sort.lower() == "alphaaz":
        sqs = sqs.order_by("title")
    elif sort.lower() == "alphaza":
        sqs = sqs.order_by("-title")

    # Build the requested page; "iid" is the absolute row index.
    results = []
    for i, result in enumerate(sqs[start:start + limit]):
        data = json.loads(result.json)
        data.update({"iid": i + start})
        results.append(data)

    # Merge type and subtype facet counts into a single mapping.
    facets = sqs.facet_counts()
    counts = {"map": 0, "layer": 0, "vector": 0, "raster": 0,
              "contact": 0, "group": 0}
    for t, c in facets.get("fields", {}).get("type", []):
        counts[t] = c
    for t, c in facets.get("fields", {}).get("subtype", []):
        counts[t] = c

    data = {
        "success": True,
        "total": sqs.count(),
        "rows": results,
        "counts": counts,
    }
    return HttpResponse(json.dumps(data), mimetype="application/json")
def get_queryset(self):
    """Return the senate search queryset, optionally ordered by dating
    certainty.

    When 'senate_date' is supplied in the query string, search all
    StatusAssertions; otherwise restrict to the form's initial date.
    """
    queryset = super(SenateSearchView, self).get_queryset()
    if 'senate_date' in self.request.GET:
        # An explicit date was supplied: search across all assertions.
        queryset = SearchQuerySet().models(StatusAssertion)
    else:
        # Default: narrow to the initial date only.
        queryset = queryset.narrow('date:[{0} TO {0}]'.format(
            SenateSearchForm.INITIAL_DATE))
    certainty = self.request.GET.get('dating_certainty')
    # `certainty == '3'` already implies certainty is not None, so the
    # original's extra None check was redundant.
    if certainty == '3':
        return queryset.order_by('-date_end')
    return queryset.order_by('date_start')
def get_data(self):
    """Build the Hansard section of the search results."""
    from pombola.hansard.models import Entry

    section = {"model": Entry, "title": "Hansard"}

    # Full-text search over Hansard entries.
    hits = SearchQuerySet().models(section["model"])
    hits = hits.filter(content=AutoQuery(self.query))

    # Optional chronological ordering: "date" = newest first,
    # "adate" = oldest first.
    if self.order == "date":
        hits = hits.order_by("-start_date")
    if self.order == "adate":
        hits = hits.order_by("start_date")

    data = dict(section)
    data["results"] = hits.highlight()
    data["results_count"] = data["results"].count()
    return data
def search(request):
    """Full-text subtitle search view with optional ordering and
    pagination via object_list."""
    q = request.REQUEST.get('q')
    page = request.GET.get('page')
    # 'last' is passed straight through to object_list; anything else
    # must parse as an int, defaulting to page 1.
    if not page == 'last':
        try:
            page = int(page)
        except (ValueError, TypeError, KeyError):
            page = 1
    if q:
        qs = SearchQuerySet().auto_query(q).highlight()
    else:
        # Empty query: an empty queryset keeps the template logic simple.
        qs = SubtitleLanguage.objects.none()
    context = {
        'query': q
    }
    # Whitelisted ordering: o = field key, ot = 'asc' | 'desc'.
    ordering, order_type = request.GET.get('o'), request.GET.get('ot')
    order_fields = {
        'title': 'title',
        'language': 'language'
    }
    if ordering in order_fields and order_type in ['asc', 'desc']:
        qs = qs.order_by(('-' if order_type == 'desc' else '') + order_fields[ordering])
        context['ordering'], context['order_type'] = ordering, order_type
    return object_list(request, queryset=qs,
                       allow_empty=True,
                       paginate_by=30,
                       page=page,
                       template_name='videos/search.html',
                       template_object_name='result',
                       extra_context=context)
def tags(request, tags=''):
    """List the current user's items filtered by the comma-separated
    tags in the URL, with a tag facet cloud."""
    if tags == '':
        return redirect(reverse('index'))
    tag_list = [tag for tag in tags.split(',') if tag != '']
    # Due to a bug (or feature?) in Whoosh or haystack, we can't filter
    # for all tags at once, the .filter(tags=[...]) method cannot handle
    # spaces apparently.  It however works with tags__in, but that is an
    # OR -- so we AND the tags by chaining one filter per tag.
    sqs = SearchQuerySet().filter(owner_id=request.user.id)
    for tag in tag_list:
        sqs = sqs.filter(tags__in=[tag])
    profile = request.user.userprofile
    if not profile.show_excluded:
        # Drop anything carrying one of the user's excluded tags.
        excluded_tags = profile.excluded_tags.names()
        for tag in excluded_tags:
            sqs = sqs.exclude(tags__in=[tag])
    sqs = sqs.order_by('-created').facet('tags')
    facets = sqs.facet_counts()
    result_objects = [result.object for result in sqs]
    tag_objects = [Tag(name, count, tag_list)
                   for name, count in facets.get('fields', {}).get('tags', [])]
    return TemplateResponse(request, 'readme/item_list.html', {
        'current_item_list': result_objects,
        'tags': tag_objects,
        'tag_names': json.dumps([tag.name for tag in tag_objects]),
        'user': request.user,
    })
def search(self, request, collection, filter_parameters, order_parameters):
    """Search the haystack index for this collection's parameters.

    Optionally restricts to the models named in
    settings.COLLECTIONS_HAYSTACK_MODELS (a list, a dotted path to a
    callable, or a callable taking the request), then applies ordering
    and extra filters.
    """
    parameters = ' | '.join(collection.parameters.split(',')).strip(' |')
    objects = SearchQuerySet().filter(content=parameters)
    if hasattr(settings, "COLLECTIONS_HAYSTACK_MODELS"):
        haystack_models = settings.COLLECTIONS_HAYSTACK_MODELS
        # if we're a string pull out the function
        if isinstance(haystack_models, str):
            mod_name, func_name = get_mod_func(haystack_models)
            haystack_models = getattr(import_module(mod_name), func_name)
        # if we're a function pull out the models
        if callable(haystack_models):
            haystack_models = haystack_models(request)
        model_list = []
        for haystack_model in haystack_models:
            app_model = haystack_model.split('.')
            model_list.append(get_model(*app_model))
        # BUG FIX: SearchQuerySet methods return a NEW queryset rather
        # than mutating in place; the original discarded the result of
        # .models(), silently dropping the model restriction.
        objects = objects.models(*model_list)
    if isinstance(order_parameters, list):
        objects = objects.order_by(*order_parameters)
    if isinstance(filter_parameters, dict):
        objects = objects.filter(**filter_parameters)
    return objects
def search( query, limit=20, models=None, as_list=False ): print "Searching for: %s" % query if not query: return [] try: sqs = SearchQuerySet() # clean query q = sqs.query.clean( query ) # add models if models is not None: if not isinstance( models, ( list, tuple ) ): models = ( models, ) sqs = sqs.models( *models ) # ignore letters keywords = [ k for k in q.split() if len( k ) > 1 ] # search word by word for keyword in keywords: sqs.query.add_filter( SQ( title="%s^2.0" % keyword ) ) # order by score and popularity sqs = sqs.order_by( '-score', '-popularity' ) # add highlight sqs = sqs.highlight() sqs = sqs[:limit] except Exception, e: # on error show empty result print e # TODO: log error sqs = []
def handle(self, *args, **kwargs):
    """Award automatic badges.

    'biggest' badges go to the single top-ranked user for the badge's
    attribute; all other comparisons award every user in the index
    matching `user_attr <comparison> value`.
    """
    for badge in Badge.objects.filter(type='auto'):
        if not badge.comparison:
            continue
        elif badge.comparison == 'biggest':
            # Single winner: top of the index ordered by the attribute.
            order = u'-{}'.format(Badge.USER_ATTR_OPTS[badge.user_attr])
            sqs = SearchQuerySet().filter(type='user')
            user = sqs.order_by(order)[0]
            badge.awardees.add(User.objects.get(pk=user.pk))
            continue

        # BUG FIX: the original used `is not 'equal'`, an identity
        # comparison against a string literal, which is implementation-
        # dependent; use != for a value comparison.
        comparison = u'__{}'.format(badge.comparison) \
            if badge.comparison != 'equal' else u''
        key = u'{}{}'.format(
            Badge.USER_ATTR_OPTS[badge.user_attr], comparison
        )
        opts = {key: badge.value}
        sqs = SearchQuerySet().filter(
            type='user', **opts
        )
        for user in sqs:
            badge.awardees.add(User.objects.get(pk=user.pk))
class LiveSimpleSearchQuerySetTestCase(TestCase):
    """SearchQuerySet smoke tests against the simple backend using the
    old haystack site-registration API (monkeypatches haystack.site)."""
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(LiveSimpleSearchQuerySetTestCase, self).setUp()
        import haystack
        # Stow.
        self.old_debug = settings.DEBUG
        settings.DEBUG = True
        self.old_site = haystack.site
        self.site = SearchSite()
        self.backend = SearchBackend(site=self.site)
        self.index = SimpleMockSearchIndex(MockModel, backend=self.backend)
        self.site.register(MockModel, SimpleMockSearchIndex)
        haystack.site = self.site
        self.sample_objs = MockModel.objects.all()
        self.sqs = SearchQuerySet()

    def tearDown(self):
        # Restore.
        import haystack
        haystack.site = self.old_site
        settings.DEBUG = self.old_debug
        super(LiveSimpleSearchQuerySetTestCase, self).tearDown()

    def test_general_queries(self):
        # For now, just make sure these don't throw an exception.
        # They won't work until the simple backend is improved.
        self.assertTrue(len(self.sqs.auto_query('daniel')) > 0)
        self.assertTrue(len(self.sqs.filter(text='index')) > 0)
        self.assertTrue(len(self.sqs.exclude(name='daniel')) > 0)
        self.assertTrue(len(self.sqs.order_by('-pub_date')) > 0)
def handle(self, *args, **kwargs):
    """Re-evaluate automatic badges: clear the current awardees, then
    re-award to every user that still satisfies the badge criterion."""
    for badge in Badge.objects.filter(type='auto'):
        if not badge.comparison:
            continue
        elif badge.comparison == 'biggest':
            # Single winner: top of the index ordered by the attribute.
            order = u'-{}'.format(Badge.USER_ATTR_OPTS[badge.user_attr])
            sqs = SearchQuerySet().filter(type='user')
            user = sqs.order_by(order)[0]
            badge.awardees.remove(*list(badge.awardees.all()))
            badge.awardees.add(User.objects.get(pk=user.pk))
            continue

        # BUG FIX: the original used `is not 'equal'`, an identity
        # comparison against a string literal, which is implementation-
        # dependent; use != for a value comparison.
        comparison = u'__{}'.format(badge.comparison) \
            if badge.comparison != 'equal' else u''
        key = u'{}{}'.format(
            Badge.USER_ATTR_OPTS[badge.user_attr], comparison
        )
        opts = {key: badge.value}
        sqs = SearchQuerySet().filter(
            type='user', **opts
        )
        # Remove all awardees to make sure that all of them
        # still accomplish the necessary to keep the badge
        badge.awardees.remove(*list(badge.awardees.all()))
        for user in sqs:
            badge.awardees.add(User.objects.get(pk=user.pk))
def get_section_data(self, section):
    """Run the search for one results section and package its data."""
    config = self.search_sections[section]

    # Optional per-section filter/exclude clauses from the config.
    section_filter = config.get('filter', {})
    extra_exclude = config.get('exclude', {})

    sqs = SearchQuerySet().models(config['model'])
    if extra_exclude:
        sqs = sqs.exclude(**extra_exclude)
    sqs = sqs.filter(
        content=AutoQuery(self.query),
        *section_filter.get('args', []),
        **section_filter.get('kwargs', {})
    )

    # Restrict to the requested date window, if any.
    if self.start_date_range:
        sqs = sqs.filter(start_date__gte=self.start_date_range)
    if self.end_date_range:
        sqs = sqs.filter(start_date__lte=self.end_date_range)

    # Newest first when ordering by date.
    if self.order == 'date':
        sqs = sqs.order_by('-start_date')

    data = config.copy()
    data['results'] = sqs.highlight()
    data['results_count'] = data['results'].count()
    data['section'] = section
    data['section_dashes'] = section.replace('_', '-')
    return data
def get_global_context(self, context):
    """Populate pagination and "top hits" for the combined search page."""
    # Find all the models to search over...
    models = set(self.search_sections[section]["model"]
                 for section in self.search_sections)
    # Top hits are only shown on the first page of results.
    show_top_hits = self.page == "1" or not self.page
    top_hits_ids = []
    if show_top_hits:
        context["top_hits"] = []
        for section, max_for_top_hits in SearchBaseView.top_hits_under.items():
            data = self.get_section_data(section)
            # Sections with few enough results are promoted wholesale.
            if data["results_count"] <= max_for_top_hits:
                context["top_hits"] += data["results"]
        top_hits_ids = set(r.id for r in context["top_hits"])
    sqs = SearchQuerySet().models(*list(models))
    # Exclude anything that will already have been shown in the top hits:
    for top_hit_id in top_hits_ids:
        sqs = sqs.exclude(id=top_hit_id)
    sqs = sqs.exclude(hidden=True).filter(content=AutoQuery(self.query)).highlight()
    # Optional date-range restriction on start_date.
    if self.start_date_range:
        sqs = sqs.filter(start_date__gte=self.start_date_range)
    if self.end_date_range:
        sqs = sqs.filter(start_date__lte=self.end_date_range)
    if self.order == "date":
        sqs = sqs.order_by("-start_date")
    context["paginator"] = Paginator(sqs, self.results_per_page)
    context["page_obj"] = self.get_paginated_results(context["paginator"])
    return context
def get_results(self, name, branch, year, offset, branch_facet, year_facet, city_facet):
    """Query the alumni index with the given filters and facet
    selections; return (results, result_count)."""
    # The year facet arrives as a comma-separated string of years.
    if year_facet:
        year_facet = [int(part) for part in year_facet.split(",")]

    sqs = (SearchQuerySet()
           .facet('branch')
           .facet('year_of_passing')
           .facet('city'))

    # Apply each filter only when a value was actually supplied.
    if name:
        sqs = sqs.auto_query(name)
    if branch:
        sqs = sqs.filter(branch_exact=branch)
    if year:
        sqs = sqs.filter(year_of_passing_exact=year)
    if branch_facet:
        sqs = sqs.filter(branch_exact=branch_facet)
    if year_facet:
        sqs = sqs.filter(year_of_passing_exact__in=year_facet)
    if city_facet:
        sqs = sqs.filter(city_exact=city_facet)

    start = int(offset)
    page = sqs.order_by('name')[start:start + INITIAL_RESULTS_COUNT]
    return page, len(page)
def get_content(self):
    """Apply self.filter to a SearchQuerySet, mapping friendly keys to
    index field names, ANDing multi-valued filters item by item."""
    # Friendly filter keys -> index field names; unknown keys pass
    # through unchanged.
    field_map = {
        'q': 'text',
        'controlling_bodies': 'controlling_body',
        'statuses': 'status',
        'file_types': 'file_type',
    }

    qs = SearchQuerySet()
    for key, val in self.filter.iteritems():
        field = field_map.get(key, key)
        # Skip empty values entirely.
        if val in ([], {}, '', (), None):
            continue
        if isinstance(val, list):
            for item in val:
                qs = qs.filter(**{field: item})
        else:
            qs = qs.filter(**{field: val})
    return qs.order_by('order_date')
def get_section_data(self, section):
    """Run one section's search and return its results plus metadata."""
    defaults = self.search_sections[section]
    # Per-section filter/exclude configuration (all optional).
    extra_filter = defaults.get("filter", {})
    filter_args = extra_filter.get("args", [])
    filter_kwargs = extra_filter.get("kwargs", {})
    extra_exclude = defaults.get("exclude", {})
    query = SearchQuerySet().models(defaults["model"])
    if extra_exclude:
        query = query.exclude(**extra_exclude)
    query = query.filter(content=AutoQuery(self.query), *filter_args, **filter_kwargs)
    # Optional date-range restriction on start_date.
    if self.start_date_range:
        query = query.filter(start_date__gte=self.start_date_range)
    if self.end_date_range:
        query = query.filter(start_date__lte=self.end_date_range)
    # Newest first when ordering by date.
    if self.order == "date":
        query = query.order_by("-start_date")
    result = defaults.copy()
    result["results"] = query.highlight()
    result["results_count"] = result["results"].count()
    result["section"] = section
    # Section key with dashes, e.g. for URLs/CSS identifiers.
    result["section_dashes"] = section.replace("_", "-")
    return result
def myitems_index(request, view_name, page_title, no_items_message,
                  index_name, only_published=True, template="myitems/index.html"):
    """Generic "my items" index view.

    NOTE: the template is rendered with locals(), so every local name
    defined here (breadcrumbs, items, pagination, no_items_message, ...)
    is part of the template context contract -- do not rename locals.
    """
    breadcrumbs = [
        {"url": reverse("myitems:myitems"), "title": u"My Items"},
        {"url": reverse("myitems:%s" % view_name), "title": page_title},
    ]
    query_string_params = {}
    index_params = IndexParams(request)
    query_string_params = index_params.update_query_string_params(query_string_params)
    batch_end = index_params.batch_start + index_params.batch_size
    query = SearchQuerySet()
    if only_published:
        query = query.narrow("is_displayed:true")
    # Restrict to the current user's items via the given index field.
    query = query.narrow("%s:%i" % (index_name, request.user.id))
    if index_params.query_order_by is not None:
        query = query.order_by(index_params.query_order_by)
    items = []
    results = query[index_params.batch_start:batch_end]
    for result in results:
        items.append(populate_item_from_search_result(result))
    pagination = Pagination(request.path, query_string_params,
                            index_params.batch_start,
                            index_params.batch_size, len(query))
    return direct_to_template(request, template, locals())
def directory(request, categories=None):
    """Browse courses by category path, best-rated first, paginated."""
    sqs = SearchQuerySet()
    sqs = sqs.models(CourseInfo)
    if categories:
        # The last path segment names the current category.
        cat_list = categories.lower().strip('/').split('/')
        category = get_object_or_404(Category, name=cat_list[-1])
        children = category.children.all()
        sqs = sqs.filter(categories=category.name)
        curr_category = category
    else:
        # Top level: show the root categories.
        children = Category.objects.filter(parent_category__isnull=True)
        curr_category = None
    sqs = sqs.order_by('-rating')
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    # 'rpp' = results per page (default 25).
    courses_paginator = Paginator(sqs, int(request.GET.get('rpp', '25')))
    try:
        courses_page = courses_paginator.page(page)
    except (EmptyPage, InvalidPage):
        # Out-of-range pages clamp to the last page.
        courses_page = courses_paginator.page(courses_paginator.num_pages)
    render_dict = {'children': children,
                   'curr_category': curr_category,
                   'courses_page': courses_page,
                   'paginator': courses_paginator,
                   'mod_path': request.path + '?rpp=25'}
    return render_to_response('classes/directory.html', render_dict,
                              context_instance=RequestContext(request))
def search(self):
    """
    Returns a search queryset.
    """
    data = self.cleaned_data_or_empty()
    search_in = data.get("search_in", DEFAULT_SEARCH_IN)
    query = data.get("q", "")

    # Nothing to search for.
    if not query:
        return EmptySearchQuerySet()

    if not search_in:
        # The following returns result of a SearchQuerySet.autoquery()
        sqs = super(HitGroupContentSearchForm, self).search()
    else:
        # OR the query across every requested field (exact match).
        sqs = SearchQuerySet()
        for field in search_in:
            sqs = sqs.filter_or(**{"{}__exact".format(field): query})

    # "field_asc"/"field_desc" -> haystack order_by specification.
    field, direction = data.get("sort_by", DEFAULT_SORT_BY).rsplit("_", 1)
    prefix = "" if direction == "asc" else "-"
    return sqs.order_by("{}{}".format(prefix, field))
class LiveSimpleSearchQuerySetTestCase(TestCase):
    """SearchQuerySet smoke tests against the default connection,
    swapping in a UnifiedIndex built from SimpleMockSearchIndex."""
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(LiveSimpleSearchQuerySetTestCase, self).setUp()
        # Stow.
        self.old_debug = settings.DEBUG
        settings.DEBUG = True
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = SimpleMockSearchIndex()
        self.ui.build(indexes=[self.smmi])
        connections['default']._index = self.ui
        self.sample_objs = MockModel.objects.all()
        self.sqs = SearchQuerySet()

    def tearDown(self):
        # Restore.
        connections['default']._index = self.old_ui
        settings.DEBUG = self.old_debug
        super(LiveSimpleSearchQuerySetTestCase, self).tearDown()

    def test_general_queries(self):
        # For now, just make sure these don't throw an exception.
        # They won't work until the simple backend is improved.
        self.assertTrue(len(self.sqs.auto_query('daniel')) > 0)
        self.assertTrue(len(self.sqs.filter(text='index')) > 0)
        self.assertTrue(len(self.sqs.exclude(name='daniel')) > 0)
        self.assertTrue(len(self.sqs.order_by('-pub_date')) > 0)
def get_context_data(self, **kwargs):
    """Build the MEP search context for both entry points.

    Supports a simple "q" search (parsed into a haystack Q expression)
    and the advanced formset; both yield a query plus sort/limit
    options, and the results' average total_score is computed.
    """
    query = None
    sort = MEPSearchAdvancedFormset.options_form_class.SORT_INITIAL
    limit = MEPSearchAdvancedFormset.options_form_class.LIMIT_INITIAL
    label = ""
    formset_class = get_advanced_search_formset_class(
        self.request.user, MEPSearchAdvancedFormset, MEPSearchForm)
    if "q" in self.request.GET:
        # Simple search path.
        form = MEPSimpleSearchForm(self.request.GET)
        if form.is_valid():
            F = ParsedStringQBuilder(form.cleaned_data['q'], MEPSearchForm)
            query, label = F()
            formset = formset_class()
            _limit = form.cleaned_data.get("limit")
            if _limit is not None:
                limit = _limit
            sort = form.cleaned_data.get("sort") or sort
        # NOTE(review): if the simple form is invalid, `formset` stays
        # undefined and the return dict below would raise -- verify.
    else:
        # Advanced (formset) search path.
        form = MEPSimpleSearchForm()
        formset = formset_class(self.request.GET or None)
        formset.full_clean()
        if formset.is_valid():
            F = FormsetQBuilder(formset)
            query, label = F()
            sort = formset.options_form.cleaned_data.get("sort", sort)
            limit = formset.options_form.cleaned_data.get("limit", limit)
    if query:
        results = SearchQuerySet().filter(query)
        if sort:
            results = results.order_by(sort)
        # When iterating over SearchQuerySet, haystack will fetch
        # results 10 by 10. This fetchs them all in one call:
        results = results[:]
        # we must find the average score for the search results
        if len(filter(lambda mep: mep.total_score, results)) != 0:
            average = sum([mep.total_score for mep in results if mep.total_score]) / \
                len(filter(lambda mep: mep.total_score, results))
        else:
            average = 0.0
    else:
        results = EmptySearchQuerySet()
        average = 0.0
    return {
        "dynamiq": {
            "results": results,
            "label": label,
            "formset": formset,
            "form": form,
            "average": average,
            "shortcuts": [
                TopRated({"request": self.request}),
                WorstRated({"request": self.request})
            ]
        },
        "list_template_name": self.list_template_name,
        "per_page": limit,
        "as_csv": self.request.GET.get('as_csv', False)
    }
def get_context_data(self, **kwargs):
    """Assemble the user-profile context: per-type counts, recent search
    hits, recent emails and per-mailing-list activity."""
    user = self.object
    context = {}
    count_types = OrderedDict()
    # OR'd index lookups matching content the user collaborated on or
    # is named in.
    fields_or_lookup = (
        {'collaborators__contains': user.username},
        {'fullname_and_username__contains': user.username},
    )
    counter_class = {}
    # {
    #     'wiki': WikiCollabCount,
    #     'ticket': TicketCollabCount,
    # }
    types = ['thread']
    # types.extend(['ticket', 'wiki', 'changeset', 'attachment'])
    messages = Message.objects.filter(from_address__user__pk=user.pk)
    for type in types:
        CounterClass = counter_class.get(type)
        if CounterClass:
            # Use the pre-computed counter when one exists for this type.
            try:
                counter = CounterClass.objects.get(author=user.username)
            except CounterClass.DoesNotExist:
                count_types[trans(type)] = 0
            else:
                count_types[trans(type)] = counter.count
        elif type == 'thread':
            count_types[trans(type)] = messages.count()
        else:
            # Fall back to counting via the search index.
            sqs = SearchQuerySet()
            for filter_or in fields_or_lookup:
                sqs = sqs.filter_or(type=type, **filter_or)
            count_types[trans(type)] = sqs.count()
    context['type_count'] = count_types
    # Ten most recently modified non-thread results involving the user.
    sqs = SearchQuerySet()
    for filter_or in fields_or_lookup:
        sqs = sqs.filter_or(**filter_or).exclude(type='thread')
    context['results'] = sqs.order_by('-modified', '-created')[:10]
    # Ten most recent emails sent from any of the user's addresses.
    email_pks = [addr.pk for addr in user.emails.iterator()]
    query = Message.objects.filter(from_address__in=email_pks)
    query = query.order_by('-received_time')
    context['emails'] = query[:10]
    # Message counts per mailing list name.
    count_by = 'thread__mailinglist__name'
    context['list_activity'] = dict(messages.values_list(count_by)
                                    .annotate(Count(count_by))
                                    .order_by(count_by))
    context.update(kwargs)
    return super(UserProfileDetailView, self).get_context_data(**context)
def microsite(request, microsite):
    """Microsite home page: facet counts, keyword cloud and featured
    items.

    NOTE: rendered with locals() -- local variable names here are part
    of the template context contract; do not rename them.
    """
    microsite = get_object_or_404(Microsite, slug=microsite)
    page_title = u"%s Home" % microsite.name
    breadcrumbs = [{"url": reverse("materials:microsite",
                                   kwargs=dict(microsite=microsite.slug)),
                    "title": page_title}]
    # Displayed items in this microsite, best-rated first, faceted for
    # the counts computed below.
    query = SearchQuerySet().narrow("is_displayed:true")
    query = query.narrow("microsites:%i" % microsite.id)
    query = query.order_by("-rating")
    query = query.facet("indexed_topics").facet("keywords").facet("grade_levels").facet("course_material_types")
    items = []
    results = query[0:8]
    for result in results:
        items.append(populate_item_from_search_result(result))
    facets = query.facet_counts()["fields"]
    # Annotate each topic in the microsite's topic tree with its count.
    topics = []
    topic_counts = dict(facets["indexed_topics"])
    for topic, tree_info in tree_item_iterator(microsite.topics.all()):
        topic.count = topic_counts.get(str(topic.id), 0)
        topics.append((topic, tree_info))
    grade_levels = []
    grade_level_counts = dict(facets["grade_levels"])
    for level in GradeLevel.objects.all():
        level.count = grade_level_counts.get(str(level.id), 0)
        grade_levels.append(level)
    course_material_types = []
    course_material_type_counts = dict(facets["course_material_types"])
    for material_type in CourseMaterialType.objects.all():
        material_type.count = course_material_type_counts.get(str(material_type.id), 0)
        course_material_types.append(material_type)
    # Keyword tag cloud, capped at MAX_TOP_KEYWORDS entries.
    keywords = query.count() and facets.get("keywords", []) or []
    if len(keywords) > MAX_TOP_KEYWORDS:
        keywords = keywords[:MAX_TOP_KEYWORDS]
    keywords = get_tag_cloud(dict(keywords), 3, 0, 0)
    for keyword in keywords:
        # Resolve a display name for the keyword slug, falling back to
        # the slug itself.
        name = get_name_from_slug(Keyword, keyword["slug"]) or \
               get_name_from_slug(Tag, keyword["slug"]) or \
               keyword["slug"]
        keyword["name"] = name
    # Featured items: K-12 (grade levels 1-2) and Higher Ed (level 3).
    featured_k12 = SearchQuerySet().filter(workflow_state=PUBLISHED_STATE,
                                           featured=True,
                                           grade_levels__in=(1, 2),
                                           microsites=microsite.id).order_by("-featured_on").load_all()[:3]
    featured_k12 = [r.object for r in featured_k12 if r]
    featured_highered = SearchQuerySet().filter(workflow_state=PUBLISHED_STATE,
                                                featured=True,
                                                grade_levels=3,
                                                microsites=microsite.id).order_by("-featured_on").load_all()[:3]
    featured_highered = [r.object for r in featured_highered if r]
    slides = Slide.objects.filter(microsite=microsite)
    resource_number = SearchQuerySet().filter(workflow_state=PUBLISHED_STATE,
                                              microsites=microsite.id).count()
    return direct_to_template(request, "materials/microsites/%s.html" % microsite.slug, locals())
def get(self, request):
    """Browse view: letter or keyword search over systems, plus the
    year ranges used by the filter UI."""
    # handle older filter group urls
    if any( filter(lambda k: k.startswith('fg'), request.GET.keys()) ):
        return self.handle_old_urls(request)
    # Perform the search and get back the versions along with a
    # mapping with the search keys
    results, search_keys = self.do_search(request)
    search_q = request.GET.get('q', '').strip()
    # Search Letter
    search_letter = request.GET.get('letter', '').strip().upper()
    # When do_search produced nothing, fall back to letter browsing.
    if results is not None:
        pass
    elif search_letter == 'ALL' or not search_letter:
        results = SearchQuerySet()
        search_letter = 'ALL'
        pass
    elif search_letter:
        results = SearchQuerySet().filter(letter__exact=search_letter.lower()).filter(name__startswith=search_letter)
        pass
    # generate letter pagination
    pagination = self.build_pagination(search_letter)
    # convert query list to regular list
    results = list( results.order_by('name') )
    # check if there are results
    has_results = len(results) > 0
    # get year ranges (current versions only, ignoring unset years)
    years_start = SystemVersion.objects.filter(is_current=True).filter(start_year__gt=0).aggregate(
        min_start_year=Min('start_year'),
        max_start_year=Max('start_year')
    )
    years_end = SystemVersion.objects.filter(is_current=True).filter(end_year__gt=0).aggregate(
        min_end_year=Min('end_year'),
        max_end_year=Max('end_year')
    )
    years = {}
    years.update(years_start)
    years.update(years_end)
    return render(request, self.template_name, {
        'activate': 'browse',  # NAV-LINKS
        'filtergroups': self.build_filter_groups(request.GET),
        'has_results': has_results,
        'pagination': pagination,
        'query': search_q,
        'results': results,
        'years': years,
        'has_search': len(search_keys) != 0,
        'search': search_keys,
    })
def get_data(self):
    """Search Hansard entries for self.query; return model/title plus
    highlighted results and their count."""
    from pombola.hansard.models import Entry
    defaults = {
        'model': Entry,
        'title': 'Hansard',
    }
    data_query = SearchQuerySet().models(defaults['model'])
    data_query = data_query.filter(
        content=AutoQuery(self.query)
    )
    # 'date' = newest first, 'adate' = oldest first.
    if self.order == 'date':
        data_query = data_query.order_by('-start_date')
    if self.order == 'adate':
        data_query = data_query.order_by('start_date')
    result = defaults.copy()
    result['results'] = data_query.highlight()
    result['results_count'] = result['results'].count()
    return result
def get_content(self):
    """AND every entry of self.filter into a fresh SearchQuerySet,
    ordered by order_date."""
    results = SearchQuerySet()
    for field, value in self.filter.iteritems():
        if isinstance(value, list):
            # Multi-valued filters are ANDed item by item.
            for item in value:
                results = results.filter(**{field: item})
        else:
            results = results.filter(**{field: value})
    return results.order_by('order_date')
def get(self, request):
    """Item search: build a haystack query from the form, then reload
    the matching Items from the database for the list view."""
    frm = ItemSearchForm(request.GET)
    self.object_list = Item.objects.none()
    if frm.is_valid():
        cdata = frm.clean()
        sqs = SearchQuerySet().filter(published=True)
        psqs = sqs  # remember the starting queryset to detect "no criteria"
        # No blank values, please
        for key in cdata.iterkeys():
            if isinstance(cdata[key], basestring):
                cdata[key] = cdata[key].strip()
        # Each non-empty form field narrows the queryset further.
        if cdata['categories']:
            sqs = sqs.filter(categories__in=[x.strip() for x in cdata['categories'].split(' ')])
        if cdata['title']:
            sqs = sqs.filter(title=cdata['title'])
        if cdata['artist']:
            sqs = sqs.filter(artist=cdata['artist'])
        if cdata['date_from']:
            sqs = sqs.filter(date_from__gte=cdata['date_from'])
        if cdata['date_to']:
            sqs = sqs.filter(date_to__lte=cdata['date_to'])
        if cdata['origin_city']:
            sqs = sqs.filter(origin_city=cdata['origin_city'])
        if cdata['origin_country']:
            sqs = sqs.filter(origin_country=cdata['origin_country'])
        if cdata['materials']:
            sqs = sqs.filter(materials__in=[x.strip() for x in cdata['materials'].split(' ')])
        if cdata['video_only']:
            sqs = sqs.filter(video_only=True)
        # fulltext search
        if cdata['q']:
            sqs = sqs.filter(content=cdata['q'])
        # No search data entered
        if psqs == sqs:
            return redirect(reverse('item_search'))
        # Assigning a list to self.object_list won't work, it needs a QuerySet.
        # We're basically loading the items twice :(
        results = list(sqs.order_by('score')[:1000])  # slicing the array prevents multiple queries to Solr.
        ids = [x.object.id for x in results]  # sqs.values_list('django_id', flat=True) won't work with Haystack.
        self.object_list = Item.objects.filter(id__in=ids)
    self.parent_category = None
    self.current_category = None
    self.child_categories = None
    context = self.get_context_data(object_list=self.object_list)
    return(self.render_to_response(context))
class LiveSimpleSearchQuerySetTestCase(TestCase):
    """SearchQuerySet tests against the simple backend via the default
    connection's unified index (swapped in/out around each test)."""
    fixtures = ['bulk_data.json']

    def setUp(self):
        super(LiveSimpleSearchQuerySetTestCase, self).setUp()
        # Stow.
        self.old_debug = settings.DEBUG
        settings.DEBUG = True
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = SimpleMockSearchIndex()
        self.ui.build(indexes=[self.smmi])
        connections['default']._index = self.ui
        self.sample_objs = MockModel.objects.all()
        self.sqs = SearchQuerySet()

    def tearDown(self):
        # Restore.
        connections['default']._index = self.old_ui
        settings.DEBUG = self.old_debug
        super(LiveSimpleSearchQuerySetTestCase, self).tearDown()

    def test_general_queries(self):
        # For now, just make sure these don't throw an exception.
        # They won't work until the simple backend is improved.
        self.assertTrue(len(self.sqs.auto_query('daniel')) > 0)
        self.assertTrue(len(self.sqs.filter(text='index')) > 0)
        self.assertTrue(len(self.sqs.exclude(name='daniel')) > 0)
        self.assertTrue(len(self.sqs.order_by('-pub_date')) > 0)

    def test_general_queries_unicode(self):
        # Non-ASCII queries should simply return no results, not crash.
        self.assertEqual(len(self.sqs.auto_query(u'Привет')), 0)

    def test_more_like_this(self):
        # MLT shouldn't be horribly broken. This used to throw an exception.
        mm1 = MockModel.objects.get(pk=1)
        self.assertEqual(len(self.sqs.filter(text=1).more_like_this(mm1)), 0)

    def test_values_queries(self):
        # The simple backend reports a constant score of 0.
        sqs = self.sqs.auto_query('daniel')
        self.assertTrue(len(sqs) > 0)
        flat_scores = sqs.values_list("score", flat=True)
        self.assertEqual(flat_scores[0], 0)
        scores = sqs.values_list("id", "score")
        self.assertEqual(scores[0], [1, 0])
        scores_dict = sqs.values("id", "score")
        self.assertEqual(scores_dict[0], {"id": 1, "score": 0})
def build_form(self, form_kwargs=None):
    """Build the search form, injecting an optionally-ordered
    SearchQuerySet when none was configured on the view."""
    if form_kwargs is None:
        form_kwargs = {}
    if self.searchqueryset is None:
        requested_order = self.request.GET.get('o')
        queryset = SearchQuerySet().models(*self.get_models())
        # Only honour whitelisted ordering keys.
        if requested_order in self.order_types:
            queryset = queryset.order_by(requested_order)
        form_kwargs['searchqueryset'] = queryset
    return super(SearchViewWithOrdering, self).build_form(form_kwargs)
class LiveSimpleSearchQuerySetTestCase(TestCase):
    """SearchQuerySet tests against the dedicated 'simple' connection."""
    fixtures = ['base_data.json', 'bulk_data.json']

    def setUp(self):
        super(LiveSimpleSearchQuerySetTestCase, self).setUp()
        # Stow.
        self.old_ui = connections['simple'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = SimpleMockSearchIndex()
        self.ui.build(indexes=[self.smmi])
        connections['simple']._index = self.ui
        self.sample_objs = MockModel.objects.all()
        self.sqs = SearchQuerySet(using='simple')

    def tearDown(self):
        # Restore.
        connections['simple']._index = self.old_ui
        super(LiveSimpleSearchQuerySetTestCase, self).tearDown()

    def test_general_queries(self):
        # For now, just make sure these don't throw an exception.
        # They won't work until the simple backend is improved.
        self.assertTrue(len(self.sqs.auto_query('daniel')) > 0)
        self.assertTrue(len(self.sqs.filter(text='index')) > 0)
        self.assertTrue(len(self.sqs.exclude(name='daniel')) > 0)
        self.assertTrue(len(self.sqs.order_by('-pub_date')) > 0)

    def test_general_queries_unicode(self):
        # Non-ASCII queries should simply return no results, not crash.
        self.assertEqual(len(self.sqs.auto_query(u'Привет')), 0)

    def test_more_like_this(self):
        # MLT shouldn't be horribly broken. This used to throw an exception.
        mm1 = MockModel.objects.get(pk=1)
        self.assertEqual(len(self.sqs.filter(text=1).more_like_this(mm1)), 0)

    def test_values_queries(self):
        # The simple backend reports a constant score of 0.
        sqs = self.sqs.auto_query('daniel')
        self.assertTrue(len(sqs) > 0)
        flat_scores = sqs.values_list("score", flat=True)
        self.assertEqual(flat_scores[0], 0)
        scores = sqs.values_list("id", "score")
        self.assertEqual(scores[0], [1, 0])
        scores_dict = sqs.values("id", "score")
        self.assertEqual(scores_dict[0], {"id": 1, "score": 0})
def get_queryset(self):
    """Return Image search results filtered by the 'filter' GET param
    and ordered per the 'sort' param (default: newest uploads)."""
    sqs = SearchQuerySet().all().models(Image)

    # Each filter keyword maps to the haystack kwargs it implies.
    filter_map = {
        'all_ds': {'is_deep_sky': True},
        'all_ss': {'is_solar_system': True},
        'sun': {'is_sun': True},
        'moon': {'is_moon': True},
        'planets': {'is_planets': True},
        'comets': {'is_comets': True},
        'wide': {'subject_type': 300},
        'trails': {'subject_type': 400},
        'northernlights': {'subject_type': 450},
        'gear': {'subject_type': 500},
        'products': {'is_commercial': True},
        'other': {'subject_type': 600},
    }
    criteria = filter_map.get(self.request.GET.get('filter'))
    if criteria:
        sqs = sqs.filter(**criteria)

    # Unknown or missing sort keys fall back to newest uploads first.
    try:
        sqs = sqs.order_by(self.sorting_map[self.request.GET.get('sort')])
    except KeyError:
        sqs = sqs.order_by('-uploaded')

    return sqs
def search(request):
    """Full-text search over mail threads.

    Redirects to the index when no usable search tokens were extracted,
    renders an empty-results page when nothing matches, and otherwise
    delegates rendering to the index view with the matching threads.
    """
    tokens = _search_tokens(request)
    # BUG FIX: the original tested `len(tokens) is None`, which can never be
    # true (len() returns an int) and raises TypeError when tokens is None.
    # Redirect whenever there are no tokens at all (None or empty).
    if not tokens:
        return HttpResponseRedirect(reverse('mail.views.index'))
    sqs = SearchQuerySet().models(Thread)
    # OR one filter per token so that any token may match.
    for t in tokens:
        sqs = sqs.filter_or(text_and_recipients=t)
    sqs = sqs.order_by('-date')
    if sqs.count() == 0:
        return render_to_response('search_empty.html', {'path': request.path},
                                  context_instance=RequestContext(request))
    return index(request, search=tokens, threads=sqs)
def search_listing(request, model, template_name='search/search.html'):
    """Render a faceted, name-ordered search listing for one model type."""
    # Extract the model type from the full path, which should be the plural name
    # of a valid model type (ex: '/users/')
    if model not in MODEL_FACETS.keys():
        raise Http404

    sqs = SearchQuerySet().models(model, ).order_by('name')
    for facet_field in MODEL_FACETS[model]:
        sqs = sqs.facet(facet_field)

    search_view = search_view_factory(
        view_class=FacetedSearchCustomView,
        template=template_name,
        searchqueryset=sqs,
        form_class=FacetedSearchListingForm,
    )
    return search_view(request)
def __init__(self, haystack_result, obj, user, *args, **kwargs):
    """Build a detailed idea map result, attaching the projects created by
    the idea's groups plus creator/follow metadata to kwargs."""
    # collect group's created visible projects
    sqs = SearchQuerySet().models(SEARCH_MODEL_NAMES_REVERSE['projects'])
    sqs = sqs.filter_and(id__in=obj.created_groups.all().values_list('id', flat=True))
    # the preview for projects and groups is always visible for everyone!
    #sqs = filter_searchqueryset_for_read_access(sqs, user)
    sqs = sqs.order_by('title')
    kwargs.update({
        'projects': [HaystackProjectMapCard(result) for result in sqs],
        # Link to the group-creation page, pre-filled with this idea's
        # item id and (escaped) title.
        'action_url_1': _prepend_url(user, obj.portal) + reverse('cosinnus:group-add') + ('?idea=%s&name=%s' % (itemid_from_searchresult(haystack_result), escape(haystack_result.title))),
        'creator_name': obj.creator.get_full_name(),
        'creator_slug': obj.creator.username,
        'followed': obj.is_user_following(user),
    })
    # NOTE(review): __init__ conventionally returns None; returning the
    # super() result is unusual but preserved here.
    ret = super(DetailedIdeaMapResult, self).__init__(haystack_result, obj, user, *args, **kwargs)
    return ret
def __init__(self, haystack_result, obj, user, *args, **kwargs): kwargs.update({ 'is_member': check_ug_membership(user, obj.group), 'time_html': haystack_result.humanized_event_time_html, }) # collect visible attending users sqs = SearchQuerySet().models(SEARCH_MODEL_NAMES_REVERSE['people']) sqs = sqs.filter_and(user_id__in=haystack_result.participants) sqs = filter_searchqueryset_for_read_access(sqs, user) sqs = sqs.order_by('title') kwargs.update({ 'participants': [HaystackUserMapCard(result) for result in sqs], 'participant_count': haystack_result.participant_count, 'followed': obj.is_user_following(user), }) return super(DetailedEventResult, self).__init__(haystack_result, obj, user, *args, **kwargs)
def _search_objects(query, filters, facets, model_object, order_by=None):
    """
    Search objects via solR.

    query: the user search query
    filters: list containing filters
    facets: the list of facets
    model_object: the model the search is refering to

    If a filter key is repeated, we apply a OR lookup.

    return a queryset
    """
    log.debug('search_objects', query=query, filter=filters, facets=facets,
              model_object=model_object, order_by=order_by)

    # start queryset
    queryset = SearchQuerySet().models(
        model_object)  # .narrow("namespace:(%s)" % namespace.name)

    # filter by facets filters
    for key, values in filters.items():
        tmp = None
        if isinstance(values, (list, tuple)):
            # apply OR lookup if same key is repeated.
            # NOTE(review): despite the comment and docstring, repeated
            # values for one key are combined with AND (&=) here —
            # confirm whether |= was intended.
            for value in values:
                if tmp is None:
                    tmp = Q(**{key: Exact(value)})
                else:
                    tmp &= Q(**{key: Exact(value)})
        else:
            # only one value, no need to loop.
            tmp = Q(**{key: Exact(values)})
        queryset = queryset.filter(tmp)

    # execute the query
    if query:
        queryset = queryset.filter(content=AutoQuery(query))

    # get facets
    if facets:
        for field in facets:
            queryset = queryset.facet(field)

    # apply order_by if any
    if order_by:
        queryset = queryset.order_by(order_by)

    return queryset
def __search__(self, content="", areas="", tags="", **kwargs):
    """Run a haystack search over self.models and return the current page.

    content: free-text query; empty means match all documents.
    areas:   comma-separated area values, matched with __in.
    tags:    comma-separated tags, OR-ed together.
    kwargs:  extra filter kwargs passed straight to sqs.filter().

    Returns the slice [self.start : self.start + self.num_of_records];
    the full (unsliced) queryset is kept on self.sqs_full.
    """
    start = self.start
    end = start + self.num_of_records
    if not content:
        sqs = SearchQuerySet().models(*self.models).all()
    else:
        sqs = SearchQuerySet().models(*self.models).filter(content=content)
    if areas:
        sqs = sqs.filter(area__in=areas.split(","))
    if tags:
        # Combine all tag terms into a single OR filter.
        tags_list = tags.split(",")
        Q_list = [Q(tags=tag) for tag in tags_list]
        sqs = sqs.filter(reduce(operator.or_, Q_list))
    if kwargs:
        sqs = sqs.filter(**kwargs)
    sqs = sqs.order_by(self.order_by)
    self.sqs_full = sqs
    return sqs[start: end]
def list(self):
    """Return the faceted, name-ordered result set for self.search_criteria.

    search_criteria is a ';'-separated list of groups, each group being a
    ','-separated list of terms; every group narrows the results with its
    terms OR-ed together.
    """
    from haystack.query import SearchQuerySet

    results = SearchQuerySet()
    for facet in Facet.asset_facets:
        results = results.facet(facet)

    for group in self.search_criteria.split(';'):
        narrow_query = ' OR '.join(term.strip() for term in group.split(','))
        results = results.narrow(narrow_query)

    return results.order_by("name")
class LiveSimpleSearchQuerySetTestCase(TestCase):
    """Integration tests for SearchQuerySet against the default connection,
    with DEBUG forced on so executed queries are recorded."""

    fixtures = ['bulk_data.json']

    def setUp(self):
        super(LiveSimpleSearchQuerySetTestCase, self).setUp()

        # Stow.
        # DEBUG must be True for connections[...].queries to be populated.
        self.old_debug = settings.DEBUG
        settings.DEBUG = True
        self.old_ui = connections['default'].get_unified_index()
        self.ui = UnifiedIndex()
        self.smmi = SimpleMockSearchIndex()
        self.ui.build(indexes=[self.smmi])
        connections['default']._index = self.ui

        self.sample_objs = MockModel.objects.all()
        self.sqs = SearchQuerySet()

    def tearDown(self):
        # Restore.
        connections['default']._index = self.old_ui
        settings.DEBUG = self.old_debug
        super(LiveSimpleSearchQuerySetTestCase, self).tearDown()

    def test_general_queries(self):
        # For now, just make sure these don't throw an exception.
        # They won't work until the simple backend is improved.
        self.assertTrue(len(self.sqs.auto_query('daniel')) > 0)
        self.assertTrue(len(self.sqs.filter(text='index')) > 0)
        self.assertTrue(len(self.sqs.exclude(name='daniel')) > 0)
        self.assertTrue(len(self.sqs.order_by('-pub_date')) > 0)

    def test_values_queries(self):
        sqs = self.sqs.auto_query('daniel')
        self.assertTrue(len(sqs) > 0)

        # The simple backend reports a score of 0 for every hit.
        flat_scores = sqs.values_list("score", flat=True)
        self.assertEqual(flat_scores[0], 0)

        scores = sqs.values_list("id", "score")
        self.assertEqual(scores[0], [1, 0])

        scores_dict = sqs.values("id", "score")
        self.assertEqual(scores_dict[0], {"id": 1, "score": 0})
def my_search(request, template='search/search.html', extra_context=None):
    """Search listing filtered by category, sub-category, price range and
    location from GET parameters; deleted items (status 'D') are excluded.

    BUG FIX: removed the leftover debug `print` statements that dumped
    every GET parameter to stdout on each request, and reuse the already
    extracted `q` instead of re-reading request.GET['q'].
    """
    data = {}
    if extra_context is not None:
        data.update(extra_context)

    search_cat = request.GET.get('cat', '')
    sub_cat = request.GET.get('sub_cat', '')
    min_price = request.GET.get('min_price', '')
    max_price = request.GET.get('max_price', '')
    location = request.GET.get('location', '')
    q = request.GET.get('q', '')

    # Never show deleted items.
    sqs = SearchQuerySet().exclude(status='D')

    if q:
        sqs = sqs.filter(content=AutoQuery(q))
    if search_cat:
        sqs = sqs.filter(category=search_cat)
    if sub_cat:
        sqs = sqs.filter(subcategory=sub_cat)
    if min_price:
        sqs = sqs.filter(price__gte=min_price)
    if max_price:
        sqs = sqs.filter(price__lte=max_price)
    if location:
        sqs = sqs.filter(location=location)

    # Newest items first.
    sqs = sqs.order_by('-added')

    data['results'] = sqs
    return render_to_response(template, data,
                              context_instance=RequestContext(request))
def get_queryset(self):
    """Debater queryset for autocomplete: optionally narrowed by the
    free-text query (resolved through the search index) and by the
    forwarded school id, newest records first."""
    if self.q:
        # Resolve the free-text query through the search index first,
        # then fetch the matching Debater rows from the ORM.
        hits = SearchQuerySet().models(Debater).filter(content=self.q)
        qs = Debater.objects.filter(id__in=[hit.pk for hit in hits.all()])
    else:
        qs = Debater.objects

    qs = qs.order_by('-pk')

    school = self.forwarded.get('school', None)
    if school:
        qs = qs.filter(school__id=school)

    return qs
def get_autocomplete(self, request, **kwargs):
    """Autocomplete endpoint for public bodies.

    Returns JSON with the original query, display suggestions of the form
    "Name (Jurisdiction)", and the structured per-body data.
    """
    self.method_check(request, allowed=['get'])
    query = request.GET.get('query', '')
    # Keep only words long enough to be useful, each trimmed to the
    # maximum autocomplete prefix length.
    short_query = ' '.join([
        q[:AUTOCOMPLETE_MAX_CHAR] for q in query.split()
        if len(q) >= AUTOCOMPLETE_MIN_CHAR
    ])
    sqs = []
    if short_query:
        sqs = SearchQuerySet().models(PublicBody).autocomplete(
            name_auto=short_query)
        sqs = sqs.order_by('name')
        # BUG FIX: the jurisdiction narrowing must only run when we have a
        # real SearchQuerySet. Previously it ran unconditionally and
        # crashed with AttributeError on the empty-list fallback whenever
        # 'jurisdiction' was supplied without a usable query.
        jurisdiction = request.GET.get('jurisdiction', None)
        if jurisdiction is not None:
            sqs = sqs.filter(jurisdiction=sqs.query.clean(jurisdiction))
    names = []
    data = []
    if len(sqs):
        # Sniffing results of different search engine backends
        # Real search engine vs. simple backend
        # FIXME: Make this better
        pb = sqs[0]
        if pb.jurisdiction is not None:
            jur_get = lambda pb: pb.jurisdiction
        elif pb.object is not None:
            jur_get = lambda pb: pb.object.jurisdiction.name
        else:
            jur_get = lambda pb: None
        sqs = sorted(sqs, key=lambda x: x.name)
        names = [u"%s (%s)" % (x.name, jur_get(x)) for x in sqs]
        data = [{
            "name": x.name,
            "jurisdiction": jur_get(x),
            "id": x.pk,
            "url": x.url
        } for x in sqs]
    response = {"query": query, "suggestions": names, "data": data}
    return self.create_response(request, response)
def get_results(self, request):
    """Populate the changelist result attributes from a haystack search.

    Mirrors Django's ChangeList.get_results(), but resolves the result set
    through the configured haystack connection instead of the ORM.
    """
    # Fall back to the stock changelist when there is neither a search
    # term nor any GET parameters at all.
    # BUG FIX: was `len(request.GET.keys()) is 0` — `is` compares object
    # identity, which is undefined behaviour for int literals; use == 0.
    if SEARCH_VAR not in request.GET and len(request.GET.keys()) == 0:
        return super(SearchChangeList, self).get_results(request)

    filters = self.custom_get_filters(request)

    # Note that pagination is 0-based, not 1-based.
    sqs = SearchQuerySet(self.haystack_connection).models(self.model)
    if request.GET.get(SEARCH_VAR, False):
        sqs = sqs.auto_query(request.GET[SEARCH_VAR])
    if filters:
        sqs = sqs.filter(**filters)
    sqs = sqs.load_all()

    # Set ordering.
    ordering = self.get_ordering(request, sqs)
    sqs = sqs.order_by(*ordering)

    paginator = Paginator(sqs, self.list_per_page)
    # Get the number of objects, with admin filters applied.
    result_count = paginator.count
    # The unfiltered count covers the whole index for this model.
    full_result_count = SearchQuerySet(self.haystack_connection).models(self.model).all().count()

    can_show_all = result_count <= list_max_show_all(self)
    multi_page = result_count > self.list_per_page

    # Get the list of objects to display on this page.
    try:
        result_list = paginator.page(self.page_num + 1).object_list
        # Grab just the Django models, since that's what everything else is
        # expecting.
        result_list = [result.object for result in result_list if result]
    except InvalidPage:
        result_list = ()

    self.result_count = result_count
    self.full_result_count = full_result_count
    self.result_list = result_list
    self.can_show_all = can_show_all
    self.multi_page = multi_page
    self.paginator = paginator
def get_ultimas_publicacoes(self):
    """Return up to 20 of the latest administrative documents that have a
    certificate attached, scanning at most the 100 newest search results."""
    sqs = SearchQuerySet().all()
    # Restrict to public work areas (at=0 appears to mean "no work area"
    # — TODO confirm against the index definition).
    sqs = sqs.filter(
        Q(at=0) |
        Q(at__in=AreaTrabalho.objects.areatrabalho_publica().values_list(
            'id', flat=True)))
    sqs = sqs.models(
        *haystack_get_models('protocoloadm.documentoadministrativo'))
    sqs = sqs.order_by('-data', '-last_update')[:100]

    r = []
    for sr in sqs:
        # Keep only documents that actually have a certificate.
        if sr.object and sr.object._certidao:
            if sr.object._certidao.exists():
                r.append(sr)
        if len(r) == 20:
            break
    return r
def search_listing(request, model, template_name='search/search.html'):
    """Render a faceted search listing with GET-controlled sorting."""
    # Extract the model type from the full path, which should be the plural name
    # of a valid model type (ex: '/users/')
    if model not in MODEL_FACETS.keys():
        raise Http404

    sqs = SearchQuerySet().models(model, )

    # Sort field defaults to 'created'; direction is descending unless
    # dir=asc is given explicitly.
    sort_field = request.GET.get("sort", "created")
    direction = "" if request.GET.get("dir") == "asc" else "-"
    sqs = sqs.order_by("{direction}{field}".format(field=sort_field,
                                                   direction=direction))

    for facet_name in MODEL_FACETS[model]:
        sqs = sqs.facet(facet_name)

    search_view = search_view_factory(
        view_class=FacetedSearchCustomView,
        template=template_name,
        searchqueryset=sqs,
        form_class=FacetedSearchListingForm,
    )
    return search_view(request)
def glossary_page(request):
    """
    Display the entire Glossary page if there is no user query
    and allows users to search for terms

    The view will fetch the result from the search engine and display
    the results.

    Uses :class:`~voyages.apps.help.models.Glossary`
    """
    query = ""
    lang = request.LANGUAGE_CODE
    # Glossary terms are indexed per language.
    field = 'glossary_term_lang_' + lang
    results = SearchQuerySet().models(Glossary)
    if request.method == 'POST':
        form = HighlightedSearchForm(request.POST)
        if form.is_valid():
            # Perform the query
            query = form.cleaned_data['q']
            results = results.filter(content=query)
        else:
            form = HighlightedSearchForm()
    else:
        form = HighlightedSearchForm()
    results = results.order_by(field)

    letters, letters_found, glossary_content = _sort_glossary(results, lang)

    # Best-effort alphabetical ordering of the glossary blocks.
    # BUG FIX: was a bare `except:` which also swallowed SystemExit and
    # KeyboardInterrupt; catch only what the sort key can realistically
    # raise (missing 'letter' key, unorderable letter values).
    try:
        glossary_content = sorted(glossary_content, key=lambda k: k['letter'])
    except (KeyError, TypeError):
        pass

    return render(
        request, 'help/page_glossary.html', {
            'glossary': glossary_content,
            'letters': letters,
            'form': form,
            'letters_found': letters_found,
            'results': results,
            'query': query
        })
def get_section_data(self, section):
    """Run the search for one configured section and return its context dict,
    including highlighted results and the result count."""
    defaults = self.search_sections[section]
    # Optional per-section filter/exclude configuration.
    extra_filter = defaults.get('filter', {})
    filter_args = extra_filter.get('args', [])
    filter_kwargs = extra_filter.get('kwargs', {})
    extra_exclude = defaults.get('exclude', {})
    query = SearchQuerySet().models(defaults['model'])
    if extra_exclude:
        query = query.exclude(**extra_exclude)
    query = query.filter(content=AutoQuery(self.query),
                         *filter_args,
                         **filter_kwargs)
    if self.order == 'date':
        query = query.order_by('-start_date')
    result = defaults.copy()
    result['results'] = query.highlight()
    result['results_count'] = result['results'].count()
    result['section'] = section
    # Dashed variant of the section name, for URLs/CSS classes.
    result['section_dashes'] = section.replace('_', '-')
    return result
def get_global_context(self, context):
    """Build the cross-model search context: optional 'top hits' on the
    first page plus a paginated combined result set that excludes anything
    already shown as a top hit."""
    # Find all the models to search over...
    models = set(
        self.search_sections[section]['model']
        for section in self.search_sections
    )
    show_top_hits = (self.page == '1' or not self.page)
    top_hits_ids = []
    if show_top_hits:
        context['top_hits'] = []
        # A section is promoted to 'top hits' only when its result count
        # is at or below its configured threshold.
        for section, max_for_top_hits in SearchBaseView.top_hits_under.items():
            data = self.get_section_data(section)
            if data['results_count'] <= max_for_top_hits:
                context['top_hits'] += data['results']
        top_hits_ids = set(r.id for r in context['top_hits'])
    sqs = SearchQuerySet().models(*list(models))
    # Exclude anything that will already have been shown in the top hits:
    for top_hit_id in top_hits_ids:
        sqs = sqs.exclude(id=top_hit_id)
    sqs = sqs. \
        exclude(hidden=True). \
        filter(content=self.generate_fuzzy_query_object(self.query)). \
        highlight()
    # Optional inclusive start-date window.
    if self.start_date_range:
        sqs = sqs.filter(start_date__gte=self.start_date_range)
    if self.end_date_range:
        sqs = sqs.filter(start_date__lte=self.end_date_range)
    if self.order == 'date':
        sqs = sqs.order_by('-start_date')
    context['paginator'] = Paginator(sqs, self.results_per_page)
    context['page_obj'] = self.get_paginated_results(context['paginator'])
    return context
def handle(self, *args, **kwargs):
    """Award automatic badges.

    For each 'auto' badge: 'biggest' awards only the single top-ranked
    user for the indexed attribute; any other comparison builds a haystack
    lookup (attr__comparison, or plain attr for 'equal') against
    badge.value and awards every matching user.
    """
    for badge in Badge.objects.filter(type='auto'):
        if not badge.comparison:
            continue
        elif badge.comparison == 'biggest':
            # Only the top-ranked user gets a 'biggest' badge.
            order = u'-{}'.format(Badge.USER_ATTR_OPTS[badge.user_attr])
            sqs = SearchQuerySet().filter(type='user')
            user = sqs.order_by(order)[0]
            badge.awardees.add(User.objects.get(pk=user.pk))
            continue

        # BUG FIX: was `badge.comparison is not 'equal'` — identity
        # comparison against a string literal is implementation-dependent
        # (and a SyntaxWarning on modern Pythons); use != instead.
        comparison = u'__{}'.format(badge.comparison) \
            if badge.comparison != 'equal' else u''
        key = u'{}{}'.format(Badge.USER_ATTR_OPTS[badge.user_attr],
                             comparison)
        opts = {key: badge.value}
        sqs = SearchQuerySet().filter(type='user', **opts)
        for user in sqs:
            badge.awardees.add(User.objects.get(pk=user.pk))
def __init__(self, *args, **kwargs):
    """Prepare the faceted act-search queryset and install it (plus the
    default form class) into kwargs before delegating to the base view."""
    # Dynamically compute the per-year date ranges used for faceting.
    current_year = datetime.today().year
    for year in xrange(settings.OM_START_YEAR, current_year + 1):
        self.DATE_INTERVALS_RANGES[year] = self._build_date_range(year)

    sqs = SearchQuerySet().filter(django_ct='acts.act') \
        .exclude(act_type='fascicolo') \
        .facet('act_type').facet('is_key').facet('is_proposal') \
        .facet('initiative').facet('organ').facet('month')

    # One query facet per computed publication-date range.
    for year_range in self.DATE_INTERVALS_RANGES.values():
        sqs = sqs.query_facet('pub_date', year_range['qrange'])

    kwargs['searchqueryset'] = sqs.order_by('-pub_date').highlight()

    # Needed to switch out the default form class.
    if kwargs.get('form_class') is None:
        kwargs['form_class'] = RangeFacetedSearchForm

    super(ActSearchView, self).__init__(*args, **kwargs)
def autocomplete(request):
    """Ticket autocomplete endpoint.

    Searches open tickets in the requested models for every word of the
    'q' parameter and returns a JSON list of {caption, id[, closed]}
    entries, or a single spelling suggestion when 'suggestions' is given.
    """
    # for datetime format
    # NOTE(review): setlocale is process-global and not thread-safe —
    # confirm this is acceptable for the deployment model.
    locale.setlocale(locale.LC_ALL, 'de_DE.UTF-8')

    args = []
    models = request.GET.getlist('models')
    for model in models:
        args.append(apps.get_model(model))
    sqs = SearchQuerySet().models(*set(args))
    # only open tickets in autocomplete
    sqs = sqs.filter(closed=False)
    # Non-staff users may only see their own organisation's tickets.
    if not request.user.is_staff:
        sqs = sqs.filter(customer=request.organisation.pk)
    # Every whitespace-separated word must match (filters AND together).
    q = request.GET.get('q', '').split(' ')
    for word in q:
        word = word.strip()
        if word:
            sqs = sqs.filter(content_auto__contains=word)
    if len(models) == 1 and models[0] == 'web.test':
        sqs = sqs.order_by('-last_action_date')
    result = []
    if 'suggestions' in request.GET:
        suggestion = sqs.spelling_suggestion()
        if suggestion:
            result.append({'caption': suggestion})
    else:
        for ele in sqs:
            data = {'caption': str(ele.caption), 'id': ele.pk}
            if hasattr(ele, 'closed') and ele.closed:
                data['closed'] = ele.closed
            result.append(data)
    return JsonResponse(result, safe=False)
def recommendation(self, request):
    """Retrieve recommendation from shows in history

    :param request:
    :return:
    """
    params = request.data
    queryset = SearchQuerySet().models(HistorylistShow)

    # Restrict to the given friends' histories.
    if "friends" in params:
        queryset = queryset.filter(user_slug__in=params["friends"])

    # Keep shows whose year falls inside the inclusive [start, end] range.
    if "year" in params:
        start_year, end_year = params["year"][0], params["year"][1]
        queryset = queryset.filter(movie_year__gte=start_year,
                                   movie_year__lte=end_year)

    if "recent" in params and params["recent"]:
        queryset = queryset.order_by('-movie_year')

    serializer = ShowRecommendationSerializer(queryset, many=True)
    return Response(serializer.data)
def search(request):
    """DjangoBB forum search view.

    Dispatches on the 'action' GET parameter: canned topic listings
    (show_24h, show_new, show_unanswered, show_subscriptions, show_user)
    or a full haystack search over posts ('search'). With no action, the
    empty search form is rendered.
    """
    # TODO: move to form
    if 'action' in request.GET:
        action = request.GET['action']
        #FIXME: show_user for anonymous raise exception,
        #django bug http://code.djangoproject.com/changeset/14087 :|
        groups = request.user.groups.all() or [] #removed after django > 1.2.3 release
        # Base topic queryset restricted to categories visible to the
        # user's groups (or categories with no group restriction).
        topics = Topic.objects.filter(deleted=False).filter(
            Q(forum__category__groups__in=groups) | \
            Q(forum__category__groups__isnull=True))

        if action == 'show_24h':
            date = datetime.today() - timedelta(1)
            topics = topics.filter(created__gte=date)
        elif action == 'show_new':
            try:
                last_read = PostTracking.objects.get(
                    user=request.user).last_read
            except PostTracking.DoesNotExist:
                last_read = None
            if last_read:
                topics = topics.filter(last_post__updated__gte=last_read).all()
            else:
                #searching more than forum_settings.SEARCH_PAGE_SIZE in this way - not good idea :]
                topics = [
                    topic for topic in topics[:forum_settings.SEARCH_PAGE_SIZE]
                    if forum_extras.has_unreads(topic, request.user)
                ]
        elif action == 'show_unanswered':
            # A topic with exactly one post has no replies.
            topics = topics.filter(post_count=1)
        elif action == 'show_subscriptions':
            topics = topics.filter(subscribers__id=request.user.id)
        elif action == 'show_user':
            user_id = request.GET['user_id']
            posts = Post.objects.filter(deleted=False, user__id=user_id)
            # Keep only topics the user may actually see.
            topics = [post.topic for post in posts if post.topic in topics]
        elif action == 'search':
            keywords = request.GET.get('keywords')
            author = request.GET.get('author')
            forum = request.GET.get('forum')
            search_in = request.GET.get('search_in')
            sort_by = request.GET.get('sort_by')
            sort_dir = request.GET.get('sort_dir')

            # A search needs at least keywords or an author.
            if not (keywords or author):
                return HttpResponseRedirect(reverse('djangobb:search'))

            query = SearchQuerySet().models(Post).filter(deleted=0)

            if author:
                query = query.filter(author__username=author)
            # forum id u'0' means "all forums".
            if forum != u'0':
                query = query.filter(forum__id=forum)
            if keywords:
                if search_in == 'all':
                    query = query.filter(
                        SQ(topic=keywords) | SQ(text=keywords))
                elif search_in == 'message':
                    query = query.filter(text=keywords)
                elif search_in == 'topic':
                    query = query.filter(topic=keywords)

            # add exlusions for categories user does not have access too
            for category in Category.objects.all():
                if not category.has_access(request.user):
                    query = query.exclude(category=category)

            # Map the numeric sort_by parameter onto an index field.
            order = {
                '0': 'created',
                '1': 'author',
                '2': 'topic',
                '3': 'forum'
            }.get(sort_by, 'created')
            if sort_dir == 'DESC':
                order = '-' + order
            posts = query.order_by(order)

            if 'topics' in request.GET['show_as']:
                return render(request, 'djangobb_forum/search_topics.html',
                              {'results': TopicFromPostResult(posts)})
            elif 'posts' in request.GET['show_as']:
                return render(request, 'djangobb_forum/search_posts.html',
                              {'results': posts})
        return render(request, 'djangobb_forum/search_topics.html',
                      {'results': topics})
    else:
        form = PostSearchForm()
        return render(request, 'djangobb_forum/search_form.html', {
            'categories': Category.objects.all(),
            'form': form,
        })
def search(self):
    """This search method starts from a new query of all documents in the
    index instead of getting the existing SearchQuerySet from the super
    class. This is mainly to clear the default query of the index for the
    value of q. HOWEVER, this requires redoing any actions normally taken
    before the SearchForm is called, such as faceting the SearchQuerySet."""
    # faceting must be done here manually b/c we are creating a new SearchQuerySet
    sqs = SearchQuerySet().facet('facet_prop_19').facet('facet_prop_12').facet('facet_prop_59')

    if not self.is_valid():
        return self.no_query_found()

    # Up to three query-builder rows: property, search type, query text,
    # joined pairwise by the operators in op_list.
    prop_list = [self.cleaned_data['property'], self.cleaned_data['property2'], self.cleaned_data['property3']]
    type_list = [self.cleaned_data['search_type'], self.cleaned_data['search_type2'], self.cleaned_data['search_type3']]
    query_list = [self.cleaned_data['q'], self.cleaned_data['q2'], self.cleaned_data['q3']]
    op_list = [self.cleaned_data['op'], self.cleaned_data['op2']]

    # SELECTED FIELDS SEARCH
    if self.cleaned_data['object_type']:
        # Match the chosen node or any of its descendants in the tree.
        value_tree = self.cleaned_data['object_type'].get_descendants(include_self=True)
        tsq = SQ()
        for index, node in enumerate(value_tree):
            if index == 0:
                tsq = SQ(facet_prop_19 = node.id)
            else:
                tsq = tsq | SQ(facet_prop_19 = node.id)
        sqs = sqs.filter(tsq)
    if self.cleaned_data['material']:
        value_tree = self.cleaned_data['material'].get_descendants(include_self=True)
        tsq = SQ()
        for index, node in enumerate(value_tree):
            if index == 0:
                tsq = SQ(facet_prop_12 = node.id)
            else:
                tsq = tsq | SQ(facet_prop_12 = node.id)
        sqs = sqs.filter(tsq)
    if self.cleaned_data['museum']:
        sqs = sqs.filter(facet_prop_59 = self.cleaned_data['museum'].id)
    if self.cleaned_data['keyword']:
        # Expand p/pg/l-number style references into wildcard terms.
        pg_fix = re.sub(r'(\s*)([pPlL][gG]?)(\s*?[\./]?\s*)(\d+)', r'\1\2* *\4*', self.cleaned_data['keyword'])
        sqs = sqs.filter(content = pg_fix)
    if self.cleaned_data['museum_num']:
        # Museum numbers may live in any of these prop_* fields; OR them.
        mus_sq = SQ()
        mus_nums = ['31', '32', '33', '34', '35', '36', '38', '40', '41', '42', '43', '44', '45', '73', '128']
        for index, num in enumerate(mus_nums):
            kwargs = {str('prop_%s' % (num)) : self.cleaned_data['museum_num']}
            if index == 0:
                mus_sq = SQ(**kwargs)
            else:
                mus_sq = mus_sq | SQ(**kwargs)
        sqs = sqs.filter(mus_sq)
    if self.cleaned_data['unum']:
        sqs = sqs.filter(prop_23 = self.cleaned_data['unum'])

    # ADVANCED SEARCH
    # query object for building full advanced query
    sq = SQ()
    modified = False
    for j in range(0, len(prop_list)):
        prop = 'content'
        type = type_list[j]
        query = query_list[j]
        operator = ''
        negate = False
        kwargs = {}
        # check for operator
        if j > 0:
            operator = op_list[j - 1]
        # check for not
        if type.startswith('!'):
            negate = True
            type = type[1:]
        # if this row of query builder is blank, skip
        if (query == '') and (type != 'blank'):
            continue
        else:
            modified = True
        # check if a property was selected
        if prop_list[j] != None:
            if prop_list[j].facet:
                prop = 'facet_prop_'+ str(prop_list[j].id)
            else:
                prop = 'prop_'+ str(prop_list[j].id)
        # check if search type was selected
        if type == '':
            type = 'contains'
        # determine the type of search
        # CONTAINS -> special case misspellings
        if type == 'contains':
            query_text = '('
            # special misspellings
            if prop == 'prop_23':
                #if doing a contains search for u number, get first instance of numbers followed by a 0 or 1 letter
                match = re.search(r'(\d+[a-zA-Z]?)', query)
                if match:
                    query = match.group(0)
                    query_text += (' ' + query + '? OR ')
            else:
                # Normalise u-number and pg-number spellings into wildcards.
                query = re.sub(r'(\s*)([uU]\s*?\.?\s*)(\d+)([a-zA-Z]*)', r'\1u* *\3*', query)
                query = re.sub(r'(\s*)([pP][gG]\s*?[\./]?\s*)(\w+)', r'\1pg* *\3*', query)
            query_text += '(' + query + '))'
            kwargs = {str('%s' % prop) : Raw(query_text)}
        # LIKE -> 'a*b' or 'a?b'
        elif type == 'like':
            keywords = query.split()
            if keywords:
                query_text = '('
                for i, word in enumerate(keywords):
                    if i > 0:
                        query_text += ' AND '
                    query_text += word
                query_text += ')'
                kwargs = {str('%s' % prop) : Raw(query_text)}
        # BLANK -> returns all subjects that don't have a value for given property
        elif type == 'blank':
            #if property is Any, then return all b/c query asks for doc with 'any' blank properties
            if self.cleaned_data['property'] == None:
                continue
            # BLANK is a special case negation (essentially a double negative), so handle differently
            if negate:
                kwargs = {str('%s' % prop) : Raw('[1 TO *]')}
                negate = False
            else:
                kwargs = {str('-%s' % prop) : Raw('[* TO *]')}
        # ENDSWITH -> '*abc'
        elif type == 'endswith':
            keywords = query.split()
            if keywords:
                query_text = '('
                for i, word in enumerate(keywords):
                    if i > 0:
                        query_text += ' AND '
                    query_text += ('*' + word)
                query_text += ')'
                kwargs = {str('%s' % prop) : Raw(query_text)}
        else:
            # Fall back to a standard haystack field lookup (prop__type).
            kwargs = {str('%s__%s' % (prop, type)) : str('%s' % query)}
        # Combine this row into the accumulated query with its operator.
        if operator == 'or':
            if negate:
                sq = sq | ~SQ(**kwargs)
            else:
                sq = sq | SQ(**kwargs)
        elif operator == 'and':
            if negate:
                sq = sq & ~SQ(**kwargs)
            else:
                sq = sq & SQ(**kwargs)
        else:
            if negate:
                sq = ~SQ(**kwargs)
            else:
                sq = SQ(**kwargs)
    if modified:
        sqs = sqs.filter(sq)

    if self.cleaned_data['order']:
        # display_field is prefixed (first 5 chars stripped) to get the
        # index field name — TODO confirm the prefix convention.
        prop_order = self.cleaned_data['order'].display_field[5:]
        return sqs.order_by(prop_order)
    else:
        return sqs.order_by('-django_ct')
def get_context(self, request, *args, **kwargs):
    """Build the activity-search page context: faceted, filtered and
    paginated search results plus facet metadata for the template."""
    # (facet name, (model, <unused flag>, facet size)) for every filter
    # dimension offered on the page.
    facet_map = (
        ('building_block', (ActivityBuildingBlock, False, 10)),
        ('school_subject', (ActivitySchoolSubject, False, 25)),
        ('topic', (ActivityTopic, True, 25)),
        ('grade_level', (ActivityGradeLevel, False, 10)),
        ('age_range', (ActivityAgeRange, False, 10)),
        ('student_characteristics', (ActivityStudentCharacteristics, False, 10)),  # noqa: E501
        ('activity_type', (ActivityType, False, 10)),
        ('teaching_strategy', (ActivityTeachingStrategy, False, 25)),
        ('blooms_taxonomy_level', (ActivityBloomsTaxonomyLevel, False, 25)),  # noqa: E501
        ('activity_duration', (ActivityDuration, False, 10)),
        ('jump_start_coalition', (ActivityJumpStartCoalition, False, 25)),
        ('council_for_economic_education', (ActivityCouncilForEconEd, False, 25)),  # noqa: E501
    )
    search_query = request.GET.get('q', '')  # haystack cleans this string
    sqs = SearchQuerySet().models(ActivityPage).filter(live=True)
    total_activities = sqs.count()
    # Load selected facets
    selected_facets = {}
    facet_queries = {}

    for facet, facet_config in facet_map:
        sqs = sqs.facet(str(facet), size=facet_config[2])
        # Collect the numeric facet values selected via GET and build the
        # matching "field_exact:v1 OR field_exact:v2" narrow query.
        if facet in request.GET and request.GET.get(facet):
            selected_facets[facet] = [
                int(value) for value in request.GET.getlist(facet)
                if value.isdigit()
            ]
            facet_queries[facet] = facet + '_exact:' + (
                " OR " + facet + "_exact:").join(
                    [str(value) for value in selected_facets[facet]])

    payload = {
        'search_query': search_query,
        'results': [],
        'total_results': 0,
        'total_activities': total_activities,
        'selected_facets': selected_facets,
        'facet_queries': facet_queries,
        'all_facets': {},
    }

    # Apply search query if it exists, but don't apply facets
    if search_query:
        sqs = sqs.filter(content=search_query).order_by(
            '-_score', '-date')  # noqa: E501
    else:
        sqs = sqs.order_by('-date')

    # Get all facets and their counts
    facet_counts = sqs.facet_counts()
    all_facets = self.get_all_facets(facet_map, sqs, facet_counts, facet_queries, selected_facets)  # noqa: E501

    # List all facet blocks that need to be expanded
    always_expanded = {'building_block', 'topic', 'school_subject'}
    # Also expand any facet block with at least one selected item.
    conditionally_expanded = {
        facet_name
        for facet_name, facet_items in all_facets.items()
        if any(facet['selected'] is True for facet in facet_items)
    }
    expanded_facets = always_expanded.union(set(conditionally_expanded))

    payload.update({
        'facet_counts': facet_counts,
        'all_facets': all_facets,
        'expanded_facets': expanded_facets,
    })

    # Apply all the active facet values to our search results
    for facet_narrow_query in facet_queries.values():
        sqs = sqs.narrow(facet_narrow_query)

    results = [activity.object for activity in sqs]
    total_results = sqs.count()

    payload.update({
        'results': results,
        'total_results': total_results,
    })
    self.results = payload
    results_per_page = validate_results_per_page(request)
    paginator = Paginator(payload['results'], results_per_page)
    current_page = validate_page_number(request, paginator)
    paginated_page = paginator.page(current_page)

    context = super(ActivityIndexPage, self).get_context(request)
    context.update({
        'facet_counts': facet_counts,
        'facets': all_facets,
        'activities': paginated_page,
        'total_results': total_results,
        'results_per_page': results_per_page,
        'current_page': current_page,
        'paginator': paginator,
        'show_filters': bool(facet_queries),
    })
    return context
def search_results(request):
    """Institution search endpoint with pagination and sorting.

    Recognises direct lender-id queries via LENDER_REGEXES, otherwise
    runs an autocomplete or full-content haystack query, then paginates
    and renders (HTML template or serialized JSON).
    """
    query_str = request.GET.get('q', '').strip()
    lender_id = False
    # Detect a direct lender-id query (agency + respondent id).
    for regex in LENDER_REGEXES:
        match = regex.match(query_str)
        if match:
            lender_id = match.group('agency') + match.group('respondent')
    query = SearchQuerySet().models(Institution).load_all()
    current_sort = request.GET.get('sort')
    if current_sort in ('assets', '-assets', 'num_loans', '-num_loans'):
        query = query.order_by(current_sort)
    else:
        current_sort = 'score'
    if lender_id:
        query = query.filter(lender_id=Exact(lender_id))
    elif query_str and request.GET.get('auto'):
        query = query.filter(text_auto=AutoQuery(query_str))
    elif query_str:
        query = query.filter(content=AutoQuery(query_str))
    else:
        query = []

    # number of results per page
    try:
        num_results = int(request.GET.get('num_results', '25'))
    except ValueError:
        num_results = 25

    # page number
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1

    # start and end results
    if page > 1:
        start_results = num_results * page - num_results
        end_results = num_results * page
    else:
        start_results = 0
        end_results = num_results

    sort = request.GET.get('sort', 'relevance')
    total_results = len(query)

    # total number of pages
    if total_results <= num_results:
        total_pages = 1
    else:
        total_pages = int(math.ceil(float(total_results) / float(num_results)))

    query = query[start_results:end_results]

    # next page
    # BUG FIX: was `page is total_pages` — identity comparison of ints is
    # only reliable for CPython's cached small ints; use == instead.
    if total_results < num_results or page == total_pages:
        next_page = 0
        end_results = total_results
    else:
        next_page = page + 1

    # previous page
    prev_page = page - 1

    results = []
    for result in query:
        result.object.num_loans = result.num_loans
        results.append(result.object)

    if request.accepted_renderer.format != 'html':
        results = InstitutionSerializer(results, many=True).data

    # to adjust for template
    start_results = start_results + 1

    return Response(
        {
            'institutions': results,
            'query_str': query_str,
            'num_results': num_results,
            'start_results': start_results,
            'end_results': end_results,
            'sort': sort,
            'page_num': page,
            'total_results': total_results,
            'next_page': next_page,
            'prev_page': prev_page,
            'total_pages': total_pages,
            'current_sort': current_sort
        },
        template_name='respondents/search_results.html')
class SearchQuerySetTestCase(TestCase):
    """Exercise the public SearchQuerySet API against mock backends.

    Uses mock search indexes/backends, so the exact hit counts (23 results)
    and the per-operation query counts asserted below are fixed by the mock
    data, not by a real search engine.
    """

    # Bulk model data loaded into the test database before each test.
    fixtures = ['bulk_data.json']

    def setUp(self):
        """Swap in a UnifiedIndex of mock indexes and populate the backend."""
        super(SearchQuerySetTestCase, self).setUp()

        # Stow.
        self.old_unified_index = connections['default']._index
        self.ui = UnifiedIndex()
        self.bmmsi = BasicMockModelSearchIndex()
        self.cpkmmsi = CharPKMockModelSearchIndex()
        self.ui.build(indexes=[self.bmmsi, self.cpkmmsi])
        connections['default']._index = self.ui

        # Update the "index".
        backend = connections['default'].get_backend()
        backend.clear()
        backend.update(self.bmmsi, MockModel.objects.all())

        self.msqs = SearchQuerySet()

        # Stow.
        reset_search_queries()

    def tearDown(self):
        # Restore.
        connections['default']._index = self.old_unified_index
        super(SearchQuerySetTestCase, self).tearDown()

    def test_len(self):
        # The mock backend always reports 23 hits.
        self.assertEqual(len(self.msqs), 23)

    def test_repr(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertRegexpMatches(
            repr(self.msqs),
            r'^<SearchQuerySet: query=<test_haystack.mocks.MockSearchQuery object'
            r' at 0x[0-9A-Fa-f]+>, using=None>$')

    def test_iter(self):
        # Full iteration should need exactly 3 backend queries to page
        # through all 23 results.
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        msqs = self.msqs.all()
        results = [int(res.pk) for res in msqs]
        self.assertEqual(results, [res.pk for res in MOCK_SEARCH_RESULTS[:23]])
        self.assertEqual(len(connections['default'].queries), 3)

    def test_slice(self):
        # A slice and a single index each cost exactly one backend query.
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.msqs.all()
        self.assertEqual([int(res.pk) for res in results[1:11]],
                         [res.pk for res in MOCK_SEARCH_RESULTS[1:11]])
        self.assertEqual(len(connections['default'].queries), 1)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.msqs.all()
        self.assertEqual(int(results[22].pk), MOCK_SEARCH_RESULTS[22].pk)
        self.assertEqual(len(connections['default'].queries), 1)

    def test_manual_iter(self):
        results = self.msqs.all()

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        check = [result.pk for result in results._manual_iter()]
        self.assertEqual(check, [
            u'1', u'2', u'3', u'4', u'5', u'6', u'7', u'8', u'9', u'10',
            u'11', u'12', u'13', u'14', u'15', u'16', u'17', u'18', u'19',
            u'20', u'21', u'22', u'23'
        ])
        self.assertEqual(len(connections['default'].queries), 3)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)

        # Test to ensure we properly fill the cache, even if we get fewer
        # results back (not a handled model) than the hit count indicates.
        # This will hang indefinitely if broken.
        old_ui = self.ui
        self.ui.build(indexes=[self.cpkmmsi])
        connections['default']._index = self.ui
        self.cpkmmsi.update()

        results = self.msqs.all()
        loaded = [result.pk for result in results._manual_iter()]
        self.assertEqual(loaded, [u'sometext', u'1234'])
        self.assertEqual(len(connections['default'].queries), 1)

        connections['default']._index = old_ui

    def test_fill_cache(self):
        # Each _fill_cache window should cost exactly one backend query and
        # grow the populated portion of the result cache accordingly.
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        results = self.msqs.all()
        self.assertEqual(len(results._result_cache), 0)
        self.assertEqual(len(connections['default'].queries), 0)
        results._fill_cache(0, 10)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 10)
        self.assertEqual(len(connections['default'].queries), 1)
        results._fill_cache(10, 20)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 20)
        self.assertEqual(len(connections['default'].queries), 2)

        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)

        # Test to ensure we properly fill the cache, even if we get fewer
        # results back (not a handled model) than the hit count indicates.
        sqs = SearchQuerySet().all()
        sqs.query.backend = MixedMockSearchBackend('default')
        results = sqs
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 0)
        self.assertEqual([
            int(result.pk) for result in results._result_cache
            if result is not None
        ], [])
        self.assertEqual(len(connections['default'].queries), 0)
        # The mixed backend drops some hits (e.g. pk 9), so the cache must
        # keep querying until the requested window is satisfied.
        results._fill_cache(0, 10)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 9)
        self.assertEqual([
            int(result.pk) for result in results._result_cache
            if result is not None
        ], [1, 2, 3, 4, 5, 6, 7, 8, 10])
        self.assertEqual(len(connections['default'].queries), 2)
        results._fill_cache(10, 20)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 17)
        self.assertEqual([
            int(result.pk) for result in results._result_cache
            if result is not None
        ], [1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 15, 16, 17, 18, 19, 20])
        self.assertEqual(len(connections['default'].queries), 4)
        results._fill_cache(20, 30)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 20)
        self.assertEqual([
            int(result.pk) for result in results._result_cache
            if result is not None
        ], [
            1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 15, 16, 17, 18, 19, 20, 21,
            22, 23
        ])
        self.assertEqual(len(connections['default'].queries), 6)

    def test_cache_is_full(self):
        reset_search_queries()
        self.assertEqual(len(connections['default'].queries), 0)
        self.assertEqual(self.msqs._cache_is_full(), False)
        results = self.msqs.all()
        # Exhausting the iterator fills the cache as a side effect.
        fire_the_iterator_and_fill_cache = [result for result in results]
        self.assertEqual(results._cache_is_full(), True)
        self.assertEqual(len(connections['default'].queries), 3)

    def test_all(self):
        sqs = self.msqs.all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))

    def test_filter(self):
        sqs = self.msqs.filter(content='foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_exclude(self):
        sqs = self.msqs.exclude(content='foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_order_by(self):
        sqs = self.msqs.order_by('foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertTrue('foo' in sqs.query.order_by)

    def test_models(self):
        # Stow.
        old_unified_index = connections['default']._index
        ui = UnifiedIndex()
        bmmsi = BasicMockModelSearchIndex()
        bammsi = BasicAnotherMockModelSearchIndex()
        ui.build(indexes=[bmmsi, bammsi])
        connections['default']._index = ui

        msqs = SearchQuerySet()

        sqs = msqs.all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 0)

        sqs = msqs.models(MockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)

        sqs = msqs.models(MockModel, AnotherMockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 2)

        # This will produce a warning.
        ui.build(indexes=[bmmsi])
        sqs = msqs.models(AnotherMockModel)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.models), 1)

    def test_result_class(self):
        sqs = self.msqs.all()
        self.assertTrue(issubclass(sqs.query.result_class, SearchResult))

        # Custom class.
        class IttyBittyResult(object):
            pass

        sqs = self.msqs.result_class(IttyBittyResult)
        self.assertTrue(issubclass(sqs.query.result_class, IttyBittyResult))

        # Reset to default.
        sqs = self.msqs.result_class(None)
        self.assertTrue(issubclass(sqs.query.result_class, SearchResult))

    def test_boost(self):
        sqs = self.msqs.boost('foo', 10)
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.boost.keys()), 1)

    def test_highlight(self):
        sqs = self.msqs.highlight()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.query.highlight, True)

    def test_spelling(self):
        # Test the case where spelling support is disabled.
        sqs = self.msqs.filter(content='Indx')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(sqs.spelling_suggestion(), None)
        self.assertEqual(sqs.spelling_suggestion('indexy'), None)

    def test_raw_search(self):
        self.assertEqual(len(self.msqs.raw_search('foo')), 23)
        self.assertEqual(
            len(
                self.msqs.raw_search(
                    '(content__exact:hello AND content__exact:world)')), 23)

    def test_load_all(self):
        # Models with character primary keys.
        sqs = SearchQuerySet()
        sqs.query.backend = CharPKMockSearchBackend('charpk')
        results = sqs.load_all().all()
        self.assertEqual(len(results._result_cache), 0)
        results._fill_cache(0, 2)
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 2)

        # If nothing is handled, you get nothing.
        old_ui = connections['default']._index
        ui = UnifiedIndex()
        ui.build(indexes=[])
        connections['default']._index = ui

        sqs = self.msqs.load_all()
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs), 0)

        connections['default']._index = old_ui

        # For full tests, see the solr_backend.

    def test_load_all_read_queryset(self):
        # Stow.
        old_ui = connections['default']._index
        ui = UnifiedIndex()
        gafmmsi = GhettoAFifthMockModelSearchIndex()
        ui.build(indexes=[gafmmsi])
        connections['default']._index = ui
        gafmmsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend('default')
        results._fill_cache(0, 2)

        # The deleted result isn't returned
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 1)

        # Register a SearchIndex with a read_queryset that returns deleted items
        rqstsi = TextReadQuerySetTestSearchIndex()
        ui.build(indexes=[rqstsi])
        rqstsi.update()

        sqs = SearchQuerySet()
        results = sqs.load_all().all()
        results.query.backend = ReadQuerySetMockSearchBackend('default')
        results._fill_cache(0, 2)

        # Both the deleted and not deleted items are returned
        self.assertEqual(
            len([
                result for result in results._result_cache
                if result is not None
            ]), 2)

        # Restore.
        connections['default']._index = old_ui

    def test_auto_query(self):
        # auto_query should pass the raw user input through as a single
        # content__contains filter, quotes and dashes included.
        sqs = self.msqs.auto_query('test search -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         '<SQ: AND content__contains=test search -stuff>')

        sqs = self.msqs.auto_query('test "my thing" search -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND content__contains=test "my thing" search -stuff>')

        sqs = self.msqs.auto_query(
            'test "my thing" search \'moar quotes\' -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND content__contains=test "my thing" search \'moar quotes\' -stuff>'
        )

        sqs = self.msqs.auto_query(
            'test "my thing" search \'moar quotes\' "foo -stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND content__contains=test "my thing" search \'moar quotes\' "foo -stuff>'
        )

        sqs = self.msqs.auto_query('test - stuff')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         "<SQ: AND content__contains=test - stuff>")

        # Ensure bits in exact matches get escaped properly as well.
        sqs = self.msqs.auto_query('"pants:rule"')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         '<SQ: AND content__contains="pants:rule">')

        # Now with a different fieldname
        sqs = self.msqs.auto_query('test search -stuff', fieldname='title')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(repr(sqs.query.query_filter),
                         "<SQ: AND title__contains=test search -stuff>")

        sqs = self.msqs.auto_query('test "my thing" search -stuff',
                                   fieldname='title')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(
            repr(sqs.query.query_filter),
            '<SQ: AND title__contains=test "my thing" search -stuff>')

    def test_count(self):
        self.assertEqual(self.msqs.count(), 23)

    def test_facet_counts(self):
        self.assertEqual(self.msqs.facet_counts(), {})

    def test_best_match(self):
        self.assertTrue(isinstance(self.msqs.best_match(), SearchResult))

    def test_latest(self):
        self.assertTrue(isinstance(self.msqs.latest('pub_date'), SearchResult))

    def test_more_like_this(self):
        mock = MockModel()
        mock.id = 1

        self.assertEqual(len(self.msqs.more_like_this(mock)), 23)

    def test_facets(self):
        sqs = self.msqs.facet('foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.facets), 1)

        sqs2 = self.msqs.facet('foo').facet('bar')
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.facets), 2)

    def test_date_facets(self):
        # An unknown gap_by must raise a FacetingError with the exact
        # message listed below.
        try:
            sqs = self.msqs.date_facet('foo',
                                       start_date=datetime.date(2008, 2, 25),
                                       end_date=datetime.date(2009, 2, 25),
                                       gap_by='smarblaph')
            self.fail()
        except FacetingError as e:
            self.assertEqual(
                str(e),
                "The gap_by ('smarblaph') must be one of the following: year, month, day, hour, minute, second."
            )

        sqs = self.msqs.date_facet('foo',
                                   start_date=datetime.date(2008, 2, 25),
                                   end_date=datetime.date(2009, 2, 25),
                                   gap_by='month')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.date_facets), 1)

        sqs2 = self.msqs.date_facet('foo',
                                    start_date=datetime.date(2008, 2, 25),
                                    end_date=datetime.date(2009, 2, 25),
                                    gap_by='month').date_facet(
                                        'bar',
                                        start_date=datetime.date(2007, 2, 25),
                                        end_date=datetime.date(2009, 2, 25),
                                        gap_by='year')
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.date_facets), 2)

    def test_query_facets(self):
        sqs = self.msqs.query_facet('foo', '[bar TO *]')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_facets), 1)

        sqs2 = self.msqs.query_facet('foo', '[bar TO *]').query_facet(
            'bar', '[100 TO 499]')
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.query_facets), 2)

        # Test multiple query facets on a single field
        sqs3 = self.msqs.query_facet('foo', '[bar TO *]').query_facet(
            'bar', '[100 TO 499]').query_facet('foo', '[1000 TO 1499]')
        self.assertTrue(isinstance(sqs3, SearchQuerySet))
        self.assertEqual(len(sqs3.query.query_facets), 3)

    def test_stats(self):
        sqs = self.msqs.stats_facet('foo', 'bar')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.stats), 1)

        # Re-faceting the same field merges into one stats entry.
        sqs2 = self.msqs.stats_facet('foo', 'bar').stats_facet('foo', 'baz')
        self.assertTrue(isinstance(sqs2, SearchQuerySet))
        self.assertEqual(len(sqs2.query.stats), 1)

        sqs3 = self.msqs.stats_facet('foo', 'bar').stats_facet('moof', 'baz')
        self.assertTrue(isinstance(sqs3, SearchQuerySet))
        self.assertEqual(len(sqs3.query.stats), 2)

    def test_narrow(self):
        sqs = self.msqs.narrow('foo:moof')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.narrow_queries), 1)

    def test_clone(self):
        # A clone shares the query definition but none of the cached state.
        results = self.msqs.filter(foo='bar', foo__lt='10')

        clone = results._clone()
        self.assertTrue(isinstance(clone, SearchQuerySet))
        self.assertEqual(str(clone.query), str(results.query))
        self.assertEqual(clone._result_cache, [])
        self.assertEqual(clone._result_count, None)
        self.assertEqual(clone._cache_full, False)
        self.assertEqual(clone._using, results._using)

    def test_using(self):
        sqs = SearchQuerySet(using='default')
        self.assertNotEqual(sqs.query, None)
        self.assertEqual(sqs.query._using, 'default')

    def test_chaining(self):
        sqs = self.msqs.filter(content='foo')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

        # A second instance should inherit none of the changes from above.
        sqs = self.msqs.filter(content='bar')
        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 1)

    def test_none(self):
        sqs = self.msqs.none()
        self.assertTrue(isinstance(sqs, EmptySearchQuerySet))
        self.assertEqual(len(sqs), 0)

    def test___and__(self):
        sqs1 = self.msqs.filter(content='foo')
        sqs2 = self.msqs.filter(content='bar')
        sqs = sqs1 & sqs2

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)

    def test___or__(self):
        sqs1 = self.msqs.filter(content='foo')
        sqs2 = self.msqs.filter(content='bar')
        sqs = sqs1 | sqs2

        self.assertTrue(isinstance(sqs, SearchQuerySet))
        self.assertEqual(len(sqs.query.query_filter), 2)

    def test_and_or(self):
        """
        Combining AND queries with OR should give
        AND(OR(a, b), OR(c, d))
        """
        sqs1 = self.msqs.filter(content='foo').filter(content='oof')
        sqs2 = self.msqs.filter(content='bar').filter(content='rab')
        sqs = sqs1 | sqs2

        self.assertEqual(sqs.query.query_filter.connector, 'OR')
        self.assertEqual(repr(sqs.query.query_filter.children[0]),
                         repr(sqs1.query.query_filter))
        self.assertEqual(repr(sqs.query.query_filter.children[1]),
                         repr(sqs2.query.query_filter))

    def test_or_and(self):
        """
        Combining OR queries with AND should give
        OR(AND(a, b), AND(c, d))
        """
        sqs1 = self.msqs.filter(content='foo').filter_or(content='oof')
        sqs2 = self.msqs.filter(content='bar').filter_or(content='rab')
        sqs = sqs1 & sqs2

        self.assertEqual(sqs.query.query_filter.connector, 'AND')
        self.assertEqual(repr(sqs.query.query_filter.children[0]),
                         repr(sqs1.query.query_filter))
        self.assertEqual(repr(sqs.query.query_filter.children[1]),
                         repr(sqs2.query.query_filter))
def search(self):
    """Build a SearchQuerySet narrowed by every facet selected on the form.

    Starts from either the full index (empty ``q``) or the standard haystack
    keyword search, orders by title, then applies content filters for each
    selected activity, transportation, grade, gender, fee range and timing.

    Fixes over the original: removes the debug ``print()`` calls that were
    left in this production search path, and replaces the O(n*m) nested
    membership loops with set lookups.
    """
    if not self.is_valid():
        return self.no_query_found()

    if self.cleaned_data["q"] == "":
        # No keyword entered: search over the whole index rather than
        # returning nothing, so facet filters still apply.
        sqs = SearchQuerySet()
    else:
        sqs = super(grasaSearchForm, self).search()

    sqs = sqs.order_by("title")

    def _narrow_by_choices(qs, choices, selected):
        # Apply a content filter for every known choice the user picked.
        # `choices` are (value, label) pairs; `selected` is cleaned_data.
        picked = set(selected)
        for choice in choices:
            if choice[0] in picked:
                qs = qs.filter(content=choice[0])
        return qs

    sqs = _narrow_by_choices(sqs, activityList, self.cleaned_data["activities"])
    sqs = _narrow_by_choices(sqs, transportationList,
                             self.cleaned_data["transportations"])
    sqs = _narrow_by_choices(sqs, gradesList, self.cleaned_data["grades"])
    sqs = _narrow_by_choices(sqs, genderList, self.cleaned_data["genders"])

    # Fee buckets need to be specially written since they aren't stored as
    # a range in the DB; each label maps to an explicit fee interval.
    for selectedFee in self.cleaned_data["fees"]:
        if selectedFee == "Free":
            sqs = sqs.filter(fees=0.00)
        elif selectedFee == "$1-$25":
            sqs = sqs.filter(fees__gte=0.01).filter(fees__lte=25.99)
        elif selectedFee == "$26-$50":
            sqs = sqs.filter(fees__gte=26.00).filter(fees__lte=50.99)
        elif selectedFee == "$51-$75":
            sqs = sqs.filter(fees__gte=51.00).filter(fees__lte=75.99)
        elif selectedFee == "$75+":
            sqs = sqs.filter(fees__gte=75.00)

    sqs = _narrow_by_choices(sqs, timingList, self.cleaned_data["timings"])

    return sqs
def search(request):
    """Forum search view dispatching on the ``action`` GET parameter.

    Supported actions: ``show_24h``, ``show_new``, ``show_unanswered``,
    ``show_subscriptions``, ``show_user`` and full-text ``search``. Results
    are shown either as posts or as topics depending on ``show_as``.
    Without an ``action`` parameter the empty search form is rendered.
    """
    # TODO: used forms in every search type

    def _render_search_form(form=None):
        # Shared helper for rendering the (possibly bound) search form.
        return render(request, 'djangobb_forum/search_form.html', {
            'categories': Category.objects.all(),
            'form': form,
        })

    if 'action' not in request.GET:
        return _render_search_form(form=PostSearchForm())

    if request.GET.get("show_as") == "posts":
        show_as_posts = True
        template_name = 'djangobb_forum/search_posts.html'
    else:
        show_as_posts = False
        template_name = 'djangobb_forum/search_topics.html'

    context = {}

    # Create 'user viewable' pre-filtered topics/posts querysets
    viewable_category = Category.objects.all()
    topics = Topic.objects.all().order_by("-last_post__created")
    posts = Post.objects.all().order_by('-created')

    user = request.user
    if not user.is_superuser:
        user_groups = user.groups.all() or [
        ]  # need 'or []' for anonymous user otherwise: 'EmptyManager' object is not iterable
        viewable_category = viewable_category.filter(
            Q(groups__in=user_groups) | Q(groups__isnull=True))

        topics = Topic.objects.filter(forum__category__in=viewable_category)
        posts = Post.objects.filter(
            topic__forum__category__in=viewable_category)

    base_url = None
    _generic_context = True

    action = request.GET['action']
    if action == 'show_24h':
        # Everything created or updated within the last day.
        date = datetime.now() - timedelta(days=1)
        if show_as_posts:
            context["posts"] = posts.filter(
                Q(created__gte=date) | Q(updated__gte=date))
        else:
            context["topics"] = topics.filter(
                Q(last_post__created__gte=date)
                | Q(last_post__updated__gte=date))
        _generic_context = False
    elif action == 'show_new':
        if not user.is_authenticated():
            raise Http404(
                "Search 'show_new' not available for anonymous user.")
        try:
            last_read = PostTracking.objects.get(user=user).last_read
        except PostTracking.DoesNotExist:
            last_read = None

        if last_read:
            if show_as_posts:
                context["posts"] = posts.filter(
                    Q(created__gte=last_read) | Q(updated__gte=last_read))
            else:
                context["topics"] = topics.filter(
                    Q(last_post__created__gte=last_read)
                    | Q(last_post__updated__gte=last_read))
            _generic_context = False
        else:
            # searching more than forum_settings.SEARCH_PAGE_SIZE in this way - not good idea :]
            topics = [
                topic for topic in topics[:forum_settings.SEARCH_PAGE_SIZE]
                if forum_extras.has_unreads(topic, user)
            ]
    elif action == 'show_unanswered':
        topics = topics.filter(post_count=1)
    elif action == 'show_subscriptions':
        topics = topics.filter(subscribers__id=user.id)
    elif action == 'show_user':
        # Show all posts from user or topics started by user
        if not user.is_authenticated():
            raise Http404(
                "Search 'show_user' not available for anonymous user.")

        if user.is_staff:
            # Staff may inspect another user's posts via ?user_id=.
            user_id = request.GET.get("user_id", user.id)
            user_id = int(user_id)
            if user_id != user.id:
                search_user = User.objects.get(id=user_id)
                messages.info(request,
                              "Filter by user '%s'." % search_user.username)
        else:
            user_id = user.id

        if show_as_posts:
            posts = posts.filter(user__id=user_id)
        else:
            # show as topic
            topics = topics.filter(posts__user__id=user_id).order_by(
                "-last_post__created").distinct()

        base_url = "?action=show_user&user_id=%s&show_as=" % user_id
    elif action == 'search':
        form = PostSearchForm(request.GET)
        if not form.is_valid():
            return _render_search_form(form)

        keywords = form.cleaned_data['keywords']
        author = form.cleaned_data['author']
        forum = form.cleaned_data['forum']
        search_in = form.cleaned_data['search_in']
        sort_by = form.cleaned_data['sort_by']
        sort_dir = form.cleaned_data['sort_dir']

        query = SearchQuerySet().models(Post)

        if author:
            query = query.filter(author__username=author)

        if forum != u'0':
            query = query.filter(forum__id=forum)

        if keywords:
            if search_in == 'all':
                query = query.filter(SQ(topic=keywords) | SQ(text=keywords))
            elif search_in == 'message':
                query = query.filter(text=keywords)
            elif search_in == 'topic':
                query = query.filter(topic=keywords)

        order = {
            '0': 'created',
            '1': 'author',
            '2': 'topic',
            '3': 'forum'
        }.get(sort_by, 'created')
        if sort_dir == 'DESC':
            order = '-' + order

        posts = query.order_by(order)

        if not show_as_posts:
            # TODO: We have here a problem to get a list of topics without double entries.
            # Maybe we must add a search index over topics?
            # Info: If whoosh backend used, setup HAYSTACK_ITERATOR_LOAD_PER_QUERY
            # to a higher number to speed up
            post_pks = posts.values_list("pk", flat=True)
            context["topics"] = topics.filter(posts__in=post_pks).distinct()
        else:
            # FIXME: How to use the pre-filtered query from above?
            posts = posts.filter(topic__forum__category__in=viewable_category)
            context["posts"] = posts

        get_query_dict = request.GET.copy()
        # BUG FIX: pop() without a default raises KeyError when 'show_as'
        # is absent from the query string (it is an optional parameter).
        get_query_dict.pop("show_as", None)
        base_url = "?%s&show_as=" % get_query_dict.urlencode()
        _generic_context = False

    if _generic_context:
        if show_as_posts:
            context["posts"] = posts.filter(
                topic__in=topics).order_by('-created')
        else:
            context["topics"] = topics

    if base_url is None:
        base_url = "?action=%s&show_as=" % action

    if show_as_posts:
        context["as_topic_url"] = base_url + "topics"
        post_count = context["posts"].count()
        messages.success(request, _("Found %i posts.") % post_count)
    else:
        context["as_post_url"] = base_url + "posts"
        topic_count = context["topics"].count()
        messages.success(request, _("Found %i topics.") % topic_count)

    return render(request, template_name, context)