def search(self):
    """Run the recipe search described by the validated form data.

    Applies, in order: the free-text query, an owner restriction (unless
    'all' was ticked), a serving-size filter, and an AND-combination of
    the whitespace-separated tags.  Returns a ``SearchQuerySet`` limited
    to ``Recipe`` results, or ``no_query_found()`` for an invalid form.
    """
    if not self.is_valid():
        return self.no_query_found()

    self.q = self.cleaned_data.get('q')
    # Keep the requesting user as an owner filter only when 'all' is not
    # ticked and a (truthy) user is available.
    self.user = self.user if (not self.cleaned_data.get('all') and self.user) else None
    self.order = self.cleaned_data.get('order')
    self.tags = [tag.strip()
                 for tag in self.cleaned_data.get('tags').split(' ')
                 if tag.strip()]
    try:
        self.ss = int(self.cleaned_data.get('ss'))
    except (TypeError, ValueError):
        # int(None) raises TypeError (field absent), int('junk') raises
        # ValueError — treat both as "no serving-size filter".
        self.ss = None

    if self.q:
        query = self.searchqueryset.auto_query(self.q)
    else:
        query = self.searchqueryset.all()

    if self.user:
        query = query.filter(added_by=self.user)
    if self.ss is not None:
        query = query.filter(serving_size=self.ss)

    # Only apply the tag clause when there actually are tags; filtering
    # with an empty SQ() adds a meaningless node to the query tree.
    if self.tags:
        sq = SQ()
        for tag in self.tags:
            sq.add(SQ(tags=tag), SQ.AND)
        query = query.filter(sq)

    if self.load_all:
        query = query.load_all()

    return self.order_by(query, self.order).models(Recipe)
def prepare_search_queryset(self):
    """Return the base ``SearchQuerySet``, OR-filtered by every filter
    type the form reports; unfiltered when the form reports none."""
    base = SearchQuerySet()
    or_filters = self.form.get_filter_types()
    if not or_filters:
        return base
    combined = SQ()
    for filter_kwargs in or_filters:
        combined.add(SQ(**filter_kwargs), SQ.OR)
    return base.filter(combined)
def get_queryset(self):
    """Return approved lawyers, filtered by the ``q`` / ``location``
    query-string parameters when present, ordered by descending fee
    packages."""
    logger.info('Using ElasticSearch')
    combined = SQ()
    for key, raw_value in self.request.GET.items():
        if key not in ('q', 'location') or not raw_value:
            continue
        term = Clean(urlparse.unquote(raw_value))
        combined.add(SQ(content=term), SQ.AND)
        combined.add(SQ(practice_locations=term), SQ.OR)
    return SearchQuerySet().filter(combined).order_by('-fee_packages')
def search(self):
    """Narrow the parent form's results by any selected compendium types
    and/or primary research fields, OR-ed together; pass results through
    untouched when neither facet list is populated."""
    results = super(ArticleFacetedSearchForm, self).search()
    if not (self.compendium_types or self.research_fields):
        # otherwise just pass through
        return results
    facet_clause = SQ()
    for ctype in self.compendium_types:
        facet_clause.add(SQ(compendium_type=ctype), SQ.OR)
    for field in self.research_fields:
        facet_clause.add(SQ(primary_research_field=field), SQ.OR)
    return results.filter(facet_clause)
def get_queryset(self, **kwargs):
    """Serialize the products matching the ``q`` GET parameter (matched
    against content or brand name); with no GET parameters at all,
    serialize every indexed product."""
    criteria = SQ()
    if self.request.GET.items():
        term = Clean(self.request.GET.get('q'))
        if term:
            criteria.add(SQ(content=term), SQ.OR)
            criteria.add(SQ(brand_name=term), SQ.OR)
        matches = SearchQuerySet().filter(criteria)
    else:
        matches = SearchQuerySet().all()
    hits = [result.object for result in matches]
    return ProductSerializer(hits, many=True).data
def search(self):
    """
    A blend of its super methods with only a different base
    `SearchQuerySet` in case of empty/invalid queries.

    Applies, in order: the user's free-text query (with embedded
    "special queries" resolved to Django ids), `load_all`, the selected
    facets, and a publication-status restriction for non-privileged users.
    """
    sqs = self.searchqueryset
    if self.is_valid() and self.cleaned_data.get('q'):
        # extract special queries
        special_queries, query = \
            _extract_special_queries(self.cleaned_data.get('q'))
        if query:
            sqs = sqs.auto_query(query)
        if (special_queries):
            # for each special query, get the Django internal resource ids
            # matching the query and filter the SearchQuerySet accordingly
            for _sq in special_queries:
                _res_ids = _process_special_query(_sq)
                if _res_ids:
                    # NOTE: `_sq` is deliberately rebound here — from the
                    # special-query token to an SQ() accumulator that ORs
                    # together the matching resource ids.
                    _sq = SQ()
                    for _id in _res_ids:
                        _sq.add(SQ(django_id=_id), SQ.OR)
                    sqs = sqs.filter(_sq)
                else:
                    # force empty search result if no ids are returned
                    # for a special query
                    sqs = sqs.none()
                    break
    if self.load_all:
        sqs = sqs.load_all()
    # we need to process each facet to ensure that the field name and the
    # value are quoted correctly and separately:
    for facet in [f for f in self.selected_facets if ":" in f]:
        field, value = facet.split(":", 1)
        # only add facets which are also in the search index
        # pylint: disable-msg=E1101
        if not field in resourceInfoType_modelIndex.fields:
            LOGGER.info('Ignoring unknown facet field "%s".', field)
            continue
        if value:
            sqs = sqs.narrow(u'%s:"%s"' % (field, sqs.query.clean(value)))
    # Users outside the 'ecmembers' group (and non-superusers) may only
    # see published resources.
    if not is_member(self.request.user, 'ecmembers') and not self.request.user.is_superuser:
        sqs = sqs.filter_and(publicationStatusFilter__exact='published')
    return sqs
def search(self):
    """Search on the ES index the query string provided by the user.

    Builds one OR-ed clause per privacy-controlled profile field that the
    requesting user's privacy level permits, plus a clause for visible
    groups, and filters the superclass's results with the combined query.
    """
    search_term = self.cleaned_data['q']
    # Calling super will handle with form validation and
    # will also search in fields that are not explicit queried through `text`
    sqs = super(PhonebookSearchForm, self).search()
    if not sqs:
        return self.no_query_found()

    # Profiles Search
    profile = self.request.user.userprofile
    all_indexed_fields = UserProfileIndex.fields.keys()
    privacy_indexed_fields = [field for field in all_indexed_fields
                              if field.startswith('privacy_')]
    query = SQ()
    q_args = {}

    # Every profile object in mozillians.org has privacy settings.
    # Let's take advantage of this and compare the indexed fields
    # with the ones listed in a profile in order to build the query to ES.
    for p_field in privacy_indexed_fields:
        # this is the field that we are going to query
        q_field = p_field.split('_', 1)[1]
        if hasattr(profile, q_field):
            # The user needs to have less or equal permission number with the queried field
            # (lower number, means greater permission level)
            q_args = {
                q_field: search_term,
                '{0}__gte'.format(p_field): profile.privacy_level
            }
            query.add(SQ(**q_args), SQ.OR)

    # Group Search
    # We need to exclude non visible groups.
    query.add(SQ(**{'visible': True}), SQ.OR)
    sqs = sqs.filter(query)
    return sqs
def search(sqs, query):
    """Filter ``sqs`` using a small hand-rolled query language.

    The query is split on the literal token ``OR`` into alternatives.
    Within each alternative, double-quoted phrases require an exact match,
    a ``-`` prefix on a keyword excludes it, and all terms are AND-ed.
    The alternatives themselves are OR-ed together.

    Fix over the original: phrase extraction previously enumerated offsets
    of the *original* group string while slicing/mutating a copy, so after
    the first quoted phrase was removed the offsets went stale and later
    phrases were extracted from the wrong positions.  Phrases are now
    collected first and removed afterwards.
    """
    or_terms = [part.strip() for part in query.split("OR")]
    final_query = SQ()

    for term_group in or_terms:
        if not term_group:
            continue
        current_query = SQ()

        # Collect anything wrapped in double quotes for exact matching.
        phrases = []
        open_quote_position = None
        for offset, char in enumerate(term_group):
            if char != '"':
                continue
            if open_quote_position is not None:
                phrases.append(term_group[open_quote_position + 1:offset])
                open_quote_position = None
            else:
                open_quote_position = offset

        # Add exact-match clauses and strip the quoted phrases from the
        # remaining text (first occurrence only, as before).
        non_exact_query = term_group
        for phrase in phrases:
            non_exact_query = non_exact_query.replace('"%s"' % phrase, '', 1)
            if phrase:
                current_query.add(
                    SQ(text__exact=sqs.query.clean(phrase)), SQ.AND)

        # Pseudo-tokenize the rest of the query and AND each keyword,
        # negating those prefixed with '-'.
        for keyword in non_exact_query.split():
            exclude = False
            if keyword.startswith('-') and len(keyword) > 1:
                keyword = keyword[1:]
                exclude = True
            cleaned_keyword = sqs.query.clean(keyword)
            clause = SQ(text=cleaned_keyword)
            if exclude:
                clause = ~clause
            current_query.add(clause, SQ.AND)

        final_query.add(current_query, SQ.OR)

    return sqs.filter(final_query)
def search(request):
    """Forum search view: dispatches on the ``action`` GET parameter to
    canned filters (last 24h, unread, unanswered, subscriptions, by user)
    or a full haystack search, rendering either a post list or topic list
    depending on ``show_as``."""
    # TODO: used forms in every search type

    def _render_search_form(form=None):
        return render(request, 'djangobb_forum/search_form.html', {
            'categories': Category.objects.all(),
            'form': form,
        })

    if not 'action' in request.GET:
        return _render_search_form(form=PostSearchForm())

    if request.GET.get("show_as") == "posts":
        show_as_posts = True
        template_name = 'djangobb_forum/search_posts.html'
    else:
        show_as_posts = False
        template_name = 'djangobb_forum/search_topics.html'

    context = {}

    # Create 'user viewable' pre-filtered topics/posts querysets
    viewable_category = Category.objects.all()
    topics = Topic.objects.all().order_by("-last_post__created")
    posts = Post.objects.all().order_by('-created')

    user = request.user
    if not user.is_superuser:
        # need 'or []' for anonymous user otherwise:
        # 'EmptyManager' object is not iterable
        user_groups = user.groups.all() or []
        viewable_category = viewable_category.filter(
            Q(groups__in=user_groups) | Q(groups__isnull=True))
        topics = Topic.objects.filter(forum__category__in=viewable_category)
        posts = Post.objects.filter(
            topic__forum__category__in=viewable_category)

    # `_generic_context` stays True for actions that only narrow the
    # `topics`/`posts` querysets and rely on the shared context build below.
    base_url = None
    _generic_context = True

    action = request.GET['action']
    if action == 'show_24h':
        date = datetime.now() - timedelta(days=1)
        if show_as_posts:
            context["posts"] = posts.filter(
                Q(created__gte=date) | Q(updated__gte=date))
        else:
            context["topics"] = topics.filter(
                Q(last_post__created__gte=date) | Q(last_post__updated__gte=date))
        _generic_context = False
    elif action == 'show_new':
        if not user.is_authenticated():
            raise Http404(
                "Search 'show_new' not available for anonymous user.")
        try:
            last_read = PostTracking.objects.get(user=user).last_read
        except PostTracking.DoesNotExist:
            last_read = None
        if last_read:
            if show_as_posts:
                context["posts"] = posts.filter(
                    Q(created__gte=last_read) | Q(updated__gte=last_read))
            else:
                context["topics"] = topics.filter(
                    Q(last_post__created__gte=last_read) | Q(last_post__updated__gte=last_read))
            _generic_context = False
        else:
            #searching more than forum_settings.SEARCH_PAGE_SIZE in this way - not good idea :]
            topics = [topic for topic in topics[:forum_settings.SEARCH_PAGE_SIZE]
                      if forum_extras.has_unreads(topic, user)]
    elif action == 'show_unanswered':
        topics = topics.filter(post_count=1)
    elif action == 'show_subscriptions':
        topics = topics.filter(subscribers__id=user.id)
    elif action == 'show_user':
        # Show all posts from user or topics started by user
        if not user.is_authenticated():
            raise Http404(
                "Search 'show_user' not available for anonymous user.")
        if user.is_staff:
            # Staff may inspect another user via ?user_id=…
            user_id = request.GET.get("user_id", user.id)
            user_id = int(user_id)
            if user_id != user.id:
                search_user = User.objects.get(id=user_id)
                messages.info(request, "Filter by user '%s'." % search_user.username)
        else:
            user_id = user.id
        if show_as_posts:
            posts = posts.filter(user__id=user_id)
        else:
            # show as topic
            topics = topics.filter(posts__user__id=user_id).order_by(
                "-last_post__created").distinct()
        base_url = "?action=show_user&user_id=%s&show_as=" % user_id
    elif action == 'search':
        form = PostSearchForm(request.GET)
        if not form.is_valid():
            return _render_search_form(form)

        keywords = form.cleaned_data['keywords']
        author = form.cleaned_data['author']
        forum = form.cleaned_data['forum']
        search_in = form.cleaned_data['search_in']
        sort_by = form.cleaned_data['sort_by']
        sort_dir = form.cleaned_data['sort_dir']

        query = SearchQuerySet().models(Post)

        if author:
            query = query.filter(author__username=author)

        # '0' is the "all forums" sentinel from the form.
        if forum != u'0':
            query = query.filter(forum__id=forum)

        if keywords:
            if search_in == 'all':
                query = query.filter(SQ(topic=keywords) | SQ(text=keywords))
            elif search_in == 'message':
                query = query.filter(text=keywords)
            elif search_in == 'topic':
                query = query.filter(topic=keywords)

        order = {'0': 'created',
                 '1': 'author',
                 '2': 'topic',
                 '3': 'forum'}.get(sort_by, 'created')
        if sort_dir == 'DESC':
            order = '-' + order
        posts = query.order_by(order)

        if not show_as_posts:
            # TODO: We have here a problem to get a list of topics
            # without double entries.
            # Maybe we must add a search index over topics?
            # Info: If whoosh backend used, setup HAYSTACK_ITERATOR_LOAD_PER_QUERY
            # to a higher number to speed up
            post_pks = posts.values_list("pk", flat=True)
            context["topics"] = topics.filter(posts__in=post_pks).distinct()
        else:
            # FIXME: How to use the pre-filtered query from above?
            posts = posts.filter(topic__forum__category__in=viewable_category)
            context["posts"] = posts

        get_query_dict = request.GET.copy()
        get_query_dict.pop("show_as")
        base_url = "?%s&show_as=" % get_query_dict.urlencode()
        _generic_context = False

    if _generic_context:
        if show_as_posts:
            context["posts"] = posts.filter(
                topic__in=topics).order_by('-created')
        else:
            context["topics"] = topics

    if base_url is None:
        base_url = "?action=%s&show_as=" % action

    if show_as_posts:
        context["as_topic_url"] = base_url + "topics"
        post_count = context["posts"].count()
        messages.success(request, _("Found %i posts.") % post_count)
    else:
        context["as_post_url"] = base_url + "posts"
        topic_count = context["topics"].count()
        messages.success(request, _("Found %i topics.") % topic_count)

    return render(request, template_name, context)
def test_build_query_with_endswith(self):
    """An ``endswith`` filter renders as a leading-wildcard term."""
    self.sq.add_filter(SQ(content="circular"))
    self.sq.add_filter(SQ(title__endswith="haystack"))
    expected = "((circular) AND title:(*haystack))"
    self.assertEqual(self.sq.build_query(), expected)
def test_get_spelling(self):
    """Spelling suggestion works with and without an explicit query."""
    self.sq.add_filter(SQ(content='Indexy'))
    suggestion = u'index'
    self.assertEqual(self.sq.get_spelling_suggestion(), suggestion)
    self.assertEqual(self.sq.get_spelling_suggestion('indexy'), suggestion)
def test_build_query_multiple_words_not(self):
    """Two negated filters are AND-ed, each wrapped in NOT."""
    for word in ("hello", "world"):
        self.sq.add_filter(~SQ(content=word))
    expected = "(NOT ((hello)) AND NOT ((world)))"
    self.assertEqual(self.sq.build_query(), expected)
def search(request):
    """Listing search view.

    Maps style aliases, combines free-text/tag terms into one OR-ed
    haystack query, applies budget/gender/location/rating filters, then —
    when a date (and optionally an hour) is requested — drops to raw SQL
    to exclude listings whose artist is booked, busy or off-shift.
    Renders the result page with the listings and a min/max price range.
    """
    # Map marketing style names onto the indexed tag vocabulary.
    styles = {
        "hairstyle": "hair",
        "nails-design": "nails",
        "make-up": "make up",
    }
    query = styles.get(request.GET.get("q", None), request.GET.get("q", None))
    lat = request.GET.get("lat", None)
    lng = request.GET.get("lng", None)
    budget = request.GET.get("budget", None)
    gender = request.GET.get("gender", None)
    rating = request.GET.get("rating", None)
    date = request.GET.get("date", None)
    hour = request.GET.get("hour", None)
    tags = request.GET.get("tags", None)
    sorted_by = request.GET.get("sorted_by", None)
    point = None
    start_price = 0
    end_price = 15000
    # point = Point(23.31326937672202, 42.68336526966131)
    type_of_order = ['listing_id', '-listing_id', '-likes', '-comments']
    currencies = dict(CURRENCY)

    # Build one OR clause per search term across tags/title/description.
    if query:
        query = query.split(" ")
        if tags:
            tags = tags.split(',')
            query = query + tags
        sq = SQ()
        for q in query:
            sq.add(SQ(tags__contains=q), SQ.OR)
            sq.add(SQ(title__contains=q), SQ.OR)
            sq.add(SQ(description__contains=q), SQ.OR)
    else:
        sq = SQ()

    # Budget arrives as "min-max".
    if budget:
        start_price = int(budget.split('-')[0])
        end_price = int(budget.split('-')[1])

    # Gender 0 and 2 are exact; anything else (incl. 1) means "any".
    if gender:
        gender = int(gender)
        if gender == 0:
            gender = [0]
        elif gender == 2:
            gender = [2]
        else:
            gender = [0, 1, 2]
    else:
        gender = [0, 1, 2]

    # The JS client may literally send the string 'undefined'.
    if lat and lng and lat != 'undefined' and lng != 'undefined':
        print(lat, lng)
        lat = float(lat)
        lng = float(lng)
        point = Point(lat, lng)

    # `date` is a unix timestamp; `hour` is a slot index, -1 meaning "any".
    if date and date.isdigit():
        date = int(date)
    else:
        date = None
    if not (hour is None or hour == "-1"):
        hour = int(hour)
    else:
        hour = -1
    if rating:
        rating = int(rating)
    else:
        rating = 0
    if sorted_by:
        sorted_by = type_of_order[int(sorted_by)]
    else:
        sorted_by = 'listing_id'

    # Derive the price slider bounds from the text-only match set.
    partial_query = SearchQuerySet().models(Listing).filter(sq)
    partial_query = [l.price for l in partial_query]
    if len(partial_query) >= 2:
        price_list = [min(partial_query), max(partial_query)]
    else:
        price_list = [0, 500]
    price_list = [min(price_list), max(price_list)]

    if point:
        _listings = SearchQuerySet().models(Listing).filter(sq).filter(
            gender__in=gender, price__gte=start_price,
            price__lte=end_price, status=1,
            rating__gte=rating).dwithin('location', point, D(km=1500000))
    else:
        _listings = SearchQuerySet().models(Listing).filter(sq).filter(
            gender__in=gender, price__gte=start_price,
            price__lte=end_price, status=1, rating__gte=rating)

    if date and not (hour is not None and hour == -1):
        '''
        hour in seconds is the time in utc seconds
        from the date to the required time
        '''
        work_days = [("mon_start", "mon_end"), ("tues_start", "tues_end"),
                     ("wed_start", "wed_end"), ("thurs_start", "thurs_end"),
                     ("fri_start", "fri_end"), ("sat_start", "sat_end"),
                     ("sun_start", "sun_end")]
        # get listings IDs which was already filtered and make in format (1,1,3,3,5)
        listings_ids = [l.listing_id for l in _listings]
        listings_ids = str(listings_ids)[1:-1] if listings_ids else 'NULL'
        # get the index of the day from the week
        week_days = datetime.datetime.fromtimestamp(date).strftime('%w')
        week_days = work_days[int(week_days)]
        # hour in seconds
        hour_in_seconds = 28800 + hour*1800
        # start range is a variable which will be used for the following
        # things: time in seconds from booking start
        start_range = date + hour_in_seconds
        # NOTE(review): ids are interpolated, not parameterized — they come
        # from the search index (ints), but worth confirming.
        query = '''SELECT DISTINCT listing.id, listing.title, listing.likes,
            listing.price, listing.artist_id, listing.comments,
            listing.currency, listing.picture_cover, artist.lat, artist.lng,
            artist.style, artist.avatar, artist_user.first_name as artist_name,
            salon.id as salon_id, salon.avatar as salon_avatar,
            salon_user.first_name as salon_name
            FROM listings_listing AS listing
            JOIN artists_artist AS artist ON artist.id = listing.artist_id
            JOIN artists_worktime AS worktime ON worktime.artist_id = artist.id
            LEFT JOIN booking_booking AS booking ON booking.artist_id = artist.id
            LEFT JOIN artists_busy AS busy ON busy.artist_id = artist.id
            LEFT JOIN salons_salon AS salon ON artist.salon_id = salon.id
            LEFT JOIN auth_user AS salon_user ON salon.user_id = salon_user.id
            LEFT JOIN auth_user AS artist_user ON artist.user_id = artist_user.id
            WHERE listing.id IN ({listings_ids})
            AND worktime.{first_week_day} <= {hour}
            AND worktime.{second_week_day} >= ({hour} + listing.duration/1800)
            AND (booking.start_time >= listing.duration + {start_range}
                 OR booking.end_time <= {start_range}
                 OR booking.start_time IS NULL)
            AND (busy.start_time >= listing.duration + {start_range}
                 OR busy.end_time <= {start_range}
                 OR busy.start_time IS NULL)'''.format(
            listings_ids=listings_ids, first_week_day=week_days[0],
            hour=hour, second_week_day=week_days[1], start_range=start_range)
        cursor = connection.cursor()
        cursor.execute(query)
        _listings = dictfetchall(cursor)
        listings = []
        listing = {}
        for l in _listings:
            listing = {
                "lat": l["lat"], "lng": l["lng"], "id": l["id"],
                "picture": l["picture_cover"],
                "style": STYLE_INDEXES[l["style"]][1],
                "title": l["title"], "likes": l["likes"],
                "price": l["price"],
                "currency": currencies[l["currency"]],
                "comments": l["comments"], "artist_id": l["artist_id"],
                "artist_name": l["artist_name"],
                # [7:] strips a URL/path prefix — TODO confirm against model
                "avatar": MEDIA_ROOT + l["avatar"][7:] if l["avatar"] else '',
                "salon_id": l["salon_id"], "salon_name": l["salon_name"],
                "salon_avatar": MEDIA_ROOT + l["salon_avatar"][7:] if l["salon_avatar"] else ''
            }
            listings.append(listing)
    elif date:
        # Date only (no hour): keep listings whose artist works that weekday.
        work_days = [("mon_start", "mon_end"), ("tues_start", "tues_end"),
                     ("wed_start", "wed_end"), ("thurs_start", "thurs_end"),
                     ("fri_start", "fri_end"), ("sat_start", "sat_end"),
                     ("sun_start", "sun_end")]
        listings_ids = [l.listing_id for l in _listings]
        listings_ids = str(listings_ids)[1:-1] if listings_ids else 'NULL'
        week_days = datetime.datetime.fromtimestamp(date).strftime('%w')
        week_days = work_days[int(week_days)]
        query = '''SELECT DISTINCT listing.id, listing.title, listing.likes,
            listing.price, listing.artist_id, listing.comments,
            listing.currency, listing.picture_cover, artist.lat, artist.lng,
            artist.style, artist.avatar, artist_user.first_name as artist_name,
            salon.id as salon_id, salon.avatar as salon_avatar,
            salon_user.first_name as salon_name
            FROM listings_listing AS listing
            JOIN artists_artist AS artist ON artist.id = listing.artist_id
            JOIN artists_worktime AS worktime ON worktime.artist_id = artist.id
            LEFT JOIN booking_booking AS booking ON booking.artist_id = artist.id
            LEFT JOIN artists_busy AS busy ON busy.artist_id = artist.id
            LEFT JOIN salons_salon AS salon ON artist.salon_id = salon.id
            LEFT JOIN auth_user AS salon_user ON salon.user_id = salon_user.id
            LEFT JOIN auth_user AS artist_user ON artist.user_id = artist_user.id
            WHERE listing.id IN ({listings_ids})
            AND NOT worktime.{week_day} = -1'''.format(
            listings_ids=listings_ids, week_day=week_days[0])
        cursor = connection.cursor()
        cursor.execute(query)
        _listings = dictfetchall(cursor)
        listings = []
        listing = {}
        for l in _listings:
            listing = {
                "lat": l["lat"], "lng": l["lng"], "id": l["id"],
                "picture": l["picture_cover"],
                "style": STYLE_INDEXES[l["style"]][1],
                "title": l["title"], "likes": l["likes"],
                "price": l["price"],
                "currency": currencies[l["currency"]],
                "comments": l["comments"], "artist_id": l["artist_id"],
                "artist_name": l["artist_name"],
                "avatar": MEDIA_ROOT + l["avatar"][7:] if l["avatar"] else '',
                "salon_id": l["salon_id"], "salon_name": l["salon_name"],
                "salon_avatar": MEDIA_ROOT + l["salon_avatar"][7:] if l["salon_avatar"] else ''
            }
            listings.append(listing)
    else:
        # No date filter: shape the haystack results directly.
        listings = []
        listing = {}
        for l in _listings:
            listing = {
                "lat": l.location.x, "lng": l.location.y,
                "id": l.listing_id, "picture": l.get_picture,
                "style": l.style, "title": l.title, "likes": l.likes,
                "price": l.price, "currency": currencies[l.currency],
                "comments": l.comments, "artist_id": l.artist_id,
                "artist_name": l.artist_name, "avatar": l.artist_avatar,
                "salon_id": l.salon_id, "salon_name": l.salon_name,
                "salon_avatar": l.salon_avatar,
            }
            listings.append(listing)

    # Attach each artist's average review rating to the listings.
    artists_ids = [l["artist_id"] for l in listings]
    artists_ratings = Review.objects.filter(
        artist_id__in=artists_ids).values("artist_id").annotate(
        average_rating=Avg('rating'))
    final_map = {}
    for e in artists_ratings:
        final_map[e["artist_id"]] = e["average_rating"]
    for l in listings:
        rating = final_map.get(l['artist_id'], None)
        l["artist_rating"] = rating

    return render(request, 'service/service.html',
                  {"listings": listings, "price_list": price_list})
def test_build_query_wildcard_filter_types(self):
    """A ``startswith`` filter renders as a trailing-wildcard term."""
    self.sq.add_filter(SQ(content="why"))
    self.sq.add_filter(SQ(title__startswith="haystack"))
    expected = "((why) AND title:(haystack*))"
    self.assertEqual(self.sq.build_query(), expected)
def test_in_filter_values_list(self):
    """An ``in`` filter over a plain list renders OR-ed quoted values."""
    self.sq.add_filter(SQ(content="why"))
    self.sq.add_filter(SQ(title__in=[1, 2, 3]))
    expected = '((why) AND title:("1" OR "2" OR "3"))'
    self.assertEqual(self.sq.build_query(), expected)
def test_query_generation(self):
    """OR-ing two AutoQuery clauses renders both sub-queries."""
    clause = (SQ(content=AutoQuery("hello world")) |
              SQ(title=AutoQuery("hello world")))
    sqs = self.sqs.filter(clause)
    expected = u"((hello world) OR title:(hello world))"
    self.assertEqual(sqs.query.build_query(), expected)
def handle(sender, receiver, message):
    """Handle one inbound SMS from ``sender`` and reply via ``receiver``.

    A small state machine keyed on the cache entry for ``sender``:
    'done' resets; QUERY_ACCOUNT handles account sign-up yes/no;
    'log:<name>' / 'find:<name>' await a one-letter pick from a previous
    ambiguous search.  Otherwise the message is parsed as either a
    "met <name>" log command or a "find <name>" lookup.
    """
    overall_start = time.time()
    flow_state = cache.get(sender)

    if flow_state:
        if message == 'done':
            # Explicit exit from any pending flow.
            cache.delete(sender)
            return nexmo_send(receiver, sender, help_message())
        if flow_state == QUERY_ACCOUNT:
            if message in ('yes', 'yep'):
                cache.set(sender, GET_EMAIL, CACHE_TIMEOUT)
                return nexmo_send(
                    receiver, sender,
                    "Ok, what's the email address on your account?")
            else:
                cache.delete(sender)
                return nexmo_send(
                    receiver, sender,
                    "Ok! Please go to https://www.contactotter.com to create an account."
                )

    user, book = get_user_objects_from_message(sender)
    if not user or not book:
        # Unknown number: start the account-query flow.
        cache.set(sender, QUERY_ACCOUNT, CACHE_TIMEOUT)
        return nexmo_send(
            receiver, sender,
            "Hmm... I can't find an account with this number. Do you have a ContactOtter account?"
        )

    if flow_state:
        if flow_state.startswith('log'):
            # Awaiting a one-letter choice for a pending "met <name>" log.
            name = ':'.join(flow_state.split(':')[1:])
            contacts = SearchQuerySet().filter(book=book.id).filter(
                SQ(name=AutoQuery(name)) | SQ(content=AutoQuery(name)))
            if len(message) == 1 and len(contacts) > 0:
                index = ascii_lowercase.index(message)
                contact = contacts[index].object
                cache.delete(sender)
                log_contact(contact, user)
                return nexmo_send(
                    receiver, sender,
                    "Updated {} ({})".format(contact.name, contact.get_complete_url()))
            cache.delete(sender)
            return nexmo_send(receiver, sender, "Sorry, I didn't understand that.")
        if flow_state.startswith('find'):
            # Awaiting a one-letter choice for a pending "find <name>".
            name = ':'.join(flow_state.split(':')[1:])
            contacts = SearchQuerySet().filter(book=book.id).filter(
                SQ(name=AutoQuery(name)) | SQ(content=AutoQuery(name)))
            if len(message) == 1 and len(contacts) > 0:
                index = ascii_lowercase.index(message)
                contact = contacts[index].object
                cache.delete(sender)
                return nexmo_send(receiver, sender, get_contact_string(contact))
            cache.delete(sender)
            return nexmo_send(receiver, sender, "Sorry, I didn't understand that.")

    tokens = message.split(' ')
    if len(tokens) < 2:
        return nexmo_send(receiver, sender, help_message())

    search_start = time.time()
    if tokens[0].lower() in MET_PREFIXES:
        # "met [with] <name>" — log an encounter, creating the contact
        # if exactly zero matches, disambiguating if more than one.
        if tokens[1].lower() == 'with':
            del tokens[1]
        name = ' '.join(tokens[1:])
        contacts = SearchQuerySet().filter(book=book.id).filter(
            SQ(name=AutoQuery(name)) | SQ(content=AutoQuery(name)))
        if len(contacts) > 1:
            cache.set(sender, "log:{}".format(name), CACHE_TIMEOUT)
            response_string = "Which {} did you mean?\n".format(name)
            response_string += get_string_from_search_contacts(contacts)
            response_string += "(DONE to exit)"
            return nexmo_send(receiver, sender, response_string)
        if len(contacts) == 1:
            contact = contacts[0].object
        else:
            contact = Contact.objects.create(
                book=book,
                name=name,
            )
        cache.delete(sender)
        log_contact(contact, user)
        return nexmo_send(
            receiver, sender,
            "Updated {} ({})".format(contact.name, contact.get_complete_url()))

    if tokens[0].lower() == 'find':
        # "find <name>" — look up and report matching contacts.
        name = ' '.join(tokens[1:])
        contacts = SearchQuerySet().filter(book=book.id).filter(
            SQ(name=AutoQuery(name)) | SQ(content=AutoQuery(name)))
        if len(contacts) == 0:
            return nexmo_send(receiver, sender,
                              "Hmm... I didn't find any contacts.")
        if len(contacts) == 1:
            return nexmo_send(receiver, sender,
                              get_contact_string(contacts[0].object))
        response_string = get_string_from_search_contacts(contacts)
        if len(contacts) > 3:
            response_string += "More: https://{}/search/?q={}".format(
                Site.objects.get_current().domain,
                name,
            )
        cache.set(sender, "find:{}".format(name), CACHE_TIMEOUT)
        return nexmo_send(
            receiver, sender,
            "Here's what I found for {}:\n{}".format(name, response_string))

    return nexmo_send(receiver, sender, help_message())
def test_correct_exact(self):
    """An Exact value is emitted as a quoted phrase."""
    self.sq.add_filter(SQ(content=Exact("hello world")))
    expected = '("hello world")'
    self.assertEqual(self.sq.build_query(), expected)
def test_get_spelling(self):
    """After indexing sample objects, a near-miss term gets corrected."""
    self.sb.update(self.wmmi, self.sample_objs)
    self.sq.add_filter(SQ(content='Indexe'))
    expected = u'indexed'
    self.assertEqual(self.sq.get_spelling_suggestion(), expected)
def test_build_query_boost(self):
    """A boosted term is appended with caret syntax."""
    self.sq.add_filter(SQ(content="hello"))
    self.sq.add_boost("world", 5)
    expected = "(hello) world^5"
    self.assertEqual(self.sq.build_query(), expected)
def test_build_query_multiple_words_mixed(self):
    """AND, OR and NOT filters combine with the expected grouping."""
    self.sq.add_filter(SQ(content="why"))
    self.sq.add_filter(SQ(content="hello"), use_or=True)
    self.sq.add_filter(~SQ(content="world"))
    expected = "(((why) OR (hello)) AND NOT ((world)))"
    self.assertEqual(self.sq.build_query(), expected)
def test_build_query_multiple_words_or(self):
    """A negated filter OR-ed with a plain one keeps both clauses."""
    self.sq.add_filter(~SQ(content="hello"))
    self.sq.add_filter(SQ(content="hello"), use_or=True)
    expected = "(NOT ((hello)) OR (hello))"
    self.assertEqual(self.sq.build_query(), expected)
def test_build_query_in_filter_datetime(self):
    """Datetimes inside an ``in`` filter are ISO-formatted and quoted."""
    self.sq.add_filter(SQ(content="why"))
    when = datetime.datetime(2009, 7, 6, 1, 56, 21)
    self.sq.add_filter(SQ(pub_date__in=[when]))
    expected = '((why) AND pub_date:("2009-07-06T01:56:21"))'
    self.assertEqual(self.sq.build_query(), expected)
def test_build_query_single_word(self):
    """A single content filter renders as one parenthesized term."""
    self.sq.add_filter(SQ(content="hello"))
    expected = "(hello)"
    self.assertEqual(self.sq.build_query(), expected)
def test_build_query_in_with_set(self):
    """Sets are unordered, so only assert that each value appears."""
    self.sq.add_filter(SQ(content="why"))
    self.sq.add_filter(SQ(title__in={"A Famous Paper", "An Infamous Article"}))
    for fragment in ('((why) AND title:(',
                     '"A Famous Paper"',
                     '"An Infamous Article"'):
        self.assertTrue(fragment in self.sq.build_query())
def test_regression_slash_search(self):
    """Slashes in content must be escaped in the built query."""
    self.sq.add_filter(SQ(content="hello/"))
    expected = "(hello\\/)"
    self.assertEqual(self.sq.build_query(), expected)
def test_build_query_fuzzy_filter_types(self):
    """A ``fuzzy`` filter renders with a trailing tilde."""
    self.sq.add_filter(SQ(content="why"))
    self.sq.add_filter(SQ(title__fuzzy="haystack"))
    expected = "((why) AND title:(haystack~))"
    self.assertEqual(self.sq.build_query(), expected)
def test_build_query_multiple_words_and(self):
    """Two plain filters default to an AND combination."""
    for word in ("hello", "world"):
        self.sq.add_filter(SQ(content=word))
    expected = "((hello) AND (world))"
    self.assertEqual(self.sq.build_query(), expected)
def test_narrow_sq(self):
    """Narrowing with an SQ records exactly one rendered narrow query."""
    narrowed = SearchQuerySet(using="elasticsearch").narrow(SQ(foo="moof"))
    self.assertTrue(isinstance(narrowed, SearchQuerySet))
    self.assertEqual(len(narrowed.query.narrow_queries), 1)
    self.assertEqual(narrowed.query.narrow_queries.pop(), "foo:(moof)")
def test_in_filter_values_list(self):
    """A values_list queryset in an ``in`` filter renders its ids quoted."""
    self.sq.add_filter(SQ(content="why"))
    ids = MockModel.objects.values_list("id", flat=True)
    self.sq.add_filter(SQ(title__in=ids))
    expected = '((why) AND title:("1" OR "2" OR "3"))'
    self.assertEqual(self.sq.build_query(), expected)
def test_build_query_boolean(self):
    """A boolean content filter is rendered via its repr."""
    self.sq.add_filter(SQ(content=True))
    expected = "(True)"
    self.assertEqual(self.sq.build_query(), expected)
def _tokens_to_sq(self, tokens):
    """
    Takes a list of tokens and returns a single SQ instance representing
    those tokens, or ``None`` if no valid tokens were supplied.

    A string token may be negated with a leading '-', may be a plain
    content term, or may be a '<keyword>:<value>' special filter
    (category/feed/search/tag/user/playlist); a list/tuple token is an
    OR-block whose members are recursively converted and OR-ed.
    Tokens whose keyword target cannot be resolved are skipped.
    """
    # NOTE(review): py2 idioms (basestring, list-returning filter,
    # builtin reduce) — this module targets Python 2.
    from localtv.search.utils import _exact_q
    sq_list = []
    for token in tokens:
        if isinstance(token, basestring):
            negated = False
            if token[0] == '-':
                negated = True
                token = token[1:]
            if ':' not in token:
                sq = SQ(content=token)
            else:
                # possibly a special keyword
                keyword, rest = token.split(':', 1)
                keyword = keyword.lower()
                if keyword == 'category':
                    category = self._get_object(Category, rest,
                                                'name', 'slug', 'pk')
                    if category is None:
                        continue
                    sq = _exact_q(self, 'categories', category.pk)
                elif keyword == 'feed':
                    feed = self._get_object(Feed, rest, 'name', 'pk')
                    if feed is None:
                        continue
                    sq = _exact_q(self, 'feed', feed.pk)
                elif keyword == 'search':
                    search = self._get_object(SavedSearch, rest,
                                              'query_string', 'pk')
                    if search is None:
                        continue
                    sq = _exact_q(self, 'search', search.pk)
                elif keyword == 'tag':
                    tag = self._get_object(Tag, rest, 'name')
                    if tag is None:
                        continue
                    sq = _exact_q(self, 'tags', tag.pk)
                elif keyword == 'user':
                    # A user matches both as owner and as author.
                    user = self._get_object(User, rest, 'username', 'pk')
                    if user is None:
                        continue
                    sq = (_exact_q(self, 'user', user.pk) |
                          _exact_q(self, 'authors', user.pk))
                elif keyword == 'playlist':
                    playlist = self._get_object(Playlist, rest, 'pk')
                    if playlist is None and '/' in rest:
                        # user/slug
                        user, slug = rest.split('/', 1)
                        try:
                            playlist = Playlist.objects.get(
                                user__username=user, slug=slug)
                        except Playlist.DoesNotExist:
                            pass
                    if playlist is None:
                        continue
                    sq = _exact_q(self, 'playlists', playlist.pk)
                else:
                    # Unrecognized keyword: treat the whole token as content.
                    sq = SQ(content=token)
            if negated:
                sq = ~sq
        elif isinstance(token, (list, tuple)):
            # or block
            or_sq_list = filter(None, (self._tokens_to_sq([or_token])
                                       for or_token in token))
            if not or_sq_list:
                continue
            sq = reduce(operator.or_, or_sq_list)
        else:
            raise ValueError("Invalid token: {0!r}".format(token))
        sq_list.append(sq)
    if not sq_list:
        return None
    return reduce(operator.and_, sq_list)
def test_build_query_datetime(self):
    """A bare datetime content filter renders in ISO format."""
    when = datetime.datetime(2009, 5, 8, 11, 28)
    self.sq.add_filter(SQ(content=when))
    expected = "(2009-05-08T11:28:00)"
    self.assertEqual(self.sq.build_query(), expected)
def search(self):
    """Build and return the paper search queryset from the cleaned form
    data: free-text query, visibility, availability/OA-status/date/doctype
    filters, author names (with ORCID resolution), and combined-status
    post-filtering, ordered by `sort_by` (default: newest first)."""
    self.queryset = self.searchqueryset.models(Paper)

    q = remove_diacritics(self.cleaned_data['q'])
    if q:
        self.queryset = self.queryset.auto_query(q)

    visible = self.cleaned_data['visible']
    if visible == '':
        self.filter(visible=True)
    elif visible == 'invisible':
        self.filter(visible=False)

    self.form_filter('availability', 'availability')
    self.form_filter('oa_status__in', 'oa_status')
    self.form_filter('pubdate__gte', 'pub_after')
    self.form_filter('pubdate__lte', 'pub_before')
    self.form_filter('doctype__in', 'doctypes')

    # Filter by authors.
    # authors field: a comma separated list of full/last names.
    # Items with no whitespace of prefixed with 'last:' are considered as
    # last names; others are full names.
    for name in self.cleaned_data['authors'].split(','):
        name = name.strip()

        # If part of this author name matches ORCID identifiers, consider
        # these as orcid ids and do the filtering
        orcid_ids = [x for x in name.split(' ') if validate_orcid(x)]
        for orcid_id in orcid_ids:
            try:
                researcher = Researcher.objects.get(orcid=orcid_id)
                self.filter(researchers=researcher.id)
            except Researcher.DoesNotExist:
                pass
            # NOTE(review): this `continue` is a no-op as the last statement
            # of the loop — possibly meant to skip the rest of the outer
            # per-name processing instead; confirm against history.
            continue

        # Rebuild a full name excluding the ORCID id terms
        name = ' '.join([x for x in name.split(' ') if x not in orcid_ids])
        name = remove_diacritics(name.strip())
        if name.startswith('last:'):
            is_lastname = True
            name = name[5:].strip()
        else:
            # A single bare word is treated as a last name.
            is_lastname = ' ' not in name
        if not name:
            continue
        if is_lastname:
            self.filter(authors_last=name)
        else:
            # Match the full name in either order, allowing one-word slop.
            reversed_name = ' '.join(reversed(name.split(' ')))
            sq = SQ()
            sq.add(SQ(authors_full=Sloppy(name, slop=1)), SQ.OR)
            sq.add(SQ(authors_full=Sloppy(reversed_name, slop=1)), SQ.OR)
            self.queryset = self.queryset.filter(sq)

    self.queryset = aggregate_combined_status(self.queryset)

    status = self.cleaned_data['status']
    if status:
        self.queryset = self.queryset.post_filter(
            combined_status__in=status)

    # Default ordering by decreasing publication date
    order = self.cleaned_data['sort_by'] or '-pubdate'
    self.queryset = self.queryset.order_by(order).load_all()

    return self.queryset
def test_build_query_with_datetime(self):
    """A non-content datetime filter uses the compact timestamp form."""
    when = datetime.datetime(2009, 5, 9, 16, 20)
    self.sq.add_filter(SQ(pub_date=when))
    expected = "pub_date:(20090509162000)"
    self.assertEqual(self.sq.build_query(), expected)
def search(request):
    """Listing search view.

    Filters Listings through Haystack using free-text words plus tags,
    gender, budget, rating and status; optionally narrows the result by
    artist availability on a given date/hour via raw SQL; then renders
    'service/service.html' with the listing payload and a price range.
    """
    query = request.GET.get("q", None)
    lat = request.GET.get("lat", None)
    lng = request.GET.get("lng", None)
    budget = request.GET.get("budget", None)
    gender = request.GET.get("gender", None)
    rating = request.GET.get("rating", None)
    date = request.GET.get("date", None)
    hour = request.GET.get("hour", None)
    tags = request.GET.get("tags", None)
    sorted_by = request.GET.get("sorted_by", None)
    start_price = 0
    end_price = 15000
    point = Point(23.31326937672202, 42.68336526966131)
    type_of_order = ['listing_id', '-listing_id', '-likes', '-comments']
    # NOTE(review): `lat`, `lng`, `point` and `sorted_by` are computed but
    # never used below (the dwithin call is commented out) — confirm.
    if query:
        # Each whitespace-separated query word (plus any comma-separated
        # tags) is OR-matched against tags, title and description.
        query = query.split(" ")
        if tags:
            tags = tags.split(',')
            query = query + tags
        sq = SQ()
        for q in query:
            sq.add(SQ(tags__contains=q), SQ.OR)
            sq.add(SQ(title__contains=q), SQ.OR)
            sq.add(SQ(description__contains=q), SQ.OR)
    else:
        sq = SQ()
    if budget:
        # budget arrives as "<min>-<max>".
        start_price = int(budget.split('-')[0])
        end_price = int(budget.split('-')[1])
    if gender:
        # Map the requested gender code onto the set of indexed codes;
        # anything other than 0 or 2 means "all".
        gender = int(gender)
        if gender == 0:
            gender = [0]
        elif gender == 2:
            gender = [1]
        else:
            gender = [0, 1, 2]
    else:
        gender = [0, 1, 2]
    # if lat and lng:
    #     lat = float(lat)
    #     lng = float(lng)
    #     point = Point(lat, lng)
    if date and date.isdigit():
        # `date` is a unix timestamp in seconds (consumed by
        # datetime.datetime.fromtimestamp below).
        date = int(date)
    else:
        date = None
    if not (hour is None or hour == "-1"):
        hour = int(hour)
    else:
        # -1 acts as the "no specific hour" sentinel.
        hour = -1
    if rating:
        rating = int(rating)
    else:
        rating = 0
    if sorted_by:
        sorted_by = type_of_order[int(sorted_by)]
    else:
        sorted_by = 'listing_id'
    # Price range of everything matching just the text query — feeds the
    # UI price slider; falls back to [0, 500] with fewer than 2 results.
    partial_query = SearchQuerySet().models(Listing).filter(sq)
    partial_query = [l.price for l in partial_query]
    if len(partial_query) >= 2:
        price_list = [min(partial_query), max(partial_query)]
    else:
        price_list = [0, 500]
    print(price_list)
    price_list = [min(price_list), max(price_list)]
    _listings = SearchQuerySet().models(Listing).filter(sq).filter(
        gender__in=gender, price__gte=start_price, price__lte=end_price,
        status=1, rating__gte=rating)
    print("numbers", len(_listings))
    # .dwithin('location', point, D(km=1500000))
    if date and not (hour is not None and hour == -1):
        # A specific date AND hour were requested: fall back to raw SQL to
        # intersect working hours with existing bookings and busy slots.
        # SECURITY NOTE(review): the SQL below is assembled with
        # str.format(); the interpolated values are request-derived ints,
        # but parameterized queries (cursor.execute(sql, params)) would be
        # safer and should replace this.
        ''' hour in seconds is the time in utc seconds from the date to the required time '''
        work_days = [("mon_start", "mon_end"), ("tues_start", "tues_end"),
                     ("wed_start", "wed_end"), ("thurs_start", "thurs_end"),
                     ("fri_start", "fri_end"), ("sat_start", "sat_end"),
                     ("sun_start", "sun_end")]
        # get listings IDs which was already filtered and make in format (1,1,3,3,5)
        listings_ids = [l.listing_id for l in _listings]
        listings_ids = str(listings_ids)[1:-1]
        # get the index of the day from the week
        week_days = datetime.datetime.fromtimestamp(date).strftime('%w')
        week_days = work_days[int(week_days)]
        # hour in seconds
        hour_in_seconds = 28800 + hour*1800
        # start range is a variable which will be used for the following things: time in seconds from booking start
        start_range = date + hour_in_seconds
        query = "SELECT DISTINCT listing.id, listing.title, listing.likes, listing.price, listing.artist_id, listing.comments, listing.picture_cover, artist.lat, artist.lng, artist.style, artist.avatar"
        query += " FROM listings_listing AS listing"
        query += " JOIN artists_artist AS artist ON artist.id = listing.artist_id"
        query += " JOIN artists_worktime AS worktime ON worktime.artist_id = artist.id"
        query += " LEFT JOIN booking_booking AS booking ON booking.artist_id = artist.id"
        query += " LEFT JOIN artists_busy AS busy ON busy.artist_id = artist.id"
        query += " WHERE listing.id IN ({0})".format(listings_ids)
        query += " AND worktime.{0} <= {1} AND worktime.{2} >= ({1} + listing.duration/1800)".format(week_days[0], hour, week_days[1])
        query += " AND (booking.start_time >= listing.duration + {0}".format(start_range)
        query += " OR booking.end_time <= {0} OR booking.start_time IS NULL)".format(start_range)
        query += " AND (busy.start_time >= listing.duration + {0}".format(start_range)
        query += " OR busy.end_time <= {0} OR busy.start_time IS NULL)".format(start_range)
        cursor = connection.cursor()
        cursor.execute(query)
        _listings = dictfetchall(cursor)
        listings = []
        listing = {}
        for l in _listings:
            # Shape each SQL row into the template payload dict.
            listing = {}
            listing["lat"] = l["lat"]
            listing["lng"] = l["lng"]
            listing["id"] = l["id"]
            listing["picture"] = l["picture_cover"]
            listing["style"] = STYLE_INDEXES[int(l["style"])-1][1]
            listing["title"] = l["title"]
            listing["likes"] = l["likes"]
            listing["price"] = int(l["price"])
            listing["comments"] = l["comments"]
            listing["artist_id"] = l["artist_id"]
            # Drops the first 7 chars of the stored avatar path before
            # prefixing MEDIA_ROOT — presumably a '/media/' prefix; confirm.
            listing["avatar"] = MEDIA_ROOT + l["avatar"][7:]
            listings.append(listing)
        print("1")
    elif date:
        # Only a date was given: keep listings whose artist works at all
        # on that weekday (start column != -1).
        work_days = [("mon_start", "mon_end"), ("tues_start", "tues_end"),
                     ("wed_start", "wed_end"), ("thurs_start", "thurs_end"),
                     ("fri_start", "fri_end"), ("sat_start", "sat_end"),
                     ("sun_start", "sun_end")]
        listings_ids = [l.listing_id for l in _listings]
        listings_ids = str(listings_ids)[1:-1]
        week_days = datetime.datetime.fromtimestamp(date).strftime('%w')
        week_days = work_days[int(week_days)]
        query = "SELECT DISTINCT listing.id, listing.title, listing.likes, listing.price, listing.artist_id, listing.comments, listing.picture_cover, artist.lat, artist.lng, artist.style, artist.avatar"
        query += " FROM listings_listing AS listing"
        query += " JOIN artists_artist AS artist ON artist.id = listing.artist_id"
        query += " JOIN artists_worktime AS worktime ON worktime.artist_id = artist.id"
        query += " LEFT JOIN booking_booking AS booking ON booking.artist_id = artist.id"
        query += " LEFT JOIN artists_busy AS busy ON busy.artist_id = artist.id"
        query += " WHERE listing.id IN ({0})".format(listings_ids)
        query += " AND NOT worktime.{0} = -1".format(week_days[0])
        cursor = connection.cursor()
        cursor.execute(query)
        _listings = dictfetchall(cursor)
        listings = []
        listing = {}
        for l in _listings:
            listing = {}
            listing["lat"] = l["lat"]
            listing["lng"] = l["lng"]
            listing["id"] = l["id"]
            listing["picture"] = l["picture_cover"]
            listing["style"] = STYLE_INDEXES[int(l["style"])-1][1]
            listing["title"] = l["title"]
            listing["likes"] = l["likes"]
            listing["price"] = int(l["price"])
            listing["comments"] = l["comments"]
            listing["artist_id"] = l["artist_id"]
            listing["avatar"] = MEDIA_ROOT + l["avatar"][7:]
            listings.append(listing)
        print("2")
    else:
        # No date filtering: build the payload straight from the Haystack
        # search results.
        listings = []
        listing = {}
        for l in _listings:
            listing = {}
            listing["lat"] = l.location.x
            listing["lng"] = l.location.y
            listing["id"] = l.listing_id
            listing["picture"] = l.get_picture
            listing["style"] = l.style
            listing["title"] = l.title
            listing["likes"] = l.likes
            listing["price"] = int(l.price)
            listing["comments"] = l.comments
            listing["artist_id"] = l.artist_id
            listing["avatar"] = l.artist_avatar
            listings.append(listing)
        print('3')
    return render(request, 'service/service.html',
                  {"listings": listings, "price_list": price_list})
def filter_annotations(request, sqs):
    """Narrow an annotation SearchQuerySet using request GET parameters.

    Supported parameters: category, a bounding box (three alternative
    spellings), model-instance relations (single or multiple), a datetime
    range, free text, and tags. Anonymous users always get an empty
    queryset.
    """
    # The current situation makes no sense: authenticated users can see fewer
    # annotations than anonymous users. Let's repair this by returning an
    # empty queryset if the current user has not logged in.
    if not request.user.is_authenticated():
        return sqs.none()
    # category
    category = request.GET.get('category')
    if category:
        sqs = sqs.filter(category__exact=category)
    # location: accept 'bbox', 'bottom_left'+'top_right', or the four
    # cardinal parameters; all reduce to two (lat, lon) corner points.
    bbox = request.GET.get('bbox')
    bottom_left = request.GET.get('bottom_left')
    top_right = request.GET.get('top_right')
    north = request.GET.get('north')
    east = request.GET.get('east')
    south = request.GET.get('south')
    west = request.GET.get('west')
    if bbox:
        if bbox == 'test':
            # Hard-coded corners used as a test box.
            bottom_left = '48.0', '4.0'
            top_right = '52.0', '10.0'
        else:
            # lon_min, lat_min, lon_max, lat_max
            # west, south, east, north
            x_min, y_min, x_max, y_max = bbox.split(',')
            bottom_left = y_min, x_min
            top_right = y_max, x_max
    elif bottom_left and top_right:
        bottom_left = bottom_left.split(',')
        top_right = top_right.split(',')
    elif north and east and south and west:
        bottom_left = south, west
        top_right = north, east
    else:
        bottom_left = None
        top_right = None
    if bottom_left and top_right:
        bottom_left = Point(float(bottom_left[0]), float(bottom_left[1]))
        top_right = Point(float(top_right[0]), float(top_right[1]))
        sqs = sqs.within('location', bottom_left, top_right)
    # As decided during the UAT on 2014-09-09: no more private annotations,
    # all annotations will be public. Hence, we don't need filtering.
    # Well, not entirely: public for authenticated users only.
    ### user
    ##username = request.user.username
    ### allow username overriding in DEBUG mode
    ### this is a possible security leak
    ##username_override = request.GET.get('username_override')
    ##if settings.DEBUG and username_override:
    ##    username = username_override
    ##sqs = sqs.filter(
    ##    # either private and linked to the current user
    ##    SQ(username__exact=username, visibility=Visibility.PRIVATE) |
    ##    # or public
    ##    SQ(visibility=Visibility.PUBLIC)
    ##)
    # relation to model instances
    the_model_name = request.GET.get('model_name')
    the_model_pk = request.GET.get('model_pk')
    if the_model_name and the_model_pk:
        # Single related instance.
        sqs = sqs.filter(
            the_model_name__exact=the_model_name,
            the_model_pk__exact=the_model_pk)
    else:
        # allow multiple models and pks, passed as
        # "name1,pk1;name2,pk2;..." and OR-ed together.
        model_names_pks = request.GET.get('model_names_pks')
        if model_names_pks:
            model_names_pks = model_names_pks.split(';')
            sq = SQ()
            for model_name_pk in model_names_pks:
                model_name, model_pk = model_name_pk.split(',')
                sq.add(
                    SQ(the_model_name__exact=model_name,
                       the_model_pk__exact=model_pk),
                    SQ.OR)
            sqs = sqs.filter(sq)
    # date range
    datetime_from = request.GET.get('datetime_from')
    if datetime_from:
        datetime_from = dateutil.parser.parse(datetime_from)
        sqs = sqs.filter(datetime_from__gte=datetime_from)
    datetime_until = request.GET.get('datetime_until')
    if datetime_until:
        datetime_until = dateutil.parser.parse(datetime_until)
        sqs = sqs.filter(datetime_until__lte=datetime_until)
    # full text
    text = request.GET.get('text')
    if text:
        sqs = sqs.filter(text__contains=text)
    tags = request.GET.get('tags')
    if tags:
        sqs = sqs.filter(tags__contains=tags)
    return sqs
def test_build_query_with_sequence_and_filter_not_in(self): self.sq.add_filter(SQ(id=[1, 2, 3])) self.assertEqual(self.sq.build_query(), "id:(1,2,3)")
def search(self):
    """Build the resource discovery queryset from this form's cleaned data.

    Pipeline: parse the advanced query syntax in 'q' (setting
    self.parse_error and returning an empty queryset on parser errors),
    apply the geographic bounding box, the date-overlap range, the
    coverage type, and finally each selected facet (creator, contributor,
    owner, subject, content type, availability) OR-ed within a facet and
    AND-ed across facets.
    """
    self.parse_error = None  # error return from parser
    sqs = self.searchqueryset.all().filter(replaced=False)
    if self.cleaned_data.get('q'):
        # The prior code corrected for an failed match of complete words, as documented
        # in issue #2308. This version instead uses an advanced query syntax in which
        # "word" indicates an exact match and the bare word indicates a stemmed match.
        cdata = self.cleaned_data.get('q')
        try:
            parser = ParseSQ()
            parsed = parser.parse(cdata)
            sqs = sqs.filter(parsed)
        except ValueError as e:
            sqs = self.searchqueryset.none()
            self.parse_error = "Value error: {}. No matches. Please try again".format(
                e.value)
            return sqs
        except MatchingBracketsNotFoundError as e:
            sqs = self.searchqueryset.none()
            self.parse_error = "{} No matches. Please try again.".format(
                e.value)
            return sqs
        except MalformedDateError as e:
            sqs = self.searchqueryset.none()
            self.parse_error = "{} No matches. Please try again.".format(
                e.value)
            return sqs
        except FieldNotRecognizedError as e:
            sqs = self.searchqueryset.none()
            self.parse_error = \
                ("{} Field delimiters include title, contributor, subject, etc. " +
                 "Please try again.")\
                .format(e.value)
            return sqs
        except InequalityNotAllowedError as e:
            sqs = self.searchqueryset.none()
            self.parse_error = "{} No matches. Please try again.".format(
                e.value)
            return sqs
    # Geographic filtering on the east/north coordinate fields.
    geo_sq = None
    if self.cleaned_data['NElng'] and self.cleaned_data['SWlng']:
        if float(self.cleaned_data['NElng']) > float(
                self.cleaned_data['SWlng']):
            # Normal box: SW longitude <= east <= NE longitude.
            geo_sq = SQ(east__lte=float(self.cleaned_data['NElng']))
            geo_sq.add(SQ(east__gte=float(self.cleaned_data['SWlng'])), SQ.AND)
        else:
            # NE < SW: the box wraps around the +/-180 meridian, so accept
            # [SW, 180] OR [-180, NE].
            geo_sq = SQ(east__gte=float(self.cleaned_data['SWlng']))
            geo_sq.add(SQ(east__lte=float(180)), SQ.OR)
            geo_sq.add(SQ(east__lte=float(self.cleaned_data['NElng'])), SQ.AND)
            geo_sq.add(SQ(east__gte=float(-180)), SQ.AND)
    if self.cleaned_data['NElat'] and self.cleaned_data['SWlat']:
        # latitude might be specified without longitude
        if geo_sq is None:
            geo_sq = SQ(north__lte=float(self.cleaned_data['NElat']))
        else:
            geo_sq.add(SQ(north__lte=float(self.cleaned_data['NElat'])),
                       SQ.AND)
        geo_sq.add(SQ(north__gte=float(self.cleaned_data['SWlat'])), SQ.AND)
    if geo_sq is not None:
        sqs = sqs.filter(geo_sq)
    # Check to see if a start_date was chosen.
    start_date = self.cleaned_data['start_date']
    end_date = self.cleaned_data['end_date']
    # allow overlapping ranges
    # cs < s < ce OR s < cs => s < ce
    # AND
    # cs < e < ce OR e > ce => cs < e
    if start_date and end_date:
        sqs = sqs.filter(
            SQ(end_date__gte=start_date) & SQ(start_date__lte=end_date))
    elif start_date:
        sqs = sqs.filter(SQ(end_date__gte=start_date))
    elif end_date:
        sqs = sqs.filter(SQ(start_date__lte=end_date))
    if self.cleaned_data['coverage_type']:
        sqs = sqs.filter(
            coverage_types__in=[self.cleaned_data['coverage_type']])
    # One accumulator SQ per facet family; values within a family are
    # OR-ed, the families themselves are AND-ed via separate .filter calls.
    creator_sq = None
    contributor_sq = None
    owner_sq = None
    subject_sq = None
    content_type_sq = None
    availability_sq = None
    # We need to process each facet to ensure that the field name and the
    # value are quoted correctly and separately:
    for facet in self.selected_facets:
        if ":" not in facet:
            continue
        field, value = facet.split(":", 1)
        value = sqs.query.clean(value)
        if value:
            # NOTE(review): the "creator" branch is an `if` while the rest
            # form an `if/elif` chain; a creator facet therefore also falls
            # through to the chain below (where it matches nothing and hits
            # `else: continue`) — harmless as written, but confirm intent.
            if "creator" in field:
                if creator_sq is None:
                    creator_sq = SQ(creator__exact=value)
                else:
                    creator_sq.add(SQ(creator__exact=value), SQ.OR)
            if "contributor" in field:
                if contributor_sq is None:
                    contributor_sq = SQ(contributor__exact=value)
                else:
                    contributor_sq.add(SQ(contributor__exact=value), SQ.OR)
            elif "owner" in field:
                if owner_sq is None:
                    owner_sq = SQ(owner__exact=value)
                else:
                    owner_sq.add(SQ(owner__exact=value), SQ.OR)
            elif "subject" in field:
                if subject_sq is None:
                    subject_sq = SQ(subject__exact=value)
                else:
                    subject_sq.add(SQ(subject__exact=value), SQ.OR)
            elif "content_type" in field:
                if content_type_sq is None:
                    content_type_sq = SQ(content_type__exact=value)
                else:
                    content_type_sq.add(SQ(content_type__exact=value), SQ.OR)
            elif "availability" in field:
                if availability_sq is None:
                    availability_sq = SQ(availability__exact=value)
                else:
                    availability_sq.add(SQ(availability__exact=value), SQ.OR)
            else:
                continue
    if creator_sq is not None:
        sqs = sqs.filter(creator_sq)
    if contributor_sq is not None:
        sqs = sqs.filter(contributor_sq)
    if owner_sq is not None:
        sqs = sqs.filter(owner_sq)
    if subject_sq is not None:
        sqs = sqs.filter(subject_sq)
    if content_type_sq is not None:
        sqs = sqs.filter(content_type_sq)
    if availability_sq is not None:
        sqs = sqs.filter(availability_sq)
    return sqs
def test_build_query_in_filter_datetime(self): self.sq.add_filter(SQ(content="why")) self.sq.add_filter( SQ(pub_date__in=[datetime.datetime(2009, 7, 6, 1, 56, 21)])) self.assertEqual(self.sq.build_query(), "((why) AND pub_date:(20090706015621))")
def search(self):
    """Search on the ES index the query sting provided by the user.

    Location-only searches short-circuit to a direct SearchQuerySet filter;
    otherwise the parent form search is narrowed to the models the caller
    may see, and an OR query is built across every privacy-guarded indexed
    field the caller's privacy level permits.
    """
    search_term = self.cleaned_data.get('q')
    profile = None
    location_query = {}
    # Collect location constraints; the privacy_* keys are placeholders
    # (None) that are filled in with the caller's privacy level below.
    if self.country:
        location_query['country'] = self.country
        location_query['privacy_country__gte'] = None
    if self.region:
        location_query['region'] = self.region
        location_query['privacy_region__gte'] = None
    if self.city:
        location_query['city'] = self.city
        location_query['privacy_city__gte'] = None
    try:
        profile = self.request.user.userprofile
    except AttributeError:
        # This is an AnonymousUser
        privacy_level = PUBLIC
    else:
        privacy_level = profile.privacy_level
    if profile and profile.is_vouched:
        # If this is empty, it will default to all models.
        search_models = self.get_models()
    else:
        # Anonymous and un-vouched users cannot search groups
        search_models = [UserProfile, IdpProfile]
    if location_query:
        # Location search bypasses the rest of the pipeline entirely.
        for k in location_query.keys():
            if k.startswith('privacy_'):
                location_query[k] = privacy_level
        return SearchQuerySet().filter(**location_query).load_all() or self.no_query_found()
    # Calling super will handle with form validation and
    # will also search in fields that are not explicit queried through `text`
    sqs = super(PhonebookSearchForm, self).search().models(*search_models)
    if not sqs:
        return self.no_query_found()
    query = SQ()
    q_args = {}
    # Profiles Search
    # NOTE: dict.keys() concatenation with `+` implies Python 2 semantics
    # (lists); under Python 3 this would raise — confirm target runtime.
    all_indexed_fields = UserProfileIndex.fields.keys() + IdpProfileIndex.fields.keys()
    privacy_indexed_fields = [field for field in all_indexed_fields
                              if field.startswith('privacy_')]
    # Every profile object in mozillians.org has privacy settings.
    # Let's take advantage of this and compare the indexed fields
    # with the ones listed in a profile in order to build the query to ES.
    for p_field in privacy_indexed_fields:
        # this is the field that we are going to query
        q_field = p_field.split('_', 1)[1]
        # The user needs to have less or equal permission number with the queried field
        # (lower number, means greater permission level)
        q_args = {
            q_field: search_term,
            '{0}__gte'.format(p_field): privacy_level
        }
        query.add(SQ(**q_args), SQ.OR)
    # Username is always public
    query.add(SQ(**{'username': search_term}), SQ.OR)
    # Group Search
    if not search_models or Group in search_models:
        # Filter only visible groups.
        query.add(SQ(**{'visible': True}), SQ.OR)
    return sqs.filter(query).load_all()
def search(self):
    """Search live job posts matching the submitted criteria.

    Both 'q' and 'location' are required for any filtering to happen:
    when either is missing (but the form is valid) the method returns a
    plain list [] — NOTE(review): that differs in type from the
    SearchQuerySet returned on the other paths; confirm callers handle it.
    Otherwise filters are applied for terms, location (including parent
    cities), job type, industry, functional area, experience, salary and
    walk-in date windows.
    """
    # sqs = SearchQuerySet().models(JobPost).filter(status='Live')
    sqs = SearchQuerySet()
    sqs = sqs.filter_and(status='Live')
    if not self.is_valid():
        return sqs
    if self.cleaned_data['q'] and self.cleaned_data['location']:
        # The q/location fields arrive as stringified lists; strip the
        # bracket/quote characters before splitting on commas.
        term = self.cleaned_data['q']
        term = term.replace('[', '')
        term = term.replace(']', '')
        term = term.replace("'", '')
        # sqs = sqs.filter_and(SQ(title=term) | SQ(designation=term)| SQ(skills=term))
        terms = [t.strip() for t in term.split(',')]
        sqs = sqs.filter_and(
            SQ(title__in=terms) |
            SQ(designation__in=terms) |
            SQ(skills__in=terms))
        # sqs = sqs.filter_or(SQ(designation__in=terms))
        # sqs = sqs.filter_or(SQ(skills__in=terms))
        location = self.cleaned_data['location']
        location = location.replace('[', '')
        location = location.replace(']', '')
        location = location.replace("'", '')
        locations = [t.strip() for t in location.split(',')]
        # Also match listings indexed under the parent city of any
        # requested city.
        other_cities = City.objects.filter(name__in=locations).values_list(
            'parent_city__name', flat=True)
        sqs = sqs.filter_and(
            SQ(location__in=locations) |
            SQ(location__startswith=self.cleaned_data['location']) |
            SQ(location__in=other_cities))
        if self.cleaned_data['job_type']:
            sqs = sqs.filter_and(job_type=self.cleaned_data['job_type'])
        if self.cleaned_data['industry']:
            term = self.cleaned_data['industry']
            # sqs = sqs.filter_and(SQ(title=term) | SQ(designation=term)| SQ(skills=term))
            terms = [t.strip() for t in term.split(',')]
            sqs = sqs.filter_or(industry__in=terms)
        if self.cleaned_data['functional_area']:
            term = self.cleaned_data['functional_area']
            # sqs = sqs.filter_and(SQ(title=term) | SQ(designation=term)| SQ(skills=term))
            terms = [t.strip() for t in term.split(',')]
            sqs = sqs.filter_or(functional_area__in=terms)
        # Experience 0 is a legitimate value, hence the explicit == 0 check.
        if self.cleaned_data['experience'] or self.cleaned_data[
                'experience'] == 0:
            sqs = sqs.filter_or(
                SQ(max_experience__gte=self.cleaned_data['experience']) &
                SQ(min_experience__lte=self.cleaned_data['experience']))
        if self.cleaned_data['salary']:
            sqs = sqs.filter_or(
                SQ(max_salary__gte=self.cleaned_data['salary']) &
                SQ(min_salary__lte=self.cleaned_data['salary']))
        if self.cleaned_data['walkin_type']:
            import datetime
            if self.cleaned_data['walkin_type'] == 'this_week':
                # Week window starting Sunday (weekday offset shifted by 1).
                date = datetime.date.today()
                start_week = date - \
                    datetime.timedelta(date.weekday()) - datetime.timedelta(1)
                end_week = start_week + datetime.timedelta(6)
                start_week = datetime.datetime.strptime(
                    str(start_week), "%Y-%m-%d").strftime("%Y-%m-%d")
                end_week = datetime.datetime.strptime(
                    str(end_week), "%Y-%m-%d").strftime("%Y-%m-%d")
                sqs = sqs.filter_and(
                    SQ(walkin_from_date__range=[start_week, end_week]) |
                    SQ(walkin_to_date__range=[start_week, end_week]))
            if self.cleaned_data['walkin_type'] == 'next_week':
                date = datetime.date.today()
                start_week = date - \
                    datetime.timedelta(
                        date.isoweekday()) + datetime.timedelta(7)
                end_week = start_week + datetime.timedelta(6)
                start_week = datetime.datetime.strptime(
                    str(start_week), "%Y-%m-%d").strftime("%Y-%m-%d")
                end_week = datetime.datetime.strptime(
                    str(end_week), "%Y-%m-%d").strftime("%Y-%m-%d")
                sqs = sqs.filter_and(
                    SQ(walkin_from_date__range=[start_week, end_week]) |
                    SQ(walkin_to_date__range=[start_week, end_week]))
                # sqs = sqs.filter_and(SQ(walkin_from_date__range=[start_week, end_week]) | SQ(walkin_to_date__range=[start_week, end_week]))
            if self.cleaned_data['walkin_type'] == 'this_month':
                # First through last day of the current month.
                current_date = datetime.date.today()
                from dateutil.relativedelta import relativedelta
                from datetime import date
                start_week = date(current_date.year, current_date.month, 1)
                end_week = start_week + relativedelta(day=31)
                start_week = datetime.datetime.strptime(
                    str(start_week), "%Y-%m-%d").strftime("%Y-%m-%d")
                end_week = datetime.datetime.strptime(
                    str(end_week), "%Y-%m-%d").strftime("%Y-%m-%d")
                sqs = sqs.filter_and(
                    SQ(walkin_from_date__range=[start_week, end_week]) |
                    SQ(walkin_to_date__range=[start_week, end_week]))
            # if self.cleaned_data['walkin_type'] == 'next_month':
            #     pass
            if self.cleaned_data['walkin_type'] == 'custom_range':
                if self.cleaned_data['walkin_from_date']:
                    walkin_from_date = datetime.datetime.strptime(
                        str(self.cleaned_data['walkin_from_date']),
                        "%Y-%m-%d").strftime("%Y-%m-%d")
                    sqs = sqs.filter_and(
                        SQ(walkin_from_date__gte=walkin_from_date) |
                        SQ(walkin_to_date__gte=walkin_from_date))
                if self.cleaned_data['walkin_to_date']:
                    walkin_to_date = datetime.datetime.strptime(
                        str(self.cleaned_data['walkin_to_date']),
                        "%Y-%m-%d").strftime("%Y-%m-%d")
                    sqs = sqs.filter_and(
                        SQ(walkin_from_date__gte=walkin_to_date) |
                        SQ(walkin_to_date__lte=walkin_to_date))
        return sqs
    else:
        return []
def build_haystack_filters(self, parameters):
    """Build a Haystack SearchQuerySet from API request parameters.

    Supports free-text `q` (quoted exact phrase, or word-by-word AND/OR
    logic), type/subtype facets, category, keyword, region and owner
    filters, a publication date range, a bounding-box `extent`, and an
    `order_by` sort. Always returns a SearchQuerySet: the sort step at
    the end creates one even if no filter did.
    """
    from haystack.inputs import Raw
    from haystack.query import SearchQuerySet, SQ  # noqa

    # sqs stays None until the first filter needs a queryset; each step
    # uses the "(SearchQuerySet() if sqs is None else sqs)" idiom.
    sqs = None

    # Retrieve Query Params

    # Text search
    query = parameters.get('q', None)

    # Types and subtypes to filter (map, layer, vector, etc)
    type_facets = parameters.getlist("type__in", [])

    # If coming from explore page, add type filter from resource_name
    resource_filter = self._meta.resource_name.rstrip("s")
    if resource_filter != "base" and resource_filter not in type_facets:
        type_facets.append(resource_filter)

    # Publication date range (start,end)
    date_end = parameters.get("date__lte", None)
    date_start = parameters.get("date__gte", None)

    # Topic category filter
    category = parameters.getlist("category__identifier__in")

    # Keyword filter
    keywords = parameters.getlist("keywords__slug__in")

    # Region filter
    regions = parameters.getlist("regions__name__in")

    # Owner filters
    owner = parameters.getlist("owner__username__in")

    # Sort order
    sort = parameters.get("order_by", "relevance")

    # Geospatial Elements
    bbox = parameters.get("extent", None)

    # Filter by Type and subtype
    if type_facets is not None:
        types = []
        subtypes = []
        for type in type_facets:
            if type in {"map", "layer", "document", "user"}:
                # Type is one of our Major Types (not a sub type)
                types.append(type)
            elif type in LAYER_SUBTYPES.keys():
                subtypes.append(type)
        # Requesting 'vector' implicitly includes time-enabled vectors.
        if 'vector' in subtypes and 'vector_time' not in subtypes:
            subtypes.append('vector_time')
        if len(subtypes) > 0:
            # Any subtype implies the 'layer' major type.
            types.append("layer")
            sqs = SearchQuerySet().narrow("subtype:%s" % ','.join(map(str, subtypes)))
        if len(types) > 0:
            sqs = (SearchQuerySet() if sqs is None else sqs).narrow(
                "type:%s" % ','.join(map(str, types)))

    # Filter by Query Params
    # haystack bug? if boosted fields aren't included in the
    # query, then the score won't be affected by the boost
    if query:
        if query.startswith('"') or query.startswith('\''):
            # Match exact phrase
            phrase = query.replace('"', '')
            sqs = (SearchQuerySet() if sqs is None else sqs).filter(
                SQ(title__exact=phrase) |
                SQ(description__exact=phrase) |
                SQ(content__exact=phrase))
        else:
            # Split on non-word characters; apply AND by default, OR when
            # the previous token was the literal word "OR".
            words = [
                w for w in re.split(r'\W', query, flags=re.UNICODE) if w
            ]
            for i, search_word in enumerate(words):
                if i == 0:
                    sqs = (SearchQuerySet() if sqs is None else sqs) \
                        .filter(
                        SQ(title=Raw(search_word)) |
                        SQ(description=Raw(search_word)) |
                        SQ(content=Raw(search_word))
                    )
                elif search_word in {"AND", "OR"}:
                    pass
                elif words[i - 1] == "OR":
                    # previous word OR this word
                    sqs = sqs.filter_or(
                        SQ(title=Raw(search_word)) |
                        SQ(description=Raw(search_word)) |
                        SQ(content=Raw(search_word)))
                else:
                    # previous word AND this word
                    sqs = sqs.filter(
                        SQ(title=Raw(search_word)) |
                        SQ(description=Raw(search_word)) |
                        SQ(content=Raw(search_word)))

    # filter by category
    if category:
        sqs = (SearchQuerySet() if sqs is None else sqs).narrow(
            'category:%s' % ','.join(map(str, category)))

    # filter by keyword: use filter_or with keywords_exact
    # not using exact leads to fuzzy matching and too many results
    # using narrow with exact leads to zero results if multiple keywords
    # selected
    if keywords:
        for keyword in keywords:
            sqs = (SearchQuerySet() if sqs is None else sqs).filter_or(
                keywords_exact=keyword)

    # filter by regions: use filter_or with regions_exact
    # not using exact leads to fuzzy matching and too many results
    # using narrow with exact leads to zero results if multiple keywords
    # selected
    if regions:
        for region in regions:
            sqs = (SearchQuerySet() if sqs is None else sqs).filter_or(
                regions_exact__exact=region)

    # filter by owner
    if owner:
        sqs = (SearchQuerySet() if sqs is None else sqs).narrow(
            "owner__username:%s" % ','.join(map(str, owner)))

    # filter by date
    if date_start:
        sqs = (SearchQuerySet() if sqs is None else sqs).filter(
            SQ(date__gte=date_start))
    if date_end:
        sqs = (SearchQuerySet() if sqs is None else sqs).filter(
            SQ(date__lte=date_end))

    # Filter by geographic bounding box: exclude anything entirely
    # outside the requested extent.
    if bbox:
        left, bottom, right, top = bbox.split(',')
        sqs = (SearchQuerySet() if sqs is None else sqs).exclude(
            SQ(bbox_top__lte=bottom) |
            SQ(bbox_bottom__gte=top) |
            SQ(bbox_left__gte=right) |
            SQ(bbox_right__lte=left))

    # Apply sort (unknown values fall back to newest-first).
    if sort.lower() == "-date":
        sqs = (SearchQuerySet() if sqs is None else sqs).order_by("-date")
    elif sort.lower() == "date":
        sqs = (SearchQuerySet() if sqs is None else sqs).order_by("date")
    elif sort.lower() == "title":
        sqs = (SearchQuerySet() if sqs is None else sqs).order_by("title_sortable")
    elif sort.lower() == "-title":
        sqs = (SearchQuerySet() if sqs is None else sqs).order_by("-title_sortable")
    elif sort.lower() == "-popular_count":
        sqs = (SearchQuerySet() if sqs is None else sqs).order_by("-popular_count")
    else:
        sqs = (SearchQuerySet() if sqs is None else sqs).order_by("-date")

    return sqs
def _get_se_objects(self, word_list, operator=SQ.AND): sq = SQ() for word in word_list: sq.add(SQ(content=word), operator) sqs = SearchQuerySet().filter(sq) return sqs