def get_object_list(request=None, **kwargs):
    """Perform the Solr work for the search API.

    Builds the Solr query from the cleaned form data in ``kwargs['cd']``;
    when that key is missing, falls back to a match-all (``q=*``) query.

    :param request: The HTTP request, used to read the page number.
    :param kwargs: Must contain ``paginator``; may contain ``cd`` with the
        cleaned search form data.
    :return: A SolrList wrapping the (not yet executed) query.
    """
    # Set the offset value. With page_size = 20: 1 --> 0, 2 --> 20, 3 --> 40.
    paginator = kwargs['paginator']
    page_number = int(request.GET.get(paginator.page_query_param, 1))
    # Bug fix: the old formula subtracted one ((page - 1) * size - 1), which
    # made every page after the first start one item early, duplicating the
    # last item of the previous page.
    offset = max(0, (page_number - 1) * paginator.page_size)
    limit = 20
    main_query = {'caller': 'api_search'}
    try:
        main_query.update(search_utils.build_main_query(
            kwargs['cd'],
            highlight='text',
            facet=False,
        ))
        sl = SolrList(
            main_query=main_query,
            offset=offset,
            limit=limit,
            type=kwargs['cd']['type'],
        )
    except KeyError:
        # No cleaned data provided; run a match-all query instead.
        sf = forms.SearchForm({'q': '*'})
        if sf.is_valid():
            main_query.update(search_utils.build_main_query(
                sf.cleaned_data,
                highlight='text',
                facet=False,
            ))
        sl = SolrList(
            main_query=main_query,
            offset=offset,
            limit=limit,
        )
    return sl
def get_object_list(request=None, **kwargs):
    """Perform the Solr work for the search API.

    :param request: The HTTP request, used to read the page number.
    :param kwargs: Must contain ``paginator``; may contain ``cd`` with the
        cleaned search form data. When ``cd`` is missing, a match-all query
        is used instead.
    :return: A SolrList wrapping the (not yet executed) query.
    """
    # Set the offset value. With page_size = 20: 1 --> 0, 2 --> 20, 3 --> 40.
    paginator = kwargs['paginator']
    page_number = int(request.GET.get(paginator.page_query_param, 1))
    # Bug fix: the old formula subtracted one ((page - 1) * size - 1), which
    # made every page after the first start one item early, duplicating the
    # last item of the previous page.
    offset = max(0, (page_number - 1) * paginator.page_size)
    limit = 20
    main_query = {'caller': 'api_search'}
    try:
        main_query.update(search_utils.build_main_query(
            kwargs['cd'],
            highlight='text'
        ))
        sl = SolrList(
            main_query=main_query,
            offset=offset,
            limit=limit,
            type=kwargs['cd']['type'],
        )
    except KeyError:
        # No cleaned data provided; run a match-all query instead.
        sf = forms.SearchForm({'q': '*'})
        if sf.is_valid():
            main_query.update(search_utils.build_main_query(
                sf.cleaned_data,
                highlight='text',
            ))
        sl = SolrList(
            main_query=main_query,
            offset=offset,
            limit=limit,
        )
    return sl
def do_search(request, rows=20, order_by=None, type=None, facet=True):
    """Run the search described by request.GET and paginate the results.

    :param request: The HTTP request carrying the GET parameters.
    :param rows: Results per page (RECAP searches are forced to 10).
    :param order_by: Optional override of the form's order_by value.
    :param type: Optional override of the form's search type.
    :param facet: Whether to request facets from Solr.
    """
    query_citation = None
    error = False
    paged_results = None
    search_form = SearchForm(request.GET)
    courts = Court.objects.filter(in_use=True)
    if search_form.is_valid():
        cd = search_form.cleaned_data
        # Allows an override by calling methods.
        if order_by is not None:
            cd['order_by'] = order_by
        if type is not None:
            cd['type'] = type
        search_form = _clean_form(request, cd, courts)
        # Pick the Solr core matching the search type.
        if cd['type'] == 'o':
            si = ExtraSolrInterface(settings.SOLR_OPINION_URL, mode='r')
            results = si.query().add_extra(**build_main_query(cd, facet=facet))
            query_citation = get_query_citation(cd)
        elif cd['type'] == 'r':
            si = ExtraSolrInterface(settings.SOLR_RECAP_URL, mode='r')
            results = si.query().add_extra(**build_main_query(cd, facet=facet))
        elif cd['type'] == 'oa':
            si = ExtraSolrInterface(settings.SOLR_AUDIO_URL, mode='r')
            results = si.query().add_extra(**build_main_query(cd, facet=facet))
        elif cd['type'] == 'p':
            si = ExtraSolrInterface(settings.SOLR_PEOPLE_URL, mode='r')
            results = si.query().add_extra(**build_main_query(cd, facet=facet))
        # Set up pagination
        try:
            if cd['type'] == 'r':
                rows = 10
            paginator = Paginator(results, rows)
            page = request.GET.get('page', 1)
            try:
                paged_results = paginator.page(page)
            except PageNotAnInteger:
                paged_results = paginator.page(1)
            except EmptyPage:
                # Page is out of range (e.g. 9999), deliver last page.
                paged_results = paginator.page(paginator.num_pages)
        # Bug fix: `except Exception, e` is Python-2-only syntax and a
        # SyntaxError on Python 3; use the `as` form, which both support.
        except Exception as e:
            # Catches any Solr errors, and aborts.
            logger.warning("Error loading pagination on search page with "
                           "request: %s" % request.GET)
            logger.warning("Error was: %s" % e)
            if settings.DEBUG is True:
                traceback.print_exc()
            error = True
        # Post processing of the results
        regroup_snippets(paged_results)
def items(self, obj):
    """Do a Solr query here. Return the first 20 results"""
    form = SearchForm(obj.GET)
    if not form.is_valid():
        return []
    cd = form.cleaned_data
    sort_field = "dateFiled"
    # Only opinion and RECAP searches have a feed; anything else is empty.
    index_urls = {
        SEARCH_TYPES.OPINION: settings.SOLR_OPINION_URL,
        SEARCH_TYPES.RECAP: settings.SOLR_RECAP_URL,
    }
    index_url = index_urls.get(cd["type"])
    if index_url is None:
        return []
    solr = ExtraSolrInterface(index_url, mode="r")
    params = search_utils.build_main_query(cd, highlight=False, facet=False)
    params["sort"] = "%s desc" % sort_field
    params["rows"] = "20"
    params["start"] = "0"
    params["caller"] = "SearchFeed"
    # Eliminate items that lack the ordering field.
    params["fq"].append("%s:[* TO *]" % sort_field)
    feed_items = solr.query().add_extra(**params).execute()
    solr.conn.http_connection.close()
    return feed_items
def items(self, obj):
    """Do a Solr query here. Return the first 20 results"""
    form = SearchForm(obj.GET)
    if not form.is_valid():
        return []
    cd = form.cleaned_data
    sort_field = 'dateFiled'
    # Only opinion ('o') and RECAP ('r') searches feed this view.
    if cd['type'] == 'o':
        solr = ExtraSolrInterface(settings.SOLR_OPINION_URL, mode='r')
    elif cd['type'] == 'r':
        solr = ExtraSolrInterface(settings.SOLR_RECAP_URL, mode='r')
    else:
        return []
    params = search_utils.build_main_query(cd, highlight=False, facet=False)
    params['sort'] = '%s desc' % sort_field
    params['rows'] = '20'
    params['start'] = '0'
    params['caller'] = 'SearchFeed'
    # Eliminate items that lack the ordering field.
    params['fq'].append('%s:[* TO *]' % sort_field)
    return solr.query().add_extra(**params).execute()
def run_query(self, alert, rate):
    """Run a single alert's saved query against Solr.

    :param alert: The alert whose stored query string should be executed.
    :param rate: The alert rate being processed ('rt' is real-time).
    :return: A three-tuple of (error flag, search type or None, results).
    """
    results = []
    error = False
    cd = {}
    try:
        logger.info("Now running the query: %s\n" % alert.query)
        # Set up the data
        data = search_utils.get_string_to_dict(alert.query)
        try:
            del data['filed_before']
        except KeyError:
            pass
        data['order_by'] = 'score desc'
        logger.info(" Data sent to SearchForm is: %s\n" % data)
        search_form = SearchForm(data)
        if search_form.is_valid():
            cd = search_form.cleaned_data
            if rate == 'rt' and len(self.valid_ids[cd['type']]) == 0:
                # Bail out. No results will be found if no valid_ids.
                return error, cd['type'], results
            cut_off_date = get_cut_off_date(rate)
            if cd['type'] == 'o':
                cd['filed_after'] = cut_off_date
            elif cd['type'] == 'oa':
                cd['argued_after'] = cut_off_date
            main_params = search_utils.build_main_query(cd, facet=False)
            main_params.update({
                'rows': '20',
                'start': '0',
                'hl.tag.pre': '<em><strong>',
                'hl.tag.post': '</strong></em>',
                'caller': 'cl_send_alerts',
            })
            if rate == 'rt':
                # Limit real-time alerts to the IDs indexed since last run.
                main_params['fq'].append('id:(%s)' % ' OR '.join(
                    [str(i) for i in self.valid_ids[cd['type']]]))
            results = self.connections[cd['type']].query().add_extra(
                **main_params).execute()
            regroup_snippets(results)
        else:
            logger.info(" Query for alert %s was invalid\n"
                        " Errors from the SearchForm: %s\n" %
                        (alert.query, search_form.errors))
            error = True
    # Bug fix: was a bare `except:`, which also traps SystemExit and
    # KeyboardInterrupt; narrowed to Exception.
    except Exception:
        traceback.print_exc()
        logger.info(" Search for this alert failed: %s\n" % alert.query)
        error = True
    logger.info(" There were %s results\n" % len(results))
    return error, cd.get('type'), results
def run_query(self, alert, rate):
    """Run a single alert's saved query against Solr.

    :param alert: The alert whose stored query string should be executed.
    :param rate: The alert rate being processed ("rt" is real-time).
    :return: A three-tuple of (error flag, search type or None, results).
    """
    results = []
    error = False
    cd = {}
    try:
        logger.info("Now running the query: %s\n" % alert.query)
        # Set up the data
        data = search_utils.get_string_to_dict(alert.query)
        try:
            del data["filed_before"]
        except KeyError:
            pass
        data["order_by"] = "score desc"
        logger.info(" Data sent to SearchForm is: %s\n" % data)
        search_form = SearchForm(data)
        if search_form.is_valid():
            cd = search_form.cleaned_data
            if rate == "rt" and len(self.valid_ids[cd["type"]]) == 0:
                # Bail out. No results will be found if no valid_ids.
                return error, cd["type"], results
            cut_off_date = get_cut_off_date(rate)
            if cd["type"] == "o":
                cd["filed_after"] = cut_off_date
            elif cd["type"] == "oa":
                cd["argued_after"] = cut_off_date
            main_params = search_utils.build_main_query(cd)
            main_params.update(
                {
                    "rows": "20",
                    "start": "0",
                    "hl.tag.pre": "<em><strong>",
                    "hl.tag.post": "</strong></em>",
                    "caller": "cl_send_alerts",
                }
            )
            if rate == "rt":
                # Limit real-time alerts to the IDs indexed since last run.
                main_params["fq"].append(
                    "id:(%s)"
                    % " OR ".join([str(i) for i in self.valid_ids[cd["type"]]]))
            results = self.connections[cd["type"]].raw_query(
                **main_params).execute()
        else:
            logger.info(
                " Query for alert %s was invalid\n"
                " Errors from the SearchForm: %s\n"
                % (alert.query, search_form.errors)
            )
            error = True
    # Bug fix: was a bare `except:`, which also traps SystemExit and
    # KeyboardInterrupt; narrowed to Exception.
    except Exception:
        traceback.print_exc()
        logger.info(" Search for this alert failed: %s\n" % alert.query)
        error = True
    logger.info(" There were %s results\n" % len(results))
    return error, cd.get("type"), results
def run_query(self, alert, rate):
    """Run an alert's stored query against Solr.

    :param alert: The alert whose query string should be run.
    :param rate: The alert rate being processed (e.g. Alert.REAL_TIME).
    :return: A two-tuple of the parsed QueryDict and the Solr results (an
        empty list when the form is invalid or there is nothing to do).
    """
    results = []
    cd = {}
    logger.info("Now running the query: %s\n" % alert.query)

    # Make a dict from the query string.
    qd = QueryDict(alert.query.encode('utf-8'), mutable=True)
    try:
        del qd['filed_before']
    except KeyError:
        pass
    qd['order_by'] = 'score desc'
    cut_off_date = get_cut_off_date(rate)
    # Default to 'o', if not available, according to the front end.
    query_type = qd.get('type', 'o')
    if query_type in ['o', 'r']:
        qd['filed_after'] = cut_off_date
    elif query_type == 'oa':
        qd['argued_after'] = cut_off_date
    logger.info("Data sent to SearchForm is: %s\n" % qd)
    search_form = SearchForm(qd)
    if search_form.is_valid():
        cd = search_form.cleaned_data
        if rate == Alert.REAL_TIME and \
                len(self.valid_ids[query_type]) == 0:
            # Bail out. No results will be found if no valid_ids.
            # Bug fix: return the QueryDict here, as the normal path below
            # does, instead of the bare query_type string — callers got an
            # inconsistent first element depending on the code path.
            return qd, results
        main_params = search_utils.build_main_query(cd, facet=False)
        main_params.update({
            'rows': '20',
            'start': '0',
            'hl.tag.pre': '<em><strong>',
            'hl.tag.post': '</strong></em>',
            'caller': 'cl_send_alerts:%s' % query_type,
        })
        if rate == Alert.REAL_TIME:
            main_params['fq'].append(
                'id:(%s)' % ' OR '.join([str(i) for i in
                                         self.valid_ids[query_type]]))
        # Ignore warnings from this bit of code. Otherwise, it complains
        # about the query URL being too long and having to POST it instead
        # of being able to GET it.
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            results = self.connections[query_type].query().add_extra(
                **main_params).execute()
        regroup_snippets(results)
    logger.info("There were %s results." % len(results))
    return qd, results
def run_query(self, alert, rate):
    """Run an alert's stored query against Solr.

    :param alert: The alert whose query string should be run.
    :param rate: The alert rate being processed (e.g. Alert.REAL_TIME).
    :return: A two-tuple of the parsed QueryDict and the Solr results (an
        empty list when the form is invalid or there is nothing to do).
    """
    results = []
    cd = {}
    logger.info("Now running the query: %s\n" % alert.query)

    # Make a dict from the query string.
    qd = QueryDict(alert.query.encode("utf-8"), mutable=True)
    try:
        del qd["filed_before"]
    except KeyError:
        pass
    qd["order_by"] = "score desc"
    cut_off_date = get_cut_off_date(rate)
    # Default to 'o', if not available, according to the front end.
    query_type = qd.get("type", "o")
    if query_type in ["o", "r"]:
        qd["filed_after"] = cut_off_date
    elif query_type == "oa":
        qd["argued_after"] = cut_off_date
    logger.info("Data sent to SearchForm is: %s\n" % qd)
    search_form = SearchForm(qd)
    if search_form.is_valid():
        cd = search_form.cleaned_data
        if (rate == Alert.REAL_TIME
                and len(self.valid_ids[query_type]) == 0):
            # Bail out. No results will be found if no valid_ids.
            # Bug fix: return the QueryDict here, as the normal path below
            # does, instead of the bare query_type string — callers got an
            # inconsistent first element depending on the code path.
            return qd, results
        main_params = search_utils.build_main_query(cd, facet=False)
        main_params.update({
            "rows": "20",
            "start": "0",
            "hl.tag.pre": "<em><strong>",
            "hl.tag.post": "</strong></em>",
            "caller": "cl_send_alerts:%s" % query_type,
        })
        if rate == Alert.REAL_TIME:
            main_params["fq"].append(
                "id:(%s)" % " OR ".join([str(i) for i in
                                         self.valid_ids[query_type]]))
        # Ignore warnings from this bit of code. Otherwise, it complains
        # about the query URL being too long and having to POST it instead
        # of being able to GET it.
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            results = (self.connections[query_type].query().add_extra(
                **main_params).execute())
        regroup_snippets(results)
    logger.info("There were %s results." % len(results))
    return qd, results
def get_solr_result_objects(cd, facet):
    """Note that this doesn't run the query yet. Not until the pagination
    is run.
    """
    # Map each search type to the Solr core it queries.
    core_urls = {
        'o': settings.SOLR_OPINION_URL,
        'r': settings.SOLR_RECAP_URL,
        'oa': settings.SOLR_AUDIO_URL,
        'p': settings.SOLR_PEOPLE_URL,
    }
    search_type = cd['type']
    if search_type not in core_urls:
        raise NotImplementedError("Unknown search type: %s" % search_type)
    si = ExtraSolrInterface(core_urls[search_type], mode='r')
    return si.query().add_extra(**build_main_query(cd, facet=facet))
def get_solr_result_objects(cd, facet):
    """Note that this doesn't run the query yet. Not until the pagination
    is run.
    """
    # Map each search type to the Solr core it queries.
    core_urls = {
        SEARCH_TYPES.OPINION: settings.SOLR_OPINION_URL,
        SEARCH_TYPES.RECAP: settings.SOLR_RECAP_URL,
        SEARCH_TYPES.ORAL_ARGUMENT: settings.SOLR_AUDIO_URL,
        SEARCH_TYPES.PEOPLE: settings.SOLR_PEOPLE_URL,
    }
    search_type = cd["type"]
    if search_type not in core_urls:
        raise NotImplementedError("Unknown search type: %s" % search_type)
    si = ExtraSolrInterface(core_urls[search_type], mode="r")
    return si.query().add_extra(**build_main_query(cd, facet=facet))
def items(self, obj):
    """Return the feed's items: the 20 most recently argued Solr hits."""
    form = SearchForm(obj.GET)
    if not form.is_valid():
        return []
    cd = form.cleaned_data
    conn = sunburnt.SolrInterface(settings.SOLR_AUDIO_URL, mode='r')
    params = search_utils.build_main_query(cd, highlight=False)
    params['sort'] = 'dateArgued desc'
    params['rows'] = '20'
    params['start'] = '0'
    params['caller'] = 'SearchFeed'
    return conn.raw_query(**params).execute()
def run_query(self, alert, rate):
    """Run an alert's stored query against Solr.

    :param alert: The alert whose query string should be run.
    :param rate: The alert rate being processed (e.g. Alert.REAL_TIME).
    :return: A two-tuple of the parsed QueryDict and the Solr results (an
        empty list when the form is invalid or there is nothing to do).
    """
    results = []
    cd = {}
    logger.info("Now running the query: %s\n" % alert.query)

    # Make a dict from the query string.
    qd = QueryDict(alert.query.encode('utf-8'), mutable=True)
    try:
        del qd['filed_before']
    except KeyError:
        pass
    qd['order_by'] = 'score desc'
    cut_off_date = get_cut_off_date(rate)
    # Default to 'o', if not available, according to the front end.
    query_type = qd.get('type', 'o')
    if query_type in ['o', 'r']:
        qd['filed_after'] = cut_off_date
    elif query_type == 'oa':
        qd['argued_after'] = cut_off_date
    logger.info("Data sent to SearchForm is: %s\n" % qd)
    search_form = SearchForm(qd)
    if search_form.is_valid():
        cd = search_form.cleaned_data
        if rate == Alert.REAL_TIME and \
                len(self.valid_ids[query_type]) == 0:
            # Bail out. No results will be found if no valid_ids.
            # Bug fix: return the QueryDict here, as the normal path below
            # does, instead of the bare query_type string — callers got an
            # inconsistent first element depending on the code path.
            return qd, results
        main_params = search_utils.build_main_query(cd, facet=False)
        main_params.update({
            'rows': '20',
            'start': '0',
            'hl.tag.pre': '<em><strong>',
            'hl.tag.post': '</strong></em>',
            'caller': 'cl_send_alerts:%s' % query_type,
        })
        if rate == Alert.REAL_TIME:
            main_params['fq'].append('id:(%s)' % ' OR '.join(
                [str(i) for i in self.valid_ids[query_type]]
            ))
        results = self.connections[query_type].query().add_extra(
            **main_params).execute()
        regroup_snippets(results)
    logger.info("There were %s results." % len(results))
    return qd, results
def get_object_list(request, cd, paginator):
    """Perform the Solr work"""
    # Translate the requested page into a Solr offset.
    # With page_size = 20: page 1 --> 0, page 2 --> 20, page 3 --> 40.
    page = int(request.GET.get(paginator.page_query_param, 1))
    size = paginator.get_page_size(request)
    start = max(0, (page - 1) * size)

    main_query = search_utils.build_main_query(
        cd, highlight="text", facet=False, group=False)
    main_query["caller"] = "api_search"
    if cd["type"] == SEARCH_TYPES.RECAP:
        # RECAP results need the sort param translated for docket entries.
        main_query["sort"] = map_to_docket_entry_sorting(main_query["sort"])
    return SolrList(main_query=main_query, offset=start, type=cd["type"])
def get_object_list(request, cd, paginator):
    """Perform the Solr work"""
    # Translate the requested page into a Solr offset.
    # With page_size = 20: page 1 --> 0, page 2 --> 20, page 3 --> 40.
    page = int(request.GET.get(paginator.page_query_param, 1))
    size = paginator.get_page_size(request)
    start = max(0, (page - 1) * size)

    main_query = search_utils.build_main_query(
        cd, highlight='text', facet=False, group=False)
    main_query['caller'] = 'api_search'
    if cd['type'] == 'r':
        # RECAP results need the sort param translated for docket entries.
        main_query['sort'] = map_to_docket_entry_sorting(main_query['sort'])
    return SolrList(main_query=main_query, offset=start, type=cd['type'])
def items(self, obj):
    """Return the feed's items: the 20 most recently argued Solr hits."""
    form = SearchForm(obj.GET)
    if not form.is_valid():
        return []
    cd = form.cleaned_data
    solr = ExtraSolrInterface(settings.SOLR_AUDIO_URL, mode="r")
    params = search_utils.build_main_query(cd, highlight=False, facet=False)
    params["sort"] = "dateArgued desc"
    params["rows"] = "20"
    params["start"] = "0"
    params["caller"] = "SearchFeed"
    return solr.query().add_extra(**params).execute()
def items(self, obj):
    """Do a Solr query here. Return the first 20 results"""
    search_form = SearchForm(obj.GET)
    if search_form.is_valid():
        cd = search_form.cleaned_data
        if cd['type'] == 'o':
            solr = ExtraSolrInterface(settings.SOLR_OPINION_URL, mode='r')
        elif cd['type'] == 'r':
            solr = ExtraSolrInterface(settings.SOLR_RECAP_URL, mode='r')
        else:
            # Bug fix: previously any other search type fell through with
            # `solr` unbound and raised UnboundLocalError below. Return an
            # empty feed instead, matching the other feed classes.
            return []
        main_params = search_utils.build_main_query(cd, highlight=False)
        main_params.update({
            'sort': 'dateFiled desc',
            'rows': '20',
            'start': '0',
            'caller': 'SearchFeed',
        })
        return solr.query().add_extra(**main_params).execute()
    else:
        return []
def items(self, obj):
    """Do a Solr query here. Return the first 20 results"""
    search_form = SearchForm(obj.GET)
    if search_form.is_valid():
        cd = search_form.cleaned_data
        if cd['type'] == 'o':
            solr = ExtraSolrInterface(settings.SOLR_OPINION_URL, mode='r')
        elif cd['type'] == 'r':
            solr = ExtraSolrInterface(settings.SOLR_RECAP_URL, mode='r')
        else:
            # Bug fix: previously any other search type fell through with
            # `solr` unbound and raised UnboundLocalError below. Return an
            # empty feed instead, matching the other feed classes.
            return []
        main_params = search_utils.build_main_query(cd, highlight=False,
                                                    facet=False)
        main_params.update({
            'sort': 'dateFiled desc',
            'rows': '20',
            'start': '0',
            'caller': 'SearchFeed',
        })
        return solr.query().add_extra(**main_params).execute()
    else:
        return []
def do_search(request, rows=20, order_by=None, type=None):
    """Run the search described by request.GET.

    :param request: The HTTP request carrying the GET parameters.
    :param rows: Results per page.
    :param order_by: Optional override of the form's order_by value.
    :param type: Optional override of the form's search type.
    """
    # Bind the search form.
    search_form = SearchForm(request.GET)
    if search_form.is_valid():
        cd = search_form.cleaned_data
        # Allows an override by calling methods.
        if order_by:
            cd['order_by'] = order_by
        if type:
            cd['type'] = type
        search_form = _clean_form(request, cd)
        try:
            if cd['type'] == 'o':
                conn = sunburnt.SolrInterface(settings.SOLR_OPINION_URL,
                                              mode='r')
                stat_facet_fields = search_utils.place_facet_queries(cd, conn)
                status_facets = search_utils.make_stats_variable(
                    stat_facet_fields, search_form)
            elif cd['type'] == 'oa':
                conn = sunburnt.SolrInterface(settings.SOLR_AUDIO_URL,
                                              mode='r')
                status_facets = None
            results_si = conn.raw_query(**search_utils.build_main_query(cd))
            courts = Court.objects.filter(in_use=True).values(
                'pk', 'short_name', 'jurisdiction',
                'has_oral_argument_scraper')
            courts, court_count_human, court_count = search_utils\
                .merge_form_with_courts(courts, search_form)
        # Bug fix: `except Exception, e` is Python-2-only syntax and a
        # SyntaxError on Python 3; use the `as` form, which both support.
        except Exception as e:
            logger.warning("Error loading search with request: %s"
                           % request.GET)
            logger.warning("Error was %s" % e)
            return {'error': True}
def do_search(request, rows=20, order_by=None, type=None):
    """Run the search described by request.GET and paginate the results.

    :param request: The HTTP request carrying the GET parameters.
    :param rows: Results per page.
    :param order_by: Optional override of the form's order_by value.
    :param type: Optional override of the form's search type.
    """
    # Bind the search form.
    search_form = SearchForm(request.GET)
    if search_form.is_valid():
        cd = search_form.cleaned_data
        # Allows an override by calling methods.
        if order_by is not None:
            cd['order_by'] = order_by
        if type is not None:
            cd['type'] = type
        search_form = _clean_form(request, cd)
        try:
            if cd['type'] == 'o':
                conn = sunburnt.SolrInterface(settings.SOLR_OPINION_URL,
                                              mode='r')
                stat_facet_fields = search_utils.place_facet_queries(cd, conn)
                status_facets = search_utils.make_stats_variable(
                    stat_facet_fields, search_form)
            elif cd['type'] == 'oa':
                conn = sunburnt.SolrInterface(settings.SOLR_AUDIO_URL,
                                              mode='r')
                status_facets = None
            elif cd['type'] == 'p':
                conn = sunburnt.SolrInterface(settings.SOLR_PEOPLE_URL,
                                              mode='r')
                status_facets = None
            results_si = conn.raw_query(**search_utils.build_main_query(cd))
            courts = Court.objects.filter(in_use=True)
            courts, court_count_human, court_count = search_utils\
                .merge_form_with_courts(courts, search_form)
        except Exception as e:
            if settings.DEBUG is True:
                traceback.print_exc()
            logger.warning("Error loading search with request: %s"
                           % request.GET)
            logger.warning("Error was %s" % e)
            return {'error': True}
    else:
        # Invalid form, send it back
        logger.warning(
            "Invalid form when loading search page with request: %s"
            % request.GET)
        return {'error': True}

    # Set up pagination
    try:
        paginator = Paginator(results_si, rows)
        page = request.GET.get('page', 1)
        try:
            paged_results = paginator.page(page)
        except PageNotAnInteger:
            # If page is not an integer, deliver first page.
            paged_results = paginator.page(1)
        except EmptyPage:
            # If page is out of range (e.g. 9999), deliver last page of
            # results.
            paged_results = paginator.page(paginator.num_pages)
    # Bug fix: `except Exception, e` is Python-2-only syntax and a
    # SyntaxError on Python 3; likewise `print e` was a Py2 print statement.
    except Exception as e:
        # Catches any Solr errors, and aborts.
        logger.warning(
            "Error loading pagination on search page with request: %s"
            % request.GET)
        logger.warning("Error was: %s" % e)
        if settings.DEBUG is True:
            print(e)
        return {'error': True}
def do_search(
    get_params,
    rows=20,
    override_params=None,
    facet=True,
    cache_key=None,
):
    """Do all the difficult solr work.

    :param get_params: The request.GET parameters sent by the user. Note that
    this cannot simply be request.GET since that is immutable and
    override_params needs to be able to change this. Instead generally it's
    best to send request.GET.copy().
    :param rows: The number of solr results to request
    :param override_params: A dict with additional or different GET params to
    be sent to solr.
    :param facet: Whether to complete faceting in the query
    :param cache_key: A cache key with which to save the results. Note that it
    does not do anything clever with the actual query, so if you use this, your
    cache key should *already* have factored in the query. If None, no caching
    is set or used. Results are saved for six hours.
    :return A big dict of variables for use in the search results, homepage, or
    other location.
    """
    query_citation = None
    error = False
    paged_results = None
    cited_cluster = None
    courts = Court.objects.filter(in_use=True)
    related_cluster_pks = None

    # Add additional or overridden GET parameters
    if override_params:
        get_params.update(override_params)
    search_form = SearchForm(get_params)
    if search_form.is_valid():
        cd = search_form.cleaned_data

        # Do the query, hitting the cache if desired
        try:
            si = get_solr_interface(cd)
        except NotImplementedError:
            # Unknown search type; re-raise after logging so the caller
            # sees the failure rather than a silent empty result.
            logger.error(
                "Tried getting solr connection for %s, but it's not "
                "implemented yet",
                cd["type"],
            )
            raise

        try:
            # Is this a `related:<pks>` prefix query?
            related_prefix_match = RELATED_PATTERN.search(cd["q"])
            if related_prefix_match:
                # Seed IDs
                related_cluster_pks = related_prefix_match.group("pks").split(
                    ",")
                # More-like-this query seeded from the given cluster PKs.
                results = get_mlt_query(
                    si,
                    cd.copy(),
                    facet,
                    related_cluster_pks,
                    # Original query
                    cd["q"].replace(related_prefix_match.group("pfx"), ""),
                )
            else:
                # Regular search queries
                results = si.query().add_extra(
                    **build_main_query(cd, facet=facet))

            paged_results = paginate_cached_solr_results(
                get_params, cd, results, rows, cache_key)
            cited_cluster = add_depth_counts(
                # Also returns cited cluster if found
                search_data=cd,
                search_results=paged_results,
            )
        except (NotImplementedError, RequestException, SolrError) as e:
            # Solr/network failures degrade to an error page rather than a 500.
            error = True
            logger.warning("Error loading search page with request: %s"
                           % get_params)
            logger.warning("Error was: %s" % e)
            if settings.DEBUG is True:
                traceback.print_exc()

        # A couple special variables for particular search types
        search_form = _clean_form(get_params, cd, courts)
        if cd["type"] in [
            SEARCH_TYPES.OPINION,
            SEARCH_TYPES.RECAP,
            SEARCH_TYPES.DOCKETS,
        ]:
            query_citation = get_query_citation(cd)

        if cd["type"] in [
            SEARCH_TYPES.RECAP,
            SEARCH_TYPES.DOCKETS,
            SEARCH_TYPES.PEOPLE,
        ]:
            # PACER-centric types: limit the court list to active PACER
            # courts and drop bankruptcy panels.
            panels = Court.FEDERAL_BANKRUPTCY_PANEL
            courts = courts.filter(
                pacer_court_id__isnull=False,
                end_date__isnull=True).exclude(jurisdiction=panels)
    else:
        # Invalid form: fall through with error set so the template can
        # render the bound form and its errors.
        error = True

    courts, court_count_human, court_count = merge_form_with_courts(
        courts, search_form)
    search_summary_str = search_form.as_text(court_count_human)
    search_summary_dict = search_form.as_display_dict(court_count_human)
    related_cluster = (OpinionCluster.objects.get(
        sub_opinions__pk__in=related_cluster_pks)
        if related_cluster_pks else None)
    return {
        "results": paged_results,
        "facet_fields": make_stats_variable(search_form, paged_results),
        "search_form": search_form,
        "search_summary_str": search_summary_str,
        "search_summary_dict": search_summary_dict,
        "courts": courts,
        "court_count_human": court_count_human,
        "court_count": court_count,
        "query_citation": query_citation,
        "error": error,
        "cited_cluster": cited_cluster,
        "related_cluster": related_cluster,
    }
def run_query(self, alert, rate):
    """Run a single alert's saved query against Solr.

    :param alert: The alert whose stored query string should be executed.
    :param rate: The alert rate being processed ('rt' is real-time).
    :return: A three-tuple of (error flag, search type or None, results).
    """
    results = []
    error = False
    cd = {}
    try:
        logger.info("Now running the query: %s\n" % alert.query)
        # Set up the data
        data = search_utils.get_string_to_dict(alert.query)
        try:
            del data['filed_before']
        except KeyError:
            pass
        data['order_by'] = 'score desc'
        logger.info(" Data sent to SearchForm is: %s\n" % data)
        search_form = SearchForm(data)
        if search_form.is_valid():
            cd = search_form.cleaned_data
            if rate == 'rt' and len(self.valid_ids[cd['type']]) == 0:
                # Bail out. No results will be found if no valid_ids.
                return error, cd['type'], results
            cut_off_date = get_cut_off_date(rate)
            if cd['type'] == 'o':
                cd['filed_after'] = cut_off_date
            elif cd['type'] == 'oa':
                cd['argued_after'] = cut_off_date
            main_params = search_utils.build_main_query(cd, facet=False)
            main_params.update({
                'rows': '20',
                'start': '0',
                'hl.tag.pre': '<em><strong>',
                'hl.tag.post': '</strong></em>',
                'caller': 'cl_send_alerts',
            })
            if rate == 'rt':
                # Limit real-time alerts to the IDs indexed since last run.
                main_params['fq'].append('id:(%s)' % ' OR '.join(
                    [str(i) for i in self.valid_ids[cd['type']]]
                ))
            results = self.connections[
                cd['type']
            ].query().add_extra(
                **main_params
            ).execute()
            regroup_snippets(results)
        else:
            logger.info(" Query for alert %s was invalid\n"
                        " Errors from the SearchForm: %s\n" %
                        (alert.query, search_form.errors))
            error = True
    # Bug fix: was a bare `except:`, which also traps SystemExit and
    # KeyboardInterrupt; narrowed to Exception.
    except Exception:
        traceback.print_exc()
        logger.info(" Search for this alert failed: %s\n" % alert.query)
        error = True
    logger.info(" There were %s results\n" % len(results))
    return error, cd.get('type'), results
def do_search(request, rows=20, order_by=None, type=None, facet=True):
    """Run the search in request.GET and return template context.

    :param request: The HTTP request carrying the user's GET parameters.
    :param rows: Results per page (RECAP searches are forced to 10 below).
    :param order_by: Optional override of the form's order_by value.
    :param type: Optional override of the form's search type.
    :param facet: Whether to request facets from Solr.
    :return: A dict of template variables; 'error' is True when the form was
        invalid or the Solr query/pagination failed.
    """
    query_citation = None
    error = False
    paged_results = None
    search_form = SearchForm(request.GET)
    courts = Court.objects.filter(in_use=True)
    if search_form.is_valid():
        cd = search_form.cleaned_data
        # Allows an override by calling methods.
        if order_by is not None:
            cd['order_by'] = order_by
        if type is not None:
            cd['type'] = type
        search_form = _clean_form(request, cd, courts)
        # Pick the Solr core matching the search type. Note the query is
        # only built here; it does not execute until pagination below.
        if cd['type'] == 'o':
            si = ExtraSolrInterface(settings.SOLR_OPINION_URL, mode='r')
            results = si.query().add_extra(**build_main_query(cd,
                                                              facet=facet))
            query_citation = get_query_citation(cd)
        elif cd['type'] == 'r':
            si = ExtraSolrInterface(settings.SOLR_RECAP_URL, mode='r')
            results = si.query().add_extra(**build_main_query(cd,
                                                              facet=facet))
        elif cd['type'] == 'oa':
            si = ExtraSolrInterface(settings.SOLR_AUDIO_URL, mode='r')
            results = si.query().add_extra(**build_main_query(cd,
                                                              facet=facet))
        elif cd['type'] == 'p':
            si = ExtraSolrInterface(settings.SOLR_PEOPLE_URL, mode='r')
            results = si.query().add_extra(**build_main_query(cd,
                                                              facet=facet))
        # Set up pagination
        try:
            if cd['type'] == 'r':
                rows = 10
            paginator = Paginator(results, rows)
            page = request.GET.get('page', 1)
            try:
                paged_results = paginator.page(page)
            except PageNotAnInteger:
                paged_results = paginator.page(1)
            except EmptyPage:
                # Page is out of range (e.g. 9999), deliver last page.
                paged_results = paginator.page(paginator.num_pages)
        except Exception as e:
            # Catches any Solr errors, and aborts.
            logger.warning("Error loading pagination on search page with "
                           "request: %s" % request.GET)
            logger.warning("Error was: %s" % e)
            if settings.DEBUG is True:
                traceback.print_exc()
            error = True
        # Post processing of the results
        regroup_snippets(paged_results)
    else:
        error = True
    courts, court_count_human, court_count = merge_form_with_courts(courts,
                                                                    search_form)
    return {
        'results': paged_results,
        'search_form': search_form,
        'courts': courts,
        'court_count_human': court_count_human,
        'court_count': court_count,
        'query_citation': query_citation,
        'facet_fields': make_stats_variable(search_form, paged_results),
        'error': error,
    }
def do_search(request, rows=20, order_by=None, type=None):
    """Run the search described by request.GET and paginate the results.

    :param request: The HTTP request carrying the GET parameters.
    :param rows: Results per page.
    :param order_by: Optional override of the form's order_by value.
    :param type: Optional override of the form's search type.
    """
    search_form = SearchForm(request.GET)
    if search_form.is_valid():
        cd = search_form.cleaned_data
        # Allows an override by calling methods.
        if order_by is not None:
            cd['order_by'] = order_by
        if type is not None:
            cd['type'] = type
        search_form = _clean_form(request, cd)
        try:
            query_citation = None
            status_facets = None
            if cd['type'] == 'o':
                si = ExtraSolrInterface(settings.SOLR_OPINION_URL, mode='r')
                stat_facet_fields = place_facet_queries(cd, si)
                status_facets = make_stats_variable(stat_facet_fields,
                                                    search_form)
                query_citation = get_query_citation(cd)
                results = si.query().add_extra(**build_main_query(cd))
            elif cd['type'] == 'r':
                si = ExtraSolrInterface(settings.SOLR_RECAP_URL, mode='r')
                results = si.query().add_extra(**build_main_query(cd))
            elif cd['type'] == 'oa':
                si = ExtraSolrInterface(settings.SOLR_AUDIO_URL, mode='r')
                results = si.query().add_extra(**build_main_query(cd))
            elif cd['type'] == 'p':
                si = ExtraSolrInterface(settings.SOLR_PEOPLE_URL, mode='r')
                results = si.query().add_extra(**build_main_query(cd))
            courts = Court.objects.filter(in_use=True)
            courts, court_count_human, court_count = merge_form_with_courts(
                courts, search_form
            )
        except Exception as e:
            if settings.DEBUG is True:
                traceback.print_exc()
            logger.warning("Error loading search with request: %s"
                           % request.GET)
            logger.warning("Error was %s" % e)
            return {'error': True}
    else:
        # Invalid form, send it back
        logger.warning("Invalid form when loading search page with "
                       "request: %s" % request.GET)
        return {'error': True}

    # Set up pagination
    try:
        paginator = Paginator(results, rows)
        page = request.GET.get('page', 1)
        try:
            paged_results = paginator.page(page)
        except PageNotAnInteger:
            # If page is not an integer, deliver first page.
            paged_results = paginator.page(1)
        except EmptyPage:
            # If page is out of range (e.g. 9999), deliver last page of
            # results.
            paged_results = paginator.page(paginator.num_pages)
    # Bug fix: `except Exception, e` is Python-2-only syntax and a
    # SyntaxError on Python 3; use the `as` form, which both support.
    except Exception as e:
        # Catches any Solr errors, and aborts.
        logger.warning("Error loading pagination on search page with "
                       "request: %s" % request.GET)
        logger.warning("Error was: %s" % e)
        if settings.DEBUG is True:
            traceback.print_exc()
        return {'error': True}