Ejemplo n.º 1
0
def paginate_cached_solr_results(get_params, cd, results, rows, cache_key):
    """Paginate Solr results, serving and storing pages via the cache.

    If a page is already cached under ``cache_key``, return it at once.
    Otherwise paginate ``results``, regroup its snippets, cache the page
    for six hours (when a key was given), and return it.

    :param get_params: mapping of query params; "page" selects the page.
    :param cd: cleaned search-form data; RECAP/DOCKETS types force 10 rows.
    :param results: the Solr results object to paginate.
    :param rows: results per page.
    :param cache_key: cache key to read/write, or None to skip caching.
    :return: the paginated, post-processed page of results.
    """
    # Serve a previously cached page when one exists.
    if cache_key is not None:
        cached_page = cache.get(cache_key)
        if cached_page is not None:
            return cached_page

    # Non-numeric "page" values fall back to the first page.
    try:
        page = int(get_params.get("page", 1))
    except ValueError:
        page = 1
    check_pagination_depth(page)

    # RECAP-style result lists are shown ten at a time.
    if cd["type"] in (SEARCH_TYPES.RECAP, SEARCH_TYPES.DOCKETS):
        rows = 10

    paginator = Paginator(results, rows)
    try:
        paged_results = paginator.page(page)
    except PageNotAnInteger:
        paged_results = paginator.page(1)
    except EmptyPage:
        # Out-of-range pages (e.g. 9999) get the final page instead.
        paged_results = paginator.page(paginator.num_pages)

    # Post-process the page before caching so cache hits skip this work.
    regroup_snippets(paged_results)

    if cache_key is not None:
        six_hours = 60 * 60 * 6
        cache.set(cache_key, paged_results, six_hours)

    return paged_results
Ejemplo n.º 2
0
def paginate_cached_solr_results(request, cd, results, rows, cache_key):
    """Paginate Solr results, caching the resulting page for six hours.

    :param request: the HttpRequest; its GET "page" param picks the page.
    :param cd: cleaned search-form data; cd['type'] == 'r' forces 10 rows.
    :param results: the Solr results object to paginate.
    :param rows: results per page (overridden to 10 for type 'r').
    :param cache_key: cache key to read/write, or None to skip caching.
    :return: the paginated (and snippet-regrouped) page of results.
    """
    # Run the query and set up pagination
    if cache_key is not None:
        paged_results = cache.get(cache_key)
        if paged_results is not None:
            return paged_results

    # A non-numeric ?page= value (e.g. "page=abc") previously raised an
    # uncaught ValueError here; fall back to the first page instead.
    try:
        page = int(request.GET.get('page', 1))
    except ValueError:
        page = 1
    check_pagination_depth(page)

    if cd['type'] == 'r':
        rows = 10

    paginator = Paginator(results, rows)
    try:
        paged_results = paginator.page(page)
    except PageNotAnInteger:
        paged_results = paginator.page(1)
    except EmptyPage:
        # Page is out of range (e.g. 9999), deliver last page.
        paged_results = paginator.page(paginator.num_pages)

    # Post processing of the results
    regroup_snippets(paged_results)

    if cache_key is not None:
        six_hours = 60 * 60 * 6
        cache.set(cache_key, paged_results, six_hours)

    return paged_results
Ejemplo n.º 3
0
def paginate_cached_solr_results(request, cd, results, rows, cache_key):
    """Paginate Solr results, caching the resulting page for six hours.

    :param request: the HttpRequest; its GET "page" param picks the page.
    :param cd: cleaned search-form data; cd['type'] == 'r' forces 10 rows.
    :param results: the Solr results object to paginate.
    :param rows: results per page (overridden to 10 for type 'r').
    :param cache_key: cache key to read/write, or None to skip caching.
    :return: the paginated (and snippet-regrouped) page of results.
    """
    # Run the query and set up pagination
    if cache_key is not None:
        paged_results = cache.get(cache_key)
        if paged_results is not None:
            return paged_results

    # A non-numeric ?page= value (e.g. "page=abc") previously raised an
    # uncaught ValueError here; fall back to the first page instead.
    try:
        page = int(request.GET.get('page', 1))
    except ValueError:
        page = 1
    check_pagination_depth(page)

    if cd['type'] == 'r':
        rows = 10

    paginator = Paginator(results, rows)
    try:
        paged_results = paginator.page(page)
    except PageNotAnInteger:
        paged_results = paginator.page(1)
    except EmptyPage:
        # Page is out of range (e.g. 9999), deliver last page.
        paged_results = paginator.page(paginator.num_pages)

    # Post processing of the results
    regroup_snippets(paged_results)

    if cache_key is not None:
        six_hours = 60 * 60 * 6
        cache.set(cache_key, paged_results, six_hours)

    return paged_results
Ejemplo n.º 4
0
    def run_query(self, alert, rate):
        """Run an alert's saved query against Solr.

        Rewrites the stored query string (drops 'filed_before', forces
        relevance ordering, adds a rate-based cut-off date) and executes
        it against the connection for the query type.

        :param alert: the alert whose .query string is executed.
        :param rate: the alert rate; 'rt' (real time) restricts hits to
            self.valid_ids[cd['type']].
        :return: an (error, query_type, results) triple; query_type is
            cd.get('type'), which is None when the form never validated.
        """
        results = []
        error = False
        cd = {}
        try:
            logger.info("Now running the query: %s\n" % alert.query)

            # Set up the data
            data = search_utils.get_string_to_dict(alert.query)
            try:
                del data['filed_before']
            except KeyError:
                pass
            data['order_by'] = 'score desc'
            logger.info("  Data sent to SearchForm is: %s\n" % data)
            search_form = SearchForm(data)
            if search_form.is_valid():
                cd = search_form.cleaned_data

                if rate == 'rt' and len(self.valid_ids[cd['type']]) == 0:
                    # Bail out. No results will be found if no valid_ids.
                    return error, cd['type'], results

                cut_off_date = get_cut_off_date(rate)
                if cd['type'] == 'o':
                    cd['filed_after'] = cut_off_date
                elif cd['type'] == 'oa':
                    cd['argued_after'] = cut_off_date
                main_params = search_utils.build_main_query(cd, facet=False)
                main_params.update({
                    'rows': '20',
                    'start': '0',
                    'hl.tag.pre': '<em><strong>',
                    'hl.tag.post': '</strong></em>',
                    'caller': 'cl_send_alerts',
                })

                if rate == 'rt':
                    # Restrict real-time alerts to only the valid ids.
                    main_params['fq'].append('id:(%s)' % ' OR '.join(
                        [str(i) for i in self.valid_ids[cd['type']]]))
                results = self.connections[cd['type']].query().add_extra(
                    **main_params).execute()
                regroup_snippets(results)

            else:
                logger.info("  Query for alert %s was invalid\n"
                            "  Errors from the SearchForm: %s\n" %
                            (alert.query, search_form.errors))
                error = True
        except Exception:
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; narrowed to Exception.
            traceback.print_exc()
            logger.info("  Search for this alert failed: %s\n" % alert.query)
            error = True

        logger.info("  There were %s results\n" % len(results))

        return error, cd.get('type'), results
Ejemplo n.º 5
0
def do_search(request, rows=20, order_by=None, type=None, facet=True):
    """Validate the search form, query the matching Solr core, paginate.

    NOTE(review): this snippet ends right after regroup_snippets() with
    no return statement (so it implicitly returns None) — it appears
    truncated; left as-is rather than inventing the missing tail.

    :param request: the HttpRequest; GET params feed the SearchForm.
    :param rows: results per page (forced to 10 for RECAP, type 'r').
    :param order_by: optional override for cd['order_by'].
    :param type: optional override for cd['type'] (shadows the builtin;
        kept for caller compatibility).
    :param facet: whether to build the main query with facets.
    """
    query_citation = None
    error = False
    paged_results = None
    search_form = SearchForm(request.GET)
    courts = Court.objects.filter(in_use=True)

    if search_form.is_valid():
        cd = search_form.cleaned_data
        # Allows an override by calling methods.
        if order_by is not None:
            cd['order_by'] = order_by
        if type is not None:
            cd['type'] = type
        search_form = _clean_form(request, cd, courts)

        # Route the query to the Solr core matching the search type.
        if cd['type'] == 'o':
            si = ExtraSolrInterface(settings.SOLR_OPINION_URL, mode='r')
            results = si.query().add_extra(**build_main_query(cd, facet=facet))
            query_citation = get_query_citation(cd)
        elif cd['type'] == 'r':
            si = ExtraSolrInterface(settings.SOLR_RECAP_URL, mode='r')
            results = si.query().add_extra(**build_main_query(cd, facet=facet))
        elif cd['type'] == 'oa':
            si = ExtraSolrInterface(settings.SOLR_AUDIO_URL, mode='r')
            results = si.query().add_extra(**build_main_query(cd, facet=facet))
        elif cd['type'] == 'p':
            si = ExtraSolrInterface(settings.SOLR_PEOPLE_URL, mode='r')
            results = si.query().add_extra(**build_main_query(cd, facet=facet))

        # Set up pagination
        try:
            if cd['type'] == 'r':
                rows = 10
            paginator = Paginator(results, rows)
            page = request.GET.get('page', 1)
            try:
                paged_results = paginator.page(page)
            except PageNotAnInteger:
                paged_results = paginator.page(1)
            except EmptyPage:
                # Page is out of range (e.g. 9999), deliver last page.
                paged_results = paginator.page(paginator.num_pages)
        except Exception as e:
            # Was Python 2 `except Exception, e` syntax (a SyntaxError on
            # Python 3); converted to the `as` form.
            # Catches any Solr errors, and aborts.
            logger.warning("Error loading pagination on search page with "
                           "request: %s" % request.GET)
            logger.warning("Error was: %s" % e)
            if settings.DEBUG is True:
                traceback.print_exc()
            error = True

        # Post processing of the results
        regroup_snippets(paged_results)
Ejemplo n.º 6
0
    def run_query(self, alert, rate):
        """Run the alert's saved query against Solr.

        The stored query string is rewritten before execution: any
        'filed_before' filter is dropped, ordering is forced to
        relevance ("score desc"), and a rate-based cut-off date is added
        ('filed_after' for 'o'/'r' queries, 'argued_after' for 'oa').

        NOTE(review): the real-time bail-out returns
        (query_type, results) while the normal path returns
        (qd, results) — the first element's type differs between the two
        paths; confirm which shape callers actually expect.

        :param alert: the alert whose .query string is executed.
        :param rate: the alert rate; Alert.REAL_TIME restricts hits to
            self.valid_ids[query_type].
        :return: (qd, results) — or (query_type, []) on the bail-out.
        """
        results = []
        cd = {}
        logger.info("Now running the query: %s\n" % alert.query)

        # Make a dict from the query string.
        qd = QueryDict(alert.query.encode("utf-8"), mutable=True)
        try:
            del qd["filed_before"]
        except KeyError:
            pass
        # Always order alert results by relevance.
        qd["order_by"] = "score desc"
        cut_off_date = get_cut_off_date(rate)
        # Default to 'o', if not available, according to the front end.
        query_type = qd.get("type", "o")
        if query_type in ["o", "r"]:
            qd["filed_after"] = cut_off_date
        elif query_type == "oa":
            qd["argued_after"] = cut_off_date
        logger.info("Data sent to SearchForm is: %s\n" % qd)
        search_form = SearchForm(qd)
        if search_form.is_valid():
            cd = search_form.cleaned_data

            if (rate == Alert.REAL_TIME
                    and len(self.valid_ids[query_type]) == 0):
                # Bail out. No results will be found if no valid_ids.
                return query_type, results

            main_params = search_utils.build_main_query(cd, facet=False)
            main_params.update({
                "rows": "20",
                "start": "0",
                "hl.tag.pre": "<em><strong>",
                "hl.tag.post": "</strong></em>",
                "caller": "cl_send_alerts:%s" % query_type,
            })

            if rate == Alert.REAL_TIME:
                # Restrict real-time alerts to only the valid ids.
                main_params["fq"].append(
                    "id:(%s)" %
                    " OR ".join([str(i) for i in self.valid_ids[query_type]]))

            # Ignore warnings from this bit of code. Otherwise, it complains
            # about the query URL being too long and having to POST it instead
            # of being able to GET it.
            with warnings.catch_warnings():
                warnings.simplefilter("ignore")
                results = (self.connections[query_type].query().add_extra(
                    **main_params).execute())
            regroup_snippets(results)

        logger.info("There were %s results." % len(results))
        return qd, results
Ejemplo n.º 7
0
    def run_query(self, alert, rate):
        """Execute an alert's saved query against Solr.

        The stored query string is first rewritten: 'filed_before' is
        removed, ordering is forced to relevance, and a rate-based
        cut-off date is applied ('filed_after' for 'o'/'r' queries,
        'argued_after' for 'oa'). Real-time alerts are additionally
        limited to self.valid_ids for the query type.

        Returns (params, hits); on the real-time bail-out the first
        element is the query type instead.
        """
        hits = []
        cd = {}
        logger.info("Now running the query: %s\n" % alert.query)

        # Rebuild the saved query string as a mutable QueryDict.
        params = QueryDict(alert.query.encode('utf-8'), mutable=True)
        try:
            del params['filed_before']
        except KeyError:
            pass
        params['order_by'] = 'score desc'
        cutoff = get_cut_off_date(rate)
        # Default to 'o', if not available, according to the front end.
        alert_type = params.get('type', 'o')
        if alert_type in ('o', 'r'):
            params['filed_after'] = cutoff
        elif alert_type == 'oa':
            params['argued_after'] = cutoff
        logger.info("Data sent to SearchForm is: %s\n" % params)
        form = SearchForm(params)
        if form.is_valid():
            cd = form.cleaned_data

            if rate == Alert.REAL_TIME and \
                    len(self.valid_ids[alert_type]) == 0:
                # Nothing can match a real-time alert without valid ids.
                return alert_type, hits

            main_params = search_utils.build_main_query(cd, facet=False)
            main_params['rows'] = '20'
            main_params['start'] = '0'
            main_params['hl.tag.pre'] = '<em><strong>'
            main_params['hl.tag.post'] = '</strong></em>'
            main_params['caller'] = 'cl_send_alerts:%s' % alert_type

            if rate == Alert.REAL_TIME:
                id_filter = 'id:(%s)' % ' OR '.join(
                    [str(i) for i in self.valid_ids[alert_type]])
                main_params['fq'].append(id_filter)

            # Suppress the warnings about the query URL being too long
            # and having to be POSTed instead of GETed.
            with warnings.catch_warnings():
                warnings.simplefilter("ignore")
                conn = self.connections[alert_type]
                hits = conn.query().add_extra(**main_params).execute()
            regroup_snippets(hits)

        logger.info("There were %s results." % len(hits))
        return params, hits
Ejemplo n.º 8
0
    def run_query(self, alert, rate):
        """Run the alert's saved query against Solr.

        Rewrites the stored query string (drops 'filed_before', forces
        relevance ordering, adds a rate-based cut-off date — 'filed_after'
        for 'o'/'r' queries, 'argued_after' for 'oa') and executes it
        against the connection for the query type.

        NOTE(review): the real-time bail-out returns
        (query_type, results) while the normal path returns
        (qd, results) — the first element's type differs; confirm which
        shape callers expect.

        :param alert: the alert whose .query string is executed.
        :param rate: the alert rate; Alert.REAL_TIME restricts hits to
            self.valid_ids[query_type].
        :return: (qd, results) — or (query_type, []) on the bail-out.
        """
        results = []
        cd = {}
        logger.info("Now running the query: %s\n" % alert.query)

        # Make a dict from the query string.
        qd = QueryDict(alert.query.encode('utf-8'), mutable=True)
        try:
            del qd['filed_before']
        except KeyError:
            pass
        # Always order alert results by relevance.
        qd['order_by'] = 'score desc'
        cut_off_date = get_cut_off_date(rate)
        # Default to 'o', if not available, according to the front end.
        query_type = qd.get('type', 'o')
        if query_type in ['o', 'r']:
            qd['filed_after'] = cut_off_date
        elif query_type == 'oa':
            qd['argued_after'] = cut_off_date
        logger.info("Data sent to SearchForm is: %s\n" % qd)
        search_form = SearchForm(qd)
        if search_form.is_valid():
            cd = search_form.cleaned_data

            if rate == Alert.REAL_TIME and \
                    len(self.valid_ids[query_type]) == 0:
                # Bail out. No results will be found if no valid_ids.
                return query_type, results

            main_params = search_utils.build_main_query(cd, facet=False)
            main_params.update({
                'rows': '20',
                'start': '0',
                'hl.tag.pre': '<em><strong>',
                'hl.tag.post': '</strong></em>',
                'caller': 'cl_send_alerts:%s' % query_type,
            })

            if rate == Alert.REAL_TIME:
                # Restrict real-time alerts to only the valid ids.
                main_params['fq'].append('id:(%s)' % ' OR '.join(
                    [str(i) for i in self.valid_ids[query_type]]
                ))
            results = self.connections[query_type].query().add_extra(
                **main_params).execute()
            regroup_snippets(results)

        logger.info("There were %s results." % len(results))
        return qd, results
Ejemplo n.º 9
0
def do_search(request, rows=20, order_by=None, type=None, facet=True):
    """Run a search and return a template-context dict.

    Validates the GET params with SearchForm, queries the Solr core
    matching the search type ('o' opinions, 'r' RECAP, 'oa' oral
    arguments, 'p' people), paginates the results, and regroups snippets.

    :param request: the HttpRequest whose GET params define the search.
    :param rows: results per page; forced to 10 for RECAP ('r').
    :param order_by: optional override for cd['order_by'].
    :param type: optional override for cd['type'] (shadows the builtin;
        kept for caller compatibility).
    :param facet: whether to build the main query with facets.
    :return: dict with the results page, the form, court data, the
        query citation, facet fields, and an error flag.
    """
    query_citation = None
    error = False
    paged_results = None
    search_form = SearchForm(request.GET)
    courts = Court.objects.filter(in_use=True)

    if search_form.is_valid():
        cd = search_form.cleaned_data
        # Allows an override by calling methods.
        if order_by is not None:
            cd['order_by'] = order_by
        if type is not None:
            cd['type'] = type
        search_form = _clean_form(request, cd, courts)

        # Route the query to the Solr core matching the search type.
        if cd['type'] == 'o':
            si = ExtraSolrInterface(settings.SOLR_OPINION_URL, mode='r')
            results = si.query().add_extra(**build_main_query(cd, facet=facet))
            query_citation = get_query_citation(cd)
        elif cd['type'] == 'r':
            si = ExtraSolrInterface(settings.SOLR_RECAP_URL, mode='r')
            results = si.query().add_extra(**build_main_query(cd, facet=facet))
        elif cd['type'] == 'oa':
            si = ExtraSolrInterface(settings.SOLR_AUDIO_URL, mode='r')
            results = si.query().add_extra(**build_main_query(cd, facet=facet))
        elif cd['type'] == 'p':
            si = ExtraSolrInterface(settings.SOLR_PEOPLE_URL, mode='r')
            results = si.query().add_extra(**build_main_query(cd, facet=facet))

        # Set up pagination
        try:
            if cd['type'] == 'r':
                rows = 10
            paginator = Paginator(results, rows)
            page = request.GET.get('page', 1)
            try:
                paged_results = paginator.page(page)
            except PageNotAnInteger:
                paged_results = paginator.page(1)
            except EmptyPage:
                # Page is out of range (e.g. 9999), deliver last page.
                paged_results = paginator.page(paginator.num_pages)
        except Exception as e:
            # Catches any Solr errors, and aborts.
            logger.warning("Error loading pagination on search page with "
                           "request: %s" % request.GET)
            logger.warning("Error was: %s" % e)
            if settings.DEBUG is True:
                traceback.print_exc()
            error = True

        # Post processing of the results. Previously regroup_snippets()
        # was called even when the error path above left paged_results
        # as None; guard against that.
        if paged_results is not None:
            regroup_snippets(paged_results)

    else:
        error = True

    courts, court_count_human, court_count = merge_form_with_courts(courts,
                                                                    search_form)
    return {
        'results': paged_results,
        'search_form': search_form,
        'courts': courts,
        'court_count_human': court_count_human,
        'court_count': court_count,
        'query_citation': query_citation,
        'facet_fields': make_stats_variable(search_form, paged_results),
        'error': error,
    }
Ejemplo n.º 10
0
    def run_query(self, alert, rate):
        """Run an alert's saved query against Solr.

        Rewrites the stored query string (drops 'filed_before', forces
        relevance ordering, adds a rate-based cut-off date) and executes
        it against the connection for the query type.

        :param alert: the alert whose .query string is executed.
        :param rate: the alert rate; 'rt' (real time) restricts hits to
            self.valid_ids[cd['type']].
        :return: an (error, query_type, results) triple; query_type is
            cd.get('type'), which is None when the form never validated.
        """
        results = []
        error = False
        cd = {}
        try:
            logger.info("Now running the query: %s\n" % alert.query)

            # Set up the data
            data = search_utils.get_string_to_dict(alert.query)
            try:
                del data['filed_before']
            except KeyError:
                pass
            data['order_by'] = 'score desc'
            logger.info("  Data sent to SearchForm is: %s\n" % data)
            search_form = SearchForm(data)
            if search_form.is_valid():
                cd = search_form.cleaned_data

                if rate == 'rt' and len(self.valid_ids[cd['type']]) == 0:
                    # Bail out. No results will be found if no valid_ids.
                    return error, cd['type'], results

                cut_off_date = get_cut_off_date(rate)
                if cd['type'] == 'o':
                    cd['filed_after'] = cut_off_date
                elif cd['type'] == 'oa':
                    cd['argued_after'] = cut_off_date
                main_params = search_utils.build_main_query(cd, facet=False)
                main_params.update({
                    'rows': '20',
                    'start': '0',
                    'hl.tag.pre': '<em><strong>',
                    'hl.tag.post': '</strong></em>',
                    'caller': 'cl_send_alerts',
                })

                if rate == 'rt':
                    # Restrict real-time alerts to only the valid ids.
                    main_params['fq'].append('id:(%s)' % ' OR '.join(
                        [str(i) for i in self.valid_ids[cd['type']]]
                    ))
                results = self.connections[
                    cd['type']
                ].query().add_extra(
                    **main_params
                ).execute()
                regroup_snippets(results)

            else:
                logger.info("  Query for alert %s was invalid\n"
                            "  Errors from the SearchForm: %s\n" %
                            (alert.query, search_form.errors))
                error = True
        except Exception:
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; narrowed to Exception.
            traceback.print_exc()
            logger.info("  Search for this alert failed: %s\n" %
                        alert.query)
            error = True

        logger.info("  There were %s results\n" % len(results))

        return error, cd.get('type'), results
Ejemplo n.º 11
0
            paged_results = paginator.page(page)
        except PageNotAnInteger:
            # If page is not an integer, deliver first page.
            paged_results = paginator.page(1)
        except EmptyPage:
            # If page is out of range (e.g. 9999), deliver last page of results.
            paged_results = paginator.page(paginator.num_pages)
    except Exception, e:
        # Catches any Solr errors, and aborts.
        logger.warning("Error loading pagination on search page with request: %s" % request.GET)
        logger.warning("Error was: %s" % e)
        if settings.DEBUG is True:
            traceback.print_exc()
        return {'error': True}

    regroup_snippets(paged_results)

    return {
        'search_form': search_form,
        'results': paged_results,
        'courts': courts,
        'court_count_human': court_count_human,
        'court_count': court_count,
        'status_facets': status_facets,
        'query_citation': query_citation,
    }


def get_homepage_stats():
    """Get any stats that are displayed on the homepage and return them as a
    dict