Example #1
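Both examples assume the usual Django imports plus several helpers from the
ebpub package. The code predates Django 1.7 (hence the mimetype argument
and SortedDict). A hedged sketch of the import block; the ebpub module
paths are assumptions, not confirmed by these examples:

import datetime

from django.conf import settings
from django.core.cache import cache
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from django.utils.cache import patch_response_headers
from django.utils.datastructures import SortedDict  # removed in Django 1.9

from ebpub.db import constants                               # assumed path
from ebpub.db.models import NewsItem, Schema, SchemaField    # assumed path
from ebpub.db.schemafilters import FilterChain, FilterError  # assumed path
# get_schema_manager, api_items_geojson, has_staff_cookie, today(),
# BadAddressException, and BadDateException also come from ebpub modules
# not shown in these examples.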
def schema_filter_geojson(request, slug, args_from_url):
    s = get_object_or_404(get_schema_manager(request), slug=slug, is_special_report=False)
    if not s.allow_charting:
        return HttpResponse(status=404)

    filter_sf_list = list(SchemaField.objects.filter(schema__id=s.id, is_filter=True).order_by('display_order'))
    textsearch_sf_list = list(SchemaField.objects.filter(schema__id=s.id, is_searchable=True).order_by('display_order'))

    # Use SortedDict to preserve the display_order.
    filter_sf_dict = SortedDict([(sf.name, sf) for sf in filter_sf_list] + [(sf.name, sf) for sf in textsearch_sf_list])

    # Determine what filters to apply, based on path and/or query string.
    filterchain = FilterChain(request=request, schema=s)
    try:
        filterchain.update_from_request(args_from_url, filter_sf_dict)
        filters_need_more = filterchain.validate()
    except (FilterError, BadAddressException, BadDateException):
        return HttpResponse(status=400)

    if filters_need_more:
        return HttpResponse(status=400)

    # If there isn't a date filter, add some dates to the queryset,
    # but NOT to the filterchain, because we need to give the user
    # the option of choosing dates.
    qs, start_date, end_date = _default_date_filtering(filterchain)

    if s.is_event:
        qs = qs.order_by('item_date', 'id')
    else:
        qs = qs.order_by('-item_date', '-id')

    page = request.GET.get('page', None)
    if page is not None:
        try:
            page = int(page)
            idx_start = (page - 1) * constants.FILTER_PER_PAGE
            idx_end = page * constants.FILTER_PER_PAGE
            # Get one extra, so we can tell whether there's a next page.
            idx_end += 1
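            # e.g., assuming FILTER_PER_PAGE is 20: page=2 gives
            # idx_start=20, idx_end=41, so the slice below fetches
            # rows 20..40 (20 to display plus one probe row).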
        except ValueError:
            return HttpResponse('Invalid Page', status=400)
    else:
        # No page parameter: return everything, capped at 1000 items.
        idx_start, idx_end = 0, 1000
    qs = qs[idx_start:idx_end]

    cache_key = 'schema_filter_geojson:' + _make_cache_key_from_queryset(qs)
    cache_seconds = 60 * 5
    output = cache.get(cache_key, None)
    if output is None:
        output = api_items_geojson(list(qs))
        cache.set(cache_key, output, cache_seconds)

    response = HttpResponse(output, mimetype="application/javascript")
    patch_response_headers(response, cache_timeout=cache_seconds)
    return response
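The _default_date_filtering helper is not shown above. A minimal sketch of
what it might do, assuming it mirrors the inline date-defaulting logic in
Example #2 below (the body and the use of datetime.date.today() are
assumptions; Example #2 uses a today() helper instead):

def _default_date_filtering(filterchain):
    # Apply whatever filters the user chose.
    qs = filterchain.apply()
    start_date = end_date = None
    if 'date' not in filterchain:
        # No date filter chosen: constrain the queryset, but NOT the
        # filterchain, to the last DEFAULT_DAYS days, per the comment
        # in schema_filter_geojson above.
        end_date = datetime.date.today()
        start_date = end_date - datetime.timedelta(days=settings.DEFAULT_DAYS)
        qs = qs.filter(item_date__gte=start_date, item_date__lte=end_date)
    return qs, start_date, end_date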
Example #2
def newsitems_geojson(request):
    """Get a list of newsitems, optionally filtered for one place ID
    and/or one schema slug.

    Response is a geojson string.
    """
    # Note: can't use @cache_page here because that ignores all requests
    # with query parameters (in FetchFromCacheMiddleware.process_request).
    # So, we'll use the low-level cache API.

    # Copy-pasted code from ajax_place_newsitems.  Refactoring target:
    # there are a number of similar code blocks in ebpub.db.views.

    pid = request.GET.get('pid', '')
    schema = request.GET.get('schema', None)
    if schema is not None:
        schema = get_object_or_404(Schema, slug=schema)

    nid = request.GET.get('newsitem', '')

    newsitem_qs = NewsItem.objects.all()
    if nid:
        newsitem_qs = newsitem_qs.filter(id=nid)
    else:
        filters = FilterChain(request=request, queryset=newsitem_qs, schema=schema)
        if pid:
            filters.add_by_place_id(pid)
        # else: no place filter; search the whole city.

        # More copy/paste from ebpub.db.views...
        # As an optimization, limit the NewsItems to those published in the
        # last few days.
        filters.update_from_query_params(request)
        if 'date' not in filters:
            end_date = today()
            start_date = end_date - datetime.timedelta(days=settings.DEFAULT_DAYS)
            filters.add('date', start_date, end_date)
        newsitem_qs = filters.apply()
        if not has_staff_cookie(request):
            newsitem_qs = newsitem_qs.filter(schema__is_public=True)

        # Put a hard limit on the number of newsitems, and throw away
        # older items.
        newsitem_qs = newsitem_qs.select_related().order_by('-item_date', '-id')
        newsitem_qs = newsitem_qs[:constants.NUM_NEWS_ITEMS_PLACE_DETAIL]

    # Done preparing the query; cache based on the raw SQL
    # to be sure we capture everything that matters.
    cache_seconds = 60 * 5
    cache_key = 'newsitem_geojson:' + _make_cache_key_from_queryset(newsitem_qs)
    output = cache.get(cache_key, None)
    if output is None:
        newsitem_list = list(newsitem_qs)
        output = api_items_geojson(newsitem_list)
        cache.set(cache_key, output, cache_seconds)

    response = HttpResponse(output, mimetype="application/javascript")
    patch_response_headers(response, cache_timeout=cache_seconds)
    return response
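Both views key the cache on _make_cache_key_from_queryset, which per the
comment in Example #2 is derived from the queryset's raw SQL. A minimal
sketch, assuming Python 2 (matching the examples) and an md5 of the
rendered query; the real implementation is not shown in these examples:

import hashlib

def _make_cache_key_from_queryset(qs):
    # str(qs.query) renders the exact SELECT the queryset would run,
    # including filters, ordering, and slicing, so two identical
    # requests share one cache entry.  Hashing keeps the key short and
    # whitespace-free, which memcached requires (max 250 characters).
    return hashlib.md5(str(qs.query)).hexdigest()

Hashing the SQL rather than the individual query parameters means any
change that affects the result set (schema, place, dates, pagination)
automatically produces a distinct cache key.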