Example #1
def apply(self, queryset=None):
    """
    Applies each filter in the chain.
    """
    for key, filt in self._sorted_items():
        # TODO: this is an awkward way of passing the queryset.
        if queryset is not None:
            filt.qs = queryset
        filt.apply()
        queryset = filt.qs
    # Don't show any NewsItems whose Schema isn't public yet.
    # TODO: Is there a better place to do this?
    if not has_staff_cookie(self.request):
        queryset = queryset.filter(schema__is_public=True)
    return queryset
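
This example (like the two below) hides non-public schemas unless has_staff_cookie(request) is true; that helper is not part of this listing. A minimal sketch of such a check, assuming hypothetical STAFF_COOKIE_NAME and STAFF_COOKIE_VALUE settings (the real ebpub helper may differ):

from django.conf import settings

def has_staff_cookie(request):
    # Hypothetical sketch, not the ebpub implementation: treat the visitor
    # as staff only if the request carries the expected cookie name/value.
    return request.COOKIES.get(settings.STAFF_COOKIE_NAME) == settings.STAFF_COOKIE_VALUE
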
Example #2
def newsitem_detail(request, schema_slug, newsitem_id):
    ni = get_object_or_404(NewsItem.objects.select_related(), id=newsitem_id,
                           schema__slug=schema_slug)
    if not ni.schema.is_public and not has_staff_cookie(request):
        raise Http404('Not public')

    if not ni.schema.has_newsitem_detail:
        # Don't show detail pages.
        if ni.url:
            return HttpResponsePermanentRedirect(ni.url)
        else:
            # We have nothing to show the user; ticket #110.
            raise Http404("This news item needs an external URL and doesn't have one")

    atts = ni.attributes_for_template()

    has_location = ni.location is not None

    if has_location:
        locations_within = Location.objects.select_related().filter(
            newsitemlocation__news_item__id=ni.id)
        center_x = ni.location.centroid.x
        center_y = ni.location.centroid.y
    else:
        locations_within = ()
        center_x = settings.DEFAULT_MAP_CENTER_LON
        center_y = settings.DEFAULT_MAP_CENTER_LAT

    hide_ads = (request.COOKIES.get(HIDE_ADS_COOKIE_NAME) == 't')

    templates_to_try = ('db/newsitem_detail/%s.html' % ni.schema.slug, 'db/newsitem_detail.html')

    # Try to find a usable URL to link to from the location name.
    # TODO: move this logic to NewsItem.location_url()
    location_url = ni.location_url()
    if not location_url:
        # There might be any number of intersecting locations_within,
        # and we don't have any criteria for deciding which if any
        # best corresponds to ni.location_name; but we can try
        # a few other fallbacks.
        if ni.block:
            location_url = ni.block.url()
        elif ni.location:
            # Try reverse-geocoding and see if we get a block.
            try:
                block, distance = geocoder.reverse.reverse_geocode(ni.location)
                # TODO: if this happens, we should really update
                # ni.block, but this seems like a terrible place to do
                # that.
                logger.warning(
                    "%r (id %d) can be reverse-geocoded to %r (id %d) but"
                    " self.block isn't set" % (ni, ni.id, block, block.id))
                location_url = block.url()
            except geocoder.reverse.ReverseGeocodeError:
                logger.error(
                    "%r (id %d) has neither a location_url, nor a block,"
                    " nor a reverse-geocodable location" % (ni, ni.id))

    context = {
        'newsitem': ni,
        'attribute_list': [att for att in atts if att.sf.display],
        'attribute_dict': dict((att.sf.name, att) for att in atts),
        'has_location': has_location,
        'locations_within': locations_within,
        'location_url': location_url,
        'hide_ads': hide_ads,
        'map_center_x': center_x,
        'map_center_y': center_y,
        'bodyclass': 'newsitem-detail',
        'bodyid': schema_slug,
    }
    context['breadcrumbs'] = breadcrumbs.newsitem_detail(context)
    context['map_configuration'] = _preconfigured_map(context)
    return eb_render(request, templates_to_try, context)
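
The templates_to_try tuple relies on Django's template loading accepting a sequence of names and rendering the first template that exists, so a schema-specific template overrides the generic detail page. A minimal sketch of that fallback using the stock render shortcut (eb_render presumably adds project-wide context on top; that part is an assumption):

from django.shortcuts import render

def render_newsitem_detail(request, schema_slug, context):
    # Illustrative only: the first existing template wins, so a
    # 'db/newsitem_detail/<slug>.html' override takes precedence when present.
    templates_to_try = (
        'db/newsitem_detail/%s.html' % schema_slug,
        'db/newsitem_detail.html',
    )
    return render(request, templates_to_try, context)
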
Example #3
def newsitems_geojson(request):
    """Get a list of newsitems, optionally filtered for one place ID
    and/or one schema slug.

    The response body is a GeoJSON string.
    """
    # Note: can't use @cache_page here because that ignores all requests
    # with query parameters (in FetchFromCacheMiddleware.process_request).
    # So, we'll use the low-level cache API.

    # Copy-pasted code from ajax_place_newsitems.  Refactoring target:
    # Seems like there are a number of similar code blocks in
    # ebpub.db.views?

    pid = request.GET.get('pid', '')
    schema = request.GET.get('schema', None)
    if schema is not None:
        schema = get_object_or_404(Schema, slug=schema)

    nid = request.GET.get('newsitem', '')

    newsitem_qs = NewsItem.objects.all()
    if nid:
        newsitem_qs = newsitem_qs.filter(id=nid)
    else:
        filters = FilterChain(request=request, queryset=newsitem_qs, schema=schema)
        if pid:
            filters.add_by_place_id(pid)
        else:
            # Whole city!
            pass

        # More copy/paste from ebpub.db.views...
        # As an optimization, limit the NewsItems to those published in the
        # last few days.
        filters.update_from_query_params(request)
        if 'date' not in filters:
            end_date = today()
            start_date = end_date - datetime.timedelta(days=settings.DEFAULT_DAYS)
            filters.add('date', start_date, end_date)
        newsitem_qs = filters.apply()
        if not has_staff_cookie(request):
            newsitem_qs = newsitem_qs.filter(schema__is_public=True)

        # Put a hard limit on the number of newsitems, and throw away
        # older items.
        newsitem_qs = newsitem_qs.select_related().order_by('-item_date', '-id')
        newsitem_qs = newsitem_qs[:constants.NUM_NEWS_ITEMS_PLACE_DETAIL]

    # Done preparing the query; cache based on the raw SQL
    # to be sure we capture everything that matters.
    cache_seconds = 60 * 5
    cache_key = 'newsitem_geojson:' + _make_cache_key_from_queryset(newsitem_qs)
    output = cache.get(cache_key, None)
    if output is None:
        newsitem_list = list(newsitem_qs)
        output = api_items_geojson(newsitem_list)
        cache.set(cache_key, output, cache_seconds)

    response = HttpResponse(output, content_type="application/javascript")
    patch_response_headers(response, cache_timeout=cache_seconds)
    return response
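
The comments at the top of the view explain why @cache_page cannot be used and why the cache key is derived from the query itself. A minimal sketch of a helper like _make_cache_key_from_queryset, assuming it hashes the compiled SQL so that any change to the filters, ordering, or slice produces a different key (the actual ebpub helper may differ):

import hashlib

def _make_cache_key_from_queryset(queryset):
    # Hypothetical sketch: str(queryset.query) is the SQL Django would execute,
    # so hashing it captures every filter applied above and keeps the key
    # short and free of whitespace, which memcached requires.
    sql = str(queryset.query)
    return hashlib.md5(sql.encode('utf-8')).hexdigest()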