def get_featured_lookups_by_schema(context):
    """
    Get the names and URLs of all :ref:`featured_lookups` and put them
    in the context as 'featured_lookups', a mapping grouped by schema slug.

    Example:

    .. code-block:: html+django

       {% get_featured_lookups_by_schema %}
       {% for schema, lookups in featured_lookups.items %}
         <ul>{{ schema }}
           {% for info in lookups %}
             <a href="{{ info.url }}">{{ info.lookup }}</a> ...
           {% endfor %}
         </ul>
       {% endfor %}
    """
    lookups = {}
    for lookup in Lookup.objects.filter(featured=True).select_related():
        sf = lookup.schema_field
        schema = sf.schema
        filters = FilterChain(schema=schema)
        filters.add(sf, lookup)
        info = {'lookup': lookup.name, 'url': filters.make_url()}
        lookups.setdefault(schema.slug, []).append(info)
    context['featured_lookups'] = lookups
    return u''

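# Registration sketch (illustrative only): the template-tag library this
# function lives in is not shown in this excerpt, and the real module may
# register it differently. Assuming a plain Django tag library, registration
# could look roughly like this:
from django import template

register = template.Library()
register.simple_tag(takes_context=True)(get_featured_lookups_by_schema)

# Because the tag returns u'' it renders nothing itself; templates read the
# 'featured_lookups' context variable afterwards, as in the docstring example.
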
def test_add__pubdate(self):
    # The 'pubdate' key gets overridden: the filter is stored under 'date'.
    chain = FilterChain()
    import datetime
    chain.add('pubdate', datetime.date(2011, 8, 13))
    self.assert_(chain.has_key('date'))
    self.failIf(chain.has_key('pubdate'))
    self.assertEqual(chain['date'].start_date, datetime.date(2011, 8, 13))

def test_add_date__whole_month(self):
    # Special syntax for adding a whole month, convenient for templates
    # where you don't want to have to calculate the end date.
    chain = FilterChain()
    import datetime
    chain.add('date', datetime.date(2011, 8, 13), 'month')
    self.assertEqual(chain['date'].start_date, datetime.date(2011, 8, 1))
    self.assertEqual(chain['date'].end_date, datetime.date(2011, 8, 31))

def ajax_place_date_chart(request):
    """
    Returns an HTML fragment containing a chart of how many news items
    were added for each day over a short period (length defined by
    constants.DAYS_SHORT_AGGREGATE_TIMEDELTA).

    Expects request.GET['pid'] (a place ID) and request.GET['s'] (a Schema ID).
    """
    manager = get_schema_manager(request)
    try:
        schema = manager.get(id=int(request.GET['s']))
    except (KeyError, ValueError, Schema.DoesNotExist):
        raise Http404('Invalid Schema')
    filters = FilterChain(request=request, schema=schema)
    filters.add_by_place_id(request.GET.get('pid', ''))
    qs = filters.apply()

    # These charts are used on eg. the place overview page; there,
    # they should be smaller than the ones on the schema_detail view;
    # we don't have room for a full 30 days.
    date_span = constants.DAYS_SHORT_AGGREGATE_TIMEDELTA
    if schema.is_event:
        # Soonest span that includes some items.
        try:
            qs = qs.filter(item_date__gte=today()).order_by('item_date', 'id')
            first_item = qs.values('item_date')[0]
            start_date = first_item['item_date']
        except IndexError:
            # No matching items.
            start_date = today()
        end_date = today() + date_span
    else:
        # Most recent span that includes some items.
        try:
            qs = qs.filter(item_date__lte=today()).order_by('-item_date', '-id')
            last_item = qs.values('item_date')[0]
            end_date = last_item['item_date']
        except IndexError:
            # No matching items.
            end_date = today()
        start_date = end_date - date_span

    filters.add('date', start_date, end_date)
    counts = filters.apply().date_counts()
    date_chart = get_date_chart([schema], start_date, end_date,
                                {schema.id: counts})[0]
    return render_to_response('db/snippets/date_chart.html', {
        'schema': schema,
        'date_chart': date_chart,
        'filters': filters,
    })

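# Wiring sketch (illustrative only): the project's actual urlconf is not
# shown here, and the pattern and example query string below are assumptions.
# Assuming a Django 1.x style urlconf, hooking up this AJAX view could look
# roughly like:
from django.conf.urls.defaults import patterns, url

urlpatterns = patterns('',
    # The view expects ?s=<schema id> and ?pid=<place id>, e.g.
    # /ajax-place-date-chart/?s=7&pid=l:42
    url(r'^ajax-place-date-chart/$', ajax_place_date_chart),
)
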
def place_detail_timeline(request, *args, **kwargs):
    """
    Recent news OR upcoming events for the given Location or Block.
    """
    context, response = _place_detail_normalize_url(request, *args, **kwargs)
    if response is not None:
        return response
    show_upcoming = kwargs.get('show_upcoming')
    schema_manager = get_schema_manager(request)

    if show_upcoming:
        context['breadcrumbs'] = breadcrumbs.place_detail_upcoming(context)
    else:
        context['breadcrumbs'] = breadcrumbs.place_detail_timeline(context)

    is_latest_page = True
    # Check the query string for the max date to use. Otherwise, fall
    # back to today.
    end_date = today()
    if 'start' in request.GET:
        try:
            end_date = parse_date(request.GET['start'], '%m/%d/%Y')
            is_latest_page = False
        except ValueError:
            raise Http404('Invalid date %s' % request.GET['start'])

    filterchain = FilterChain(request=request, context=context)
    filterchain.add('location', context['place'])

    # As an optimization, limit the NewsItems to those on the
    # last (or next) few days.
    # And only fetch for relevant schemas - either event-ish or not.
    if show_upcoming:
        s_list = schema_manager.filter(is_event=True)
        start_date = end_date
        end_date = start_date + datetime.timedelta(days=settings.DEFAULT_DAYS)
        order_by = 'item_date_date'
    else:
        s_list = schema_manager.filter(is_event=False)
        start_date = end_date - datetime.timedelta(days=settings.DEFAULT_DAYS)
        order_by = '-item_date_date'

    filterchain.add('schema', list(s_list))
    filterchain.add('date', start_date, end_date)
    newsitem_qs = filterchain.apply().select_related()

    # TODO: can this really only be done via extra()?
    newsitem_qs = newsitem_qs.extra(
        select={'item_date_date': 'date(db_newsitem.item_date)'},
        order_by=(order_by, '-schema__importance', 'schema')
    )[:constants.NUM_NEWS_ITEMS_PLACE_DETAIL]

    # We're done filtering, so go ahead and do the query, to
    # avoid running it multiple times,
    # per http://docs.djangoproject.com/en/dev/topics/db/optimization
    ni_list = list(newsitem_qs)
    schemas_used = list(set([ni.schema for ni in ni_list]))
    s_list = s_list.filter(is_special_report=False,
                           allow_charting=True).order_by('plural_name')
    populate_attributes_if_needed(ni_list, schemas_used)
    if ni_list:
        next_day = ni_list[-1].item_date - datetime.timedelta(days=1)
    else:
        next_day = None

    hidden_schema_list = []
    if not request.user.is_anonymous():
        hidden_schema_list = [o.schema for o in
                              HiddenSchema.objects.filter(user_id=request.user.id)]

    context.update({
        'newsitem_list': ni_list,
        'next_day': next_day,
        'is_latest_page': is_latest_page,
        'hidden_schema_list': hidden_schema_list,
        'bodyclass': 'place-detail-timeline',
        'bodyid': context.get('place_type') or '',
        'filters': filterchain,
        'show_upcoming': show_upcoming,
    })
    context['filtered_schema_list'] = s_list
    context['map_configuration'] = _preconfigured_map(context)
    response = eb_render(request, 'db/place_detail.html', context)
    for k, v in context['cookies_to_set'].items():
        response.set_cookie(k, v)
    return response

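# Paging sketch (illustrative only): db/place_detail.html is not shown here,
# but given the '%m/%d/%Y' parsing above and the 'next_day' context variable,
# an "older items" link for the recent-news case could look roughly like:
#
#   <a href="?start={{ next_day|date:"m/d/Y" }}">&larr; Older items</a>
#
# Any such link must use the m/d/Y format, otherwise the view raises Http404.
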
def newsitems_geojson(request):
    """Get a list of newsitems, optionally filtered for one place ID
    and/or one schema slug.

    Response is a geojson string.
    """
    # Note: can't use @cache_page here because that ignores all requests
    # with query parameters (in FetchFromCacheMiddleware.process_request).
    # So, we'll use the low-level cache API.

    # Copy-pasted code from ajax_place_newsitems. Refactoring target:
    # Seems like there are a number of similar code blocks in
    # ebpub.db.views?
    pid = request.GET.get('pid', '')
    schema = request.GET.get('schema', None)
    if schema is not None:
        schema = get_object_or_404(Schema, slug=schema)

    nid = request.GET.get('newsitem', '')

    newsitem_qs = NewsItem.objects.all()
    if nid:
        newsitem_qs = newsitem_qs.filter(id=nid)
    else:
        filters = FilterChain(request=request, queryset=newsitem_qs,
                              schema=schema)
        if pid:
            filters.add_by_place_id(pid)
        else:
            # Whole city!
            pass

        # More copy/paste from ebpub.db.views...
        # As an optimization, limit the NewsItems to those published in the
        # last few days.
        filters.update_from_query_params(request)
        if not filters.has_key('date'):
            end_date = today()
            start_date = end_date - datetime.timedelta(days=settings.DEFAULT_DAYS)
            filters.add('date', start_date, end_date)
        newsitem_qs = filters.apply()

    if not has_staff_cookie(request):
        newsitem_qs = newsitem_qs.filter(schema__is_public=True)

    # Put a hard limit on the number of newsitems, and throw away
    # older items.
    newsitem_qs = newsitem_qs.select_related().order_by('-item_date', '-id')
    newsitem_qs = newsitem_qs[:constants.NUM_NEWS_ITEMS_PLACE_DETAIL]

    # Done preparing the query; cache based on the raw SQL
    # to be sure we capture everything that matters.
    cache_seconds = 60 * 5
    cache_key = 'newsitem_geojson:' + _make_cache_key_from_queryset(newsitem_qs)
    output = cache.get(cache_key, None)
    if output is None:
        newsitem_list = list(newsitem_qs)
        output = api_items_geojson(newsitem_list)
        cache.set(cache_key, output, cache_seconds)

    response = HttpResponse(output, mimetype="application/javascript")
    patch_response_headers(response, cache_timeout=60 * 5)
    return response

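# _make_cache_key_from_queryset() is not shown in this excerpt. A minimal
# sketch of one plausible implementation, assuming the intent is simply to
# derive a short, stable key from the compiled SQL (the real helper may
# differ, e.g. in hash choice or key prefixing):
import hashlib

def _make_cache_key_from_queryset(queryset):
    # str(queryset.query) renders the SQL Django will execute, including all
    # filters, ordering, and slicing, so any change to the query produces a
    # different cache key.
    return hashlib.md5(str(queryset.query)).hexdigest()
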
def place_detail_overview(request, *args, **kwargs):
    context, response = _place_detail_normalize_url(request, *args, **kwargs)
    if response is not None:
        return response
    schema_manager = get_schema_manager(request)
    context['breadcrumbs'] = breadcrumbs.place_detail_overview(context)

    schema_list = SortedDict(
        [(s.id, s) for s in
         schema_manager.filter(is_special_report=False).order_by('plural_name')])
    # needed = set(schema_list.keys())

    # We actually want two lists of schemas, since we care whether
    # they are news-like or future-event-like.
    import copy
    eventish_schema_list = copy.deepcopy(schema_list)
    newsish_schema_list = copy.deepcopy(schema_list)
    for s_id, schema in schema_list.items():
        if schema.is_event:
            del(newsish_schema_list[s_id])
        else:
            del(eventish_schema_list[s_id])

    filterchain = FilterChain(request=request, context=context)
    filterchain.add('location', context['place'])

    # Distinguish between past news and upcoming events.
    # With some preliminary date limiting too.
    filterchain_news = filterchain.copy()
    filterchain_news.add('date',
                         today() - datetime.timedelta(days=90),
                         today())
    filterchain_events = filterchain.copy()
    filterchain_events.add('date',
                           today(),
                           today() + datetime.timedelta(days=60))

    # Ordering by ID ensures consistency across page views.
    newsitem_qs = filterchain_news.apply().order_by('-item_date', '-id')
    events_qs = filterchain_events.apply().order_by('item_date', 'id')

    # Mapping of schema id -> [schemafields], for building Lookup charts.
    sf_dict = {}
    charted_lookups = SchemaField.objects.filter(
        is_lookup=True, is_charted=True, schema__is_public=True,
        schema__is_special_report=False)
    charted_lookups = charted_lookups.values('id', 'schema_id', 'pretty_name')
    for sf in charted_lookups.order_by('schema__id', 'display_order'):
        sf_dict.setdefault(sf['schema_id'], []).append(sf)

    # Now retrieve newsitems per schema.
    schema_groups, all_newsitems = [], []
    for schema in schema_list.values():
        if schema.id in newsish_schema_list:
            newsitems = newsitem_qs.filter(schema__id=schema.id)
        elif schema.id in eventish_schema_list:
            newsitems = events_qs.filter(schema__id=schema.id)
        else:
            raise RuntimeError("should never get here")
        # Limit each group to that schema's configured count.
        newsitems = list(newsitems[:schema.number_in_overview])
        populate_schema(newsitems, schema)
        schema_groups.append({
            'schema': schema,
            'latest_newsitems': newsitems,
            'has_newsitems': bool(newsitems),
            'lookup_charts': sf_dict.get(schema.id),
        })
        all_newsitems.extend(newsitems)

    schema_list = schema_list.values()
    populate_attributes_if_needed(all_newsitems, schema_list)
    schema_list = [s for s in schema_list if s.allow_charting]

    context['schema_groups'] = schema_groups
    context['filtered_schema_list'] = schema_list
    context['bodyclass'] = 'place-detail-overview'
    if context['is_block']:
        context['bodyid'] = '%s-%s-%s' % (context['place'].street_slug,
                                          context['place'].number(),
                                          context['place'].dir_url_bit())
    else:
        context['bodyid'] = context['location'].slug
    response = eb_render(request, 'db/place_overview.html', context)
    for k, v in context['cookies_to_set'].items():
        response.set_cookie(k, v)
    return response

def test_add__id(self):
    from ebpub.db.schemafilters import IdFilter
    chain = FilterChain()
    chain.add('id', [1, 2, 3])
    self.assert_(isinstance(chain['id'], IdFilter))

def item_date_url(self):
    from ebpub.db.schemafilters import FilterChain
    chain = FilterChain(schema=self.schema)
    chain.add('date', self.item_date)
    return chain.make_url()

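# Usage sketch (illustrative only): item_date_url() appears to be a model
# method (the enclosing class is not shown in this excerpt, but it has a
# schema and an item_date), so a template could link to other items of the
# same schema on the same day roughly like:
#
#   <a href="{{ newsitem.item_date_url }}">
#     More {{ newsitem.schema.plural_name }} from {{ newsitem.item_date }}
#   </a>
#
# The exact URL produced depends on FilterChain.make_url(), not shown here.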