def schema_detail_special_report(request, schema):
    """
    For display of schemas where is_special_report=True.
    """
    items = NewsItem.objects.filter(schema__id=schema.id)
    populate_schema(items, schema)
    populate_attributes_if_needed(items, [schema])
    # Charting schemas additionally expose browsable location types and
    # the filterable schema fields; others get empty lists.
    if schema.allow_charting:
        location_types = LocationType.objects.filter(is_significant=True)
        filter_fields = list(schema.schemafield_set.filter(is_filter=True).order_by('display_order'))
    else:
        location_types, filter_fields = [], []
    # Prefer a per-schema template if one exists, else the generic one.
    candidate_templates = (
        'db/schema_detail/%s.html' % schema.slug,
        'db/schema_detail_special_report.html',
    )
    context = {
        'schema': schema,
        'newsitem_list': items,
        'browsable_locationtype_list': location_types,
        'schemafield_list': filter_fields,
        'bodyclass': 'schema-detail-special-report',
        'bodyid': schema.slug,
    }
    return eb_render(request, candidate_templates, context)
def place_detail_overview(request, *args, **kwargs):
    """
    Overview page for a place (block or location): for every
    non-special-report schema, the most recent news items (past 90 days)
    or upcoming events (next 60 days), plus per-schema Lookup charts.
    """
    context, response = _place_detail_normalize_url(request, *args, **kwargs)
    if response is not None:
        # URL normalization produced a redirect/error response; pass it on.
        return response
    schema_manager = get_schema_manager(request)
    context['breadcrumbs'] = breadcrumbs.place_detail_overview(context)
    # Keyed by schema id; insertion order (plural_name) drives display order.
    schema_list = SortedDict([(s.id, s) for s in schema_manager.filter(is_special_report=False).order_by('plural_name')])

    # We actually want two lists of schemas, since we care whether
    # they are news-like or future-event-like.
    import copy
    eventish_schema_list = copy.deepcopy(schema_list)
    newsish_schema_list = copy.deepcopy(schema_list)
    for s_id, schema in schema_list.items():
        if schema.is_event:
            del newsish_schema_list[s_id]
        else:
            del eventish_schema_list[s_id]

    filterchain = FilterChain(request=request, context=context)
    filterchain.add('location', context['place'])
    # Distinguish between past news and upcoming events,
    # with some preliminary date limiting too.
    filterchain_news = filterchain.copy()
    filterchain_news.add('date', today() - datetime.timedelta(days=90), today())
    filterchain_events = filterchain.copy()
    filterchain_events.add('date', today(), today() + datetime.timedelta(days=60))
    # Ordering by ID ensures consistency across page views.
    newsitem_qs = filterchain_news.apply().order_by('-item_date', '-id')
    events_qs = filterchain_events.apply().order_by('item_date', 'id')

    # Mapping of schema id -> [schemafields], for building Lookup charts.
    sf_dict = {}
    charted_lookups = SchemaField.objects.filter(
        is_lookup=True, is_charted=True, schema__is_public=True,
        schema__is_special_report=False)
    charted_lookups = charted_lookups.values('id', 'schema_id', 'pretty_name')
    for sf in charted_lookups.order_by('schema__id', 'display_order'):
        sf_dict.setdefault(sf['schema_id'], []).append(sf)

    # Now retrieve newsitems per schema.
    schema_groups, all_newsitems = [], []
    for schema in schema_list.values():
        if schema.id in newsish_schema_list:
            newsitems = newsitem_qs.filter(schema__id=schema.id)
        elif schema.id in eventish_schema_list:
            newsitems = events_qs.filter(schema__id=schema.id)
        else:
            raise RuntimeError("should never get here")
        # BUG FIX: this previously read ``s.number_in_overview`` -- ``s``
        # was a stale leak from the list comprehension above (the *last*
        # schema on Python 2; a NameError on Python 3), so every schema
        # was truncated by the wrong limit. Use this iteration's schema.
        newsitems = list(newsitems[:schema.number_in_overview])
        populate_schema(newsitems, schema)
        schema_groups.append({
            'schema': schema,
            'latest_newsitems': newsitems,
            'has_newsitems': bool(newsitems),
            'lookup_charts': sf_dict.get(schema.id),
        })
        all_newsitems.extend(newsitems)
    schema_list = schema_list.values()
    populate_attributes_if_needed(all_newsitems, schema_list)
    # Only charting-enabled schemas appear in the filter navigation.
    schema_list = [s for s in schema_list if s.allow_charting]
    context['schema_groups'] = schema_groups
    context['filtered_schema_list'] = schema_list
    context['bodyclass'] = 'place-detail-overview'
    if context['is_block']:
        context['bodyid'] = '%s-%s-%s' % (context['place'].street_slug,
                                          context['place'].number(),
                                          context['place'].dir_url_bit())
    else:
        context['bodyid'] = context['location'].slug
    response = eb_render(request, 'db/place_overview.html', context)
    for k, v in context['cookies_to_set'].items():
        response.set_cookie(k, v)
    return response
def schema_detail(request, slug):
    """
    Detail page for a single schema.

    For charting-enabled schemas this builds a date chart, top-lookup
    charts and per-location-type breakdowns; otherwise it just shows the
    30 most recent news items. Special-report schemas are delegated to
    schema_detail_special_report().
    """
    s = get_object_or_404(get_schema_manager(request), slug=slug)
    if s.is_special_report:
        return schema_detail_special_report(request, s)
    location_type_list = LocationType.objects.filter(is_significant=True).order_by('slug')
    if s.allow_charting:
        # For the date range, the end_date is the last non-future date
        # with at least one NewsItem.
        try:
            end_date = NewsItem.objects.filter(schema__id=s.id, item_date__lte=today()).values_list('item_date', flat=True).order_by('-item_date')[0]
        except IndexError:
            # No news items exist for this schema at all.
            latest_dates = ()
            date_chart = {}
            start_date = end_date = None
        else:
            start_date = end_date - constants.DAYS_AGGREGATE_TIMEDELTA
            date_chart = get_date_chart_agg_model([s], start_date, end_date, AggregateDay)[0]
            latest_dates = [date['date'] for date in date_chart['dates'] if date['count']]

        # Populate schemafield_list and lookup_list.
        schemafield_list = list(s.schemafield_set.filter(is_filter=True).order_by('display_order'))
        # XXX this duplicates part of schema_filter()
        LOOKUP_MIN_DISPLAYED = 7
        LOOKUP_BUFFER = 4
        lookup_list = []
        for sf in schemafield_list:
            if not (sf.is_charted and sf.is_lookup):
                continue
            # Fetch LOOKUP_BUFFER extra rows so we can tell whether a
            # "see more" link is needed without a second COUNT query.
            top_values = list(AggregateFieldLookup.objects.filter(schema_field__id=sf.id).select_related('lookup').order_by('-total')[:LOOKUP_MIN_DISPLAYED + LOOKUP_BUFFER])
            if len(top_values) == LOOKUP_MIN_DISPLAYED + LOOKUP_BUFFER:
                top_values = top_values[:LOOKUP_MIN_DISPLAYED]
                has_more = True
            else:
                has_more = False
            lookup_list.append({'sf': sf, 'top_values': top_values, 'has_more': has_more})

        # Populate location_chartfield_list.
        location_chartfield_list = []
        for lt in location_type_list:
            # Collect the locations in the location_type here so we don't have
            # to query them again in the select_related() below.
            # NOTE(review): ``locations`` is never read after this point in
            # the visible code -- looks like a leftover. Kept because building
            # it issues the same query the original issued; TODO confirm
            # nothing depends on it and remove.
            locations = dict((loc.id, loc) for loc in lt.location_set.iterator())
            ni_totals = AggregateLocation.objects.filter(
                schema__id=s.id,
                location_type__id=lt.id,
                location__is_public=True).select_related('location').order_by('-total')
            if ni_totals:  # This runs the query.
                # sum() replaces reduce(operator.add, ...): same result,
                # clearer, and doesn't rely on reduce being importable.
                known_count = sum(n.total for n in ni_totals)
                total_count = date_chart.get('total_count', 0)
                # Items not attributed to any location of this type.
                unknown_count = max(0, total_count - known_count)
                location_chartfield_list.append({'location_type': lt, 'locations': ni_totals[:9], 'unknown': unknown_count})
        ni_list = ()
    else:
        # Non-charting schema: skip all aggregates, show latest items only.
        date_chart = {}
        latest_dates = schemafield_list = lookup_list = location_chartfield_list = ()
        ni_list = list(NewsItem.objects.filter(schema__id=s.id).order_by('-item_date', '-id')[:30])
        populate_schema(ni_list, s)
        populate_attributes_if_needed(ni_list, [s])

    textsearch_sf_list = list(SchemaField.objects.filter(schema__id=s.id, is_searchable=True).order_by('display_order'))
    boolean_lookup_list = [sf for sf in SchemaField.objects.filter(schema__id=s.id, is_filter=True, is_lookup=False).order_by('display_order') if sf.is_type('bool')]
    # Prefer a per-schema template if one exists, else the generic one.
    templates_to_try = ('db/schema_detail/%s.html' % s.slug, 'db/schema_detail.html')
    # The HIDE_SCHEMA_INTRO_COOKIE_NAME cookie is a comma-separated list of
    # schema IDs for schemas whose intro text should *not* be displayed.
    hide_intro = str(s.id) in request.COOKIES.get(HIDE_SCHEMA_INTRO_COOKIE_NAME, '').split(',')
    context = {
        'schema': s,
        'schemafield_list': schemafield_list,
        'location_type_list': location_type_list,
        'date_chart': date_chart,
        'lookup_list': lookup_list,
        'location_chartfield_list': location_chartfield_list,
        'boolean_lookup_list': boolean_lookup_list,
        'search_list': textsearch_sf_list,
        'newsitem_list': ni_list,
        'latest_dates': latest_dates[-3:],
        'hide_intro': hide_intro,
        'hide_intro_cookie_name': HIDE_SCHEMA_INTRO_COOKIE_NAME,
        'start_date': s.min_date,
        'end_date': today(),
        'bodyclass': 'schema-detail',
        'bodyid': slug,
        'filters': FilterChain(schema=s),
    }
    context['breadcrumbs'] = breadcrumbs.schema_detail(context)
    return eb_render(request, templates_to_try, context)
idx_start = (page - 1) * constants.FILTER_PER_PAGE idx_end = page * constants.FILTER_PER_PAGE # Get one extra, so we can tell whether there's a next page. ni_list = list(qs[idx_start:idx_end+1]) if page > 1 and not ni_list: raise Http404('No objects on page %s' % page) if len(ni_list) > constants.FILTER_PER_PAGE: has_next = True ni_list = ni_list[:-1] else: has_next = False idx_end = idx_start + len(ni_list) has_previous = page > 1 populate_schema(ni_list, s) populate_attributes_if_needed(ni_list, [s]) # Need map parameters based on location/block, if there is one. loc_filter = filterchain.get('location') if loc_filter: context.update(get_place_info_for_request( request, place=loc_filter.location_object, block_radius=getattr(loc_filter, 'block_radius', None))) else: # Whole city map. context.update({ 'default_lon': settings.DEFAULT_MAP_CENTER_LON, 'default_lat': settings.DEFAULT_MAP_CENTER_LAT, 'default_zoom': settings.DEFAULT_MAP_ZOOM,