import datetime

from django.shortcuts import render_to_response
from django.template import RequestContext

# DistrictWeekly and the get_week_number / get_week_start / get_week_end
# helpers are imported from elsewhere in this project; their module paths are
# not shown in this excerpt.


def top_current_races(request):
    """List the three districts with the most outside spending in the most
    recently completed cycle week."""
    # Summarize the previous (most recently completed) cycle week.
    week_number = get_week_number(datetime.date.today()) - 1
    week_start = get_week_start(int(week_number))
    week_start_formatted = week_start.strftime('%m/%d')
    week_end = get_week_end(int(week_number))
    week_end_formatted = week_end.strftime('%m/%d, %Y')
    previous_week_number = int(week_number) - 1
    following_week_number = int(week_number) + 1
    # The charted period starts two weeks before the summarized week.
    period_start = week_start - datetime.timedelta(days=14)
    weeklysummaries = DistrictWeekly.objects.filter(
        cycle_week_number=week_number,
        outside_spending__gt=1000
    ).order_by('-outside_spending')[:3]
    title = "Top races by outside spending, %s-%s" % (
        week_start_formatted, week_end_formatted)
    district_ids = weeklysummaries.values("district__pk")
    district_id_list = [str(x['district__pk']) for x in district_ids]
    district_list = ",".join(district_id_list)
    # Ask the API for the three-week window ending with the summarized week.
    data_url = ("http://realtime.influenceexplorer.com/api/districts-weekly/"
                "?week_start=%s&week_end=%s&districts=%s&format=json" % (
                    int(week_number) - 2, week_number, district_list))
    return render_to_response('datapages/top_races.html', {
        'previous_week_number': previous_week_number,
        'following_week_number': following_week_number,
        'title': title,
        'period_start': period_start,
        'week_start': week_start,
        'week_end': week_end,
        'weeklysummaries': weeklysummaries,
        'week_number': week_number,
        'data_url': data_url,
    }, context_instance=RequestContext(request))
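# The views in this module depend on cycle-week helpers defined elsewhere in
# the project. The commented sketch below only illustrates their likely
# contract, assuming weeks are counted from a fixed cycle start date;
# CYCLE_START and these definitions are assumptions, not the project's actual
# implementation.
#
#     CYCLE_START = datetime.date(2011, 1, 1)  # hypothetical cycle start
#
#     def get_week_number(date):
#         """Whole weeks elapsed since CYCLE_START."""
#         return (date - CYCLE_START).days // 7
#
#     def get_week_start(week_number):
#         """First day of the given cycle week."""
#         return CYCLE_START + datetime.timedelta(weeks=week_number)
#
#     def get_week_end(week_number):
#         """Last day of the given cycle week."""
#         return get_week_start(week_number) + datetime.timedelta(days=6)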
def summarize_week(week_number, and_condition, cursor):
    """Sum itemized individual contributions (FEC line SA11AI) to super PACs
    for the given cycle week, with an optional extra WHERE-clause fragment."""
    week_start = get_week_start(week_number).strftime("%Y-%m-%d")
    week_end = get_week_end(week_number).strftime("%Y-%m-%d")
    # and_condition is interpolated directly into the SQL, so it must be a
    # trusted fragment built by the caller, never user input.
    query = ("select sum(contribution_amount) from superpac_donors "
             "where contribution_date_formatted >= '%s' "
             "and contribution_date_formatted <= '%s' "
             "and upper(line_type) = 'SA11AI' %s"
             % (week_start, week_end, and_condition))
    print "query is: %s" % query
    cursor.execute(query)
    row = cursor.fetchone()
    value = row[0]
    return value or 0
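# summarize_week() builds its SQL entirely by string interpolation, so the
# caller must guarantee that and_condition and the computed dates are trusted.
# A safer variant is sketched below, assuming a DB-API cursor that accepts %s
# bind parameters (as Django's cursors do); summarize_week_param is a
# hypothetical name, not part of the original code.
def summarize_week_param(week_number, and_condition, cursor):
    """Like summarize_week, but passes the date bounds as bind parameters.
    and_condition is still concatenated and must come from trusted code."""
    week_start = get_week_start(week_number).strftime("%Y-%m-%d")
    week_end = get_week_end(week_number).strftime("%Y-%m-%d")
    query = ("select sum(contribution_amount) from superpac_donors "
             "where contribution_date_formatted >= %s "
             "and contribution_date_formatted <= %s "
             "and upper(line_type) = 'SA11AI' " + and_condition)
    cursor.execute(query, [week_start, week_end])
    row = cursor.fetchone()
    return row[0] or 0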