def create_welcome_email(user, request):
    """Gather the relevant details about *user* and create an unsent
    WelcomeEmail row.

    Returns the created WelcomeEmail, or None when the user has no
    KungfuPerson profile to build the email from.
    """
    subject = u"Welcome to %s" % settings.PROJECT_NAME
    try:
        person = user.get_profile()
    except KungfuPerson.DoesNotExist:
        # No profile means there is nothing useful to welcome them with.
        return None
    alu = AutoLoginKey.get_or_create(user)
    # Template context.  Every key ending in '_url' is post-processed
    # below: made absolute and given a '<key>_alu' auto-login variant.
    data = {
        'user': user,
        'request': request,
        'subject': subject,
        'person': person,
        'alu': alu,
        'profile_url': reverse('person.view', args=(user.username, )),
        'upload_photo_url': reverse('upload_profile_photo',
                                    args=(user.username, )),
        'change_password_url': reverse("edit_password",
                                       args=(user.username, )),
        'edit_style_url': reverse("edit_style", args=(user.username, )),
        'edit_club_url': reverse("edit_club", args=(user.username, )),
        'edit_profile_url': reverse("edit_profile", args=(user.username, )),
    }
    base_url = 'http://%s' % RequestSite(request).domain

    def aluify_url(url):
        # Append the auto-login key, respecting any existing query string.
        joiner = '&' if '?' in url else '?'
        return url + joiner + 'alu=%s' % alu.uuid

    for key in list(data.keys()):
        if not key.endswith('_url'):
            continue
        url = data[key]
        if url.startswith('/'):
            url = base_url + url
        data[key] = url
        data[key + '_alu'] = aluify_url(url)

    # Render the email body and inline its CSS so it survives mail clients.
    response = render(request, 'welcome-email.html', data)
    html = Premailer(
        response.content,
        base_url=base_url,
        keep_style_tags=False,
    ).transform()
    return WelcomeEmail.objects.create(
        user=user,
        subject=subject,
        body=html,
    )
def create_welcome_email(user, request):
    """Fish out the relevant information about *user* and create an
    unsent WelcomeEmail; returns None if the user has no profile.

    NOTE(review): duplicate definition -- an equivalent
    create_welcome_email appears earlier in this file; this later one
    shadows it at import time.
    """
    subject = u"Welcome to %s" % settings.PROJECT_NAME
    try:
        person = user.get_profile()
    except KungfuPerson.DoesNotExist:
        return None
    alu = AutoLoginKey.get_or_create(user)
    # These locals are deliberately named '*_url': the loop further down
    # picks them out of the locals() snapshot by that suffix.
    profile_url = reverse('person.view', args=(user.username,))
    upload_photo_url = reverse('upload_profile_photo', args=(user.username,))
    change_password_url = reverse("edit_password", args=(user.username,))
    edit_style_url = reverse("edit_style", args=(user.username,))
    edit_club_url = reverse("edit_club", args=(user.username,))
    edit_profile_url = reverse("edit_profile", args=(user.username,))
    # Snapshot of everything bound so far becomes the template context;
    # names assigned after this point are NOT part of it.
    data = locals()
    domain = RequestSite(request).domain
    base_url = 'http://%s' % domain

    def aluify_url(url):
        # Attach the auto-login key as a query-string parameter.
        if '?' in url:
            return url + '&alu=%s' % alu.uuid
        return url + '?alu=%s' % alu.uuid

    # Make every *_url absolute and add a '*_url_alu' auto-login variant.
    for name in list(data.keys()):
        if name.endswith('_url'):
            absolute = data[name]
            if absolute.startswith('/'):
                absolute = base_url + absolute
            data[name] = absolute
            data[name + '_alu'] = aluify_url(absolute)

    response = render(request, 'welcome-email.html', data)
    html = Premailer(response.content,
                     base_url=base_url,
                     keep_style_tags=False).transform()
    return WelcomeEmail.objects.create(user=user,
                                       subject=subject,
                                       body=html)
def list_new_people_html(request):
    """Render the '_list-new-people.html' partial listing people who
    joined within a timestamp window.

    Expects GET parameters ``from`` and ``to``: UTC timestamps in
    milliseconds (presumably produced by JavaScript Date.getTime() --
    confirm against the calling page).  Raises Http404 when either is
    missing, non-numeric, or ``from`` > ``to``.
    """
    try:
        # float(None) raises TypeError when a parameter is missing;
        # non-numeric junk raises ValueError.
        from_timestamp = float(request.GET.get('from'))
        to_timestamp = float(request.GET.get('to'))
        if from_timestamp > to_timestamp:
            # Http404 is not caught by the except clause below.
            raise Http404("to timestamp less than from")
    except (ValueError, TypeError):
        raise Http404("Invalid timestamps")
    # Milliseconds -> seconds before converting to datetimes.
    from_datetime = datetime.datetime.utcfromtimestamp(from_timestamp / 1000)
    to_datetime = datetime.datetime.utcfromtimestamp(to_timestamp / 1000)
    # Fix: removed leftover debug 'print' statements, which wrote to
    # stdout on every request and issued an extra COUNT query.
    people = (KungfuPerson.objects
              .filter(user__date_joined__gte=from_datetime,
                      user__date_joined__lt=to_datetime)
              .select_related()
              .order_by('user__date_joined'))
    return render(request, '_list-new-people.html', locals())
def list_new_people_html(request): try: # UTC timestamps from_timestamp = float(request.GET.get('from')) to_timestamp = float(request.GET.get('to')) if from_timestamp > to_timestamp: raise Http404("to timestamp less than from") except (ValueError, TypeError): raise Http404("Invalid timestamps") from_datetime = datetime.datetime.utcfromtimestamp(from_timestamp / 1000) to_datetime = datetime.datetime.utcfromtimestamp(to_timestamp / 1000) print from_datetime, to_datetime people = KungfuPerson.objects.filter(user__date_joined__gte=from_datetime, user__date_joined__lt=to_datetime) print people.count() #print people.query.as_sql() people = people.select_related().order_by('user__date_joined') return render(request, '_list-new-people.html', locals())
def competitions(request):
    """Render the competitions page from the shared competitions tables."""
    context = dict(_get_competitions_tables())
    return render(request, 'competitions.html', context)
def new_people(request, period='monthly'):
    """Page showing a line graph of new signups, cumulative and per bucket.

    *period* is ``'weekly'`` or ``'monthly'`` (default).  For every bucket
    between the first and last join dates the view counts non-staff users
    who joined in it, keeps a running total, and hands both series to the
    template as JSON (the template reads them from locals()).
    """
    weekly = period == 'weekly'

    def _find_week_min_max(date):
        # Given a date anywhere in the middle of the week, return that
        # week's Monday at 00:00:00 and the Monday exactly 7 days later.
        # Fix: weekday() == 0 is Monday and is locale-independent; the
        # previous strftime('%A') == 'Monday' check never matches under a
        # non-English locale and would loop forever.
        start = datetime.datetime(date.year, date.month, date.day, 0, 0, 0)
        start -= datetime.timedelta(days=start.weekday())
        return start, start + datetime.timedelta(days=7)

    first_date = User.objects.all().order_by('date_joined')[0].date_joined
    last_date = User.objects.all().order_by('-date_joined')[0].date_joined
    buckets = dict()
    date = first_date
    qs = User.objects.filter(is_staff=False)
    total_count = 0
    # Walk day by day; each bucket is filled the first time any of its
    # days is seen.  (Fix: dropped the unused 'count_previous' local.)
    while date < last_date:
        if weekly:
            key = date.strftime('%Y%W')
        else:
            # default is monthly
            key = date.strftime('%Y%m')
        if key not in buckets:
            if weekly:
                week_min, next_week = _find_week_min_max(date)
                this_qs = qs.filter(date_joined__gte=week_min,
                                    date_joined__lt=next_week)
                date_hourless = week_min
            else:
                # Pin the monthly data point to mid-month.
                date_hourless = datetime.date(date.year, date.month, 15)
                this_qs = qs.filter(date_joined__year=date.year,
                                    date_joined__month=date.month)
            count = this_qs.count()
            total_count += count
            buckets[key] = {
                'year': date.year,
                'month': date.month,
                'month_name': date.strftime('%B'),
                'date': date,
                'count': count,
                'total_count': total_count,
                # Milliseconds since epoch, as the JS plotting code
                # presumably expects -- confirm against the template.
                'timestamp': int(mktime(date_hourless.timetuple())) * 1000,
            }
            if weekly:
                buckets[key]['week_name'] = date.strftime('%W')
        date = date + datetime.timedelta(days=1)
    # Flatten to a date-ordered list.  Fix: key-based sort replaces the
    # Python-2-only cmp-style buckets.sort(lambda x, y: cmp(...)); the
    # resulting order is identical.
    buckets = sorted(buckets.values(), key=lambda b: b['date'])
    buckets_timestamps = [[x['timestamp'], x['count']] for x in buckets]
    buckets_timestamps_json = simplejson.dumps(buckets_timestamps)
    buckets_cumulative_timestamps = [[x['timestamp'], x['total_count']]
                                     for x in buckets]
    buckets_cumulative_timestamps_json = simplejson.dumps(
        buckets_cumulative_timestamps)
    return render(request, 'stats-new-people.html', locals())
def index(request):
    """List all available stats pages."""
    # The template only needs the request itself; pass it explicitly
    # instead of via locals().
    return render(request, 'stats-index.html', {'request': request})
def new_people(request, period='monthly'):
    """Page showing a line graph of new signups, cumulative or per bucket.

    NOTE(review): duplicate definition -- a nearly identical new_people
    appears earlier in this file; this later one shadows it.  All locals
    are handed to the template via locals(), so the local names below are
    part of the template contract and must not be renamed.
    """
    weekly = period == 'weekly'

    def _find_week_min_max(date):
        # Walk back from *date* to that week's Monday at 00:00:00 and
        # also return the Monday exactly 7 days later.
        # NOTE(review): the strftime('%A') comparison assumes an English
        # locale; under any other locale 'Monday' never matches -- verify.
        search_date = datetime.datetime(date.year, date.month, date.day,
                                        0, 0, 0)
        while search_date.strftime('%A') != 'Monday':
            search_date = search_date - datetime.timedelta(days=1)
        return search_date, search_date + datetime.timedelta(days=7)

    first_date = User.objects.all().order_by('date_joined')[0].date_joined
    last_date = User.objects.all().order_by('-date_joined')[0].date_joined
    buckets = dict()
    date = first_date
    qs = User.objects.filter(is_staff=False)
    count_previous = 0  # NOTE(review): never updated or read afterwards
    total_count = 0
    # One pass, day by day; a bucket is computed the first time any day
    # belonging to it is encountered.
    while date < last_date:
        if weekly:
            key = date.strftime('%Y%W')
        else:
            # default is monthly
            key = date.strftime('%Y%m')
        if key not in buckets:
            if weekly:
                week_min, next_week = _find_week_min_max(date)
                this_qs = qs.filter(date_joined__gte=week_min,
                                    date_joined__lt=next_week)
                date_hourless = week_min
            else:
                date_hourless = datetime.date(date.year, date.month, 15)
                this_qs = qs.filter(date_joined__year=date.year,
                                    date_joined__month=date.month)
            count = this_qs.count()
            total_count += count
            buckets[key] = {
                'year': date.year,
                'month': date.month,
                'month_name': date.strftime('%B'),
                'date': date,
                'count': count,
                'total_count': total_count,
                'timestamp': int(mktime(date_hourless.timetuple())) * 1000,
            }
            if weekly:
                buckets[key]['week_name'] = date.strftime('%W')
        date = date + datetime.timedelta(days=1)
    # Flatten to a list and order by date (Python 2 cmp-style sort kept
    # to preserve behaviour byte-for-byte).
    buckets = [v for v in buckets.values()]
    buckets.sort(lambda x, y: cmp(x['date'], y['date']))
    buckets_timestamps = [[x['timestamp'], x['count']] for x in buckets]
    buckets_timestamps_json = simplejson.dumps(buckets_timestamps)
    buckets_cumulative_timestamps = [[x['timestamp'], x['total_count']]
                                     for x in buckets]
    buckets_cumulative_timestamps_json = simplejson.dumps(
        buckets_cumulative_timestamps)
    return render(request, 'stats-new-people.html', locals())