def test_get_signups_for_sequence(self):
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'a1', 'q2': 'a2', 'q3': 'a3'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    with patch('signup.models.sequence_model.get_current_sequence_number', lambda: 2):
        signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1'})
        signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1'})
        signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1'})
        signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1'})
    self.assertEqual(len(signup_models.get_signups(1)), 2)
    self.assertEqual(len(signup_models.get_signups(2)), 4)

def test_get_signups_for_sequence(self):
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'a1', 'q2': 'a2', 'q3': 'a3'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    with patch('signup.models.sequence_model.get_current_sequence_number', lambda: 2):
        signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1'})
        signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1'})
        signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1'})
        signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1'})
    self.assertEqual(len(signup_models.get_signups(1)), 3)
    self.assertEqual(len(signup_models.get_signups(2)), 4)

def dump_data():
    sequences = [(4, 'sequence_4_campaign')]
    user_keys = ['email', 'date_created', 'group_id', 'group_size']
    event_keys = ['event', 'timestamp', 'tags', 'link']
    for sequence, campaign_id in sequences:
        with open('sequence_{0}_all.csv'.format(sequence), 'w') as csv_file:
            writer = unicodecsv.writer(csv_file)
            writer.writerow(user_keys + event_keys + ['control_group'])
            users = signup_api.get_signups(sequence)
            # annotate each signup with its group membership
            for user in users:
                user['group_size'] = 0
                user['group_id'] = None
                user_groups = groups_api.get_member_groups(user['email'])
                if len(user_groups) == 1:
                    user['group_id'] = user_groups[0]['address']
                    user['group_size'] = len(user_groups[0]['members'])
                user['control_group'] = user['group_id'] in settings.EXPERIMENTAL_GROUPS
            # fetch open/click events for each user and write one row per event
            for i, user in enumerate(users):
                print('getting data for user {0} of {1}: {2}'.format(i, len(users), user['email']))
                get_stats = lambda p: mailgun_api.get_campaign_events(
                    campaign_id, ['opened', 'clicked'], recipient=user['email'], page=p)
                user_stats = _fetch_all(get_stats)
                for event in user_stats:
                    if 'link' not in event:
                        event['link'] = ''
                    event['tags'] = str(event['tags'])
                    row = [user[key] for key in user_keys]
                    row += [event[key] for key in event_keys]
                    row += [user['control_group']]
                    writer.writerow(row)

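# Hedged sketch: _fetch_all is called by dump_data above but is not defined in
# this snippet. The helper below is an assumption about its behavior, not the
# project's real implementation: it keeps calling the supplied page-fetching
# function with increasing page numbers and concatenates the results until an
# empty page comes back. Adjust it if the real pagination contract of
# mailgun_api.get_campaign_events differs.
def _fetch_all(fetch_page):
    items = []
    page = 1
    while True:
        batch = fetch_page(page)
        if not batch:
            break
        items += batch
        page += 1
    return items
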
def prepare_groups(sequence, max_group_size=40):
    """ Do grouping for sequence """
    signups = signup_model.get_signups(sequence)
    tz_grouping = {}
    filter_group_preference = lambda su: su['questions']['groupRadios']
    # bucket signups that opted into a group by their current UTC offset
    for user_signup in filter(filter_group_preference, signups):
        timezone = user_signup['questions']['timezone']
        tz_offset = int(datetime.datetime.now(pytz.timezone(timezone)).strftime('%z'))
        user_signup['tz_offset'] = tz_offset
        if tz_offset in tz_grouping:
            tz_grouping[tz_offset].append(user_signup)
        else:
            tz_grouping[tz_offset] = [user_signup]
    # NOTE: assuming shuffle is random.shuffle, it shuffles in place and
    # returns None, so its return value must not be assigned back to the
    # bucket (doing so would replace every bucket with None).
    for timezone, group in tz_grouping.items():
        shuffle(group)
    merge = lambda x, y: x + y
    tz_sorted_signups = reduce(
        merge, [tz_grouping[timezone] for timezone in sorted(tz_grouping.keys())])
    # chunk the offset-sorted signups into groups of at most max_group_size
    groups = [tz_sorted_signups[i:i + max_group_size]
              for i in range(0, len(tz_sorted_signups), max_group_size)]
    return groups

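# Illustrative only: a self-contained sketch of the core of prepare_groups --
# sort opted-in signups by UTC offset, then slice them into groups of at most
# max_group_size. The sample offsets and emails below are made up; it just
# shows that members end up grouped with nearby offsets and no group exceeds
# the cap.
def demo_tz_chunking(max_group_size=3):
    sample = [
        {'email': 'u{0}'.format(n), 'tz_offset': offset}
        for n, offset in enumerate([530, -500, 0, 0, -500, 100, 0, 530])
    ]
    sample.sort(key=lambda su: su['tz_offset'])
    return [sample[i:i + max_group_size]
            for i in range(0, len(sample), max_group_size)]
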
def send_user_link_to_whole_sequence(sequence):
    all_signups = signup_api.get_signups(sequence)
    context = {
        'mooc_title': settings.MOOC_TITLE,
        'mooc_domain': settings.MOOC_DOMAIN,
        'sequence': sequence
    }
    subject = render_to_string('classphoto/emails/sequence_links-subject.txt', context).strip()
    text_body = render_to_string('classphoto/emails/sequence_links.txt', context).strip()
    html_body = render_to_string('classphoto/emails/sequence_links.html', context).strip()

    # send in batches of 1000 recipients
    for i in range(0, len(all_signups), 1000):
        signups = all_signups[i:i + 1000]
        emails = [signup['email'] for signup in signups]
        recipient_variables = {
            signup['email']: {'key': signup['key']} for signup in signups
        }
        mailgun.api.send_mass_email(
            emails,
            settings.DEFAULT_FROM_EMAIL,
            subject,
            text_body,
            html_body,
            tags=['classphotolink'],
            campaign_id=sequence_api.sequence_campaign(sequence),
            recipient_variables=json.dumps(recipient_variables)
        )

def test_get_signups(self):
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'a1', 'q2': 'a2', 'q3': 'a3'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    self.assertEqual(len(signup_models.get_signups(1)), 4)

def test_prepare_groups(self):
    for signup in randata.random_data(1000):
        signup_model.create_or_update_signup(**signup)
    groups = models.prepare_groups(1)
    # wrap filter in list() so len() works on Python 3 as well
    grouped_users = list(filter(
        lambda x: x['questions']['groupRadios'],
        signup_model.get_signups(1)
    ))
    self.assertEqual(len(groups), math.ceil(len(grouped_users) / 40.0))

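# Hedged sketch: randata.random_data is used by the test above but not shown
# here. The generator below is an assumption about the shape it produces --
# dicts whose keys match create_or_update_signup's parameters (guessed here as
# 'email' and 'questions', with 'groupRadios' and 'timezone' answers, based on
# how prepare_groups reads signups). Treat it as illustrative, not as the
# project's real fixture helper.
import random

import pytz


def random_data(count):
    for n in range(count):
        yield {
            'email': 'user{0}@example.com'.format(n),
            'questions': {
                'groupRadios': random.choice([True, False]),
                'timezone': random.choice(pytz.common_timezones),
            },
        }
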
def test_delete_signup(self):
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'a1', 'q2': 'a2', 'q3': 'a3'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    self.assertEqual(len(signup_models.get_signups()), 4)

    new_signups = signup_models.get_new_signups()
    self.assertIn('*****@*****.**', [s['email'] for s in new_signups])

    signup_models.delete_signup('*****@*****.**')
    signups = signup_models.get_signups()
    self.assertEqual(len(signups), 3)
    self.assertNotIn('*****@*****.**', [s['email'] for s in signups])

    new_signups = signup_models.get_new_signups()
    self.assertNotIn('*****@*****.**', [s['email'] for s in new_signups])

    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    self.assertEqual(len(signup_models.get_signups()), 4)
    new_signups = signup_models.get_new_signups()
    self.assertIn('*****@*****.**', [s['email'] for s in new_signups])

def count(request, sequence):
    sequence = int(sequence)
    context = {
        'signup_count': str(len(signup_model.get_signups(sequence))),
        'sequence': sequence
    }
    return render_to_response(
        'signup/count.html', context,
        context_instance=RequestContext(request)
    )

def count(request, scope):
    context = {
        'signup_count': str(len(signup_model.get_signups(scope))),
        'scope': scope
    }
    return render_to_response(
        'signup/count.html', context,
        context_instance=RequestContext(request)
    )

def signup_count(request, sequence):
    sequence = int(sequence)
    context = {
        'signup_count': str(len(signup_api.get_signups(sequence))),
        'sequence': sequence
    }
    return render_to_response(
        'stats/signup_count.html', context,
        context_instance=RequestContext(request)
    )

def test_delete_signup(self):
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'a1', 'q2': 'a2', 'q3': 'a3'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    self.assertEqual(len(signup_models.get_signups(1)), 4)

    new_signups = signup_models.get_new_signups()
    self.assertIn('*****@*****.**', [s['email'] for s in new_signups])

    signup_models.delete_signup('*****@*****.**', 1)
    signups = signup_models.get_signups(1)
    self.assertEqual(len(signups), 3)
    self.assertNotIn('*****@*****.**', [s['email'] for s in signups])

    new_signups = signup_models.get_new_signups()
    self.assertNotIn('*****@*****.**', [s['email'] for s in new_signups])

    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    self.assertEqual(len(signup_models.get_signups(1)), 4)
    new_signups = signup_models.get_new_signups()
    self.assertIn('*****@*****.**', [s['email'] for s in new_signups])

def do_grouping(sequence):
    groups = prepare_groups(sequence)
    create_groups(groups, sequence, 'Group {0}'.format(sequence))

    # handle signups that opted out of being grouped
    signups = signup_model.get_signups(sequence)
    filter_group_preference = lambda su: su['questions']['groupRadios'] == False
    signups = filter(filter_group_preference, signups)
    group_address = 'ungrouped-s-{0}@{1}'.format(sequence, settings.EMAIL_DOMAIN)
    group_name = 'Ungrouped S{0}'.format(sequence)
    ungroup = group_model.create_group(group_address, group_name, sequence)
    for signup in signups:
        group_model.add_group_member(ungroup['uri'], signup['email'])

    # sync groups with mailgun
    for group in group_model.get_groups(sequence):
        group_model.add_group_member(group['uri'], settings.DEFAULT_FROM_EMAIL)
        group_model.sync_group_with_mailgun(group['uri'])

    # update access to group for ungrouped users
    mailgun_api.update_list(ungroup['address'], access_level='readonly')

def test_get_signups(self):
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'a1', 'q2': 'a2', 'q3': 'a3'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    signup_models.create_or_update_signup('*****@*****.**', {'q1': 'ar1', 'q2': 'ar2'})
    self.assertEqual(len(signup_models.get_signups()), 4)

def export(request, sequence):
    sequence = int(sequence)
    signups = signup_model.get_signups(sequence)
    return http.HttpResponse(
        json.dumps(signups, cls=DjangoJSONEncoder),
        content_type='application/json'
    )