def test_only_old_emails(self, load_file):
    """only_email_matches keeps just the loaded rows whose email also
    appears in other_data."""
    network = ActionNetwork()
    network.load()
    previous_rows = [
        {
            'first_name': 'Henry',
            'last_name': 'Kissinger',
            'email': '*****@*****.**',
        },
        {
            'zip': '01234',
            'ward_precinct': 'MA1',
            'email': '*****@*****.**',
            'first_name': 'Libby',
            'last_name': 'Snowflake',
            'vanid': '999',
        },
    ]
    # Libby Snowflake exists in both the old data and the new, so the
    # intersection should contain only her row.
    combed = network.only_email_matches(other_data=previous_rows)
    self.assertEqual(
        [{
            'vanid': '999',
            'last_name': 'Snowflake',
            'zip': '01234',
            'event': 'ice cream social',
            'ward_precinct': 'MA1',
            'first_name': 'Libby',
            'email': '*****@*****.**',
        }],
        combed)
def test_minus_old_emails(self, load_file):
    """minus_email_matches drops loaded rows whose email appears in
    other_data, keeping only the genuinely new ones."""
    network = ActionNetwork()
    network.load()
    previous_rows = [
        {
            'first_name': 'Henry',
            'last_name': 'Kissinger',
            'email': '*****@*****.**',
        },
        {
            'zip': '01234',
            'ward_precinct': 'MA1',
            'email': '*****@*****.**',
            'first_name': 'Libby',
            'last_name': 'Snowflake',
            'vanid': '999',
        },
    ]
    # Libby Snowflake exists in the old data and the new, so she is
    # subtracted; only Agent Provocateur should remain.
    combed = network.minus_email_matches(other_data=previous_rows)
    self.assertEqual(
        [{
            'ward_precinct': 'MA2',
            'zip': '02345',
            'last_name': 'Provocateur',
            'vanid': '888',
            'event': 'boxing match',
            'email': '*****@*****.**',
            'first_name': 'Agent',
        }],
        combed)
def test_to_csv(self, load_file):
    """to_csv renders the loaded rows in upload-column order with CRLF
    line endings."""
    network = ActionNetwork()
    network.load()
    rendered = network.to_csv()
    # Header row followed by one CRLF-terminated line per loaded record.
    expected = (
        "first_name,last_name,email,zip_code,ORMA Supporter,VoterVANID,"
        "Ward/PrecinctName,Zip\r\n"
        "Libby,Snowflake,[email protected],01234,,999,MA1,\r\n"
        "Agent,Provocateur,[email protected],02345,,888,MA2,\r\n"
    )
    self.assertEqual(expected, rendered)
def test_map_row_zeropads_zip(self):
    """map_row left-pads a four-digit zip_code with a zero."""
    source = {
        'last_name': 'Snowflake',
        'zip_code': '1234',
        'first_name': 'Libby',
        'Ward/PrecinctName': 'MA1',
        'email': '*****@*****.**',
        'VoterVANID': '999',
        'Event': 'ice cream social',
    }
    mapped = ActionNetwork().map_row(row=source)
    # '1234' should come back as '01234' under the 'zip' key.
    self.assertEqual('0' + source['zip_code'], mapped['zip'])
def test_map_row_lowercases_event(self):
    """map_row normalizes the Event value to lower case."""
    source = {
        'last_name': 'Snowflake',
        'zip_code': '1234',
        'first_name': 'Libby',
        'Ward/PrecinctName': 'MA1',
        'email': '*****@*****.**',
        'VoterVANID': '999',
        'Event': 'ICE cream social',
    }
    mapped = ActionNetwork().map_row(row=source)
    # Mixed-case 'ICE cream social' should come back fully lower-cased.
    self.assertEqual(source['Event'].lower(), mapped['event'])
def test_load(self, load_file):
    """load populates .rows with one normalized dict per input record."""
    network = ActionNetwork()
    network.load()
    expected_rows = [
        {
            'zip': '01234',
            'ward_precinct': 'MA1',
            'email': '*****@*****.**',
            'first_name': 'Libby',
            'last_name': 'Snowflake',
            'vanid': '999',
            'event': 'ice cream social',
        },
        {
            'zip': '02345',
            'ward_precinct': 'MA2',
            'email': '*****@*****.**',
            'first_name': 'Agent',
            'last_name': 'Provocateur',
            'vanid': '888',
            'event': 'boxing match',
        },
    ]
    self.assertEqual(expected_rows, network.rows)
def test_map_row_results(self):
    """map_row renames upload-file keys to the internal schema and
    normalizes the zip and event values."""
    source = {
        'last_name': 'Snowflake',
        'zip_code': '1234',
        'first_name': 'Libby',
        'Ward/PrecinctName': 'MA1',
        'email': '*****@*****.**',
        'VoterVANID': '999',
        'Event': 'ice cream social',
    }
    mapped = ActionNetwork().map_row(row=source)
    self.assertEqual(
        {
            'zip': '01234',
            'last_name': 'Snowflake',
            'first_name': 'Libby',
            'email': '*****@*****.**',
            'ward_precinct': 'MA1',
            'vanid': '999',
            'event': 'ice cream social',
        },
        mapped)
""" generates count of records for each "Event" type """ from objects.ActionNetwork import ActionNetwork from collections import OrderedDict import operator a = ActionNetwork() a.load() events = {} for row in a.rows: event = str(row['event']).strip() if event not in events: events[event] = 0 events[event] += 1 events = OrderedDict( sorted(events.items(), key=operator.itemgetter(1), reverse=True)) [print(row, ':', count) for row, count in events.items()]
""" takes National data and Action Network data, figures out which rows in National data have an email address not present in existing Action Network data, and creates a CSV upload """ from objects.National import National from objects.ActionNetwork import ActionNetwork # load our two data sources a = ActionNetwork() a.load() n = National() n.load() # minus email matches national_minus_email_matches = n.minus_email_matches(other_data=a.rows) # create a csv upload for action network based removing those with matching email a_upload = ActionNetwork() a_upload.feed(rows=national_minus_email_matches) print(a_upload.to_csv())