def get_existing_popit_person(vi_person_id):
    """Return the PopItPerson previously imported with this ID, or None.

    The person is located by searching for an identifier with scheme
    'import-id' and the given value.  Raises an exception if more than
    one person matches.
    """
    from candidates.models import PopItPerson
    from candidates.popit import get_search_url
    # Look the person up by the identifier they were originally
    # imported with:
    query = (
        'identifiers.identifier:"{id}" AND '
        'identifiers.scheme:"{scheme}"'
    ).format(id=vi_person_id, scheme='import-id')
    url = get_search_url('persons', query, embed='membership.organization')
    response = requests.get(url).json()
    n_matches = response['total']
    if n_matches > 1:
        raise Exception("Multiple matches for CI ID {0}".format(vi_person_id))
    if n_matches == 0:
        return None
    # Exactly one result found:
    return PopItPerson.create_from_dict(response['result'][0])
def handle(self, *args, **options):
    """Management command: append an identifier to a PopIt person.

    args must be exactly (person_id, scheme, identifier).  Saves the
    person back to PopIt and invalidates their cache entries.
    """
    self.verbosity = int(options.get('verbosity', 1))
    api = create_popit_api_object()
    if len(args) != 3:
        raise CommandError("You must provide all three arguments")
    person_id, scheme, identifier = args
    person = PopItPerson.create_from_popit(api, person_id)
    person.identifiers.append(
        {
            'scheme': scheme,
            'identifier': identifier,
        }
    )
    person.save_to_popit(api)
    person.invalidate_cache_entries()
    # FIXME: this should create a new version in the versions
    # array too, otherwise you manually have to edit on YourNextMP
    # too to create a new version with a change message.
    print "Successfully updated {0}".format(person_id)
def handle(self, **options):
    """Fix image and date fields on every organization and person in PopIt.

    Iterates over both collections page by page, normalises each item's
    images and dates, PUTs the item back, and invalidates the cache for
    any person that was updated.  Exits with status 1 on a client error.
    """
    for collection in ('organization', 'person'):
        # The slumber API attribute uses the plural collection name:
        api_collection = getattr(self.api, collection + 's')
        message = "{titled} {base_url}{plural}/{id}"
        for item in popit_unwrap_pagination(
            api_collection,
            embed='',
            per_page=100
        ):
            print message.format(
                titled=collection.title(),
                base_url=get_base_url(),
                plural=(collection + "s"),
                id=item['id']
            )
            for image in item.get('images', []):
                print " Image with URL:", image['url']
                fix_image(image)
                # Some images have an empty 'created' field, which
                # causes an Elasticsearch indexing error, so change it
                # to null if that's the case:
                if not image.get('created'):
                    image['created'] = None
            fix_dates(item)
            try:
                api_collection(item['id']).put(item)
            except HttpClientError as e:
                print "HttpClientError", e.content
                sys.exit(1)
            # If this is a person, make sure that the
            # corresponding cache entries are invalidated:
            if collection == 'person':
                person = PopItPerson.create_from_dict(item)
                person.invalidate_cache_entries()
def get_context_data(self, **kwargs):
    """Add the person whose photo was just uploaded to the context."""
    context = super(PhotoUploadSuccess, self).get_context_data(**kwargs)
    person = PopItPerson.create_from_popit(
        self.api, kwargs['popit_person_id']
    )
    context['person'] = person
    return context
def handle(self, *args, **options):
    """Management command: add an alternative name to a PopIt person.

    args must be exactly (person_id, other_name); note, start_date and
    end_date are taken from the command options.
    """
    self.verbosity = int(options.get('verbosity', 1))
    api = create_popit_api_object()
    if len(args) != 2:
        raise CommandError("You must provide all two arguments")
    person_id, other_name = args
    person = PopItPerson.create_from_popit(api, person_id)
    person.other_names.append(
        {
            'name': other_name,
            'note': options['note'],
            'start_date': options['start_date'],
            'end_date': options['end_date']
        }
    )
    person.save_to_popit(api)
    person.invalidate_cache_entries()
    # FIXME: this should create a new version in the versions
    # array too, otherwise you manually have to edit on the
    # YourNextRepresentative site too to create a new version with
    # a change message.
    print "Successfully updated {0}".format(person_id)
def handle(self, **options):
    """Export all candidates standing in options['year'] as CSV.

    Writes to stdout when no output filename (or '-') is given;
    otherwise writes to a temporary file in the destination directory
    and atomically renames it into place.
    """
    all_people = []
    for person_dict in popit_unwrap_pagination(
        self.api.persons,
        embed="membership.organization",
        per_page=100,
    ):
        if person_dict.get('standing_in') \
           and person_dict['standing_in'].get(options['year']):
            person = PopItPerson.create_from_dict(person_dict)
            all_people.append(person.as_dict(year=options['year']))
    csv = list_to_csv(all_people)
    # Write to stdout if no output filename is specified, or if it
    # is '-'
    if options['output_filename'] in (None, '-'):
        # BUG FIX: the original used "with sys.stdout as f:", which
        # closes sys.stdout when the block exits, breaking any later
        # writes in the same process.  Write directly instead.
        sys.stdout.write(csv)
    else:
        # Otherwise write to a temporary file and atomically
        # rename into place:
        ntf = NamedTemporaryFile(
            delete=False,
            dir=dirname(options['output_filename'])
        )
        ntf.write(csv)
        # Close (and therefore flush) the file before chmod/rename so
        # the renamed file is guaranteed to contain all the data:
        ntf.close()
        chmod(ntf.name, 0o644)
        rename(ntf.name, options['output_filename'])
def handle(self, **options):
    """Strip bogus date fields from every person (and their images) in PopIt."""
    for person_data in popit_unwrap_pagination(
        self.api.persons,
        embed='',
        per_page=100
    ):
        msg = "Person {0}persons/{1}"
        print msg.format(get_base_url(), person_data['id'])
        # These date fields don't belong on a person record:
        strip_bogus_fields(
            person_data,
            [
                'founding_date',
                'dissolution_date',
                'start_date',
                'end_date'
            ]
        )
        # Image sub-documents accumulate an even wider set of
        # misplaced date fields:
        for image in person_data.get('images', []):
            strip_bogus_fields(
                image,
                [
                    'birth_date',
                    'death_date',
                    'founding_date',
                    'dissolution_date',
                    'start_date',
                    'end_date'
                ]
            )
        person = PopItPerson.create_from_dict(person_data)
        person.save_to_popit(self.api)
        person.invalidate_cache_entries()
def update_popit_person(self, popit_person_id, ppc_data, image_filename):
    """Merge official PPC data into an existing PopIt person.

    Empty values in the new data are dropped so they can't blank out
    existing fields; non-empty conflicting values are only applied when
    they didn't already appear in a previous version (otherwise the
    current value is kept).  Records a new version, saves to PopIt,
    optionally uploads an image, and returns the person's ID.
    """
    from candidates.models import PopItPerson
    from ..images import image_uploaded_already
    # Get the existing data first:
    person_data, _ = self.get_person(popit_person_id)
    previous_versions = person_data.pop('versions')
    new_person_data = self.get_person_data_from_ppc(ppc_data)
    # Remove any empty keys, we don't want to overwrite exiting
    # data with nothing:
    keys = new_person_data.keys()
    warnings = []
    for key in keys:
        if not new_person_data[key]:
            del new_person_data[key]
        # Also make sure that we don't overwrite any existing
        # fields that are filled in with different values:
        if key not in ('standing_in', 'party_memberships'):
            new_person_data_value = new_person_data.get(key)
            person_data_value = person_data.get(key)
            if person_data_value and new_person_data_value and new_person_data_value != person_data_value:
                if key_value_appeared_in_previous_version(
                    key, new_person_data_value, previous_versions
                ):
                    # The incoming value was already tried (and
                    # presumably corrected) before, so keep the
                    # current value instead:
                    warning_message = "[{0}] it looks as if a previous "
                    warning_message += "version had {1}, so not "
                    warning_message += "overwriting the current value {2}"
                    warnings.append(warning_message.format(
                        key, new_person_data_value, person_data_value
                    ))
                    del new_person_data[key]
                else:
                    warnings.append("[{0}] replacing {1}".format(key, person_data_value))
                    warnings.append("[{0}] with new value {1}".format(key, new_person_data_value))
    if warnings:
        print("Warnings for person/{0} {1}".format(
            popit_person_id, person_data['name']
        ).encode('utf-8'))
        for warning in warnings:
            print(" ...", warning.encode('utf-8'))
    merged_person_data = merge_person_data(person_data, new_person_data)
    change_metadata = get_change_metadata(
        None,
        'Updated candidate from official PPC data ({0})'.format(ppc_data['party_slug']),
    )
    person = PopItPerson.create_from_reduced_json(merged_person_data)
    person.record_version(change_metadata)
    person_id = person.save_to_popit(self.api)
    if image_filename:
        if image_uploaded_already(self.api.persons, person_id, image_filename):
            print("That image has already been uploaded!")
        else:
            print("Uploading image...")
            self.upload_person_image(person_id, image_filename, ppc_data['image_url'])
    person.invalidate_cache_entries()
    return person_id
def handle(self, *args, **options):
    """Copy parlparse IDs from a TheyWorkForYou people.json into YourNextMP.

    Expects one argument: the URL of a people.json file.  For each TWFY
    person with a 'yournextmp' identifier, sets the matching
    'uk.org.publicwhip' identifier on the YNMP person, warning (and
    skipping) when the YNMP person already has a different parlparse ID.
    """
    from candidates.models import PopItPerson
    from candidates.popit import create_popit_api_object
    self.verbosity = int(options.get('verbosity', 1))
    api = create_popit_api_object()
    if len(args) != 1:
        raise CommandError("You must provide a person.js URL")
    person_js_url = args[0]
    people_data = requests.get(person_js_url).json()
    for person_data in people_data['persons']:
        twfy_person = PopItPerson.create_from_dict(person_data)
        ynmp_id = twfy_person.get_identifier('yournextmp')
        if not ynmp_id:
            # This TWFY person has never been linked to YNMP:
            continue
        parlparse_id = twfy_person.id
        ynmp_person = PopItPerson.create_from_popit(api, ynmp_id)
        existing_parlparse_id = ynmp_person.get_identifier('uk.org.publicwhip')
        if existing_parlparse_id:
            if existing_parlparse_id == parlparse_id:
                # That's fine, there's already the right parlparse ID
                pass
            else:
                # Otherwise there's a mismatch, which needs investigation
                msg = "Warning: parlparse ID mismatch between YNMP {0} "
                msg += "and TWFY {1} for YNMP person {2}\n"
                self.stderr.write(
                    msg.format(
                        existing_parlparse_id,
                        parlparse_id,
                        ynmp_id,
                    )
                )
            # Either way, nothing to update for this person:
            continue
        msg = "Updating the YourNextMP person {0} with parlparse_id {1}\n"
        self.stdout.write(msg.format(ynmp_id, parlparse_id))
        ynmp_person.set_identifier(
            'uk.org.publicwhip',
            parlparse_id,
        )
        change_metadata = get_change_metadata(
            None, "Fetched a new parlparse ID"
        )
        ynmp_person.record_version(change_metadata)
        ynmp_person.save_to_popit(api)
        ynmp_person.invalidate_cache_entries()
def get_context_data(self, **kwargs):
    """Build the template context for reviewing one queued image.

    Adds the queued image, the PopIt person it belongs to, guessed crop
    bounds, a pre-filled review form, the sanitised justification text
    and Google image-search helper URLs.
    """
    context = super(PhotoReview, self).get_context_data(**kwargs)
    self.queued_image = get_object_or_404(
        QueuedImage,
        pk=kwargs['queued_image_id']
    )
    context['queued_image'] = self.queued_image
    person = PopItPerson.create_from_popit(
        self.api,
        self.queued_image.popit_person_id,
    )
    context['has_crop_bounds'] = int(self.queued_image.has_crop_bounds)
    # Any missing crop bound defaults to the full image extent:
    max_x = self.queued_image.image.width - 1
    max_y = self.queued_image.image.height - 1
    guessed_crop_bounds = [
        value_if_none(self.queued_image.crop_min_x, 0),
        value_if_none(self.queued_image.crop_min_y, 0),
        value_if_none(self.queued_image.crop_max_x, max_x),
        value_if_none(self.queued_image.crop_max_y, max_y),
    ]
    context['form'] = PhotoReviewForm(
        initial = {
            'queued_image_id': self.queued_image.id,
            'decision': self.queued_image.decision,
            'x_min': guessed_crop_bounds[0],
            'y_min': guessed_crop_bounds[1],
            'x_max': guessed_crop_bounds[2],
            'y_max': guessed_crop_bounds[3],
            'moderator_why_allowed': self.queued_image.why_allowed,
            'make_primary': True,
        }
    )
    context['guessed_crop_bounds'] = guessed_crop_bounds
    context['why_allowed'] = self.queued_image.why_allowed
    context['moderator_why_allowed'] = self.queued_image.why_allowed
    # There are often source links supplied in the justification,
    # and it's convenient to be able to follow them. However, make
    # sure that any maliciously added HTML tags have been stripped
    # before linkifying any URLs:
    context['justification_for_use'] = \
        bleach.linkify(
            bleach.clean(
                self.queued_image.justification_for_use,
                tags=[],
                strip=True
            )
        )
    context['google_image_search_url'] = self.get_google_image_search_url(
        person
    )
    context['google_reverse_image_search_url'] = \
        self.get_google_reverse_image_search_url(
            self.queued_image.image.url
        )
    context['person'] = person
    return context
def handle(self, *args, **kwargs):
    """Rebuild the memberships of each PopIt person ID given on the command line."""
    if not args:
        raise CommandError("You must provide one or more PopIt person ID")
    for pid in args:
        person = PopItPerson.create_from_popit(
            create_popit_api_object(), pid
        )
        # Remove the existing memberships, then recreate them from the
        # person's own data:
        person.delete_memberships()
        self.create_party_memberships(pid, person.popit_data)
        self.create_candidate_list_memberships(pid, person.popit_data)
def parse_data(self, json_file):
    """Import registered political parties from an Electoral Commission JSON dump.

    Creates a PopIt organization for each 'PP'-prefixed party (or, on a
    duplicate-key error, updates the existing one), uploads its emblem
    images, and invalidates cache entries for all the party's members.
    """
    with open(json_file) as f:
        for ec_party in json.load(f):
            ec_party_id = ec_party['ECRef'].strip()
            # We're only interested in political parties:
            if not ec_party_id.startswith('PP'):
                continue
            party_id = self.clean_id(ec_party_id)
            # Minor parties record their register in a different field,
            # with a suffix that needs stripping:
            if ec_party['RegulatedEntityTypeName'] == 'Minor Party':
                register = ec_party['RegisterNameMinorParty'].replace(
                    ' (minor party)', ''
                )
            else:
                register = ec_party['RegisterName']
            party_name, party_dissolved = self.clean_name(ec_party['RegulatedEntityName'])
            party_founded = self.clean_date(ec_party['ApprovedDate'])
            party_data = {
                'id': party_id,
                'name': party_name,
                'slug': slugify(party_name),
                'classification': 'Party',
                'descriptions': get_descriptions(ec_party),
                'founding_date': party_founded,
                'dissolution_date': party_dissolved,
                'register': register,
                'identifiers': [
                    {
                        'identifier': ec_party_id,
                        'scheme': 'electoral-commission',
                    }
                ]
            }
            try:
                self.api.organizations.post(party_data)
                self.upload_images(ec_party['PartyEmblems'], party_id)
            except HttpServerError as e:
                # E11000 is MongoDB's duplicate-key error code:
                if 'E11000' in e.content:
                    # Duplicate Party Found
                    self.api.organizations(party_id).put(party_data)
                    self.upload_images(ec_party['PartyEmblems'], party_id)
                else:
                    raise
            organization_with_memberships = \
                self.api.organizations(party_id).get(embed='membership.person')['result']
            # Make sure any members of these parties are
            # invalidated from the cache so that the embedded
            # party information when getting posts and persons is
            # up-to-date:
            for membership in organization_with_memberships.get(
                'memberships', []
            ):
                person = PopItPerson.create_from_dict(membership['person_id'])
                person.invalidate_cache_entries()
def add_popit_person(self, ppc_data, image_filename):
    """Create a brand-new candidate in PopIt from official PPC data.

    Records an initial version, optionally uploads the candidate's
    image, invalidates their cache entries and returns the new ID.
    """
    metadata = get_change_metadata(
        None,
        'Created new candidate from official PPC data ({0})'.format(
            ppc_data['party_slug']
        ),
    )
    new_person = PopItPerson.create_from_reduced_json(
        self.get_person_data_from_ppc(ppc_data)
    )
    new_person.record_version(metadata)
    new_id = new_person.save_to_popit(self.api)
    if image_filename:
        self.upload_person_image(new_id, image_filename, ppc_data['image_url'])
    new_person.invalidate_cache_entries()
    return new_id
def add_popit_person(self, ppc_data, image_filename):
    """Add a new candidate to PopIt from official PPC data and return their ID."""
    from candidates.models import PopItPerson
    # Describe where this change came from for the version history:
    change_metadata = get_change_metadata(
        None,
        'Created new candidate from official PPC data ({})'.format(ppc_data['party_slug']),
    )
    reduced_json = self.get_person_data_from_ppc(ppc_data)
    person = PopItPerson.create_from_reduced_json(reduced_json)
    person.record_version(change_metadata)
    person_id = person.save_to_popit(self.api)
    # Only attempt the image upload when a local file was supplied:
    if image_filename:
        self.upload_person_image(
            person_id, image_filename, ppc_data['image_url']
        )
    person.invalidate_cache_entries()
    return person_id
def handle(self, **options):
    """Delete every image from every party organization in PopIt."""
    for o in popit_unwrap_pagination(self.api.organizations, per_page=100, embed='membership.person'):
        if o['classification'] != 'Party':
            continue
        print o['name']
        for image in o.get('images', []):
            print " DELETE", image['_id']
            self.api.organizations(o['id']).image(image['_id']).delete()
        # The person pages get party images via the
        # membership.organization embed, so invalidate the cache
        # entries for any person who's a member of this party:
        for membership in o.get('memberships', []):
            person = PopItPerson.create_from_dict(membership['person_id'])
            person.invalidate_cache_entries()
def handle(self, **options):
    """Remove all images from party organizations and refresh member caches."""
    for o in popit_unwrap_pagination(
        self.api.organizations,
        per_page=100,
        embed='membership.person'
    ):
        # Only party organizations carry emblem images we want to drop:
        if o['classification'] != 'Party':
            continue
        print o['name']
        for image in o.get('images', []):
            print " DELETE", image['_id']
            self.api.organizations(o['id']).image(image['_id']).delete()
        # The person pages get party images via the
        # membership.organization embed, so invalidate the cache
        # entries for any person who's a member of this party:
        for membership in o.get('memberships', []):
            person = PopItPerson.create_from_dict(membership['person_id'])
            person.invalidate_cache_entries()
def handle(self, *args, **kwargs):
    """Rebuild the memberships of the given people from their own data.

    For each PopIt person ID on the command line, delete their
    memberships and re-trigger their recreation, invalidating the
    person and associated post caches around the change.
    """
    api = create_popit_api_object()
    if len(args) < 1:
        raise CommandError("You must provide one or more PopIt person ID")
    for person_id in args:
        invalidate_person(person_id)
        person = PopItPerson.create_from_popit(api, person_id)
        # Capture the posts before deleting, so their caches can be
        # refreshed afterwards:
        posts_to_invalidate = person.get_associated_posts()
        person.delete_memberships(api)
        # The memberships are recreated when you assign to
        # standing_in and party_memberships; this script assumes
        # these are correct and so re-setting these should
        # recreate the memberships correctly.  (The self-assignments
        # below are deliberate, not a typo.)
        person.standing_in = person.standing_in
        person.party_memberships = person.party_memberships
        person.save_to_popit(api)
        invalidate_posts(posts_to_invalidate)
        invalidate_person(person_id)
def handle(self, **options):
    """Strip the obsolete 'last_party' key from every version of each person."""
    for person_data in popit_unwrap_pagination(
        self.api.persons,
        embed='',
        per_page=100
    ):
        needs_update = False
        for version in person_data.get('versions', []):
            data = version['data']
            if data.get('last_party'):
                needs_update = True
                msg = "Fixing person {0}persons/{1}"
                print msg.format(get_base_url(), person_data['id'])
                del data['last_party']
        # Skip the save round-trip when nothing was changed:
        if not needs_update:
            continue
        person = PopItPerson.create_from_dict(person_data)
        person.save_to_popit(self.api)
        person.invalidate_cache_entries()
def get_existing_popit_person(vi_person_id):
    """Find a previously-imported person by their 'import-id' identifier.

    Returns a PopItPerson, or None when no match exists; raises when the
    import ID matches more than one person.
    """
    # See if this person already exists by searching for the
    # ID they were imported with:
    search_url = get_search_url(
        'persons',
        'identifiers.identifier:"{id}" AND identifiers.scheme:"{scheme}"'.format(
            id=vi_person_id, scheme='import-id'
        ),
        embed='membership.organization',
    )
    data = requests.get(search_url).json()
    hits = data['total']
    if hits > 1:
        raise Exception("Multiple matches for CI ID {0}".format(vi_person_id))
    if hits == 0:
        return None
    # Otherwise there was exactly one result:
    return PopItPerson.create_from_dict(data['result'][0])
def test_get_person_data_from_dict_clear_email(self):
    """An empty email in the submitted form data survives as '' in popit_data."""
    submitted = {
        'name': 'John Doe',
        'email': '',
        'birth_date': '',
        'wikipedia_url': 'http://en.wikipedia.org/wiki/John_Doe',
        'homepage_url': '',
        'twitter_username': '******',
        'facebook_personal_url': '',
        'facebook_page_url': '',
        'party_ppc_page_url': '',
    }
    person = PopItPerson.create_from_reduced_json(submitted)
    self.assertEqual(person.popit_data, {
        'birth_date': None,
        'contact_details': [{'type': 'twitter', 'value': 'foobar'}],
        'email': '',
        'gender': '',
        'honorific_prefix': '',
        'honorific_suffix': '',
        'id': None,
        'links': [{'note': 'wikipedia',
                   'url': 'http://en.wikipedia.org/wiki/John_Doe'}],
        'memberships': [],
        'name': u'John Doe',
        'party_memberships': {},
        'standing_in': {},
    })
def test_get_person_data_from_dict_clear_email(self):
    """Reduced JSON with a blank email round-trips to popit_data with email ''."""
    reduced_json = {
        'name': 'John Doe',
        'email': '',
        'birth_date': '',
        'wikipedia_url': 'http://en.wikipedia.org/wiki/John_Doe',
        'homepage_url': '',
        'twitter_username': '******',
        'facebook_personal_url': '',
        'facebook_page_url': '',
        'party_ppc_page_url': '',
    }
    # Only the wikipedia link and twitter contact detail should be kept:
    expected_popit_data = {
        'birth_date': None,
        'contact_details': [
            {'type': 'twitter', 'value': 'foobar'},
        ],
        'email': '',
        'gender': '',
        'honorific_prefix': '',
        'honorific_suffix': '',
        'id': None,
        'links': [
            {'note': 'wikipedia',
             'url': 'http://en.wikipedia.org/wiki/John_Doe'},
        ],
        'memberships': [],
        'name': u'John Doe',
        'party_memberships': {},
        'standing_in': {},
    }
    created = PopItPerson.create_from_reduced_json(reduced_json)
    self.assertEqual(created.popit_data, expected_popit_data)
def handle(self, *args, **options):
    """Export one CSV of candidates per election, plus a combined '-all.csv'.

    args must contain a single output filename prefix.  Each file is
    written to a temporary file and atomically renamed into place.
    """
    if len(args) != 1:
        msg = "You must supply the prefix for output filenames"
        raise CommandError(msg)
    output_prefix = args[0]
    all_people = []
    election_to_people = defaultdict(list)
    for person_dict in popit_unwrap_pagination(
        self.api.persons,
        embed="membership.organization",
        per_page=100,
    ):
        standing_in = person_dict.get('standing_in')
        if not standing_in:
            continue
        for election in standing_in.keys():
            # A falsy entry means the person isn't actually standing
            # in that election:
            if not standing_in[election]:
                continue
            person = PopItPerson.create_from_dict(person_dict)
            person_as_csv_dict = person.as_dict(election=election)
            all_people.append(person_as_csv_dict)
            election_to_people[election].append(person_as_csv_dict)
    # None represents the combined "all elections" output:
    elections = election_to_people.keys() + [None]
    for election in elections:
        if election is None:
            output_filename = output_prefix + '-all.csv'
            people_data = all_people
        else:
            output_filename = output_prefix + '-' + election + '.csv'
            people_data = election_to_people[election]
        csv = list_to_csv(people_data)
        # Otherwise write to a temporary file and atomically
        # rename into place:
        ntf = NamedTemporaryFile(
            delete=False,
            dir=dirname(output_filename)
        )
        ntf.write(csv)
        chmod(ntf.name, 0o644)
        rename(ntf.name, output_filename)
def upload_photo(request, popit_person_id):
    """Show and process the photo-upload form for one PopIt person.

    On a valid POST the image is queued for moderation, the action is
    logged, and the user is redirected to the success page; otherwise
    the (possibly bound) form is rendered alongside this person's
    undecided queued images.
    """
    if request.method == 'POST':
        form = UploadPersonPhotoForm(request.POST, request.FILES)
        if form.is_valid():
            # Make sure that we save the user that made the upload
            queued_image = form.save(commit=False)
            queued_image.user = request.user
            queued_image.save()
            # Record that action:
            LoggedAction.objects.create(
                user=request.user,
                action_type='photo-upload',
                ip_address=get_client_ip(request),
                popit_person_new_version='',
                popit_person_id=popit_person_id,
                source=form.cleaned_data['justification_for_use'],
            )
            success_url = reverse(
                'photo-upload-success',
                kwargs={
                    'popit_person_id': form.cleaned_data['popit_person_id']
                },
            )
            return HttpResponseRedirect(success_url)
    else:
        form = UploadPersonPhotoForm(
            initial={'popit_person_id': popit_person_id}
        )
    api = create_popit_api_object()
    pending_images = QueuedImage.objects.filter(
        popit_person_id=popit_person_id,
        decision='undecided',
    ).order_by('created')
    template_context = {
        'form': form,
        'queued_images': pending_images,
        'person': PopItPerson.create_from_popit(api, popit_person_id),
    }
    return render(
        request,
        'moderation_queue/photo-upload-new.html',
        template_context
    )
def crop_and_upload_image_to_popit(self, image_filename, crop_bounds, moderator_why_allowed, make_primary): original = Image.open(image_filename) # Some uploaded images are CYMK, which gives you an error when # you try to write them as PNG, so convert to RGBA (this is # RGBA rather than RGB so that any alpha channel (transparency) # is preserved). original = original.convert('RGBA') cropped = original.crop(crop_bounds) ntf = NamedTemporaryFile(delete=False) cropped.save(ntf.name, 'PNG') # Upload the image to PopIt... person_id = self.queued_image.popit_person_id person = PopItPerson.create_from_popit(self.api, person_id) image_upload_url = '{base}persons/{person_id}/image'.format( base=get_base_url(), person_id=person_id ) data = { 'md5sum': get_file_md5sum(ntf.name), 'user_why_allowed': self.queued_image.why_allowed, 'user_justification_for_use': self.queued_image.justification_for_use, 'moderator_why_allowed': moderator_why_allowed, 'mime_type': 'image/png', 'notes': _('Approved from photo moderation queue'), 'uploaded_by_user': self.queued_image.user.username, 'created': None, } if make_primary: data['index'] = 'first' with open(ntf.name) as f: requests.post( image_upload_url, data=data, files={'image': f.read()}, headers={'APIKey': self.api.api_key} ) person.invalidate_cache_entries() # Remove the cropped temporary image file: os.remove(ntf.name)
def upload_photo(request, popit_person_id):
    """Handle the photo-upload form for one PopIt person.

    On a valid POST, queue the image for moderation, log the action,
    and redirect to the success page; otherwise render the form with
    any images already queued (and undecided) for this person.
    """
    if request.method == 'POST':
        form = UploadPersonPhotoForm(request.POST, request.FILES)
        if form.is_valid():
            # Make sure that we save the user that made the upload
            queued_image = form.save(commit=False)
            queued_image.user = request.user
            queued_image.save()
            # Record that action:
            LoggedAction.objects.create(
                user=request.user,
                action_type='photo-upload',
                ip_address=get_client_ip(request),
                popit_person_new_version='',
                popit_person_id=popit_person_id,
                source=form.cleaned_data['justification_for_use'],
            )
            return HttpResponseRedirect(
                reverse('photo-upload-success',
                        kwargs={
                            'popit_person_id': form.cleaned_data['popit_person_id']
                        }))
    else:
        form = UploadPersonPhotoForm(
            initial={'popit_person_id': popit_person_id})
    api = create_popit_api_object()
    return render(
        request,
        'moderation_queue/photo-upload-new.html',
        {
            'form': form,
            'queued_images': QueuedImage.objects.filter(
                popit_person_id=popit_person_id,
                decision='undecided',
            ).order_by('created'),
            'person': PopItPerson.create_from_popit(api, popit_person_id)
        })
def crop_and_upload_image_to_popit(self, image_filename, crop_bounds, moderator_why_allowed, make_primary): original = Image.open(image_filename) # Some uploaded images are CYMK, which gives you an error when # you try to write them as PNG, so convert to RGBA (this is # RGBA rather than RGB so that any alpha channel (transparency) # is preserved). original = original.convert('RGBA') cropped = original.crop(crop_bounds) ntf = NamedTemporaryFile(delete=False) cropped.save(ntf.name, 'PNG') # Upload the image to PopIt... person_id = self.queued_image.popit_person_id person = PopItPerson.create_from_popit(self.api, person_id) image_upload_url = '{base}persons/{person_id}/image'.format( base=get_base_url(), person_id=person_id) data = { 'md5sum': get_file_md5sum(ntf.name), 'user_why_allowed': self.queued_image.why_allowed, 'user_justification_for_use': self.queued_image.justification_for_use, 'moderator_why_allowed': moderator_why_allowed, 'mime_type': 'image/png', 'notes': _('Approved from photo moderation queue'), 'uploaded_by_user': self.queued_image.user.username, 'created': None, } if make_primary: data['index'] = 'first' with open(ntf.name) as f: requests.post(image_upload_url, data=data, files={'image': f.read()}, headers={'APIKey': self.api.api_key}) person.invalidate_cache_entries() # Remove the cropped temporary image file: os.remove(ntf.name)
def get_context_data(self, **kwargs):
    """Insert the relevant PopIt person into the template context."""
    ctx = super(PhotoUploadSuccess, self).get_context_data(**kwargs)
    ctx['person'] = PopItPerson.create_from_popit(
        self.api, kwargs['popit_person_id'])
    return ctx
def test_age_full_obvious(self, mock_date):
    """A full YYYY-MM-DD birth date just after the birthday gives an exact age."""
    mock_date.today.return_value = date(1977, 9, 3)
    # Let the mocked date class still construct real dates:
    mock_date.side_effect = lambda *a, **kw: date(*a, **kw)
    person = PopItPerson.create_from_dict({'birth_date': '1976-09-01'})
    self.assertEqual(person.age, '1')
def handle(self, **options):
    """Import candidates from data/candidates.csv into PopIt.

    Builds a synthetic 'import-id' from several CSV columns, reuses the
    matching existing person when one is found, fills in their
    candidacy and party membership for the relevant election, records a
    version and saves.  HTTP errors are printed and re-raised.
    """
    from slumber.exceptions import HttpClientError, HttpServerError
    from candidates.election_specific import PARTY_DATA, shorten_post_label
    from candidates.models import PopItPerson
    from candidates.popit import create_popit_api_object
    api = create_popit_api_object()
    csv_filename = join(dirname(__file__), '..', '..', 'data', 'candidates.csv')
    with open(csv_filename) as f:
        all_data = csv.DictReader(f)
        for candidate in all_data:
            # Synthetic, stable ID composed from the identifying columns:
            vi_person_id = candidate['Distrito'] + candidate[
                'Numero Lista'] + candidate['Posicion'] + candidate[
                'Cargo'] + candidate['Nombre Lista']
            election_data, post_data = get_post_data(
                api, candidate['Cargo'], candidate['Distrito'])
            # No matching election/post means we can't import this row:
            if (election_data == False):
                print("Skipping: " + candidate['Cargo'] + ", " + candidate['Distrito'] + ", " + candidate['Nombre'])
                continue
            name = candidate['Nombre']
            birth_date = None
            gender = None
            image_url = None
            person = get_existing_popit_person(vi_person_id)
            if person:
                print("Found an existing person:", person.get_absolute_url())
            else:
                print("No existing person, creating a new one:", name)
                person = PopItPerson()
            # Now update fields from the imported data; the CSV name is
            # "Surname, Forename", so reorder it:
            person.name = name.split(",")[1] + " " + name.split(",")[0]
            person.gender = gender
            if birth_date:
                person.birth_date = str(birth_date)
            else:
                person.birth_date = None
            standing_in_election = {
                'post_id': post_data['id'],
                'name': shorten_post_label(post_data['label']),
                'party_list_position': candidate['Posicion'],
            }
            if 'area' in post_data:
                standing_in_election['mapit_url'] = post_data['area'][
                    'identifier']
            person.standing_in = {election_data.slug: standing_in_election}
            party_id = get_party_id(candidate["Partido"])
            person.party_memberships = {
                election_data.slug: {
                    'id': party_id,
                    'name': PARTY_DATA.party_id_to_name[party_id],
                }
            }
            # Remember the import ID so re-runs find this person again:
            person.set_identifier('import-id', vi_person_id)
            change_metadata = get_change_metadata(
                None,
                'Imported candidate from CSV',
            )
            person.record_version(change_metadata)
            try:
                person.save_to_popit(api)
            except HttpClientError as hce:
                print("Got an HttpClientError:", hce.content)
                raise
            except HttpServerError as hse:
                print("The server error content was:", hse.content)
                raise
def test_age_month_early_in_year(self, mock_date):
    """With only YYYY-MM precision, before the birth month the age is '0'."""
    mock_date.today.return_value = date(1977, 8, 15)
    # Keep the mocked class usable as a date constructor:
    mock_date.side_effect = lambda *a, **kw: date(*a, **kw)
    person = PopItPerson.create_from_dict({'birth_date': '1976-09'})
    self.assertEqual(person.age, '0')
def test_age_year_ambiguous(self, mock_date):
    """With only a birth year, the age is reported as a range like '1 or 2'."""
    mock_date.today.return_value = date(1977, 9, 10)
    # Keep the mocked class usable as a date constructor:
    mock_date.side_effect = lambda *a, **kw: date(*a, **kw)
    person = PopItPerson.create_from_dict({'birth_date': '1975'})
    self.assertEqual(person.age, '1 or 2')
def test_update_tessa_jowell(self, mock_invalidate_person, mock_invalidate_posts, mocked_put):
    """Changing a person's 2015 constituency should PUT twice and rebuild memberships.

    The first PUT blanks standing_in / party_memberships, the second
    sends the real values; four memberships are POSTed (two candidacies,
    two party memberships) and both old and new posts are invalidated.
    """
    mock_api = MagicMock()
    mock_api.persons = FakePersonCollection
    old_person_data = {
        "birth_date": '1947',
        "email": "*****@*****.**",
        "facebook_page_url": "",
        "facebook_personal_url": "",
        "gender": "",
        "homepage_url": "http://foo.example.org",
        "honorific_prefix": "",
        "honorific_suffix": "",
        "id": "2009",
        "identifiers": [],
        "image": None,
        "linkedin_url": "",
        "name": "Tessa Jowell",
        "other_names": [],
        "party_memberships": {
            "2010": {
                "id": "party:53",
                "name": "Labour Party"
            },
            "2015": {
                "id": "party:53",
                "name": "Labour Party"
            }
        },
        "party_ppc_page_url": "",
        "proxy_image": None,
        "standing_in": {
            "2010": {
                "mapit_url": "http://mapit.mysociety.org/area/65808",
                "name": "Dulwich and West Norwood",
                "post_id": "65808",
            },
            "2015": {
                "mapit_url": "http://mapit.mysociety.org/area/65913",
                "name": "Camberwell and Peckham",
                "post_id": "65913",
            }
        },
        "twitter_username": "******",
        "wikipedia_url": "",
    }
    # The update moves the 2015 candidacy to Dulwich and West Norwood:
    new_person_data = deepcopy(old_person_data)
    new_person_data['standing_in']['2015'] = {
        "mapit_url": "http://mapit.mysociety.org/area/65808",
        "name": "Dulwich and West Norwood",
        "post_id": "65808",
    }
    previous_version = {
        'data': {
            "name": "Tessa Jowell",
            "standing_in": {
                "2010": {
                    "mapit_url": "http://mapit.mysociety.org/area/65808",
                    "name": "Dulwich and West Norwood",
                    "post_id": "65808",
                },
                "2015": {
                    "mapit_url": "http://mapit.mysociety.org/area/65913",
                    "name": "Camberwell and Peckham",
                    "post_id": "65913",
                }
            },
        }
    }
    person = PopItPerson.create_from_reduced_json(old_person_data)
    person.update_from_reduced_json(new_person_data)
    person.versions = [previous_version]
    person.record_version(
        {
            'information_source': 'A change made for testing purposes',
            'username': '******',
            'version_id': '6054aa38b30b4418',
            'timestamp': '2014-09-28T14:02:44.567413',
        },
    )
    person.save_to_popit(mock_api)
    self.assertEqual(2, len(mocked_put.call_args_list))
    # First PUT: standing_in and party_memberships are nulled out so
    # that stale memberships can be removed before recreation:
    first_put_call_args = {
        'birth_date': '1947',
        'contact_details': [],
        'email': u'*****@*****.**',
        'gender': '',
        'honorific_prefix': '',
        'honorific_suffix': '',
        'id': '2009',
        'identifiers': [],
        'name': u'Tessa Jowell',
        'links': [],
        'other_names': [],
        'party_memberships': None,
        'standing_in': None,
        'versions': [{
            'username': '******',
            'information_source': 'A change made for testing purposes',
            'version_id': '6054aa38b30b4418',
            'timestamp': '2014-09-28T14:02:44.567413',
            'data': new_person_data
        }, previous_version],
    }
    # Second PUT: the real values, including the updated 2015 seat:
    second_put_call_args = {
        'birth_date': '1947',
        'contact_details': [{
            'type': 'twitter',
            'value': 'jowellt'
        }],
        'email': u'*****@*****.**',
        'gender': '',
        'honorific_prefix': '',
        'honorific_suffix': '',
        'id': '2009',
        'identifiers': [],
        'name': u'Tessa Jowell',
        'links': [{
            'note': 'homepage',
            'url': 'http://foo.example.org'
        }],
        'other_names': [],
        'party_memberships': {
            '2010': {
                'id': 'party:53',
                'name': 'Labour Party'
            },
            '2015': {
                'id': 'party:53',
                'name': 'Labour Party'
            }
        },
        'standing_in': {
            '2015': {
                'name': 'Dulwich and West Norwood',
                'mapit_url': 'http://mapit.mysociety.org/area/65808',
                'post_id': '65808',
            },
            '2010': {
                'name': 'Dulwich and West Norwood',
                'mapit_url': 'http://mapit.mysociety.org/area/65808',
                'post_id': '65808',
            }
        },
        'versions': [{
            'username': '******',
            'information_source': 'A change made for testing purposes',
            'version_id': '6054aa38b30b4418',
            'timestamp': '2014-09-28T14:02:44.567413',
            'data': new_person_data
        }, previous_version],
    }
    self.assertTrue(
        equal_call_args(
            [first_put_call_args],
            mocked_put.call_args_list[0][0],
        ),
        "Unexpected first PUT (the one blanking out standing_in and party_memberships",
    )
    self.assertTrue(
        equal_call_args(
            [second_put_call_args],
            mocked_put.call_args_list[1][0],
        ),
        "Unexpected second PUT (the one with real standing_in and party_memberships",
    )
    # Two candidacy memberships plus two party memberships:
    self.assertEqual(4, mock_api.memberships.post.call_count)
    posted_memberships = [
        c[0][0] for c in mock_api.memberships.post.call_args_list
    ]
    self.assertEqual(posted_memberships, [
        {
            "election": "2015",
            "end_date": "9999-12-31",
            "person_id": "2009",
            "post_id": "65808",
            "role": "Candidate",
            "start_date": "2010-05-07"
        },
        {
            "election": "2010",
            "end_date": "2010-05-06",
            "person_id": "2009",
            "post_id": "65808",
            "role": "Candidate",
            "start_date": "2005-05-06"
        },
        {
            "end_date": "9999-12-31",
            "organization_id": "party:53",
            "person_id": "2009",
            "start_date": "2010-05-07"
        },
        {
            "end_date": "2010-05-06",
            "organization_id": "party:53",
            "person_id": "2009",
            "start_date": "2005-05-06"
        },
    ])
    mock_invalidate_person.assert_called_with('2009')
    # Both the old (65913) and new (65808) posts must be refreshed:
    mock_invalidate_posts.assert_called_with(set(['65808', '65913']))
def test_update_tessa_jowell(
        self, mock_invalidate_person, mock_invalidate_posts, mocked_put
):
    """Check that updating an existing person produces the right PopIt writes.

    Moves Tessa Jowell's 2015 candidacy from Camberwell and Peckham to
    Dulwich and West Norwood, then asserts that save_to_popit:
      * PUTs twice (first blanking standing_in / party_memberships, then
        writing the real values),
      * POSTs the four expected candidacy/party memberships, and
      * invalidates the person and both affected post cache entries.
    """
    mock_api = MagicMock()
    # FakePersonCollection stands in for the PopIt persons endpoint.
    mock_api.persons = FakePersonCollection
    # The person as they currently exist in PopIt:
    old_person_data = {
        "birth_date": '1947',
        "email": "*****@*****.**",
        "facebook_page_url": "",
        "facebook_personal_url": "",
        "gender": "",
        "homepage_url": "http://foo.example.org",
        "honorific_prefix": "",
        "honorific_suffix": "",
        "id": "2009",
        "identifiers": [],
        "image": None,
        "linkedin_url": "",
        "name": "Tessa Jowell",
        "other_names": [],
        "party_memberships": {
            "2010": {
                "id": "party:53",
                "name": "Labour Party"
            },
            "2015": {
                "id": "party:53",
                "name": "Labour Party"
            }
        },
        "party_ppc_page_url": "",
        "proxy_image": None,
        "standing_in": {
            "2010": {
                "mapit_url": "http://mapit.mysociety.org/area/65808",
                "name": "Dulwich and West Norwood",
                "post_id": "65808",
            },
            "2015": {
                "mapit_url": "http://mapit.mysociety.org/area/65913",
                "name": "Camberwell and Peckham",
                "post_id": "65913",
            }
        },
        "twitter_username": "******",
        "wikipedia_url": "",
    }
    # The update changes only the 2015 constituency:
    new_person_data = deepcopy(old_person_data)
    new_person_data['standing_in']['2015'] = {
        "mapit_url": "http://mapit.mysociety.org/area/65808",
        "name": "Dulwich and West Norwood",
        "post_id": "65808",
    }
    # A pre-existing entry in the versions array, which must be preserved
    # after the new version is recorded:
    previous_version = {
        'data': {
            "name": "Tessa Jowell",
            "standing_in": {
                "2010": {
                    "mapit_url": "http://mapit.mysociety.org/area/65808",
                    "name": "Dulwich and West Norwood",
                    "post_id": "65808",
                },
                "2015": {
                    "mapit_url": "http://mapit.mysociety.org/area/65913",
                    "name": "Camberwell and Peckham",
                    "post_id": "65913",
                }
            },
        }
    }
    person = PopItPerson.create_from_reduced_json(old_person_data)
    person.update_from_reduced_json(new_person_data)
    person.versions = [previous_version]
    person.record_version(
        {
            'information_source': 'A change made for testing purposes',
            'username': '******',
            'version_id': '6054aa38b30b4418',
            'timestamp': '2014-09-28T14:02:44.567413',
        },
    )
    person.save_to_popit(mock_api)
    # save_to_popit should PUT twice (see expected payloads below):
    self.assertEqual(2, len(mocked_put.call_args_list))
    # First PUT: standing_in and party_memberships are nulled so stale
    # membership data can't survive the update:
    first_put_call_args = {
        'birth_date': '1947',
        'contact_details': [],
        'email': u'*****@*****.**',
        'gender': '',
        'honorific_prefix': '',
        'honorific_suffix': '',
        'id': '2009',
        'identifiers': [],
        'name': u'Tessa Jowell',
        'links': [],
        'other_names': [],
        'party_memberships': None,
        'standing_in': None,
        'versions': [{
            'username': '******',
            'information_source': 'A change made for testing purposes',
            'version_id': '6054aa38b30b4418',
            'timestamp': '2014-09-28T14:02:44.567413',
            'data': new_person_data
        }, previous_version],
    }
    # Second PUT: the real (updated) values are written back:
    second_put_call_args = {
        'birth_date': '1947',
        'contact_details': [{
            'type': 'twitter',
            'value': 'jowellt'
        }],
        'email': u'*****@*****.**',
        'gender': '',
        'honorific_prefix': '',
        'honorific_suffix': '',
        'id': '2009',
        'identifiers': [],
        'name': u'Tessa Jowell',
        'links': [{
            'note': 'homepage',
            'url': 'http://foo.example.org'
        }],
        'other_names': [],
        'party_memberships': {
            '2010': {
                'id': 'party:53',
                'name': 'Labour Party'
            },
            '2015': {
                'id': 'party:53',
                'name': 'Labour Party'
            }
        },
        'standing_in': {
            '2015': {
                'name': 'Dulwich and West Norwood',
                'mapit_url': 'http://mapit.mysociety.org/area/65808',
                'post_id': '65808',
            },
            '2010': {
                'name': 'Dulwich and West Norwood',
                'mapit_url': 'http://mapit.mysociety.org/area/65808',
                'post_id': '65808',
            }
        },
        'versions': [{
            'username': '******',
            'information_source': 'A change made for testing purposes',
            'version_id': '6054aa38b30b4418',
            'timestamp': '2014-09-28T14:02:44.567413',
            'data': new_person_data
        }, previous_version],
    }
    self.assertTrue(
        equal_call_args(
            [first_put_call_args],
            mocked_put.call_args_list[0][0],
        ),
        "Unexpected first PUT (the one blanking out standing_in and party_memberships",
    )
    self.assertTrue(
        equal_call_args(
            [second_put_call_args],
            mocked_put.call_args_list[1][0],
        ),
        "Unexpected second PUT (the one with real standing_in and party_memberships",
    )
    # Two candidacies and two party memberships should be POSTed:
    self.assertEqual(4, mock_api.memberships.post.call_count)
    posted_memberships = [
        c[0][0] for c in mock_api.memberships.post.call_args_list
    ]
    self.assertEqual(posted_memberships, [
        {
            "election": "2015",
            "end_date": "9999-12-31",
            "person_id": "2009",
            "post_id": "65808",
            "role": "Candidate",
            "start_date": "2010-05-07"
        },
        {
            "election": "2010",
            "end_date": "2010-05-06",
            "person_id": "2009",
            "post_id": "65808",
            "role": "Candidate",
            "start_date": "2005-05-06"
        },
        {
            "end_date": "9999-12-31",
            "organization_id": "party:53",
            "person_id": "2009",
            "start_date": "2010-05-07"
        },
        {
            "end_date": "2010-05-06",
            "organization_id": "party:53",
            "person_id": "2009",
            "start_date": "2005-05-06"
        },
    ])
    # Both the old (65913) and new (65808) posts must be invalidated:
    mock_invalidate_person.assert_called_with('2009')
    mock_invalidate_posts.assert_called_with(set(['65808', '65913']))
def handle(self, username=None, **options):
    """Import candidates from data/candidates.json into PopIt.

    Expects a JSON fixture containing 'elections.election',
    'popolo.person' and 'elections.candidate' records.  For each
    candidate it creates (or updates) a PopItPerson, records an import
    version, saves it to PopIt, and queues any candidate image for
    moderation under the given user's name.

    Raises CommandError when no username is supplied or the user does
    not exist.
    """
    from slumber.exceptions import HttpClientError
    from candidates.popit import create_popit_api_object
    from candidates.election_specific import PARTY_DATA, shorten_post_label
    from candidates.models import PopItPerson
    # A user is required because queued image uploads are attributed to
    # a Django user:
    if username is None:
        message = "You must supply the name of a user to be associated with the image uploads."
        raise CommandError(message)
    try:
        user = User.objects.get(username=username)
    except User.DoesNotExist:
        message = "No user with the username '{0}' could be found"
        raise CommandError(message.format(username))
    api = create_popit_api_object()
    json_filename = join(
        dirname(__file__), '..', '..', 'data', 'candidates.json'
    )
    with open(json_filename) as f:
        all_data = json.load(f)
    # This map is needed for getting YNR election data from
    # the election ID used in the JSON file.
    json_election_id_to_name = {
        e['pk']: e['fields']['name']
        for e in all_data
        if e['model'] == 'elections.election'
    }
    # Person records keyed by their primary key in the fixture:
    person_dict = {
        e['pk']: e['fields']
        for e in all_data
        if e['model'] == 'popolo.person'
    }
    candidate_list = [
        dict(person_id=e['pk'], election_id=e['fields']['election'])
        for e in all_data
        if e['model'] == 'elections.candidate'
    ]
    for candidate in candidate_list:
        vi_person_id = candidate['person_id']
        person_data = person_dict[vi_person_id]
        election_data, post_data = get_post_data(
            api, candidate['election_id'], json_election_id_to_name
        )
        # Normalize the birth date to ISO format (dayfirst, since the
        # source data appears to use day-first dates — TODO confirm):
        birth_date = None
        if person_data['birth_date']:
            birth_date = str(dateutil.parser.parse(
                person_data['birth_date'], dayfirst=True
            ).date())
        name = person_data['name']
        gender = person_data['gender']
        image_url = person_data['image']
        # Look for a person previously imported with the same ID:
        person = get_existing_popit_person(vi_person_id)
        if person:
            print("Found an existing person:", person.get_absolute_url())
        else:
            print("No existing person, creating a new one:", name)
            person = PopItPerson()
        # Now update fields from the imported data:
        person.name = name
        person.gender = gender
        if birth_date:
            person.birth_date = str(birth_date)
        else:
            person.birth_date = None
        standing_in_election = {
            'post_id': post_data['id'],
            'name': shorten_post_label(post_data['label']),
        }
        if 'area' in post_data:
            standing_in_election['mapit_url'] = post_data['area']['identifier']
        person.standing_in = {
            election_data.slug: standing_in_election
        }
        # Party affiliation is not present in the JSON, so record the
        # placeholder party:
        person.party_memberships = {
            election_data.slug: {
                'id': UNKNOWN_PARTY_ID,
                'name': PARTY_DATA.party_id_to_name[UNKNOWN_PARTY_ID],
            }
        }
        # Remember the source ID so re-running the import finds this
        # person again:
        person.set_identifier('import-id', vi_person_id)
        change_metadata = get_change_metadata(
            None, 'Imported candidate from JSON',
        )
        person.record_version(change_metadata)
        try:
            person.save_to_popit(api)
            if image_url:
                enqueue_image(person, user, image_url)
        except HttpClientError as hce:
            print("Got an HttpClientError:", hce.content)
            raise
def handle(self, username=None, **options): election_data = { 'prv-2015': 'listedescandidatsauxelectionslegislativeslisteprovincialeanptic.csv', 'nat-2015': 'listedescandidatsauxelectionslegislativesanptic.csv' } field_map = { 'prv-2015': { 'region': 1, 'party': 4, 'list_order': 5, 'first_name': 7, 'last_name': 6, 'gender': 8, 'birth_date': 9, 'party_short': 3 }, 'nat-2015': { 'region': 0, 'party': 2, 'list_order': 3, 'first_name': 5, 'last_name': 4, 'gender': 6, 'birth_date': 7, 'party_short': 2 } } api = create_popit_api_object() party_id_missing = {} party_name_to_id = {} for party_id, party_name in PARTY_DATA.party_id_to_name.items(): party_name_to_id[party_name] = party_id for election_id, filename in election_data.items(): csv_filename = join( dirname(__file__), '..', '..', 'data', filename ) fields = field_map[election_id] with codecs.open(csv_filename, 'r', encoding='windows-1252') as f: initial = True for candidate in unicode_csv_reader(f): # skip header line if initial: initial = False continue region = candidate[fields['region']] party = candidate[fields['party']] party_list_order = candidate[fields['list_order']] first_name = string.capwords(candidate[fields['first_name']]) last_name = string.capwords(candidate[fields['last_name']]) gender = candidate[fields['gender']] birth_date = None if candidate[fields['birth_date']] is not None: birth_date = str(dateutil.parser.parse( candidate[fields['birth_date']], dayfirst=True ).date()) name = first_name + ' ' + last_name id = '-'.join([ re.sub('[^\w]*', '', re.sub(r' ', '-', strip_accents(name.lower()))), re.sub('[^\w]*', '', candidate[fields['party_short']].lower()), birth_date ]) # national candidate if region == 'PAYS': region = 'Burkina Faso' election_data, post_data = get_post_data( api, election_id, region ) # debug # tmp = '%s %s %s (%s) - %s (%s)' % ( id, first_name, last_name, party, region, post_data['label'] ) # print tmp person = get_existing_popit_person(id) if person: # print "Found an existing 
person:", person.get_absolute_url() pass else: print "No existing person, creating a new one:", name person = PopItPerson() person.set_identifier('import-id', id) person.family_name = last_name person.given_name = first_name person.name = name person.gender = gender if birth_date: person.birth_date = str(birth_date) else: person.birth_date = None standing_in_election = { 'post_id': post_data['id'], 'name': AREA_POST_DATA.shorten_post_label( election_data.slug, post_data['label'], ), 'party_list_position': party_list_order, } if 'area' in post_data: standing_in_election['mapit_url'] = post_data['area']['identifier'] person.standing_in = { election_data.slug: standing_in_election } change_metadata = get_change_metadata( None, 'Imported candidate from CSV', ) party_comp = re.sub(' +', ' ', party) party_id = UNKNOWN_PARTY_ID if party_comp in party_name_to_id.keys(): party_id = party_name_to_id[party_comp] party = party_comp else: party_id = party_name_to_id['Unknown Party'] party = 'Unknown Party' if party_id == UNKNOWN_PARTY_ID and party_comp not in party_id_missing.keys(): party_id_missing[party_comp] = 1 person.party_memberships = { election_data.slug: { 'id': party_id, 'name': party, 'imported_name': party_comp } } person.record_version(change_metadata) try: person.save_to_popit(api) except HttpClientError as hce: print "Got an HttpClientError:", hce.content raise if len(party_id_missing) > 0: print "Unmatched party names:" for name in party_id_missing.keys(): print name
def handle(self, **options):
    """Import candidates from data/candidates.csv into PopIt.

    Each CSV row describes one candidacy; a synthetic import ID is
    built from the identifying columns so that re-running the command
    updates existing people instead of duplicating them.  Rows whose
    election/post cannot be resolved are skipped with a message.
    """
    from slumber.exceptions import HttpClientError, HttpServerError
    from candidates.election_specific import PARTY_DATA, shorten_post_label
    from candidates.models import PopItPerson
    from candidates.popit import create_popit_api_object
    api = create_popit_api_object()
    csv_filename = join(
        dirname(__file__), '..', '..', 'data', 'candidates.csv'
    )
    with open(csv_filename) as f:
        all_data = csv.DictReader(f)
        for candidate in all_data:
            # Synthesize a stable import ID from the columns that
            # uniquely identify a candidacy:
            vi_person_id = (
                candidate['Distrito'] + candidate['Numero Lista'] +
                candidate['Posicion'] + candidate['Cargo'] +
                candidate['Nombre Lista']
            )
            election_data, post_data = get_post_data(
                api, candidate['Cargo'], candidate['Distrito']
            )
            # get_post_data signals a failed lookup with False:
            if election_data is False:
                print("Skipping: " + candidate['Cargo'] + ", " +
                      candidate['Distrito'] + ", " + candidate['Nombre'])
                continue
            name = candidate['Nombre']
            # These attributes are not present in the CSV:
            birth_date = None
            gender = None
            image_url = None
            person = get_existing_popit_person(vi_person_id)
            if person:
                print("Found an existing person:", person.get_absolute_url())
            else:
                print("No existing person, creating a new one:", name)
                person = PopItPerson()
            # Now update fields from the imported data.  The CSV name is
            # "Last, First", so swap it round (assumes exactly one comma
            # — TODO confirm against the data):
            name_parts = name.split(",")
            person.name = name_parts[1] + " " + name_parts[0]
            person.gender = gender
            if birth_date:
                person.birth_date = str(birth_date)
            else:
                person.birth_date = None
            standing_in_election = {
                'post_id': post_data['id'],
                'name': shorten_post_label(post_data['label']),
                'party_list_position': candidate['Posicion'],
            }
            if 'area' in post_data:
                standing_in_election['mapit_url'] = post_data['area']['identifier']
            person.standing_in = {
                election_data.slug: standing_in_election
            }
            party_id = get_party_id(candidate["Partido"])
            person.party_memberships = {
                election_data.slug: {
                    'id': party_id,
                    'name': PARTY_DATA.party_id_to_name[party_id],
                }
            }
            # Remember the source ID so re-running the import finds this
            # person again:
            person.set_identifier('import-id', vi_person_id)
            change_metadata = get_change_metadata(
                None, 'Imported candidate from CSV',
            )
            person.record_version(change_metadata)
            try:
                person.save_to_popit(api)
            except HttpClientError as hce:
                print("Got an HttpClientError:", hce.content)
                raise
            except HttpServerError as hse:
                print("The server error content was:", hse.content)
                raise
def handle(self, **options): from slumber.exceptions import HttpClientError from candidates.cache import get_post_cached, UnknownPostException from candidates.election_specific import PARTY_DATA, shorten_post_label from candidates.models import PopItPerson from candidates.popit import create_popit_api_object spreadsheet_url = 'https://docs.google.com/spreadsheets/d/{0}/pub?output=csv'\ .format(GOOGLE_DOC_ID) candidate_list = requests.get(spreadsheet_url) content = StringIO(unicode(candidate_list.content)) reader = csv.DictReader(content) api = create_popit_api_object() for row in reader: try: election_data = Election.objects.get_by_slug('council-member-2015') ocd_division = election_data.post_id_format.format(area_id=row['Ward']) post_data = get_post_cached(api, ocd_division)['result'] except (UnknownPostException, memcache.Client.MemcachedKeyCharacterError): election_data = Election.objects.get_by_slug('school-board-2015') post_data = get_post_cached(api, election_data.post_id_format)['result'] person_id = slugify(row['Name']) person = get_existing_popit_person(person_id) if person: print("Found an existing person:", row['Name']) else: print("No existing person, creating a new one:", row['Name']) person = PopItPerson() person.name = row['Name'] # TODO: Get these attributes in the spreadsheet # person.gender = gender # if birth_date: # person.birth_date = str(birth_date) # else: # person.birth_date = None person.email = row['Campaign Email'] person.facebook_personal_url = row["Candidate's Personal Facebook Profile"] person.facebook_page_url = row['Campaign Facebook Page'] person.twitter_username = row['Campaign Twitter']\ .replace('N', '')\ .replace('N/A', '')\ .replace('http://twitter.com/', '')\ .replace('https://twitter.com/', '') person.linkedin_url = row['LinkedIn'] person.homepage_url = row['Campaign Website\n'] standing_in_election = { 'post_id': post_data['id'], 'name': shorten_post_label(post_data['label']), } if 'area' in post_data: 
standing_in_election['mapit_url'] = post_data['area']['identifier'] person.standing_in = { election_data.slug: standing_in_election } if 'dfl' in row['Party'].lower(): party_id = 'party:101' elif 'green' in row['Party'].lower(): party_id = 'party:201' elif 'independence' in row['Party'].lower(): party_id = 'party:301' else: party_id = 'party:401' party_name = PARTY_DATA.party_id_to_name[party_id] person.party_memberships = { election_data.slug: { 'id': party_id, 'name': party_name, } } person.set_identifier('import-id', person_id) change_metadata = get_change_metadata( None, 'Imported candidate from Google Spreadsheet', ) person.record_version(change_metadata) try: person.save_to_popit(api) # TODO: Get candidate Images # if image_url: # enqueue_image(person, user, image_url) except HttpClientError as hce: print "Got an HttpClientError:", hce.content raise
def handle(self, username=None, **options):
    """Import candidates from data/candidates.json into PopIt.

    For each 'elections.candidate' record in the JSON fixture, create
    or update a PopItPerson, record an import version, save it, and
    queue any candidate image for moderation attributed to the given
    user.

    Raises CommandError when no username is supplied or the user does
    not exist.
    """
    from slumber.exceptions import HttpClientError
    from candidates.popit import create_popit_api_object
    from candidates.election_specific import PARTY_DATA, shorten_post_label
    from candidates.models import PopItPerson
    # A user is required because queued image uploads are attributed to
    # a Django user:
    if username is None:
        message = "You must supply the name of a user to be associated with the image uploads."
        raise CommandError(message)
    try:
        user = User.objects.get(username=username)
    except User.DoesNotExist:
        message = "No user with the username '{0}' could be found"
        raise CommandError(message.format(username))
    api = create_popit_api_object()
    json_filename = join(dirname(__file__), '..', '..', 'data', 'candidates.json')
    with open(json_filename) as f:
        all_data = json.load(f)
    # This map is needed for getting YNR election data from
    # the election ID used in the JSON file.
    json_election_id_to_name = {
        e['pk']: e['fields']['name']
        for e in all_data if e['model'] == 'elections.election'
    }
    # Person records keyed by their primary key in the fixture:
    person_dict = {
        e['pk']: e['fields']
        for e in all_data if e['model'] == 'popolo.person'
    }
    candidate_list = [
        dict(person_id=e['pk'], election_id=e['fields']['election'])
        for e in all_data if e['model'] == 'elections.candidate'
    ]
    for candidate in candidate_list:
        vi_person_id = candidate['person_id']
        person_data = person_dict[vi_person_id]
        election_data, post_data = get_post_data(
            api, candidate['election_id'], json_election_id_to_name)
        # Normalize the birth date to ISO format (dayfirst parsing —
        # presumably the source uses day-first dates; TODO confirm):
        birth_date = None
        if person_data['birth_date']:
            birth_date = str(
                dateutil.parser.parse(person_data['birth_date'],
                                      dayfirst=True).date())
        name = person_data['name']
        gender = person_data['gender']
        image_url = person_data['image']
        # Look for a person previously imported with the same ID:
        person = get_existing_popit_person(vi_person_id)
        if person:
            print("Found an existing person:", person.get_absolute_url())
        else:
            print("No existing person, creating a new one:", name)
            person = PopItPerson()
        # Now update fields from the imported data:
        person.name = name
        person.gender = gender
        if birth_date:
            person.birth_date = str(birth_date)
        else:
            person.birth_date = None
        standing_in_election = {
            'post_id': post_data['id'],
            'name': shorten_post_label(post_data['label']),
        }
        if 'area' in post_data:
            standing_in_election['mapit_url'] = post_data['area'][
                'identifier']
        person.standing_in = {election_data.slug: standing_in_election}
        # Party affiliation is not present in the JSON, so record the
        # placeholder party:
        person.party_memberships = {
            election_data.slug: {
                'id': UNKNOWN_PARTY_ID,
                'name': PARTY_DATA.party_id_to_name[UNKNOWN_PARTY_ID],
            }
        }
        # Remember the source ID so re-running the import finds this
        # person again:
        person.set_identifier('import-id', vi_person_id)
        change_metadata = get_change_metadata(
            None, 'Imported candidate from JSON',
        )
        person.record_version(change_metadata)
        try:
            person.save_to_popit(api)
            if image_url:
                enqueue_image(person, user, image_url)
        except HttpClientError as hce:
            print("Got an HttpClientError:", hce.content)
            raise
def form_valid(self, form):
    """Act on a photo moderation decision for a queued image.

    Depending on form.cleaned_data['decision'] ('approved', 'rejected',
    'undecided' or 'ignore'), this crops and uploads the image, records
    a LoggedAction, emails the uploader, and flashes a status message.
    Always redirects back to the photo review list.
    """
    decision = form.cleaned_data['decision']
    person = PopItPerson.create_from_popit(
        self.api, self.queued_image.popit_person_id)
    candidate_path = person.get_absolute_url()
    candidate_name = person.name
    candidate_link = u'<a href="{url}">{name}</a>'.format(
        url=candidate_path,
        name=candidate_name,
    )
    photo_review_url = self.request.build_absolute_uri(
        self.queued_image.get_absolute_url())

    def flash(level, message):
        # Small helper: flash a message tagged so the template renders
        # it as safe HTML in the photo-review area.
        messages.add_message(self.request, level, message,
                             extra_tags='safe photo-review')

    if decision == 'approved':
        # Crop the image...
        crop_fields = ('x_min', 'y_min', 'x_max', 'y_max')
        self.crop_and_upload_image_to_popit(
            self.queued_image.image.path,
            [form.cleaned_data[e] for e in crop_fields],
            form.cleaned_data['moderator_why_allowed'],
            form.cleaned_data['make_primary'],
        )
        self.queued_image.decision = 'approved'
        # Persist the chosen crop bounds on the queued image:
        for i, field in enumerate(crop_fields):
            setattr(self.queued_image, 'crop_' + field,
                    form.cleaned_data[field])
        self.queued_image.save()
        update_message = _(
            u'Approved a photo upload from '
            u'{uploading_user} who provided the message: '
            u'"{message}"').format(
            uploading_user=self.queued_image.user.username,
            message=self.queued_image.justification_for_use,
        )
        change_metadata = get_change_metadata(self.request, update_message)
        # We have to refetch the person from PopIt, otherwise
        # saving the new version will write back the images array
        # from before we uploaded the image:
        person = PopItPerson.create_from_popit(self.api, person.id)
        person.record_version(change_metadata)
        person.save_to_popit(self.api, self.request.user)
        LoggedAction.objects.create(
            user=self.request.user,
            action_type='photo-approve',
            ip_address=get_client_ip(self.request),
            popit_person_new_version=change_metadata['version_id'],
            popit_person_id=self.queued_image.popit_person_id,
            source=update_message,
        )
        self.send_mail(
            _('YourNextMP image upload approved'),
            render_to_string('moderation_queue/photo_approved_email.txt', {
                'candidate_page_url': person.get_absolute_url(self.request)
            }),
        )
        flash(messages.SUCCESS,
              _(u'You approved a photo upload for %s') % candidate_link)
    elif decision == 'rejected':
        self.queued_image.decision = 'rejected'
        self.queued_image.save()
        update_message = _(
            u'Rejected a photo upload from '
            u'{uploading_user}').format(
            uploading_user=self.queued_image.user.username,
        )
        LoggedAction.objects.create(
            user=self.request.user,
            action_type='photo-reject',
            ip_address=get_client_ip(self.request),
            popit_person_new_version='',
            popit_person_id=self.queued_image.popit_person_id,
            source=update_message,
        )
        # Tell the uploader why and give them a link to try again:
        retry_upload_link = self.request.build_absolute_uri(
            reverse('photo-upload', kwargs={
                'popit_person_id': self.queued_image.popit_person_id
            }))
        self.send_mail(
            _('YourNextMP image moderation results'),
            render_to_string(
                'moderation_queue/photo_rejected_email.txt',
                {
                    'reason': form.cleaned_data['rejection_reason'],
                    'candidate_name': candidate_name,
                    'retry_upload_link': retry_upload_link,
                    'photo_review_url': photo_review_url
                },
            ),
            email_support_too=True,
        )
        flash(messages.INFO,
              _(u'You rejected a photo upload for %s') % candidate_link)
    elif decision == 'undecided':
        # If it's left as undecided, just redirect back to the
        # photo review queue...
        flash(
            messages.INFO,
            _(u'You left a photo upload for {0} in the queue').format(
                candidate_link))
    elif decision == 'ignore':
        self.queued_image.decision = 'ignore'
        self.queued_image.save()
        update_message = _(
            u'Ignored a photo upload from '
            u'{uploading_user} (This usually means it was a duplicate)'
        ).format(uploading_user=self.queued_image.user.username)
        LoggedAction.objects.create(
            user=self.request.user,
            action_type='photo-ignore',
            ip_address=get_client_ip(self.request),
            popit_person_new_version='',
            popit_person_id=self.queued_image.popit_person_id,
            source=update_message,
        )
        flash(
            messages.INFO,
            _(u'You indicated a photo upload for {0} should be ignored').
            format(candidate_link))
    else:
        raise Exception("BUG: unexpected decision {0}".format(decision))
    return HttpResponseRedirect(reverse('photo-review-list'))
def form_valid(self, form):
    """Handle a moderator's decision about a queued candidate photo.

    The decision field selects one of four paths: 'approved' (crop,
    upload to PopIt, record a new person version), 'rejected' (email
    the uploader with the reason), 'undecided' (leave in the queue) or
    'ignore' (mark as a duplicate).  Every path except 'undecided'
    creates a LoggedAction; all paths redirect to the review list.
    """
    decision = form.cleaned_data['decision']
    person = PopItPerson.create_from_popit(
        self.api, self.queued_image.popit_person_id
    )
    candidate_path = person.get_absolute_url()
    candidate_name = person.name
    candidate_link = u'<a href="{url}">{name}</a>'.format(
        url=candidate_path,
        name=candidate_name,
    )
    photo_review_url = self.request.build_absolute_uri(
        self.queued_image.get_absolute_url()
    )

    def flash(level, message):
        # Flash a message tagged so the template renders it as safe
        # HTML in the photo-review area.
        messages.add_message(
            self.request, level, message, extra_tags='safe photo-review'
        )

    if decision == 'approved':
        # Crop the image...
        crop_fields = ('x_min', 'y_min', 'x_max', 'y_max')
        self.crop_and_upload_image_to_popit(
            self.queued_image.image.path,
            [form.cleaned_data[e] for e in crop_fields],
            form.cleaned_data['moderator_why_allowed'],
            form.cleaned_data['make_primary'],
        )
        self.queued_image.decision = 'approved'
        # Persist the chosen crop bounds on the queued image:
        for i, field in enumerate(crop_fields):
            setattr(
                self.queued_image, 'crop_' + field, form.cleaned_data[field]
            )
        self.queued_image.save()
        update_message = _(u'Approved a photo upload from '
                           u'{uploading_user} who provided the message: '
                           u'"{message}"').format(
            uploading_user=self.queued_image.user.username,
            message=self.queued_image.justification_for_use,
        )
        change_metadata = get_change_metadata(
            self.request, update_message
        )
        # We have to refetch the person from PopIt, otherwise
        # saving the new version will write back the images array
        # from before we uploaded the image:
        person = PopItPerson.create_from_popit(self.api, person.id)
        person.record_version(change_metadata)
        person.save_to_popit(self.api, self.request.user)
        LoggedAction.objects.create(
            user=self.request.user,
            action_type='photo-approve',
            ip_address=get_client_ip(self.request),
            popit_person_new_version=change_metadata['version_id'],
            popit_person_id=self.queued_image.popit_person_id,
            source=update_message,
        )
        self.send_mail(
            _('YourNextMP image upload approved'),
            render_to_string(
                'moderation_queue/photo_approved_email.txt',
                {'candidate_page_url': person.get_absolute_url(self.request)}
            ),
        )
        flash(
            messages.SUCCESS,
            _(u'You approved a photo upload for %s') % candidate_link
        )
    elif decision == 'rejected':
        self.queued_image.decision = 'rejected'
        self.queued_image.save()
        update_message = _(u'Rejected a photo upload from '
                           u'{uploading_user}').format(
            uploading_user=self.queued_image.user.username,
        )
        LoggedAction.objects.create(
            user=self.request.user,
            action_type='photo-reject',
            ip_address=get_client_ip(self.request),
            popit_person_new_version='',
            popit_person_id=self.queued_image.popit_person_id,
            source=update_message,
        )
        # Tell the uploader why and give them a link to try again:
        retry_upload_link = self.request.build_absolute_uri(
            reverse(
                'photo-upload',
                kwargs={'popit_person_id': self.queued_image.popit_person_id}
            )
        )
        self.send_mail(
            _('YourNextMP image moderation results'),
            render_to_string(
                'moderation_queue/photo_rejected_email.txt',
                {'reason': form.cleaned_data['rejection_reason'],
                 'candidate_name': candidate_name,
                 'retry_upload_link': retry_upload_link,
                 'photo_review_url': photo_review_url},
            ),
            email_support_too=True,
        )
        flash(
            messages.INFO,
            _(u'You rejected a photo upload for %s') % candidate_link
        )
    elif decision == 'undecided':
        # If it's left as undecided, just redirect back to the
        # photo review queue...
        flash(
            messages.INFO,
            _(u'You left a photo upload for {0} in the queue').format(
                candidate_link
            )
        )
    elif decision == 'ignore':
        self.queued_image.decision = 'ignore'
        self.queued_image.save()
        update_message = _(u'Ignored a photo upload from '
                           u'{uploading_user} (This usually means it was a duplicate)').format(
            uploading_user=self.queued_image.user.username)
        LoggedAction.objects.create(
            user=self.request.user,
            action_type='photo-ignore',
            ip_address=get_client_ip(self.request),
            popit_person_new_version='',
            popit_person_id=self.queued_image.popit_person_id,
            source=update_message,
        )
        flash(
            messages.INFO,
            _(u'You indicated a photo upload for {0} should be ignored').format(
                candidate_link
            )
        )
    else:
        raise Exception("BUG: unexpected decision {0}".format(decision))
    return HttpResponseRedirect(reverse('photo-review-list'))
def handle(self, username=None, **options):
    """Import Burkina Faso legislative candidates from the bundled CSVs.

    Reads the provincial ('prv-2015') and national ('nat-2015')
    candidate lists (windows-1252 encoded), creates or updates a
    PopItPerson per row, records an import version, and reports any
    party names that could not be matched against PARTY_DATA.
    """
    from slumber.exceptions import HttpClientError
    from candidates.election_specific import PARTY_DATA, shorten_post_label
    from candidates.models import PopItPerson
    from candidates.popit import create_popit_api_object
    # Election slug -> CSV filename (relative to ../../data).
    election_files = {
        'prv-2015': 'listedescandidatsauxelectionslegislativeslisteprovincialeanptic.csv',
        'nat-2015': 'listedescandidatsauxelectionslegislativesanptic.csv'
    }
    # Column indices per election, since the two CSV layouts differ.
    field_map = {
        'prv-2015': {
            'region': 1, 'party': 4, 'list_order': 5, 'first_name': 7,
            'last_name': 6, 'gender': 8, 'birth_date': 9, 'party_short': 3
        },
        'nat-2015': {
            'region': 0, 'party': 2, 'list_order': 3, 'first_name': 5,
            'last_name': 4, 'gender': 6, 'birth_date': 7, 'party_short': 2
        }
    }
    api = create_popit_api_object()
    party_id_missing = {}
    # Invert PARTY_DATA so a party ID can be looked up from its name:
    party_name_to_id = {}
    for party_id, party_name in PARTY_DATA.party_id_to_name.items():
        party_name_to_id[party_name] = party_id
    for election_id, filename in election_files.items():
        csv_filename = join(dirname(__file__), '..', '..', 'data', filename)
        fields = field_map[election_id]
        with codecs.open(csv_filename, 'r', encoding='windows-1252') as f:
            initial = True
            for candidate in unicode_csv_reader(f):
                # skip header line
                if initial:
                    initial = False
                    continue
                region = candidate[fields['region']]
                party = candidate[fields['party']]
                party_list_order = candidate[fields['list_order']]
                first_name = string.capwords(candidate[fields['first_name']])
                last_name = string.capwords(candidate[fields['last_name']])
                gender = candidate[fields['gender']]
                birth_date = None
                # A CSV cell is a string, never None: the old
                # "is not None" test let empty cells through to
                # dateutil.parser.parse, which raises on ''.
                if candidate[fields['birth_date']]:
                    birth_date = str(
                        dateutil.parser.parse(
                            candidate[fields['birth_date']],
                            dayfirst=True).date())
                name = first_name + ' ' + last_name
                # Build a stable import ID from name, party shorthand and
                # birth date; use '' when the birth date is missing rather
                # than crashing when joining None.
                person_id = '-'.join([
                    re.sub(r'[^\w]*', '',
                           re.sub(r' ', '-', strip_accents(name.lower()))),
                    re.sub(r'[^\w]*', '',
                           candidate[fields['party_short']].lower()),
                    birth_date or ''
                ])
                # National candidates are marked 'PAYS' and stand in the
                # single country-wide area:
                if region == 'PAYS':
                    region = 'Burkina Faso'
                election_data, post_data = get_post_data(
                    api, election_id, region)
                person = get_existing_popit_person(person_id)
                if not person:
                    print("No existing person, creating a new one:", name)
                    person = PopItPerson()
                # Remember the source ID so re-running the import finds
                # this person again:
                person.set_identifier('import-id', person_id)
                person.family_name = last_name
                person.given_name = first_name
                person.name = name
                person.gender = gender
                if birth_date:
                    person.birth_date = str(birth_date)
                else:
                    person.birth_date = None
                standing_in_election = {
                    'post_id': post_data['id'],
                    'name': shorten_post_label(post_data['label']),
                    'party_list_position': party_list_order,
                }
                if 'area' in post_data:
                    standing_in_election['mapit_url'] = post_data['area'][
                        'identifier']
                person.standing_in = {
                    election_data.slug: standing_in_election
                }
                change_metadata = get_change_metadata(
                    None, 'Imported candidate from CSV',
                )
                # Collapse runs of spaces before matching the party name:
                party_comp = re.sub(' +', ' ', party)
                if party_comp in party_name_to_id:
                    party_id = party_name_to_id[party_comp]
                    party = party_comp
                else:
                    party_id = party_name_to_id['Unknown Party']
                    party = 'Unknown Party'
                # Track distinct unmatched party names for the report:
                if party_id == UNKNOWN_PARTY_ID and \
                        party_comp not in party_id_missing:
                    party_id_missing[party_comp] = 1
                person.party_memberships = {
                    election_data.slug: {
                        'id': party_id,
                        'name': party,
                        'imported_name': party_comp
                    }
                }
                person.record_version(change_metadata)
                try:
                    person.save_to_popit(api)
                except HttpClientError as hce:
                    print("Got an HttpClientError:", hce.content)
                    raise
    if party_id_missing:
        print("Unmatched party names:")
        for missing_name in party_id_missing.keys():
            print(missing_name)