def AuthenticatedPost(self, org, event):
    """Kick off a deferred officials-CSV export and return its filename as JSON.

    Verifies the votesmart API key exists, chooses the output zip filename up
    front (so the client can poll for it), defers the heavy CSV generation to
    a task queue via ``deferred.defer``, and writes ``{"filename": ...}``.

    Args:
        org: the authenticated organization (pickled for the deferred worker).
        event: the authenticated event (pickled for the deferred worker).
    """
    # Fail fast if the votesmart API key is missing. Catch Exception rather
    # than using a bare ``except:``, which would also swallow SystemExit and
    # KeyboardInterrupt.
    try:
        config_key_db.get_config_key('votesmart')
    except Exception:
        raise Exception("votesmart API key is not defined as a ConfigKey.")

    # get selected event (by datastore key from the request)
    selected_event = event_db.Event.get(self.request.get('event'))

    # decide filename in advance so it can be reported before the work runs
    filename = "%s-officials-%s.zip" % (
        selected_event.filename_friendly_name,
        timestamp_now()
    )

    # package parameters for deferral; pickled because deferred task payloads
    # must be serializable
    params = {
        'org_pickle': pickle.dumps(org),
        'event_pickle': pickle.dumps(event),
        'post_pickle': pickle.dumps(self.request.POST),
    }
    deferred.defer(self._write_csv, params, filename)

    # write filename out as json
    self.response.headers['Content-Type'] = 'application/json'
    self.response.out.write(
        json.dumps({'filename': filename})
    )
def AuthenticatedPost(self, org, event):
    """Begin a bulk CSV export for the admin UI.

    Stores the org/event on the handler, derives a filename from the selected
    event, the org name, and the current timestamp, then delegates to
    ``start_export`` with the admin bulk-worker URL.
    """
    self.org = org
    self.event = event

    # look up the event chosen in the form -> build the download filename
    chosen = event_db.Event.get(self.request.get('event'))
    org_slug = re.sub(r'\W+', '-', org.name.lower())
    filename = "%s-%s-%s.csv" % (
        chosen.filename_friendly_name,
        org_slug,
        timestamp_now()
    )

    self.start_export(
        org, event, '/admin-export-bulk-worker', filename=filename
    )
def start_export(self, org, event, worker_url, filtering_event_key=None, filename=None):
    """Set up and launch a chunked CSV export via the task queue.

    Builds the CSV title/header rows, records the event filter to apply,
    enqueues the first worker task, and reports the output filename to the
    client as JSON.

    Args:
        org: requesting organization; may be None (the title row already
            tolerates a missing org).
        event: event whose work orders are exported.
        worker_url: task-queue handler URL that produces the CSV chunks.
        filtering_event_key: optional explicit event filter; overrides the
            org-based filter selection.
        filename: optional output filename; derived from event/org/timestamp
            when not supplied.
    """
    self.worker_url = worker_url

    # create filename if not supplied
    # NOTE(review): this derivation assumes org is not None — confirm callers.
    if filename is None:
        filename = "%s-%s-%s.csv" % (
            event.filename_friendly_name,
            re.sub(r'\W+', '-', org.name.lower()),
            timestamp_now(),
        )
    self.filename = filename

    # decide header/title row
    header_sio = StringIO()
    writer = csv.writer(header_sio)
    writer.writerow([
        "%s Work Orders. Created %s UTC%s" % (
            event.name,
            str(datetime.datetime.utcnow()).split('.')[0],
            ' by %s' % org.name if org else ''
        )
    ])
    writer.writerow(
        get_csv_fields_list(event.short_name)
    )
    self.csv_header = header_sio.getvalue()
    header_sio.close()

    # select event filter based on parameter or org-user.
    # Guard against org=None here: the title row above treats org as
    # optional, so an unguarded org.is_global_admin would raise
    # AttributeError. A None org falls through to the event-scoped filter.
    if filtering_event_key:
        self.filtering_event_key = filtering_event_key
    elif org and org.is_global_admin:
        self.filtering_event_key = ''
    else:
        self.filtering_event_key = event.key()

    # start first task
    taskqueue.add(
        url=self.worker_url,
        params=self.get_continuation_param_dict(),
        retry_options=taskqueue.TaskRetryOptions(task_retry_limit=3),
    )

    # write filename out as json
    self.response.headers['Content-Type'] = 'application/json'
    self.response.out.write(
        json.dumps({'filename': filename})
    )
def handle(self, org, event):
    """Export the work orders named by ``id_list`` as a bulk CSV.

    404s when no id list was posted; otherwise derives a filename from the
    selected event, org name, and timestamp, and hands off to
    ``start_export`` with the bulk-worker URL. Event filtering is assumed to
    have been applied before this handler, so no filtering key is passed.
    """
    # guard: an empty/missing id list means there is nothing to export
    self.id_list = self.request.get('id_list')
    if not self.id_list:
        self.abort(404)

    # selected event + sanitized org name -> download filename
    picked_event = event_db.Event.get(self.request.get('event'))
    parts = (
        picked_event.filename_friendly_name,
        re.sub(r'\W+', '-', org.name.lower()),
        timestamp_now(),
    )
    filename = "%s-%s-%s.csv" % parts

    self.start_export(
        org,
        event,
        '/export_bulk_worker',
        filtering_event_key=None,  # event filtering handled prior
        filename=filename
    )