def _delete_list_entry(list_entry):
    """Delete the given list_entry from its spreadsheet.

    The entry must have been first retrieved with get_list_feed. Calls
    webapp2.abort on failure.
    """
    GoogleData().delete_list_entry(list_entry)
def _add_new_row(row_dict, spreadsheet_key, worksheet_key):
    """Append a new row, built from `row_dict`, to the given worksheet."""
    new_entry = ListEntry()
    new_entry.from_dict(row_dict)
    GoogleData().add_list_entry(new_entry, spreadsheet_key, worksheet_key)
def _copy_drive_file(file_id, new_title, description):
    """Makes a copy of the given Google Drive file (i.e., spreadsheet),
    with a new title.

    After copying, ownership is transferred from the service account to
    the master user account.
    """
    gdata = GoogleData()
    service = gdata.get_drive_service()
    files_api = service.files()  # pylint: disable=E1103
    permissions_api = service.permissions()  # pylint: disable=E1103

    # Look up the parent folder(s) of the source file
    # (this is essential to maintain share permissions).
    source_info = files_api.get(fileId=file_id).execute()

    # Make the copy.
    copy_body = {
        'title': new_title,
        'description': description,
        'parents': source_info['parents'],
    }
    copied_info = files_api.copy(fileId=file_id, body=copy_body).execute()

    # Transfer ownership from the service account to the real user account.
    master_perm = permissions_api.getIdForEmail(
        email=config.MASTER_EMAIL_ADDRESS).execute()
    permissions_api.update(fileId=copied_info['id'],
                           permissionId=master_perm['id'],
                           transferOwnership=True,
                           body={'role': 'owner'}).execute()
def _copy_drive_file(file_id, new_title, description):
    """Makes a copy of the given Google Drive file (i.e., spreadsheet),
    with a new title.

    After copying, ownership is transferred from the service account to
    the master user account.
    """
    gdata = GoogleData()
    service = gdata.get_drive_service()
    files_api = service.files()  # pylint: disable=E1103
    permissions_api = service.permissions()  # pylint: disable=E1103

    # Look up the parent folder(s) of the source file
    # (this is essential to maintain share permissions).
    source_info = files_api.get(fileId=file_id).execute()

    # Make the copy.
    copy_body = {
        'title': new_title,
        'description': description,
        'parents': source_info['parents'],
    }
    copied_info = files_api.copy(fileId=file_id, body=copy_body).execute()

    # Transfer ownership from the service account to the real user account.
    master_perm = permissions_api.getIdForEmail(
        email=config.MASTER_EMAIL_ADDRESS).execute()
    permissions_api.update(fileId=copied_info['id'],
                           permissionId=master_perm['id'],
                           transferOwnership=True,
                           body={'role': 'owner'}).execute()
def _get_members_renewed_ago(after_datetime, before_datetime):
    """Get the members who were last renewed within the given window.

    Args:
        after_datetime (datetime): Members must have been renewed *after*
            this date. Optional.
        before_datetime (datetime): Members must have been renewed *before*
            this date. Optional.

    Returns:
        List of member list entries. (Caller can get dict with `.to_dict()`.)
    """
    # Note that dates get returned from the spreadsheet as locale-formatted
    # strings, so we can't do a list-feed query to get just the rows we want.
    # Instead we're going to have to go through the whole set and filter them
    # from there.
    assert after_datetime or before_datetime

    googledata = GoogleData()
    list_entries = googledata.get_list_entries(config.MEMBERS_SPREADSHEET_KEY,
                                               config.MEMBERS_WORKSHEET_KEY)

    results = []
    for entry in list_entries:
        entry_dict = entry.to_dict()
        renewed_date = entry_dict.get(config.MEMBER_FIELDS.renewed.name)

        # Use Joined date if Renewed is empty
        if not renewed_date:
            renewed_date = entry_dict.get(config.MEMBER_FIELDS.joined.name)

        # Convert date string to datetime
        if renewed_date:
            try:
                renewed_date = dateutil.parser.parse(renewed_date)
            except (ValueError, OverflowError):
                # Unparseable date string: treat as missing. (Previously a
                # bare `except:`, which also swallowed KeyboardInterrupt
                # and SystemExit.)
                renewed_date = None

        # If we still don't have a renewed date... the user is probably
        # very old or invalid. Set the date to a long time ago, so it gets
        # culled out.
        if not renewed_date:
            renewed_date = datetime.datetime(1970, 1, 1)

        if after_datetime and not (after_datetime <= renewed_date):
            continue
        if before_datetime and not (before_datetime >= renewed_date):
            continue

        # If we passed those two checks, then it's a hit.
        results.append(entry)

    return results
def _get_members_renewed_ago(after_datetime, before_datetime):
    """Get the members who were last renewed within the given window.

    Args:
        after_datetime (datetime): Members must have been renewed *after*
            this date. Optional.
        before_datetime (datetime): Members must have been renewed *before*
            this date. Optional.

    Returns:
        List of member list entries. (Caller can get dict with `.to_dict()`.)
    """
    # Note that dates get returned from the spreadsheet as locale-formatted
    # strings, so we can't do a list-feed query to get just the rows we want.
    # Instead we're going to have to go through the whole set and filter them
    # from there.
    assert (after_datetime or before_datetime)

    googledata = GoogleData()
    list_entries = googledata.get_list_entries(config.MEMBERS_SPREADSHEET_KEY,
                                               config.MEMBERS_WORKSHEET_KEY)

    results = []
    for entry in list_entries:
        entry_dict = entry.to_dict()
        renewed_date = entry_dict.get(config.MEMBER_FIELDS.renewed.name)

        # Use Joined date if Renewed is empty
        if not renewed_date:
            renewed_date = entry_dict.get(config.MEMBER_FIELDS.joined.name)

        # Convert date string to datetime
        if renewed_date:
            try:
                renewed_date = dateutil.parser.parse(renewed_date)
            except (ValueError, OverflowError):
                # Unparseable date string: treat as missing. (Previously a
                # bare `except:`, which also swallowed KeyboardInterrupt
                # and SystemExit.)
                renewed_date = None

        # If we still don't have a renewed date... the user is probably
        # very old or invalid. Set the date to a long time ago, so it gets
        # culled out.
        if not renewed_date:
            renewed_date = datetime.datetime(1970, 1, 1)

        if after_datetime and not (after_datetime <= renewed_date):
            continue
        if before_datetime and not (before_datetime >= renewed_date):
            continue

        # If we passed those two checks, then it's a hit.
        results.append(entry)

    return results
def _get_single_list_entry(querystring, spreadsheet_key, worksheet_key):
    """Returns the first ListEntry matching `querystring`, or None if no
    rows match.
    """
    matches = GoogleData().get_list_entries(spreadsheet_key,
                                            worksheet_key,
                                            query=querystring)
    return matches[0] if matches else None
def _get_all_rows(spreadsheet_key, worksheet_key, sort_name=None):
    """Returns a list of dicts of row data.

    If `sort_name` is given, rows are ordered by that column.
    """
    order_by = ('column:%s' % sort_name) if sort_name else None
    entries = GoogleData().get_list_entries(spreadsheet_key,
                                            worksheet_key,
                                            order_by=order_by)
    return [e.to_dict() for e in entries]
def process_mailchimp_updates(): """Checks Members and Volunteers spreadsheets for records that need updating in MailChimp. """ # See comment in `cull_members_sheet()` for why we're using `taskqueue` # to process these records one at a time. googledata = GoogleData() for fields, spreadsheet_key, worksheet_key, mailchimp_upsert in ( (config.MEMBER_FIELDS, config.MEMBERS_SPREADSHEET_KEY, config.MEMBERS_WORKSHEET_KEY, mailchimp.upsert_member_info), (config.VOLUNTEER_FIELDS, config.VOLUNTEERS_SPREADSHEET_KEY, config.VOLUNTEERS_WORKSHEET_KEY, mailchimp.upsert_volunteer_info), ): querystring = '%s==""' % (fields.mailchimp_updated.name, ) list_entries = googledata.get_list_entries(spreadsheet_key, worksheet_key, query=querystring) for entry in list_entries: entry_dict = entry.to_dict() if not entry_dict.get(fields.id.name): logging.error('Member missing ID value: %s', entry_dict) continue if not entry_dict.get(fields.email.name): # If there's no email, we don't add to MailChimp continue # Updated MailChimp mailchimp_upsert(entry_dict) # Set the MailChimp update datetime entry.set_value(fields.mailchimp_updated.name, utils.current_datetime()) # Update the spreadsheet _update_list_entry(entry) # We've updated one record successfully. Enqueue another run and exit. taskqueue.add(url='/tasks/process-mailchimp-updates') return
def _get_first_worksheet_id(spreadsheet_key):
    """Mostly used as a hand-run function to get the ID of the first
    worksheet in a spreadsheet (which is otherwise remarkably difficult
    to figure out).
    """
    sheets = GoogleData().get_worksheets(spreadsheet_key)
    if not sheets:
        logging.error('No worksheet found?!?')
        return None
    # The worksheet ID is the last path component of the worksheet's
    # self-link URL. There is surely a less hacky way to do this.
    self_url = sheets[0].get_self_link().href
    return self_url.rsplit('/', 1)[-1]
def process_mailchimp_updates(): """Checks Members and Volunteers spreadsheets for records that need updating in MailChimp. """ # See comment in `cull_members_sheet()` for why we're using `taskqueue` # to process these records one at a time. googledata = GoogleData() for fields, spreadsheet_key, worksheet_key, mailchimp_upsert in ( (config.MEMBER_FIELDS, config.MEMBERS_SPREADSHEET_KEY, config.MEMBERS_WORKSHEET_KEY, mailchimp.upsert_member_info), (config.VOLUNTEER_FIELDS, config.VOLUNTEERS_SPREADSHEET_KEY, config.VOLUNTEERS_WORKSHEET_KEY, mailchimp.upsert_volunteer_info), ): querystring = '%s==""' % (fields.mailchimp_updated.name,) list_entries = googledata.get_list_entries(spreadsheet_key, worksheet_key, query=querystring) for entry in list_entries: entry_dict = entry.to_dict() if not entry_dict.get(fields.id.name): logging.error('Member missing ID value: %s', entry_dict) continue if not entry_dict.get(fields.email.name): # If there's no email, we don't add to MailChimp continue # Updated MailChimp mailchimp_upsert(entry_dict) # Set the MailChimp update datetime entry.set_value(fields.mailchimp_updated.name, utils.current_datetime()) # Update the spreadsheet _update_list_entry(entry) # We've updated one record successfully. Enqueue another run and exit. taskqueue.add(url='/tasks/process-mailchimp-updates') return
def _update_all_members_address_latlong():
    """One-off helper to fill in the `address_latlong` field for legacy
    members.
    """
    googledata = GoogleData()
    entries = googledata.get_list_entries(config.MEMBERS_SPREADSHEET_KEY,
                                          config.MEMBERS_WORKSHEET_KEY)

    for entry in entries:
        record = entry.to_dict()

        # Skip rows that already have a lat/long filled in.
        if record.get(config.MEMBER_FIELDS.address_latlong.name):
            continue

        latlong = helpers.latlong_for_record(config.MEMBER_FIELDS, record)
        if not latlong:
            continue

        entry.set_value(config.MEMBER_FIELDS.address_latlong.name, latlong)
        _update_list_entry(entry)