def update_teams(self, user):
    """Synchronize this user's team memberships from Lastuser into the local team model."""
    # Are we tracking teams? Sync data from Lastuser.
    if self.teammodel:
        allteamdata = user.userinfo.get('teams', [])
        # Team ids the user is currently a member of, per Lastuser's payload
        user_team_ids = [
            t['userid'] for t in allteamdata if t.get('member')
        ]
        # Group team records by owning organization
        org_teams = {}
        for t in allteamdata:
            org_teams.setdefault(t['org'], []).append(t)
        for orgid, teams in org_teams.items():
            if ('teams' in user.access_scope or 'teams/*' in user.access_scope
                    ) and orgid in user.organizations_owned_ids():
                # 1/4: Remove teams that are no longer in lastuser, provided we have
                # an authoritative list ('teams' is in scope and the user owns the organization)
                removed_teams = self.teammodel.query.filter_by(
                    orgid=orgid).filter(~self.teammodel.userid.in_(
                        [t['userid'] for t in teams])).all()
                for team in removed_teams:
                    self.db.session.delete(team)
            for teamdata in teams:
                # 2/4: Create teams
                team = self.teammodel.query.filter_by(
                    userid=teamdata['userid']).first()
                if team is None:
                    team = self.teammodel(userid=teamdata['userid'],
                                          orgid=teamdata['org'],
                                          title=teamdata['title'],
                                          owners=getbool(
                                              teamdata['owners']))
                    team.members = getbool(teamdata['members'])
                    self.db.session.add(team)
                else:
                    # Check if title has changed. The others will never change
                    if team.title != teamdata['title']:
                        team.title = teamdata['title']
                if team.userid in user_team_ids:
                    # 3/4: Add user to teams they are in
                    if user not in team.users:
                        team.users.append(user)
                else:
                    # 4/4: Remove users from teams they are no longer in
                    if user in team.users:
                        team.users.remove(user)
    # Commit this so that token info is saved even if the user account is an existing account.
    # This is called before the request is processed by the client app, so there should be no
    # other data in the transaction
    self.db.session.commit()
def org_revenue(organization):
    """Return weekly revenue (or refunds, with ?refund) for an organization's item collections.

    Requires ``year`` and ``timezone`` query arguments; responds with an API
    error (400) when either is missing or the timezone is unrecognized.
    """
    check_api_access(organization.details.get('access_token'))
    if not request.args.get('year'):
        return api_error(message=_(u"Missing year"), status_code=400)
    if not request.args.get('timezone'):
        return api_error(message=_(u"Missing timezone"), status_code=400)
    if request.args.get('timezone') not in pytz.common_timezones:
        return api_error(
            message=_(u"Unknown timezone. Timezone is case-sensitive"),
            status_code=400)
    item_collection_ids = [
        item_collection.id
        for item_collection in organization.item_collections
    ]
    year = int(request.args.get('year'))
    user_timezone = request.args.get('timezone')
    if getbool(request.args.get('refund')):
        result = calculate_weekly_refunds(item_collection_ids, user_timezone,
                                          year).items()
        # Translate first, then interpolate: calling .format() before _() would
        # look up the already-interpolated string in the catalog and never match
        doc = _(u"Refunds per week for {year}").format(year=year)
    else:
        # sales includes confirmed and cancelled line items
        result = calculate_weekly_sales(item_collection_ids, user_timezone,
                                        year).items()
        doc = _(u"Revenue per week for {year}").format(year=year)
    return api_success(result=result, doc=doc, status_code=200)
def decorated_function(*args, **kwargs):
    # Login-handler wrapper: verifies the browser accepts cookies (via a
    # ?cookietest=1 redirect round-trip) before completing the login flow.
    g.login_required = True
    data = f(*args, **kwargs)
    metarefresh = getbool(request.args.get('metarefresh'))
    if 'cookietest' in request.args:
        next = get_next_url()
    else:
        next = data.get('next') or get_next_url(referrer=True)
    if session.new and 'cookietest' not in request.args:
        # Check if the user's browser supports cookies
        session['cookies'] = True
        # Reconstruct current URL with ?cookietest=1 or &cookietest=1 appended
        url_parts = urlparse.urlsplit(request.url)
        if url_parts.query:
            return redirect(request.url + '&cookietest=1&next=' + urllib.quote(next))
        else:
            return redirect(request.url + '?cookietest=1&next=' + urllib.quote(next))
    else:
        if session.new:
            # No support for cookies. Abort login
            return self._auth_error_handler('no_cookies',
                error_description=u"Your browser must accept cookies for you to login.",
                error_uri="")
        else:
            # The 'cookies' key is not needed anymore
            session.pop('cookies', None)
    scope = data.get('scope', 'id')
    message = data.get('message') or request.args.get('message')
    # Python 2: encode unicode messages to UTF-8 bytes before passing along
    if isinstance(message, unicode):
        message = message.encode('utf-8')
    return self._login_handler_internal(scope, next, message, metarefresh)
def checkin_puk(self, profile, project, ticket_event, puk):
    """Check a ticket participant in or out of an event, looked up by PUK."""
    # NOTE(review): this unconditional abort disables the endpoint — everything
    # below it is unreachable. Presumably deliberate; confirm before removing.
    abort(403)
    checked_in = getbool(request.form.get('checkin', 't'))
    ticket_event = (
        TicketEvent.query.join(Project, Profile)
        .filter(
            # Profile names compare case-insensitively
            db.func.lower(Profile.name) == db.func.lower(profile),
            Project.name == project,
            TicketEvent.name == ticket_event,
        )
        .first_or_404()
    )
    ticket_participant = (
        TicketParticipant.query.join(Project, Profile)
        .filter(
            db.func.lower(Profile.name) == db.func.lower(profile),
            Project.name == project,
            TicketParticipant.puk == puk,
        )
        .first_or_404()
    )
    attendee = TicketEventParticipant.get(ticket_event, ticket_participant.uuid_b58)
    if not attendee:
        return (
            {'error': 'not_found', 'error_description': "Attendee not found"},
            404,
        )
    attendee.checked_in = checked_in
    db.session.commit()
    return {'attendee': {'fullname': ticket_participant.fullname}}
def item_collection_listing(organization, item_collection):
    """Render the public listing page for an item collection."""
    display_title = getbool(request.args.get('show_title', True))
    return render_template(
        'item_collection_listing.html.jinja2',
        organization=organization,
        item_collection=item_collection,
        show_title=display_title,
        boxoffice_js=Markup(render_boxoffice_js()),
    )
def contacts(self):
    """Grouped list of contacts"""
    show_archived = getbool(request.args.get('archived'))
    grouped = ContactExchange.grouped_counts_for(
        current_auth.user, archived=show_archived)
    return {'contacts': grouped}
def checkin_puk(profile, project, event, participant):
    """Set a participant's check-in state for an event; 404 if not an attendee."""
    new_state = getbool(request.form.get('checkin'))
    attendee = Attendee.get(event, participant.id)
    if not attendee:
        return make_response(
            jsonify(error='not_found', error_description="Attendee not found"),
            404)
    attendee.checked_in = new_state
    db.session.commit()
    return jsonify(attendee={'fullname': participant.fullname})
def resource_id(authtoken, args, files=None):
    """Return user's id"""
    # ?all with a truthy value returns the full userinfo with permissions
    want_all = 'all' in args and getbool(args['all'])
    if want_all:
        return get_userinfo(authtoken.user, authtoken.client,
                            scope=authtoken.scope, get_permissions=True)
    return get_userinfo(authtoken.user, authtoken.client,
                        scope=['id'], get_permissions=False)
def load_alt_names(fd):
    # Load GeoNames alternate-name records from a TSV file handle into GeoAltName rows.
    progress = get_progressbar()
    print "Loading..."
    # First pass counts lines so the progress bar has a maximum, then rewind
    size = sum(1 for line in fd)
    fd.seek(0)  # Return to start
    loadprogress = ProgressBar(maxval=size, widgets=[
        progressbar.widgets.Percentage(), ' ',
        progressbar.widgets.Bar(), ' ',
        progressbar.widgets.ETA(), ' '
        ]).start()

    def update_progress(counter):
        # Always returns True so it can sit inside the comprehension's filter
        loadprogress.update(counter + 1)
        return True

    # Only keep alternate names whose geonameid exists locally
    geonameids = set([r[0] for r in db.session.query(GeoName.id).all()])
    altnames = [
        GeoAltNameRecord(*row)
        for counter, row in enumerate(unicodecsv.reader(fd, delimiter='\t'))
        if update_progress(counter) and not row[0].startswith('#')
        and int(row[1]) in geonameids
    ]
    loadprogress.finish()
    print "Processing %d records..." % len(altnames)
    GeoAltName.query.all(
        )  # Load all data into session cache for faster lookup
    for item in progress(altnames):
        if item.geonameid:
            rec = GeoAltName.query.get(int(item.id))
            if rec is None:
                rec = GeoAltName()
                db.session.add(rec)
            # Upsert: overwrite all fields whether the row is new or existing
            rec.id = int(item.id)
            rec.geonameid = int(item.geonameid)
            rec.lang = item.lang or None
            rec.title = item.title
            rec.is_preferred_name = getbool(item.is_preferred_name) or False
            rec.is_short_name = getbool(item.is_short_name) or False
            rec.is_colloquial = getbool(item.is_colloquial) or False
            rec.is_historic = getbool(item.is_historic) or False
    db.session.commit()
def update_teams(self, user):
    """Synchronize this user's team memberships from Lastuser into the local team model."""
    # Are we tracking teams? Sync data from Lastuser.
    if self.teammodel:
        allteamdata = user.userinfo.get('teams', [])
        # Team ids the user is currently a member of, per Lastuser's payload
        user_team_ids = [t['userid'] for t in allteamdata if t.get('member')]
        # Group team records by owning organization
        org_teams = {}
        for t in allteamdata:
            org_teams.setdefault(t['org'], []).append(t)
        for orgid, teams in org_teams.items():
            if ('teams' in user.access_scope or 'teams/*' in user.access_scope) and orgid in user.organizations_owned_ids():
                # 1/4: Remove teams that are no longer in lastuser, provided we have
                # an authoritative list ('teams' is in scope and the user owns the organization)
                removed_teams = self.teammodel.query.filter_by(orgid=orgid).filter(
                    ~self.teammodel.userid.in_([t['userid'] for t in teams])).all()
                for team in removed_teams:
                    self.db.session.delete(team)
            for teamdata in teams:
                # 2/4: Create teams
                team = self.teammodel.query.filter_by(userid=teamdata['userid']).first()
                if team is None:
                    team = self.teammodel(userid=teamdata['userid'], orgid=teamdata['org'],
                        title=teamdata['title'], owners=getbool(teamdata['owners']))
                    team.members = getbool(teamdata['members'])
                    self.db.session.add(team)
                else:
                    # Check if title has changed. The others will never change
                    if team.title != teamdata['title']:
                        team.title = teamdata['title']
                if team.userid in user_team_ids:
                    # 3/4: Add user to teams they are in
                    if user not in team.users:
                        team.users.append(user)
                else:
                    # 4/4: Remove users from teams they are no longer in
                    if user in team.users:
                        team.users.remove(user)
    # Commit this so that token info is saved even if the user account is an existing account.
    # This is called before the request is processed by the client app, so there should be no
    # other data in the transaction
    self.db.session.commit()
def resource_phone(authtoken, args, files=None):
    """Return user's phone numbers."""
    primary = str(authtoken.user.phone)
    if 'all' in args and getbool(args['all']):
        return {
            'phone': primary,
            'all': [str(number) for number in authtoken.user.phones],
        }
    return {'phone': primary}
def resource_email(authtoken, args, files=None):
    """Return the user's primary email, plus all addresses when ?all is truthy."""
    primary = unicode(authtoken.user.email)
    if 'all' in args and getbool(args['all']):
        return {'email': primary,
                'all': [unicode(addr) for addr in authtoken.user.emails]}
    return {'email': primary}
def resource_email(authtoken, args, files=None):
    """Return the user's primary email, plus all non-private addresses when ?all is truthy."""
    primary = unicode(authtoken.user.email)
    if 'all' in args and getbool(args['all']):
        # Private addresses are excluded from the full listing
        visible = [unicode(addr) for addr in authtoken.user.emails
                   if not addr.private]
        return {'email': primary, 'all': visible}
    return {'email': primary}
def resource_phone(authtoken, args, files=None):
    """Return the user's primary phone number, plus all numbers when ?all is truthy."""
    primary = unicode(authtoken.user.phone)
    if 'all' in args and getbool(args['all']):
        return {'phone': primary,
                'all': [unicode(number) for number in authtoken.user.phones]}
    return {'phone': primary}
def kiosk_mode():
    """Enable or disable kiosk mode for this session, then return to the index."""
    enabling = getbool(request.args.get('enable'))
    if enabling:
        session['kiosk'] = True
        session.permanent = True
        flash("Kiosk mode has been enabled", 'success')
    else:
        session.pop('kiosk', None)
        session.permanent = False
        flash("Kiosk mode has been disabled", 'success')
    return redirect(url_for('index'))
def reset():
    """Password reset request view: take a username or email and send a reset link."""
    # User wants to reset password
    # Ask for username or email, verify it, and send a reset code
    form = PasswordResetRequestForm()
    if getbool(request.args.get('expired')):
        message = _(u"Your password has expired. Please enter your username "
            "or email address to request a reset code and set a new password")
    else:
        message = None
    if request.method == 'GET':
        form.username.data = request.args.get('username')
    if form.validate_on_submit():
        username = form.username.data
        user = form.user
        if '@' in username and not username.startswith('@'):
            # They provided an email address. Send reset email to that address
            email = username
        else:
            # Send to their existing address
            # User.email is a UserEmail object
            email = unicode(user.email)
            if not email and user.emailclaims:
                # Fall back to an unverified claimed address
                email = user.emailclaims[0].email
        if not email:
            # They don't have an email address. Maybe they logged in via Twitter
            # and set a local username and password, but no email. Could happen.
            if len(user.externalids) > 0:
                extid = user.externalids[0]
                return render_message(title=_("Cannot reset password"), message=Markup(_(u"""
                    We do not have an email address for your account. However, your account
                    is linked to <strong>{service}</strong> with the id <strong>{username}</strong>.
                    You can use that to login.
                    """).format(service=login_registry[extid.service].title,
                        username=extid.username or extid.userid)))
            else:
                return render_message(title=_("Cannot reset password"), message=Markup(_(u"""
                    We do not have an email address for your account and therefore cannot
                    email you a reset link. Please contact
                    <a href="mailto:{email}">{email}</a> for assistance.
                    """).format(email=escape(current_app.config['SITE_SUPPORT_EMAIL']))))
        resetreq = PasswordResetRequest(user=user)
        db.session.add(resetreq)
        send_password_reset_link(email=email, user=user, secret=resetreq.reset_code)
        db.session.commit()
        return render_message(title=_("Reset password"), message=_(u"""
            We sent you an email with a link to reset your password. Please check your email.
            If it doesn’t arrive in a few minutes, it may have landed in your spam or junk folder.
            The reset link is valid for 24 hours.
            """))
    return render_form(form=form, title=_("Reset password"), message=message,
        submit=_("Send reset code"), ajax=False)
def reset():
    """Password reset request view: take a username or email and send a reset link.

    The confirmation message shows a masked form of the destination address.
    """
    # User wants to reset password
    # Ask for username or email, verify it, and send a reset code
    form = PasswordResetRequestForm()
    if getbool(request.args.get('expired')):
        message = _(u"Your password has expired. Please enter your username "
            "or email address to request a reset code and set a new password")
    else:
        message = None
    if request.method == 'GET':
        form.username.data = request.args.get('username')
    if form.validate_on_submit():
        username = form.username.data
        user = form.user
        if '@' in username and not username.startswith('@'):
            # They provided an email address. Send reset email to that address
            email = username
        else:
            # Send to their existing address
            # User.email is a UserEmail object
            email = unicode(user.email)
            if not email and user.emailclaims:
                # Fall back to an unverified claimed address
                email = user.emailclaims[0].email
        if not email:
            # They don't have an email address. Maybe they logged in via Twitter
            # and set a local username and password, but no email. Could happen.
            if len(user.externalids) > 0:
                extid = user.externalids[0]
                return render_message(title=_("Cannot reset password"), message=Markup(_(u"""
                    We do not have an email address for your account. However, your account
                    is linked to <strong>{service}</strong> with the id <strong>{username}</strong>.
                    You can use that to login.
                    """).format(service=login_registry[extid.service].title,
                        username=extid.username or extid.userid)))
            else:
                return render_message(title=_("Cannot reset password"), message=Markup(_(u"""
                    We do not have an email address for your account and therefore cannot
                    email you a reset link. Please contact
                    <a href="mailto:{email}">{email}</a> for assistance.
                    """).format(email=escape(current_app.config['SITE_SUPPORT_EMAIL']))))
        resetreq = PasswordResetRequest(user=user)
        db.session.add(resetreq)
        send_password_reset_link(email=email, user=user, secret=resetreq.reset_code)
        db.session.commit()
        # Translate first, then interpolate: formatting before _() would look up
        # the already-interpolated string in the catalog and never find it
        return render_message(title=_("Reset password"), message=_(u"""
            We sent a link to reset your password to your email address: {masked_email}.
            Please check your email. If it doesn’t arrive in a few minutes, it may have
            landed in your spam or junk folder. The reset link is valid for 24 hours.
            """).format(masked_email=mask_email(email)))
    return render_form(form=form, title=_("Reset password"), message=message,
        submit=_("Send reset code"), ajax=False)
def event(profile, space, event):
    """Event check-in page; POST bulk-updates badge-printed state for all participants."""
    participants = Participant.checkin_list(event)
    form = ParticipantBadgeForm()
    if form.validate_on_submit():
        badge_printed = bool(getbool(form.data.get('badge_printed')))
        ids = [participant.id for participant in event.participants]
        db.session.query(Participant).filter(Participant.id.in_(ids)).update(
            {'badge_printed': badge_printed}, False)
        db.session.commit()
        return redirect(url_for('event', profile=space.profile.name,
                                space=space.name, name=event.name), code=303)
    checked_in_count = sum(1 for p in participants if p.checked_in)
    return render_template('event.html', profile=profile, space=space,
                           participants=participants, event=event,
                           badge_form=ParticipantBadgeForm(model=Participant),
                           checked_in_count=checked_in_count,
                           checkin_form=forms.Form())
def edit(self):
    """Edit view for a project, with draft autosave support."""
    if request.method == 'GET':
        # Find draft if it exists
        draft_revision, initial_formdata = self.get_draft_data()
        # Initialize forms with draft initial formdata.
        # If no draft exists, initial_formdata is None.
        # WTForms will ignore formdata if it's None.
        form = ProjectForm(
            obj=self.obj,
            parent=self.obj.profile,
            model=Project,
            formdata=initial_formdata,
        )
        if not self.obj.timezone:
            # Default to the editing user's timezone for new/unset projects
            form.timezone.data = str(current_auth.user.timezone)
        return render_form(
            form=form,
            title=_("Edit project"),
            submit=_("Save changes"),
            autosave=True,
            draft_revision=draft_revision,
        )
    elif request.method == 'POST':
        if getbool(request.args.get('form.autosave')):
            # Autosave posts are handled separately and do not touch the object
            return self.autosave_post()
        else:
            form = ProjectForm(obj=self.obj, parent=self.obj.profile, model=Project)
            if form.validate_on_submit():
                form.populate_obj(self.obj)
                db.session.commit()
                flash(_("Your changes have been saved"), 'info')
                tag_locations.queue(self.obj.id)
                # Find and delete draft if it exists
                if self.get_draft() is not None:
                    self.delete_draft()
                    db.session.commit()
                return redirect(self.obj.url_for(), code=303)
            else:
                # Reset nonce to avoid conflict with autosave
                form.form_nonce.data = form.form_nonce.default()
                return render_form(
                    form=form,
                    title=_("Edit project"),
                    submit=_("Save changes"),
                    autosave=True,
                )
def event_checkin(profile, project, event):
    """Bulk check-in/check-out of participants for an event (CSRF-protected)."""
    form = forms.Form()
    if form.validate_on_submit():
        new_state = getbool(request.form.get('checkin'))
        participant_ids = request.form.getlist('pid')
        for pid in participant_ids:
            # NOTE(review): Attendee.get may return None for an unknown pid —
            # confirm ids are validated upstream
            attendee = Attendee.get(event, pid)
            attendee.checked_in = new_state
        db.session.commit()
        if request.is_xhr:
            return jsonify(status=True, participant_ids=participant_ids,
                           checked_in=new_state)
    return redirect(url_for('event', profile=project.profile.name,
                            project=project.name, name=event.name), code=303)
def resource_email(authtoken, args, files=None):
    """Return user's email addresses."""
    primary = str(authtoken.user.email)
    if 'all' in args and getbool(args['all']):
        # Private addresses are excluded from the full listing
        visible = [str(addr) for addr in authtoken.user.emails
                   if not addr.private]
        return {'email': primary, 'all': visible}
    return {'email': primary}
def project_csv(self, uuid_b58):
    """Contacts for a given project in CSV format"""
    show_archived = getbool(request.args.get('archived'))
    project = self.get_project(uuid_b58)
    rows = ContactExchange.contacts_for_project(
        current_auth.user, project, show_archived)
    filename = 'contacts-{project}'.format(project=make_name(project.title))
    return self.contacts_to_csv(rows, timezone=project.timezone,
                                filename=filename)
def resource_id(authtoken, args, files=None):
    """Return user's basic identity."""
    # ?all with a truthy value returns full userinfo for the token's scope
    if 'all' in args and getbool(args['all']):
        return get_userinfo(
            authtoken.user,
            authtoken.auth_client,
            scope=authtoken.effective_scope,
            get_permissions=True,
        )
    return get_userinfo(authtoken.user, authtoken.auth_client,
                        scope=['id'], get_permissions=False)
def label_badges(self):
    """Badge data for this event's participants, filtered by printed state (default unprinted)."""
    badge_printed = getbool(request.args.get('badge_printed', 'f'))
    query = (
        TicketParticipant.query.join(TicketEventParticipant)
        .filter(TicketEventParticipant.ticket_event_id == self.obj.id)
        .filter(TicketParticipant.badge_printed == badge_printed)
    )
    return {
        'badge_template': self.obj.badge_template,
        'badges': ticket_participant_badge_data(query.all(), self.obj.project),
    }
def load_alt_names(fd):
    # Load GeoNames alternate-name records from a TSV file handle into GeoAltName rows.
    progress = get_progressbar()
    print "Loading alternate names..."
    # First pass counts lines so the progress bar has a maximum, then rewind
    size = sum(1 for line in fd)
    fd.seek(0)  # Return to start
    loadprogress = ProgressBar(maxval=size,
        widgets=[progressbar.widgets.Percentage(), ' ',
            progressbar.widgets.Bar(), ' ',
            progressbar.widgets.ETA(), ' ']).start()

    def update_progress(counter):
        # Always returns True so it can sit inside the comprehension's filter
        loadprogress.update(counter + 1)
        return True

    # Only keep alternate names whose geonameid exists locally
    geonameids = set([r[0] for r in db.session.query(GeoName.id).all()])
    altnames = [GeoAltNameRecord(*row)
        for counter, row in enumerate(unicodecsv.reader(fd, delimiter='\t'))
        if update_progress(counter) and not row[0].startswith('#')
        and int(row[1]) in geonameids]
    loadprogress.finish()
    print "Processing %d records..." % len(altnames)
    GeoAltName.query.all()  # Load all data into session cache for faster lookup
    for item in progress(altnames):
        if item.geonameid:
            rec = GeoAltName.query.get(int(item.id))
            if rec is None:
                rec = GeoAltName()
                db.session.add(rec)
            # Upsert: overwrite all fields whether the row is new or existing
            rec.id = int(item.id)
            rec.geonameid = int(item.geonameid)
            rec.lang = item.lang or None
            rec.title = item.title
            rec.is_preferred_name = getbool(item.is_preferred_name) or False
            rec.is_short_name = getbool(item.is_short_name) or False
            rec.is_colloquial = getbool(item.is_colloquial) or False
            rec.is_historic = getbool(item.is_historic) or False
    db.session.commit()
def event(profile, project, event):
    """Event admin page: import vendor tickets or bulk-set badge-printed state."""
    csrf_form = forms.Form()
    if csrf_form.validate_on_submit():
        # Queue a background import for each recognized ticketing vendor
        for ticket_client in project.ticket_clients:
            if ticket_client and ticket_client.name.lower() in [u'explara', u'boxoffice']:
                import_tickets.queue(ticket_client.id)
        flash(_(u"Importing tickets from vendors...Refresh the page in about 30 seconds..."), 'info')
    form = ParticipantBadgeForm()
    if form.validate_on_submit():
        badge_printed = bool(getbool(form.data.get('badge_printed')))
        ids = [participant.id for participant in event.participants]
        db.session.query(Participant).filter(Participant.id.in_(ids)).update(
            {'badge_printed': badge_printed}, False)
        db.session.commit()
        return redirect(url_for('event', profile=project.profile.name,
                                project=project.name, name=event.name), code=303)
    return render_template('event.html.jinja2', profile=profile, project=project,
                           event=event,
                           badge_form=ParticipantBadgeForm(model=Participant),
                           checkin_form=forms.Form(), csrf_form=csrf_form)
def project_date_csv(self, uuid_b58, datestr):
    """Contacts for a given project and date in CSV format"""
    show_archived = getbool(request.args.get('archived'))
    project = self.get_project(uuid_b58)
    date = datetime.strptime(datestr, '%Y-%m-%d').date()
    rows = ContactExchange.contacts_for_project_and_date(
        current_auth.user, project, date, show_archived)
    filename = 'contacts-{project}-{date}'.format(
        project=make_name(project.title), date=date.strftime('%Y%m%d'))
    return self.contacts_to_csv(rows, timezone=project.timezone,
                                filename=filename)
def checkin(self):
    """Bulk check-in/check-out for this event's ticket participants (CSRF-protected)."""
    form = forms.Form()
    if form.validate_on_submit():
        new_state = getbool(request.form.get('checkin'))
        pids = [abort_null(x) for x in request.form.getlist('puuid_b58')]
        for pid in pids:
            # NOTE(review): TicketEventParticipant.get may return None for an
            # unknown id — confirm ids are validated upstream
            attendee = TicketEventParticipant.get(self.obj, pid)
            attendee.checked_in = new_state
        db.session.commit()
        if request_is_xhr():
            return jsonify(
                status=True,
                ticket_participant_ids=pids,
                checked_in=new_state,
            )
    return redirect(self.obj.url_for('view'), code=303)
def view(self):
    """Ticket event page; POST dispatches on form.id to import tickets or set badge state."""
    if request.method == 'POST':
        if 'form.id' not in request.form:
            abort(400)
        if request.form['form.id'] == 'csrf_form':
            csrf_form = forms.Form()
            if csrf_form.validate_on_submit():
                # Queue a background import for each recognized ticketing vendor
                for ticket_client in self.obj.project.ticket_clients:
                    if ticket_client and ticket_client.name.lower() in [
                        'explara',
                        'boxoffice',
                    ]:
                        import_tickets.queue(ticket_client.id)
                flash(
                    _("Importing tickets from vendors... "
                        "Refresh the page in about 30 seconds..."),
                    'info',
                )
        elif request.form['form.id'] == 'badge_form':
            form = TicketParticipantBadgeForm()
            if form.validate_on_submit():
                # Bulk-update badge_printed for all of this event's participants
                badge_printed = getbool(form.data.get('badge_printed'))
                db.session.query(TicketParticipant).filter(
                    TicketParticipant.id.in_([
                        ticket_participant.id
                        for ticket_participant in self.obj.ticket_participants
                    ])).update({'badge_printed': badge_printed},
                        synchronize_session=False)
                db.session.commit()
                return redirect(self.obj.url_for('view'), code=303)
        else:
            # Unknown form
            abort(400)
    return {
        'profile': self.obj.project.profile,
        'ticket_event': self.obj,
        'project': self.obj.project,
        'badge_form': TicketParticipantBadgeForm(model=TicketParticipant),
        'checkin_form': forms.Form(),
        'csrf_form': forms.Form(),
    }
def event(profile, space, event):
    """Event check-in page; POST bulk-updates badge-printed state for all participants."""
    participants = Participant.checkin_list(event)
    badge_form = ParticipantBadgeForm()
    if badge_form.validate_on_submit():
        printed = bool(getbool(badge_form.data.get('badge_printed')))
        participant_ids = [p.id for p in event.participants]
        db.session.query(Participant).filter(
            Participant.id.in_(participant_ids)
        ).update({'badge_printed': printed}, False)
        db.session.commit()
        return redirect(url_for('event', profile=space.profile.name,
                                space=space.name, name=event.name), code=303)
    checked_in_count = sum(1 for p in participants if p.checked_in)
    return render_template('event.html', profile=profile, space=space,
                           participants=participants, event=event,
                           badge_form=ParticipantBadgeForm(model=Participant),
                           checked_in_count=checked_in_count,
                           checkin_form=forms.Form())
def org_revenue(organization):
    """Return weekly revenue (or refunds, with ?refund) for an organization's item collections.

    Requires ``year`` and ``timezone`` query arguments; responds with an API
    error (400) when either is missing or the timezone is unrecognized.
    """
    check_api_access(organization.details.get('access_token'))
    if not request.args.get('year'):
        return api_error(message=_(u"Missing year"), status_code=400)
    if not request.args.get('timezone'):
        return api_error(message=_(u"Missing timezone"), status_code=400)
    if request.args.get('timezone') not in pytz.common_timezones:
        return api_error(message=_(u"Unknown timezone. Timezone is case-sensitive"), status_code=400)
    item_collection_ids = [item_collection.id for item_collection in organization.item_collections]
    year = int(request.args.get('year'))
    user_timezone = request.args.get('timezone')
    if getbool(request.args.get('refund')):
        result = calculate_weekly_refunds(item_collection_ids, user_timezone, year).items()
        # Translate first, then interpolate: calling .format() before _() would
        # look up the already-interpolated string in the catalog and never match
        doc = _(u"Refunds per week for {year}").format(year=year)
    else:
        # sales includes confirmed and cancelled line items
        result = calculate_weekly_sales(item_collection_ids, user_timezone, year).items()
        doc = _(u"Revenue per week for {year}").format(year=year)
    return api_success(result=result, doc=doc, status_code=200)
def browse_tags():
    """Render the tag browser; a truthy ?all switches to all-time tags."""
    alltime = getbool(request.args.get('all'))
    return render_template('tags.html', tags=gettags(alltime=alltime))
def index(basequery=None, md5sum=None, tag=None, domain=None, location=None,
        title=None, showall=True, statuses=None, batched=True, ageless=False,
        template_vars={}):
    # Job listing view: applies request.args filters to basequery, groups or
    # flattens the resulting posts, batches them for infinite scroll, and
    # returns a template context dict.
    # NOTE(review): template_vars={} is a mutable default argument — shared
    # across calls if ever mutated; confirm callers never modify it in place.
    is_siteadmin = lastuser.has_permission('siteadmin')
    if basequery is None:
        is_index = True
    else:
        is_index = False
    now = datetime.utcnow()
    # Anonymous visitors on the index get a trimmed, unbatched listing
    if basequery is None and not (g.user or g.kiosk or (g.board and not g.board.require_login)):
        showall = False
        batched = False
    if basequery is None:
        basequery = JobPost.query
    # Apply request.args filters
    data_filters = {}
    f_types = request.args.getlist('t')
    while '' in f_types:
        f_types.remove('')
    if f_types:
        data_filters['types'] = f_types
        basequery = basequery.join(JobType).filter(JobType.name.in_(f_types))
    f_categories = request.args.getlist('c')
    while '' in f_categories:
        f_categories.remove('')
    if f_categories:
        data_filters['categories'] = f_categories
        basequery = basequery.join(JobCategory).filter(
            JobCategory.name.in_(f_categories))
    r_locations = request.args.getlist('l')
    if location:
        r_locations.append(location['geonameid'])
    f_locations = []
    remote_location = getbool(request.args.get('anywhere')) or False
    for rl in r_locations:
        if isinstance(rl, int) and rl > 0:
            f_locations.append(rl)
        elif rl == 'anywhere':
            remote_location = True
        elif rl.isdigit():
            f_locations.append(int(rl))
        elif rl:
            # Resolve a location name to a geonameid
            ld = location_geodata(rl)
            if ld:
                f_locations.append(ld['geonameid'])
    remote_location_query = basequery.filter(
        JobPost.remote_location == True)  # NOQA
    if f_locations:
        locations_query = basequery.join(JobLocation).filter(
            JobLocation.geonameid.in_(f_locations))
    else:
        locations_query = basequery.join(JobLocation)
    if f_locations and remote_location:
        data_filters['locations'] = f_locations
        data_filters['anywhere'] = True
        recency = JobPost.datetime > datetime.utcnow() - agelimit
        basequery = locations_query.filter(recency).union(
            remote_location_query.filter(recency))
    elif f_locations:
        data_filters['locations'] = f_locations
        basequery = locations_query
    elif remote_location:
        data_filters['anywhere'] = True
        # Only works as a positive filter: you can't search for jobs that are NOT anywhere
        basequery = remote_location_query
    if 'currency' in request.args and request.args[
            'currency'] in CURRENCY.keys():
        currency = request.args['currency']
        data_filters['currency'] = currency
        basequery = basequery.filter(JobPost.pay_currency == currency)
        pay_graph = currency
    else:
        pay_graph = False
    if getbool(request.args.get('equity')):
        # Only works as a positive filter: you can't search for jobs that DON'T pay in equity
        data_filters['equity'] = True
        basequery = basequery.filter(JobPost.pay_equity_min != None)  # NOQA
    if 'pay' in request.args or ('pmin' in request.args and 'pmax' in request.args):
        if 'pay' in request.args:
            # Single pay value: match a band of 90%-130% around it
            f_pay = string_to_number(request.args['pay'])
            f_min = int(f_pay * 0.90)
            f_max = int(f_pay * 1.30)
        else:
            # Legacy URL with min/max values
            f_min = string_to_number(request.args['pmin'])
            f_max = string_to_number(request.args['pmax'])
            f_pay = f_min  # Use min for pay now
        if f_pay is not None and f_min is not None and f_max is not None:
            data_filters['pay'] = f_pay
            basequery = basequery.filter(JobPost.pay_cash_min < f_max,
                JobPost.pay_cash_max >= f_min)
    else:
        f_pay = None
        f_min = None
        f_max = None
    if getbool(request.args.get('archive')):
        ageless = True
        data_filters['archive'] = True
        statuses = POSTSTATUS.ARCHIVED
    search_domains = None
    if request.args.get('q'):
        q = for_tsquery(request.args['q'])
        try:
            # TODO: Can we do syntax validation without a database roundtrip?
            db.session.query(db.func.to_tsquery(q)).all()
        except ProgrammingError:
            db.session.rollback()
            g.event_data['search_syntax_error'] = (request.args['q'], q)
            if not request.is_xhr:
                flash(
                    _(u"Search terms ignored because this didn’t parse: {query}"
                        ).format(query=q), 'danger')
        else:
            # Query's good? Use it.
            data_filters['query'] = q
            search_domains = Domain.query.filter(
                Domain.search_vector.match(q, postgresql_regconfig='english'),
                Domain.is_banned == False).options(
                    db.load_only('name', 'title', 'logo_url')).all()  # NOQA
            basequery = basequery.filter(
                JobPost.search_vector.match(q, postgresql_regconfig='english'))
    if data_filters:
        # Any active filter implies a full, batched listing
        g.event_data['filters'] = data_filters
        showall = True
        batched = True
    # getposts sets g.board_jobs, used below
    posts = getposts(basequery, pinned=True, showall=showall,
        statuses=statuses, ageless=ageless).all()
    if is_siteadmin or (g.user and g.user.flags.get('is_employer_month')):
        cache_viewcounts(posts)
    if posts:
        employer_name = posts[0].company_name
    else:
        employer_name = u'a single employer'
    if g.user:
        g.starred_ids = set(
            g.user.starred_job_ids(agelimit if not ageless else None))
    else:
        g.starred_ids = set()
    jobpost_ab = session_jobpost_ab()
    # Make lookup slightly faster in the loop below since 'g' is a proxy
    board = g.board
    if board:
        board_jobs = g.board_jobs
    else:
        board_jobs = {}
    if is_index and posts and not g.kiosk:
        # Group posts by email_domain on index page only, when not in kiosk mode
        grouped = OrderedDict()
        for post in posts:
            pinned = post.pinned
            if board is not None:
                blink = board_jobs.get(
                    post.id
                    )  # board_jobs only contains the last 30 days, no archive
                if blink is not None:
                    pinned = blink.pinned
            if pinned:
                # Make pinned posts appear in a group of one
                grouped.setdefault(('s', post.hashid), []).append(
                    (pinned, post, bgroup(jobpost_ab, post)))
            elif post.status == POSTSTATUS.ANNOUNCEMENT:
                # Make announcements also appear in a group of one
                grouped.setdefault(('a', post.hashid), []).append(
                    (pinned, post, bgroup(jobpost_ab, post)))
            elif post.domain.is_webmail:
                grouped.setdefault(('ne', post.md5sum), []).append(
                    (pinned, post, bgroup(jobpost_ab, post)))
            else:
                grouped.setdefault(('nd', post.email_domain), []).append(
                    (pinned, post, bgroup(jobpost_ab, post)))
        pinsandposts = None
    else:
        grouped = None
        if g.board:
            pinsandposts = []
            for post in posts:
                pinned = post.pinned
                if board is not None:
                    blink = board_jobs.get(
                        post.id
                        )  # board_jobs only contains the last 30 days, no archive
                    if blink is not None:
                        pinned = blink.pinned
                pinsandposts.append((pinned, post, bgroup(jobpost_ab, post)))
        else:
            pinsandposts = [(post.pinned, post, bgroup(jobpost_ab, post))
                for post in posts]
    # Pick a header campaign (only if not kiosk or an XHR reload)
    pay_graph_data = None
    if not g.kiosk:
        if g.preview_campaign:
            header_campaign = g.preview_campaign
        else:
            geonameids = g.user_geonameids + f_locations
            header_campaign = Campaign.for_context(CAMPAIGN_POSITION.HEADER,
                board=g.board, user=g.user, anon_user=g.anon_user,
                geonameids=geonameids)
        if pay_graph:
            pay_graph_data = make_pay_graph(pay_graph, posts, rmin=f_min,
                rmax=f_max)
    else:
        header_campaign = None
    loadmore = False
    if batched:
        # Figure out where the batch should start from
        startdate = None
        if 'startdate' in request.values:
            try:
                startdate = parse_isoformat(request.values['startdate'])
            except ValueError:
                pass
        batchsize = 32
        # list of posts that were pinned at the time of first load
        pinned_hashids = request.args.getlist('ph')
        # Depending on the display mechanism (grouped or ungrouped), extract the batch
        if grouped:
            if not startdate:
                startindex = 0
                for row in grouped.values():
                    # break when a non-pinned post is encountered
                    if (not row[0][0]):
                        break
                    else:
                        pinned_hashids.append(row[0][1].hashid)
            else:
                # Loop through group looking for start of next batch. See below to understand the
                # nesting structure of 'grouped'
                for startindex, row in enumerate(grouped.values()):
                    # Skip pinned posts when looking for starting index
                    if (row[0][1].hashid not in pinned_hashids
                            and row[0][1].datetime < startdate):
                        break
            batch = grouped.items()[startindex:startindex + batchsize]
            if startindex + batchsize < len(grouped):
                # Get the datetime of the last group's first item
                # batch = [((type, domain), [(pinned, post, bgroup), ...])]
                # batch[-1] = ((type, domain), [(pinned, post, bgroup), ...])
                # batch[-1][1] = [(pinned, post, bgroup), ...]
                # batch[-1][1][0] = (pinned, post, bgroup)
                # batch[-1][1][0][1] = post
                loadmore = batch[-1][1][0][1].datetime
            grouped = OrderedDict(batch)
        elif pinsandposts:
            if not startdate:
                startindex = 0
                for row in pinsandposts:
                    # break when a non-pinned post is encountered
                    if not row[0]:
                        break
                    else:
                        pinned_hashids.append(row[1].hashid)
            else:
                for startindex, row in enumerate(pinsandposts):
                    # Skip pinned posts when looking for starting index
                    if (row[1].hashid not in pinned_hashids
                            and row[1].datetime < startdate):
                        break
            batch = pinsandposts[startindex:startindex + batchsize]
            if startindex + batchsize < len(pinsandposts):
                # batch = [(pinned, post), ...]
                loadmore = batch[-1][1].datetime
            pinsandposts = batch
    if grouped:
        g.impressions = {
            post.id: (pinflag, post.id, is_bgroup)
            for group in grouped.itervalues()
            for pinflag, post, is_bgroup in group
        }
    elif pinsandposts:
        g.impressions = {
            post.id: (pinflag, post.id, is_bgroup)
            for pinflag, post, is_bgroup in pinsandposts
        }
    # Test values for development:
    # if not g.user_geonameids:
    #     g.user_geonameids = [1277333, 1277331, 1269750]
    if not location and 'l' not in request.args and g.user_geonameids and (
            g.user or g.anon_user) and (
            (not g.board.auto_locations) if g.board else True):
        # No location filters? Prompt the user
        ldata = location_geodata(g.user_geonameids)
        location_prompts = [
            ldata[geonameid] for geonameid in g.user_geonameids
            if geonameid in ldata
        ]
    else:
        location_prompts = []
    query_params = request.args.to_dict(flat=False)
    if loadmore:
        query_params.update({
            'startdate': loadmore.isoformat() + 'Z',
            'ph': pinned_hashids
        })
    return dict(pinsandposts=pinsandposts, grouped=grouped, now=now,
        newlimit=newlimit, title=title, md5sum=md5sum, domain=domain,
        location=location, employer_name=employer_name, showall=showall,
        is_index=is_index, header_campaign=header_campaign,
        loadmore=loadmore, location_prompts=location_prompts,
        search_domains=search_domains, query_params=query_params,
        is_siteadmin=is_siteadmin, pay_graph_data=pay_graph_data,
        paginated=index_is_paginated(), template_vars=template_vars)
def fetch_jobposts(request_args, request_values, filters, is_index, board, board_jobs, gkiosk, basequery, md5sum, domain, location, title, showall, statusfilter, batched, ageless, template_vars, search_query=None, query_string=None):
    """
    Assemble the job post listing for an index or search page.

    Builds up ``basequery`` from the explicit ``filters`` dict and the request
    arguments (types, categories, domains, tags, locations, currency, equity,
    pay range, archive flag and full-text query), fetches matching posts,
    optionally groups them by employer for the index page, slices out one batch
    for load-more pagination, and returns a dict of template variables.

    NOTE(review): ``r_locations`` aliases ``filters['l']`` when present, so the
    appends below mutate the caller's ``filters`` dict — confirm intended.
    """
    if basequery is None:
        basequery = JobPost.query
    # Apply request.args filters
    data_filters = {}
    f_types = filters.get('t') or request_args.getlist('t')
    # Drop empty-string values that arrive from blank form fields
    while '' in f_types:
        f_types.remove('')
    if f_types:
        data_filters['types'] = f_types
        basequery = basequery.join(JobType).filter(JobType.name.in_(f_types))
    f_categories = filters.get('c') or request_args.getlist('c')
    while '' in f_categories:
        f_categories.remove('')
    if f_categories:
        data_filters['categories'] = f_categories
        basequery = basequery.join(JobCategory).filter(JobCategory.name.in_(f_categories))
    f_domains = filters.get('d') or request_args.getlist('d')
    while '' in f_domains:
        f_domains.remove('')
    # NOTE(review): unlike types/categories, domain and tag filters are not
    # recorded in data_filters — confirm this asymmetry is intended
    if f_domains:
        basequery = basequery.join(Domain).filter(Domain.name.in_(f_domains))
    f_tags = filters.get('k') or request_args.getlist('k')
    while '' in f_tags:
        f_tags.remove('')
    if f_tags:
        basequery = basequery.join(JobPostTag).join(Tag).filter(Tag.name.in_(f_tags))
    # r_locations may be the same list object as filters['l'] (aliased above)
    data_filters['location_names'] = r_locations = filters.get('l') or request_args.getlist('l')
    if location:
        r_locations.append(location['geonameid'])
    f_locations = []
    remote_location = getbool(filters.get('anywhere') or request_args.get('anywhere')) or False
    if remote_location:
        data_filters['location_names'].append('anywhere')
    # Normalize location tokens: ints and digit strings are geonameids,
    # 'anywhere' flags remote work, anything else is resolved via geocoding
    for rl in r_locations:
        if isinstance(rl, int) and rl > 0:
            f_locations.append(rl)
        elif rl == 'anywhere':
            remote_location = True
        elif rl.isdigit():
            f_locations.append(int(rl))
        elif rl:
            ld = location_geodata(rl)
            if ld:
                f_locations.append(ld['geonameid'])
    remote_location_query = basequery.filter(JobPost.remote_location == True)  # NOQA
    if f_locations:
        locations_query = basequery.join(JobLocation).filter(JobLocation.geonameid.in_(f_locations))
    else:
        locations_query = basequery.join(JobLocation)
    if f_locations and remote_location:
        data_filters['locations'] = f_locations
        data_filters['anywhere'] = True
        recency = JobPost.state.LISTED
        # Union of location-matched and remote posts, both restricted to listed state
        basequery = locations_query.filter(recency).union(remote_location_query.filter(recency))
    elif f_locations:
        data_filters['locations'] = f_locations
        basequery = locations_query
    elif remote_location:
        data_filters['anywhere'] = True
        # Only works as a positive filter: you can't search for jobs that are NOT anywhere
        basequery = remote_location_query
    currency = filters.get('currency') or request_args.get('currency')
    if currency in CURRENCY.keys():
        data_filters['currency'] = currency
        basequery = basequery.filter(JobPost.pay_currency == currency)
        pay_graph = currency
    else:
        pay_graph = False
    if getbool(filters.get('equity') or request_args.get('equity')):
        # Only works as a positive filter: you can't search for jobs that DON'T pay in equity
        data_filters['equity'] = True
        basequery = basequery.filter(JobPost.pay_equity_min != None)  # NOQA
    if filters.get('pay') or 'pay' in request_args or ('pmin' in request_args and 'pmax' in request_args):
        if 'pay' in request_args or filters.get('pay'):
            f_pay = filters['pay'] if filters.get('pay') else string_to_number(request_args['pay'])
            if f_pay is not None:
                # Fuzzy match: accept pay ranges from -10% to +30% of the requested figure
                f_min = int(f_pay * 0.90)
                f_max = int(f_pay * 1.30)
            else:
                f_min = None
                f_max = None
        else:
            # Legacy URL with min/max values
            f_min = string_to_number(request_args['pmin'])
            f_max = string_to_number(request_args['pmax'])
            f_pay = f_min  # Use min for pay now
        if f_pay is not None and f_min is not None and f_max is not None:
            data_filters['pay'] = f_pay
            basequery = basequery.filter(JobPost.pay_cash_min < f_max, JobPost.pay_cash_max >= f_min)
    else:
        f_pay = None
        f_min = None
        f_max = None
    if getbool(request_args.get('archive')):
        ageless = True
        data_filters['archive'] = True
        statusfilter = JobPost.state.ARCHIVED
    if query_string:
        data_filters['query'] = search_query
        data_filters['query_string'] = query_string
        basequery = basequery.filter(JobPost.search_vector.match(search_query, postgresql_regconfig='english'))
    if data_filters:
        # Any active filter switches to the full (unbatched-by-age) listing in batches
        showall = True
        batched = True
    posts = getposts(basequery, pinned=True, showall=showall, statusfilter=statusfilter, ageless=ageless).all()
    if getbool(request_args.get('embed')):
        # Embedded widget mode: cap the number of posts shown (default 8)
        embed = True
        if posts:
            limit = string_to_number(request_args.get('limit'))
            if limit is not None:
                posts = posts[:limit]
            else:
                posts = posts[:8]
    else:
        embed = False
    if posts:
        employer_name = posts[0].company_name
    else:
        employer_name = u'a single employer'
    jobpost_ab = session_jobpost_ab()
    if is_index and posts and not gkiosk and not embed:
        # Group posts by email_domain on index page only, when not in kiosk mode
        grouped = OrderedDict()
        for post in posts:
            pinned = post.pinned
            if board is not None:
                blink = board_jobs.get(post.id)  # board_jobs only contains the last 30 days, no archive
                if blink is not None:
                    pinned = blink.pinned
            if pinned:
                # Make pinned posts appear in a group of one
                grouped.setdefault(('s', post.hashid), []).append(
                    (pinned, post, bgroup(jobpost_ab, post)))
            elif post.state.ANNOUNCEMENT:
                # Make announcements also appear in a group of one
                grouped.setdefault(('a', post.hashid), []).append(
                    (pinned, post, bgroup(jobpost_ab, post)))
            elif post.domain.is_webmail:
                # Webmail domains are grouped by post md5sum, not email domain
                grouped.setdefault(('ne', post.md5sum), []).append(
                    (pinned, post, bgroup(jobpost_ab, post)))
            else:
                grouped.setdefault(('nd', post.email_domain), []).append(
                    (pinned, post, bgroup(jobpost_ab, post)))
        pinsandposts = None
    else:
        grouped = None
        if board:
            pinsandposts = []
            for post in posts:
                pinned = post.pinned
                if board is not None:
                    blink = board_jobs.get(post.id)  # board_jobs only contains the last 30 days, no archive
                    if blink is not None:
                        pinned = blink.pinned
                pinsandposts.append((pinned, post, bgroup(jobpost_ab, post)))
        else:
            pinsandposts = [(post.pinned, post, bgroup(jobpost_ab, post)) for post in posts]
    # Pick a header campaign (only if not kiosk or an XHR reload)
    pay_graph_data = None
    loadmore = False
    if batched:
        # Figure out where the batch should start from
        startdate = None
        if 'startdate' in request_values:
            try:
                startdate = parse_isoformat(request_values['startdate'])
            except TypeError:
                abort(400)
            except ValueError:
                pass
        batchsize = 32
        # list of posts that were pinned at the time of first load
        pinned_hashids = request_args.getlist('ph')
        # Depending on the display mechanism (grouped or ungrouped), extract the batch
        if grouped:
            if not startdate:
                startindex = 0
                for row in grouped.values():
                    # break when a non-pinned post is encountered
                    if (not row[0][0]):
                        break
                    else:
                        pinned_hashids.append(row[0][1].hashid)
            else:
                # Loop through group looking for start of next batch. See below to understand the
                # nesting structure of 'grouped'
                for startindex, row in enumerate(grouped.values()):
                    # Skip pinned posts when looking for starting index
                    if (row[0][1].hashid not in pinned_hashids and row[0][1].datetime < startdate):
                        break
            # Python 2 only: dict.items() returns a list here, so it can be sliced
            batch = grouped.items()[startindex:startindex + batchsize]
            if startindex + batchsize < len(grouped):
                # Get the datetime of the last group's first item
                # batch = [((type, domain), [(pinned, post, bgroup), ...])]
                # batch[-1] = ((type, domain), [(pinned, post, bgroup), ...])
                # batch[-1][1] = [(pinned, post, bgroup), ...]
                # batch[-1][1][0] = (pinned, post, bgroup)
                # batch[-1][1][0][1] = post
                loadmore = batch[-1][1][0][1].datetime
            grouped = OrderedDict(batch)
        elif pinsandposts:
            if not startdate:
                startindex = 0
                for row in pinsandposts:
                    # break when a non-pinned post is encountered
                    if not row[0]:
                        break
                    else:
                        pinned_hashids.append(row[1].hashid)
            else:
                for startindex, row in enumerate(pinsandposts):
                    # Skip pinned posts when looking for starting index
                    if (row[1].hashid not in pinned_hashids and row[1].datetime < startdate):
                        break
            batch = pinsandposts[startindex:startindex + batchsize]
            if startindex + batchsize < len(pinsandposts):
                # batch = [(pinned, post), ...]
                loadmore = batch[-1][1].datetime
            pinsandposts = batch
    query_params = request_args.to_dict(flat=False)
    if loadmore:
        # 'Z' suffix marks the timestamp as UTC for the client-side load-more URL
        query_params.update({'startdate': loadmore.isoformat() + 'Z', 'ph': pinned_hashids})
    if location:
        data_filters['location_names'].append(location['name'])
        query_params.update({'l': location['name']})
    if pay_graph:
        pay_graph_data = make_pay_graph(pay_graph, posts, rmin=f_min, rmax=f_max)
    return dict(posts=posts, pinsandposts=pinsandposts, grouped=grouped, newlimit=newlimit,
        title=title, md5sum=md5sum, domain=domain, location=location, employer_name=employer_name,
        showall=showall, f_locations=f_locations, loadmore=loadmore, query_params=query_params,
        data_filters=data_filters, pay_graph_data=pay_graph_data, paginated=index_is_paginated(),
        template_vars=template_vars, embed=embed)
def browse_tags():
    """Render the tag browsing page; `?all=1` includes all-time tags."""
    show_alltime = getbool(request.args.get('all'))
    tag_list = gettags(alltime=show_alltime)
    return render_template('tags.html.jinja2', tags=tag_list)
def fetch_jobposts(request_args, request_values, filters, is_index, board, board_jobs, gkiosk, basequery, md5sum, domain, location, title, showall, statusfilter, batched, ageless, template_vars, search_query=None, query_string=None):
    """
    Assemble the job post listing for an index or search page.

    Builds up ``basequery`` from ``filters`` and the request arguments, fetches
    matching posts, optionally groups them by employer for the index page,
    slices out one batch for load-more pagination, and returns a dict of
    template variables.

    NOTE(review): near-duplicate of another fetch_jobposts definition in this
    file (the other adds embed-mode handling) — verify which copy is current.
    NOTE(review): ``r_locations`` aliases ``filters['l']`` when present, so the
    appends below mutate the caller's ``filters`` dict — confirm intended.
    """
    if basequery is None:
        basequery = JobPost.query
    # Apply request.args filters
    data_filters = {}
    f_types = filters.get('t') or request_args.getlist('t')
    # Drop empty-string values that arrive from blank form fields
    while '' in f_types:
        f_types.remove('')
    if f_types:
        data_filters['types'] = f_types
        basequery = basequery.join(JobType).filter(JobType.name.in_(f_types))
    f_categories = filters.get('c') or request_args.getlist('c')
    while '' in f_categories:
        f_categories.remove('')
    if f_categories:
        data_filters['categories'] = f_categories
        basequery = basequery.join(JobCategory).filter(JobCategory.name.in_(f_categories))
    f_domains = filters.get('d') or request_args.getlist('d')
    while '' in f_domains:
        f_domains.remove('')
    if f_domains:
        basequery = basequery.join(Domain).filter(Domain.name.in_(f_domains))
    f_tags = filters.get('k') or request_args.getlist('k')
    while '' in f_tags:
        f_tags.remove('')
    if f_tags:
        basequery = basequery.join(JobPostTag).join(Tag).filter(Tag.name.in_(f_tags))
    # r_locations may be the same list object as filters['l'] (aliased above)
    data_filters['location_names'] = r_locations = filters.get('l') or request_args.getlist('l')
    if location:
        r_locations.append(location['geonameid'])
    f_locations = []
    remote_location = getbool(filters.get('anywhere') or request_args.get('anywhere')) or False
    if remote_location:
        data_filters['location_names'].append('anywhere')
    # Normalize location tokens: ints and digit strings are geonameids,
    # 'anywhere' flags remote work, anything else is resolved via geocoding
    for rl in r_locations:
        if isinstance(rl, int) and rl > 0:
            f_locations.append(rl)
        elif rl == 'anywhere':
            remote_location = True
        elif rl.isdigit():
            f_locations.append(int(rl))
        elif rl:
            ld = location_geodata(rl)
            if ld:
                f_locations.append(ld['geonameid'])
    remote_location_query = basequery.filter(JobPost.remote_location == True)  # NOQA
    if f_locations:
        locations_query = basequery.join(JobLocation).filter(JobLocation.geonameid.in_(f_locations))
    else:
        locations_query = basequery.join(JobLocation)
    if f_locations and remote_location:
        data_filters['locations'] = f_locations
        data_filters['anywhere'] = True
        recency = JobPost.state.LISTED
        # Union of location-matched and remote posts, both restricted to listed state
        basequery = locations_query.filter(recency).union(remote_location_query.filter(recency))
    elif f_locations:
        data_filters['locations'] = f_locations
        basequery = locations_query
    elif remote_location:
        data_filters['anywhere'] = True
        # Only works as a positive filter: you can't search for jobs that are NOT anywhere
        basequery = remote_location_query
    currency = filters.get('currency') or request_args.get('currency')
    if currency in CURRENCY.keys():
        data_filters['currency'] = currency
        basequery = basequery.filter(JobPost.pay_currency == currency)
        pay_graph = currency
    else:
        pay_graph = False
    if getbool(filters.get('equity') or request_args.get('equity')):
        # Only works as a positive filter: you can't search for jobs that DON'T pay in equity
        data_filters['equity'] = True
        basequery = basequery.filter(JobPost.pay_equity_min != None)  # NOQA
    if filters.get('pay') or 'pay' in request_args or ('pmin' in request_args and 'pmax' in request_args):
        if 'pay' in request_args or filters.get('pay'):
            f_pay = filters['pay'] if filters.get('pay') else string_to_number(request_args['pay'])
            if f_pay is not None:
                # Fuzzy match: accept pay ranges from -10% to +30% of the requested figure
                f_min = int(f_pay * 0.90)
                f_max = int(f_pay * 1.30)
            else:
                f_min = None
                f_max = None
        else:
            # Legacy URL with min/max values
            f_min = string_to_number(request_args['pmin'])
            f_max = string_to_number(request_args['pmax'])
            f_pay = f_min  # Use min for pay now
        if f_pay is not None and f_min is not None and f_max is not None:
            data_filters['pay'] = f_pay
            basequery = basequery.filter(JobPost.pay_cash_min < f_max, JobPost.pay_cash_max >= f_min)
    else:
        f_pay = None
        f_min = None
        f_max = None
    if getbool(request_args.get('archive')):
        ageless = True
        data_filters['archive'] = True
        statusfilter = JobPost.state.ARCHIVED
    if query_string:
        data_filters['query'] = search_query
        data_filters['query_string'] = query_string
        basequery = basequery.filter(JobPost.search_vector.match(search_query, postgresql_regconfig='english'))
    if data_filters:
        # Any active filter switches to the full listing in batches
        showall = True
        batched = True
    posts = getposts(basequery, pinned=True, showall=showall, statusfilter=statusfilter, ageless=ageless).all()
    if posts:
        employer_name = posts[0].company_name
    else:
        employer_name = u'a single employer'
    jobpost_ab = session_jobpost_ab()
    if is_index and posts and not gkiosk:
        # Group posts by email_domain on index page only, when not in kiosk mode
        grouped = OrderedDict()
        for post in posts:
            pinned = post.pinned
            if board is not None:
                blink = board_jobs.get(post.id)  # board_jobs only contains the last 30 days, no archive
                if blink is not None:
                    pinned = blink.pinned
            if pinned:
                # Make pinned posts appear in a group of one
                grouped.setdefault(('s', post.hashid), []).append(
                    (pinned, post, bgroup(jobpost_ab, post)))
            elif post.state.ANNOUNCEMENT:
                # Make announcements also appear in a group of one
                grouped.setdefault(('a', post.hashid), []).append(
                    (pinned, post, bgroup(jobpost_ab, post)))
            elif post.domain.is_webmail:
                # Webmail domains are grouped by post md5sum, not email domain
                grouped.setdefault(('ne', post.md5sum), []).append(
                    (pinned, post, bgroup(jobpost_ab, post)))
            else:
                grouped.setdefault(('nd', post.email_domain), []).append(
                    (pinned, post, bgroup(jobpost_ab, post)))
        pinsandposts = None
    else:
        grouped = None
        if board:
            pinsandposts = []
            for post in posts:
                pinned = post.pinned
                if board is not None:
                    blink = board_jobs.get(post.id)  # board_jobs only contains the last 30 days, no archive
                    if blink is not None:
                        pinned = blink.pinned
                pinsandposts.append((pinned, post, bgroup(jobpost_ab, post)))
        else:
            pinsandposts = [(post.pinned, post, bgroup(jobpost_ab, post)) for post in posts]
    # Pick a header campaign (only if not kiosk or an XHR reload)
    pay_graph_data = None
    loadmore = False
    if batched:
        # Figure out where the batch should start from
        startdate = None
        if 'startdate' in request_values:
            try:
                startdate = parse_isoformat(request_values['startdate'])
            except ValueError:
                pass
        batchsize = 32
        # list of posts that were pinned at the time of first load
        pinned_hashids = request_args.getlist('ph')
        # Depending on the display mechanism (grouped or ungrouped), extract the batch
        if grouped:
            if not startdate:
                startindex = 0
                for row in grouped.values():
                    # break when a non-pinned post is encountered
                    if (not row[0][0]):
                        break
                    else:
                        pinned_hashids.append(row[0][1].hashid)
            else:
                # Loop through group looking for start of next batch. See below to understand the
                # nesting structure of 'grouped'
                for startindex, row in enumerate(grouped.values()):
                    # Skip pinned posts when looking for starting index
                    if (row[0][1].hashid not in pinned_hashids and row[0][1].datetime < startdate):
                        break
            # Python 2 only: dict.items() returns a list here, so it can be sliced
            batch = grouped.items()[startindex:startindex + batchsize]
            if startindex + batchsize < len(grouped):
                # Get the datetime of the last group's first item
                # batch = [((type, domain), [(pinned, post, bgroup), ...])]
                # batch[-1] = ((type, domain), [(pinned, post, bgroup), ...])
                # batch[-1][1] = [(pinned, post, bgroup), ...]
                # batch[-1][1][0] = (pinned, post, bgroup)
                # batch[-1][1][0][1] = post
                loadmore = batch[-1][1][0][1].datetime
            grouped = OrderedDict(batch)
        elif pinsandposts:
            if not startdate:
                startindex = 0
                for row in pinsandposts:
                    # break when a non-pinned post is encountered
                    if not row[0]:
                        break
                    else:
                        pinned_hashids.append(row[1].hashid)
            else:
                for startindex, row in enumerate(pinsandposts):
                    # Skip pinned posts when looking for starting index
                    if (row[1].hashid not in pinned_hashids and row[1].datetime < startdate):
                        break
            batch = pinsandposts[startindex:startindex + batchsize]
            if startindex + batchsize < len(pinsandposts):
                # batch = [(pinned, post), ...]
                loadmore = batch[-1][1].datetime
            pinsandposts = batch
    query_params = request_args.to_dict(flat=False)
    if loadmore:
        # 'Z' suffix marks the timestamp as UTC for the client-side load-more URL
        query_params.update({'startdate': loadmore.isoformat() + 'Z', 'ph': pinned_hashids})
    if location:
        data_filters['location_names'].append(location['name'])
        query_params.update({'l': location['name']})
    if pay_graph:
        pay_graph_data = make_pay_graph(pay_graph, posts, rmin=f_min, rmax=f_max)
    return dict(posts=posts, pinsandposts=pinsandposts, grouped=grouped, newlimit=newlimit,
        title=title, md5sum=md5sum, domain=domain, location=location, employer_name=employer_name,
        showall=showall, f_locations=f_locations, loadmore=loadmore, query_params=query_params,
        data_filters=data_filters, pay_graph_data=pay_graph_data, paginated=index_is_paginated(),
        template_vars=template_vars)
def reset():
    """
    Begin a password reset: ask for a username or email, then send a reset link.

    GET pre-fills the username from the query string; an ``expired`` flag adds
    an explanatory message. On valid POST, picks the destination address (the
    submitted email, the account's confirmed email, or a pending email claim),
    rate-limits to two attempts per hour, and emails a signed reset token.
    Accounts with no email address at all get an explanatory message instead.
    """
    # User wants to reset password
    # Ask for username or email, verify it, and send a reset code
    form = PasswordResetRequestForm()
    if getbool(request.args.get('expired')):
        message = _(
            "Your password has expired. Please enter your username or email address to"
            " request a reset code and set a new password."
        )
    else:
        message = None
    if request.method == 'GET':
        # abort_null strips NUL bytes from the query parameter before use
        form.username.data = abort_null(request.args.get('username'))
    if form.validate_on_submit():
        username = form.username.data
        user = form.user
        if '@' in username and not username.startswith('@'):
            # They provided an email address. Send reset email to that address
            email = username
        else:
            # Send to their existing address
            # User.email is a UserEmail object
            email = str(user.email)
        if not email and user.emailclaims:
            # Fall back to the first unverified email claim
            email = user.emailclaims[0].email
        if not email:
            # They don't have an email address. Maybe they logged in via Twitter
            # and set a local username and password, but no email. Could happen.
            if len(user.externalids) > 0:
                extid = user.externalids[0]
                return render_message(
                    title=_("Cannot reset password"),
                    message=Markup(
                        _(
                            "Your account does not have an email address. However,"
                            " it is linked to <strong>{service}</strong> with the id"
                            " <strong>{username}</strong>. You can use that to login."
                        ).format(
                            service=login_registry[extid.service].title,
                            username=extid.username or extid.userid,
                        )
                    ),
                )
            return render_message(
                title=_("Cannot reset password"),
                message=Markup(
                    _(
                        'Your account does not have an email address. Please'
                        ' contact <a href="mailto:{email}">{email}</a> for'
                        ' assistance.'
                    ).format(email=escape(current_app.config['SITE_SUPPORT_EMAIL']))
                ),
            )
        # Allow only two reset attempts per hour to discourage abuse
        validate_rate_limit('email_reset', user.uuid_b58, 2, 3600)
        # Token embeds pw_set_at so it is invalidated if the password changes first
        send_password_reset_link(
            email=email,
            user=user,
            token=token_serializer().dumps(
                {'buid': user.buid, 'pw_set_at': str_pw_set_at(user)}
            ),
        )
        return render_message(
            title=_("Email sent"),
            message=_(
                "You have been sent an email with a link to reset your password, to"
                " your address {masked_email}. If it doesn’t arrive in a few minutes,"
                " it may have landed in your spam or junk folder. The reset link is"
                " valid for 24 hours."
            ).format(masked_email=mask_email(email)),
        )
    return render_form(
        form=form,
        title=_("Reset password"),
        message=message,
        submit=_("Send reset link"),
        ajax=False,
        template='account_formlayout.html.jinja2',
    )
def jobdetail(domain, hashid):
    """
    Job post detail page.

    Redirects to the canonical board/domain URL when needed, records a view for
    the current user or anonymous user, handles submission of the report form
    (XHR and non-XHR), and renders the detail template with report, reject,
    moderate, pin and apply forms.
    """
    post = JobPost.query.filter_by(hashid=hashid).first_or_404()
    # If we're on a board (that's now 'www') and this post isn't on this board,
    # redirect to (a) the first board it is on, or (b) on the root domain (which may
    # be the 'www' board, which is why we don't bother to redirect if we're currently
    # in the 'www' board)
    if g.board and g.board.not_root and post.link_to_board(g.board) is None:
        blink = post.postboards.first()
        if blink:
            return redirect(
                post.url_for(subdomain=blink.board.name, _external=True))
        else:
            return redirect(post.url_for(subdomain=None, _external=True))
    # If this post is past pending state and the domain doesn't match, redirect there
    if post.status not in POSTSTATUS.UNPUBLISHED and post.email_domain != domain:
        return redirect(post.url_for(), code=301)
    if post.status in [POSTSTATUS.DRAFT, POSTSTATUS.PENDING]:
        # Unpublished posts are only visible to their admins
        if not ((g.user and post.admin_is(g.user))):
            abort(403)
    if post.status in POSTSTATUS.GONE:
        abort(410)
    if g.user:
        jobview = UserJobView.get(post, g.user)
        if jobview is None:
            # First view by this user: record it and invalidate view-count caches
            jobview = UserJobView(user=g.user, jobpost=post)
            post.uncache_viewcounts('viewed')
            cache.delete_memoized(viewstats_by_id_qhour, post.id)
            cache.delete_memoized(viewstats_by_id_hour, post.id)
            cache.delete_memoized(viewstats_by_id_day, post.id)
            db.session.add(jobview)
            try:
                db.session.commit()
            except IntegrityError:
                # Concurrent request already recorded this view
                db.session.rollback()
            post.viewcounts  # Re-populate cache
    else:
        jobview = None
    if g.anon_user:
        anonview = AnonJobView.get(post, g.anon_user)
        if not anonview:
            anonview = AnonJobView(jobpost=post, anon_user=g.anon_user)
            db.session.add(anonview)
            try:
                db.session.commit()
            except IntegrityError:
                db.session.rollback()
    if g.user:
        report = JobPostReport.query.filter_by(post=post, user=g.user).first()
    else:
        report = None
    # 'b' query arg selects the B-group headline variant for A/B testing
    g.jobpost_viewed = (post, getbool(request.args.get('b')))
    reportform = forms.ReportForm(obj=report)
    reportform.report_code.choices = [
        (ob.id, ob.title)
        for ob in ReportCode.query.filter_by(public=True).order_by('seq')
    ]
    rejectform = forms.RejectForm()
    moderateform = forms.ModerateForm()
    if request.method == 'GET':
        moderateform.reason.data = post.review_comments
    if g.board:
        pinnedform = forms.PinnedForm(obj=post.link_to_board(g.board))
    else:
        pinnedform = forms.PinnedForm(obj=post)
    applyform = None  # User isn't allowed to apply unless non-None
    if g.user:
        job_application = JobApplication.query.filter_by(user=g.user, jobpost=post).first()
        if not job_application:
            applyform = forms.ApplicationForm()
            applyform.apply_phone.data = g.user.phone
    elif g.kiosk and g.peopleflow_url:
        applyform = forms.KioskApplicationForm()
        job_application = None
    else:
        job_application = None
    if reportform.validate_on_submit():
        if g.user:
            if report is None:
                report = JobPostReport(post=post, user=g.user)
            report.reportcode_id = reportform.report_code.data
            report.ipaddr = request.environ['REMOTE_ADDR']
            report.useragent = request.user_agent.string
            db.session.add(report)
            db.session.commit()
            if request.is_xhr:
                return "<p>Thanks! This post has been flagged for review</p>"  # FIXME: Ugh!
            else:
                flash("Thanks! This post has been flagged for review", "interactive")
        else:
            if request.is_xhr:
                return "<p>You need to be logged in to report a post</p>"  # FIXME: Ugh!
            else:
                flash("You need to be logged in to report a post", "interactive")
    elif request.method == 'POST' and request.is_xhr:
        # Report form failed validation over XHR: re-render just the form fragment
        return render_template('inc/reportform.html', reportform=reportform)
    if post.company_url and post.status != POSTSTATUS.ANNOUNCEMENT:
        domain_mismatch = not base_domain_matches(post.company_url.lower(), post.email_domain.lower())
    else:
        domain_mismatch = False
    if not g.kiosk:
        if g.preview_campaign:
            header_campaign = g.preview_campaign
        else:
            header_campaign = Campaign.for_context(
                CAMPAIGN_POSITION.HEADER, board=g.board, user=g.user,
                anon_user=g.anon_user,
                geonameids=g.user_geonameids + post.geonameids)
    else:
        header_campaign = None
    if g.user and not g.kiosk:
        g.starred_ids = set(g.user.starred_job_ids(agelimit))
    else:
        g.starred_ids = set()
    jobpost_ab = session_jobpost_ab()
    related_posts = post.related_posts()
    cache_viewcounts(related_posts)
    is_bgroup = getbool(request.args.get('b'))
    headline = post.headlineb if is_bgroup and post.headlineb else post.headline
    # Record impressions of related posts for analytics
    g.impressions = {
        rp.id: (False, rp.id, bgroup(jobpost_ab, rp))
        for rp in related_posts
    }
    return render_template('detail.html', post=post, headline=headline,
        reportform=reportform, rejectform=rejectform, pinnedform=pinnedform,
        applyform=applyform, job_application=job_application, jobview=jobview,
        report=report, moderateform=moderateform, domain_mismatch=domain_mismatch,
        header_campaign=header_campaign, related_posts=related_posts,
        is_bgroup=is_bgroup, is_siteadmin=lastuser.has_permission('siteadmin'))
def jobdetail(domain, hashid):
    """
    Job post detail page.

    Redirects to the canonical board/domain URL when needed, records a view for
    the current user or anonymous user, handles submission of the report form
    (XHR and non-XHR), and renders the detail template with report, reject,
    moderate, pin and apply forms.

    NOTE(review): near-identical duplicate of another jobdetail definition in
    this file — verify which copy is current.
    """
    post = JobPost.query.filter_by(hashid=hashid).first_or_404()
    # If we're on a board (that's now 'www') and this post isn't on this board,
    # redirect to (a) the first board it is on, or (b) on the root domain (which may
    # be the 'www' board, which is why we don't bother to redirect if we're currently
    # in the 'www' board)
    if g.board and g.board.not_root and post.link_to_board(g.board) is None:
        blink = post.postboards.first()
        if blink:
            return redirect(post.url_for(subdomain=blink.board.name, _external=True))
        else:
            return redirect(post.url_for(subdomain=None, _external=True))
    # If this post is past pending state and the domain doesn't match, redirect there
    if post.status not in POSTSTATUS.UNPUBLISHED and post.email_domain != domain:
        return redirect(post.url_for(), code=301)
    if post.status in [POSTSTATUS.DRAFT, POSTSTATUS.PENDING]:
        # Unpublished posts are only visible to their admins
        if not ((g.user and post.admin_is(g.user))):
            abort(403)
    if post.status in POSTSTATUS.GONE:
        abort(410)
    if g.user:
        jobview = UserJobView.get(post, g.user)
        if jobview is None:
            # First view by this user: record it and invalidate view-count caches
            jobview = UserJobView(user=g.user, jobpost=post)
            post.uncache_viewcounts('viewed')
            cache.delete_memoized(viewstats_by_id_qhour, post.id)
            cache.delete_memoized(viewstats_by_id_hour, post.id)
            cache.delete_memoized(viewstats_by_id_day, post.id)
            db.session.add(jobview)
            try:
                db.session.commit()
            except IntegrityError:
                # Concurrent request already recorded this view
                db.session.rollback()
            post.viewcounts  # Re-populate cache
    else:
        jobview = None
    if g.anon_user:
        anonview = AnonJobView.get(post, g.anon_user)
        if not anonview:
            anonview = AnonJobView(jobpost=post, anon_user=g.anon_user)
            db.session.add(anonview)
            try:
                db.session.commit()
            except IntegrityError:
                db.session.rollback()
    if g.user:
        report = JobPostReport.query.filter_by(post=post, user=g.user).first()
    else:
        report = None
    # 'b' query arg selects the B-group headline variant for A/B testing
    g.jobpost_viewed = (post, getbool(request.args.get('b')))
    reportform = forms.ReportForm(obj=report)
    reportform.report_code.choices = [(ob.id, ob.title) for ob in ReportCode.query.filter_by(public=True).order_by('seq')]
    rejectform = forms.RejectForm()
    moderateform = forms.ModerateForm()
    if request.method == 'GET':
        moderateform.reason.data = post.review_comments
    if g.board:
        pinnedform = forms.PinnedForm(obj=post.link_to_board(g.board))
    else:
        pinnedform = forms.PinnedForm(obj=post)
    applyform = None  # User isn't allowed to apply unless non-None
    if g.user:
        job_application = JobApplication.query.filter_by(user=g.user, jobpost=post).first()
        if not job_application:
            applyform = forms.ApplicationForm()
            applyform.apply_phone.data = g.user.phone
    elif g.kiosk and g.peopleflow_url:
        applyform = forms.KioskApplicationForm()
        job_application = None
    else:
        job_application = None
    if reportform.validate_on_submit():
        if g.user:
            if report is None:
                report = JobPostReport(post=post, user=g.user)
            report.reportcode_id = reportform.report_code.data
            report.ipaddr = request.environ['REMOTE_ADDR']
            report.useragent = request.user_agent.string
            db.session.add(report)
            db.session.commit()
            if request.is_xhr:
                return "<p>Thanks! This post has been flagged for review</p>"  # FIXME: Ugh!
            else:
                flash("Thanks! This post has been flagged for review", "interactive")
        else:
            if request.is_xhr:
                return "<p>You need to be logged in to report a post</p>"  # FIXME: Ugh!
            else:
                flash("You need to be logged in to report a post", "interactive")
    elif request.method == 'POST' and request.is_xhr:
        # Report form failed validation over XHR: re-render just the form fragment
        return render_template('inc/reportform.html', reportform=reportform)
    if post.company_url and post.status != POSTSTATUS.ANNOUNCEMENT:
        domain_mismatch = not base_domain_matches(post.company_url.lower(), post.email_domain.lower())
    else:
        domain_mismatch = False
    if not g.kiosk:
        if g.preview_campaign:
            header_campaign = g.preview_campaign
        else:
            header_campaign = Campaign.for_context(CAMPAIGN_POSITION.HEADER,
                board=g.board, user=g.user, anon_user=g.anon_user,
                geonameids=g.user_geonameids + post.geonameids)
    else:
        header_campaign = None
    if g.user and not g.kiosk:
        g.starred_ids = set(g.user.starred_job_ids(agelimit))
    else:
        g.starred_ids = set()
    jobpost_ab = session_jobpost_ab()
    related_posts = post.related_posts()
    cache_viewcounts(related_posts)
    is_bgroup = getbool(request.args.get('b'))
    headline = post.headlineb if is_bgroup and post.headlineb else post.headline
    # Record impressions of related posts for analytics
    g.impressions = {rp.id: (False, rp.id, bgroup(jobpost_ab, rp)) for rp in related_posts}
    return render_template('detail.html', post=post, headline=headline,
        reportform=reportform, rejectform=rejectform, pinnedform=pinnedform,
        applyform=applyform, job_application=job_application, jobview=jobview,
        report=report, moderateform=moderateform, domain_mismatch=domain_mismatch,
        header_campaign=header_campaign, related_posts=related_posts,
        is_bgroup=is_bgroup, is_siteadmin=lastuser.has_permission('siteadmin')
        )
def sync_resources():
    """
    Sync a client application's resources and actions from the posted JSON.

    Creates or updates each resource and 'resource/action' entry named in the
    payload, then deletes any resource or action of this client that the
    payload does not mention. Returns a per-name status report
    ('added'/'updated'/'exists'/'deleted'/'error').

    NOTE(review): if the payload has no 'resources' key, ``actions_list`` stays
    empty and the cleanup pass deletes every resource of this client — confirm
    this is the intended behaviour for an empty sync.
    """
    resources = request.get_json().get('resources', [])
    actions_list = {}
    results = {}
    for name in resources:
        if '/' in name:
            # 'resource/action' form: exactly one slash allowed
            parts = name.split('/')
            if len(parts) != 2:
                results[name] = {'status': 'error', 'error': _(u"Invalid resource name {name}").format(name=name)}
                continue
            resource_name, action_name = parts
        else:
            resource_name = name
            action_name = None
        description = resources[name].get('description')
        siteresource = getbool(resources[name].get('siteresource'))
        restricted = getbool(resources[name].get('restricted'))
        # Ensure every seen resource has an (initially empty) actions list,
        # so it survives the cleanup pass below
        actions_list.setdefault(resource_name, [])
        resource = Resource.get(name=resource_name, client=g.client)
        if resource:
            results[resource.name] = {'status': 'exists', 'actions': {}}
            # Resource attributes are only updated from a bare resource entry,
            # not from its 'resource/action' entries
            if not action_name and resource.description != description:
                resource.description = description
                results[resource.name]['status'] = 'updated'
            if not action_name and resource.siteresource != siteresource:
                resource.siteresource = siteresource
                results[resource.name]['status'] = 'updated'
            if not action_name and resource.restricted != restricted:
                resource.restricted = restricted
                results[resource.name]['status'] = 'updated'
        else:
            resource = Resource(client=g.client, name=resource_name,
                title=resources.get(resource_name, {}).get('title')
                    or resource_name.title(),
                description=resources.get(resource_name, {}).get('description')
                    or u'')
            db.session.add(resource)
            results[resource.name] = {'status': 'added', 'actions': {}}
        if action_name:
            if action_name not in actions_list[resource_name]:
                actions_list[resource_name].append(action_name)
            action = resource.get_action(name=action_name)
            if action:
                if description != action.description:
                    action.description = description
                    results[resource.name]['actions'][action.name] = {'status': 'updated'}
                else:
                    results[resource.name]['actions'][action.name] = {'status': 'exists'}
            else:
                # FIXME: What is "title" here? This assignment doesn't seem right
                action = ResourceAction(resource=resource, name=action_name,
                    title=resources[name].get('title')
                        or action_name.title() + " " + resource.title,
                    description=description)
                db.session.add(action)
                results[resource.name]['actions'][action.name] = {'status': 'added'}
    # Deleting resources & actions not defined in client application.
    for resource_name in actions_list:
        resource = Resource.get(name=resource_name, client=g.client)
        actions = ResourceAction.query.filter(
            ~ResourceAction.name.in_(actions_list[resource_name]),
            ResourceAction.resource == resource)
        for action in actions.all():
            results[resource_name]['actions'][action.name] = {'status': 'deleted'}
        actions.delete(synchronize_session='fetch')
    del_resources = Resource.query.filter(
        ~Resource.name.in_(actions_list.keys()),
        Resource.client == g.client)
    for resource in del_resources.all():
        # Delete the resource's actions first, then the resource itself
        ResourceAction.query.filter_by(resource=resource).delete(synchronize_session='fetch')
        results[resource.name] = {'status': 'deleted'}
    del_resources.delete(synchronize_session='fetch')
    db.session.commit()
    return api_result('ok', results=results)
def sync_resources():
    """Sync a client application's resources and actions with the database.

    Reads a ``resources`` mapping from the request's JSON body. Each key is
    either ``resource_name`` or ``resource_name/action_name``; each value is a
    dict with optional ``title``, ``description``, ``siteresource`` and
    ``restricted`` fields. Creates or updates the named resources/actions for
    ``g.client``, deletes those no longer named, commits, and returns a
    per-item status report via :func:`api_result`.

    NOTE(review): this definition duplicates an earlier ``sync_resources`` in
    this module; confirm and remove one of the two.
    """
    # FIX: the payload is indexed as a mapping below (resources[name].get(...)),
    # so the default must be a dict, not a list as before. Also guard against
    # request.get_json() returning None when the body is missing or not JSON.
    resources = (request.get_json() or {}).get('resources', {})
    actions_list = {}  # resource_name -> [action_name, ...] named in this sync
    results = {}       # status report returned to the caller

    for name in resources:
        if '/' in name:
            parts = name.split('/')
            if len(parts) != 2:
                results[name] = {
                    'status': 'error',
                    'error': _(u"Invalid resource name {name}").format(name=name)
                    }
                continue
            resource_name, action_name = parts
        else:
            resource_name = name
            action_name = None
        description = resources[name].get('description')
        siteresource = getbool(resources[name].get('siteresource'))
        restricted = getbool(resources[name].get('restricted'))
        actions_list.setdefault(resource_name, [])
        resource = Resource.get(name=resource_name, client=g.client)
        if resource:
            results[resource.name] = {'status': 'exists', 'actions': {}}
            # Resource-level fields are only updated when the entry names the
            # resource itself (no '/action' suffix)
            if not action_name and resource.description != description:
                resource.description = description
                results[resource.name]['status'] = 'updated'
            if not action_name and resource.siteresource != siteresource:
                resource.siteresource = siteresource
                results[resource.name]['status'] = 'updated'
            if not action_name and resource.restricted != restricted:
                resource.restricted = restricted
                results[resource.name]['status'] = 'updated'
        else:
            # A 'resource/action' entry may arrive before the bare resource
            # entry, hence the .get() fallbacks for title and description
            resource = Resource(
                client=g.client, name=resource_name,
                title=resources.get(resource_name, {}).get('title') or resource_name.title(),
                description=resources.get(resource_name, {}).get('description') or u'')
            db.session.add(resource)
            results[resource.name] = {'status': 'added', 'actions': {}}
        if action_name:
            if action_name not in actions_list[resource_name]:
                actions_list[resource_name].append(action_name)
            action = resource.get_action(name=action_name)
            if action:
                if description != action.description:
                    action.description = description
                    results[resource.name]['actions'][action.name] = {'status': 'updated'}
                else:
                    results[resource.name]['actions'][action.name] = {'status': 'exists'}
            else:
                # FIXME: What is "title" here? This assignment doesn't seem right
                action = ResourceAction(
                    resource=resource, name=action_name,
                    title=resources[name].get('title') or action_name.title() + " " + resource.title,
                    description=description)
                db.session.add(action)
                results[resource.name]['actions'][action.name] = {'status': 'added'}

    # Deleting resources & actions not defined in client application.
    for resource_name in actions_list:
        resource = Resource.get(name=resource_name, client=g.client)
        actions = ResourceAction.query.filter(
            ~ResourceAction.name.in_(actions_list[resource_name]),
            ResourceAction.resource == resource)
        for action in actions.all():
            results[resource_name]['actions'][action.name] = {'status': 'deleted'}
        actions.delete(synchronize_session='fetch')
    del_resources = Resource.query.filter(
        ~Resource.name.in_(actions_list.keys()),
        Resource.client == g.client)
    for resource in del_resources.all():
        # Delete child actions first so no orphan rows remain
        ResourceAction.query.filter_by(resource=resource).delete(synchronize_session='fetch')
        results[resource.name] = {'status': 'deleted'}
    del_resources.delete(synchronize_session='fetch')

    db.session.commit()

    return api_result('ok', results=results)
def index(basequery=None, type=None, category=None, md5sum=None, domain=None,
        location=None, title=None, showall=True, statuses=None, tag=None,
        batched=True, ageless=False):
    """Render a filtered, optionally batched listing of job posts.

    Called with ``basequery=None`` for the site index; other views pass a
    pre-filtered query (type/category/domain/location pages) along with the
    matching display parameters (``type``, ``category``, ``md5sum``,
    ``domain``, ``location``, ``title``, ``tag``). Filters from
    ``request.args`` (t, c, l, anywhere, currency, equity, pmin/pmax,
    archive, q) are applied on top, then posts are fetched, grouped (index
    page only), batched for pagination, and rendered via ``index.html``.

    NOTE(review): ``type`` shadows the builtin; kept for template
    compatibility (passed through as ``jobtype``).
    NOTE(review): relies on Python 2 semantics — ``grouped.items()`` is
    sliced as a list and ``grouped.itervalues()`` is used below; this body
    is not Python 3 compatible as written.
    """
    # is_index: True only when no caller-supplied query, i.e. the home page
    if basequery is None:
        is_index = True
    else:
        is_index = False
    now = datetime.utcnow()
    # Anonymous visitors on the index (outside kiosk/board-login contexts)
    # get the reduced, unbatched view
    if basequery is None and not (g.user or g.kiosk or (g.board and not g.board.require_login)):
        showall = False
        batched = False
    if basequery is None:
        basequery = JobPost.query

    # Apply request.args filters
    data_filters = {}
    f_types = request.args.getlist('t')
    # Drop empty values submitted by blank form fields
    while '' in f_types:
        f_types.remove('')
    if f_types:
        data_filters['types'] = f_types
        basequery = basequery.join(JobType).filter(JobType.name.in_(f_types))
    f_categories = request.args.getlist('c')
    while '' in f_categories:
        f_categories.remove('')
    if f_categories:
        data_filters['categories'] = f_categories
        basequery = basequery.join(JobCategory).filter(JobCategory.name.in_(f_categories))
    r_locations = request.args.getlist('l')
    f_locations = []
    remote_location = getbool(request.args.get('anywhere')) or False
    # 'l' values may be the literal 'anywhere', a numeric geonameid, or a
    # location name to be geocoded
    for rl in r_locations:
        if rl == 'anywhere':
            remote_location = True
        elif rl.isdigit():
            f_locations.append(int(rl))
        elif rl:
            ld = location_geodata(rl)
            if ld:
                f_locations.append(ld['geonameid'])
    remote_location_query = basequery.filter(JobPost.remote_location == True)  # NOQA
    locations_query = basequery.join(JobLocation).filter(JobLocation.geonameid.in_(f_locations))
    if f_locations and remote_location:
        # Both specific locations and 'anywhere': union the two queries,
        # restricted to recent posts so the union stays bounded
        data_filters['locations'] = f_locations
        data_filters['anywhere'] = True
        recency = JobPost.datetime > datetime.utcnow() - agelimit
        basequery = locations_query.filter(recency).union(remote_location_query.filter(recency))
    elif f_locations:
        data_filters['locations'] = f_locations
        basequery = locations_query
    elif remote_location:
        data_filters['anywhere'] = True
        # Only works as a positive filter: you can't search for jobs that are NOT anywhere
        basequery = remote_location_query
    if 'currency' in request.args and request.args['currency'] in CURRENCY.keys():
        currency = request.args['currency']
        data_filters['currency'] = currency
        basequery = basequery.filter(JobPost.pay_currency == currency)
        # pay_graph doubles as the flag and the currency for the graph below
        pay_graph = currency
    else:
        pay_graph = False
    if getbool(request.args.get('equity')):
        # Only works as a positive filter: you can't search for jobs that DON'T pay in equity
        data_filters['equity'] = True
        basequery = basequery.filter(JobPost.pay_equity_min != None)  # NOQA
    if 'pmin' in request.args and 'pmax' in request.args:
        f_min = string_to_number(request.args['pmin'])
        f_max = string_to_number(request.args['pmax'])
        if f_min is not None and f_max is not None:
            data_filters['pay_min'] = f_min
            data_filters['pay_max'] = f_max
            # Overlap test: post's pay range intersects the requested range
            basequery = basequery.filter(JobPost.pay_cash_min < f_max, JobPost.pay_cash_max >= f_min)
    else:
        f_min = None
        f_max = None
    if getbool(request.args.get('archive')):
        # Archive mode disables the age limit and restricts to archived posts
        ageless = True
        data_filters['archive'] = True
        statuses = POSTSTATUS.ARCHIVED
    search_domains = None
    if request.args.get('q'):
        q = for_tsquery(request.args['q'])
        try:
            # TODO: Can we do syntax validation without a database roundtrip?
            db.session.query(db.func.to_tsquery(q)).all()
        except ProgrammingError:
            # Bad tsquery syntax: log it and silently drop the search filter
            db.session.rollback()
            g.event_data['search_syntax_error'] = (request.args['q'], q)
            if not request.is_xhr:
                flash(_(u"Search terms ignored because this didn’t parse: {query}").format(query=q), 'danger')
        else:
            # Query's good? Use it.
            data_filters['query'] = q
            search_domains = Domain.query.filter(
                Domain.search_vector.match(q, postgresql_regconfig='english'),
                Domain.is_banned == False).options(
                db.load_only('name', 'title', 'logo_url')).all()  # NOQA
            basequery = basequery.filter(JobPost.search_vector.match(q, postgresql_regconfig='english'))

    if data_filters:
        # Any active filter implies the user asked for it: show everything,
        # batched, even to anonymous visitors
        g.event_data['filters'] = data_filters
        showall = True
        batched = True

    # getposts sets g.board_jobs, used below
    posts = getposts(basequery, pinned=True, showall=showall, statuses=statuses, ageless=ageless).all()
    # Cache viewcounts (admin view or not)
    cache_viewcounts(posts)

    # employer_name is only meaningful for single-employer listings
    # (e.g. an md5sum/domain view); on mixed listings it's unused filler —
    # TODO confirm against index.html
    if posts:
        employer_name = posts[0].company_name
    else:
        employer_name = u'a single employer'

    if g.user:
        g.starred_ids = set(g.user.starred_job_ids(agelimit))
    else:
        g.starred_ids = set()

    jobpost_ab = session_jobpost_ab()

    # Make lookup slightly faster in the loop below since 'g' is a proxy
    board = g.board
    if board:
        board_jobs = g.board_jobs
    else:
        board_jobs = {}

    if is_index and posts and not g.kiosk:
        # Group posts by email_domain on index page only, when not in kiosk mode
        grouped = OrderedDict()
        for post in posts:
            pinned = post.pinned
            if board is not None:
                # Board-level pin overrides the post's own pin flag
                blink = board_jobs.get(post.id)  # board_jobs only contains the last 30 days, no archive
                if blink is not None:
                    pinned = blink.pinned
            if pinned:
                # Make pinned posts appear in a group of one
                grouped.setdefault(('s', post.hashid), []).append(
                    (pinned, post, bgroup(jobpost_ab, post)))
            elif post.status == POSTSTATUS.ANNOUNCEMENT:
                # Make announcements also appear in a group of one
                grouped.setdefault(('a', post.hashid), []).append(
                    (pinned, post, bgroup(jobpost_ab, post)))
            elif post.domain.is_webmail:
                # Webmail posters group by md5sum, companies by email domain
                grouped.setdefault(('ne', post.md5sum), []).append(
                    (pinned, post, bgroup(jobpost_ab, post)))
            else:
                grouped.setdefault(('nd', post.email_domain), []).append(
                    (pinned, post, bgroup(jobpost_ab, post)))
        pinsandposts = None
    else:
        grouped = None
        if g.board:
            # Flat list, but board pins still override post pins
            pinsandposts = []
            for post in posts:
                pinned = post.pinned
                if board is not None:
                    blink = board_jobs.get(post.id)  # board_jobs only contains the last 30 days, no archive
                    if blink is not None:
                        pinned = blink.pinned
                pinsandposts.append((pinned, post, bgroup(jobpost_ab, post)))
        else:
            pinsandposts = [(post.pinned, post, bgroup(jobpost_ab, post)) for post in posts]

    # Pick a header campaign (only if not kiosk or an XHR reload)
    pay_graph_data = None
    if not g.kiosk and not request.is_xhr:
        if g.preview_campaign:
            header_campaign = g.preview_campaign
        else:
            if location:
                geonameids = g.user_geonameids + [location['geonameid']]
            else:
                geonameids = g.user_geonameids
            header_campaign = Campaign.for_context(CAMPAIGN_POSITION.HEADER, board=g.board, user=g.user,
                anon_user=g.anon_user, geonameids=geonameids)
        if pay_graph:
            pay_graph_data = make_pay_graph(pay_graph, posts, rmin=f_min, rmax=f_max)
    else:
        header_campaign = None

    # loadmore holds the datetime cursor for the next batch, or False when
    # there is nothing more to load
    loadmore = False
    if batched:
        # Figure out where the batch should start from
        startdate = None
        if 'startdate' in request.values:
            try:
                startdate = parse_isoformat(request.values['startdate'])
            except ValueError:
                pass  # Malformed cursor: fall back to the first batch
        if request.method == 'GET':
            batchsize = 31  # Skipping one for the special stickie that's on all pages
        else:
            batchsize = 32

        # Depending on the display mechanism (grouped or ungrouped), extract the batch
        if grouped:
            if not startdate:
                startindex = 0
            else:
                # Loop through group looking for start of next batch. See below to understand the
                # nesting structure of 'grouped'
                for startindex, row in enumerate(grouped.values()):
                    # Skip examination of pinned listings (having row[0][0] = True)
                    if (not row[0][0]) and row[0][1].datetime < startdate:
                        break
            batch = grouped.items()[startindex:startindex + batchsize]
            if startindex + batchsize < len(grouped):
                # Get the datetime of the last group's first item
                # batch = [((type, domain), [(pinned, post, bgroup), ...])]
                # batch[-1] = ((type, domain), [(pinned, post, bgroup), ...])
                # batch[-1][1] = [(pinned, post, bgroup), ...]
                # batch[-1][1][0] = (pinned, post, bgroup)
                # batch[-1][1][0][1] = post
                loadmore = batch[-1][1][0][1].datetime
            grouped = OrderedDict(batch)
        elif pinsandposts:
            if not startdate:
                startindex = 0
            else:
                for startindex, row in enumerate(pinsandposts):
                    # Skip pinned posts when looking for starting index
                    if (not row[0]) and row[1].datetime < startdate:
                        break
            batch = pinsandposts[startindex:startindex + batchsize]
            if startindex + batchsize < len(pinsandposts):
                # batch = [(pinned, post), ...]
                loadmore = batch[-1][1].datetime
            pinsandposts = batch

    # Record what was shown for impression tracking, keyed by post id
    if grouped:
        g.impressions = {post.id: (pinflag, post.id, is_bgroup)
            for group in grouped.itervalues() for pinflag, post, is_bgroup in group}
    elif pinsandposts:
        g.impressions = {post.id: (pinflag, post.id, is_bgroup) for pinflag, post, is_bgroup in pinsandposts}

    return render_template('index.html', pinsandposts=pinsandposts, grouped=grouped, now=now,
        newlimit=newlimit, jobtype=type, jobcategory=category, title=title, md5sum=md5sum,
        domain=domain, employer_name=employer_name, location=location, showall=showall,
        tag=tag, is_index=is_index, header_campaign=header_campaign, loadmore=loadmore,
        search_domains=search_domains, is_siteadmin=lastuser.has_permission('siteadmin'),
        pay_graph_data=pay_graph_data)