def recompute_unread(min_date=None):
    """Rebuild the cached unread inbox queries for every affected account.

    Scans the Inbox relations (Account<->Message and Account<->Comment) for
    accounts that have received inbox items — optionally only items newer
    than ``min_date`` — and refreshes their cached unread queries.
    """
    from v1.models import Inbox, Account, Comment, Message
    from v1.lib.db import queries

    def load_accounts(inbox_rel):
        # Collect the distinct recipient account ids (_thing1_id) present
        # in this inbox relation, walking newest-first.
        accounts = set()
        q = inbox_rel._query(eager_load=False,
                             data=False,
                             sort=desc("_date"))
        if min_date:
            q._filter(inbox_rel.c._date > min_date)
        for i in fetch_things2(q):
            accounts.add(i._thing1_id)
        return accounts

    # Accounts with message-type inbox items: refresh all unread queries.
    accounts_m = load_accounts(Inbox.rel(Account, Message))
    for i, a in enumerate(accounts_m):
        a = Account._byID(a)
        print "%s / %s : %s" % (i, len(accounts_m), a)
        queries.get_unread_messages(a).update()
        queries.get_unread_comments(a).update()
        queries.get_unread_selfreply(a).update()

    # Accounts with comment replies but no messages (set difference): only
    # the comment-based unread queries still need refreshing.
    accounts = load_accounts(Inbox.rel(Account, Comment)) - accounts_m
    for i, a in enumerate(accounts):
        a = Account._byID(a)
        print "%s / %s : %s" % (i, len(accounts), a)
        queries.get_unread_comments(a).update()
        queries.get_unread_selfreply(a).update()
def set_up_comment_embed(sr, thing, showedits):
    """Configure the request/template context for rendering an embedded
    comment in an iframe.

    Builds ``c.embed_config`` from the comment and its subverbify, forces
    the iframe render style, and swaps in a logged-out user so the embed
    never renders with the viewer's session.

    sr: the Subverbify the comment belongs to
    thing: the comment being embedded
    showedits: whether edited comments should be shown in the embed
    """
    try:
        author = Account._byID(thing.author_id) if thing.author_id else None
    except NotFound:
        author = None

    iso_timestamp = request.GET.get("created", "")

    c.embed_config = {
        "eventtracker_url": g.eventtracker_url or "",
        "anon_eventtracker_url": g.anon_eventtracker_url or "",
        "event_clicktracker_url": g.event_clicktracker_url or "",
        "created": iso_timestamp,
        "showedits": showedits,
        "thing": {
            "id": thing._id,
            "sr_id": sr._id,
            "sr_name": sr.name,
            "edited": edited_after(thing, iso_timestamp, showedits),
            # BUG FIX: author may be None (no author_id, or account not
            # found) — the original `thing.deleted or author._deleted`
            # raised AttributeError for undeleted things with no author.
            "deleted": thing.deleted or (author is not None and
                                         author._deleted),
        },
        "comment_max_height": 200,
    }

    c.render_style = "iframe"
    c.user = UnloggedUser([c.lang])
    c.user_is_loggedin = False
    c.forced_loggedout = True
def update_num_sildings(update_trophy=True, user_id=None):
    """Recompute each account's total number of link, comment, and user
    sildings from the sodium transaction table.

    update_trophy: when True, queue a "silding" trophy for accounts whose
        'server seconds paid' preference is public
    user_id: when given, restrict the recount to that single account
    """
    # Count gifted transactions ('X%' trans_ids) grouped by the paying
    # account, largest givers first.
    query = (select([
        sodium_table.c.paying_id,
        sa_count(sodium_table.c.trans_id)
    ]).where(sodium_table.c.trans_id.like('X%')).group_by(
        sodium_table.c.paying_id).order_by(
        sa_count(sodium_table.c.trans_id).desc()))

    if user_id:
        query = query.where(sodium_table.c.paying_id == str(user_id))

    rows = ENGINE.execute(query)
    total_updated = 0
    for paying_id, count in rows:
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; narrow to Exception so the
        # script stays interruptible.
        try:
            a = Account._byID(int(paying_id), data=True)
            a.num_sildings = count
            a._commit()
            total_updated += 1
            # if 'server seconds paid' for are public, update silding trophies
            if update_trophy and a.pref_public_server_seconds:
                add_to_trophy_queue(a, "silding")
        except Exception:
            g.log.debug("update_num_sildings: paying_id %s is invalid"
                        % paying_id)

    g.log.debug("update_num_sildings: updated %s accounts" % total_updated)
def process_message(msgs, chan):
    """Update get_submitted(), the Links by author precomputed query.

    get_submitted() is a CachedResult which is stored in permacache. To
    update these objects we need to do a read-modify-write which requires
    obtaining a lock. Sharding these updates by author allows us to run
    multiple consumers (but ideally just one per shard) to avoid lock
    contention.
    """
    from v1.lib.db.queries import add_queries, get_submitted

    # each message body is a Link fullname
    link_names = {msg.body for msg in msgs}
    links = Link._by_fullname(link_names, return_dict=False)
    print 'Processing %r' % (links,)

    # group the links by author so each author's cached queries are
    # updated with one read-modify-write
    links_by_author_id = defaultdict(list)
    for link in links:
        links_by_author_id[link.author_id].append(link)

    authors_by_id = Account._byID(links_by_author_id.keys())

    for author_id, links in links_by_author_id.iteritems():
        with g.stats.get_timer("link_vote_processor.author_queries"):
            author = authors_by_id[author_id]
            add_queries(
                queries=[
                    get_submitted(author, sort, 'all') for sort in SORTS],
                insert_items=links,
            )
def batch_lookups(self):
    """Prefetch the Accounts and Subverbifys referenced by self.things.

    Populates ``self.accounts`` (by author_id) and ``self.srs`` (by sr_id).
    When ``self.use_safe_get`` is set, a NotFound during the batch load
    falls back to safe_get so a single bad id doesn't fail the whole batch;
    otherwise NotFound propagates.
    """
    super(LinkUploader, self).batch_lookups()

    def batch_load(lookup_fn, ids):
        # One shared fallback path instead of the previous duplicated
        # try/except blocks for accounts and subverbifys.
        try:
            return lookup_fn(ids, data=True, return_dict=True)
        except NotFound:
            if self.use_safe_get:
                return safe_get(lookup_fn, ids, data=True, return_dict=True)
            else:
                raise

    author_ids = [
        thing.author_id for thing in self.things
        if hasattr(thing, 'author_id')
    ]
    self.accounts = batch_load(Account._byID, author_ids)

    sr_ids = [
        thing.sr_id for thing in self.things
        if hasattr(thing, 'sr_id')
    ]
    self.srs = batch_load(Subverbify._byID, sr_ids)
def charge_campaign(link, campaign):
    """Capture the previously-authorized payment for a promo campaign.

    No-ops when the campaign is already charged or doesn't need charging.
    On success, fires the edit_campaign hook, moves the link to pending
    promote status if necessary, and queues a confirmation email.  If the
    charge fails because the authorization hold expired, the campaign's
    transaction is reset so a fresh authorization can be attempted.
    """
    if charged_or_not_needed(campaign):
        return

    user = Account._byID(link.author_id)
    success, reason = authorize.charge_transaction(user, campaign.trans_id,
                                                   campaign._id)
    if not success:
        if reason == authorize.TRANSACTION_NOT_FOUND:
            # authorization hold has expired
            original_trans_id = campaign.trans_id
            campaign.trans_id = NO_TRANSACTION
            campaign._commit()
            text = ('voided expired transaction for %s: (trans_id: %d)'
                    % (campaign, original_trans_id))
            PromotionLog.add(link, text)
        return

    hooks.get_hook('promote.edit_campaign').call(link=link, campaign=campaign)

    if not is_promoted(link):
        update_promote_status(link, PROMOTE_STATUS.pending)

    emailer.queue_promo(link, campaign.total_budget_dollars,
                        campaign.trans_id)
    text = ('auth charge for campaign %s, trans_id: %d'
            % (campaign._id, campaign.trans_id))
    PromotionLog.add(link, text)
def refund_campaign(link, camp, refund_amount, billable_amount,
                    billable_impressions):
    """Refund the unbilled portion of a campaign's charge to its owner.

    Returns True when the gateway refund succeeded (and the refund is
    recorded on the campaign), False when it failed (only a log entry is
    written).
    """
    owner = Account._byID(camp.owner_id, data=True)
    success, reason = authorize.refund_transaction(owner, camp.trans_id,
                                                   camp._id, refund_amount)
    if not success:
        text = ('%s $%s refund failed' % (camp, refund_amount))
        PromotionLog.add(link, text)
        g.log.debug(text + ' (reason: %s)' % reason)
        return False

    if billable_impressions:
        # bid_pennies is stored in pennies; /100. converts to dollars
        text = ('%s completed with $%s billable (%s impressions @ $%s).'
                ' %s refunded.' % (camp, billable_amount,
                                   billable_impressions,
                                   camp.bid_pennies / 100.,
                                   refund_amount))
    else:
        text = ('%s completed with $%s billable. %s refunded'
                % (camp, billable_amount, refund_amount))

    PromotionLog.add(link, text)
    camp.refund_amount = refund_amount
    camp._commit()
    queries.unset_underdelivered_campaigns(camp)
    emailer.refunded_promo(link)
    return True
def store_keys(key, maxes):
    """Insert precomputed (fullname, sortvalue...) tuples into a cached query.

    `key` encodes which listing to update:
      'user-<submitted|comments>-<account_id>', 'sr-<sort>-<time>-<sr_id>',
      'domain/<sort>/<time>/<domain>', or '<liked|disliked|saved|hidden>-<id>'.
    `maxes` holds the tuples to insert; the thing fullname is the last
    element of each tuple (first element in the 'user-' case).
    """
    # we're building queries using queries.py, but we could make the
    # queries ourselves if we wanted to avoid the individual lookups
    # for accounts and subverbifys.
    # Note that we're only generating the 'sr-' type queries here, but
    # we're also able to process the other listings generated by the
    # old migrate.mr_permacache for convenience
    userrel_fns = dict(liked=queries.get_liked,
                       disliked=queries.get_disliked,
                       saved=queries.get_saved,
                       hidden=queries.get_hidden)
    if key.startswith('user-'):
        acc_str, keytype, account_id = key.split('-')
        account_id = int(account_id)
        fn = (queries.get_submitted if keytype == 'submitted'
              else queries.get_comments)
        q = fn(Account._byID(account_id), 'new', 'all')
        # 'user-' tuples arrive as (timestamp, fname); flip to (fname, score)
        q._insert_tuples([(fname, float(timestamp))
                          for (timestamp, fname) in maxes])
    elif key.startswith('sr-'):
        sr_str, sort, time, sr_id = key.split('-')
        sr_id = int(sr_id)

        if sort == 'controversy':
            # I screwed this up in the mapper and it's too late to fix
            # it
            sort = 'controversial'

        q = queries.get_links(Subverbify._byID(sr_id), sort, time)
        # move the fullname (last element) to the front, floats after it
        q._insert_tuples(
            [tuple([item[-1]] + map(float, item[:-1]))
             for item in maxes])
    elif key.startswith('domain/'):
        d_str, sort, time, domain = key.split('/')
        q = queries.get_domain_links(domain, sort, time)
        q._insert_tuples(
            [tuple([item[-1]] + map(float, item[:-1]))
             for item in maxes])
    elif key.split('-')[0] in userrel_fns:
        key_type, account_id = key.split('-')
        account_id = int(account_id)
        fn = userrel_fns[key_type]
        q = fn(Account._byID(account_id))
        q._insert_tuples(
            [tuple([item[-1]] + map(float, item[:-1]))
             for item in maxes])
def make_sodium_message(thing, user_silded):
    """Return the localized "silded" banner text for a thing, or None.

    The message varies on three axes: whether the viewer is the thing's
    author, whether the viewer themselves silded it (`user_silded`), and
    whether the thing is a Comment or a submission.  Returns None for
    unsilded, spammed, or deleted things.
    """
    from v1.models import Comment

    if thing.sildings == 0 or thing._spam or thing._deleted:
        return None

    author = Account._byID(thing.author_id, data=True)
    if not author._deleted:
        author_name = author.name
    else:
        author_name = _("[deleted]")

    # viewer is the recipient of the sodium
    if c.user_is_loggedin and thing.author_id == c.user._id:
        if isinstance(thing, Comment):
            silded_message = ungettext(
                "a verbifyor gifted you a month of verbify sodium for this "
                "comment.",
                "verbifyors have gifted you %(months)d months of verbify sodium "
                "for this comment.",
                thing.sildings)
        else:
            silded_message = ungettext(
                "a verbifyor gifted you a month of verbify sodium for this "
                "submission.",
                "verbifyors have gifted you %(months)d months of verbify sodium "
                "for this submission.",
                thing.sildings)
    # viewer is (one of) the gifter(s)
    elif user_silded:
        if isinstance(thing, Comment):
            silded_message = ungettext(
                "you have gifted verbify sodium to %(recipient)s for this "
                "comment.",
                "you and other verbifyors have gifted %(months)d months of "
                "verbify sodium to %(recipient)s for this comment.",
                thing.sildings)
        else:
            silded_message = ungettext(
                "you have gifted verbify sodium to %(recipient)s for this "
                "submission.",
                "you and other verbifyors have gifted %(months)d months of "
                "verbify sodium to %(recipient)s for this submission.",
                thing.sildings)
    # viewer is an uninvolved third party
    else:
        if isinstance(thing, Comment):
            silded_message = ungettext(
                "a verbifyor has gifted verbify sodium to %(recipient)s for this "
                "comment.",
                "verbifyors have gifted %(months)d months of verbify sodium to "
                "%(recipient)s for this comment.",
                thing.sildings)
        else:
            silded_message = ungettext(
                "a verbifyor has gifted verbify sodium to %(recipient)s for this "
                "submission.",
                "verbifyors have gifted %(months)d months of verbify sodium to "
                "%(recipient)s for this submission.",
                thing.sildings)

    return silded_message % dict(
        recipient=author_name,
        months=thing.sildings,
    )
def process_message(msg):
    """Process one queued link-vote message: validate it, create the Vote
    under a per-(user, link) lock, and queue follow-up query updates.

    Invalid/corrupt vote records are logged and dropped without raising so
    the queue keeps draining.
    """
    vote_data = json.loads(msg.body)

    hook = hooks.get_hook('vote.validate_vote_data')
    if hook.call_until_return(msg=msg, vote_data=vote_data) is False:
        # Corrupt records in the queue. Ignore them.
        print "Ignoring invalid vote by %s on %s %s" % (
                vote_data.get('user_id', '<unknown>'),
                vote_data.get('thing_fullname', '<unknown>'),
                vote_data)
        return

    timer = g.stats.get_timer("link_vote_processor")
    timer.start()

    user = Account._byID(vote_data.pop("user_id"))
    link = Link._by_fullname(vote_data.pop("thing_fullname"))

    # create the vote and update the voter's liked/disliked under lock so
    # that the vote state and cached query are consistent
    lock_key = "vote-%s-%s" % (user._id36, link._fullname)
    with g.make_lock("voting", lock_key, timeout=5):
        print "Processing vote by %s on %s %s" % (user, link, vote_data)

        try:
            vote = Vote(
                user,
                link,
                direction=vote_data["direction"],
                date=datetime.utcfromtimestamp(vote_data["date"]),
                data=vote_data["data"],
                event_data=vote_data.get("event_data"),
            )
        except TypeError as e:
            # a vote on an invalid type got in the queue, just skip it
            g.log.exception("Invalid type: %r", e.message)
            return

        vote.commit()
        timer.intermediate("create_vote_object")

        update_user_liked(vote)
        timer.intermediate("voter_likes")

    # only re-rank listings for votes that matter on links still visible
    vote_valid = vote.is_automatic_initial_vote or vote.effects.affects_score
    link_valid = not (link._spam or link._deleted)
    if vote_valid and link_valid:
        add_to_author_query_q(link)
        add_to_subverbify_query_q(link)
        add_to_domain_query_q(link)

    timer.stop()
    timer.flush()
def _promo_email(thing, kind, body="", **kw):
    """Render a Promo_Email about `thing` and send it to the link's author.

    Does nothing when the author has no email address on file.  Extra
    keyword arguments are forwarded to the Promo_Email template.
    """
    from v1.lib.pages import Promo_Email

    author = Account._byID(thing.author_id, True)
    if not author.email:
        return

    rendered_body = Promo_Email(link=thing, kind=kind,
                                body=body, **kw).render(style="email")
    return _system_email(author.email, rendered_body, kind,
                         thing=thing,
                         reply_to=g.selfserve_support_email,
                         suppress_username=True)
def get_reports(cls, wrapped, max_user_reasons=20):
    """Get two lists of mod and user reports on the item.

    Returns (mod_reports, user_reports) where mod_reports is a list of
    (reason, moderator_name) tuples and user_reports is a list of
    (reason, count) tuples capped at `max_user_reasons` entries.  Only
    computed when the viewer may see reports (can_ban, or a sponsor
    looking at a promoted item); otherwise both lists are empty.
    """
    if (wrapped.reported > 0 and
            (wrapped.can_ban or
             getattr(wrapped, "promoted", None) and
             c.user_is_sponsor)):
        from v1.models import SRMember

        reports = cls.for_thing(wrapped.lookups[0])

        # when each current moderator gained the role, used to decide
        # whether a report counts as a "mod report"
        query = SRMember._query(SRMember.c._thing1_id == wrapped.sr_id,
                                SRMember.c._name == "moderator")
        mod_dates = {rel._thing2_id: rel._date for rel in query}

        if g.automoderator_account:
            automoderator = Account._by_name(g.automoderator_account)
        else:
            automoderator = None

        mod_reports = []
        user_reports = []

        for report in reports:
            # always include AutoModerator reports
            if automoderator and report._thing1_id == automoderator._id:
                mod_reports.append(report)
            # include in mod reports if made after the user became a mod
            elif (report._thing1_id in mod_dates and
                    report._date >= mod_dates[report._thing1_id]):
                mod_reports.append(report)
            else:
                user_reports.append(report)

        # mod reports return as tuples with (reason, name)
        mods = Account._byID([report._thing1_id
                              for report in mod_reports],
                             data=True, return_dict=True)
        mod_reports = [(getattr(report, "reason", None),
                        mods[report._thing1_id].name)
                       for report in mod_reports]

        # user reports return as tuples with (reason, count)
        user_reports = Counter([getattr(report, "reason", None)
                                for report in user_reports])
        user_reports = user_reports.most_common(max_user_reasons)

        return mod_reports, user_reports
    else:
        return [], []
def new_campaign(link, dates, target, frequency_cap, priority, location,
                 platform, mobile_os, ios_devices, ios_version_range,
                 android_devices, android_version_range,
                 total_budget_pennies, cost_basis, bid_pennies):
    """Create a PromoCampaign for `link` along with its supporting records.

    dates: (start_date, end_date) pair.
    Adds the PromotionWeights index entry, writes a PromotionLog line,
    comps the campaign for authors flagged `complimentary_promos` (house
    campaigns excluded), fires the new_campaign hook, and returns the
    created campaign.
    """
    campaign = PromoCampaign.create(
        link, target, dates[0], dates[1], frequency_cap, priority, location,
        platform, mobile_os, ios_devices, ios_version_range, android_devices,
        android_version_range, total_budget_pennies, cost_basis, bid_pennies)
    PromotionWeights.add(link, campaign)
    PromotionLog.add(link, 'campaign %s created' % campaign._id)

    if not campaign.is_house:
        # authors with complimentary promos get the campaign for free
        author = Account._byID(link.author_id, data=True)
        if getattr(author, "complimentary_promos", False):
            free_campaign(link, campaign, c.user)

    hooks.get_hook('promote.new_campaign').call(link=link, campaign=campaign)

    return campaign
def new(cls, user, thing, reason=None): from v1.lib.db import queries # check if this report exists already! rel = cls.rel(user, thing) q = rel._fast_query(user, thing, ['-1', '0', '1']) q = [report for (tupl, report) in q.iteritems() if report] if q: # stop if we've seen this before, so that we never get the # same report from the same user twice oldreport = q[0] g.log.debug("Ignoring duplicate report %s" % oldreport) return oldreport kw = {} if reason: kw['reason'] = reason r = Report(user, thing, '0', **kw) # mark item as reported try: thing._incr(cls._field) except (ValueError, TypeError): g.log.error("%r has bad field %r = %r" % (thing, cls._field, getattr(thing, cls._field, "(nonexistent)"))) raise r._commit() if hasattr(thing, 'author_id'): author = Account._byID(thing.author_id, data=True) author._incr('reported') if not getattr(thing, "ignore_reports", False): # update the reports queue if it exists queries.new_report(thing, r) # if the thing is already marked as spam, accept the report if thing._spam: cls.accept(thing) return r
def from_queue(self, max_date, batch_limit = 50, kind = None):
    """Generator over queued emails older than `max_date`.

    Pages through self.queue_table in `batch_limit`-sized chunks ordered
    by uid, batch-loading the associated Accounts and Things, and yields
    one tuple per queued email:
    (account, thing, to_addr, from_name, date, ip, kind, msg_hash, body,
     fr_addr, reply_to) — account/thing may be None if not resolvable.
    """
    from v1.models import Account, Thing
    keep_trying = True
    min_id = None
    s = self.queue_table
    while keep_trying:
        where = [s.c.date < max_date]
        if min_id:
            # resume after the last uid seen in the previous batch
            where.append(s.c.uid > min_id)
        if kind:
            where.append(s.c.kind == kind)

        res = sa.select([s.c.to_addr, s.c.account_id,
                         s.c.from_name, s.c.fullname, s.c.body,
                         s.c.kind, s.c.ip, s.c.date, s.c.uid,
                         s.c.msg_hash, s.c.fr_addr, s.c.reply_to],
                        sa.and_(*where),
                        order_by = s.c.uid,
                        limit = batch_limit).execute()
        res = res.fetchall()

        if not res:
            break

        # batch load user accounts (column 1 is account_id)
        aids = [x[1] for x in res if x[1] > 0]
        accts = (Account._byID(aids, data = True, return_dict = True)
                 if aids else {})

        # batch load things (column 3 is the fullname)
        tids = [x[3] for x in res if x[3]]
        things = (Thing._by_fullname(tids, data = True, return_dict = True)
                  if tids else {})

        # get the lower bound date for next iteration
        # (column 8 is uid — the pagination cursor, despite the comment)
        min_id = max(x[8] for x in res)

        # did we not fetch them all?
        keep_trying = (len(res) == batch_limit)

        for (addr, acct, fname, fulln, body, kind, ip, date, uid,
             msg_hash, fr_addr, reply_to) in res:
            yield (accts.get(acct), things.get(fulln), addr, fname,
                   date, ip, kind, msg_hash, body, fr_addr, reply_to)
def author_spammer(self, things, spam):
    """incr/decr the 'spammer' counter on each passed thing's author.

    Increments by the number of items per author when `spam` is true,
    decrements otherwise.  Autobanned items (ban_info['auto']) are
    skipped — only manual moderation actions affect the counter.
    """
    things_by_author = {}
    for item in things:
        if not hasattr(item, 'author_id'):
            continue
        # only count items that were not autobanned
        if getattr(item, 'ban_info', {}).get('auto', True):
            continue
        things_by_author.setdefault(item.author_id, []).append(item)

    if not things_by_author:
        return

    authors = Account._byID(things_by_author.keys(), data=True,
                            return_dict=True)
    for author_id, author_items in things_by_author.iteritems():
        delta = len(author_items) if spam else -len(author_items)
        authors[author_id]._incr('spammer', delta)
def void_campaign(link, campaign, reason):
    """Void the payment authorization for `campaign` and reset its
    transaction id.

    Does nothing when no bid transaction is recorded for the campaign.
    Sends a void-payment notification unless the transaction was a
    freebie (non-positive transaction id).
    """
    transactions = get_transactions(link, [campaign])
    bid_record = transactions.get(campaign._id)
    if not bid_record:
        return

    account = Account._byID(link.author_id)
    authorize.void_transaction(account, bid_record.transaction, campaign._id)

    campaign.trans_id = NO_TRANSACTION
    campaign._commit()

    PromotionLog.add(link,
                     'voided transaction for %s: (trans_id: %d)'
                     % (campaign, bid_record.transaction))

    if bid_record.transaction > 0:
        # notify the user that the transaction was voided if it was not
        # a freebie
        emailer.void_payment(
            link, campaign,
            reason=reason,
            total_budget_dollars=campaign.total_budget_dollars)
def get_authenticated_account(self):
    """Return the Account for the request's session cookie, or None.

    The session cookie has the form "<uid>,<timestamp>,<signature>".
    Returns None for a missing, malformed, unknown-user, or
    badly-signed cookie; the signature check is constant-time.
    """
    from v1.models import Account, NotFound

    quoted_session_cookie = request.cookies.get(g.login_cookie)
    if not quoted_session_cookie:
        return None
    session_cookie = urllib.unquote(quoted_session_cookie)

    try:
        # BUG FIX: was a bare `except:`; split/int only raise ValueError
        # here.  Also renamed the unused third field from `hash`, which
        # shadowed the builtin.
        uid, timestr, _sig = session_cookie.split(",")
        uid = int(uid)
    except ValueError:
        # malformed cookie: wrong field count or non-numeric uid
        return None

    try:
        account = Account._byID(uid, data=True)
    except NotFound:
        return None

    # recompute the expected cookie and compare in constant time to
    # avoid leaking signature information through timing
    expected_cookie = account.make_cookie(timestr)
    if not constant_time_compare(session_cookie, expected_cookie):
        return None

    return account
def subscribe_to_blog_and_annoucements(filename):
    """Subscribe every account id listed in `filename` to r/blog and
    r/announcements, bumping each subverbify's subscriber score.

    Every run of digits in the file is treated as an account id, so the
    file format is free-form.
    """
    import re
    from time import sleep
    from v1.models import Account, Subverbify

    r_blog = Subverbify._by_name("blog")
    r_announcements = Subverbify._by_name("announcements")

    contents = file(filename).read()
    account_ids = [int(digits) for digits in re.findall(r"\d+", contents)]

    for i, account_id in enumerate(account_ids):
        account = Account._byID(account_id, data=True)
        for sr in r_blog, r_announcements:
            if sr.add_subscriber(account):
                sr._incr("_ups", 1)
                print("%d: subscribed %s to %s" % (i, account.name, sr.name))
            else:
                print("%d: didn't subscribe %s to %s"
                      % (i, account.name, sr.name))
def POST_zendeskreply(self):
    """Ingest a Mailgun-forwarded Zendesk reply and post it as modmail.

    Validates the webhook signature and the reply-to address, sanitizes
    and truncates the body, and inserts the reply into the original
    modmail conversation — attributed to a mapped moderator when the
    Zendesk sender can be resolved, otherwise to the system user.
    Responds 406 for invalid payloads so Mailgun will not retry them.
    """
    request_body = request.POST
    recipient = request_body["recipient"]
    sender_email = request_body["sender"]
    from_ = request_body["from"]
    subject = request_body["subject"]
    body_plain = request_body["body-plain"]
    stripped_text = request_body["stripped-text"]
    timestamp = request_body["timestamp"]
    token = request_body["token"]
    signature = request_body["signature"]
    email_id = request_body["Message-Id"]

    if not validate_mailgun_webhook(timestamp, token, signature):
        # per Mailgun docs send a 406 so the message won't be retried
        abort(406, "invalid signature")

    message_id36 = parse_and_validate_reply_to_address(recipient)

    if not message_id36:
        # per Mailgun docs send a 406 so the message won't be retried
        abort(406, "invalid message")

    parent = Message._byID36(message_id36, data=True)
    to = Account._byID(parent.author_id, data=True)
    sr = Subverbify._byID(parent.sr_id, data=True)

    # strip the Zendesk boilerplate prefix if present
    if stripped_text.startswith(ZENDESK_PREFIX):
        stripped_text = stripped_text[len(ZENDESK_PREFIX):].lstrip()

    # cap the body at 10k characters
    if len(stripped_text) > 10000:
        body = stripped_text[:10000] + "\n\n--snipped--"
    else:
        body = stripped_text

    try:
        markdown_souptest(body)
    except SoupError:
        g.log.warning("bad markdown in modmail email: %s", body)
        abort(406, "invalid body")

    # muted participants get a canned "you're blocked" email instead
    if parent.get_muted_user_in_conversation():
        queue_blocked_muted_email(sr, parent, sender_email, email_id)
        return

    # keep the subject consistent
    message_subject = parent.subject
    if not message_subject.startswith("re: "):
        message_subject = "re: " + message_subject

    # from_ is like '"NAME (GROUP)" <*****@*****.**>'
    match = re.search("\"(?P<name>\w+) [\w ()]*\"", from_)

    # default attribution: the subverbify itself via the system user;
    # upgraded to a real moderator below when the mapping resolves
    from_sr = True
    author = Account.system_user()
    if match and match.group(
            "name") in g.live_config['modmail_account_map']:
        zendesk_name = match.group("name")
        moderator_name = g.live_config['modmail_account_map'][zendesk_name]
        moderator = Account._by_name(moderator_name)
        if sr.is_moderator_with_perms(moderator, "mail"):
            author = moderator
            from_sr = False

    message, inbox_rel = Message._new(
        author=author,
        to=to,
        subject=message_subject,
        body=body,
        ip='0.0.0.0',
        parent=parent,
        sr=sr,
        from_sr=from_sr,
        can_send_email=False,
        sent_via_email=True,
        email_id=email_id,
    )
    message._commit()
    queries.new_message(message, inbox_rel)

    g.stats.simple_event("mailgun.incoming.success")
    g.stats.simple_event("modmail_email.incoming_email")
def add_props(cls, user, wrapped):
    """Decorate wrapped mod-action items with the objects they reference.

    Batch-loads the moderators, targets, target authors, parent links,
    and subverbifys referenced by `wrapped`, then attaches them (plus
    display text and, for HTML rendering, filter buttons and row colors)
    to each item in place.
    """
    from v1.lib.db.thing import Thing
    from v1.lib.menus import QueryButton
    from v1.lib.pages import WrappedUser
    from v1.models import (
        Account,
        Link,
        ModSR,
        MultiVerbify,
        Subverbify,
    )

    target_names = {item.target_fullname for item in wrapped
                    if hasattr(item, "target_fullname")}
    targets = Thing._by_fullname(target_names, data=True)

    # get moderators
    moderators = Account._byID36({item.mod_id36 for item in wrapped},
                                 data=True)

    # get authors for targets that are Links or Comments
    target_author_names = {target.author_id for target in targets.values()
                           if hasattr(target, "author_id")}
    target_authors = Account._byID(target_author_names, data=True)

    # get parent links for targets that are Comments
    parent_link_names = {target.link_id for target in targets.values()
                         if hasattr(target, "link_id")}
    parent_links = Link._byID(parent_link_names, data=True)

    # get subverbifys
    srs = Subverbify._byID36({item.sr_id36 for item in wrapped}, data=True)

    for item in wrapped:
        item.moderator = moderators[item.mod_id36]
        item.subverbify = srs[item.sr_id36]
        item.text = cls._text.get(item.action, '')
        item.target = None
        item.target_author = None

        if hasattr(item, "target_fullname") and item.target_fullname:
            item.target = targets[item.target_fullname]

            if hasattr(item.target, "author_id"):
                author_name = item.target.author_id
                item.target_author = target_authors[author_name]

            if hasattr(item.target, "link_id"):
                parent_link_name = item.target.link_id
                item.parent_link = parent_links[parent_link_name]

            # an action whose target IS an account is its own "author"
            if isinstance(item.target, Account):
                item.target_author = item.target

    if c.render_style == "html":
        request_path = request.path

        # make wrapped users for targets that are accounts
        user_targets = filter(lambda target: isinstance(target, Account),
                              targets.values())
        wrapped_user_targets = {user._fullname: WrappedUser(user)
                                for user in user_targets}

        for item in wrapped:
            if isinstance(item.target, Account):
                user_name = item.target._fullname
                item.wrapped_user_target = wrapped_user_targets[user_name]

            css_class = 'modactions %s' % item.action
            action_button = QueryButton(
                '', item.action, query_param='type',
                css_class=css_class)
            action_button.build(base_path=request_path)
            item.action_button = action_button

            mod_button = QueryButton(
                item.moderator.name, item.moderator.name, query_param='mod')
            mod_button.build(base_path=request_path)
            item.mod_button = mod_button

            # multi/mod listings color-code rows by subverbify
            if isinstance(c.site, ModSR) or isinstance(c.site, MultiVerbify):
                rgb = item.subverbify.get_rgb()
                item.bgcolor = 'rgb(%s,%s,%s)' % rgb
                item.is_multi = True
            else:
                item.bgcolor = "rgb(255,255,255)"
                item.is_multi = False
def process_message(msg):
    """Process one queued comment-vote message: create the Vote, refresh
    the author's cached comment queries, and update comment scores.

    Invalid/corrupt records are logged and dropped.  Score updates are
    throttled for high-vote comments and routed either through
    commentstree_q (links with precomputed sorts) or written directly.
    """
    from v1.lib.comment_tree import write_comment_scores
    from v1.lib.db.queries import (
        add_queries,
        add_to_commentstree_q,
        get_comments,
    )
    from v1.models.builder import get_active_sort_orders_for_link

    vote_data = json.loads(msg.body)

    hook = hooks.get_hook('vote.validate_vote_data')
    if hook.call_until_return(msg=msg, vote_data=vote_data) is False:
        # Corrupt records in the queue. Ignore them.
        print "Ignoring invalid vote by %s on %s %s" % (
                vote_data.get('user_id', '<unknown>'),
                vote_data.get('thing_fullname', '<unknown>'),
                vote_data)
        return

    timer = g.stats.get_timer("comment_vote_processor")
    timer.start()

    user = Account._byID(vote_data.pop("user_id"))
    comment = Comment._by_fullname(vote_data.pop("thing_fullname"))

    print "Processing vote by %s on %s %s" % (user, comment, vote_data)

    try:
        vote = Vote(
            user,
            comment,
            direction=vote_data["direction"],
            date=datetime.utcfromtimestamp(vote_data["date"]),
            data=vote_data["data"],
            event_data=vote_data.get("event_data"),
        )
    except TypeError as e:
        # a vote on an invalid type got in the queue, just skip it
        g.log.exception("Invalid type: %r", e.message)
        return

    vote.commit()
    timer.intermediate("create_vote_object")

    # bail early for votes that don't affect anything visible
    vote_invalid = (not vote.effects.affects_score and
                    not vote.is_automatic_initial_vote)
    comment_invalid = comment._spam or comment._deleted
    if vote_invalid or comment_invalid:
        timer.stop()
        timer.flush()
        return

    author = Account._byID(comment.author_id)
    add_queries(
        queries=[get_comments(author, sort, 'all') for sort in SORTS],
        insert_items=comment,
    )
    timer.intermediate("author_queries")

    update_threshold = g.live_config['comment_vote_update_threshold']
    update_period = g.live_config['comment_vote_update_period']
    skip_score_update = (comment.num_votes > update_threshold and
                         comment.num_votes % update_period != 0)

    # skip updating scores if this was the automatic initial vote. those
    # updates will be handled by new_comment. Also only update scores
    # periodically once a comment has many votes.
    if not vote.is_automatic_initial_vote and not skip_score_update:
        # check whether this link is using precomputed sorts, if it is
        # we'll need to push an update to commentstree_q
        link = Link._byID(comment.link_id)
        if get_active_sort_orders_for_link(link):
            # send this comment to commentstree_q where we will update
            # CommentScoresByLink, CommentTree (noop), and CommentOrderer
            add_to_commentstree_q(comment)
        else:
            # the link isn't using precomputed sorts, so just update the
            # scores
            write_comment_scores(link, [comment])
        timer.intermediate("update_scores")

    timer.stop()
    timer.flush()
def edit_campaign(link, campaign, dates, target, frequency_cap, priority,
                  location, total_budget_pennies, cost_basis, bid_pennies,
                  platform='desktop', mobile_os=None, ios_devices=None,
                  ios_version_range=None, android_devices=None,
                  android_version_range=None):
    """Apply edits to an existing campaign, logging a diff of what changed.

    Compares each incoming value against the campaign, records
    (old, new) pairs in `changed`, commits, reschedules the
    PromotionWeights index, re-comps complimentary authors, writes a
    PromotionLog diff line, and fires the edit_campaign hook.  A budget
    change additionally voids the existing payment authorization.
    """
    changed = {}
    if dates[0] != campaign.start_date or dates[1] != campaign.end_date:
        original = '%s to %s' % (campaign.start_date, campaign.end_date)
        edited = '%s to %s' % (dates[0], dates[1])
        changed['dates'] = (original, edited)
        campaign.start_date = dates[0]
        campaign.end_date = dates[1]
    if target != campaign.target:
        changed['target'] = (campaign.target, target)
        campaign.target = target
    if frequency_cap != campaign.frequency_cap:
        changed['frequency_cap'] = (campaign.frequency_cap, frequency_cap)
        campaign.frequency_cap = frequency_cap
    if priority != campaign.priority:
        changed['priority'] = (campaign.priority.name, priority.name)
        campaign.priority = priority
    if location != campaign.location:
        changed['location'] = (campaign.location, location)
        campaign.location = location
    if platform != campaign.platform:
        changed["platform"] = (campaign.platform, platform)
        campaign.platform = platform
    if mobile_os != campaign.mobile_os:
        changed["mobile_os"] = (campaign.mobile_os, mobile_os)
        campaign.mobile_os = mobile_os
    if ios_devices != campaign.ios_devices:
        changed['ios_devices'] = (campaign.ios_devices, ios_devices)
        campaign.ios_devices = ios_devices
    if android_devices != campaign.android_devices:
        changed['android_devices'] = (campaign.android_devices,
                                      android_devices)
        campaign.android_devices = android_devices
    if ios_version_range != campaign.ios_version_range:
        changed['ios_version_range'] = (campaign.ios_version_range,
                                        ios_version_range)
        campaign.ios_version_range = ios_version_range
    if android_version_range != campaign.android_version_range:
        changed['android_version_range'] = (campaign.android_version_range,
                                            android_version_range)
        campaign.android_version_range = android_version_range
    if total_budget_pennies != campaign.total_budget_pennies:
        # NOTE(review): unlike every other attribute, a budget change is
        # not recorded in `changed` (so it won't appear in the
        # PromotionLog diff) — confirm whether that's intentional
        void_campaign(link, campaign, reason='changed_budget')
        campaign.total_budget_pennies = total_budget_pennies
    if cost_basis != campaign.cost_basis:
        changed['cost_basis'] = (campaign.cost_basis, cost_basis)
        campaign.cost_basis = cost_basis
    if bid_pennies != campaign.bid_pennies:
        changed['bid_pennies'] = (campaign.bid_pennies, bid_pennies)
        campaign.bid_pennies = bid_pennies

    change_strs = map(lambda t: '%s: %s -> %s' % (t[0], t[1][0], t[1][1]),
                      changed.iteritems())
    change_text = ', '.join(change_strs)
    campaign._commit()

    # update the index
    PromotionWeights.reschedule(link, campaign)

    if not campaign.is_house:
        # make it a freebie, if applicable
        author = Account._byID(link.author_id, True)
        if getattr(author, "complimentary_promos", False):
            free_campaign(link, campaign, c.user)

    # record the changes
    if change_text:
        PromotionLog.add(link, 'edited %s: %s' % (campaign, change_text))

    hooks.get_hook('promote.edit_campaign').call(link=link, campaign=campaign)
def send_modmail_email(message):
    """Forward a modmail message to the subverbify's configured email.

    No-ops when the message isn't subverbify modmail or the subverbify has
    no forwarding address configured.  The From header distinguishes
    admins "[A]" and moderators "[M]", the footer identifies the sender,
    and the provider-assigned email id is stored back on the message for
    later threading.
    """
    if not message.sr_id:
        return

    sr = Subverbify._byID(message.sr_id, data=True)
    forwarding_email = g.live_config['modmail_forwarding_email'].get(sr.name)
    if not forwarding_email:
        return

    sender = Account._byID(message.author_id, data=True)
    if sender.name in g.admins:
        distinguish = "[A]"
    elif sr.is_moderator(sender):
        distinguish = "[M]"
    else:
        distinguish = None

    if distinguish:
        from_address = "u/{username} {distinguish} <{sender_email}>".format(
            username=sender.name,
            distinguish=distinguish,
            sender_email=g.modmail_sender_email)
    else:
        from_address = "u/{username} <{sender_email}>".format(
            username=sender.name,
            sender_email=g.modmail_sender_email)

    reply_to = get_reply_to_address(message)
    parent_email_id, other_email_ids = get_email_ids(message)
    subject = get_message_subject(message)

    if message.from_sr and not message.first_message:
        # this is a message from the subverbify to a user. add some text that
        # shows the recipient
        recipient = Account._byID(message.to_id, data=True)
        sender_text = ("This message was sent from r/{subverbify} to "
                       "u/{user}").format(subverbify=sr.name,
                                          user=recipient.name)
    else:
        userlink = add_sr("/u/{name}".format(name=sender.name),
                          sr_path=False)
        sender_text = "This message was sent by {userlink}".format(
            userlink=userlink,
        )

    reply_footer = (
        "\n\n-\n{sender_text}\n\n"
        "Reply to this email directly or view it on verbify: {link}")

    reply_footer = reply_footer.format(
        sender_text=sender_text,
        link=message.make_permalink(force_domain=True),
    )

    message_text = message.body + reply_footer

    email_id = g.email_provider.send_email(
        to_address=forwarding_email,
        from_address=from_address,
        subject=subject,
        text=message_text,
        reply_to=reply_to,
        parent_email_id=parent_email_id,
        other_email_ids=other_email_ids,
    )
    if email_id:
        g.log.info("sent %s as %s", message._id36, email_id)
        # remember the provider's id so replies can be threaded
        message.email_id = email_id
        message._commit()
        g.stats.simple_event("modmail_email.outgoing_email")
def message_event(self, message, event_type="ss.send_message",
                  request=None, context=None):
    """Create a 'message' event for event-collector.

    message: An v1.models.Message object
    request: pylons.request of the request that created the message
    context: pylons.tmpl_context of the request that created the message
    """
    from v1.models import Account, Message

    sender = message.author_slow

    # resolve the conversation root; a first message is its own root
    if message.first_message:
        first_message = Message._byID(message.first_message, data=True)
    else:
        first_message = message

    event = Event(
        topic="message_events",
        event_type=event_type,
        time=message._date,
        request=request,
        context=context,
        data={
            # set these manually rather than allowing them to be set from
            # the request context because the loggedin user might not
            # be the message sender
            "user_id": sender._id,
            "user_name": sender.name,
        },
    )

    if sender == Account.system_user():
        sender_type = "automated"
    else:
        sender_type = "user"

    event.add("sender_type", sender_type)
    event.add("message_kind", "message")
    event.add("message_id", message._id)
    event.add("message_fullname", message._fullname)
    event.add_text("message_body", message.body)
    event.add_text("message_subject", message.subject)
    event.add("first_message_id", first_message._id)
    event.add("first_message_fullname", first_message._fullname)

    # record which UI surface sent the message, if it's one we track
    if request and request.POST.get("source", None):
        source = request.POST["source"]
        if source in {"compose", "permalink", "usermail"}:
            event.add("page", source)

    if message.sent_via_email:
        event.add("is_third_party", True)
        event.add("third_party_metadata", "mailgun")

    target = Account._byID(message.to_id, data=True)
    event.add_target_fields(target)

    self.save_event(event)