def recompute_unread(min_date = None):
    """Rebuild the cached unread message/comment queries for affected accounts.

    Walks the Inbox relations (optionally restricted to rels newer than
    *min_date*) and regenerates each affected account's unread
    CachedResults.
    """
    from r2.models import Inbox, Account, Comment, Message
    from r2.lib.db import queries

    def load_accounts(inbox_rel):
        # collect the distinct account ids on the owner side (_thing1)
        # of the given inbox relation
        accounts = set()
        q = inbox_rel._query(eager_load = False, data = False,
                             sort = desc("_date"))
        if min_date:
            q._filter(inbox_rel.c._date > min_date)

        for i in fetch_things2(q):
            accounts.add(i._thing1_id)

        return accounts

    # message inboxes first
    accounts_m = load_accounts(Inbox.rel(Account, Message))
    for i, a in enumerate(accounts_m):
        a = Account._byID(a)
        print "%s / %s : %s" % (i, len(accounts_m), a)
        queries.get_unread_messages(a).update()
        queries.get_unread_comments(a).update()
        queries.get_unread_selfreply(a).update()

    # comment inboxes, skipping accounts already refreshed above
    accounts = load_accounts(Inbox.rel(Account, Comment)) - accounts_m
    for i, a in enumerate(accounts):
        a = Account._byID(a)
        print "%s / %s : %s" % (i, len(accounts), a)
        queries.get_unread_comments(a).update()
        queries.get_unread_selfreply(a).update()
def store_keys(key, maxes):
    """Insert precomputed (sort-value, fullname) maxima into cached queries.

    *key* encodes the listing type ('user-', 'sr-', 'domain/' or a
    user-rel kind such as 'liked-'); *maxes* holds the tuples produced by
    the old permacache map/reduce migration.
    """
    # we're building queries using queries.py, but we could make the
    # queries ourselves if we wanted to avoid the individual lookups
    # for accounts and subreddits.
    # Note that we're only generating the 'sr-' type queries here, but
    # we're also able to process the other listings generated by the
    # old migrate.mr_permacache for convenience
    userrel_fns = dict(liked = queries.get_liked,
                       disliked = queries.get_disliked,
                       saved = queries.get_saved,
                       hidden = queries.get_hidden)
    if key.startswith('user-'):
        acc_str, keytype, account_id = key.split('-')
        account_id = int(account_id)
        fn = queries.get_submitted if keytype == 'submitted' else queries.get_comments
        q = fn(Account._byID(account_id), 'new', 'all')
        q._insert_tuples([(fname, float(timestamp))
                          for (timestamp, fname) in maxes])
    elif key.startswith('sr-'):
        sr_str, sort, time, sr_id = key.split('-')
        sr_id = int(sr_id)

        if sort == 'controversy':
            # I screwed this up in the mapper and it's too late to fix
            # it
            sort = 'controversial'

        q = queries.get_links(Subreddit._byID(sr_id), sort, time)
        q._insert_tuples([tuple([item[-1]] + map(float, item[:-1]))
                          for item in maxes])
    elif key.startswith('domain/'):
        d_str, sort, time, domain = key.split('/')
        q = queries.get_domain_links(domain, sort, time)
        q._insert_tuples([tuple([item[-1]] + map(float, item[:-1]))
                          for item in maxes])
    elif key.split('-')[0] in userrel_fns:
        key_type, account_id = key.split('-')
        account_id = int(account_id)
        fn = userrel_fns[key_type]
        q = fn(Account._byID(account_id))
        q._insert_tuples([tuple([item[-1]] + map(float, item[:-1]))
                          for item in maxes])
def _claim_hat(msg):
    """Grant the team-based Steam promo item for the account in *msg*.

    *msg* is a queue message whose JSON body carries "user-id" and
    "steam-id".  The account is marked "claimed" on success; a bare
    Exception is raised on Steam failure so the message is retried.
    """
    data = json.loads(msg.body)

    account = Account._byID(int(data["user-id"]), data=True)
    if account.f2p != "claiming":
        g.log.warning("%r attempted to claim twice!", account)
        return

    user_team = scores.get_user_team(account)
    promo_id = g.steam_promo_items[user_team]

    g.stats.event_count("f2p.claim_hat", "item_%s" % promo_id)

    response = session.post(GRANT_URL, data={
        "SteamID": data["steam-id"],
        "PromoID": promo_id,
        "key": g.steam_api_key,
        "format": "json",
    })

    # band-aid for requests API change in v1.0.0 (.json became a method)
    if callable(response.json):
        response_data = response.json()
    else:
        response_data = response.json

    if response_data["result"]["status"] != 1:
        g.log.warning("Steam Promo for %r -> %r failed: %r",
                      account, data["steam-id"], response_data)
        raise Exception

    account.f2p = "claimed"
    account._commit()
def edit_campaign(link, campaign, dates, bid, cpm, sr, priority): sr_name = sr.name if sr else '' # empty string means target to all # if the bid amount changed, cancel any pending transactions if campaign.bid != bid: void_campaign(link, campaign) # update the schedule PromotionWeights.reschedule(link, campaign._id, sr_name, dates[0], dates[1], bid) # update values in the db campaign.update(dates[0], dates[1], bid, cpm, sr_name, campaign.trans_id, priority, commit=True) if campaign.priority.cpm: # record the transaction text = 'updated campaign %s. (bid: %0.2f)' % (campaign._id, bid) PromotionLog.add(link, text) # make it a freebie, if applicable author = Account._byID(link.author_id, True) if getattr(author, "complimentary_promos", False): free_campaign(link, campaign, c.user) hooks.get_hook('promote.edit_campaign').call(link=link, campaign=campaign)
def update_num_gildings(update_trophy=True, user_id=None):
    """Recompute and store ``num_gildings`` for every gilding purchaser.

    Tallies gold transactions whose trans_id starts with 'X' (gildings),
    grouped by the paying account, writes the count onto each Account,
    and optionally queues a refresh of the "gilding" trophy.  Restrict to
    a single buyer via *user_id*.  Returns None; progress is logged.
    """
    query = (select([gold_table.c.paying_id,
                     sa_count(gold_table.c.trans_id)])
             .where(gold_table.c.trans_id.like('X%'))
             .group_by(gold_table.c.paying_id)
             .order_by(sa_count(gold_table.c.trans_id).desc())
             )

    if user_id:
        query = query.where(gold_table.c.paying_id == str(user_id))

    rows = ENGINE.execute(query)
    total_updated = 0
    for paying_id, count in rows:
        try:
            a = Account._byID(int(paying_id), data=True)
            a.num_gildings = count
            a._commit()
            total_updated += 1
            # if 'server seconds paid' for are public, update gilding trophies
            if update_trophy and a.pref_public_server_seconds:
                add_to_trophy_queue(a, "gilding")
        except Exception:
            # was a bare `except:`, which also swallowed KeyboardInterrupt
            # and SystemExit; narrowed so the batch can still be aborted
            g.log.debug("update_num_gildings: paying_id %s is invalid"
                        % paying_id)

    g.log.debug("update_num_gildings: updated %s accounts" % total_updated)
def _promo_email(thing, kind, body="", **kw):
    """Render a Promo_Email for *thing* and pass it on to _system_email."""
    from r2.lib.pages import Promo_Email

    author = Account._byID(thing.author_id, True)
    rendered = Promo_Email(link=thing, kind=kind, body=body,
                           **kw).render(style="email")
    return _system_email(author.email, rendered, kind, thing=thing,
                         reply_to="*****@*****.**")
def update_karmas(): for pair in to_update(): user = Account._byID(pair[0], True) sr = Subreddit._byID(pair[1], True) print user.name, sr.name user.incr_karma('comment', sr, 20)
def set_up_comment_embed(sr, thing, showedits):
    """Configure the request context for rendering an embedded comment.

    Populates c.embed_config from the request and *thing*, forces the
    iframe render style, and swaps in a logged-out user context.
    """
    try:
        author = Account._byID(thing.author_id) if thing.author_id else None
    except NotFound:
        author = None

    iso_timestamp = request.GET.get("created", "")
    uuid = request.GET.get("uuid", "")

    c.embed_config = {
        "eventtracker_url": g.eventtracker_url or "",
        "anon_eventtracker_url": g.anon_eventtracker_url or "",
        "event_clicktracker_url": g.event_clicktracker_url or "",
        "created": iso_timestamp,
        "uuid": uuid,
        "showedits": showedits,
        "thing": {
            "id": thing._id,
            "sr_id": sr._id,
            "sr_name": sr.name,
            "edited": edited_after(thing, iso_timestamp, showedits),
            # guard against author being None (missing account or no
            # author_id); previously raised AttributeError on `_deleted`
            "deleted": thing.deleted or (author is not None and
                                         author._deleted),
        },
        "comment_max_height": 200,
    }

    c.render_style = "iframe"
    c.user = UnloggedUser([c.lang])
    c.user_is_loggedin = False
    c.forced_loggedout = True
def process_message(msgs, chan):
    """Update get_submitted(), the Links by author precomputed query.

    get_submitted() is a CachedResult which is stored in permacache. To
    update these objects we need to do a read-modify-write which requires
    obtaining a lock. Sharding these updates by author allows us to run
    multiple consumers (but ideally just one per shard) to avoid lock
    contention.
    """
    from r2.lib.db.queries import add_queries, get_submitted

    link_names = {msg.body for msg in msgs}
    links = Link._by_fullname(link_names, return_dict=False)
    print 'Processing %r' % (links,)

    # group links by author so each author's queries are touched once
    links_by_author_id = defaultdict(list)
    for link in links:
        links_by_author_id[link.author_id].append(link)

    authors_by_id = Account._byID(links_by_author_id.keys())

    for author_id, links in links_by_author_id.iteritems():
        with g.stats.get_timer("link_vote_processor.author_queries"):
            author = authors_by_id[author_id]
            add_queries(
                queries=[
                    get_submitted(author, sort, 'all')
                    for sort in SORTS],
                insert_items=links,
            )
def _handle_vote(msgs, chan): comments = [] for msg in msgs: tag = msg.delivery_tag r = pickle.loads(msg.body) uid, tid, dir, ip, organic, cheater = r voter = Account._byID(uid, data=True) votee = Thing._by_fullname(tid, data = True) if isinstance(votee, Comment): comments.append(votee) if not isinstance(votee, (Link, Comment)): # I don't know how, but somebody is sneaking in votes # for subreddits continue print (voter, votee, dir, ip, organic, cheater) try: handle_vote(voter, votee, dir, ip, organic, cheater=cheater, foreground=False) except Exception, e: print 'Rejecting %r:%r because of %r' % (msg.delivery_tag, r,e) chan.basic_reject(msg.delivery_tag, requeue=True)
def charge_pending(offset=1):
    """Attempt to charge the stored transaction for each accepted campaign.

    Skips campaigns already charged (or needing no charge), fires the
    new_charge hook on success, queues the promo email, and moves the
    link to pending promote status when it isn't live yet.  Errors are
    printed and the loop continues with the next campaign.
    """
    for l, camp, weight in accepted_campaigns(offset=offset):
        user = Account._byID(l.author_id)
        try:
            if charged_or_not_needed(camp):
                continue

            charge_succeeded = authorize.charge_transaction(user,
                                                            camp.trans_id,
                                                            camp._id)
            if not charge_succeeded:
                continue

            hooks.get_hook('promote.new_charge').call(link=l, campaign=camp)

            if is_promoted(l):
                emailer.queue_promo(l, camp.bid, camp.trans_id)
            else:
                set_promote_status(l, PROMOTE_STATUS.pending)
                emailer.queue_promo(l, camp.bid, camp.trans_id)
            text = ('auth charge for campaign %s, trans_id: %d'
                    % (camp._id, camp.trans_id))
            PromotionLog.add(l, text)
        except:
            # NOTE(review): bare except keeps the batch going but hides
            # the actual error; consider logging the traceback
            print "Error on %s, campaign %s" % (l, camp._id)
def charge_campaign(link, campaign):
    """Charge the authorized transaction for *campaign* if still needed.

    If the authorization hold has expired, void the campaign's trans_id
    and log it instead.  On success, fires the edit hook, flips the link
    to pending promote status when needed, queues the promo email, and
    writes a PromotionLog entry.
    """
    if charged_or_not_needed(campaign):
        return

    user = Account._byID(link.author_id)
    success, reason = authorize.charge_transaction(user, campaign.trans_id,
                                                   campaign._id)

    if not success:
        if reason == authorize.TRANSACTION_NOT_FOUND:
            # authorization hold has expired
            original_trans_id = campaign.trans_id
            campaign.trans_id = NO_TRANSACTION
            campaign._commit()
            text = ('voided expired transaction for %s: (trans_id: %d)'
                    % (campaign, original_trans_id))
            PromotionLog.add(link, text)
        return

    hooks.get_hook('promote.edit_campaign').call(link=link, campaign=campaign)

    if not is_promoted(link):
        update_promote_status(link, PROMOTE_STATUS.pending)

    emailer.queue_promo(link, campaign.bid, campaign.trans_id)
    text = ('auth charge for campaign %s, trans_id: %d'
            % (campaign._id, campaign.trans_id))
    PromotionLog.add(link, text)
def edit_campaign(link, campaign, dates, bid, sr): sr_name = sr.name if sr else '' # empty string means target to all try: # if the bid amount changed, cancel any pending transactions if campaign.bid != bid: void_campaign(link, campaign) # update the schedule PromotionWeights.reschedule(link, campaign._id, sr_name, dates[0], dates[1], bid) # update values in the db campaign.update(dates[0], dates[1], bid, sr_name, campaign.trans_id, commit=True) # record the transaction text = 'updated campaign %s. (bid: %0.2f)' % (campaign._id, bid) PromotionLog.add(link, text) # make it a freebie, if applicable author = Account._byID(link.author_id, True) if getattr(author, "complimentary_promos", False): free_campaign(link, campaign, c.user) except Exception, e: # record error and rethrow g.log.error("Failed to update PromoCampaign %s on link %d. Error was: %r" % (campaign._id, link._id, e)) try: # wrapped in try/except so orig error won't be lost if commit fails text = 'update FAILED. (campaign: %s, bid: %.2f)' % (campaign._id, bid) PromotionLog.add(link, text) except: pass raise e
def refund_campaign(link, camp, billable_amount, billable_impressions):
    """Refund the unbilled portion of a campaign's charge.

    No-op when the computed refund is zero or negative.  Authorize.net
    failures are logged to the PromotionLog and the campaign is left
    unrefunded.
    """
    refund_amount = get_refund_amount(camp, billable_amount)
    if refund_amount <= 0:
        return

    owner = Account._byID(camp.owner_id, data=True)
    try:
        success = authorize.refund_transaction(owner, camp.trans_id,
                                               camp._id, refund_amount)
    except authorize.AuthorizeNetException as e:
        text = "%s $%s refund failed" % (camp, refund_amount)
        PromotionLog.add(link, text)
        g.log.debug(text + " (response: %s)" % e)
        return

    # the two adjacent string literals concatenate into one format string
    text = "%s completed with $%s billable (%s impressions @ $%s)." \
           " %s refunded." % (camp, billable_amount, billable_impressions,
                              camp.cpm, refund_amount,)
    PromotionLog.add(link, text)
    camp.refund_amount = refund_amount
    camp._commit()
    unset_underdelivered_campaigns(camp)
    emailer.refunded_promo(link)
def process_message(msg):
    """Create and commit a Vote from a queued JSON vote message.

    Takes a per-(user, thing) lock so concurrent consumers can't
    interleave read-modify-write updates for the same vote.  Note that
    ``queue`` is a name from the enclosing scope, not a parameter.
    """
    timer = g.stats.get_timer("new_voting.%s" % queue)
    timer.start()

    vote_data = json.loads(msg.body)

    user = Account._byID(vote_data.pop("user_id"), data=True)
    thing = Thing._by_fullname(vote_data.pop("thing_fullname"), data=True)

    timer.intermediate("preamble")

    lock_key = "vote-%s-%s" % (user._id36, thing._fullname)
    with g.make_lock("voting", lock_key, timeout=5):
        print "Processing vote by %s on %s %s" % (user, thing, vote_data)

        try:
            vote = Vote(
                user,
                thing,
                direction=vote_data["direction"],
                date=datetime.utcfromtimestamp(vote_data["date"]),
                data=vote_data["data"],
                event_data=vote_data.get("event_data"),
            )
        except TypeError as e:
            # a vote on an invalid type got in the queue, just skip it
            g.log.error(e.message)
            return

        timer.intermediate("create_vote_obj")

        vote.commit()

        timer.flush()
def __init__(self, link = None, comment = None, link_title = '',
             *a, **kw):
    """Build the link-details page: a one-element link listing plus title.

    When *comment* is given, the page title uses the permalink form
    (author + link title); otherwise the link-info form.  Off the main
    page, a canonical URL pointer is attached as well.
    """
    # TODO: temp hack until we find place for builder_wrapper
    link.render_full = True

    from r2.controllers.listingcontroller import ListingController
    link_builder = IDBuilder(link._fullname,
                             wrap = ListingController.builder_wrapper)

    # link_listing will be the one-element listing at the top
    self.link_listing = LinkListing(link_builder, nextprev=False).listing()

    # link is a wrapped Link object
    self.link = self.link_listing.things[0]

    link_title = ((self.link.title) if hasattr(self.link, 'title') else '')
    if comment:
        author = Account._byID(comment.author_id, data=True).name
        params = {'author' : author, 'title' : _force_unicode(link_title)}
        title = strings.permalink_title % params
    else:
        params = {'title':_force_unicode(link_title),
                  'site' : c.site.title}
        title = strings.link_info_title % params

    if not c.default_sr:
        # Not on the main page, so include a pointer to the canonical
        # URL for this link
        self.canonical_link = link.canonical_url

    Reddit.__init__(self, title = title, body_class = 'post', *a, **kw)
def new(cls, user, thing): from r2.lib.db import queries # check if this report exists already! rel = cls.rel(user, thing) q = rel._fast_query(user, thing, ['-1', '0', '1']) q = [ report for (tupl, report) in q.iteritems() if report ] if q: # stop if we've seen this before, so that we never get the # same report from the same user twice oldreport = q[0] g.log.debug("Ignoring duplicate report %s" % oldreport) return oldreport r = Report(user, thing, '0') if not thing._loaded: thing._load() # mark item as reported thing._incr(cls._field) r._commit() if hasattr(thing, 'author_id'): author = Account._byID(thing.author_id, data=True) author._incr('reported') # update the reports queue if it exists queries.new_report(thing) # if the thing is already marked as spam, accept the report if thing._spam: cls.accept(thing) return r
def get_authenticated_account(self):
    """Resolve the session cookie to an Account, or None when invalid.

    Validates the cookie's MAC with a constant-time compare and gives
    the enhanced-privacy hook a chance to veto the session.
    """
    from r2.models import Account, NotFound

    quoted_session_cookie = request.cookies.get(g.login_cookie)
    if not quoted_session_cookie:
        return None
    session_cookie = urllib.unquote(quoted_session_cookie)

    try:
        uid, timestr, hash = session_cookie.split(",")
        uid = int(uid)
    except:
        # malformed cookie; treat as unauthenticated
        return None

    try:
        account = Account._byID(uid, data=True)
    except NotFound:
        return None

    # constant-time compare to avoid leaking the MAC via timing
    expected_cookie = account.make_cookie(timestr)
    if not constant_time_compare(session_cookie, expected_cookie):
        return None

    if not hooks.get_hook("enhanced.privacy.check").call_until_return(
            uid=uid, hash=hash):
        return None

    return account
def new_comment(comment, inbox_rels):
    """Register a new (or deleted) comment in author/inbox cached queries.

    Deleted comments are removed from the author's listing and the
    all-comments listing; new ones are inserted, announced on amqp, and
    (when no amqp host is configured) added to the comment tree inline.
    """
    author = Account._byID(comment.author_id)
    job = [get_comments(author, "new", "all")]
    if comment._deleted:
        job.append(get_all_comments())
        add_queries(job, delete_items=comment)
    else:
        # if comment._spam:
        #     sr = Subreddit._byID(comment.sr_id)
        #     job.append(get_spam_comments(sr))
        add_queries(job, insert_items=comment)
        amqp.add_item("new_comment", comment._fullname)
        if not g.amqp_host:
            l = Link._byID(comment.link_id, data=True)
            add_comment_tree(comment, l)

    # note that get_all_comments() is updated by the amqp process
    # r2.lib.db.queries.run_new_comments

    if inbox_rels:
        for inbox_rel in tup(inbox_rels):
            inbox_owner = inbox_rel._thing1
            if inbox_rel._name == "inbox":
                add_queries([get_inbox_comments(inbox_owner)],
                            insert_items=inbox_rel)
            else:
                add_queries([get_inbox_selfreply(inbox_owner)],
                            insert_items=inbox_rel)
            set_unread(comment, inbox_owner, True)
def upgrade_messages(update_comments=True, update_messages=True,
                     update_trees=True):
    """One-off migration: rebuild unread state and message comment trees.

    Walks new Messages (and optionally new Comments), re-applies
    queries.set_unread, then precomputes the message comment tree for
    every affected account.
    """
    from r2.lib.db import queries
    from r2.lib import comment_tree, cache
    from r2.models import Account
    from pylons import app_globals as g

    accounts = set()

    def batch_fn(items):
        # drop caches between batches to bound memory use
        g.reset_caches()
        return items

    if update_messages or update_trees:
        q = Message._query(Message.c.new == True,
                           sort=desc("_date"),
                           data=True)
        for m in fetch_things2(q, batch_fn=batch_fn):
            print m, m._date
            if update_messages:
                accounts = accounts | queries.set_unread(m, m.new)
            else:
                accounts.add(m.to_id)
    if update_comments:
        q = Comment._query(Comment.c.new == True,
                           sort=desc("_date"))
        # magic id cutoff from the original migration run
        q._filter(Comment.c._id < 26152162676)

        for m in fetch_things2(q, batch_fn=batch_fn):
            print m, m._date
            queries.set_unread(m, True)

    print "Precomputing comment trees for %d accounts" % len(accounts)

    for i, a in enumerate(accounts):
        if not isinstance(a, Account):
            a = Account._byID(a)
        print i, a
        comment_tree.user_messages(a)
def _handle_upsert_campaign(payload):
    """Sync a campaign's lineitem and its link's creative with the ad server."""
    promo_link = Link._by_fullname(payload["link"], data=True)
    promo_campaign = PromoCampaign._by_fullname(payload["campaign"], data=True)
    campaign_owner = Account._byID(promo_campaign.owner_id)
    link_author = Account._byID(promo_link.author_id)

    try:
        lineitem = lineitems_service.upsert_lineitem(
            campaign_owner, promo_campaign)
    except ValueError as e:
        g.log.error("unable to upsert lineitem: %s" % e)
        return

    creative = creatives_service.upsert_creative(link_author, promo_link)

    lineitems_service.associate_with_creative(
        lineitem=lineitem, creative=creative)
def submit_rss_links(srname,rss,user,titlefield='title',linkfield='link'):
    """Fetch an RSS feed and submit each entry as a link to *srname*.

    Bypasses the API layer and submits directly, which avoids dealing
    with cookies and the rest of the request machinery.  Entries are
    shuffled and submitted with a delay between them.
    """
    feed=fetch_feed(rss)
    if feed is None:
        return
    ac=Account._byID(user)
    sr=Subsciteit._by_name(srname)
    ip='0.0.0.0'

    # arxiv.org titles carry an "(arXiv:...)" suffix that gets stripped
    niceify=False
    if domain(rss)=="arxiv.org":
        niceify=dict(find="\(arXiv:.*?\)",replace="")

    # randomize submission order
    random.shuffle(feed.entries)

    for article in feed.entries:
        # this can take all night if it has to; we don't want to hammer
        # the server into oblivion
        sleep(1)
        kw = fetch_article(article,titlefield=titlefield,
                           linkfield=linkfield,niceify=niceify)
        if kw is None:
            continue
        l = Link._submit(kw['title'],kw['link'],ac,sr,ip,spam=False)
        l._commit()
        l.set_url_cache()
        # auto-submitted links don't really need to be voted on...
        queries.queue_vote(ac,l,True,ip,cheater=False)
        queries.new_link(l)
        changed(l)
        print "Submitted %s" % article[titlefield]
        sleep(.1)
    return
def report(self):
    """Compare freshly computed karma values with the stored Account attrs.

    Prints each (account, kind, sr) whose recomputed total differs from
    the stored total, plus a summary count.  NOTE(review): ``different``
    is only incremented on the migrate path, so the summary undercounts
    in the non-migrate branch.
    """
    different = 0
    total = len(self.new_values)
    logged_keys = set()
    for account_id, pairs in self.new_values.iteritems():
        try:
            account = Account._byID(account_id, data=True)
        except NotFound:
            continue

        if self.migrate:
            for k, v in list(pairs.iteritems()):
                # keys look like karma_{ups|downs}_{kind}_{sr}
                _, dir, kind, sr = k.split('_')

                old_total = getattr(account,
                                    '{0}_{1}_karma'.format(sr, kind), 0)
                new_total = pairs['karma_ups_{0}_{1}'.format(kind, sr)] - \
                    pairs['karma_downs_{0}_{1}'.format(kind, sr)]
                if old_total != new_total:
                    different += 1
                    # only log each (account, kind, sr) combination once
                    if (account.name, kind, sr) not in logged_keys:
                        logged_keys.add((account.name, kind, sr))
                        print('{0}["{1}_{2}"] differs - old={3}, new={4}'.format(
                            account.name, kind, sr, old_total, new_total))
        else:
            for k, v in pairs.iteritems():
                old_v = getattr(account, k, 0)
                if v != old_v:
                    print('{0} differs - old={1}, new={2}'.format(
                        k, old_v, v))
    print('{0} out of {1} values differed'.format(different, total))
def void_campaign(link, campaign):
    """Void any pending authorization for *campaign* and fire the void hook."""
    bid_record = get_transactions(link, [campaign]).get(campaign._id)
    if bid_record:
        author = Account._byID(link.author_id)
        authorize.void_transaction(author, bid_record.transaction,
                                   campaign._id)
    hooks.get_hook('campaign.void').call(link=link, campaign=campaign)
def refund_campaign(link, camp, refund_amount, billable_amount,
                    billable_impressions):
    """Issue a refund for a campaign's unbilled spend.

    Returns True on success, False when authorize.net declines.  Logs
    the outcome to the PromotionLog, records the refund amount on the
    campaign, and clears the underdelivered flag.
    """
    owner = Account._byID(camp.owner_id, data=True)
    success, reason = authorize.refund_transaction(
        owner, camp.trans_id, camp._id, refund_amount)
    if not success:
        text = ('%s $%s refund failed' % (camp, refund_amount))
        PromotionLog.add(link, text)
        g.log.debug(text + ' (reason: %s)' % reason)
        return False

    if billable_impressions:
        text = ('%s completed with $%s billable (%s impressions @ $%s).'
                ' %s refunded.' % (camp, billable_amount,
                                   billable_impressions,
                                   camp.bid_pennies / 100.,
                                   refund_amount))
    else:
        text = ('%s completed with $%s billable. %s refunded' % (
            camp, billable_amount, refund_amount))

    PromotionLog.add(link, text)
    camp.refund_amount = refund_amount
    camp._commit()
    queries.unset_underdelivered_campaigns(camp)
    emailer.refunded_promo(link)

    return True
def new_comment(comment, inbox_rels):
    """Register a new (or deleted) comment in cached queries and inboxes.

    Deleted comments are removed from the author's and all-comments
    listings; new ones are inserted (plus the spam listing when spam),
    announced on amqp, and added to the comment tree inline when no amqp
    host is configured.
    """
    author = Account._byID(comment.author_id)
    job = [get_comments(author, 'new', 'all')]
    if comment._deleted:
        job.append(get_all_comments())
        add_queries(job, delete_items = comment)
    else:
        if comment._spam:
            sr = Subreddit._byID(comment.sr_id)
            job.append(get_spam_comments(sr))
        add_queries(job, insert_items = comment)
        amqp.add_item('new_comment', comment._fullname)
        if not g.amqp_host:
            add_comment_tree([comment])

    # note that get_all_comments() is updated by the amqp process
    # r2.lib.db.queries.run_new_comments (to minimise lock contention)

    if inbox_rels:
        for inbox_rel in tup(inbox_rels):
            inbox_owner = inbox_rel._thing1
            if inbox_rel._name == "inbox":
                add_queries([get_inbox_comments(inbox_owner)],
                            insert_items = inbox_rel)
            else:
                add_queries([get_inbox_selfreply(inbox_owner)],
                            insert_items = inbox_rel)
            set_unread(comment, inbox_owner, True)
def set_up_embed(embed_key, sr, thing, showedits):
    """Validate the embed HMAC and configure an embedded-comment context.

    Aborts with 401 unless *embed_key* matches the expected MAC for the
    thing; otherwise populates c.embed_config, forces the iframe render
    style, and swaps in a logged-out user context.
    """
    expected_mac = hmac.new(g.secrets["comment_embed"], thing._id36,
                            hashlib.sha1).hexdigest()
    if not constant_time_compare(embed_key or "", expected_mac):
        abort(401)

    try:
        author = Account._byID(thing.author_id) if thing.author_id else None
    except NotFound:
        author = None

    iso_timestamp = request.GET.get("created", "")

    c.embed_config = {
        "eventtracker_url": g.eventtracker_url or "",
        "anon_eventtracker_url": g.anon_eventtracker_url or "",
        "created": iso_timestamp,
        "showedits": showedits,
        "thing": {
            "id": thing._id,
            "sr_id": sr._id,
            "sr_name": sr.name,
            "edited": edited_after(thing, iso_timestamp, showedits),
            # guard against author being None (missing account or no
            # author_id); previously raised AttributeError on `_deleted`
            "deleted": thing.deleted or (author is not None and
                                         author._deleted),
        },
    }

    c.render_style = "iframe"
    c.user = UnloggedUser([c.lang])
    c.user_is_loggedin = False
    c.forced_loggedout = True
def process_webhook(self, event_type, webhook):
    """Handle a gold-payment webhook event.

    Depending on *event_type*: reverses cancelled/refunded purchases,
    completes successful ones, and composes a notification that is sent
    to the buyer at the end when one was produced.
    """
    if event_type == "noop":
        return

    existing = retrieve_gold_transaction(webhook.transaction_id)
    if not existing and webhook.passthrough:
        try:
            webhook.load_blob()
        except GoldException as e:
            g.log.error("%s: payment_blob %s", webhook.transaction_id, e)
            self.abort403()
    msg = None

    if event_type == "cancelled":
        subject = _("reddit gold payment cancelled")
        msg = _("Your reddit gold payment has been cancelled, contact "
                "%(gold_email)s for details") % {
                    "gold_email": g.goldthanks_email}
        if existing:
            # note that we don't check status on existing, probably
            # should update gold_table when a cancellation happens
            reverse_gold_purchase(webhook.transaction_id)
    elif event_type == "succeeded":
        if existing and existing.status == "processed":
            # already handled; don't double-grant
            g.log.info("POST_goldwebhook skipping %s"
                       % webhook.transaction_id)
            return

        self.complete_gold_purchase(webhook)
    elif event_type == "failed":
        subject = _("reddit gold payment failed")
        msg = _("Your reddit gold payment has failed, contact "
                "%(gold_email)s for details") % {
                    "gold_email": g.goldthanks_email}
    elif event_type == "failed_subscription":
        subject = _("reddit gold subscription payment failed")
        msg = _(
            "Your reddit gold subscription payment has failed. "
            "Please go to http://www.reddit.com/subscription to "
            "make sure your information is correct, or contact "
            "%(gold_email)s for details"
        ) % {"gold_email": g.goldthanks_email}
    elif event_type == "refunded":
        # only refund purchases we actually completed
        if not (existing and existing.status == "processed"):
            return

        subject = _("reddit gold refund")
        msg = _("Your reddit gold payment has been refunded, contact "
                "%(gold_email)s for details") % {
                    "gold_email": g.goldthanks_email}
        reverse_gold_purchase(webhook.transaction_id)

    if msg:
        if existing:
            buyer = Account._byID(int(existing.account_id), data=True)
        elif webhook.buyer:
            buyer = webhook.buyer
        else:
            return
        send_system_message(buyer, subject, msg)
def liveupdate_add_props(user, wrapped):
    """Attach author and pretty-date presentation props to wrapped updates."""
    author_ids = {w.author_id for w in wrapped}
    authors_by_id = Account._byID(author_ids, data=True)

    for item in wrapped:
        item.author = LiveUpdateAccount(authors_by_id[item.author_id])
        item.date_str = pretty_time(item._date)
def get_test_user(self):
    """Fetch account #1 for testing, backfilling an email and default subs."""
    test_account = Account._byID(1, data=True)
    if not test_account.email:
        test_account.email = '*****@*****.**'
        test_account._commit()
    c.content_langs = ['en']
    Subreddit.subscribe_defaults(test_account)
    return test_account
def get_reports(cls, wrapped, max_user_reasons=20):
    """Get two lists of mod and user reports on the item.

    Mod reports are (reason, moderator name) tuples; user reports are
    (reason, count) tuples capped at *max_user_reasons*.  Only computed
    when the viewer can act on the report (can_ban, or sponsor viewing a
    promoted thing); otherwise both lists are empty.
    """
    if (wrapped.reported > 0 and
            (wrapped.can_ban or
             getattr(wrapped, "promoted", None) and c.user_is_sponsor)):
        from r2.models import SRMember

        reports = cls.for_thing(wrapped.lookups[0])

        # when each mod was added, so pre-modship reports count as user
        # reports
        query = SRMember._query(SRMember.c._thing1_id == wrapped.sr_id,
                                SRMember.c._name == "moderator")
        mod_dates = {rel._thing2_id: rel._date for rel in query}

        if g.automoderator_account:
            automoderator = Account._by_name(g.automoderator_account)
        else:
            automoderator = None

        mod_reports = []
        user_reports = []

        for report in reports:
            # always include AutoModerator reports
            if automoderator and report._thing1_id == automoderator._id:
                mod_reports.append(report)
            # include in mod reports if made after the user became a mod
            elif (report._thing1_id in mod_dates and
                    report._date >= mod_dates[report._thing1_id]):
                mod_reports.append(report)
            else:
                user_reports.append(report)

        # mod reports return as tuples with (reason, name)
        mods = Account._byID([report._thing1_id
                              for report in mod_reports],
                             data=True, return_dict=True)
        mod_reports = [(getattr(report, "reason", None),
                        mods[report._thing1_id].name)
                       for report in mod_reports]

        # user reports return as tuples with (reason, count)
        user_reports = Counter([getattr(report, "reason", None)
                                for report in user_reports])
        user_reports = user_reports.most_common(max_user_reasons)

        return mod_reports, user_reports
    else:
        return [], []
def from_queue(self, max_date, batch_limit=50, kind=None):
    """Yield queued email jobs older than *max_date*, in uid order.

    Pages through the queue table *batch_limit* rows at a time,
    batch-loading the referenced accounts and things, and re-checking IP
    bans before yielding each
    (account, thing, addr, from_name, date, ip, banned_ip, kind,
    msg_hash, body, fr_addr, reply_to) tuple.
    """
    from r2.models import is_banned_IP, Account, Thing
    keep_trying = True
    min_id = None
    s = self.queue_table
    while keep_trying:
        where = [s.c.date < max_date]
        if min_id:
            where.append(s.c.uid > min_id)
        if kind:
            where.append(s.c.kind == kind)

        res = sa.select([s.c.to_addr, s.c.account_id,
                         s.c.from_name, s.c.fullname, s.c.body,
                         s.c.kind, s.c.ip, s.c.date, s.c.uid,
                         s.c.msg_hash, s.c.fr_addr, s.c.reply_to],
                        sa.and_(*where),
                        order_by=s.c.uid, limit=batch_limit).execute()
        res = res.fetchall()

        if not res:
            break

        # batch load user accounts
        aids = [x[1] for x in res if x[1] > 0]
        accts = Account._byID(aids, data=True,
                              return_dict=True) if aids else {}

        # batch load things
        tids = [x[3] for x in res if x[3]]
        things = Thing._by_fullname(tids, data=True,
                                    return_dict=True) if tids else {}

        # make sure no IPs have been banned in the mean time
        ips = set(x[6] for x in res)
        ips = dict((ip, is_banned_IP(ip)) for ip in ips)

        # get the lower bound date for next iteration
        min_id = max(x[8] for x in res)

        # did we not fetch them all?
        keep_trying = (len(res) == batch_limit)

        for (addr, acct, fname, fulln, body, kind, ip, date, uid,
             msg_hash, fr_addr, reply_to) in res:
            yield (accts.get(acct), things.get(fulln), addr, fname, date,
                   ip, ips[ip], kind, msg_hash, body, fr_addr, reply_to)
def new(cls, user, thing, reason=None, ip=None): from r2.lib.db import queries # check if this report exists already! rel = cls.rel(user, thing) q = rel._fast_query(user, thing, ['-1', '0', '1']) q = [ report for (tupl, report) in q.iteritems() if report ] if q: # stop if we've seen this before, so that we never get the # same report from the same user twice oldreport = q[0] g.log.debug("Ignoring duplicate report %s" % oldreport) return oldreport kw = {} if reason: kw['reason'] = reason r = Report(user, thing, '0', **kw) if ip: r.ip = ip r._commit() # mark item as reported try: thing._incr(cls._field) except (ValueError, TypeError): g.log.error("%r has bad field %r = %r" % (thing, cls._field, getattr(thing, cls._field, "(nonexistent)"))) raise if hasattr(thing, 'author_id'): author = Account._byID(thing.author_id, data=True) author._incr('reported') if not getattr(thing, "ignore_reports", False): # update the reports queue if it exists queries.new_report(thing, r) # if the thing is already marked as spam, accept the report if thing._spam: cls.accept(thing) hooks.get_hook("report.new").call(report=r) return r
def update_user(user):
    """Rebuild all of a user's cached profile listings.

    *user* may be an Account, a username, or an account id.
    """
    # accept unicode usernames too: plain `str` missed them under py2;
    # likewise `long` ids fell through the old `int`-only check
    if isinstance(user, basestring):
        user = Account._by_name(user)
    elif isinstance(user, (int, long)):
        user = Account._byID(user)

    results = [get_inbox_messages(user),
               get_inbox_comments(user),
               get_sent(user),
               get_liked(user),
               get_disliked(user),
               get_saved(user),
               get_hidden(user),
               get_submitted(user, 'new', 'all'),
               get_comments(user, 'new', 'all')]
    add_queries(results)
def _handle_vote(msg):
    """Process a single pickled vote message in the foreground."""
    #assert(len(msgs) == 1)
    r = pickle.loads(msg.body)

    uid, tid, dir, ip, organic, cheater = r
    voter = Account._byID(uid, data=True)
    votee = Thing._by_fullname(tid, data = True)
    if isinstance(votee, Comment):
        update_comment_votes([votee])

    # I don't know how, but somebody is sneaking in votes
    # for subreddits
    if isinstance(votee, (Link, Comment)):
        print (voter, votee, dir, ip, organic, cheater)
        handle_vote(voter, votee, dir, ip, organic,
                    cheater = cheater, foreground=True)
def new_campaign(link, dates, bid, cpm, sr, priority):
    """Create a PromoCampaign (and its PromotionWeights) for *link*.

    CPM campaigns by authors with the complimentary_promos flag become
    freebies; non-cpm campaigns fire the new_charge hook immediately
    since they are never charged.  Returns the new campaign.
    """
    # empty string for sr_name means target to all
    sr_name = sr.name if sr else ""
    campaign = PromoCampaign._new(link, sr_name, bid, cpm, dates[0], dates[1],
                                  priority)
    PromotionWeights.add(link, campaign._id, sr_name, dates[0], dates[1], bid)
    PromotionLog.add(link, 'campaign %s created' % campaign._id)

    if campaign.priority.cpm:
        author = Account._byID(link.author_id, data=True)
        if getattr(author, "complimentary_promos", False):
            free_campaign(link, campaign, c.user)
    else:
        # non-cpm campaigns are never charged, so we need to fire the hook now
        hooks.get_hook('promote.new_charge').call(link=link, campaign=campaign)

    return campaign
def new_message(message, inbox_rels):
    """Insert a new message into sender/recipient cached queries and trees.

    Moderator-inbox rels update the subreddit's message listing; personal
    rels update the sender's sent listing and the recipient's inbox, and
    mark the message unread for the recipient.
    """
    from r2.lib.comment_tree import add_message

    from_user = Account._byID(message.author_id)
    for inbox_rel in tup(inbox_rels):
        to = inbox_rel._thing1
        # moderator message
        if isinstance(inbox_rel, ModeratorInbox):
            add_queries([get_subreddit_messages(to)],
                        insert_items=inbox_rel)
        # personal message
        else:
            add_queries([get_sent(from_user)], insert_items=message)
            add_queries([get_inbox_messages(to)],
                        insert_items=inbox_rel)
        set_unread(message, to, True)

    add_message(message)
def new_campaign(link, dates, bid, cpm, target, frequency_cap,
                 frequency_cap_duration, priority, location, platform,
                 mobile_os):
    """Create a PromoCampaign for *link*, log it, and fire the creation hook.

    CPM campaigns by authors with the complimentary_promos flag are made
    free immediately.  Returns the new campaign.
    """
    campaign = PromoCampaign.create(link, target, bid, cpm, dates[0],
                                    dates[1], frequency_cap,
                                    frequency_cap_duration, priority,
                                    location, platform, mobile_os)
    PromotionWeights.add(link, campaign)
    PromotionLog.add(link, 'campaign %s created' % campaign._id)

    if campaign.priority.cpm:
        author = Account._byID(link.author_id, data=True)
        if getattr(author, "complimentary_promos", False):
            free_campaign(link, campaign, c.user)

    hooks.get_hook('promote.new_campaign').call(link=link, campaign=campaign)

    return campaign
def store_keys(key, maxes):
    """Replace the cached get_submitted tuples for a 'user-' listing key."""
    # we're building queries using queries.py, but we could make the
    # queries ourselves if we wanted to avoid the individual lookups
    # for accounts and subreddits.
    # Note that we're only generating the 'sr-' type queries here, but
    # we're also able to process the other listings generated by the
    # old migrate.mr_permacache for convenience
    if key.startswith('user-'):
        acc_str, sort, time, account_id = key.split('-')
        account_id = int(account_id)
        fn = queries.get_submitted
        q = fn(Account._byID(account_id), sort, time)
        q._replace([tuple([item[-1]] + map(float, item[:-1]))
                    for item in maxes])
def void_campaign(link, campaign, reason):
    """Void a campaign's authorized transaction and clear its trans_id.

    Emails the advertiser about the void unless the transaction was a
    freebie (non-positive transaction id, per the check below).
    """
    transactions = get_transactions(link, [campaign])
    bid_record = transactions.get(campaign._id)
    if bid_record:
        a = Account._byID(link.author_id)
        authorize.void_transaction(a, bid_record.transaction, campaign._id)
        campaign.trans_id = NO_TRANSACTION
        campaign._commit()

        text = ('voided transaction for %s: (trans_id: %d)'
                % (campaign, bid_record.transaction))
        PromotionLog.add(link, text)

        if bid_record.transaction > 0:
            # notify the user that the transaction was voided if it was not
            # a freebie
            emailer.void_payment(link, campaign, reason)
def _handle_vote(msgs, chan):
    """Process a batch of queued vote messages, then refresh comment scores."""
    #assert(len(msgs) == 1)
    comments = []
    for msg in msgs:
        # NOTE(review): pickle.loads is only safe because the queue is
        # internal/trusted -- never expose this path to external input.
        uid, tid, dir, ip, organic, cheater = pickle.loads(msg.body)
        voter = Account._byID(uid, data=True)
        votee = Thing._by_fullname(tid, data=True)
        if isinstance(votee, Comment):
            comments.append(votee)
        print(voter, votee, dir, ip, organic, cheater)
        handle_vote(voter, votee, dir, ip, organic, cheater=cheater)
    update_comment_votes(comments)
def new_vote(vote, foreground=False):
    """Update cached listings affected by a vote on a link or comment."""
    user = vote._thing1
    item = vote._thing2

    if not isinstance(item, (Link, Comment)):
        return

    if vote.valid_thing and not item._spam and not item._deleted:
        sr = item.subreddit_slow
        results = []

        author = Account._byID(item.author_id)
        for sort in ('hot', 'top', 'controversial', 'new'):
            if isinstance(item, Link):
                results.append(get_submitted(author, sort, 'all'))
            if isinstance(item, Comment):
                results.append(get_comments(author, sort, 'all'))

        if isinstance(item, Link):
            # don't do 'new', because that was done by new_link, and
            # the time-filtered versions of top/controversial will be
            # done by mr_top
            results.extend([get_links(sr, 'hot', 'all'),
                            get_links(sr, 'top', 'all'),
                            get_links(sr, 'controversial', 'all')])
            for domain in utils.UrlParser(item.url).domain_permutations():
                for sort in ("hot", "top", "controversial"):
                    results.append(get_domain_links(domain, sort, "all"))

        add_queries(results, insert_items=item, foreground=foreground)

    if isinstance(item, Link):
        # must update both because we don't know if it's a changed vote
        dispatch = {'1': ([get_liked(user)], [get_disliked(user)]),
                    '-1': ([get_disliked(user)], [get_liked(user)])}
        default = ([], [get_liked(user), get_disliked(user)])
        to_insert, to_delete = dispatch.get(vote._name, default)
        with CachedQueryMutator() as m:
            for query in to_insert:
                m.insert(query, [vote])
            for query in to_delete:
                m.delete(query, [vote])
def new_comment(comment, inbox_rels):
    """Update cached queries for a new (or newly deleted) comment."""
    author = Account._byID(comment.author_id)
    job = [get_comments(author, sort, 'all')
           for sort in ('new', 'top', 'controversial')]
    sr = Subreddit._byID(comment.sr_id)

    if comment._deleted:
        job_key = "delete_items"
        job.append(get_sr_comments(sr))
        job.append(get_all_comments())
    else:
        job_key = "insert_items"
        if comment._spam:
            job.append(get_spam_comments(sr))
        amqp.add_item('new_comment', comment._fullname)
        if not g.amqp_host:
            add_comment_tree([comment])

    job_dict = {job_key: comment}
    add_queries(job, **job_dict)

    # note that get_all_comments() is updated by the amqp process
    # r2.lib.db.queries.run_new_comments (to minimise lock contention)

    if inbox_rels:
        for inbox_rel in tup(inbox_rels):
            inbox_owner = inbox_rel._thing1
            job_dict = {job_key: inbox_rel}
            if inbox_rel._name == "inbox":
                inbox_func = get_inbox_comments
                unread_func = get_unread_comments
            elif inbox_rel._name == "selfreply":
                inbox_func = get_inbox_selfreply
                unread_func = get_unread_selfreply
            else:
                raise ValueError("wtf is " + inbox_rel._name)

            add_queries([inbox_func(inbox_owner)], **job_dict)
            if comment._deleted:
                add_queries([unread_func(inbox_owner)], **job_dict)
            else:
                set_unread(comment, inbox_owner, True)
def edit_campaign(link, campaign, dates, bid, cpm, target, priority, location):
    """Apply edits to a campaign, logging each field that changed."""
    changed = {}

    if bid != campaign.bid:
        # if the bid amount changed, cancel any pending transactions
        void_campaign(link, campaign, reason='changed_bid')
        changed['bid'] = ("$%0.2f" % campaign.bid, "$%0.2f" % bid)
        campaign.bid = bid
    if dates[0] != campaign.start_date or dates[1] != campaign.end_date:
        original = '%s to %s' % (campaign.start_date, campaign.end_date)
        edited = '%s to %s' % (dates[0], dates[1])
        changed['dates'] = (original, edited)
        campaign.start_date = dates[0]
        campaign.end_date = dates[1]
    if cpm != campaign.cpm:
        changed['cpm'] = (campaign.cpm, cpm)
        campaign.cpm = cpm
    if target != campaign.target:
        changed['target'] = (campaign.target, target)
        campaign.target = target
    if priority != campaign.priority:
        changed['priority'] = (campaign.priority.name, priority.name)
        campaign.priority = priority
    if location != campaign.location:
        changed['location'] = (campaign.location, location)
        campaign.location = location

    change_strs = ['%s: %s -> %s' % (field, before, after)
                   for field, (before, after) in changed.iteritems()]
    change_text = ', '.join(change_strs)

    campaign._commit()

    # update the index
    PromotionWeights.reschedule(link, campaign._id,
                                campaign.target.subreddit_names,
                                dates[0], dates[1], bid)

    if campaign.priority.cpm:
        # make it a freebie, if applicable
        author = Account._byID(link.author_id, True)
        if getattr(author, "complimentary_promos", False):
            free_campaign(link, campaign, c.user)

    # record the changes
    if change_text:
        PromotionLog.add(link, 'edited %s: %s' % (campaign, change_text))

    hooks.get_hook('promote.edit_campaign').call(link=link, campaign=campaign)
def new_comment(comment, inbox_rels):
    """Update cached queries for a new (or newly deleted) comment."""
    author = Account._byID(comment.author_id)
    job = [get_comments(author, sort, 'all')
           for sort in ('new', 'top', 'controversial')]
    sr = Subreddit._byID(comment.sr_id)

    with CachedQueryMutator() as m:
        if comment._deleted:
            job_key = "delete_items"
            job.append(get_sr_comments(sr))
            job.append(get_all_comments())
        else:
            job_key = "insert_items"
            if comment._spam:
                m.insert(get_spam_comments(sr), [comment])
                if was_spam_filtered(comment):
                    m.insert(get_spam_filtered_comments(sr), [comment])
            amqp.add_item('new_comment', comment._fullname)
            if not g.amqp_host:
                add_comment_tree([comment])

        job_dict = {job_key: comment}
        add_queries(job, **job_dict)

        # note that get_all_comments() is updated by the amqp process
        # r2.lib.db.queries.run_new_comments (to minimise lock contention)

        if inbox_rels:
            for inbox_rel in tup(inbox_rels):
                inbox_owner = inbox_rel._thing1
                if inbox_rel._name == "inbox":
                    query = get_inbox_comments(inbox_owner)
                elif inbox_rel._name == "selfreply":
                    query = get_inbox_selfreply(inbox_owner)
                else:
                    raise ValueError("wtf is " + inbox_rel._name)

                if comment._deleted:
                    m.delete(query, [inbox_rel])
                else:
                    m.insert(query, [inbox_rel])

                set_unread(comment, inbox_owner,
                           unread=not comment._deleted, mutator=m)
def comment_exists(post, comment): # Check if this comment already exists using brutal compare on content # BeautifulSoup is used to parse as HTML in order to remove markup content = ''.join(BeautifulSoup(comment['body']).findAll(text=True)) key = re_non_alphanum.sub('', content) existing_comments = Comment._query(Comment.c.link_id == post._id, Comment.c.ob_imported == True, data=True) for existing_comment in existing_comments: author = Account._byID(existing_comment.author_id, data=True) content = ''.join(BeautifulSoup(existing_comment.body).findAll(text=True)) existing_key = re_non_alphanum.sub('', content) if key == existing_key: print " Skipping existing %s" % comment_excerpt(comment) return True # else: # print "%s *|NOT|* %s" % (key, existing_key) return False
def edit_campaign(link, campaign, dates, bid, cpm, sr, priority, location):
    """Apply edits to a campaign, rescheduling and logging the changes."""
    sr_name = sr.name if sr else ''  # empty string means target to all

    changed = {}
    if bid != campaign.bid:
        changed['bid'] = ("$%0.2f" % campaign.bid, "$%0.2f" % bid)
    if dates[0] != campaign.start_date or dates[1] != campaign.end_date:
        original = '%s to %s' % (campaign.start_date, campaign.end_date)
        edited = '%s to %s' % (dates[0], dates[1])
        changed['dates'] = (original, edited)
    if cpm != campaign.cpm:
        changed['cpm'] = (campaign.cpm, cpm)
    if sr_name != campaign.sr_name:
        format_sr_name = (lambda name: '/r/%s' % name if name
                          else '<frontpage>')
        changed['sr_name'] = map(format_sr_name,
                                 (campaign.sr_name, sr_name))
    if priority != campaign.priority:
        changed['priority'] = (campaign.priority.name, priority.name)

    change_strs = ['%s: %s -> %s' % (field, values[0], values[1])
                   for field, values in changed.iteritems()]
    change_text = ', '.join(change_strs)

    # if the bid amount changed, cancel any pending transactions
    if campaign.bid != bid:
        void_campaign(link, campaign, reason='changed_bid')

    # update the schedule
    PromotionWeights.reschedule(link, campaign._id, sr_name,
                                dates[0], dates[1], bid)

    # update values in the db
    campaign.update(dates[0], dates[1], bid, cpm, sr_name,
                    campaign.trans_id, priority, location, commit=True)

    if campaign.priority.cpm:
        # make it a freebie, if applicable
        author = Account._byID(link.author_id, True)
        if getattr(author, "complimentary_promos", False):
            free_campaign(link, campaign, c.user)

    # record the changes
    if change_text:
        PromotionLog.add(link, 'edited %s: %s' % (campaign, change_text))

    hooks.get_hook('promote.edit_campaign').call(link=link, campaign=campaign)
def store_keys(key, maxes):
    # we're building queries using queries.py, but we could make the
    # queries ourselves if we wanted to avoid the individual lookups
    # for accounts and subreddits.
    # Note that we're only generating the 'sr-' type queries here, but
    # we're also able to process the other listings generated by the
    # old migrate.mr_permacache for convenience
    userrel_fns = {'liked': queries.get_liked,
                   'disliked': queries.get_disliked,
                   'saved': queries.get_saved,
                   'hidden': queries.get_hidden}

    def to_tuples(items):
        # each item is (score..., fullname); fullname goes first and the
        # scores become floats
        return [tuple([item[-1]] + map(float, item[:-1])) for item in items]

    if key.startswith('user-'):
        acc_str, keytype, account_id = key.split('-')
        fn = (queries._get_submitted if keytype == 'submitted'
              else queries._get_comments)
        q = fn(int(account_id), 'new', 'all')
        q._replace([(fname, float(timestamp))
                    for (timestamp, fname) in maxes])
    elif key.startswith('sr-'):
        sr_str, sort, time, sr_id = key.split('-')
        if sort == 'controversy':
            # I screwed this up in the mapper and it's too late to fix it
            sort = 'controversial'
        q = queries._get_links(int(sr_id), sort, time)
        q._replace(to_tuples(maxes))
    elif key.startswith('domain/'):
        d_str, sort, time, domain = key.split('/')
        q = queries.get_domain_links(domain, sort, time)
        q._replace(to_tuples(maxes))
    elif key.split('-')[0] in userrel_fns:
        key_type, account_id = key.split('-')
        fn = userrel_fns[key_type]
        q = fn(Account._byID(int(account_id)))
        q._replace(to_tuples(maxes))
def new(cls, user, thing):
    """Create (or return the pre-existing) report by user on thing."""
    from r2.lib.db import queries

    # check if this report exists already!
    rel = cls.rel(user, thing)
    q = rel._fast_query(user, thing, ['-1', '0', '1'])
    existing = [report for (tupl, report) in q.iteritems() if report]
    if existing:
        # stop if we've seen this before, so that we never get the
        # same report from the same user twice
        oldreport = existing[0]
        g.log.debug("Ignoring duplicate report %s" % oldreport)
        return oldreport

    r = Report(user, thing, '0')
    if not thing._loaded:
        thing._load()

    # mark item as reported
    try:
        thing._incr(cls._field)
    except (ValueError, TypeError):
        g.log.error("%r has bad field %r = %r"
                    % (thing, cls._field,
                       getattr(thing, cls._field, "(nonexistent)")))
        raise

    r._commit()

    if hasattr(thing, 'author_id'):
        author = Account._byID(thing.author_id, data=True)
        author._incr('reported')

    item_age = datetime.now(g.tz) - thing._date
    ignore_reports = getattr(thing, 'ignore_reports', False)
    if item_age.days < g.REPORT_AGE_LIMIT and not ignore_reports:
        # update the reports queue if it exists
        queries.new_report(thing, r)

        # if the thing is already marked as spam, accept the report
        if thing._spam:
            cls.accept(thing)
    else:
        g.log.debug("Ignoring report %s" % r)

    return r
def author_spammer(self, things, spam):
    """incr/decr the 'spammer' field for the author of every passed thing"""
    by_aid = {}
    for thing in things:
        # only count items banned by a human; skip autobanned ones
        if (hasattr(thing, 'author_id')
                and not getattr(thing, 'ban_info', {}).get('auto', True)):
            by_aid.setdefault(thing.author_id, []).append(thing)

    if not by_aid:
        return

    authors = Account._byID(by_aid.keys(), data=True, return_dict=True)
    for aid, author_things in by_aid.iteritems():
        delta = len(author_things)
        authors[aid]._incr('spammer', delta if spam else -delta)
def __init__(self, event, listing, show_sidebar, report_type):
    """Build the live-event page wrapper around listing."""
    self.event = event
    self.listing = listing
    if show_sidebar:
        self.discussions = LiveUpdateOtherDiscussions()
    self.show_sidebar = show_sidebar

    accounts = Account._byID(event.contributors.keys(),
                             data=True, return_dict=False)
    self.contributors = sorted((LiveUpdateAccount(account)
                                for account in accounts),
                               key=lambda e: e.name)
    self.report_types = REPORT_TYPES
    self.report_type = report_type

    Templated.__init__(self)
def process_message(msg):
    # Consume one vote message off the queue: validate it, load the voter
    # and voted-on thing, then create and commit a Vote under a per-pair
    # lock so concurrent votes on the same thing serialize.
    timer = g.stats.get_timer("new_voting.%s" % queue)
    timer.start()

    vote_data = json.loads(msg.body)

    # give plugins a chance to reject corrupt/invalid payloads
    hook = hooks.get_hook('vote.validate_vote_data')
    if hook.call_until_return(msg=msg, vote_data=vote_data) is False:
        # Corrupt records in the queue. Ignore them.
        print "Ignoring invalid vote by %s on %s %s" % (
                vote_data.get('user_id', '<unknown>'),
                vote_data.get('thing_fullname', '<unknown>'),
                vote_data)
        return

    # if it's an old-style vote, convert to the new format
    if "uid" in vote_data:
        vote_data = convert_old_vote_data(vote_data, msg.timestamp)

    user = Account._byID(vote_data.pop("user_id"), data=True)
    thing = Thing._by_fullname(vote_data.pop("thing_fullname"), data=True)

    timer.intermediate("preamble")

    # serialize votes by (user, thing) so a changed vote can't race itself
    lock_key = "vote-%s-%s" % (user._id36, thing._fullname)
    with g.make_lock("voting", lock_key, timeout=5):
        print "Processing vote by %s on %s %s" % (user, thing, vote_data)

        try:
            vote = Vote(
                user,
                thing,
                direction=vote_data["direction"],
                date=datetime.utcfromtimestamp(vote_data["date"]),
                data=vote_data["data"],
                event_data=vote_data.get("event_data"),
            )
        except TypeError as e:
            # a vote on an invalid type got in the queue, just skip it
            g.log.exception("Invalid type: %r", e.message)
            return

        timer.intermediate("create_vote_obj")

        vote.commit()

        timer.flush()
def new_vote(vote):
    """Update cached listings affected by a vote on a link."""
    user = vote._thing1
    item = vote._thing2

    if not isinstance(item, Link):
        return

    if vote.valid_thing and not item._spam and not item._deleted:
        sr = item.subreddit_slow
        results = []

        author = Account._byID(item.author_id)
        if author.gold:
            for sort in ('hot', 'top', 'controversial', 'new'):
                if isinstance(item, Link):
                    results.append(get_submitted(author, sort, 'all'))
                if isinstance(item, Comment):
                    results.append(get_comments(author, sort, 'all'))

        # don't do 'new', because that was done by new_link, and the
        # time-filtered versions of top/controversial will be done by
        # mr_top
        results.extend([get_links(sr, 'hot', 'all'),
                        get_links(sr, 'top', 'all'),
                        get_links(sr, 'controversial', 'all')])

        for domain in utils.UrlParser(item.url).domain_permutations():
            for sort in ("hot", "top", "controversial"):
                results.append(get_domain_links(domain, sort, "all"))

        add_queries(results, insert_items=item)

    vote._fast_query_timestamp_touch(user)

    # must update both because we don't know if it's a changed vote
    ops = {'1': ('insert_items', 'delete_items'),
           '-1': ('delete_items', 'insert_items')}
    liked_op, disliked_op = ops.get(vote._name,
                                    ('delete_items', 'delete_items'))
    add_queries([get_liked(user)], **{liked_op: vote})
    add_queries([get_disliked(user)], **{disliked_op: vote})
def charge_pending(offset=1): for l, camp, weight in accepted_campaigns(offset=offset): user = Account._byID(l.author_id) try: if (authorize.is_charged_transaction(camp.trans_id, camp._id) or not authorize.charge_transaction(user, camp.trans_id, camp._id)): continue if is_promoted(l): emailer.queue_promo(l, camp.bid, camp.trans_id) else: set_promote_status(l, PROMOTE_STATUS.pending) emailer.queue_promo(l, camp.bid, camp.trans_id) text = ('auth charge for campaign %s, trans_id: %d' % (camp._id, camp.trans_id)) PromotionLog.add(l, text) except: print "Error on %s, campaign %s" % (l, camp._id)
def __init__(self):
    """Populate top-user stats from the cache; empty tuples if missing."""
    Wrapped.__init__(self)
    cache_stats = cache.get('stats')
    if not cache_stats:
        self.top_users = self.top_day = self.top_week = ()
        return

    top_users, top_day, top_week = cache_stats

    #lookup user objs
    uids = list(top_users)
    uids.extend(u[0] for u in top_day)
    uids.extend(u[0] for u in top_week)
    users = Account._byID(uids, data=True)

    self.top_users = (users[u] for u in top_users)
    self.top_day = ((users[u[0]], u[1]) for u in top_day)
    self.top_week = ((users[u[0]], u[1]) for u in top_week)
def new_link(link):
    "Called on the submission and deletion of links"
    sr = Subreddit._byID(link.sr_id)
    author = Account._byID(link.author_id)

    # we don't have to do hot/top/controversy because new_vote will do that
    results = [get_links(sr, 'new', 'all'),
               get_submitted(author, 'new', 'all')]

    if link._spam:
        results.append(get_spam_links(sr))

    # only 'new' qualifies for insertion, which will be done in
    # run_new_links
    add_queries(results, insert_items=link)

    amqp.add_item('new_link', link._fullname)
def new_campaign(link, dates, target, frequency_cap, priority, location,
                 platform, mobile_os, ios_devices, ios_version_range,
                 android_devices, android_version_range,
                 total_budget_pennies, cost_basis, bid_pennies):
    """Create a PromoCampaign for link and register it for scheduling."""
    campaign = PromoCampaign.create(
        link, target, dates[0], dates[1], frequency_cap, priority, location,
        platform, mobile_os, ios_devices, ios_version_range, android_devices,
        android_version_range, total_budget_pennies, cost_basis, bid_pennies)
    PromotionWeights.add(link, campaign)
    PromotionLog.add(link, 'campaign %s created' % campaign._id)

    # house campaigns aren't billed, so freebie handling doesn't apply
    if not campaign.is_house:
        author = Account._byID(link.author_id, data=True)
        if getattr(author, "complimentary_promos", False):
            free_campaign(link, campaign, c.user)

    hooks.get_hook('promote.new_campaign').call(link=link, campaign=campaign)
    return campaign
def edit_campaign(link, campaign, dates, bid, cpm, sr, priority): sr_name = sr.name if sr else '' # empty string means target to all try: # if the bid amount changed, cancel any pending transactions if campaign.bid != bid: void_campaign(link, campaign) # update the schedule PromotionWeights.reschedule(link, campaign._id, sr_name, dates[0], dates[1], bid) # update values in the db campaign.update(dates[0], dates[1], bid, cpm, sr_name, campaign.trans_id, priority, commit=True) if campaign.priority.cpm: # record the transaction text = 'updated campaign %s. (bid: %0.2f)' % (campaign._id, bid) PromotionLog.add(link, text) # make it a freebie, if applicable author = Account._byID(link.author_id, True) if getattr(author, "complimentary_promos", False): free_campaign(link, campaign, c.user) hooks.get_hook('campaign.edit').call(link=link, campaign=campaign) except Exception, e: # record error and rethrow g.log.error( "Failed to update PromoCampaign %s on link %d. Error was: %r" % (campaign._id, link._id, e)) try: # wrapped in try/except so orig error won't be lost if commit fails text = 'update FAILED. (campaign: %s, bid: %.2f)' % (campaign._id, bid) PromotionLog.add(link, text) except: pass raise e
def __init__(self, event, perms_by_contributor, editable):
    """Build the sorted contributor rows for a live event."""
    self.event = event
    self.editable = editable

    accounts = Account._byID(perms_by_contributor.keys(), data=True)
    rows = [LiveUpdateContributor(account,
                                  perms_by_contributor[account._id])
            for account in accounts.itervalues()]
    rows.sort(key=lambda r: r.account.name)

    SimpleBuilder.__init__(
        self,
        rows,
        keep_fn=self.keep_item,
        wrap=self.wrap_item,
        skip=False,
        num=0,
    )
def merge(cls, room1, room2):
    """Create a new room one level deeper containing everyone from both
    rooms, then retire the originals and point them at the new room."""
    new_room_level = max(room1.level, room2.level) + 1
    all_participant_ids = (room1.get_all_participants() |
                           room2.get_all_participants())
    all_participants = Account._byID(all_participant_ids,
                                     data=True, return_dict=False)

    new_room = cls.create(level=new_room_level)
    new_room.add_participants(all_participants)
    new_room.persist_computed_name()

    for old_room in (room1, room2):
        old_room.last_reap_time = datetime.now(g.tz)
        old_room.is_alive = False
        old_room.is_merged = True
        old_room.next_room = new_room.id
        old_room._commit()

    return new_room