def link_comments(link_id, _update=False):
    """Return the cached comment-tree tuple for a link, rebuilding on miss.

    Returns (cids, cid_tree, depth, num_children).  The comment count is
    computed during a rebuild but stored on the Link rather than cached.
    """
    key = comments_key(link_id)
    r = g.permacache.get(key)
    if r and not _update:
        return r
    # This operation can take longer than most (note the inner locks);
    # better to time out the request temporarily than to deal with an
    # inconsistent tree.
    with g.make_lock(lock_key(link_id), timeout=180):
        r = _load_link_comments(link_id)
        cids, cid_tree, depth, num_children, num_comments = r
        # Remove num_comments from r; we don't need to cache it.
        r = r[:-1]
        # rebuild the child -> parent lookup
        g.permacache.set(parent_comments_key(link_id),
                         _parent_dict_from_tree(cid_tree))
        g.permacache.set(key, r)
        # update the link's comment count and schedule it for search
        # reindexing
        link = Link._byID(link_id, data=True)
        link.num_comments = num_comments
        link._commit()
        from r2.lib.db.queries import changed
        changed(link)
        return r
def link_comments(link_id, _update=False):
    """Fetch (or rebuild) the cached comment-tree tuple for a link."""
    key = comments_key(link_id)
    cached = g.permacache.get(key)
    if cached and not _update:
        return cached
    else:
        # This operation can take longer than most (note the inner
        # locks); better to time out a request temporarily than to
        # serve an inconsistent tree.
        with g.make_lock(lock_key(link_id), timeout=180):
            tree_data = _load_link_comments(link_id)
            cids, cid_tree, depth, num_children, num_comments = tree_data
            # num_comments lives on the Link, not in the cache.
            tree_data = tree_data[:-1]
            # rebuild the child -> parent lookup as well
            g.permacache.set(parent_comments_key(link_id),
                             _parent_dict_from_tree(cid_tree))
            g.permacache.set(key, tree_data)
            # update the link's comment count and schedule it for
            # search reindexing
            link = Link._byID(link_id, data=True)
            link.num_comments = num_comments
            link._commit()
            from r2.lib.db.queries import changed
            changed(link)
            return tree_data
def submit_rss_links(srname,rss,user,titlefield='title',linkfield='link'): #F**k the API, let's just do it the way we would if we were really doing it. This avoids screwing around with cookies and so forth... feed=fetch_feed(rss) if feed is None: return ac=Account._byID(user) sr=Subsciteit._by_name(srname) ip='0.0.0.0' niceify=False if domain(rss)=="arxiv.org": niceify=dict(find="\(arXiv:.*?\)",replace="") #Let's randomize why not... random.shuffle(feed.entries) for article in feed.entries: #This can take all night if it has to, we don't want to hammer the server into oblivios sleep(1) kw = fetch_article(article,titlefield=titlefield,linkfield=linkfield,niceify=niceify) if kw is None: continue l = Link._submit(kw['title'],kw['link'],ac,sr,ip,spam=False) l._commit() l.set_url_cache() #We don't really need auto-submitted links to be vote on... queries.queue_vote(ac,l,True,ip,cheater=False) queries.new_link(l) changed(l) print "Submitted %s" % article[titlefield] sleep(.1) return
def _new(cls, author, link, parent, body, ip):
    """Create and commit a new comment; return (comment, inbox_rel)."""
    from r2.lib.db.queries import changed

    c = Comment(_ups=1,
                body=body,
                link_id=link._id,
                sr_id=link.sr_id,
                author_id=author._id,
                ip=ip)
    c._spam = author._spam

    # these props aren't relations
    if parent:
        c.parent_id = parent._id

    link._incr("num_comments", 1)

    # figure out who (if anyone) should be notified
    to = None
    name = "inbox"
    if parent:
        to = Account._byID(parent.author_id, True)
    elif link.is_self and not link.noselfreply:
        to = Account._byID(link.author_id, True)
        name = "selfreply"

    c._commit()

    changed(link, True)  # only the number of comments has changed

    inbox_rel = None
    # only global admins can be message spammed.
    if to and (not c._spam or to.name in g.admins):
        inbox_rel = Inbox._add(to, c, name)

    return (c, inbox_rel)
def _new(cls, author, link, parent, body, ip, criticism=False):
    """Create and commit a comment (or criticism); return (comment, inbox_rel)."""
    from r2.lib.db.queries import changed

    # We're turning it off for now...
    criticism = False

    c = Comment(_ups=1,
                body=body,
                link_id=link._id,
                sr_id=link.sr_id,
                author_id=author._id,
                ip=ip)
    c._spam = author._spam
    c.criticism = criticism

    # these props aren't relations
    if parent:
        c.parent_id = parent._id

    # Increment the right counter based on the criticism flag.
    # Each criticism should record the root author and root id; the
    # problem is the id isn't created yet if we're the root, so we have
    # to be clever (rootid=False marks "I am the root").
    if criticism:
        link._incr("num_criticisms", 1)
        if parent:
            c.rootauthor = parent.rootauthor
            if parent.rootid:
                c.rootid = parent.rootid
            else:
                c.rootid = parent._id
        else:
            c.rootauthor = author._id
            c.rootid = False
    else:
        link._incr('num_comments', 1)

    to = None
    name = 'inbox'
    if parent:
        to = Account._byID(parent.author_id, True)
    elif link.is_self and not link.noselfreply:
        to = Account._byID(link.author_id, True)
        name = 'selfreply'

    c._commit()

    changed(link, True)  # link's number of comments changed

    inbox_rel = None
    # only global admins can be message spammed.
    # Don't send the message if the recipient has blocked the author.
    if to and ((not c._spam and author._id not in to.enemies)
               or to.name in g.admins):
        # When replying to your own comment, record the inbox
        # relation, but don't give yourself an orangered
        orangered = (to.name != author.name)
        inbox_rel = Inbox._add(to, c, name, orangered=orangered)

    return (c, inbox_rel)
def delete_comment(comment):
    """Remove a comment from its link's CommentTree and reindex the link."""
    link = Link._byID(comment.link_id, data=True)
    timer = g.stats.get_timer('comment_tree.delete.%s'
                              % link.comment_tree_version)
    timer.start()
    with CommentTree.mutation_context(link):
        timer.intermediate('lock')
        cache = get_comment_tree(link)
        timer.intermediate('get')
        cache.delete_comment(comment, link)
        timer.intermediate('update')
        # the tree update changed the link's comment count; schedule it
        # for search reindexing
        from r2.lib.db.queries import changed
        changed([link])
        timer.intermediate('changed')
    timer.stop()
def get_comment_tree(link, _update=False, timer=None):
    """Return the link's CommentTree, rebuilding on miss or when forced."""
    if timer is None:
        timer = SimpleSillyStub()
    cache = CommentTree.by_link(link)
    timer.intermediate('load')
    if cache and not _update:
        return cache
    with CommentTree.mutation_context(link, timeout=180):
        timer.intermediate('lock')
        cache = CommentTree.rebuild(link)
        timer.intermediate('rebuild')
        # the tree rebuild updated the link's comment count, so schedule it
        # for search reindexing
        from r2.lib.db.queries import changed
        changed([link])
        timer.intermediate('changed')
        return cache
def _new(cls, author, link, parent, body, ip):
    """Create and commit a comment; return (comment, inbox_rel)."""
    from r2.lib.db.queries import changed

    c = Comment(_ups=1,
                body=body,
                link_id=link._id,
                sr_id=link.sr_id,
                author_id=author._id,
                ip=ip)
    c._spam = author._spam
    if author._spam:
        g.stats.simple_event('spam.autoremove.comment')

    # these props aren't relations
    if parent:
        c.parent_id = parent._id

    link._incr('num_comments', 1)

    # decide who gets the inbox notification, if anyone
    to = None
    name = 'inbox'
    if parent:
        to = Account._byID(parent.author_id, True)
    elif link.is_self and not link.noselfreply:
        to = Account._byID(link.author_id, True)
        name = 'selfreply'

    c._commit()

    changed(link, True)  # link's number of comments changed

    inbox_rel = None
    # only global admins can be message spammed.
    # Don't send the message if the recipient has blocked the author.
    if to and ((not c._spam and author._id not in to.enemies)
               or to.name in g.admins):
        # When replying to your own comment, record the inbox
        # relation, but don't give yourself an orangered
        orangered = (to.name != author.name)
        inbox_rel = Inbox._add(to, c, name, orangered=orangered)

    return (c, inbox_rel)
def submit_link(user, subreddit, title, url, thumb_url): account = Account._by_name(user) subreddit = Subreddit._by_name(subreddit) ip = '127.0.0.1' # submit the link link = Link._submit(title, url, account, subreddit, ip, spam=False) # force the thumbnail before scraper_q gets in the mix image_data = urllib.urlopen(thumb_url).read() force_thumbnail(link, image_data) # various backend processing things queries.queue_vote(account, link, True, ip) queries.new_link(link) queries.changed(link) print link.make_permalink_slow()
def _new(cls, author, link, parent, body, ip):
    """Create and commit a comment; return (comment, inbox_rel)."""
    from r2.lib.db.queries import changed

    c = Comment(_ups=1, body=body, link_id=link._id, sr_id=link.sr_id,
                author_id=author._id, ip=ip)
    c._spam = author._spam

    # these props aren't relations
    if parent:
        c.parent_id = parent._id

    link._incr('num_comments', 1)

    to = None
    name = 'inbox'
    if parent:
        to = Account._byID(parent.author_id, True)
    elif link.is_self and not link.noselfreply:
        to = Account._byID(link.author_id, True)
        name = 'selfreply'

    c._commit()
    changed(link, True)  # link's number of comments changed

    inbox_rel = None
    # only global admins can be message spammed.
    # Don't send the message if the recipient has blocked the author.
    if to and ((not c._spam and author._id not in to.enemies)
               or to.name in g.admins):
        # When replying to your own comment, record the inbox
        # relation, but don't give yourself an orangered
        orangered = (to.name != author.name)
        inbox_rel = Inbox._add(to, c, name, orangered=orangered)

    return (c, inbox_rel)
def delete_comment(comment):
    """Drop a childless comment from the cached tree and fix the count."""
    with g.make_lock(lock_key(comment.link_id)):
        cids, comment_tree, depth, num_children = \
            link_comments(comment.link_id)

        # only completely remove comments with no children
        if comment._id not in comment_tree:
            if comment._id in cids:
                cids.remove(comment._id)
            if comment._id in depth:
                del depth[comment._id]
            if comment._id in num_children:
                del num_children[comment._id]
            g.permacache.set(comments_key(comment.link_id),
                             (cids, comment_tree, depth, num_children))

        # update the link's comment count and schedule it for search
        # reindexing
        link = Link._byID(comment.link_id, data=True)
        link._incr('num_comments', -1)
        from r2.lib.db.queries import changed
        changed(link)
def delete_comment(comment):
    """Remove a childless comment from the cached comment tree."""
    with g.make_lock(lock_key(comment.link_id)):
        cids, comment_tree, depth, num_children = link_comments(
            comment.link_id)

        cid = comment._id
        # only completely remove comments with no children
        if cid not in comment_tree:
            if cid in cids:
                cids.remove(cid)
            if cid in depth:
                del depth[cid]
            if cid in num_children:
                del num_children[cid]
            g.permacache.set(comments_key(comment.link_id),
                             (cids, comment_tree, depth, num_children))

        # update the link's comment count and schedule it for search
        # reindexing
        link = Link._byID(comment.link_id, data=True)
        link._incr('num_comments', -1)
        from r2.lib.db.queries import changed
        changed(link)
def _new(cls, author, link, parent, body, ip):
    """Create and commit a comment; return (comment, inbox_rel)."""
    from r2.lib.db.queries import changed

    c = Comment(_ups=1, body=body, link_id=link._id, sr_id=link.sr_id,
                author_id=author._id, ip=ip)
    c._spam = author._spam

    # these props aren't relations
    if parent:
        c.parent_id = parent._id

    link._incr('num_comments', 1)

    to = None
    name = 'inbox'
    if parent:
        to = Account._byID(parent.author_id, True)
    elif link.is_self and not link.noselfreply:
        to = Account._byID(link.author_id, True)
        name = 'selfreply'

    c._commit()
    changed(link, True)  # only the number of comments has changed

    inbox_rel = None
    # only global admins can be message spammed.
    if to and (not c._spam or to.name in g.admins):
        inbox_rel = Inbox._add(to, c, name)

    return (c, inbox_rel)
def load_fixtures(num_votes = 10): accounts = [] for name, account_data in load_fixture('accounts').items(): print "creating account %r" % (name,) try: a = Account._by_name(name) except NotFound: a = Account(name=name, password=bcrypt_password(account_data['password'])) # new accounts keep the profanity filter settings until opting out for key, val in account_data.items(): if key in ('password',): continue setattr(a, key, val) a._commit() # clear the caches Account._by_name(name, _update = True) Account._by_name(name, allow_deleted = True, _update = True) accounts.append(a) for name, subreddit_data in load_fixture('subreddits').items(): print "creating subreddit %r" % (name,) try: sr = Subreddit._by_name(name) except NotFound: author = Account._by_name(subreddit_data['author']) sr = Subreddit._new(name = name, title = subreddit_data['title'], author_id = author._id, ip = subreddit_data['ip']) for key, val in subreddit_data.items(): if key in ('author', 'ip', 'title', 'subscribers'): continue if val is None or val == '': continue setattr(sr, key, val) sr._downs = 10 sr._commit() for sub_name in subreddit_data.get('subscribers', []): subscriber = Account._by_name(sub_name) Subreddit.subscribe_defaults(subscriber) if sr.add_subscriber(subscriber): sr._incr('_ups', 1) queries.changed(sr, True) for mod_name in subreddit_data.get('moderators', []): moderator = Account._by_name(mod_name) sr.add_moderator(moderator) # defined here so it has access to the 'authors' var def load_comments(link, comments, parent_comment=None): for comment_data in comments: comment_author = Account._by_name(comment_data['author']) (c, inbox_rel) = Comment._new(comment_author, link, parent_comment, comment_data['body'], comment_data['ip']) queries.new_comment(c, inbox_rel) for i in range(int(random.betavariate(2, 8) * 10)): another_user = random.choice(accounts) v = Vote.vote(another_user, c, True, '127.0.0.1') queries.new_vote(v) if comment_data.has_key('comments'): load_comments(link, 
comment_data['comments'], c) for link_label, link_data in load_fixture('links').items(): print "creating link for %r" % (link_data['title'],) author = Account._by_name(link_data['author']) sr = Subreddit._by_name(link_data['sr']) link = Link._submit(link_data['title'], link_data['url'], author, sr, link_data['ip']) for key, val in link_data.items(): if key in ('title', 'url', 'author', 'sr', 'comments'): continue if val is None or val == '': continue setattr(link, key, val) link._commit() queries.new_link(link) like = random.randint(50,100) for i in range(int(random.betavariate(2, 8) * 5 * num_votes)): user = random.choice(accounts) v = Vote.vote(user, link, random.randint(0, 100) <= like, '127.0.0.1') queries.new_vote(v) if link_data.has_key('comments'): load_comments(link, link_data['comments']) queries.worker.join()
def cast_vote(sub, obj, dir, ip, vote_info, cheater, timer, date):
    """Register a vote by `sub` on `obj`; update scores, karma, and index."""
    from r2.models.admintools import valid_user, valid_thing, update_score
    from r2.lib.count import incr_sr_count
    from r2.lib.db import queries

    names_by_dir = {True: "1", None: "0", False: "-1"}

    # `vote` mimics the old pg vote rel interface so downstream code doesn't
    # need to change. (but it totally needn't stay that way forever!)
    vote = Storage(
        _thing1=sub,
        _thing2=obj,
        _name=names_by_dir[dir],
        _date=date,
        valid_thing=True,
        valid_user=True,
        ip=ip,
    )

    # these track how much ups/downs should change on `obj`
    ups_delta = 1 if int(vote._name) > 0 else 0
    downs_delta = 1 if int(vote._name) < 0 else 0

    # see if the user has voted on this thing before
    old_votes = VoteDetailsByThing.get_details(obj, [sub])
    old_vote = old_votes[0] if old_votes else None
    timer.intermediate("cass_read_vote")

    if old_vote:
        vote._date = datetime.utcfromtimestamp(
            old_vote["date"]).replace(tzinfo=pytz.UTC)
        vote.valid_thing = old_vote["valid_thing"]
        vote.valid_user = old_vote["valid_user"]
        vote.ip = old_vote["ip"]

        if vote._name == old_vote["direction"]:
            # the old vote and new vote are the same. bail out.
            return vote

        # remove the old vote from the score
        old_direction = int(old_vote["direction"])
        ups_delta -= 1 if old_direction > 0 else 0
        downs_delta -= 1 if old_direction < 0 else 0

    # calculate valid_thing and valid_user
    sr = obj.subreddit_slow
    kind = obj.__class__.__name__.lower()
    karma = sub.karma(kind, sr)
    if vote.valid_thing:
        vote.valid_thing = valid_thing(vote, karma, cheater, vote_info)
    if vote.valid_user:
        vote.valid_user = vote.valid_thing and valid_user(vote, sr, karma)
    if kind == "link" and getattr(obj, "is_self", False):
        # self-posts do not generate karma
        vote.valid_user = False
    g.stats.simple_event("vote.valid_thing." + str(vote.valid_thing).lower())
    g.stats.simple_event("vote.valid_user." + str(vote.valid_user).lower())

    # update various score/karma/vote counts
    if not (not old_vote and obj.author_id == sub._id and vote._name == "1"):
        # newly created objects start out with _ups = 1, so we skip updating
        # their score here if this is the author's own initial vote on it.
        old_valid_thing = old_vote["valid_thing"] if old_vote else True
        update_score(obj, ups_delta, downs_delta, vote, old_valid_thing)
        timer.intermediate("pg_update_score")

    if vote.valid_user:
        author = Account._byID(obj.author_id, data=True)
        author.incr_karma(kind, sr, ups_delta - downs_delta)
        timer.intermediate("pg_incr_karma")

    if not old_vote and vote.valid_thing and kind == "link":
        if sub._id != obj.author_id:
            incr_sr_count(sr)
        timer.intermediate("incr_sr_counts")

    # write the vote to cassandra
    VotesByAccount.copy_from(vote, vote_info)
    timer.intermediate("cassavotes")

    # update the search index
    queries.changed(vote._thing2, boost_only=True)
    timer.intermediate("changed")

    return vote
def cast_vote(sub, obj, dir, ip, vote_info, cheater, timer, date):
    """Register a vote by `sub` on `obj` (postgres-backed vote storage)."""
    from r2.models.admintools import valid_user, valid_thing, update_score
    from r2.lib.count import incr_sr_count
    from r2.lib.db import queries

    names_by_dir = {True: "1", None: "0", False: "-1"}

    # `vote` mimics the old pg vote rel interface so downstream code doesn't
    # need to change. (but it totally needn't stay that way forever!)
    vote = Storage(
        _thing1=sub,
        _thing2=obj,
        _name=names_by_dir[dir],
        _date=date,
        valid_thing=True,
        valid_user=True,
        ip=ip,
    )

    # these track how much ups/downs should change on `obj`
    ups_delta = 1 if int(vote._name) > 0 else 0
    downs_delta = 1 if int(vote._name) < 0 else 0

    # see if the user has voted on this thing before
    pgrel = Vote.rel(sub, obj)
    pgoldvote = pgrel._fast_query(sub, obj, ["-1", "0", "1"]).values()
    try:
        pgoldvote = filter(None, pgoldvote)[0]
    except IndexError:
        pgoldvote = None
    timer.intermediate("pg_read_vote")

    if pgoldvote:
        # old_vote is mimicking `{Link,Comment}VoteDetailsByThing` here
        # because that will eventually be exactly what it is
        old_vote = {
            "direction": pgoldvote._name,
            "valid_thing": pgoldvote.valid_thing,
            "valid_user": pgoldvote.valid_user,
            "ip": getattr(pgoldvote, "ip", None),
        }
        vote.valid_thing = old_vote["valid_thing"]
        vote.valid_user = old_vote["valid_user"]

        if vote._name == old_vote["direction"]:
            # the old vote and new vote are the same. bail out.
            return vote

        # remove the old vote from the score
        old_direction = int(old_vote["direction"])
        ups_delta -= 1 if old_direction > 0 else 0
        downs_delta -= 1 if old_direction < 0 else 0
    else:
        old_vote = None

    # calculate valid_thing and valid_user
    sr = obj.subreddit_slow
    kind = obj.__class__.__name__.lower()
    karma = sub.karma(kind, sr)
    if vote.valid_thing:
        vote.valid_thing = valid_thing(vote, karma, cheater, vote_info)
    if vote.valid_user:
        vote.valid_user = vote.valid_thing and valid_user(vote, sr, karma)
    if kind == "link" and getattr(obj, "is_self", False):
        # self-posts do not generate karma
        vote.valid_user = False
    g.stats.simple_event("vote.valid_thing." + str(vote.valid_thing).lower())
    g.stats.simple_event("vote.valid_user." + str(vote.valid_user).lower())

    # write out the new/modified vote to postgres
    if pgoldvote:
        pgvote = pgoldvote
        pgvote._name = vote._name
    else:
        pgvote = pgrel(sub, obj, vote._name, date=vote._date, ip=ip)
    pgvote.valid_thing = vote.valid_thing
    pgvote.valid_user = vote.valid_user
    pgvote._commit()
    timer.intermediate("pg_write_vote")

    # update various score/karma/vote counts
    if not (not old_vote and obj.author_id == sub._id and vote._name == "1"):
        # newly created objects start out with _ups = 1, so we skip updating
        # their score here if this is the author's own initial vote on it.
        old_valid_thing = old_vote["valid_thing"] if old_vote else True
        update_score(obj, ups_delta, downs_delta, vote, old_valid_thing)
        timer.intermediate("pg_update_score")

    if vote.valid_user:
        author = Account._byID(obj.author_id, data=True)
        author.incr_karma(kind, sr, ups_delta - downs_delta)
        timer.intermediate("pg_incr_karma")

    if not old_vote and vote.valid_thing and kind == "link":
        if sub._id != obj.author_id:
            incr_sr_count(sr)
        timer.intermediate("incr_sr_counts")

    # write the vote to cassandra
    VotesByAccount.copy_from(vote, vote_info)
    timer.intermediate("cassavotes")

    # update the search index
    queries.changed(vote._thing2, boost_only=True)
    timer.intermediate("changed")

    return vote
def post_if_goal_reached(date):
    """Submit the daily gold-goal tracking post once revenue crosses goal.

    No-ops if the day was already submitted or the goal bucket is zero;
    otherwise submits a self-post and messages that day's gold buyers.
    """
    # bail out if this day's already been submitted
    for link in get_recent_name_submissions():
        if link.revenue_date == date:
            return

    revenue = gold_revenue_multi([date]).get(date, 0)
    goal = gold_goal_on(date)
    percent = revenue / float(goal)
    bucket = int(percent)
    if bucket == 0:
        return

    buyer_count = len(gold_buyers_on(date))

    link = Link._submit(
        title=date.strftime("%a %Y-%m-%d"),
        url="self",
        author=SYSTEM_ACCOUNT,
        sr=SERVERNAME_SR,
        ip="127.0.0.1",
        spam=False,
    )

    # assemble the selftext from a random template plus the boilerplate
    template_wp = WikiPage.get(SERVERNAME_SR, "templates/selftext")
    template = random.choice(template_wp._get("content").split("\r\n---\r\n"))
    boilerplate = WikiPage.get(SERVERNAME_SR,
                               "templates/boilerplate")._get("content")
    selftext_template = template + "\n\n---\n\n" + boilerplate

    link.flair_text = "Name pending..."
    link.flair_css_class = "goal-bucket-%d-active" % bucket
    link.revenue_date = date
    link.revenue_bucket = bucket
    link.server_names = []
    link.url = link.make_permalink(SERVERNAME_SR)
    link.selftext = selftext_template % {
        "percent": int(percent * 100),
        "buyers": buyer_count,
    }
    link.is_self = True
    link._commit()

    UPVOTE = True
    queries.queue_vote(SYSTEM_ACCOUNT, link, UPVOTE, "127.0.0.1")
    queries.new_link(link)
    queries.changed(link)

    # notify everyone who bought gold that day
    template = WikiPage.get(SERVERNAME_SR,
                            "templates/notification-message")._get("content")
    subject_template, sep, body_template = template.partition("\r\n")
    for id in gold_buyers_on(date):
        recipient = Account._byID(id, data=True)
        send_system_message(
            recipient,
            subject_template,
            body_template % {
                "percent": int(percent * 100),
                "buyers": buyer_count,
                "user": recipient.name,
                "link": link.url,
            },
        )
def vote(cls, sub, obj, dir, ip, organic=False, cheater=False, timer=None):
    """Cast or update `sub`'s vote on `obj` and propagate score/karma."""
    from admintools import valid_user, valid_thing, update_score
    from r2.lib.count import incr_sr_count
    from r2.lib.db import queries

    if timer is None:
        timer = SimpleSillyStub()

    sr = obj.subreddit_slow
    kind = obj.__class__.__name__.lower()
    karma = sub.karma(kind, sr)
    is_self_link = (kind == 'link' and getattr(obj, 'is_self', False))

    # check for old vote
    rel = cls.rel(sub, obj)
    oldvote = rel._fast_query(sub, obj, ['-1', '0', '1']).values()
    oldvote = filter(None, oldvote)
    timer.intermediate("pg_read_vote")

    amount = 1 if dir is True else 0 if dir is None else -1

    is_new = False
    if len(oldvote):
        # old vote: flip its direction
        v = oldvote[0]
        oldamount = int(v._name)
        v._name = str(amount)

        # these still need to be recalculated
        old_valid_thing = getattr(v, 'valid_thing', False)
        v.valid_thing = (valid_thing(v, karma, cheater=cheater)
                         and getattr(v, 'valid_thing', False))
        v.valid_user = (getattr(v, 'valid_user', False)
                        and v.valid_thing
                        and valid_user(v, sr, karma))
    else:
        # new vote
        is_new = True
        oldamount = 0
        v = rel(sub, obj, str(amount))
        v.ip = ip
        old_valid_thing = v.valid_thing = \
            valid_thing(v, karma, cheater=cheater)
        v.valid_user = (v.valid_thing and valid_user(v, sr, karma)
                        and not is_self_link)
        if organic:
            v.organic = organic

    v._commit()
    timer.intermediate("pg_write_vote")

    up_change, down_change = score_changes(amount, oldamount)

    if not (is_new and obj.author_id == sub._id and amount == 1):
        # we don't do this if it's the author's initial automatic
        # vote, because we checked it in with _ups == 1
        update_score(obj, up_change, down_change, v, old_valid_thing)
        timer.intermediate("pg_update_score")

    if v.valid_user:
        author = Account._byID(obj.author_id, data=True)
        author.incr_karma(kind, sr, up_change - down_change)
        timer.intermediate("pg_incr_karma")

    # update the sr's valid vote count
    if is_new and v.valid_thing and kind == 'link':
        if sub._id != obj.author_id:
            incr_sr_count(sr)
        timer.intermediate("incr_sr_counts")

    # now write it out to Cassandra. We'll write it out to both
    # this way for a while
    VotesByAccount.copy_from(v)
    timer.intermediate("cassavotes")

    queries.changed(v._thing2, True)
    timer.intermediate("changed")

    return v
def add_comments_nolock(link_id, comments):
    """Insert new comments into a link's cached tree (caller holds the lock)."""
    cids, comment_tree, depth, num_children = link_comments(link_id)

    def find_parents():
        # dfs to find the list of parents for the new comment;
        # closes over cm_id from the enclosing loop below.
        # NOTE(review): returns None if cm_id is not reachable from the
        # roots — callers assume it always is; confirm.
        stack = [cid for cid in comment_tree[None]]
        parents = []
        while stack:
            cur_cm = stack.pop()
            if cur_cm == cm_id:
                return parents
            elif cur_cm in comment_tree:
                # make cur_cm the end of the parents list
                parents = parents[:depth[cur_cm]] + [cur_cm]
                for child in comment_tree[cur_cm]:
                    stack.append(child)

    new_parents = {}
    for comment in comments:
        cm_id = comment._id
        p_id = comment.parent_id

        # make sure we haven't already done this before (which would
        # happen if the tree isn't cached when you add a comment)
        if comment._id in cids:
            continue

        # add to comment list
        cids.append(comment._id)

        # add to tree
        comment_tree.setdefault(p_id, []).append(cm_id)

        # add to depth
        depth[cm_id] = depth[p_id] + 1 if p_id else 0

        # update children
        num_children[cm_id] = 0

        # if this comment had a parent, find the parent's parents
        if p_id:
            new_parents[cm_id] = p_id
            for p_id in find_parents():
                num_children[p_id] += 1

    # update our cache of children -> parents as well:
    key = parent_comments_key(link_id)
    r = g.permacache.get(key)
    if not r:
        r = _parent_dict_from_tree(comment_tree)

    for cm_id, parent_id in new_parents.iteritems():
        r[cm_id] = parent_id

    for comment in comments:
        cm_id = comment._id
        if cm_id not in new_parents:
            r[cm_id] = None

    # update the link's comment count and schedule it for search reindexing
    link = Link._byID(link_id, data=True)
    link._incr('num_comments', len(comments))
    from r2.lib.db.queries import changed
    changed(link)

    g.permacache.set(key, r)
    g.permacache.set(comments_key(link_id),
                     (cids, comment_tree, depth, num_children))
def _delete(self):
    """Decrement the parent link's comment count and reindex it."""
    from r2.lib.db.queries import changed
    link = Link._byID(self.link_id, data=True)
    link._incr('num_comments', -1)
    changed(link, True)
def vote(cls, sub, obj, dir, ip, organic=False, cheater=False):
    """Cast or update a vote, mirroring it into Cassandra."""
    from admintools import valid_user, valid_thing, update_score
    from r2.lib.count import incr_sr_count
    from r2.lib.db import queries

    sr = obj.subreddit_slow
    kind = obj.__class__.__name__.lower()
    karma = sub.karma(kind, sr)
    is_self_link = (kind == 'link' and getattr(obj, 'is_self', False))

    # check for old vote
    rel = cls.rel(sub, obj)
    oldvote = rel._fast_query(sub, obj, ['-1', '0', '1']).values()
    oldvote = filter(None, oldvote)

    amount = 1 if dir is True else 0 if dir is None else -1

    is_new = False
    if len(oldvote):
        # old vote: flip its direction
        v = oldvote[0]
        oldamount = int(v._name)
        v._name = str(amount)

        # these still need to be recalculated
        old_valid_thing = getattr(v, 'valid_thing', False)
        v.valid_thing = (valid_thing(v, karma, cheater=cheater)
                         and getattr(v, 'valid_thing', False))
        v.valid_user = (getattr(v, 'valid_user', False)
                        and v.valid_thing
                        and valid_user(v, sr, karma))
    else:
        # new vote
        is_new = True
        oldamount = 0
        v = rel(sub, obj, str(amount))
        v.ip = ip
        old_valid_thing = v.valid_thing = \
            valid_thing(v, karma, cheater=cheater)
        v.valid_user = (v.valid_thing and valid_user(v, sr, karma)
                        and not is_self_link)
        if organic:
            v.organic = organic

    v._commit()

    up_change, down_change = score_changes(amount, oldamount)

    if not (is_new and obj.author_id == sub._id and amount == 1):
        # we don't do this if it's the author's initial automatic
        # vote, because we checked it in with _ups == 1
        update_score(obj, up_change, down_change, v, old_valid_thing)

    if v.valid_user:
        author = Account._byID(obj.author_id, data=True)
        author.incr_karma(kind, sr, up_change - down_change)

    # update the sr's valid vote count
    if is_new and v.valid_thing and kind == 'link':
        if sub._id != obj.author_id:
            incr_sr_count(sr)

    # now write it out to Cassandra. We'll write it out to both
    # this way for a while
    CassandraVote._copy_from(v)

    queries.changed(v._thing2, True)

    return v
def vote(cls, sub, obj, dir, ip, organic=False, cheater=False):
    """Cast or update a vote, writing the mirror row to Cassandra inline."""
    from admintools import valid_user, valid_thing, update_score
    from r2.lib.count import incr_sr_count
    from r2.lib.db import queries

    sr = obj.subreddit_slow
    kind = obj.__class__.__name__.lower()
    karma = sub.karma(kind, sr)
    is_self_link = kind == "link" and getattr(obj, "is_self", False)

    # check for old vote
    rel = cls.rel(sub, obj)
    oldvote = rel._fast_query(sub, obj, ["-1", "0", "1"]).values()
    oldvote = filter(None, oldvote)

    amount = 1 if dir is True else 0 if dir is None else -1

    is_new = False
    if len(oldvote):
        # old vote
        v = oldvote[0]
        oldamount = int(v._name)
        v._name = str(amount)

        # these still need to be recalculated
        old_valid_thing = getattr(v, "valid_thing", False)
        v.valid_thing = (valid_thing(v, karma, cheater=cheater)
                         and getattr(v, "valid_thing", False))
        v.valid_user = (v.valid_user and v.valid_thing
                        and valid_user(v, sr, karma))
    else:
        # new vote
        is_new = True
        oldamount = 0
        v = rel(sub, obj, str(amount))
        v.ip = ip
        old_valid_thing = v.valid_thing = \
            valid_thing(v, karma, cheater=cheater)
        v.valid_user = (v.valid_thing and valid_user(v, sr, karma)
                        and not is_self_link)
        if organic:
            v.organic = organic

    v._commit()
    v._fast_query_timestamp_touch(sub)

    up_change, down_change = score_changes(amount, oldamount)

    if not (is_new and obj.author_id == sub._id and amount == 1):
        # we don't do this if it's the author's initial automatic
        # vote, because we checked it in with _ups == 1
        update_score(obj, up_change, down_change,
                     v.valid_thing, old_valid_thing)

    if v.valid_user:
        author = Account._byID(obj.author_id, data=True)
        author.incr_karma(kind, sr, up_change - down_change)

    # update the sr's valid vote count
    if is_new and v.valid_thing and kind == "link":
        if sub._id != obj.author_id:
            incr_sr_count(sr)

    # now write it out to Cassandra. We'll write it out to both
    # this way for a while
    voter = v._thing1
    votee = v._thing2
    cvc = CassandraVote._rel(Account, votee.__class__)
    try:
        cv = cvc._fast_query(voter._id36, votee._id36)
    except tdb_cassandra.NotFound:
        cv = cvc(thing1_id=voter._id36, thing2_id=votee._id36)
    cv.name = v._name
    cv.valid_user, cv.valid_thing = v.valid_user, v.valid_thing
    cv.ip = v.ip
    if getattr(v, "organic", False) or hasattr(cv, "organic"):
        cv.organic = getattr(v, "organic", False)
    cv._commit()

    queries.changed(votee, True)

    return v
def vote(cls, sub, obj, dir, ip, organic=False, cheater=False):
    """Cast or update a vote (older variant; denormalizes author/sr ids)."""
    from admintools import valid_user, valid_thing, update_score
    from r2.lib.count import incr_counts
    from r2.lib.db import queries

    sr = obj.subreddit_slow
    kind = obj.__class__.__name__.lower()
    karma = sub.karma(kind, sr)
    is_self_link = (kind == 'link'
                    and hasattr(obj, 'is_self')
                    and obj.is_self)

    # check for old vote
    rel = cls.rel(sub, obj)
    oldvote = rel._fast_query(sub, obj, ['-1', '0', '1']).values()
    oldvote = filter(None, oldvote)

    amount = 1 if dir is True else 0 if dir is None else -1

    is_new = False
    if len(oldvote):
        # old vote
        v = oldvote[0]
        oldamount = int(v._name)
        v._name = str(amount)

        # these still need to be recalculated
        old_valid_thing = v.valid_thing
        v.valid_thing = (valid_thing(v, karma, cheater=cheater)
                         and v.valid_thing)
        v.valid_user = (v.valid_user and v.valid_thing
                        and valid_user(v, sr, karma))
    else:
        # new vote; denormalize the author and subreddit onto the rel
        is_new = True
        oldamount = 0
        v = rel(sub, obj, str(amount))
        v.author_id = obj.author_id
        v.sr_id = sr._id
        v.ip = ip
        old_valid_thing = v.valid_thing = \
            valid_thing(v, karma, cheater=cheater)
        v.valid_user = (v.valid_thing and valid_user(v, sr, karma)
                        and not is_self_link)
        if organic:
            v.organic = organic

    v._commit()
    v._fast_query_timestamp_touch(sub)

    up_change, down_change = score_changes(amount, oldamount)

    if not (is_new and obj.author_id == sub._id and amount == 1):
        # we don't do this if it's the author's initial automatic
        # vote, because we checked it in with _ups == 1
        update_score(obj, up_change, down_change,
                     v.valid_thing, old_valid_thing)

    if v.valid_user:
        author = Account._byID(obj.author_id, data=True)
        author.incr_karma(kind, sr, up_change - down_change)

    # update the sr's valid vote count
    if is_new and v.valid_thing and kind == 'link':
        if sub._id != obj.author_id:
            incr_counts([sr])

    # now write it out to Cassandra. We'll write it out to both
    # this way for a while
    voter = v._thing1
    votee = v._thing2
    cvc = CassandraVote._rel(Account, votee.__class__)
    try:
        cv = cvc._fast_query(voter._id36, votee._id36)
    except tdb_cassandra.NotFound:
        cv = cvc(thing1_id=voter._id36, thing2_id=votee._id36)
    cv.name = v._name
    cv.valid_user, cv.valid_thing = v.valid_user, v.valid_thing
    cv.ip = v.ip
    if getattr(v, 'organic', False) or hasattr(cv, 'organic'):
        cv.organic = getattr(v, 'organic', False)
    cv._commit()

    queries.changed(votee, True)

    return v
def _delete(self):
    """On comment deletion, drop the link's comment count and reindex."""
    from r2.lib.db.queries import changed
    parent_link = Link._byID(self.link_id, data=True)
    parent_link._incr('num_comments', -1)
    changed(parent_link, True)
def insert(title, sr_name, url, description, date, author='ArxivBot', cross_srs=[]): a = Account._by_name(author) sr = subreddit_or_create(sr_name, a) srs = [subreddit_or_create(sr_name, a) for sr_name in cross_srs] ups = 0 if author=='AnnalsBot': ups = 1 downs = 0 if False: try: ls = Link._by_url(url, None) print 'Found %d links' % len(ls) for l in ls: if l.author_id == a._id and l.sr_id != sr._id: ups = ups + l._ups - 1 downs = downs + l._downs l._deleted=True l._commit() changed(l) x = l.subreddit_slow queries.delete_links(l) print 'Deleting ' + str(l) else: print 'Not deleting ' + str(l) print 'Seed votes %s %s' % (ups, downs) except NotFound: pass try: l = Link._by_url(url, sr) print "!! Link already exists" return l except NotFound: print "Submitting link" user = a l = Link(_ups = ups, _downs = downs, title = title, url = url, _spam = False, author_id = user._id, sr_id = sr._id, lang = sr.lang, ip = '127.0.0.1', multi_sr_id = [sr._id]+[sr._id for sr in srs], selftext = description) l.verdict = 'admin-approved' l.approval_checkmark = _("auto-approved") l._date = datetime(date.year,date.month,date.day,tzinfo=g.tz) l.selftext = description l._commit() #for cross_sr in cross_srs: # LinkSR(l, subreddit_or_create(cross_sr, a), 'crosspost')._commit() l.set_url_cache() vote = None if author == 'AnnalsBot': vote = True queries.queue_vote(user, l, vote, '127.0.0.1') queries.new_savehide(l._save(user)) queries.new_link(l) changed(l) queries.worker.join() end_trial(l, "admin-approved") admintools.unspam(l, user.name) ModAction.create(sr, user, 'approvelink', target=l)