def new_vote(vote, foreground=False, timer=None):
    user = vote._thing1
    item = vote._thing2

    if timer is None:
        timer = SimpleSillyStub()

    if not isinstance(item, (Link, Comment)):
        return

    if vote.valid_thing and not item._spam and not item._deleted:
        sr = item.subreddit_slow
        results = []

        author = Account._byID(item.author_id)
        for sort in ('hot', 'top', 'controversial', 'new'):
            if isinstance(item, Link):
                results.append(get_submitted(author, sort, 'all'))
            if isinstance(item, Comment):
                results.append(get_comments(author, sort, 'all'))

        if isinstance(item, Link):
            # don't do 'new', because that was done by new_link, and
            # the time-filtered versions of top/controversial will be
            # done by mr_top
            results.extend([
                get_links(sr, 'hot', 'all'),
                get_links(sr, 'top', 'all'),
                get_links(sr, 'controversial', 'all'),
            ])

            for domain in utils.UrlParser(item.url).domain_permutations():
                for sort in ("hot", "top", "controversial"):
                    results.append(get_domain_links(domain, sort, "all"))

        add_queries(results, insert_items=item, foreground=foreground)

    timer.intermediate("permacache")

    if isinstance(item, Link):
        # must update both because we don't know if it's a changed vote
        with CachedQueryMutator() as m:
            if vote._name == '1':
                m.insert(get_liked(user), [vote])
                m.delete(get_disliked(user), [vote])
            elif vote._name == '-1':
                m.delete(get_liked(user), [vote])
                m.insert(get_disliked(user), [vote])
            else:
                m.delete(get_liked(user), [vote])
                m.delete(get_disliked(user), [vote])
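
# Every function in this section takes an optional `timer` and falls back
# to SimpleSillyStub, so timing calls can be made unconditionally whether
# or not a real stats timer was supplied. A minimal sketch of such a no-op
# stub, assuming the real class simply swallows any method call
# (start, intermediate, stop, ...):
class SimpleSillyStub(object):
    """A stub that silently accepts and discards any method call."""

    def __getattr__(self, name):
        # every missing attribute resolves to the same do-nothing callable
        return self.stub

    def stub(self, *args, **kwargs):
        pass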

def get_comment_tree(link, _update=False, timer=None):
    if timer is None:
        timer = SimpleSillyStub()

    cache = CommentTree.by_link(link)
    timer.intermediate('load')

    if cache and not _update:
        return cache

    with CommentTree.mutation_context(link, timeout=180):
        timer.intermediate('lock')
        cache = CommentTree.rebuild(link)
        timer.intermediate('rebuild')

        # the tree rebuild updated the link's comment count, so schedule it
        # for search reindexing
        link.update_search_index()
        timer.intermediate('update_search_index')

        return cache

def by_link(cls, link, timer=None):
    if timer is None:
        timer = SimpleSillyStub()

    impl = cls.IMPLEMENTATIONS[link.comment_tree_version]
    data = impl.by_link(link, timer)
    return cls(link, **data)
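
# by_link above dispatches on link.comment_tree_version through a
# class-level registry. A hypothetical sketch of that registry's shape,
# using CommentTreePermacache (seen in a later variant below) as an
# illustrative backend; the exact version numbers are assumptions:
IMPLEMENTATIONS = {
    1: CommentTreePermacache,
    # further tree versions would be registered against their own
    # storage implementations in the same way
}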

def pre(self):
    action = request.environ["pylons.routes_dict"].get("action")
    if action:
        c.request_timer = g.stats.get_timer(request_timer_name(action))
    else:
        c.request_timer = SimpleSillyStub()

    c.response_wrappers = []
    c.start_time = datetime.now(g.tz)
    c.request_timer.start()
    g.reset_caches()

    c.domain_prefix = request.environ.get("reddit-domain-prefix",
                                          g.domain_prefix)
    c.secure = request.host in g.secure_domains

    # check if user-agent needs a dose of rate-limiting
    if not c.error_page:
        ratelimit_throttled()
        ratelimit_agents()

    c.allow_loggedin_cache = False
    c.show_wiki_actions = False

    # the domain has to be set before Cookies get initialized
    set_subreddit()
    c.errors = ErrorSet()
    c.cookies = Cookies()
    # if an rss feed, this will also log the user in if a feed=
    # GET param is included
    set_content_type()

    c.request_timer.intermediate("minimal-pre")
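
# request_timer_name is not defined in this section; the OAuth2 variant of
# pre() further below builds its timer name explicitly as
# "service_time.web.{controller}.{action}", so a plausible sketch is a
# helper producing the same kind of dotted stat key (the exact prefix is
# an assumption):
def request_timer_name(action):
    return "service_time.web." + action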

def by_link(cls, link, timer=None):
    if timer is None:
        timer = SimpleSillyStub()

    pieces = CommentTreePermacache.get_tree_pieces(link, timer)
    cids, tree, depth, parents, num_children = pieces
    comment_tree = cls(link, cids, tree, depth, parents, num_children)
    return comment_tree

def by_link(cls, link, timer=None):
    if timer is None:
        timer = SimpleSillyStub()

    impl = cls.IMPLEMENTATIONS[link.comment_tree_version]
    cids, tree, depth, parents = impl.get_tree_pieces(link, timer)
    comment_tree = cls(link, cids, tree, depth, parents)
    return comment_tree

def handle_vote(user, thing, dir, ip, organic,
                cheater=False, foreground=False, timer=None):
    if timer is None:
        timer = SimpleSillyStub()

    from r2.lib.db import tdb_sql
    from sqlalchemy.exc import IntegrityError
    try:
        v = Vote.vote(user, thing, dir, ip, organic, cheater=cheater,
                      timer=timer)
    except (tdb_sql.CreationError, IntegrityError):
        g.log.error("duplicate vote for: %s" % str((user, thing, dir)))
        return

    timestamps = []
    if isinstance(thing, Link):
        new_vote(v, foreground=foreground, timer=timer)

        # update the modified flags
        if user._id == thing.author_id:
            timestamps.append('Overview')
            timestamps.append('Submitted')
            # update sup listings
            sup.add_update(user, 'submitted')

        # update sup listings
        if dir:
            sup.add_update(user, 'liked')
        elif dir is False:
            sup.add_update(user, 'disliked')
    elif isinstance(thing, Comment):
        # update last modified
        if user._id == thing.author_id:
            timestamps.append('Overview')
            timestamps.append('Commented')
            # update sup listings
            sup.add_update(user, 'commented')

    timer.intermediate("sup")

    for timestamp in timestamps:
        set_last_modified(user, timestamp.lower())
    LastModified.touch(user._fullname, timestamps)
    timer.intermediate("last_modified")
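
# A hypothetical call site for handle_vote, assuming the stats timer
# returned by g.stats.get_timer supports the start()/intermediate()/stop()
# interface used throughout this section; process_queued_vote and the
# "service_time.vote" key are illustrative names, not from the source:
def process_queued_vote(user, link, dir, ip):
    timer = g.stats.get_timer("service_time.vote")
    timer.start()
    handle_vote(user, link, dir, ip, organic=False, foreground=True,
                timer=timer)
    timer.stop()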

def get_comment_tree(link, _update=False, timer=None):
    if timer is None:
        timer = SimpleSillyStub()

    cache = CommentTree.by_link(link)
    timer.intermediate("load")

    if cache and not _update:
        return cache

    with CommentTree.mutation_context(link, timeout=180):
        timer.intermediate("lock")
        cache = CommentTree.rebuild(link)
        timer.intermediate("rebuild")

        # the tree rebuild updated the link's comment count, so schedule it
        # for search reindexing
        from r2.lib.db.queries import changed
        changed([link])
        timer.intermediate("changed")

        return cache

def pre(self):
    action = request.environ["pylons.routes_dict"].get("action")
    if action:
        if not self._get_action_handler():
            action = 'invalid'
        c.request_timer = g.stats.get_timer(request_timer_name(action))
    else:
        c.request_timer = SimpleSillyStub()

    c.response_wrapper = None
    c.start_time = datetime.now(g.tz)
    c.request_timer.start()
    g.reset_caches()

    c.domain_prefix = request.environ.get("reddit-domain-prefix",
                                          g.domain_prefix)
    c.secure = request.host in g.secure_domains

    # wsgi.url_scheme is used in generating absolute urls, such as by webob
    # for translating some of our relative-url redirects to rfc compliant
    # absolute-url ones. TODO: consider using one of webob's methods of
    # setting wsgi.url_scheme based on incoming request headers added by
    # upstream things like stunnel/haproxy.
    if c.secure:
        request.environ["wsgi.url_scheme"] = "https"

    c.request_origin = request.host_url

    # check if user-agent needs a dose of rate-limiting
    if not c.error_page:
        ratelimit_throttled()
        ratelimit_agents()

    c.allow_loggedin_cache = False

    # the domain has to be set before Cookies get initialized
    set_subreddit()
    c.errors = ErrorSet()
    c.cookies = Cookies()
    # if an rss feed, this will also log the user in if a feed=
    # GET param is included
    set_content_type()

    c.request_timer.intermediate("minimal-pre")

    # True/False forces. None updates for most non-POST requests
    c.update_last_visit = None

    g.stats.count_string('user_agents', request.user_agent)

    hooks.get_hook("reddit.request.minimal_begin").call()

def pre(self):
    action = request.environ["pylons.routes_dict"].get("action")
    if action:
        if not self._get_action_handler():
            action = 'invalid'
        c.request_timer = g.stats.get_timer(request_timer_name(action))
    else:
        c.request_timer = SimpleSillyStub()

    c.response_wrapper = None
    c.start_time = datetime.now(g.tz)
    c.request_timer.start()
    g.reset_caches()

    c.domain_prefix = request.environ.get("reddit-domain-prefix",
                                          g.domain_prefix)
    c.secure = request.environ["wsgi.url_scheme"] == "https"
    c.request_origin = request.host_url

    # check if user-agent needs a dose of rate-limiting
    if not c.error_page:
        ratelimit_throttled()
        ratelimit_agents()

    c.allow_loggedin_cache = False
    c.allow_framing = False

    c.cdn_cacheable = (request.via_cdn and
                       g.login_cookie not in request.cookies)

    # the domain has to be set before Cookies get initialized
    set_subreddit()
    c.errors = ErrorSet()
    c.cookies = Cookies()
    # if an rss feed, this will also log the user in if a feed=
    # GET param is included
    set_content_type()

    c.request_timer.intermediate("minimal-pre")

    # True/False forces. None updates for most non-POST requests
    c.update_last_visit = None

    g.stats.count_string('user_agents', request.user_agent)

    if not self.defer_ratelimiting:
        self.run_sitewide_ratelimits()
        c.request_timer.intermediate("minimal-ratelimits")

    hooks.get_hook("reddit.request.minimal_begin").call()

def pre(self):
    BaseController.pre(self)

    action = request.environ["pylons.routes_dict"].get("action")
    if action:
        if not self._get_action_handler():
            action = 'invalid'
        controller = request.environ["pylons.routes_dict"]["controller"]
        timer_name = "service_time.web.{}.{}".format(controller, action)
        c.request_timer = g.stats.get_timer(timer_name)
    else:
        c.request_timer = SimpleSillyStub()

    c.request_timer.start()

    if "Origin" in request.headers:
        oauth_origin = "https://%s" % g.oauth_domain
        response.headers["Access-Control-Allow-Origin"] = oauth_origin
        response.headers["Vary"] = "Origin"
        response.headers["Access-Control-Allow-Methods"] = "GET"
        response.headers["Access-Control-Allow-Credentials"] = "true"

def get_comment_tree(link, timer=None):
    if timer is None:
        timer = SimpleSillyStub()

    cache = CommentTree.by_link(link, timer)
    return cache

def vote(cls, sub, obj, dir, ip, organic=False, cheater=False, timer=None):
    from admintools import valid_user, valid_thing, update_score
    from r2.lib.count import incr_sr_count
    from r2.lib.db import queries

    if timer is None:
        timer = SimpleSillyStub()

    sr = obj.subreddit_slow
    kind = obj.__class__.__name__.lower()
    karma = sub.karma(kind, sr)

    is_self_link = (kind == 'link' and getattr(obj, 'is_self', False))

    # check for old vote
    rel = cls.rel(sub, obj)
    oldvote = rel._fast_query(sub, obj, ['-1', '0', '1']).values()
    oldvote = filter(None, oldvote)
    timer.intermediate("pg_read_vote")

    amount = 1 if dir is True else 0 if dir is None else -1

    is_new = False
    # old vote
    if len(oldvote):
        v = oldvote[0]
        oldamount = int(v._name)
        v._name = str(amount)

        # these still need to be recalculated
        old_valid_thing = getattr(v, 'valid_thing', False)
        v.valid_thing = (valid_thing(v, karma, cheater=cheater) and
                         getattr(v, 'valid_thing', False))
        v.valid_user = (getattr(v, 'valid_user', False) and
                        v.valid_thing and
                        valid_user(v, sr, karma))
    # new vote
    else:
        is_new = True
        oldamount = 0
        v = rel(sub, obj, str(amount))
        v.ip = ip
        old_valid_thing = v.valid_thing = valid_thing(v, karma,
                                                      cheater=cheater)
        v.valid_user = (v.valid_thing and valid_user(v, sr, karma) and
                        not is_self_link)

    if organic:
        v.organic = organic

    v._commit()
    timer.intermediate("pg_write_vote")

    up_change, down_change = score_changes(amount, oldamount)

    if not (is_new and obj.author_id == sub._id and amount == 1):
        # we don't do this if it's the author's initial automatic
        # vote, because we checked it in with _ups == 1
        update_score(obj, up_change, down_change, v, old_valid_thing)
        timer.intermediate("pg_update_score")

    if v.valid_user:
        author = Account._byID(obj.author_id, data=True)
        author.incr_karma(kind, sr, up_change - down_change)
        timer.intermediate("pg_incr_karma")

    # update the sr's valid vote count
    if is_new and v.valid_thing and kind == 'link':
        if sub._id != obj.author_id:
            incr_sr_count(sr)
        timer.intermediate("incr_sr_counts")

    # now write it out to Cassandra. We'll write it out to both
    # this way for a while
    VotesByAccount.copy_from(v)
    timer.intermediate("cassavotes")

    queries.changed(v._thing2, True)
    timer.intermediate("changed")

    return v

def vote(cls, sub, obj, dir, ip, organic=False, cheater=False, timer=None):
    from admintools import valid_user, valid_thing, update_score
    from r2.lib.count import incr_sr_count
    from r2.lib.db import queries

    if timer is None:
        timer = SimpleSillyStub()

    sr = obj.subreddit_slow
    kind = obj.__class__.__name__.lower()
    karma = sub.karma(kind, sr)

    is_self_link = (kind == 'link' and getattr(obj, 'is_self', False))

    # check for old vote
    rel = cls.rel(sub, obj)
    oldvote = rel._fast_query(sub, obj, ['-1', '0', '1']).values()
    oldvote = filter(None, oldvote)
    timer.intermediate("pg_read_vote")

    amount = 1 if dir is True else 0 if dir is None else -1

    is_new = False
    # old vote
    if len(oldvote):
        v = oldvote[0]
        oldamount = int(v._name)
        v._name = str(amount)

        # these still need to be recalculated
        old_valid_thing = getattr(v, 'valid_thing', False)
        v.valid_thing = (valid_thing(v, karma, cheater=cheater) and
                         getattr(v, 'valid_thing', False))
        v.valid_user = (getattr(v, 'valid_user', False) and
                        v.valid_thing and
                        valid_user(v, sr, karma))
    # new vote
    else:
        is_new = True
        oldamount = 0
        v = rel(sub, obj, str(amount))
        v.ip = ip
        old_valid_thing = v.valid_thing = valid_thing(v, karma,
                                                      cheater=cheater)
        v.valid_user = (v.valid_thing and valid_user(v, sr, karma) and
                        not is_self_link)

    if organic:
        v.organic = organic

    v._commit()
    timer.intermediate("pg_write_vote")

    up_change, down_change = score_changes(amount, oldamount)

    if not (is_new and obj.author_id == sub._id and amount == 1):
        # we don't do this if it's the author's initial automatic
        # vote, because we checked it in with _ups == 1
        update_score(obj, up_change, down_change, v, old_valid_thing)
        timer.intermediate("pg_update_score")

    if v.valid_user:
        author = Account._byID(obj.author_id, data=True)
        author.incr_karma(kind, sr, up_change - down_change)
        timer.intermediate("pg_incr_karma")

    # update the sr's valid vote count
    if is_new and v.valid_thing and kind == 'link':
        if sub._id != obj.author_id:
            incr_sr_count(sr)
        timer.intermediate("incr_sr_counts")

    # now write it out to Cassandra. We'll write it out to both
    # this way for a while
    CassandraVote._copy_from(v)
    VotesByAccount.copy_from(v)
    timer.intermediate("cassavotes")

    queries.changed(v._thing2, True)
    timer.intermediate("changed")

    return v
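
# score_changes is not defined in this section. Both versions of vote()
# above use it to turn the (new, old) vote amounts into deltas for the
# upvote and downvote counts; a plausible sketch under that assumption:
def score_changes(amount, old_amount):
    up_change, down_change = 0, 0
    if amount == old_amount:
        return up_change, down_change
    # retract whatever the old vote contributed...
    if old_amount > 0:
        up_change -= 1
    elif old_amount < 0:
        down_change -= 1
    # ...then apply the new vote's contribution
    if amount > 0:
        up_change += 1
    elif amount < 0:
        down_change += 1
    return up_change, down_change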