Example #1
 def operator(self, sort):
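     # _t1_date and _t1_score presumably refer to fields on the first Thing
     # of the relation (thing1) rather than on the relation row itself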
     if sort == 'new':
         return operators.desc('_t1_date')
     elif sort == 'old':
         return operators.asc('_t1_date')
     elif sort == 'top':
         return operators.desc('_t1_score')
Example #2
 def operator(self, sort):
     if sort == "new":
         return operators.desc("_t1_date")
     elif sort == "old":
         return operators.asc("_t1_date")
     elif sort == "top":
         return operators.desc("_t1_score")
Example #3
def upgrade_messages(update_comments=True, update_messages=True, update_trees=True):
    from r2.lib.db import queries
    from r2.lib import comment_tree, cache
    from r2.models import Account
    from pylons import app_globals as g

    accounts = set()

    def batch_fn(items):
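        # fetch_things2 calls this once per fetched batch; resetting g's
        # caches keeps a full-table walk from piling up stale entries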
        g.reset_caches()
        return items

    if update_messages or update_trees:
        q = Message._query(Message.c.new == True, sort=desc("_date"), data=True)
        for m in fetch_things2(q, batch_fn=batch_fn):
            print m, m._date
            if update_messages:
                accounts = accounts | queries.set_unread(m, m.new)
            else:
                accounts.add(m.to_id)
    if update_comments:
        q = Comment._query(Comment.c.new == True, sort=desc("_date"))
        q._filter(Comment.c._id < 26152162676)

        for m in fetch_things2(q, batch_fn=batch_fn):
            print m, m._date
            queries.set_unread(m, True)

    print "Precomputing comment trees for %d accounts" % len(accounts)

    for i, a in enumerate(accounts):
        if not isinstance(a, Account):
            a = Account._byID(a)
        print i, a
        comment_tree.user_messages(a)
Example #4
    def GET_editreddit(self, location, num, after, reverse, count):
        """Edit reddit form."""
        if isinstance(c.site, FakeSubreddit):
            return self.abort404()

        # moderator is either reddit's moderator or an admin
        is_moderator = c.user_is_loggedin and c.site.is_moderator(c.user) or c.user_is_admin

        if is_moderator and location == 'edit':
            pane = CreateSubreddit(site = c.site, listings = ListingController.listing_names())
        elif location == 'moderators':
            pane = ModList(editable = is_moderator)
        elif is_moderator and location == 'banned':
            pane = BannedList(editable = is_moderator)
        elif location == 'contributors' and c.site.type != 'public':
            pane = ContributorList(editable = is_moderator)
        elif (location == 'stylesheet'
              and c.site.can_change_stylesheet(c.user)
              and not g.css_killswitch):
            if hasattr(c.site,'stylesheet_contents_user') and c.site.stylesheet_contents_user:
                stylesheet_contents = c.site.stylesheet_contents_user
            elif hasattr(c.site,'stylesheet_contents') and c.site.stylesheet_contents:
                stylesheet_contents = c.site.stylesheet_contents
            else:
                stylesheet_contents = ''
            pane = SubredditStylesheet(site = c.site,
                                       stylesheet_contents = stylesheet_contents)
        elif is_moderator and location == 'reports':
            links = Link._query(Link.c.reported != 0,
                                Link.c._spam == False)
            comments = Comment._query(Comment.c.reported != 0,
                                      Comment.c._spam == False)
            query = thing.Merge((links, comments),
                                Link.c.sr_id == c.site._id,
                                sort = desc('_date'),
                                data = True)
            
            builder = QueryBuilder(query, num = num, after = after, 
                                   count = count, reverse = reverse,
                                   wrap = ListingController.builder_wrapper)
            listing = LinkListing(builder)
            pane = listing.listing()

        elif is_moderator and location == 'spam':
            links = Link._query(Link.c._spam == True)
            comments = Comment._query(Comment.c._spam == True)
            query = thing.Merge((links, comments),
                                Link.c.sr_id == c.site._id,
                                sort = desc('_date'),
                                data = True)
            
            builder = QueryBuilder(query, num = num, after = after, 
                                   count = count, reverse = reverse,
                                   wrap = ListingController.builder_wrapper)
            listing = LinkListing(builder)
            pane = listing.listing()
        else:
            return self.abort404()

        return EditReddit(content = pane).render()
Example #5
    def gen_keys():
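        # yield every cache key and cached-query iden in the system,
        # presumably so the caller can warm or invalidate them in bulk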
        yield promoted_memo_key

        # just let this one do its own writing
        load_all_reddits()

        yield queries.get_all_comments().iden

        l_q = Link._query(Link.c._spam == (True, False),
                          Link.c._deleted == (True, False),
                          sort=desc('_date'),
                          data=True,
                          )
        for link in fetch_things2(l_q, verbosity):
            yield comments_key(link._id)
            yield last_modified_key(link, 'comments')

        a_q = Account._query(Account.c._spam == (True, False),
                             sort=desc('_date'),
                             )
        for account in fetch_things2(a_q, verbosity):
            yield messages_key(account._id)
            yield last_modified_key(account, 'overview')
            yield last_modified_key(account, 'commented')
            yield last_modified_key(account, 'submitted')
            yield last_modified_key(account, 'liked')
            yield last_modified_key(account, 'disliked')
            yield queries.get_comments(account, 'new', 'all').iden
            yield queries.get_submitted(account, 'new', 'all').iden
            yield queries.get_liked(account).iden
            yield queries.get_disliked(account).iden
            yield queries.get_hidden(account).iden
            yield queries.get_saved(account).iden
            yield queries.get_inbox_messages(account).iden
            yield queries.get_unread_messages(account).iden
            yield queries.get_inbox_comments(account).iden
            yield queries.get_unread_comments(account).iden
            yield queries.get_inbox_selfreply(account).iden
            yield queries.get_unread_selfreply(account).iden
            yield queries.get_sent(account).iden

        sr_q = Subreddit._query(Subreddit.c._spam == (True, False),
                                sort=desc('_date'),
                                )
        for sr in fetch_things2(sr_q, verbosity):
            yield last_modified_key(sr, 'stylesheet_contents')
            yield queries.get_links(sr, 'hot', 'all').iden
            yield queries.get_links(sr, 'new', 'all').iden

            for sort in 'top', 'controversial':
                for time in 'hour', 'day', 'week', 'month', 'year', 'all':
                    yield queries.get_links(sr, sort, time,
                                            merge_batched=False).iden
            yield queries.get_spam_links(sr).iden
            yield queries.get_spam_comments(sr).iden
            yield queries.get_reported_links(sr).iden
            yield queries.get_reported_comments(sr).iden
            yield queries.get_subreddit_messages(sr).iden
            yield queries.get_unread_subreddit_messages(sr).iden
Example #6
File: front.py Project: cmak/reddit
 def GET_random(self):
     """The Serendipity button"""
     n = rand.randint(0, 9)
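     # n picks the sort below: date for n in 6..9 (~40%), hotness otherwise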
     links = Link._query(*c.site.query_rules())
     links._sort = desc('_date') if n > 5 else desc('_hot')
     links._limit = 50
     links = list(links)
     l = links[rand.randint(0, len(links)-1)]
     l._load()
     return self.redirect(l.url)
Example #7
 def operator(self, sort):
     if sort == 'hot':
         return operators.desc('_hot')
     elif sort == 'new':
         return operators.desc('_date')
     elif sort == 'old':
         return operators.asc('_date')
     elif sort == 'top':
         return operators.desc('_score')
     elif sort == 'controversial':
         return operators.desc('_controversy')
Example #8
    def special_reddits_cache(cls, user_id, query_param):
        reddits = SRMember._query(SRMember.c._name == query_param,
                                  SRMember.c._thing2_id == user_id,
                                  # hack to prevent the query from
                                  # adding its own date
                                  sort = (desc('_t1_ups'), desc('_t1_date')),
                                  eager_load = True,
                                  thing_data = True,
                                  limit = 100)

        return [ sr._thing1_id for sr in reddits ]
Example #9
 def operator(self, sort):
     if sort == "hot":
         return operators.desc("_hot")
     elif sort == "new":
         return operators.desc("_date")
     elif sort == "old":
         return operators.asc("_date")
     elif sort == "top":
         return operators.desc("_score")
     elif sort == "controversial":
         return operators.desc("_controversy")
Example #10
    def gen_keys():
        yield promoted_memo_key

        # just let this one do its own writing
        load_all_reddits()

        yield queries.get_all_comments().iden

        l_q = Link._query(
            Link.c._spam == (True, False), Link.c._deleted == (True, False), sort=desc("_date"), data=True
        )
        for link in fetch_things2(l_q, verbosity):
            yield comments_key(link._id)
            yield last_modified_key(link, "comments")

        a_q = Account._query(Account.c._spam == (True, False), sort=desc("_date"))
        for account in fetch_things2(a_q, verbosity):
            yield messages_key(account._id)
            yield last_modified_key(account, "overview")
            yield last_modified_key(account, "commented")
            yield last_modified_key(account, "submitted")
            yield last_modified_key(account, "liked")
            yield last_modified_key(account, "disliked")
            yield queries.get_comments(account, "new", "all").iden
            yield queries.get_submitted(account, "new", "all").iden
            yield queries.get_liked(account).iden
            yield queries.get_disliked(account).iden
            yield queries.get_hidden(account).iden
            yield queries.get_saved(account).iden
            yield queries.get_inbox_messages(account).iden
            yield queries.get_unread_messages(account).iden
            yield queries.get_inbox_comments(account).iden
            yield queries.get_unread_comments(account).iden
            yield queries.get_inbox_selfreply(account).iden
            yield queries.get_unread_selfreply(account).iden
            yield queries.get_sent(account).iden

        sr_q = Subreddit._query(Subreddit.c._spam == (True, False), sort=desc("_date"))
        for sr in fetch_things2(sr_q, verbosity):
            yield last_modified_key(sr, "stylesheet_contents")
            yield queries.get_links(sr, "hot", "all").iden
            yield queries.get_links(sr, "new", "all").iden

            for sort in "top", "controversial":
                for time in "hour", "day", "week", "month", "year", "all":
                    yield queries.get_links(sr, sort, time, merge_batched=False).iden
            yield queries.get_spam_links(sr).iden
            yield queries.get_spam_comments(sr).iden
            yield queries.get_reported_links(sr).iden
            yield queries.get_reported_comments(sr).iden
            yield queries.get_subreddit_messages(sr).iden
            yield queries.get_unread_subreddit_messages(sr).iden
Example #11
 def operator(self, sort):
     if sort == 'hot':
         return operators.desc('_hot')
     elif sort == 'new':
         return operators.desc('_date')
     elif sort == 'old':
         return operators.asc('_date')
     elif sort == 'top':
         return operators.desc('_score')
     elif sort == 'controversial':
         return operators.desc('_controversy')
     elif sort == 'confidence':
         return operators.desc('_confidence')
     elif sort == 'random':
         return operators.shuffled('_confidence')
Example #12
def port_cassavotes():
    from r2.models import Vote, Account, Link, Comment
    from r2.models.vote import CassandraVote, CassandraLinkVote, CassandraCommentVote
    from r2.lib.db.tdb_cassandra import CL
    from r2.lib.utils import fetch_things2, to36, progress

    ts = [(Vote.rel(Account, Link), CassandraLinkVote),
          (Vote.rel(Account, Comment), CassandraCommentVote)]

    dataattrs = set(['valid_user', 'valid_thing', 'ip', 'organic'])

    for prel, crel in ts:
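        # walk the postgres rel newest-first and mirror each vote into its
        # Cassandra counterpart, copying only the whitelisted data attrs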
        vq = prel._query(sort=desc('_date'),
                         data=True,
                         eager_load=False)
        vq = fetch_things2(vq)
        vq = progress(vq, persec=True)
        for v in vq:
            t1 = to36(v._thing1_id)
            t2 = to36(v._thing2_id)
            cv = crel(thing1_id = t1,
                      thing2_id = t2,
                      date=v._date,
                      name=v._name)
            for dkey, dval in v._t.iteritems():
                if dkey in dataattrs:
                    setattr(cv, dkey, dval)

            cv._commit(write_consistency_level=CL.ONE)
Example #13
def run(verbose=True, sleep_time = 60, num_items = 1):
    key = "indextank_cursor"
    cursor = g.cache.get(key)
    if cursor is None:
        raise ValueError("%s is not set!" % key)
    cursor = int(cursor)

    while True:
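        # fetch num_items links strictly older than the cursor (newest first),
        # enqueue each for the indextank consumer, then persist the new cursor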
        if verbose:
            print "Looking for %d items with _id < %d" % (num_items, cursor)
        q = Link._query(sort = desc('_id'),
                        limit = num_items)
        q._after(Link._byID(cursor))
        last_date = None
        for item in q:
            cursor = item._id
            last_date = item._date
            amqp.add_item('indextank_changes', item._fullname,
                          message_id = item._fullname,
                          delivery_mode = amqp.DELIVERY_TRANSIENT)
        g.cache.set(key, cursor)

        if verbose:
            if last_date:
                last_date = last_date.strftime("%Y-%m-%d")
            print ("Just enqueued %d items. New cursor=%s (%s). Sleeping %d seconds."
                   % (num_items, cursor, last_date, sleep_time))

        sleep(sleep_time)
Example #14
    def get_all_comments(self):
        from r2.lib.db import queries
        from r2.models import Comment
        from r2.controllers.errors import UserRequiredException

        if not c.user_is_loggedin:
            raise UserRequiredException

        friends = self.get_important_friends(c.user._id)

        if not friends:
            return []

        if g.use_query_cache:
            # with the precomputer enabled, this Subreddit only supports
            # being sorted by 'new'. it would be nice to have a
            # cleaner UI than just blatantly ignoring their sort,
            # though
            sort = "new"
            time = "all"

            friends = Account._byID(friends, return_dict=False)

            crs = [queries.get_comments(friend, sort, time) for friend in friends]
            return queries.MergedCachedResults(crs)

        else:
            q = Comment._query(Comment.c.author_id == friends, sort=desc("_date"), data=True)
            return q
Example #15
def get_query(after_user_id):
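    # page backwards through "subscriber" rels by user id; presumably a batch
    # job calls this repeatedly with the smallest id seen so far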
    q = SRMember._query(
        SRMember.c._name == "subscriber",
        SRMember.c._thing2_id < after_user_id,
        sort=desc("_thing2_id"),
    )
    return q
Example #16
def rebuild_link_index(start_at=None, sleeptime=1, cls=Link,
                       uploader=LinkUploader, doc_api='CLOUDSEARCH_DOC_API',
                       estimate=50000000, chunk_size=1000):
    doc_api = getattr(g, doc_api)
    uploader = uploader(doc_api)

    q = cls._query(cls.c._deleted == (True, False), sort=desc('_date'))

    if start_at:
        after = cls._by_fullname(start_at)
        assert isinstance(after, cls)
        q._after(after)

    q = r2utils.fetch_things2(q, chunk_size=chunk_size)
    q = r2utils.progress(q, verbosity=1000, estimate=estimate, persec=True,
                         key=_progress_key)
    for chunk in r2utils.in_chunks(q, size=chunk_size):
        uploader.things = chunk
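        # retry the upload up to five times, sleeping x seconds after each
        # failure; the for/else re-raises the last error if all attempts fail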
        for x in range(5):
            try:
                uploader.inject()
            except httplib.HTTPException as err:
                print "Got %s, sleeping %s secs" % (err, x)
                time.sleep(x)
                continue
            else:
                break
        else:
            raise err
        last_update = chunk[-1]
        print "last updated %s" % last_update._fullname
        time.sleep(sleeptime)
Example #17
    def top_lang_srs(cls, lang, limit, filter_allow_top = False, over18 = True,
                     over18_only = False):
        """Returns the default list of subreddits for a given language, sorted
        by popularity"""
        pop_reddits = Subreddit._query(Subreddit.c.type == ('public',
                                                            'restricted'),
                                       sort=desc('_downs'),
                                       limit = limit,
                                       data = True,
                                       read_cache = True,
                                       write_cache = True,
                                       cache_time = 5 * 60)
        if lang != 'all':
            pop_reddits._filter(Subreddit.c.lang == lang)

        if not over18:
            pop_reddits._filter(Subreddit.c.over_18 == False)
        elif over18_only:
            pop_reddits._filter(Subreddit.c.over_18 == True)

        if filter_allow_top:
            pop_reddits._limit = 2 * limit
            pop_reddits = filter(lambda sr: sr.allow_top,
                                 pop_reddits)[:limit]

        # reddits with negative author_id are system reddits and shouldn't be displayed
        return [x for x in pop_reddits
                if getattr(x, "author_id", 0) is None or getattr(x, "author_id", 0) >= 0]
Example #18
def subreddit_stats(config, ranges):
    def get_id(*args, **kwargs):
        kwargs.setdefault('limit', 1)
        results = list(kind._query(*args, **kwargs))
        if not results:
            return None
        else:
            return results[0]._id

    sr_counts = defaultdict(int)
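    # bound yesterday's rows by the first and last thing ids created in that
    # window, then count non-spam links and comments per sr_id between them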
    for kind in (Link, Comment):
        thing_table, data_table = get_thing_table(kind._type_id)
        first_id = get_id(kind.c._date > ranges['yesterday'][0], sort=asc('_date'))
        last_id = get_id(kind.c._date < ranges['yesterday'][1], sort=desc('_date'))
        if not first_id or not last_id:
            continue

        q = sa.select([data_table.c.value, sa.func.count(data_table.c.value)],
                (data_table.c.thing_id > first_id)
                    & (data_table.c.thing_id < last_id)
                    & (data_table.c.key == 'sr_id')
                    & (thing_table.c.thing_id == data_table.c.thing_id)
                    & (thing_table.c.spam == False),
                group_by=data_table.c.value)

        for sr_id, count in q.execute():
            sr_counts[sr_id] += count

    return {'subreddits_active_yesterday': sum(1 for count in sr_counts.itervalues() if count > 5)}
Example #19
File: front.py Project: cmak/reddit
    def GET_editreddit(self, location, num, after, reverse, count):
        """Edit reddit form. """
        if isinstance(c.site, FakeSubreddit):
            return self.abort404()

        # moderator is either reddit's moderator or an admin
        is_moderator = c.user_is_loggedin and c.site.is_moderator(c.user) or c.user_is_admin

        if is_moderator and location == 'edit':
            pane = CreateSubreddit(site = c.site)
        elif location == 'moderators':
            pane = ModList(editable = is_moderator)
        elif is_moderator and location == 'banned':
            pane = BannedList(editable = is_moderator)
        elif location == 'contributors' and c.site.type != 'public':
            pane = ContributorList(editable = is_moderator)
        elif is_moderator and location == 'spam':
            links = Link._query(Link.c._spam == True)
            comments = Comment._query(Comment.c._spam == True)
            query = thing.Merge((links, comments),
                                sort = desc('_date'),
                                data = True,
                                *c.site.query_rules())
            
            builder = QueryBuilder(query, num = num, after = after, 
                                   count = count, reverse = reverse,
                                   wrap = ListingController.builder_wrapper)
            listing = LinkListing(builder)
            pane = listing.listing()
        else:
            return self.abort404()

        return EditReddit(content = pane).render()
Example #20
def rebuild_index(start_at=None, sleeptime=1, cls=Link, estimate=50000000,
                  chunk_size=1000):
    if start_at is _REBUILD_INDEX_CACHE_KEY:
        start_at = g.cache.get(start_at)
        if not start_at:
            raise ValueError("Told me to use '%s' key, but it's not set" %
                             _REBUILD_INDEX_CACHE_KEY)
    
    q = cls._query(cls.c._deleted == (True, False),
                   sort=desc('_date'), data=True)
    if start_at:
        after = cls._by_fullname(start_at)
        assert isinstance(after, cls)
        q._after(after)
    q = r2utils.fetch_things2(q, chunk_size=chunk_size)
    q = r2utils.progress(q, verbosity=1000, estimate=estimate, persec=True,
                         key=_progress_key)
    for chunk in r2utils.in_chunks(q, size=chunk_size):
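        # retry injection up to five times with increasing sleeps; the
        # for/else re-raises the last error once retries are exhausted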
        for x in range(5):
            try:
                inject(chunk)
            except httplib.HTTPException as err:
                print "Got %s, sleeping %s secs" % (err, x)
                time.sleep(x)
                continue
            else:
                break
        else:
            raise err
        last_update = chunk[-1]
        g.cache.set(_REBUILD_INDEX_CACHE_KEY, last_update._fullname)
        time.sleep(sleeptime)
Example #21
def get_hot(sr):
    q = Link._query(Link.c.sr_id == sr._id,
                    sort = desc('_hot'),
                    write_cache = True,
                    limit = 150)

    iden = q._iden()

    read_cache = True
    #if query is in the cache, the expire flag is true, and the access
    #time is old, set read_cache = False
    if cache.get(iden) is not None:
        if cache.get(expire_key(sr)):
            access_time = cache.get(access_key(sr))
            if not access_time or datetime.now() > access_time + expire_delta:
                cache.delete(expire_key(sr))
                read_cache = False
    #if the query isn't in the cache, set read_cache to false so we
    #record the access time
    else:
        read_cache = False

    if not read_cache:
        cache.set(access_key(sr), datetime.now())
    
    q._read_cache = read_cache
    res = list(q)
    
    #set the #1 link so we can ignore it later. expire after TOP_CACHE
    #just in case something happens and that sr doesn't update
    if res:
        cache.set(top_key(sr), res[0]._fullname, TOP_CACHE)

    return res
Example #22
 def test_comment_order_invalid_sticky(self):
     self.link.sticky_comment_id = 101
     sort = operators.desc("_confidence")
     builder = CommentBuilder(self.link, sort, num=1500)
     builder._get_comments()
     self.assertEqual(builder.comment_order,
         [100, 101, 102, 104, 105, 106, 103, 107, 108, 110, 109])
Example #23
    def top_lang_srs(cls, lang, limit):
        """Returns the default list of subreddits for a given language, sorted
        by popularity"""
        pop_reddits = Subreddit._query(Subreddit.c.type == ('public',
                                                            'restricted'),
                                       sort=desc('_downs'),
                                       limit = limit * 1.5 if limit else None,
                                       data = True,
                                       read_cache = True,
                                       write_cache = True,
                                       cache_time = g.page_cache_time)
        if lang != 'all':
            pop_reddits._filter(Subreddit.c.lang == lang)

        if not c.over18:
            pop_reddits._filter(Subreddit.c.over_18 == False)

        # evaluate the query and remove the ones with
        # allow_top==False.  Note that because this filtering is done
        # after the query is run, if there are a lot of top reddits
        # with allow_top==False, we may return fewer than `limit`
        # results.
        srs = filter(lambda sr: sr.allow_top, pop_reddits)

        return srs[:limit] if limit else srs
Example #24
 def by_award(cls, award):
     q = Trophy._query(Trophy.c._thing2_id == award._id,
                       eager_load = True, thing_data = True,
                       data = True,
                       sort = desc('_date'))
     q._limit = 500
     return list(q)
Example #25
 def by_account(cls, account):
     q = Trophy._query(Trophy.c._thing1_id == account._id,
                       eager_load = True, thing_data = True,
                       data = True,
                       sort = desc('_date'))
     q._limit = 50
     return list(q)
Example #26
 def test_comment_order_permalink_context(self):
     sort = operators.desc("_confidence")
     comment = MagicMock()
     comment._id = 104
     builder = CommentBuilder(
         self.link, sort, comment=comment, context=3, num=1500)
     builder._get_comments()
     self.assertEqual(builder.comment_order, [100, 102, 104])
Example #27
 def test_comment_order_invalid_permalink_defocus(self):
     sort = operators.desc("_confidence")
     comment = MagicMock()
     comment._id = 999999
     builder = CommentBuilder(self.link, sort, comment=comment, num=1500)
     builder._get_comments()
     self.assertEqual(builder.comment_order,
         [100, 101, 102, 104, 105, 106, 103, 107, 108, 110, 109])
Example #28
def load_all_reddits():
    query_cache = {}

    q = Subreddit._query(Subreddit.c.type == 'public',
                         Subreddit.c._downs > 1,
                         sort = (desc('_downs'), desc('_ups')),
                         data = True)
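    # for every prefix of each subreddit's name, keep up to ten
    # (name, over_18) pairs, in the popularity order of the query's sort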
    for sr in utils.fetch_things2(q):
        name = sr.name.lower()
        for i in xrange(len(name)):
            prefix = name[:i + 1]
            names = query_cache.setdefault(prefix, [])
            if len(names) < 10:
                names.append((sr.name, sr.over_18))

    for name_prefix, subreddits in query_cache.iteritems():
        SubredditsByPartialName._set_values(name_prefix, {'tups': subreddits})
Example #29
def backfill_campaign_targets():
    from r2.lib.db.operators import desc
    from r2.lib.utils import fetch_things2

    q = PromoCampaign._query(sort=desc("_date"), data=True)
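    # give every existing campaign a Target, falling back to the frontpage
    # when sr_name is empty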
    for campaign in fetch_things2(q):
        sr_name = campaign.sr_name or Frontpage.name
        campaign.target = Target(sr_name)
        campaign._commit()
Example #30
    def get_reported_authors(cls, time = None, sort = None):
        reports = {}
        for t_cls in (Link, Comment, Message):
            q = t_cls._query(t_cls.c._spam == False,
                             t_cls.c.reported > 0,
                             data = True)
            q._sort = desc("_date")
            if time:
                q._filter(time)
            reports.update(Report.reported(things = list(q), amount = 0))

        # at this point we have a full list of reports made in the specified
        # interval; build up a per-author summary of them
        authors = Account._byID([k[1].author_id 
                                 for k, v in reports.iteritems()],
                                data = True) if reports else []

        # and build up a report on each author
        author_rep = {}
        for (tattler, thing, amount), r in reports.iteritems():
            aid = thing.author_id
            if not author_rep.get(aid):
                author_rep[aid] = Storage(author = authors[aid])
                author_rep[aid].num_reports = 1
                author_rep[aid].acct_correct = tattler.report_correct
                author_rep[aid].acct_wrong = tattler.report_ignored
                author_rep[aid].most_recent = r._date
                author_rep[aid].reporters = set([tattler])
            else:
                author_rep[aid].num_reports += 1
                author_rep[aid].acct_correct += tattler.report_correct
                author_rep[aid].acct_wrong += tattler.report_ignored
                if author_rep[aid].most_recent < r._date:
                    author_rep[aid].most_recent = r._date
                author_rep[aid].reporters.add(tattler)
                
        authors = author_rep.values()
        if sort == "hot":
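            # rank reported authors by their reporters' track record: the
            # fraction of those reporters' past reports judged correct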
            def report_hotness(a):
                return float(a.acct_correct) / max(a.acct_wrong + a.acct_correct, 1)
            def better_reporter(a, b):
                q = report_hotness(b) - report_hotness(a)
                if q == 0:
                    return b.acct_correct - a.acct_correct
                else:
                    return 1 if q > 0 else -1
            authors.sort(better_reporter)
        if sort == "top":
            authors.sort(lambda x, y: y.num_reports - x.num_reports)
        elif sort == "new":
            def newer_reporter(a, b):
                t = b.most_recent - a.most_recent
                t0 = datetime.timedelta(0)
                return 1 if t > t0 else -1 if t < t0 else 0
            authors.sort(newer_reporter)
        return authors
Example #31
    def gen_keys():
        yield promoted_memo_key

        # just let this one do its own writing
        load_all_reddits()

        yield queries.get_all_comments().iden

        l_q = Link._query(
            Link.c._spam == (True, False),
            Link.c._deleted == (True, False),
            sort=desc('_date'),
            data=True,
        )
        for link in fetch_things2(l_q, verbosity):
            yield comments_key(link._id)
            yield last_modified_key(link, 'comments')

        a_q = Account._query(
            Account.c._spam == (True, False),
            sort=desc('_date'),
        )
        for account in fetch_things2(a_q, verbosity):
            yield messages_key(account._id)
            yield last_modified_key(account, 'overview')
            yield last_modified_key(account, 'commented')
            yield last_modified_key(account, 'submitted')
            yield last_modified_key(account, 'liked')
            yield last_modified_key(account, 'disliked')
            yield queries.get_comments(account, 'new', 'all').iden
            yield queries.get_submitted(account, 'new', 'all').iden
            yield queries.get_liked(account).iden
            yield queries.get_disliked(account).iden
            yield queries.get_hidden(account).iden
            yield queries.get_saved(account).iden
            yield queries.get_inbox_messages(account).iden
            yield queries.get_unread_messages(account).iden
            yield queries.get_inbox_comments(account).iden
            yield queries.get_unread_comments(account).iden
            yield queries.get_inbox_selfreply(account).iden
            yield queries.get_unread_selfreply(account).iden
            yield queries.get_sent(account).iden

        sr_q = Subreddit._query(
            Subreddit.c._spam == (True, False),
            sort=desc('_date'),
        )
        for sr in fetch_things2(sr_q, verbosity):
            yield last_modified_key(sr, 'stylesheet_contents')
            yield queries.get_links(sr, 'hot', 'all').iden
            yield queries.get_links(sr, 'new', 'all').iden

            for sort in 'top', 'controversial':
                for time in 'hour', 'day', 'week', 'month', 'year', 'all':
                    yield queries.get_links(sr,
                                            sort,
                                            time,
                                            merge_batched=False).iden
            yield queries.get_spam_links(sr).iden
            yield queries.get_spam_comments(sr).iden
            yield queries.get_reported_links(sr).iden
            yield queries.get_reported_comments(sr).iden
            yield queries.get_subreddit_messages(sr).iden
            yield queries.get_unread_subreddit_messages(sr).iden
Example #32
 def by_sr_cache(cls, sr_id):
     q = AdSR._query(AdSR.c._thing2_id == sr_id,
                     sort = desc('_date'))
     q._limit = 500
     return [ t._id for t in q ]
Example #33
 def by_award_cache(cls, award_id):
     q = Trophy._query(Trophy.c._thing2_id == award_id, sort=desc('_date'))
     q._limit = 50
     return [t._id for t in q]
Example #34
 def by_account_cache(cls, account_id):
     q = Trophy._query(Trophy.c._thing1_id == account_id,
                       sort=desc('_date'))
     q._limit = 500
     return [t._id for t in q]
Example #35
import urllib2

from pylons import g

from r2.lib.db.operators import desc
from r2.lib.utils import fetch_things2
from r2.lib.media import upload_media
from r2.models.subreddit import Subreddit
from r2.models.wiki import WikiPage, ImagesByWikiPage

all_subreddits = Subreddit._query(sort=desc("_date"))
for sr in fetch_things2(all_subreddits):
    images = sr.images.copy()
    images.pop("/empties/", None)

    if not images:
        continue

    print 'Processing /r/%s (id36: %s)' % (sr.name, sr._id36)

    # upgrade old-style image ids to urls
    for name, image_url in images.items():
        if not isinstance(image_url, int):
            continue

        print "  upgrading image %r" % image_url
Example #36
class SortMenu(NavMenu):
    name = 'sort'
    hidden_options = []
    button_cls = QueryButton

    # these are _ prefixed to avoid colliding with NavMenu attributes
    _default = 'hot'
    _options = ('hot', 'new', 'top', 'old', 'controversial')
    _type = 'lightdrop'
    _title = N_("sorted by")

    def __init__(self,
                 default=None,
                 title='',
                 base_path='',
                 separator='|',
                 _id='',
                 css_class=''):
        options = self.make_buttons()
        default = default or self._default
        base_path = base_path or request.path
        title = title or _(self._title)
        NavMenu.__init__(self,
                         options,
                         default=default,
                         title=title,
                         type=self._type,
                         base_path=base_path,
                         separator=separator,
                         _id=_id,
                         css_class=css_class)

    def make_buttons(self):
        buttons = []
        for name in self._options:
            css_class = 'hidden' if name in self.hidden_options else ''
            button = self.button_cls(self.make_title(name),
                                     name,
                                     self.name,
                                     css_class=css_class)
            buttons.append(button)
        return buttons

    def make_title(self, attr):
        return menu[attr]

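    # canonical two-way mapping between sort names and query operators;
    # operator() and sort() below are inverse lookups over it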
    _mapping = {
        "hot": operators.desc('_hot'),
        "new": operators.desc('_date'),
        "old": operators.asc('_date'),
        "top": operators.desc('_score'),
        "controversial": operators.desc('_controversy'),
        "confidence": operators.desc('_confidence'),
        "random": operators.shuffled('_confidence'),
        "qa": operators.desc('_qa'),
    }
    _reverse_mapping = {v: k for k, v in _mapping.iteritems()}

    @classmethod
    def operator(cls, sort):
        return cls._mapping.get(sort)

    @classmethod
    def sort(cls, operator):
        return cls._reverse_mapping.get(operator)
Example #37
def get_all_comments():
    """the master /comments page"""
    q = Comment._query(sort=desc('_date'))
    return make_results(q)
Example #38
 def operator(self, sort):
     if sort == 'new':
         return operators.desc('_date')
Example #39
def get_sent(user_id):
    return Message._query(Message.c.author_id == user_id,
                          Message.c._spam == (True, False),
                          sort=desc('_date'))
Example #40
def _get_sr_comments(sr_id):
    """the subreddit /r/foo/comments page"""
    q = Comment._query(Comment.c.sr_id == sr_id, sort=desc('_date'))
    return make_results(q)
Example #41
    def GET_editreddit(self, location, num, after, reverse, count):
        """Edit reddit form."""
        if isinstance(c.site, FakeSubreddit):
            return self.abort404()

        # moderator is either reddit's moderator or an admin
        is_moderator = (c.user_is_loggedin and c.site.is_moderator(c.user)
                        or c.user_is_admin)

        if is_moderator and location == 'edit':
            pane = CreateSubreddit(site=c.site,
                                   listings=ListingController.listing_names())
        elif location == 'moderators':
            pane = ModList(editable=is_moderator)
        elif is_moderator and location == 'banned':
            pane = BannedList(editable=is_moderator)
        elif location == 'contributors' and c.site.type != 'public':
            pane = ContributorList(editable=is_moderator)
        elif (location == 'stylesheet' and c.site.can_change_stylesheet(c.user)
              and not g.css_killswitch):
            if (hasattr(c.site, 'stylesheet_contents_user')
                    and c.site.stylesheet_contents_user):
                stylesheet_contents = c.site.stylesheet_contents_user
            elif (hasattr(c.site, 'stylesheet_contents')
                    and c.site.stylesheet_contents):
                stylesheet_contents = c.site.stylesheet_contents
            else:
                stylesheet_contents = ''
            pane = SubredditStylesheet(site=c.site,
                                       stylesheet_contents=stylesheet_contents)
        elif is_moderator and location == 'reports':
            links = Link._query(Link.c.reported != 0, Link.c._spam == False)
            comments = Comment._query(Comment.c.reported != 0,
                                      Comment.c._spam == False)
            query = thing.Merge((links, comments),
                                Link.c.sr_id == c.site._id,
                                sort=desc('_date'),
                                data=True)

            builder = QueryBuilder(query,
                                   num=num,
                                   after=after,
                                   count=count,
                                   reverse=reverse,
                                   wrap=ListingController.builder_wrapper)
            listing = LinkListing(builder)
            pane = listing.listing()

        elif is_moderator and location == 'spam':
            links = Link._query(Link.c._spam == True)
            comments = Comment._query(Comment.c._spam == True)
            query = thing.Merge((links, comments),
                                Link.c.sr_id == c.site._id,
                                sort=desc('_date'),
                                data=True)

            builder = QueryBuilder(query,
                                   num=num,
                                   after=after,
                                   count=count,
                                   reverse=reverse,
                                   wrap=ListingController.builder_wrapper)
            listing = LinkListing(builder)
            pane = listing.listing()
        else:
            return self.abort404()

        return EditReddit(content=pane).render()
Example #42
 def by_ad_cache(cls, ad_id):
     q = AdSR._query(AdSR.c._thing1_id == ad_id,
                     sort = desc('_date'))
     q._limit = 500
     return [ t._id for t in q ]
Example #43
 def prev_in_promoted(self):
     q = self._link_nav_query(Link.c.blessed == True,
                              operators.desc('_date'))
     return self._link_for_query(q)
Example #44
 def prev_link(self):
     q = self._link_nav_query(sort=operators.desc('_date'))
     return self._link_for_query(q)
Example #45
def get_sent(user):
    q = Message._query(Message.c.author_id == user._id,
                       Message.c._spam == (True, False),
                       sort=desc('_date'))
    return make_results(q)
Example #46
 def prev_by_author(self):
     q = self._link_nav_query(Link.c.author_id == self.author_id,
                              operators.desc('_date'))
     return self._link_for_query(q)
Example #47
 def _all_global_bans_cache(cls):
     # g.log.warning("!!! dbg: _all_global_bans_cache was flushed, running a db query")
     return [ a._id for a in GlobalBan._query(sort=desc('_date'), limit=5000) ]
Example #48
 def prev_by_tag(self, tag):
     return self._next_link_for_tag(tag, operators.desc('_t1_date'))
Example #49
 def prev_in_top(self):
     q = self._link_nav_query(Link.c.top_link == True,
                              operators.desc('_date'))
     return self._link_for_query(q)
Example #50
def get_sr_counts():
    srs = utils.fetch_things2(Subreddit._query(sort=desc("_date")))

    return dict((sr._fullname, sr._ups) for sr in srs)
Example #51
 def _all_ads_cache(cls):
     return [ a._id for a in Ad._query(sort=desc('_date'), limit=1000) ]