Example #1
    def query(self):
        #no need to worry when working from the cache
        if g.use_query_cache or isinstance(c.site, DefaultSR):
            self.fix_listing = False

        if isinstance(c.site, DefaultSR):
            if c.user_is_loggedin:
                srlimit = Subreddit.DEFAULT_LIMIT
                over18 = c.user.has_subscribed and c.over18
            else:
                srlimit = g.num_default_reddits
                over18 = False

            sr_ids = Subreddit.user_subreddits(c.user,
                                               limit=srlimit,
                                               over18=over18)
            return normalized_hot(sr_ids)

        elif isinstance(c.site, MultiReddit):
            return normalized_hot(c.site.kept_sr_ids, obey_age_limit=False)

        #if not using the query_cache we still want cached front pages
        elif (not g.use_query_cache and not isinstance(c.site, FakeSubreddit)
              and self.after is None and self.count == 0):
            return get_hot([c.site])
        else:
            return c.site.get_links('hot', 'all')
Example #2
    def query(self):
        #no need to worry when working from the cache
        if g.use_query_cache or isinstance(c.site, DefaultSR):
            self.fix_listing = False

        if isinstance(c.site, DefaultSR):
            if c.user_is_loggedin:
                srlimit = Subreddit.DEFAULT_LIMIT
                over18 = c.user.has_subscribed and c.over18
            else:
                srlimit = g.num_default_reddits
                over18 = False

            sr_ids = Subreddit.user_subreddits(c.user,
                                               limit=srlimit,
                                               over18=over18)
            return normalized_hot(sr_ids)

        elif isinstance(c.site, MultiReddit):
            return normalized_hot(c.site.kept_sr_ids, obey_age_limit=False)

        #if not using the query_cache we still want cached front pages
        elif (not g.use_query_cache
              and not isinstance(c.site, FakeSubreddit)
              and self.after is None
              and self.count == 0):
            return get_hot([c.site])
        else:
            return c.site.get_links('hot', 'all')
Example #3
    def query(self):
        if isinstance(c.site, DefaultSR):
            if c.user_is_loggedin:
                srlimit = Subreddit.DEFAULT_LIMIT
                over18 = c.user.has_subscribed and c.over18
            else:
                srlimit = g.num_default_reddits
                over18 = False

            sr_ids = Subreddit.user_subreddits(c.user,
                                               limit=srlimit,
                                               over18=over18)
            return normalized_hot(sr_ids)

        elif isinstance(c.site, MultiReddit):
            return normalized_hot(c.site.kept_sr_ids, obey_age_limit=False)
        else:
            if c.site.sticky_fullname:
                link_list = [c.site.sticky_fullname]
                wrapped = wrap_links(link_list,
                                     wrapper=self.builder_wrapper,
                                     keep_fn=self.keep_fn(),
                                     skip=True)
                # add all other items and decrement count if sticky is visible
                if wrapped.things:
                    link_list += [l for l in c.site.get_links('hot', 'all')
                                    if l != c.site.sticky_fullname]
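                    # on the first page the sticky occupies a slot, so
                    # grow num by one and back off count to compensate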
                    if not self.after:
                        self.count -= 1
                        self.num += 1
                    return link_list

            # no sticky or sticky hidden
            return c.site.get_links('hot', 'all')
Example #4
def find_preview_links(sr):
    from r2.lib.normalized_hot import normalized_hot

    # try to find a link to use, otherwise give up and return
    links = normalized_hot([sr._id])
    if not links:
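        # fall back to the default front-page subreddits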
        links = normalized_hot(Subreddit.default_subreddits())

    if links:
        links = links[:25]
        links = Link._by_fullname(links, data=True, return_dict=False)

    return links
Example #5
def find_preview_links(sr):
    from r2.lib.normalized_hot import normalized_hot

    # try to find a link to use, otherwise give up and return
    links = normalized_hot([sr._id])
    if not links:
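        # fall back to the default front-page subreddits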
        links = normalized_hot(Subreddit.default_subreddits())

    if links:
        links = links[:25]
        links = Link._by_fullname(links, data=True, return_dict=False)

    return links
Example #6
    def query(self):
        # anonymous requests fall back to the default subreddit set
        user = c.user if c.user_is_loggedin else None
        srs = Subreddit._byID(Subreddit.user_subreddits(user),
                              data=True,
                              return_dict=False)
        links = normalized_hot(srs)
        return links
Example #7
def get_hot_items(srs, item_type, src):
    """Get hot links from specified srs."""
    hot_srs = {sr._id: sr for sr in srs}  # for looking up sr by id
    hot_link_fullnames = normalized_hot([sr._id for sr in srs])
    hot_links = Link._by_fullname(hot_link_fullnames, return_dict=False)
    hot_items = []
    for l in hot_links:
        hot_items.append(ExploreItem(item_type, src, hot_srs[l.sr_id], l))
    return hot_items
Example #8
def get_hot_items(srs, item_type, src):
    """Get hot links from specified srs."""
    hot_srs = {sr._id: sr for sr in srs}  # for looking up sr by id
    hot_link_fullnames = normalized_hot(sr._id for sr in srs)
    hot_links = Link._by_fullname(hot_link_fullnames, return_dict=False)
    hot_items = []
    for l in hot_links:
        hot_items.append(ExploreItem(item_type, src, hot_srs[l.sr_id], l))
    return hot_items
Example #9
    def query(self):
        # no need to worry when working from the cache
        # TODO: just remove this then since we're always using the query cache
        self.fix_listing = False

        if isinstance(c.site, DefaultSR):
            if c.user_is_loggedin:
                srlimit = Subreddit.DEFAULT_LIMIT
                over18 = c.user.has_subscribed and c.over18
            else:
                srlimit = g.num_default_reddits
                over18 = False

            sr_ids = Subreddit.user_subreddits(c.user, limit=srlimit, over18=over18)
            return normalized_hot(sr_ids)

        elif isinstance(c.site, MultiReddit):
            return normalized_hot(c.site.kept_sr_ids, obey_age_limit=False)
        else:
            return c.site.get_links("hot", "all")
Example #10
    def query(self):

        if isinstance(c.site, DefaultSR):
            sr_ids = Subreddit.user_subreddits(c.user)
            return normalized_hot(sr_ids)
        elif isinstance(c.site, MultiReddit):
            return normalized_hot(c.site.kept_sr_ids, obey_age_limit=False)
        else:
            if c.site.sticky_fullname:
                link_list = [c.site.sticky_fullname]
                wrapped = wrap_links(link_list, wrapper=self.builder_wrapper, keep_fn=self.keep_fn(), skip=True)
                # add all other items and decrement count if sticky is visible
                if wrapped.things:
                    link_list += [l for l in c.site.get_links("hot", "all") if l != c.site.sticky_fullname]
                    if not self.after:
                        self.count -= 1
                        self.num += 1
                    return link_list

            # no sticky or sticky hidden
            return c.site.get_links("hot", "all")
Example #11
    def query(self):
        #no need to worry when working from the cache
        # TODO: just remove this then since we're always using the query cache
        self.fix_listing = False

        if isinstance(c.site, DefaultSR):
            if c.user_is_loggedin:
                srlimit = Subreddit.DEFAULT_LIMIT
                over18 = c.user.has_subscribed and c.over18
            else:
                srlimit = g.num_default_reddits
                over18 = False

            sr_ids = Subreddit.user_subreddits(c.user,
                                               limit=srlimit,
                                               over18=over18)
            return normalized_hot(sr_ids)

        elif isinstance(c.site, MultiReddit):
            return normalized_hot(c.site.kept_sr_ids, obey_age_limit=False)
        else:
            if c.site.sticky_fullname:
                link_list = [c.site.sticky_fullname]
                wrapped = wrap_links(link_list,
                                     wrapper=self.builder_wrapper,
                                     keep_fn=self.keep_fn(),
                                     skip=True)
                # add all other items and decrement count if sticky is visible
                if wrapped.things:
                    link_list += [
                        l for l in c.site.get_links('hot', 'all')
                        if l != c.site.sticky_fullname
                    ]
                    if not self.after:
                        self.count -= 1
                        self.num += 1
                    return link_list

            # no sticky or sticky hidden
            return c.site.get_links('hot', 'all')
Example #12
    def query(self):
        #no need to worry when working from the cache
        # TODO: just remove this then since we're always using the query cache
        self.fix_listing = False

        if isinstance(c.site, DefaultSR):
            if c.user_is_loggedin:
                srlimit = Subreddit.DEFAULT_LIMIT
                over18 = c.user.has_subscribed and c.over18
            else:
                srlimit = g.num_default_reddits
                over18 = False

            sr_ids = Subreddit.user_subreddits(c.user,
                                               limit=srlimit,
                                               over18=over18)
            return normalized_hot(sr_ids)

        elif isinstance(c.site, MultiReddit):
            return normalized_hot(c.site.kept_sr_ids, obey_age_limit=False)
        else:
            return c.site.get_links('hot', 'all')
Example #13
    def query(self):
        # no need to worry when working from the cache
        if g.use_query_cache or c.site == Default:
            self.fix_listing = False

        if c.site == Default:
            sr_ids = Subdigg.user_subreddits(c.user)
            return normalized_hot(sr_ids)
        # if not using the query_cache we still want cached front pages
        elif (not g.use_query_cache and not isinstance(c.site, FakeSubdigg)
              and self.after is None and self.count == 0):
            return [l._fullname for l in get_hot(c.site)]
        else:
            return c.site.get_links("hot", "all")
Example #14
    def query(self):
        #no need to worry when working from the cache
        if g.use_query_cache or c.site == Default:
            self.fix_listing = False

        if c.site == Default:
            sr_ids = Subreddit.user_subreddits(c.user)
            return normalized_hot(sr_ids)
        #if not using the query_cache we still want cached front pages
        elif (not g.use_query_cache and not isinstance(c.site, FakeSubreddit)
              and self.after is None and self.count == 0):
            return get_hot([c.site], only_fullnames=True)[0]
        else:
            return c.site.get_links('hot', 'all')
Example #15
def get_comment_items(srs, src, count=4):
    """Get hot links from srs, plus top comment from each link."""
    link_fullnames = normalized_hot([sr._id for sr in srs])
    hot_links = Link._by_fullname(link_fullnames[:count], return_dict=False)
    top_comments = []
    for link in hot_links:
        builder = CommentBuilder(
            link, operators.desc("_confidence"), comment=None, context=None, num=1, load_more=False
        )
        listing = NestedListing(builder, parent_name=link._fullname).listing()
        top_comments.extend(listing.things)
    srs = Subreddit._byID([com.sr_id for com in top_comments])
    links = Link._byID([com.link_id for com in top_comments])
    comment_items = [ExploreItem(TYPE_COMMENT, src, srs[com.sr_id], links[com.link_id], com) for com in top_comments]
    return comment_items
Example #16
    def query(self):
        #no need to worry when working from the cache
        if g.use_query_cache or c.site == Default:
            self.fix_listing = False

        if c.site == Default:
            sr_ids = Subreddit.user_subreddits(c.user)
            return normalized_hot(sr_ids)
        #if not using the query_cache we still want cached front pages
        elif (not g.use_query_cache
              and not isinstance(c.site, FakeSubreddit)
              and self.after is None
              and self.count == 0):
            return get_hot([c.site], only_fullnames = True)[0]
        else:
            return c.site.get_links('hot', 'all')
Example #17
    def query(self):
        #no need to worry when working from the cache
        if g.use_query_cache or c.site == Default:
            self.fix_listing = False

        if c.site == Default:
            sr_ids = Subreddit.user_subreddits(c.user,
                                               limit=(Subreddit.sr_limit
                                                      if c.user_is_loggedin
                                                      else g.num_default_reddits))
            return normalized_hot(sr_ids)
        #if not using the query_cache we still want cached front pages
        elif (not g.use_query_cache
              and not isinstance(c.site, FakeSubreddit)
              and self.after is None
              and self.count == 0):
            return get_hot([c.site])
        else:
            return c.site.get_links('hot', 'all')
Example #18
    def query(self):
        if c.site == Default:
            self.fix_listing = False
            self.builder_cls = IDBuilder
            user = c.user if c.user_is_loggedin else None
            sr_ids = Subreddit.user_subreddits(user)
            links = normalized_hot(sr_ids)
            return links
        elif (not isinstance(c.site, FakeSubreddit)
              and self.after is None
              and self.count == 0):
            # first page of a real subreddit: use the precomputed hot links
            self.builder_cls = IDBuilder
            links = [l._fullname for l in get_hot(c.site)]
            return links
        else:
            # otherwise run a live query ordered by hotness
            q = Link._query(sort=desc('_hot'), *c.site.query_rules())
            q._read_cache = True
            self.collect_stats = True
            return q
Example #19
def get_comment_items(srs, src, count=4):
    """Get hot links from srs, plus top comment from each link."""
    link_fullnames = normalized_hot([sr._id for sr in srs])
    hot_links = Link._by_fullname(link_fullnames[:count], return_dict=False)
    top_comments = []
    for link in hot_links:
        builder = CommentBuilder(link,
                                 operators.desc('_confidence'),
                                 comment=None,
                                 context=None,
                                 load_more=False)
        listing = NestedListing(builder, num=1,
                                parent_name=link._fullname).listing()
        top_comments.extend(listing.things)
    srs = Subreddit._byID([com.sr_id for com in top_comments])
    links = Link._byID([com.link_id for com in top_comments])
    comment_items = [
        ExploreItem(TYPE_COMMENT, src, srs[com.sr_id], links[com.link_id], com)
        for com in top_comments
    ]
    return comment_items
Example #20
def cached_organic_links(*sr_ids):
    sr_count = count.get_link_counts()
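    # get_link_counts maps link fullname -> (count, sr_id)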
    #only use links from reddits that you're subscribed to
    link_names = filter(lambda n: sr_count[n][1] in sr_ids, sr_count.keys())
    link_names.sort(key=lambda n: sr_count[n][0])

    if not link_names and g.debug:
        q = All.get_links('new', 'all')
        q._limit = 100  # this decomposes to a _query
        link_names = [x._fullname for x in q if x.promoted is None]
        g.log.debug('Used inorganic links')

    #potentially add an up and coming link
    if random.choice((True, False)) and sr_ids:
        sr_id = random.choice(sr_ids)
        fnames = normalized_hot([sr_id])
        if fnames:
            if len(fnames) == 1:
                new_item = fnames[0]
            else:
                new_item = random.choice(fnames[1:4])
            link_names.insert(0, new_item)

    return link_names
Example #21
def cached_organic_links(*sr_ids):
    sr_count = count.get_link_counts()
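    # get_link_counts maps link fullname -> (count, sr_id)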
    #only use links from reddits that you're subscribed to
    link_names = filter(lambda n: sr_count[n][1] in sr_ids, sr_count.keys())
    link_names.sort(key = lambda n: sr_count[n][0])

    if not link_names and g.debug:
        q = All.get_links('new', 'all')
        q._limit = 100 # this decomposes to a _query
        link_names = [x._fullname for x in q if x.promoted is None]
        g.log.debug('Used inorganic links')

    #potentially add an up and coming link
    if random.choice((True, False)) and sr_ids:
        sr_id = random.choice(sr_ids)
        fnames = normalized_hot([sr_id])
        if fnames:
            if len(fnames) == 1:
                new_item = fnames[0]
            else:
                new_item = random.choice(fnames[1:4])
            link_names.insert(0, new_item)

    return link_names