Example #1
def find_preview_links(sr):
    from r2.lib.normalized_hot import get_hot

    # try to find a link to use, otherwise give up and return
    links = get_hot(c.site)
    if not links:
        sr = Subreddit._by_name(g.default_sr)
        if sr:
            links = get_hot(sr)

    return links
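
Every revision of find_preview_links below follows the same shape: ask get_hot for the hot listing of the current site and, if that comes back empty, retry against a default subreddit. A minimal self-contained sketch of that fallback pattern, with get_hot and the site objects stubbed out (the stubs are illustrative only; the real implementations live in r2.lib.normalized_hot and the r2 models):

# Stand-alone sketch; the dict-based sites and this get_hot are hypothetical stubs.
def get_hot(site):
    # the real get_hot returns the site's precomputed hot listing
    return list(site.get("hot_links", []))

def find_preview_links(current_site, default_site):
    links = get_hot(current_site)
    if not links:  # nothing hot on this site: fall back to the default
        links = get_hot(default_site)
    return links

print(find_preview_links({}, {"hot_links": ["t3_abc"]}))  # ['t3_abc']
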
Example #2
def find_preview_links(sr):
    from r2.lib.normalized_hot import get_hot

    # try to find a link to use, otherwise give up and return
    links = get_hot([c.site])
    if not links:
        links = get_hot(Subreddit.default_subreddits(ids=False))

    if links:
        links = links[:25]
        links = Link._by_fullname(links, data=True, return_dict=False)

    return links
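
Compared with the first example, this revision also trims the result to the top 25 fullnames and hydrates them into Link objects with Link._by_fullname(links, data=True, return_dict=False) before returning, so callers get renderable links rather than bare ids. The fallback now goes through Subreddit.default_subreddits(ids=False) instead of looking up a single named default subreddit.
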
Example #3
def find_preview_links(sr):
    from r2.lib.normalized_hot import get_hot

    # try to find a link to use, otherwise give up and return
    links = get_hot([c.site], only_fullnames = True)[0]
    if not links:
        sr = Subreddit._by_name(g.default_sr)
        if sr:
            links = get_hot([sr], only_fullnames = True)[0]

    if links:
        links = links[:25]
        links = Link._by_fullname(links, data=True, return_dict=False)

    return links
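
Note the indexing in this version: get_hot([c.site], only_fullnames=True)[0] implies that get_hot here returns one listing per subreddit passed in, so the [0] selects the listing for the single requested site.
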
Example #4
def find_preview_links(sr):
    from r2.lib.normalized_hot import get_hot

    # try to find a link to use, otherwise give up and return
    links = get_hot([c.site], get_children=False)
    if not links:
        sr = Subsciteit._by_name(g.default_sr)
        if sr:
            links = get_hot([sr], get_children=False)

    if links:
        links = links[:25]
        links = Link._by_fullname(links, data=True, return_dict=False)

    return links
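
This variant appears to come from a fork (hence Subsciteit in place of Subreddit); aside from the renamed model class, the only change is that get_hot is called with get_children=False.
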
Example #5
    def query(self):
        #no need to worry when working from the cache
        if g.use_query_cache or isinstance(c.site, DefaultSR):
            self.fix_listing = False

        if isinstance(c.site, DefaultSR):
            if c.user_is_loggedin:
                srlimit = Subreddit.sr_limit
                over18 = c.user.has_subscribed and c.over18
            else:
                srlimit = g.num_default_reddits
                over18 = False

            sr_ids = Subreddit.user_subreddits(c.user,
                                               limit=srlimit,
                                               over18=over18)
            return normalized_hot(sr_ids)
        #if not using the query_cache we still want cached front pages
        elif (not g.use_query_cache
              and not isinstance(c.site, FakeSubreddit)
              and self.after is None
              and self.count == 0):
            return get_hot([c.site])
        else:
            return c.site.get_links('hot', 'all')
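
This and the later query variants branch three ways: on the default front page, gather the user's subreddit ids (capped at Subreddit.sr_limit for logged-in users and at g.num_default_reddits otherwise) and hand them to normalized_hot; for a real subreddit at the first page with query caching disabled, serve the precomputed get_hot listing; in every other case, fall through to a live c.site.get_links('hot', 'all') query.
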
Example #6
    def query(self):
        #no need to worry when working from the cache
        if g.use_query_cache or isinstance(c.site, DefaultSR):
            self.fix_listing = False

        if isinstance(c.site, DefaultSR):
            if c.user_is_loggedin:
                srlimit = Subreddit.DEFAULT_LIMIT
                over18 = c.user.has_subscribed and c.over18
            else:
                srlimit = g.num_default_reddits
                over18 = False

            sr_ids = Subreddit.user_subreddits(c.user,
                                               limit=srlimit,
                                               over18=over18)
            return normalized_hot(sr_ids)

        elif isinstance(c.site, MultiReddit):
            return normalized_hot(c.site.kept_sr_ids, obey_age_limit=False)

        #if not using the query_cache we still want cached front pages
        elif (not g.use_query_cache and not isinstance(c.site, FakeSubreddit)
              and self.after is None and self.count == 0):
            return get_hot([c.site])
        else:
            return c.site.get_links('hot', 'all')
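
This later revision adds a MultiReddit branch that reuses normalized_hot over the multi's kept subreddit ids with obey_age_limit=False, and the subscription cap is now called Subreddit.DEFAULT_LIMIT.
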
Example #7
    def query(self):
        # no need to worry when working from the cache
        if g.use_query_cache or c.site == Default:
            self.fix_listing = False

        if c.site == Default:
            sr_ids = Subdigg.user_subreddits(c.user)
            return normalized_hot(sr_ids)
        # if not using the query_cache we still want cached front pages
        elif not g.use_query_cache and not isinstance(c.site, FakeSubdigg) and self.after is None and self.count == 0:
            return [l._fullname for l in get_hot(c.site)]
        else:
            return c.site.get_links("hot", "all")
Example #8
    def query(self):
        #no need to worry when working from the cache
        if g.use_query_cache or c.site == Default:
            self.fix_listing = False

        if c.site == Default:
            sr_ids = Subreddit.user_subreddits(c.user)
            return normalized_hot(sr_ids)
        #if not using the query_cache we still want cached front pages
        elif (not g.use_query_cache and not isinstance(c.site, FakeSubreddit)
              and self.after is None and self.count == 0):
            return get_hot([c.site], only_fullnames=True)[0]
        else:
            return c.site.get_links('hot', 'all')
Example #9
    def query(self):
        #no need to worry when working from the cache
        if g.use_query_cache or c.site == Default:
            self.fix_listing = False

        if c.site == Default:
            sr_ids = Subreddit.user_subreddits(c.user)
            return normalized_hot(sr_ids)
        #if not using the query_cache we still want cached front pages
        elif (not g.use_query_cache
              and not isinstance(c.site, FakeSubreddit)
              and self.after is None
              and self.count == 0):
            return get_hot([c.site], only_fullnames = True)[0]
        else:
            return c.site.get_links('hot', 'all')
Example #10
    def query(self):
        if c.site == Default:
            self.fix_listing = False
            self.builder_cls = IDBuilder
            user = c.user if c.user_is_loggedin else None
            sr_ids = Subreddit.user_subreddits(user)
            links = normalized_hot(sr_ids)
            return links
        elif (not isinstance(c.site, FakeSubreddit)
              and self.after is None
              and self.count == 0):
            self.builder_cls = IDBuilder
            links = [l._fullname for l in get_hot(c.site)]
            return links
        else:
            q = Link._query(sort = desc('_hot'), *c.site.query_rules())
            q._read_cache = True
            self.collect_stats = True
            return q
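
This variant appears to predate the query cache: the front-page and first-page branches set builder_cls = IDBuilder because they return bare fullnames, while the general case builds a raw Link._query sorted by descending _hot with the read cache enabled and stats collection turned on.
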
Example #11
    def query(self):
        #no need to worry when working from the cache
        if g.use_query_cache or c.site == Default:
            self.fix_listing = False

        if c.site == Default:
            sr_ids = Subreddit.user_subreddits(c.user,
                                               limit=(Subreddit.sr_limit
                                                      if c.user_is_loggedin
                                                      else g.num_default_reddits))
            return normalized_hot(sr_ids)
        #if not using the query_cache we still want cached front pages
        elif (not g.use_query_cache
              and not isinstance(c.site, FakeSubreddit)
              and self.after is None
              and self.count == 0):
            return get_hot([c.site])
        else:
            return c.site.get_links('hot', 'all')
Example #12
def cached_organic_links(user_id, langs):
    if user_id is None:
        sr_ids = Subreddit.default_subreddits()
    else:
        user = Account._byID(user_id, data=True)
        sr_ids = Subreddit.user_subreddits(user)

    sr_count = count.get_link_counts()

    #only use links from reddits that you're subscribed to
    link_names = filter(lambda n: sr_count[n][1] in sr_ids, sr_count.keys())
    link_names.sort(key = lambda n: sr_count[n][0])

    #potentially add an up and coming link
    if random.choice((True, False)) and sr_ids:
        sr = Subreddit._byID(random.choice(sr_ids))
        items = only_recent(get_hot(sr))
        if items:
            if len(items) == 1:
                new_item = items[0]
            else:
                new_item = random.choice(items[1:4])
            link_names.insert(0, new_item._fullname)

    # remove any that the user has acted on
    builder = IDBuilder(link_names,
                        skip = True, keep_fn = keep_link,
                        num = organic_length)
    link_names = [ x._fullname for x in builder.get_items()[0] ]

    #if not logged in, don't reset the count. if we did that we might get in a
    #cycle where the cache will return the same link over and over
    if user_id:
        update_pos(0)

    insert_promoted(link_names, sr_ids, user_id is not None)

    # remove any duplicates caused by insert_promoted if the user is logged in
    if user_id:
        link_names = list(UniqueIterator(link_names))

    return link_names
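
The core of cached_organic_links is the filter-and-sort step over count.get_link_counts(): keep only links whose subreddit id is in the user's set, then order them by the stored count. A toy illustration with invented data, mirroring the (count, subreddit_id) tuple layout the code indexes into (the original is Python 2, where filter returns a list; the list comprehension below behaves the same in either version):

# sr_count maps a link fullname to a (count, subreddit_id) tuple;
# all values here are made up for the illustration.
sr_count = {
    "t3_a": (2, 10),
    "t3_b": (1, 10),
    "t3_c": (5, 99),  # subreddit 99 is not subscribed, so it is dropped
}
sr_ids = [10]

link_names = [n for n in sr_count if sr_count[n][1] in sr_ids]
link_names.sort(key=lambda n: sr_count[n][0])
print(link_names)  # ['t3_b', 't3_a']
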
Example #13
def cached_organic_links(user_id, langs):
    if user_id is None:
        sr_ids = Subreddit.default_srs(langs, ids = True)
    else:
        user = Account._byID(user_id, data=True)
        sr_ids = Subreddit.user_subreddits(user)

    sr_count = count.get_link_counts()

    #only use links from reddits that you're subscribed to
    link_names = filter(lambda n: sr_count[n][1] in sr_ids, sr_count.keys())
    link_names.sort(key = lambda n: sr_count[n][0])

    #potentially add an up and coming link
    if random.choice((True, False)):
        sr = Subreddit._byID(random.choice(sr_ids))
        items = only_recent(get_hot(sr))
        if items:
            if len(items) == 1:
                new_item = items[0]
            else:
                new_item = random.choice(items[1:4])
            link_names.insert(0, new_item._fullname)

    # remove any that the user has acted on
    builder = IDBuilder(link_names,
                        skip = True, keep_fn = keep_link,
                        num = organic_length)
    link_names = [ x._fullname for x in builder.get_items()[0] ]

    calculation_key = str(time())
    update_pos(0, calculation_key)

    insert_promoted(link_names, sr_ids, user_id is not None)

    # remove any duplicates caused by insert_promoted
    ret = [ l for l in UniqueIterator(link_names) ]

    return (calculation_key, ret)
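
This revision threads a calculation_key (a timestamp string) through update_pos and returns it alongside the links, resets the position unconditionally rather than only for logged-in users, and always deduplicates the result.
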
Example #14
def cached_organic_links(*sr_ids):
    sr_count = count.get_link_counts()
    #only use links from reddits that you're subscribed to
    link_names = filter(lambda n: sr_count[n][1] in sr_ids, sr_count.keys())
    link_names.sort(key=lambda n: sr_count[n][0])

    if not link_names and g.debug:
        q = All.get_links('new', 'all')
        q._limit = 100  # this decomposes to a _query
        link_names = [x._fullname for x in q if x.promoted is None]
        g.log.debug('Used inorganic links')

    #potentially add an up and coming link
    if random.choice((True, False)) and sr_ids:
        sr = Subreddit._byID(random.choice(sr_ids))
        fnames = get_hot([sr])
        if fnames:
            if len(fnames) == 1:
                new_item = fnames[0]
            else:
                new_item = random.choice(fnames[1:4])
            link_names.insert(0, new_item)

    return link_names
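
The final *sr_ids form drops the user lookup, position bookkeeping, and promotion handling entirely: callers resolve the subreddit ids up front, and a g.debug-only branch substitutes the newest sitewide links ("inorganic links") when no counted links are available. get_hot now returns fullnames directly, so the up-and-coming pick is inserted as-is rather than via new_item._fullname.
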