def calc_rising():
    sr_count = count.get_link_counts()
    link_count = dict((k, v[0]) for k, v in sr_count.iteritems())
    link_names = Link._by_fullname(sr_count.keys(), data=True)

    #max is half the average of the top 10 counts
    counts = link_count.values()
    counts.sort(reverse=True)
    maxcount = sum(counts[:10]) / 20

    #prune the list to links seen fewer than maxcount times
    rising = [(n, link_names[n].sr_id)
              for n in link_names.keys()
              if link_count[n] < maxcount]

    cur_time = datetime.now(g.tz)

    def score(pair):
        name = pair[0]
        link = link_names[name]
        # age in hours; note that timedelta.seconds only covers the sub-day
        # part of the delta, so links older than a day wrap back to small ages
        hours = (cur_time - link._date).seconds / 3600 + 1
        return float(link._ups) / (max(link_count[name], 1) * hours)

    def r(x):
        return 1 if x > 0 else -1 if x < 0 else 0

    #sort descending by score
    rising.sort(lambda x, y: r(score(y) - score(x)))
    return rising
def calc_rising():
    #As far as I can tell this can only ever return a series of 0's, as that
    #is what is hard-coded in... in which case nothing should ever be rising
    #unless I explicitly make it so.
    sr_count = count.get_link_counts()
    link_count = dict((k, v[0]) for k, v in sr_count.iteritems())
    link_names = Link._by_fullname(sr_count.keys(), data=True)

    #max is half the average of the top (up to 10) counts
    counts = link_count.values()
    counts.sort(reverse=True)
    maxcount = sum(counts[:10]) / (2. * min(10, len(counts)))

    #prune the list (debug output left in while investigating the empty result)
    print link_count
    print link_names
    print maxcount
    rising = [(n, link_names[n].sr_id)
              for n in link_names.keys()
              if link_count[n] < maxcount]
    print rising

    cur_time = datetime.now(g.tz)

    def score(pair):
        name = pair[0]
        link = link_names[name]
        hours = (cur_time - link._date).seconds / 3600 + 1
        return float(link._ups) / (max(link_count[name], 1) * hours)

    def r(x):
        return 1 if x > 0 else -1 if x < 0 else 0

    rising.sort(lambda x, y: r(score(y) - score(x)))
    return rising
def calc_rising():
    link_counts = count.get_link_counts()
    links = Link._by_fullname(link_counts.keys(), data=True)

    def score(link):
        count = link_counts[link._fullname][0]
        return float(link._ups) / max(count, 1)

    # build the rising list, excluding items with one upvote or fewer
    rising = []
    for link in links.values():
        if link._ups > 1:
            rising.append((link._fullname, score(link), link.sr_id))

    # return rising sorted by score, highest first
    return sorted(rising, key=lambda x: x[1], reverse=True)
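# A minimal, self-contained sketch with invented numbers (nothing here comes
# from the real data) contrasting the two scoring formulas above. "count" is
# the per-link figure from get_link_counts(), assumed to be how many times
# the link was shown: the first two versions rank by upvotes per count per
# hour of age, while the rewrite drops the time factor entirely.
def _rising_score_sketch():
    # fullname -> (count, ups, age_hours); hypothetical example values
    links = {'t3_a': (100, 50, 2),
             't3_b': (1000, 60, 2),
             't3_c': (10, 8, 1)}

    def timed_score(count, ups, hours):
        return float(ups) / (max(count, 1) * hours)

    def simple_score(count, ups, hours):
        return float(ups) / max(count, 1)

    for fn in (timed_score, simple_score):
        ranked = sorted(links, key=lambda n: fn(*links[n]), reverse=True)
        # both formulas rank t3_c first here: low exposure, high upvotes
        print fn.__name__, ranked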
def cached_organic_links(user_id, langs):
    if user_id is None:
        sr_ids = Subreddit.default_subreddits()
    else:
        user = Account._byID(user_id, data=True)
        sr_ids = Subreddit.user_subreddits(user)

    sr_count = count.get_link_counts()
    #only use links from reddits that you're subscribed to
    link_names = filter(lambda n: sr_count[n][1] in sr_ids, sr_count.keys())
    link_names.sort(key=lambda n: sr_count[n][0])

    #potentially add an up-and-coming link
    if random.choice((True, False)) and sr_ids:
        sr = Subreddit._byID(random.choice(sr_ids))
        items = only_recent(get_hot(sr))
        if items:
            if len(items) == 1:
                new_item = items[0]
            else:
                new_item = random.choice(items[1:4])
            link_names.insert(0, new_item._fullname)

    # remove any that the user has acted on
    builder = IDBuilder(link_names, skip=True,
                        keep_fn=keep_link, num=organic_length)
    link_names = [x._fullname for x in builder.get_items()[0]]

    #if not logged in, don't reset the count. if we did that we might get in a
    #cycle where the cache will return the same link over and over
    if user_id:
        update_pos(0)

    insert_promoted(link_names, sr_ids, user_id is not None)

    # remove any duplicates caused by insert_promoted if the user is logged in
    if user_id:
        link_names = list(UniqueIterator(link_names))

    return link_names
def cached_organic_links(user_id, langs):
    if user_id is None:
        sr_ids = Subreddit.default_srs(langs, ids=True)
    else:
        user = Account._byID(user_id, data=True)
        sr_ids = Subreddit.user_subreddits(user)

    sr_count = count.get_link_counts()
    #only use links from reddits that you're subscribed to
    link_names = filter(lambda n: sr_count[n][1] in sr_ids, sr_count.keys())
    link_names.sort(key=lambda n: sr_count[n][0])

    #potentially add an up-and-coming link
    if random.choice((True, False)) and sr_ids:
        sr = Subreddit._byID(random.choice(sr_ids))
        items = only_recent(get_hot(sr))
        if items:
            if len(items) == 1:
                new_item = items[0]
            else:
                new_item = random.choice(items[1:4])
            link_names.insert(0, new_item._fullname)

    # remove any that the user has acted on
    builder = IDBuilder(link_names, skip=True,
                        keep_fn=keep_link, num=organic_length)
    link_names = [x._fullname for x in builder.get_items()[0]]

    calculation_key = str(time())
    update_pos(0, calculation_key)

    insert_promoted(link_names, sr_ids, user_id is not None)

    # remove any duplicates caused by insert_promoted
    ret = list(UniqueIterator(link_names))

    return (calculation_key, ret)
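# The UniqueIterator used above is a reddit utility; this is only a hedged
# stand-in assuming the behavior the duplicate-removal step relies on:
# yield each item once, in first-seen order.
def unique_iterator_sketch(iterable):
    seen = set()
    for item in iterable:
        if item not in seen:
            seen.add(item)
            yield item

# usage: list(unique_iterator_sketch(['t3_a', 't3_b', 't3_a']))
# -> ['t3_a', 't3_b']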
def cached_organic_links(*sr_ids):
    sr_count = count.get_link_counts()
    #only use links from reddits that you're subscribed to
    link_names = filter(lambda n: sr_count[n][1] in sr_ids, sr_count.keys())
    link_names.sort(key=lambda n: sr_count[n][0])

    if not link_names and g.debug:
        q = All.get_links('new', 'all')
        q._limit = 100  # this decomposes to a _query
        link_names = [x._fullname for x in q if x.promoted is None]
        g.log.debug('Used inorganic links')

    #potentially add an up-and-coming link
    if random.choice((True, False)) and sr_ids:
        sr_id = random.choice(sr_ids)
        fnames = normalized_hot([sr_id])
        if fnames:
            if len(fnames) == 1:
                new_item = fnames[0]
            else:
                new_item = random.choice(fnames[1:4])
            link_names.insert(0, new_item)

    return link_names
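# Illustration of the get_link_counts() shape these functions all assume:
# a dict of fullname -> (count, sr_id), with hypothetical values showing how
# the subscription filter and the ascending count sort behave.
def _organic_filter_sketch():
    sr_count = {'t3_a': (12, 1), 't3_b': (3, 2), 't3_c': (7, 1)}
    sr_ids = (1,)  # pretend the user is subscribed to sr 1 only
    link_names = filter(lambda n: sr_count[n][1] in sr_ids, sr_count.keys())
    link_names.sort(key=lambda n: sr_count[n][0])
    print link_names  # ['t3_c', 't3_a']: lowest-count subscribed link first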