def get_subdigg(self):
    """Checks if the current url refers to a subdigg and returns that
    subdigg object.  The cases here are:

      * the hostname is unset or is g.domain, in which case it looks for
        /r/XXXX or /diggs.  The default in this case is Default.
      * the hostname is a cname to a known subdigg.

    On failure to find a subdigg, returns None.
    """
    from pylons import g
    from r2.models import Subdigg, Sub, NotFound, Default

    try:
        if not self.hostname or self.hostname.startswith(g.domain):
            if self.path.startswith('/r/'):
                return Subdigg._by_name(self.path.split('/')[2])
            elif self.path.startswith('/diggs/'):
                return Sub
            else:
                return Default
        elif self.hostname:
            return Subdigg._by_domain(self.hostname)
    except NotFound:
        pass
    return None
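# The resolution order above can be illustrated with a small, self-contained
# sketch.  The helper below is purely hypothetical (it is not part of the
# codebase) and uses plain strings in place of the Subdigg, Sub, and Default
# objects, but it follows the same three cases the docstring describes.
def _resolve_site_sketch(hostname, path, base_domain):
    if not hostname or hostname.startswith(base_domain):
        if path.startswith('/r/'):
            return ('by_name', path.split('/')[2])   # e.g. '/r/pics/new' -> 'pics'
        elif path.startswith('/diggs/'):
            return ('sub', None)                     # the listing-of-subdiggs page
        return ('default', None)                     # front page
    return ('by_domain', hostname)                   # cname to a known subdigg

# e.g. _resolve_site_sketch(None, '/r/pics/new', 'example.com') == ('by_name', 'pics')
#      _resolve_site_sketch('pics.example.org', '/', 'example.com') == ('by_domain', 'pics.example.org')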
def submit_all():
    from pylons import g
    from r2.models import Subdigg, Account, Link, NotFound
    from r2.lib.media import set_media
    from r2.lib.db import queries

    sr = Subdigg._by_name('testmedia')
    author = Account._by_name('testmedia')
    links = []
    # test_urls is assumed to be defined at module level
    for url in test_urls:
        try:
            # delete any existing version of the link
            l = Link._by_url(url, sr)
            print "Deleting %s" % l
            l._deleted = True
            l._commit()
        except NotFound:
            pass

        l = Link._submit(url, url, author, sr, '0.0.0.0')

        try:
            set_media(l)
        except Exception, e:
            print e

        if g.write_query_queue:
            queries.new_link(l)

        links.append(l)

    return links
def update_karmas():
    from r2.models import Account, Subdigg

    # to_update() is assumed to yield (account_id, subdigg_id) pairs
    for pair in to_update():
        user = Account._byID(pair[0], True)
        sr = Subdigg._byID(pair[1], True)
        print user.name, sr.name
        user.incr_karma('comment', sr, 20)
def run():
    from r2.models import Subdigg

    # count is assumed to be the module providing the cached subdigg counts
    sr_counts = count.get_sr_counts()
    names = [k for k, v in sr_counts.iteritems() if v != 0]
    srs = Subdigg._by_fullname(names)

    for name in names:
        sr, c = srs[name], sr_counts[name]
        if c != sr._downs and c > 0:
            sr._downs = max(c, 0)
            sr._commit()

    count.clear_sr_counts(names)
def add_allow_top_to_srs():
    "Add the allow_top property to all stored subdiggs"
    from r2.models import Subdigg
    from r2.lib.db.operators import desc
    from r2.lib.utils import fetch_things2

    # fetch both spam and non-spam subdiggs
    q = Subdigg._query(Subdigg.c._spam == (True, False),
                       sort = desc('_date'))
    for sr in fetch_things2(q):
        sr.allow_top = True
        sr._commit()
def subscribe_to_blog_and_announcements(filename):
    import re
    from time import sleep
    from r2.models import Account, Subdigg

    r_blog = Subdigg._by_name("blog")
    r_announcements = Subdigg._by_name("announcements")

    contents = file(filename).read()
    numbers = [ int(s) for s in re.findall(r"\d+", contents) ]

    # d = Account._byID(numbers, data=True)
    # for i, account in enumerate(d.values()):
    for i, account_id in enumerate(numbers):
        account = Account._byID(account_id, data=True)
        for sr in r_blog, r_announcements:
            if sr.add_subscriber(account):
                sr._incr("_ups", 1)
                print ("%d: subscribed %s to %s" % (i, account.name, sr.name))
            else:
                print ("%d: didn't subscribe %s to %s" % (i, account.name, sr.name))
def default_queries():
    from r2.models import Link, Subdigg
    from r2.lib.db.operators import desc
    from copy import deepcopy

    queries = []

    q = Link._query(Link.c.sr_id == Subdigg.user_subreddits(None),
                    sort = desc('_hot'),
                    limit = 37)
    queries.append(q)

    # add a higher limit one too
    q = deepcopy(q)
    q._limit = 75
    queries.append(q)

    return queries
def normalized_hot_cached(sr_ids):
    """Fetches the hot lists for each subdigg, normalizes the scores,
    and interleaves the results."""
    results = []
    srs = Subdigg._byID(sr_ids, data = True, return_dict = False)
    for sr in srs:
        items = only_recent(get_hot(sr))
        if not items:
            continue

        top_score = max(max(x._hot for x in items), 1)
        results.extend((l, l._hot / top_score) for l in items)

    results.sort(key = lambda x: (x[1], x[0]._hot), reverse = True)
    return [l[0]._fullname for l in results]
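# A minimal, self-contained sketch of the normalization step above, using plain
# numbers in place of Link objects (hypothetical data; not part of the
# codebase).  Each source's scores are divided by that source's top score so a
# busy subdigg cannot drown out a quiet one, then everything is merged and
# sorted by the normalized value.
def _normalize_and_interleave_sketch(hot_lists):
    results = []
    for scores in hot_lists:
        if not scores:
            continue
        top_score = max(max(scores), 1)
        results.extend((s, float(s) / top_score) for s in scores)
    results.sort(key = lambda pair: (pair[1], pair[0]), reverse = True)
    return [pair[0] for pair in results]

# e.g. _normalize_and_interleave_sketch([[100, 50], [4, 2]]) == [100, 4, 50, 2]:
# the top item from each list ranks at 1.0 before either list's second item.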
def link_from_url(path, filter_spam = False, multiple = True):
    from pylons import c
    from r2.models import IDBuilder, Link, Subdigg, NotFound
    from r2.lib.utils import tup

    if not path:
        return

    try:
        links = Link._by_url(path, c.site)
    except NotFound:
        return [] if multiple else None

    links = tup(links)

    # run the list through a builder to remove any that the user
    # isn't allowed to see
    links = IDBuilder([link._fullname for link in links],
                      skip = False).get_items()[0]
    if not links:
        return

    if filter_spam:
        # first, try to remove any spam
        links_nonspam = [ link for link in links
                          if not link._spam ]
        if links_nonspam:
            links = links_nonspam

    # if it occurs in one or more of their subscriptions, show them
    # that one first
    subs = set(Subdigg.user_subreddits(c.user, limit = None))
    def cmp_links(a, b):
        if a.sr_id in subs and b.sr_id not in subs:
            return -1
        elif a.sr_id not in subs and b.sr_id in subs:
            return 1
        else:
            return cmp(b._hot, a._hot)
    links = sorted(links, cmp = cmp_links)

    # among those, show them the hottest one
    return links if multiple else links[0]
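# A small, self-contained illustration of the ordering cmp_links produces,
# written with a key function and plain dicts standing in for Link objects
# (hypothetical data; not part of the codebase): links from subscribed
# subdiggs sort first, and ties are broken by hotness, highest first.
def _order_links_sketch(links, subscribed_sr_ids):
    return sorted(links,
                  key = lambda l: (l['sr_id'] not in subscribed_sr_ids, -l['_hot']))

# e.g. with subscriptions set([1]), a less-hot link from subdigg 1 still
# outranks a hotter link from subdigg 2:
# _order_links_sketch([{'sr_id': 2, '_hot': 9.0}, {'sr_id': 1, '_hot': 3.0}], set([1]))
# -> [{'sr_id': 1, '_hot': 3.0}, {'sr_id': 2, '_hot': 9.0}]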
def get_sr_counts(period = count_period):
    srs = Subdigg._query()
    # start each stored subdigg at a zeroed count, keyed by its fullname
    return dict((sr._fullname, (0, sr._id)) for sr in srs)