def url_links_builder(url, exclude=None, num=None, after=None, reverse=None, count=None):
    """Build an IDBuilder over every submission of *url*, most-commented first.

    The link whose fullname equals *exclude* is dropped, and removed
    (spammed) links are only shown to admins, subreddit moderators, or
    the submitter themselves.
    """
    from operator import attrgetter

    from r2.lib.template_helpers import add_sr
    from r2.models import IDBuilder, Link, NotFound

    # Site-relative paths need a full hostname before the URL lookup.
    if url.startswith('/'):
        url = add_sr(url, force_hostname=True)

    try:
        found = Link._by_url(url, None)
    except NotFound:
        found = []

    candidates = [lk for lk in found if lk._fullname != exclude]
    candidates.sort(key=attrgetter('num_comments'), reverse=True)

    # don't show removed links in duplicates unless admin or mod
    # or unless it's your own post
    def include_link(link):
        if not link._spam:
            return True
        if not c.user_is_loggedin:
            return False
        return (link.author_id == c.user._id
                or c.user_is_admin
                or link.subreddit.is_moderator(c.user))

    return IDBuilder([lk._fullname for lk in candidates],
                     skip=True,
                     keep_fn=include_link,
                     num=num,
                     after=after,
                     reverse=reverse,
                     count=count)
def url_links_builder(url, exclude=None):
    """Return an IDBuilder of all links submitted for *url*.

    Drops the link whose fullname matches *exclude* and hides removed
    links from everyone but admins, moderators, and the author.
    """
    from operator import attrgetter

    from r2.models import IDBuilder, Link, NotFound

    try:
        found = tup(Link._by_url(url, None))
    except NotFound:
        found = []

    remaining = [lk for lk in found if lk._fullname != exclude]
    remaining.sort(key=attrgetter('num_comments'), reverse=True)

    # don't show removed links in duplicates unless admin or mod
    # or unless it's your own post
    def include_link(link):
        if not link._spam:
            return True
        if not c.user_is_loggedin:
            return False
        return (link.author_id == c.user._id
                or c.user_is_admin
                or link.subreddit.is_moderator(c.user))

    return IDBuilder([lk._fullname for lk in remaining],
                     skip=True,
                     keep_fn=include_link)
def url_links_builder(url, exclude=None, num=None, after=None, reverse=None, count=None):
    """IDBuilder over the submissions of *url*, ordered by comment count.

    *exclude* (a fullname) is filtered out; removed links are visible
    only to admins, moderators of the link's subreddit, or its author.
    Paging arguments (num/after/reverse/count) pass through to IDBuilder.
    """
    from operator import attrgetter

    from r2.lib.template_helpers import add_sr
    from r2.models import IDBuilder, Link, NotFound

    # A site-relative path must be expanded to a full URL before lookup.
    if url.startswith('/'):
        url = add_sr(url, force_hostname=True)

    try:
        matches = tup(Link._by_url(url, None))
    except NotFound:
        matches = []

    kept = [lk for lk in matches if lk._fullname != exclude]
    kept.sort(key=attrgetter('num_comments'), reverse=True)

    # don't show removed links in duplicates unless admin or mod
    # or unless it's your own post
    def include_link(link):
        if not link._spam:
            return True
        if not c.user_is_loggedin:
            return False
        return (link.author_id == c.user._id
                or c.user_is_admin
                or link.subreddit.is_moderator(c.user))

    return IDBuilder([lk._fullname for lk in kept],
                     skip=True,
                     keep_fn=include_link,
                     num=num,
                     after=after,
                     reverse=reverse,
                     count=count)
def submit_all():
    """Resubmit every URL in test_urls to the 'testmedia' subreddit.

    For each URL: any existing submission of that URL is soft-deleted,
    a fresh link is submitted as the 'testmedia' account, media scraping
    is attempted best-effort, and the new link is pushed onto the query
    queue when enabled.  (Python 2 syntax; test/maintenance helper.)

    NOTE(review): relies on module-level `test_urls` and the pylons-style
    global `g` — both defined outside this block.
    """
    from r2.models import Subdigg, Account, Link, NotFound
    from r2.lib.media import set_media
    from r2.lib.db import queries
    # Both the target subreddit and the submitting account are the
    # fixed 'testmedia' fixtures.
    sr = Subdigg._by_name('testmedia')
    author = Account._by_name('testmedia')
    links = []
    for url in test_urls:
        try:
            # delete any existing version of the link
            l = Link._by_url(url, sr)
            print "Deleting %s" % l
            # Soft delete: mark and persist rather than removing the row.
            l._deleted = True
            l._commit()
        except NotFound:
            # No prior submission of this URL — nothing to delete.
            pass
        l = Link._submit(url, url, author, sr, '0.0.0.0')
        try:
            set_media(l)
        except Exception, e:
            # Media scraping is best-effort; log the failure and move on.
            print e
        if g.write_query_queue:
            queries.new_link(l)
        links.append(l)
def _get_related_link_ids(cls, event_id):
    """Return the _ids of all submissions whose URL is this live event's page."""
    url = add_sr("/live/%s" % event_id, sr_path=False, force_hostname=True)
    try:
        submissions = tup(Link._by_url(url, sr=None))
    except NotFound:
        submissions = []
    return [submission._id for submission in submissions]
def hot_links_by_url_listing(url, sr=None, num=None, **kw):
    """Return a wrapped listing of the links submitted for *url*, hottest first."""
    try:
        matches = Link._by_url(url, sr)
    except NotFound:
        matches = []
    # Hottest links come first in the listing.
    ordered = sorted(matches, key=lambda lnk: lnk._hot, reverse=True)
    return wrap_links(ordered, num=num, **kw)
def _get_related_link_ids(cls, event_id):
    """Return the _ids of links submitted for the live event *event_id*.

    Bug fix: the URL was previously built from `c.liveupdate_event._id`,
    silently ignoring the *event_id* argument — so a caller (or cache)
    keyed on *event_id* could get results for whatever event happened to
    be in the request context.  Build the URL from the parameter instead,
    matching the sibling implementation that uses `event_id` directly.
    """
    url = make_event_url(event_id)
    try:
        links = tup(Link._by_url(url, sr=None))
    except NotFound:
        # No link has been submitted for this event yet.
        links = []
    return [link._id for link in links]
def url_links(url, exclude=None):
    """All links submitted for *url*, minus the one whose fullname is *exclude*."""
    from r2.models import Link, NotFound
    try:
        found = tup(Link._by_url(url, None))
    except NotFound:
        found = []
    return [lk for lk in found if lk._fullname != exclude]
def link_duplicates(article):
    """Return other submissions of the same URL as *article*.

    The article itself (matched by fullname) is excluded from the result.

    Consistency fix: the other `link_duplicates` variants in this file
    short-circuit on self posts — a self post has no external URL, so the
    URL lookup cannot find meaningful duplicates.  Add the same guard here.
    """
    from r2.models import Link, NotFound

    # don't bother looking it up if the link doesn't have a URL anyway
    if getattr(article, "is_self", False):
        return []

    try:
        links = tup(Link._by_url(article.url, None))
    except NotFound:
        links = []

    return [link for link in links if link._fullname != article._fullname]
def _get_related_link_ids(event_id):
    """Fullnames of links submitted for the live event, capped for caching."""
    # imported here to avoid circular import
    from reddit_liveupdate.pages import make_event_url

    url = make_event_url(event_id)
    try:
        found = Link._by_url(url, sr=None)
    except NotFound:
        found = []

    # Bound how many ids we are willing to cache.
    capped = itertools.islice(found, MAX_LINK_IDS_TO_CACHE)
    return [link._fullname for link in capped]
def link_from_url(path, filter_spam=False, multiple=True):
    """Look up the link(s) submitted for *path* on the current site.

    Returns None for an empty path; returns [] (or None when
    multiple=False) when nothing was found; otherwise delegates to
    filter_links for spam filtering and single/multiple selection.

    Cleanup: `IDBuilder` and `Subreddit` were imported but never used in
    this variant — drop them from the function-scope import.
    """
    from pylons import c
    from r2.models import Link, NotFound

    if not path:
        return
    try:
        links = Link._by_url(path, c.site)
    except NotFound:
        return [] if multiple else None
    return filter_links(tup(links), filter_spam=filter_spam, multiple=multiple)
def link_duplicates(article):
    """Other submissions sharing *article*'s URL, excluding the article itself."""
    from r2.models import Link, NotFound

    # don't bother looking it up if the link doesn't have a URL anyway
    if getattr(article, "is_self", False):
        return []

    try:
        same_url = tup(Link._by_url(article.url, None))
    except NotFound:
        same_url = []

    return [other for other in same_url
            if other._fullname != article._fullname]
def link_duplicates(article):
    """Find every other link submitted with the same URL as *article*."""
    from r2.models import Link, NotFound

    # don't bother looking it up if the link doesn't have a URL anyway
    if getattr(article, 'is_self', False):
        return []

    try:
        candidates = tup(Link._by_url(article.url, None))
    except NotFound:
        candidates = []

    own_fullname = article._fullname
    return [dup for dup in candidates if dup._fullname != own_fullname]
def link_from_url(path, filter_spam = False, multiple = True):
    """Resolve *path* to the submitted link(s) for the current site.

    Returns None for an empty path or when the builder filters out every
    match; [] (or None when multiple=False) when no link exists for the
    URL; otherwise the visible links sorted so that subscribed-subreddit
    links come first, then hottest first — or just the top one when
    multiple=False.  (Python 2: uses cmp-style sorting.)
    """
    from pylons import c
    from r2.models import IDBuilder, Link, Subdigg, NotFound
    if not path:
        return
    try:
        links = Link._by_url(path, c.site)
    except NotFound:
        return [] if multiple else None
    # _by_url may return a single link; normalize to a tuple/list.
    links = tup(links)
    # run the list through a builder to remove any that the user
    # isn't allowed to see
    links = IDBuilder([link._fullname for link in links],
                      skip = False).get_items()[0]
    if not links:
        return
    if filter_spam:
        # first, try to remove any spam
        links_nonspam = [ link for link in links
                          if not link._spam ]
        if links_nonspam:
            links = links_nonspam
    # if it occurs in one or more of their subscriptions, show them
    # that one first
    subs = set(Subdigg.user_subreddits(c.user, limit = None))
    def cmp_links(a, b):
        # Subscribed subreddits sort ahead of unsubscribed ones;
        # within a group, hotter links come first (descending _hot).
        if a.sr_id in subs and b.sr_id not in subs:
            return -1
        elif a.sr_id not in subs and b.sr_id in subs:
            return 1
        else:
            return cmp(b._hot, a._hot)
    links = sorted(links, cmp = cmp_links)
    # among those, show them the hottest one
    return links if multiple else links[0]
def url_links_builder(url, exclude=None, num=None, after=None, reverse=None,
                      count=None, public_srs_only=False):
    """IDBuilder over every submission of *url*, most-commented first.

    *exclude* (a fullname) is filtered out.  With public_srs_only=True,
    links in private subreddits are dropped for non-admins.  Removed
    links stay visible only to admins, moderators of the link's
    subreddit, or the link's author.  Paging arguments pass through to
    IDBuilder unchanged.
    """
    from operator import attrgetter

    from r2.lib.template_helpers import add_sr
    from r2.models import IDBuilder, Link, NotFound, Subreddit

    # Site-relative paths need a full hostname before the URL lookup.
    if url.startswith("/"):
        url = add_sr(url, force_hostname=True)

    try:
        found = Link._by_url(url, None)
    except NotFound:
        found = []

    candidates = [lk for lk in found if lk._fullname != exclude]

    if public_srs_only and not c.user_is_admin:
        srs = Subreddit._byID([lk.sr_id for lk in candidates], data=True)
        candidates = [lk for lk in candidates
                      if srs[lk.sr_id].type != "private"]

    candidates.sort(key=attrgetter("num_comments"), reverse=True)

    # don't show removed links in duplicates unless admin or mod
    # or unless it's your own post
    def include_link(link):
        if not link._spam:
            return True
        if not c.user_is_loggedin:
            return False
        return (link.author_id == c.user._id
                or c.user_is_admin
                or link.subreddit.is_moderator(c.user))

    return IDBuilder(
        [lk._fullname for lk in candidates],
        skip=True,
        keep_fn=include_link,
        num=num,
        after=after,
        reverse=reverse,
        count=count,
    )