Example #1
def new_report(thing):
    if isinstance(thing, Link):
        sr = Subsciteit._byID(thing.sr_id)
        add_queries([get_reported_links(sr)], insert_items = thing)
    elif isinstance(thing, Comment):
        sr = Subsciteit._byID(thing.sr_id)
        add_queries([get_reported_comments(sr)], insert_items = thing)
Example #2
    def get_subsciteit(self):
        """checks if the current url refers to a subsciteit and returns
        that subsciteit object.  The cases here are:

          * the hostname is unset or is g.domain, in which case it
            looks for /r/XXXX or /sciteits.  The default in this case
            is Default.
          * the hostname is a cname to a known subsciteit.

        On failure to find a subsciteit, returns None.
        """
        from pylons import g
        from r2.models import Subsciteit, Sub, NotFound, DefaultSR
        try:
            if not self.hostname or self.hostname.startswith(g.domain):
                if self.path.startswith('/r/'):
                    return Subsciteit._by_name(self.path.split('/')[2])
                elif self.path.startswith('/sciteits/'):
                    return Sub
                else:
                    return DefaultSR()
            elif self.hostname:
                return Subsciteit._by_domain(self.hostname)
        except NotFound:
            pass
        return None
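A hedged usage sketch of the cases in the docstring. It assumes this method lives on the URL wrapper used in Example #30 (r2.lib.utils.UrlParser) and that g.domain is 'example.com'; both are assumptions for illustration, not facts taken from this listing.

from r2.lib.utils import UrlParser

UrlParser('http://example.com/r/science/new/').get_subsciteit()  # -> Subsciteit._by_name('science')
UrlParser('http://example.com/sciteits/').get_subsciteit()       # -> Sub
UrlParser('http://example.com/').get_subsciteit()                # -> DefaultSR()
UrlParser('http://sci.example.org/').get_subsciteit()            # -> Subsciteit._by_domain(...) cname lookup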
Example #3
def build_sr_tree(root_id):
    """Builds the tree below this point."""
    from r2.models import Subsciteit
    tree=[root_id]
    undone=Subsciteit._byID(root_id).children
    while undone:
        tree.extend(undone)
        # _byID on a list of ids returns a dict of id -> Subsciteit
        tmp = Subsciteit._byID(undone)
        undone = []
        for k in tmp:
            if tmp[k].children:
                undone.extend(tmp[k].children)
    return tree
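A hedged illustration of the traversal order (the ids below are invented): the walk is breadth-first, so the root comes first and the leaves last, which is why Example #4 below reverses the result before populating feeds.

# Suppose subsciteit 1 has children [2, 3] and subsciteit 2 has child [4]:
#   build_sr_tree(1) == [1, 2, 3, 4]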
Example #4
def run():
    #rss = get_sr_rss()
    #names = rss.keys()
    #Build tree order; this will be from root to leaves.
    order=build_sr_tree(Subsciteit._by_name(g.default_sr)._id)
    #Populate RSS in the other order...
    order.reverse()
    for sr_id in order:
        sr = Subsciteit._byID(sr_id)
        if sr.rss_source:
            #ac=Account._byID(srob.moderators[0])
            ac = Account._by_name(g.system_user)
            print "Populating %s as %s using feed |%s|" % (sr.name, ac.name, sr.rss_source)
            submit_rss_links(sr.name, sr.rss_source, user=ac._id)
Example #5
def submit_rss_links(srname,rss,user,titlefield='title',linkfield='link'):
    #F**k the API, let's just do it the way we would if we were really doing it.  This avoids screwing around with cookies and so forth...
    feed=fetch_feed(rss)
    if feed is None:
        return
    ac=Account._byID(user)
    sr=Subsciteit._by_name(srname)
    ip='0.0.0.0'
    niceify=False
    if domain(rss)=="arxiv.org":
        niceify=dict(find="\(arXiv:.*?\)",replace="")
    #Let's randomize why not...
    random.shuffle(feed.entries)
    for article in feed.entries:
        # This can take all night if it has to; we don't want to hammer the server into oblivion.
        sleep(1)
        kw = fetch_article(article, titlefield=titlefield, linkfield=linkfield, niceify=niceify)
        if kw is None:
            continue
        l = Link._submit(kw['title'], kw['link'], ac, sr, ip, spam=False)
        l._commit()
        l.set_url_cache()
        # We don't really need auto-submitted links to be voted on...
        queries.queue_vote(ac, l, True, ip, cheater=False)
        queries.new_link(l)
        changed(l)
        print "Submitted %s" % article[titlefield]
        sleep(.1)
    return
Example #6
def update_karmas():
    for pair in to_update():
        user = Account._byID(pair[0], True)
        sr = Subsciteit._byID(pair[1], True)

        print user.name, sr.name
        user.incr_karma('comment', sr, 20)
Example #7
def filter_links(links, filter_spam = False, multiple = True):
    # run the list through a builder to remove any that the user
    # isn't allowed to see
    from pylons import c
    from r2.models import IDBuilder, Link, Subsciteit, NotFound
    links = IDBuilder([link._fullname for link in links],
                      skip = False).get_items()[0]
    if not links:
        return

    if filter_spam:
        # first, try to remove any spam
        links_nonspam = [ link for link in links
                          if not link._spam ]
        if links_nonspam:
            links = links_nonspam

    # if it occurs in one or more of their subscriptions, show them
    # that one first
    subs = set(Subsciteit.user_subsciteits(c.user, limit = None))
    def cmp_links(a, b):
        if a.sr_id in subs and b.sr_id not in subs:
            return -1
        elif a.sr_id not in subs and b.sr_id in subs:
            return 1
        else:
            return cmp(b._hot, a._hot)
    links = sorted(links, cmp = cmp_links)

    # among those, show them the hottest one
    return links if multiple else links[0]
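A hedged usage sketch; candidate_links stands in for a list of Link objects fetched elsewhere (hypothetical), and the keyword arguments follow the signature above.

# candidate_links: hypothetical list of Link objects obtained elsewhere.
best = filter_links(candidate_links, filter_spam=True, multiple=False)
# best is the hottest link the current user may see, preferring non-spam
# links and subsciteits the user subscribes to; None if the builder
# filtered everything out.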
Example #8
    def gen_keys():
        yield promoted_memo_key

        # just let this one do its own writing
        load_all_sciteits()

        yield queries.get_all_comments().iden

        l_q = Link._query(Link.c._spam == (True, False),
                          Link.c._deleted == (True, False),
                          sort=desc('_date'),
                          data=True,
                          )
        for link in fetch_things2(l_q, verbosity):
            yield comments_key(link._id)
            yield last_modified_key(link, 'comments')

        a_q = Account._query(Account.c._spam == (True, False),
                             sort=desc('_date'),
                             )
        for account in fetch_things2(a_q, verbosity):
            yield messages_key(account._id)
            yield last_modified_key(account, 'overview')
            yield last_modified_key(account, 'commented')
            yield last_modified_key(account, 'submitted')
            yield last_modified_key(account, 'liked')
            yield last_modified_key(account, 'disliked')
            yield queries.get_comments(account, 'new', 'all').iden
            yield queries.get_submitted(account, 'new', 'all').iden
            yield queries.get_liked(account).iden
            yield queries.get_disliked(account).iden
            yield queries.get_hidden(account).iden
            yield queries.get_saved(account).iden
            yield queries.get_inbox_messages(account).iden
            yield queries.get_unread_messages(account).iden
            yield queries.get_inbox_comments(account).iden
            yield queries.get_unread_comments(account).iden
            yield queries.get_inbox_selfreply(account).iden
            yield queries.get_unread_selfreply(account).iden
            yield queries.get_sent(account).iden

        sr_q = Subsciteit._query(Subsciteit.c._spam == (True, False),
                                sort=desc('_date'),
                                )
        for sr in fetch_things2(sr_q, verbosity):
            yield last_modified_key(sr, 'stylesheet_contents')
            yield queries.get_links(sr, 'hot', 'all').iden
            yield queries.get_links(sr, 'new', 'all').iden

            for sort in 'top', 'controversial':
                for time in 'hour', 'day', 'week', 'month', 'year', 'all':
                    yield queries.get_links(sr, sort, time,
                                            merge_batched=False).iden
            yield queries.get_spam_links(sr).iden
            yield queries.get_spam_comments(sr).iden
            yield queries.get_reported_links(sr).iden
            yield queries.get_reported_comments(sr).iden
            yield queries.get_subsciteit_messages(sr).iden
            yield queries.get_unread_subsciteit_messages(sr).iden
Example #9
def multi_load_tree(sr_ids):
    srs = Subsciteit._byID(sr_ids, return_dict = False)
    res = {}
    for sr in srs:
        trees = subsciteit_messages_nocache(sr)
        if trees:
            res[sr._id] = trees
    return res
Example #10
def get_spam(sr):
    if isinstance(sr, ModContribSR):
        srs = Subsciteit._byID(sr.sr_ids(), return_dict=False)
        results = [ get_spam_links(sr) for sr in srs ]
        return merge_results(*results)
    else:
        return merge_results(get_spam_links(sr),
                             get_spam_comments(sr))
Example #11
def add_all_ban_report_srs():
    """Adds the initial spam/reported pages to the report queue"""
    q = Subsciteit._query(sort = asc('_date'))
    for sr in fetch_things2(q):
        add_queries([get_spam_links(sr),
                     get_spam_comments(sr),
                     get_reported_links(sr),
                     get_reported_comments(sr),
                     ])
Example #12
def set_downs():
    sr_counts = count.get_sr_counts()
    names = [k for k, v in sr_counts.iteritems() if v != 0]
    srs = Subsciteit._by_fullname(names)
    for name in names:
        sr,c = srs[name], sr_counts[name]
        if c != sr._downs and c > 0:
            sr._downs = max(c, 0)
            sr._commit()
Example #13
def assign_trial(account, juries_already_on, ip, slash16):
    from r2.models import Jury, Subsciteit, Trial
    from r2.lib.db import queries

    defendants_assigned_to = []
    for jury in juries_already_on:
        defendants_assigned_to.append(jury._thing2_id)

    subscribed_sr_ids = Subsciteit.user_subsciteits(account, ids=True, limit=None)

    # Pull defendants, except ones which already have lots of juryvotes
    defs = Trial.all_defendants(quench=True)

    # Filter out defendants outside this user's subscribed SRs
    defs = filter (lambda d: d.sr_id in subscribed_sr_ids, defs)

    # Dictionary of sr_id => SR for all defendants' SRs
    srs = Subsciteit._byID(set([ d.sr_id for d in defs ]))

    # Dictionary of sr_id => eligibility bool
    submit_srs = {}
    for sr_id, sr in srs.iteritems():
        submit_srs[sr_id] = sr.can_submit(account) and not sr._spam

    # Filter out defendants with ineligible SRs
    defs = filter (lambda d: submit_srs.get(d.sr_id), defs)

    likes = queries.get_likes(account, defs)

    if not g.debug:
        # Filter out things that the user has upvoted or downvoted
        defs = filter (lambda d: likes.get((account, d)) is None, defs)

    # Prefer oldest trials
    defs.sort(key=lambda x: x._date)

    for defendant in defs:
        sr = srs[defendant.sr_id]

        if voir_dire(account, ip, slash16, defendants_assigned_to, defendant, sr):
            j = Jury._new(account, defendant)
            return defendant

    return None
Example #14
def add_allow_top_to_srs():
    "Add the allow_top property to all stored subsciteits"
    from r2.models import Subsciteit
    from r2.lib.db.operators import desc
    from r2.lib.utils import fetch_things2

    q = Subsciteit._query(Subsciteit.c._spam == (True,False),
                         sort = desc('_date'))
    for sr in fetch_things2(q):
        sr.allow_top = True; sr._commit()
Example #15
def get_reported(sr):
    if isinstance(sr, ModContribSR):
        srs = Subsciteit._byID(sr.sr_ids(), return_dict=False)
        results = []
        results.extend(get_reported_links(sr) for sr in srs)
        results.extend(get_reported_comments(sr) for sr in srs)
        return merge_results(*results)
    else:
        return merge_results(get_reported_links(sr),
                             get_reported_comments(sr))
Example #16
    def degolden(self, account, severe=False):

        if severe:
            account.gold_charter = False
            Award.take_away("charter_subscriber", account)

        Award.take_away("sciteit_gold", account)
        account.gold = False
        account._commit()

        if g.lounge_sciteit and not getattr(account, "gold_charter", False):
            sr = Subsciteit._by_name(g.lounge_sciteit)
            sr.remove_contributor(account)
Example #17
def popular_searches():
    top_sciteits = Subsciteit._query(Subsciteit.c.type == 'public',
                                   sort = desc('_downs'),
                                   limit = 100,
                                   data = True)
    top_searches = {}
    for sr in top_sciteits:
        name = sr.name.lower()
        for i in xrange(min(len(name), 3)):
            query = name[:i + 1]
            r = search_sciteits(query)
            top_searches[query] = r
    return top_searches
Example #18
    def thing_attr(self, thing, attr):
        from r2.models import Subsciteit
        if attr == "has_mail":
            if c.user_is_loggedin and thing._id == c.user._id:
                return bool(c.have_messages)
            return None
        if attr == "has_mod_mail":
            if c.user_is_loggedin and thing._id == c.user._id:
                return bool(c.have_mod_messages)
            return None
        if attr == "is_mod":
            return bool(Subsciteit.reverse_moderator_ids(thing))
        return ThingJsonTemplate.thing_attr(self, thing, attr)
Example #19
def add_all_srs():
    """Recalculates every listing query for every subsciteit. Very,
       very slow."""
    q = Subsciteit._query(sort = asc('_date'))
    for sr in fetch_things2(q):
        for q in all_queries(get_links, sr, ('hot', 'new'), ['all'],no_children=True):
            q.update()
        for q in all_queries(get_links, sr, time_filtered_sorts, db_times.keys(),no_children=True):
            q.update()
        get_spam_links(sr).update()
        get_spam_comments(sr).update()
        get_reported_links(sr).update()
        get_reported_comments(sr).update()
Example #20
def subscribe_to_blog_and_annoucements(filename):
    import re
    from time import sleep
    from r2.models import Account, Subsciteit

    r_blog = Subsciteit._by_name("blog")
    r_announcements = Subsciteit._by_name("announcements")

    contents = file(filename).read()
    numbers = [ int(s) for s in re.findall("\d+", contents) ]

#    d = Account._byID(numbers, data=True)

#   for i, account in enumerate(d.values()):
    for i, account_id in enumerate(numbers):
        account = Account._byID(account_id, data=True)

        for sr in r_blog, r_announcements:
            if sr.add_subscriber(account):
                sr._incr("_ups", 1)
                print ("%d: subscribed %s to %s" % (i, account.name, sr.name))
            else:
                print ("%d: didn't subscribe %s to %s" % (i, account.name, sr.name))
Example #21
    def set_last_sr_ban(self, things):
        by_srid = {}
        for thing in things:
            if getattr(thing, 'sr_id', None) is not None:
                by_srid.setdefault(thing.sr_id, []).append(thing)

        if by_srid:
            srs = Subsciteit._byID(by_srid.keys(), data=True, return_dict=True)
            for sr_id, sr_things in by_srid.iteritems():
                sr = srs[sr_id]

                sr.last_mod_action = datetime.now(g.tz)
                sr._commit()
                sr._incr('mod_actions', len(sr_things))
Example #22
def _by_srid(things,srs=True):
    """Takes a list of things and returns them in a dict separated by
       sr_id, in addition to the looked-up subsciteits"""
    ret = {}

    for thing in tup(things):
        if getattr(thing, 'sr_id', None) is not None:
            ret.setdefault(thing.sr_id, []).append(thing)

    if srs:
        _srs = Subsciteit._byID(ret.keys(), return_dict=True) if ret else {}
        return ret, _srs
    else:
        return ret
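A sketch of the return shape, mirroring how Example #21 consumes the same grouping (things here is any iterable of Things carrying an sr_id; with srs=False only the first dict is returned).

by_srid, srs = _by_srid(things)
for sr_id, sr_things in by_srid.iteritems():
    sr = srs[sr_id]
    # ... operate on sr_things in the context of sr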
Example #23
def store_keys(key, maxes):
    # we're building queries using queries.py, but we could make the
    # queries ourselves if we wanted to avoid the individual lookups
    # for accounts and subsciteits.

    # Note that we're only generating the 'sr-' type queries here, but
    # we're also able to process the other listings generated by the
    # old migrate.mr_permacache for convenience

    userrel_fns = dict(liked = queries.get_liked,
                       disliked = queries.get_disliked,
                       saved = queries.get_saved,
                       hidden = queries.get_hidden)

    if key.startswith('user-'):
        acc_str, keytype, account_id = key.split('-')
        account_id = int(account_id)
        fn = queries.get_submitted if keytype == 'submitted' else queries.get_comments
        q = fn(Account._byID(account_id), 'new', 'all')
        q._insert_tuples([(fname, float(timestamp))
                    for (timestamp, fname)
                    in maxes])

    elif key.startswith('sr-'):
        sr_str, sort, time, sr_id = key.split('-')
        sr_id = int(sr_id)

        if sort == 'controversy':
            # I screwed this up in the mapper and it's too late to fix
            # it
            sort = 'controversial'

        q = queries.get_links(Subsciteit._byID(sr_id), sort, time)
        q._insert_tuples([tuple([item[-1]] + map(float, item[:-1]))
                    for item in maxes])
    elif key.startswith('domain/'):
        d_str, sort, time, domain = key.split('/')
        q = queries.get_domain_links(domain, sort, time)
        q._insert_tuples([tuple([item[-1]] + map(float, item[:-1]))
                    for item in maxes])


    elif key.split('-')[0] in userrel_fns:
        key_type, account_id = key.split('-')
        account_id = int(account_id)
        fn = userrel_fns[key_type]
        q = fn(Account._byID(account_id))
        q._insert_tuples([tuple([item[-1]] + map(float, item[:-1]))
                    for item in maxes])
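For reference, the key formats that the branches above parse, with illustrative values (the layout of the maxes tuples differs per branch and is not spelled out here):

# 'user-submitted-1234'         -> queries.get_submitted(Account 1234, 'new', 'all')
#   (any other user- keytype)   -> queries.get_comments(Account 1234, 'new', 'all')
# 'sr-top-day-42'               -> queries.get_links(Subsciteit 42, 'top', 'day')
#   ('controversy' is rewritten to 'controversial' before the lookup)
# 'domain/hot/all/example.com'  -> queries.get_domain_links('example.com', 'hot', 'all')
# 'liked-1234', 'disliked-1234',
# 'saved-1234', 'hidden-1234'   -> the matching userrel query for Account 1234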
Example #24
    def by_sr_merged(cls, sr, _update=False):
        if sr.name == g.default_sr:
            return cls.by_sr(sr)

        my_adsrs =     cls.by_sr(sr)
        global_adsrs = cls.by_sr(Subsciteit._by_name(g.default_sr, stale=True))

        seen = {}
        for adsr in my_adsrs:
            seen[adsr._thing1.codename] = True
        for adsr in global_adsrs:
            if adsr._thing1.codename not in seen:
                my_adsrs.append(adsr)

        return my_adsrs
Example #25
def default_queries():
    from r2.models import Link, Subsciteit
    from r2.lib.db.operators import desc
    from copy import deepcopy
    queries = []

    q = Link._query(Link.c.sr_id == Subsciteit.user_subsciteits(None),
                    sort = desc('_hot'),
                    limit = 37)

    queries.append(q)
    #add a higher limit one too
    q = deepcopy(q)
    q._limit = 75
    queries.append(q)

    return queries
Example #26
def load_all_sciteits():
    query_cache = {}

    q = Subsciteit._query(Subsciteit.c.type == 'public',
                         Subsciteit.c._downs > 1,
                         sort = (desc('_downs'), desc('_ups')),
                         data = True)
    for sr in utils.fetch_things2(q):
        name = sr.name.lower()
        for i in xrange(len(name)):
            prefix = name[:i + 1]
            names = query_cache.setdefault(prefix, [])
            if len(names) < 10:
                names.append(sr.name)

    for name_prefix, subsciteits in query_cache.iteritems():
        SubsciteitsByPartialName._set_values(name_prefix, {'srs': subsciteits})
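A sketch of what ends up in the cache, following the loop above (the names are invented): every prefix of a public subsciteit's name maps to at most ten names, in the (downs, ups) order of the query.

# Hypothetical SubsciteitsByPartialName rows after processing 'science', 'scifi', ...:
#   'sc'  -> {'srs': ['science', 'scifi', ...]}   # capped at 10 names per prefix
#   'sci' -> {'srs': ['science', 'scifi', ...]}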
Example #27
def moderator_messages(user):
    from r2.models import Subsciteit
    sr_ids = Subsciteit.reverse_moderator_ids(user)

    def multi_load_tree(sr_ids):
        srs = Subsciteit._byID(sr_ids, return_dict = False)
        res = {}
        for sr in srs:
            trees = subsciteit_messages_nocache(sr)
            if trees:
                res[sr._id] = trees
        return res

    res = sgm(g.permacache, sr_ids, miss_fn = multi_load_tree,
              prefix = sr_messages_key(""))

    return sorted(chain(*res.values()), key = tree_sort_fn, reverse = True)
Example #28
def cache_lists():
    def _chop(srs):
        srs.sort(key=lambda s: s._downs, reverse=True)
        return srs[:limit]

    # bylang    =:= dict((lang, over18_state) -> [Subsciteit])
    # lang      =:= all | lang()
    # nsfwstate =:= no_over18 | allow_over18 | only_over18
    bylang = {}

    for sr in fetch_things2(Subsciteit._query(sort=desc('_date'),
                                             data=True)):
        aid = getattr(sr, 'author_id', None)
        if aid is not None and aid < 0:
            # skip special system sciteits like promos
            continue

        type = getattr(sr, 'type', 'private')
        if type not in ('public', 'restricted'):
            # skips sciteits that can't appear in the default list
            # because of permissions
            continue

        for lang in 'all', sr.lang:
            over18s = ['allow_over18']
            if sr.over_18:
                over18s.append('only_over18')
            else:
                over18s.append('no_over18')

            for over18 in over18s:
                k = (lang, over18)
                bylang.setdefault(k, []).append(sr)

                # keep the lists small while we work
                if len(bylang[k]) > limit*2:
                    bylang[k] = _chop(bylang[k])

    for (lang, over18), srs in bylang.iteritems():
        srs = _chop(srs)
        sr_tuples = map(lambda sr: (sr._downs, sr.allow_top, sr._id), srs)

        print "For %s/%s setting %s" % (lang, over18,
                                        map(lambda sr: sr.name, srs[:50]))

        SubsciteitPopularityByLanguage._set_values(lang, {over18: sr_tuples})
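Following the comments and the _set_values call above, each (lang, over18_state) bucket is stored as a list of (downs, allow_top, sr_id) tuples; a hypothetical row for illustration:

# SubsciteitPopularityByLanguage row for lang 'en' (values invented):
#   {'allow_over18': [(15234, True, 42), (9921, False, 7), ...],
#    'no_over18':    [(15234, True, 42), ...]}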
Example #29
def new_comment(comment, inbox_rels):
    author = Account._byID(comment.author_id)
    job = [get_comments(author, 'new', 'all'),
           get_comments(author, 'top', 'all'),
           get_comments(author, 'controversial', 'all')]

    sr = Subsciteit._byID(comment.sr_id)

    if comment._deleted:
        job_key = "delete_items"
        job.append(get_sr_comments(sr))
        job.append(get_all_comments())
    else:
        job_key = "insert_items"
        if comment._spam:
            job.append(get_spam_comments(sr))
        amqp.add_item('new_comment', comment._fullname)
        if not g.amqp_host:
            add_comment_tree([comment])

    job_dict = { job_key: comment }
    add_queries(job, **job_dict)

    # note that get_all_comments() is updated by the amqp process
    # r2.lib.db.queries.run_new_comments (to minimise lock contention)

    if inbox_rels:
        for inbox_rel in tup(inbox_rels):
            inbox_owner = inbox_rel._thing1
            job_dict = { job_key: inbox_rel }
            if inbox_rel._name == "inbox":
                inbox_func  = get_inbox_comments
                unread_func = get_unread_comments
            elif inbox_rel._name == "selfreply":
                inbox_func = get_inbox_selfreply
                unread_func = get_unread_selfreply
            else:
                raise ValueError("wtf is " + inbox_rel._name)

            add_queries([inbox_func(inbox_owner)], **job_dict)

            if comment._deleted:
                add_queries([unread_func(inbox_owner)], **job_dict)
            else:
                set_unread(comment, inbox_owner, True)
Example #30
def new_link(link):
    "Called on the submission and deletion of links"
    sr = Subsciteit._byID(link.sr_id)
    author = Account._byID(link.author_id)

    results = [get_links(sr, 'new', 'all',no_children=True)]
    # we don't have to do hot/top/controversy because new_vote will do
    # that

    results.append(get_submitted(author, 'new', 'all'))

    for domain in utils.UrlParser(link.url).domain_permutations():
        results.append(get_domain_links(domain, 'new', "all"))

    if link._spam:
        results.append(get_spam_links(sr))

    add_queries(results, insert_items = link)
    amqp.add_item('new_link', link._fullname)
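For context, Example #5 above is a call site for this function; the flow there is roughly the following (names follow Example #5).

l = Link._submit(kw['title'], kw['link'], ac, sr, ip, spam=False)
l._commit()
queries.new_link(l)  # refreshes the 'new' listings for the subsciteit, the author, and the url's domains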