Code Example #1
def new_vote(vote, foreground=False):
    user = vote._thing1
    item = vote._thing2

    if not isinstance(item, (Link, Comment)):
        return

    if vote.valid_thing and not item._spam and not item._deleted:
        sr = item.subreddit_slow
        results = []

        author = Account._byID(item.author_id)
        for sort in ('hot', 'top', 'controversial', 'new'):
            if isinstance(item, Link):
                results.append(get_submitted(author, sort, 'all'))
            if isinstance(item, Comment):
                results.append(get_comments(author, sort, 'all'))

        if isinstance(item, Link):
            # don't do 'new', because that was done by new_link, and
            # the time-filtered versions of top/controversial will be
            # done by mr_top
            results.extend([
                get_links(sr, 'hot', 'all'),
                get_links(sr, 'top', 'all'),
                get_links(sr, 'controversial', 'all'),
            ])

            for domain in utils.UrlParser(item.url).domain_permutations():
                for sort in ("hot", "top", "controversial"):
                    results.append(get_domain_links(domain, sort, "all"))

        add_queries(results, insert_items=item, foreground=foreground)

    vote._fast_query_timestamp_touch(user)

    if isinstance(item, Link):
        # must update both because we don't know if it's a changed
        # vote
        if vote._name == '1':
            add_queries([get_liked(user)],
                        insert_items=vote,
                        foreground=foreground)
            add_queries([get_disliked(user)],
                        delete_items=vote,
                        foreground=foreground)
        elif vote._name == '-1':
            add_queries([get_liked(user)],
                        delete_items=vote,
                        foreground=foreground)
            add_queries([get_disliked(user)],
                        insert_items=vote,
                        foreground=foreground)
        else:
            add_queries([get_liked(user)],
                        delete_items=vote,
                        foreground=foreground)
            add_queries([get_disliked(user)],
                        delete_items=vote,
                        foreground=foreground)
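
The if/elif/else above pairs every insert with a delete on the opposite list, because the vote may be a change of an earlier vote in the other direction. A minimal restatement of that mapping, assuming the same '1' / '-1' / cleared encoding of vote._name (illustrative only, not part of the reddit codebase):

def liked_disliked_actions(vote_name):
    # Restates the branch logic of new_vote() above; illustrative only.
    if vote_name == '1':      # upvote
        return {'liked': 'insert', 'disliked': 'delete'}
    elif vote_name == '-1':   # downvote
        return {'liked': 'delete', 'disliked': 'insert'}
    else:                     # vote cleared: drop it from both lists
        return {'liked': 'delete', 'disliked': 'delete'}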
Code Example #2
File: cloudsearch.py Project: drewshaver/reddit
def site(self):
    # For self posts the site is the reddit instance's own domain;
    # otherwise expand the link URL into its domain permutations.
    if self.link.is_self:
        return g.domain
    else:
        try:
            url = r2utils.UrlParser(self.link.url)
            return list(url.domain_permutations())
        except ValueError:
            # UrlParser couldn't handle the URL
            return None
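
domain_permutations() itself is not shown in these snippets; from its use with get_domain_links() it evidently expands a URL's hostname into the set of domains a link can be filed under. A rough stdlib-only illustration of that idea (an assumption about its behavior, not the reddit implementation):

def domain_suffixes(hostname):
    # Illustrative only: expand a hostname into every trailing suffix.
    parts = hostname.split('.')
    return ['.'.join(parts[i:]) for i in range(len(parts))]

print(domain_suffixes("www.example.com"))
# ['www.example.com', 'example.com', 'com']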
Code Example #3
def new_vote(vote, foreground=False, timer=None):
    user = vote._thing1
    item = vote._thing2

    if timer is None:
        timer = SimpleSillyStub()

    if not isinstance(item, (Link, Comment)):
        return

    if vote.valid_thing and not item._spam and not item._deleted:
        sr = item.subreddit_slow
        results = []

        author = Account._byID(item.author_id)
        for sort in ('hot', 'top', 'controversial', 'new'):
            if isinstance(item, Link):
                results.append(get_submitted(author, sort, 'all'))
            if isinstance(item, Comment):
                results.append(get_comments(author, sort, 'all'))

        if isinstance(item, Link):
            # don't do 'new', because that was done by new_link, and
            # the time-filtered versions of top/controversial will be
            # done by mr_top
            results.extend([
                get_links(sr, 'hot', 'all'),
                get_links(sr, 'top', 'all'),
                get_links(sr, 'controversial', 'all'),
            ])

            for domain in utils.UrlParser(item.url).domain_permutations():
                for sort in ("hot", "top", "controversial"):
                    results.append(get_domain_links(domain, sort, "all"))

        add_queries(results, insert_items=item, foreground=foreground)

    timer.intermediate("permacache")

    if isinstance(item, Link):
        # must update both because we don't know if it's a changed
        # vote
        with CachedQueryMutator() as m:
            if vote._name == '1':
                m.insert(get_liked(user), [vote])
                m.delete(get_disliked(user), [vote])
            elif vote._name == '-1':
                m.delete(get_liked(user), [vote])
                m.insert(get_disliked(user), [vote])
            else:
                m.delete(get_liked(user), [vote])
                m.delete(get_disliked(user), [vote])
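
This later revision replaces the paired add_queries() calls of example #1 with a single batched CachedQueryMutator, and threads an optional timer through for instrumentation. SimpleSillyStub is not shown; a plausible no-op stand-in, assuming only intermediate() is ever called on it (the real class in reddit's utils may differ):

class SimpleSillyStub(object):
    # Hypothetical sketch: a do-nothing timer so callers need not
    # special-case the "no timer supplied" path.
    def intermediate(self, name):
        pass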
Code Example #4
def add_xml(thing, version, srs, accounts):
    '''Return an etree XML representation of the thing, suitable for
    sending to cloudsearch.
    '''
    add = etree.Element("add",
                        id=thing._fullname,
                        version=str(version),
                        lang="en")

    account = accounts[thing.author_id]
    sr = srs[thing.sr_id]
    nsfw = sr.over_18 or thing.over_18 or Link._nsfw.findall(thing.title)

    fields = {
        "ups": max(0, thing._ups),
        "downs": max(0, thing._downs),
        "num_comments": max(0, getattr(thing, 'num_comments', 0)),
        "fullname": thing._fullname,
        "subreddit": sr.name,
        "reddit": sr.name,
        "title": thing.title,
        "timestamp": thing._date.strftime("%s"),
        "sr_id": thing.sr_id,
        "over18": 1 if nsfw else 0,
        "is_self": 1 if thing.is_self else 0,
        "author_fullname": account._fullname,
    }

    if account._deleted:
        fields['author'] = '[deleted]'
    else:
        fields['author'] = account.name

    if thing.is_self:
        fields['site'] = g.domain
        if thing.selftext:
            fields['selftext'] = thing.selftext
    else:
        fields['url'] = thing.url
        try:
            fields['site'] = ' '.join(
                r2utils.UrlParser(thing.url).domain_permutations())
        except ValueError:
            # UrlParser couldn't handle thing.url, oh well
            pass

    for field_name, value in fields.iteritems():
        field = etree.SubElement(add, "field", name=field_name)
        field.text = _safe_xml_str(value)

    return add
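
Serialized, the element built by add_xml() becomes one CloudSearch <add> document. A standalone sketch of that structure using lxml (the values here are made up; only the element and attribute layout mirrors the code above):

from lxml import etree

add = etree.Element("add", id="t3_example", version="1", lang="en")
field = etree.SubElement(add, "field", name="title")
field.text = "Example title"
print(etree.tostring(add, pretty_print=True))
# <add id="t3_example" version="1" lang="en">
#   <field name="title">Example title</field>
# </add>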
Code Example #5
    def fields(self, thing):
        '''Return fields relevant to a Link search index'''
        account = self.accounts[thing.author_id]
        sr = self.srs[thing.sr_id]
        nsfw = sr.over_18 or thing.over_18 or Link._nsfw.findall(thing.title)

        fields = {
            "ups": max(0, thing._ups),
            "downs": max(0, thing._downs),
            "num_comments": max(0, getattr(thing, 'num_comments', 0)),
            "fullname": thing._fullname,
            "subreddit": sr.name,
            "reddit": sr.name,
            "title": thing.title,
            "timestamp": int(time.mktime(thing._date.utctimetuple())),
            "sr_id": thing.sr_id,
            "over18": 1 if nsfw else 0,
            "is_self": 1 if thing.is_self else 0,
            "author_fullname": account._fullname,
            "type_id": thing._type_id
        }

        if account._deleted:
            fields['author'] = '[deleted]'
        else:
            fields['author'] = account.name

        if thing.is_self:
            fields['site'] = g.domain
            if thing.selftext:
                fields['selftext'] = thing.selftext
        else:
            fields['url'] = thing.url
            try:
                url = r2utils.UrlParser(thing.url)
                fields['site'] = list(url.domain_permutations())
            except ValueError:
                # UrlParser couldn't handle thing.url, oh well
                pass

        if thing.flair_css_class or thing.flair_text:
            fields['flair_css_class'] = thing.flair_css_class or ''
            fields['flair_text'] = thing.flair_text or ''

        return fields
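
One detail worth comparing with example #4: strftime("%s") there is a non-standard glibc extension, while the int(time.mktime(thing._date.utctimetuple())) spelling above uses only documented stdlib calls (time.mktime interprets the tuple in local time; calendar.timegm is the strictly-UTC stdlib alternative). A self-contained comparison:

import calendar
import time
from datetime import datetime

d = datetime(2012, 1, 1)
print(int(time.mktime(d.utctimetuple())))  # local-time interpretation, as above
print(calendar.timegm(d.utctimetuple()))   # strictly-UTC alternative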
Code Example #6
def new_link(link):
    "Called on the submission and deletion of links"
    sr = Subreddit._byID(link.sr_id)
    author = Account._byID(link.author_id)

    results = [get_links(sr, 'new', 'all')]
    # we don't have to do hot/top/controversy because new_vote will do
    # that

    results.append(get_submitted(author, 'new', 'all'))

    for domain in utils.UrlParser(link.url).domain_permutations():
        results.append(get_domain_links(domain, 'new', "all"))

    if link._spam:
        results.append(get_spam_links(sr))

    add_queries(results, insert_items=link)
    amqp.add_item('new_link', link._fullname)
Code Example #7
File: utils_test.py Project: zeantsoi/reddit
def test_url_query(self):
    # Send a reddit.com url and get back a different outbound url that
    # carries the original in its query string.
    outbound = utils.generate_outbound_link(self.thing, self.url)
    urlparser = utils.UrlParser(outbound.url)
    self.assertEqual(urlparser.query_dict["url"], self.url)
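
query_dict on UrlParser evidently exposes the URL's parsed query string as a mapping, which is what lets the test recover the original URL from the outbound link. The same check with only the Python 2 stdlib (matching the codebase; this is not reddit's UrlParser):

from urlparse import urlparse, parse_qs

outbound_url = "https://out.reddit.com/t3_xxx?url=http%3A%2F%2Fexample.com%2F"
query = parse_qs(urlparse(outbound_url).query)
print(query["url"][0])
# http://example.com/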