Example #1
def new_promotion(title, url, selftext, user, ip):
    """
    Creates a new promotion with the provided title, etc, and sets its
    status to be 'unpaid'.
    """
    sr = Subreddit._byID(get_promote_srid())
    l = Link._submit(title, url, user, sr, ip)
    l.promoted = True
    l.disable_comments = False
    PromotionLog.add(l, "promotion created")

    if url == "self":
        l.url = l.make_permalink_slow()
        l.is_self = True
        l.selftext = selftext

    l._commit()

    update_promote_status(l, PROMOTE_STATUS.unpaid)

    # the user has posted a promotion, so enable the promote menu unless
    # they have already opted out
    if user.pref_show_promote is not False:
        user.pref_show_promote = True
        user._commit()

    # notify of new promo
    emailer.new_promo(l)
    return l
Example #2
def submit_all():
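    """Resubmit each URL in test_urls to the 'testmedia' subreddit,
    deleting any existing copy of the link and scraping media for each."""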
    from r2.models import Subdigg, Account, Link, NotFound
    from r2.lib.media import set_media
    from r2.lib.db import queries
    sr = Subdigg._by_name('testmedia')
    author = Account._by_name('testmedia')
    links = []
    for url in test_urls:
        try:
            # delete any existing version of the link
            l = Link._by_url(url, sr)
            print "Deleting %s" % l
            l._deleted = True
            l._commit()
        except NotFound:
            pass

        l = Link._submit(url, url, author, sr, '0.0.0.0')

        try:
            set_media(l)
        except Exception, e:
            print e

        if g.write_query_queue:
            queries.new_link(l)

        links.append(l)
Example #3
def dfp_creative_to_link(creative, link=None):
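    # Convert a DFP creative into a promoted Link, creating the Link first
    # if an existing one wasn't passed in.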
    from r2.models import (
        Link,
        PROMOTE_STATUS,
    )

    user = get_dfp_user()
    sr = get_dfp_subreddit()
    attributes = dfp_template_to_dict(
        creative.creativeTemplateVariableValues)

    kind = "self" if attributes["selftext"] else "link"
    url = attributes["url"] if kind == "link" else "self"

    if not link:
        link = Link._submit(
            attributes["title"], url, user, sr,
            ip="127.0.0.1", sendreplies=False,
        )

    if kind == "self":
        link.url = link.make_permalink_slow()
        link.is_self = True
        link.selftext = attributes["selftext"]

    link.promoted = True
    link.promote_status = PROMOTE_STATUS.promoted
    link.thumbnail_url = attributes["thumbnail_url"]
    link.mobile_ad_url = attributes["mobile_ad_url"]
    link.third_party_tracking = attributes["third_party_tracking"]
    link.third_party_tracking_2 = attributes["third_party_tracking_2"]
    link.dfp_creative_id = creative["id"]

    link._commit()
    return link
Example #4
def submit_link(user, subreddit, title, url, thumb_url):
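    """Submit `url` as `user` to `subreddit`, force its thumbnail from
    `thumb_url`, and run the usual post-submit queue processing."""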
    account = Account._by_name(user)
    subreddit = Subreddit._by_name(subreddit)
    ip = '127.0.0.1'

    # submit the link
    link = Link._submit(
        is_self=False,
        title=title,
        content=url,
        author=account,
        sr=subreddit,
        ip=ip,
        spam=False,
    )

    try:
        # force the thumbnail before scraper_q gets in the mix
        image_data = urllib.urlopen(thumb_url).read()
        force_thumbnail(link, image_data)
    except:
        pass

    # various backend processing things
    queries.new_link(link)
    link.update_search_index()

    # wait for the amqp worker to finish up
    worker.join()

    print link.make_permalink_slow()
Example #5
def _create_link(creative):

    """
    Creates a link to allow third party voting/commenting
    """

    user = _get_user()
    sr = _get_subreddit()
    attributes = _template_to_dict(creative)

    kind = "self" if attributes["selftext"] else "link"
    url = attributes["url"] if kind == "link" else "self"
    link = Link._submit(
        attributes["title"], url, user, sr,
        ip="127.0.0.1", sendreplies=False,
    )

    if kind == "self":
        link.url = link.make_permalink_slow()
        link.is_self = True
        link.selftext = attributes["selftext"]

    link.promoted = True
    link.third_party_promo = True
    link.thumbnail_url = attributes["thumbnail_url"]
    link.mobile_ad_url = attributes["mobile_ad_url"]
    link.third_party_tracking = attributes["third_party_tracking"]
    link.third_party_tracking_2 = attributes["third_party_tracking_2"]
    link.external_id = creative["id"]

    link._commit()
    return link
Example #6
def new_promotion(is_self, title, content, author, ip):
    """
    Creates a new promotion with the provided title, etc, and sets its
    status to be 'unpaid'.
    """
    sr = Subreddit._byID(Subreddit.get_promote_srid())
    l = Link._submit(
        is_self=is_self,
        title=title,
        content=content,
        author=author,
        sr=sr,
        ip=ip,
    )

    l.promoted = True
    l.disable_comments = False
    l.sendreplies = True
    PromotionLog.add(l, 'promotion created')

    update_promote_status(l, PROMOTE_STATUS.unpaid)

    # the user has posted a promotion, so enable the promote menu unless
    # they have already opted out
    if author.pref_show_promote is not False:
        author.pref_show_promote = True
        author._commit()

    # notify of new promo
    emailer.new_promo(l)
    return l
Example #7
def new_promotion(is_self, title, content, author, ip):
    """
    Creates a new promotion with the provided title, etc, and sets its
    status to be 'unpaid'.
    """
    sr = Subreddit._byID(Subreddit.get_promote_srid())
    l = Link._submit(
        is_self=is_self,
        title=title,
        content=content,
        author=author,
        sr=sr,
        ip=ip,
    )

    l.promoted = True
    l.disable_comments = False
    l.sendreplies = True
    PromotionLog.add(l, 'promotion created')

    update_promote_status(l, PROMOTE_STATUS.unpaid)

    # the user has posted a promotion, so enable the promote menu unless
    # they have already opted out
    if author.pref_show_promote is not False:
        author.pref_show_promote = True
        author._commit()

    # notify of new promo
    emailer.new_promo(l)
    return l
Example #8
def submit_rss_links(srname, rss, user, titlefield='title', linkfield='link'):
    """Fetch an RSS feed and submit each of its entries as a Link to srname."""
    # F**k the API, let's just do it the way we would if we were really
    # doing it.  This avoids screwing around with cookies and so forth...
    feed = fetch_feed(rss)
    if feed is None:
        return
    ac = Account._byID(user)
    sr = Subsciteit._by_name(srname)
    ip = '0.0.0.0'
    niceify = False
    if domain(rss) == "arxiv.org":
        niceify = dict(find="\(arXiv:.*?\)", replace="")
    # Let's randomize why not...
    random.shuffle(feed.entries)
    for article in feed.entries:
        # This can take all night if it has to, we don't want to hammer
        # the server into oblivion
        sleep(1)
        kw = fetch_article(article, titlefield=titlefield,
                           linkfield=linkfield, niceify=niceify)
        if kw is None:
            continue
        l = Link._submit(kw['title'], kw['link'], ac, sr, ip, spam=False)
        l._commit()
        l.set_url_cache()
        # We don't really need auto-submitted links to be voted on...
        queries.queue_vote(ac, l, True, ip, cheater=False)
        queries.new_link(l)
        changed(l)
        print "Submitted %s" % article[titlefield]
        sleep(.1)
    return
Example #9
def new_promotion(title, url, selftext, user, ip):
    """
    Creates a new promotion with the provided title, etc, and sets its
    status to be 'unpaid'.
    """
    sr = Subreddit._byID(get_promote_srid())
    l = Link._submit(title, url, user, sr, ip)
    l.promoted = True
    l.disable_comments = False
    PromotionLog.add(l, 'promotion created')

    if url == 'self':
        l.url = l.make_permalink_slow()
        l.is_self = True
        l.selftext = selftext

    l._commit()

    # set the status of the link, populating the query queue
    if c.user_is_sponsor or user.trusted_sponsor:
        set_promote_status(l, PROMOTE_STATUS.accepted)
    else:
        set_promote_status(l, PROMOTE_STATUS.unpaid)

    # the user has posted a promotion, so enable the promote menu unless
    # they have already opted out
    if user.pref_show_promote is not False:
        user.pref_show_promote = True
        user._commit()

    # notify of new promo
    emailer.new_promo(l)
    return l
Example #10
def new_promotion(title, url, selftext, user, ip):
    """
    Creates a new promotion with the provided title, etc, and sets its
    status to be 'unpaid'.
    """
    sr = Subreddit._byID(Subreddit.get_promote_srid())
    l = Link._submit(title, url, user, sr, ip)
    l.promoted = True
    l.disable_comments = False
    l.sendreplies = True
    PromotionLog.add(l, 'promotion created')

    if url == 'self':
        l.url = l.make_permalink_slow()
        l.is_self = True
        l.selftext = selftext

    l._commit()

    update_promote_status(l, PROMOTE_STATUS.unpaid)

    # the user has posted a promotion, so enable the promote menu unless
    # they have already opted out
    if user.pref_show_promote is not False:
        user.pref_show_promote = True
        user._commit()

    # notify of new promo
    emailer.new_promo(l)
    return l
Example #11
    def process_post(self, post_data, sr):
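        """Import one exported post into `sr`, creating a new Link or
        updating the Link created by a previous import, then import its
        comments."""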
        # Prepare data for import
        title = self.kill_tags_re.sub('', post_data['title'])
        article = u'%s%s' % (post_data['description'],
                             Link._more_marker + post_data['mt_text_more']
                             if post_data['mt_text_more'] else u'')
        ip = '127.0.0.1'
        tags = [
            self.transform_categories_re.sub('_', tag.lower())
            for tag in post_data.get('category', [])
        ]
        naive_date = datetime.datetime.strptime(post_data['dateCreated'],
                                                DATE_FORMAT)
        local_date = INPUT_TIMEZONE.localize(
            naive_date, is_dst=False)  # Pick the non daylight savings time
        utc_date = local_date.astimezone(pytz.utc)

        # Determine account to use for this post
        account = self._get_or_create_account(post_data['author'],
                                              post_data['authorEmail'])

        # Look for an existing post created due to a previous import
        post = self._query_post(Link.c.ob_permalink == post_data['permalink'])

        if not post:
            # Create new post
            post = Link._submit(title,
                                article,
                                account,
                                sr,
                                ip,
                                tags,
                                date=utc_date)
            post.blessed = True
            post.comment_sort_order = 'old'
            post.ob_permalink = post_data['permalink']
            post._commit()
        else:
            # Update existing post
            post.title = title
            post.article = article
            post.author_id = account._id
            post.sr_id = sr._id
            post.ip = ip
            post.set_tags(tags)
            post._date = utc_date
            post.blessed = True
            post.comment_sort_order = 'old'
            post._commit()

        # Process each comment for this post
        comment_dictionary = {}
        comments = self._query_comments(Comment.c.link_id == post._id,
                                        Comment.c.ob_imported == True)
        [
            self.process_comment(comment_data, comment, post,
                                 comment_dictionary)
            for comment_data, comment in map(None, post_data.get(
                'comments', []), comments)
        ]
Example #12
def new_promotion(title, url, user, ip):
    """
    Creates a new promotion with the provided title, etc, and sets its
    status to be 'unpaid'.
    """
    sr = Subreddit._byID(get_promote_srid())
    l = Link._submit(title, url, user, sr, ip)
    l.promoted = True
    l.disable_comments = False
    PromotionLog.add(l, 'promotion created')
    l._commit()

    # set the status of the link, populating the query queue
    if c.user_is_sponsor or user.trusted_sponsor:
        set_promote_status(l, PROMOTE_STATUS.accepted)
    else:
        set_promote_status(l, PROMOTE_STATUS.unpaid)

    # the user has posted a promotion, so enable the promote menu unless
    # they have already opted out
    if user.pref_show_promote is not False:
        user.pref_show_promote = True
        user._commit()

    # notify of new promo
    emailer.new_promo(l)
    return l
Example #13
def post_if_goal_reached(date):
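    """If the gold revenue goal for `date` was met, submit the daily
    goal-reached selfpost and notify that day's gold buyers."""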
    # bail out if this day's already been submitted
    for link in get_recent_name_submissions():
        if link.revenue_date == date:
            return

    revenue = gold_revenue_multi([date]).get(date, 0)
    goal = gold_goal_on(date)
    percent = revenue / float(goal)
    bucket = int(percent)
    if bucket == 0:
        return

    buyer_count = len(gold_buyers_on(date))
    template_wp = WikiPage.get(SERVERNAME_SR, "templates/selftext")
    template = random.choice(template_wp._get("content").split("\r\n---\r\n"))
    boilerplate = WikiPage.get(SERVERNAME_SR,
                               "templates/boilerplate")._get("content")
    selftext_template = template + "\n\n---\n\n" + boilerplate

    link = Link._submit(
        is_self=True,
        title=date.strftime("%a %Y-%m-%d"),
        content=selftext_template % {
            "percent": int(percent * 100),
            "buyers": buyer_count,
        },
        author=SYSTEM_ACCOUNT,
        sr=SERVERNAME_SR,
        ip="127.0.0.1",
        spam=False,
    )

    link.flair_text = "Name pending..."
    link.flair_css_class = "goal-bucket-%d-active" % bucket
    link.revenue_date = date
    link.revenue_bucket = bucket
    link.server_names = []
    link._commit()

    queries.new_link(link)
    link.update_search_index()

    template = WikiPage.get(SERVERNAME_SR,
                            "templates/notification-message")._get("content")
    subject_template, sep, body_template = template.partition("\r\n")
    for id in gold_buyers_on(date):
        recipient = Account._byID(id, data=True)
        send_system_message(
            recipient,
            subject_template,
            body_template % {
                "percent": int(percent * 100),
                "buyers": buyer_count,
                "user": recipient.name,
                "link": link.url,
            },
        )
Example #14
def post_if_goal_reached(date):
    # bail out if this day's already been submitted
    for link in get_recent_name_submissions():
        if link.revenue_date == date:
            return

    revenue = gold_revenue_multi([date]).get(date, 0)
    goal = gold_goal_on(date)
    percent = revenue / float(goal)
    bucket = int(percent)
    if bucket == 0:
        return

    buyer_count = len(gold_buyers_on(date))
    template_wp = WikiPage.get(SERVERNAME_SR, "templates/selftext")
    template = random.choice(template_wp._get("content").split("\r\n---\r\n"))
    boilerplate = WikiPage.get(SERVERNAME_SR, "templates/boilerplate")._get("content")
    selftext_template = template + "\n\n---\n\n" + boilerplate

    link = Link._submit(
        is_self=True,
        title=date.strftime("%a %Y-%m-%d"),
        content=selftext_template % {
            "percent": int(percent * 100),
            "buyers": buyer_count,
        },
        author=SYSTEM_ACCOUNT,
        sr=SERVERNAME_SR,
        ip="127.0.0.1",
        spam=False,
    )

    link.flair_text = "Name pending..."
    link.flair_css_class = "goal-bucket-%d-active" % bucket
    link.revenue_date = date
    link.revenue_bucket = bucket
    link.server_names = []
    link._commit()

    UPVOTE = True
    queries.queue_vote(SYSTEM_ACCOUNT, link, UPVOTE, "127.0.0.1")
    queries.new_link(link)
    link.update_search_index()

    template = WikiPage.get(SERVERNAME_SR, "templates/notification-message")._get("content")
    subject_template, sep, body_template = template.partition("\r\n")
    for id in gold_buyers_on(date):
        recipient = Account._byID(id, data=True)
        send_system_message(
            recipient,
            subject_template,
            body_template % {
                "percent": int(percent * 100),
                "buyers": buyer_count,
                "user": recipient.name,
                "link": link.url,
            },
        )
Example #15
    def test_get_links(self):
        from r2.lib.db import queries
        from r2.models import Subreddit, Account, Link, Thing

        account = Account._byID(1, data=True)
        sr = Subreddit._by_name("reddit_test0")
        link_url = self.make_unique_url()

        new_link = Link._submit("test_get_links", link_url, account, sr, "127.0.0.1", kind="link")
        queries.new_link(new_link, foreground=True)

        res = Thing._by_fullname(queries.get_links(sr, "new", "all"), return_dict=False)
        self.assert_true(len(res) > 0, "no links returned")
        self.assert_equal(new_link._id, res[0]._id)
Example #16
  def POST_create(self, res, title, description, location, latitude, longitude, timestamp, tzoffset, ip):
    if res._chk_error(errors.NO_TITLE):
      res._chk_error(errors.TITLE_TOO_LONG)
      res._focus('title')

    res._chk_errors((errors.NO_LOCATION,
                     errors.NO_DESCRIPTION,
                     errors.INVALID_DATE,
                     errors.NO_DATE))

    if res.error: return

    meetup = Meetup(
      author_id = c.user._id,

      title = title,
      description = description,

      location = location,
      latitude = latitude,
      longitude = longitude,

      timestamp = timestamp,
      tzoffset = tzoffset
    )

    # Expire all meetups in the render cache
    g.rendercache.invalidate_key_group(Meetup.group_cache_key())

    meetup._commit()

    l = Link._submit(meetup_article_title(meetup), meetup_article_text(meetup),
                     c.user, Subreddit._by_name('meetups'),ip, [])

    l.meetup = meetup._id36
    l._commit()
    meetup.assoc_link = l._id
    meetup._commit()

    when = datetime.now(g.tz) + timedelta(0, 3600)  # Leave a short window of time before notification, in case
                                                    # the meetup is edited/deleted soon after its creation
    PendingJob.store(when, 'process_new_meetup', {'meetup_id': meetup._id})

    #update the queries
    if g.write_query_queue:
      queries.new_link(l)

    res._redirect(url_for(action='show', id=meetup._id36))
Example #17
    def test_sending_an_email(self):
        sr = Subreddit._by_name('reddit_test0')
        account = self.get_test_user()
        sr.add_subscriber(account)
        self.assertIn(sr._id, account.spaces)

        summary_email.reset_last_email_sent_at_for_all_accounts()
        assert summary_email.should_send_activity_summary_email(account)

        link_url = self.make_unique_url()
        new_link = Link._submit("test_get_links", link_url, account, sr, '127.0.0.1', kind='link')
        queries.new_link(new_link, foreground=True)

        send_email = Mock()
        summary_email.send_account_summary_email(1, send_email=send_email)
        self.assert_equal(1, send_email.call_count)
        self.assert_equal('*****@*****.**', send_email.call_args[0][0])
Example #18
def submit_link(user, subreddit, title, url, thumb_url):
    account = Account._by_name(user)
    subreddit = Subreddit._by_name(subreddit)
    ip = '127.0.0.1'

    # submit the link
    link = Link._submit(title, url, account, subreddit, ip, spam=False)

    # force the thumbnail before scraper_q gets in the mix
    image_data = urllib.urlopen(thumb_url).read()
    force_thumbnail(link, image_data)

    # various backend processing things
    queries.queue_vote(account, link, True, ip)
    queries.new_link(link)
    queries.changed(link)

    print link.make_permalink_slow()
Example #19
  def POST_create(self, res, title, description, location, latitude, longitude, timestamp, tzoffset, ip):
    if res._chk_error(errors.NO_TITLE):
      res._chk_error(errors.TITLE_TOO_LONG)
      res._focus('title')

    res._chk_errors((errors.NO_LOCATION,
                     errors.NO_DESCRIPTION,
                     errors.INVALID_DATE,
                     errors.NO_DATE))

    if res.error: return

    meetup = Meetup(
      author_id = c.user._id,

      title = title,
      description = description,

      location = location,
      latitude = latitude,
      longitude = longitude,

      timestamp = timestamp,
      tzoffset = tzoffset
    )

    # Expire all meetups in the render cache
    g.rendercache.invalidate_key_group(Meetup.group_cache_key())

    meetup._commit()

    l = Link._submit(meetup_article_title(meetup), meetup_article_text(meetup),
                     c.user, Subreddit._by_name('discussion'),ip, [])

    l.meetup = meetup._id36
    l._commit()
    meetup.assoc_link = l._id
    meetup._commit()

    #update the queries
    if g.write_query_queue:
      queries.new_link(l)

    res._redirect(url_for(action='show', id=meetup._id36))
Example #20
    def process_post(self, post_data, sr):
        # Prepare data for import
        title = self.kill_tags_re.sub('', post_data['title'])
        article = u'%s%s' % (post_data['description'],
                             Link._more_marker + post_data['mt_text_more'] if post_data['mt_text_more'] else u'')
        ip = '127.0.0.1'
        tags = [self.transform_categories_re.sub('_', tag.lower()) for tag in post_data.get('category', [])]
        naive_date = datetime.datetime.strptime(post_data['dateCreated'], DATE_FORMAT)
        local_date = INPUT_TIMEZONE.localize(naive_date, is_dst=False) # Pick the non daylight savings time
        utc_date = local_date.astimezone(pytz.utc)

        # Determine account to use for this post
        account = self._get_or_create_account(post_data['author'], post_data['authorEmail'])

        # Look for an existing post created due to a previous import
        post = self._query_post(Link.c.ob_permalink == post_data['permalink'])

        if not post:
            # Create new post
            post = Link._submit(title, article, account, sr, ip, tags, date=utc_date)
            post.blessed = True
            post.comment_sort_order = 'old'
            post.ob_permalink = post_data['permalink']
            post._commit()
        else:
            # Update existing post
            post.title = title
            post.article = article
            post.author_id = account._id
            post.sr_id = sr._id
            post.ip = ip
            post.set_tags(tags)
            post._date = utc_date
            post.blessed = True
            post.comment_sort_order = 'old'
            post._commit()

        # Process each comment for this post
        comment_dictionary = {}
        comments = self._query_comments(Comment.c.link_id == post._id, Comment.c.ob_imported == True)
        [self.process_comment(comment_data, comment, post, comment_dictionary)
         for comment_data, comment in map(None, post_data.get('comments', []), comments)]
Example #21
    def test_get_files(self):
        from r2.lib.db import queries
        from r2.models import Subreddit, Account, Link, Thing

        account = Account._byID(1, data=True)
        sr = Subreddit._by_name("reddit_test0")
        link_url = self.make_unique_url()

        new_link = Link._submit("test_get_files", link_url, account, sr, "127.0.0.1", kind="file")
        queries.new_link(new_link, foreground=True)

        # make sure it returns like a normal link
        res = Thing._by_fullname(queries.get_links(sr, "new", "all"), return_dict=False)
        self.assert_true(len(res) > 0, "no links returned")
        self.assert_equal(new_link._id, res[0]._id)

        # should return with a kind = 'file' filter
        res = list(queries.get_files(sr))
        self.assert_true(len(res) > 0, "no links returned")
        self.assert_equal(new_link._id, res[0]._id)
Example #22
def submit_link(user, subreddit, title, url, thumb_url):
    account = Account._by_name(user)
    subreddit = Subreddit._by_name(subreddit)
    ip = '127.0.0.1'

    # submit the link
    link = Link._submit(title, url, account, subreddit, ip, spam=False)

    # force the thumbnail before scraper_q gets in the mix
    image_data = urllib.urlopen(thumb_url).read()
    force_thumbnail(link, image_data)

    # various backend processing things
    queries.queue_vote(account, link, UPVOTE, ip)
    queries.new_link(link)
    link.update_search_index()

    # wait for the amqp worker to finish up
    worker.join()

    print link.make_permalink_slow()
Example #23
def submit_link(user, subreddit, title, url, thumb_url):
    account = Account._by_name(user)
    subreddit = Subreddit._by_name(subreddit)
    ip = '127.0.0.1'

    # submit the link
    link = Link._submit(title, url, account, subreddit, ip, spam=False)

    # force the thumbnail before scraper_q gets in the mix
    image_data = urllib.urlopen(thumb_url).read()
    force_thumbnail(link, image_data)

    # various backend processing things
    queries.queue_vote(account, link, UPVOTE, ip)
    queries.new_link(link)
    link.update_search_index()

    # wait for the amqp worker to finish up
    worker.join()

    print link.make_permalink_slow()
Example #24
    def POST_create(self, res, title, description, location, latitude,
                    longitude, timestamp, tzoffset, ip):
        if res._chk_error(errors.NO_TITLE):
            res._chk_error(errors.TITLE_TOO_LONG)
            res._focus('title')

        res._chk_errors((errors.NO_LOCATION, errors.NO_DESCRIPTION,
                         errors.INVALID_DATE, errors.NO_DATE))

        if res.error: return

        meetup = Meetup(
            author_id=c.user._id,
            title=title,
            description=description,
            location=location,
            latitude=latitude,
            longitude=longitude,
            timestamp=timestamp / 1000,  # Value from form is in ms UTC
            tzoffset=tzoffset)

        # Expire all meetups in the render cache
        g.rendercache.invalidate_key_group(Meetup.group_cache_key())

        meetup._commit()

        l = Link._submit(meetup_article_title(meetup),
                         meetup_article_text(meetup), c.user,
                         Subreddit._by_name('discussion'), ip, [])

        l.meetup = meetup._id36
        l._commit()
        meetup.assoc_link = l._id
        meetup._commit()

        #update the queries
        if g.write_query_queue:
            queries.new_link(l)

        res._redirect(url_for(action='show', id=meetup._id36))
Example #25
def dfp_creative_to_link(creative, link=None):
    from r2.models import (
        Link,
        PROMOTE_STATUS,
    )

    user = get_dfp_user()
    sr = get_dfp_subreddit()
    attributes = dfp_template_to_dict(creative.creativeTemplateVariableValues)

    kind = "self" if attributes["selftext"] else "link"
    url = attributes["url"] if kind == "link" else "self"

    if not link:
        link = Link._submit(
            attributes["title"],
            url,
            user,
            sr,
            ip="127.0.0.1",
            sendreplies=False,
        )

    if kind == "self":
        link.url = link.make_permalink_slow()
        link.is_self = True
        link.selftext = attributes["selftext"]

    link.promoted = True
    link.promote_status = PROMOTE_STATUS.promoted
    link.thumbnail_url = attributes["thumbnail_url"]
    link.mobile_ad_url = attributes["mobile_ad_url"]
    link.third_party_tracking = attributes["third_party_tracking"]
    link.third_party_tracking_2 = attributes["third_party_tracking_2"]
    link.dfp_creative_id = creative["id"]

    link._commit()
    return link
Example #26
def inject_test_data(num_links=25, num_comments=25, num_votes=5):
    """Flood your reddit install with test data based on reddit.com."""

    print ">>>> Ensuring configured objects exist"
    system_user = ensure_account(g.system_user)
    ensure_account(g.automoderator_account)
    ensure_subreddit(g.default_sr, system_user)
    ensure_subreddit(g.takedown_sr, system_user)
    ensure_subreddit(g.beta_sr, system_user)
    ensure_subreddit(g.promo_sr_name, system_user)

    print
    print

    print ">>>> Fetching real data from reddit.com"
    modeler = Modeler()
    subreddits = [
        modeler.model_subreddit("pics"),
        modeler.model_subreddit("worldnews"),
        modeler.model_subreddit("gaming"),
    ]
    extra_settings = {
        "worldnews": {
            "show_media": True,
        },
        "pics": {
            "show_media": True,
        },
    }

    print
    print

    print ">>>> Generating test data"
    print ">>> Accounts"
    account_query = Account._query(sort="_date", limit=500, data=True)
    accounts = [a for a in account_query if a.name != g.system_user]
    accounts.extend(
        ensure_account(modeler.generate_username())
        for i in xrange(50 - len(accounts)))

    print ">>> Content"
    things = []
    for sr_model in subreddits:
        sr_author = random.choice(accounts)
        sr = ensure_subreddit(sr_model.name, sr_author)

        # make the system user subscribed for easier testing
        if sr.add_subscriber(system_user):
            sr._incr("_ups", 1)

        # apply any custom config we need for this sr
        for setting, value in extra_settings.get(sr.name, {}).iteritems():
            setattr(sr, setting, value)
        sr._commit()

        for i in xrange(num_links):
            link_author = random.choice(accounts)
            url = sr_model.generate_link_url()
            is_self = (url == "self")
            content = sr_model.generate_selfpost_body() if is_self else url
            link = Link._submit(
                is_self=is_self,
                title=sr_model.generate_link_title(),
                content=content,
                author=link_author,
                sr=sr,
                ip="127.0.0.1",
            )
            queries.new_link(link)
            things.append(link)

            comments = [None]
            for i in xrange(fuzz_number(num_comments)):
                comment_author = random.choice(accounts)
                comment, inbox_rel = Comment._new(
                    comment_author,
                    link,
                    parent=random.choice(comments),
                    body=sr_model.generate_comment_body(),
                    ip="127.0.0.1",
                )
                queries.new_comment(comment, inbox_rel)
                comments.append(comment)
                things.append(comment)

    for thing in things:
        for i in xrange(fuzz_number(num_votes)):
            direction = random.choice([
                Vote.DIRECTIONS.up,
                Vote.DIRECTIONS.unvote,
                Vote.DIRECTIONS.down,
            ])
            voter = random.choice(accounts)

            cast_vote(voter, thing, direction)

    amqp.worker.join()

    srs = [Subreddit._by_name(n) for n in ("worldnews", "pics")]
    LocalizedDefaultSubreddits.set_global_srs(srs)
    LocalizedFeaturedSubreddits.set_global_srs(
        [Subreddit._by_name('worldnews')])
Example #27
def inject_test_data(num_links=25, num_comments=25, num_votes=5):
    """Flood your reddit install with test data based on reddit.com."""

    print ">>>> Ensuring configured objects exist"
    system_user = ensure_account(g.system_user)
    ensure_account(g.automoderator_account)
    ensure_subreddit(g.default_sr, system_user)
    ensure_subreddit(g.takedown_sr, system_user)

    print
    print

    print ">>>> Fetching real data from reddit.com"
    modeler = Modeler()
    subreddits = [
        modeler.model_subreddit("pics"),
        modeler.model_subreddit("videos"),
        modeler.model_subreddit("askhistorians"),
    ]
    extra_settings = {
        "pics": {
            "show_media": True,
        },
        "videos": {
            "show_media": True,
        },
    }

    print
    print

    print ">>>> Generating test data"
    print ">>> Accounts"
    account_query = Account._query(sort="_date", limit=500, data=True)
    accounts = [a for a in account_query if a.name != g.system_user]
    accounts.extend(
        ensure_account(modeler.generate_username())
        for i in xrange(50 - len(accounts)))

    print ">>> Content"
    things = []
    for sr_model in subreddits:
        sr_author = random.choice(accounts)
        sr = ensure_subreddit(sr_model.name, sr_author)

        # make the system user subscribed for easier testing
        if sr.add_subscriber(system_user):
            sr._incr("_ups", 1)

        # apply any custom config we need for this sr
        for setting, value in extra_settings.get(sr.name, {}).iteritems():
            setattr(sr, setting, value)
        sr._commit()

        for i in xrange(num_links):
            link_author = random.choice(accounts)

            link = Link._submit(
                title=sr_model.generate_link_title(),
                url=sr_model.generate_link_url(),
                author=link_author,
                sr=sr,
                ip="127.0.0.1",
            )
            if link.url == "self":
                link.url = link.make_permalink(sr)
                link.is_self = True
                link.selftext = sr_model.generate_selfpost_body()
                link._commit()
            queries.queue_vote(link_author, link, dir=True, ip="127.0.0.1")
            queries.new_link(link)
            things.append(link)

            comments = [None]
            for i in xrange(fuzz_number(num_comments)):
                comment_author = random.choice(accounts)
                comment, inbox_rel = Comment._new(
                    comment_author,
                    link,
                    parent=random.choice(comments),
                    body=sr_model.generate_comment_body(),
                    ip="127.0.0.1",
                )
                queries.queue_vote(comment_author,
                                   comment,
                                   dir=True,
                                   ip="127.0.0.1")
                queries.new_comment(comment, inbox_rel)
                comments.append(comment)
                things.append(comment)

    for thing in things:
        for i in xrange(fuzz_number(num_votes)):
            direction = random.choice([True, None, False])
            voter = random.choice(accounts)
            queries.queue_vote(voter, thing, dir=direction, ip="127.0.0.1")

    amqp.worker.join()
Example #28
def create_about_post():
    user = Account._by_name('Eliezer_Yudkowsky')
    sr = Subreddit._by_name('admin')
    link = Link._submit('About LessWrong', 'TBC', user, sr, '::1', [])
Example #29
def inject_test_data(num_links=25, num_comments=25, num_votes=5):
    """Flood your reddit install with test data based on reddit.com."""

    print ">>>> Ensuring configured objects exist"
    system_user = ensure_account(g.system_user)
    ensure_account(g.automoderator_account)
    ensure_subreddit(g.default_sr, system_user)
    ensure_subreddit(g.takedown_sr, system_user)
    ensure_subreddit(g.beta_sr, system_user)
    ensure_subreddit(g.promo_sr_name, system_user)

    print
    print

    print ">>>> Fetching real data from reddit.com"
    modeler = Modeler()
    subreddits = [
        modeler.model_subreddit("pics"),
        modeler.model_subreddit("videos"),
        modeler.model_subreddit("askhistorians"),
    ]
    extra_settings = {
        "pics": {
            "show_media": True,
        },
        "videos": {
            "show_media": True,
        },
    }

    print
    print

    print ">>>> Generating test data"
    print ">>> Accounts"
    account_query = Account._query(sort="_date", limit=500, data=True)
    accounts = [a for a in account_query if a.name != g.system_user]
    accounts.extend(
        ensure_account(modeler.generate_username())
        for i in xrange(50 - len(accounts)))

    print ">>> Content"
    things = []
    for sr_model in subreddits:
        sr_author = random.choice(accounts)
        sr = ensure_subreddit(sr_model.name, sr_author)

        # make the system user subscribed for easier testing
        if sr.add_subscriber(system_user):
            sr._incr("_ups", 1)

        # apply any custom config we need for this sr
        for setting, value in extra_settings.get(sr.name, {}).iteritems():
            setattr(sr, setting, value)
        sr._commit()

        for i in xrange(num_links):
            link_author = random.choice(accounts)
            url = sr_model.generate_link_url()
            is_self = (url == "self")
            content = sr_model.generate_selfpost_body() if is_self else url
            link = Link._submit(
                is_self=is_self,
                title=sr_model.generate_link_title(),
                content=content,
                author=link_author,
                sr=sr,
                ip="127.0.0.1",
            )
            queries.new_link(link)
            things.append(link)

            comments = [None]
            for i in xrange(fuzz_number(num_comments)):
                comment_author = random.choice(accounts)
                comment, inbox_rel = Comment._new(
                    comment_author,
                    link,
                    parent=random.choice(comments),
                    body=sr_model.generate_comment_body(),
                    ip="127.0.0.1",
                )
                queries.new_comment(comment, inbox_rel)
                comments.append(comment)
                things.append(comment)

    for thing in things:
        for i in xrange(fuzz_number(num_votes)):
            direction = random.choice([
                Vote.DIRECTIONS.up,
                Vote.DIRECTIONS.unvote,
                Vote.DIRECTIONS.down,
            ])
            voter = random.choice(accounts)

            cast_vote(voter, thing, direction)

    amqp.worker.join()

    srs = [Subreddit._by_name(n) for n in ("pics", "videos", "askhistorians")]
    LocalizedDefaultSubreddits.set_global_srs(srs)
Example #30
def create_about_post():
    user = Account._by_name('Eliezer_Yudkowsky')
    sr = Subreddit._by_name('admin')
    link = Link._submit('About LessWrong', 'TBC', user, sr, '::1', [])