Example No. 1
    def _handle_check_edits(payload):
        existing = Link._by_fullname(payload["link"], data=True)
        creative = creatives_service.get_creative(existing)

        link = utils.dfp_creative_to_link(
            creative, link=Link._by_fullname(payload["link"], data=True))

        link.dfp_checking_edits = False
        link._commit()
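
As the examples throughout this page suggest, `Link._by_fullname` follows reddit's generic `Thing._by_fullname` contract: a single fullname returns one object, an iterable of fullnames returns a dict keyed by fullname, `return_dict=False` yields a list instead, and missing names raise `NotFound`. A minimal sketch of the calling patterns (the fullnames are hypothetical):

# Hypothetical fullnames, for illustration only.
link = Link._by_fullname("t3_abc123", data=True)          # one Link
by_name = Link._by_fullname(["t3_abc123", "t3_def456"],
                            data=True)                    # dict: fullname -> Link
ordered = Link._by_fullname(["t3_abc123", "t3_def456"],
                            data=True,
                            return_dict=False)            # list of Links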
Example No. 2
def _handle_adzerk(msg):
    data = json.loads(msg.body)
    g.log.debug('data: %s' % data)
    action = data.get('action')
    if action == 'deactivate_link':
        link = Link._by_fullname(data['link'], data=True)
        _deactivate_link(link)
    elif action == 'deactivate_campaign':
        link = Link._by_fullname(data['link'], data=True)
        campaign = PromoCampaign._by_fullname(data['campaign'], data=True)
        _deactivate_campaign(link, campaign)
    elif action == 'update_adzerk':
        link = Link._by_fullname(data['link'], data=True)
        campaign = PromoCampaign._by_fullname(data['campaign'], data=True)
        _update_adzerk(link, campaign)
Example No. 3
    def _handle_adzerk(msg):
        data = json.loads(msg.body)
        g.log.debug('data: %s' % data)

        action = data.get('action')

        if action == 'deactivate_orphaned_flight':
            _deactivate_orphaned_flight(data['flight'])
            return

        link = Link._by_fullname(data['link'], data=True)
        if data['campaign']:
            campaign = PromoCampaign._by_fullname(data['campaign'], data=True)
        else:
            campaign = None

        if action == 'update_adzerk':
            if 'triggered_by' in data and data['triggered_by'] is not None:
                triggered_by = Account._by_fullname(data['triggered_by'], data=True)
            else:
                triggered_by = None

            _update_adzerk(link, campaign, triggered_by)

        elif action == 'deactivate_overdelivered':
            _deactivate_overdelivered(link, campaign)
Example No. 4
    def bid_history(cls, start_date, end_date=None, account_id=None):
        from r2.models import Link
        from r2.lib import promote
        # guard against the default end_date of None before converting
        if end_date is None:
            end_date = datetime.datetime.now(g.tz)
        start_date = to_date(start_date)
        end_date = to_date(end_date)
        q = cls.query()
        q = q.filter(and_(cls.date >= start_date, cls.date < end_date))
        q = list(q)

        links = Link._by_fullname([x.thing_name for x in q], data=True)

        d = start_date
        res = []
        while d < end_date:
            bid = 0
            refund = 0
            for i in q:
                if d == i.date:
                    l = links[i.thing_name]
                    if not promote.is_rejected(l) and not promote.is_unpaid(
                            l) and not l._deleted:
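                        # l.campaigns maps promo_idx to a
                        # (start, end, bid, sr, trans_id) tuple (see the
                        # promote_v2 example below); a trans_id <= 0 appears
                        # to mark a freebie, counted here as fully refunded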
                        camp = l.campaigns[i.promo_idx]
                        bid += i.bid
                        refund += i.bid if camp[-1] <= 0 else 0
            res.append([d, bid, refund])
            d += datetime.timedelta(1)
        return res
Example No. 5
def set_recent_clicks():
    c.recent_clicks = []
    if not c.user_is_loggedin:
        return

    click_cookie = read_user_cookie('recentclicks2')
    if click_cookie:
        if valid_click_cookie(click_cookie):
            names = [x for x in UniqueIterator(click_cookie.split(',')) if x]

            if len(names) > 5:
                names = names[:5]
                set_user_cookie('recentclicks2', ','.join(names))
            #eventually this will look at the user preference
            names = names[:5]

            try:
                c.recent_clicks = Link._by_fullname(names,
                                                    data=True,
                                                    return_dict=False)
            except NotFound:
                # clear their cookie because it's got bad links in it
                set_user_cookie('recentclicks2', '')
        else:
            #if the cookie wasn't valid, clear it
            set_user_cookie('recentclicks2', '')
Example No. 6
    def thing_lookup(self, tuples):
        links = Link._by_fullname([t.link for t in tuples], data=True, return_dict=True, stale=self.stale)

        return [
            Storage({"thing": links[t.link], "_id": links[t.link]._id, "weight": t.weight, "campaign": t.campaign})
            for t in tuples
        ]
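
This method only assumes each element of `tuples` exposes `link`, `weight`, and `campaign` attributes, like the AdWeight records seen in the make_daily_promotions examples further down. A hypothetical invocation (the record type and fullnames are invented for illustration):

from collections import namedtuple

# Hypothetical stand-in for the ad-weight records this method consumes.
AdWeight = namedtuple("AdWeight", ["link", "weight", "campaign"])

tuples = [AdWeight(link="t3_abc123", weight=1.0, campaign="t8_000001")]
wrapped = builder.thing_lookup(tuples)  # `builder`: instance of the enclosing class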
Example No. 7
    def process_message(msgs, chan):
        """Update get_links(), the Links by Subreddit precomputed query.

        get_links() is a CachedResult which is stored in permacache. To
        update these objects we need to do a read-modify-write which requires
        obtaining a lock. Sharding these updates by subreddit allows us to run
        multiple consumers (but ideally just one per shard) to avoid lock
        contention.

        """

        from r2.lib.db.queries import add_queries, get_links

        link_names = {msg.body for msg in msgs}
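        # each message body is a single Link fullname; batch them into one lookup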
        links = Link._by_fullname(link_names, return_dict=False)
        print 'Processing %r' % (links, )

        links_by_sr_id = defaultdict(list)
        for link in links:
            links_by_sr_id[link.sr_id].append(link)

        srs_by_id = Subreddit._byID(links_by_sr_id.keys(), stale=True)

        for sr_id, links in links_by_sr_id.iteritems():
            with g.stats.get_timer("link_vote_processor.subreddit_queries"):
                sr = srs_by_id[sr_id]
                add_queries(
                    queries=[get_links(sr, sort, "all") for sort in SORTS],
                    insert_items=links,
                )
Example No. 8
    def process_message(msgs, chan):
        """Update get_domain_links(), the Links by domain precomputed query.

        get_domain_links() is a CachedResult which is stored in permacache. To
        update these objects we need to do a read-modify-write which requires
        obtaining a lock. Sharding these updates by domain allows us to run
        multiple consumers (but ideally just one per shard) to avoid lock
        contention.

        """

        from r2.lib.db.queries import add_queries, get_domain_links

        link_names = {msg.body for msg in msgs}
        links = Link._by_fullname(link_names, return_dict=False)
        print 'Processing %r' % (links, )

        links_by_domain = defaultdict(list)
        for link in links:
            parsed = UrlParser(link.url)

            # update the listings for all permutations of the link's domain
            for domain in parsed.domain_permutations():
                links_by_domain[domain].append(link)

        for d, links in links_by_domain.iteritems():
            with g.stats.get_timer("link_vote_processor.domain_queries"):
                add_queries(
                    queries=[
                        get_domain_links(d, sort, "all") for sort in SORTS
                    ],
                    insert_items=links,
                )
Example No. 9
    def bid_history(cls, start_date, end_date=None, account_id=None):
        from r2.models import Link
        from r2.lib import promote
        # guard against the default end_date of None before converting
        if end_date is None:
            end_date = datetime.datetime.now(g.tz)
        start_date = to_date(start_date)
        end_date = to_date(end_date)
        q = cls.query()
        q = q.filter(and_(cls.date >= start_date, cls.date < end_date))
        q = list(q)

        links = Link._by_fullname([x.thing_name for x in q], data=True)

        d = start_date
        res = []
        while d < end_date:
            bid = 0
            refund = 0
            for i in q:
                if d == i.date:
                    l = links[i.thing_name]
                    if (not promote.is_rejected(l) and 
                        not promote.is_unpaid(l) and 
                        not l._deleted and 
                        i.promo_idx in getattr(l, 'campaigns', {})):
                        
                        camp = l.campaigns[i.promo_idx]
                        bid += i.bid
                        refund += i.bid if camp[-1] <= 0 else 0
            res.append([d, bid, refund])
            d += datetime.timedelta(1)
        return res
Example No. 10
def set_recent_clicks():
    c.recent_clicks = []
    if not c.user_is_loggedin:
        return

    click_cookie = read_user_cookie('recentclicks2')
    if click_cookie:
        if valid_click_cookie(click_cookie):
            names = [ x for x in UniqueIterator(click_cookie.split(',')) if x ]

            if len(names) > 5:
                names = names[:5]
                set_user_cookie('recentclicks2', ','.join(names))
            #eventually this will look at the user preference
            names = names[:5]

            try:
                c.recent_clicks = Link._by_fullname(names, data=True,
                                                    return_dict=False)
            except NotFound:
                # clear their cookie because it's got bad links in it
                set_user_cookie('recentclicks2', '')
        else:
            #if the cookie wasn't valid, clear it
            set_user_cookie('recentclicks2', '')
Example No. 11
    def process_message(msgs, chan):
        """Update get_domain_links(), the Links by domain precomputed query.

        get_domain_links() is a CachedResult which is stored in permacache. To
        update these objects we need to do a read-modify-write which requires
        obtaining a lock. Sharding these updates by domain allows us to run
        multiple consumers (but ideally just one per shard) to avoid lock
        contention.

        """

        from r2.lib.db.queries import add_queries, get_domain_links

        link_names = {msg.body for msg in msgs}
        links = Link._by_fullname(link_names, return_dict=False)
        print 'Processing %r' % (links,)

        links_by_domain = defaultdict(list)
        for link in links:
            parsed = UrlParser(link.url)

            # update the listings for all permutations of the link's domain
            for domain in parsed.domain_permutations():
                links_by_domain[domain].append(link)

        for d, links in links_by_domain.iteritems():
            with g.stats.get_timer("link_vote_processor.domain_queries"):
                add_queries(
                    queries=[
                        get_domain_links(d, sort, "all") for sort in SORTS],
                    insert_items=links,
                )
Example No. 12
    def process_message(msgs, chan):
        """Update get_links(), the Links by Subreddit precomputed query.

        get_links() is a CachedResult which is stored in permacache. To
        update these objects we need to do a read-modify-write which requires
        obtaining a lock. Sharding these updates by subreddit allows us to run
        multiple consumers (but ideally just one per shard) to avoid lock
        contention.

        """

        from r2.lib.db.queries import add_queries, get_links

        link_names = {msg.body for msg in msgs}
        links = Link._by_fullname(link_names, return_dict=False)
        print 'Processing %r' % (links,)

        links_by_sr_id = defaultdict(list)
        for link in links:
            links_by_sr_id[link.sr_id].append(link)

        srs_by_id = Subreddit._byID(links_by_sr_id.keys(), stale=True)

        for sr_id, links in links_by_sr_id.iteritems():
            with g.stats.get_timer("link_vote_processor.subreddit_queries"):
                sr = srs_by_id[sr_id]
                add_queries(
                    queries=[get_links(sr, sort, "all") for sort in SORTS],
                    insert_items=links,
                )
Example No. 13
    def GET_report(self, start, end, link_text=None):
        now = datetime.now(g.tz).replace(hour=0,
                                         minute=0,
                                         second=0,
                                         microsecond=0)
        end = end or now - timedelta(days=1)
        start = start or end - timedelta(days=7)

        if link_text is not None:
            names = link_text.replace(',', ' ').split()
            try:
                links = Link._by_fullname(names, data=True)
            except NotFound:
                links = {}

            bad_links = [name for name in names if name not in links]
            links = links.values()
        else:
            links = []
            bad_links = []

        content = PromoteReport(links, link_text, bad_links, start, end)
        if c.render_style == 'csv':
            return content.as_csv()
        else:
            return PromotePage('report', content=content).render()
Example No. 14
    def thing_lookup(self, tuples):
        links = Link._by_fullname([t.link for t in tuples], data=True,
                                  return_dict=True, stale=self.stale)

        return [Storage({'thing': links[t.link],
                         '_id': links[t.link]._id,
                         'weight': t.weight,
                         'campaign': t.campaign}) for t in tuples]
Example No. 15
def send_account_summary_email(account_thing_id, verbose=False, send_email=send_email):
    account = Account._byID(account_thing_id, data=True)
    if not should_send_activity_summary_email(account):
        return

    # if we've never sent an email, only tell about the last 24 hours
    a_day_ago = datetime.datetime.now(pytz.utc) - datetime.timedelta(hours=24)
    if getattr(account, 'last_email_sent_at', None) is None:
        account.last_email_sent_at = a_day_ago

    c.content_langs = 'en-US'

    # Find all the "active" links for this user.  Frontpage uses the c.user global
    # to find the right subreddits for the current user
    c.user = account
    c.user_is_loggedin = True
    thing_ids = []
    for link in Frontpage.get_links('active', 'all'):
        thing_ids.append(link)
    active_links_hash = Link._by_fullname(thing_ids, data=True)

    active_links = [active_links_hash[t_id] for t_id in thing_ids
                    if active_links_hash[t_id]._active > account.last_email_sent_at]
    for idx, ll in enumerate(active_links, start=1):
        ll.num = idx

    # Find all new spaces created since we last sent the user an email
    new_spaces = list(fetch_things2(Subreddit._query(
        Subreddit.c._date > account.last_email_sent_at,
        sort=asc('_date'))))

    # don't bother sending email if there's nothing to report.
    if len(new_spaces) == 0 and len(active_links) == 0:
        return

    # Get the date and time
    now = datetime.datetime.now(pytz.timezone('US/Eastern'))
    date_string = now.strftime("%A %B %d, %Y")
    time_string = now.strftime("%I:%M %p")

    # Render the template
    html_email_template = g.mako_lookup.get_template('summary_email.html')
    html_body = html_email_template.render(
        last_email_sent_at=account.last_email_sent_at,
        new_spaces=new_spaces, 
        active_links=active_links,
        date_string=date_string,
        time_string=time_string)

    # with open('out.html', 'w') as ff:
    #     ff.write(html_body)
    if verbose:
        print "sending email to %s" % (account.email,)
    send_email(account.email, html_body, date_string)

    account.last_email_sent_at = datetime.datetime.now(pytz.utc)
    account._commit()
Example No. 16
def get_hot_items(srs, item_type, src):
    """Get hot links from specified srs."""
    hot_srs = {sr._id: sr for sr in srs}  # for looking up sr by id
    hot_link_fullnames = normalized_hot(sr._id for sr in srs)
    hot_links = Link._by_fullname(hot_link_fullnames, return_dict=False)
    hot_items = []
    for l in hot_links:
        hot_items.append(ExploreItem(item_type, src, hot_srs[l.sr_id], l))
    return hot_items
Example No. 17
def get_hot_items(srs, item_type, src):
    """Get hot links from specified srs."""
    hot_srs = {sr._id: sr for sr in srs}  # for looking up sr by id
    hot_link_fullnames = normalized_hot([sr._id for sr in srs])
    hot_links = Link._by_fullname(hot_link_fullnames, return_dict=False)
    hot_items = []
    for l in hot_links:
        hot_items.append(ExploreItem(item_type, src, hot_srs[l.sr_id], l))
    return hot_items
Example No. 18
def get_hot(sr):
    """Get the hottest links for a subreddit. If g.use_query_cache is
    True, it'll use the query cache, otherwise it'll use cached_query()
    from above."""
    q = sr.get_links('hot', 'all')
    if isinstance(q, Query):
        return cached_query(q, sr)
    else:
        return Link._by_fullname(list(q)[:150], return_dict = False)
Example No. 19
    def _get_selfserve_links(self, count):
        links = Subreddit._by_name(g.advertising_links_sr).get_links(
            'new', 'all')
        items = Link._by_fullname(links, data=True, return_dict=False)
        id36s = map(
            lambda x: self.advertising_link_id36_re.match(x.url).group(1),
            items)
        ad_links = Link._byID36(id36s, return_dict=False, data=True)
        return wrap_links(ad_links, num=count)
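
The `advertising_link_id36_re` pattern is not shown in this excerpt; judging from its use above, it must capture a link's id36 as group(1) from each promoted link's URL. A purely hypothetical shape:

import re

# Hypothetical pattern, invented for illustration: captures the id36 from a
# comments-page URL. The real pattern is defined elsewhere in the class.
advertising_link_id36_re = re.compile(r"/comments/([0-9a-z]+)")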
Example No. 20
def get_rising_items(omit_sr_ids, count=4):
    """Get links that are rising right now."""
    all_rising = rising.get_all_rising()
    candidate_sr_ids = {sr_id for link, score, sr_id in all_rising}.difference(omit_sr_ids)
    link_fullnames = [link for link, score, sr_id in all_rising if sr_id in candidate_sr_ids]
    link_fullnames_to_show = random_sample(link_fullnames, count)
    rising_links = Link._by_fullname(link_fullnames_to_show, return_dict=False, data=True)
    rising_items = [ExploreItem(TYPE_RISING, "ris", Subreddit._byID(l.sr_id), l) for l in rising_links]
    return rising_items
Example No. 21
def _handle_upsert_campaign(payload):
    link = Link._by_fullname(payload["link"], data=True)
    campaign = PromoCampaign._by_fullname(payload["campaign"], data=True)
    owner = Account._byID(campaign.owner_id)

    lineitem = lineitems_service.upsert_lineitem(owner, campaign)
    creative = creatives_service.get_creative(link)

    lineitems_service.associate_with_creative(
        lineitem=lineitem, creative=creative)
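
A hypothetical payload for this handler; both values are fullnames, as the lookups above require (identifiers invented for illustration):

payload = {"link": "t3_abc123", "campaign": "t8_000001"}
_handle_upsert_campaign(payload)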
Example No. 22
    def process_message(msg):
        vote_data = json.loads(msg.body)
        hook = hooks.get_hook('vote.validate_vote_data')
        if hook.call_until_return(msg=msg, vote_data=vote_data) is False:
            # Corrupt records in the queue. Ignore them.
            print "Ignoring invalid vote by %s on %s %s" % (
                    vote_data.get('user_id', '<unknown>'),
                    vote_data.get('thing_fullname', '<unknown>'),
                    vote_data)
            return

        timer = g.stats.get_timer("link_vote_processor")
        timer.start()

        user = Account._byID(vote_data.pop("user_id"))
        link = Link._by_fullname(vote_data.pop("thing_fullname"))

        # create the vote and update the voter's liked/disliked under lock so
        # that the vote state and cached query are consistent
        lock_key = "vote-%s-%s" % (user._id36, link._fullname)
        with g.make_lock("voting", lock_key, timeout=5):
            print "Processing vote by %s on %s %s" % (user, link, vote_data)

            try:
                vote = Vote(
                    user,
                    link,
                    direction=vote_data["direction"],
                    date=datetime.utcfromtimestamp(vote_data["date"]),
                    data=vote_data["data"],
                    event_data=vote_data.get("event_data"),
                    # CUSTOM: voting model
                    vote_direction=vote_data["vote_direction"],
                )
            except TypeError as e:
                # a vote on an invalid type got in the queue, just skip it
                g.log.exception("Invalid type: %r", e.message)
                return

            vote.commit()
            timer.intermediate("create_vote_object")

            update_user_liked(vote)
            timer.intermediate("voter_likes")

        vote_valid = vote.is_automatic_initial_vote or vote.effects.affects_score
        link_valid = not (link._spam or link._deleted)
        if vote_valid and link_valid:
            add_to_author_query_q(link)
            add_to_subreddit_query_q(link)
            add_to_domain_query_q(link)

        timer.stop()
        timer.flush()
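
For reference, a hypothetical queue message consistent with the fields this consumer pops and reads (all values invented; note the extra `vote_direction` field this revision requires):

vote_message = json.dumps({
    "user_id": 12345,
    "thing_fullname": "t3_abc123",
    "direction": 1,
    "date": 1408000000,        # unix timestamp, read via utcfromtimestamp
    "data": {},
    "event_data": None,
    "vote_direction": 1,       # CUSTOM voting-model field
})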
Example No. 23
def get_rising_items(omit_sr_ids, count=4):
    """Get links that are rising right now."""
    all_rising = rising.get_all_rising()
    candidate_sr_ids = {sr_id for link, score, sr_id in all_rising}.difference(omit_sr_ids)
    link_fullnames = [link for link, score, sr_id in all_rising if sr_id in candidate_sr_ids]
    link_fullnames_to_show = random_sample(link_fullnames, count)
    rising_links = Link._by_fullname(link_fullnames_to_show,
                                     return_dict=False,
                                     data=True)
    rising_items = [ExploreItem(TYPE_RISING, 'ris', Subreddit._byID(l.sr_id), l)
                   for l in rising_links]
    return rising_items
Example No. 24
    def process_message(msg):
        vote_data = json.loads(msg.body)
        hook = hooks.get_hook('vote.validate_vote_data')
        if hook.call_until_return(msg=msg, vote_data=vote_data) is False:
            # Corrupt records in the queue. Ignore them.
            print "Ignoring invalid vote by %s on %s %s" % (
                    vote_data.get('user_id', '<unknown>'),
                    vote_data.get('thing_fullname', '<unknown>'),
                    vote_data)
            return

        timer = g.stats.get_timer("link_vote_processor")
        timer.start()

        user = Account._byID(vote_data.pop("user_id"))
        link = Link._by_fullname(vote_data.pop("thing_fullname"))

        # create the vote and update the voter's liked/disliked under lock so
        # that the vote state and cached query are consistent
        lock_key = "vote-%s-%s" % (user._id36, link._fullname)
        with g.make_lock("voting", lock_key, timeout=5):
            print "Processing vote by %s on %s %s" % (user, link, vote_data)

            try:
                vote = Vote(
                    user,
                    link,
                    direction=vote_data["direction"],
                    date=datetime.utcfromtimestamp(vote_data["date"]),
                    data=vote_data["data"],
                    event_data=vote_data.get("event_data"),
                )
            except TypeError as e:
                # a vote on an invalid type got in the queue, just skip it
                g.log.exception("Invalid type: %r", e.message)
                return

            vote.commit()
            timer.intermediate("create_vote_object")

            update_user_liked(vote)
            timer.intermediate("voter_likes")

        vote_valid = vote.is_automatic_initial_vote or vote.effects.affects_score
        link_valid = not (link._spam or link._deleted)
        if vote_valid and link_valid:
            add_to_author_query_q(link)
            add_to_subreddit_query_q(link)
            add_to_domain_query_q(link)

        timer.stop()
        timer.flush()
Example No. 25
def _run(msgs, chan):
    items = [json.loads(msg.body) for msg in msgs]
    if QUEUE_ALL in items:
        # QUEUE_ALL is just an indicator to run make_daily_promotions.
        # There's no promotion log to update in this case.
        print "Received %s QUEUE_ALL message(s)" % items.count(QUEUE_ALL)
        items = [i for i in items if i != QUEUE_ALL]
    make_daily_promotions()
    links = Link._by_fullname([i["link"] for i in items])
    for item in items:
        PromotionLog.add(links[item['link']],
                         "Finished remaking current promotions (this link "
                         "was: %(message)s)" % item)
Example No. 26
def get_comment_items(srs, src, count=4):
    """Get hot links from srs, plus top comment from each link."""
    link_fullnames = normalized_hot([sr._id for sr in srs])
    hot_links = Link._by_fullname(link_fullnames[:count], return_dict=False)
    top_comments = []
    for link in hot_links:
        builder = CommentBuilder(
            link, operators.desc("_confidence"), comment=None, context=None, num=1, load_more=False
        )
        listing = NestedListing(builder, parent_name=link._fullname).listing()
        top_comments.extend(listing.things)
    srs = Subreddit._byID([com.sr_id for com in top_comments])
    links = Link._byID([com.link_id for com in top_comments])
    comment_items = [ExploreItem(TYPE_COMMENT, src, srs[com.sr_id], links[com.link_id], com) for com in top_comments]
    return comment_items
Example No. 27
    def _handle_adzerk(msg):
        data = json.loads(msg.body)
        g.log.debug('data: %s' % data)

        action = data.get('action')
        link = Link._by_fullname(data['link'], data=True)
        if data['campaign']:
            campaign = PromoCampaign._by_fullname(data['campaign'], data=True)
        else:
            campaign = None

        if action == 'update_adzerk':
            _update_adzerk(link, campaign)
        elif action == 'deactivate_overdelivered':
            _deactivate_overdelivered(link, campaign)
Example No. 28
    def _handle_upsert_campaign(payload):
        link = Link._by_fullname(payload["link"], data=True)
        campaign = PromoCampaign._by_fullname(payload["campaign"], data=True)
        owner = Account._byID(campaign.owner_id)
        author = Account._byID(link.author_id)

        try:
            lineitem = lineitems_service.upsert_lineitem(owner, campaign)
        except ValueError as e:
            g.log.error("unable to upsert lineitem: %s" % e)
            return

        creative = creatives_service.upsert_creative(author, link)

        lineitems_service.associate_with_creative(
            lineitem=lineitem, creative=creative)
Example No. 29
def get_scheduled(date, sr_name=""):
    campaign_ids = PromotionWeights.get_campaign_ids(date, sr_names=[sr_name])
    campaigns = PromoCampaign._byID(campaign_ids, return_dict=False, data=True)
    links = Link._by_fullname({camp.link_id for camp in campaigns}, return_dict=False, data=True)
    links = {l._id: l for l in links}
    kept = []
    for camp in campaigns:
        if camp.trans_id == 0:
            continue

        link = links[camp.link_id]
        if link._spam or not promote.is_accepted(link):
            continue

        kept.append(camp._id)

    return [(camp._fullname, camp.link_id, camp.bid) for camp in kept]
Example No. 30
def get_scheduled(date, sr_name=''):
    campaign_ids = PromotionWeights.get_campaign_ids(date, sr_names=[sr_name])
    campaigns = PromoCampaign._byID(campaign_ids, return_dict=False, data=True)
    links = Link._by_fullname({camp.link_id for camp in campaigns},
                              return_dict=False, data=True)
    links = {l._id: l for l in links}
    kept = []
    for camp in campaigns:
        if camp.trans_id == 0:
            continue

        link = links[camp.link_id]
        if link._spam or not promote.is_accepted(link):
            continue

        kept.append(camp._id)

    return [(camp._fullname, camp.link_id, camp.bid) for camp in kept]
Example No. 31
def promote_v2():
    # alter table bids add column campaign integer;
    # update bids set campaign = 0; 
    from r2.models import Link, NotFound, PromoteDates, Bid
    from datetime import datetime
    from pylons import g
    for p in PromoteDates.query():
        try:
            l = Link._by_fullname(p.thing_name,
                                  data = True, return_dict = False)
            if not l:
                raise NotFound, p.thing_name

            # update the promote status
            l.promoted = True
            l.promote_status = getattr(l, "promote_status", STATUS.unseen)
            l._date = datetime(*(list(p.start_date.timetuple()[:7]) + [g.tz]))
            set_status(l, l.promote_status)

            # add new campaign
            print (l, (p.start_date, p.end_date), p.bid, None)
            if not p.bid:
                print "no bid? ", l
                p.bid = 20
            new_campaign(l, (p.start_date, p.end_date), p.bid, None)
            print "updated: %s (%s)" % (l, l._date)

        except NotFound:
            print "NotFound: %s" % p.thing_name

    print "updating campaigns"
    for b in Bid.query():
        l = Link._byID(int(b.thing_id))
        print "updating: ", l
        campaigns = getattr(l, "campaigns", {}).copy()
        indx = b.campaign
        if indx in campaigns:
            sd, ed, bid, sr, trans_id = campaigns[indx]
            campaigns[indx] = sd, ed, bid, sr, b.transaction
            l.campaigns = campaigns
            l._commit()
        else:
            print "no campaign information: ", l
Example No. 32
def get_scheduled(date, sr_name=""):
    all_promotions = PromotionWeights.get_campaigns(date)
    fp_promotions = [p for p in all_promotions if p.sr_name == sr_name]
    campaigns = PromoCampaign._byID([i.promo_idx for i in fp_promotions], return_dict=False, data=True)
    links = Link._by_fullname([i.thing_name for i in fp_promotions], return_dict=False, data=True)
    links = {l._id: l for l in links}
    kept = []
    for camp in campaigns:
        if camp.trans_id == 0:
            continue

        link = links[camp.link_id]
        if link._spam or not promote.is_accepted(link):
            continue

        kept.append(camp._id)

    return [
        ("%s_%s" % (PC_PREFIX, to36(p.promo_idx)), p.thing_name, p.bid) for p in fp_promotions if p.promo_idx in kept
    ]
Example No. 33
def get_comment_items(srs, src, count=4):
    """Get hot links from srs, plus top comment from each link."""
    link_fullnames = normalized_hot([sr._id for sr in srs])
    hot_links = Link._by_fullname(link_fullnames[:count], return_dict=False)
    top_comments = []
    for link in hot_links:
        builder = CommentBuilder(link,
                                 operators.desc('_confidence'),
                                 comment=None,
                                 context=None,
                                 load_more=False)
        listing = NestedListing(builder, num=1,
                                parent_name=link._fullname).listing()
        top_comments.extend(listing.things)
    srs = Subreddit._byID([com.sr_id for com in top_comments])
    links = Link._byID([com.link_id for com in top_comments])
    comment_items = [
        ExploreItem(TYPE_COMMENT, src, srs[com.sr_id], links[com.link_id], com)
        for com in top_comments
    ]
    return comment_items
Example No. 34
def accepted_campaigns(offset=0):
    now = promo_datetime_now(offset=offset)
    promo_weights = PromotionWeights.get_campaigns(now)
    all_links = Link._by_fullname(set(x.thing_name for x in promo_weights), data=True, return_dict=True)
    accepted_links = {}
    for link_fullname, link in all_links.iteritems():
        if is_accepted(link):
            accepted_links[link._id] = link

    accepted_link_ids = accepted_links.keys()
    campaign_query = PromoCampaign._query(PromoCampaign.c.link_id == accepted_link_ids, data=True)
    campaigns = dict((camp._id, camp) for camp in campaign_query)
    for pw in promo_weights:
        campaign = campaigns.get(pw.promo_idx)
        if not campaign or (not campaign.trans_id and campaign.priority.cpm):
            continue
        link = accepted_links.get(campaign.link_id)
        if not link:
            continue

        yield (link, campaign, pw.weight)
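
Since `accepted_campaigns` is a generator, callers presumably iterate it directly; a minimal hypothetical consumer tallying scheduled weight per link:

totals = {}
for link, campaign, weight in accepted_campaigns(offset=0):
    # link is a Link, campaign a PromoCampaign, weight its scheduled weight
    totals[link._fullname] = totals.get(link._fullname, 0) + weight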
Example No. 35
    def bid_history(cls, start_date, end_date = None, account_id = None):
        from r2.lib import promote
        from r2.models import PromoCampaign
        
        if not end_date:
            end_date = datetime.datetime.now(g.tz)
        
        start_date = to_date(start_date)
        end_date   = to_date(end_date)
        q = cls.query()
        q = q.filter(and_(cls.date >= start_date, cls.date < end_date))
        q = list(q)

        links = Link._by_fullname([x.thing_name for x in q], data=True)

        d = start_date
        res = []
        while d < end_date:
            bid = 0
            refund = 0
            for i in q:
                if d == i.date:
                    l = links[i.thing_name]
                    if (not promote.is_rejected(l) and 
                        not promote.is_unpaid(l) and 
                        not l._deleted):

                        try:
                            camp = PromoCampaign._byID(i.promo_idx, data=True)
                            bid += i.bid
                            refund += i.bid if camp.is_freebie() else 0
                        except NotFound:
                            g.log.error("Skipping missing PromoCampaign in "
                                        "bidding.bid_history, campaign id: %d" 
                                        % i.promo_idx)
            res.append([d, bid, refund])
            d += datetime.timedelta(1)
        return res
Example No. 36
def get_scheduled(date, sr_name=''):
    all_promotions = PromotionWeights.get_campaigns(date)
    fp_promotions = [p for p in all_promotions if p.sr_name == sr_name]
    campaigns = PromoCampaign._byID([i.promo_idx for i in fp_promotions],
                                    return_dict=False,
                                    data=True)
    links = Link._by_fullname([i.thing_name for i in fp_promotions],
                              return_dict=False,
                              data=True)
    links = {l._id: l for l in links}
    kept = []
    for camp in campaigns:
        if camp.trans_id == 0:
            continue

        link = links[camp.link_id]
        if link._spam or not promote.is_accepted(link):
            continue

        kept.append(camp._id)

    return [('%s_%s' % (PC_PREFIX, to36(p.promo_idx)), p.thing_name, p.bid)
            for p in fp_promotions if p.promo_idx in kept]
Example No. 37
def accepted_campaigns(offset=0):
    now = promo_datetime_now(offset=offset)
    promo_weights = PromotionWeights.get_campaigns(now)
    all_links = Link._by_fullname(set(x.thing_name for x in promo_weights),
                                  data=True, return_dict=True)
    accepted_links = {}
    for link_fullname, link in all_links.iteritems():
        if is_accepted(link):
            accepted_links[link._id] = link

    accepted_link_ids = accepted_links.keys()
    campaign_query = PromoCampaign._query(PromoCampaign.c.link_id == accepted_link_ids,
                                          data=True)
    campaigns = dict((camp._id, camp) for camp in campaign_query)
    for pw in promo_weights:
        campaign = campaigns.get(pw.promo_idx)
        if not campaign or not campaign.trans_id:
            continue
        link = accepted_links.get(campaign.link_id)
        if not link:
            continue

        yield (link, campaign, pw.weight)
Example No. 38
    def GET_report(self, start, end, link_text=None):
        now = datetime.now(g.tz).replace(hour=0, minute=0, second=0,
                                         microsecond=0)
        end = end or now - timedelta(days=1)
        start = start or end - timedelta(days=7)

        if link_text is not None:
            names = link_text.replace(',', ' ').split()
            try:
                links = Link._by_fullname(names, data=True)
            except NotFound:
                links = {}

            bad_links = [name for name in names if name not in links]
            links = links.values()
        else:
            links = []
            bad_links = []

        content = PromoteReport(links, link_text, bad_links, start, end)
        if c.render_style == 'csv':
            return content.as_csv()
        else:
            return PromotePage('report', content=content).render()
Example No. 39
    def _handle_upsert_promotion(payload):
        link = Link._by_fullname(payload["link"], data=True)
        author = Account._byID(link.author_id)

        creatives_service.upsert_creative(author, link)
Example No. 40
def adzerk_request(
    keywords, properties, user_id,
    num_placements=1,
    timeout=1.5,
    platform="desktop",
    is_refresh=False,
    referrer=None,
):
    placements = []
    divs = ["div%s" % i for i in xrange(num_placements)]
    subreddit = None

    if isinstance(c.site, Subreddit) and not c.default_sr:
        subreddit = c.site.name

    for div in divs:
        placement = {
          "divName": div,
          "networkId": g.az_selfserve_network_id,
          "siteId": g.az_selfserve_site_ids[platform],
          "adTypes": [LEADERBOARD_AD_TYPE],
          "eventIds": [EVENT_TYPE_UPVOTE, EVENT_TYPE_DOWNVOTE],
          "properties": properties,
        }

        if subreddit is not None:
            placement["properties"] = {
                "subreddit": subreddit,
            }

        placements.append(placement)

    keywords = [word.lower() for word in keywords]
    data = {
        "placements": placements,
        "keywords": keywords,
        "ip": request.ip,
        "enableBotFiltering": True,
        "includePricingData": True,
    }

    page_url = request.headers.get("referer", None)

    if page_url is not None:
        data["url"] = page_url

    if referrer is not None:
        data["referrer"] = referrer

    if user_id:
        data["user"] = {"key": user_id}

    url = 'https://%s/api/v2' % g.adzerk_engine_domain
    headers = {
        'content-type': 'application/json',
        'user-agent': request.headers.get('User-Agent'),
    }

    do_not_track = request.headers.get("DNT", None)

    if do_not_track and feature.is_enabled("adzerk_do_not_track"):
        headers["DNT"] = do_not_track

    timer = g.stats.get_timer("providers.adzerk")
    timer.start()

    for placement in placements:
        g.ad_events.ad_request(
            keywords=keywords,
            platform=platform,
            placement_name=placement["divName"],
            placement_types=placement["adTypes"],
            is_refresh=is_refresh,
            subreddit=c.site,
            request=request,
            context=c,
        )

    try:
        r = requests.post(url, data=json.dumps(data), headers=headers,
                          timeout=timeout)
    except (requests.exceptions.Timeout, requests.exceptions.SSLError):
        g.stats.simple_event('adzerk.request.timeout')
        return None
    except requests.exceptions.ConnectionError:
        g.stats.simple_event('adzerk.request.refused')
        return None
    except select.error:
        return None
    finally:
        timer.stop()

    errored = False

    try:
        response = adzerk_api.handle_response(r)
    except adzerk_api.AdzerkError:
        g.stats.simple_event('adzerk.request.badresponse')
        g.log.error('adzerk_request: bad response (%s) %r', r.status_code,
                    r.content)
        errored = True
    finally:
        # Temporarily log request data and response body, sampled at the
        # ad_log_sample_rate live-config rate
        if random.random() < g.live_config.get('ad_log_sample_rate', 0):
            g.log.info("ad_request [DNT=%s]: %s, ad_response: [%s] %s",
                do_not_track, json.dumps(data), r.status_code, r.text)

        if errored:
            return None

    decisions = response['decisions']

    if not decisions:
        return None

    placements_by_div = {placement["divName"]: placement
                         for placement in placements}

    res = []
    for div in divs:
        decision = decisions[div]
        if not decision:
            continue

        placement = placements_by_div[div]
        ad_id = decision['adId']
        pricing = decision.get("pricing", {})
        revenue = pricing.get("revenue")
        rate_type_id = pricing.get("rateType")
        rate_type = RATE_TYPE_NAMES.get(rate_type_id, None)
        impression_url = decision.get("impressionUrl")
        impression_b64_data = UrlParser(impression_url).query_dict.get("e", "")
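        # the "e" query param of the impression URL appears to carry
        # base64url-encoded JSON with the impression id ("di") and matched
        # keywords ("mk")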
        impression_id, matched_keywords = None, []

        try:
            # fix padding (base64 length must be a multiple of 4) and coerce
            # to str for urlsafe_b64decode
            impression_b64_data = str(
                impression_b64_data +
                ("=" * (-len(impression_b64_data) % 4))
            )
            impression_data = json.loads(
                base64.urlsafe_b64decode(impression_b64_data),
                strict=False,
            )
        except UnicodeDecodeError:
            g.log.info("unable to decode impression data: %s", impression_b64_data)
            impression_data = None
        except (TypeError, ValueError):
            impression_data = None

        if impression_data is not None:
            impression_id = impression_data.get("di")
            matched_keywords = impression_data.get("mk")

        if matched_keywords:
            matched_keywords = matched_keywords.split(",")


        # adserver ads are not reddit links, we return the body
        if decision['campaignId'] in g.adserver_campaign_ids:
            g.ad_events.ad_response(
                keywords=keywords,
                platform=platform,
                placement_name=div,
                placement_types=placement["adTypes"],
                ad_id=ad_id,
                impression_id=impression_id,
                matched_keywords=matched_keywords,
                rate_type=rate_type,
                clearing_price=revenue,
                subreddit=c.site,
                request=request,
                context=c,
            )

            return AdserverResponse(decision['contents'][0]['body'])

        adzerk_flight_id = decision['flightId']
        imp_pixel = decision['impressionUrl']
        click_url = decision['clickUrl']
        events_by_id = {event["id"]: event["url"] for event in decision["events"]}
        upvote_pixel = events_by_id[EVENT_TYPE_UPVOTE]
        downvote_pixel = events_by_id[EVENT_TYPE_DOWNVOTE]

        campaign_fullname = PromoCampaignByFlightIdCache.get(adzerk_flight_id)
        contents = decision['contents'][0]
        body = json.loads(contents['body'])
        link_fullname = body['link']
        target = body['target']
        priority = None
        priority_id = body.get('priorityId', None)
        ecpm = body.get('ecpm', None)
        moat_query = body.get('moatQuery', None)

        if priority_id is not None:
            try:
                priority_id = int(priority_id)
            except ValueError:
                pass

            for k, v in g.az_selfserve_priorities.iteritems():
                if priority_id == v:
                    priority = k

        g.ad_events.ad_response(
            keywords=keywords,
            platform=platform,
            placement_name=div,
            placement_types=placement["adTypes"],
            ad_id=ad_id,
            impression_id=impression_id,
            matched_keywords=matched_keywords,
            rate_type=rate_type,
            clearing_price=revenue,
            subreddit=c.site,
            link_fullname=link_fullname,
            campaign_fullname=campaign_fullname,
            priority=priority,
            ecpm=ecpm,
            request=request,
            context=c,
        )

        if not campaign_fullname:
            link = Link._by_fullname(link_fullname, data=True, stale=True)

            if promote.is_external(link):
                campaign_fullname = promote.EXTERNAL_CAMPAIGN
            else:
                adzerk_campaign_id = decision['campaignId']

                g.stats.simple_event('adzerk.request.orphaned_flight')
                g.log.error('adzerk_request: couldn\'t find campaign for flight (az campaign: %s, flight: %s)',
                    adzerk_campaign_id, adzerk_flight_id)

                # deactivate the flight, it will be reactivated if a
                # valid campaign actually exists
                deactivate_orphaned_flight(adzerk_flight_id)
                continue

        res.append(AdzerkResponse(
            link=link_fullname,
            campaign=campaign_fullname,
            target=target,
            ecpm=ecpm,
            priority=priority,
            moat_query=moat_query,
            imp_pixel=imp_pixel,
            click_url=click_url,
            upvote_pixel=upvote_pixel,
            downvote_pixel=downvote_pixel,
        ))

    return res
Example No. 41
def make_daily_promotions(offset=0, test=False):
    """
    Arguments:
      offset - number of days after today to get the schedule for
      test - if True, new schedule will be generated but not launched
    Raises Exception with list of campaigns that had errors if there were any
    """
    by_srname, links, error_campaigns = get_scheduled(offset)
    all_links = set([l._fullname for l in links])
    srs = Subreddit._by_name(by_srname.keys())

    # over18 check
    for srname, adweights in by_srname.iteritems():
        if srname:
            sr = srs[srname]
            if sr.over_18:
                sr_links = Link._by_fullname([a.link for a in adweights],
                                             return_dict=False)
                for l in sr_links:
                    l.over_18 = True
                    if not test:
                        l._commit()

    old_ads = get_live_promotions([LiveAdWeights.ALL_ADS])
    old_links = set(x.link for x in old_ads[LiveAdWeights.ALL_ADS])

    # links that need to be promoted
    new_links = all_links - old_links
    # links that have already been promoted
    old_links = old_links - all_links

    links = Link._by_fullname(new_links.union(old_links),
                              data=True,
                              return_dict=True)

    for l in old_links:
        if is_promoted(links[l]):
            if test:
                print "unpromote", l
            else:
                # update the query queue
                set_promote_status(links[l], PROMOTE_STATUS.finished)
                emailer.finished_promo(links[l])

    for l in new_links:
        if is_accepted(links[l]):
            if test:
                print "promote2", l
            else:
                # update the query queue
                set_promote_status(links[l], PROMOTE_STATUS.promoted)
                emailer.live_promo(links[l])

    # convert the weighted dict to use sr_ids which are more useful
    by_srid = {
        srs[srname]._id: adweights
        for srname, adweights in by_srname.iteritems() if srname != ''
    }
    if '' in by_srname:
        by_srid[''] = by_srname['']

    if not test:
        set_live_promotions(by_srid)
        _mark_promos_updated()
    else:
        print by_srid

    # after launching as many campaigns as possible, raise an exception to
    #   report any error campaigns. (useful for triggering alerts in irc)
    if error_campaigns:
        raise Exception("Some scheduled campaigns could not be added to daily "
                        "promotions: %r" % error_campaigns)
Example No. 42
def _handle_upsert_promotion(payload):
    link = Link._by_fullname(payload["link"], data=True)
    author = Account._byID(link.author_id)

    creatives_service.upsert_creative(author, link)
Example No. 43
def make_daily_promotions(offset=0, test=False):
    """
    Arguments:
      offset - number of days after today to get the schedule for
      test - if True, new schedule will be generated but not launched
    Raises Exception with list of campaigns that had errors if there were any
    """

    scheduled_adweights, error_campaigns = get_scheduled(offset)
    current_adweights_byid = get_live_promotions([LiveAdWeights.ALL_ADS])
    current_adweights = current_adweights_byid[LiveAdWeights.ALL_ADS]

    link_names = [aw.link for aw in itertools.chain(scheduled_adweights,
                                                    current_adweights)]
    links = Link._by_fullname(link_names, data=True)

    camp_names = [aw.campaign for aw in itertools.chain(scheduled_adweights,
                                                        current_adweights)]
    campaigns = PromoCampaign._by_fullname(camp_names, data=True)
    srs = Subreddit._by_name([camp.sr_name for camp in campaigns.itervalues()
                              if camp.sr_name])

    expired_links = ({aw.link for aw in current_adweights} -
                     {aw.link for aw in scheduled_adweights})
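    # links that are live now but absent from the new schedule are unpromoted
    # in the loop below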
    for link_name in expired_links:
        link = links[link_name]
        if is_promoted(link):
            if test:
                print "unpromote", link_name
            else:
                # update the query queue
                set_promote_status(link, PROMOTE_STATUS.finished)
                emailer.finished_promo(link)

    by_srid = defaultdict(list)
    for adweight in scheduled_adweights:
        link = links[adweight.link]
        campaign = campaigns[adweight.campaign]
        if campaign.sr_name:
            sr = srs[campaign.sr_name]
            sr_id = sr._id
            sr_over_18 = sr.over_18
        else:
            sr_id = ''
            sr_over_18 = False

        if sr_over_18:
            if test:
                print "over18", link._fullname
            else:
                link.over_18 = True
                link._commit()

        if is_accepted(link) and not is_promoted(link):
            if test:
                print "promote2", link._fullname
            else:
                # update the query queue
                set_promote_status(link, PROMOTE_STATUS.promoted)
                emailer.live_promo(link)

        by_srid[sr_id].append(adweight)

    if not test:
        set_live_promotions(by_srid)
        _mark_promos_updated()
    else:
        print by_srid

    finalize_completed_campaigns(daysago=offset+1)
    hooks.get_hook('promote.make_daily_promotions').call(offset=offset)

    # after launching as many campaigns as possible, raise an exception to 
    #   report any error campaigns. (useful for triggering alerts in irc)
    if error_campaigns:
        raise Exception("Some scheduled campaigns could not be added to daily "
                        "promotions: %r" % error_campaigns)
Example No. 44
    def _get_selfserve_links(self, count):
        links = Subreddit._by_name(g.advertising_links_sr).get_links('new', 'all')
        items = Link._by_fullname(links, data=True, return_dict=False)
        id36s = map(lambda x: self.advertising_link_id36_re.match(x.url).group(1), items)
        ad_links = Link._byID36(id36s, return_dict=False, data=True)
        return wrap_links(ad_links, num=count)
Example No. 45
def make_daily_promotions(offset=0, test=False):
    """
    Arguments:
      offset - number of days after today to get the schedule for
      test - if True, new schedule will be generated but not launched
    Raises Exception with list of campaigns that had errors if there were any
    """
    by_srname, links, error_campaigns = get_scheduled(offset)
    all_links = set([l._fullname for l in links])
    srs = Subreddit._by_name(by_srname.keys())

    # over18 check
    for srname, adweights in by_srname.iteritems():
        if srname:
            sr = srs[srname]
            if sr.over_18:
                sr_links = Link._by_fullname([a.link for a in adweights],
                                             return_dict=False)
                for l in sr_links:
                    l.over_18 = True
                    if not test:
                        l._commit()

    old_ads = get_live_promotions([LiveAdWeights.ALL_ADS])
    old_links = set(x.link for x in old_ads[LiveAdWeights.ALL_ADS])

    # links that need to be promoted
    new_links = all_links - old_links
    # links that have already been promoted
    old_links = old_links - all_links

    links = Link._by_fullname(new_links.union(old_links), data=True,
                              return_dict=True)

    for l in old_links:
        if is_promoted(links[l]):
            if test:
                print "unpromote", l
            else:
                # update the query queue
                set_promote_status(links[l], PROMOTE_STATUS.finished)
                emailer.finished_promo(links[l])

    for l in new_links:
        if is_accepted(links[l]):
            if test:
                print "promote2", l
            else:
                # update the query queue
                set_promote_status(links[l], PROMOTE_STATUS.promoted)
                emailer.live_promo(links[l])

    # convert the weighted dict to use sr_ids which are more useful
    by_srid = {srs[srname]._id: adweights for srname, adweights
                                          in by_srname.iteritems()
                                          if srname != ''}
    if '' in by_srname:
        by_srid[''] = by_srname['']

    if not test:
        set_live_promotions(by_srid)
        _mark_promos_updated()
    else:
        print by_srid

    # after launching as many campaigns as possible, raise an exception to 
    #   report any error campaigns. (useful for triggering alerts in irc)
    if error_campaigns:
        raise Exception("Some scheduled campaigns could not be added to daily "
                        "promotions: %r" % error_campaigns)