Example #1
def new_comment(comment, inbox_rels):
    author = Account._byID(comment.author_id)
    job = [get_comments(author, "new", "all")]
    if comment._deleted:
        job.append(get_all_comments())
        add_queries(job, delete_items=comment)
    else:
        # if comment._spam:
        #    sr = Subreddit._byID(comment.sr_id)
        #    job.append(get_spam_comments(sr))
        add_queries(job, insert_items=comment)
        amqp.add_item("new_comment", comment._fullname)
        if not g.amqp_host:
            l = Link._byID(comment.link_id, data=True)
            add_comment_tree(comment, l)

    # note that get_all_comments() is updated by the amqp process
    # r2.lib.db.queries.run_new_comments

    if inbox_rels:
        for inbox_rel in tup(inbox_rels):
            inbox_owner = inbox_rel._thing1
            if inbox_rel._name == "inbox":
                add_queries([get_inbox_comments(inbox_owner)], insert_items=inbox_rel)
            else:
                add_queries([get_inbox_selfreply(inbox_owner)], insert_items=inbox_rel)
            set_unread(comment, inbox_owner, True)
Example #2
def _use_adserver_reporting(thing):
    if not feature.is_enabled("adserver_reporting"):
        return False

    if not g.adserver_reporting_cutoff:
        return False

    try:
        cutoff = parse_date(g.adserver_reporting_cutoff)
    except ValueError:
        return False

    if isinstance(thing, PromoCampaign):
        link = Link._byID(thing.link_id)
    else:
        link = thing

    campaigns = list(PromoCampaign._by_link(link._id))

    # No campaigns, so nothing to report. Show the new
    # view anyway.
    if not campaigns:
        return True

    end_date = max(campaign.end_date for campaign in campaigns)
    end_date = end_date.replace(tzinfo=g.tz)
    cutoff = cutoff.replace(tzinfo=g.tz)

    if end_date < cutoff:
        return False

    return not feature.is_enabled("legacy_ad_reporting")
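
The cutoff comparison above only works once both datetimes carry the same tzinfo. Below is a minimal standalone sketch of that pattern, assuming `parse_date` behaves like dateutil's parser and using `pytz.utc` as a stand-in for `g.tz` (neither assumption comes from the example itself):

# Standalone sketch of the naive-vs-aware comparison used in
# _use_adserver_reporting; dateutil/pytz are assumptions, not the r2 imports.
from dateutil.parser import parse as parse_date
import pytz

cutoff = parse_date("2019-01-01")      # naive datetime, like g.adserver_reporting_cutoff
end_date = parse_date("2019-06-30")    # naive datetime, like campaign.end_date

tz = pytz.utc                          # stand-in for g.tz
cutoff = cutoff.replace(tzinfo=tz)
end_date = end_date.replace(tzinfo=tz)

print end_date < cutoff                # False, so the new reporting view is shown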
Example #3
def new_comment(comment, inbox_rels):
    author = Account._byID(comment.author_id)
    job = [get_comments(author, 'new', 'all')]
    if comment._deleted:
        job.append(get_all_comments())
        add_queries(job, delete_items=comment)
    else:
        #if comment._spam:
        #    sr = Subreddit._byID(comment.sr_id)
        #    job.append(get_spam_comments(sr))
        add_queries(job, insert_items=comment)
        amqp.add_item('new_comment', comment._fullname)
        if not g.amqp_host:
            l = Link._byID(comment.link_id, data=True)
            add_comment_tree(comment, l)

    # note that get_all_comments() is updated by the amqp process
    # r2.lib.db.queries.run_new_comments

    if inbox_rels:
        for inbox_rel in tup(inbox_rels):
            inbox_owner = inbox_rel._thing1
            if inbox_rel._name == "inbox":
                add_queries([get_inbox_comments(inbox_owner)],
                            insert_items=inbox_rel)
            else:
                add_queries([get_inbox_selfreply(inbox_owner)],
                            insert_items=inbox_rel)
            set_unread(comment, inbox_owner, True)
Example #4
    def get_links(cls, event_id):
        link_ids = cls._get_related_link_ids(event_id)
        links = Link._byID(link_ids, data=True, return_dict=False)
        links.sort(key=lambda L: L.num_comments, reverse=True)

        sr_ids = set(L.sr_id for L in links)
        subreddits = Subreddit._byID(sr_ids, data=True)

        wrapped = []
        for link in links:
            w = Wrapped(link)

            if w._spam or w._deleted:
                continue

            if not getattr(w, "allow_liveupdate", True):
                continue

            w.subreddit = subreddits[link.sr_id]

            # ideally we'd check if the user can see the subreddit, but by
            # doing this we keep everything user unspecific which makes caching
            # easier.
            if w.subreddit.type == "private":
                continue

            comment_label = ungettext("comment", "comments", link.num_comments)
            w.comments_label = strings.number_label % dict(
                num=link.num_comments, thing=comment_label)

            wrapped.append(w)
        return wrapped
Example #5
    def get_links(cls, event_id):
        link_ids = cls._get_related_link_ids(event_id)
        links = Link._byID(link_ids, data=True, return_dict=False)
        links.sort(key=lambda L: L.num_comments, reverse=True)

        sr_ids = set(L.sr_id for L in links)
        subreddits = Subreddit._byID(sr_ids, data=True)

        wrapped = []
        for link in links:
            w = Wrapped(link)

            if w._spam or w._deleted:
                continue

            if not getattr(w, "allow_liveupdate", True):
                continue

            w.subreddit = subreddits[link.sr_id]

            # ideally we'd check if the user can see the subreddit, but by
            # doing this we keep everything user unspecific which makes caching
            # easier.
            if w.subreddit.type == "private":
                continue

            comment_label = ungettext("comment", "comments", link.num_comments)
            w.comments_label = strings.number_label % dict(
                num=link.num_comments, thing=comment_label)

            wrapped.append(w)
        return wrapped
Example #6
def test_run_link(start_link, count=1000):
    """Inject `count` number of links, starting with `start_link`"""
    if isinstance(start_link, basestring):
        start_link = int(start_link, 36)
    links = Link._byID(range(start_link - count, start_link), data=True, return_dict=False)
    uploader = LinkUploader(g.CLOUDSEARCH_DOC_API, things=links)
    return uploader.inject()
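
`start_link` is accepted either as an integer id or as a reddit id36 string; `int(start_link, 36)` is the conversion used throughout these helpers. A small, self-contained sketch of that conversion in both directions (the `to_id36` helper is illustrative, not part of r2):

# Illustrative id36 <-> integer round trip; plain Python, no r2 imports.
def to_id36(n):
    """Encode a positive integer as a base-36 string (e.g. 12345 -> '9ix')."""
    digits = "0123456789abcdefghijklmnopqrstuvwxyz"
    out = ""
    while n:
        n, rem = divmod(n, 36)
        out = digits[rem] + out
    return out or "0"

start_link = int("9ix", 36)    # same parsing as test_run_link above -> 12345
assert to_id36(start_link) == "9ix"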
Example #7
def add_comment_tree(comments):
    # update the comment cache
    add_comments(comments)
    # update last modified
    links = Link._byID(list(set(com.link_id for com in tup(comments))), data=True, return_dict=False)
    for link in links:
        set_last_modified(link, "comments")
Example #8
def run(verbose=True, sleep_time = 60, num_items = 1):
    key = "indextank_cursor"
    cursor = g.cache.get(key)
    if cursor is None:
        raise ValueError("%s is not set!" % key)
    cursor = int(cursor)

    while True:
        if verbose:
            print "Looking for %d items with _id < %d" % (num_items, cursor)
        q = Link._query(sort = desc('_id'),
                        limit = num_items)
        q._after(Link._byID(cursor))
        last_date = None
        for item in q:
            cursor = item._id
            last_date = item._date
            amqp.add_item('indextank_changes', item._fullname,
                      message_id = item._fullname,
                      delivery_mode = amqp.DELIVERY_TRANSIENT)
        g.cache.set(key, cursor)

        if verbose:
            if last_date:
                last_date = last_date.strftime("%Y-%m-%d")
            print ("Just enqueued %d items. New cursor=%s (%s). Sleeping %d seconds."
                   % (num_items, cursor, last_date, sleep_time))

        sleep(sleep_time)
Example #9
def get_promos(date, sr_names=None, link=None):
    campaign_ids = PromotionWeights.get_campaign_ids(date, sr_names=sr_names, link=link)
    campaigns = PromoCampaign._byID(campaign_ids, data=True, return_dict=False)
    link_ids = {camp.link_id for camp in campaigns}
    links = Link._byID(link_ids, data=True)
    for camp in campaigns:
        yield camp, links[camp.link_id]
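
Note the two return shapes in play here: with `return_dict=False` the `_byID` call yields a plain list, while the default returns a dict keyed by `_id`, which is what makes the `links[camp.link_id]` lookup work. A hedged sketch of that pairing (requires a configured r2 environment; the ids are hypothetical):

# Sketch only: assumes the reddit/r2 app context is set up; ids are made up.
from r2.models import Link, PromoCampaign

campaign_ids = [101, 102]                                        # hypothetical ids
campaigns = PromoCampaign._byID(campaign_ids, data=True,
                                return_dict=False)               # -> list of campaigns
link_ids = {camp.link_id for camp in campaigns}
links = Link._byID(link_ids, data=True)                          # -> dict keyed by _id

for camp in campaigns:
    print camp, links[camp.link_id]                              # pair each campaign with its link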
Example #10
def test_run(start_link, count=1000):
    '''Inject `count` number of links, starting with `start_link`'''
    if isinstance(start_link, basestring):
        start_link = int(start_link, 36)
    links = Link._byID(range(start_link - count, start_link), data=True,
                       return_dict=False)
    return inject(links)
Example #11
    def GET_report(self, start, end, link_text=None, owner=None):
        now = datetime.now(g.tz).replace(hour=0, minute=0, second=0,
                                         microsecond=0)
        end = end or now - timedelta(days=1)
        start = start or end - timedelta(days=7)

        links = []
        bad_links = []
        owner_name = owner.name if owner else ''

        if owner:
            promo_weights = PromotionWeights.get_campaigns(start, end,
                                                           author_id=owner._id)
            campaign_ids = [pw.promo_idx for pw in promo_weights]
            campaigns = PromoCampaign._byID(campaign_ids, data=True)
            link_ids = {camp.link_id for camp in campaigns.itervalues()}
            links.extend(Link._byID(link_ids, data=True, return_dict=False))

        if link_text is not None:
            id36s = link_text.replace(',', ' ').split()
            try:
                links_from_text = Link._byID36(id36s, data=True)
            except NotFound:
                links_from_text = {}

            bad_links = [id36 for id36 in id36s if id36 not in links_from_text]
            links.extend(links_from_text.values())

        content = PromoteReport(links, link_text, owner_name, bad_links, start,
                                end)
        if c.render_style == 'csv':
            return content.as_csv()
        else:
            return PromotePage(title=_("sponsored link report"),
                               content=content).render()
Example #12
    def GET_report(self, start, end, link_text=None, owner=None):
        now = datetime.now(g.tz).replace(hour=0, minute=0, second=0,
                                         microsecond=0)
        end = end or now - timedelta(days=1)
        start = start or end - timedelta(days=7)

        links = []
        bad_links = []
        owner_name = owner.name if owner else ''

        if owner:
            promo_weights = PromotionWeights.get_campaigns(start, end,
                                                           author_id=owner._id)
            campaign_ids = [pw.promo_idx for pw in promo_weights]
            campaigns = PromoCampaign._byID(campaign_ids, data=True)
            link_ids = {camp.link_id for camp in campaigns.itervalues()}
            links.extend(Link._byID(link_ids, data=True, return_dict=False))

        if link_text is not None:
            id36s = link_text.replace(',', ' ').split()
            try:
                links_from_text = Link._byID36(id36s, data=True)
            except NotFound:
                links_from_text = {}

            bad_links = [id36 for id36 in id36s if id36 not in links_from_text]
            links.extend(links_from_text.values())

        content = PromoteReport(links, link_text, owner_name, bad_links, start,
                                end)
        if c.render_style == 'csv':
            return content.as_csv()
        else:
            return PromotePage(title=_("sponsored link report"),
                               content=content).render()
Example #13
def _use_adserver_reporting(thing):
    if not feature.is_enabled("adserver_reporting"):
        return False

    if not g.adserver_reporting_cutoff:
        return False

    try:
        cutoff = parse_date(g.adserver_reporting_cutoff)
    except ValueError:
        return False

    if isinstance(thing, PromoCampaign):
        link = Link._byID(thing.link_id)
    else:
        link = thing

    campaigns = list(PromoCampaign._by_link(link._id))

    # No campaigns, so nothing to report. Show the new
    # view anyway.
    if not campaigns:
        return True

    end_date = max(campaign.end_date for campaign in campaigns)
    end_date = end_date.replace(tzinfo=g.tz)
    cutoff = cutoff.replace(tzinfo=g.tz)

    if end_date < cutoff:
        return False

    return not feature.is_enabled("legacy_ad_reporting")
Example #14
def run(verbose=True, sleep_time=60, num_items=1):
    key = "indextank_cursor"
    cursor = g.cache.get(key)
    if cursor is None:
        raise ValueError("%s is not set!" % key)
    cursor = int(cursor)

    while True:
        if verbose:
            print "Looking for %d items with _id < %d" % (num_items, cursor)
        q = Link._query(sort=desc('_id'), limit=num_items)
        q._after(Link._byID(cursor))
        last_date = None
        for item in q:
            cursor = item._id
            last_date = item._date
            amqp.add_item('indextank_changes',
                          item._fullname,
                          message_id=item._fullname,
                          delivery_mode=amqp.DELIVERY_TRANSIENT)
        g.cache.set(key, cursor)

        if verbose:
            if last_date:
                last_date = last_date.strftime("%Y-%m-%d")
            print(
                "Just enqueued %d items. New cursor=%s (%s). Sleeping %d seconds."
                % (num_items, cursor, last_date, sleep_time))

        sleep(sleep_time)
Example #15
def _handle_generate_daily_link_reports(link_ids, campaign_ids):
    now = datetime.utcnow()
    links = Link._byID(link_ids, data=True, return_dict=False)
    campaigns = PromoCampaign._byID(campaign_ids, data=True, return_dict=False)

    if not campaigns:
        return

    links_start, links_end = _get_campaigns_date_range(campaigns)
    now = now.replace(tzinfo=pytz.utc)
    links_start = links_start.replace(tzinfo=pytz.utc)
    links_end = links_end.replace(tzinfo=pytz.utc)

    # if data has already been processed then there's no need
    # to redo it.  use the last time the report was run as a 
    # starting point, but subtract 24hrs since initial numbers
    # are preliminary.
    last_run = min(getattr(l, "last_daily_report_run", links_start) for l in links)
    start = max(
        last_run - timedelta(hours=24),
        links_start,
    )

    # in cases where we may be running a report well after a link
    # has completed ensure we always use the actual start.
    if start > links_end:
        start = links_start

    end = min([now, links_end])

    link_fullnames = ",".join([l._fullname for l in links])
    g.log.info("generating report for link %s (%s-%s)" % (
        link_fullnames, start.strftime('%Y-%m-%d'), end.strftime('%Y-%m-%d')))

    report_id = report.queue_report(
        start=start,
        end=end,
        groups=["optionId", "day"],
        parameters=[{
            "campaignId": l.external_campaign_id,
        } for l in links],
    )

    g.log.info("processing report for link (%s/%s)" %
        (link_fullnames, report_id))

    try:
        _process_daily_link_reports(
            links=links,
            report_id=report_id,
            queued_date=now,
        )

        g.log.info("successfully processed report for link (%s/%s)" %
            (link_fullnames, report_id))
    except report.ReportFailedException as e:
        g.log.error(e)
        # retry if report failed
        _generate_link_reports(links)
Example #16
def test_run_link(start_link, count=1000):
    '''Inject `count` number of links, starting with `start_link`'''
    if isinstance(start_link, basestring):
        start_link = int(start_link, 36)
    links = Link._byID(range(start_link - count, start_link), data=True,
                       return_dict=False)
    uploader = LinkUploader(g.CLOUDSEARCH_DOC_API, things=links)
    return uploader.inject()
Example #17
def add_comment_tree(comments):
    #update the comment cache
    add_comments(comments)
    #update last modified
    links = Link._byID(list(set(com.link_id for com in tup(comments))),
                       data = True, return_dict = False)
    for link in links:
        set_last_modified(link, 'comments')
Example #18
def test_run(start_link, count=1000):
    '''Inject `count` number of links, starting with `start_link`'''
    if isinstance(start_link, basestring):
        start_link = int(start_link, 36)
    links = Link._byID(range(start_link - count, start_link),
                       data=True,
                       return_dict=False)
    return inject(links)
Example #19
def get_promos(date, sr_names=None, link=None):
    pws = PromotionWeights.get_campaigns(date, sr_names=sr_names, link=link)
    campaign_ids = {pw.promo_idx for pw in pws}
    campaigns = PromoCampaign._byID(campaign_ids, data=True, return_dict=False)
    link_ids = {camp.link_id for camp in campaigns}
    links = Link._byID(link_ids, data=True)
    for camp in campaigns:
        yield camp, links[camp.link_id]
Example #20
def test_run_link(start_link, count=1000):
    '''Inject `count` number of links, starting with `start_link`'''
    if isinstance(start_link, basestring):
        start_link = int(start_link, 36)
    links = Link._byID(range(start_link - count, start_link), data=True,
                       return_dict=False)
    uploader = SolrLinkUploader(things=links)
    return uploader.inject()
Example #21
def finalize_completed_campaigns(daysago=1):
    # PromoCampaign.end_date is utc datetime with year, month, day only
    now = datetime.datetime.now(g.tz)
    date = now - datetime.timedelta(days=daysago)
    date = date.replace(hour=0, minute=0, second=0, microsecond=0)

    q = PromoCampaign._query(
        PromoCampaign.c.end_date == date,
        # exclude no transaction and freebies
        PromoCampaign.c.trans_id > 0,
        data=True)
    campaigns = list(q)

    # check that traffic is up to date
    earliest_campaign = min(campaigns, key=lambda camp: camp.start_date)
    start, end = promote.get_total_run(earliest_campaign)
    missing_traffic = get_missing_traffic(start.replace(tzinfo=None),
                                          date.replace(tzinfo=None))
    if missing_traffic:
        raise ValueError("Can't finalize campaigns finished on %s."
                         "Missing traffic from %s" % (date, missing_traffic))

    links = Link._byID([camp.link_id for camp in campaigns], data=True)

    for camp in campaigns:
        if hasattr(camp, 'refund_amount'):
            continue

        link = links[camp.link_id]
        billable_impressions = promote.get_billable_impressions(camp)
        billable_amount = promote.get_billable_amount(camp,
                                                      billable_impressions)

        if billable_amount >= camp.bid:
            text = ('%s completed with $%s billable (%s impressions @ $%s).' %
                    (camp, billable_amount, billable_impressions, camp.cpm))
            PromotionLog.add(link, text)
            refund_amount = 0.
        else:
            refund_amount = camp.bid - billable_amount
            user = Account._byID(link.author_id, data=True)
            try:
                success = authorize.refund_transaction(user, camp.trans_id,
                                                       camp._id, refund_amount)
            except authorize.AuthorizeNetException as e:
                text = ('%s $%s refund failed' % (camp, refund_amount))
                PromotionLog.add(link, text)
                g.log.debug(text + ' (response: %s)' % e)
                continue
            text = ('%s completed with $%s billable (%s impressions @ $%s).'
                    ' %s refunded.' %
                    (camp, billable_amount, billable_impressions, camp.cpm,
                     refund_amount))
            PromotionLog.add(link, text)

        camp.refund_amount = refund_amount
        camp._commit()
Example #22
def finalize_completed_campaigns(daysago=1):
    # PromoCampaign.end_date is utc datetime with year, month, day only
    now = datetime.datetime.now(g.tz)
    date = now - datetime.timedelta(days=daysago)
    date = date.replace(hour=0, minute=0, second=0, microsecond=0)

    q = PromoCampaign._query(PromoCampaign.c.end_date == date,
                             # exclude no transaction and freebies
                             PromoCampaign.c.trans_id > 0,
                             data=True)
    campaigns = list(q)

    # check that traffic is up to date
    earliest_campaign = min(campaigns, key=lambda camp: camp.start_date)
    start, end = promote.get_total_run(earliest_campaign)
    missing_traffic = get_missing_traffic(start.replace(tzinfo=None),
                                          date.replace(tzinfo=None))
    if missing_traffic:
        raise ValueError("Can't finalize campaigns finished on %s."
                         "Missing traffic from %s" % (date, missing_traffic))

    links = Link._byID([camp.link_id for camp in campaigns], data=True)

    for camp in campaigns:
        if hasattr(camp, 'refund_amount'):
            continue

        link = links[camp.link_id]
        billable_impressions = promote.get_billable_impressions(camp)
        billable_amount = promote.get_billable_amount(camp,
                                                      billable_impressions)

        if billable_amount >= camp.bid:
            text = ('%s completed with $%s billable (%s impressions @ $%s).'
                    % (camp, billable_amount, billable_impressions, camp.cpm))
            PromotionLog.add(link, text)
            refund_amount = 0.
        else:
            refund_amount = camp.bid - billable_amount
            user = Account._byID(link.author_id, data=True)
            try:
                success = authorize.refund_transaction(user, camp.trans_id,
                                                       camp._id, refund_amount)
            except authorize.AuthorizeNetException as e:
                text = ('%s $%s refund failed' % (camp, refund_amount))
                PromotionLog.add(link, text)
                g.log.debug(text + ' (response: %s)' % e)
                continue
            text = ('%s completed with $%s billable (%s impressions @ $%s).'
                    ' %s refunded.' % (camp, billable_amount,
                                       billable_impressions, camp.cpm,
                                       refund_amount))
            PromotionLog.add(link, text)

        camp.refund_amount = refund_amount
        camp._commit()
Example #23
def fix_about_post():
    user = Account._by_name('Eliezer_Yudkowsky')
    l = Link._byID(1, data=True)
    # l = Link._byID(int('1i', 36))
    if l.url.lower() == 'self':
        l.url = l.make_permalink_slow()
        l.is_self = True
        l._commit()
        l.set_url_cache()
    v = Vote.vote(user, l, True, l.ip, False)
Example #24
def comment_reply_effect(comment):
    if comment.parent_id is not None:
        parent = Comment._byID(comment.parent_id, data=True)
    else:
        parent = Link._byID(comment.link_id, data=True)
    all_effects = effects.get_all_effects([parent._fullname])
    parent_effects = all_effects.get(parent._fullname, [])
    for item_name in parent_effects:
        item = items.get_item(item_name)
        item.on_reply(c.user, parent)
Example #25
def comment_reply_effect(comment):
    if comment.parent_id is not None:
        parent = Comment._byID(comment.parent_id, data=True)
    else:
        parent = Link._byID(comment.link_id, data=True)
    all_effects = effects.get_all_effects([parent._fullname])
    parent_effects = all_effects.get(parent._fullname, [])
    for item_name in parent_effects:
        item = items.get_item(item_name)
        item.on_reply(c.user, parent)
Example #26
def fix_about_post():
    user = Account._by_name('Eliezer_Yudkowsky')
    l = Link._byID(1, data=True)
    # l = Link._byID(int('1i', 36))
    if l.url.lower() == 'self':
        l.url = l.make_permalink_slow()
        l.is_self = True
        l._commit()
        l.set_url_cache()
    v = Vote.vote(user, l, True, l.ip, False)
Example #27
def deactivate_campaign(campaign):
    # Do we need to deactivate the link objects and map?
    # Campaign can get voided without ever having been sent to adzerk!
    link = Link._byID(campaign.link_id, data=True)
    if not (hasattr(link, 'adzerk_campaign_id')
            and hasattr(campaign, 'adzerk_flight_id')):
        return

    az_flight = update_flight(link, campaign)
    az_flight.IsActive = False
    az_flight._send()
Example #28
def deactivate_campaign(campaign):
    # Do we need to deactivate the link objects and map?
    # Campaign can get voided without ever having been sent to adzerk!
    link = Link._byID(campaign.link_id, data=True)
    if not (hasattr(link, 'adzerk_campaign_id') and
            hasattr(campaign, 'adzerk_flight_id')):
        return

    az_flight = update_flight(link, campaign)
    az_flight.IsActive = False
    az_flight._send()
Example #29
    def fields(self, thing):
        '''Return fields relevant to a Link search index'''
        account = self.accounts[thing.author_id]
        sr = self.srs[thing.sr_id]
        if isinstance(thing, Comment):
            comment = thing
            link = Link._byID(thing.link_id, data=True, return_dict=False)
        else:
            comment = None
            link = thing
        return LinkFields(link, account, sr, comment).fields()
Example #30
def finalize_completed_campaigns(daysago=1):
    # PromoCampaign.end_date is utc datetime with year, month, day only
    now = datetime.datetime.now(g.tz)
    date = now - datetime.timedelta(days=daysago)
    date = date.replace(hour=0, minute=0, second=0, microsecond=0)

    q = PromoCampaign._query(
        PromoCampaign.c.end_date == date,
        # exclude no transaction
        PromoCampaign.c.trans_id != NO_TRANSACTION,
        data=True)
    # filter out freebies
    campaigns = filter(lambda camp: camp.trans_id > NO_TRANSACTION, q)

    if not campaigns:
        return

    # check that traffic is up to date
    earliest_campaign = min(campaigns, key=lambda camp: camp.start_date)
    start, end = get_total_run(earliest_campaign)
    missing_traffic = traffic.get_missing_traffic(start.replace(tzinfo=None),
                                                  date.replace(tzinfo=None))
    if missing_traffic:
        raise ValueError("Can't finalize campaigns finished on %s."
                         "Missing traffic from %s" % (date, missing_traffic))

    links = Link._byID([camp.link_id for camp in campaigns], data=True)
    underdelivered_campaigns = []

    for camp in campaigns:
        if hasattr(camp, 'refund_amount'):
            continue

        link = links[camp.link_id]
        billable_impressions = get_billable_impressions(camp)
        billable_amount = get_billable_amount(camp, billable_impressions)

        if billable_amount >= camp.total_budget_pennies:
            if hasattr(camp, 'cpm'):
                text = '%s completed with $%s billable (%s impressions @ $%s).'
                text %= (camp, billable_amount, billable_impressions,
                         camp.bid_dollars)
            else:
                text = '%s completed with $%s billable (pre-CPM).'
                text %= (camp, billable_amount)
            PromotionLog.add(link, text)
            camp.refund_amount = 0.
            camp._commit()
        elif charged_or_not_needed(camp):
            underdelivered_campaigns.append(camp)

        if underdelivered_campaigns:
            queries.set_underdelivered_campaigns(underdelivered_campaigns)
Example #31
    def add_props(cls, user, wrapped):
        user_fullnames = {w.user_fullname for w in wrapped}
        target_fullnames = {w.target_fullname for w in wrapped}

        users = Account._by_fullname(user_fullnames,
                                     data=True,
                                     return_dict=True)
        targets = Thing._by_fullname(target_fullnames,
                                     data=True,
                                     return_dict=True)

        author_ids = {
            t.author_id
            for t in targets.itervalues() if hasattr(t, 'author_id')
        }
        link_ids = {
            t.link_id
            for t in targets.itervalues() if hasattr(t, 'link_id')
        }
        sr_ids = {t.sr_id for t in targets.itervalues() if hasattr(t, 'sr_id')}

        authors = Account._byID(author_ids, data=True, return_dict=True)
        links = Link._byID(link_ids, data=True, return_dict=True)
        subreddits = Subreddit._byID(sr_ids, data=True, return_dict=True)

        target_things = {}
        for fullname, target in targets.iteritems():
            if isinstance(target, (Comment, Link)):
                author = authors[target.author_id]
                if isinstance(target, Link):
                    subreddit = subreddits[target.sr_id]
                    path = target.make_permalink(subreddit)
                else:
                    link = links[target.link_id]
                    subreddit = subreddits[link.sr_id]
                    path = target.make_permalink(link, subreddit)
                target_things[fullname] = GameLogTarget(
                    target, path, author, subreddit)
            elif isinstance(target, Account):
                target_things[fullname] = WrappedUser(target)

        for w in wrapped:
            w.is_self = (c.user_is_loggedin
                         and w.user_fullname == c.user._fullname)
            w.user = WrappedUser(users[w.user_fullname])
            w.target = target_things[w.target_fullname]
            w.item = g.f2pitems[w.item]
            w.user_team = scores.get_user_team(users[w.user_fullname])
            if isinstance(w.target, WrappedUser):
                target_user = targets[w.target.fullname]
            else:
                target_user = authors[targets[w.target_fullname].author_id]
            w.target_team = scores.get_user_team(target_user)
Example #32
def finalize_completed_campaigns(daysago=1):
    # PromoCampaign.end_date is utc datetime with year, month, day only
    now = datetime.datetime.now(g.tz)
    date = now - datetime.timedelta(days=daysago)
    date = date.replace(hour=0, minute=0, second=0, microsecond=0)

    q = PromoCampaign._query(PromoCampaign.c.end_date == date,
                             # exclude no transaction
                             PromoCampaign.c.trans_id != NO_TRANSACTION,
                             data=True)
    # filter out freebies
    campaigns = filter(lambda camp: camp.trans_id > NO_TRANSACTION, q)

    if not campaigns:
        return

    # check that traffic is up to date
    earliest_campaign = min(campaigns, key=lambda camp: camp.start_date)
    start, end = get_total_run(earliest_campaign)
    missing_traffic = traffic.get_missing_traffic(start.replace(tzinfo=None),
                                                  date.replace(tzinfo=None))
    if missing_traffic:
        raise ValueError("Can't finalize campaigns finished on %s."
                         "Missing traffic from %s" % (date, missing_traffic))

    links = Link._byID([camp.link_id for camp in campaigns], data=True)
    underdelivered_campaigns = []

    for camp in campaigns:
        if hasattr(camp, 'refund_amount'):
            continue

        link = links[camp.link_id]
        billable_impressions = get_billable_impressions(camp)
        billable_amount = get_billable_amount(camp, billable_impressions)

        if billable_amount >= camp.total_budget_pennies:
            if hasattr(camp, 'cpm'):
                text = '%s completed with $%s billable (%s impressions @ $%s).'
                text %= (camp, billable_amount, billable_impressions,
                    camp.bid_dollars)
            else:
                text = '%s completed with $%s billable (pre-CPM).'
                text %= (camp, billable_amount) 
            PromotionLog.add(link, text)
            camp.refund_amount = 0.
            camp._commit()
        elif charged_or_not_needed(camp):
            underdelivered_campaigns.append(camp)

        if underdelivered_campaigns:
            queries.set_underdelivered_campaigns(underdelivered_campaigns)
Example #33
def finalize_completed_campaigns(daysago=1):
    # PromoCampaign.end_date is utc datetime with year, month, day only
    now = datetime.now(g.tz)
    date = now - timedelta(days=daysago)
    date = date.replace(hour=0, minute=0, second=0, microsecond=0)

    q = PromoCampaign._query(
        PromoCampaign.c.end_date == date,
        # exclude no transaction and freebies
        PromoCampaign.c.trans_id > 0,
        data=True,
    )
    campaigns = list(q)

    if not campaigns:
        return

    # check that traffic is up to date
    earliest_campaign = min(campaigns, key=lambda camp: camp.start_date)
    start, end = get_total_run(earliest_campaign)
    missing_traffic = traffic.get_missing_traffic(start.replace(tzinfo=None), date.replace(tzinfo=None))
    if missing_traffic:
        raise ValueError("Can't finalize campaigns finished on %s." "Missing traffic from %s" % (date, missing_traffic))

    links = Link._byID([camp.link_id for camp in campaigns], data=True)
    underdelivered_campaigns = []

    for camp in campaigns:
        if hasattr(camp, "refund_amount"):
            continue

        link = links[camp.link_id]
        billable_impressions = get_billable_impressions(camp)
        billable_amount = get_billable_amount(camp, billable_impressions)

        if billable_amount >= camp.bid:
            if hasattr(camp, "cpm"):
                text = "%s completed with $%s billable (%s impressions @ $%s)."
                text %= (camp, billable_amount, billable_impressions, camp.cpm)
            else:
                text = "%s completed with $%s billable (pre-CPM)."
                text %= (camp, billable_amount)
            PromotionLog.add(link, text)
            camp.refund_amount = 0.0
            camp._commit()
        else:
            underdelivered_campaigns.append(camp)

        if underdelivered_campaigns:
            set_underdelivered_campaigns(underdelivered_campaigns)
Example #34
    def _run_commentstree(msg):
        fname = msg.body
        comment = Comment._by_fullname(fname, data=True)

        link = Link._byID(comment.link_id, data=True)

        try:
            add_comment_tree(comment, link)
        except KeyError:
            # Hackity hack. Try to recover from a corrupted comment
            # tree
            print "Trying to fix broken comments-tree."
            link_comments(link._id, _update=True)
            add_comment_tree(comment, link)
Example #35
    def GET_show(self, meetup, sort, num_comments):
        article = Link._byID(meetup.assoc_link)

        # figure out number to show based on the menu
        user_num = c.user.pref_num_comments or g.num_comments
        num = g.max_comments if num_comments == 'true' else user_num

        builder = CommentBuilder(article, CommentSortMenu.operator(sort), None,
                                 None)
        listing = NestedListing(builder,
                                num=num,
                                parent_name=article._fullname)
        displayPane = PaneStack()

        # insert reply box only for logged in user
        if c.user_is_loggedin:
            displayPane.append(CommentReplyBox())
            displayPane.append(CommentReplyBox(link_name=article._fullname))

        # finally add the comment listing
        displayPane.append(listing.listing())

        sort_menu = CommentSortMenu(default=sort, type='dropdown2')
        nav_menus = [
            sort_menu,
            NumCommentsMenu(article.num_comments, default=num_comments)
        ]

        content = CommentListing(
            content=displayPane,
            num_comments=article.num_comments,
            nav_menus=nav_menus,
        )

        # Update last viewed time, and return the previous last viewed time.  Actually tracked on the article
        lastViewed = None
        if c.user_is_loggedin:
            clicked = article._getLastClickTime(c.user)
            lastViewed = clicked._date if clicked else None
            article._click(c.user)

        res = ShowMeetup(meetup=meetup,
                         content=content,
                         fullname=article._fullname,
                         lastViewed=lastViewed)

        return BoringPage(pagename=meetup.title,
                          content=res,
                          body_class='meetup').render()
Example #36
    def _run_commentstree(msg):
        fname = msg.body
        comment = Comment._by_fullname(fname, data=True)

        link = Link._byID(comment.link_id,
                          data=True)

        try:
            add_comment_tree(comment, link)
        except KeyError:
            # Hackity hack. Try to recover from a corrupted comment
            # tree
            print "Trying to fix broken comments-tree."
            link_comments(link._id, _update=True)
            add_comment_tree(comment, link)
Example #37
def get_comment_items(srs, src, count=4):
    """Get hot links from srs, plus top comment from each link."""
    link_fullnames = normalized_hot([sr._id for sr in srs])
    hot_links = Link._by_fullname(link_fullnames[:count], return_dict=False)
    top_comments = []
    for link in hot_links:
        builder = CommentBuilder(
            link, operators.desc("_confidence"), comment=None, context=None, num=1, load_more=False
        )
        listing = NestedListing(builder, parent_name=link._fullname).listing()
        top_comments.extend(listing.things)
    srs = Subreddit._byID([com.sr_id for com in top_comments])
    links = Link._byID([com.link_id for com in top_comments])
    comment_items = [ExploreItem(TYPE_COMMENT, src, srs[com.sr_id], links[com.link_id], com) for com in top_comments]
    return comment_items
Example #38
def nameaserver_vote_lockdown(thing):
    if getattr(thing, "sr_id", None):
        sr = Subreddit._byID(thing.sr_id, data=True)
        if sr.name == g.gold_servername_sr:
            if isinstance(thing, Link):
                # no votes on links in this subreddit
                abort(403, "Forbidden")
            elif isinstance(thing, Comment):
                # only allow votes on comments in active threads by people
                # who bought gold.
                link = Link._byID(thing.link_id, data=True)

                if (hasattr(link, "revenue_date") and
                    (link.server_names or
                     c.user._id not in gold_buyers_on(link.revenue_date))):
                    abort(403, "Forbidden")
Example #39
def nameaserver_vote_lockdown(thing):
    if getattr(thing, "sr_id", None):
        sr = Subreddit._byID(thing.sr_id, data=True)
        if sr.name == g.gold_servername_sr:
            if isinstance(thing, Link):
                # no votes on links in this subreddit
                abort(403, "Forbidden")
            elif isinstance(thing, Comment):
                # only allow votes on comments in active threads by people
                # who bought gold.
                link = Link._byID(thing.link_id, data=True)

                if (hasattr(link, "revenue_date") and
                    (link.server_names
                     or c.user._id not in gold_buyers_on(link.revenue_date))):
                    abort(403, "Forbidden")
Example #40
    def add_props(cls, user, wrapped):
        user_fullnames = {w.user_fullname for w in wrapped}
        target_fullnames = {w.target_fullname for w in wrapped}

        users = Account._by_fullname(user_fullnames, data=True,
                                     return_dict=True)
        targets = Thing._by_fullname(target_fullnames, data=True,
                                     return_dict=True)

        author_ids = {t.author_id for t in targets.itervalues()
                      if hasattr(t, 'author_id')}
        link_ids = {t.link_id for t in targets.itervalues()
                    if hasattr(t, 'link_id')}
        sr_ids = {t.sr_id for t in targets.itervalues() if hasattr(t, 'sr_id')}

        authors = Account._byID(author_ids, data=True, return_dict=True)
        links = Link._byID(link_ids, data=True, return_dict=True)
        subreddits = Subreddit._byID(sr_ids, data=True, return_dict=True)

        target_things = {}
        for fullname, target in targets.iteritems():
            if isinstance(target, (Comment, Link)):
                author = authors[target.author_id]
                if isinstance(target, Link):
                    subreddit = subreddits[target.sr_id]
                    path = target.make_permalink(subreddit)
                else:
                    link = links[target.link_id]
                    subreddit = subreddits[link.sr_id]
                    path = target.make_permalink(link, subreddit)
                target_things[fullname] = GameLogTarget(target, path, author,
                                                        subreddit)
            elif isinstance(target, Account):
                target_things[fullname] = WrappedUser(target)

        for w in wrapped:
            w.is_self = (c.user_is_loggedin and
                         w.user_fullname == c.user._fullname)
            w.user = WrappedUser(users[w.user_fullname])
            w.target = target_things[w.target_fullname]
            w.item = g.f2pitems[w.item]
            w.user_team = scores.get_user_team(users[w.user_fullname])
            if isinstance(w.target, WrappedUser):
                target_user = targets[w.target.fullname]
            else:
                target_user = authors[targets[w.target_fullname].author_id]
            w.target_team = scores.get_user_team(target_user)
Example #41
    def _run_commentstree(msgs, chan):
        fnames = [msg.body for msg in msgs]
        comments = Comment._by_fullname(fnames, data=True, return_dict=False)

        links = Link._byID(set(cm.link_id for cm in comments), data=True, return_dict=True)

        # add the comment to the comments-tree
        for comment in comments:
            l = links[comment.link_id]
            try:
                add_comment_tree(comment, l)
            except KeyError:
                # Hackity hack. Try to recover from a corrupted
                # comment tree
                print "Trying to fix broken comments-tree."
                link_comments(l._id, _update=True)
                add_comment_tree(comment, l)
Example #42
  def GET_show(self, meetup, sort, num_comments):
    article = Link._byID(meetup.assoc_link)

    # figure out number to show based on the menu
    user_num = c.user.pref_num_comments or g.num_comments
    num = g.max_comments if num_comments == 'true' else user_num

    builder = CommentBuilder(article, CommentSortMenu.operator(sort), None, None)
    listing = NestedListing(builder, num=num, parent_name = article._fullname)
    displayPane = PaneStack()
    
    # insert reply box only for logged in user
    if c.user_is_loggedin:
      displayPane.append(CommentReplyBox())
      displayPane.append(CommentReplyBox(link_name = 
                                         article._fullname))

    # finally add the comment listing
    displayPane.append(listing.listing())

    sort_menu = CommentSortMenu(default = sort, type='dropdown2')
    nav_menus = [sort_menu,
                 NumCommentsMenu(article.num_comments,
                                 default=num_comments)]

    content = CommentListing(
      content = displayPane,
      num_comments = article.num_comments,
      nav_menus = nav_menus,
      )


    # Update last viewed time, and return the previous last viewed time.  Actually tracked on the article
    lastViewed = None
    if c.user_is_loggedin:
      clicked = article._getLastClickTime(c.user)
      lastViewed = clicked._date if clicked else None
      article._click(c.user)

    res = ShowMeetup(meetup = meetup, content = content, 
                     fullname=article._fullname,
                     lastViewed = lastViewed)

    return BoringPage(pagename = meetup.title, 
                      content = res,
                      body_class = 'meetup').render()
Example #43
    def comment_event(self, new_comment, request=None, context=None):
        """Create a 'comment' event for event-collector.

        new_comment: An r2.models.Comment object
        request, context: Should be pylons.request & pylons.c respectively
        """
        from r2.models import Comment, Link

        event = Event(
            topic="comment_events",
            event_type="ss.comment",
            time=new_comment._date,
            request=request,
            context=context,
            truncatable_field="comment_body",
        )

        event.add("comment_id", new_comment._id)
        event.add("comment_fullname", new_comment._fullname)

        event.add_text("comment_body", new_comment.body)

        post = Link._byID(new_comment.link_id)
        event.add("post_id", post._id)
        event.add("post_fullname", post._fullname)
        event.add("post_created_ts", to_epoch_milliseconds(post._date))
        if post.promoted:
            event.add("post_is_promoted", bool(post.promoted))

        if new_comment.parent_id:
            parent = Comment._byID(new_comment.parent_id)
        else:
            # If this is a top-level comment, parent is the same as the post
            parent = post
        event.add("parent_id", parent._id)
        event.add("parent_fullname", parent._fullname)
        event.add("parent_created_ts", to_epoch_milliseconds(parent._date))

        event.add("user_neutered", new_comment.author_slow._spam)

        event.add_subreddit_fields(new_comment.subreddit_slow)

        self.save_event(event)
Example #44
    def _run_commentstree(msgs, chan):
        fnames = [msg.body for msg in msgs]
        comments = Comment._by_fullname(fnames, data=True, return_dict=False)

        links = Link._byID(set(cm.link_id for cm in comments),
                           data=True,
                           return_dict=True)

        # add the comment to the comments-tree
        for comment in comments:
            l = links[comment.link_id]
            try:
                add_comment_tree(comment, l)
            except KeyError:
                # Hackity hack. Try to recover from a corrupted
                # comment tree
                print "Trying to fix broken comments-tree."
                link_comments(l._id, _update=True)
                add_comment_tree(comment, l)
Example #45
def promote_v2():
    # alter table bids add column campaign integer;
    # update bids set campaign = 0; 
    from r2.models import Link, NotFound, PromoteDates, Bid
    from datetime import datetime
    from pylons import g
    for p in PromoteDates.query():
        try:
            l = Link._by_fullname(p.thing_name,
                                  data = True, return_dict = False)
            if not l:
                raise NotFound, p.thing_name

            # update the promote status
            l.promoted = True
            l.promote_status = getattr(l, "promote_status", STATUS.unseen)
            l._date = datetime(*(list(p.start_date.timetuple()[:7]) + [g.tz]))
            set_status(l, l.promote_status)

            # add new campaign
            print (l, (p.start_date, p.end_date), p.bid, None)
            if not p.bid:
                print "no bid? ", l
                p.bid = 20
            new_campaign(l, (p.start_date, p.end_date), p.bid, None)
            print "updated: %s (%s)" % (l, l._date)

        except NotFound:
            print "NotFound: %s" % p.thing_name

    print "updating campaigns"
    for b in Bid.query():
        l = Link._byID(int(b.thing_id))
        print "updating: ", l
        campaigns = getattr(l, "campaigns", {}).copy()
        indx = b.campaign
        if indx in campaigns:
            sd, ed, bid, sr, trans_id = campaigns[indx]
            campaigns[indx] = sd, ed, bid, sr, b.transaction
            l.campaigns = campaigns
            l._commit()
        else:
            print "no campaign information: ", l
Example #46
    def on_use(self, user, target):
        link = Link._byID(target.link_id)
        comment_tree = get_comment_tree(link)
        child_ids = comment_tree.tree[target._id]
        grandchild_ids = []
        for child_id in child_ids:
            grandchild_ids.extend(comment_tree.tree[child_id])

        comments = Comment._byID(child_ids + grandchild_ids, data=True,
                                 return_dict=True)
        children = [comments[cid] for cid in child_ids]
        grandchildren = [comments[cid] for cid in grandchild_ids]

        for comment in itertools.chain([target], children, grandchildren):
            effects.add_effect(user, comment, self.item_name)

        self.apply_damage_and_log(user, [target], self.direct_damage)
        self.apply_damage_and_log(user, children, self.child_damage)
        self.apply_damage_and_log(user, grandchildren, self.grandchild_damage)
Ejemplo n.º 47
0
    def comment_event(self, new_comment, request=None, context=None):
        """Create a 'comment' event for event-collector.

        new_comment: An r2.models.Comment object
        request, context: Should be pylons.request & pylons.c respectively
        """
        from r2.models import Comment, Link

        event = Event(
            topic="comment_events",
            event_type="ss.comment",
            time=new_comment._date,
            request=request,
            context=context,
            truncatable_field="comment_body",
        )

        event.add("comment_id", new_comment._id)
        event.add("comment_fullname", new_comment._fullname)

        event.add_text("comment_body", new_comment.body)

        post = Link._byID(new_comment.link_id)
        event.add("post_id", post._id)
        event.add("post_fullname", post._fullname)
        event.add("post_created_ts", to_epoch_milliseconds(post._date))
        if post.promoted:
            event.add("post_is_promoted", bool(post.promoted))

        if new_comment.parent_id:
            parent = Comment._byID(new_comment.parent_id)
        else:
            # If this is a top-level comment, parent is the same as the post
            parent = post
        event.add("parent_id", parent._id)
        event.add("parent_fullname", parent._fullname)
        event.add("parent_created_ts", to_epoch_milliseconds(parent._date))

        event.add("user_neutered", new_comment.author_slow._spam)

        event.add_subreddit_fields(new_comment.subreddit_slow)

        self.save_event(event)
Example #48
def promote_v2():
    # alter table bids add column campaign integer;
    # update bids set campaign = 0; 
    from r2.models import Link, NotFound, PromoteDates, Bid
    from datetime import datetime
    from pylons import g
    for p in PromoteDates.query():
        try:
            l = Link._by_fullname(p.thing_name,
                                  data = True, return_dict = False)
            if not l:
                raise NotFound, p.thing_name

            # update the promote status
            l.promoted = True
            l.promote_status = getattr(l, "promote_status", STATUS.unseen)
            l._date = datetime(*(list(p.start_date.timetuple()[:7]) + [g.tz]))
            set_status(l, l.promote_status)

            # add new campaign
            print (l, (p.start_date, p.end_date), p.bid, None)
            if not p.bid:
                print "no bid? ", l
                p.bid = 20
            new_campaign(l, (p.start_date, p.end_date), p.bid, None)
            print "updated: %s (%s)" % (l, l._date)

        except NotFound:
            print "NotFound: %s" % p.thing_name

    print "updating campaigns"
    for b in Bid.query():
        l = Link._byID(int(b.thing_id))
        print "updating: ", l
        campaigns = getattr(l, "campaigns", {}).copy()
        indx = b.campaign
        if indx in campaigns:
            sd, ed, bid, sr, trans_id = campaigns[indx]
            campaigns[indx] = sd, ed, bid, sr, b.transaction
            l.campaigns = campaigns
            l._commit()
        else:
            print "no campaign information: ", l
Example #49
    def on_use(self, user, target):
        link = Link._byID(target.link_id)
        comment_tree = get_comment_tree(link)
        child_ids = comment_tree.tree[target._id]
        grandchild_ids = []
        for child_id in child_ids:
            grandchild_ids.extend(comment_tree.tree[child_id])

        comments = Comment._byID(child_ids + grandchild_ids,
                                 data=True,
                                 return_dict=True)
        children = [comments[cid] for cid in child_ids]
        grandchildren = [comments[cid] for cid in grandchild_ids]

        for comment in itertools.chain([target], children, grandchildren):
            effects.add_effect(user, comment, self.item_name)

        self.apply_damage_and_log(user, [target], self.direct_damage)
        self.apply_damage_and_log(user, children, self.child_damage)
        self.apply_damage_and_log(user, grandchildren, self.grandchild_damage)
Example #50
def posts_with_divs():
    link_count = max_link_id()
    print >> sys.stderr, "# %d links to process" % link_count
    for link_id in xrange(link_count):
        try:
            link = Link._byID(link_id, data=True)
        except NotFound:
            continue

        if hasattr(link, 'ob_permalink'):
            article = link.article
            if isinstance(article, str):
                try:
                    article = article.decode('utf-8')
                except UnicodeDecodeError:
                    print >> sys.stderr, "UnicodeDecodeError, using 'ignore' error mode, link: %d" % link._id
                    article = article.decode('utf-8', errors='ignore')

            if '<div' in article:
                print >> sys.stderr, link.canonical_url.encode('utf-8')
Example #51
def posts_with_divs():
    link_count = max_link_id()
    print >>sys.stderr, "# %d links to process" % link_count
    for link_id in xrange(link_count):
        try:
            link = Link._byID(link_id, data=True)
        except NotFound:
            continue
    
        if hasattr(link, 'ob_permalink'):
            article = link.article
            if isinstance(article, str):
                try:
                    article = article.decode('utf-8')
                except UnicodeDecodeError:
                    print >>sys.stderr, "UnicodeDecodeError, using 'ignore' error mode, link: %d" % link._id
                    article = article.decode('utf-8', errors='ignore')
                
            if '<div' in article:
                print >>sys.stderr, link.canonical_url.encode('utf-8')
Example #52
def port_cassaurls(after_id=None, estimate=15231317):
    from r2.models import Link, LinksByUrlAndSubreddit
    from r2.lib.db import tdb_cassandra
    from r2.lib.db.operators import desc
    from r2.lib.db.tdb_cassandra import CL
    from r2.lib.utils import fetch_things2, in_chunks, progress

    q = Link._query(Link.c._spam == (True, False),
                    sort=desc('_date'), data=True)
    if after_id:
        q._after(Link._byID(after_id,data=True))
    q = fetch_things2(q, chunk_size=500)
    q = progress(q, estimate=estimate)
    q = (l for l in q
         if getattr(l, 'url', 'self') != 'self'
         and not getattr(l, 'is_self', False))
    chunks = in_chunks(q, 500)

    for chunk in chunks:
        for l in chunk:
            LinksByUrlAndSubreddit.add_link(l)
Example #53
def get_comment_items(srs, src, count=4):
    """Get hot links from srs, plus top comment from each link."""
    link_fullnames = normalized_hot([sr._id for sr in srs])
    hot_links = Link._by_fullname(link_fullnames[:count], return_dict=False)
    top_comments = []
    for link in hot_links:
        builder = CommentBuilder(link,
                                 operators.desc('_confidence'),
                                 comment=None,
                                 context=None,
                                 load_more=False)
        listing = NestedListing(builder, num=1,
                                parent_name=link._fullname).listing()
        top_comments.extend(listing.things)
    srs = Subreddit._byID([com.sr_id for com in top_comments])
    links = Link._byID([com.link_id for com in top_comments])
    comment_items = [
        ExploreItem(TYPE_COMMENT, src, srs[com.sr_id], links[com.link_id], com)
        for com in top_comments
    ]
    return comment_items
Example #54
def port_cassaurls(after_id=None, estimate=15231317):
    from r2.models import Link, LinksByUrlAndSubreddit
    from r2.lib.db import tdb_cassandra
    from r2.lib.db.operators import desc
    from r2.lib.db.tdb_cassandra import CL
    from r2.lib.utils import fetch_things2, in_chunks, progress

    q = Link._query(Link.c._spam == (True, False),
                    sort=desc('_date'),
                    data=True)
    if after_id:
        q._after(Link._byID(after_id, data=True))
    q = fetch_things2(q, chunk_size=500)
    q = progress(q, estimate=estimate)
    q = (l for l in q if getattr(l, 'url', 'self') != 'self'
         and not getattr(l, 'is_self', False))
    chunks = in_chunks(q, 500)

    for chunk in chunks:
        for l in chunk:
            LinksByUrlAndSubreddit.add_link(l)
Ejemplo n.º 55
0
def port_cassaurls(after_id=None, estimate=15231317):
    from r2.models import Link, LinksByUrl
    from r2.lib.db import tdb_cassandra
    from r2.lib.db.operators import desc
    from r2.lib.db.tdb_cassandra import CL
    from r2.lib.utils import fetch_things2, in_chunks, progress

    q = Link._query(Link.c._spam == (True, False), sort=desc("_date"), data=True)
    if after_id:
        q._after(Link._byID(after_id, data=True))
    q = fetch_things2(q, chunk_size=500)
    q = progress(q, estimate=estimate)
    q = (l for l in q if getattr(l, "url", "self") != "self" and not getattr(l, "is_self", False))
    chunks = in_chunks(q, 500)

    for chunk in chunks:
        with LinksByUrl._cf.batch(write_consistency_level=CL.ONE) as b:
            for l in chunk:
                k = LinksByUrl._key_from_url(l.url)
                if k:
                    b.insert(k, {l._id36: l._id36})
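The batch above writes one row per normalized URL key, with one column per link whose name and value are both the link's id36. An in-memory sketch of that row shape, using a hypothetical stand-in for LinksByUrl._key_from_url (the real normalization lives in r2):

def key_from_url_sketch(url):
    # Hypothetical normalization: lower-case, strip the scheme and any trailing slash.
    url = url.lower()
    for prefix in ('http://', 'https://'):
        if url.startswith(prefix):
            url = url[len(prefix):]
    return url.rstrip('/') or None

rows = {}
for id36, url in [('c1', 'http://Example.com/a'), ('c2', 'https://example.com/a/')]:
    k = key_from_url_sketch(url)
    if k:
        rows.setdefault(k, {})[id36] = id36
# rows == {'example.com/a': {'c1': 'c1', 'c2': 'c2'}}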
Ejemplo n.º 56
0
def staging_links():
    from r2.models import Link
    ids = (
        int('1', 36),
        int('4', 36),
        int('5', 36),
        int('6', 36),
        int('8', 36),
        int('9', 36),
        int('a', 36),
        int('3', 36),
        int('7', 36),
        int('b', 36),
        int('c', 36),
        int('d', 36),
        int('e', 36),
        int('f', 36),
        int('k', 36),
        int('l', 36),
        int('j', 36),
        int('p', 36),
        int('q', 36),
        int('r', 36),
        int('t', 36),
        int('u', 36),
        int('y', 36),
        int('11', 36),
        int('12', 36),
        int('z', 36),
        int('2', 36),
        int('13', 36),
        int('o', 36),
        int('n', 36),
        int('10', 36),
        int('i', 36),
        int('h', 36),
        int('g', 36),
    )
    return Link._byID(ids, data=True, return_dict=False)
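The tuple above is a hand-maintained list of base-36 "id36" strings converted back to the integer ids that Link._byID expects. A small hypothetical helper (not part of the original function) makes the intent explicit:

def ids_from_id36(id36s):
    # int(s, 36) reverses the base-36 encoding used in fullnames, e.g. 'c' -> 12, '11' -> 37.
    return tuple(int(s, 36) for s in id36s)

# staging_ids = ids_from_id36(['1', '4', '5', 'c', '11'])
# links = Link._byID(staging_ids, data=True, return_dict=False)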
Ejemplo n.º 57
0
    def POST_update(self, res, meetup, title, description, location, latitude,
                    longitude, timestamp, tzoffset):
        if res._chk_error(errors.NO_TITLE):
            res._chk_error(errors.TITLE_TOO_LONG)
            res._focus('title')

        res._chk_errors((errors.NO_LOCATION, errors.NO_DESCRIPTION,
                         errors.INVALID_DATE, errors.NO_DATE))

        if res.error: return

        meetup.title = title
        meetup.description = description

        meetup.location = location
        meetup.latitude = latitude
        meetup.longitude = longitude

        meetup.timestamp = timestamp
        meetup.tzoffset = tzoffset

        # Expire all meetups in the render cache
        g.rendercache.invalidate_key_group(Meetup.group_cache_key())

        meetup._commit()

        # Update the linked article
        article = Link._byID(meetup.assoc_link)
        article._load()
        article_old_url = article.url
        article.title = meetup_article_title(meetup)
        article.article = meetup_article_text(meetup)
        article._commit()
        article.update_url_cache(article_old_url)

        res._redirect(url_for(action='show', id=meetup._id36))
Ejemplo n.º 58
0
def port_cassaurls(after_id=None, estimate=15231317):
    from r2.models import Link, LinksByUrl
    from r2.lib.db import tdb_cassandra
    from r2.lib.db.operators import desc
    from r2.lib.db.tdb_cassandra import CL
    from r2.lib.utils import fetch_things2, in_chunks, progress

    q = Link._query(Link.c._spam == (True, False),
                    sort=desc('_date'),
                    data=True)
    if after_id:
        q._after(Link._byID(after_id, data=True))
    q = fetch_things2(q, chunk_size=500)
    q = progress(q, estimate=estimate)
    q = (l for l in q if getattr(l, 'url', 'self') != 'self'
         and not getattr(l, 'is_self', False))
    chunks = in_chunks(q, 500)

    for chunk in chunks:
        with LinksByUrl._cf.batch(write_consistency_level=CL.ONE) as b:
            for l in chunk:
                k = LinksByUrl._key_from_url(l.url)
                if k:
                    b.insert(k, {l._id36: l._id36})
Ejemplo n.º 59
0
    def add_props(cls, user, wrapped):

        from r2.lib.menus import NavButton
        from r2.lib.db.thing import Thing
        from r2.lib.pages import WrappedUser
        from r2.lib.filters import _force_unicode

        TITLE_MAX_WIDTH = 50

        request_path = request.path

        target_fullnames = [item.target_fullname for item in wrapped
                            if hasattr(item, 'target_fullname')]
        targets = Thing._by_fullname(target_fullnames, data=True)
        authors = Account._byID([t.author_id for t in targets.values()
                                 if hasattr(t, 'author_id')], data=True)
        links = Link._byID([t.link_id for t in targets.values()
                            if hasattr(t, 'link_id')], data=True)

        sr_ids = set([t.sr_id for t in targets.itervalues() if hasattr(t, 'sr_id')] +
                     [w.sr_id for w in wrapped])
        subreddits = Subreddit._byID(sr_ids, data=True)

        # Assemble target links
        target_links = {}
        target_accounts = {}
        for fullname, target in targets.iteritems():
            if isinstance(target, Link):
                author = authors[target.author_id]
                title = _force_unicode(target.title)
                if len(title) > TITLE_MAX_WIDTH:
                    short_title = title[:TITLE_MAX_WIDTH] + '...'
                else:
                    short_title = title
                text = '%(link)s "%(title)s" %(by)s %(author)s' % {
                        'link': _('link'),
                        'title': short_title, 
                        'by': _('by'),
                        'author': author.name}
                path = target.make_permalink(subreddits[target.sr_id])
                target_links[fullname] = (text, path, title)
            elif isinstance(target, Comment):
                author = authors[target.author_id]
                link = links[target.link_id]
                title = _force_unicode(link.title)
                if len(title) > TITLE_MAX_WIDTH:
                    short_title = title[:TITLE_MAX_WIDTH] + '...'
                else:
                    short_title = title
                text = '%(comment)s %(by)s %(author)s %(on)s "%(title)s"' % {
                        'comment': _('comment'),
                        'by': _('by'),
                        'author': author.name,
                        'on': _('on'),
                        'title': short_title}
                path = target.make_permalink(link, subreddits[link.sr_id])
                target_links[fullname] = (text, path, title)
            elif isinstance(target, Account):
                target_accounts[fullname] = WrappedUser(target)

        for item in wrapped:
            # Can I move these buttons somewhere else? Not great to have request stuff in here
            css_class = 'modactions %s' % item.action
            item.button = NavButton('', item.action, opt='type', css_class=css_class)
            item.button.build(base_path=request_path)

            mod_name = item.author.name
            item.mod = NavButton(mod_name, mod_name, opt='mod')
            item.mod.build(base_path=request_path)
            item.text = ModAction._text.get(item.action, '')
            item.details = item.get_extra_text()

            if hasattr(item, 'target_fullname') and item.target_fullname:
                target = targets[item.target_fullname]
                if isinstance(target, Account):
                    item.target_wrapped_user = target_accounts[item.target_fullname]
                elif isinstance(target, Link) or isinstance(target, Comment):
                    item.target_text, item.target_path, item.target_title = target_links[item.target_fullname]

            item.bgcolor = ModAction.get_rgb(item.sr_id)
            item.sr_name = subreddits[item.sr_id].name
            item.sr_path = subreddits[item.sr_id].path

        Printable.add_props(user, wrapped)
Ejemplo n.º 60
0
    def GET_edit_promo_campaign(self, campaign):
        if not campaign:
            return self.abort404()
        link = Link._byID(campaign.link_id)
        return self.redirect(promote.promo_edit_url(link))