Esempio n. 1
0
    def process_response(self):
        """Build a Webhook from the POSTed gold-transaction form data.

        Returns a (status, webhook) tuple; status is always 'succeeded'.
        """
        data = request.POST

        transaction_id = 'RG%s' % data['transaction_id']
        pennies = int(data['pennies'])
        months = int(data['months'])

        goldtype = data['goldtype']
        buyer = Account._by_name(data['buyer'])

        gift_kw = {}
        if goldtype == 'gift':
            # gift purchases carry recipient / message / signature extras
            gift_kw = dict(
                recipient=Account._by_name(data['recipient']),
                giftmessage=_force_utf8(data.get('giftmessage', None)),
                signed=(data.get('signed') == 'True'),
            )

        webhook = Webhook(
            transaction_id=transaction_id,
            pennies=pennies,
            months=months,
            goldtype=goldtype,
            buyer=buyer,
            **gift_kw)
        return 'succeeded', webhook
Esempio n. 2
0
def recompute_unread(min_date = None):
    from r2.models import Inbox, Account, Comment, Message
    from r2.lib.db import queries

    def load_accounts(inbox_rel):
        accounts = set()
        q = inbox_rel._query(eager_load = False, data = False,
                             sort = desc("_date"))
        if min_date:
            q._filter(inbox_rel.c._date > min_date)

        for i in fetch_things2(q):
            accounts.add(i._thing1_id)

        return accounts

    accounts_m = load_accounts(Inbox.rel(Account, Message))
    for i, a in enumerate(accounts_m):
        a = Account._byID(a)
        print "%s / %s : %s" % (i, len(accounts_m), a)
        queries.get_unread_messages(a).update()
        queries.get_unread_comments(a).update()
        queries.get_unread_selfreply(a).update()

    accounts = load_accounts(Inbox.rel(Account, Comment)) - accounts_m
    for i, a in enumerate(accounts):
        a = Account._byID(a)
        print "%s / %s : %s" % (i, len(accounts), a)
        queries.get_unread_comments(a).update()
        queries.get_unread_selfreply(a).update()
Esempio n. 3
0
File: ipn.py Progetto: dinxx/reddit
def validate_blob(custom):
    """Validate payment_blob and return a dict with everything looked up.

    Resolves the buyer (by account id/name pair, or by email), the
    goldtype, and — for gifts — the recipient, optional thing, signature
    flag, and gift message.  Raises GoldException on any missing or
    inconsistent field.
    """
    ret = {}

    if not custom:
        raise GoldException('no custom')

    payment_blob = g.hardcache.get('payment_blob-%s' % str(custom))
    if not payment_blob:
        raise GoldException('no payment_blob')

    if 'account_id' in payment_blob and 'account_name' in payment_blob:
        try:
            buyer = Account._byID(payment_blob['account_id'], data=True)
            ret['buyer'] = buyer
        except NotFound:
            raise GoldException('bad account_id')

        # guard against a stale blob pointing at a renamed/reused id
        if buyer.name.lower() != payment_blob['account_name'].lower():
            raise GoldException('buyer mismatch')
    elif 'email' in payment_blob:
        ret['email'] = payment_blob['email']
    else:
        raise GoldException('no account_id or email')

    goldtype = payment_blob['goldtype']
    ret['goldtype'] = goldtype

    if goldtype == 'gift':
        recipient_name = payment_blob.get('recipient', None)
        if not recipient_name:
            # message previously misspelled as 'recpient'
            raise GoldException('gift missing recipient')
        try:
            recipient = Account._by_name(recipient_name)
            ret['recipient'] = recipient
        except NotFound:
            raise GoldException('bad recipient')
        thing_fullname = payment_blob.get('thing', None)
        if thing_fullname:
            try:
                ret['thing'] = Thing._by_fullname(thing_fullname)
            except NotFound:
                raise GoldException('bad thing')
        ret['signed'] = payment_blob.get('signed', False)
        giftmessage = payment_blob.get('giftmessage')
        giftmessage = _force_unicode(giftmessage) if giftmessage else None
        ret['giftmessage'] = giftmessage
    elif goldtype not in ('onetime', 'autorenew', 'creddits', 'code'):
        raise GoldException('bad goldtype')

    return ret
Esempio n. 4
0
    def get_reports(cls, wrapped, max_user_reasons=20):
        """Get two lists of mod and user reports on the item.

        Mod reports come back as (reason, moderator name) tuples; user
        reports as (reason, count) tuples limited to the most common
        max_user_reasons reasons.  Returns ([], []) for unreported items
        or viewers without the necessary permissions.
        """
        promo_visible = (getattr(wrapped, "promoted", None) and
                         c.user_is_sponsor)
        if wrapped.reported <= 0 or not (wrapped.can_ban or promo_visible):
            return [], []

        from r2.models import SRMember

        reports = cls.for_thing(wrapped.lookups[0])

        # date each moderator gained their position, keyed by account id
        mod_q = SRMember._simple_query(
            ["_thing2_id", "_date"],
            SRMember.c._thing1_id == wrapped.sr_id,
            SRMember.c._name == "moderator",
        )
        mod_dates = {rel._thing2_id: rel._date for rel in mod_q}

        automoderator = None
        if g.automoderator_account:
            automoderator = Account._by_name(g.automoderator_account)

        mod_reports = []
        user_reports = []
        for report in reports:
            reporter_id = report._thing1_id
            if automoderator and reporter_id == automoderator._id:
                # always include AutoModerator reports
                mod_reports.append(report)
            elif (reporter_id in mod_dates and
                    report._date >= mod_dates[reporter_id]):
                # include reports made after the reporter became a mod
                mod_reports.append(report)
            else:
                user_reports.append(report)

        # mod reports return as tuples with (reason, name)
        mods = Account._byID([r._thing1_id for r in mod_reports],
                             data=True, return_dict=True)
        mod_reports = [(getattr(r, "reason", None),
                        mods[r._thing1_id].name)
                       for r in mod_reports]

        # user reports return as tuples with (reason, count)
        reason_counts = Counter(getattr(r, "reason", None)
                                for r in user_reports)
        return mod_reports, reason_counts.most_common(max_user_reasons)
Esempio n. 5
0
def validate_blob(custom):
    """Validate payment_blob and return a dict with everything looked up.

    Resolves the buyer (account id and name must agree), the goldtype,
    and — for gifts — the recipient, optional comment, signature flag,
    and gift message.  Raises GoldException on any missing or
    inconsistent field.
    """
    ret = {}

    if not custom:
        raise GoldException("no custom")

    payment_blob = g.hardcache.get("payment_blob-%s" % str(custom))
    if not payment_blob:
        raise GoldException("no payment_blob")

    if not ("account_id" in payment_blob and "account_name" in payment_blob):
        raise GoldException("no account_id")

    try:
        buyer = Account._byID(payment_blob["account_id"], data=True)
        ret["buyer"] = buyer
    except NotFound:
        raise GoldException("bad account_id")

    # guard against a stale blob pointing at a renamed/reused account id
    if buyer.name.lower() != payment_blob["account_name"].lower():
        raise GoldException("buyer mismatch")

    goldtype = payment_blob["goldtype"]
    ret["goldtype"] = goldtype

    if goldtype == "gift":
        recipient_name = payment_blob.get("recipient", None)
        if not recipient_name:
            # message previously misspelled as 'recpient'
            raise GoldException("gift missing recipient")
        try:
            recipient = Account._by_name(recipient_name)
            ret["recipient"] = recipient
        except NotFound:
            raise GoldException("bad recipient")
        comment_fullname = payment_blob.get("comment", None)
        if comment_fullname:
            try:
                ret["comment"] = Comment._by_fullname(comment_fullname)
            except NotFound:
                raise GoldException("bad comment")
        ret["signed"] = payment_blob.get("signed", False)
        giftmessage = payment_blob.get("giftmessage")
        giftmessage = _force_unicode(giftmessage) if giftmessage else None
        ret["giftmessage"] = giftmessage
    elif goldtype not in ("onetime", "autorenew", "creddits"):
        raise GoldException("bad goldtype")

    return ret
Esempio n. 6
0
def store_keys(key, maxes):
    """Insert precomputed (timestamp(s), fullname) tuples into the cached
    query listing named by ``key``.

    Key formats handled: 'user-<type>-<id>', 'sr-<sort>-<time>-<id>',
    'domain/<sort>/<time>/<domain>', and '<userrel>-<id>'.
    """
    # we're building queries using queries.py, but we could make the
    # queries ourselves if we wanted to avoid the individual lookups
    # for accounts and subreddits.

    # Note that we're only generating the 'sr-' type queries here, but
    # we're also able to process the other listings generated by the
    # old migrate.mr_permacache for convenience

    userrel_fns = {'liked': queries.get_liked,
                   'disliked': queries.get_disliked,
                   'saved': queries.get_saved,
                   'hidden': queries.get_hidden}

    def as_sort_tuples(items):
        # (ts1, ..., tsN, fname) -> (fname, float(ts1), ..., float(tsN))
        return [tuple([item[-1]] + [float(x) for x in item[:-1]])
                for item in items]

    if key.startswith('user-'):
        _, keytype, account_id = key.split('-')
        if keytype == 'submitted':
            fn = queries.get_submitted
        else:
            fn = queries.get_comments
        q = fn(Account._byID(int(account_id)), 'new', 'all')
        q._insert_tuples([(fname, float(timestamp))
                          for timestamp, fname in maxes])

    elif key.startswith('sr-'):
        _, sort, time, sr_id = key.split('-')
        if sort == 'controversy':
            # I screwed this up in the mapper and it's too late to fix it
            sort = 'controversial'
        q = queries.get_links(Subreddit._byID(int(sr_id)), sort, time)
        q._insert_tuples(as_sort_tuples(maxes))

    elif key.startswith('domain/'):
        _, sort, time, domain = key.split('/')
        q = queries.get_domain_links(domain, sort, time)
        q._insert_tuples(as_sort_tuples(maxes))

    elif key.split('-')[0] in userrel_fns:
        key_type, account_id = key.split('-')
        q = userrel_fns[key_type](Account._byID(int(account_id)))
        q._insert_tuples(as_sort_tuples(maxes))
Esempio n. 7
0
    def report(self):
        """Print a comparison of freshly computed karma values against the
        values currently stored on each account.

        In migrate mode the per-subreddit karma_ups/karma_downs pairs are
        netted and compared to the old-style '<sr>_<kind>_karma' attribute;
        otherwise each new value is compared attribute-for-attribute.
        """
        # NOTE(review): 'different' is only incremented in the migrate
        # branch, and it counts differing pairs while 'total' counts
        # accounts -- the summary line mixes units; confirm intent.
        different = 0
        total = len(self.new_values)
        logged_keys = set()

        for account_id, pairs in self.new_values.iteritems():
            try:
                account = Account._byID(account_id, data=True)
            except NotFound:
                # unknown/deleted accounts are skipped silently
                continue

            if self.migrate:
                for k, v in list(pairs.iteritems()):
                    # key layout: karma_<dir>_<kind>_<sr>
                    _, dir, kind, sr = k.split('_')
                    old_total = getattr(account, '{0}_{1}_karma'.format(sr, kind), 0)
                    # net karma from the ups/downs pair for this kind+sr
                    new_total = pairs['karma_ups_{0}_{1}'.format(kind, sr)] - \
                                pairs['karma_downs_{0}_{1}'.format(kind, sr)]
                    if old_total != new_total:
                        different += 1
                        # log each (account, kind, sr) combination only once
                        if (account.name, kind, sr) not in logged_keys:
                            logged_keys.add((account.name, kind, sr))
                            print('{0}["{1}_{2}"] differs - old={3}, new={4}'.format(
                                account.name, kind, sr, old_total, new_total))
            else:
                for k, v in pairs.iteritems():
                    old_v = getattr(account, k, 0)
                    if v != old_v:
                        print('{0} differs - old={1}, new={2}'.format(k, old_v, v))

        print('{0} out of {1} values differed'.format(different, total))
Esempio n. 8
0
def charge_pending(offset=1):
    """Attempt to charge the authorized transaction for every accepted
    campaign at the given day offset.

    Already-charged (or charge-free) campaigns are skipped.  A successful
    charge fires the 'promote.new_charge' hook, queues a promo email, and
    moves not-yet-live links to pending status.  Per-campaign failures are
    printed and processing continues.
    """
    for l, camp, weight in accepted_campaigns(offset=offset):
        user = Account._byID(l.author_id)
        try:
            if charged_or_not_needed(camp):
                continue

            charge_succeeded = authorize.charge_transaction(user, camp.trans_id,
                                                            camp._id)

            if not charge_succeeded:
                continue

            hooks.get_hook('promote.new_charge').call(link=l, campaign=camp)

            # not-yet-live links move to pending; the promo email is queued
            # either way (the original duplicated this call in both branches)
            if not is_promoted(l):
                set_promote_status(l, PROMOTE_STATUS.pending)
            emailer.queue_promo(l, camp.bid, camp.trans_id)

            text = ('auth charge for campaign %s, trans_id: %d' %
                    (camp._id, camp.trans_id))
            PromotionLog.add(l, text)
        except Exception:
            # was a bare 'except:', which also swallowed SystemExit and
            # KeyboardInterrupt; keep processing remaining campaigns
            print "Error on %s, campaign %s" % (l, camp._id)
Esempio n. 9
0
def send_system_message(user, subject, body, system_user=None,
                        distinguished='admin', repliable=False,
                        add_to_sent=True, author=None):
    """Deliver an admin-distinguished message to ``user``.

    The message is authored by ``author`` (defaulting to the system user)
    but displayed as coming from ``system_user``.  Logs a warning and
    returns early when no system user is configured.  Raises MessageError
    if registering the message in the inbox queues fails.
    """
    from r2.lib.db import queries

    if system_user is None:
        system_user = Account.system_user()
    if not system_user:
        g.log.warning("Can't send system message "
                      "- invalid system_user or g.system_user setting")
        return

    author = author or system_user

    item, inbox_rel = Message._new(author, user, subject, body,
                                   ip='0.0.0.0')
    item.distinguished = distinguished
    item.repliable = repliable
    item.display_author = system_user._id
    item._commit()

    try:
        queries.new_message(item, inbox_rel, add_to_sent=add_to_sent)
    except MemcachedError:
        raise MessageError('reddit_inbox')
Esempio n. 10
0
    def _get_sr_restriction(sr):
        """Return a cloudsearch query fragment restricting results to the
        given subreddit-like object, or None when no restriction applies.
        """
        def or_clause(sr_ids):
            # (or sr_id:1 sr_id:2 ...) disjunction over subreddit ids
            return ["(or"] + ["sr_id:%s" % sr_id for sr_id in sr_ids] + [")"]

        if (not sr) or sr == All or isinstance(sr, DefaultSR):
            # front page / "all": nothing to restrict
            return None

        if isinstance(sr, MultiReddit):
            parts = or_clause(sr.sr_ids)
        elif isinstance(sr, DomainSR):
            parts = ["site:'%s'" % sr.domain]
        elif sr == Friends:
            if not c.user_is_loggedin or not c.user.friends:
                return None
            # The query limit is roughly 8k bytes. Limit to 200 friends to
            # avoid getting too close to that limit
            names = ["author_fullname:'%s'" %
                     Account._fullname_from_id36(r2utils.to36(id_))
                     for id_ in c.user.friends[:200]]
            parts = ["(or"] + names + [")"]
        elif isinstance(sr, ModContribSR):
            parts = or_clause(sr.sr_ids)
        elif not isinstance(sr, FakeSubreddit):
            parts = ["sr_id:%s" % sr._id]
        else:
            parts = []

        return " ".join(parts)
Esempio n. 11
0
    def _restrict_sr(sr):
        '''Return a cloudsearch query fragment limiting results to ``sr``.

        Returns None when no restriction applies; raises InvalidQuery when
        the restriction could match nothing (empty multi, no friends).
        '''
        if isinstance(sr, MultiReddit):
            if not sr.sr_ids:
                raise InvalidQuery
            clauses = " ".join("sr_id:%s" % sr_id for sr_id in sr.sr_ids)
            return "(or %s)" % clauses

        if isinstance(sr, DomainSR):
            return "site:'%s'" % sr.domain

        if isinstance(sr, FriendsSR):
            if not c.user_is_loggedin or not c.user.friends:
                raise InvalidQuery
            # The query limit is roughly 8k bytes. Limit to 200 friends to
            # avoid getting too close to that limit
            clauses = " ".join(
                "author_fullname:'%s'" %
                Account._fullname_from_id36(r2utils.to36(id_))
                for id_ in c.user.friends[:200])
            return "(or %s)" % clauses

        if not isinstance(sr, FakeSubreddit):
            return "sr_id:%s" % sr._id

        return None
Esempio n. 12
0
def _promo_email(thing, kind, body = "", **kw):
    """Render a Promo_Email page for ``thing`` and send it to the link
    author's email address."""
    from r2.lib.pages import Promo_Email
    author = Account._byID(thing.author_id, True)
    rendered = Promo_Email(link = thing, kind = kind,
                           body = body, **kw).render(style = "email")
    return _system_email(author.email, rendered, kind, thing = thing,
                         reply_to = "*****@*****.**")
Esempio n. 13
0
def set_up_comment_embed(sr, thing, showedits):
    """Configure the request context for serving an embedded comment.

    Populates c.embed_config with tracker URLs and thing metadata, forces
    the iframe render style, and swaps in an unlogged user so no personal
    state leaks into the embed.
    """
    try:
        author = Account._byID(thing.author_id) if thing.author_id else None
    except NotFound:
        author = None

    iso_timestamp = request.GET.get("created", "")
    uuid = request.GET.get("uuid", "")

    c.embed_config = {
        "eventtracker_url": g.eventtracker_url or "",
        "anon_eventtracker_url": g.anon_eventtracker_url or "",
        "event_clicktracker_url": g.event_clicktracker_url or "",
        "created": iso_timestamp,
        "uuid": uuid,
        "showedits": showedits,
        "thing": {
            "id": thing._id,
            "sr_id": sr._id,
            "sr_name": sr.name,
            "edited": edited_after(thing, iso_timestamp, showedits),
            # author is legitimately None when there is no author_id or the
            # account lookup failed; the original dereferenced
            # author._deleted unconditionally and crashed on such comments
            "deleted": thing.deleted or (author is not None and author._deleted),
        },
        "comment_max_height": 200,
    }

    c.render_style = "iframe"
    c.user = UnloggedUser([c.lang])
    c.user_is_loggedin = False
    c.forced_loggedout = True
Esempio n. 14
0
def update_user(user):
    """Rebuild all cached per-user query listings.

    ``user`` may be an Account, an account name (str), or an account id
    (int).
    """
    if isinstance(user, str):
        user = Account._by_name(user)
    elif isinstance(user, int):
        user = Account._byID(user)

    listings = [
        get_inbox_messages(user),
        get_inbox_comments(user),
        get_sent(user),
        get_liked(user),
        get_disliked(user),
        get_saved(user),
        get_hidden(user),
        get_submitted(user, 'new', 'all'),
        get_comments(user, 'new', 'all'),
    ]
    add_queries(listings)
Esempio n. 15
0
def charge_campaign(link, campaign):
    """Charge the authorized transaction for a campaign, if one is needed.

    An expired/missing authorization voids the stored transaction id and
    logs it.  A successful charge fires the edit hook, moves not-yet-live
    links to pending, and queues a promo email.
    """
    if charged_or_not_needed(campaign):
        return

    user = Account._byID(link.author_id)
    success, reason = authorize.charge_transaction(user, campaign.trans_id,
                                                   campaign._id)

    if not success:
        if reason == authorize.TRANSACTION_NOT_FOUND:
            # authorization hold has expired
            expired_trans_id = campaign.trans_id
            campaign.trans_id = NO_TRANSACTION
            campaign._commit()
            PromotionLog.add(link,
                             'voided expired transaction for %s: (trans_id: %d)'
                             % (campaign, expired_trans_id))
        return

    hooks.get_hook('promote.edit_campaign').call(link=link, campaign=campaign)

    if not is_promoted(link):
        update_promote_status(link, PROMOTE_STATUS.pending)

    emailer.queue_promo(link, campaign.bid, campaign.trans_id)
    PromotionLog.add(link,
                     'auth charge for campaign %s, trans_id: %d' %
                     (campaign._id, campaign.trans_id))
Esempio n. 16
0
def edit_campaign(link, campaign, dates, bid, sr):
    """Update an existing promo campaign's dates, bid, and target.

    Voids any pending transaction when the bid changed, reschedules the
    promotion weights, persists the new values, logs the edit, and makes
    the campaign free for authors with complimentary promos.  On failure
    the error is logged (twice: to g.log and the PromotionLog) and
    re-raised.
    """
    sr_name = sr.name if sr else '' # empty string means target to all
    try:
        # if the bid amount changed, cancel any pending transactions
        if campaign.bid != bid:
            void_campaign(link, campaign)

        # update the schedule
        PromotionWeights.reschedule(link, campaign._id, sr_name,
                                    dates[0], dates[1], bid)

        # update values in the db
        campaign.update(dates[0], dates[1], bid, sr_name, campaign.trans_id, commit=True)

        # record the transaction
        text = 'updated campaign %s. (bid: %0.2f)' % (campaign._id, bid)
        PromotionLog.add(link, text)

        # make it a freebie, if applicable
        author = Account._byID(link.author_id, True)
        if getattr(author, "complimentary_promos", False):
            free_campaign(link, campaign, c.user)

    except Exception, e: # record error and rethrow 
        g.log.error("Failed to update PromoCampaign %s on link %d. Error was: %r" %
                    (campaign._id, link._id, e))
        try: # wrapped in try/except so orig error won't be lost if commit fails
            text = 'update FAILED. (campaign: %s, bid: %.2f)' % (campaign._id,
                                                                 bid)
            PromotionLog.add(link, text)
        except:
            pass
        raise e
Esempio n. 17
0
    def pre(self):
        """Authenticate the request via its OAuth2 bearer token and verify
        the token's grant covers the scopes the action handler requires.
        """
        set_extension(request.environ, "json")
        MinimalController.pre(self)
        require_https()

        try:
            access_token = OAuth2AccessToken.get_token(self._get_bearer_token())
            require(access_token)
            require(access_token.check_valid())
            c.oauth2_access_token = access_token

            account = Account._byID36(access_token.user_id, data=True)
            require(account)
            require(not account._deleted)
            c.oauth_user = account
        except RequirementException:
            self._auth_error(401, "invalid_token")

        handler = self._get_action_handler()
        if handler:
            oauth2_perms = getattr(handler, "oauth2_perms", None)
            if oauth2_perms:
                grant = OAuth2Scope(access_token.scope)
                # a subreddit-scoped grant is only usable on its subreddits
                if grant.subreddit_only and c.site.name not in grant.subreddits:
                    self._auth_error(403, "insufficient_scope")
                needed = set(oauth2_perms['allowed_scopes'])
                if not grant.scopes >= needed:
                    self._auth_error(403, "insufficient_scope")
            else:
                # handlers without declared perms can't be used via OAuth2
                self._auth_error(400, "invalid_request")
Esempio n. 18
0
def upgrade_messages(update_comments=True, update_messages=True, update_trees=True):
    """One-off migration: rebuild unread state and per-user message trees.

    Walks every Message (and optionally Comment) still flagged new,
    re-registers its unread status via queries.set_unread, then recomputes
    the cached message tree for every affected account.
    """
    from r2.lib.db import queries
    from r2.lib import comment_tree, cache
    from r2.models import Account
    from pylons import app_globals as g

    # accounts whose message trees need recomputing
    accounts = set()

    def batch_fn(items):
        # drop per-request caches between batches to bound memory use
        g.reset_caches()
        return items

    if update_messages or update_trees:
        q = Message._query(Message.c.new == True, sort=desc("_date"), data=True)
        for m in fetch_things2(q, batch_fn=batch_fn):
            print m, m._date
            if update_messages:
                # set_unread returns the set of affected account ids
                accounts = accounts | queries.set_unread(m, m.new)
            else:
                accounts.add(m.to_id)
    if update_comments:
        q = Comment._query(Comment.c.new == True, sort=desc("_date"))
        # NOTE(review): magic id cutoff presumably bounds the migration to
        # comments created before a known point -- confirm before reuse.
        q._filter(Comment.c._id < 26152162676)

        for m in fetch_things2(q, batch_fn=batch_fn):
            print m, m._date
            queries.set_unread(m, True)

    print "Precomputing comment trees for %d accounts" % len(accounts)

    for i, a in enumerate(accounts):
        # entries may be Account objects or bare ids depending on the path
        if not isinstance(a, Account):
            a = Account._byID(a)
        print i, a
        comment_tree.user_messages(a)
Esempio n. 19
0
File: api.py Progetto: eerock/reddit
    def process_response(self, res):
        """Parse an authorize.net customer-profile XML response into local
        billing records.

        Sets the CustomerID on the matching Account, registers each
        shipping address and payment profile, and returns
        (account, Profile).
        """
        from r2.models import Account

        # NOTE(review): 'fullname' is read from the response but never used
        # below -- possibly intended for the account-identity check.
        fullname = res.merchantcustomerid.contents[0]
        name = res.description.contents[0]
        customer_id = int(res.customerprofileid.contents[0])
        acct = Account._by_name(name)

        # make sure we are updating the correct account!
        # (presumably _by_name is fuzzy/case-insensitive, making the exact
        # comparison meaningful -- confirm against Account._by_name)
        if acct.name == name:
            CustomerID.set(acct, customer_id)
        else:
            raise AuthorizeNetException, "account name doesn't match authorize.net account"

        # parse the ship-to list, and make sure the Account is up to date
        ship_to = []
        for profile in res.findAll("shiptolist"):
            a = Address.fromXML(profile)
            ShippingAddress.add(acct, a.customerAddressId)
            ship_to.append(a)

        # parse the payment profiles, and ditto
        profiles = []
        for profile in res.findAll("paymentprofiles"):
            a = Address.fromXML(profile)
            cc = CreditCard.fromXML(profile.payment)
            payprof = PaymentProfile(a, cc, int(a.customerPaymentProfileId))
            PayID.add(acct, a.customerPaymentProfileId)
            profiles.append(payprof)

        return acct, Profile(acct, profiles, ship_to)
Esempio n. 20
0
 def _get_sr_restriction(sr):
     '''Return a solr-appropriate query string restricting results to the
     given subreddit-like object; None means no restriction applies.
     Clauses accumulated in ``bq`` are OR-ed together at the end.
     '''
     bq = []
     if (not sr) or sr == All or isinstance(sr, DefaultSR):
         # front page / "all": nothing to restrict
         return None
     elif isinstance(sr, MultiReddit):
         for sr_id in sr.sr_ids:
             bq.append("sr_id:%s" % sr_id)
     elif isinstance(sr, DomainSR):
         bq = ["site:'%s'" % sr.domain]
     elif sr == Friends:
         if not c.user_is_loggedin or not c.user.friends:
             return None
         # NOTE(review): unlike the cloudsearch variant elsewhere in this
         # file, the friends list is not capped here -- a very long list
         # may exceed backend query-size limits; confirm.
         friend_ids = c.user.friends
         friends = ["author_fullname:'%s'" %
                    Account._fullname_from_id36(r2utils.to36(id_))
                    for id_ in friend_ids]
         bq.extend(friends)
     elif isinstance(sr, ModContribSR):
         for sr_id in sr.sr_ids:
             bq.append("sr_id:%s" % sr_id)
     elif not isinstance(sr, FakeSubreddit):
         bq = ["sr_id:%s" % sr._id]
     return ' OR '.join(bq)
Esempio n. 21
0
def new_comment(comment, inbox_rels):
    """Update cached query listings after a comment is created or deleted,
    and mark inbox recipients unread."""
    author = Account._byID(comment.author_id)
    jobs = [get_comments(author, "new", "all")]

    if comment._deleted:
        jobs.append(get_all_comments())
        add_queries(jobs, delete_items=comment)
    else:
        # if comment._spam:
        #    sr = Subreddit._byID(comment.sr_id)
        #    jobs.append(get_spam_comments(sr))
        add_queries(jobs, insert_items=comment)
        amqp.add_item("new_comment", comment._fullname)
        if not g.amqp_host:
            # no queue consumer available: update the tree synchronously
            link = Link._byID(comment.link_id, data=True)
            add_comment_tree(comment, link)

    # note that get_all_comments() is updated by the amqp process
    # r2.lib.db.queries.run_new_comments

    if inbox_rels:
        for inbox_rel in tup(inbox_rels):
            owner = inbox_rel._thing1
            if inbox_rel._name == "inbox":
                listing = get_inbox_comments(owner)
            else:
                listing = get_inbox_selfreply(owner)
            add_queries([listing], insert_items=inbox_rel)
            set_unread(comment, owner, True)
Esempio n. 22
0
    def _gift_using_creddits(self, recipient, months=1, thing_fullname=None, proxying_for=None):
        """Send reddit gold to ``recipient``, paid for from the current
        user's creddit balance (employees gild for free).

        When the current user is a configured proxy-gilding account the
        gift may be attributed to ``proxying_for`` instead.
        """
        with creddits_lock(c.user):
            if not c.user.employee and c.user.gold_creddits < months:
                error = RedditError("INSUFFICIENT_CREDDITS")
                self.on_validation_error(error)

            note = None
            buyer = c.user
            if c.user.name.lower() in g.live_config["proxy_gilding_accounts"]:
                note = "proxy-%s" % c.user.name
                if proxying_for:
                    try:
                        buyer = Account._by_name(proxying_for)
                    except NotFound:
                        # fall back to attributing the current user
                        pass

            send_gift(
                buyer=buyer,
                recipient=recipient,
                months=months,
                days=31 * months,
                signed=False,
                giftmessage=None,
                thing_fullname=thing_fullname,
                note=note,
            )

            # employees gild for free; everyone else pays creddits
            if not c.user.employee:
                c.user.gold_creddits -= months
                c.user._commit()
Esempio n. 23
0
File: ipn.py Progetto: rolmos/reddit
    def process_response(self):
        """Extract a gold purchase from the POSTed form data.

        Returns (status, passthrough, transaction_id, pennies, months),
        where passthrough is an opaque blob describing the purchase.
        """
        data = request.POST

        transaction_id = 'RG%s' % data['transaction_id']
        pennies = int(data['pennies'])
        months = int(data['months'])

        buyer_name = data['buyer']
        goldtype = data['goldtype']
        buyer = Account._by_name(buyer_name)

        blob = {
            'goldtype': goldtype,
            'account_id': buyer._id,
            'account_name': buyer.name,
            'status': 'initialized',
        }

        if goldtype == 'gift':
            # gifts also carry recipient / message / signature fields
            blob['recipient'] = data['recipient']
            blob['giftmessage'] = _force_utf8(data.get('giftmessage', None))
            blob['signed'] = data.get('signed') == 'True'

        return 'succeeded', generate_blob(blob), transaction_id, pennies, months
Esempio n. 24
0
def refund_campaign(link, camp, refund_amount, billable_amount,
        billable_impressions):
    """Refund the unbilled portion of a campaign's transaction.

    Returns True on success, False when authorize.net rejects the refund.
    """
    owner = Account._byID(camp.owner_id, data=True)
    success, reason = authorize.refund_transaction(
        owner, camp.trans_id, camp._id, refund_amount)
    if not success:
        failure_text = '%s $%s refund failed' % (camp, refund_amount)
        PromotionLog.add(link, failure_text)
        g.log.debug(failure_text + ' (reason: %s)' % reason)
        return False

    if billable_impressions:
        text = ('%s completed with $%s billable (%s impressions @ $%s).'
                ' %s refunded.' % (camp, billable_amount,
                                   billable_impressions,
                                   camp.bid_pennies / 100.,
                                   refund_amount))
    else:
        text = ('%s completed with $%s billable. %s refunded' % (camp,
            billable_amount, refund_amount))
    PromotionLog.add(link, text)

    camp.refund_amount = refund_amount
    camp._commit()
    queries.unset_underdelivered_campaigns(camp)
    emailer.refunded_promo(link)
    return True
Esempio n. 25
0
    def authenticate_with_token(self):
        """Authenticate the request from its OAuth2 bearer token and check
        the granted scope covers the action handler's requirements.

        Responds 401 for an invalid/expired token, 403 when the grant
        lacks a required scope, and 400 when the handler declares no
        OAuth2 permissions at all.
        """
        set_extension(request.environ, "json")
        set_content_type()
        require_https()
        require_domain(g.oauth_domain)

        try:
            access_token = OAuth2AccessToken.get_token(self._get_bearer_token())
            require(access_token)
            require(access_token.check_valid())
            c.oauth2_access_token = access_token
            account = Account._byID36(access_token.user_id, data=True)
            require(account)
            require(not account._deleted)
            c.oauth_user = account
        except RequirementException:
            self._auth_error(401, "invalid_token")

        handler = self._get_action_handler()
        if handler:
            oauth2_perms = getattr(handler, "oauth2_perms", None)
            # BUG FIX: this previously read 'if oauth2_perms or True:',
            # making the branch unconditional -- a handler without declared
            # oauth2_perms then crashed on oauth2_perms['allowed_scopes']
            # instead of being rejected as an invalid request (compare the
            # sibling pre() implementation).
            if oauth2_perms:
                grant = OAuth2Scope(access_token.scope)
                required = set(oauth2_perms['allowed_scopes'])
                if not grant.has_access(c.site.name, required):
                    self._auth_error(403, "insufficient_scope")
                c.oauth_scope = grant
            else:
                self._auth_error(400, "invalid_request")
Esempio n. 26
0
def refund_campaign(link, camp, billable_amount, billable_impressions):
    """Refund whatever portion of a campaign's bid was not billed.

    No-op when nothing is owed back; logs and bails if authorize.net
    raises on the refund attempt.
    """
    refund_amount = get_refund_amount(camp, billable_amount)
    if refund_amount <= 0:
        return

    owner = Account._byID(camp.owner_id, data=True)
    try:
        success = authorize.refund_transaction(owner, camp.trans_id, camp._id, refund_amount)
    except authorize.AuthorizeNetException as e:
        failure = "%s $%s refund failed" % (camp, refund_amount)
        PromotionLog.add(link, failure)
        g.log.debug(failure + " (response: %s)" % e)
        return

    summary = ("%s completed with $%s billable (%s impressions @ $%s). %s refunded."
               % (camp, billable_amount, billable_impressions, camp.cpm,
                  refund_amount))
    PromotionLog.add(link, summary)

    camp.refund_amount = refund_amount
    camp._commit()
    unset_underdelivered_campaigns(camp)
    emailer.refunded_promo(link)
Esempio n. 27
0
def submit_all():
    """Resubmit every URL in test_urls to the 'testmedia' subreddit.

    Any pre-existing link for a URL is soft-deleted first so the fresh
    submission replaces it; media scraping and query-queue updates are
    best-effort.  Collects the created links in `links`.
    """
    from r2.models import Subdigg, Account, Link, NotFound
    from r2.lib.media import set_media
    from r2.lib.db import queries
    sr = Subdigg._by_name('testmedia')
    author = Account._by_name('testmedia')
    links = []
    for url in test_urls:
        try:
            # delete any existing version of the link
            l = Link._by_url(url, sr)
            print "Deleting %s" % l
            l._deleted = True
            l._commit()
        except NotFound:
            pass

        # title == url for these test submissions
        l = Link._submit(url, url, author, sr, '0.0.0.0')

        try:
            set_media(l)
        except Exception, e:
            # best-effort media scrape; failures are printed and ignored
            print e

        if g.write_query_queue:
            queries.new_link(l)

        links.append(l)
Esempio n. 28
0
    def _handle_vote(msgs, chan):
        comments = []

        for msg in msgs:
            tag = msg.delivery_tag
            r = pickle.loads(msg.body)

            uid, tid, dir, ip, organic, cheater = r
            voter = Account._byID(uid, data=True)
            votee = Thing._by_fullname(tid, data = True)
            if isinstance(votee, Comment):
                comments.append(votee)

            if not isinstance(votee, (Link, Comment)):
                # I don't know how, but somebody is sneaking in votes
                # for subreddits
                continue

            print (voter, votee, dir, ip, organic, cheater)
            try:
                handle_vote(voter, votee, dir, ip, organic,
                            cheater=cheater, foreground=False)
            except Exception, e:
                print 'Rejecting %r:%r because of %r' % (msg.delivery_tag, r,e)
                chan.basic_reject(msg.delivery_tag, requeue=True)
Esempio n. 29
0
    def setUpClass(cls):
        """Create an in-memory throwaway Account for this test class.

        Saves the current pylons context user so it can be restored in
        teardown.  Raises AccountExists if the randomly generated name is
        already taken; the expected path is the NotFound branch.
        """
        cls._backup_user = c.user
        cls._backup_loggedin = c.user_is_loggedin

        # Create a dummy account for testing with; won't touch the database
        # as long as we don't `._commit()`
        name = "unit_tester_%s" % uuid.uuid4().hex
        cls._password = uuid.uuid4().hex
        try:
            Account._by_name(name)
            raise AccountExists
        except NotFound:
            cls._account = Account(
                name=name,
                password=bcrypt_password(cls._password)
            )
Esempio n. 30
0
def update_karmas():
    for pair in to_update():
        user = Account._byID(pair[0], True)
        sr = Subreddit._byID(pair[1], True)

        print user.name, sr.name
        user.incr_karma('comment', sr, 20)
Esempio n. 31
0
    def _handle_vote(msgs, chan):
        """AMQP consumer: apply pickled vote messages, then refresh the
        cached vote counts of any comments that were voted on."""
        #assert(len(msgs) == 1)
        comments = []
        for msg in msgs:
            # message body is a pickled (uid, tid, dir, ip, organic, cheater)
            r = pickle.loads(msg.body)

            uid, tid, dir, ip, organic, cheater = r
            voter = Account._byID(uid, data=True)
            votee = Thing._by_fullname(tid, data=True)
            if isinstance(votee, Comment):
                comments.append(votee)

            print(voter, votee, dir, ip, organic, cheater)
            handle_vote(voter, votee, dir, ip, organic, cheater=cheater)

        update_comment_votes(comments)
Esempio n. 32
0
def store_keys(key, maxes):
    """Replace a cached 'user-' listing query with precomputed results.

    `key` looks like 'user-<sort>-<time>-<account_id>'; each row in
    `maxes` has the thing fullname as its last element and the sort
    values before it.
    """
    # we're building queries using queries.py, but we could make the
    # queries ourselves if we wanted to avoid the individual lookups
    # for accounts and subreddits.

    # Note that we're only generating the 'sr-' type queries here, but
    # we're also able to process the other listings generated by the
    # old migrate.mr_permacache for convenience

    if key.startswith('user-'):
        acc_str, sort, time, account_id = key.split('-')
        account_id = int(account_id)
        fn = queries.get_submitted
        q = fn(Account._byID(account_id), sort, time)
        q._replace(
            [tuple([item[-1]] + map(float, item[:-1])) for item in maxes])
Esempio n. 33
0
def new_campaign(link, dates, bid, cpm, target, frequency_cap,
                 frequency_cap_duration, priority, location, platform,
                 mobile_os):
    """Create a PromoCampaign for `link`, index its weights, and log it.

    CPM-priced campaigns by authors flagged `complimentary_promos` are
    made free immediately.  Fires the promote.new_campaign hook and
    returns the new campaign.
    """
    campaign = PromoCampaign.create(link, target, bid, cpm, dates[0], dates[1],
                                    frequency_cap, frequency_cap_duration,
                                    priority, location, platform, mobile_os)
    PromotionWeights.add(link, campaign)
    PromotionLog.add(link, 'campaign %s created' % campaign._id)

    if campaign.priority.cpm:
        author = Account._byID(link.author_id, data=True)
        if getattr(author, "complimentary_promos", False):
            free_campaign(link, campaign, c.user)

    hooks.get_hook('promote.new_campaign').call(link=link, campaign=campaign)
    return campaign
Esempio n. 34
0
def void_campaign(link, campaign, reason):
    """Void the pending payment transaction for `campaign`, if any.

    Resets the campaign's trans_id to NO_TRANSACTION, logs the void, and
    emails the promoter unless the transaction id indicates a freebie
    (non-positive transaction ids are not real payments).
    """
    transactions = get_transactions(link, [campaign])
    bid_record = transactions.get(campaign._id)
    if bid_record:
        a = Account._byID(link.author_id)
        authorize.void_transaction(a, bid_record.transaction, campaign._id)
        campaign.trans_id = NO_TRANSACTION
        campaign._commit()
        text = ('voided transaction for %s: (trans_id: %d)' %
                (campaign, bid_record.transaction))
        PromotionLog.add(link, text)

        if bid_record.transaction > 0:
            # notify the user that the transaction was voided if it was not
            # a freebie
            emailer.void_payment(link, campaign, reason)
Esempio n. 35
0
    def _handle_vote(msg):
        """AMQP consumer: apply one pickled vote message.

        Comment votes also refresh the comment's cached vote counts;
        anything that is not a Link or Comment is ignored.
        """
        #assert(len(msgs) == 1)
        r = pickle.loads(msg.body)

        uid, tid, dir, ip, organic, cheater = r
        voter = Account._byID(uid, data=True)
        votee = Thing._by_fullname(tid, data = True)
        if isinstance(votee, Comment):
            update_comment_votes([votee])

        # I don't know how, but somebody is sneaking in votes
        # for subreddits
        if isinstance(votee, (Link, Comment)):
            print (voter, votee, dir, ip, organic, cheater)
            handle_vote(voter, votee, dir, ip, organic,
                        cheater = cheater, foreground=True)
Esempio n. 36
0
def new_message(message, inbox_rels):
    """Update cached inbox/sent listings for a newly delivered message.

    ModeratorInbox relations update the subreddit's message listing;
    personal ones update the sender's sent listing and the recipient's
    inbox.  Each recipient is marked unread, and the message is added to
    the comment-tree message index.
    """
    from r2.lib.comment_tree import add_message

    from_user = Account._byID(message.author_id)
    for inbox_rel in tup(inbox_rels):
        to = inbox_rel._thing1
        # moderator message
        if isinstance(inbox_rel, ModeratorInbox):
            add_queries([get_subreddit_messages(to)], insert_items=inbox_rel)
        # personal message
        else:
            add_queries([get_sent(from_user)], insert_items=message)
            add_queries([get_inbox_messages(to)], insert_items=inbox_rel)
        set_unread(message, to, True)

    add_message(message)
Esempio n. 37
0
def new_campaign(link, dates, bid, cpm, sr, priority):
    """Create a PromoCampaign targeting `sr` (or everywhere when sr is
    falsy), schedule its weights, and log the creation.

    CPM campaigns by `complimentary_promos` authors become freebies;
    non-CPM campaigns fire promote.new_charge right away since they are
    never charged.  Returns the new campaign.
    """
    # empty string for sr_name means target to all
    sr_name = sr.name if sr else ""
    campaign = PromoCampaign._new(link, sr_name, bid, cpm, dates[0], dates[1],
                                  priority)
    PromotionWeights.add(link, campaign._id, sr_name, dates[0], dates[1], bid)
    PromotionLog.add(link, 'campaign %s created' % campaign._id)

    if campaign.priority.cpm:
        author = Account._byID(link.author_id, data=True)
        if getattr(author, "complimentary_promos", False):
            free_campaign(link, campaign, c.user)
    else:
        # non-cpm campaigns are never charged, so we need to fire the hook now
        hooks.get_hook('promote.new_charge').call(link=link, campaign=campaign)
    return campaign
Esempio n. 38
0
def message_notification_email(data):
    """Queues a system email for a new message notification.

    `data` maps queue keys to JSON blobs carrying the recipient id36
    ('to'), a comment fullname ('comment'), the sender name ('from') and
    a permalink.  A shared daily counter caps volume; exceeding it raises
    rather than silently dropping mail.
    """
    from r2.lib.pages import MessageNotificationEmail

    MAX_EMAILS_PER_DAY = 1000
    MESSAGE_THROTTLE_KEY = 'message_notification_emails'

    # If our counter's expired, initialize it again.
    g.cache.add(MESSAGE_THROTTLE_KEY, 0, time=24 * 60 * 60)

    for datum in data.itervalues():
        datum = json.loads(datum)
        user = Account._byID36(datum['to'], data=True)
        comment = Comment._by_fullname(datum['comment'], data=True)

        # In case a user has enabled the preference while it was enabled for
        # them, but we've since turned it off.  We need to explicitly state the
        # user because we're not in the context of an HTTP request from them.
        if not feature.is_enabled('orangereds_as_emails', user=user):
            continue

        if g.cache.get(MESSAGE_THROTTLE_KEY) > MAX_EMAILS_PER_DAY:
            raise Exception(
                'Message notification emails: safety limit exceeded!')

        # signed token lets the recipient unsubscribe without logging in
        mac = generate_notification_email_unsubscribe_token(
            datum['to'],
            user_email=user.email,
            user_password_hash=user.password)
        base = g.https_endpoint or g.origin
        unsubscribe_link = base + '/mail/unsubscribe/%s/%s' % (datum['to'],
                                                               mac)

        templateData = {
            'sender_username': datum.get('from', ''),
            'comment': comment,
            'permalink': datum['permalink'],
            'unsubscribe_link': unsubscribe_link,
        }
        _system_email(
            user.email,
            MessageNotificationEmail(**templateData).render(style='email'),
            Email.Kind.MESSAGE_NOTIFICATION,
            from_address=g.notification_email)

        g.stats.simple_event('email.message_notification.queued')
        g.cache.incr(MESSAGE_THROTTLE_KEY)
Esempio n. 39
0
def edit_campaign(link, campaign, dates, bid, cpm, target, priority, location):
    """Apply edits to an existing campaign and persist them.

    Every changed field is tracked for the PromotionLog entry.  A bid
    change voids any pending transaction first.  After committing, the
    PromotionWeights index is rescheduled and, for CPM campaigns whose
    author is flagged `complimentary_promos`, the campaign is made free.
    Fires the promote.edit_campaign hook.
    """
    changed = {}
    if bid != campaign.bid:
        # if the bid amount changed, cancel any pending transactions
        void_campaign(link, campaign, reason='changed_bid')
        changed['bid'] = ("$%0.2f" % campaign.bid, "$%0.2f" % bid)
        campaign.bid = bid
    if dates[0] != campaign.start_date or dates[1] != campaign.end_date:
        original = '%s to %s' % (campaign.start_date, campaign.end_date)
        edited = '%s to %s' % (dates[0], dates[1])
        changed['dates'] = (original, edited)
        campaign.start_date = dates[0]
        campaign.end_date = dates[1]
    if cpm != campaign.cpm:
        changed['cpm'] = (campaign.cpm, cpm)
        campaign.cpm = cpm
    if target != campaign.target:
        changed['target'] = (campaign.target, target)
        campaign.target = target
    if priority != campaign.priority:
        changed['priority'] = (campaign.priority.name, priority.name)
        campaign.priority = priority
    if location != campaign.location:
        changed['location'] = (campaign.location, location)
        campaign.location = location

    change_strs = map(lambda t: '%s: %s -> %s' % (t[0], t[1][0], t[1][1]),
                      changed.iteritems())
    change_text = ', '.join(change_strs)
    campaign._commit()

    # update the index
    PromotionWeights.reschedule(link, campaign._id,
                                campaign.target.subreddit_names, dates[0],
                                dates[1], bid)

    if campaign.priority.cpm:
        # make it a freebie, if applicable
        author = Account._byID(link.author_id, True)
        if getattr(author, "complimentary_promos", False):
            free_campaign(link, campaign, c.user)

    # record the changes
    if change_text:
        PromotionLog.add(link, 'edited %s: %s' % (campaign, change_text))

    hooks.get_hook('promote.edit_campaign').call(link=link, campaign=campaign)
Esempio n. 40
0
def new_comment(comment, inbox_rels):
    """Update the cached query listings affected by a new or deleted
    comment.

    Deleted comments are removed from the author/subreddit listings; new
    ones are inserted (spam listings too when flagged) and queued on
    amqp.  Inbox relations update the owner's inbox or selfreply listing
    and the unread state.
    """
    author = Account._byID(comment.author_id)
    job = [
        get_comments(author, 'new', 'all'),
        get_comments(author, 'top', 'all'),
        get_comments(author, 'controversial', 'all')
    ]

    sr = Subreddit._byID(comment.sr_id)

    if comment._deleted:
        job_key = "delete_items"
        job.append(get_sr_comments(sr))
        job.append(get_all_comments())
    else:
        job_key = "insert_items"
        if comment._spam:
            job.append(get_spam_comments(sr))
        amqp.add_item('new_comment', comment._fullname)
        if not g.amqp_host:
            # no queue available: build the comment tree synchronously
            add_comment_tree([comment])

    job_dict = {job_key: comment}
    add_queries(job, **job_dict)

    # note that get_all_comments() is updated by the amqp process
    # r2.lib.db.queries.run_new_comments (to minimise lock contention)

    if inbox_rels:
        for inbox_rel in tup(inbox_rels):
            inbox_owner = inbox_rel._thing1
            job_dict = {job_key: inbox_rel}
            if inbox_rel._name == "inbox":
                inbox_func = get_inbox_comments
                unread_func = get_unread_comments
            elif inbox_rel._name == "selfreply":
                inbox_func = get_inbox_selfreply
                unread_func = get_unread_selfreply
            else:
                raise ValueError("wtf is " + inbox_rel._name)

            add_queries([inbox_func(inbox_owner)], **job_dict)

            if comment._deleted:
                add_queries([unread_func(inbox_owner)], **job_dict)
            else:
                set_unread(comment, inbox_owner, True)
Esempio n. 41
0
def new_vote(vote, foreground=False):
    """Update cached listings after a vote on a link or comment.

    Valid votes on non-spam, non-deleted things refresh the author's and
    subreddit's sorted listings (plus per-domain listings for links).
    Link votes always update the voter's liked/disliked listings in both
    directions, since this may be a changed vote.
    """
    user = vote._thing1
    item = vote._thing2

    if not isinstance(item, (Link, Comment)):
        return

    if vote.valid_thing and not item._spam and not item._deleted:
        sr = item.subreddit_slow
        results = []

        author = Account._byID(item.author_id)
        for sort in ('hot', 'top', 'controversial', 'new'):
            if isinstance(item, Link):
                results.append(get_submitted(author, sort, 'all'))
            if isinstance(item, Comment):
                results.append(get_comments(author, sort, 'all'))

        if isinstance(item, Link):
            # don't do 'new', because that was done by new_link, and
            # the time-filtered versions of top/controversial will be
            # done by mr_top
            results.extend([
                get_links(sr, 'hot', 'all'),
                get_links(sr, 'top', 'all'),
                get_links(sr, 'controversial', 'all'),
            ])

            for domain in utils.UrlParser(item.url).domain_permutations():
                for sort in ("hot", "top", "controversial"):
                    results.append(get_domain_links(domain, sort, "all"))

        add_queries(results, insert_items=item, foreground=foreground)

    if isinstance(item, Link):
        # must update both because we don't know if it's a changed
        # vote
        with CachedQueryMutator() as m:
            if vote._name == '1':
                m.insert(get_liked(user), [vote])
                m.delete(get_disliked(user), [vote])
            elif vote._name == '-1':
                m.delete(get_liked(user), [vote])
                m.insert(get_disliked(user), [vote])
            else:
                m.delete(get_liked(user), [vote])
                m.delete(get_disliked(user), [vote])
Esempio n. 42
0
    def get_details(cls, thing, voters=None):
        """Return Vote objects for all (or only the given) voters on
        `thing`, sorted by vote date.

        Details come from VoteDetailsByLink/VoteDetailsByComment keyed by
        the thing's id36; voter IPs come from VoterIPByThing keyed by the
        thing's fullname.  Returns [] when no details row exists.
        Raises ValueError for things that are not links or comments.
        """
        from r2.models import Comment, Link
        if isinstance(thing, Link):
            details_cls = VoteDetailsByLink
        elif isinstance(thing, Comment):
            details_cls = VoteDetailsByComment
        else:
            raise ValueError

        voter_id36s = None
        if voters:
            voter_id36s = [voter._id36 for voter in voters]

        try:
            row = details_cls._byID(thing._id36, properties=voter_id36s)
            raw_details = row._values()
        except tdb_cassandra.NotFound:
            return []

        try:
            row = VoterIPByThing._byID(thing._fullname, properties=voter_id36s)
            ips = row._values()
        except tdb_cassandra.NotFound:
            # IPs are optional; votes are still returned without them
            ips = {}

        details = []
        for voter_id36, json_data in raw_details.iteritems():
            data = json.loads(json_data)
            data = cls.convert_old_details(data)

            user = Account._byID36(voter_id36, data=True)
            direction = Vote.deserialize_direction(data.pop("direction"))
            date = datetime.utcfromtimestamp(data.pop("date"))
            effects = data.pop("effects")
            data["ip"] = ips.get(voter_id36)

            vote = Vote(user,
                        thing,
                        direction,
                        date,
                        data,
                        effects,
                        get_previous_vote=False)
            details.append(vote)
        details.sort(key=lambda d: d.date)

        return details
Esempio n. 43
0
def new_comment(comment, inbox_rels):
    """Update cached query listings for a new or deleted comment,
    batching all cached-query changes in one CachedQueryMutator.

    Deleted comments are removed from the listings; new ones are
    inserted (spam / spam-filtered listings too when applicable) and
    queued on amqp.  Inbox relations update the owner's inbox or
    selfreply listing and the unread state.
    """
    author = Account._byID(comment.author_id)
    job = [get_comments(author, 'new', 'all'),
           get_comments(author, 'top', 'all'),
           get_comments(author, 'controversial', 'all')]

    sr = Subreddit._byID(comment.sr_id)

    with CachedQueryMutator() as m:
        if comment._deleted:
            job_key = "delete_items"
            job.append(get_sr_comments(sr))
            job.append(get_all_comments())
        else:
            job_key = "insert_items"
            if comment._spam:
                m.insert(get_spam_comments(sr), [comment])
            if was_spam_filtered(comment):
                m.insert(get_spam_filtered_comments(sr), [comment])
            amqp.add_item('new_comment', comment._fullname)
            if not g.amqp_host:
                # no queue available: build the comment tree synchronously
                add_comment_tree([comment])

        job_dict = { job_key: comment }
        add_queries(job, **job_dict)

        # note that get_all_comments() is updated by the amqp process
        # r2.lib.db.queries.run_new_comments (to minimise lock contention)

        if inbox_rels:
            for inbox_rel in tup(inbox_rels):
                inbox_owner = inbox_rel._thing1
                if inbox_rel._name == "inbox":
                    query = get_inbox_comments(inbox_owner)
                elif inbox_rel._name == "selfreply":
                    query = get_inbox_selfreply(inbox_owner)
                else:
                    raise ValueError("wtf is " + inbox_rel._name)

                if not comment._deleted:
                    m.insert(query, [inbox_rel])
                else:
                    m.delete(query, [inbox_rel])

                set_unread(comment, inbox_owner,
                           unread=not comment._deleted, mutator=m)
Esempio n. 44
0
def edit_campaign(link, campaign, dates, bid, cpm, sr, priority, location):
    """Apply edits to an existing subreddit-targeted campaign.

    Changed fields are collected for the PromotionLog entry; a bid change
    voids any pending transaction.  The PromotionWeights schedule and the
    campaign row are then updated, and CPM campaigns by
    `complimentary_promos` authors are made free.  Fires the
    promote.edit_campaign hook.
    """
    sr_name = sr.name if sr else '' # empty string means target to all

    changed = {}
    if bid != campaign.bid:
        changed['bid'] = ("$%0.2f" % campaign.bid, "$%0.2f" % bid)
    if dates[0] != campaign.start_date or dates[1] != campaign.end_date:
        original = '%s to %s' % (campaign.start_date, campaign.end_date)
        edited = '%s to %s' % (dates[0], dates[1])
        changed['dates'] = (original, edited)
    if cpm != campaign.cpm:
        changed['cpm'] = (campaign.cpm, cpm)
    if sr_name != campaign.sr_name:
        format_sr_name = (lambda sr_name: '/r/%s' % sr_name if sr_name
                                                            else '<frontpage>')
        changed['sr_name'] = map(format_sr_name, (campaign.sr_name, sr_name))
    if priority != campaign.priority:
        changed['priority'] = (campaign.priority.name, priority.name)

    change_strs = map(lambda t: '%s: %s -> %s' % (t[0], t[1][0], t[1][1]),
                      changed.iteritems())
    change_text = ', '.join(change_strs)

    # if the bid amount changed, cancel any pending transactions
    if campaign.bid != bid:
        void_campaign(link, campaign, reason='changed_bid')

    # update the schedule
    PromotionWeights.reschedule(link, campaign._id, sr_name,
                                dates[0], dates[1], bid)

    # update values in the db
    campaign.update(dates[0], dates[1], bid, cpm, sr_name,
                    campaign.trans_id, priority, location, commit=True)

    if campaign.priority.cpm:
        # make it a freebie, if applicable
        author = Account._byID(link.author_id, True)
        if getattr(author, "complimentary_promos", False):
            free_campaign(link, campaign, c.user)

    # record the changes
    if change_text:
        PromotionLog.add(link, 'edited %s: %s' % (campaign, change_text))

    hooks.get_hook('promote.edit_campaign').call(link=link, campaign=campaign)
Esempio n. 45
0
def store_keys(key, maxes):
    """Replace a cached listing query with precomputed map/reduce output.

    The key prefix selects the listing type ('user-', 'sr-', 'domain/',
    or a userrel kind such as 'liked-'); each row in `maxes` carries the
    thing fullname last, preceded by its sort values.
    """
    # we're building queries using queries.py, but we could make the
    # queries ourselves if we wanted to avoid the individual lookups
    # for accounts and subreddits.

    # Note that we're only generating the 'sr-' type queries here, but
    # we're also able to process the other listings generated by the
    # old migrate.mr_permacache for convenience

    userrel_fns = dict(liked=queries.get_liked,
                       disliked=queries.get_disliked,
                       saved=queries.get_saved,
                       hidden=queries.get_hidden)

    if key.startswith('user-'):
        acc_str, keytype, account_id = key.split('-')
        account_id = int(account_id)
        fn = queries._get_submitted if keytype == 'submitted' else queries._get_comments
        q = fn(account_id, 'new', 'all')
        q._replace([(fname, float(timestamp)) for (timestamp, fname) in maxes])

    elif key.startswith('sr-'):
        sr_str, sort, time, sr_id = key.split('-')
        sr_id = int(sr_id)

        if sort == 'controversy':
            # I screwed this up in the mapper and it's too late to fix
            # it
            sort = 'controversial'

        q = queries._get_links(sr_id, sort, time)
        q._replace(
            [tuple([item[-1]] + map(float, item[:-1])) for item in maxes])
    elif key.startswith('domain/'):
        d_str, sort, time, domain = key.split('/')
        q = queries.get_domain_links(domain, sort, time)
        q._replace(
            [tuple([item[-1]] + map(float, item[:-1])) for item in maxes])

    elif key.split('-')[0] in userrel_fns:
        key_type, account_id = key.split('-')
        account_id = int(account_id)
        fn = userrel_fns[key_type]
        q = fn(Account._byID(account_id))
        q._replace(
            [tuple([item[-1]] + map(float, item[:-1])) for item in maxes])
Esempio n. 46
0
    def author_spammer(self, things, spam):
        """Adjust each author's 'spammer' counter for the given things.

        The counter is incremented per thing when `spam` is true and
        decremented otherwise; autobanned items (ban_info['auto'], which
        defaults to True) are excluded entirely.
        """
        things_by_author = {}
        for thing in things:
            if not hasattr(thing, 'author_id'):
                continue
            # only count items that were not autobanned
            if getattr(thing, 'ban_info', {}).get('auto', True):
                continue
            things_by_author.setdefault(thing.author_id, []).append(thing)

        if not things_by_author:
            return

        authors = Account._byID(things_by_author.keys(), data=True,
                                return_dict=True)
        for author_id, author_things in things_by_author.iteritems():
            delta = len(author_things)
            if not spam:
                delta = -delta
            authors[author_id]._incr('spammer', delta)
Esempio n. 47
0
    def new(cls, user, thing, reason=None):
        """File a report by `user` against `thing`, deduplicated per user.

        Returns the existing report when this user has reported the thing
        before.  Otherwise increments the thing's report counter and its
        author's 'reported' counter, commits the new Report, updates the
        reports queue, and auto-accepts the report if the thing is
        already marked spam.
        """
        from r2.lib.db import queries

        # check if this report exists already!
        rel = cls.rel(user, thing)
        q = rel._fast_query(user, thing, ['-1', '0', '1'])
        q = [report for (tupl, report) in q.iteritems() if report]
        if q:
            # stop if we've seen this before, so that we never get the
            # same report from the same user twice
            oldreport = q[0]
            g.log.debug("Ignoring duplicate report %s" % oldreport)
            return oldreport

        kw = {}
        if reason:
            kw['reason'] = reason

        r = Report(user, thing, '0', **kw)
        if not thing._loaded:
            thing._load()

        # mark item as reported
        try:
            thing._incr(cls._field)
        except (ValueError, TypeError):
            g.log.error("%r has bad field %r = %r" %
                        (thing, cls._field,
                         getattr(thing, cls._field, "(nonexistent)")))
            raise

        r._commit()

        if hasattr(thing, 'author_id'):
            author = Account._byID(thing.author_id, data=True)
            author._incr('reported')

        if not getattr(thing, "ignore_reports", False):
            # update the reports queue if it exists
            queries.new_report(thing, r)

            # if the thing is already marked as spam, accept the report
            if thing._spam:
                cls.accept(thing)

        return r
Esempio n. 48
0
    def __init__(self, event, listing, show_sidebar, report_type):
        """Build the live-update event page wrapper.

        Looks up the event's contributor accounts and exposes them sorted
        by name; the other-discussions sidebar module is only built when
        `show_sidebar` is set.
        """
        self.event = event
        self.listing = listing
        if show_sidebar:
            self.discussions = LiveUpdateOtherDiscussions()
        self.show_sidebar = show_sidebar

        contributor_accounts = Account._byID(event.contributors.keys(),
                                             data=True, return_dict=False)
        self.contributors = sorted((LiveUpdateAccount(e)
                                   for e in contributor_accounts),
                                   key=lambda e: e.name)

        self.report_types = REPORT_TYPES
        self.report_type = report_type

        Templated.__init__(self)
Esempio n. 49
0
def comment_exists(post, comment):
    """Return True if an imported comment with the same normalized body
    already exists on `post`.

    Bodies are compared after stripping HTML markup (via BeautifulSoup)
    and removing every non-alphanumeric character; only comments flagged
    ob_imported are considered.
    """
    # Check if this comment already exists using brutal compare on content
    # BeautifulSoup is used to parse as HTML in order to remove markup
    content = ''.join(BeautifulSoup(comment['body']).findAll(text=True))
    key = re_non_alphanum.sub('', content)
    existing_comments = Comment._query(Comment.c.link_id == post._id, Comment.c.ob_imported == True, data=True)
    for existing_comment in existing_comments:
        author = Account._byID(existing_comment.author_id, data=True)
        content = ''.join(BeautifulSoup(existing_comment.body).findAll(text=True))
        existing_key = re_non_alphanum.sub('', content)
        if key == existing_key:
            print " Skipping existing %s" % comment_excerpt(comment)
            return True
        # else:
        #     print "%s *|NOT|* %s" % (key, existing_key)

    return False
Esempio n. 50
0
def new_vote(vote):
    """Update cached listings after a vote on a link.

    Non-link votes are ignored.  For valid votes on live links, the
    subreddit and per-domain listings are refreshed; the author's own
    listings are refreshed only for gold accounts.  The voter's
    liked/disliked listings are updated in both directions, since this
    may be a changed vote.
    """
    user = vote._thing1
    item = vote._thing2

    if not isinstance(item, Link):
        return

    if vote.valid_thing and not item._spam and not item._deleted:
        sr = item.subreddit_slow
        results = []
        author = Account._byID(item.author_id)
        if author.gold:
            for sort in ('hot', 'top', 'controversial', 'new'):
                if isinstance(item, Link):
                    results.append(get_submitted(author, sort, 'all'))
                if isinstance(item, Comment):
                    results.append(get_comments(author, sort, 'all'))

        # don't do 'new', because that was done by new_link, and the
        # time-filtered versions of top/controversial will be done by
        # mr_top
        results.extend([
            get_links(sr, 'hot', 'all'),
            get_links(sr, 'top', 'all'),
            get_links(sr, 'controversial', 'all'),
        ])

        for domain in utils.UrlParser(item.url).domain_permutations():
            for sort in ("hot", "top", "controversial"):
                results.append(get_domain_links(domain, sort, "all"))

        add_queries(results, insert_items=item)

    vote._fast_query_timestamp_touch(user)

    #must update both because we don't know if it's a changed vote
    if vote._name == '1':
        add_queries([get_liked(user)], insert_items=vote)
        add_queries([get_disliked(user)], delete_items=vote)
    elif vote._name == '-1':
        add_queries([get_liked(user)], delete_items=vote)
        add_queries([get_disliked(user)], insert_items=vote)
    else:
        add_queries([get_liked(user)], delete_items=vote)
        add_queries([get_disliked(user)], delete_items=vote)
Esempio n. 51
0
    def process_message(msg):
        """Consume one vote message from the queue and commit the vote.

        Payloads rejected by the vote.validate_vote_data hook are
        dropped; old-style payloads are converted to the new format
        first.  A per user/thing lock serializes concurrent votes on the
        same pair, and votes on invalid thing types are skipped.
        """
        timer = g.stats.get_timer("new_voting.%s" % queue)
        timer.start()

        vote_data = json.loads(msg.body)
        hook = hooks.get_hook('vote.validate_vote_data')
        if hook.call_until_return(msg=msg, vote_data=vote_data) is False:
            # Corrupt records in the queue. Ignore them.
            print "Ignoring invalid vote by %s on %s %s" % (
                vote_data.get('user_id', '<unknown>'),
                vote_data.get('thing_fullname', '<unknown>'), vote_data)
            return

        # if it's an old-style vote, convert to the new format
        if "uid" in vote_data:
            vote_data = convert_old_vote_data(vote_data, msg.timestamp)

        user = Account._byID(vote_data.pop("user_id"), data=True)
        thing = Thing._by_fullname(vote_data.pop("thing_fullname"), data=True)

        timer.intermediate("preamble")

        lock_key = "vote-%s-%s" % (user._id36, thing._fullname)
        with g.make_lock("voting", lock_key, timeout=5):
            print "Processing vote by %s on %s %s" % (user, thing, vote_data)

            try:
                vote = Vote(
                    user,
                    thing,
                    direction=vote_data["direction"],
                    date=datetime.utcfromtimestamp(vote_data["date"]),
                    data=vote_data["data"],
                    event_data=vote_data.get("event_data"),
                )
            except TypeError as e:
                # a vote on an invalid type got in the queue, just skip it
                g.log.exception("Invalid type: %r", e.message)
                return

            timer.intermediate("create_vote_obj")

            vote.commit()

            timer.flush()
Esempio n. 52
0
def new_campaign(link, dates, target, frequency_cap, priority, location,
                 platform, mobile_os, ios_devices, ios_version_range,
                 android_devices, android_version_range, total_budget_pennies,
                 cost_basis, bid_pennies):
    """Create a PromoCampaign for `link`, index its weights, and log it.

    Non-house campaigns by authors flagged `complimentary_promos` are
    made free immediately.  Fires the promote.new_campaign hook and
    returns the new campaign.
    """
    campaign = PromoCampaign.create(
        link, target, dates[0], dates[1], frequency_cap, priority, location,
        platform, mobile_os, ios_devices, ios_version_range, android_devices,
        android_version_range, total_budget_pennies, cost_basis, bid_pennies)
    PromotionWeights.add(link, campaign)
    PromotionLog.add(link, 'campaign %s created' % campaign._id)

    if not campaign.is_house:
        author = Account._byID(link.author_id, data=True)
        if getattr(author, "complimentary_promos", False):
            free_campaign(link, campaign, c.user)

    hooks.get_hook('promote.new_campaign').call(link=link, campaign=campaign)
    return campaign
Esempio n. 53
0
def charge_pending(offset=1):
    """Charge the authorized transaction for each accepted campaign.

    Campaigns already charged, or whose charge attempt fails, are
    skipped.  After a successful charge the link is bumped to the
    pending promote status (unless already promoted), the promoter is
    emailed, and the charge is logged.  Per-campaign errors are printed
    and do not abort the rest of the batch.
    """
    for l, camp, weight in accepted_campaigns(offset=offset):
        user = Account._byID(l.author_id)
        try:
            if (authorize.is_charged_transaction(camp.trans_id, camp._id) or not
                authorize.charge_transaction(user, camp.trans_id, camp._id)):
                continue

            # both promoted and unpromoted links get the queued-promo
            # email; only unpromoted ones need their status updated
            if not is_promoted(l):
                set_promote_status(l, PROMOTE_STATUS.pending)
            emailer.queue_promo(l, camp.bid, camp.trans_id)
            text = ('auth charge for campaign %s, trans_id: %d' %
                    (camp._id, camp.trans_id))
            PromotionLog.add(l, text)
        except Exception:
            # was a bare except:, which would also swallow
            # KeyboardInterrupt/SystemExit
            print "Error on %s, campaign %s" % (l, camp._id)
Esempio n. 54
0
def new_link(link):
    "Called on the submission and deletion of links"
    sr = Subreddit._byID(link.sr_id)
    author = Account._byID(link.author_id)

    # refresh the subreddit's 'new' listing and the author's submissions;
    # spam links also go to the subreddit's spam listing
    results = [get_links(sr, 'new', 'all')]
    # we don't have to do hot/top/controversy because new_vote will do
    # that

    results.append(get_submitted(author, 'new', 'all'))
    if link._spam:
        results.append(get_spam_links(sr))

    # only 'new' qualifies for insertion, which will be done in
    # run_new_links
    add_queries(results, insert_items=link)

    amqp.add_item('new_link', link._fullname)
Esempio n. 55
0
def backfill_deleted_accounts(resume_id=None):
    """Re-enqueue an 'account_deleted' message for every deleted account.

    If *resume_id* is given, only accounts with a smaller id are
    processed, so an interrupted run can be resumed.
    """
    deleted = Account._query(Account.c._deleted == True, sort=desc('_date'))
    if resume_id:
        deleted._filter(Account.c._id < resume_id)

    for n, acct in enumerate(progress(fetch_things2(deleted))):
        # Don't kill the rabbit! Wait for the relevant queues to calm down.
        if n % 1000 == 0:
            while True:
                del_len = get_queue_length('del_account_q')
                cs_len = get_queue_length('cloudsearch_changes')
                if del_len <= 1000 and cs_len <= 10000:
                    break
                sys.stderr.write("CS: %d, DEL: %d\n" % (cs_len, del_len))
                sys.stderr.flush()
                time.sleep(1)
        amqp.add_item('account_deleted', acct._fullname)
Esempio n. 56
0
def submit_link(user, subreddit, title, url, thumb_url):
    account = Account._by_name(user)
    subreddit = Subreddit._by_name(subreddit)
    ip = '127.0.0.1'

    # submit the link
    link = Link._submit(title, url, account, subreddit, ip, spam=False)

    # force the thumbnail before scraper_q gets in the mix
    image_data = urllib.urlopen(thumb_url).read()
    force_thumbnail(link, image_data)

    # various backend processing things
    queries.queue_vote(account, link, True, ip)
    queries.new_link(link)
    queries.changed(link)

    print link.make_permalink_slow()
Esempio n. 57
0
    def __init__(self):
        """Build the top-user listings from the cached 'stats' entry.

        The listings are exposed as lazy generators; when nothing is
        cached they all fall back to empty tuples.
        """
        Wrapped.__init__(self)
        cached = cache.get('stats')
        if not cached:
            self.top_users = self.top_day = self.top_week = ()
            return

        top_users, top_day, top_week = cached

        # batch-load the account objects for every listed user id
        ids = list(top_users)
        ids += [entry[0] for entry in top_day]
        ids += [entry[0] for entry in top_week]
        users = Account._byID(ids, data=True)

        self.top_users = (users[uid] for uid in top_users)
        self.top_day = ((users[entry[0]], entry[1]) for entry in top_day)
        self.top_week = ((users[entry[0]], entry[1]) for entry in top_week)
Esempio n. 58
0
    def from_queue(self, max_date, batch_limit=50, kind=None):
        """Yield queued email jobs older than *max_date*, in uid order.

        Rows are fetched from ``self.queue_table`` in batches of
        *batch_limit*, paginating on the ``uid`` column.  If *kind* is
        given, only rows of that email kind are selected.  For each row
        the sending account and the referenced thing are batch-loaded,
        and a tuple of ``(account, thing, to_addr, from_name, date, ip,
        kind, msg_hash, body, fr_addr, reply_to)`` is yielded.
        """
        from r2.models import Account, Thing
        keep_trying = True
        min_id = None
        s = self.queue_table
        while keep_trying:
            where = [s.c.date < max_date]
            if min_id:
                # resume after the last uid seen in the previous batch
                where.append(s.c.uid > min_id)
            if kind:
                where.append(s.c.kind == kind)

            res = sa.select([
                s.c.to_addr, s.c.account_id, s.c.from_name, s.c.fullname,
                s.c.body, s.c.kind, s.c.ip, s.c.date, s.c.uid, s.c.msg_hash,
                s.c.fr_addr, s.c.reply_to
            ],
                            sa.and_(*where),
                            order_by=s.c.uid,
                            limit=batch_limit).execute()
            res = res.fetchall()

            if not res: break

            # batch load user accounts (only positive account_ids are
            # looked up -- presumably non-positive means "no account";
            # those rows yield accts.get(...) == None)
            aids = [x[1] for x in res if x[1] > 0]
            accts = Account._byID(aids, data=True,
                                  return_dict=True) if aids else {}

            # batch load things
            tids = [x[3] for x in res if x[3]]
            things = Thing._by_fullname(tids, data=True,
                                        return_dict=True) if tids else {}

            # get the lower bound date for next iteration
            min_id = max(x[8] for x in res)

            # did we not fetch them all?
            keep_trying = (len(res) == batch_limit)

            for (addr, acct, fname, fulln, body, kind, ip, date, uid, msg_hash,
                 fr_addr, reply_to) in res:
                yield (accts.get(acct), things.get(fulln), addr, fname, date,
                       ip, kind, msg_hash, body, fr_addr, reply_to)
Esempio n. 59
0
def edit_campaign(link, campaign, dates, bid, cpm, sr, priority):
    """Update an existing PromoCampaign's dates, bid, cpm, targeting and
    priority.

    A changed bid voids any pending transactions, the promotion schedule is
    rebuilt, and the campaign row is updated.  For cpm-priority campaigns
    the change is logged and, if the link's author has complimentary
    promos, the campaign is made free.  Any failure is logged and
    re-raised.
    """
    sr_name = sr.name if sr else ''  # empty string means target to all
    try:
        # if the bid amount changed, cancel any pending transactions
        if campaign.bid != bid:
            void_campaign(link, campaign)

        # update the schedule
        PromotionWeights.reschedule(link, campaign._id, sr_name, dates[0],
                                    dates[1], bid)

        # update values in the db
        campaign.update(dates[0],
                        dates[1],
                        bid,
                        cpm,
                        sr_name,
                        campaign.trans_id,
                        priority,
                        commit=True)

        if campaign.priority.cpm:
            # record the transaction
            text = 'updated campaign %s. (bid: %0.2f)' % (campaign._id, bid)
            PromotionLog.add(link, text)

            # make it a freebie, if applicable
            author = Account._byID(link.author_id, True)
            if getattr(author, "complimentary_promos", False):
                free_campaign(link, campaign, c.user)

        hooks.get_hook('campaign.edit').call(link=link, campaign=campaign)

    except Exception, e:  # record error and rethrow
        g.log.error(
            "Failed to update PromoCampaign %s on link %d. Error was: %r" %
            (campaign._id, link._id, e))
        try:  # wrapped in try/except so orig error won't be lost if commit fails
            text = 'update FAILED. (campaign: %s, bid: %.2f)' % (campaign._id,
                                                                 bid)
            PromotionLog.add(link, text)
        except Exception:
            # best-effort logging only; never mask the original failure
            pass
        # bare raise preserves the original traceback; ``raise e`` would
        # discard it under Python 2
        raise
Esempio n. 60
0
    def __init__(self, event, perms_by_contributor, editable):
        """Build the contributor listing for *event*, ordered by account
        name, pairing each contributor account with its permissions."""
        self.event = event
        self.editable = editable

        accounts = Account._byID(
            perms_by_contributor.keys(), data=True)
        items = sorted(
            (LiveUpdateContributor(acct, perms_by_contributor[acct._id])
             for acct in accounts.itervalues()),
            key=lambda item: item.account.name)

        SimpleBuilder.__init__(
            self,
            items,
            keep_fn=self.keep_item,
            wrap=self.wrap_item,
            skip=False,
            num=0,
        )