Ejemplo n.º 1
0
def test_send_summary_emails():
    """Force-send a summary email to every account that has an email address.

    Backdates each account's ``last_email_sent_at`` to 24 hours ago so the
    summary sender considers the account due, then sends immediately.
    """
    # Loop-invariant hoisted: the original recomputed this timestamp on
    # every iteration, giving each account a slightly different value.
    a_day_ago = datetime.datetime.now(pytz.utc) - datetime.timedelta(hours=24)
    accounts = fetch_things2(Account._query(Account.c.email != None, sort=asc('_date')))
    for account in accounts:
        account.last_email_sent_at = a_day_ago
        account._commit()
        send_account_summary_email(account._id, verbose=True)
Ejemplo n.º 2
0
def test_cassasavehide():
    """Round-trip test of CassandraSave for one (account, link) pair.

    Picks the newest Account and Link, saves the link, verifies the save is
    readable via ``_fast_query`` and indexed in ``SavesByAccount``, then
    destroys it and verifies both lookups fail.
    """
    from r2.models import Account, Link, CassandraSave, SavesByAccount
    from r2.lib.db import tdb_cassandra

    # newest account and newest link serve as the test fixtures
    a = list(Account._query(sort=desc('_date'),
                            limit=1))[0]
    l = list(Link._query(sort=desc('_date'),
                         limit=1))[0]

    # clean up any save left over from a previous run
    try:
        csh = CassandraSave._fast_query(a._id36, l._id36)
        print "Warning! Deleting!", csh
        CassandraSave._fast_query(a._id36, l._id36)._destroy()
    except tdb_cassandra.NotFound:
        pass

    csh = CassandraSave._save(a, l)
    csh._commit()
    assert CassandraSave._fast_query(a._id36, l._id36) == csh

    # check for the SavesByAccount object too
    assert SavesByAccount._byID(a._id36)[csh._id] == csh._id

    csh._destroy()

    # the _fast_query call itself should raise NotFound now; the comparison
    # result on the next line is intentionally unused
    try:
        CassandraSave._fast_query(a._id36, l._id36) == csh
        raise Exception("shouldn't exist after destroying")
    except tdb_cassandra.NotFound:
        pass

    # the index row may be gone entirely (NotFound) or just lack the column
    try:
        assert csh._id not in SavesByAccount._byID(a._id36, properties = csh._id)._values()
    except tdb_cassandra.NotFound:
        pass
Ejemplo n.º 3
0
def clear_account_by_name_cache():
    q = Account._query(Account.c._deleted == (True, False), data = True)
    for account in q:
        name = account.name
        clear_memo('account._by_name', Account, name.lower(), True)
        clear_memo('account._by_name', Account, name.lower(), False)
        print "Cleared cache for %s" % account.name
Ejemplo n.º 4
0
def reset_last_email_sent_at_for_all_accounts():
    """Rewind ``last_email_sent_at`` to the Unix epoch for every account
    that has an email address."""
    epoch = pytz.utc.localize(datetime.datetime.utcfromtimestamp(0))

    query = Account._query(Account.c.email != None, sort=asc('_date'))
    for account in fetch_things2(query):
        account.last_email_sent_at = epoch
        account._commit()
Ejemplo n.º 5
0
def test_cassasavehide():
    """Round-trip test of CassandraSave: create, look up, index check, destroy."""
    from r2.models import Account, Link, CassandraSave, SavesByAccount
    from r2.lib.db import tdb_cassandra

    # newest account and link are the fixtures
    a = list(Account._query(sort=desc('_date'), limit=1))[0]
    l = list(Link._query(sort=desc('_date'), limit=1))[0]

    # remove any save left over from an earlier run
    try:
        csh = CassandraSave._fast_query(a._id36, l._id36)
        print "Warning! Deleting!", csh
        CassandraSave._fast_query(a._id36, l._id36)._destroy()
    except tdb_cassandra.NotFound:
        pass

    csh = CassandraSave._save(a, l)
    csh._commit()
    assert CassandraSave._fast_query(a._id36, l._id36) == csh

    # check for the SavesByAccount object too
    assert SavesByAccount._byID(a._id36)[csh._id] == csh._id

    csh._destroy()

    # the lookup should now raise NotFound; the comparison value is unused
    try:
        CassandraSave._fast_query(a._id36, l._id36) == csh
        raise Exception("shouldn't exist after destroying")
    except tdb_cassandra.NotFound:
        pass

    # the index row may be gone (NotFound) or merely missing this column
    try:
        assert csh._id not in SavesByAccount._byID(
            a._id36, properties=csh._id)._values()
    except tdb_cassandra.NotFound:
        pass
Ejemplo n.º 6
0
    def write_karmas(self):
        """Copy karma totals from the ``karmatotals`` SQL table onto Account
        objects, walking account ids in windows of STEP at a time."""
        STEP = 100
        account_id_max = sa.select([sa.func.max(karmatotals.c.account_id)]).scalar()
        # resume support is disabled; always start from id 0
        account_id_start = 0  #int(self.state.kvstore.get('karma.cur_write_account_id', '0'))

        print('Writing karma keys, starting at account {0}, max account id is {1}'.format(
            account_id_start, account_id_max))

        for account_id_low in xrange(account_id_start, account_id_max + 1, STEP):
            # fetch this window of accounts and index them by id
            accounts = list(Account._query(
                Account.c._id >= account_id_low,
                Account.c._id < account_id_low + STEP))
            accounts = dict((a._id, a) for a in accounts)
            # matching karma rows for the same id window
            karmas = karmatotals.select(
                sa.and_(karmatotals.c.account_id >= account_id_low,
                        karmatotals.c.account_id < account_id_low + STEP)).execute().fetchall()

            print('{0}: writing karmas, {1} of {2} accounts'.format(
                datetime.now().isoformat(' '), account_id_low, account_id_max))

            for k in karmas:
                account = accounts.get(k['account_id'])
                if account is not None:
                    # attribute name encodes the karma kind; value is the total
                    key = self.make_karma_key(k)
                    setattr(account, key, k['amount'])

            for ac in accounts.values():
                ac._commit()
Ejemplo n.º 7
0
    def gen_keys():
        """Yield every cache key worth priming: the promoted-links memo, the
        all-comments listing, per-link comment keys, per-account listing and
        inbox keys, and per-subreddit listing keys."""
        yield promoted_memo_key

        # just let this one do its own writing
        load_all_reddits()

        yield queries.get_all_comments().iden

        # every link, spam and deleted included, newest first
        l_q = Link._query(Link.c._spam == (True, False),
                          Link.c._deleted == (True, False),
                          sort=desc('_date'),
                          data=True,
                          )
        for link in fetch_things2(l_q, verbosity):
            yield comments_key(link._id)
            yield last_modified_key(link, 'comments')

        # every account, spam included
        a_q = Account._query(Account.c._spam == (True, False),
                             sort=desc('_date'),
                             )
        for account in fetch_things2(a_q, verbosity):
            yield messages_key(account._id)
            yield last_modified_key(account, 'overview')
            yield last_modified_key(account, 'commented')
            yield last_modified_key(account, 'submitted')
            yield last_modified_key(account, 'liked')
            yield last_modified_key(account, 'disliked')
            yield queries.get_comments(account, 'new', 'all').iden
            yield queries.get_submitted(account, 'new', 'all').iden
            yield queries.get_liked(account).iden
            yield queries.get_disliked(account).iden
            yield queries.get_hidden(account).iden
            yield queries.get_saved(account).iden
            yield queries.get_inbox_messages(account).iden
            yield queries.get_unread_messages(account).iden
            yield queries.get_inbox_comments(account).iden
            yield queries.get_unread_comments(account).iden
            yield queries.get_inbox_selfreply(account).iden
            yield queries.get_unread_selfreply(account).iden
            yield queries.get_sent(account).iden

        # every subreddit, spam included
        sr_q = Subreddit._query(Subreddit.c._spam == (True, False),
                                sort=desc('_date'),
                                )
        for sr in fetch_things2(sr_q, verbosity):
            yield last_modified_key(sr, 'stylesheet_contents')
            yield queries.get_links(sr, 'hot', 'all').iden
            yield queries.get_links(sr, 'new', 'all').iden

            for sort in 'top', 'controversial':
                for time in 'hour', 'day', 'week', 'month', 'year', 'all':
                    yield queries.get_links(sr, sort, time,
                                            merge_batched=False).iden
            yield queries.get_spam_links(sr).iden
            yield queries.get_spam_comments(sr).iden
            yield queries.get_reported_links(sr).iden
            yield queries.get_reported_comments(sr).iden
            yield queries.get_subreddit_messages(sr).iden
            yield queries.get_unread_subreddit_messages(sr).iden
Ejemplo n.º 8
0
    def gen_keys():
        """Generate all cache keys to be primed: promoted memo, all-comments
        listing, then per-link, per-account, and per-subreddit keys."""
        yield promoted_memo_key

        # just let this one do its own writing
        load_all_reddits()

        yield queries.get_all_comments().iden

        # all links, including spam and deleted ones, newest first
        l_q = Link._query(Link.c._spam == (True, False),
                          Link.c._deleted == (True, False),
                          sort=desc('_date'),
                          data=True,
                          )
        for link in fetch_things2(l_q, verbosity):
            yield comments_key(link._id)
            yield last_modified_key(link, 'comments')

        # all accounts, spam included
        a_q = Account._query(Account.c._spam == (True, False),
                             sort=desc('_date'),
                             )
        for account in fetch_things2(a_q, verbosity):
            yield messages_key(account._id)
            yield last_modified_key(account, 'overview')
            yield last_modified_key(account, 'commented')
            yield last_modified_key(account, 'submitted')
            yield last_modified_key(account, 'liked')
            yield last_modified_key(account, 'disliked')
            yield queries.get_comments(account, 'new', 'all').iden
            yield queries.get_submitted(account, 'new', 'all').iden
            yield queries.get_liked(account).iden
            yield queries.get_disliked(account).iden
            yield queries.get_hidden(account).iden
            yield queries.get_saved(account).iden
            yield queries.get_inbox_messages(account).iden
            yield queries.get_unread_messages(account).iden
            yield queries.get_inbox_comments(account).iden
            yield queries.get_unread_comments(account).iden
            yield queries.get_inbox_selfreply(account).iden
            yield queries.get_unread_selfreply(account).iden
            yield queries.get_sent(account).iden

        # all subreddits, spam included
        sr_q = Subreddit._query(Subreddit.c._spam == (True, False),
                                sort=desc('_date'),
                                )
        for sr in fetch_things2(sr_q, verbosity):
            yield last_modified_key(sr, 'stylesheet_contents')
            yield queries.get_links(sr, 'hot', 'all').iden
            yield queries.get_links(sr, 'new', 'all').iden

            for sort in 'top', 'controversial':
                for time in 'hour', 'day', 'week', 'month', 'year', 'all':
                    yield queries.get_links(sr, sort, time,
                                            merge_batched=False).iden
            yield queries.get_spam_links(sr).iden
            yield queries.get_spam_comments(sr).iden
            yield queries.get_reported_links(sr).iden
            yield queries.get_reported_comments(sr).iden
            yield queries.get_subreddit_messages(sr).iden
            yield queries.get_unread_subreddit_messages(sr).iden
Ejemplo n.º 9
0
 def _query_account(self, *args):
     """Return the first Account matching the given clauses (data loaded),
     or None when nothing matches."""
     matches = list(Account._query(*args, data=True))
     if matches:
         return matches[0]
     return None
Ejemplo n.º 10
0
 def _query_account(self, *args):
     """Fetch at most one Account matching *args* (with data); None if none."""
     for found in Account._query(*args, data=True):
         return found
     return None
Ejemplo n.º 11
0
def cancel_subscription(subscr_id):
    """Clear ``gold_subscr_id`` on every account holding this subscription.

    Logs a warning when the match count is anything other than exactly one.
    """
    matches = list(Account._query(Account.c.gold_subscr_id == subscr_id, data=True))
    if len(matches) != 1:
        g.log.warning("Found %d matches for canceled subscription %s" % (len(matches), subscr_id))
    for account in matches:
        account.gold_subscr_id = None
        account._commit()
        g.log.info("%s canceled their recurring subscription %s" % (account.name, subscr_id))
Ejemplo n.º 12
0
def accountid_from_paypalsubscription(subscr_id):
    """Map a PayPal subscription id to the owning account's _id, or None."""
    if subscr_id is None:
        return None

    matches = list(Account._query(Account.c.gold_subscr_id == subscr_id, data=False))
    return matches[0]._id if matches else None
Ejemplo n.º 13
0
def accountid_from_paypalsubscription(subscr_id):
    """Return the _id of the account owning *subscr_id*, if any."""
    if subscr_id is None:
        return None

    # iterate the query lazily; only the first hit matters
    for account in Account._query(Account.c.gold_subscr_id == subscr_id, data=False):
        return account._id
    return None
Ejemplo n.º 14
0
def get_users_to_notify_for_meetup(coords):
    """Return accounts opted into meetup notifications that are within their
    own notification radius of *coords*."""
    # This query could definitely be optimized, but I don't expect it to be
    # run too often, so it's probably not worth the effort.
    candidates = Account._query(
        Account.c.pref_meetup_notify_enabled == True,
        Account.c.email != None,
        Account.c.pref_latitude != None,
        Account.c.pref_longitude != None)
    return [u for u in candidates
            if u.is_within_radius(coords, u.pref_meetup_notify_radius)]
Ejemplo n.º 15
0
def get_users_to_notify_for_meetup(coords):
    """List accounts with meetup notifications enabled that lie within
    their configured radius of *coords*."""
    # This query could definitely be optimized, but I don't expect it to be
    # run too often, so it's probably not worth the effort.
    query = Account._query(Account.c.pref_meetup_notify_enabled == True,
                           Account.c.email != None,
                           Account.c.pref_latitude != None,
                           Account.c.pref_longitude != None)
    nearby = []
    for user in query:
        if user.is_within_radius(coords, user.pref_meetup_notify_radius):
            nearby.append(user)
    return nearby
Ejemplo n.º 16
0
def cancel_subscription(subscr_id):
    """Find the account(s) carrying this gold subscription id and cancel
    the subscription by clearing the field."""
    accounts = list(Account._query(Account.c.gold_subscr_id == subscr_id,
                                   data=True))
    if len(accounts) != 1:
        g.log.warning("Found %d matches for canceled subscription %s" %
                      (len(accounts), subscr_id))
    for account in accounts:
        account.gold_subscr_id = None
        account._commit()
        g.log.info("%s canceled their recurring subscription %s" %
                   (account.name, subscr_id))
Ejemplo n.º 17
0
def accountid_from_subscription(subscr_id):
    """Resolve a gold subscription id to an account _id, matching spam and
    deleted accounts as well; None when nothing matches."""
    if subscr_id is None:
        return None

    matches = list(Account._query(Account.c.gold_subscr_id == subscr_id,
                                  Account.c._spam == (True, False),
                                  Account.c._deleted == (True, False),
                                  data=False))
    if matches:
        return matches[0]._id
    return None
Ejemplo n.º 18
0
def accountid_from_subscription(subscr_id):
    """Look up the account _id for a gold subscription id (spam/deleted
    accounts included); returns None for no id or no match."""
    if subscr_id is None:
        return None

    hits = list(Account._query(Account.c.gold_subscr_id == subscr_id,
                               Account.c._spam == (True, False),
                               Account.c._deleted == (True, False),
                               data=False))
    return hits[0]._id if hits else None
Ejemplo n.º 19
0
    def gen_keys():
        """Yield all cache keys to prime: the promoted memo, all-comments
        listing, per-link comment keys, per-account listing/inbox keys, and
        per-subreddit listing keys."""
        yield promoted_memo_key

        # just let this one do its own writing
        load_all_reddits()

        yield queries.get_all_comments().iden

        # every link, spam and deleted included, newest first
        l_q = Link._query(
            Link.c._spam == (True, False), Link.c._deleted == (True, False), sort=desc("_date"), data=True
        )
        for link in fetch_things2(l_q, verbosity):
            yield comments_key(link._id)
            yield last_modified_key(link, "comments")

        # every account, spam included
        a_q = Account._query(Account.c._spam == (True, False), sort=desc("_date"))
        for account in fetch_things2(a_q, verbosity):
            yield messages_key(account._id)
            yield last_modified_key(account, "overview")
            yield last_modified_key(account, "commented")
            yield last_modified_key(account, "submitted")
            yield last_modified_key(account, "liked")
            yield last_modified_key(account, "disliked")
            yield queries.get_comments(account, "new", "all").iden
            yield queries.get_submitted(account, "new", "all").iden
            yield queries.get_liked(account).iden
            yield queries.get_disliked(account).iden
            yield queries.get_hidden(account).iden
            yield queries.get_saved(account).iden
            yield queries.get_inbox_messages(account).iden
            yield queries.get_unread_messages(account).iden
            yield queries.get_inbox_comments(account).iden
            yield queries.get_unread_comments(account).iden
            yield queries.get_inbox_selfreply(account).iden
            yield queries.get_unread_selfreply(account).iden
            yield queries.get_sent(account).iden

        # every subreddit, spam included
        sr_q = Subreddit._query(Subreddit.c._spam == (True, False), sort=desc("_date"))
        for sr in fetch_things2(sr_q, verbosity):
            yield last_modified_key(sr, "stylesheet_contents")
            yield queries.get_links(sr, "hot", "all").iden
            yield queries.get_links(sr, "new", "all").iden

            for sort in "top", "controversial":
                for time in "hour", "day", "week", "month", "year", "all":
                    yield queries.get_links(sr, sort, time, merge_batched=False).iden
            yield queries.get_spam_links(sr).iden
            yield queries.get_spam_comments(sr).iden
            yield queries.get_reported_links(sr).iden
            yield queries.get_reported_comments(sr).iden
            yield queries.get_subreddit_messages(sr).iden
            yield queries.get_unread_subreddit_messages(sr).iden
Ejemplo n.º 20
0
def all_users():
    """Yield every account with nonzero link or comment karma (spam and
    deleted included), newest first, paging 200 at a time."""
    q = Account._query(or_(Account.c.link_karma != 0,
                           Account.c.comment_karma != 0),
                       Account.c._spam == (True, False),
                       Account.c._deleted == (True, False),
                       sort = desc('_date'),
                       limit = 200,
                       data = True)
    users = list(q)
    while users:
        for l in users:
            yield l
        # paginate: re-run the query starting after the last item yielded
        # (relies on `l` leaking out of the for-loop above)
        users = list(q._after(l))
Ejemplo n.º 21
0
def user_sort_options():
    """Migrate users whose 'browse_sort' preference is 'all' to 'quarter'.

    Prints each username; a trailing " *" marks users that were changed.
    """
    pref = 'browse_sort'
    users = Account._query(data=True)
    for user in users:
        print user.name,
        # copy so the stored dict is not mutated unless we commit
        user_prefs = copy(user.sort_options)
        user_pref = user_prefs.get(pref)
        if user_pref and user_pref == 'all':
            user_prefs[pref] = 'quarter'
            user.sort_options = user_prefs
            user._commit()
            print " *"
        else:
            print
Ejemplo n.º 22
0
def geolocate_users():
    """Fill in pref_latitude/pref_longitude for users that set a location
    preference but have not been geocoded yet."""
    users = list(Account._query(Account.c.pref_location != None, data=True))
    log('Geolocating {0} users...'.format(len(users)))

    for user in users:
        # skip users with a blank location or who are already geocoded
        if not user.pref_location or user.pref_latitude:
            continue
        latlng = geolocate_address(user.pref_location)
        if not latlng:
            continue
        user.pref_latitude, user.pref_longitude = latlng
        user._commit()
        log('{0} ({1!r}) => ({2:.3}, {3:.3})'.format(
            user.name, user.pref_location, user.pref_latitude,
            user.pref_longitude))
Ejemplo n.º 23
0
def user_sort_options():
    """One-off migration: change any user's 'browse_sort' pref from 'all'
    to 'quarter', printing " *" next to the names that were updated."""
    pref = 'browse_sort'
    users = Account._query(data=True)
    for user in users:
        print user.name,
        # operate on a copy; only write back when a change is needed
        user_prefs = copy(user.sort_options)
        user_pref = user_prefs.get(pref)
        if user_pref and user_pref == 'all':
            user_prefs[pref] = 'quarter'
            user.sort_options = user_prefs
            user._commit()
            print " *"
        else:
            print
Ejemplo n.º 24
0
def geolocate_users():
    """Backfill pref_latitude/pref_longitude for accounts that set a
    pref_location but were not yet geocoded."""
    users = list(Account._query(Account.c.pref_location != None,
                                data=True))
    log('Geolocating {0} users...'.format(len(users)))

    for user in users:
        # skip blank locations and users that already have coordinates
        if not user.pref_location or user.pref_latitude:
            continue
        coords = geolocate_address(user.pref_location)
        if coords:
            user.pref_latitude, user.pref_longitude = coords
            user._commit()
            log('{0} ({1!r}) => ({2:.3}, {3:.3})'.format(
                user.name, user.pref_location, user.pref_latitude, user.pref_longitude))
Ejemplo n.º 25
0
def user_downvote_karma_count(filename):
    """Write a CSV of (username, karma, downvote count) for every account.

    Uses the per-user vote cache when populated; on a cache miss, counts the
    user's -1 votes directly (slow).
    """
    users = Account._query(data=True)

    # 'with' guarantees the file is closed even if a query below raises --
    # the original open()/close() pair leaked the handle on error.
    with open(filename, 'w') as f:
        f.write("Username,Karma,Down Votes\n")

        for user in users:
            downvote_count = g.cache.get(user.vote_cache_key())
            if downvote_count is None:
                # cache miss: count this user's downvotes (_name == '-1')
                downvote_count = len(list(Vote._query(Vote.c._thing1_id == user._id,
                                                      Vote.c._name == str(-1))))

            f.write("%s,%d,%d\n" % (user.name, user.safe_karma, downvote_count))
Ejemplo n.º 26
0
def queue_summary_emails():
    """Enqueue a summary-email job for each account that is due one, and
    print how long the full account scan took."""
    start = datetime.datetime.now()
    # find all accounts that should get an email

    # this implementation is slow, as it iterates over all accounts that have an email
    # address.  One idea to make it faster is to turn the "last_email_sent_at" data
    # attribute into an actual sql column you can query

    accounts = fetch_things2(Account._query(Account.c.email != None, sort=asc('_date')))
    for account in accounts:
        if should_send_activity_summary_email(account):
            # using _add_item over add_item as that skips using a daemon thread to talk
            # to the amqp server that might not finish its job before the process exits
            amqp._add_item('summary_email_q', str(account._id))
            print "Queued summary email for %r" % (account.email,)
    end = datetime.datetime.now()
    print "Time to scan accounts to queue emails: %s" % (end - start)
Ejemplo n.º 27
0
def backfill_deleted_accounts(resume_id=None):
    """Re-publish 'account_deleted' events for all deleted accounts,
    optionally resuming below a given account id, throttled so the
    downstream queues never grow unbounded."""
    del_accts = Account._query(Account.c._deleted == True, sort=desc('_date'))
    if resume_id:
        del_accts._filter(Account.c._id < resume_id)

    for i, account in enumerate(progress(fetch_things2(del_accts))):
        # Don't kill the rabbit! Wait for the relevant queues to calm down.
        if i % 1000 == 0:
            del_len = get_queue_length('del_account_q')
            cs_len = get_queue_length('cloudsearch_changes')
            while (del_len > 1000 or cs_len > 10000):
                sys.stderr.write(("CS: %d, DEL: %d" % (cs_len, del_len)) +
                                 "\n")
                sys.stderr.flush()
                time.sleep(1)
                del_len = get_queue_length('del_account_q')
                cs_len = get_queue_length('cloudsearch_changes')
        amqp.add_item('account_deleted', account._fullname)
Ejemplo n.º 28
0
def backfill_deleted_accounts(resume_id=None):
    """Replay 'account_deleted' amqp events for every deleted account
    (newest first), pausing whenever the consumer queues back up.

    resume_id: when given, only process accounts with _id below it.
    """
    del_accts = Account._query(Account.c._deleted == True, sort=desc('_date'))
    if resume_id:
        del_accts._filter(Account.c._id < resume_id)

    for i, account in enumerate(progress(fetch_things2(del_accts))):
        # Don't kill the rabbit! Wait for the relevant queues to calm down.
        if i % 1000 == 0:
            del_len = get_queue_length('del_account_q')
            cs_len = get_queue_length('cloudsearch_changes')
            while (del_len > 1000 or
                    cs_len > 10000):
                sys.stderr.write(("CS: %d, DEL: %d" % (cs_len, del_len)) + "\n")
                sys.stderr.flush()
                time.sleep(1)
                del_len = get_queue_length('del_account_q')
                cs_len = get_queue_length('cloudsearch_changes')
        amqp.add_item('account_deleted', account._fullname)
Ejemplo n.º 29
0
def post_user_stats():
    """Print a per-account activity report plus summary counts of deleted,
    spammer, and long-inactive accounts over the first 1000 accounts."""
    import re
    from r2.models import Account, Subreddit, Award
    from r2.lib import utils
    from datetime import datetime, timedelta


    q = Account._query(sort = asc('_date'),
                       limit = 1000,
                       data = True)


    spammers=0
    deleted=0
    prev_visited=0
    total=0
    ctime = datetime.now(g.tz)
    for account in utils.fetch_things2(q):
        print "\n-------------------------------------"
        print account.name
        print account._spam
        print account._deleted
        total+=1
        prev_visit = last_visit(account)
        if not prev_visit:
            print "%s hasn't logged in for a long time" % account.name
            prev_visited+=1
        else:
            # accounts idle for more than 10 days also count as inactive
            tsince = ctime - prev_visit
            if tsince.days > 10:
                prev_visited+=1
            print "%s hasn't logged in for %d days" % (account.name,tsince.days)

        if  account._spam:
            print "%s is a spammer and banned " % account.name
            spammers+=1
        if  account._deleted:
            print "%s has deleted his/her account " % account.name
            deleted+=1
    print "there are %d deleted accounts, %d spammers, %d haven't logged in in a long time from a total of %d. so there are %d more valid users" % (deleted,spammers,prev_visited,total,total-spammers-deleted-prev_visited)
Ejemplo n.º 30
0
def give_awards():
    """Grant or revoke age-based trophies (newbie/1year/2years) and the
    verified-email trophy for recently active, non-spam, non-deleted
    accounts (first 1000 by date); skips accounts idle for over 90 days."""
    import re
    from r2.models import Account, Subreddit, Award
    from r2.lib import utils
    from datetime import datetime, timedelta


    q = Account._query(Account.c._spam == False,
                       Account.c._deleted == False,
                       sort = asc('_date'),
                       #Account.c.link_karma>-1,
                       limit = 1000,
                       data = True)


    i=0
    ctime = datetime.now(g.tz)
    for account in utils.fetch_things2(q):
        prev_visit = last_visit(account)
        if not prev_visit:
            print "%s hasn't logged in for a long time" % account.name
            continue
        if (ctime - prev_visit).days > 90:
            print "%s hasn't logged in for %d days" % (account.name,(ctime-prev_visit).days)
            continue
        # account age determines which anniversary awards apply
        tsince = ctime - account._date
        if tsince.days < 30:
            Award.give_if_needed("newbie", account)
        elif tsince.days >= 30:
            Award.take_away("newbie", account)
        if tsince.days >= 365 and tsince.days < 365*2:
            Award.give_if_needed("1year", account)
        elif tsince.days >= 365*2:
            Award.take_away("1year", account)
            Award.give_if_needed("2years", account)
        if account.email_verified:
            print "%s has verified email: %s" % (account.name,account.email)
            Award.give_if_needed("verified_email", account)
Ejemplo n.º 31
0
    def _run_realtime_email_queue(msgs, chan):
        """Send real-time notification emails for each queued comment/post.

        For every amqp message (a thing fullname such as 't1_2n' or 't6_q'),
        renders an HTML email and sends it to each account whose subreddit
        subscription (or per-thread 'email' flag) asks for it.  A single SMTP
        session is opened lazily on first send and reused; the cached account
        list is refreshed at most every 10 minutes.
        """

        if time.time() - run_realtime_email_queue.last_got_accounts > 600:
            #-- Pick up a fresh list of accounts, if we haven't done so recently, in case settings change
            if g.email_debug:
                g.log.info('Getting accounts')
            run_realtime_email_queue.accounts = Account._query(Account.c.email != None, sort = asc('_date'), data=True)
            run_realtime_email_queue.last_got_accounts = time.time()

        for msg in msgs:
            # msg.body contains the unique name of the post, comment or message, e.g. 't1_2n'(comment #95) or 't6_q'(post #26)
            fullname = str(msg.body)
            fullname_type = fullname[0:2]
            id36 = fullname[3:]
            if g.email_debug:
                g.log.info('msg: %r', fullname)
            howold = (datetime.datetime.now() - msg.timestamp).total_seconds()
            if  howold < 110:
                # Wait until this item is 2 minutes old, to allow time for corrections
                if g.email_debug:
                    g.log.info('waiting for a moment')
                time.sleep(120 - howold)

            is_com = is_post = False
            thing = link = comment = None
            if fullname_type == 't1':
                # a comment
                is_com = True
                comment = Comment._byID36(id36, data=True)
                if g.email_debug:
                    g.log.info('comment: %r', comment.body)
                thing = comment
                author = Account._byID(comment.author_id, True)
                kind = Email.Kind.REALTIME_COMMENT
                template = 'email_realtime_comment.html'
                link = Link._byID(comment.link_id, data=True)
                subject = 'Re: %s' % link.title
                sr_id = comment.sr_id

            elif fullname_type == 't6':
                # a post/link
                is_post = True
                link = Link._byID36(id36, data=True)
                if g.email_debug:
                    g.log.info('post: %r', link.title)
                thing = link
                author = Account._byID(link.author_id, True)
                kind = Email.Kind.REALTIME_POST
                template = 'email_realtime_post.html'
                subject = link.title
                sr_id = link.sr_id

            else:
                # unknown thing type: drop the whole batch
                return

            sr = Subreddit._byID(sr_id, data=True)

            subject = "[%s] %s" % (sr.name, subject)

            for account in run_realtime_email_queue.accounts:

                sub = sr.get_subscriber(account)

                if is_com:
                    if hasattr(sub,'email_comments') and sub.email_comments:
                        if g.email_debug:
                            g.log.info('  account %r: we should send this comment, because of the space setting', account.name)
                        whysend = 'space'
                    else:
                        # fall back to the per-thread 'email' relation
                        email_thread = Link._somethinged(SaveHide, account, link, 'email')[account,link,'email']
                        if email_thread:
                            if g.email_debug:
                                g.log.info('  account %r: we should send this comment, because of the thread setting', account.name)
                            whysend = 'thread'
                        else:
                            continue

                elif is_post:
                    if hasattr(sub,'email_posts') and sub.email_posts:
                        if g.email_debug:
                            g.log.info('  account %r: we should send this post', account.name)
                        whysend = 'space'
                    else:
                        continue

                # lazily open the SMTP session on the first actual send
                if not ('session' in locals()):
                    # Open the SMTP session
                    if g.email_debug:
                        g.log.info('Opening SMTP session')
                    session = open_smtp_session()

                # Render the template
                html_email_template = g.mako_lookup.get_template(template)
                html_body = html_email_template.render(link=link, comment=comment, thing=thing, account=account, sub=sub, whysend=whysend)

                from_email = '"%s" <%s>' % (g.realtime_email_from_name, g.share_reply,)
                send_html_email(account.email, g.share_reply, subject, html_body, from_full=from_email, session=session)
                if g.email_debug:
                    g.log.info('    sent to %r at %r', account.name, account.email)

        if g.email_debug:
            g.log.info('Done running queue')

        if 'session' in locals():
            # Close the session.
            session.quit()
Ejemplo n.º 32
0
    if msg._spam and msg.author_id != account._id:
        return False

    if msg.author_id in account.enemies:
        return False

    # don't show user their own unread stuff
    if msg.author_id == account._id:
        return False

    return True

resume_id = long(sys.argv[1]) if len(sys.argv) > 1 else None

msg_accounts = Account._query(sort=desc("_date"), data=True)

if resume_id:
    msg_accounts._filter(Account.c._id < resume_id)

for account in progress(fetch_things2(msg_accounts), estimate=resume_id):
    current_inbox_count = account.inbox_count
    unread_messages = list(queries.get_unread_inbox(account))

    if account._id % 100000 == 0:
        g.reset_caches()

    if not len(unread_messages):
        if current_inbox_count:
            account._incr('inbox_count', -current_inbox_count)
    else:
Ejemplo n.º 33
0
def all_gold_users():
    """Iterate over every account currently flagged as gold, ordered by id."""
    query = Account._query(Account.c.gold == True, data=True, sort="_id")
    return fetch_things2(query)
Ejemplo n.º 34
0
Archivo: gold.py Proyecto: z0r0/saidit
def account_from_stripe_customer_id(stripe_customer_id):
    """Return the Account whose gold_subscr_id matches the Stripe customer
    id (spam accounts included), or None when no account matches."""
    matches = Account._query(Account.c.gold_subscr_id == stripe_customer_id,
                             Account.c._spam == (True, False),
                             data=True)
    for account in matches:
        return account
    return None
Ejemplo n.º 35
0
def get_or_create_account(name):
    """Map an imported (OB) username to a local Account, creating one if needed.

    Resolution order:
      1. the in-memory ``username_mapping`` cache,
      2. an account previously tagged with ``ob_account_name == name``
         (interactive disambiguation when several match),
      3. an existing account whose name is a simple derivative of ``name``,
      4. a brand new account via ``create_account``.

    Whatever is found or created is cached in ``username_mapping`` before
    returning.

    Raises:
        Exception: if the operator picks "None, abort" at the prompt.
    """
    try:
        # Look for an account we have cached
        account = username_mapping[name]
    except KeyError:
        # See if there's a previously imported account
        account = list(Account._query(Account.c.ob_account_name == name, data=True))
        if len(account) == 1:
            account = account[0]
        elif len(account) > 1:
            # Several accounts claim this OB name; ask the operator to pick.
            print " Got more than one account for OB username '%s', select one below:" % name
            for i in range(len(account)):
                email = account[i].email if hasattr(account[i], 'email') else ''
                print "  %d. %s, %s" % (i, account[i].name, email)
            # NOTE: relies on the loop variable ``i`` leaking out of the for
            # loop above (Python 2 scoping) to number the two extra choices.
            i += 1
            print "  %d. Create new" % i
            i += 1
            print "  %d. None, abort" % i
            
            max_choice = i
            choice = -1
            # Re-prompt until a numeric selection within range is entered.
            while choice < 0 or choice > max_choice:
                choice = raw_input("Enter selection: ")
                try:
                    choice = int(choice)
                except ValueError:
                    choice = -1
            if choice in range(len(account)):
                account = account[choice]
            elif choice == max_choice:
                raise Exception("Aborting")
            else:
                # Fall through to code below
                # ("Create new" was chosen; None triggers create_account below)
                account = None
        else:
            # No tagged account: try derivatives of the name that may exist
            candidates = (
                name,
                name.replace(' ', ''),
                name.replace(' ', '_')
            )

            for candidate in candidates:
                try:
                    account = Account._by_name(candidate)
                except NotFound:
                    continue

                if account:
                    if not dryrun:
                        # Tag the account so future imports find it directly.
                        account.ob_account_name = name
                        account._commit()
                    break

        # No account found, create a new one.  Note ``account`` may be the
        # empty query-result list here, which is falsy as intended.
        if not account:
            account = create_account(name)

        username_mapping[name] = account

    return account
Ejemplo n.º 36
0
# The Original Developer is the Initial Developer.  The Initial Developer of
# the Original Code is reddit Inc.
#
# All portions of the code written by reddit are Copyright (c) 2006-2013 reddit
# Inc. All Rights Reserved.
###############################################################################
"""Ensure modmsgtime is properly set on all accounts.

See the comment in Account.is_moderator_somewhere for possible values of this
attribute now.

"""

from r2.lib.db.operators import desc
from r2.lib.utils import fetch_things2, progress
from r2.models import Account, Subreddit


# Walk every account, newest first, and normalize its modmsgtime value.
all_accounts = Account._query(sort=desc("_date"))
for account in progress(fetch_things2(all_accounts)):
    if not Subreddit.reverse_moderator_ids(account):
        # Not a moderator anywhere: modmsgtime is meaningless, clear it.
        account.modmsgtime = None
    elif not account.modmsgtime:
        # Moderator with no unread-modmail timestamp: mark as explicitly read.
        account.modmsgtime = False
    # else: an existing date means unread modmail -- leave it untouched.
    account._commit()
Ejemplo n.º 37
0
def add_all_users():
    """Run update_user over every account, oldest first."""
    for account in fetch_things2(Account._query(sort=asc('_date'))):
        update_user(account)
Ejemplo n.º 38
0
def all_gold_users():
    """Return a streaming iterator over all accounts that have gold."""
    query = Account._query(Account.c.gold == True, sort="_id", data=True)
    return fetch_things2(query)
Ejemplo n.º 39
0
def inject_test_data(num_links=25, num_comments=25, num_votes=5):
    """Flood your reddit install with test data based on reddit.com.

    Creates (or reuses) accounts and subreddits, submits ``num_links``
    links per modeled subreddit, roughly ``num_comments`` comments per
    link and roughly ``num_votes`` votes per thing (both fuzzed), then
    waits for the amqp worker to drain.
    """

    # Make sure the accounts/subreddits named in the config exist before
    # anything below tries to reference them.
    print ">>>> Ensuring configured objects exist"
    system_user = ensure_account(g.system_user)
    ensure_account(g.automoderator_account)
    ensure_subreddit(g.default_sr, system_user)
    ensure_subreddit(g.takedown_sr, system_user)

    print
    print

    # Build statistical models of real subreddits so generated titles,
    # urls, and comment bodies look plausible.
    print ">>>> Fetching real data from reddit.com"
    modeler = Modeler()
    subreddits = [
        modeler.model_subreddit("pics"),
        modeler.model_subreddit("videos"),
        modeler.model_subreddit("askhistorians"),
    ]
    # Extra per-subreddit settings applied after creation, keyed by sr name.
    extra_settings = {
        "pics": {
            "show_media": True,
        },
        "videos": {
            "show_media": True,
        },
    }

    print
    print

    print ">>>> Generating test data"
    print ">>> Accounts"
    # Reuse up to 500 existing accounts (excluding the system user) and,
    # if fewer than 50 exist, pad with freshly generated usernames.
    account_query = Account._query(sort="_date", limit=500, data=True)
    accounts = [a for a in account_query if a.name != g.system_user]
    accounts.extend(
        ensure_account(modeler.generate_username())
        for i in xrange(50 - len(accounts)))

    print ">>> Content"
    things = []
    for sr_model in subreddits:
        sr_author = random.choice(accounts)
        sr = ensure_subreddit(sr_model.name, sr_author)

        # make the system user subscribed for easier testing
        if sr.add_subscriber(system_user):
            sr._incr("_ups", 1)

        # apply any custom config we need for this sr
        for setting, value in extra_settings.get(sr.name, {}).iteritems():
            setattr(sr, setting, value)
        sr._commit()

        for i in xrange(num_links):
            link_author = random.choice(accounts)

            link = Link._submit(
                title=sr_model.generate_link_title(),
                url=sr_model.generate_link_url(),
                author=link_author,
                sr=sr,
                ip="127.0.0.1",
            )
            # A "self" url marks a selfpost: point the url at its own
            # permalink and fill in generated selftext.
            if link.url == "self":
                link.url = link.make_permalink(sr)
                link.is_self = True
                link.selftext = sr_model.generate_selfpost_body()
                link._commit()
            # The submitter upvotes their own link, as on the real site.
            queries.queue_vote(link_author, link, dir=True, ip="127.0.0.1")
            queries.new_link(link)
            things.append(link)

            # ``None`` in the candidate-parents list yields top-level comments.
            comments = [None]
            for i in xrange(fuzz_number(num_comments)):
                comment_author = random.choice(accounts)
                comment, inbox_rel = Comment._new(
                    comment_author,
                    link,
                    parent=random.choice(comments),
                    body=sr_model.generate_comment_body(),
                    ip="127.0.0.1",
                )
                queries.queue_vote(comment_author,
                                   comment,
                                   dir=True,
                                   ip="127.0.0.1")
                queries.new_comment(comment, inbox_rel)
                comments.append(comment)
                things.append(comment)

    # Sprinkle random up/none/down votes over every link and comment made.
    for thing in things:
        for i in xrange(fuzz_number(num_votes)):
            direction = random.choice([True, None, False])
            voter = random.choice(accounts)
            queries.queue_vote(voter, thing, dir=direction, ip="127.0.0.1")

    # Block until the queued work above has been processed.
    amqp.worker.join()
Ejemplo n.º 40
0
def ban(user):
    """Message-ban the account with the given name.

    The original version was a syntax error (``user = #place name of user
    here`` assigns nothing); the username is now taken as a parameter
    instead of being hard-coded.

    Args:
        user: exact account name (matched against Account.c.name).

    Raises:
        IndexError: if no account with that name exists.
    """
    banned = list(Account._query(Account.c.name == user))[0]
    banned.messagebanned = True
    banned._commit()
Ejemplo n.º 41
0
def inject_test_data(num_links=25, num_comments=25, num_votes=5):
    """Flood your reddit install with test data based on reddit.com.

    Ensures the configured accounts/subreddits exist, then submits
    ``num_links`` links per modeled subreddit, roughly ``num_comments``
    comments per link and roughly ``num_votes`` votes per thing (both
    fuzzed), waits for the amqp worker, and finally sets the generated
    subreddits as the global defaults.
    """

    # The config-named accounts/subreddits must exist before anything
    # below references them.
    print ">>>> Ensuring configured objects exist"
    system_user = ensure_account(g.system_user)
    ensure_account(g.automoderator_account)
    ensure_subreddit(g.default_sr, system_user)
    ensure_subreddit(g.takedown_sr, system_user)
    ensure_subreddit(g.beta_sr, system_user)
    ensure_subreddit(g.promo_sr_name, system_user)

    print
    print

    # Model real subreddits so generated titles/urls/bodies look plausible.
    print ">>>> Fetching real data from reddit.com"
    modeler = Modeler()
    subreddits = [
        modeler.model_subreddit("pics"),
        modeler.model_subreddit("videos"),
        modeler.model_subreddit("askhistorians"),
    ]
    # Extra per-subreddit settings applied after creation, keyed by sr name.
    extra_settings = {
        "pics": {
            "show_media": True,
        },
        "videos": {
            "show_media": True,
        },
    }

    print
    print

    print ">>>> Generating test data"
    print ">>> Accounts"
    # Reuse up to 500 existing accounts (excluding the system user) and,
    # if fewer than 50 exist, pad with freshly generated usernames.
    account_query = Account._query(sort="_date", limit=500, data=True)
    accounts = [a for a in account_query if a.name != g.system_user]
    accounts.extend(
        ensure_account(modeler.generate_username())
        for i in xrange(50 - len(accounts)))

    print ">>> Content"
    things = []
    for sr_model in subreddits:
        sr_author = random.choice(accounts)
        sr = ensure_subreddit(sr_model.name, sr_author)

        # make the system user subscribed for easier testing
        if sr.add_subscriber(system_user):
            sr._incr("_ups", 1)

        # apply any custom config we need for this sr
        for setting, value in extra_settings.get(sr.name, {}).iteritems():
            setattr(sr, setting, value)
        sr._commit()

        for i in xrange(num_links):
            link_author = random.choice(accounts)
            # A "self" url marks a selfpost; its content is generated
            # selftext instead of the url.
            url = sr_model.generate_link_url()
            is_self = (url == "self")
            content = sr_model.generate_selfpost_body() if is_self else url
            link = Link._submit(
                is_self=is_self,
                title=sr_model.generate_link_title(),
                content=content,
                author=link_author,
                sr=sr,
                ip="127.0.0.1",
            )
            queries.new_link(link)
            things.append(link)

            # ``None`` in the candidate-parents list yields top-level comments.
            comments = [None]
            for i in xrange(fuzz_number(num_comments)):
                comment_author = random.choice(accounts)
                comment, inbox_rel = Comment._new(
                    comment_author,
                    link,
                    parent=random.choice(comments),
                    body=sr_model.generate_comment_body(),
                    ip="127.0.0.1",
                )
                queries.new_comment(comment, inbox_rel)
                comments.append(comment)
                things.append(comment)

    # Sprinkle random up/unvote/down votes over every link and comment made.
    for thing in things:
        for i in xrange(fuzz_number(num_votes)):
            direction = random.choice([
                Vote.DIRECTIONS.up,
                Vote.DIRECTIONS.unvote,
                Vote.DIRECTIONS.down,
            ])
            voter = random.choice(accounts)

            cast_vote(voter, thing, direction)

    # Block until the queued work above has been processed.
    amqp.worker.join()

    # Make the generated subreddits the global defaults for new users.
    srs = [Subreddit._by_name(n) for n in ("pics", "videos", "askhistorians")]
    LocalizedDefaultSubreddits.set_global_srs(srs)
Ejemplo n.º 42
0
#
# The Original Developer is the Initial Developer.  The Initial Developer of
# the Original Code is reddit Inc.
#
# All portions of the code written by reddit are Copyright (c) 2006-2015 reddit
# Inc. All Rights Reserved.
###############################################################################
"""Ensure modmsgtime is properly set on all accounts.

See the comment in Account.is_moderator_somewhere for possible values of this
attribute now.

"""

from r2.lib.db.operators import desc
from r2.lib.utils import fetch_things2, progress
from r2.models import Account, Subreddit

# Normalize modmsgtime on every account, iterating newest-first.
all_accounts = Account._query(sort=desc("_date"))
for account in progress(fetch_things2(all_accounts)):
    moderates_any_sr = bool(Subreddit.reverse_moderator_ids(account))
    if moderates_any_sr:
        if not account.modmsgtime:
            # Moderator with no unread-modmail date: mark explicitly read.
            account.modmsgtime = False
        # An existing date means unread modmail -- leave it as-is.
    else:
        # Not a moderator anywhere: the attribute is meaningless, clear it.
        account.modmsgtime = None
    account._commit()
Ejemplo n.º 43
0
def account_from_stripe_customer_id(stripe_customer_id):
    """Look up the gold account tied to a Stripe customer id.

    Returns the matching Account (spam status ignored) or None if there
    is no match.
    """
    candidates = Account._query(
        Account.c.gold_subscr_id == stripe_customer_id,
        Account.c._spam == (True, False),
        data=True,
    )
    return next(iter(candidates), None)
Ejemplo n.º 44
0
def add_all_users():
    """Apply update_user to the entire account table, oldest accounts first."""
    oldest_first = Account._query(sort=asc('_date'))
    for account in fetch_things2(oldest_first):
        update_user(account)
Ejemplo n.º 45
0
def inject_test_data(num_links=25, num_comments=25, num_votes=5):
    """Flood your reddit install with test data based on reddit.com.

    Ensures the configured accounts/subreddits exist, then submits
    ``num_links`` links per modeled subreddit, roughly ``num_comments``
    comments per link and roughly ``num_votes`` votes per thing (both
    fuzzed), waits for the amqp worker, and finally registers the
    generated subreddits as global defaults/featured.
    """

    # The config-named accounts/subreddits must exist before anything
    # below references them.
    print ">>>> Ensuring configured objects exist"
    system_user = ensure_account(g.system_user)
    ensure_account(g.automoderator_account)
    ensure_subreddit(g.default_sr, system_user)
    ensure_subreddit(g.takedown_sr, system_user)
    ensure_subreddit(g.beta_sr, system_user)
    ensure_subreddit(g.promo_sr_name, system_user)

    print
    print

    # Model real subreddits so generated titles/urls/bodies look plausible.
    print ">>>> Fetching real data from reddit.com"
    modeler = Modeler()
    subreddits = [
        modeler.model_subreddit("pics"),
        modeler.model_subreddit("worldnews"),
        modeler.model_subreddit("gaming"),
    ]
    # Extra per-subreddit settings applied after creation, keyed by sr name.
    extra_settings = {
        "worldnews": {
            "show_media": True,
        },
        "pics": {
            "show_media": True,
        },
    }

    print
    print

    print ">>>> Generating test data"
    print ">>> Accounts"
    # Reuse up to 500 existing accounts (excluding the system user) and,
    # if fewer than 50 exist, pad with freshly generated usernames.
    account_query = Account._query(sort="_date", limit=500, data=True)
    accounts = [a for a in account_query if a.name != g.system_user]
    accounts.extend(
        ensure_account(modeler.generate_username())
        for i in xrange(50 - len(accounts)))

    print ">>> Content"
    things = []
    for sr_model in subreddits:
        sr_author = random.choice(accounts)
        sr = ensure_subreddit(sr_model.name, sr_author)

        # make the system user subscribed for easier testing
        if sr.add_subscriber(system_user):
            sr._incr("_ups", 1)

        # apply any custom config we need for this sr
        for setting, value in extra_settings.get(sr.name, {}).iteritems():
            setattr(sr, setting, value)
        sr._commit()

        for i in xrange(num_links):
            link_author = random.choice(accounts)
            # A "self" url marks a selfpost; its content is generated
            # selftext instead of the url.
            url = sr_model.generate_link_url()
            is_self = (url == "self")
            content = sr_model.generate_selfpost_body() if is_self else url
            link = Link._submit(
                is_self=is_self,
                title=sr_model.generate_link_title(),
                content=content,
                author=link_author,
                sr=sr,
                ip="127.0.0.1",
            )
            queries.new_link(link)
            things.append(link)

            # ``None`` in the candidate-parents list yields top-level comments.
            comments = [None]
            for i in xrange(fuzz_number(num_comments)):
                comment_author = random.choice(accounts)
                comment, inbox_rel = Comment._new(
                    comment_author,
                    link,
                    parent=random.choice(comments),
                    body=sr_model.generate_comment_body(),
                    ip="127.0.0.1",
                )
                queries.new_comment(comment, inbox_rel)
                comments.append(comment)
                things.append(comment)

    # Sprinkle random up/unvote/down votes over every link and comment made.
    for thing in things:
        for i in xrange(fuzz_number(num_votes)):
            direction = random.choice([
                Vote.DIRECTIONS.up,
                Vote.DIRECTIONS.unvote,
                Vote.DIRECTIONS.down,
            ])
            voter = random.choice(accounts)

            cast_vote(voter, thing, direction)

    # Block until the queued work above has been processed.
    amqp.worker.join()

    # Register generated subreddits as global defaults, and worldnews as
    # globally featured.
    srs = [Subreddit._by_name(n) for n in ("worldnews", "pics")]
    LocalizedDefaultSubreddits.set_global_srs(srs)
    LocalizedFeaturedSubreddits.set_global_srs(
        [Subreddit._by_name('worldnews')])
Ejemplo n.º 46
0
    if msg._spam and msg.author_id != account._id:
        return False

    if msg.author_id in account.enemies:
        return False

    # don't show user their own unread stuff
    if msg.author_id == account._id:
        return False

    return True


resume_id = long(sys.argv[1]) if len(sys.argv) > 1 else None

msg_accounts = Account._query(sort=desc("_date"), data=True)

if resume_id:
    msg_accounts._filter(Account.c._id < resume_id)

for account in progress(fetch_things2(msg_accounts), estimate=resume_id):
    current_inbox_count = account.inbox_count
    unread_messages = list(queries.get_unread_inbox(account))

    if account._id % 100000 == 0:
        g.reset_caches()

    if not len(unread_messages):
        if current_inbox_count:
            account._incr('inbox_count', -current_inbox_count)
    else: