Example No. 1
def main():
    logging.config.fileConfig(path_to_cfg)
    start_utc = datetime.utcnow()
    start_time = time()

    global r
    try:
        r = reddit.Reddit(user_agent=cfg_file.get('reddit', 'user_agent'))
        logging.info('Logging in as %s', cfg_file.get('reddit', 'username'))
        r.login(cfg_file.get('reddit', 'username'),
            cfg_file.get('reddit', 'password'))
    except Exception as e:
        logging.error('  ERROR: %s', e)

    mod_subreddit = r.get_subreddit('mod')


    #
    # Do actions on individual subreddits
    #
    
    # get subreddit list
    subreddits = Subreddit.query.filter(Subreddit.enabled == True).all()
    sr_dict = dict()
    for subreddit in subreddits:
        sr_dict[subreddit.name.lower()] = subreddit
    
    # do actions on subreddits
    do_subreddits(mod_subreddit, sr_dict, start_utc)
    

    #
    # Do actions on networks
    #
    mods_checked = 0

    # get network list
    networks = Network.query.filter(Network.enabled == True).all()
    
    # do actions on each network
    for network in networks:
        # get subreddits in network
        network_subs = Subreddit.query.filter(
            Subreddit.network == network.id,
            Subreddit.enabled == True).all()
        network_sr_dict = dict()
        for subreddit in network_subs:
            network_sr_dict[subreddit.name.lower()] = subreddit
        
        # do subreddit actions on subreddits
        do_subreddits(mod_subreddit, network_sr_dict, start_utc)
        
        # check network mods
        logging.info('Checking network moderators')
        for subreddit in network_sr_dict.itervalues():
            # only check subs in networks
            if subreddit.network:
                mods_checked += check_network_moderators(network, network_sr_dict)
    
    logging.info('  Checked %s networks, added %s moderators', len(networks), mods_checked)

    logging.info('Completed full run in %s', elapsed_since(start_time))
Example No. 2
def main():
    logging.config.fileConfig(path_to_cfg)
    start_utc = datetime.utcnow()
    start_time = time()

    global r
    try:
        r = reddit.Reddit(user_agent=cfg_file.get('reddit', 'user_agent'))
        logging.info('Logging in as %s', cfg_file.get('reddit', 'username'))
        r.login(cfg_file.get('reddit', 'username'),
            cfg_file.get('reddit', 'password'))

        subreddits = Subreddit.query.filter(Subreddit.enabled == True).all()
        sr_dict = dict()
        for subreddit in subreddits:
            sr_dict[subreddit.name.lower()] = subreddit
        mod_subreddit = r.get_subreddit('mod')
    except Exception as e:
        logging.error('  ERROR: %s', e)

    # check reports
    items = mod_subreddit.get_reports(limit=1000)
    stop_time = datetime.utcnow() - REPORT_BACKLOG_LIMIT
    check_items('report', items, sr_dict, stop_time)

    # check spam
    items = mod_subreddit.get_spam(limit=1000)
    stop_time = (db.session.query(func.max(Subreddit.last_spam))
                 .filter(Subreddit.enabled == True).one()[0])
    check_items('spam', items, sr_dict, stop_time)

    # check new submissions
    items = mod_subreddit.get_new_by_date(limit=1000)
    stop_time = (db.session.query(func.max(Subreddit.last_submission))
                 .filter(Subreddit.enabled == True).one()[0])
    check_items('submission', items, sr_dict, stop_time)

    # check new comments
    comment_multi = '+'.join([s.name for s in subreddits
                              if not s.reported_comments_only])
    if comment_multi:
        comment_multi_sr = r.get_subreddit(comment_multi)
        items = comment_multi_sr.get_comments(limit=1000)
        stop_time = (db.session.query(func.max(Subreddit.last_comment))
                     .filter(Subreddit.enabled == True).one()[0])
        check_items('comment', items, sr_dict, stop_time)

    # respond to modmail
    try:
        respond_to_modmail(r.user.get_modmail(), start_utc)
    except Exception as e:
        logging.error('  ERROR: %s', e)

    # check reports html
    try:
        check_reports_html(sr_dict)
    except Exception as e:
        logging.error('  ERROR: %s', e)

    logging.info('Completed full run in %s', elapsed_since(start_time))
Example No. 3
def main():
    global r
    logging.config.fileConfig(path_to_cfg)
    # the below only works with re2
    # re.set_fallback_notification(re.FALLBACK_EXCEPTION)

    # which queues to check and the function to call
    queue_funcs = {'report': 'get_reports',
                   'spam': 'get_mod_queue',
                   'submission': 'get_new',
                   'comment': 'get_comments'}

    while True:
        try:
            r = praw.Reddit(user_agent=cfg_file.get('reddit', 'user_agent'))
            logging.info('Logging in as {0}'
                         .format(cfg_file.get('reddit', 'username')))
            r.login(cfg_file.get('reddit', 'username'),
                    cfg_file.get('reddit', 'password'))
            sr_dict, cond_dict = initialize(queue_funcs.keys())
            break
        except Exception as e:
            logging.error('ERROR: {0}'.format(e))
    
    run_counter = 0
    while True:
        run_counter += 1
        try:
            # only check reports every 10 runs
            # sleep afterwards in case ^C is needed
            if run_counter % 10 == 0:
                check_queues(queue_funcs, sr_dict, cond_dict)

                Condition.clear_standard_cache()
                if process_messages():
                    sr_dict, cond_dict = initialize(queue_funcs.keys(),
                                                    reload_mod_subs=False)
                logging.info('Sleeping ({0})'.format(datetime.now()))
                sleep(5)
                run_counter = 0
            else:
                check_queues({q: queue_funcs[q]
                              for q in queue_funcs
                              if q != 'report'},
                             sr_dict, cond_dict)
                if process_messages():
                    sr_dict, cond_dict = initialize(queue_funcs.keys(),
                                                    reload_mod_subs=False)
        except (praw.errors.ModeratorRequired,
                praw.errors.ModeratorOrScopeRequired,
                HTTPError) as e:
            if not isinstance(e, HTTPError) or e.response.status_code == 403:
                logging.info('Re-initializing due to {0}'.format(e))
                sr_dict, cond_dict = initialize(queue_funcs.keys())
        except KeyboardInterrupt:
            raise
        except Exception as e:
            logging.error('ERROR: {0}'.format(e))
            session.rollback()
Example No. 4
def main():
    global r
    logging.config.fileConfig(path_to_cfg)
    
    while True:
        try:
            r = praw.Reddit(user_agent=cfg_file.get('reddit', 'user_agent'))
            logging.info('Logging in as {0}'
                         .format(cfg_file.get('reddit', 'username')))
            r.login(cfg_file.get('reddit', 'username'),
                    cfg_file.get('reddit', 'password'))
            sr_dict = get_enabled_subreddits()            
            settings_dict = {subreddit: update_from_wiki(
                                 sr, cfg_file.get('reddit', 'owner_username'))
                             for subreddit, sr in sr_dict.iteritems()}
            break
        except Exception as e:
            logging.error('ERROR: {0}'.format(e))
            traceback.print_exc(file=sys.stdout)

    while True:
        try:
            bans_to_remove = session.query(Ban).filter(Ban.unban_after <= datetime.utcnow()).all()
            logging.debug("\nChecking due bans")
            
            for ban in bans_to_remove:
                logging.debug("  Unbanning /u/{0} from /r/{1}".format(ban.user, ban.subreddit))
                sr = sr_dict[ban.subreddit]
                sr.remove_ban(ban.user)
                session.add(Log(ban.user, ban.subreddit, 'unban'))
                session.delete(ban)
            
            sleep(5)
            logging.info("\nLOOP\n")

            updated_srs = process_messages(sr_dict, settings_dict)
            if updated_srs:
                if any(subreddit not in sr_dict.keys() for subreddit in updated_srs):
                    # settings and mod subs out of sync, reload everything
                    settings_dict = sr_dict.copy()
                    sr_dict = get_enabled_subreddits(reload_mod_subs=True)
                else:
                    sr_dict = get_enabled_subreddits(reload_mod_subs=False)
                    
                settings_dict.update(updated_srs)
            
        except (praw.errors.ModeratorRequired,
                praw.errors.ModeratorOrScopeRequired,
                praw.requests.HTTPError) as e:
            if not isinstance(e, praw.requests.HTTPError) or e.response.status_code == 403:
                logging.info('Re-initializing due to {0}'.format(e))
                sr_dict = get_enabled_subreddits()
        except KeyboardInterrupt:
            raise
        except Exception as e:
            logging.error('ERROR: {0}'.format(e))
            import traceback
            traceback.print_exc()
Example No. 5
def main():
    logging.config.fileConfig(path_to_cfg)
    start_utc = datetime.utcnow()
    start_time = time()

    global r
    try:
        r = reddit.Reddit(user_agent=cfg_file.get('reddit', 'user_agent'))
        logging.info('Logging in as %s', cfg_file.get('reddit', 'username'))
        r.login(cfg_file.get('reddit', 'username'),
                cfg_file.get('reddit', 'password'))

        subreddits = Subreddit.query.filter(Subreddit.enabled == True).all()
        sr_dict = dict()
        for subreddit in subreddits:
            sr_dict[subreddit.name.lower()] = subreddit
        mod_subreddit = r.get_subreddit('mod')
    except Exception as e:
        logging.error('  ERROR: %s', e)

    # check reports
    items = mod_subreddit.get_reports(limit=1000)
    stop_time = datetime.utcnow() - REPORT_BACKLOG_LIMIT
    check_items('report', items, sr_dict, stop_time)

    # check spam
    items = mod_subreddit.get_modqueue(limit=1000)
    stop_time = (db.session.query(func.max(
        Subreddit.last_spam)).filter(Subreddit.enabled == True).one()[0])
    check_items('spam', items, sr_dict, stop_time)

    # check new submissions
    items = mod_subreddit.get_new_by_date(limit=1000)
    stop_time = (db.session.query(func.max(
        Subreddit.last_submission)).filter(Subreddit.enabled == True).one()[0])
    check_items('submission', items, sr_dict, stop_time)

    # check new comments
    comment_multi = '+'.join(
        [s.name for s in subreddits if not s.reported_comments_only])
    if comment_multi:
        comment_multi_sr = r.get_subreddit(comment_multi)
        items = comment_multi_sr.get_comments(limit=1000)
        stop_time = (db.session.query(func.max(Subreddit.last_comment)).filter(
            Subreddit.enabled == True).one()[0])
        check_items('comment', items, sr_dict, stop_time)

    # respond to modmail
    try:
        respond_to_modmail(r.user.get_modmail(), start_utc)
    except Exception as e:
        logging.error('  ERROR: %s', e)

    logging.info('Completed full run in %s', elapsed_since(start_time))
Example No. 6
def main():
    # we get cfg_file from models.py
    # see import at the top

    username = cfg_file.get('reddit', 'username')
    password = cfg_file.get('reddit', 'password')

    print "Logging in..."
    comments = Monitor(username, password)
    print "  Success!"

    comments.monitor_comments()
Example No. 7
def main():
    logging.config.fileConfig(path_to_cfg)
    start_utc = datetime.utcnow()
    start_time = time()

    global r
    try:
        r = praw.Reddit(user_agent=cfg_file.get('reddit', 'user_agent'))
        logging.info('Logging in as %s', cfg_file.get('reddit', 'username'))
        r.login(cfg_file.get('reddit', 'username'),
            cfg_file.get('reddit', 'password'))
        log_request('login')

        subreddits = session.query(Subreddit).filter(
                        Subreddit.enabled == True).all()
        logging.info('Getting list of moderated subreddits')
        modded_subs = list([s.display_name.lower()
                            for s in r.get_my_moderation(limit=None)])
        log_request('mod_subs', len(modded_subs) / 100 + 1)

        # build sr_dict including only subs both in db and modded_subs
        sr_dict = dict()
        cond_dict = dict()
        ms_dict = dict()
        for subreddit in subreddits:
            if subreddit.name.lower() in modded_subs:
                sr_dict[subreddit.name.lower()] = subreddit
                conditions = subreddit.conditions.all()
                cond_dict[subreddit.name.lower()] = {
                    'report': filter_conditions('report', conditions),
                    'spam': filter_conditions('spam', conditions),
                    'submission': filter_conditions('submission', conditions),
                    'comment': filter_conditions('comment', conditions) }
                message_schedules = subreddit.message_schedules.all()
                ms_dict[subreddit.name.lower()] = message_schedules

    except Exception as e:
        logging.error('  ERROR: %s', e)

    check_queues(sr_dict, cond_dict)

    check_message_schedules(sr_dict, ms_dict)

    # respond to modmail
    try:
        respond_to_modmail(r.get_mod_mail(), start_utc)
    except Exception as e:
        logging.error('  ERROR: %s', e)

    logging.info('Completed full run in %s (%s due to reddit requests - %s)',
                    elapsed_since(start_time),
                    timedelta(seconds=sum(log_request.counts.values())*2),
                    log_request.counts)
Example No. 9
    def build_message(self, text, item, match,
                      disclaimer=False, permalink=False, intro=False):
        """Builds a message/comment for the bot to post or send."""
        if intro:
            message = cfg_file.get('reddit', 'intro')
            message = message + " " + text
        else:
            message = text
        if disclaimer:
            message = message+'\n\n'+cfg_file.get('reddit', 'disclaimer')
        if permalink and '{{permalink}}' not in message:
            message = '{{permalink}}\n\n'+message
        message = replace_placeholders(message, item, match)

        return message
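
A minimal usage sketch of the flag handling above, with hypothetical stand-ins for the intro/disclaimer strings normally read from cfg_file and for the placeholder expansion done by replace_placeholders:

INTRO = 'Hi, I am an automated moderation bot.'               # stand-in for cfg_file value
DISCLAIMER = '*I am a bot; message the mods with concerns.*'  # stand-in for cfg_file value

def build_message_sketch(text, permalink_url,
                         disclaimer=False, permalink=False, intro=False):
    message = (INTRO + ' ' + text) if intro else text
    if disclaimer:
        message = message + '\n\n' + DISCLAIMER
    if permalink and '{{permalink}}' not in message:
        message = '{{permalink}}\n\n' + message
    # replace_placeholders() would substitute values from the matched item
    return message.replace('{{permalink}}', permalink_url)

print(build_message_sketch('Your post was removed for breaking rule 1.',
                           'http://redd.it/abc123',
                           disclaimer=True, permalink=True))
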
Example No. 10
def check_queues(queue_funcs, sr_dict, cond_dict):
    """Checks all the queues for new items to process."""
    global r
    
    for queue in queue_funcs:
        subreddits = [s for s in sr_dict if len(cond_dict[s][queue]) > 0]
        if len(subreddits) == 0:
            continue

        multireddits = build_multireddit_groups(subreddits)

        # fetch and process the items for each multireddit
        for multi in multireddits:
            if queue == 'report':
                limit = cfg_file.get('reddit', 'report_backlog_limit_hours')
                stop_time = datetime.utcnow() - timedelta(hours=int(limit))
            else:
                stop_time = max([getattr(sr, 'last_'+queue)
                                 for sr in sr_dict.values()
                                 if sr.name in multi])

            queue_subreddit = r.get_subreddit('+'.join(multi))
            if queue_subreddit:
                queue_func = getattr(queue_subreddit, queue_funcs[queue])
                items = queue_func(limit=None)
                check_items(queue, items, stop_time, sr_dict, cond_dict)
Example No. 11
def check_queues(queue_funcs, sr_dict, cond_dict):
    """Checks all the queues for new items to process."""
    global r

    for queue in queue_funcs:
        subreddits = [s for s in sr_dict if len(cond_dict[s][queue]) > 0]
        if len(subreddits) == 0:
            continue

        multireddits = build_multireddit_groups(subreddits)

        # fetch and process the items for each multireddit
        for multi in multireddits:
            if queue == 'report':
                limit = cfg_file.get('reddit', 'report_backlog_limit_hours')
                stop_time = datetime.utcnow() - timedelta(hours=int(limit))
            else:
                stop_time = max(getattr(sr, 'last_'+queue)
                                 for sr in sr_dict.values()
                                 if sr.name in multi)

            queue_subreddit = r.get_subreddit('+'.join(multi))
            if queue_subreddit:
                queue_func = getattr(queue_subreddit, queue_funcs[queue])
                items = queue_func(limit=None)
                check_items(queue, items, stop_time, sr_dict, cond_dict)
Example No. 12
def respond_to_modmail(modmail, start_time):
    """Responds to modmail if any submitters sent one before approval."""
    cache = list()
    # respond to any modmail sent in the configured window of time
    time_window = timedelta(minutes=int(cfg_file.get('reddit',
                                   'modmail_response_window_mins')))
    approvals = session.query(ActionLog).filter(
                    and_(ActionLog.action == 'approve',
                         ActionLog.action_time >= start_time - time_window)
                    ).all()

    for item in approvals:
        found = None
        done = False

        for i in cache:
            if datetime.utcfromtimestamp(i.created_utc) < item.created_utc:
                done = True
                break
            if (i.dest.lower() == '#'+item.subreddit.name.lower() and
                    i.author.name == item.user and
                    not i.replies):
                found = i
                break

        if not found and not done:
            for i in modmail:
                cache.append(i)
                if datetime.utcfromtimestamp(i.created_utc) < item.created_utc:
                    break
                if (i.dest.lower() == '#'+item.subreddit.name.lower() and
                        i.author.name == item.user and
                        not i.replies):
                    found = i
                    break

        if found:
            found.reply('Your submission has been approved automatically by '+
                cfg_file.get('reddit', 'username')+'. For future submissions '
                'please wait at least '+cfg_file.get('reddit',
                'modmail_response_window_mins')+' minutes before messaging '
                'the mods, this post would have been approved automatically '
                'even without you sending this message.')
            log_request('modmail')

    log_request('modmail_listing', len(cache) / 100 + 1)
Example No. 13
def send_error_message(user, sr_name, error):
    """Sends an error message to the user if a wiki update failed."""
    global r
    r.send_message(
        user, 'Error updating from wiki in /r/{0}'.format(sr_name),
        '### Error updating from [wiki configuration in /r/{0}]'
        '(http://www.reddit.com/r/{0}/wiki/{1}):\n\n---\n\n'
        '{2}\n\n---\n\n[View configuration documentation](https://'
        'github.com/Deimos/AutoModerator/wiki/Wiki-Configuration)'.format(
            sr_name, cfg_file.get('reddit', 'wiki_page_name'), error))
Example No. 14
def send_error_message(user, sr_name, error):
    """Sends an error message to the user if a wiki update failed."""
    global r
    r.send_message(user,
                   'Error updating from wiki in /r/{0}'.format(sr_name),
                   '### Error updating from [wiki configuration in /r/{0}]'
                   '(http://www.reddit.com/r/{0}/wiki/{1}):\n\n---\n\n'
                   '{2}\n\n---\n\n[View configuration documentation](https://'
                   'github.com/Deimos/AutoModerator/wiki/Wiki-Configuration)'
                   .format(sr_name,
                           cfg_file.get('reddit', 'wiki_page_name'),
                           error))
Example No. 15
def get_user_info(username, condition):
    """Gets user info from cache, or from reddit if not cached or expired."""
    global r

    try:
        cache_row = (session.query(UserCache)
                        .filter(UserCache.user == username)
                        .one())
        # see if the condition includes a check that expires
        if (condition.is_gold or
                condition.link_karma or
                condition.comment_karma or
                condition.combined_karma):
            expiry = timedelta(hours=int(cfg_file.get('reddit',
                                           'user_cache_expiry_hours')))
        else:
            expiry = None

        # if not past the expiry, return cached data
        if (not expiry or
                datetime.utcnow() - cache_row.info_last_check < expiry):
            cached = r.get_redditor(username, fetch=False)
            cached.is_gold = cache_row.is_gold
            cached.created_utc = timegm(cache_row.created_utc.timetuple())
            cached.link_karma = cache_row.link_karma
            cached.comment_karma = cache_row.comment_karma
            
            return cached
    except NoResultFound:
        cache_row = UserCache()
        cache_row.user = username
        session.add(cache_row)

    # fetch the user's info from reddit
    try:
        user = r.get_redditor(username)
        log_request('user')

        # save to cache
        cache_row.is_gold = user.is_gold
        cache_row.created_utc = datetime.utcfromtimestamp(user.created_utc)
        cache_row.link_karma = user.link_karma
        cache_row.comment_karma = user.comment_karma
        cache_row.info_last_check = datetime.utcnow()
        session.commit()
    except urllib2.HTTPError as e:
        if e.code == 404:
            # weird case where the user is deleted but API still shows username
            return None
        else:
            raise

    return user
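
The caching logic above only re-fetches a user when the condition depends on fields that can change over time. A small sketch of that expiry decision in isolation (the function name and 24-hour default are illustrative; the real value comes from the user_cache_expiry_hours setting):

from datetime import datetime, timedelta

def cache_is_usable(last_check, condition_needs_fresh_data, expiry_hours=24):
    """Cached user info can be reused unless the condition checks
    gold/karma fields and the cache entry is older than the expiry."""
    if not condition_needs_fresh_data:
        return True
    return datetime.utcnow() - last_check < timedelta(hours=expiry_hours)

# an entry refreshed 3 hours ago, checked by a karma-based condition:
print(cache_is_usable(datetime.utcnow() - timedelta(hours=3), True))   # True
# the same entry checked two days later:
print(cache_is_usable(datetime.utcnow() - timedelta(days=2), True))    # False
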
Example No. 16
def main():
    r = praw.Reddit(user_agent=cfg_file.get('reddit', 'user_agent'))
    r.login(cfg_file.get('reddit', 'username'),
            cfg_file.get('reddit', 'password'))

    # update exclude_banned_modqueue values for subreddits
    subreddits = (session.query(Subreddit)
                         .filter(Subreddit.enabled == True)
                         .all())
    for sr in subreddits:
        try:
            settings = r.get_subreddit(sr.name).get_settings()
            sr.exclude_banned_modqueue = settings['exclude_banned_modqueue']
        except Exception as e:
            sr.exclude_banned_modqueue = False

    session.commit()

    # delete old log entries
    log_retention_days = int(cfg_file.get('database', 'log_retention_days'))
    log_cutoff = datetime.utcnow() - timedelta(days=log_retention_days)
    deleted = session.query(Log).filter(Log.datetime < log_cutoff).delete()
    session.commit()
    print 'Deleted {0} log rows'.format(deleted)
Example No. 17
def respond_to_modmail(modmail, start_time):
    """Responds to modmail if any submitters sent one before approval."""
    cache = list()
    # respond to any modmail sent in the last 5 mins
    time_window = timedelta(minutes=5)
    approvals = ActionLog.query.filter(
                    and_(ActionLog.action == 'approve',
                         ActionLog.action_time >= start_time - time_window)
                    ).all()

    for item in approvals:
        found = None
        done = False

        for i in cache:
            if datetime.utcfromtimestamp(i.created_utc) < item.created_utc:
                done = True
                break
            if (i.dest.lower() == '#'+item.subreddit.name.lower() and
                    i.author.name == item.user and
                    not i.replies):
                found = i
                break

        if not found and not done:
            for i in modmail:
                cache.append(i)
                if datetime.utcfromtimestamp(i.created_utc) < item.created_utc:
                    break
                if (i.dest.lower() == '#'+item.subreddit.name.lower() and
                        i.author.name == item.user and
                        not i.replies):
                    found = i
                    break

        if found:
            found.reply('Your submission has been approved automatically by '+
                cfg_file.get('reddit', 'username')+'. For future submissions '
                'please wait at least 5 minutes before messaging the mods, '
                'this post would have been approved automatically even '
                'without you sending this message.')
Example No. 18
def check_queues(sr_dict, cond_dict):
    """Checks all the queues for new items to process."""
    global r

    for queue in QUEUES:
        subreddits = get_subreddits_for_queue(sr_dict, cond_dict, queue)
        if not subreddits:
            continue

        if queue == 'report':
            report_backlog_limit = timedelta(hours=int(cfg_file.get('reddit',
                                                'report_backlog_limit_hours')))
            stop_time = datetime.utcnow() - report_backlog_limit
        else:
            last_attr = getattr(Subreddit, 'last_'+queue)
            stop_time = (session.query(func.max(last_attr))
                         .filter(Subreddit.enabled == True).one()[0])

        # issues with request being too long at multireddit of ~3000 chars
        # so split into multiple checks if it's longer than that
        # split comment checks into groups of max 40 subreddits as well
        multireddits = []
        current_multi = []
        current_len = 0
        for sub in subreddits:
            if (current_len > 3000 or
                    queue == 'comment' and len(current_multi) >= 40):
                multireddits.append('+'.join(current_multi))
                current_multi = []
                current_len = 0
            current_multi.append(sub)
            current_len += len(sub) + 1
        multireddits.append('+'.join(current_multi))

        # fetch and process the items for each multireddit
        for multi in multireddits:
            queue_subreddit = r.get_subreddit(multi)
            if queue_subreddit:
                queue_method = getattr(queue_subreddit, QUEUES[queue])
                items = queue_method(limit=1000)
                check_items(queue, items, sr_dict, cond_dict, stop_time)
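
A standalone sketch of the splitting step above, pulled out into a helper (the helper name is made up; the ~3000-character limit and the 40-subreddit cap for comment checks come from the comments in the example):

def split_into_multireddits(subreddits, queue,
                            max_len=3000, max_comment_subs=40):
    """Group subreddit names into '+'-joined multireddits, starting a new
    group once the name-length budget (or comment-group size) is exceeded."""
    multireddits = []
    current_multi = []
    current_len = 0
    for sub in subreddits:
        if (current_len > max_len or
                (queue == 'comment' and len(current_multi) >= max_comment_subs)):
            multireddits.append('+'.join(current_multi))
            current_multi = []
            current_len = 0
        current_multi.append(sub)
        current_len += len(sub) + 1   # +1 for the '+' separator
    if current_multi:
        multireddits.append('+'.join(current_multi))
    return multireddits

# e.g. split_into_multireddits(['pics', 'funny', 'askreddit'], 'comment')
# -> ['pics+funny+askreddit']
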
Example No. 19
def respond_to_modmail(modmail, start_time):
    """Responds to modmail if any submitters sent one before approval."""
    cache = list()
    approvals = ActionLog.query.filter(
                    and_(ActionLog.action == 'approve',
                         ActionLog.action_time >= start_time)).all()

    for item in approvals:
        found = None
        done = False

        for i in cache:
            if datetime.utcfromtimestamp(i.created_utc) < item.created_utc:
                done = True
                break
            if (i.dest.lower() == '#'+item.subreddit.name.lower() and
                    i.author.name == item.user and
                    not i.replies):
                found = i
                break

        if not found and not done:
            for i in modmail:
                cache.append(i)
                if datetime.utcfromtimestamp(i.created_utc) < item.created_utc:
                    break
                if (i.dest.lower() == '#'+item.subreddit.name.lower() and
                        i.author.name == item.user and
                        not i.replies):
                    found = i
                    break

        if found:
            found.reply('Your submission has been approved automatically by '+
                cfg_file.get('reddit', 'username')+'. For future submissions '
                'please wait at least 5 minutes before messaging the mods, '
                'this post would have been approved automatically even '
                'without you sending this message.')
Example No. 20
def update_from_wiki(sr, requester=None):
    """Returns updated settings object from the subreddit's wiki."""
    
    global r
    username = cfg_file.get('reddit', 'username')
    if not requester:
        requester = '/r/{0}'.format(sr.display_name)

    logging.info('Updating from wiki in /r/{0}'.format(sr.display_name))

    try:
        page = sr.get_wiki_page(cfg_file.get('reddit', 'wiki_page_name'))
    except Exception:
        send_error_message(requester, sr.display_name,
            'The wiki page could not be accessed. Please ensure the page '
            'http://www.reddit.com/r/{0}/wiki/{1} exists and that {2} '
            'has the "wiki" mod permission to be able to access it.'
            .format(sr.display_name,
                    cfg_file.get('reddit', 'wiki_page_name'),
                    username))
        return False

    html_parser = HTMLParser.HTMLParser()
    page_content = html_parser.unescape(page.content_md)

    # check that all the settings are valid yaml
    settings_defs = [def_group for def_group in yaml.safe_load_all(page_content)]
    if len(settings_defs) == 1:
        settings = settings_defs[0]
    else:
        send_error_message(requester, sr.display_name,
            'Error when reading settings from wiki - '
            '/u/{0} requires a single configuration section, multiple sections found.'
            .format(username))
        return False
    
    if not isinstance(settings, dict):
        send_error_message(requester, sr.display_name,
            'Error when reading settings from wiki - '
            'no settings found.')
        return False
    
    if len(settings) > 0:
        settings = lowercase_keys_recursively(settings)

#         init = defaults.copy()
#         init = {name: value
#                 for name, value in settings
#                 if name in init
#         (init, settings)
        
        for setting, value in settings.iteritems():
            # only keep settings values that we have defined
            if setting not in defaults:
                send_error_message(requester, sr.display_name,
                    'Error while updating from wiki - '
                    'unknown configuration directive `{0}` encountered.'.format(setting))
                return False
            # only keep proper value types
            if type(value) is not settings_values[setting]:
                send_error_message(requester, sr.display_name,
                    'Error while updating from wiki - '
                    '`{0}` may not be type {1}.'
                    .format(value, type(value)))
                return False

            if setting == 'default_ban_duration':
                if value == '' or value == 'forever' or value == None:
                    settings[setting] = None
                else:
                    settings[setting] = str_to_timedelta(value)
            else:
                # everything checks out
                settings[setting] = value

            
    r.send_message(requester,
                   '{0} settings updated'.format(username),
                   "{0}'s settings were successfully updated for /r/{1}"
                   .format(username, sr.display_name))
    return settings
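
The per-setting checks above validate each wiki value against a table of expected types. A minimal sketch of that validation with a hypothetical settings_values table (the real table is defined elsewhere in the bot):

# hypothetical expected-type table, mirroring how the example validates values
settings_values = {'default_ban_duration': str, 'notify_on_ban': bool}

def validate_setting(name, value):
    """Return an error string for an unknown or mistyped setting, else None."""
    if name not in settings_values:
        return 'unknown configuration directive `{0}`'.format(name)
    if type(value) is not settings_values[name]:
        return '`{0}` may not be type {1}'.format(value, type(value).__name__)
    return None

print(validate_setting('notify_on_ban', 'yes'))   # type error message
print(validate_setting('notify_on_ban', True))    # None (valid)
print(validate_setting('bogus_option', 1))        # unknown directive message
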
Example No. 21
def check_queues(sr_dict, cond_dict):
    """Checks all the queues for new items to process."""
    global r

    for queue in QUEUES:
        subreddits = get_subreddits_for_queue(sr_dict, cond_dict, queue)
        if not subreddits:
            continue

        multireddits = []

        if queue == 'comment':
            # split comment checks into groups, using max size from cfg
            # create the number of empty multireddits that we'll need
            num_multis = int(ceil(len(subreddits) / float(cfg_file.get('reddit',
                                                'comment_multireddit_size'))))
            for i in range(num_multis):
                multireddits.append([])
            multi_avg_comments = Counter()

            for sub in subreddits:
                # find the index with the lowest total
                lowest_index = 0
                lowest_sum = multi_avg_comments[0]
                for i in range(1, num_multis):
                    if multi_avg_comments[i] < lowest_sum:
                        lowest_index = i
                        lowest_sum = multi_avg_comments[i]

                # add this subreddit to that multi
                multireddits[lowest_index].append(sub)
                multi_avg_comments[lowest_index] += sr_dict[sub.lower()].avg_comments
        else:
            # issues with request being too long at multireddit of ~3000 chars
            # so split into multiple checks if it's longer than that
            current_multi = []
            current_len = 0
            for sub in subreddits:
                if current_len > 3000:
                    multireddits.append(current_multi)
                    current_multi = []
                    current_len = 0
                current_multi.append(sub)
                current_len += len(sub) + 1
            multireddits.append(current_multi)

        # fetch and process the items for each multireddit
        for multi in multireddits:
            if queue == 'report':
                report_backlog_limit = timedelta(
                        hours=int(cfg_file.get('reddit',
                                               'report_backlog_limit_hours')))
                stop_time = datetime.utcnow() - report_backlog_limit
            else:
                stop_time = max([getattr(sr, 'last_'+queue)
                                 for sr in sr_dict.values()
                                 if sr.name in multi])

            queue_subreddit = r.get_subreddit('+'.join(multi))
            if queue_subreddit:
                queue_method = getattr(queue_subreddit, QUEUES[queue])
                items = queue_method(limit=1000)
                check_items(queue, items, sr_dict, cond_dict, stop_time)
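
The comment branch above spreads subreddits across a fixed number of multireddits so each group carries a similar expected comment volume. A minimal sketch of that greedy balancing with made-up averages:

from collections import Counter

def balance_multireddits(subreddits, avg_comments, num_multis):
    """Greedy assignment: each subreddit joins the multireddit whose
    running total of average comments is currently lowest."""
    multireddits = [[] for _ in range(num_multis)]
    totals = Counter()
    for sub in subreddits:
        lowest_index = min(range(num_multis), key=lambda i: totals[i])
        multireddits[lowest_index].append(sub)
        totals[lowest_index] += avg_comments[sub]
    return multireddits

# hypothetical average comment counts per check period:
averages = {'pics': 500, 'funny': 400, 'books': 50, 'python': 30}
print(balance_multireddits(['pics', 'funny', 'books', 'python'], averages, 2))
# -> [['pics'], ['funny', 'books', 'python']]
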
Example No. 22
def check_conditions(subreddit, item, conditions, stop_after_match=False):
    """Checks an item against a list of conditions.

    Returns True if any conditions matched, False otherwise.
    """
    bot_username = cfg_file.get('reddit', 'username')

    if isinstance(item, praw.objects.Submission):
        conditions = [
            c for c in conditions if c.type in ('submission', 'both')
        ]
    elif isinstance(item, praw.objects.Comment):
        conditions = [c for c in conditions if c.type in ('comment', 'both')]

    # get what's already been performed out of the log
    performed_actions = set()
    performed_yaml = set()
    log_entries = (session.query(Log).filter(
        Log.item_fullname == item.name).all())
    for entry in log_entries:
        performed_actions.add(entry.action)
        performed_yaml.add(entry.condition_yaml)

    # sort the conditions by desc priority, and then by required requests
    conditions.sort(key=lambda c: c.requests_required)
    conditions.sort(key=lambda c: c.priority, reverse=True)

    any_matched = False
    for condition in conditions:
        # don't check remove/spam/report conditions on posts made by mods
        if (condition.moderators_exempt
                and condition.action in ('remove', 'spam', 'report')
                and item.author
                and get_user_rank(item.author, item.subreddit) == 'moderator'):
            continue

        # never remove anything if it's been approved by another mod
        if (condition.action in ('remove', 'spam') and item.approved_by
                and item.approved_by.name.lower() != bot_username.lower()):
            continue

        # don't bother checking condition if this action has already been done
        if condition.action in performed_actions:
            continue

        # don't send repeat messages for the same item
        if ((condition.comment or condition.modmail or condition.message)
                and condition.yaml in performed_yaml):
            continue

        # don't overwrite existing flair
        if ((condition.link_flair_text or condition.link_flair_class)
                and isinstance(item, praw.objects.Submission)
                and (item.link_flair_text or item.link_flair_css_class)):
            continue
        if ((condition.user_flair_text or condition.user_flair_class)
                and (item.author_flair_text or item.author_flair_css_class)
                and not condition.overwrite_user_flair):
            continue

        try:
            start_time = time()
            match = condition.check_item(item)
            if match:
                if condition.action:
                    performed_actions.add(condition.action)
                performed_yaml.add(condition.yaml)

            logging.debug('{0}\n  Result {1} in {2}'.format(
                condition.yaml, match, elapsed_since(start_time)))
        except (praw.errors.ModeratorRequired,
                praw.errors.ModeratorOrScopeRequired, HTTPError) as e:
            raise
        except Exception as e:
            logging.error('ERROR: {0}\n{1}'.format(e, condition.yaml))
            match = False

        any_matched = (any_matched or match)
        if stop_after_match and any_matched:
            break

    return any_matched
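
The two consecutive sorts near the top of this function rely on Python's stable sort: ordering by the cheaper key first and then by descending priority leaves equal-priority conditions in cheapest-first order. A tiny illustration with made-up condition dicts:

conditions = [
    {'name': 'a', 'priority': 1, 'requests_required': 2},
    {'name': 'b', 'priority': 2, 'requests_required': 3},
    {'name': 'c', 'priority': 2, 'requests_required': 1},
]
conditions.sort(key=lambda c: c['requests_required'])
conditions.sort(key=lambda c: c['priority'], reverse=True)
print([c['name'] for c in conditions])   # ['c', 'b', 'a']
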
Example No. 23
def process_messages():
    """Processes the bot's messages looking for invites/commands."""
    global r
    stop_time = int(cfg_file.get('reddit', 'last_message'))
    owner_username = cfg_file.get('reddit', 'owner_username')
    new_last_message = None
    update_srs = set()
    invite_srs = set()
    sleep_after = False

    logging.debug('Checking messages')

    try:
        for message in r.get_inbox():
            if int(message.created_utc) <= stop_time:
                break

            if message.was_comment:
                continue

            if not new_last_message:
                new_last_message = int(message.created_utc)

            # if it's a subreddit invite
            if (not message.author and
                    message.subject.startswith('invitation to moderate /r/')):
                invite_srs.add(message.subreddit.display_name.lower())
            elif message.body.strip().lower() == 'update':
                # handle if they put in something like '/r/' in the subject
                if '/' in message.subject:
                    sr_name = message.subject[message.subject.rindex('/') + 1:]
                else:
                    sr_name = message.subject

                if (sr_name.lower(), message.author.name) in update_srs:
                    continue

                try:
                    subreddit = r.get_subreddit(sr_name)
                    if (message.author.name == owner_username
                            or message.author in subreddit.get_moderators()):
                        update_srs.add((sr_name.lower(), message.author.name))
                    else:
                        send_error_message(
                            message.author, sr_name,
                            'You do not moderate /r/{0}'.format(sr_name))
                except HTTPError as e:
                    send_error_message(
                        message.author, sr_name,
                        'Unable to access /r/{0}'.format(sr_name))
            elif (message.subject.strip().lower() == 'sleep'
                  and message.author.name == owner_username):
                sleep_after = True

        # accept subreddit invites
        for subreddit in invite_srs:
            try:
                # workaround for praw clearing mod sub list on accept
                mod_subs = r.user._mod_subs
                r.accept_moderator_invite(subreddit)
                r.user._mod_subs = mod_subs
                r.user._mod_subs[subreddit] = r.get_subreddit(subreddit)
                logging.info('Accepted mod invite in /r/{0}'.format(subreddit))
            except praw.errors.InvalidInvite:
                pass

        # do requested updates from wiki pages
        updated_srs = []
        for subreddit, sender in update_srs:
            if update_from_wiki(r.get_subreddit(subreddit),
                                r.get_redditor(sender)):
                updated_srs.append(subreddit)
                logging.info('Updated from wiki in /r/{0}'.format(subreddit))
            else:
                logging.info(
                    'Error updating from wiki in /r/{0}'.format(subreddit))

        if sleep_after:
            logging.info('Sleeping for 10 seconds')
            sleep(10)
            logging.info('Sleep ended, resuming')

    except Exception as e:
        logging.error('ERROR: {0}'.format(e))
        raise
    finally:
        # update cfg with new last_message value
        if new_last_message:
            cfg_file.set('reddit', 'last_message', str(new_last_message))
            cfg_file.write(open(path_to_cfg, 'w'))

    return updated_srs
Example No. 24
def check_items(name, items, sr_dict, cond_dict, stop_time):
    """Checks the items generator for any matching conditions."""
    item_count = 0
    comment_counts = Counter()
    start_time = time()
    seen_subs = set()

    logging.info('Checking new %ss', name)

    try:
        for item in items:
            # skip any items in /new that have been approved
            if name == 'submission' and item.approved_by:
                continue

            item_time = datetime.utcfromtimestamp(item.created_utc)
            if item_time <= stop_time:
                break

            subreddit = sr_dict[item.subreddit.display_name.lower()]
            conditions = cond_dict[item.subreddit.display_name.lower()][name]

            # don't need to check for shadowbanned unless we're in spam
            if name == 'spam':
                for condition in conditions:
                    condition.check_shadowbanned = True
            else:
                for condition in conditions:
                    condition.check_shadowbanned = False

            item_count += 1
            if name == 'comment':
                comment_counts[item.subreddit.display_name.lower()] += 1

            if subreddit.name not in seen_subs:
                setattr(subreddit, 'last_'+name, item_time)
                seen_subs.add(subreddit.name)

            logging.debug('  Checking item %s', get_permalink(item))

            # check removal conditions, stop checking if any matched
            if check_conditions(subreddit, item,
                    [c for c in conditions if c.action == 'remove']):
                continue

            # check set_flair conditions 
            check_conditions(subreddit, item,
                    [c for c in conditions if c.action == 'set_flair'])

            # check approval conditions
            check_conditions(subreddit, item,
                    [c for c in conditions if c.action == 'approve'])

            # check alert conditions
            check_conditions(subreddit, item,
                    [c for c in conditions if c.action == 'alert'])

            # check report conditions
            check_conditions(subreddit, item,
                    [c for c in conditions if c.action == 'report'])

            # if doing reports, check auto-reapproval if enabled
            if (name == 'report' and subreddit.auto_reapprove and
                    item.approved_by is not None):
                try:
                    # see if this item has already been auto-reapproved
                    entry = (session.query(AutoReapproval).filter(
                            AutoReapproval.permalink == get_permalink(item))
                            .one())
                    in_db = True
                except NoResultFound:
                    entry = AutoReapproval()
                    entry.subreddit_id = subreddit.id
                    entry.permalink = get_permalink(item)
                    entry.original_approver = item.approved_by.name
                    entry.total_reports = 0
                    entry.first_approval_time = datetime.utcnow()
                    in_db = False

                if (in_db or item.approved_by.name !=
                        cfg_file.get('reddit', 'username')):
                    item.approve()
                    entry.total_reports += item.num_reports
                    entry.last_approval_time = datetime.utcnow()

                    session.add(entry)
                    session.commit()
                    logging.info('  Re-approved %s', entry.permalink)
                    log_request('reapprove')
                            
        session.commit()
    except Exception as e:
        logging.error('  ERROR: %s', e)
        session.rollback()

    # This isn't really correct, since we don't collect any 0 samples
    # but the difference won't matter much in practice
    for subreddit in comment_counts:
        prev_total = (sr_dict[subreddit].avg_comments * 
                     sr_dict[subreddit].avg_comments_samples)
        new_avg = ((prev_total + comment_counts[subreddit]) /
                   (sr_dict[subreddit].avg_comments_samples + 1))
        sr_dict[subreddit].avg_comments = new_avg
        sr_dict[subreddit].avg_comments_samples += 1
    session.commit()

    logging.info('  Checked %s items in %s',
            item_count, elapsed_since(start_time))
    log_request('listing', item_count / 100 + 1)
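
The comment-count bookkeeping at the end of this example folds each run's count into a running mean without keeping the individual samples. The same update in isolation (names illustrative):

def update_running_average(avg, samples, new_value):
    """Fold one new sample into an existing mean and sample count."""
    new_avg = (avg * samples + new_value) / float(samples + 1)
    return new_avg, samples + 1

avg, samples = 12.0, 4                    # previous mean over 4 runs
avg, samples = update_running_average(avg, samples, 20)
print(avg)                                # 13.6, now over 5 samples
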
Example No. 25
def update_from_wiki(subreddit, requester):
    """Updates conditions from the subreddit's wiki."""
    global r
    username = cfg_file.get('reddit', 'username')

    try:
        page = subreddit.get_wiki_page(cfg_file.get('reddit', 'wiki_page_name'))
    except Exception:
        send_error_message(requester, subreddit.display_name,
            'The wiki page could not be accessed. Please ensure the page '
            'http://www.reddit.com/r/{0}/wiki/{1} exists and that {2} '
            'has the "wiki" mod permission to be able to access it.'
            .format(subreddit.display_name,
                    cfg_file.get('reddit', 'wiki_page_name'),
                    username))
        return

    html_parser = HTMLParser.HTMLParser()
    page_content = html_parser.unescape(page.content_md)

    # check that all the conditions are valid yaml
    condition_defs = yaml.safe_load_all(page_content)
    condition_num = 1
    try:
        for cond_def in condition_defs:
            condition_num += 1
    except Exception as e:
        indented = ''
        for line in str(e).split('\n'):
            indented += '    {0}\n'.format(line)
        send_error_message(requester, subreddit.display_name,
            'Error when reading conditions from wiki - '
            'Syntax invalid in section #{0}:\n\n{1}'
            .format(condition_num, indented))
        return

    # reload and actually process the conditions
    condition_defs = yaml.safe_load_all(page_content)
    condition_num = 1
    kept_sections = []
    for cond_def in condition_defs:
        # ignore any non-dict sections (can be used as comments, etc.)
        if not isinstance(cond_def, dict):
            continue

        # lowercase all keys
        cond_def = {k.lower(): v for k, v in cond_def.iteritems()}

        try:
            check_condition_valid(cond_def)
        except ValueError as e:
            send_error_message(requester, subreddit.display_name,
                'Invalid condition in section #{0} - {1}'
                .format(condition_num, e))
            return

        # create a condition for final checks
        condition = Condition(cond_def)

        # test to make sure that the final regex(es) are valid
        for pattern in condition.match_patterns.values():
            try:
                re.compile(pattern)
            except Exception as e:
                send_error_message(requester, subreddit.display_name,
                    'Generated an invalid regex from section #{0} - {1}'
                    .format(condition_num, e))
                return

        condition_num += 1
        kept_sections.append(cond_def)

    # Update the subreddit, or add it if necessary
    try:
        db_subreddit = (session.query(Subreddit)
                       .filter(Subreddit.name == subreddit.display_name.lower())
                       .one())
    except NoResultFound:
        db_subreddit = Subreddit()
        db_subreddit.name = subreddit.display_name.lower()
        db_subreddit.last_submission = datetime.utcnow() - timedelta(days=1)
        db_subreddit.last_spam = datetime.utcnow() - timedelta(days=1)
        db_subreddit.last_comment = datetime.utcnow() - timedelta(days=1)
        session.add(db_subreddit)

    db_subreddit.conditions_yaml = page_content
    session.commit()

    r.send_message(requester,
                   '{0} conditions updated'.format(username),
                   "{0}'s conditions were successfully updated for /r/{1}"
                   .format(username, subreddit.display_name))
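
The wiki page parsed above can hold several YAML documents separated by --- lines; yaml.safe_load_all yields them one by one, and non-dict sections are skipped as comments. A minimal sketch of that parsing step with a made-up page:

import yaml

page_content = """
This leading section is plain text, so it loads as a string and is skipped.
---
type: submission
title: ['buy', 'sell']
action: remove
---
type: comment
body: 'http://'
action: report
"""

for cond_def in yaml.safe_load_all(page_content):
    if not isinstance(cond_def, dict):
        continue
    # lowercase keys the same way the example does before validation
    print({k.lower(): v for k, v in cond_def.items()})
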
Example No. 26
def main():
    logging.config.fileConfig(path_to_cfg)
    start_utc = datetime.utcnow()
    start_time = time()

    global r
    mod_subreddit = None
    try:
        r = praw.Reddit(user_agent=cfg_file.get('reddit', 'user_agent'))
        logging.info('Logging in as %s', cfg_file.get('reddit', 'username'))
        r.login(cfg_file.get('reddit', 'username'),
            cfg_file.get('reddit', 'password'))

        subreddits = Subreddit.query.filter(Subreddit.enabled == True).all()
        sr_dict = dict()
        for subreddit in subreddits:
            sr_dict[subreddit.name.lower()] = subreddit

        # force population of _mod_subs and build multi-reddit
        list(r.get_subreddit('mod').get_spam(limit=1))
        mod_multi = '+'.join([s.name for s in subreddits
                              if s.name.lower() in r.user._mod_subs])
        mod_subreddit = r.get_subreddit(mod_multi)
    except Exception as e:
        logging.error('  ERROR: %s', e)
    
    # check if we got a subreddit to use
    if mod_subreddit == None:
        logging.error('AutoModerator has no subreddits to run on')
        return
    
    # check reports
    items = mod_subreddit.get_reports(limit=1000)
    stop_time = datetime.utcnow() - REPORT_BACKLOG_LIMIT
    check_items('report', items, sr_dict, stop_time)

    # check spam
    items = mod_subreddit.get_modqueue(limit=1000)
    stop_time = (db.session.query(func.max(Subreddit.last_spam))
                 .filter(Subreddit.enabled == True).one()[0])
    check_items('spam', items, sr_dict, stop_time)

    # check new submissions
    items = mod_subreddit.get_new_by_date(limit=1000)
    stop_time = (db.session.query(func.max(Subreddit.last_submission))
                 .filter(Subreddit.enabled == True).one()[0])
    check_items('submission', items, sr_dict, stop_time)

    # check new comments
    comment_multi = '+'.join([s.name for s in subreddits
                              if not s.reported_comments_only])
    if comment_multi:
        comment_multi_sr = r.get_subreddit(comment_multi)
        items = comment_multi_sr.get_comments(limit=1000)
        stop_time = (db.session.query(func.max(Subreddit.last_comment))
                     .filter(Subreddit.enabled == True).one()[0])
        check_items('comment', items, sr_dict, stop_time)

    # respond to modmail
    try:
        respond_to_modmail(r.user.get_modmail(), start_utc)
    except Exception as e:
        logging.error('  ERROR: %s', e)

    logging.info('Completed full run in %s', elapsed_since(start_time))
Example No. 27
def process_messages():
    """Processes the bot's messages looking for invites/commands."""
    global r
    stop_time = int(cfg_file.get('reddit', 'last_message'))
    new_last_message = None
    changes_made = False

    logging.debug('Checking messages')

    try:
        for message in r.get_inbox():
            if int(message.created_utc) <= stop_time:
                break

            if message.was_comment:
                continue

            if not new_last_message:
                new_last_message = int(message.created_utc)

            # if it's a subreddit invite
            if (not message.author and
                    message.subject.startswith('invitation to moderate /r/')):
                try:
                    subreddit = message.subreddit

                    # workaround for praw clearing mod sub list on accept
                    mod_subs = r.user._mod_subs
                    # r.accept_moderator_invite(subreddit)
                    r.user._mod_subs = mod_subs
                    r.user._mod_subs[subreddit.display_name.lower()] = subreddit
                    logging.info('Accepted mod invite in /r/{0}'
                                 .format(message.subreddit.display_name))
                except praw.errors.InvalidInvite:
                    pass
            elif message.body.strip().lower() == 'update':
                # handle if they put in something like '/r/' in the subject
                if '/' in message.subject:
                    sr_name = message.subject[message.subject.rindex('/')+1:]
                else:
                    sr_name = message.subject

                try:
                    subreddit = r.get_subreddit(sr_name)
                    if message.author in subreddit.get_moderators():
                        logging.info('Updating from wiki in /r/{0}'
                                     .format(sr_name))
                        update_from_wiki(subreddit, message.author)
                        changes_made = True
                    else:
                        send_error_message(message.author, sr_name,
                            'You are not a moderator of that subreddit.')
                except HTTPError as e:
                    if e.response.status_code == 404:
                        send_error_message(message.author, sr_name,
                            "The message's subject was not a valid subreddit")
                    else:
                        raise
    except Exception as e:
        logging.error('ERROR: {0}'.format(e))
        raise
    finally:
        # update cfg with new last_message value
        if new_last_message:
            cfg_file.set('reddit', 'last_message', str(new_last_message))
            cfg_file.write(open(path_to_cfg, 'w'))

    return changes_made
Example No. 28
def main():
    global r
    logging.config.fileConfig(path_to_cfg)

    # which queues to check and the function to call
    queue_funcs = {'report': 'get_reports',
                   'spam': 'get_mod_queue',
                   'submission': 'get_new',
                   'comment': 'get_comments'}

    while True:
        try:
            r = praw.Reddit(user_agent=cfg_file.get('reddit', 'user_agent'))
            logging.info('Logging in as {0}'
                         .format(cfg_file.get('reddit', 'username')))
            r.login(cfg_file.get('reddit', 'username'),
                    cfg_file.get('reddit', 'password'))
            sr_dict = get_enabled_subreddits()
            Condition.update_standards()
            cond_dict = load_all_conditions(sr_dict, queue_funcs.keys())
            break
        except Exception as e:
            logging.error('ERROR: {0}'.format(e))

    reports_mins = int(cfg_file.get('reddit', 'reports_check_period_mins'))
    reports_check_period = timedelta(minutes=reports_mins)
    last_reports_check = time()

    while True:
        try:
            # if the standard conditions have changed, reinit all conditions
            if Condition.update_standards():
                logging.info('Updating standard conditions from database')
                cond_dict = load_all_conditions(sr_dict, queue_funcs.keys())

            # check reports if past checking period
            if elapsed_since(last_reports_check) > reports_check_period:
                last_reports_check = time()
                check_queues({'report': queue_funcs['report']},
                             sr_dict, cond_dict)
                             
            check_queues({q: queue_funcs[q]
                          for q in queue_funcs
                          if q != 'report'},
                         sr_dict, cond_dict)

            updated_srs = process_messages()
            if updated_srs:
                if any(sr not in sr_dict for sr in updated_srs):
                    sr_dict = get_enabled_subreddits(reload_mod_subs=True)
                else:
                    sr_dict = get_enabled_subreddits(reload_mod_subs=False)
                for sr in updated_srs:
                    update_conditions_for_sr(cond_dict,
                                             queue_funcs.keys(),
                                             sr_dict[sr])
        except (praw.errors.ModeratorRequired,
                praw.errors.ModeratorOrScopeRequired,
                HTTPError) as e:
            if not isinstance(e, HTTPError) or e.response.status_code == 403:
                logging.info('Re-initializing due to {0}'.format(e))
                sr_dict = get_enabled_subreddits()
        except KeyboardInterrupt:
            raise
        except Exception as e:
            logging.error('ERROR: {0}'.format(e))
            session.rollback()
Exemplo n.º 29
0
def check_items(name, items, sr_dict, stop_time):
    """Checks the items generator for any matching conditions."""
    item_count = 0
    skip_count = 0
    skip_subs = set()
    start_time = time()
    seen_subs = set()

    logging.info('Checking new %ss', name)

    try:
        for item in items:
            # skip any items in /new that have been approved
            if name == 'submission' and item.approved_by:
                continue

            item_time = datetime.utcfromtimestamp(item.created_utc)
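            # listings are newest-first, so stop once items are older than stop_time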
            if item_time <= stop_time:
                break

            try:
                subreddit = sr_dict[item.subreddit.display_name.lower()]
            except KeyError:
                skip_count += 1
                skip_subs.add(item.subreddit.display_name.lower())
                continue

            conditions = (subreddit.conditions
                            .filter(Condition.parent_id == None)
                            .all())
            conditions = filter_conditions(name, conditions)

            item_count += 1

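            # record the newest item time for each subreddit; it becomes that
            # sub's last_<name> checkpoint for the next run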
            if subreddit.name not in seen_subs:
                setattr(subreddit, 'last_'+name, item_time)
                seen_subs.add(subreddit.name)

            # check removal conditions, stop checking if any matched
            if check_conditions(subreddit, item,
                    [c for c in conditions if c.action == 'remove']):
                continue

            # check set_flair conditions 
            check_conditions(subreddit, item,
                    [c for c in conditions if c.action == 'set_flair'])

            # check approval conditions
            check_conditions(subreddit, item,
                    [c for c in conditions if c.action == 'approve'])

            # check alert conditions
            check_conditions(subreddit, item,
                    [c for c in conditions if c.action == 'alert'])

            # if doing reports, check auto-reapproval if enabled
            if (name == 'report' and subreddit.auto_reapprove and
                    item.approved_by is not None):
                try:
                    # see if this item has already been auto-reapproved
                    entry = (AutoReapproval.query.filter(
                            AutoReapproval.permalink == get_permalink(item))
                            .one())
                    in_db = True
                except NoResultFound:
                    entry = AutoReapproval()
                    entry.subreddit_id = subreddit.id
                    entry.permalink = get_permalink(item)
                    entry.original_approver = item.approved_by.name
                    entry.total_reports = 0
                    entry.first_approval_time = datetime.utcnow()
                    in_db = False

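                # re-approve unless the item isn't in the db yet and its only
                # approval so far was made by the bot itself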
                if (in_db or item.approved_by.name !=
                        cfg_file.get('reddit', 'username')):
                    item.approve()
                    entry.total_reports += item.num_reports
                    entry.last_approval_time = datetime.utcnow()

                    db.session.add(entry)
                    db.session.commit()
                    logging.info('  Re-approved %s', entry.permalink)
                            
        db.session.commit()
    except Exception as e:
        logging.error('  ERROR: %s', e)
        db.session.rollback()

    logging.info('  Checked %s items, skipped %s items in %s (skips: %s)',
            item_count, skip_count, elapsed_since(start_time),
            ', '.join(skip_subs))
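
`get_permalink` is referenced throughout these examples but never defined. A minimal sketch, assuming submissions expose `permalink` directly and comment permalinks are built from the comment's `link_id` and `id` (both standard PRAW attributes); the real helper may differ:

def get_permalink(item):
    """Returns a permalink for a submission or comment (sketch only)."""
    if isinstance(item, praw.objects.Submission):
        return item.permalink
    # comments: build the URL from the parent submission's id36 and the comment id
    return ('http://www.reddit.com/r/{0}/comments/{1}/-/{2}'
            .format(item.subreddit.display_name,
                    item.link_id.split('_')[1],
                    item.id))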
Exemplo n.º 30
0
def check_items(queue, items, stop_time, sr_dict, cond_dict):
    """Checks the items generator for any matching conditions."""
    item_count = 0
    start_time = time()
    last_updates = {}

    logging.debug('Checking {0} queue'.format(queue))

    bot_username = cfg_file.get('reddit', 'username')
    for item in items:
        # skip non-removed (reported) items when checking spam
        if queue == 'spam' and not item.banned_by:
            continue

        # never check the bot's own posts
        if item.author and item.author.name.lower() == bot_username.lower():
            continue

        item_time = datetime.utcfromtimestamp(item.created_utc)
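        # newest-first listing: stop once items are older than stop_time;
        # approved submissions don't trigger the stop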
        if (item_time < stop_time and
                (queue != 'submission' or not item.approved_by)):
            break

        sr_name = item.subreddit.display_name.lower()
        subreddit = sr_dict[sr_name]
        conditions = cond_dict[sr_name][queue]

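        # track the newest relevant item time per subreddit; written back to
        # the last_<queue> columns after the loop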
        if (queue != 'report' and
                (queue != 'submission' or not item.approved_by) and
                sr_name not in last_updates):
            last_updates[sr_name] = item_time

        # don't need to check for shadowbanned unless we're in spam
        # and the subreddit doesn't exclude shadowbanned posts
        if queue == 'spam' and not subreddit.exclude_banned_modqueue:
            for condition in conditions:
                condition.check_shadowbanned = True
        else:
            for condition in conditions:
                condition.check_shadowbanned = False

        item_count += 1

        logging.debug('Checking item %s', get_permalink(item))

        try:
            # check removal conditions, stop checking if any matched
            if check_conditions(subreddit, item,
                                [c for c in conditions
                                 if c.action in ('remove', 'spam')],
                                stop_after_match=True):
                continue

            # check all other conditions
            check_conditions(subreddit, item,
                             [c for c in conditions
                              if c.action not in ('remove', 'spam')])
        except (praw.errors.ModeratorRequired,
                praw.errors.ModeratorOrScopeRequired,
                HTTPError) as e:
            if not isinstance(e, HTTPError) or e.response.status_code == 403:
                logging.error('Permissions error in /r/{0}'
                              .format(subreddit.name))
            raise
        except Exception as e:
            logging.error('ERROR: {0}'.format(e))

    # Update "last_" entries in db
    for sr in last_updates:
        setattr(sr_dict[sr], 'last_'+queue, last_updates[sr])
    session.commit()

    logging.debug('Checked {0} items in {1}'
                 .format(item_count, elapsed_since(start_time)))
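
`elapsed_since` is another helper assumed by these examples: it is compared against a `timedelta` (the reports-check period above) and interpolated into log messages, so it most likely wraps `time()` along these lines (a sketch, not the project's actual code):

def elapsed_since(start_time):
    """Returns the time elapsed since start_time (a time() float) as a timedelta."""
    return timedelta(seconds=time() - start_time)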
Exemplo n.º 32
0
def update_from_wiki(subreddit, requester):
    """Updates conditions from the subreddit's wiki."""
    global r
    username = cfg_file.get('reddit', 'username')

    try:
        page = subreddit.get_wiki_page(cfg_file.get('reddit', 'wiki_page_name'))
    except Exception:
        send_error_message(requester, subreddit.display_name,
            'The wiki page could not be accessed. Please ensure the page '
            'http://www.reddit.com/r/{0}/wiki/{1} exists and that {2} '
            'has the "wiki" mod permission to be able to access it.'
            .format(subreddit.display_name,
                    cfg_file.get('reddit', 'wiki_page_name'),
                    username))
        return False

    html_parser = HTMLParser.HTMLParser()
    page_content = html_parser.unescape(page.content_md)

    # check that all the conditions are valid yaml
    condition_defs = yaml.safe_load_all(page_content)
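    # safe_load_all returns a generator, so each section is only parsed when
    # the loop below advances to it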
    condition_num = 1
    try:
        for cond_def in condition_defs:
            condition_num += 1
    except Exception as e:
        indented = ''
        for line in str(e).split('\n'):
            indented += '    {0}\n'.format(line)
        send_error_message(requester, subreddit.display_name,
            'Error when reading conditions from wiki - '
            'Syntax invalid in section #{0}:\n\n{1}'
            .format(condition_num, indented))
        return False

    # reload and actually process the conditions
    condition_defs = yaml.safe_load_all(page_content)
    condition_num = 1
    kept_sections = []
    for cond_def in condition_defs:
        # ignore any non-dict sections (can be used as comments, etc.)
        if not isinstance(cond_def, dict):
            continue

        cond_def = lowercase_keys_recursively(cond_def)

        try:
            check_condition_valid(cond_def)
        except ValueError as e:
            send_error_message(requester, subreddit.display_name,
                'Invalid condition in section #{0} - {1}'
                .format(condition_num, e))
            return False

        # create a condition for final checks
        condition = Condition(cond_def)

        # test to make sure that the final regex(es) are valid
        for pattern in condition.match_patterns.values():
            try:
                re.compile(pattern)
            except Exception as e:
                send_error_message(requester, subreddit.display_name,
                    'Generated an invalid regex from section #{0} - {1}'
                    .format(condition_num, e))
                return False

        condition_num += 1
        kept_sections.append(cond_def)

    # Update the subreddit, or add it if necessary
    try:
        db_subreddit = (session.query(Subreddit)
                       .filter(Subreddit.name == subreddit.display_name.lower())
                       .one())
    except NoResultFound:
        db_subreddit = Subreddit()
        db_subreddit.name = subreddit.display_name.lower()
        db_subreddit.last_submission = datetime.utcnow() - timedelta(days=1)
        db_subreddit.last_spam = datetime.utcnow() - timedelta(days=1)
        db_subreddit.last_comment = datetime.utcnow() - timedelta(days=1)
        session.add(db_subreddit)

    db_subreddit.conditions_yaml = page_content
    session.commit()

    r.send_message(requester,
                   '{0} conditions updated'.format(username),
                   "{0}'s conditions were successfully updated for /r/{1}"
                   .format(username, subreddit.display_name))
    return True
Exemplo n.º 33
0
def main():
    global r
    logging.config.fileConfig(path_to_cfg)
    re.set_fallback_notification(re.FALLBACK_EXCEPTION)

    # which queues to check and the function to call
    queue_funcs = {
        'report': 'get_reports',
        'spam': 'get_mod_queue',
        'submission': 'get_new',
        'comment': 'get_comments'
    }

    while True:
        try:
            r = praw.Reddit(user_agent=cfg_file.get('reddit', 'user_agent'))
            logging.info('Logging in as {0}'.format(
                cfg_file.get('reddit', 'username')))
            r.login(cfg_file.get('reddit', 'username'),
                    cfg_file.get('reddit', 'password'))
            sr_dict = get_enabled_subreddits()
            Condition.update_standards()
            cond_dict = load_all_conditions(sr_dict, queue_funcs.keys())
            break
        except Exception as e:
            logging.error('ERROR: {0}'.format(e))

    reports_mins = int(cfg_file.get('reddit', 'reports_check_period_mins'))
    reports_check_period = timedelta(minutes=reports_mins)
    last_reports_check = time()

    while True:
        try:
            sr_dict = get_enabled_subreddits(reload_mod_subs=False)

            # if the standard conditions have changed, reinit all conditions
            if Condition.update_standards():
                logging.info('Updating standard conditions from database')
                cond_dict = load_all_conditions(sr_dict, queue_funcs.keys())

            # check reports if past checking period
            if elapsed_since(last_reports_check) > reports_check_period:
                last_reports_check = time()
                check_queues({'report': queue_funcs['report']}, sr_dict,
                             cond_dict)

            check_queues(
                {q: queue_funcs[q]
                 for q in queue_funcs if q != 'report'}, sr_dict, cond_dict)

            updated_srs = process_messages()
            if updated_srs:
                if any(sr not in sr_dict for sr in updated_srs):
                    sr_dict = get_enabled_subreddits(reload_mod_subs=True)
                else:
                    sr_dict = get_enabled_subreddits(reload_mod_subs=False)
                for sr in updated_srs:
                    update_conditions_for_sr(cond_dict, queue_funcs.keys(),
                                             sr_dict[sr])
        except (praw.errors.ModeratorRequired,
                praw.errors.ModeratorOrScopeRequired, HTTPError) as e:
            if not isinstance(e, HTTPError) or e.response.status_code == 403:
                logging.info('Re-initializing due to {0}'.format(e))
                sr_dict = get_enabled_subreddits()
        except KeyboardInterrupt:
            raise
        except Exception as e:
            logging.error('ERROR: {0}'.format(e))
            session.rollback()
Exemplo n.º 34
0
def process_messages():
    """Processes the bot's messages looking for invites/commands."""
    global r
    stop_time = int(cfg_file.get('reddit', 'last_message'))
    owner_username = cfg_file.get('reddit', 'owner_username')
    new_last_message = None
    update_srs = set()
    invite_srs = set()
    sleep_after = False

    logging.debug('Checking messages')

    try:
        for message in r.get_inbox():
            if int(message.created_utc) <= stop_time:
                break

            if message.was_comment:
                continue

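            # the inbox is newest-first, so the first message seen becomes the
            # new last_message checkpoint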
            if not new_last_message:
                new_last_message = int(message.created_utc)

            # if it's a subreddit invite
            if (not message.author and
                    message.subject.startswith('invitation to moderate /r/')):
                invite_srs.add(message.subreddit.display_name.lower())
            elif message.body.strip().lower() == 'update':
                # handle if they put in something like '/r/' in the subject
                if '/' in message.subject:
                    sr_name = message.subject[message.subject.rindex('/')+1:]
                else:
                    sr_name = message.subject

                if (sr_name.lower(), message.author.name) in update_srs:
                    continue

                try:
                    subreddit = r.get_subreddit(sr_name)
                    if (message.author.name == owner_username or
                            message.author in subreddit.get_moderators()):
                        update_srs.add((sr_name.lower(), message.author.name))
                    else:
                        send_error_message(message.author, sr_name,
                            'You do not moderate /r/{0}'.format(sr_name))
                except HTTPError as e:
                    send_error_message(message.author, sr_name,
                        'Unable to access /r/{0}'.format(sr_name))
            elif (message.subject.strip().lower() == 'sleep' and
                  message.author.name == owner_username):
                sleep_after = True

        # accept subreddit invites
        for subreddit in invite_srs:
            try:
                # workaround for praw clearing mod sub list on accept
                mod_subs = r.user._mod_subs
                r.accept_moderator_invite(subreddit)
                r.user._mod_subs = mod_subs
                r.user._mod_subs[subreddit] = r.get_subreddit(subreddit)
                logging.info('Accepted mod invite in /r/{0}'
                             .format(subreddit))
            except praw.errors.InvalidInvite:
                pass

        # do requested updates from wiki pages
        updated_srs = []
        for subreddit, sender in update_srs:
            if update_from_wiki(r.get_subreddit(subreddit),
                                r.get_redditor(sender)):
                updated_srs.append(subreddit)
                logging.info('Updated from wiki in /r/{0}'.format(subreddit))
            else:
                logging.info('Error updating from wiki in /r/{0}'
                             .format(subreddit))

        if sleep_after:
            logging.info('Sleeping for 10 seconds')
            sleep(10)
            logging.info('Sleep ended, resuming')

    except Exception as e:
        logging.error('ERROR: {0}'.format(e))
        raise
    finally:
        # update cfg with new last_message value
        if new_last_message:
            cfg_file.set('reddit', 'last_message', str(new_last_message))
            cfg_file.write(open(path_to_cfg, 'w'))

    return updated_srs
Exemplo n.º 35
0
def check_reports_html(sr_dict):
    """Does report alerts/reapprovals, requires loading HTML page."""
    global r

    logging.info('Checking reports html page')
    reports_page = r._request('http://www.reddit.com/r/mod/about/reports')
    soup = BeautifulSoup(reports_page)

    # check for report alerts
    for reported_item in soup.findAll(
            attrs={'class': 'rounded reported-stamp stamp'}):
        permalink = (reported_item.parent
                     .findAll('li', attrs={'class': 'first'})[0].a['href'])
        sub_name = re.search('^http://www.reddit.com/r/([^/]+)',
                    permalink).group(1).lower()
        try:
            subreddit = sr_dict[sub_name]
        except KeyError:
            continue

        if not subreddit.report_threshold:
            continue

        reports = re.search('(\d+)$', reported_item.text).group(1)
        if int(reports) >= subreddit.report_threshold:
            try:
                # check log to see if this item has already had an alert
                ActionLog.query.filter(
                    and_(ActionLog.subreddit_id == subreddit.id,
                         ActionLog.permalink == permalink,
                         ActionLog.action == 'alert')).one()
            except NoResultFound:
                c = Condition()
                c.action = 'alert'
                perform_action(subreddit, permalink, c)

    # do auto-reapprovals
    for approved_item in soup.findAll(
            attrs={'class': 'approval-checkmark'}):
        report_stamp = approved_item.parent.parent.findAll(
                        attrs={'class': 'rounded reported-stamp stamp'})[0]

        permalink = (report_stamp.parent
                     .findAll('li', attrs={'class': 'first'})[0].a['href'])
        sub_name = re.search('^http://www.reddit.com/r/([^/]+)',
                    permalink).group(1).lower()
        try:
            subreddit = sr_dict[sub_name]
        except KeyError:
            continue

        if not subreddit.auto_reapprove:
            continue

        num_reports = re.search('(\d+)$', report_stamp.text).group(1)
        num_reports = int(num_reports)

        try:
            # see if this item has already been auto-reapproved
            entry = (AutoReapproval.query.filter(
                        and_(AutoReapproval.subreddit_id == subreddit.id,
                             AutoReapproval.permalink == permalink))
                        .one())
            in_db = True
        except NoResultFound:
            entry = AutoReapproval()
            entry.subreddit_id = subreddit.id
            entry.permalink = permalink
            entry.original_approver = (re.search('approved by (.+)$',
                                                 approved_item['title'])
                                       .group(1))
            entry.total_reports = 0
            entry.first_approval_time = datetime.utcnow()
            in_db = False

        if (in_db or
                approved_item['title'].lower() !=
                'approved by ' + cfg_file.get('reddit', 'username').lower()):
            sub = r.get_submission(permalink)
            sub.approve()
            entry.total_reports += num_reports
            entry.last_approval_time = datetime.utcnow()

            db.session.add(entry)
            db.session.commit()
            logging.info('    Re-approved %s', entry.permalink)
Exemplo n.º 36
0
def check_conditions(subreddit, item, conditions, stop_after_match=False):
    """Checks an item against a list of conditions.

    Returns True if any conditions matched, False otherwise.
    """
    bot_username = cfg_file.get('reddit', 'username')

    if isinstance(item, praw.objects.Submission):
        conditions = [c for c in conditions
                          if c.type in ('submission', 'both')]
    elif isinstance(item, praw.objects.Comment):
        conditions = [c for c in conditions
                          if c.type in ('comment', 'both')]

    # get what's already been performed out of the log
    performed_actions = set()
    performed_yaml = set()
    log_entries = (session.query(Log)
                          .filter(Log.item_fullname == item.name)
                          .all())
    for entry in log_entries:
        performed_actions.add(entry.action)
        performed_yaml.add(entry.condition_yaml)

    # sort the conditions by desc priority, and then by required requests
    conditions.sort(key=lambda c: c.requests_required)
    conditions.sort(key=lambda c: c.priority, reverse=True)
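    # (list.sort is stable, so within equal priority the cheaper,
    # fewer-request conditions stay first)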

    any_matched = False
    for condition in conditions:
        # never remove anything if it's been approved by another mod
        if (condition.action in ('remove', 'spam') and
                item.approved_by and
                item.approved_by.name.lower() != bot_username.lower()):
            continue

        # don't bother checking condition if this action has already been done
        if condition.action in performed_actions:
            continue

        # don't send repeat messages for the same item
        if ((condition.comment or condition.modmail or condition.message) and
                condition.yaml in performed_yaml):
            continue

        # don't overwrite existing flair
        if ((condition.link_flair_text or condition.link_flair_class) and
                isinstance(item, praw.objects.Submission) and
                (item.link_flair_text or item.link_flair_css_class)):
            continue
        if ((condition.user_flair_text or condition.user_flair_class) and
                (item.author_flair_text or item.author_flair_css_class)):
            continue

        try:
            start_time = time()
            match = condition.check_item(item)
            if match:
                performed_actions.add(condition.action)
                performed_yaml.add(condition.yaml)

            logging.debug('{0}\n  Result {1} in {2}'
                          .format(condition.yaml,
                                  match,
                                  elapsed_since(start_time)))
        except (praw.errors.ModeratorRequired,
                praw.errors.ModeratorOrScopeRequired,
                HTTPError) as e:
            raise
        except Exception as e:
            logging.error('ERROR: {0}\n{1}'.format(e, condition.yaml))
            match = False

        any_matched = (any_matched or match)
        if stop_after_match and any_matched:
            break

    return any_matched
Exemplo n.º 37
0
def process_messages(sr_dict, settings_dict):
    """Processes the bot's messages looking for invites/commands."""
    global r
    stop_time = int(cfg_file.get('reddit', 'last_message'))
    owner_username = cfg_file.get('reddit', 'owner_username')
    new_last_message = None
    update_srs = set()
    invite_srs = set()
    sleep_after = False

    logging.debug('Checking messages')

    try:
        for message in r.get_inbox():
            logging.debug("Reading message from {0}".format(message.author))
            # use exceptions to send error message reply to user
            try:
                if int(message.created_utc) <= stop_time:
                    logging.debug("  Message too old")
                    break
    
                if message.was_comment:
                    logging.debug("  Message was comment")
                    continue

                # don't get stuck in conversation loops with other bots
                # (guard against author being None, e.g. messages sent by a subreddit)
                if (message.author and
                        message.author.name.lower() in
                        ('reddit', 'ban_timer', 'mod_mailer',
                         'f7u12_hampton', 'botwatchman')):
                    continue

                if not new_last_message:
                    new_last_message = int(message.created_utc)
    
                # if it's a subreddit invite
                if (message.subreddit and
                        message.subject.startswith('invitation to moderate /r/')):
                    message.mark_as_read()
                    raise UserspaceError("/u/ban_timer is currently in closed beta. Message the mods of /r/ban_timer for access.")
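                    # note: the raise above ends this branch, so the invite-accepting
                    # code below is effectively disabled while the closed-beta gate is in place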
                    try:
                        subreddit = message.subreddit.display_name.lower()
                        # workaround for praw clearing mod sub list on accept
                        mod_subs = r.user._mod_subs
                        r.accept_moderator_invite(subreddit)
                        r.user._mod_subs = mod_subs
                        r.user._mod_subs[subreddit] = r.get_subreddit(subreddit)
                        logging.info('Accepted mod invite in /r/{0}'
                                     .format(subreddit))
                    except praw.errors.InvalidInvite:
                        pass
                # if it's a control message
                elif '/' in message.subject:
                    logging.debug("  Control Message")
                    sr_name = message.subject[message.subject.rindex('/')+1:].lower()
                    logging.debug("  /r/{0}".format(sr_name))
    
                    if sr_name in sr_dict:
                        sr = sr_dict[sr_name]
                    else:
                        logging.debug("  unknown subreddit /r/{0}".format(sr_name))
                        message.mark_as_read()
                        raise UserspaceError("/r/{0} is not registered with /u/ban_timer. "
                                        "Please invite /u/ban_timer to moderate /r/{0} "
                                        "with at least the `access` permission.".format(sr_name))
    
                    if (message.author not in sr.get_moderators() and
                            message.author.name != owner_username):
                        logging.debug("  unauthorized user /u/{0}".format(message.author.name))
                        message.mark_as_read()
                        raise UserspaceError("You do not moderate /r/{0}".format(sr_name))
    
                    if message.body.strip().lower() == 'update':
                        if (message.author.name == owner_username or
                                message.author in sr.get_moderators()):
                            logging.debug("  update message")
                            update_srs.add((sr_name.lower(), message.author.name))
                    else:
                        logging.debug("  ban message")
                        
                        # add or remove a ban
                        args = message.body.strip().split("\n")
                        args = filter(lambda arg: arg.strip() != '', args)
                        user = args[1]
        
        #                 for mod in permissions:
        #                     print mod.permissions
        
                        if args[0].lower() == 'ban':
                            duration = args[2].lower() if 2 < len(args) else None
                            duration = duration if duration != 'forever' else None
                            note = args[3] if 3 < len(args) else None
                                                        
                            logging.debug("  Banning /u/{0}".format(user))
                            ban = Ban(sr_name, user, message.author.name, duration, note)
                            sr.add_ban(ban.user, note="<{0}> {1} | /u/ban_timer for /u/{2}".format(ban.duration, ban.note, ban.banned_by))
#                             sr.add_ban(ban.user)
                            session.add(ban)
                            message.mark_as_read()
                            raise UserspaceReply("Successfully banned /u/{0} from /r/{1} for {2}.".format(user, sr_name, duration or 'forever'))
                        elif args[0].lower() == 'unban':
                            logging.debug("  Unbanning /u/{0}".format(user))
                            ban = session.query(Ban).filter(Ban.user==user, Ban.subreddit==sr_name).one()
                            sr.remove_ban(ban.user)
                            session.delete(ban)
                            message.mark_as_read()
                            raise UserspaceReply("Successfully unbanned /u/{0} from /r/{1}".format(user, sr_name))
                        else:
                            message.reply('Unrecognized command syntax. Please check the command syntax documentation.')
                elif (message.subject.strip().lower() == 'sleep' and
                      message.author.name == owner_username):
                    logging.debug("  Sleep Message")
                    sleep_after = True
                    message.mark_as_read()
                else:
                    logging.debug("  Unknown Message")
            except UserspaceReply as e:
                message.reply("{0}".format(e))
            except UserspaceError as e:
                message.reply("Error: {0}".format(e))
        
        # do requested updates from wiki pages
        updated_srs = {}
        for subreddit, sender in update_srs:
            new_settings = update_from_wiki(r.get_subreddit(subreddit), r.get_redditor(sender))
            if new_settings:
                updated_srs[subreddit] = new_settings
                logging.info('Updated from wiki in /r/{0}'.format(subreddit))
            else:
                logging.info('Error updating from wiki in /r/{0}'
                             .format(subreddit))

        if sleep_after:
            logging.info('Sleeping for 10 seconds')
            sleep(10)
            logging.info('Sleep ended, resuming')

    except Exception as e:
        logging.error('ERROR: {0}'.format(e))
        raise
    finally:
        # update cfg with new last_message value
        if new_last_message:
            cfg_file.set('reddit', 'last_message', str(new_last_message))
            cfg_file.write(open(path_to_cfg, 'w'))
        # push bans to the database
        session.commit()


    return updated_srs