Code Example #1
File: user.py  Project: jlblatt/Hollowbot
import time

from init import cur, opener  # assumed: the same init module respond.py imports from
import log

# _ is Hollowbot's project-wide config dict, loaded elsewhere.


def checkLogin():
    global isLoggedIn

    cur.execute("select cookie, modhash from session")
    if cur.rowcount == 0:
        return

    # Attach the stored reddit session cookie and modhash to every request.
    for s in cur.fetchall():
        opener.addheaders.append(('Cookie', 'reddit_session=%s' % s[0]))
        opener.addheaders.append(('X-Modhash', s[1]))

    try:
        success = False
        for i in range(_['http_retries']):
            f = opener.open('http://www.reddit.com/api/me.json')
            if f.getcode() == 200:
                success = True
                break
            else:
                log.write('Error %d for login status check attempt' % f.getcode(), 'error')
                if f.getcode() in [401, 403, 404]: 
                    return
                time.sleep(_['sleep'])

        if not success:
            log.write('Retries exhausted for login status check', 'error')
            return

        time.sleep(_['sleep'])

    except Exception as e:
        log.write('Error checking login status: %s' % e, 'error')
        return
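The retry loop above is the pattern to note here: try the HTTP call up to _['http_retries'] times, sleep between attempts, and give up immediately on status codes a retry cannot fix (401, 403, 404). A minimal standalone sketch of the same idea; fetch_with_retries, MAX_RETRIES, and SLEEP_SECS are illustrative names, not part of Hollowbot, and like checkLogin it assumes the opener returns non-200 responses rather than raising:

import time

MAX_RETRIES = 3  # stands in for _['http_retries']
SLEEP_SECS = 2   # stands in for _['sleep']

def fetch_with_retries(opener, url):
    """Return the response on HTTP 200, or None on a fatal code or exhausted retries."""
    for attempt in range(MAX_RETRIES):
        f = opener.open(url)
        if f.getcode() == 200:
            return f
        if f.getcode() in (401, 403, 404):  # auth failure or missing resource
            return None
        time.sleep(SLEEP_SECS)
    return None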
Code Example #2
File: hollowbot.py  Project: jlblatt/Hollowbot
from sys import argv
from time import sleep

from init import db, cur  # assumed: the same init module respond.py imports from
import comments
import lib
import links
import locations
import user

# Delete old links and comments
if 'runall' in argv or 'cleanup' in argv:
    if _['delete_links_after'] > -1:
        cur.execute("delete from t3 where created < date_sub(now(), interval %s second)",
                    (_['delete_links_after'],))
    if _['delete_comments_after'] > -1:
        cur.execute("delete from t1 where created < date_sub(now(), interval %s second)",
                    (_['delete_comments_after'],))
    db.commit()

# Build/store locations to retrieve links
if 'runall' in argv or 'locations' in argv:
    locations.build(_['crawl_subreddits'], _['crawl_urls'])

# Crawl URLs from the stored locations
if 'runall' in argv or 'links' in argv:
    cur.execute("select id, url from crawl_locations where last_crawled < date_sub(now(), interval %s second)", (_['find_links_after'],))
    for l in cur.fetchall():
        links.get("%s?limit=%d" % (l[1], _['links_per_page']))
        cur.execute("update crawl_locations set last_crawled = now() where id = %s", (l[0],))
        db.commit()

# Crawl eligible links
if 'runall' in argv or 'comments' in argv:
    cur.execute("select id, permalink from t3 where last_crawled < date_sub(now(), interval %s second)", (_['recrawl_links_after'],))
    for l in cur.fetchall():
        for sort in _['comment_sort']:
            comments.get("http://www.reddit.com%s" % l[1],
                         't3_' + lib.base36encode(l[0]).lower(), '',
                         "limit=%d&depth=%d&sort=%s" % (_['comment_limit_per_request'],
                                                        _['comment_depth_per_request'], sort))
            cur.execute("update t3 set last_crawled = now() where id = %s", (l[0],))
            db.commit()
            sleep(_['sleep'])

# Login and respond to links/comments
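lib.base36encode itself is not shown on this page. Reddit thing IDs (t3_..., t1_...) are base-36 strings, so an implementation along these lines would satisfy the call above; this is the standard recipe, not necessarily Hollowbot's actual lib.py:

def base36encode(number, alphabet='0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'):
    """Convert a non-negative integer to a base-36 string."""
    if number < 0:
        raise ValueError('number must be non-negative')
    if number == 0:
        return alphabet[0]
    encoded = ''
    while number:
        number, remainder = divmod(number, 36)
        encoded = alphabet[remainder] + encoded
    return encoded

The caller lowercases the result because reddit fullnames use lowercase base-36 digits.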
Code Example #3
File: respond.py  Project: jlblatt/Hollowbot
import re

from init import db, cur, opener
import lib
import log
import stats
import user
import userfunctions

# reddit returns HTML-escaped markdown, so quoted lines start with "&gt;".
quotedRE = re.compile("^&gt;.*$", re.I | re.M)

# Pre-compile each rule's regex once, caching it on the rule itself.
for rule in _["rules"]:
    if "regex" in rule:
        rule["re"] = re.compile(rule["regex"], re.I | re.M)

cur.execute("select distinct thing_id from responses")
responses = cur.fetchall()

rcount = 0


def processComment(cid, body, author):
    """Run every configured rule against a single comment."""
    for rule in _["rules"]:
        if "flags" in rule and "ignoreQuotedText" in rule["flags"]:
            body = re.sub(quotedRE, "", body)

        # Rules flagged selftextOnly target link selftext, so skip them here.
        if "flags" not in rule or "selftextOnly" not in rule["flags"]:
            if "user_function" in rule:
                try:
                    getattr(userfunctions, rule["user_function"])(cid, body, author)
                except Exception as e:
                    log.write('Error running user function "%s": %s' % (rule["user_function"], e), "error")
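processComment reads three keys from each entry of _["rules"]: "regex" (compiled onto rule["re"] at startup), "flags", and "user_function". A hypothetical rule list showing that shape; the values are invented for illustration:

_ = {
    "rules": [
        {
            "regex": "hollowbot",           # compiled into rule["re"] at startup
            "flags": ["ignoreQuotedText"],  # strip "&gt; ..." quoted lines before matching
        },
        {
            "user_function": "greet",       # resolved via getattr(userfunctions, ...)
            "flags": ["selftextOnly"],      # processComment skips this rule
        },
    ],
}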