Example #1
0
File: user.py  Project: jlblatt/Hollowbot
def checkLogin():
    """Verify that the stored reddit session is still valid.

    Loads any saved (cookie, modhash) pairs from the ``session`` table,
    attaches them as headers on the shared ``opener``, then requests
    reddit's /api/me.json to confirm the session works.  Transient HTTP
    failures are retried up to _['http_retries'] times; 401/403/404
    abort immediately.  Always returns None.
    """
    # NOTE(review): isLoggedIn is declared global but never assigned in
    # this function's visible body -- confirm intent.
    global isLoggedIn

    cur.execute("select cookie, modhash from session")
    # BUG FIX: the original used `is 0`, which tests object identity
    # rather than numeric equality; use == for the value comparison.
    if cur.rowcount == 0:
        return
    for s in cur.fetchall():
        # s[0] = cookie, s[1] = modhash (order fixed by the SELECT above).
        opener.addheaders.append(('Cookie', 'reddit_session=%s' % s[0]))
        opener.addheaders.append(('X-Modhash', s[1]))

    try:
        success = False
        for i in range(_['http_retries']):
            f = opener.open('http://www.reddit.com/api/me.json')
            if f.getcode() == 200:
                success = True
                break
            log.write('Error %d for login status check attempt' % f.getcode(), 'error')
            if f.getcode() in [401, 403, 404]:
                # Client errors will not recover on retry; give up now.
                return
            time.sleep(_['sleep'])

        if not success:
            log.write('Retries exhausted for login status check', 'error')
            return

        # Throttle before the caller issues its next API request.
        time.sleep(_['sleep'])

    except Exception as e:
        # BUG FIX: the original format string '%e' is a float exponent
        # specifier and raises TypeError when given an exception; '%s'
        # is what was intended.  ('as e' also works on Python 2.6+.)
        log.write('Error checking login status: %s' % e, 'error')
        return
Example #2
0
File: user.py  Project: jlblatt/Hollowbot
def login():
    """Log the configured user into reddit via POST to /api/login.

    Uses _['reddit_username'] / _['reddit_password'] from the shared
    config dict.  Transient HTTP failures are retried up to
    _['http_retries'] times; 401/403/404 abort immediately.  Always
    returns None.
    """
    # NOTE(review): isLoggedIn is declared global but never assigned in
    # this function's visible body -- confirm intent.
    global isLoggedIn

    log.write('Logging in user %s' % _['reddit_username'], 'message')

    try:
        success = False
        for i in range(_['http_retries']):
            # NOTE(review): credentials go over plain HTTP and are not
            # URL-encoded; special characters in the password would
            # corrupt the form body -- confirm against the API contract.
            f = opener.open('http://www.reddit.com/api/login', 'api_type=json&user=%s&passwd=%s' % (_['reddit_username'], _['reddit_password']))
            if f.getcode() == 200:
                success = True
                break
            log.write('Error %d for login attempt' % f.getcode(), 'error')
            if f.getcode() in [401, 403, 404]:
                # Client errors will not recover on retry; give up now.
                return
            time.sleep(_['sleep'])

        if not success:
            log.write('Retries exhausted for login', 'error')
            return

        # Throttle before the caller issues its next API request.
        time.sleep(_['sleep'])

    except Exception as e:
        # BUG FIX: '%e' is a float format specifier and raises TypeError
        # when formatting an exception object; '%s' is what was intended.
        log.write('Error logging in: %s' % e, 'error')
        return
Example #3
0
def postComment(thing_id, text):
    """Post a reply with body ``text`` to the reddit thing ``thing_id``.

    Skips posting if ``thing_id`` already appears in the module-level
    ``responses`` list (double-post guard).  Transient HTTP failures are
    retried up to _['http_retries'] times; 401/403/404 abort
    immediately.  Always returns None.
    """
    # Double-post guard: each entry's first element holds the thing id(s)
    # already replied to.
    global responses
    for response in responses:
        if thing_id in response[0]:
            return

    log.write("Posting reply [%s] to %s" % (text, thing_id), "message")

    try:
        success = False
        for i in range(_["http_retries"]):
            # NOTE(review): `text` is interpolated into the form body
            # without URL-encoding; '&', '=' or '%' in the comment would
            # corrupt the request -- confirm against the API contract.
            f = opener.open("http://www.reddit.com/api/comment", "api_type=json&thing_id=%s&text=%s" % (thing_id, text))
            if f.getcode() == 200:
                success = True
                break
            log.write("Error %d for reply attempt to %s" % (f.getcode(), thing_id), "error")
            if f.getcode() in [401, 403, 404]:
                # Client errors will not recover on retry; give up now.
                return
            time.sleep(_["sleep"])

        if not success:
            log.write("Retries exhausted for reply to %s" % thing_id, "error")
            return

        # Throttle before the caller issues its next API request.
        time.sleep(_["sleep"])

    except Exception as e:
        # 'except ... as e' replaces the Python-2-only comma syntax and
        # is valid on Python 2.6+ as well.
        log.write("Error replying to %s: %s" % (thing_id, e), "error")
        return
Example #4
0
def get(url, linkid, commentid = '', args = '', depth = 0, post = False):
    """Fetch a page of comments as JSON from ``url`` + ``commentid``.

    When ``post`` is True the request is a POST with body ``args``
    (reddit's "morechildren" style, <= 20 comments); otherwise ``args``
    is appended as a query string.  Recursion depth is capped by
    _['comment_depth_total'].  Transient HTTP failures are retried up
    to _['http_retries'] times; 401/403/404 abort immediately.  Always
    returns None.

    NOTE(review): ``linkid`` is unused in this visible body; kept to
    preserve the call signature -- confirm against callers.
    """
    # Depth cap: stop descending once past the configured comment depth.
    if depth > _['comment_depth_total']:
        return

    global ccount

    # Drop any non-ASCII characters so string concatenation below is safe.
    url = url.encode('ascii', 'ignore')

    if post:
        log.write("Autogetting <= 20 comments from: %s.json via POST: %s..." % (url + commentid, args), 'message')
    else:
        log.write("Getting %d comment(s) at depth %d from: %s.json?%s..." % (_['comment_limit_per_request'], depth, url + commentid, args), 'message')

    start = time.time()

    try:
        success = False
        for i in range(_['http_retries']):
            if post:
                f = opener.open(url + commentid + '.json', args)
            else:
                f = opener.open(url + commentid + '.json?' + args)

            if f.getcode() == 200:
                success = True
                break
            log.write('Error %d for comments url: %s' % (f.getcode(), url), 'error')
            if f.getcode() in [401, 403, 404]:
                # Client errors will not recover on retry; give up now.
                return
            time.sleep(_['sleep'])

        if not success:
            log.write('Retries exhausted for comments url: %s' % url, 'error')
            return

        # Throttle before the next API request.
        time.sleep(_['sleep'])

    except Exception as e:
        # 'except ... as e' replaces the Python-2-only comma syntax and
        # is valid on Python 2.6+ as well.
        log.write('Error opening comments url: %s - %s' % (url, e), 'error')
        return
Example #5
0
File: links.py  Project: jlblatt/Hollowbot
def get(url):
    log.write("Getting %d page(s) of %d links from: %s..." % (_['page_limit'], _['links_per_page'], url), 'message')
    start = time.time()

    after = ''

    for p in range(_['page_limit']):

        if after is None: break

        if after != '': finalUrl = url + '&after=' + after
        else: finalUrl = url

        try: 
            success = False
            for i in range(_['http_retries']):
                f = opener.open(finalUrl)
                if f.getcode() == 200:
                    success = True
                    break
                else:
                    log.write('Error %d for links url: %s' % (f.getcode(), finalUrl), 'error')
                    if f.getcode() in [401, 403, 404]: 
                        return
                    time.sleep(_['sleep'])

            if success == False:
                log.write('Retries exhausted for links url: %s' % finalUrl, 'error');
                return

            time.sleep(_['sleep'])

        except Exception, e:
            log.write('Error opening links url: %s - %s' % (finalUrl, e), 'error')
            return

        rJSON = f.read()
        f.close()

        try: links = json.loads(rJSON)
        except Exception, e:
            log.write('Error parsing links url: %s - %s' % (finalUrl, e), 'error')
            return