import datetime
import smtplib
from time import sleep

from ReddiWrap import ReddiWrap

def main():
    # Get startup info: Gmail credentials and the SMS gateway address
    
    while True:
        try:
            Guser = raw_input("Gmail username :"******"Gmail password :"******"connecting to smtp.gmail.com")
            myCellEmail = raw_input("SMS email address?")
            mail = smtplib.SMTP('smtp.gmail.com', 587)
            mail.ehlo()
            mail.starttls()
            mail.ehlo()
            mail.login(Guser,Gpass)
            mail.quit()
            break
        except smtplib.SMTPConnectError:
            print("cannot connect to smtp.gmail.com:587")
        except smtplib.SMTPAuthenticationError:
            print("cannot authenticate. doublecheck the Username/password")
    
    
    subreddit = raw_input("monitor reddit.com/r/")
    
    # Log the posts already on the subreddit so only new ones trigger mail
    recordedPosts = []
    reddit = ReddiWrap()
    
    startPosts = reddit.get("/r/%s/new" % subreddit)
    for post in startPosts:
        recordedPosts.append(post.id)
    
    # The main check loop; refreshes once per second
    while True:
        newPosts = reddit.get("/r/%s/new/?count=0" % subreddit)
        mailToSend = []
        for post in newPosts:
            if post.id not in recordedPosts:
                mailToSend.append(post)
                recordedPosts.append(post.id)
        if mailToSend:
            mail = smtplib.SMTP('smtp.gmail.com', 587)
            mail.ehlo()
            mail.starttls()
            mail.ehlo()
            mail.login(Guser,Gpass)
            for newpost in mailToSend:
                # the -7*60*60 offset is a hard-coded timezone adjustment
                seconds = (datetime.datetime.now() - datetime.datetime.fromtimestamp(newpost.created - 7*60*60)).seconds
                print("(Posted %d:%02d ago): \"%s\"" % (seconds / 60, seconds % 60, newpost.title))
                # self posts send their text; link posts send their URL
                body = newpost.title + "\r\n" + (newpost.selftext if newpost.is_self else newpost.url)
                mail.sendmail("RedditMonitor indev", [myCellEmail], body)
            mail.quit()
        
        sleep(1)
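# A standard entry-point guard (not in the original fragment) so the script runs
# when invoked directly. myCellEmail is expected to be a carrier SMS gateway
# address, e.g. number@vtext.com for Verizon.
if __name__ == '__main__':
    main()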
Example #2
    # Iterate through the subreddit tuple: add a link to the HTML for each
    # subreddit, then run its posts through the filters.
    print 'Getting latest from ' + str(subreddit)
    subLink = a('/r/' + subreddit)
    subLink.attributes['id'] = subreddit
    subLink.attributes['href'] = 'http://www.reddit.com/r/' + subreddit
    subLink.attributes['target'] = '_blank'
    subLink.render()

    subNavLink = a(subreddit)
    subNavLink.attributes['href'] = '#'+subreddit
    subNavLink.render()

    dropdownList << li(subNavLink)

    sideDivUl << li(subLink, cl="nav-header active")

    if subreddit == 'jpop':
        # 168 hours = one week of posts for /r/jpop; the rest keep 24 hours
        posts = reddit.get('/r/' + subreddit)
        getMatches(posts, 168, "youtubeItem", youtubeMatch)
        getMatches(posts, 168, "soundcloudItem", soundcloudMatch)
        getMatches(posts, 168, "jpopItem", jpopSuki)
    else:
        posts = reddit.get('/r/' + subreddit)
        getMatches(posts, 24, "youtubeItem", youtubeMatch)
        getMatches(posts, 24, "soundcloudItem", soundcloudMatch)
        getMatches(posts, 24, "eighttracksItem", eightTracksMatch)

page.printOut(file="/home/reynn/Python/untitled/Page.html")
#page.printOut(file="C://Projects/redditfun/Page.html")
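# getMatches and the *Match patterns are not defined in this fragment. Below is
# a minimal sketch of what getMatches plausibly does, guessed from the call
# sites: keep posts younger than the given number of hours whose URL matches a
# pattern. The signature, the example regex, and the return value are all
# assumptions, not the original implementation.
import re
import time

youtubeMatch = re.compile(r'(youtube\.com/watch|youtu\.be/)')

def getMatches(posts, max_age_hours, css_class, pattern):
    # Filter posts newer than max_age_hours whose URL matches pattern; the real
    # version presumably also renders the matches into the page under css_class.
    cutoff = time.time() - max_age_hours * 3600
    return [post for post in posts
            if post.created_utc >= cutoff and pattern.search(post.url)]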
	if login != 0:
		# 1 means invalid password, 2 means rate limited, -1 means unexpected error
		print('unable to log in: %d' % login)
		print('remember to change USERNAME and PASSWORD')
		exit(1)
	# Save cookies so we won't have to log in again later
	reddit.save_cookies('cookies.txt')

print('logged in as %s' % reddit.user)

uinfo = reddit.user_info()
print('\nlink karma:    %d' % uinfo.link_karma)
print('comment karma: %d' % uinfo.comment_karma)
	

# Retrieve posts in a subreddit
posts = reddit.get('/r/%s' % MOD_SUB)
print('posts in subreddit /r/%s:' % MOD_SUB)
for post in posts:
	if not post.clicked:
		reddit.fetch_comments(post)
		for comment in post.comments:
			words = re.split("\?|\ |,|!|\n",comment.body)
			for word in words:
				if "http://www.tobaccoreviews.com" in word:
					linkURL = word
					if "/blend/" in word:
						webtry = urllib2.urlopen(word)
						html = webtry.read()
						parser = MyHTMLParser()
						parser.feed(html)
						# tobaccoName is presumably extracted by MyHTMLParser from the page
						replyMessage = "[" + tobaccoName + "](" + word + ")" + "\n\n"
Example #4
import datetime
import time

from ReddiWrap import ReddiWrap

reddit = ReddiWrap()  # instance not shown in the original fragment

olddict = {}
newdict = {}

clock = datetime.datetime

a = clock.now()
f = open("%s_%s_%srecord.csv" % (a.month, a.day, a.hour), "a")
f.write("id, author, subreddit, title, created, link, url")

b = clock.now()

while b - a < datetime.timedelta(hours=6):
    print b - a
    time.sleep(1)
    new = reddit.get('/r/all/new')
    seen = False
    for post in new:
        entry = {"subreddit":post.subreddit.encode('ascii', 'ignore'), "link":post.permalink.encode('ascii', 'ignore'), "title":post.title.encode('ascii', 'ignore'),\
         "created":post.created, "id":post.id, "url":post.url.encode('ascii', 'ignore'), "author":post.author.encode('ascii', 'ignore')}
        if post.id in olddict:
            seen = True
        elif post.id not in newdict:
            newdict[post.id] = entry
    if not seen:
        for item in olddict.values():
            f.write("%s,%s,%s,%s,%s,%s,%s\n" %(item["id"], item["author"], item["subreddit"], item["title"],\
             item["created"], item["link"], item["url"]))
        olddict = newdict
        newdict = {}
    b = clock.now()
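# Titles routinely contain commas, so the hand-rolled f.write above produces
# misaligned rows. A sketch of the same dump using the standard csv module,
# which quotes fields as needed (field order matches the header above):
import csv

with open('record.csv', 'ab') as csvfile:  # 'ab' for Python 2's csv module
    writer = csv.writer(csvfile)
    writer.writerow(['id', 'author', 'subreddit', 'title', 'created', 'link', 'url'])
    for item in olddict.values():
        writer.writerow([item['id'], item['author'], item['subreddit'],
                         item['title'], item['created'], item['link'], item['url']])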
Example #5
		print('remember to change USERNAME and PASSWORD')
		exit(1)
	# Save cookies so we won't have to log in again later
	reddit.save_cookies('cookies.txt')

print('logged in as %s' % reddit.user)

uinfo = reddit.user_info()
print('\nlink karma:    %d' % uinfo.link_karma)
print('comment karma: %d' % uinfo.comment_karma)
created = int(uinfo.created)
print('account created on:  %s' % reddit.time_to_date(created))
print('time since creation: %s\n' % reddit.time_since(created))

# Retrieve posts in a subreddit
posts = reddit.get('/r/%s' % MOD_SUB)
print('posts in subreddit /r/%s:' % MOD_SUB)
for post in posts:
	print(post)

# Store first post for other functions
post = None
for p in posts:
	if p.num_comments > 0:
		post = p
		break
if post is None:
	print('unable to find post with comments. exiting')
	exit(1)

# Retrieve comments for the first post
# To print titles of top 10 in r/aww
import urllib
from ReddiWrap import ReddiWrap
from datetime import datetime

def getUTCTime(timestamp):
    return datetime.utcfromtimestamp(timestamp)

def getTime(timestamp):
    return datetime.fromtimestamp(timestamp)

# Create new ReddiWrap instance
reddit = ReddiWrap()
aww = reddit.get('/r/aww')
strFormat = "%Y-%m-%d"

topten = aww[:10]

print "Title", "|", "Time", "|", "Votes + -"
for p in topten:
    #image = urllib.urlopen("http://site.com/image.jpg").read()
    #print p.url, p.title
    print "=", p.title, "="
    print "Votes | +:", p.upvotes, "-:", p.downvotes, "fuzzy:", p.score
    print "Time[Local]:", getTime(p.created) ,"Time[UTC]:", getUTCTime(p.created_utc)
    print
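# Note: strFormat is defined above but never used; presumably it was meant to
# format the dates, e.g.:
print getUTCTime(topten[0].created_utc).strftime(strFormat)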
Example #8
"""
	Web: Web requests utility library, used by ReddiWrap

(c) Antonio Herraiz August/2013
"""
from ReddiWrap import ReddiWrap
import time  # for sleep(), to avoid hitting the API rate limit

reddit = ReddiWrap(user_agent='ReddiWrap')
count = 0
pics = []
comments = reddit.get_user_comments('Only_Says_Nice_Tits')
if comments is not None:
	while True:
		for comment in comments:
			# reddit.last_url will be like: http://reddit.com/r/funny/comments/1jkgf3/cbfmmzu.json
			post = reddit.get('/r/%s/comments/%s/%s' % (comment.subreddit, comment.link_id[3:], comment.id))
			url = post[0].url
			if 'imgur' in url and 'i.imgur' not in url:
				# TODO: http://imgur.com/ipv9GiY ==> http://i.imgur.com/ipv9GiY.xxx
				print('Transforming imgur URL')
			pics.append(url)
			print('Pic URL: %s' % (url))
			count += 1
		if count >= 100 or not reddit.has_next(): break
		time.sleep(2) # One request every 2 seconds.
		comments = reddit.get_next()
else:
	print('I can\'t retrieve stuff from reddit.com')
# TODO: do something with 'pics'

# EOF
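# A sketch for the imgur TODO above: http://imgur.com/ipv9GiY ==> an i.imgur.com
# direct link. It assumes the page URL's last path segment is the image id; the
# '.jpg' extension is a guess (imgur historically serves the image bytes
# regardless of the extension requested).
def to_direct_imgur(url):
    image_id = url.rstrip('/').split('/')[-1]
    return 'http://i.imgur.com/%s.jpg' % image_id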
Example #9
	#Log in
	reddit.load_cookies('cookies.txt')

	if not reddit.logged_in or reddit.user.lower() != USERNAME.lower():
		print('logging into %s' % USERNAME)
		login = reddit.login(user=USERNAME, password=PASSWORD)
		if login != 0:
			print('unable to log in: %d' % login)
			print('remember to change USERNAME and PASSWORD')
			exit(1)
		reddit.save_cookies('cookies.txt')
	print('logged in as %s' % reddit.user)

	#Get posts in subreddit SUB
	posts = reddit.get('/r/%s' % SUB)
	print('getting posts in subreddit /r/%s' % SUB)
	while reddit.has_next():
		posts += reddit.get_next()
		time.sleep(2)	
	print('number of posts in /r/%s: %d' % (SUB, len(posts)))

	#Set up XML
	srn = ET.Element('searchresult')
	subredditnode = ET.SubElement(srn, SUB)

	#Set up for JSON dump
	subreddit = {}

	#Get data for each post
	post_count = -1
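	# The example is truncated here. A sketch of how the loop that follows might
	# fill both structures, assuming each post becomes an XML node plus a dict
	# entry keyed by its position (all names below are guesses):
	for post in posts:
		post_count += 1
		postnode = ET.SubElement(subredditnode, 'post', id=post.id)
		postnode.text = post.title
		subreddit[post_count] = {'id': post.id, 'title': post.title, 'url': post.url}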
Example #10
# To print titles of the top 10 posts on /r/all

from ReddiWrap import ReddiWrap

# Create new ReddiWrap instance
reddit = ReddiWrap()
front = reddit.get('/r/all')

topten = front[:10]
for p in topten:
    print p.title
import time
import datetime
from ReddiWrap import ReddiWrap
import numpy as np

reddit = ReddiWrap() # Create new instance of ReddiWrap
login_result = reddit.login('brainsareneat', 'isminus1')

all_posts = []
new = reddit.get('/r/all/new')

while True:
	time.sleep(1.5)
	for post in new:
		all_posts.append(post)
	np.savez('records.npz', posts=np.array(all_posts))
	if not reddit.has_next():
		break
	new = reddit.get_next()
	print 'ok...'
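# Usage sketch: read the archive back later. allow_pickle=True is required on
# newer NumPy versions because the saved array holds arbitrary Python objects.
records = np.load('records.npz', allow_pickle=True)
print records['posts'][:5]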
Example #12
        print('logged in as %s' % reddit.user)

        # uinfo = reddit.user_info()
        # print('\nlink karma:    %d' % uinfo.link_karma)
        # print('comment karma: %d' % uinfo.comment_karma)
        # created = int(uinfo.created)
        # print('account created on:  %s' % reddit.time_to_date(created))
        # print('time since creation: %s\n' % reddit.time_since(created))

        # # # # # # # # Finding Subreddit
        print "Finding Subreddit ..."
        subreddit = ""
        flag = False
        # if we find the subreddit, this flag is going to be Ture
        subreddits = reddit.get('/reddits')
        while True:
            for subred in subreddits:
                if subred.display_name == MOD_SUB.lower():
                    subreddit = subred
                    flag = True
                    break
            if flag or not reddit.has_next():
                break
            time.sleep(2)
            subreddits = reddit.get_next()

        # # # # # # # # saving subreddit in subreddit table
        print "Saving Subreddit ... "
        over18 = 1 if subreddit.over18 else 0
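        # The insert itself is cut off above. A sketch of what it might look
        # like, assuming a hypothetical sqlite3 table subreddit(name, title,
        # over18); the schema and connection are not in the source.
        import sqlite3
        conn = sqlite3.connect('reddit.db')  # hypothetical database file
        conn.execute('INSERT INTO subreddit (name, title, over18) VALUES (?, ?, ?)',
                     (subreddit.display_name, subreddit.title, over18))
        conn.commit()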