def handle(self, *args, **options):
    """Poll GitHub and record an Update for each repo changed since the last run.

    Walks every repository of GITHUB_USER, finds the branch holding the most
    recent commit, and saves a new Update row when that commit is newer than
    the newest stored github Update (or when none exist yet).
    """
    # create a github handle
    gh = github.GitHub()
    # latest github updates already in the db, newest first
    updates = Update.objects.filter(source=SOURCE).order_by('-timestamp')
    for repo in gh.repos.forUser(GITHUB_USER):
        most_recent_branch = None
        most_recent_branch_ts = None
        most_recent_commit = None
        branches = gh.repos.branches(GITHUB_USER, repo.name)
        for branchname in branches:
            # BUG FIX: the owner was hard-coded as 'qmat'; use GITHUB_USER
            # for consistency with the repo/branch lookups above.
            commits = gh.commits.forBranch(GITHUB_USER, repo.name, branchname)
            # ignore the branch if there are no commits
            if not commits:
                continue
            branch_ts = self.__interpret_date(commits[0].authored_date)
            # first branch seen wins by default, otherwise compare timestamps
            if most_recent_branch is None or branch_ts > most_recent_branch_ts:
                most_recent_branch = branchname
                most_recent_branch_ts = branch_ts
                most_recent_commit = commits[0]
        # BUG FIX: guard on an actual commit being found, not merely on the
        # branch dict being non-empty — branches with zero commits previously
        # left the sentinel value in the comparison below.
        if most_recent_commit is not None and (
                not updates or most_recent_branch_ts > updates[0].timestamp):
            new_update = Update()
            new_update.source = SOURCE
            new_update.timestamp = most_recent_branch_ts
            new_update.author = most_recent_commit.author.name
            new_update.text = "Changed %s repo" % repo.name
            new_update.save()
def handle(self, *args, **options): conn = imaplib.IMAP4_SSL('imap.gmail.com', 993) try: conn.login(GMAIL_USER, GMAIL_PASS) except: print sys.exc_info()[1] sys.exit(1) conn.select('Inbox') # Select inbox or default namespace retcode, messages = conn.search(None, '(UNSEEN)') if retcode == 'OK' and messages[0]: for message in messages[0].split(' '): print 'Processing :', message ret, mesginfo = conn.fetch(message, '(BODY[HEADER.FIELDS (SUBJECT FROM DATE)])') if ret == 'OK': # get the right bits out of the imap data new_update = Update() new_update.source = SOURCE fields = mesginfo[0][1].split('\r\n') for field in fields: if field.startswith('Date: '): new_update.timestamp = self.__interpret_date(field[len('Date: '):]) if field.startswith('From: '): author = field[len('From: '):] # filter out email address if a name was specified bps = [author.find('<'), author.find('>')] # do some sanity checking if bps[0] < bps[1] and bps[0] >= 0 and bps[1] >= 0: author = author[0:bps[0]-1] author = author.strip("\"' ") new_update.author = author if field.startswith('Subject: '): new_update.text = field[len('Subject: '):] new_update.save() conn.store(messages[0].replace(' ',','),'+FLAGS','SEEN') conn.close()
def handle(self, *args, **options):
    """Parse the RSS/Atom feed at FEED_URL and store new entries as Updates.

    Every feed entry newer than the newest stored Update for this source
    (or every entry, when none are stored yet) becomes a new Update row.
    """
    articles = feedparser.parse(FEED_URL)['entries']
    # newest stored update for this feed source (copy-pasted comment used to
    # say "vimeo" — this command handles the generic feed, not vimeo)
    updates = Update.objects.filter(source=SOURCE).order_by('-timestamp')
    # hoist the cut-off so we don't re-index the queryset per article
    latest_ts = updates[0].timestamp if updates else None
    for article in articles:
        timestamp = self.__interpret_date(article['updated'])
        if latest_ts is None or timestamp > latest_ts:
            new_update = Update()
            new_update.source = SOURCE
            new_update.timestamp = timestamp
            new_update.author = article['author']
            new_update.text = article['title']
            new_update.save()
def handle(self, *args, **options):
    """Fetch the Vimeo channel API and store new videos as Updates.

    Each video uploaded after the newest stored vimeo Update (or every
    video, when none are stored yet) becomes a new Update row.
    """
    # BUG FIX: close the HTTP response instead of leaking the connection.
    response = urllib2.urlopen(VIMEO_CHANNEL_API_URI)
    try:
        videos = json.loads(response.read())
    finally:
        response.close()
    # newest stored vimeo update, used as the cut-off for "new"
    updates = Update.objects.filter(source=SOURCE).order_by('-timestamp')
    latest_ts = updates[0].timestamp if updates else None
    for video in videos:
        timestamp = self.__interpret_date(video['upload_date'])
        if latest_ts is None or timestamp > latest_ts:
            new_update = Update()
            new_update.source = SOURCE
            new_update.timestamp = timestamp
            new_update.author = video['user_name']
            new_update.text = video['description']
            new_update.save()
def handle(self, *args, **options):
    """Search Twitter for @QMATChandelier mentions and store the new ones.

    Any search result created after the newest stored twitter Update (or
    every result, when none are stored yet) is saved as an Update row.
    """
    handler = tweepy.auth.OAuthHandler(TWITTER_USER, TWITTER_PASS)
    client = tweepy.API(handler)
    # newest stored twitter update, used as the cut-off for "new"
    existing = Update.objects.filter(source=SOURCE).order_by('-timestamp')
    for result in client.search('@QMATChandelier'):
        if existing and result.created_at <= existing[0].timestamp:
            continue  # already recorded
        entry = Update()
        entry.source = SOURCE
        entry.timestamp = result.created_at
        entry.author = result.from_user
        entry.text = result.text
        entry.save()
def handle(self, *args, **options):
    """Record an Update for each Google Doc modified since the newest stored one.

    Logs in to Google Docs, walks the document list feed, and saves an
    Update row for every document whose modification time is newer than
    the latest stored Update for this source.
    """
    docs_client = gdata.docs.service.DocsService()
    docs_client.ClientLogin(GDOCS_USER, GDOCS_PASS)
    feed = docs_client.GetDocumentListFeed()
    # newest stored update for this source, used as the cut-off
    known = Update.objects.filter(source=SOURCE).order_by("-timestamp")
    for entry in feed.entry:
        modified = self.__interpret_date(entry.updated.text)
        if known and modified <= known[0].timestamp:
            continue  # nothing newer than what we already stored
        update = Update()
        update.source = SOURCE
        update.timestamp = modified
        update.author = entry.lastModifiedBy.name.text
        update.text = entry.title.text
        update.save()
def handle(self, *args, **options):
    """Create Update rows for Google Docs changed after the latest stored update."""
    service = gdata.docs.service.DocsService()
    service.ClientLogin(GDOCS_USER, GDOCS_PASS)
    doc_feed = service.GetDocumentListFeed()
    previous = Update.objects.filter(source=SOURCE).order_by('-timestamp')
    # cut-off timestamp; None means no updates stored yet, so take everything
    cutoff = previous[0].timestamp if previous else None
    for doc in doc_feed.entry:
        changed_at = self.__interpret_date(doc.updated.text)
        if cutoff is None or changed_at > cutoff:
            fresh = Update()
            fresh.source = SOURCE
            fresh.timestamp = changed_at
            fresh.author = doc.lastModifiedBy.name.text
            fresh.text = doc.title.text
            fresh.save()
def handle(self, *args, **options):
    """Store home-timeline tweets newer than the latest saved twitter Update."""
    oauth = tweepy.OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET)
    oauth.set_access_token(TWITTER_ACCESS_KEY, TWITTER_ACCESS_SECRET)
    twitter = tweepy.API(oauth)
    # newest stored twitter update, used as the cut-off for "new"
    saved = Update.objects.filter(source=SOURCE).order_by('-timestamp')
    for status in twitter.home_timeline():
        if saved and status.created_at <= saved[0].timestamp:
            continue  # not newer than what's already stored
        record = Update()
        record.source = SOURCE
        record.timestamp = status.created_at
        record.author = status.user.screen_name
        record.text = status.text
        record.save()
def handle(self, *args, **options): conn = imaplib.IMAP4_SSL('imap.gmail.com', 993) try: conn.login(GMAIL_USER, GMAIL_PASS) except: print sys.exc_info()[1] sys.exit(1) conn.select('Inbox') # Select inbox or default namespace retcode, messages = conn.search(None, '(UNSEEN)') if retcode == 'OK' and messages[0]: for message in messages[0].split(' '): print 'Processing :', message ret, mesginfo = conn.fetch( message, '(BODY[HEADER.FIELDS (SUBJECT FROM DATE)])') if ret == 'OK': # get the right bits out of the imap data new_update = Update() new_update.source = SOURCE fields = mesginfo[0][1].split('\r\n') for field in fields: if field.startswith('Date: '): new_update.timestamp = self.__interpret_date( field[len('Date: '):]) if field.startswith('From: '): author = field[len('From: '):] # filter out email address if a name was specified bps = [author.find('<'), author.find('>')] # do some sanity checking if bps[0] < bps[1] and bps[0] >= 0 and bps[1] >= 0: author = author[0:bps[0] - 1] author = author.strip("\"' ") new_update.author = author if field.startswith('Subject: '): new_update.text = field[len('Subject: '):] new_update.save() conn.store(messages[0].replace(' ', ','), '+FLAGS', 'SEEN') conn.close()