def retrieve(force, **args): """ this is how we will create new items/tweets """ username, password = args['account'], None if isinstance(username, tuple): username, password = username search = True user = False force = False search_term = "swarthmore" if search: print "[community.sources.twitter | INFO]: Working with Search" url = "http://search.twitter.com/search.json?q=%s" % search_term last_id = 0 if force: log.info("Forcing update of all tweets available.") else: try: last_id = Tweet.objects.order_by('-tweet_id')[0].tweet_id except Exception, e: log.debug('%s', e) log.debug("Last id processed: %s", last_id) tweets = utils.get_remote_data(url, rformat="json") if not tweets: log.warning('no tweets returned, twitter possibly overloaded.') return tweets = tweets['results'] for t in tweets: curr_id = t['id'] if curr_id > last_id: log.info("Working with %s.", t['id']) tweet_text = t['text'] tweet_text = re.sub(r'@((?:\w|\.(?=\w))+)',r'<a href="http://www.twitter.com/\1/">\1</a>',tweet_text) tweet_text = tweet_text.replace("@","@") owner_user = smart_unicode(t['from_user']) url = "http://twitter.com/%s/statuses/%s" % (owner_user, t['id']) icon = t['profile_image_url'].replace("_normal","_bigger",1) tweet, created = Tweet.objects.get_or_create( title = str(curr_id) + " " + tweet_text[:50], description = tweet_text, tweet_id = curr_id, timestamp = utils.parsedate(t['created_at']), source_type = "tweet", owner_user = owner_user, url = url, icon = icon, ) tweet.source = smart_unicode(t['source']) else: log.warning("No more tweets, stopping...") break
def exe_method(self, method, **kwargs):
    """Call a Flickr REST API method and return the decoded JSON response.

    method -- sub-method name, appended to self.method to form the full
              API method string (e.g. 'flickr.people' + '.getPhotos').
    kwargs -- extra query parameters passed straight through to the API.

    Returns the parsed response dict, or False when no response is
    received or the API reports stat == "fail".
    """
    kwargs['method'] = '%s.%s' % (self.method, method)
    kwargs['api_key'] = self.api_key
    kwargs['format'] = self.format
    kwargs['nojsoncallback'] = self.nojsoncallback
    url = "http://api.flickr.com/services/rest/?"
    # FIX: removed a no-op loop (`for k, v in kwargs.iteritems():
    # kwargs[k] = v`) that rewrote every key to its own value.
    res = utils.get_remote_data(url + urllib.urlencode(kwargs), rformat='json')
    # BUG FIX: get_remote_data can return a falsy value (the other
    # retrievers in this file guard for this); calling .get() on it
    # would raise AttributeError.
    if not res:
        log.error("flickr retrieve failed.")
        return False
    if res.get("stat", "") == "fail":
        log.error("flickr retrieve failed.")
        log.error("%s" % res.get("stat"))
        return False
    return res
def exe_method(self, method, **kwargs):
    """Call a Flickr REST API method and return the decoded JSON response.

    method -- sub-method name, appended to self.method to form the full
              API method string.
    kwargs -- extra query parameters passed straight through to the API.

    Returns the parsed response dict, or False when no response is
    received or the API reports stat == "fail".
    """
    kwargs['method'] = '%s.%s' % (self.method, method)
    kwargs['api_key'] = self.api_key
    kwargs['format'] = self.format
    kwargs['nojsoncallback'] = self.nojsoncallback
    url = "http://api.flickr.com/services/rest/?"
    # FIX: removed a no-op loop (`for k, v in kwargs.iteritems():
    # kwargs[k] = v`) that rewrote every key to its own value.
    res = utils.get_remote_data(url + urllib.urlencode(kwargs), rformat='json')
    # BUG FIX: get_remote_data can return a falsy value; guard before
    # calling .get() on it.
    if not res:
        log.error("flickr retrieve failed.")
        return False
    if res.get("stat", "") == "fail":
        log.error("flickr retrieve failed.")
        log.error("%s" % res.get("stat"))
        return False
    return res
# Fragment of a del.icio.us bookmark retriever. The function header lies
# outside this chunk; `force`, `password`, `username`, `tag`, and the
# default `url` are defined there -- TODO confirm against the full file.
rformat = 'json'
# Epoch zero == "never updated"; any real bookmark timestamp is newer.
last_update = datetime.datetime.fromtimestamp(0)
if force:
    if password:
        # Authenticated v1 API returns *all* posts, but only as RSS.
        url = "https://api.del.icio.us/v1/posts/all"
        rformat = "rss"
        log.info("Forcing update of all bookmarks available.")
else:
    try:
        # Timestamp of the most recent bookmark already stored for this tag.
        last_update = Bookmark.objects.filter(owner_user="******"+tag).order_by('-timestamp')[0].timestamp
    except Exception, e:
        # Typically an IndexError from an empty queryset on first run.
        log.debug('%s', e)
if force and password:
    marks = utils.get_remote_data(url, rformat=rformat, username=username, password=password)
else:
    marks = utils.get_remote_data(url, rformat=rformat)
if marks:
    for mark in marks:
        if password and force:
            # RSS entries have a different shape; handled by a separate helper.
            _handle_rss_bookmark(mark, tag)
            continue
        dt = utils.parsedate(mark['dt'])
        if dt > last_update:
            _handle_bookmark(mark, dt, tag)
        else:
            # Feed appears to be newest-first; the rest are already stored.
            log.warning("No more bookmarks, stopping...")
            break
def retrieve(force, **args): """ this is how we will create new items/tweets """ username, password = args['account'], None if isinstance(username, tuple): username, password = username search = True user = False force = False search_term = "swarthmore" if search: print "[community.sources.twitter | INFO]: Working with Search" url = "http://search.twitter.com/search.json?q=%s" % search_term last_id = 0 if force: log.info("Forcing update of all tweets available.") else: try: last_id = Tweet.objects.order_by('-tweet_id')[0].tweet_id except Exception, e: log.debug('%s', e) log.debug("Last id processed: %s", last_id) tweets = utils.get_remote_data(url, rformat="json") if not tweets: log.warning('no tweets returned, twitter possibly overloaded.') return tweets = tweets['results'] for t in tweets: curr_id = t['id'] if curr_id > last_id: log.info("Working with %s.", t['id']) tweet_text = t['text'] tweet_text = re.sub( r'@((?:\w|\.(?=\w))+)', r'<a href="http://www.twitter.com/\1/">\1</a>', tweet_text) tweet_text = tweet_text.replace("@", "@") owner_user = smart_unicode(t['from_user']) url = "http://twitter.com/%s/statuses/%s" % (owner_user, t['id']) icon = t['profile_image_url'].replace("_normal", "_bigger", 1) tweet, created = Tweet.objects.get_or_create( title=str(curr_id) + " " + tweet_text[:50], description=tweet_text, tweet_id=curr_id, timestamp=utils.parsedate(t['created_at']), source_type="tweet", owner_user=owner_user, url=url, icon=icon, ) tweet.source = smart_unicode(t['source']) else: log.warning("No more tweets, stopping...") break
print "[community.sources.twitter | INFO]: Working with Users" url = "http://twitter.com/statuses/user_timeline/%s.json" % username last_id = 0 if force: log.info("Forcing update of all tweets available.") else: try: last_id = Tweet.objects.filter( owner_user=username).order_by('-tweet_id')[0].tweet_id except Exception, e: log.debug('%s', e) log.debug("Last id processed: %s", last_id) if not password: tweets = utils.get_remote_data(url, rformat="json", username=username) else: tweets = utils.get_remote_data(url, rformat="json", username=username, password=password) if not tweets: log.warning('no tweets returned, twitter possibly overloaded.') return for t in tweets: if t['id'] > last_id: log.info("Working with %s.", t['id']) tweet_text = t['text']
print "[community.sources.twitter | INFO]: Working with Users" url = "http://twitter.com/statuses/user_timeline/%s.json" % username last_id = 0 if force: log.info("Forcing update of all tweets available.") else: try: last_id = Tweet.objects.filter(owner_user=username).order_by('-tweet_id')[0].tweet_id except Exception, e: log.debug('%s', e) log.debug("Last id processed: %s", last_id) if not password: tweets = utils.get_remote_data(url, rformat="json", username=username) else: tweets = utils.get_remote_data(url, rformat="json", username=username, password=password) if not tweets: log.warning('no tweets returned, twitter possibly overloaded.') return for t in tweets: if t['id'] > last_id: log.info("Working with %s.", t['id']) tweet_text = t['text'] tweet_text = re.sub(r'@((?:\w|\.(?=\w))+)',r'<a href="http://www.twitter.com/\1/">\1</a>',tweet_text) tweet_text = tweet_text.replace("@","@") owner_user = smart_unicode(t['user']['screen_name'])
# Fragment of a del.icio.us bookmark retriever (duplicate chunk). The
# function header lies outside this view; `force`, `password`,
# `username`, `tag`, `url`, and `rformat` are defined there -- TODO
# confirm against the full file.
# Epoch zero == "never updated"; any real bookmark timestamp is newer.
last_update = datetime.datetime.fromtimestamp(0)
if force:
    if password:
        # Authenticated v1 API returns *all* posts, but only as RSS.
        url = "https://api.del.icio.us/v1/posts/all"
        rformat = "rss"
        log.info("Forcing update of all bookmarks available.")
else:
    try:
        # Timestamp of the most recent bookmark already stored for this tag.
        last_update = Bookmark.objects.filter(
            owner_user="******" + tag).order_by('-timestamp')[0].timestamp
    except Exception, e:
        # Typically an IndexError from an empty queryset on first run.
        log.debug('%s', e)
if force and password:
    marks = utils.get_remote_data(url, rformat=rformat,
                                  username=username, password=password)
else:
    marks = utils.get_remote_data(url, rformat=rformat)
if marks:
    for mark in marks:
        if password and force:
            # RSS entries have a different shape; handled by a separate helper.
            _handle_rss_bookmark(mark, tag)
            continue
        dt = utils.parsedate(mark['dt'])
        if dt > last_update:
            _handle_bookmark(mark, dt, tag)
        else:
            # Feed appears to be newest-first; the rest are already stored.
            log.warning("No more bookmarks, stopping...")
            break