    def get(self):
        # Purge SearchCache entries older than one day, up to 100 per run.
        date = datetime.datetime.now() - datetime.timedelta(days=1)
        search_cache_query = SearchCache.all()
        search_cache_query.filter('tweeted_at <', date)
        search_cache_query.order('tweeted_at')
        search_cache = search_cache_query.fetch(100)
        logging.info('Delete old cache. count: %d' % len(search_cache))
        for cache in search_cache:
            cache.delete()
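    # Note: each cache.delete() above is a separate datastore RPC. As a sketch
    # of an alternative (not in the original code), google.appengine.ext.db
    # exposes a module-level delete() that accepts a list of entities or keys
    # and removes the whole batch in one call:
    #
    #   db.delete(search_cache)  # assumes `from google.appengine.ext import db`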
    def get(self):
        # Rebuild the memcached list of recent SearchCache ids (newest first).
        search_cache_query = SearchCache.all()
        search_cache_query.order('-tweeted_at')
        search_cache = search_cache_query.fetch(200)
        cache_ids = []
        for tweet in search_cache:
            cache_ids.append(tweet.key().id())
        # replace() fails when the key is missing and add() fails when it
        # already exists, so pick the call that matches the current state.
        if memcache.get('cache_ids') is not None:
            memcache.Client().replace('cache_ids', cache_ids, 300)
        else:
            memcache.Client().add('cache_ids', cache_ids, 300)
        logging.info('Updated cache_ids.')
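    # Note: the add()/replace() branching above can be collapsed. As a sketch
    # of an equivalent call (not in the original code), memcache.set() stores
    # the value whether or not the key already exists:
    #
    #   memcache.set('cache_ids', cache_ids, 300)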
    def get(self):
        # Return one randomly chosen cached tweet as JSON.
        import random
        from django.utils import simplejson

        # Load the list of candidate ids, falling back to the datastore when
        # the memcache entry has expired.
        cache_ids = memcache.get('cache_ids')
        if cache_ids is None:
            search_cache_query = SearchCache.all()
            search_cache_query.order('-tweeted_at')
            search_cache = search_cache_query.fetch(100)
            cache_ids = []
            for tweet in search_cache:
                cache_ids.append(tweet.key().id())
            memcache.Client().add('cache_ids', cache_ids, 300)
            logging.info('cache_ids from datastore.')
        else:
            logging.info('cache_ids from memcache.')

        if len(cache_ids) > 0:
            # random.choice avoids the off-by-one IndexError that
            # cache_ids[random.randint(0, len(cache_ids))] could raise.
            cache_id = random.choice(cache_ids)
            logging.info('Random cache_id: %d' % cache_id)
            data = memcache.get('cache_%d' % cache_id)
            if data is None:
                logging.info('Tweet cache from datastore.')
                search_cache = SearchCache.get_by_id(int(cache_id))
                if search_cache is not None:
                    delta = datetime.datetime.now() - search_cache.tweeted_at
                    # Show the age in seconds only for very fresh tweets.
                    if delta.days == 0 and delta.seconds < 120:
                        delta_str = int(delta.seconds)
                    else:
                        delta_str = '-'
                    # tweeted_at is stored in UTC; shift it to JST for display.
                    created_at = search_cache.tweeted_at + datetime.timedelta(
                        hours=9)
                    if search_cache.time_zone:
                        time_zone = search_cache.time_zone
                    else:
                        time_zone = ''
                    # Only short tweets are served (and cached for an hour).
                    if len(search_cache.text) < 60:
                        data = {
                            'id': search_cache.key().id(),
                            'name': search_cache.name,
                            'screen_name': search_cache.screen_name,
                            'text': search_cache.text,
                            'profile_image_url': search_cache.profile_image_url,
                            'delta': delta_str,
                            'created_at': created_at.strftime(
                                '%Y-%m-%d %H:%M:%S (JST)'),
                            'time_zone': time_zone
                        }
                        memcache.Client().add('cache_%d' % cache_id, data, 3600)
                        json = simplejson.dumps(data, ensure_ascii=False)
                        self.response.content_type = 'application/json'
                        self.response.out.write(json)
                # If no suitable tweet was found, the response body stays empty.
            else:
                logging.info('Tweet cache from memcache.')
                json = simplejson.dumps(data, ensure_ascii=False)
                self.response.content_type = 'application/json'
                self.response.out.write(json)
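    # For reference, the handler above emits a JSON object shaped like the
    # following (field values here are made up for illustration; 'delta' is
    # either the age in seconds or '-'):
    #
    #   {
    #     "id": 12345,
    #     "name": "Example User",
    #     "screen_name": "example",
    #     "text": "short tweet text",
    #     "profile_image_url": "http://example.com/avatar.png",
    #     "delta": 45,
    #     "created_at": "2010-01-01 09:00:00 (JST)",
    #     "time_zone": "Tokyo"
    #   }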
    def get(self):
        # Worker handler: fetch a single tweet via the Twitter API with the
        # keyword owner's OAuth token and store it in SearchCache, unless it
        # has already been cached for this keyword.
        tweet_id = self.request.get('tweet_id')
        keyword_id = self.request.get('keyword_id')
        search_keywords = SearchKeywords.get_by_id(int(keyword_id))
        if search_keywords is not None:
            logging.info('Feed: %s' % search_keywords.keyword)
            user_prefs = UserPrefs.get_by_id(
                search_keywords.user_id.key().id())
            if user_prefs is not None:
                logging.info('Keyword owner name: %s'
                             % user_prefs.google_id.nickname())
                if user_prefs.oauth_access_token_key is not None:
                    oauth_access_token = OAuthAccessToken.get_by_key_name(
                        user_prefs.oauth_access_token_key.key().name())
                    if oauth_access_token is not None:
                        logging.info('Twitter Account: %s'
                                     % user_prefs.oauth_access_token_key.specifier)
                        try:
                            # Skip tweets already cached for this keyword.
                            search_cache_query = SearchCache.all()
                            search_cache_query.filter('tweet_id =', int(tweet_id))
                            search_cache_query.filter('keyword_key =',
                                                      search_keywords.key())
                            if search_cache_query.get() is None:
                                client = OAuthClient('twitter', self)
                                client.token = oauth_access_token
                                tweet = client.get(
                                    '/statuses/show/%d' % int(tweet_id))
                                logging.info('Tweet: (%s) %s'
                                             % (tweet['user']['name'],
                                                tweet['text']))
                                logging.info(tweet['created_at'])
                                search_cache = SearchCache()
                                search_cache.tweet_id = int(tweet_id)
                                search_cache.keyword_key = search_keywords.key()
                                search_cache.name = tweet['user']['name']
                                search_cache.screen_name = tweet['user']['screen_name']
                                search_cache.profile_image_url = tweet['user']['profile_image_url']
                                search_cache.text = tweet['text']
                                search_cache.location = tweet['user']['location']
                                search_cache.time_zone = tweet['user']['time_zone']
                                search_cache.tweeted_at = datetime.datetime.strptime(
                                    tweet['created_at'],
                                    "%a %b %d %H:%M:%S +0000 %Y")
                                search_cache.put()
                            else:
                                logging.info('Skip. tweet_id: %d' % int(tweet_id))
                        except Exception, error:
                            logging.error('Cache Failed: %s' % error)
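    # The handlers above read and write a SearchCache db.Model defined
    # elsewhere in the project. As a rough sketch of what they assume (only
    # the properties actually used here are listed, and the property types
    # are assumptions, not taken from the original source):
    #
    #   class SearchCache(db.Model):
    #       tweet_id = db.IntegerProperty()
    #       keyword_key = db.ReferenceProperty(SearchKeywords)
    #       name = db.StringProperty()
    #       screen_name = db.StringProperty()
    #       profile_image_url = db.StringProperty()
    #       text = db.StringProperty()
    #       location = db.StringProperty()
    #       time_zone = db.StringProperty()
    #       tweeted_at = db.DateTimeProperty()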