def get(self, user_id, page_id):
    """Serve the Google+ feed for *user_id* (and optional *page_id*).

    Applies a per-IP rate limit of 60 requests/minute, validates that the
    profile/page IDs are exactly 21 digits, serves a cached result when one
    exists (honoring ``?flush`` if enabled in config), and otherwise starts
    an authenticated fetch that completes in ``self._on_api_response``.
    """
    # Per-IP rate limiting: the counter lives in the cache under a key
    # derived from the remote IP.
    ratelimit_key = self.ratelimit_key_template % self.request.remote_ip
    remote_ip_rate = Cache.incr(ratelimit_key)
    if remote_ip_rate is None:
        # incr() fails on a missing key, so seed the counter with a 60s TTL.
        Cache.set(ratelimit_key, 1, time=60)
    elif remote_ip_rate > 60:
        # NOTE(review): 429 Too Many Requests would be the precise status
        # here, but changing it could affect existing clients — left as 503.
        self.set_status(503)
        self.set_header('Retry-After', '60')
        self.write('Rate limit exceeded. Please do not make more than 60 requests per minute.')
        # Don't log every single time we rate limit a host (that would get spammy fast),
        # but do log significant breakpoints on exactly how spammy a host is being.
        if remote_ip_rate in (61, 100, 1000, 10000):
            logging.info('Rate limited IP %s - %s requests/min' % (self.request.remote_ip, remote_ip_rate))
        return self.finish()

    self.gplus_user_id = user_id
    self.gplus_page_id = page_id

    # Validate IDs before doing any remote work.
    if len(user_id) != 21:
        self.write("Google+ profile IDs are exactly 21 digits long. Please specify a proper profile ID.")
        return self.finish()
    if page_id and len(page_id) != 21:
        self.write("Google+ page IDs are exactly 21 digits long. Please specify a proper page ID.")
        # BUG FIX: previously this fell through and issued the fetch anyway,
        # appending feed data after the error text. Abort exactly like the
        # profile-ID check above.
        return self.finish()

    self.cache_key = self.cache_key_template % user_id
    if page_id:
        self.cache_key += str(page_id)

    cached_result = Cache.get(self.cache_key)
    flush_requested = self.request.arguments.get('flush', [None])[0]
    if cached_result:
        # Serve from cache unless a flush was requested AND flushing is
        # allowed by config.
        if not Config.getboolean('cache', 'allow-flush') or not flush_requested:
            return self._respond(**cached_result)

    # Cache miss (or flush): fetch the feed asynchronously; the response is
    # handled (and cached) in self._on_api_response.
    if page_id:
        OAuth2Handler.authed_fetch(user_id, self.json_url % (page_id, self.request.remote_ip), self._on_api_response)
    else:
        OAuth2Handler.authed_fetch(user_id, self.json_url % ('me', self.request.remote_ip), self._on_api_response)
def get(self, user_id, page_id):
    """Handle a feed request for a Google+ profile and optional page.

    Flow: rate-limit by remote IP (60/min) -> validate the 21-digit IDs ->
    return a cached response when available (subject to the config-gated
    ``?flush`` argument) -> otherwise start an authenticated API fetch whose
    result arrives in ``self._on_api_response``.
    """
    # Bump (or create) this IP's request counter in the cache.
    ratelimit_key = self.ratelimit_key_template % self.request.remote_ip
    remote_ip_rate = Cache.incr(ratelimit_key)
    if remote_ip_rate is None:
        # The key didn't exist yet; start counting with a 60-second expiry.
        Cache.set(ratelimit_key, 1, time=60)
    elif remote_ip_rate > 60:
        self.set_status(503)
        self.set_header('Retry-After', '60')
        self.write('Rate limit exceeded. Please do not make more than 60 requests per minute.')
        # Don't log every single time we rate limit a host (that would get spammy fast),
        # but do log significant breakpoints on exactly how spammy a host is being.
        if remote_ip_rate in (61, 100, 1000, 10000):
            logging.info('Rate limited IP %s - %s requests/min' % (self.request.remote_ip, remote_ip_rate))
        return self.finish()

    self.gplus_user_id = user_id
    self.gplus_page_id = page_id

    # Reject malformed IDs up front, before touching the network.
    if len(user_id) != 21:
        self.write("Google+ profile IDs are exactly 21 digits long. Please specify a proper profile ID.")
        return self.finish()
    if page_id and len(page_id) != 21:
        self.write("Google+ page IDs are exactly 21 digits long. Please specify a proper page ID.")
        # BUG FIX: the original wrote the error but kept going, so the API
        # fetch still ran and its output followed the error message. Finish
        # the response here, matching the profile-ID branch.
        return self.finish()

    self.cache_key = self.cache_key_template % user_id
    if page_id:
        self.cache_key += str(page_id)

    cached_result = Cache.get(self.cache_key)
    flush_requested = self.request.arguments.get('flush', [None])[0]
    if cached_result:
        # A cached result short-circuits the fetch unless the caller asked
        # for a flush and config permits it.
        if not Config.getboolean('cache', 'allow-flush') or not flush_requested:
            return self._respond(**cached_result)

    # No usable cache entry: fetch asynchronously. 'me' targets the profile
    # itself when no page ID was supplied (same truthiness test as above).
    target = page_id if page_id else 'me'
    OAuth2Handler.authed_fetch(user_id, self.json_url % (target, self.request.remote_ip), self._on_api_response)
import hashlib import json import logging from util.config import Config if Config.getboolean('cache', 'memcache'): try: import memcache _hush_pyflakes = (memcache,) del _hush_pyflakes except ImportError: logging.error("Config file has memcache enabled, but couldn't import memcache! Not caching data.") memcache = None else: memcache = None class Cache(object): """Wrapper around a singleton memcache client. Note: If the 'memcache' library is not available, this wrapper will do nothing - call() will transparently always call the provided function, and everything else will simply return None. """ client = memcache and memcache.Client([Config.get('cache', 'memcache-uri')], debug=0) @classmethod def call(cls, func, *args, **kwargs): if not cls.client: