def __setitem__(self, key, value):
    if key.find('/') >= 0:
        # Don't flood the cache with irrelevant "reply_to/..." and
        # "search/..." streams, we only need the main streams.
        return
    value = int(value)
    if value > self.get(key, 0):
        JsonCache.__setitem__(self, key, value)
def __setitem__(self, key, value):
    if key.find('/') >= 0:
        # Don't flood the cache with irrelevant "reply_to/..." and
        # "search/..." streams, we only need the main streams.
        return
    # Thank SCIENCE for lexically-sortable timestamp strings!
    if value > self.get(key, ''):
        JsonCache.__setitem__(self, key, value)
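# A quick standalone illustration (not part of the cache module) of why the
# plain string comparison above is enough: ISO 8601-style timestamp strings
# sort lexically in the same order as chronologically, so the newest value
# always wins.  The sample timestamps here are made up for the demo.
timestamps = [
    '2013-01-02T09:00:00Z',
    '2012-12-31T23:59:59Z',
    '2013-01-02T08:30:00Z',
]
assert sorted(timestamps) == [
    '2012-12-31T23:59:59Z',
    '2013-01-02T08:30:00Z',
    '2013-01-02T09:00:00Z',
]
# Pairwise, a later timestamp is strictly "greater than" an earlier one.
assert '2013-01-02T09:00:00Z' > '2013-01-02T08:30:00Z'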
class RateLimiter(BaseRateLimiter):
    """Twitter rate limiter."""

    def __init__(self):
        self._limits = JsonCache('twitter-ratelimiter')

    def _sanitize_url(self, uri):
        # Cache the URL sans any query parameters.
        return uri.host + uri.path

    def wait(self, message):
        # If we haven't seen this URL, default to no wait.
        seconds = self._limits.pop(self._sanitize_url(message.get_uri()), 0)
        log.debug('Sleeping for {} seconds!'.format(seconds))
        time.sleep(seconds)
        # Don't sleep the same length of time more than once!
        self._limits.write()

    def update(self, message):
        info = message.response_headers
        url = self._sanitize_url(message.get_uri())
        # This is time in the future, in UTC epoch seconds, at which the
        # current rate limiting window expires.
        rate_reset = info.get('X-Rate-Limit-Reset')
        # This is the number of calls still allowed in this window.
        rate_count = info.get('X-Rate-Limit-Remaining')
        if None not in (rate_reset, rate_count):
            rate_reset = int(rate_reset)
            rate_count = int(rate_count)
            rate_delta = abs(rate_reset - time.time())
            if rate_count > 5:
                # If there are more than 5 calls allowed in this window,
                # then do no rate limiting.
                pass
            elif rate_count < 1:
                # There are no calls remaining, so wait until the close of
                # the current window.
                self._limits[url] = rate_delta
            else:
                wait_secs = rate_delta / rate_count
                self._limits[url] = wait_secs
        log.debug('Next access to {} must wait {} seconds!'.format(
            url, self._limits.get(url, 0)))
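# A rough sketch of how the limiter above is meant to be driven: call
# update() after each Twitter response so the per-URL delay is recorded,
# then wait() before the next request to the same endpoint.  FakeUri and
# FakeMessage are hypothetical stand-ins for the libsoup message objects
# the real code receives; only the attributes RateLimiter actually touches
# are provided.  This assumes the RateLimiter above (and its JsonCache
# backing) is importable.
import time


class FakeUri:
    host = 'api.twitter.com'
    path = '/1.1/statuses/home_timeline.json'


class FakeMessage:
    # Headers describing a window that closes in 9 seconds with 3 calls left.
    response_headers = {
        'X-Rate-Limit-Reset': str(int(time.time()) + 9),
        'X-Rate-Limit-Remaining': '3',
    }

    def get_uri(self):
        return FakeUri()


limiter = RateLimiter()
message = FakeMessage()
limiter.update(message)   # records a ~3 second delay for this URL
limiter.wait(message)     # sleeps that delay, then forgets it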
def test_writes(self):
    cache = JsonCache('stuff')
    cache.update(dict(pi=289/92))
    with open(self._root.format('stuff'), 'r') as fd:
        empty = fd.read()
    self.assertEqual(empty, '{}')
    cache.write()
    with open(self._root.format('stuff'), 'r') as fd:
        result = fd.read()
    self.assertEqual(result, '{"pi": 3.141304347826087}')
def test_total_corruption(self):
    shutil.copyfile(
        resource_filename('friends.tests.data', 'facebook_ids_corrupt.json'),
        os.path.join(self._temp_cache, 'corrupt.json'))
    cache = JsonCache('corrupt')
    self.assertEqual(repr(cache), '{}')
def test_invalid_json(self):
    shutil.copyfile(
        resource_filename('friends.tests.data', 'facebook_ids_not.json'),
        os.path.join(self._temp_cache, 'invalid.json'))
    cache = JsonCache('invalid')
    self.assertEqual(repr(cache), '{}')
def test_values(self):
    cache = JsonCache('bar')
    cache['hello'] = 'world'
    with open(self._root.format('bar'), 'r') as fd:
        result = fd.read()
    self.assertEqual(result, '{"hello": "world"}')
def test_creation(self):
    cache = JsonCache('foo')
    with open(self._root.format('foo'), 'r') as fd:
        empty = fd.read()
    self.assertEqual(empty, '{}')