def test_remove_expired_expire_by_default(self):
    requests_cache.install_cache(backend='memory',
                                 session_factory=PerURLCacheSession,
                                 expire_after=1)
    response = requests.get(self.url)
    assert not response.from_cache
    response = requests.get(self.url)
    assert response.from_cache

    second_url = 'https://httpbin.org/anything'
    register_url(second_url, 10)
    response = requests.get(second_url)
    assert not response.from_cache
    response = requests.get(second_url)
    assert response.from_cache

    assert len(requests.Session().cache.responses) == 2
    time.sleep(1)
    requests_cache.core.remove_expired_responses()
    assert len(requests.Session().cache.responses) == 1
def getfile(project_name, file_path):
    fmt = "%a, %d %b %Y %H:%M:%S %Z"
    api_base = 'https://gitlab.com/api/v4'
    pid = quote_plus(project_name)
    fid = quote_plus(file_path)
    url = f'{api_base}/projects/{pid}/repository/files/{fid}?ref=master'
    resp = requests.get(url)
    data = resp.json()
    cached_last_commit_id = data['last_commit_id']
    print(','.join(
        [project_name.split('/')[-1], file_path, cached_last_commit_id[:8]]))
    if resp.from_cache:
        # Revalidate the cached response: compare its commit ID against the
        # one the server currently reports, using a cheap HEAD request.
        old_date = datetime.datetime.strptime(resp.headers.get('Date'), fmt)
        resp = requests.head(url)
        head = resp.headers
        server_last_commit_id = head.get('x-gitlab-last-commit-id')
        new_date = datetime.datetime.strptime(head.get('Date'), fmt)
        delta = new_date - old_date  # Age of the cached response (not used further here).
        if cached_last_commit_id != server_last_commit_id:
            print(f"Response was cached on {old_date}")
            print(
                f"Last Commit ID differs between cache ({cached_last_commit_id[:8]}) and server ({server_last_commit_id[:8]})."
            )
            register_url(url, 0)  # Mark as "expired".
            requests_cache.remove_expired_responses()
            resp = requests.get(url, expire_after='default')
            data = resp.json()
    return data
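# Hypothetical invocation of getfile(); the project and file path below are
# illustrative placeholders, not values from the original code. It assumes the
# GitLab v4 repository-files endpoint returns JSON fields such as 'file_name'
# and 'last_commit_id'.
readme = getfile('gitlab-org/gitlab', 'README.md')
print(readme['file_name'], readme['last_commit_id'][:8])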
def test_remove_expired(self):
    response = requests.get(self.url)
    assert not response.from_cache
    response = requests.get(self.url)
    assert response.from_cache

    second_url = 'https://httpbin.org/anything'
    register_url(second_url, 2)
    response = requests.get(second_url)
    assert not response.from_cache
    response = requests.get(second_url)
    assert response.from_cache

    third_url = 'https://httpbin.org/'
    response = requests.get(third_url, expire_after=10)
    assert not response.from_cache
    response = requests.get(third_url)
    assert response.from_cache

    assert len(requests.Session().cache.responses) == 3
    time.sleep(2)
    # TODO: This should be without `core`. Investigate!
    # requests_cache.remove_expired_responses()
    requests_cache.core.remove_expired_responses()
    assert len(requests.Session().cache.responses) == 2
def test_auto_clear_expired(self):
    requests_cache.install_cache(backend='memory',
                                 session_factory=PerURLCacheSession,
                                 expire_after=1)
    second_url = 'https://httpbin.org/anything'
    register_url(self.url, 5)
    response = requests.get(self.url)
    assert not response.from_cache
    response = requests.get(self.url)
    assert response.from_cache
    response = requests.get(second_url)
    assert not response.from_cache

    time.sleep(2)
    response = requests.get(self.url)
    assert response.from_cache
    response = requests.get(second_url, expire_after=10)
    assert not response.from_cache
    response = requests.get(second_url)
    assert response.from_cache
def test_register_url_positive(self):
    register_url(self.url, 5)
    response = requests.get(self.url)
    assert not response.from_cache
    response = requests.get(self.url)
    assert response.from_cache
def test_register_url_change(self):
    register_url(self.url, -1)
    response = requests.get(self.url)
    assert not response.from_cache
    # This should delete the cached entry on a new request
    register_url(self.url, 2)
    response = requests.get(self.url)
    assert not response.from_cache
    time.sleep(2)
    response = requests.get(self.url)
    assert not response.from_cache
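# Minimal sketch of the per-URL registry the tests above assume. The name
# EXPIRE_AFTER_REGISTRY and the plain-dict storage are assumptions for
# illustration only, not the actual implementation behind PerURLCacheSession.
EXPIRE_AFTER_REGISTRY = {}


def register_url(url, expire_after):
    """Remember a per-URL expiration in seconds.

    A negative value means "do not cache"; re-registering a URL is expected to
    invalidate any response already cached for it (see test_register_url_change
    above).
    """
    EXPIRE_AFTER_REGISTRY[url] = expire_after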
requests_cache.install_cache(backend='memory',
                             session_factory=PerURLCacheSession)


def p(response, should_be=False):
    print('Request to', response.url, 'was',
          'cached;' if response.from_cache else 'not cached;',
          'should be', 'cached' if should_be else 'not cached')


# Normal caching forever
p(requests.get('https://httpbin.org/get'))
p(requests.get('https://httpbin.org/get'), True)

# Disable caching for /anything
p(requests.get('https://httpbin.org/anything', expire_after=-1))
p(requests.get('https://httpbin.org/anything'))
p(requests.get('https://httpbin.org/anything'))

# It still works for /get
p(requests.get('https://httpbin.org/get'), True)

# Register get for an expiration of 1 second
register_url('https://httpbin.org/get', 1)

# Registration causes a reset, thus get is queried again...
p(requests.get('https://httpbin.org/get'))
# ... but cached for 1 second
p(requests.get('https://httpbin.org/get'), True)

# After > 1 second ...
time.sleep(1)
p(requests.get('https://httpbin.org/get'))