def test_expire_cache(self):
    """Expired cache entries must not be served.

    Both the initial request and a request made after the entry has
    expired should hit the network, each taking at least ``delay``
    seconds against httpbin's /delay endpoint.
    """
    delay = 1
    url = httpbin('delay/%s' % delay)
    s = CachedSession(CACHE_NAME, backend=CACHE_BACKEND, expire_after=0.06)
    try:
        start = time.time()
        s.get(url)
        # First request is uncached, so it must take at least `delay`.
        self.assertGreaterEqual(time.time() - start, delay)
        # Sleep well past expire_after (0.06s) so the entry goes stale.
        time.sleep(0.5)
        start = time.time()
        s.get(url)
        # Stale entry must not be served: the request hits the network again.
        self.assertGreaterEqual(time.time() - start, delay)
    finally:
        # Close the session even when an assertion above fails.
        s.close()
def query_piston(ctx: click.Context, console: Console, payload: PistonQuery, cache_run: bool = False) -> dict:
    """Send a post request to the piston API with the code parameter.

    On a network failure the query is saved to the local cache (unless
    this already is a cached run) and the command exits via ``ctx.exit()``.
    """
    output_json = {
        "language": payload.language,
        "source": payload.code,
        "args": payload.args,
        "stdin": payload.stdin,
    }
    # Session name includes the version to avoid caching conflicts across
    # releases.  Using `with` guarantees the session is closed: the original
    # trailing close() call was unreachable (placed after return/ctx.exit()).
    with CachedSession(
            f"piston-v{__version__}",
            backend=FileCache(REQUEST_CACHE_LOCATION),
            expire_after=REQUEST_CACHE_DURATION,
    ) as http_session:
        with console.status("Compiling", spinner=random.choice(SPINNERS)):
            logging.debug(f"Requests emkc v1 API with payload: {output_json}")
            try:
                return http_session.post(
                    url="https://emkc.org/api/v1/piston/execute",
                    data=json.dumps(output_json),
                    timeout=3,
                ).json()
            except requests.exceptions.Timeout:
                message = "Connection timed out. Please check your connection and try again."
            except requests.exceptions.RequestException as e:
                message = f"Request raised exception: {e}\n"
            # Only reached on failure.  The original code applied
            # `if location else ""` to the *whole* concatenated string, so
            # nothing at all was printed when the query was not cached.
            location = cache_query(output_json) if not cache_run else None
            if location:
                message += f"Cached query saved at {location}"
            console.print(message)
            ctx.exit()
# Resolve the selected stream: choose the MIME type / inputstream.adaptive
# properties from the media URL, then hand the ListItem back to Kodi.
# NOTE(review): `media_url`, `li`, `image`, `addon`, `plugin` and `s` are
# defined elsewhere in this file — this chunk appears to be the tail of a
# stream-resolver handler; confirm its original indentation/scope.
if "playlist.m3u8" in media_url:
    # HLS stream: prefer inputstream.adaptive when the user enabled it.
    if addon.getSetting("inputstream") == "true":
        li.setMimeType("application/vnd.apple.mpegurl")
        li.setProperty("inputstreamaddon", "inputstream.adaptive")
        li.setProperty("inputstream.adaptive.manifest_type", "hls")
        # Request headers travel appended to the URL after a '|' separator.
        li.setProperty("inputstream.adaptive.stream_headers",
                       media_url.split("|")[-1])
    else:
        li.setMimeType("application/vnd.apple.mpegurl")
else:
    # Plain MPEG-TS stream.
    li.setMimeType("video/x-mpegts")
li.setArt({"thumb": image, "icon": image})
li.setContentLookup(False)
xbmcplugin.setResolvedUrl(plugin.handle, True, li)


if __name__ == "__main__":
    try:
        plugin.run(sys.argv)
        # Persist session cookies across plugin invocations.
        s.cookies.save(ignore_discard=True, ignore_expires=True)
        s.close()
    except requests.exceptions.RequestException as e:
        # Surface network errors to the user and fail the directory listing.
        dialog = xbmcgui.Dialog()
        dialog.notification(plugin.name, str(e), xbmcgui.NOTIFICATION_ERROR)
        traceback.print_exc()
        xbmcplugin.endOfDirectory(plugin.handle, False)
    except (ImportError, UnicodeDecodeError):
        """ Invalidate cache when requests version changes """
        s.cache.clear()
class BouyguesTV:
    """Client for the Bouygues Telecom live-TV service.

    Wraps a cached HTTP session and handles CAS login, OAuth2 implicit-flow
    token refresh, channel listing, EPG lookups and stream-URL resolution.
    """

    _USER_AGENT = ("Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:84.0) "
                   "Gecko/20100101 Firefox/84.0")
    # CAS login form endpoint.
    _LOGIN_URL = "https://www.mon-compte.bouyguestelecom.fr/cas/login"
    _OAUTH2_URL = "https://oauth2.bouyguestelecom.fr/authorize"
    _BASE_URL = "https://www.bouyguestelecom.fr/tv-direct"
    _CHANNEL_LIST_URL = "{}/data/list-chaines.json".format(_BASE_URL)
    # AWS API Gateway endpoint exchanging a channel URL for a stream URL.
    _STREAM_API_URL = (
        "https://8wwwu6s5l4.execute-api.eu-west-1.amazonaws.com/Prod/get-url")
    _EPG_BASE_URL = "{}/data/epg".format(_BASE_URL)
    # EPG timestamps are local French time.
    _EPG_TIMEZONE = dateutil.tz.gettz("Europe/Paris")

    # pylint: disable=too-many-arguments
    def __init__(
            self,
            username,
            password,
            access_token=None,
            id_token=None,
            cache_name="bouyguestv",
    ):
        # type: (Text, Text, Optional[Text], Optional[Text], Text) -> None
        """Store credentials, build the cached session and fetch channels.

        Previously obtained tokens may be passed in to skip a new login.
        """
        self._username = username
        self._password = password
        self._access_token = access_token
        self._id_token = id_token
        # 6-hour (21600s) response cache; every response is routed through
        # the raise-for-status hook below.
        self._session = CachedSession(cache_name=cache_name,
                                      backend="sqlite",
                                      expire_after=21600)
        self._session.headers.update({"User-Agent": self._USER_AGENT})
        self._session.hooks = {"response": [self._requests_raise_status]}
        self._channels = self._get_channels()

    def __enter__(self):
        return self

    def __exit__(self, *args):
        # Close the underlying HTTP session on context exit.
        if self._session:
            self._session.close()

    @property
    def access_token(self):
        # type: () -> Optional[Text]
        """OAuth2 access token (None until the first successful refresh)."""
        return self._access_token

    @property
    def id_token(self):
        # type: () -> Optional[Text]
        """OAuth2 id token, a JWT (None until the first refresh)."""
        return self._id_token

    @staticmethod
    def _requests_raise_status(response, *_args, **_kwargs):
        # type: (Response, Any, Any) -> None
        """Response hook: raise on HTTP errors, mapping 401 to a login error."""
        try:
            response.raise_for_status()
        except RequestException as ex:
            if response.status_code == 401:
                raise BouyguesTVLoginException(ex)
            raise ex

    def _parse_id_token(self):
        # type: () -> Tuple[int, Optional[Text]]
        """Return (expiry epoch seconds, person id) from the id token."""
        # Signature verification is skipped on purpose: we only read claims
        # from our own freshly issued token.
        jwt_payload = jwt.decode(self._id_token, verify=False)
        return jwt_payload.get("exp", 0), jwt_payload.get("id_personne")

    def _login(self):
        # type: () -> None
        """Log in to the CAS endpoint with the stored credentials."""
        response = self._session.get(self._LOGIN_URL)
        # The login form carries hidden fields (execution/CSRF tokens) that
        # must be posted back alongside the credentials.
        payload = dict(
            re.findall(
                r'<input type="hidden" name="(.*?)" value="(.*?)"/>',
                response.text,
            ))
        payload["username"] = self._username
        payload["password"] = self._password
        self._session.post(self._LOGIN_URL, data=payload)

    def _refresh_token(self):
        # type: () -> None
        """Ensure a valid token pair, logging in again when expired.

        Raises BouyguesTVException when no tokens can be obtained.
        """
        if self._access_token and self._id_token:
            id_token_exp, _ = self._parse_id_token()
            now = time.mktime(datetime.now().timetuple())
            # Keep the current tokens while the id token is still valid.
            if id_token_exp and id_token_exp > now:
                return
        self._login()
        payload = {
            "client_id": "a360.bouyguestelecom.fr",
            "response_type": "id_token token",
            "redirect_uri": "https://www.bouyguestelecom.fr/mon-compte/",
        }
        response = self._session.post(self._OAUTH2_URL, data=payload)
        # Implicit-flow tokens come back in the redirect URL's fragment.
        fragment = dict(parse_qsl(urlparse(response.url).fragment))
        self._access_token = fragment.get("access_token") or ""
        self._id_token = fragment.get("id_token") or ""
        if not self._access_token or not self._id_token:
            raise BouyguesTVException("Unable to retrieve token")

    def _get_channels(self):
        # type: () -> Dict[Text, Any]
        """Fetch the channel list, keyed by channel title."""
        return OrderedDict((c.get("title"), c) for c in self._session.get(
            self._CHANNEL_LIST_URL).json().get("body", []))

    def get_channels(self):
        # type: () -> List[Text]
        """Return the available channel names."""
        return list(self._channels.keys())

    @classmethod
    def _epg_datetime(cls, epg_datetime):
        # type: (Optional[Text]) -> Optional[datetime]
        """Parse an EPG ISO timestamp into a Paris-localized datetime."""
        if not epg_datetime:
            return None
        return isoparse(epg_datetime  # type: ignore
                        ).replace(tzinfo=cls._EPG_TIMEZONE)

    def _get_channel_epg(self, epg_channel_number, day):
        # type: (int, date) -> List[Dict[Text, Any]]
        """Fetch the programme list of one channel for the given day."""
        # NOTE(review): the month is zero-based and the fields are not
        # zero-padded — presumably what the EPG endpoint expects; confirm.
        params = {"d": "{}{}{}".format(day.year, day.month - 1, day.day)}
        epg_url = "{}/{}.json".format(self._EPG_BASE_URL, epg_channel_number)
        return (self._session.get(epg_url,
                                  params=params).json().get("programs", []))

    def get_channel_item(self, channel):
        # type: (Text) -> Optional[ParsedItem]
        """Build a ParsedItem (label, url, info, art) for the channel's
        currently airing programme.

        Returns None when the channel has no EPG number; raises
        BouyguesTVUnknownChannelException for unknown channels.
        """
        now = (
            datetime.utcnow().replace(tzinfo=dateutil.tz.UTC).astimezone(
                tz=self._EPG_TIMEZONE)  # type: ignore
        )
        if channel not in self._channels:
            raise BouyguesTVUnknownChannelException(channel)
        epg_channel_number = self._channels[channel].get("epgChannelNumber")
        if not epg_channel_number:
            return None
        info = {}  # type: Dict[Text, Any]
        epg = self._get_channel_epg(epg_channel_number, now.date())
        # Get current program
        for program in epg:
            start_time = self._epg_datetime(program.get("fullStartTime"))
            end_time = self._epg_datetime(program.get("fullEndTime"))
            if start_time and end_time and start_time <= now < end_time:
                info["duration"] = int((end_time - start_time).total_seconds())
                break
        else:
            # No programme covers "now": fall back to empty metadata.
            program = {}
        info["genre"] = program.get("genre")
        info["year"] = program.get("productionDate")
        info["episode"] = program.get("episodeNumber")
        info["season"] = program.get("seasonNumber")
        if program.get("pressRank"):
            # NOTE(review): doubling suggests a 0-5 press rank mapped onto a
            # 0-10 rating scale — confirm against the EPG data.
            info["rating"] = 2 * float(program["pressRank"])
        # Don't mark live streams as read once played
        info["playcount"] = 0
        info["cast"] = [(
            "{} {}".format(c.get("firstName", ""), c.get("lastName",
                                                         "")).strip(),
            c.get("role"),
        ) for c in program.get("characters", [])]
        info["director"] = program.get("realisateur")
        info["plot"] = program.get("summary")
        info["title"] = program.get("title") or program.get("longtitle")
        if info["episode"] or info["season"]:
            info["mediatype"] = "episode"
        else:
            info["mediatype"] = "movie"
        url_media = program.get("urlMedia")
        # EPG artwork URLs may be relative to the TV site.
        if url_media and not url_media.startswith("http"):
            url_media = "{}/{}".format(self._BASE_URL, url_media)
        art = {
            "fanart": url_media,
            "icon": self._channels[channel].get("logoUrl"),
            "landscape": url_media,
        }  # type: Art
        url = {"mode": "watch", "channel": channel}  # type: Url
        label = "[B]{}[/B]".format(channel)
        if info.get("title"):
            label += " – {}".format(info["title"])
        return ParsedItem(label, url, info, art)

    def get_channel_stream_url(self, channel):
        # type: (Text) -> Text
        """Resolve the playable stream URL for a channel.

        Refreshes tokens if needed, then calls the stream API with a
        bearer token. Raises BouyguesTVUnknownChannelException for
        unknown channels.
        """
        if channel not in self._channels:
            raise BouyguesTVUnknownChannelException(channel)
        channel_url = self._channels.get(channel, {}).get("StreamURL")
        self._refresh_token()
        _, id_personne = self._parse_id_token()
        payload = {
            "id_personne": id_personne,
            "channel_url": channel_url,
        }
        headers = {
            "authorization": "Bearer {}".format(self._access_token),
            "origin": "https://www.bouyguestelecom.fr",
        }
        response = self._session.post(
            self._STREAM_API_URL,
            json=payload,
            headers=headers,
        )
        return response.json().get("urlFlux")
class CacheTestCase(unittest.TestCase):
    """Integration tests for requests_cache, exercised against httpbin.

    Fix applied: three tests passed ``allowable_methods=('POST')`` — a
    plain *string*, not a tuple (``('POST')`` == ``'POST'``); they only
    worked via accidental substring matching. Changed to ``('POST',)``,
    consistent with the sibling tests that use ``('GET', 'POST')``.
    """

    def setUp(self):
        # Fresh session and empty cache per test; also make sure a cache
        # installed globally by a previous test does not leak in.
        self.s = CachedSession(CACHE_NAME,
                               backend=CACHE_BACKEND,
                               fast_save=FAST_SAVE)
        self.s.cache.clear()
        requests_cache.uninstall_cache()

    @classmethod
    def tearDownClass(cls):
        """Remove the on-disk cache file once the suite finishes."""
        super(CacheTestCase, cls).tearDownClass()
        filename = "{0}.{1}".format(CACHE_NAME, CACHE_BACKEND)
        if os.path.exists(filename):
            try:
                os.unlink(filename)
            except OSError:
                pass

    def tearDown(self):
        self.s.close()

    def test_expire_cache(self):
        """Expired entries are not served: both requests hit the network."""
        delay = 1
        url = httpbin('delay/%s' % delay)
        s = CachedSession(CACHE_NAME, backend=CACHE_BACKEND, expire_after=0.06)
        t = time.time()
        r = s.get(url)
        delta = time.time() - t
        self.assertGreaterEqual(delta, delay)
        # Sleep past expire_after so the entry goes stale.
        time.sleep(0.5)
        t = time.time()
        r = s.get(url)
        delta = time.time() - t
        self.assertGreaterEqual(delta, delay)
        s.close()

    def test_delete_urls(self):
        """delete_url removes a cached response by URL."""
        url = httpbin('get')
        r = self.s.get(url)
        assert self.s.cache.has_url(url)
        self.s.cache.delete_url(url)
        assert not self.s.cache.has_url(url)

    def test_unregistered_backend(self):
        with self.assertRaises(ValueError):
            CachedSession(CACHE_NAME, backend='nonexistent')

    @mock.patch('requests_cache.backends.registry')
    def test_missing_backend_dependency(self, mocked_registry):
        # Testing that the correct error is thrown when a user does not have
        # the Python package `redis` installed. We mock out the registry
        # to simulate `redis` not being installed.
        mocked_registry.__getitem__.side_effect = KeyError
        with self.assertRaises(ImportError):
            CachedSession(CACHE_NAME, backend='redis')

    def test_hooks(self):
        """Response hooks fire on every call, cached or not."""
        state = defaultdict(int)
        for hook in ('response', ):  # TODO it's only one hook here

            def hook_func(r, *args, **kwargs):
                state[hook] += 1
                return r

            n = 5
            for i in range(n):
                r = self.s.get(httpbin('get'), hooks={hook: hook_func})
            self.assertEqual(state[hook], n)

    def test_attr_from_cache_in_hook(self):
        """from_cache is visible inside a response hook after the 1st call."""
        state = defaultdict(int)
        hook = 'response'

        def hook_func(r, *args, **kwargs):
            if state[hook] > 0:
                self.assertTrue(r.from_cache)
            state[hook] += 1
            return r

        n = 5
        for i in range(n):
            r = self.s.get(httpbin('get'), hooks={hook: hook_func})
        self.assertEqual(state[hook], n)

    def test_post(self):
        """POST is not cached by default."""
        url = httpbin('post')
        r1 = json.loads(self.s.post(url, data={'test1': 'test1'}).text)
        r2 = json.loads(self.s.post(url, data={'test2': 'test2'}).text)
        self.assertIn('test2', r2['form'])
        req = Request('POST', url).prepare()
        self.assertFalse(self.s.cache.has_key(self.s.cache.create_key(req)))

    def test_disabled(self):
        """disabled()/cache_disabled() suppress cache reads and writes."""
        url = httpbin('get')
        requests_cache.install_cache(CACHE_NAME,
                                     backend=CACHE_BACKEND,
                                     fast_save=FAST_SAVE)
        requests.get(url)
        with requests_cache.disabled():
            for i in range(2):
                r = requests.get(url)
                self.assertFalse(getattr(r, 'from_cache', False))
        with self.s.cache_disabled():
            for i in range(2):
                r = self.s.get(url)
                self.assertFalse(getattr(r, 'from_cache', False))
        r = self.s.get(url)
        self.assertTrue(getattr(r, 'from_cache', False))

    def test_enabled(self):
        """enabled() installs the cache only inside the context."""
        url = httpbin('get')
        options = dict(cache_name=CACHE_NAME,
                       backend=CACHE_BACKEND,
                       fast_save=FAST_SAVE)
        with requests_cache.enabled(**options):
            r = requests.get(url)
            self.assertFalse(getattr(r, 'from_cache', False))
            for i in range(2):
                r = requests.get(url)
                self.assertTrue(getattr(r, 'from_cache', False))
        r = requests.get(url)
        self.assertFalse(getattr(r, 'from_cache', False))

    def test_content_and_cookies(self):
        """Cached responses replay body and cookies consistently."""
        requests_cache.install_cache(CACHE_NAME, CACHE_BACKEND)
        s = requests.session()

        def js(url):
            return json.loads(s.get(url).text)

        r1 = js(httpbin('cookies/set/test1/test2'))
        with requests_cache.disabled():
            r2 = js(httpbin('cookies'))
        self.assertEqual(r1, r2)
        r3 = js(httpbin('cookies'))
        with requests_cache.disabled():
            r4 = js(httpbin('cookies/set/test3/test4'))
        # from cache
        self.assertEqual(r3, js(httpbin('cookies')))
        # updated
        with requests_cache.disabled():
            self.assertEqual(r4, js(httpbin('cookies')))
        s.close()

    # TODO: Create mock responses instead of depending on httpbin
    @pytest.mark.skip(
        reason='httpbin.org/relative-redirect no longer returns redirects')
    def test_response_history(self):
        """Redirect history is restored from the cache."""
        r1 = self.s.get(httpbin('relative-redirect/3'))

        def test_redirect_history(url):
            r2 = self.s.get(url)
            self.assertTrue(r2.from_cache)
            for r11, r22 in zip(r1.history, r2.history):
                self.assertEqual(r11.url, r22.url)

        test_redirect_history(httpbin('relative-redirect/3'))
        test_redirect_history(httpbin('relative-redirect/2'))
        r3 = requests.get(httpbin('relative-redirect/1'))
        self.assertEqual(len(r3.history), 1)

    # TODO: Create mock responses instead of depending on httpbin
    @pytest.mark.skip(
        reason='httpbin.org/relative-redirect no longer returns redirects')
    def test_response_history_simple(self):
        r1 = self.s.get(httpbin('relative-redirect/2'))
        r2 = self.s.get(httpbin('relative-redirect/1'))
        self.assertTrue(r2.from_cache)

    def post(self, data):
        """Helper: POST `data` to httpbin and return the decoded JSON."""
        return json.loads(self.s.post(httpbin('post'), data=data).text)

    def test_post_params(self):
        # issue #2
        self.s = CachedSession(CACHE_NAME,
                               CACHE_BACKEND,
                               allowable_methods=('GET', 'POST'))
        d = {'param1': 'test1'}
        for _ in range(2):
            self.assertEqual(self.post(d)['form'], d)
        d = {'param1': 'test1', 'param3': 'test3'}
        self.assertEqual(self.post(d)['form'], d)
        self.assertTrue(self.s.post(httpbin('post'), data=d).from_cache)
        d.update({'something': 'else'})
        self.assertFalse(self.s.post(httpbin('post'), data=d).from_cache)

    def test_post_data(self):
        # issue #2, raw payload
        self.s = CachedSession(CACHE_NAME,
                               CACHE_BACKEND,
                               allowable_methods=('GET', 'POST'))
        d1 = json.dumps({'param1': 'test1'})
        d2 = json.dumps({'param1': 'test1', 'param2': 'test2'})
        d3 = str('some unicode data')
        bin_data = bytes('some binary data', 'utf8')
        for d in (d1, d2, d3):
            self.assertEqual(self.post(d)['data'], d)
            r = self.s.post(httpbin('post'), data=d)
            self.assertTrue(hasattr(r, 'from_cache'))
        self.assertEqual(self.post(bin_data)['data'], bin_data.decode('utf8'))
        r = self.s.post(httpbin('post'), data=bin_data)
        self.assertTrue(hasattr(r, 'from_cache'))

    def test_get_params_as_argument(self):
        """params= is folded into the cache key's URL."""
        for _ in range(5):
            p = {'arg1': 'value1'}
            r = self.s.get(httpbin('get'), params=p)
            self.assertTrue(self.s.cache.has_url(httpbin('get?arg1=value1')))

    @unittest.skipIf(sys.version_info < (2, 7), "No https in 2.6")
    def test_https_support(self):
        n = 10
        delay = 1
        url = 'https://httpbin.org/delay/%s?ar1=value1' % delay
        t = time.time()
        for _ in range(n):
            r = self.s.get(url, verify=False)
        self.assertLessEqual(time.time() - t, delay * n / 2)

    def test_from_cache_attribute(self):
        url = httpbin('get?q=1')
        self.assertFalse(self.s.get(url).from_cache)
        self.assertTrue(self.s.get(url).from_cache)
        self.s.cache.clear()
        self.assertFalse(self.s.get(url).from_cache)

    def test_gzip_response(self):
        url = httpbin('gzip')
        self.assertFalse(self.s.get(url).from_cache)
        self.assertTrue(self.s.get(url).from_cache)

    def test_close_response(self):
        for _ in range(3):
            r = self.s.get(httpbin("get"))
            r.close()

    def test_get_parameters_normalization(self):
        """GET params hash identically regardless of ordering."""
        url = httpbin("get")
        params = {"a": "a", "b": ["1", "2", "3"], "c": "4"}
        self.assertFalse(self.s.get(url, params=params).from_cache)
        r = self.s.get(url, params=params)
        self.assertTrue(r.from_cache)
        self.assertEqual(r.json()["args"], params)
        self.assertFalse(self.s.get(url, params={"a": "b"}).from_cache)
        self.assertTrue(
            self.s.get(url, params=sorted(params.items())).from_cache)

        class UserSubclass(dict):
            def items(self):
                return sorted(super(UserSubclass, self).items(), reverse=True)

        params["z"] = "5"
        custom_dict = UserSubclass(params)
        self.assertFalse(self.s.get(url, params=custom_dict).from_cache)
        self.assertTrue(self.s.get(url, params=custom_dict).from_cache)

    def test_post_parameters_normalization(self):
        params = {"a": "a", "b": ["1", "2", "3"], "c": "4"}
        url = httpbin("post")
        s = CachedSession(CACHE_NAME,
                          CACHE_BACKEND,
                          allowable_methods=('GET', 'POST'))
        self.assertFalse(s.post(url, data=params).from_cache)
        self.assertTrue(s.post(url, data=params).from_cache)
        self.assertTrue(s.post(url, data=sorted(params.items())).from_cache)
        self.assertFalse(
            s.post(url, data=sorted(params.items(), reverse=True)).from_cache)

    def test_stream_requests_support(self):
        """Streamed responses replay raw bytes and iter_lines from cache."""
        n = 100
        url = httpbin("stream/%s" % n)
        r = self.s.get(url, stream=True)
        first_char = r.raw.read(1)
        lines = list(r.iter_lines())
        self.assertTrue(first_char)
        self.assertEqual(len(lines), n)
        for i in range(2):
            r = self.s.get(url, stream=True)
            first_char_cached = r.raw.read(1)
            self.assertTrue(r.from_cache)
            cached_lines = list(r.iter_lines())
            self.assertEqual(cached_lines, lines)
            self.assertEqual(first_char, first_char_cached)

    def test_headers_in_get_query(self):
        """include_get_headers makes headers part of the cache key."""
        url = httpbin("get")
        s = CachedSession(CACHE_NAME, CACHE_BACKEND, include_get_headers=True)
        headers = {"Accept": "text/json"}
        self.assertFalse(s.get(url, headers=headers).from_cache)
        self.assertTrue(s.get(url, headers=headers).from_cache)
        headers["Accept"] = "text/xml"
        self.assertFalse(s.get(url, headers=headers).from_cache)
        self.assertTrue(s.get(url, headers=headers).from_cache)
        headers["X-custom-header"] = "custom"
        self.assertFalse(s.get(url, headers=headers).from_cache)
        self.assertTrue(s.get(url, headers=headers).from_cache)
        self.assertFalse(s.get(url).from_cache)
        self.assertTrue(s.get(url).from_cache)

    def test_str_and_repr(self):
        s = repr(CachedSession(CACHE_NAME, CACHE_BACKEND, expire_after=10))
        self.assertIn(CACHE_NAME, s)
        self.assertIn("10", s)

    @mock.patch("requests_cache.core.datetime")
    @mock.patch("requests_cache.backends.base.datetime")
    def test_return_old_data_on_error(self, datetime_mock_backend,
                                      datetime_mock):
        """old_data_on_error=True serves stale data when saving fails."""
        now = datetime(2021, 1, 1, 0, 0, 0, tzinfo=timezone.utc)
        datetime_mock_backend.now.return_value = now
        datetime_mock.now.return_value = now
        expire_after = 100
        url = httpbin("get")
        s = CachedSession(CACHE_NAME,
                          CACHE_BACKEND,
                          old_data_on_error=True,
                          expire_after=expire_after)
        header = "X-Tst"

        def get(n):
            return s.get(url, headers={header: n}).json()["headers"][header]

        get("expired")
        self.assertEqual(get("2"), "expired")
        datetime_mock.now.return_value = now + timedelta(
            seconds=expire_after * 2)
        with mock.patch.object(s.cache, "save_response",
                               side_effect=Exception):
            self.assertEqual(get("3"), "expired")
        with mock.patch(
                "requests_cache.core.OriginalSession.send") as send_mock:
            resp_mock = requests.Response()
            request = requests.Request("GET", url)
            resp_mock.request = request.prepare()
            resp_mock.status_code = 400
            resp_mock._content = '{"other": "content"}'
            send_mock.return_value = resp_mock
            self.assertEqual(get("4"), "expired")
            resp_mock.status_code = 200
            self.assertIs(s.get(url).content, resp_mock.content)
        # default behaviour
        datetime_mock.now.return_value = now + timedelta(
            seconds=expire_after * 5)
        s = CachedSession(CACHE_NAME,
                          CACHE_BACKEND,
                          old_data_on_error=False,
                          expire_after=expire_after)
        with mock.patch.object(s.cache, "save_response",
                               side_effect=Exception):
            with self.assertRaises(Exception):
                s.get(url)

    def test_ignore_parameters_get(self):
        """ignored_parameters are excluded from the GET cache key."""
        url = httpbin("get")
        ignored_param = "ignored"
        usual_param = "some"
        params = {ignored_param: "1", usual_param: "1"}
        s = CachedSession(CACHE_NAME,
                          CACHE_BACKEND,
                          ignored_parameters=[ignored_param])
        r = s.get(url, params=params)
        self.assertIn(ignored_param, r.json()['args'].keys())
        self.assertFalse(r.from_cache)
        self.assertTrue(s.get(url, params=params).from_cache)
        params[ignored_param] = "new"
        self.assertTrue(s.get(url, params=params).from_cache)
        params[usual_param] = "new"
        self.assertFalse(s.get(url, params=params).from_cache)

    def test_ignore_parameters_post(self):
        """ignored_parameters are excluded from the POST form cache key."""
        url = httpbin("post")
        ignored_param = "ignored"
        usual_param = "some"
        d = {ignored_param: "1", usual_param: "1"}
        s = CachedSession(
            CACHE_NAME,
            CACHE_BACKEND,
            # Fixed: ('POST') is a string, not a tuple.
            allowable_methods=('POST', ),
            ignored_parameters=[ignored_param],
        )
        r = s.post(url, data=d)
        self.assertIn(ignored_param, r.json()['form'].keys())
        self.assertFalse(r.from_cache)
        self.assertTrue(s.post(url, data=d).from_cache)
        d[ignored_param] = "new"
        self.assertTrue(s.post(url, data=d).from_cache)
        d[usual_param] = "new"
        self.assertFalse(s.post(url, data=d).from_cache)

    def test_ignore_parameters_post_json(self):
        """ignored_parameters are excluded from JSON POST bodies too."""
        url = httpbin("post")
        ignored_param = "ignored"
        usual_param = "some"
        d = {ignored_param: "1", usual_param: "1"}
        s = CachedSession(
            CACHE_NAME,
            CACHE_BACKEND,
            # Fixed: ('POST') is a string, not a tuple.
            allowable_methods=('POST', ),
            ignored_parameters=[ignored_param],
        )
        r = s.post(url, json=d)
        self.assertIn(ignored_param, json.loads(r.json()['data']).keys())
        self.assertFalse(r.from_cache)
        self.assertTrue(s.post(url, json=d).from_cache)
        d[ignored_param] = "new"
        self.assertTrue(s.post(url, json=d).from_cache)
        d[usual_param] = "new"
        self.assertFalse(s.post(url, json=d).from_cache)

    def test_ignore_parameters_post_raw(self):
        """Raw (non-form) POST bodies are keyed on their full content."""
        url = httpbin("post")
        ignored_param = "ignored"
        raw_data = "raw test data"
        s = CachedSession(
            CACHE_NAME,
            CACHE_BACKEND,
            # Fixed: ('POST') is a string, not a tuple.
            allowable_methods=('POST', ),
            ignored_parameters=[ignored_param],
        )
        self.assertFalse(s.post(url, data=raw_data).from_cache)
        self.assertTrue(s.post(url, data=raw_data).from_cache)
        raw_data = "new raw data"
        self.assertFalse(s.post(url, data=raw_data).from_cache)

    # TODO: Create mock responses instead of depending on httpbin
    @pytest.mark.skip(
        reason='httpbin.org/relative-redirect no longer returns redirects')
    @mock.patch("requests_cache.backends.base.datetime")
    @mock.patch("requests_cache.core.datetime")
    def test_remove_expired_entries(self, datetime_mock, datetime_mock2):
        """remove_expired_responses drops stale entries and their keys."""
        expire_after = timedelta(minutes=10)
        start_time = datetime.utcnow().replace(year=2010, minute=0)
        datetime_mock.utcnow.return_value = start_time
        datetime_mock2.utcnow.return_value = start_time
        s = CachedSession(CACHE_NAME, CACHE_BACKEND, expire_after=expire_after)
        s.get(httpbin('get'))
        s.get(httpbin('relative-redirect/3'))
        datetime_mock.utcnow.return_value = start_time + expire_after * 2
        datetime_mock2.utcnow.return_value = datetime_mock.utcnow.return_value
        ok_url = 'get?x=1'
        s.get(httpbin(ok_url))
        self.assertEqual(len(s.cache.responses), 3)
        self.assertEqual(len(s.cache.keys_map), 3)
        s.remove_expired_responses()
        self.assertEqual(len(s.cache.responses), 1)
        self.assertEqual(len(s.cache.keys_map), 0)
        self.assertIn(ok_url, list(s.cache.responses.values())[0][0].url)

    def test_cache_unpickle_errors(self):
        """A corrupt cache entry falls back to a live request."""
        url = httpbin('get?q=1')
        self.assertFalse(self.s.get(url).from_cache)
        with mock.patch(
                "requests_cache.backends.base.BaseCache.restore_response",
                side_effect=TypeError):
            resp = self.s.get(url)
            self.assertFalse(resp.from_cache)
            self.assertEqual(resp.json()["args"]["q"], "1")
        resp = self.s.get(url)
        self.assertTrue(resp.from_cache)
        self.assertEqual(resp.json()["args"]["q"], "1")

    def test_cache_date(self):
        """cache_date is None on a live response, identical on replays."""
        url = httpbin('get')
        response1 = self.s.get(url)
        response2 = self.s.get(url)
        response3 = self.s.get(url)
        self.assertEqual(response1.cache_date, None)
        self.assertTrue(isinstance(response2.cache_date, datetime))
        self.assertEqual(response2.cache_date, response3.cache_date)