def session_obj(self):
    """Build and return a CachedSession for ICE daily reports.

    GET and POST responses are cacheable; the 'smpbss' query parameter is
    excluded from cache keys (presumably a per-session token — TODO
    confirm against the ICE endpoint).  When no captcha is in the way,
    the consolidated end-of-day report form is fetched once (cache
    disabled) and its <option> list is scraped to refresh the actives
    table via self.update_actives().

    NOTE(review): indentation reconstructed from a collapsed source; the
    scrape block is assumed to sit inside the RECAPTCHA guard (otherwise
    `response` would be unbound when RECAPTCHA is set) — confirm.
    """
    session = CachedSession(allowable_methods=('GET', 'POST'),
                            ignored_parameters=['smpbss'])
    if not IceDaily.RECAPTCHA:
        # Fetch the form fresh — we never want a stale option list cached.
        with session.cache_disabled():
            response = session.get(
                url=
                'https://www.theice.com/marketdata/reports/datawarehouse/ConsolidatedEndOfDayReportPDF.shtml',
                headers={
                    'User-Agent': 'Mozilla/5.0',
                    'X-Requested-With': 'XMLHttpRequest'
                },
                params={
                    'selectionForm': '',
                    'exchangeCode': 'IFEU',
                    'optionRequest': self.flavor['optionRequest']
                })
        bs = BeautifulSoup(response.text)
        df = pd.DataFrame(
            [(opt['value'], opt.text) for opt in bs.find_all('option')],
            columns=["WebActiveCode", "ActiveName"])
        # Option values look like "<web code>|<active code>"; rows without
        # the separator carry no active code and are dropped below.
        df['ActiveCode'] = df.WebActiveCode.apply(
            lambda s: s.split('|', 1)[1] if '|' in s else None)
        df = df.dropna(how='any')
        self.update_actives(df)
    return session
def fetch_df(url):
    """Fetch an ArcGIS-style feature collection from *url* and return the
    feature attributes as a pandas DataFrame.

    If the first response (which may be served by a globally-installed
    cache) carries no "features" key, the URL is fetched once more with
    caching explicitly disabled before extracting the attributes.
    """
    payload = requests.get(url).json()
    if "features" not in payload:
        # Likely a stale or malformed cached body — retry, bypassing the cache.
        fresh = CachedSession()
        with fresh.cache_disabled():
            payload = fresh.get(url).json()
    attribute_rows = [item["attributes"] for item in payload["features"]]
    return pd.DataFrame(attribute_rows)
def api_get(endpoint, query, cache=False):
    """GET *endpoint* from the configured API, with *query* JSON-encoded
    into the ``q`` query-string parameter.

    With ``cache`` false, the request runs through a CachedSession whose
    cache is explicitly disabled (always fresh).  With ``cache`` true, it
    uses ``requests.get`` directly, which is cached only when a global
    requests_cache is installed — NOTE(review): presumably intentional;
    confirm with the call sites.

    Returns the ``objects`` payload on HTTP 200, otherwise ``{}``.
    """
    target = (app.config['API_URL'] + endpoint + '?q=' +
              urllib.parse.quote_plus(json.dumps(query)))
    request_kwargs = {
        'headers': gen_api_header(),
        'verify': app.config['VERIFY_SSL'],
    }
    if cache:
        r = requests.get(target, **request_kwargs)
    else:
        session = CachedSession()
        with session.cache_disabled():
            r = session.get(target, **request_kwargs)
    if r.status_code != 200:
        return {}
    # On success the API wraps the object data under 'objects'.
    return json.loads(r.text).get('objects')
def main():
    """Demonstrate requests-cache basics: repeated GETs are served from a
    sqlite-backed cache, cache_disabled() forces a live (uncached) fetch,
    and the cache contents are printed at the end."""
    session = CachedSession('example_cache', backend='sqlite')
    # Only the first call hits the network; the rest come from the cache.
    for _ in range(5):
        response = session.get('http://httpbin.org/get')
    # The speed-up is easier to notice against a deliberately slow endpoint.
    for _ in range(5):
        response = session.get('http://httpbin.org/delay/2')
    # Temporarily bypass the cache for a guaranteed-fresh, uncached page.
    with session.cache_disabled():
        print(session.get('http://httpbin.org/ip').text)
    # Show some debugging info about what ended up in the cache.
    print(session.cache)
    print('Cached URLS:', session.cache.urls)
class CacheTestCase(unittest.TestCase):
    """Integration tests for requests_cache.CachedSession against a live
    httpbin server (network access required; all tests share the on-disk
    cache named CACHE_NAME and are sensitive to request ordering).

    NOTE(review): this block was reconstructed from a collapsed source;
    statement grouping inside loops/with-blocks follows the upstream
    requests-cache test suite — confirm against it.  NOTE(review): the
    file defines CacheTestCase more than once; the last definition
    shadows the earlier ones at import time.
    """

    def setUp(self):
        # Fresh session with a cleared cache; also remove any globally
        # installed cache left behind by a previous test.
        self.s = CachedSession(CACHE_NAME,
                               backend=CACHE_BACKEND,
                               fast_save=FAST_SAVE)
        self.s.cache.clear()
        requests_cache.uninstall_cache()

    def test_expire_cache(self):
        # expire_after (0.06s) is far shorter than the endpoint delay, so
        # both requests must hit the network and each take >= `delay`.
        delay = 1
        url = httpbin('delay/%s' % delay)
        s = CachedSession(CACHE_NAME, backend=CACHE_BACKEND,
                          expire_after=0.06)
        t = time.time()
        r = s.get(url)
        delta = time.time() - t
        self.assertGreaterEqual(delta, delay)
        time.sleep(0.5)
        t = time.time()
        r = s.get(url)
        delta = time.time() - t
        self.assertGreaterEqual(delta, delay)

    def test_delete_urls(self):
        # A redirect chain caches every hop; delete_url only removes the
        # entry for the requested URL.
        url = httpbin('relative-redirect/3')
        r = self.s.get(url)
        for i in range(1, 4):
            self.assert_(
                self.s.cache.has_url(httpbin('relative-redirect/%s' % i)))
        self.s.cache.delete_url(url)
        self.assert_(not self.s.cache.has_url(url))

    def test_unregistered_backend(self):
        with self.assertRaises(ValueError):
            CachedSession(CACHE_NAME, backend='nonexistent')

    def test_hooks(self):
        # Response hooks must fire even when the response comes from cache.
        state = defaultdict(int)
        for hook in ('response',):  # TODO it's only one hook here

            def hook_func(r, *args, **kwargs):
                state[hook] += 1
                return r

            n = 5
            for i in range(n):
                r = self.s.get(httpbin('get'), hooks={hook: hook_func})
            self.assertEqual(state[hook], n)

    def test_attr_from_cache_in_hook(self):
        # From the second call on, the hook should see from_cache set.
        state = defaultdict(int)
        hook = 'response'

        def hook_func(r, *args, **kwargs):
            if state[hook] > 0:
                self.assert_(r.from_cache, True)
            state[hook] += 1
            return r

        n = 5
        for i in range(n):
            r = self.s.get(httpbin('get'), hooks={hook: hook_func})
        self.assertEqual(state[hook], n)

    def test_post(self):
        # POST is not in the default allowable_methods, so nothing is cached.
        url = httpbin('post')
        r1 = json.loads(self.s.post(url, data={'test1': 'test1'}).text)
        r2 = json.loads(self.s.post(url, data={'test2': 'test2'}).text)
        self.assertIn('test2', r2['form'])
        req = Request('POST', url).prepare()
        self.assert_(not self.s.cache.has_key(self.s.cache.create_key(req)))

    def test_disabled(self):
        # Both the global disabled() context and the session-level
        # cache_disabled() must bypass the cache, without clearing it.
        url = httpbin('get')
        requests_cache.install_cache(CACHE_NAME,
                                     backend=CACHE_BACKEND,
                                     fast_save=FAST_SAVE)
        requests.get(url)
        with requests_cache.disabled():
            for i in range(2):
                r = requests.get(url)
                self.assertFalse(getattr(r, 'from_cache', False))
        with self.s.cache_disabled():
            for i in range(2):
                r = self.s.get(url)
                self.assertFalse(getattr(r, 'from_cache', False))
        r = self.s.get(url)
        self.assertTrue(getattr(r, 'from_cache', False))

    def test_enabled(self):
        # Caching applies only inside the enabled() context.
        url = httpbin('get')
        options = dict(cache_name=CACHE_NAME,
                       backend=CACHE_BACKEND,
                       fast_save=FAST_SAVE)
        with requests_cache.enabled(**options):
            r = requests.get(url)
            self.assertFalse(getattr(r, 'from_cache', False))
            for i in range(2):
                r = requests.get(url)
                self.assertTrue(getattr(r, 'from_cache', False))
        r = requests.get(url)
        self.assertFalse(getattr(r, 'from_cache', False))

    def test_content_and_cookies(self):
        # Cached responses must replay body and cookies exactly; cookie
        # changes made while the cache is disabled are not cached.
        requests_cache.install_cache(CACHE_NAME, CACHE_BACKEND)
        s = requests.session()

        def js(url):
            return json.loads(s.get(url).text)

        r1 = js(httpbin('cookies/set/test1/test2'))
        with requests_cache.disabled():
            r2 = js(httpbin('cookies'))
        self.assertEqual(r1, r2)
        r3 = js(httpbin('cookies'))
        with requests_cache.disabled():
            r4 = js(httpbin('cookies/set/test3/test4'))
        # from cache
        self.assertEqual(r3, js(httpbin('cookies')))
        # updated
        with requests_cache.disabled():
            self.assertEqual(r4, js(httpbin('cookies')))

    def test_response_history(self):
        # The cached response must preserve its redirect history.
        r1 = self.s.get(httpbin('relative-redirect/3'))

        def test_redirect_history(url):
            r2 = self.s.get(url)
            self.assertTrue(r2.from_cache)
            for r11, r22 in zip(r1.history, r2.history):
                self.assertEqual(r11.url, r22.url)

        test_redirect_history(httpbin('relative-redirect/3'))
        test_redirect_history(httpbin('relative-redirect/2'))
        r3 = requests.get(httpbin('relative-redirect/1'))
        self.assertEqual(len(r3.history), 1)

    def test_response_history_simple(self):
        # The intermediate hop of a cached redirect chain is itself cached.
        r1 = self.s.get(httpbin('relative-redirect/2'))
        r2 = self.s.get(httpbin('relative-redirect/1'))
        self.assertTrue(r2.from_cache)

    def post(self, data):
        # Helper: POST *data* to httpbin and return the decoded JSON body.
        return json.loads(self.s.post(httpbin('post'), data=data).text)

    def test_post_params(self):
        # issue #2
        self.s = CachedSession(CACHE_NAME, CACHE_BACKEND,
                               allowable_methods=('GET', 'POST'))
        d = {'param1': 'test1'}
        for _ in range(2):
            self.assertEqual(self.post(d)['form'], d)
        d = {'param1': 'test1', 'param3': 'test3'}
        self.assertEqual(self.post(d)['form'], d)
        self.assertTrue(self.s.post(httpbin('post'), data=d).from_cache)
        d.update({'something': 'else'})
        self.assertFalse(self.s.post(httpbin('post'), data=d).from_cache)

    def test_post_data(self):
        # issue #2, raw payload
        self.s = CachedSession(CACHE_NAME, CACHE_BACKEND,
                               allowable_methods=('GET', 'POST'))
        d1 = json.dumps({'param1': 'test1'})
        d2 = json.dumps({'param1': 'test1', 'param2': 'test2'})
        d3 = str('some unicode data')
        if is_py3:
            bin_data = bytes('some binary data', 'utf8')
        else:
            bin_data = bytes('some binary data')
        for d in (d1, d2, d3):
            self.assertEqual(self.post(d)['data'], d)
            r = self.s.post(httpbin('post'), data=d)
            self.assert_(hasattr(r, 'from_cache'))
        self.assertEqual(self.post(bin_data)['data'],
                         bin_data.decode('utf8'))
        r = self.s.post(httpbin('post'), data=bin_data)
        self.assert_(hasattr(r, 'from_cache'))

    def test_get_params_as_argument(self):
        # params= must be folded into the cache key like an inline query.
        for _ in range(5):
            p = {'arg1': 'value1'}
            r = self.s.get(httpbin('get'), params=p)
            self.assert_(self.s.cache.has_url(
                httpbin('get?arg1=value1')))

    def test_https_support(self):
        # n cached calls to a delayed HTTPS endpoint must finish well
        # under the n * delay worst case.
        n = 10
        delay = 1
        url = 'https://httpbin.org/delay/%s?ar1=value1' % delay
        t = time.time()
        for _ in range(n):
            r = self.s.get(url, verify=False)
        self.assertLessEqual(time.time() - t, delay * n / 2)

    def test_from_cache_attribute(self):
        url = httpbin('get?q=1')
        self.assertFalse(self.s.get(url).from_cache)
        self.assertTrue(self.s.get(url).from_cache)
        self.s.cache.clear()
        self.assertFalse(self.s.get(url).from_cache)

    def test_gzip_response(self):
        url = httpbin('gzip')
        self.assertFalse(self.s.get(url).from_cache)
        self.assertTrue(self.s.get(url).from_cache)

    def test_close_response(self):
        # Closing a (possibly cached) response must not raise.
        for _ in range(3):
            r = self.s.get(httpbin("get"))
            r.close()

    def test_get_parameters_normalization(self):
        # Equivalent parameter sets (dict vs sorted items vs dict subclass
        # with reordered items()) must map to the same cache key.
        url = httpbin("get")
        params = {"a": "a", "b": ["1", "2", "3"], "c": "4"}
        self.assertFalse(self.s.get(url, params=params).from_cache)
        r = self.s.get(url, params=params)
        self.assertTrue(r.from_cache)
        self.assertEquals(r.json()["args"], params)
        self.assertFalse(self.s.get(url, params={"a": "b"}).from_cache)
        self.assertTrue(
            self.s.get(url, params=sorted(params.items())).from_cache)

        class UserSubclass(dict):
            def items(self):
                return sorted(super(UserSubclass, self).items(),
                              reverse=True)

        params["z"] = "5"
        custom_dict = UserSubclass(params)
        self.assertFalse(self.s.get(url, params=custom_dict).from_cache)
        self.assertTrue(self.s.get(url, params=custom_dict).from_cache)

    def test_post_parameters_normalization(self):
        # Same normalization for POST bodies; a list of pairs is
        # order-sensitive (reversed order is a different payload).
        params = {"a": "a", "b": ["1", "2", "3"], "c": "4"}
        url = httpbin("post")
        s = CachedSession(CACHE_NAME, CACHE_BACKEND,
                          allowable_methods=('GET', 'POST'))
        self.assertFalse(s.post(url, data=params).from_cache)
        self.assertTrue(s.post(url, data=params).from_cache)
        self.assertTrue(s.post(url, data=sorted(params.items())).from_cache)
        self.assertFalse(
            s.post(url, data=sorted(params.items(),
                                    reverse=True)).from_cache)

    def test_stream_requests_support(self):
        # Streamed responses must be replayable line-for-line from cache.
        n = 100
        url = httpbin("stream/%s" % n)
        r = self.s.get(url, stream=True)
        lines = list(r.iter_lines())
        self.assertEquals(len(lines), n)
        for i in range(2):
            r = self.s.get(url, stream=True)
            self.assertTrue(r.from_cache)
            cached_lines = list(r.iter_lines())
            self.assertEquals(cached_lines, lines)

    def test_headers_in_get_query(self):
        # With include_get_headers, any header change is a cache miss.
        url = httpbin("get")
        s = CachedSession(CACHE_NAME, CACHE_BACKEND,
                          include_get_headers=True)
        headers = {"Accept": "text/json"}
        self.assertFalse(s.get(url, headers=headers).from_cache)
        self.assertTrue(s.get(url, headers=headers).from_cache)
        headers["Accept"] = "text/xml"
        self.assertFalse(s.get(url, headers=headers).from_cache)
        self.assertTrue(s.get(url, headers=headers).from_cache)
        headers["X-custom-header"] = "custom"
        self.assertFalse(s.get(url, headers=headers).from_cache)
        self.assertTrue(s.get(url, headers=headers).from_cache)
        self.assertFalse(s.get(url).from_cache)
        self.assertTrue(s.get(url).from_cache)

    def test_str_and_repr(self):
        s = repr(CachedSession(CACHE_NAME, CACHE_BACKEND, expire_after=10))
        self.assertIn(CACHE_NAME, s)
        self.assertIn("10", s)

    @mock.patch("requests_cache.core.datetime")
    def test_return_old_data_on_error(self, datetime_mock):
        # With old_data_on_error, an expired-but-present entry is served
        # when refreshing fails (save error or non-200 upstream).
        datetime_mock.utcnow.return_value = datetime.utcnow()
        expire_after = 100
        url = httpbin("get")
        s = CachedSession(CACHE_NAME, CACHE_BACKEND,
                          old_data_on_error=True,
                          expire_after=expire_after)
        header = "X-Tst"

        def get(n):
            return s.get(url, headers={header: n}).json()["headers"][header]

        get("expired")
        self.assertEquals(get("2"), "expired")
        datetime_mock.utcnow.return_value = datetime.utcnow() + timedelta(
            seconds=expire_after * 2)
        with mock.patch.object(s.cache, "save_response",
                               side_effect=Exception):
            self.assertEquals(get("3"), "expired")
        with mock.patch(
                "requests_cache.core.OriginalSession.send") as send_mock:
            resp_mock = requests.Response()
            request = requests.Request("GET", url)
            resp_mock.request = request.prepare()
            resp_mock.status_code = 400
            resp_mock._content = '{"other": "content"}'
            send_mock.return_value = resp_mock
            self.assertEquals(get("3"), "expired")
            resp_mock.status_code = 200
            self.assertIs(s.get(url).content, resp_mock.content)
        # default behaviour
        # NOTE(review): this assigns the mock's return_value, not
        # utcnow.return_value — looks like it should mirror the line
        # above; confirm against upstream.
        datetime_mock.return_value = datetime.utcnow() + timedelta(
            seconds=expire_after * 2)
        s = CachedSession(CACHE_NAME, CACHE_BACKEND,
                          old_data_on_error=False, expire_after=100)
        with mock.patch.object(s.cache, "save_response",
                               side_effect=Exception):
            with self.assertRaises(Exception):
                s.get(url)

    def test_ignore_parameters_get(self):
        # Ignored query parameters are still sent, but excluded from the key.
        url = httpbin("get")
        ignored_param = "ignored"
        usual_param = "some"
        params = {ignored_param: "1", usual_param: "1"}
        s = CachedSession(CACHE_NAME, CACHE_BACKEND,
                          ignored_parameters=[ignored_param])
        r = s.get(url, params=params)
        self.assertIn(ignored_param, r.json()['args'].keys())
        self.assertFalse(r.from_cache)
        self.assertTrue(s.get(url, params=params).from_cache)
        params[ignored_param] = "new"
        self.assertTrue(s.get(url, params=params).from_cache)
        params[usual_param] = "new"
        self.assertFalse(s.get(url, params=params).from_cache)

    def test_ignore_parameters_post(self):
        url = httpbin("post")
        ignored_param = "ignored"
        usual_param = "some"
        d = {ignored_param: "1", usual_param: "1"}
        # NOTE(review): ('POST') is a plain string, not a 1-tuple; the
        # membership test still matches 'POST' so this works by accident.
        s = CachedSession(CACHE_NAME, CACHE_BACKEND,
                          allowable_methods=('POST'),
                          ignored_parameters=[ignored_param])
        r = s.post(url, data=d)
        self.assertIn(ignored_param, r.json()['form'].keys())
        self.assertFalse(r.from_cache)
        self.assertTrue(s.post(url, data=d).from_cache)
        d[ignored_param] = "new"
        self.assertTrue(s.post(url, data=d).from_cache)
        d[usual_param] = "new"
        self.assertFalse(s.post(url, data=d).from_cache)

    def test_ignore_parameters_post_json(self):
        url = httpbin("post")
        ignored_param = "ignored"
        usual_param = "some"
        d = {ignored_param: "1", usual_param: "1"}
        s = CachedSession(CACHE_NAME, CACHE_BACKEND,
                          allowable_methods=('POST'),
                          ignored_parameters=[ignored_param])
        r = s.post(url, json=d)
        self.assertIn(ignored_param, json.loads(r.json()['data']).keys())
        self.assertFalse(r.from_cache)
        self.assertTrue(s.post(url, json=d).from_cache)
        d[ignored_param] = "new"
        self.assertTrue(s.post(url, json=d).from_cache)
        d[usual_param] = "new"
        self.assertFalse(s.post(url, json=d).from_cache)

    def test_ignore_parameters_post_raw(self):
        # Raw (non-form) bodies are untouched by ignored_parameters.
        url = httpbin("post")
        ignored_param = "ignored"
        raw_data = "raw test data"
        s = CachedSession(CACHE_NAME, CACHE_BACKEND,
                          allowable_methods=('POST'),
                          ignored_parameters=[ignored_param])
        self.assertFalse(s.post(url, data=raw_data).from_cache)
        self.assertTrue(s.post(url, data=raw_data).from_cache)
        raw_data = "new raw data"
        self.assertFalse(s.post(url, data=raw_data).from_cache)

    @mock.patch("requests_cache.backends.base.datetime")
    @mock.patch("requests_cache.core.datetime")
    def test_remove_expired_entries(self, datetime_mock, datetime_mock2):
        # Two entries created at start_time expire; one created after the
        # clock jump survives remove_expired_responses().
        expire_after = timedelta(minutes=10)
        start_time = datetime.utcnow().replace(year=2010, minute=0)
        datetime_mock.utcnow.return_value = start_time
        datetime_mock2.utcnow.return_value = start_time
        s = CachedSession(CACHE_NAME, CACHE_BACKEND,
                          expire_after=expire_after)
        s.get(httpbin('get'))
        s.get(httpbin('relative-redirect/3'))
        datetime_mock.utcnow.return_value = start_time + expire_after * 2
        datetime_mock2.utcnow.return_value = datetime_mock.utcnow.return_value
        ok_url = 'get?x=1'
        s.get(httpbin(ok_url))
        self.assertEqual(len(s.cache.responses), 3)
        self.assertEqual(len(s.cache.keys_map), 3)
        s.remove_expired_responses()
        self.assertEqual(len(s.cache.responses), 1)
        self.assertEqual(len(s.cache.keys_map), 0)
        self.assertIn(ok_url, list(s.cache.responses.values())[0][0].url)
class CacheTestCase(unittest.TestCase):
    """Older/variant copy of the CachedSession integration tests, using
    httpbin's ``redirect/N`` endpoints (network access required).

    NOTE(review): indentation reconstructed from a collapsed source —
    grouping follows the upstream requests-cache test suite; confirm.
    NOTE(review): the file defines CacheTestCase more than once; only the
    last definition is visible at import time.
    """

    def setUp(self):
        # Fresh session with a cleared cache; remove any globally
        # installed cache left over from another test.
        self.s = CachedSession(CACHE_NAME,
                               backend=CACHE_BACKEND,
                               fast_save=FAST_SAVE)
        self.s.cache.clear()
        requests_cache.uninstall_cache()

    def test_expire_cache(self):
        # expire_after (0.06s) is shorter than the endpoint delay, so both
        # requests must hit the network and each take >= `delay`.
        delay = 1
        url = httpbin('delay/%s' % delay)
        s = CachedSession(CACHE_NAME, backend=CACHE_BACKEND,
                          expire_after=0.06)
        t = time.time()
        r = s.get(url)
        delta = time.time() - t
        self.assertGreaterEqual(delta, delay)
        time.sleep(0.5)
        t = time.time()
        r = s.get(url)
        delta = time.time() - t
        self.assertGreaterEqual(delta, delay)

    def test_delete_urls(self):
        # Every hop of a redirect chain is cached; delete_url removes only
        # the requested URL's entry.
        url = httpbin('redirect/3')
        r = self.s.get(url)
        for i in range(1, 4):
            self.assert_(self.s.cache.has_url(httpbin('redirect/%s' % i)))
        self.s.cache.delete_url(url)
        self.assert_(not self.s.cache.has_url(url))

    def test_unregistered_backend(self):
        with self.assertRaises(ValueError):
            CachedSession(CACHE_NAME, backend='nonexistent')

    # def test_async_compatibility(self):
    #     try:
    #         import grequests
    #     except Exception:
    #         self.skipTest('gevent is not installed')
    #     n = 3
    #     def long_running():
    #         t = time.time()
    #         rs = [grequests.get(httpbin('delay/%s' % i)) for i in range(n + 1)]
    #         grequests.map(rs)
    #         return time.time() - t
    #     # cache it
    #     delta = long_running()
    #     self.assertGreaterEqual(delta, n)
    #     # fast from cache
    #     delta = 0
    #     for i in range(n):
    #         delta += long_running()
    #     self.assertLessEqual(delta, 1)

    def test_hooks(self):
        # Response hooks must fire for cached responses as well.
        state = defaultdict(int)
        for hook in ('response',):  # TODO it's only one hook here

            def hook_func(r, *args, **kwargs):
                state[hook] += 1
                return r

            n = 5
            for i in range(n):
                r = self.s.get(httpbin('get'), hooks={hook: hook_func})
            self.assertEqual(state[hook], n)

    def test_attr_from_cache_in_hook(self):
        # From the second call on, the hook should see from_cache set.
        state = defaultdict(int)
        hook = 'response'

        def hook_func(r, *args, **kwargs):
            if state[hook] > 0:
                self.assert_(r.from_cache, True)
            state[hook] += 1
            return r

        n = 5
        for i in range(n):
            r = self.s.get(httpbin('get'), hooks={hook: hook_func})
        self.assertEqual(state[hook], n)

    def test_post(self):
        # POST is not cached with the default allowable_methods.
        url = httpbin('post')
        r1 = json.loads(self.s.post(url, data={'test1': 'test1'}).text)
        r2 = json.loads(self.s.post(url, data={'test2': 'test2'}).text)
        self.assertIn('test2', r2['form'])
        req = Request('POST', url).prepare()
        self.assert_(not self.s.cache.has_key(self.s.cache.create_key(req)))

    def test_disabled(self):
        # disabled()/cache_disabled() bypass the cache without clearing it.
        url = httpbin('get')
        requests_cache.install_cache(CACHE_NAME,
                                     backend=CACHE_BACKEND,
                                     fast_save=FAST_SAVE)
        requests.get(url)
        with requests_cache.disabled():
            for i in range(2):
                r = requests.get(url)
                self.assertFalse(getattr(r, 'from_cache', False))
        with self.s.cache_disabled():
            for i in range(2):
                r = self.s.get(url)
                self.assertFalse(getattr(r, 'from_cache', False))
        r = self.s.get(url)
        self.assertTrue(getattr(r, 'from_cache', False))

    def test_enabled(self):
        # Caching applies only inside the enabled() context.
        url = httpbin('get')
        options = dict(cache_name=CACHE_NAME,
                       backend=CACHE_BACKEND,
                       fast_save=FAST_SAVE)
        with requests_cache.enabled(**options):
            r = requests.get(url)
            self.assertFalse(getattr(r, 'from_cache', False))
            for i in range(2):
                r = requests.get(url)
                self.assertTrue(getattr(r, 'from_cache', False))
        r = requests.get(url)
        self.assertFalse(getattr(r, 'from_cache', False))

    def test_content_and_cookies(self):
        # Cached responses replay body and cookies; changes made while the
        # cache is disabled are not persisted into it.
        requests_cache.install_cache(CACHE_NAME, CACHE_BACKEND)
        s = requests.session()

        def js(url):
            return json.loads(s.get(url).text)

        r1 = js(httpbin('cookies/set/test1/test2'))
        with requests_cache.disabled():
            r2 = js(httpbin('cookies'))
        self.assertEqual(r1, r2)
        r3 = js(httpbin('cookies'))
        with requests_cache.disabled():
            r4 = js(httpbin('cookies/set/test3/test4'))
        # from cache
        self.assertEqual(r3, js(httpbin('cookies')))
        # updated
        with requests_cache.disabled():
            self.assertEqual(r4, js(httpbin('cookies')))

    def test_response_history(self):
        # The cached response must preserve its redirect history.
        r1 = self.s.get(httpbin('redirect/3'))

        def test_redirect_history(url):
            r2 = self.s.get(url)
            self.assertTrue(r2.from_cache)
            for r11, r22 in zip(r1.history, r2.history):
                self.assertEqual(r11.url, r22.url)

        test_redirect_history(httpbin('redirect/3'))
        test_redirect_history(httpbin('redirect/2'))
        r3 = requests.get(httpbin('redirect/1'))
        self.assertEqual(len(r3.history), 1)

    def test_response_history_simple(self):
        # Intermediate hops of a cached redirect chain are themselves cached.
        r1 = self.s.get(httpbin('redirect/2'))
        r2 = self.s.get(httpbin('redirect/1'))
        self.assertTrue(r2.from_cache)

    def post(self, data):
        # Helper: POST *data* to httpbin and return the decoded JSON body.
        return json.loads(self.s.post(httpbin('post'), data=data).text)

    def test_post_params(self):
        # issue #2
        self.s = CachedSession(CACHE_NAME, CACHE_BACKEND,
                               allowable_methods=('GET', 'POST'))
        d = {'param1': 'test1'}
        for _ in range(2):
            self.assertEqual(self.post(d)['form'], d)
        d = {'param1': 'test1', 'param3': 'test3'}
        self.assertEqual(self.post(d)['form'], d)
        self.assertTrue(self.s.post(httpbin('post'), data=d).from_cache)
        d.update({'something': 'else'})
        self.assertFalse(self.s.post(httpbin('post'), data=d).from_cache)

    def test_post_data(self):
        # issue #2, raw payload
        self.s = CachedSession(CACHE_NAME, CACHE_BACKEND,
                               allowable_methods=('GET', 'POST'))
        d1 = json.dumps({'param1': 'test1'})
        d2 = json.dumps({'param1': 'test1', 'param2': 'test2'})
        d3 = str('some unicode data')
        if is_py3:
            bin_data = bytes('some binary data', 'utf8')
        else:
            bin_data = bytes('some binary data')
        for d in (d1, d2, d3):
            self.assertEqual(self.post(d)['data'], d)
            r = self.s.post(httpbin('post'), data=d)
            self.assert_(hasattr(r, 'from_cache'))
        self.assertEqual(self.post(bin_data)['data'],
                         bin_data.decode('utf8'))
        r = self.s.post(httpbin('post'), data=bin_data)
        self.assert_(hasattr(r, 'from_cache'))

    def test_get_params_as_argument(self):
        # params= must be folded into the cache key like an inline query.
        for _ in range(5):
            p = {'arg1': 'value1'}
            r = self.s.get(httpbin('get'), params=p)
            self.assert_(self.s.cache.has_url(
                httpbin('get?arg1=value1')))

    def test_https_support(self):
        # n cached calls to a delayed HTTPS endpoint must finish well
        # under the n * delay worst case.
        n = 10
        delay = 1
        url = 'https://httpbin.org/delay/%s?ar1=value1' % delay
        t = time.time()
        for _ in range(n):
            r = self.s.get(url, verify=False)
        self.assertLessEqual(time.time() - t, delay * n / 2)

    def test_from_cache_attribute(self):
        url = httpbin('get?q=1')
        self.assertFalse(self.s.get(url).from_cache)
        self.assertTrue(self.s.get(url).from_cache)
        self.s.cache.clear()
        self.assertFalse(self.s.get(url).from_cache)

    def test_gzip_response(self):
        url = httpbin('gzip')
        self.assertFalse(self.s.get(url).from_cache)
        self.assertTrue(self.s.get(url).from_cache)
class CacheTestCase(unittest.TestCase):
    """Integration tests for requests_cache.CachedSession against a live
    httpbin server (network required; tests share the CACHE_NAME cache
    and depend on request ordering).

    NOTE(review): indentation reconstructed from a collapsed source;
    grouping follows the upstream requests-cache test suite — confirm.
    NOTE(review): this is one of several CacheTestCase definitions in
    this file; the last one shadows the others at import time.
    """

    def setUp(self):
        # Fresh session with a cleared cache; also remove any globally
        # installed cache from a previous test.
        self.s = CachedSession(CACHE_NAME,
                               backend=CACHE_BACKEND,
                               fast_save=FAST_SAVE)
        self.s.cache.clear()
        requests_cache.uninstall_cache()

    def test_expire_cache(self):
        # expire_after (0.06s) is far shorter than the endpoint delay, so
        # both requests must hit the network (each takes >= `delay`).
        delay = 1
        url = httpbin('delay/%s' % delay)
        s = CachedSession(CACHE_NAME, backend=CACHE_BACKEND,
                          expire_after=0.06)
        t = time.time()
        r = s.get(url)
        delta = time.time() - t
        self.assertGreaterEqual(delta, delay)
        time.sleep(0.5)
        t = time.time()
        r = s.get(url)
        delta = time.time() - t
        self.assertGreaterEqual(delta, delay)

    def test_delete_urls(self):
        # Every hop of a redirect chain is cached; delete_url removes only
        # the requested URL's entry.
        url = httpbin('relative-redirect/3')
        r = self.s.get(url)
        for i in range(1, 4):
            self.assert_(
                self.s.cache.has_url(httpbin('relative-redirect/%s' % i)))
        self.s.cache.delete_url(url)
        self.assert_(not self.s.cache.has_url(url))

    def test_unregistered_backend(self):
        with self.assertRaises(ValueError):
            CachedSession(CACHE_NAME, backend='nonexistent')

    def test_hooks(self):
        # Response hooks must fire for cached responses too.
        state = defaultdict(int)
        for hook in ('response', ):  # TODO it's only one hook here

            def hook_func(r, *args, **kwargs):
                state[hook] += 1
                return r

            n = 5
            for i in range(n):
                r = self.s.get(httpbin('get'), hooks={hook: hook_func})
            self.assertEqual(state[hook], n)

    def test_attr_from_cache_in_hook(self):
        # From the second call on, the hook should see from_cache set.
        state = defaultdict(int)
        hook = 'response'

        def hook_func(r, *args, **kwargs):
            if state[hook] > 0:
                self.assert_(r.from_cache, True)
            state[hook] += 1
            return r

        n = 5
        for i in range(n):
            r = self.s.get(httpbin('get'), hooks={hook: hook_func})
        self.assertEqual(state[hook], n)

    def test_post(self):
        # POST is not cached with the default allowable_methods.
        url = httpbin('post')
        r1 = json.loads(self.s.post(url, data={'test1': 'test1'}).text)
        r2 = json.loads(self.s.post(url, data={'test2': 'test2'}).text)
        self.assertIn('test2', r2['form'])
        req = Request('POST', url).prepare()
        self.assert_(not self.s.cache.has_key(self.s.cache.create_key(req)))

    def test_disabled(self):
        # disabled()/cache_disabled() bypass the cache without clearing it.
        url = httpbin('get')
        requests_cache.install_cache(CACHE_NAME,
                                     backend=CACHE_BACKEND,
                                     fast_save=FAST_SAVE)
        requests.get(url)
        with requests_cache.disabled():
            for i in range(2):
                r = requests.get(url)
                self.assertFalse(getattr(r, 'from_cache', False))
        with self.s.cache_disabled():
            for i in range(2):
                r = self.s.get(url)
                self.assertFalse(getattr(r, 'from_cache', False))
        r = self.s.get(url)
        self.assertTrue(getattr(r, 'from_cache', False))

    def test_enabled(self):
        # Caching applies only inside the enabled() context.
        url = httpbin('get')
        options = dict(cache_name=CACHE_NAME,
                       backend=CACHE_BACKEND,
                       fast_save=FAST_SAVE)
        with requests_cache.enabled(**options):
            r = requests.get(url)
            self.assertFalse(getattr(r, 'from_cache', False))
            for i in range(2):
                r = requests.get(url)
                self.assertTrue(getattr(r, 'from_cache', False))
        r = requests.get(url)
        self.assertFalse(getattr(r, 'from_cache', False))

    def test_content_and_cookies(self):
        # Cached responses replay body and cookies; changes made while the
        # cache is disabled are not persisted into it.
        requests_cache.install_cache(CACHE_NAME, CACHE_BACKEND)
        s = requests.session()

        def js(url):
            return json.loads(s.get(url).text)

        r1 = js(httpbin('cookies/set/test1/test2'))
        with requests_cache.disabled():
            r2 = js(httpbin('cookies'))
        self.assertEqual(r1, r2)
        r3 = js(httpbin('cookies'))
        with requests_cache.disabled():
            r4 = js(httpbin('cookies/set/test3/test4'))
        # from cache
        self.assertEqual(r3, js(httpbin('cookies')))
        # updated
        with requests_cache.disabled():
            self.assertEqual(r4, js(httpbin('cookies')))

    def test_response_history(self):
        # The cached response must preserve its redirect history.
        r1 = self.s.get(httpbin('relative-redirect/3'))

        def test_redirect_history(url):
            r2 = self.s.get(url)
            self.assertTrue(r2.from_cache)
            for r11, r22 in zip(r1.history, r2.history):
                self.assertEqual(r11.url, r22.url)

        test_redirect_history(httpbin('relative-redirect/3'))
        test_redirect_history(httpbin('relative-redirect/2'))
        r3 = requests.get(httpbin('relative-redirect/1'))
        self.assertEqual(len(r3.history), 1)

    def test_response_history_simple(self):
        # Intermediate hops of a cached redirect chain are themselves cached.
        r1 = self.s.get(httpbin('relative-redirect/2'))
        r2 = self.s.get(httpbin('relative-redirect/1'))
        self.assertTrue(r2.from_cache)

    def post(self, data):
        # Helper: POST *data* to httpbin and return the decoded JSON body.
        return json.loads(self.s.post(httpbin('post'), data=data).text)

    def test_post_params(self):
        # issue #2
        self.s = CachedSession(CACHE_NAME,
                               CACHE_BACKEND,
                               allowable_methods=('GET', 'POST'))
        d = {'param1': 'test1'}
        for _ in range(2):
            self.assertEqual(self.post(d)['form'], d)
        d = {'param1': 'test1', 'param3': 'test3'}
        self.assertEqual(self.post(d)['form'], d)
        self.assertTrue(self.s.post(httpbin('post'), data=d).from_cache)
        d.update({'something': 'else'})
        self.assertFalse(self.s.post(httpbin('post'), data=d).from_cache)

    def test_post_data(self):
        # issue #2, raw payload
        self.s = CachedSession(CACHE_NAME,
                               CACHE_BACKEND,
                               allowable_methods=('GET', 'POST'))
        d1 = json.dumps({'param1': 'test1'})
        d2 = json.dumps({'param1': 'test1', 'param2': 'test2'})
        d3 = str('some unicode data')
        if is_py3:
            bin_data = bytes('some binary data', 'utf8')
        else:
            bin_data = bytes('some binary data')
        for d in (d1, d2, d3):
            self.assertEqual(self.post(d)['data'], d)
            r = self.s.post(httpbin('post'), data=d)
            self.assert_(hasattr(r, 'from_cache'))
        self.assertEqual(self.post(bin_data)['data'],
                         bin_data.decode('utf8'))
        r = self.s.post(httpbin('post'), data=bin_data)
        self.assert_(hasattr(r, 'from_cache'))

    def test_get_params_as_argument(self):
        # params= must be folded into the cache key like an inline query.
        for _ in range(5):
            p = {'arg1': 'value1'}
            r = self.s.get(httpbin('get'), params=p)
            self.assert_(self.s.cache.has_url(httpbin('get?arg1=value1')))

    def test_https_support(self):
        # n cached calls to a delayed HTTPS endpoint must finish well
        # under the n * delay worst case.
        n = 10
        delay = 1
        url = 'https://httpbin.org/delay/%s?ar1=value1' % delay
        t = time.time()
        for _ in range(n):
            r = self.s.get(url, verify=False)
        self.assertLessEqual(time.time() - t, delay * n / 2)

    def test_from_cache_attribute(self):
        url = httpbin('get?q=1')
        self.assertFalse(self.s.get(url).from_cache)
        self.assertTrue(self.s.get(url).from_cache)
        self.s.cache.clear()
        self.assertFalse(self.s.get(url).from_cache)

    def test_gzip_response(self):
        url = httpbin('gzip')
        self.assertFalse(self.s.get(url).from_cache)
        self.assertTrue(self.s.get(url).from_cache)

    def test_close_response(self):
        # Closing a (possibly cached) response must not raise.
        for _ in range(3):
            r = self.s.get(httpbin("get"))
            r.close()

    def test_get_parameters_normalization(self):
        # Equivalent parameter sets (dict vs sorted items vs dict subclass
        # with reordered items()) must map to the same cache key.
        url = httpbin("get")
        params = {"a": "a", "b": ["1", "2", "3"], "c": "4"}
        self.assertFalse(self.s.get(url, params=params).from_cache)
        r = self.s.get(url, params=params)
        self.assertTrue(r.from_cache)
        self.assertEquals(r.json()["args"], params)
        self.assertFalse(self.s.get(url, params={"a": "b"}).from_cache)
        self.assertTrue(
            self.s.get(url, params=sorted(params.items())).from_cache)

        class UserSubclass(dict):
            def items(self):
                return sorted(super(UserSubclass, self).items(),
                              reverse=True)

        params["z"] = "5"
        custom_dict = UserSubclass(params)
        self.assertFalse(self.s.get(url, params=custom_dict).from_cache)
        self.assertTrue(self.s.get(url, params=custom_dict).from_cache)

    def test_post_parameters_normalization(self):
        # Same normalization for POST form bodies; a list of pairs is
        # order-sensitive (reversed order is a different payload).
        params = {"a": "a", "b": ["1", "2", "3"], "c": "4"}
        url = httpbin("post")
        s = CachedSession(CACHE_NAME,
                          CACHE_BACKEND,
                          allowable_methods=('GET', 'POST'))
        self.assertFalse(s.post(url, data=params).from_cache)
        self.assertTrue(s.post(url, data=params).from_cache)
        self.assertTrue(s.post(url, data=sorted(params.items())).from_cache)
        self.assertFalse(
            s.post(url, data=sorted(params.items(),
                                    reverse=True)).from_cache)

    def test_stream_requests_support(self):
        # Streamed responses must be replayable line-for-line from cache.
        n = 100
        url = httpbin("stream/%s" % n)
        r = self.s.get(url, stream=True)
        lines = list(r.iter_lines())
        self.assertEquals(len(lines), n)
        for i in range(2):
            r = self.s.get(url, stream=True)
            self.assertTrue(r.from_cache)
            cached_lines = list(r.iter_lines())
            self.assertEquals(cached_lines, lines)

    def test_headers_in_get_query(self):
        # With include_get_headers, any header change is a cache miss.
        url = httpbin("get")
        s = CachedSession(CACHE_NAME,
                          CACHE_BACKEND,
                          include_get_headers=True)
        headers = {"Accept": "text/json"}
        self.assertFalse(s.get(url, headers=headers).from_cache)
        self.assertTrue(s.get(url, headers=headers).from_cache)
        headers["Accept"] = "text/xml"
        self.assertFalse(s.get(url, headers=headers).from_cache)
        self.assertTrue(s.get(url, headers=headers).from_cache)
        headers["X-custom-header"] = "custom"
        self.assertFalse(s.get(url, headers=headers).from_cache)
        self.assertTrue(s.get(url, headers=headers).from_cache)
        self.assertFalse(s.get(url).from_cache)
        self.assertTrue(s.get(url).from_cache)

    def test_str_and_repr(self):
        s = repr(CachedSession(CACHE_NAME, CACHE_BACKEND, expire_after=10))
        self.assertIn(CACHE_NAME, s)
        self.assertIn("10", s)

    @mock.patch("requests_cache.core.datetime")
    def test_return_old_data_on_error(self, datetime_mock):
        # With old_data_on_error, an expired-but-present entry is served
        # when refreshing fails (save error or non-200 upstream).
        datetime_mock.utcnow.return_value = datetime.utcnow()
        expire_after = 100
        url = httpbin("get")
        s = CachedSession(CACHE_NAME,
                          CACHE_BACKEND,
                          old_data_on_error=True,
                          expire_after=expire_after)
        header = "X-Tst"

        def get(n):
            return s.get(url, headers={header: n}).json()["headers"][header]

        get("expired")
        self.assertEquals(get("2"), "expired")
        datetime_mock.utcnow.return_value = datetime.utcnow() + timedelta(
            seconds=expire_after * 2)
        with mock.patch.object(s.cache, "save_response",
                               side_effect=Exception):
            self.assertEquals(get("3"), "expired")
        with mock.patch(
                "requests_cache.core.OriginalSession.send") as send_mock:
            resp_mock = requests.Response()
            request = requests.Request("GET", url)
            resp_mock.request = request.prepare()
            resp_mock.status_code = 400
            resp_mock._content = '{"other": "content"}'
            send_mock.return_value = resp_mock
            self.assertEquals(get("3"), "expired")
            resp_mock.status_code = 200
            self.assertIs(s.get(url).content, resp_mock.content)
        # default behaviour
        # NOTE(review): this assigns the mock's return_value, not
        # utcnow.return_value — looks like it should mirror the earlier
        # line; confirm against upstream.
        datetime_mock.return_value = datetime.utcnow() + timedelta(
            seconds=expire_after * 2)
        s = CachedSession(CACHE_NAME,
                          CACHE_BACKEND,
                          old_data_on_error=False,
                          expire_after=100)
        with mock.patch.object(s.cache, "save_response",
                               side_effect=Exception):
            with self.assertRaises(Exception):
                s.get(url)

    def test_ignore_parameters_get(self):
        # Ignored query parameters are still sent, but excluded from the key.
        url = httpbin("get")
        ignored_param = "ignored"
        usual_param = "some"
        params = {ignored_param: "1", usual_param: "1"}
        s = CachedSession(CACHE_NAME,
                          CACHE_BACKEND,
                          ignored_parameters=[ignored_param])
        r = s.get(url, params=params)
        self.assertIn(ignored_param, r.json()['args'].keys())
        self.assertFalse(r.from_cache)
        self.assertTrue(s.get(url, params=params).from_cache)
        params[ignored_param] = "new"
        self.assertTrue(s.get(url, params=params).from_cache)
        params[usual_param] = "new"
        self.assertFalse(s.get(url, params=params).from_cache)

    def test_ignore_parameters_post(self):
        url = httpbin("post")
        ignored_param = "ignored"
        usual_param = "some"
        d = {ignored_param: "1", usual_param: "1"}
        # NOTE(review): ('POST') is a plain string, not a 1-tuple; the
        # membership test still matches 'POST' so this works by accident.
        s = CachedSession(CACHE_NAME,
                          CACHE_BACKEND,
                          allowable_methods=('POST'),
                          ignored_parameters=[ignored_param])
        r = s.post(url, data=d)
        self.assertIn(ignored_param, r.json()['form'].keys())
        self.assertFalse(r.from_cache)
        self.assertTrue(s.post(url, data=d).from_cache)
        d[ignored_param] = "new"
        self.assertTrue(s.post(url, data=d).from_cache)
        d[usual_param] = "new"
        self.assertFalse(s.post(url, data=d).from_cache)

    def test_ignore_parameters_post_json(self):
        url = httpbin("post")
        ignored_param = "ignored"
        usual_param = "some"
        d = {ignored_param: "1", usual_param: "1"}
        s = CachedSession(CACHE_NAME,
                          CACHE_BACKEND,
                          allowable_methods=('POST'),
                          ignored_parameters=[ignored_param])
        r = s.post(url, json=d)
        self.assertIn(ignored_param, json.loads(r.json()['data']).keys())
        self.assertFalse(r.from_cache)
        self.assertTrue(s.post(url, json=d).from_cache)
        d[ignored_param] = "new"
        self.assertTrue(s.post(url, json=d).from_cache)
        d[usual_param] = "new"
        self.assertFalse(s.post(url, json=d).from_cache)

    def test_ignore_parameters_post_raw(self):
        # Raw (non-form) bodies are untouched by ignored_parameters.
        url = httpbin("post")
        ignored_param = "ignored"
        raw_data = "raw test data"
        s = CachedSession(CACHE_NAME,
                          CACHE_BACKEND,
                          allowable_methods=('POST'),
                          ignored_parameters=[ignored_param])
        self.assertFalse(s.post(url, data=raw_data).from_cache)
        self.assertTrue(s.post(url, data=raw_data).from_cache)
        raw_data = "new raw data"
        self.assertFalse(s.post(url, data=raw_data).from_cache)

    @mock.patch("requests_cache.backends.base.datetime")
    @mock.patch("requests_cache.core.datetime")
    def test_remove_expired_entries(self, datetime_mock, datetime_mock2):
        # Two entries created at start_time expire; one created after the
        # clock jump survives remove_expired_responses().
        expire_after = timedelta(minutes=10)
        start_time = datetime.utcnow().replace(year=2010, minute=0)
        datetime_mock.utcnow.return_value = start_time
        datetime_mock2.utcnow.return_value = start_time
        s = CachedSession(CACHE_NAME,
                          CACHE_BACKEND,
                          expire_after=expire_after)
        s.get(httpbin('get'))
        s.get(httpbin('relative-redirect/3'))
        datetime_mock.utcnow.return_value = start_time + expire_after * 2
        datetime_mock2.utcnow.return_value = datetime_mock.utcnow.return_value
        ok_url = 'get?x=1'
        s.get(httpbin(ok_url))
        self.assertEqual(len(s.cache.responses), 3)
        self.assertEqual(len(s.cache.keys_map), 3)
        s.remove_expired_responses()
        self.assertEqual(len(s.cache.responses), 1)
        self.assertEqual(len(s.cache.keys_map), 0)
        self.assertIn(ok_url, list(s.cache.responses.values())[0][0].url)
class CacheTestCase(unittest.TestCase):
    """Integration tests for CachedSession / requests_cache against httpbin.

    These tests perform real HTTP requests; CACHE_NAME, CACHE_BACKEND and
    FAST_SAVE are module-level configuration.
    """

    def setUp(self):
        # Fresh session with an empty cache for every test; also make sure no
        # globally installed cache from a previous test is still active.
        self.s = CachedSession(CACHE_NAME, backend=CACHE_BACKEND,
                               fast_save=FAST_SAVE)
        self.s.cache.clear()
        requests_cache.uninstall_cache()

    def test_expire_cache(self):
        """An entry older than expire_after is refetched, not served."""
        delay = 1
        url = httpbin('delay/%s' % delay)
        s = CachedSession(CACHE_NAME, backend=CACHE_BACKEND, expire_after=0.06)
        t = time.time()
        r = s.get(url)
        delta = time.time() - t
        self.assertGreaterEqual(delta, delay)
        time.sleep(0.5)  # let the cached entry expire
        t = time.time()
        r = s.get(url)
        delta = time.time() - t
        # The second request is slow again: the stale entry was not served.
        self.assertGreaterEqual(delta, delay)

    def test_delete_urls(self):
        """delete_url removes the final URL; intermediate hops stay cached."""
        url = httpbin('redirect/3')
        r = self.s.get(url)
        # Every hop of the redirect chain should have been cached.
        for i in range(1, 4):
            self.assertTrue(self.s.cache.has_url(httpbin('redirect/%s' % i)))
        self.s.cache.delete_url(url)
        self.assertTrue(not self.s.cache.has_url(url))

    def test_unregistered_backend(self):
        with self.assertRaises(ValueError):
            CachedSession(CACHE_NAME, backend='nonexistent')

#    def test_async_compatibility(self):
#        try:
#            import grequests
#        except Exception:
#            self.skipTest('gevent is not installed')
#        n = 3
#
#        def long_running():
#            t = time.time()
#            rs = [grequests.get(httpbin('delay/%s' % i)) for i in range(n + 1)]
#            grequests.map(rs)
#            return time.time() - t
#        # cache it
#        delta = long_running()
#        self.assertGreaterEqual(delta, n)
#        # fast from cache
#        delta = 0
#        for i in range(n):
#            delta += long_running()
#        self.assertLessEqual(delta, 1)

    def test_hooks(self):
        """Response hooks still fire when the response comes from the cache."""
        state = defaultdict(int)
        for hook in ('response',):  # TODO it's only one hook here

            def hook_func(r, *args, **kwargs):
                state[hook] += 1
                return r

            n = 5
            for i in range(n):
                r = self.s.get(httpbin('get'), hooks={hook: hook_func})
            self.assertEqual(state[hook], n)

    def test_post(self):
        """POST is not cached by default (not in allowable_methods)."""
        url = httpbin('post')
        r1 = json.loads(self.s.post(url, data={'test1': 'test1'}).text)
        r2 = json.loads(self.s.post(url, data={'test2': 'test2'}).text)
        self.assertIn('test2', r2['form'])
        req = Request('POST', url).prepare()
        self.assertTrue(
            not self.s.cache.has_key(self.s.cache.create_key(req)))

    def test_disabled(self):
        """disabled()/cache_disabled() bypass both global and session caches."""
        url = httpbin('get')
        requests_cache.install_cache(CACHE_NAME, backend=CACHE_BACKEND,
                                     fast_save=FAST_SAVE)
        requests.get(url)
        with requests_cache.disabled():
            for i in range(2):
                r = requests.get(url)
                self.assertFalse(getattr(r, 'from_cache', False))
        with self.s.cache_disabled():
            for i in range(2):
                r = self.s.get(url)
                self.assertFalse(getattr(r, 'from_cache', False))
        # Outside the context manager the cache is active again.
        r = self.s.get(url)
        self.assertTrue(getattr(r, 'from_cache', False))

    def test_content_and_cookies(self):
        requests_cache.install_cache(CACHE_NAME, CACHE_BACKEND)
        s = requests.session()

        def js(url):
            return json.loads(s.get(url).text)

        r1 = js(httpbin('cookies/set/test1/test2'))
        with requests_cache.disabled():
            r2 = js(httpbin('cookies'))
        self.assertEqual(r1, r2)
        r3 = js(httpbin('cookies'))
        with requests_cache.disabled():
            r4 = js(httpbin('cookies/set/test3/test4'))
        # from cache
        self.assertEqual(r3, js(httpbin('cookies')))
        # updated
        with requests_cache.disabled():
            self.assertEqual(r4, js(httpbin('cookies')))

    def test_response_history(self):
        """Redirect history is preserved on cached responses."""
        r1 = self.s.get(httpbin('redirect/3'))

        def test_redirect_history(url):
            r2 = self.s.get(url)
            self.assertTrue(r2.from_cache)
            for r11, r22 in zip(r1.history, r2.history):
                self.assertEqual(r11.url, r22.url)

        test_redirect_history(httpbin('redirect/3'))
        test_redirect_history(httpbin('redirect/2'))
        r3 = requests.get(httpbin('redirect/1'))
        self.assertEqual(len(r3.history), 1)

    def test_response_history_simple(self):
        # redirect/1's target was cached as a hop of redirect/2 above.
        r1 = self.s.get(httpbin('redirect/2'))
        r2 = self.s.get(httpbin('redirect/1'))
        self.assertTrue(r2.from_cache)

    def post(self, data):
        """Helper: POST *data* to httpbin and return the decoded JSON body."""
        return json.loads(self.s.post(httpbin('post'), data=data).text)

    def test_post_params(self):
        # issue #2
        self.s = CachedSession(CACHE_NAME, CACHE_BACKEND,
                               allowable_methods=('GET', 'POST'))
        d = {'param1': 'test1'}
        for _ in range(2):
            self.assertEqual(self.post(d)['form'], d)
        d = {'param1': 'test1', 'param3': 'test3'}
        self.assertEqual(self.post(d)['form'], d)
        self.assertTrue(self.s.post(httpbin('post'), data=d).from_cache)
        d.update({'something': 'else'})
        self.assertFalse(self.s.post(httpbin('post'), data=d).from_cache)

    def test_post_data(self):
        # issue #2, raw payload
        self.s = CachedSession(CACHE_NAME, CACHE_BACKEND,
                               allowable_methods=('GET', 'POST'))
        d1 = json.dumps({'param1': 'test1'})
        d2 = json.dumps({'param1': 'test1', 'param2': 'test2'})
        d3 = str('some unicode data')
        if is_py3:
            bin_data = bytes('some binary data', 'utf8')
        else:
            bin_data = bytes('some binary data')

        for d in (d1, d2, d3):
            self.assertEqual(self.post(d)['data'], d)
            r = self.s.post(httpbin('post'), data=d)
            self.assertTrue(hasattr(r, 'from_cache'))

        self.assertEqual(self.post(bin_data)['data'],
                         bin_data.decode('utf8'))
        r = self.s.post(httpbin('post'), data=bin_data)
        self.assertTrue(hasattr(r, 'from_cache'))

    def test_get_params_as_argument(self):
        """params= kwargs are folded into the cached URL."""
        for _ in range(5):
            p = {'arg1': 'value1'}
            r = self.s.get(httpbin('get'), params=p)
            self.assertTrue(
                self.s.cache.has_url(httpbin('get?arg1=value1')))

    def test_https_support(self):
        """Repeated HTTPS requests are served from cache (fast)."""
        n = 10
        delay = 1
        url = 'https://httpbin.org/delay/%s?ar1=value1' % delay
        t = time.time()
        for _ in range(n):
            r = self.s.get(url, verify=False)
        self.assertLessEqual(time.time() - t, delay * n / 2)

    def test_from_cache_attribute(self):
        url = httpbin('get?q=1')
        self.assertFalse(self.s.get(url).from_cache)
        self.assertTrue(self.s.get(url).from_cache)
        self.s.cache.clear()
        self.assertFalse(self.s.get(url).from_cache)
class FIRSTInspiresHTTPAPI:
    """Thin client for the FIRST Inspires FRC Events API (v3.0).

    Fetches district events (cached via requests_cache) and live event
    rankings (cache bypassed), and keeps the results on the instance.
    """

    # Known non-200 statuses and the message suffix logged for each at DEBUG.
    # Per the FRC Events API: 304 Not Modified, 400 bad/missing parameter,
    # 401 unauthorized, 404 invalid event, 5xx server-side problems.
    _STATUS_MESSAGES = {
        304: "Response 304 - Data Not Modified",
        400: "Response 400",
        401: "Response 401",
        404: "Response 404",
        500: "Response 500",
        501: "Response 501",
        503: "Response 503",
    }

    def __init__(self, **kwargs):
        self.logger = logging.getLogger('FIRSTChesapeakeTwitchBot')
        self.FRCEVENTS_KEY = kwargs.get("FRCEVENTS_KEY", None)
        # If no key was passed in, load it from the environment (.env file).
        if self.FRCEVENTS_KEY is None:
            load_dotenv()
            self.FRCEVENTS_KEY = os.getenv('FRCEVENTS_KEY')

        # Initialize requests_cache to cache API calls.
        self.session = CachedSession('FIRSTInspiresHTTPAPI', backend='sqlite')
        # temp
        logging.getLogger('requests_cache').setLevel('DEBUG')

        # Get today's events.
        self.TodaysEvents = None
        self.AllDistrictEvents = None
        self.update_AllDistrictEvents(districtCode="CHS")
        self.update_TodaysDistrictEvents(districtCode="CHS")
        self.logger.debug("[FIRSTInspiresHTTPAPI] Instance Created")

    def _log_http_error(self, tag, status_code):
        """Log a known non-200 status at DEBUG, anything unknown at ERROR."""
        message = self._STATUS_MESSAGES.get(status_code)
        if message is not None:
            self.logger.debug("[FIRSTInspiresHTTPAPI][%s] %s" % (tag, message))
        else:
            self.logger.error(
                "[FIRSTInspiresHTTPAPI][%s] Invalid response from server." %
                tag)

    def get_FRCEventRankings(self, **kwargs):
        """Return the top-5 rankings for *eventCode*, or None on any failure."""
        eventCode = kwargs.get("eventCode", None)
        if eventCode is None:
            self.logger.error(
                "[FIRSTInspiresHTTPAPI][get_FRCEventRankings] Error: Event Code is None!"
            )
            # Bail out early: continuing would crash concatenating None
            # into the request URL.
            return None

        # Access the API.
        try:
            apiheaders = {
                'Content-Type': 'application/json',
                "Authorization": "Basic " + self.FRCEVENTS_KEY
                # "If-Modified-Since" : datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
                # THIS BREAKS RANKINGS (Provides 200 result with empty response)
            }
            url = ("https://frc-api.firstinspires.org/v3.0/2022/rankings/" +
                   eventCode + "?top=5")
            self.logger.debug(
                "[FIRSTInspiresHTTPAPI][get_FRCEventRankings] " + url)
            # Rankings change constantly during an event, so bypass the cache.
            with self.session.cache_disabled():
                response = self.session.get(url, headers=apiheaders,
                                            timeout=(3, 5))
        except (requests.exceptions.ConnectionError,
                requests.exceptions.Timeout):
            self.logger.error(
                "[FIRSTInspiresHTTPAPI][get_FRCEventRankings] Failed to contact FIRST INSPIRES API!"
            )
        else:
            if response.status_code == 200:  # 200 - Success OK
                self.logger.debug(
                    "[FIRSTInspiresHTTPAPI][get_FRCEventRankings] Response 200"
                )
                self.logger.debug(
                    "[FIRSTInspiresHTTPAPI][get_FRCEventRankings] Response Text: "
                    + response.text)
                responseData = json.loads(response.text)
                return responseData["Rankings"]
            self._log_http_error("get_FRCEventRankings", response.status_code)
        return None

    def get_AllDistrictEvents(self):
        """Return the most recently fetched district events payload (or None)."""
        return self.AllDistrictEvents

    def update_AllDistrictEvents(self, **kwargs):
        """Fetch all events for *districtCode* (default "CHS") and store them.

        On success stores the raw API payload in ``self.AllDistrictEvents``;
        on failure the previous value is left untouched.
        """
        tmpDistrictCode = kwargs.get("districtCode", None)
        if tmpDistrictCode is None:
            tmpDistrictCode = "CHS"

        # Access the API.
        try:
            apiheaders = {
                'Content-Type': 'application/json',
                "Authorization": "Basic " + self.FRCEVENTS_KEY
            }
            url = ("https://frc-api.firstinspires.org/v3.0/2022/events?districtCode="
                   + tmpDistrictCode)
            self.logger.debug(
                "[FIRSTInspiresHTTPAPI][get_AllDistrictEvents] " + url)
            response = self.session.get(url, headers=apiheaders, timeout=3)
        except (requests.exceptions.ConnectionError,
                requests.exceptions.Timeout):
            self.logger.error(
                "[FIRSTInspiresHTTPAPI][get_AllDistrictEvents] Failed to contact FIRST INSPIRES API!"
            )
        else:
            if response.status_code == 200:  # 200 - Success OK
                self.logger.debug(
                    "[FIRSTInspiresHTTPAPI][get_AllDistrictEvents] Response 200"
                )
                self.logger.debug(
                    "[FIRSTInspiresHTTPAPI][get_AllDistrictEvents] Response Text: "
                    + response.text)
                responseData = json.loads(response.text)
                self.logger.debug(
                    "[FIRSTInspiresHTTPAPI][get_AllDistrictEvents] Received " +
                    str(responseData["eventCount"]) + " events from API.")
                self.AllDistrictEvents = responseData
            else:
                self._log_http_error("get_AllDistrictEvents",
                                     response.status_code)

    def get_TodaysDistrictEvents(self, **kwargs):
        """Return the cached list of today's events (or None before update)."""
        return self.TodaysEvents

    def update_TodaysDistrictEvents(self, **kwargs):
        """Filter AllDistrictEvents down to the events running today.

        Keeps events whose date range covers now and whose name contains both
        '#' and 'Day', plus DistrictChampionship events; stores the result in
        ``self.TodaysEvents``.
        """
        tmpDistrictCode = kwargs.get("districtCode", None)
        if tmpDistrictCode is None:
            tmpDistrictCode = "CHS"

        # AllDistrictEvents is None until the first successful fetch; the
        # previous len(...) check raised TypeError in that case.
        if not self.AllDistrictEvents:
            self.logger.debug(
                "[FIRSTInspiresHTTPAPI][get_TodaysDistrictEvents] AllDistrictEvents is 0. Trying to get Events from FIRST"
            )
            self.update_AllDistrictEvents(districtCode=tmpDistrictCode)

        if self.AllDistrictEvents:
            fResult = []
            for event in self.AllDistrictEvents["Events"]:
                self.logger.debug(
                    "[FIRSTInspiresHTTPAPI][get_TodaysDistrictEvents] Processing Event: "
                    + event["name"])
                dateStart = parser.parse(event["dateStart"])
                dateEnd = parser.parse(event["dateEnd"])
                # NOTE(review): datetime.now() is naive — assumes the API
                # returns naive local timestamps; confirm against the feed.
                if dateStart <= datetime.now() <= dateEnd:
                    # If NOW is between the event's start and end dates.
                    if '#' in event["name"] and "Day" in event["name"]:
                        # Contains '#' symbol and 'Day'.
                        self.logger.debug(
                            "[FIRSTInspiresHTTPAPI][get_TodaysDistrictEvents] Today's Event"
                        )
                        fResult.append(event)
                    elif event["type"] == "DistrictChampionship":
                        self.logger.debug(
                            "[FIRSTInspiresHTTPAPI][get_TodaysDistrictEvents] DistrictChampionship Event"
                        )
                        fResult.append(event)
                    else:
                        # Skip: neither name nor type matched.
                        self.logger.debug(
                            "[FIRSTInspiresHTTPAPI][get_TodaysDistrictEvents] Unwanted Event. Name/Type Not match"
                        )
                else:
                    self.logger.debug(
                        "[FIRSTInspiresHTTPAPI][get_TodaysDistrictEvents] Unwanted Event, not Today"
                    )
            self.TodaysEvents = fResult
            self.logger.info(
                "[FIRSTInspiresHTTPAPI][get_TodaysDistrictEvents] Count of Today's Events: "
                + str(len(fResult)))
        else:
            self.logger.error(
                "[FIRSTInspiresHTTPAPI][get_TodaysDistrictEvents] No Events to process!"
            )