def test_remove_expired_responses__no_expiration(self, remove_expired_responses):
    """An installed cache with no global expiration still triggers cleanup.

    Before https://github.com/reclosedev/requests-cache/pull/177 this was
    skipped, but with per-request caching remove_expired_responses must
    always be invoked.
    """
    requests_cache.install_cache()
    requests_cache.remove_expired_responses()
    assert remove_expired_responses.called
def getfile(project_name, file_path):
    """Fetch a file's metadata from the GitLab v4 API, revalidating the
    cached response when the file's last commit id changed on the server.

    Prints a comma-separated summary line and returns the decoded JSON
    body of the (possibly re-fetched) response.
    """
    # HTTP-date format of the 'Date' response header (RFC 7231 style).
    fmt = "%a, %d %b %Y %H:%M:%S %Z"
    api_base = 'https://gitlab.com/api/v4'
    # Both the project path and the file path must be URL-encoded for the route.
    pid = quote_plus(project_name)
    fid = quote_plus(file_path)
    url = f'{api_base}/projects/{pid}/repository/files/{fid}?ref=master'
    resp = requests.get(url)
    data = resp.json()
    cached_last_commit_id = data['last_commit_id']
    print(','.join(
        [project_name.split('/')[-1], file_path, cached_last_commit_id[:8]]))
    # from_cache is added by requests-cache's patched session; only cached
    # responses need revalidation against the server.
    if resp.from_cache:
        old_date = datetime.datetime.strptime(resp.headers.get('Date'), fmt)
        # Cheap HEAD request: compare commit ids without re-downloading the body.
        resp = requests.head(url)
        head = resp.headers
        server_last_commit_id = head.get('x-gitlab-last-commit-id')
        new_date = datetime.datetime.strptime(head.get('Date'), fmt)
        delta = new_date - old_date  # NOTE(review): computed but never used
        if cached_last_commit_id != server_last_commit_id:
            print(f"Response was cached on {old_date}")
            print(
                f"Last Commit ID differs between cache ({cached_last_commit_id[:8]}) and server ({server_last_commit_id[:8]})."
            )
            # register_url is defined elsewhere; presumably it sets the cached
            # entry's TTL to 0 so the next cleanup drops it — TODO confirm.
            register_url(url, 0)  # Mark as "expired".
            requests_cache.remove_expired_responses()
            # Re-fetch so the fresh response is cached with the default expiry.
            resp = requests.get(url, expire_after='default')
            data = resp.json()
    return data
def get(self, filter):
    """Return repositories sorted according to *filter*.

    Supported filters: 'prs' (open pull request count), 'commits'
    (commit count) and 'contribs' (contributor count); any other value
    yields a fault payload.
    """
    # remove expired responses before serving, so cached data is fresh
    requests_cache.remove_expired_responses()
    repos_info = get_repos_info()
    # Map each public filter name to the repo-info key it sorts on.
    # This replaces three near-identical if/elif branches.
    sort_keys = {
        'prs': 'open_pull_requests_count',
        'commits': 'commits_count',
        'contribs': 'contributors_count',
    }
    key_name = sort_keys.get(filter)
    if key_name is None:
        return {'fault': 'Invalid filter.'}
    result = sorted(repos_info, key=lambda repo: repo[key_name], reverse=True)
    return {'response': {'repositories': result}}
def test_remove_expired(self):
    """Cache three URLs with different TTLs; after 2s only two remain."""
    # (url, per-request expire_after); None means the session default.
    targets = [
        (self.url, None),
        (HTTPBIN_URL + 'anything', 2),
        (HTTPBIN_URL, 10),
    ]
    for target, ttl in targets:
        if ttl is None:
            first = requests.get(target)
        else:
            first = requests.get(target, expire_after=ttl)
        self.assertFalse(first.from_cache)
        second = requests.get(target)
        self.assertTrue(second.from_cache)
    self.assertEqual(len(requests.Session().cache.responses), 3)
    # Let the 2-second entry lapse, then purge it.
    time.sleep(2)
    requests_cache.remove_expired_responses()
    self.assertEqual(len(requests.Session().cache.responses), 2)
def GetPage(url):
    """Fetch *url* with a Googlebot User-Agent and return the lxml tree.

    Responses are cached on disk for an hour; expired entries are purged
    before each request.
    """
    headers = {
        # Closing parenthesis restored — the original UA string was truncated.
        "User-Agent": "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
    }
    requests_cache.install_cache("./data/cached-results", expire_after=3600)
    requests_cache.remove_expired_responses()
    # BUG FIX: the second positional argument of requests.get() is `params`,
    # not `headers` — the UA dict was previously sent as query parameters
    # and never as a header. Pass it by keyword.
    page = requests.get(url, headers=headers)
    return html.fromstring(page.content)
def __init__(self, cachePath): self.cachePath = cachePath #cache expires after: 3600 = 1hour requests_cache.install_cache(os.path.join(cachePath,'requests.cache'), backend='sqlite', expire_after=3600*8 ) requests_cache.remove_expired_responses() self.empty_srt = compat_str('{}/{}.da.srt').format(self.cachePath, tr(30508)) with open(self.empty_srt, 'w') as fn: fn.write('1\n00:00:00,000 --> 00:01:01,000\n') # we have to have something in srt to make kodi use it # self.conn = mysql.connector.connect( host="192.168.1.8", user="******", password="******", database="drtv", port=3306 ) self.conn = MySQLdb.connect( host="192.168.1.8", user="******", password="******", database="drtv", port=3306 ) self.cursor = self.conn.cursor() self.cursor.execute("SELECT * FROM lastUpdate;") reply = self.cursor.fetchall()[0][0] self.dbCurrent = True if reply == datetime.date.today().strftime("%Y-%m-%d") else False
def get(self, lang):
    """Return the popular repositories for *lang*, or an error message."""
    # remove expired responses before querying
    requests_cache.remove_expired_responses()
    try:
        repos = get_popular_repos(lang)
        # A single-element first row signals an upstream error payload.
        if len(repos[0]) != 1:
            return {'response': {'popular_repositories': repos}}
        raise ValueError()
    except ValueError:
        # some error occured — the second call yields the error message
        message = get_popular_repos(lang)
        return {'response': {'message': message}}
def get(self, lang):
    """Return popular repositories for *lang*; on failure, the API message."""
    # remove expired responses first
    requests_cache.remove_expired_responses()
    try:
        repos = get_popular_repos(lang)
        error_shaped = len(repos[0]) == 1
        if error_shaped:
            raise ValueError()
    except ValueError:
        # language doesn't exist in GitHub's database, or the query is invalid;
        # a second call returns the error message instead of repo rows
        return {'response': {'message': get_popular_repos(lang)}}
    return {'response': {'popular_repositories': repos}}
def get(self, repo_html_url):
    """Return a repo's full name and top contributors, or an error message."""
    # remove expired responses before querying
    requests_cache.remove_expired_responses()
    try:
        full_name, contributors = get_repo_top_contribs(repo_html_url)
    except ValueError:
        # some error occured — the helper's second call yields the message
        return {'response': {'message': get_repo_top_contribs(repo_html_url)}}
    # Building the success dict cannot raise ValueError, so it lives
    # outside the try block.
    return {
        'response': {
            'full_name': full_name,
            'top_contributors': contributors,
        }
    }
def __init__(self, py_jwt_exception_class, cache_enabled, cache_lifetime,
             cache_store, cache_store_connection):
    """Configure optional requests-cache backed caching.

    Raises the supplied exception class for an out-of-range lifetime
    (valid: 1..30 days) or an unusable cache store.
    """
    self.py_jwt_exception = py_jwt_exception_class
    self.cache_enabled = cache_enabled
    # Lifetime is validated even when caching is disabled.
    if cache_lifetime > 30 or cache_lifetime < 1:
        raise self.py_jwt_exception("cache-lifetime")
    self.cache_lifetime = timedelta(days=cache_lifetime)
    self.cache_store = cache_store
    if not cache_enabled:
        return
    try:
        requests_cache.install_cache(expire_after=self.cache_lifetime,
                                     backend=self.cache_store,
                                     connection=cache_store_connection)
        requests_cache.remove_expired_responses()
    except ValueError:
        raise self.py_jwt_exception("cache-store")
def clear_expired():
    """Drop expired entries from the requests cache and report its name."""
    requests_cache.remove_expired_responses()
    return settings.CACHE_NAME
def test_remove_expired_responses__cache_not_installed(self, remove_expired_responses):
    """Without an installed cache, expiry cleanup must be a no-op."""
    requests_cache.remove_expired_responses()
    assert not remove_expired_responses.called
def test_remove_expired_responses(self, remove_expired_responses):
    """Installing a cache with a TTL makes cleanup hit the backend."""
    requests_cache.install_cache(expire_after=360)
    requests_cache.remove_expired_responses()
    assert remove_expired_responses.called
def _get(self, URL):
    """GET *URL* through a sqlite-backed cache with the configured TTL."""
    requests_cache.install_cache(
        self._cache, backend='sqlite', expire_after=self._time_out)
    # Evict stale entries before issuing the request.
    requests_cache.remove_expired_responses()
    return requests.get(URL)
format_other_recordings_list = '<div id=\"othervideos\">\n<div class=\"othersubheader\">Other recordings</div>\n<ul class=\"inlinelist\">{0}\n</ul>\n</div>' format_other_recordings_list_no_embeddable = '<ul class=\"inlinelist\">{0}\n</ul>' format_other_recordings_list_item = '<li class=\"inlinelistitem\"><a class=\"outbound\" href=\"{0}\">{1} ({2})</a></li>\n' format_slides_div = '<div id=\"slides\">\n<div class=\"subheader\">Slides</div>\n' format_other_slides_list = '<div id=\"otherslides\">\n<div class=\"othersubheader\">Other Versions</div>\n<ul class=\"inlinelist\">{0}\n</ul>\n</div>' format_other_slides_list_item = '<li class=\"inlinelistitem\"><a class=\"outbound\" href=\"{0}\">{1} ({2})</a></li>\n' format_reactions_div = '<div id=\"reactions\">\n<div class=\"subheader\">Reactions</div>\n<ul>{0}</ul></div>' format_talk_page_title = '{0}: Talks: Kevin Goldsmith' #duplicated. meh. format_close_div = '</div>\n' talk_type_keynote = 'keynote' #requests cache requests_cache.install_cache(expire_after=timedelta(days=1)) requests_cache.remove_expired_responses() def get_embed_code_from_videoURL(video_url): #https://youtu.be/_67NPdn6ygY #https://www.youtube.com/watch?v=7U3cO3h8Pao #https://vimeo.com/102774091 #<iframe width="560" height="315" src="https://www.youtube.com/embed/_67NPdn6ygY?rel=0" frameborder="0" allowfullscreen></iframe> #https://developer.vimeo.com/apis/oembed #https://www.turingfest.com/2019/speakers/kevin-goldsmith?wvideo=46th18adn3 parsed = urllib.parse.urlparse(video_url) youtube_id = '' if parsed.netloc == 'youtu.be': split = os.path.split(parsed.path) youtube_id = split[1] elif parsed.netloc == 'www.youtube.com':
import logging
import os
import requests
import requests_cache
import re
# Install Cache for requests:
from panel_app.constants import API_KEY_HERE

os.makedirs('./cache', exist_ok=True)
requests_cache.install_cache('./cache/tomtom_cache', backend='sqlite',
                             expire_after=60 * 60 * 23 * 7)  # Cache for ~ 1 wk.
requests_cache.remove_expired_responses()  # Clean-up expired responses.

log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)

# Geographic constraints centred on 51.0447,-114.0719 (Calgary/YYC), 80 km radius.
LIMIT_SEARCH_IN_PARAM = "circle:51.0447,-114.0719;r=80000"
PROXIMITY_TO_YYC = "51.0447,-114.0719,80000"
# NOTE(review): these are HERE API endpoints despite the 'tomtom' cache name.
AUTOSUGGEST_URL = "https://autosuggest.search.hereapi.com/v1/autosuggest"
AUTOCOMPLETE_URL = "https://autocomplete.geocoder.ls.hereapi.com/6.2/suggest.json"


def clear_string(x):
    """Strip 'th' + whitespace sequences from a query string, logging the change."""
    regex = r"th\s"
    subst = " "
    # You can manually specify the number of replacements by changing the 4th argument
    result = re.sub(regex, subst, x, 0, re.MULTILINE)
    log.info(f"Cleaning up query : {x} to {result}")
    # NOTE(review): no return statement visible here — result is only logged;
    # the function may be truncated in this view.
def test_remove_expired_responses__no_expiration(remove_expired_responses, installed_session):
    """Even with no expiration configured, cleanup must still be invoked."""
    requests_cache.remove_expired_responses()
    assert remove_expired_responses.called