def loadURLFast(url=None, cache_dir=CACHE_DIR, aged=AGED, timeout=TIMEOUT):
    req_dig = request_digest(url)
    req_dig_key = cache_dir + req_dig
    req_dig_md_key = file_storage.metadata(req_dig_key)
    aged_in_cache = False
    in_cache = False
    now = datetime.datetime.now()
    try:
        md = file_storage.getItem(req_dig_md_key)
        in_cache = True
        if aged is not None and \
           now - md["datetime"] > datetime.timedelta(seconds=aged):
            aged_in_cache = True
    except:
        pass
    if in_cache and (config.offline_mode or not aged_in_cache):
        try:
            try:
                headers = dict(file_storage.getItem(req_dig_md_key)["headers"])
            except:
                headers = {'content-type': "application/octet-stream"}
            return headers, file_storage.getItem(req_dig_key, mode="file")
        except:
            # Cached entry is unreadable: drop its metadata and fall through
            # to a fresh download.
            file_storage.delItem(req_dig_md_key)
            in_cache = False
    headers, fh = doLoadURLFast(url, cache_dir=cache_dir, aged=aged, timeout=timeout)
    # Prefer the headers recorded in the cache metadata by doLoadURLFast().
    try:
        headers = dict(file_storage.getItem(req_dig_md_key)["headers"])
    except:
        headers = {'content-type': "application/octet-stream"}
    return headers, fh
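# Example (sketch): fetching a URL through the fast cache path and consuming the
# result.  The URL and the fallback content type used here are illustrative
# assumptions, not part of this module.
def example_loadURLFast_usage():
    headers, fh = loadURLFast("http://example.com/feed.xml")
    content_type = headers.get("content-type", "application/octet-stream")
    body = fh.read()
    return content_type, body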
def downloadPreview(file_key, max_x=100, max_y=100):
    if Image is None:
        return
    try:
        metadata = file_storage.getItem(file_storage.metadata(file_key))
        input_file = file_storage.getItem(file_key, mode="file")
        output_file = tempfile.TemporaryFile()
        ratio = 1
        angle = 0
        ct = metadata['Content-Type']
        pre_ct = "image/jpeg"
        if ct.startswith("image/"):
            try:
                _preview(input_file, output_file, max_x, max_y, ratio, angle)
            except:
                pre_ct, output_file = preview_icon(ct)
        else:
            pre_ct, output_file = preview_icon(ct)
        last_modified = metadata["Last-Modified"]
    except:
        pre_ct, output_file = preview_icon('broken')
        last_modified = rfc822.formatdate(time.time())  #!!!?
    # Measure the generated preview, then rewind and read it back.
    output_file.seek(0, 2)
    preview_length = output_file.tell()
    output_file.seek(0)
    fc = output_file.read()
    web.header('Content-Type', pre_ct)
    web.header('Content-Length', str(preview_length))
    web.header('Last-Modified', last_modified)
    return fc
def download_captcha(file_key):
    if not goodkey_re.match(file_key):
        return
    metadata = file_storage.getItem(file_storage.metadata(file_key))
    web.header('Content-Type', metadata["Content-Type"])
    web.header('Content-Length', str(metadata["Content-Length"]))
    web.header('Last-Modified', metadata["Last-Modified"])
    return file_storage.getItem(file_key, mode="str")
def downloadMediaFile(file_key):
    try:
        metadata = file_storage.getItem(file_storage.metadata(file_key))
        web.header('Content-Type', metadata["Content-Type"])
        web.header('Content-Length', str(metadata["Content-Length"]))
        web.header('Last-Modified', metadata["Last-Modified"])
        return file_storage.getItem(file_key, mode="str")
    except KeyError:
        return downloadPreview(file_key)  #??? !!!
def loadURL_2(url, cache_dir=CACHE_DIR, aged=AGED, timeout=TIMEOUT):
    CMD = READ_FEED_COMMAND + " %s %s %s" % (cache_dir, aged, timeout)
    child_stdin = os.popen(CMD, "w")
    child_stdin.write(url)
    child_stdin.close()
    req_dig_key = cache_dir + request_digest(url)
    return file_storage.getItem(req_dig_key, mode="file")
def doLoadURLFast(url, cache_dir=CACHE_DIR, aged=AGED, timeout=TIMEOUT):
    headers, fh = _doRead(url)
    req_dig_key = cache_dir + request_digest(url)
    req_dig_md_key = file_storage.metadata(req_dig_key)
    file_storage.setItem(req_dig_key, fh)
    file_storage.setItem(req_dig_md_key,
                         {"datetime": datetime.datetime.now(), "headers": headers})
    return headers, file_storage.getItem(req_dig_key, mode="file")
def get_session():
    """Get the existing session (may return None)."""
    session_id = get_session_id()
    if session_id is not None:
        session_key = SESSION_DIR + session_id
        try:
            return file_storage.getItem(session_key, mode="pickle")
        except:
            return None
    else:
        return None
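# Example (sketch): reading a value out of the current session, assuming the
# pickled session object is a dict.  The "user_id" key is an illustrative
# assumption, not something this module defines.
def example_get_session_usage(default=None):
    session = get_session()
    if session is None:
        return default
    return session.get("user_id", default)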
def loadParsedURL(url):
    req_dig_key = CACHE_DIR + request_digest(url)
    req_dig_md_key = file_storage.metadata(req_dig_key)
    try:
        md = file_storage.getItem(req_dig_md_key)
        if datetime.datetime.now() - md["datetime"] \
               <= datetime.timedelta(seconds=AGED):
            return md["parsed"]
    except StandardError:  # misc errors
        pass
    return None
def needToReloadURL(url, cache_dir=CACHE_DIR, aged=AGED):
    req_dig_key = cache_dir + request_digest(url)
    req_dig_md_key = file_storage.metadata(req_dig_key)
    try:
        md = file_storage.getItem(req_dig_md_key)
        if datetime.datetime.now() - md["datetime"] \
               <= datetime.timedelta(seconds=aged):
            return False
    except StandardError:  # misc errors
        pass
    return True
def loadURL(url, cache_dir=CACHE_DIR, aged=AGED, timeout=TIMEOUT):
    #!!! new feature
    cached_result = tryCachedURL(url, cache_dir=cache_dir, aged=aged)
    if cached_result is not None:
        return cached_result
    CMD = READ_FEED_COMMAND + " %s %s %s" % (cache_dir, aged, timeout)
    child_stdin = os.popen(CMD, "w")
    child_stdin.write(url)
    child_stdin.close()
    req_dig_key = cache_dir + request_digest(url)
    return file_storage.getItem(req_dig_key, mode="file")
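# Example (sketch): fetching a feed through loadURL().  A fresh cache entry is
# returned directly, otherwise READ_FEED_COMMAND repopulates the cache first.
# The URL and the 60-second freshness window are illustrative assumptions.
def example_loadURL_usage():
    fh = loadURL("http://example.com/feed.xml", aged=60)
    return fh.read()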
def check_captcha(file_key, user_word):
    if not goodkey_re.match(file_key):
        return "Invalid key format"
    try:
        metadata = file_storage.getItem(file_storage.metadata(file_key))
    except:
        return "No such thing"
    try:
        if time.time() - metadata["added"] > 180:
            # captcha is too old
            return "Too old"
        if user_word.strip() == metadata["secret"]:
            return ""
    finally:
        # The captcha is single-use: drop it whether or not the answer matched.
        drop_captcha(file_key)
    return "Failed"
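# Example (sketch): validating a captcha answer posted from a form.
# check_captcha() returns an empty string on success and a short reason
# otherwise; the form field names used here are illustrative assumptions.
def example_check_captcha_usage(form):
    error = check_captcha(form.get("captcha_key", ""),
                          form.get("captcha_word", ""))
    if error:
        return "Captcha rejected: %s" % error
    return "OK"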
def getHeaders(url, cache_dir=CACHE_DIR):
    req_dig_key = cache_dir + request_digest(url)
    req_dig_md_key = file_storage.metadata(req_dig_key)
    md = file_storage.getItem(req_dig_md_key)
    return md["headers"]
def tryCachedURL(url, cache_dir=CACHE_DIR, aged=AGED):
    if not needToReloadURL(url, cache_dir, aged):
        req_dig_key = cache_dir + request_digest(url)
        return file_storage.getItem(req_dig_key, mode="file")
    return None
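# Example (sketch): probing the cache before deciding whether to hit the
# network.  The URL is an illustrative assumption.
def example_tryCachedURL_usage():
    fh = tryCachedURL("http://example.com/feed.xml")
    if fh is not None:
        return fh.read()  # fresh enough; served straight from the cache
    return loadURL("http://example.com/feed.xml").read()  # stale or missing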