def post(self, request, *args, **kwargs):
    """Validate login credentials, cache the access token, and return the token payload."""
    serializer = self.get_serializer(data=request.data)
    try:
        serializer.is_valid(raise_exception=True)
    except TokenError as exc:
        raise InvalidToken(exc.args[0])
    validated = serializer.validated_data
    # NOTE(review): "active" is populated from "user_type" — looks like a
    # copy-paste slip (presumably meant an "active" field); confirm against
    # the serializer before changing.
    payload = {
        "display_name": validated["user_display"],
        "username": validated["user"],
        "Token": validated["access"],
        "UserType": validated["user_type"],
        "active": validated["user_type"],
        "is_superuser": validated["is_superuser"],
    }
    # NOTE(review): reads raw request.data["username"] — KeyErrors if the
    # client omits it; verify whether validated["user"] was intended.
    Cache.set(request.data["username"], validated["access"])
    return Response(payload, status=status.HTTP_200_OK)
def cache_test():
    """Exercise Cache set/get/delete round-trips and echo the cached name."""
    cache_key = request.args.get("key")
    # Store the supplied key both as a key and as a value.
    Cache.set(cache_key, "val")
    Cache.set("name", cache_key)
    cached_name = Cache.get("name")
    # Remove "name" after reading it; leave "age" behind.
    Cache.delete("name")
    Cache.set("age", 12)
    return jsonify({"name": cached_name})
def process_item(item, spider):
    # Persist one scraped proxy record into api_ip and register it in the cache.
    # Always returns the item unchanged so downstream pipelines keep running.
    sql = "insert into api_ip (host, port, proxy_type, anonymity_type, region) values (%s, %s, %s, %s, %s)"
    try:
        # Parameterized insert — values are bound by the driver, not string-built.
        cursor.execute(sql, [
            item.get("host"),
            item.get("port"),
            item.get("proxy_type"),
            item.get("anonymity_type"),
            item.get("region"),
        ])
        conn.commit()
        # item["host"]/item["port"] (not .get) will KeyError if missing, landing
        # in the except below even though the row was already committed.
        ip_title = "{}:{}".format(item["host"], item["port"])
        # NOTE(review): single-argument Cache.set — other call sites in this
        # file pass (key, value); confirm a value argument is not missing here
        # (a TypeError would be swallowed by the except and logged as failure).
        Cache.set(ip_title)
        spider.logger.warning("【导入成功】{}".format(ip_title))
    except Exception as exc:
        # Best-effort: log and fall through; the item is still returned.
        spider.logger.error("【导入失败】{}".format(exc))
    return item
class FileFetchAndCache:
    '''
    Fetch files over HTTP and cache their headers and contents.
    Return files from the cache if they have not changed.
    '''

    # HTTP header name constants (and cache keys)
    __if_none_match: ClassVar[str] = 'If-None-Match'
    __if_modified_since: ClassVar[str] = 'If-Modified-Since'
    __etag: ClassVar[str] = 'ETag'
    __last_modified: ClassVar[str] = 'Last-Modified'
    __file_bytes: ClassVar[str] = 'file_bytes'  # cache key
    __file_hash: ClassVar[str] = 'file_hash'    # cache key

    def __init__(self, verbose: bool = False) -> None:
        # verbose: enable http.client debug output and diagnostic prints
        self.verbose = verbose
        self.cache = Cache()

    def get(self, host: str = None, URL: str = None) -> tuple:
        '''
        Fetch an URL and cache the contents.

        Arguments:
            host: hostname:port
            URL: URL to fetch starting with /

        Returns a tuple of:
            success: bool
            from_cache: bool
            file_bytes: bytes
        '''
        # FIX: this docstring was a free-floating bare string at class level
        # (between __init__ and get) in the original; it is now attached to
        # the method it describes.
        success: bool = False
        from_cache: bool = False
        file_bytes: bytes = None
        file_hash: str = None
        request_headers: dict = {}

        # If we have both cached validators for this URL, send a conditional
        # GET so the server can answer 304 Not Modified.
        etag = self.cache.get(URL, FileFetchAndCache.__etag)
        last_mod = self.cache.get(URL, FileFetchAndCache.__last_modified)
        if etag is not None and last_mod is not None:
            request_headers[FileFetchAndCache.__if_none_match] = etag
            request_headers[FileFetchAndCache.__if_modified_since] = last_mod

        conn = http.client.HTTPSConnection(host)
        try:
            if self.verbose:
                conn.set_debuglevel(1)
            conn.request('GET', URL, headers=request_headers)
            resp = conn.getresponse()

            # Cache the validators from the response for the next request
            etag = resp.getheader(FileFetchAndCache.__etag)
            if etag is not None:
                self.cache.set(URL, FileFetchAndCache.__etag, etag)
            last_mod = resp.getheader(FileFetchAndCache.__last_modified)
            if last_mod is not None:
                self.cache.set(URL, FileFetchAndCache.__last_modified, last_mod)

            if resp.status == 200:
                # Fresh (or changed) content: cache the body and its hash
                file_bytes = resp.read()
                self.cache.set(URL, FileFetchAndCache.__file_bytes, file_bytes)
                file_hash = Hash.md5(file_bytes)
                self.cache.set(URL, FileFetchAndCache.__file_hash, file_hash)
                success = True
            elif resp.status == 304:
                # Not modified: serve the body (and hash) from our cache
                file_bytes = self.cache.get(URL, FileFetchAndCache.__file_bytes)
                file_hash = self.cache.get(URL, FileFetchAndCache.__file_hash)
                success = True
                from_cache = True
        finally:
            # FIX: the original leaked the connection if request()/getresponse()
            # or read() raised; always close it.
            conn.close()

        if self.verbose:
            print(resp.status, resp.reason)
            print(self.cache)
        return success, from_cache, file_bytes