def download(self, date, type):
    """Fetch the archived chat log for *date*/*type*, cache it, and print it.

    The first payload line is a JSON header (printed as 'info'); every
    following line is one message handed to self.raw_print().
    """
    request_body = json.dumps({"ChatType": type, "MsgTime": date})
    md5, url = self.get_download_urls(request_body)
    response = requests.get(url)
    local_path = './data/%s.gz' % md5
    cache_entry = FileCache(local_path, 'b')
    # only write the archive to disk the first time we see it
    if not cache_entry.exist():
        cache_entry.set(response.content)
    with gzip.open(local_path, 'rb') as archive:
        header_parsed = False
        for raw in archive:
            raw = raw.strip().rstrip(b',')
            # ']}' closes the JSON document - nothing follows it
            if raw == b']}':
                break
            if header_parsed:
                self.raw_print(json.loads(raw))
            else:
                # re-close the truncated header so it parses as JSON
                info = json.loads(raw + b']}')
                print('info: ', info)
                header_parsed = True
def __init__(self, server, cache_dir, quick=False):
    """Bind the target server, a disk cache, and an HTTP session.

    quick toggles the compact output mode used by the string renderers.
    """
    self.quick = quick
    self.server = server
    self.cache = FileCache(cache_dir)
    # parsed lxml trees, keyed by request type+append
    self.lxmlCache = {}
    # use session to reuse the connection
    self.requests = requests.Session()
def __init__(self, req_queue, res_queues, cache_size):
    """Wire up request/response queues, the handler table, store and cache."""
    self.req_queue = req_queue
    self.res_queues = res_queues
    # HTTP-verb -> handler dispatch table
    self.handlers = dict(
        get=self._get_handler,
        post=self._post_handler,
        put=self._put_handler,
        delete=self._del_handler,
    )
    self.fm = FileManager(STORE_DIR)
    self.cache = FileCache(cache_size)
    super(RequestExec, self).__init__()
def test_graph_creation():
    """A Graph for a known profile id must build without raising."""
    login, passwd = get_login_password()
    from vkwrapper import Vk
    from graphs import Graph
    vk_client = Vk(cache=FileCache(), login=login, password=passwd)
    g = Graph(vk_client, "238696131")
def test_cache_flush():
    """flush() must drop every previously stored key."""
    cache = FileCache()
    cache.add("123", ["1", "2"])
    cache.add("326", ["3", "2"])
    cache.flush()
    assert cache.contains("123") == False
    assert cache.contains("326") == False
def test_graph_get_community_labels():
    """get_community_labels() must run on a dense friendship graph."""
    login, passwd = get_login_password()
    from vkwrapper import Vk
    from graphs import Graph
    vk_client = Vk(cache=FileCache(), login=login, password=passwd)
    Graph(vk_client, "238696131").get_community_labels()
def test_graph_duplicating_edges():
    """The built graph must be simple (no duplicate edges / self-loops)."""
    login, passwd = get_login_password()
    from vkwrapper import Vk
    from graphs import Graph
    vk_client = Vk(cache=FileCache(), login=login, password=passwd)
    graph = Graph(vk_client, "238696131")
    assert graph.g.is_simple() == True
def test__graph_get_community_labels_sparse_graph():
    """get_community_labels() must also run on a sparse friendship graph."""
    login, passwd = get_login_password()
    from vkwrapper import Vk
    from graphs import Graph
    vk_client = Vk(cache=FileCache(), login=login, password=passwd)
    Graph(vk_client, "148907612").get_community_labels()
def test_vk_auth():
    """An authenticated client returns friend lists of string ids."""
    login, passwd = get_login_password()
    from vkwrapper import Vk
    client = Vk(cache=FileCache(), login=login, password=passwd)
    friends_a = client.get_friends("525008285")
    friends_b = client.get_friends("288999853")
    # just type checks
    assert isinstance(friends_a, list)
    assert isinstance(friends_b, list)
    assert isinstance(friends_a[0], str)
    assert isinstance(friends_b[-1], str)
class Api(object):
    """Client for the OGame public XML API (players / alliances / highscore).

    Responses are cached on disk via FileCache; the big shared files are
    additionally memoised as parsed lxml trees in self.lxmlCache.
    """

    # server hostname -> ogniter.org universe id (used to build deep links)
    ogniter_mapping = {'uni117.ogame.de': 136}
    # index in this list == the numeric highscore "type" of the API
    highscore_type_to_name = [
        "Total", "Economy", "Research", "Military", "Military Lost",
        "Military Built", "Military Destr.", "Honor"
    ]

    def __init__(self, server, cache_dir, quick=False):
        # server: API hostname; cache_dir: FileCache root;
        # quick: compact output mode for the string renderers
        self.server = server
        self.cache = FileCache(cache_dir)
        self.quick = quick
        # parsed lxml roots keyed by "<type><append>"
        self.lxmlCache = {}
        # use session to reuse the connection
        self.requests = requests.Session()

    def _doApiRequest(self, type, append=""):
        """Return ((need_download, type, append), xml_text) for one API file.

        Serves the cached copy while it is younger than the per-type
        refresh interval, otherwise downloads and re-caches it.
        """
        type_to_update_intervall = {
            'playerData': timedelta(days=7),
            'alliances': timedelta(days=1),
            'players': timedelta(days=1),
            'highscore': timedelta(hours=1),
        }
        api_data = self.cache.lookup(self.server + "_" + type + append)
        need_download = False
        if not api_data:
            # NOTE(review): the %s placeholder is never substituted here
            logger.info("Need download because %s is not cached")
            need_download = True
        else:
            try:
                # exception when response is "player not found"
                timestamp = int(textextract(api_data, 'timestamp="', '"'))
            except:
                # fall back to the cache file's modification time
                timestamp = os.path.getmtime(
                    self.cache.get_path(self.server + "_" + type + append))
            timestamp = datetime.fromtimestamp(timestamp)
            if timestamp + type_to_update_intervall[type] < datetime.now():
                logger.info("Need download because %s is more than 12h old" %
                            (self.server + "_" + type))
                need_download = True
        if need_download:
            r = self.requests.get('http://' + self.server + '/api/' + type +
                                  '.xml' + append)
            self.cache.write(self.server + "_" + type + append, r.text)
            api_data = r.text
        return (need_download, type, append), api_data

    # call this if you want to cache many requests - works only for playerData
    # append must be an array
    def _doApiRequestAsync(self, type, appendList=[]):
        """Prefetch several API files concurrently into the disk cache."""
        # also the newest grequests doesn't work with gevent >= 1.x - but
        # don't know how to check this here
        try:
            import grequests
        except:
            # silently degrade: callers fetch synchronously later
            return
        type_to_update_intervall = {
            'playerData': timedelta(days=7),
        }
        urlList = []
        for append in appendList:
            api_data = self.cache.lookup(self.server + "_" + type + append)
            need_download = False
            if not api_data:
                # NOTE(review): the %s placeholder is never substituted here
                logger.info("Need download because %s is not cached")
                need_download = True
            else:
                try:
                    # exception when response is "player not found"
                    timestamp = int(textextract(api_data, 'timestamp="', '"'))
                except:
                    timestamp = int(time.time())
                timestamp = datetime.fromtimestamp(timestamp)
                if timestamp + type_to_update_intervall[type] < datetime.now():
                    logger.info(
                        "Need download because %s is more than 12h old" %
                        (self.server + "_" + type))
                    need_download = True
            if need_download:
                urlList.append('http://' + self.server + '/api/' + type +
                               '.xml' + append)
        if len(urlList):
            rs = (grequests.get(u, session=self.requests, timeout=3)
                  for u in urlList)
            responses = grequests.map(rs)
            for r in responses:
                if r:
                    # recover the "?id=..." query part from the final URL
                    append = textextract(r.url, type + ".xml", "")
                    self.cache.write(self.server + "_" + type + append, r.text)

    def _getLxmlRoot(self, apiRequestData):
        """Parse a _doApiRequest() result into an lxml root element.

        The three big shared files are memoised in self.lxmlCache.
        """
        need_download = apiRequestData[0][0]
        type = apiRequestData[0][1]
        append = apiRequestData[0][2]
        data = apiRequestData[1]
        if type == "highscore" or type == "players" or type == "alliances":
            if not need_download:
                try:
                    # serve the memoised tree when nothing was re-downloaded
                    return self.lxmlCache[type + append]
                except:
                    pass
            else:
                data = bytes(bytearray(data, encoding="utf-8"))
                self.lxmlCache[type + append] = etree.fromstring(data)
                return self.lxmlCache[type + append]
        # lxml seems to have a problem with unicode-strings so do this
        # strange conversion
        data = bytes(bytearray(data, encoding="utf-8"))
        return etree.fromstring(data)

    def _findMatch(self, elements, attr, name):
        """Return (element, similarity) pairs sorted best-first.

        Similarity is the Levenshtein ratio between *name* and each
        element's *attr* value, case-insensitively.
        """
        name = name.lower()
        all_names = []
        for el in elements:
            all_names.append((el,
                              Levenshtein.ratio(
                                  el.get(attr).decode("utf-8").lower(),
                                  name)))
        return sorted(all_names, key=lambda x: x[1], reverse=True)

    def getPlayerInfo(self, id=False, name=False, addPositionInfo=True,
                      addStatusInfo=True):
        """Look up one player by id or fuzzy name.

        Returns (True, info_dict) on success or (False, reason) on failure.
        """
        sim = 1.0
        if not id:
            # resolve the name against players.xml via fuzzy matching
            root = self._getLxmlRoot(self._doApiRequest("players"))
            el, sim = self._findMatch(root.findall(".//player"), "name",
                                      name)[0]
            if sim == 0.0:
                return (False, "No match")
            id = int(el.get("id"))
        data, playerData = self._doApiRequest("playerData", "?id=%d" % id)
        if playerData == "Player not found.":
            return (False, "Player not found.")
        root = self._getLxmlRoot((data, playerData))
        player_info = {}
        dataEl = root
        player_info["name"] = dataEl.get("name")
        player_info["sim"] = sim
        player_info["id"] = int(dataEl.get("id"))
        player_info["serverId"] = dataEl.get("serverId")
        player_info["timestamp"] = dataEl.get("timestamp")
        if addPositionInfo:
            position = {}
            # position info is outdated - highscore.xml gets updated every hour - so better use this
            #for posEl in root.findall(".//position"):
            #    if posEl.text is None:
            #        return (False, "This player has no highscore - either he is gamemaster, or banned")
            #    posType = int(posEl.get("type"))
            #    position[posType] = {
            #        "position":int(posEl.text),
            #        "score":int(posEl.get("score")),
            #    }
            #    if posType == 3:
            #        if posEl.get("ships"):
            #            position[posType]["ships"] = int(posEl.get("ships"))
            #        else:
            #            position[posType]["ships"] = 0
            for posType in (0, 1, 2, 3, 4, 5, 6, 7):
                highscoreRoot = self._getLxmlRoot(
                    self._doApiRequest("highscore",
                                       "?category=1&type=" + str(posType)))
                posEl = highscoreRoot.find(".//player[@id='%d']" % id)
                if posEl is None:
                    return (
                        False,
                        "This player has no highscore - either he is gamemaster, or banned"
                    )
                position[posType] = {
                    "position": int(posEl.get("position")),
                    "score": int(posEl.get("score")),
                }
                if posType == 3:
                    # type 3 entries additionally carry a ships attribute
                    if posEl.get("ships"):
                        position[posType]["ships"] = int(posEl.get("ships"))
                    else:
                        position[posType]["ships"] = 0
            player_info["position"] = position
            planets = []
            for planEl in root.findall(".//planet"):
                moonName = ""
                moonSize = 0
                moon = planEl.find(".//moon")
                if moon is not None:
                    moonName = moon.get("name")
                    moonSize = int(moon.get("size"))
                planets.append((planEl.get("coords"), planEl.get("name"),
                                planEl.get("id"), moonName, moonSize))
            player_info["planets"] = planets
            ally = root.findall(".//alliance")
            if len(ally) == 0:
                player_info["ally"] = False
                player_info["allianceId"] = 0
            else:
                ally = ally[0]
                tag = ally.find(".//tag").text
                name = ally.find(".//name").text
                player_info["ally"] = {
                    "id": int(ally.get("id")),
                    "tag": tag,
                    "name": name,
                }
                player_info["allianceId"] = int(ally.get("id"))
        # to get the playerstatus we have to retrieve the players.xml :/
        if addStatusInfo:
            playersRoot = self._getLxmlRoot(self._doApiRequest("players"))
            playerEl = playersRoot.find("player[@id='%d']" % id)
            player_info["status"] = playerEl.get("status")
            if not player_info["status"]:
                player_info["status"] = ""
        return (True, player_info)

    def getAllianceInfo(self, id=False, tag=False):
        """Look up one alliance by id or fuzzy tag; returns (ok, info)."""
        sim = 1.0
        if not id:
            root = self._getLxmlRoot(self._doApiRequest("alliances"))
            el, sim = self._findMatch(root.findall(".//alliance"), "tag",
                                      tag)[0]
            if sim == 0.0:
                return (False, "No match")
            id = int(el.get("id"))
        else:
            root = self._getLxmlRoot(self._doApiRequest("alliances"))
            el = root.find(".//alliance[@id='%d']" % id)
        alliance_info = {
            "name": el.get("name"),
            "tag": el.get("tag"),
            "sim": sim,
            "id": int(el.get("id")),
            "homepage": el.get("homepage"),
            "logo": el.get("logo"),
            "open": bool(el.get("open")),
            "serverId": root.get("serverId"),
            "timestamp": root.get("timestamp"),
        }
        players = []
        for playerEl in el.findall(".//player"):
            players.append(int(playerEl.get("id")))
        alliance_info["players"] = players
        return (True, alliance_info)

    def listPlayers(self):
        """Return (timestamp, [{id, status}, ...]) from players.xml."""
        root = self._getLxmlRoot(self._doApiRequest("players"))
        allEls = root.findall(".//player")
        ret = []
        for el in allEls:
            status = el.get("status")
            if status is None:
                # missing attribute is normalised to an empty string
                status = ""
            ret.append({
                "id": int(el.get("id")),
                "status": status,
            })
        return (int(root.get("timestamp")), ret)

    def listHighscore(self, posType):
        """Return (timestamp, {player_id: {position, score[, ships]}})."""
        root = self._getLxmlRoot(
            self._doApiRequest("highscore",
                               "?category=1&type=" + str(posType)))
        allPlayer = root.findall(".//player")
        ret = {}
        for posEl in allPlayer:
            position = {
                "position": int(posEl.get("position")),
                "score": int(posEl.get("score")),
            }
            if posType == 3:
                # type 3 entries additionally carry a ships attribute
                if posEl.get("ships"):
                    position["ships"] = int(posEl.get("ships"))
                else:
                    position["ships"] = 0
            ret[int(posEl.get("id"))] = position
        return (int(root.get("timestamp")), ret)

    def listAlliances(self):
        """Return (timestamp, [alliance_dict, ...]) from alliances.xml."""
        root = self._getLxmlRoot(self._doApiRequest("alliances"))
        allEls = root.findall(".//alliance")
        ret = []
        for el in allEls:
            ret.append({
                "name": el.get("name"),
                "tag": el.get("tag"),
                "id": int(el.get("id")),
                "homepage": el.get("homepage"),
                "logo": el.get("logo"),
                "open": bool(el.get("open")),
            })
        return (int(root.get("timestamp")), ret)

    def findPlayer(self, name, find, justMatch=False):
        """List the *find* best fuzzy matches for *name*.

        With justMatch=True, immediately return (id, name, sim) of the best.
        """
        retStr = []
        root = self._getLxmlRoot(self._doApiRequest("players"))
        matches = self._findMatch(root.findall(".//player"), "name",
                                  name.strip())
        for i in range(0, find):
            el, sim = matches[i]
            if justMatch:
                return int(el.get("id")), el.get("name"), sim
            retStr.append("%s - %.2f" % (el.get("name"), sim))
            # quick mode packs two matches per line
            if not self.quick or i % 2 == 1:
                retStr.append("\n")
            else:
                retStr.append(" ")
        return retStr

    def findAlliance(self, tag, find):
        """List the *find* best fuzzy matches for the alliance *tag*."""
        retStr = []
        root = self._getLxmlRoot(self._doApiRequest("alliances"))
        matches = self._findMatch(root.findall(".//alliance"), "tag",
                                  tag.strip())
        for i in range(0, find):
            el, sim = matches[i]
            retStr.append("%s - %.2f" % (el.get("tag"), sim))
            # quick mode packs two matches per line
            if not self.quick or i % 2 == 1:
                retStr.append("\n")
            else:
                retStr.append(" ")
        return retStr

    def getPlayerString(self, name):
        """Render a human-readable summary of a player as a string list."""
        retStr = []
        (ret, player_info) = self.getPlayerInfo(name=name.strip())
        if not ret:
            # pass the failure reason straight through
            retStr.append(player_info)
            return retStr
        if player_info["sim"] != 1.0:
            retStr.append("%s - similarity:%.2f\n" %
                          (player_info["name"], player_info["sim"]))
        position = player_info["position"][0]
        if player_info["status"]:
            retStr.append("%s, " % player_info["status"])
        retStr.append("%04d/%d " % (position["position"], position["score"]))
        if self.server in self.ogniter_mapping:
            retStr.append(
                "http://www.ogniter.org/de/%d/player/%d" %
                (self.ogniter_mapping[self.server], int(player_info["id"])))
        retStr.append("\n")
        if not self.quick:
            if player_info["status"]:
                retStr.append("Status: %s\n" % player_info["status"])
            # two highscore categories per table row
            t = PrettyTable(
                ["Type", "Position", "Score", "Type2", "Position2", "Score2"])
            t.align["Type"] = "l"
            t.align["Position"] = "r"
            t.align["Score"] = "r"
            t.align["Type2"] = "l"
            t.align["Position2"] = "r"
            t.align["Score2"] = "r"
            for type in range(0, len(self.highscore_type_to_name), 2):
                t.add_row([
                    self.highscore_type_to_name[type],
                    player_info["position"][type]["position"],
                    player_info["position"][type]["score"],
                    self.highscore_type_to_name[type + 1],
                    player_info["position"][type + 1]["position"],
                    player_info["position"][type + 1]["score"]
                ])
            t.add_row([
                "ships", "", player_info["position"][3]["ships"],
                # defense is (economy+research+military)-total
                "defense", "",
                (player_info["position"][1]["score"] +
                 player_info["position"][2]["score"] +
                 player_info["position"][3]["score"]) -
                player_info["position"][0]["score"]
            ])
            t.set_style(11)
            # strip the leading padding column from each rendered line
            t_str = t.get_string(border=False, header=False,
                                 padding_width=1).split("\n")
            new_t_str = []
            for line in t_str:
                new_t_str.append(line[1:])
            retStr.append("\n".join(new_t_str) + "\n")
            t = PrettyTable(["Coord", "M", "Name"])
            t.align["Coord"] = "l"
            t.align["Moon"] = "l"
            t.align["Name"] = "l"
            for planet in player_info["planets"]:
                moonInfo = ""
                if planet[4]:
                    moonInfo = "M%d" % planet[4]
                t.add_row([planet[0], moonInfo, planet[1]])
            t.set_style(11)
            t_str = t.get_string(border=False, header=False, padding_width=1)
            # make the table horizontal wider
            tableRows = t_str.split("\n")
            if len(tableRows) > 1:
                # take lower half and append it to the upper half
                half = len(tableRows) / 2
                for i in range(0, half):
                    tableRows[i] += tableRows[i + 1]
                    del (tableRows[i + 1])
                t_str = "\n".join(tableRows)
            t_str = t_str.split("\n")
            new_t_str = []
            for line in t_str:
                new_t_str.append(line[1:])
            retStr.append("\n".join(new_t_str) + "\n")
        if not player_info["ally"]:
            retStr.append("No ally")
        else:
            retStr.append("%s - %s" % (player_info["ally"]["tag"],
                                       player_info["ally"]["name"]))
        return retStr

    def getAllianceString(self, tag):
        """Render a human-readable summary of an alliance as a string list."""
        retStr = []
        (ret, alliance_info) = self.getAllianceInfo(tag=tag.strip())
        if not ret:
            # pass the failure reason straight through
            retStr.append(alliance_info)
            return retStr
        if alliance_info["sim"] != 1.0:
            retStr.append("%s - similarity:%.2f\n" %
                          (alliance_info["tag"], alliance_info["sim"]))
        for i in ["name", "homepage", "logo", "open"]:
            if alliance_info[i]:
                retStr.append("%s: %s " % (i, alliance_info[i]))
                if not self.quick:
                    retStr.append("\n")
        if self.server in self.ogniter_mapping:
            retStr.append(
                "http://www.ogniter.org/de/%d/alliance/%d " %
                (self.ogniter_mapping[self.server], alliance_info["id"]))
            if not self.quick:
                retStr.append("\n")
        if self.quick:
            retStr.append("players: %d\n" % (len(alliance_info["players"])))
        players = []
        for playerId in alliance_info["players"]:
            (ret, player_info) = self.getPlayerInfo(id=playerId)
            if ret:
                players.append((player_info["name"],
                                player_info["position"][0]["position"],
                                player_info))
        # sort members by their total-highscore position
        players = sorted(players, key=lambda x: x[1])
        i = 0
        if self.quick:
            # quick mode: at most 4 members, two per line
            for player in players:
                i += 1
                retStr.append("%s %s %d %s " %
                              (str(i), player[0], player[1],
                               player[2]["planets"][0][0]))
                if i % 2 == 0:
                    retStr.append("\n")
                if i == 4:
                    break
        else:
            for player in players:
                i += 1
                retStr.append("%s %s %d %s\n" %
                              (str(i).ljust(2), player[0], player[1],
                               player[2]["planets"][0][0]))
        return retStr
import logging import pickle import swagger_client import time from datetime import datetime from ratelimit import limits, sleep_and_retry from swagger_client.rest import ApiException from pprint import pprint import logger from api_key import API_KEY from cache import FileCache from counter import Counter LOGGER = logger.create_logger(__name__, level=logging.DEBUG) CACHE = FileCache(cache_root="cache", logger=LOGGER) REQUEST_COUNTER = Counter(counter_path="request-counter.txt", logger=LOGGER) API_CLIENT_CONFIGURATION = swagger_client.Configuration() API_CLIENT_CONFIGURATION.api_key = {'api_key': API_KEY} API_CLIENT = swagger_client.ApiClient(configuration=API_CLIENT_CONFIGURATION) MATCHES_API = swagger_client.MatchesApi(api_client=API_CLIENT) PLAYERS_API = swagger_client.PlayersApi(api_client=API_CLIENT) def counted_request(request, *args, **kwargs): try: response = request(*args, **kwargs) REQUEST_COUNTER.increment() return response except ApiException as e: # 5XX responses do not increment request count
class Api(object):
    """Client for the OGame public XML API (players / alliances / highscore).

    Responses are cached on disk via FileCache; the big shared files are
    additionally memoised as parsed lxml trees in self.lxmlCache.
    """

    # server hostname -> ogniter.org universe id (used to build deep links)
    ogniter_mapping = {'uni117.ogame.de': 136}
    # index in this list == the numeric highscore "type" of the API
    highscore_type_to_name = [
        "Total", "Economy", "Research", "Military", "Military Lost",
        "Military Built", "Military Destr.", "Honor"
    ]

    def __init__(self, server, cache_dir, quick=False):
        # server: API hostname; cache_dir: FileCache root;
        # quick: compact output mode for the string renderers
        self.server = server
        self.cache = FileCache(cache_dir)
        self.quick = quick
        # parsed lxml roots keyed by "<type><append>"
        self.lxmlCache = {}
        # use session to reuse the connection
        self.requests = requests.Session()

    def _doApiRequest(self, type, append=""):
        """Return ((need_download, type, append), xml_text) for one API file.

        Serves the cached copy while it is younger than the per-type
        refresh interval, otherwise downloads and re-caches it.
        """
        type_to_update_intervall = {
            'playerData': timedelta(days=7),
            'alliances': timedelta(days=1),
            'players': timedelta(days=1),
            'highscore': timedelta(hours=1),
        }
        api_data = self.cache.lookup(self.server + "_" + type + append)
        need_download = False
        if not api_data:
            # NOTE(review): the %s placeholder is never substituted here
            logger.info("Need download because %s is not cached")
            need_download = True
        else:
            try:
                # exception when response is "player not found"
                timestamp = int(textextract(api_data, 'timestamp="', '"'))
            except:
                # fall back to the cache file's modification time
                timestamp = os.path.getmtime(
                    self.cache.get_path(self.server + "_" + type + append))
            timestamp = datetime.fromtimestamp(timestamp)
            if timestamp + type_to_update_intervall[type] < datetime.now():
                logger.info("Need download because %s is more than 12h old" %
                            (self.server + "_" + type))
                need_download = True
        if need_download:
            r = self.requests.get('http://' + self.server + '/api/' + type +
                                  '.xml' + append)
            self.cache.write(self.server + "_" + type + append, r.text)
            api_data = r.text
        return (need_download, type, append), api_data

    # call this if you want to cache many requests - works only for playerData
    # append must be an array
    def _doApiRequestAsync(self, type, appendList=[]):
        """Prefetch several API files concurrently into the disk cache."""
        # also the newest grequests doesn't work with gevent >= 1.x - but
        # don't know how to check this here
        try:
            import grequests
        except:
            # silently degrade: callers fetch synchronously later
            return
        type_to_update_intervall = {
            'playerData': timedelta(days=7),
        }
        urlList = []
        for append in appendList:
            api_data = self.cache.lookup(self.server + "_" + type + append)
            need_download = False
            if not api_data:
                # NOTE(review): the %s placeholder is never substituted here
                logger.info("Need download because %s is not cached")
                need_download = True
            else:
                try:
                    # exception when response is "player not found"
                    timestamp = int(textextract(api_data, 'timestamp="', '"'))
                except:
                    timestamp = int(time.time())
                timestamp = datetime.fromtimestamp(timestamp)
                if timestamp + type_to_update_intervall[type] < datetime.now():
                    logger.info(
                        "Need download because %s is more than 12h old" %
                        (self.server + "_" + type))
                    need_download = True
            if need_download:
                urlList.append('http://' + self.server + '/api/' + type +
                               '.xml' + append)
        if len(urlList):
            rs = (grequests.get(u, session=self.requests, timeout=3)
                  for u in urlList)
            responses = grequests.map(rs)
            for r in responses:
                if r:
                    # recover the "?id=..." query part from the final URL
                    append = textextract(r.url, type + ".xml", "")
                    self.cache.write(self.server + "_" + type + append, r.text)

    def _getLxmlRoot(self, apiRequestData):
        """Parse a _doApiRequest() result into an lxml root element.

        The three big shared files are memoised in self.lxmlCache.
        """
        need_download = apiRequestData[0][0]
        type = apiRequestData[0][1]
        append = apiRequestData[0][2]
        data = apiRequestData[1]
        if type == "highscore" or type == "players" or type == "alliances":
            if not need_download:
                try:
                    # serve the memoised tree when nothing was re-downloaded
                    return self.lxmlCache[type + append]
                except:
                    pass
            else:
                data = bytes(bytearray(data, encoding="utf-8"))
                self.lxmlCache[type + append] = etree.fromstring(data)
                return self.lxmlCache[type + append]
        # lxml seems to have a problem with unicode-strings so do this
        # strange conversion
        data = bytes(bytearray(data, encoding="utf-8"))
        return etree.fromstring(data)

    def _findMatch(self, elements, attr, name):
        """Return (element, similarity) pairs sorted best-first.

        Similarity is the Levenshtein ratio between *name* and each
        element's *attr* value, case-insensitively.
        """
        name = name.lower()
        all_names = []
        for el in elements:
            all_names.append((el,
                              Levenshtein.ratio(
                                  el.get(attr).decode("utf-8").lower(),
                                  name)))
        return sorted(all_names, key=lambda x: x[1], reverse=True)

    def getPlayerInfo(self, id=False, name=False, addPositionInfo=True,
                      addStatusInfo=True):
        """Look up one player by id or fuzzy name.

        Returns (True, info_dict) on success or (False, reason) on failure.
        """
        sim = 1.0
        if not id:
            # resolve the name against players.xml via fuzzy matching
            root = self._getLxmlRoot(self._doApiRequest("players"))
            el, sim = self._findMatch(root.findall(".//player"), "name",
                                      name)[0]
            if sim == 0.0:
                return (False, "No match")
            id = int(el.get("id"))
        data, playerData = self._doApiRequest("playerData", "?id=%d" % id)
        if playerData == "Player not found.":
            return (False, "Player not found.")
        root = self._getLxmlRoot((data, playerData))
        player_info = {}
        dataEl = root
        player_info["name"] = dataEl.get("name")
        player_info["sim"] = sim
        player_info["id"] = int(dataEl.get("id"))
        player_info["serverId"] = dataEl.get("serverId")
        player_info["timestamp"] = dataEl.get("timestamp")
        if addPositionInfo:
            position = {}
            # position info is outdated - highscore.xml gets updated every hour - so better use this
            #for posEl in root.findall(".//position"):
            #    if posEl.text is None:
            #        return (False, "This player has no highscore - either he is gamemaster, or banned")
            #    posType = int(posEl.get("type"))
            #    position[posType] = {
            #        "position":int(posEl.text),
            #        "score":int(posEl.get("score")),
            #    }
            #    if posType == 3:
            #        if posEl.get("ships"):
            #            position[posType]["ships"] = int(posEl.get("ships"))
            #        else:
            #            position[posType]["ships"] = 0
            for posType in (0, 1, 2, 3, 4, 5, 6, 7):
                highscoreRoot = self._getLxmlRoot(
                    self._doApiRequest("highscore",
                                       "?category=1&type=" + str(posType)))
                posEl = highscoreRoot.find(".//player[@id='%d']" % id)
                if posEl is None:
                    return (
                        False,
                        "This player has no highscore - either he is gamemaster, or banned"
                    )
                position[posType] = {
                    "position": int(posEl.get("position")),
                    "score": int(posEl.get("score")),
                }
                if posType == 3:
                    # type 3 entries additionally carry a ships attribute
                    if posEl.get("ships"):
                        position[posType]["ships"] = int(posEl.get("ships"))
                    else:
                        position[posType]["ships"] = 0
            player_info["position"] = position
            planets = []
            for planEl in root.findall(".//planet"):
                moonName = ""
                moonSize = 0
                moon = planEl.find(".//moon")
                if moon is not None:
                    moonName = moon.get("name")
                    moonSize = int(moon.get("size"))
                planets.append((planEl.get("coords"), planEl.get("name"),
                                planEl.get("id"), moonName, moonSize))
            player_info["planets"] = planets
            ally = root.findall(".//alliance")
            if len(ally) == 0:
                player_info["ally"] = False
                player_info["allianceId"] = 0
            else:
                ally = ally[0]
                tag = ally.find(".//tag").text
                name = ally.find(".//name").text
                player_info["ally"] = {
                    "id": int(ally.get("id")),
                    "tag": tag,
                    "name": name,
                }
                player_info["allianceId"] = int(ally.get("id"))
        # to get the playerstatus we have to retrieve the players.xml :/
        if addStatusInfo:
            playersRoot = self._getLxmlRoot(self._doApiRequest("players"))
            playerEl = playersRoot.find("player[@id='%d']" % id)
            player_info["status"] = playerEl.get("status")
            if not player_info["status"]:
                player_info["status"] = ""
        return (True, player_info)

    def getAllianceInfo(self, id=False, tag=False):
        """Look up one alliance by id or fuzzy tag; returns (ok, info)."""
        sim = 1.0
        if not id:
            root = self._getLxmlRoot(self._doApiRequest("alliances"))
            el, sim = self._findMatch(root.findall(".//alliance"), "tag",
                                      tag)[0]
            if sim == 0.0:
                return (False, "No match")
            id = int(el.get("id"))
        else:
            root = self._getLxmlRoot(self._doApiRequest("alliances"))
            el = root.find(".//alliance[@id='%d']" % id)
        alliance_info = {
            "name": el.get("name"),
            "tag": el.get("tag"),
            "sim": sim,
            "id": int(el.get("id")),
            "homepage": el.get("homepage"),
            "logo": el.get("logo"),
            "open": bool(el.get("open")),
            "serverId": root.get("serverId"),
            "timestamp": root.get("timestamp"),
        }
        players = []
        for playerEl in el.findall(".//player"):
            players.append(int(playerEl.get("id")))
        alliance_info["players"] = players
        return (True, alliance_info)

    def listPlayers(self):
        """Return (timestamp, [{id, status}, ...]) from players.xml."""
        root = self._getLxmlRoot(self._doApiRequest("players"))
        allEls = root.findall(".//player")
        ret = []
        for el in allEls:
            status = el.get("status")
            if status is None:
                # missing attribute is normalised to an empty string
                status = ""
            ret.append({
                "id": int(el.get("id")),
                "status": status,
            })
        return (int(root.get("timestamp")), ret)

    def listHighscore(self, posType):
        """Return (timestamp, {player_id: {position, score[, ships]}})."""
        root = self._getLxmlRoot(
            self._doApiRequest("highscore",
                               "?category=1&type=" + str(posType)))
        allPlayer = root.findall(".//player")
        ret = {}
        for posEl in allPlayer:
            position = {
                "position": int(posEl.get("position")),
                "score": int(posEl.get("score")),
            }
            if posType == 3:
                # type 3 entries additionally carry a ships attribute
                if posEl.get("ships"):
                    position["ships"] = int(posEl.get("ships"))
                else:
                    position["ships"] = 0
            ret[int(posEl.get("id"))] = position
        return (int(root.get("timestamp")), ret)

    def listAlliances(self):
        """Return (timestamp, [alliance_dict, ...]) from alliances.xml."""
        root = self._getLxmlRoot(self._doApiRequest("alliances"))
        allEls = root.findall(".//alliance")
        ret = []
        for el in allEls:
            ret.append({
                "name": el.get("name"),
                "tag": el.get("tag"),
                "id": int(el.get("id")),
                "homepage": el.get("homepage"),
                "logo": el.get("logo"),
                "open": bool(el.get("open")),
            })
        return (int(root.get("timestamp")), ret)

    def findPlayer(self, name, find, justMatch=False):
        """List the *find* best fuzzy matches for *name*.

        With justMatch=True, immediately return (id, name, sim) of the best.
        """
        retStr = []
        root = self._getLxmlRoot(self._doApiRequest("players"))
        matches = self._findMatch(root.findall(".//player"), "name",
                                  name.strip())
        for i in range(0, find):
            el, sim = matches[i]
            if justMatch:
                return int(el.get("id")), el.get("name"), sim
            retStr.append("%s - %.2f" % (el.get("name"), sim))
            # quick mode packs two matches per line
            if not self.quick or i % 2 == 1:
                retStr.append("\n")
            else:
                retStr.append(" ")
        return retStr

    def findAlliance(self, tag, find):
        """List the *find* best fuzzy matches for the alliance *tag*."""
        retStr = []
        root = self._getLxmlRoot(self._doApiRequest("alliances"))
        matches = self._findMatch(root.findall(".//alliance"), "tag",
                                  tag.strip())
        for i in range(0, find):
            el, sim = matches[i]
            retStr.append("%s - %.2f" % (el.get("tag"), sim))
            # quick mode packs two matches per line
            if not self.quick or i % 2 == 1:
                retStr.append("\n")
            else:
                retStr.append(" ")
        return retStr

    def getPlayerString(self, name):
        """Render a human-readable summary of a player as a string list."""
        retStr = []
        (ret, player_info) = self.getPlayerInfo(name=name.strip())
        if not ret:
            # pass the failure reason straight through
            retStr.append(player_info)
            return retStr
        if player_info["sim"] != 1.0:
            retStr.append("%s - similarity:%.2f\n" %
                          (player_info["name"], player_info["sim"]))
        position = player_info["position"][0]
        if player_info["status"]:
            retStr.append("%s, " % player_info["status"])
        retStr.append("%04d/%d " % (position["position"], position["score"]))
        if self.server in self.ogniter_mapping:
            retStr.append(
                "http://www.ogniter.org/de/%d/player/%d" %
                (self.ogniter_mapping[self.server], int(player_info["id"])))
        retStr.append("\n")
        if not self.quick:
            if player_info["status"]:
                retStr.append("Status: %s\n" % player_info["status"])
            # two highscore categories per table row
            t = PrettyTable(
                ["Type", "Position", "Score", "Type2", "Position2", "Score2"])
            t.align["Type"] = "l"
            t.align["Position"] = "r"
            t.align["Score"] = "r"
            t.align["Type2"] = "l"
            t.align["Position2"] = "r"
            t.align["Score2"] = "r"
            for type in range(0, len(self.highscore_type_to_name), 2):
                t.add_row([
                    self.highscore_type_to_name[type],
                    player_info["position"][type]["position"],
                    player_info["position"][type]["score"],
                    self.highscore_type_to_name[type + 1],
                    player_info["position"][type + 1]["position"],
                    player_info["position"][type + 1]["score"]
                ])
            t.add_row([
                "ships", "", player_info["position"][3]["ships"],
                # defense is (economy+research+military)-total
                "defense", "",
                (player_info["position"][1]["score"] +
                 player_info["position"][2]["score"] +
                 player_info["position"][3]["score"]) -
                player_info["position"][0]["score"]
            ])
            t.set_style(11)
            # strip the leading padding column from each rendered line
            t_str = t.get_string(border=False, header=False,
                                 padding_width=1).split("\n")
            new_t_str = []
            for line in t_str:
                new_t_str.append(line[1:])
            retStr.append("\n".join(new_t_str) + "\n")
            t = PrettyTable(["Coord", "M", "Name"])
            t.align["Coord"] = "l"
            t.align["Moon"] = "l"
            t.align["Name"] = "l"
            for planet in player_info["planets"]:
                moonInfo = ""
                if planet[4]:
                    moonInfo = "M%d" % planet[4]
                t.add_row([planet[0], moonInfo, planet[1]])
            t.set_style(11)
            t_str = t.get_string(border=False, header=False, padding_width=1)
            # make the table horizontal wider
            tableRows = t_str.split("\n")
            if len(tableRows) > 1:
                # take lower half and append it to the upper half
                half = len(tableRows) / 2
                for i in range(0, half):
                    tableRows[i] += tableRows[i + 1]
                    del (tableRows[i + 1])
                t_str = "\n".join(tableRows)
            t_str = t_str.split("\n")
            new_t_str = []
            for line in t_str:
                new_t_str.append(line[1:])
            retStr.append("\n".join(new_t_str) + "\n")
        if not player_info["ally"]:
            retStr.append("No ally")
        else:
            retStr.append("%s - %s" % (player_info["ally"]["tag"],
                                       player_info["ally"]["name"]))
        return retStr

    def getAllianceString(self, tag):
        """Render a human-readable summary of an alliance as a string list."""
        retStr = []
        (ret, alliance_info) = self.getAllianceInfo(tag=tag.strip())
        if not ret:
            # pass the failure reason straight through
            retStr.append(alliance_info)
            return retStr
        if alliance_info["sim"] != 1.0:
            retStr.append("%s - similarity:%.2f\n" %
                          (alliance_info["tag"], alliance_info["sim"]))
        for i in ["name", "homepage", "logo", "open"]:
            if alliance_info[i]:
                retStr.append("%s: %s " % (i, alliance_info[i]))
                if not self.quick:
                    retStr.append("\n")
        if self.server in self.ogniter_mapping:
            retStr.append(
                "http://www.ogniter.org/de/%d/alliance/%d " %
                (self.ogniter_mapping[self.server], alliance_info["id"]))
            if not self.quick:
                retStr.append("\n")
        if self.quick:
            retStr.append("players: %d\n" % (len(alliance_info["players"])))
        players = []
        for playerId in alliance_info["players"]:
            (ret, player_info) = self.getPlayerInfo(id=playerId)
            if ret:
                players.append((player_info["name"],
                                player_info["position"][0]["position"],
                                player_info))
        # sort members by their total-highscore position
        players = sorted(players, key=lambda x: x[1])
        i = 0
        if self.quick:
            # quick mode: at most 4 members, two per line
            for player in players:
                i += 1
                retStr.append("%s %s %d %s " %
                              (str(i), player[0], player[1],
                               player[2]["planets"][0][0]))
                if i % 2 == 0:
                    retStr.append("\n")
                if i == 4:
                    break
        else:
            for player in players:
                i += 1
                retStr.append("%s %s %d %s\n" %
                              (str(i).ljust(2), player[0], player[1],
                               player[2]["planets"][0][0]))
        return retStr
sys.path.append('./python/') from gen_png import makePNG from cache import FileCache my_port = 5050 if len(sys.argv) > 1: my_port = sys.argv[1] static_file_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'public') app = Flask(__name__, static_url_path='', template_folder='public') app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0 cache = FileCache(16) @app.route('/public/<path:path>', methods=['GET']) def serve_file_in_dir(path): if not os.path.isfile(os.path.join(static_file_dir, path)): path = os.path.join(path, 'index.html') return send_from_directory(static_file_dir, path) @app.route('/api/img/') def gen_image(): arg = request.args.get('filename') if arg in cache: png_bytes = cache.get(arg)
def test_cache_get():
    """get() returns the stored value; delete() removes the key."""
    cache = FileCache()
    cache.add("123", ["1", "2", "3"])
    assert cache.get("123") == ["1", "2", "3"]
    cache.delete("123")
    assert cache.contains("123") == False
# -*-- coding:utf-8 -*-- import random import TLSSigAPI from cache import FileCache sdk_app_id = 1 identifier = '' random = random.randint(10000, 99999) content_type = 'json' get_history_url = 'https://console.tim.qq.com/v4/open_msg_svc/get_history' private_key_path = 'D:\\work\\party-api\\vendor\\party\\party-config\\env\\stable\\qcloud\\private_key' public_key_path = 'D:\\work\\party-api\\vendor\\party\\party-config\\env\\stable\\qcloud\\public_key' cache_handle = FileCache('./signature') if cache_handle.exist(): user_sig = cache_handle.get() else: with open(private_key_path, 'r') as f: private_key = f.read() f.close() with open(public_key_path, 'r') as f: public_key = f.read() f.close() api = TLSSigAPI.TLSSigAPI(sdk_app_id, private_key, public_key) user_sig = api.gen_sig(identifier) cache_handle.set(user_sig)
def test__cache_contains_in_new_cache():
    """Entries written by one FileCache instance are visible to another."""
    first = FileCache()
    first.add("123", [])
    assert first.contains("123") == True
    second = FileCache()
    assert second.contains("123") == True
    first.flush()
    second.flush()
def test_cache_contains_2():
    """contains() flips to False after the key is deleted."""
    cache = FileCache()
    cache.add("123", [])
    assert cache.contains("123") == True
    cache.delete("123")
    assert cache.contains("123") == False
def test_cache_contains_1():
    """A fresh cache must not report an arbitrary key as present."""
    c = FileCache()
    # PEP 8 (E712): compare truth value directly instead of `== False`
    assert not c.contains("123")
def test_cache_smoke():
    """FileCache must be constructible with default arguments."""
    cache = FileCache()
class RequestExec(Process):
    """Worker process serving CRUD requests from a queue.

    Items live in a FileCache (LRU-style, with an 'in_disc' backup flag)
    and overflow to a FileManager-backed disc store. Responses are routed
    back through per-client queues in self.res_queues.
    """

    def __init__(self, req_queue, res_queues, cache_size):
        self.req_queue = req_queue
        self.res_queues = res_queues
        # HTTP-verb -> handler dispatch table
        self.handlers = {
            "get": self._get_handler,
            "post": self._post_handler,
            "put": self._put_handler,
            "delete": self._del_handler
        }
        self.fm = FileManager(STORE_DIR)
        self.cache = FileCache(cache_size)
        super(RequestExec, self).__init__()

    def _store_item(self, uid, body, in_disc):
        """Put *body* in the cache; spill the evicted entry to disc."""
        # create a new entry in the cache with
        # the 'in_disc' flag
        response, status = self.cache.put(uid, body, in_disc)
        # if the cache is full or size == 0
        if (status == res.CACHE_FULL_STATUS
                or status == res.CACHE_ZERO_SIZE_STATUS):
            self.fm.post(response["uid"], response["data"])

    def _get_handler(self, header, body):
        """Fetch an item, promoting disc hits back into the cache."""
        uid = header['uid']
        data, status = self.cache.get(uid)
        if (status == res.NOT_FOUND_STATUS):
            data, status = self.fm.get(uid)
            if (status == res.NOT_FOUND_STATUS):
                # missing everywhere - propagate the not-found result
                return data, status
            # the 'in_disc' flag is set to 1 because the
            # item was obtained from disc
            response, status = self.cache.put(uid, data, 1)
            # cache is full, have to back up
            # the LRU item in disc
            if (status == res.CACHE_FULL_STATUS):
                response, status = self.fm.post(response["uid"],
                                                response["data"])
            # but if the cache is zero size the item is
            # already in disc
            if (status == res.CACHE_ZERO_SIZE_STATUS):
                status = res.OK_STATUS
        return data, status

    def _post_handler(self, header, body):
        """Create a new item; it cannot be disc-backed yet."""
        uid = header['uid']
        # store the new item with the flag
        # 'in_disc' turn off because the item
        # can not be backed up yet (it's new)
        self._store_item(uid, body, 0)
        return res.build_successful({'id': uid})

    def _put_handler(self, header, body):
        """Update an existing item in cache and/or disc store."""
        uid = header['uid']
        response, status = self.cache.update(uid, body)
        # cache is zero size, then directly
        # store the new data, it could return an
        # error message if there were no such file
        if (status == res.CACHE_ZERO_SIZE_STATUS):
            return self.fm.put(uid, body)
        # the item was not in the cache
        if (status == res.NOT_FOUND_STATUS):
            if not self.fm.check(uid):
                return res.build_not_found_error()
        # store the update of the item with
        # the flag 'in_disc' turn on because
        # there is a copy of it in disc
        self._store_item(uid, body, 1)
        return res.build_successful({})

    def _del_handler(self, header, body):
        """Delete an item from the cache and, when needed, from disc."""
        uid = header['uid']
        response, status = self.cache.delete(uid)
        # if the entry it's backed up in disc
        # or if the entry was not there,
        # have to check in FileManager
        if (status == res.IN_DISC_STATUS or status == res.NOT_FOUND_STATUS):
            response, status = self.fm.delete(uid)
        return response, status

    def run(self):
        """Consume requests until a None sentinel is dequeued."""
        quit = False
        while not quit:
            # Remove request from queue
            # (req_header, req_body, pid, address)
            req = self.req_queue.get()
            if (req == None):
                # None is the shutdown sentinel
                quit = True
                continue
            header = req[0]
            body = req[1]
            pid = req[2]
            address = req[3]
            handler = self.handlers.get(header['method'].lower())
            res_body, res_status = handler(header, body)
            # route the response back to the requesting client's queue
            self.res_queues[pid].put((header, res_body, res_status, address))