def index_handler(page="index"): page = page.lower() data = utils.jsonapi(params, methods) #try: # Get HDD data for chart data['hdduse'] = utils.percentage(data['system.getDiskFreeSpace'], data['system.getDiskSize']) data['hddfree'] = 100-data['hdduse'] # Get RAM data for chart data['usedram'] = utils.percentage(data['system.getJavaMemoryUsage'], data['system.getJavaMemoryTotal']) data['unusedram'] = 100-data['usedram'] # Get RAM data for chart data['online'] = utils.percentage(data['getPlayerCount'], data['getPlayerLimit']) data['offline'] = 100-data['online'] # Try to load skins.db if not os.path.isfile('skins.db'): # Never made the DB before... skindb = {} else: with open('skins.db','r') as f: skindb = json.loads(f.read()) full_players = list(set(data['getPlayerNames'] + data['getOfflinePlayerNames'])) for player in data['getPlayerNames']: if player not in skindb: # Never generated before... utils.gen_skin(player,16) skindb[player] = time.time() print 'Generating skin for %s' % player else: # Assume their name is in the DB.. check times! diff = int(time.time()) - int(skindb[player]) if diff > 43200: utils.gen_skin(player,16) skindb[player] = time.time() with open('skins.db', 'w') as f: f.write(json.dumps(skindb,indent=4)) # Here, we pull news posts from the forums. This is derp but it'll work... 
posts, uri = [], 'http://forum.wonderfulplanet.net/index.php?forums/news/index.rss' forum = xmltodict.parse(urllib2.urlopen(uri).read())['rss']['channel'] for item in forum['item']: content = h().unescape(item['content:encoded']) if '...<br />' in content: content = content.split('...<br />',1)[0] content += ' [<a href="%s">Read more</a>]' % h().unescape(item['link']) posts.append({ 'title': h().unescape(item['title']), 'author': h().unescape(item['author']), 'date': h().unescape(item['pubDate']), 'link': h().unescape(item['link']), 'content': content }) #except: # return flask.abort(404) page += '.html' if os.path.isfile('templates/' + page): return flask.render_template(page, data=data, posts=posts) return flask.abort(404)
def __goforit(self):
    """Log in to the portal, fetch the channel list and (optionally) EPG data.

    Progress is reported through the optional ``self.__cb`` callback as dicts
    like ``{'pr': <percent>, 'str': <stage label>}``.

    Side effects: mutates session headers on self.__s and sets
    self.__char_set, self.__tv_list and self.__js.

    Raises:
        Exception("LoginFail"): the portal did not report Logged == 'true'.
        Exception("FetchFail"): a post-login request returned a non-200 status.

    NOTE(review): the source was whitespace-mangled; the nesting below is a
    reconstruction from token order -- confirm against upstream.
    """
    if self.__cb: self.__cb({'pr': 10, 'str': 'Session'})
    # First POST carries no credentials: the server answers with an AES key
    # ('challenge') and a session token ('ssbulsatapi') in the headers.
    r = self.__s.post(self.__URL_LOGIN, timeout=self.__t, headers=self.__UA)
    if r.status_code == requests.codes.ok:
        if self.__cb: self.__cb({'pr': 20, 'str': 'Session start'})
        self.__log_in['key'] = r.headers['challenge']
        self.__log_in['session'] = r.headers['ssbulsatapi']
        self.__s.headers.update({'SSBULSATAPI': self.__log_in['session']})
        # Zero-pad the password to the AES block size, encrypt with the
        # challenge key (ECB mode) and base64-encode for the login form.
        _text = self.__log_in['pw'] + (self.__BLOCK_SIZE - len(self.__log_in['pw']) % self.__BLOCK_SIZE) * '\0'
        enc = EN.AESModeOfOperationECB(self.__log_in['key'])
        self.__p_data['pass'][1] = base64.b64encode(enc.encrypt(_text))
        self.__log_dat(self.__log_in)
        self.__log_dat(self.__p_data)
        if self.__cb: self.__cb({'pr': 30, 'str': 'Login start'})
        # Second POST performs the actual login (multipart body via files=).
        r = self.__s.post(self.__URL_LOGIN, timeout=self.__t, headers=self.__UA, files=self.__p_data)
        self.__log_dat(r.request.headers)
        self.__log_dat(r.request.body)
        if r.status_code == requests.codes.ok:
            data = r.json()
            if data['Logged'] == 'true':
                self.__log_dat('Login ok')
                if self.__cb: self.__cb({'pr': 50, 'str': 'Login ok'})
                # CORS-style preflight before requesting the channel list.
                self.__s.headers.update({'Access-Control-Request-Method': 'POST'})
                self.__s.headers.update({'Access-Control-Request-Headers': 'ssbulsatapi'})
                r = self.__s.options(self.__URL_LIST, timeout=self.__t, headers=self.__UA)
                self.__log_dat(r.request.headers)
                self.__log_dat(r.headers)
                self.__log_dat(str(r.status_code))
                if self.__cb: self.__cb({'pr': 70, 'str': 'Fetch data'})
                r = self.__s.post(self.__URL_LIST, timeout=self.__t, headers=self.__UA)
                self.__log_dat(r.request.headers)
                self.__log_dat(r.headers)
                if r.status_code == requests.codes.ok:
                    # Charset taken from Content-Type, e.g. '...; charset=utf-8'.
                    self.__char_set = r.headers['content-type'].split('charset=')[1]
                    self.__log_dat('get data ok')
                    self.__tv_list = r.json()
                    self.__js = {}
                    self.__log_dat(self.__js)
                    if self.__cb: self.__cb({'pr': 90, 'str': 'Fetch data done'})
                    if self.__gen_epg:
                        # One extra request per channel for a week of EPG data.
                        for i, ch in enumerate(self.__tv_list):
                            if self.__cb:
                                self.__cb(
                                    {
                                        'pr': int((i * 100) / len(self.__tv_list)),
                                        'str': 'Fetch: %s' % ch['epg_name'].encode('utf-8'),
                                        'idx': i,
                                        'max': len(self.__tv_list)
                                    }
                                )
                            # Only channels already exposing a 'program' slot get EPG.
                            if ch.has_key('program'):
                                r = self.__s.post(self.__URL_EPG, timeout=self.__t, headers=self.__UA,
                                    data={
                                        #'epg': 'nownext',
                                        'epg': '1week',
                                        #'epg': '1day',
                                        'channel': ch['epg_name']
                                    }
                                )
                                if r.status_code == requests.codes.ok:
                                    ch['program'] = r.json().items()[0][1]['programme']
                # HTML-unescape the accumulated payload (Python 2 HTMLParser).
                from HTMLParser import HTMLParser as h
                self.__js = json.loads(h().unescape(json.dumps(self.__js).decode(self.__char_set)))
                self.__log_out()
                # r holds the last response issued above; any non-200 means
                # some stage of the fetch failed.
                if r.status_code != requests.codes.ok:
                    self.__log_dat('Error status code: %d' % (r.status_code, ))
                    raise Exception("FetchFail")
            else:
                raise Exception("LoginFail")
def __goforit(self):
    """Log in to the portal, fetch the channel list and (optionally) EPG data.

    Variant of __goforit with a debug channel-list dump (__DEBUG_EN) and an
    extra __gen_jd trigger for the per-channel fetch loop.

    Progress is reported through the optional ``self.__cb`` callback as dicts
    like ``{"pr": <percent>, "str": <stage label>}``.

    Side effects: mutates session headers on self.__s, sets self.__char_set,
    self.__tv_list and self.__js, and may write a "ch_dump" file.

    Raises:
        Exception("LoginFail"): the portal did not report Logged == "true".
        Exception("FetchFail"): a post-login request returned a non-200 status.

    NOTE(review): the source was whitespace-mangled; the nesting below is a
    reconstruction from token order -- confirm against upstream.
    """
    if self.__cb: self.__cb({"pr": 10, "str": "Session"})
    # First POST carries no credentials: the server answers with an AES key
    # ("challenge") and a session token ("ssbulsatapi") in the headers.
    r = self.__s.post(self.__URL_LOGIN, timeout=self.__t, headers=self.__UA)
    if r.status_code == requests.codes.ok:
        if self.__cb: self.__cb({"pr": 20, "str": "Session start"})
        self.__log_in["key"] = r.headers["challenge"]
        self.__log_in["session"] = r.headers["ssbulsatapi"]
        self.__s.headers.update({"SSBULSATAPI": self.__log_in["session"]})
        # Zero-pad the password to the AES block size, encrypt with the
        # challenge key (ECB mode) and base64-encode for the login form.
        _text = self.__log_in["pw"] + (self.__BLOCK_SIZE - len(self.__log_in["pw"]) % self.__BLOCK_SIZE) * "\0"
        enc = EN.AESModeOfOperationECB(self.__log_in["key"])
        self.__p_data["pass"][1] = base64.b64encode(enc.encrypt(_text))
        self.__log_dat(self.__log_in)
        self.__log_dat(self.__p_data)
        if self.__cb: self.__cb({"pr": 30, "str": "Login start"})
        # Second POST performs the actual login (multipart body via files=).
        r = self.__s.post(self.__URL_LOGIN, timeout=self.__t, headers=self.__UA, files=self.__p_data)
        self.__log_dat(r.request.headers)
        self.__log_dat(r.request.body)
        if r.status_code == requests.codes.ok:
            data = r.json()
            if data["Logged"] == "true":
                self.__log_dat("Login ok")
                if self.__cb: self.__cb({"pr": 50, "str": "Login ok"})
                # CORS-style preflight before requesting the channel list.
                self.__s.headers.update({"Access-Control-Request-Method": "POST"})
                self.__s.headers.update({"Access-Control-Request-Headers": "ssbulsatapi"})
                r = self.__s.options(self.__URL_LIST, timeout=self.__t, headers=self.__UA)
                self.__log_dat(r.request.headers)
                self.__log_dat(r.headers)
                self.__log_dat(str(r.status_code))
                if self.__cb: self.__cb({"pr": 70, "str": "Fetch data"})
                r = self.__s.post(self.__URL_LIST, timeout=self.__t, headers=self.__UA)
                self.__log_dat(r.request.headers)
                self.__log_dat(r.headers)
                if r.status_code == requests.codes.ok:
                    # Charset taken from Content-Type, e.g. "...; charset=utf-8".
                    self.__char_set = r.headers["content-type"].split("charset=")[1]
                    self.__log_dat("get data ok")
                    self.__tv_list = r.json()
                    self.__js = {}
                    self.__log_dat(self.__js)
                    if self.__DEBUG_EN is True:
                        # Dump the raw channel list next to the add-on for debugging.
                        with open(os.path.join(self.__path, "ch_dump"), "wb") as df:
                            df.write(json.dumps(self.__tv_list))
                    if self.__cb: self.__cb({"pr": 90, "str": "Fetch data done"})
                    if self.__gen_epg or self.__gen_jd:
                        # One extra request per channel for a week of EPG data.
                        for i, ch in enumerate(self.__tv_list):
                            if self.__cb:
                                self.__cb(
                                    {
                                        "pr": int((i * 100) / len(self.__tv_list)),
                                        "str": "Fetch: %s" % ch["epg_name"].encode("utf-8"),
                                        "idx": i,
                                        "max": len(self.__tv_list),
                                    }
                                )
                            # Only channels already exposing a "program" slot get EPG.
                            if ch.has_key("program"):
                                r = self.__s.post(
                                    self.__URL_EPG,
                                    timeout=self.__t,
                                    headers=self.__UA,
                                    data={
                                        #'epg': 'nownext',
                                        "epg": "1week",
                                        #'epg': '1day',
                                        "channel": ch["epg_name"],
                                    },
                                )
                                if r.status_code == requests.codes.ok:
                                    ch["program"] = r.json().items()[0][1]["programme"]
                # HTML-unescape the accumulated payload (Python 2 HTMLParser).
                from HTMLParser import HTMLParser as h
                self.__js = json.loads(h().unescape(json.dumps(self.__js).decode(self.__char_set)))
                self.__log_out()
                # r holds the last response issued above; any non-200 means
                # some stage of the fetch failed.
                if r.status_code != requests.codes.ok:
                    self.__log_dat("Error status code: %d" % (r.status_code,))
                    raise Exception("FetchFail")
            else:
                raise Exception("LoginFail")