def GET(self):
    """Stream the resource named by query param ``t`` back to the client.

    Acts as a signed reverse proxy: requires ``RSSProxySerects`` in
    settings and a valid request signature.  Optional query params:
    ``r`` (referer), ``c`` (cookies as JSON), ``h`` (extra headers as JSON).
    """
    web.header('Access-Control-Allow-Origin', '*')
    cfg = settings()
    cfg.ReadSettings()
    if cfg.RSSProxySerects is None:
        web.HTTPError('500 Internal Server Error')
        return 'RSSProxySerects is needed in settings.'
    if not verifySign(cfg.RSSProxySerects):
        web.HTTPError('401 Unauthorized')
        return ''
    target = web.input().get("t")
    if target is None:
        web.HTTPError('400 Bad Request')
        return ''
    # Start from a desktop-Chrome UA, then forward selected headers from
    # the incoming request so range requests / conditional gets still work.
    fwd_headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.212 Safari/537.36'}  # noqa: E501
    env = web.ctx.env.copy()
    for name in ('User-Agent', 'Range', 'Accept', 'If-Modified-Since'):
        env_key = "HTTP_" + name.upper().replace('-', '_')
        if env_key in env:
            fwd_headers[name] = env[env_key]
    referer = web.input().get("r")
    if referer is not None:
        fwd_headers['referer'] = referer
    ses = Session()
    ses.headers.update(fwd_headers)
    cookie = web.input().get("c")
    if cookie is not None:
        from json import loads
        ses.cookies.update(loads(cookie))
    extra = web.input().get("h")
    if extra is not None:
        from json import loads
        ses.headers.update(loads(extra))
    resp = ses.get(target, stream=True)
    if resp.status_code != 200:
        # Propagate the upstream status; the body is still streamed below.
        web.HTTPError(f"{resp.status_code} {resp.reason}")
    upstream = resp.headers
    for name in ('cache-control', 'content-length', 'content-type', 'date',
                 'last-modified', 'content-range', 'age', 'expires',
                 'keep-alive', 'location', 'server'):
        if name == 'content-length':
            # If upstream body is compressed it gets re-encoded, so its
            # content-length would be wrong for our response.
            if 'content-encoding' in upstream and upstream['content-encoding'] != 'identity':  # noqa: E501
                continue
        if name in upstream:
            web.header(name, upstream[name])
    return self.send(resp)
def POST(self):
    """Receive a FilterBox notification payload and relay it to Telegram.

    The request body is a JSON object using FilterBox/Android keys; the
    assembled HTML message is sent through the Telegram bot configured in
    settings.  Returns an empty body.
    """
    s = settings()
    s.ReadSettings()
    if s.notiAPISecrets is not None:
        if not verifySign(s.notiAPISecrets, True):
            web.HTTPError('401 Unauthorized')
            return ''
    data = web.data()
    if isinstance(data, bytes):
        d = data.decode('UTF-8')
    elif isinstance(data, str):
        d = data
    else:
        web.HTTPError('500 Internal Server Error')
        return ''
    j = loads(d)
    mes = textc()
    # Keys used by the FilterBox notification-forwarder payload.
    APP_NAME = 'filterbox.field.APP_NAME'
    PACKAGE_NAME = 'filterbox.field.PACKAGE_NAME'
    TITLE = 'android.title'
    TEXT = 'android.text'
    WHEN = 'filterbox.field.WHEN'
    if APP_NAME in j:
        if PACKAGE_NAME in j:
            pn = j[PACKAGE_NAME]
            mes.addtotext(f"<b>{escape(j[APP_NAME])} ({escape(pn)})</b>")
        else:
            mes.addtotext(f"<b>{escape(j[APP_NAME])}</b>")
    if TITLE in j:
        mes.addtotext(f"<b>{escape(j[TITLE])}</b>")
    if TEXT in j:
        mes.addtotext(escape(j[TEXT]))
    # BUGFIX: WHEN was read unconditionally and raised KeyError when the
    # payload carried no timestamp; guard it like the other optional keys.
    # WHEN appears to be milliseconds since the epoch (divided by 1000).
    if WHEN in j:
        mes.addtotext(strftime(ISO8601_FORMAT, gmtime(j[WHEN] / 1000)))
    while len(mes):
        # NOTE(review): assumes mes.tostr() consumes a chunk per call so
        # the loop terminates — confirm against textc's implementation.
        sendMessage(s.notiAPITelegraBotChatId, mes.tostr(),
                    s.notiAPITelegramBotAPIKey, 'HTML', True)
    return ''
def POST(self):
    """Delete every stored proxy entry (action ``deleteAll``).

    Requires a valid signature against ``proxyAPISecrets`` and a current
    timestamp ``t`` (within +/-300s).  Replies with JSON, or JSONP when a
    GET request supplied ``callback``.
    """
    if self._method is None:
        self._method = 'POST'
    web.header("Content-Type", "application/json; charset=utf-8")
    try:
        callback = None
        if self._method == 'GET':
            callback = web.input().get("callback")
            if callback is not None and callback != '':
                web.header("Content-Type",
                           "application/javascript; charset=utf-8")
            else:
                callback = None
            cors = web.input().get("cors")
            if cors is not None:
                web.header('Access-Control-Allow-Origin', '*')

        def _reply(payload) -> str:
            # Serialize a payload, wrapping it for JSONP when needed.
            d = dumps(payload, ensure_ascii=False, separators=jsonsep)
            return d if callback is None else f'{callback}({d})'

        s = settings()
        s.ReadSettings()
        sg = s.proxyAPISecrets
        if sg is None:
            return _reply({"code": -500,
                           "msg": "proxyAPISecrets must be set in settings."})
        if not verifySign(sg):
            return _reply({"code": -401, "msg": "Unauthorized"})
        t = web.input().get("t")
        if t is None:
            return _reply({"code": -1, "msg": "current time(t) is needed."})
        try:
            t = int(t)
        except ValueError:
            return _reply({"code": -2,
                           "msg": "current time(t) must be a integer."})
        nt = round(time())
        # BUGFIX: the lower bound compared t against itself (always False);
        # it must compare the server clock against the client timestamp.
        if nt > (t + 300) or nt < (t - 300):
            return _reply({"code": -3,
                           "msg": "Emm. Seems the current time is not right."})
        act = web.input().get("a")
        if act is None or act == '':
            act = web.input().get("action")
            if act is None or act == '':
                return _reply({"code": -4,
                               "msg": "action type (a/action) is needed."})
        if act != 'deleteAll':
            return _reply({"code": -5,
                           "msg": "action type (a/action) must be 'deleteAll'."})
        db = ProxyDb()
        r = db.delete_all_proxy()
        return _reply({"code": 0, "result": r})
    except Exception:
        t = ''
        try:
            s = settings()
            s.ReadSettings()
            if s.debug:
                t = format_exc()
        except Exception:
            pass
        return dumps({"code": -500, "msg": t},
                     ensure_ascii=False, separators=jsonsep)
def POST(self):
    """Fetch the stored cookies/headers for a proxy ``id`` (action ``get``).

    Requires a valid signature against ``proxyAPISecrets`` and a current
    timestamp ``t`` (within +/-300s).  Replies with JSON.
    """
    web.header("Content-Type", "application/json; charset=utf-8")
    try:
        def _reply(payload) -> str:
            return dumps(payload, ensure_ascii=False, separators=jsonsep)

        s = settings()
        s.ReadSettings()
        sg = s.proxyAPISecrets
        if sg is None:
            return _reply({"code": -500,
                           "msg": "proxyAPISecrets must be set in settings."})
        if not verifySign(sg):
            return _reply({"code": -401, "msg": "Unauthorized"})
        t = web.input().get("t")
        if t is None:
            return _reply({"code": -1, "msg": "current time(t) is needed."})
        try:
            t = int(t)
        except ValueError:
            return _reply({"code": -2,
                           "msg": "current time(t) must be a integer."})
        nt = round(time())
        # BUGFIX: the lower bound compared t against itself (always False);
        # it must compare the server clock against the client timestamp.
        if nt > (t + 300) or nt < (t - 300):
            return _reply({"code": -3,
                           "msg": "Emm. Seems the current time is not right."})
        idd = web.input().get("id")
        if idd is None:
            return _reply({"code": -4, "msg": "id is needed."})
        act = web.input().get("a")
        if act is None or act == '':
            act = web.input().get("action")
            if act is None or act == '':
                return _reply({"code": -6,
                               "msg": "action type (a/action) is needed."})
        if act != 'get':
            return _reply({"code": -7,
                           "msg": "action type (a/action) must be 'get'."})
        db = ProxyDb()
        r = db.get_proxy(idd)
        if r is None:
            return _reply({"code": -5,
                           "msg": "Can not find this id in database."})
        # r is (cookies, headers) as stored in the database.
        j = {"id": idd, "cookies": r[0], "headers": r[1]}
        return _reply({"code": 0, "result": j})
    except Exception:
        t = ''
        try:
            s = settings()
            s.ReadSettings()
            if s.debug:
                t = format_exc()
        except Exception:
            pass
        return dumps({"code": -500, "msg": t},
                     ensure_ascii=False, separators=jsonsep)
def POST(self):
    """Store cookies/headers under a proxy ``id`` (action ``add``).

    ``c``/``cookies`` and ``h``/``headers`` accept JSON, or raw header
    syntax which is converted via loadsWithHeaderSep.  Requires a valid
    signature and a current timestamp ``t`` (within +/-300s).  Replies
    with JSON, or JSONP when a GET request supplied ``callback``.
    """
    if self._method is None:
        self._method = 'POST'
    web.header("Content-Type", "application/json; charset=utf-8")
    try:
        callback = None
        if self._method == 'GET':
            callback = web.input().get("callback")
            if callback is not None and callback != '':
                web.header("Content-Type",
                           "application/javascript; charset=utf-8")
            else:
                callback = None
            cors = web.input().get("cors")
            if cors is not None:
                web.header('Access-Control-Allow-Origin', '*')

        def _reply(payload) -> str:
            # Serialize a payload, wrapping it for JSONP when needed.
            d = dumps(payload, ensure_ascii=False, separators=jsonsep)
            return d if callback is None else f'{callback}({d})'

        s = settings()
        s.ReadSettings()
        sg = s.proxyAPISecrets
        if sg is None:
            return _reply({"code": -500,
                           "msg": "proxyAPISecrets must be set in settings."})
        if not verifySign(sg):
            return _reply({"code": -401, "msg": "Unauthorized"})
        t = web.input().get("t")
        if t is None:
            return _reply({"code": -1, "msg": "current time(t) is needed."})
        try:
            t = int(t)
        except ValueError:
            return _reply({"code": -2,
                           "msg": "current time(t) must be a integer."})
        nt = round(time())
        # BUGFIX: the lower bound compared t against itself (always False);
        # it must compare the server clock against the client timestamp.
        if nt > (t + 300) or nt < (t - 300):
            return _reply({"code": -3,
                           "msg": "Emm. Seems the current time is not right."})
        act = web.input().get("a")
        if act is None or act == '':
            act = web.input().get("action")
            if act is None or act == '':
                return _reply({"code": -8,
                               "msg": "action type (a/action) is needed."})
        if act != 'add':
            return _reply({"code": -9,
                           "msg": "action type (a/action) must be 'add'."})
        idd = web.input().get("id")
        if idd is None:
            return _reply({"code": -4, "msg": "id is needed."})
        cookies = web.input().get('c')
        if cookies is None or cookies == '':
            cookies = web.input().get("cookies")
        headers = web.input().get('h')
        if headers is None or headers == '':
            headers = web.input().get("headers")
        if cookies is None:
            cookies = ''
        if headers is None:
            headers = ''
        if cookies == '' and headers == '':
            return _reply({"code": -5,
                           "msg": "cookies or headers is needed."})
        if cookies != '':
            try:
                loads(cookies)
            except Exception:
                # Not JSON: try to parse raw "k: v" header/cookie syntax.
                tem = loadsWithHeaderSep(cookies)
                if tem is not None:
                    cookies = dumps(tem, ensure_ascii=False,
                                    separators=jsonsep)
                else:
                    return _reply({"code": -6,
                                   "msg": "cookies is not a vaild JSON."})
        if headers != '':
            try:
                loads(headers)
            except Exception:
                tem = loadsWithHeaderSep(headers)
                if tem is not None:
                    headers = dumps(tem, ensure_ascii=False,
                                    separators=jsonsep)
                else:
                    return _reply({"code": -7,
                                   "msg": "headers is not a vaild JSON."})
        db = ProxyDb()
        r = db.add_proxy(idd, cookies, headers)
        return _reply({"code": 0, "result": r})
    except Exception:
        t = ''
        try:
            s = settings()
            s.ReadSettings()
            if s.debug:
                t = format_exc()
        except Exception:
            pass
        return dumps({"code": -500, "msg": t},
                     ensure_ascii=False, separators=jsonsep)
def GET(self):
    """Reverse-proxy a target URL using cookies/headers stored under ``id``.

    Query params: ``t``/``target`` (URL, required), ``id`` (stored proxy
    entry), ``e``/``expired`` (unix expiry), ``r`` (referer), ``c``/``h``
    (extra cookies/headers as JSON).  Signed with ``proxyAPISecrets``.
    """
    try:
        web.header('Access-Control-Allow-Origin', '*')
        cfg = settings()
        cfg.ReadSettings()
        sg = cfg.proxyAPISecrets
        if sg is None:
            web.header('Content-Type', 'text/plain; charset=UTF-8')
            web.HTTPError('500 Internal Server Error')
            return "proxyAPISecrets must be set in settings."
        if not verifySign(sg):
            web.HTTPError('401 Unauthorized')
            return ''
        target = web.input().get('t')
        if target is None or target == '':
            target = web.input().get("target")
            if target is None or target == '':
                web.header('Content-Type', 'text/plain; charset=UTF-8')
                web.HTTPError('400 Bad Request')
                return 'target url (t/target) is needed.'
        idd = web.input().get("id")
        if idd == '':
            idd = None
        exp = web.input().get("e")
        if exp is None or exp == '':
            exp = web.input().get("expired")
        if exp is not None and exp != '':
            try:
                exp = int(exp)
            except Exception:
                web.header('Content-Type', 'text/plain; charset=UTF-8')
                web.HTTPError('400 Bad Request')
                return 'Expired time should be a integer.'
            if round(time()) > exp:
                web.header('Content-Type', 'text/plain; charset=UTF-8')
                web.HTTPError('400 Bad Request')
                return 'This reverse proxy link is expired.'
        if idd is not None:
            db = ProxyDb()
            stored = db.get_proxy(idd)
            if stored is None:
                web.header('Content-Type', 'text/plain; charset=UTF-8')
                web.HTTPError('400 Bad Request')
                return 'Can not find this id in database.'
        else:
            # No id given: behave as if empty cookies/headers were stored.
            stored = ('{}', '{}')
        fwd_headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.212 Safari/537.36'
        }  # noqa: E501
        if stored[1] != '':
            try:
                fwd_headers.update(loads(stored[1]))
            except Exception:
                pass
        env = web.ctx.env.copy()
        for name in ('User-Agent', 'Range', 'Accept', 'If-Modified-Since'):
            env_key = "HTTP_" + name.upper().replace('-', '_')
            if env_key in env:
                fwd_headers[name] = env[env_key]
        referer = web.input().get("r")
        if referer is not None:
            fwd_headers['referer'] = referer
        ses = Session()
        if stored[0] != '':
            try:
                ses.cookies.update(loads(stored[0]))
            except Exception:
                pass
        ses.headers.update(fwd_headers)
        cookie = web.input().get("c")
        if cookie is not None:
            try:
                ses.cookies.update(loads(cookie))
            except Exception:
                pass
        extra = web.input().get("h")
        if extra is not None:
            try:
                ses.headers.update(loads(extra))
            except Exception:
                pass
        resp = ses.get(target, stream=True)
        if resp.status_code != 200:
            # Propagate upstream status; the body is still streamed below.
            web.HTTPError(f"{resp.status_code} {resp.reason}")
        upstream = resp.headers
        for name in ('cache-control', 'content-length', 'content-type',
                     'date', 'last-modified', 'content-range', 'age',
                     'expires', 'keep-alive', 'location', 'server'):
            if name == 'content-length':
                # A re-encoded compressed body invalidates content-length.
                if 'content-encoding' in upstream and upstream['content-encoding'] != 'identity':  # noqa: E501
                    continue
            if name in upstream:
                web.header(name, upstream[name])
        return self.send(resp)
    except Exception:
        web.header('Content-Type', 'text/plain; charset=UTF-8')
        web.HTTPError('500 Internal Server Error')
        try:
            s = settings()
            s.ReadSettings()
            if s.debug:
                return format_exc()
        except Exception:
            pass
        return ''
def GET(self):
    """Serve pixiv data (user illusts, bookmarks, or follow timeline).

    Query params: ``u``/``user``, ``t``/``type`` (rss|json|atom),
    ``include_tags``, ``user_info``, ``bookmarks``, ``private``,
    ``follow``, ``all``, ``lang``, ``add_author_in_title``, ``ugoira``.
    Results are cached in PixivDb; generated RSS may be cached as well.
    """
    try:
        web.header('Access-Control-Allow-Origin', '*')
        s = settings()
        s.ReadSettings()
        if s.pixivRSSSecrets and not verifySign(s.pixivRSSSecrets):
            web.HTTPError('401 Unauthorized')
            return ''
        db = PixivDb()
        api = PixivAPI(s, db)
        user = web.input().get("u")
        if user is None or user == '':
            user = web.input().get("user")
        typ = web.input().get("t")
        if typ is None or typ not in ['rss', 'json', 'atom']:
            typ = web.input().get("type")
            if typ is None or typ not in ['rss', 'json', 'atom']:
                typ = 'rss'
        # Flag-style params: presence means True (value is ignored).
        include_tags = web.input().get("include_tags") is not None
        user_info = web.input().get("user_info") is not None
        bookmarks = web.input().get("bookmarks") is not None
        restrict = web.input().get("private") is None
        follow = web.input().get("follow") is not None
        want_all = web.input().get("all") is not None
        if follow:
            restrict2 = PixivFollowRestrict.PUBLIC
            if not restrict:
                restrict2 = PixivFollowRestrict.PRIVATE
            if want_all:
                restrict2 = PixivFollowRestrict.ALL
        lang = web.input().get("lang")
        add_author_in_title = True if bookmarks or follow else False
        add_author_in_title = parseBool(
            web.input().get("add_author_in_title"), add_author_in_title)  # noqa: E501
        enable_ugoira = parseBool(web.input().get("ugoira"),
                                  s.pixivEnableUgoira)
        if user is None and not follow:
            web.HTTPError('400 Bad Request')
            return 'User is needed.'
        if follow:
            d = {}
            if lang is not None:
                d['lang'] = lang
            d['restrict'] = str(restrict2)
            pld = '' if len(d) == 0 else '?' + urlencode(d)
            ld = '/follow' + pld
            data = db.get_cache(ld, s.pixivCacheTime)
            new_cache = False
            if data is None:
                data = api.getFollow(restrict2, lang)
                if data is None:
                    raise Exception('Can not get follow.')
                c = db.save_cache(ld, data)
                new_cache = True
            else:
                c = data[1]
                data = data[0]
            if typ == 'json':
                sendCacheInfo(60 * s.pixivCacheTime, c)
                web.header("Content-Type",
                           "application/json; charset=UTF-8")
                return dumps(data, ensure_ascii=False, separators=jsonsep)
            elif typ == 'rss':
                if s.pixivCacheRSS:
                    d["include_tags"] = include_tags
                    d['add_author_in_title'] = add_author_in_title
                    ld2 = '/follow/rss?' + urlencode(d)
                r = None
                if s.pixivCacheRSS and not new_cache:
                    r = db.get_cache(ld2, s.pixivCacheTime)
                    if r is not None:
                        r = r[0]
                if r is None:
                    from RSSGenerator import RSSGen, RSS2_TYPE
                    from pixivHTMLGen import genRSSItems
                    from pixivrssp import genUrl
                    g = RSSGen(RSS2_TYPE)
                    ill = data['illusts']
                    g.meta.title = f"Pixiv's timeline ({restrict2})"
                    g.meta.link = 'https://www.pixiv.net/bookmark_new_illust.php'  # noqa: E501
                    g.meta.description = 'Works by users you are following'
                    g.meta.lastBuildDate = c / 1E9
                    g.meta.ttl = s.pixivCacheTime
                    g.list = genRSSItems(ill, s, RSS2_TYPE, include_tags,
                                         add_author_in_title, enable_ugoira)
                    r = g.generate()
                    if s.pixivCacheRSS:
                        db.save_cache(ld2, r)
                web.header("Content-Type", "application/xml; charset=UTF-8")
                return r
            web.HTTPError('400 Bad Request')
            return 'Type is not supported'
        elif user is not None:
            d = {}
            if lang is not None:
                d['lang'] = lang
            pld = '' if len(d) == 0 else '?' + urlencode(d)
            uld = f'/user/detail/{user}'
            ld = f'/user/illusts/{user}' + pld
            u = db.get_cache(uld, s.pixivCacheTime)
            new_cache = False
            if u is None:
                u = api.getUserDetails(user)
                if u is None:
                    raise Exception('Can not get user info.')
                c = db.save_cache(uld, u)
                new_cache = True
            else:
                c = u[1]
                u = u[0]
            if user_info:
                if typ == 'json':
                    sendCacheInfo(60 * s.pixivCacheTime, c)
                    web.header("Content-Type",
                               "application/json; charset=UTF-8")
                    return dumps(u, ensure_ascii=False, separators=jsonsep)
                web.HTTPError('400 Bad Request')
                return 'Type is not supported'
            if bookmarks:
                d["restrict"] = restrict
                ld3 = f'/user/bookmarks/illusts/{user}?' + urlencode(d)
                bk = db.get_cache(ld3, s.pixivCacheTime)
                if bk is None:
                    bk = api.getBookmarks(user, restrict, lang)
                    if bk is None:
                        raise Exception('Can not get bookmarks.')
                    c = db.save_cache(ld3, bk)
                    new_cache = True
                else:
                    c = bk[1]
                    bk = bk[0]
                ill = bk
            else:
                ill = db.get_cache(ld, s.pixivCacheTime)
                if ill is None:
                    ill = api.getIllusts(user, lang)
                    if ill is None:
                        raise Exception("Can not get illusts")
                    c = db.save_cache(ld, ill)
                    new_cache = True
                else:
                    c = ill[1]
                    ill = ill[0]
            sendCacheInfo(60 * s.pixivCacheTime, c)
            if typ == 'json':
                web.header("Content-Type",
                           "application/json; charset=UTF-8")
                return dumps(ill, ensure_ascii=False, separators=jsonsep)
            elif typ == 'rss':
                if s.pixivCacheRSS:
                    d["include_tags"] = include_tags
                    d['add_author_in_title'] = add_author_in_title
                    ld2 = f'/user/illusts/{user}/rss?' + urlencode(d)
                r = None
                if s.pixivCacheRSS and not new_cache:
                    r = db.get_cache(ld2, s.pixivCacheTime)
                    if r is not None:
                        r = r[0]
                if r is None:
                    from RSSGenerator import RSSGen, RSS2_TYPE
                    from pixivHTMLGen import genRSSItems
                    from pixivrssp import genUrl
                    g = RSSGen(RSS2_TYPE)
                    ill = ill['illusts']
                    if not bookmarks:
                        g.meta.title = f"Pixiv {u['user']['name']}(@{u['user']['account']})'s illusts"  # noqa: E501
                    else:
                        g.meta.title = f"Pixiv {u['user']['name']}(@{u['user']['account']})'s bookmarks"  # noqa: E501
                    if not restrict:
                        g.meta.title += ' (private)'
                    g.meta.link = f"https://www.pixiv.net/users/{user}"
                    g.meta.description = u['user']['comment']
                    if g.meta.description is None:
                        g.meta.description = "This user don't have a comment."  # noqa: E501
                    img_url = u['profile']['background_image_url']
                    if img_url is not None:
                        g.meta.image = genUrl(img_url, s.RSSProxySerects)
                    g.meta.lastBuildDate = c / 1E9
                    g.meta.ttl = s.pixivCacheTime
                    g.list = genRSSItems(ill, s, RSS2_TYPE, include_tags,
                                         add_author_in_title, enable_ugoira)
                    r = g.generate()
                    if s.pixivCacheRSS:
                        db.save_cache(ld2, r)
                web.header("Content-Type", "application/xml; charset=UTF-8")
                return r
    except Exception:
        web.HTTPError('500 Internal Server Error')
        web.header("Content-Type", "text/plain; charset=UTF-8")
        try:
            s = settings()
            s.ReadSettings()
            if s.debug:
                return format_exc()
        except Exception:
            pass
        return ''
def GET(self):
    """Serve Tiktok data as JSON/RSS, or redirect to a proxied video URL.

    Query params: ``u``/``user`` (username), ``vid``/``videoid`` (video
    id), ``t``/``type`` (rss|json|atom|url), ``cid``/``contain_id``
    (include the numeric user id in the feed title).
    """
    try:
        web.header('Access-Control-Allow-Origin', '*')
        s = settings()
        s.ReadSettings()
        if s.tiktokRSSSecrets and not verifySign(s.tiktokRSSSecrets):
            web.HTTPError('401 Unauthorized')
            return ''
        t = TiktokAPI()
        user = web.input().get("u")
        if user is None or user == '':
            user = web.input().get("user")
        typ = web.input().get("t")
        if typ is None or typ not in ['rss', 'json', 'atom', 'url']:
            typ = web.input().get("type")
            if typ is None or typ not in ['rss', 'json', 'atom', 'url']:
                typ = 'rss'
        videoid = web.input().get("vid")
        if videoid is None or videoid == '':
            videoid = web.input().get("videoid")
        contain_id = web.input().get("cid")
        if contain_id is None:
            contain_id = web.input().get("contain_id")
        contain_id = contain_id is not None
        db = TiktokDatabase()
        cacheTime = 15  # cache lifetime for user data (scaled by 60 below)
        if videoid is not None:
            ld = f'/video/{videoid}'
            vdata = db.get_cache(ld, VIDEO_CACHE_TIME)
            if vdata is None:
                vdata = t.get_video(videoid, user)
                if vdata is None or vdata['statusCode'] != 0:
                    raise Exception("Can not parse video info.")
                c = db.save_cache(ld, vdata)
            else:
                c = vdata[1]
                vdata = vdata[0]
            sendCacheInfo(VIDEO_CACHE_TIME * 60, c)
            if typ == 'json':
                web.header("Content-Type",
                           "application/json; charset=UTF-8")
                return dumps(vdata, ensure_ascii=False, separators=jsonsep)
            elif typ == 'url':
                vurl = None
                i = vdata['itemInfo']['itemStruct']
                # Prefer the download address, fall back to the play address.
                for k in ['downloadAddr', 'playAddr']:
                    if k in i['video']:
                        if i['video'][k] is not None:
                            vurl = i['video'][k]
                            break
                if vurl is None:
                    raise ValueError('Can not find play url.')
                if s.RSSProxySerects is None:
                    raise ValueError(
                        'RSSProxySerects is needed in settings.')
                from tiktokRSSP import genUrl
                vurl = genUrl(vurl, s.RSSProxySerects, vdata,
                              "https://www.tiktok.com/")
                web.HTTPError('302 FOUND')
                web.header('Location', vurl)
                return ''
            raise Exception('Other Type is not supported.')
        elif user is not None:
            ld = f'/user/{user}'
            udata = db.get_cache(ld, cacheTime)
            new_cache = False
            if udata is None:
                udata = t.get_user(user)
                if udata is None or udata['statusCode'] != 0:
                    raise Exception("Can not parse user info.")
                c = db.save_cache(ld, udata)
                new_cache = True
            else:
                c = udata[1]
                udata = udata[0]
            sendCacheInfo(cacheTime * 60, c)
            if typ == 'json':
                web.header("Content-Type",
                           "application/json; charset=UTF-8")
                return dumps(udata, ensure_ascii=False, separators=jsonsep)
            elif typ == 'rss':
                # BUGFIX: the RSS cache key (ldd2) was only built when a
                # cached copy could be looked up, so db.save_cache(ldd2, r)
                # raised NameError after a fresh fetch with caching enabled.
                # Build the key unconditionally.
                d = {"contain_id": str(contain_id)}
                ldd2 = f"user/{user}/rss?" + urlencode(d)
                r = None
                if s.tiktokCacheRSS and not new_cache:
                    r = db.get_cache(ldd2, cacheTime)
                    if r is not None:
                        r = r[0]
                if r is None:
                    from RSSGenerator import RSSGen, RSS2_TYPE
                    from tiktokHTMLGen import genItemList
                    g = RSSGen(RSS2_TYPE)
                    u = udata['userInfo']['user']
                    ti = f"Tiktok {u['nickname']}(@{u['uniqueId']}"
                    ti += f", {u['id']})" if contain_id else ')'
                    g.meta.title = ti
                    g.meta.link = f"https://www.tiktok.com/@{user}"
                    g.meta.description = u['signature']
                    g.meta.image = u['avatarLarger']
                    g.meta.lastBuildDate = c / 1E9
                    g.meta.ttl = cacheTime
                    g.list = genItemList(user, udata, udata['items'],
                                         RSS2_TYPE)
                    r = g.generate()
                    if s.tiktokCacheRSS:
                        db.save_cache(ldd2, r)
                web.header("Content-Type", "application/xml; charset=UTF-8")
                return r
    except Exception:
        web.HTTPError('500 Internal Server Error')
        try:
            s = settings()
            s.ReadSettings()
            if s.debug:
                return format_exc()
        except Exception:
            pass
        return ''
def GET(self):
    """Serve Instagram user data / tagged posts as JSON or RSS.

    Query params: ``u``/``user``, ``t``/``type`` (rss|json|atom),
    ``cid``/``contain_id``, ``tagged``, ``fetch_post`` (expand sidecar
    posts), ``proxy`` (proxy image URLs).  On NeedVerifyError, redirects
    the client to the verification page.
    """
    try:
        s = settings()
        s.ReadSettings()
        if s.instagramRSSSecrets and not verifySign(s.instagramRSSSecrets):
            web.HTTPError('401 Unauthorized')
            return ''
        db = InstaDatabase()
        i = InstaAPI(db, s.instagramUsername, s.instagramPassword)
        user = web.input().get("u")
        if user is None or user == '':
            user = web.input().get("user")
        typ = web.input().get("t")
        if typ is None or typ not in ['rss', 'json', 'atom']:
            typ = web.input().get("type")
            if typ is None or typ not in ['rss', 'json', 'atom']:
                typ = 'rss'
        contain_id = web.input().get("cid")
        if contain_id is None:
            contain_id = web.input().get("contain_id")
        contain_id = contain_id is not None
        tagged = web.input().get("tagged") is not None
        fetch_post = web.input().get("fetch_post") is not None
        proxy = web.input().get("proxy") is not None
        cacheTime = s.instagramCacheTime
        if user is not None:
            idd = f"user/{user}/init"
            r = db.get_cache(idd, cacheTime)
            new_cache = False
            if r is None:
                i._get_init_csrftoken()
                r = i.get_user_info(user)
                c = db.save_cache(idd, r)
                new_cache = True
            else:
                c = r[1]
                r = r[0]
            if tagged:
                idd2 = f"user/{user}/tagged"
                r2 = None
                new_cache = False
                r2 = db.get_cache(idd2, cacheTime)
                if r2 is not None:
                    c = r2[1]
                    r2 = r2[0]
                if r2 is None:
                    r2 = i.get_user_tagged(r['id'])
                    c = db.save_cache(idd2, r2)
                    new_cache = True
                sendCacheInfo(cacheTime * 60, c)
                if fetch_post:
                    # Replace GraphSidecar nodes with the full post data so
                    # all images of a multi-image post are available.
                    edges = r2['edge_user_to_photos_of_you']['edges']
                    for e in edges:
                        if e['node']['__typename'] == 'GraphSidecar':
                            shortCode = e['node']['shortcode']
                            idd4 = f"post/{shortCode}"
                            r4 = db.get_cache(idd4, cacheTime)
                            if r4 is not None:
                                r4 = r4[0]
                            if r4 is None:
                                r4 = i.get_post(shortCode)
                                db.save_cache(idd4, r4)
                            e['node'] = r4
                if typ == "json":
                    web.header("Content-Type",
                               "application/json; charset=UTF-8")
                    return dumps(r2, ensure_ascii=False, separators=jsonsep)
                elif typ == "rss":
                    # BUGFIX: the RSS cache key (idd3) was only built when a
                    # cached copy could be looked up, so save_cache raised
                    # NameError after a fresh fetch. Build it unconditionally.
                    d = {"contain_id": str(contain_id), "proxy": str(proxy)}
                    idd3 = f"user/{user}/tagged/rss?" + urlencode(d)
                    r3 = None
                    if s.isntagramCacheRSS and not new_cache:
                        r3 = db.get_cache(idd3, cacheTime)
                        if r3 is not None:
                            r3 = r3[0]
                    if r3 is None:
                        from RSSGenerator import RSSGen, RSS2_TYPE
                        from instaHTMLGen import genItemList
                        g = RSSGen(RSS2_TYPE)
                        if not contain_id:
                            ti = f"Instagram Tagged {r['full_name']}(@{r['username']})"  # noqa: E501
                        else:
                            ti = f"Instagram Tagged {r['full_name']}(@{r['username']}, {r['id']})"  # noqa: E501
                        g.meta.title = ti
                        url = f"https://www.instagram.com/{r['username']}/"
                        if 'external_url' in r:
                            te = r['external_url']
                            if te is not None and isinstance(
                                    te, str) and len(te):  # noqa: E501
                                url = te
                        g.meta.link = url
                        g.meta.description = r['biography']
                        image = r['profile_pic_url_hd']
                        if proxy:
                            from instaRSSP import genUrl
                            image = genUrl(image, s.RSSProxySerects)
                        g.meta.image = image
                        g.meta.lastBuildDate = c / 1E9
                        g.meta.ttl = cacheTime
                        g.list = genItemList(r2, RSS2_TYPE, proxy=proxy)
                        r3 = g.generate()
                        if s.isntagramCacheRSS:
                            db.save_cache(idd3, r3)
                    web.header("Content-Type",
                               "application/xml; charset=UTF-8")
                    return r3
                return
            sendCacheInfo(cacheTime * 60, c)
            if typ == 'json':
                web.header("Content-Type",
                           "application/json; charset=UTF-8")
                return dumps(r, ensure_ascii=False, separators=jsonsep)
            elif typ == "rss":
                # Same BUGFIX as above for the non-tagged feed key (idd2).
                d = {"contain_id": str(contain_id), "proxy": str(proxy)}
                idd2 = f"user/{user}/rss?" + urlencode(d)
                r2 = None
                if s.isntagramCacheRSS and not new_cache:
                    r2 = db.get_cache(idd2, cacheTime)
                    if r2 is not None:
                        r2 = r2[0]
                if r2 is None:
                    from RSSGenerator import RSSGen, RSS2_TYPE
                    from instaHTMLGen import genItemList
                    g = RSSGen(RSS2_TYPE)
                    if not contain_id:
                        ti = f"Instagram {r['full_name']}(@{r['username']})"  # noqa: E501
                    else:
                        ti = f"Instagram {r['full_name']}(@{r['username']}, {r['id']})"  # noqa: E501
                    g.meta.title = ti
                    url = f"https://www.instagram.com/{r['username']}/"
                    if 'external_url' in r:
                        te = r['external_url']
                        if te is not None and isinstance(
                                te, str) and len(te):  # noqa: E501
                            url = te
                    g.meta.link = url
                    g.meta.description = r['biography']
                    image = r['profile_pic_url_hd']
                    if proxy:
                        from instaRSSP import genUrl
                        image = genUrl(image, s.RSSProxySerects)
                    g.meta.image = image
                    g.meta.lastBuildDate = c / 1E9
                    g.meta.ttl = cacheTime
                    g.list = genItemList(r, RSS2_TYPE, proxy=proxy)
                    r2 = g.generate()
                    if s.isntagramCacheRSS:
                        db.save_cache(idd2, r2)
                web.header("Content-Type", "application/xml; charset=UTF-8")
                return r2
    except NeedVerifyError as e:
        # Instagram wants a checkpoint: bounce the client to the verify
        # page and come back to this URL afterwards.
        z = [('gourl', web.ctx.path), ('nc', e.sign)]
        web.HTTPError('302 Found')
        web.header("Location", "/instaVerify?" + urlencode(z))
        return ''
    except Exception:
        web.HTTPError('500 Internal Server Error')
        try:
            s = settings()
            s.ReadSettings()
            if s.debug:
                return format_exc()
        except Exception:
            pass
        return ''
def POST(self):
    """Generate a signed reverse-proxy link for a stored ``id`` (action ``gen``).

    Requires a valid signature against ``proxyAPISecrets``, a current
    timestamp ``t`` (within +/-300s), a stored ``id`` and a ``target``
    URL; optional ``e``/``expired`` embeds an expiry time.  Replies JSON.
    """
    web.header("Content-Type", "application/json; charset=utf-8")
    try:
        def _reply(payload) -> str:
            return dumps(payload, ensure_ascii=False, separators=jsonsep)

        s = settings()
        s.ReadSettings()
        sg = s.proxyAPISecrets
        if sg is None:
            return _reply({"code": -500,
                           "msg": "proxyAPISecrets must be set in settings."})
        if not verifySign(sg):
            return _reply({"code": -401, "msg": "Unauthorized"})
        t = web.input().get("t")
        if t is None:
            return _reply({"code": -1, "msg": "current time(t) is needed."})
        try:
            t = int(t)
        except ValueError:
            return _reply({"code": -2,
                           "msg": "current time(t) must be a integer."})
        nt = round(time())
        # BUGFIX: the lower bound compared t against itself (always False);
        # it must compare the server clock against the client timestamp.
        if nt > (t + 300) or nt < (t - 300):
            return _reply({"code": -3,
                           "msg": "Emm. Seems the current time is not right."})
        idd = web.input().get("id")
        if idd is None:
            return _reply({"code": -4, "msg": "id is needed."})
        target = web.input().get("target")
        if target is None or target == '':
            # BUGFIX: fixed "targte" typo in the error message.
            return _reply({"code": -5,
                           "msg": "target url (target) is needed."})
        act = web.input().get("a")
        if act is None or act == '':
            act = web.input().get("action")
            if act is None or act == '':
                return _reply({"code": -7,
                               "msg": "action type (a/action) is needed."})
        if act != 'gen':
            return _reply({"code": -8,
                           "msg": "action type (a/action) must be 'gen'."})
        exp = web.input().get("e")
        if exp is None or exp == '':
            exp = web.input().get("expired")
        if exp is not None and exp != '':
            try:
                exp = int(exp)
            except ValueError:
                return _reply({"code": -9,
                               "msg": "expired time (e/expired) must be a integer."})  # noqa: E501
        else:
            exp = None
        db = ProxyDb()
        r = db.get_proxy(idd, True)
        if not r:
            return _reply({"code": -6,
                           "msg": "Can not find this id in database."})
        ent = s.proxyEntry
        entr = urlparse(ent)
        if entr.netloc == '' and entr.path.startswith('/'):
            # Relative entry point: prefix with this server's base URL.
            ent = f'{web.ctx.homedomain}{ent}'
        # genSign expects multi-value (list) params; flatten afterwards.
        se = {"t": [target], "id": [idd]}
        if exp is not None:
            se["e"] = [str(exp)]
        t = genSign(s.proxyAPISecrets, se)
        se.update({"sign": [t]})
        for i in se:
            se[i] = se[i][0]
        url = f'{ent}?{urlencode(se)}'
        return _reply({"code": 0, "result": url})
    except Exception:
        t = ''
        try:
            s = settings()
            s.ReadSettings()
            if s.debug:
                t = format_exc()
        except Exception:
            pass
        return dumps({"code": -500, "msg": t},
                     ensure_ascii=False, separators=jsonsep)
def GET(self):
    """Fetch a Clash config from ``o``/``origin`` and merge a profile into it.

    Optional query params: ``p``/``profile`` (profile source),
    ``dp``/``default_proxy``, ``rrp``/``remove_rule_providers``,
    ``feu``/``force_enable_udp``, ``direct``/``direct_proxy``.
    Returns the merged document as YAML.
    """
    try:
        cfg = settings()
        cfg.ReadSettings()
        if cfg.cfwProfileSecrets and not verifySign(cfg.cfwProfileSecrets):
            web.HTTPError('401 Unauthorized')
            return ''
        origin = web.input().get("o")
        if origin is None or origin == '':
            origin = web.input().get("origin")
        if origin is None or origin == '':
            web.HTTPError('400 Bad Request')
            return ''
        # Impersonate CFW unless the caller already is a Clash client.
        fetch_headers = {"User-Agent": "ClashforWindows/0.13.8"}
        if 'HTTP_USER_AGENT' in web.ctx.env:
            ua: str = web.ctx.env['HTTP_USER_AGENT']
            if ua.lower().startswith("clash"):
                fetch_headers.update({'User-Agent': ua})
        resp = get(origin, headers=fetch_headers)
        if resp.status_code >= 400:
            web.HTTPError('400 Bad Request')
            return f'status = {resp.status_code}\n{resp.text}'
        origin_doc = readFile(resp.text)
        if isinstance(origin_doc, str):
            # readFile returned plain text instead of a parsed document.
            web.HTTPError('200 Not Supported Type')
            return origin_doc
        profile_src = web.input().get("p")
        if profile_src is None or profile_src == '':
            profile_src = web.input().get("profile")
        if profile_src is None or profile_src == '':
            # No profile requested: hand back the origin file untouched.
            return resp.text
        profile_doc = readFile(profile_src, True)
        opts = {}
        default_proxy = web.input().get("dp")
        if default_proxy is None or default_proxy == '':
            default_proxy = web.input().get("default_proxy")
        if default_proxy is not None and default_proxy != '':
            opts['default_proxy'] = default_proxy
        remove_rule_providers = web.input().get("rrp")
        if remove_rule_providers is None:
            remove_rule_providers = web.input().get(
                "remove_rule_providers")  # noqa: E501
        if remove_rule_providers is not None:
            opts['remove_rule_providers'] = True
        force_enable_udp = web.input().get("feu")
        if force_enable_udp is None:
            force_enable_udp = web.input().get("force_enable_udp")
        if force_enable_udp is not None:
            opts['force_enable_udp'] = True
        direct_proxy = web.input().get("direct")
        if direct_proxy is None or direct_proxy == '':
            direct_proxy = web.input().get("direct_proxy")
        if direct_proxy is not None and direct_proxy != '':
            opts['direct_proxy'] = direct_proxy
        cfws = CfwFileSettings(**opts)
        if cfws.remove_rule_providers:
            removeRuleProviders(origin_doc, fetch_headers)
            removeRuleProviders(profile_doc, fetch_headers)
        addProfileToTarget(profile_doc, origin_doc, cfws)
        out = dump(origin_doc, Dumper=CSafeDumper, allow_unicode=True)
        web.header('Content-Type', 'text/yaml; charset=utf-8')
        return out
    except Exception:
        web.HTTPError('500 Internal Server Error')
        try:
            s = settings()
            s.ReadSettings()
            if s.debug:
                return format_exc()
        except Exception:
            pass
        return ''