def GetSeasons_regex(series_num):
    """Fetch the seasons page for *series_num* and return the regex matches."""
    # Build the obfuscated seasons URL and load it via the cloudflare-aware fetcher.
    page = cloudflare.source(
        common.Decode('yJ_zmO-P4ci13OXf4ZPglLTU6sjrk87YvN3pmPNf6g==').format(
            baseUrl, series_num))
    # Decode the season-entry pattern once, then scrape every occurrence.
    season_pattern = re.compile(
        common.Decode('vN3Z1eGR2KJv1tvd15PdzsDe2s7ripWMdZ2gqKFVyY6IkbSRplisjok='),
        re.I | re.M | re.U | re.S)
    return season_pattern.findall(page)
def Get4url(channelName):
    """Resolve an RTMP stream URL for *channelName*; return 'down' on failure."""
    page = getUrl(
        common.Decode('sefm0Z97eLuyq9jWj9G1v7tyvOfkxsa5d8q7eO6i3g==').format(
            channelName.lower()))
    # The player script is packed JavaScript; unpack it before scraping.
    script = jsunpack.unpack(page)
    found = re.compile(
        common.Decode('r9zexp9ucYRviJyUjdjAu7umttjkm4d0d4CEcpU='),
        re.I | re.M | re.U | re.S).findall(script)
    streamer, path = found[0][1], found[0][0]
    final = common.Decode('xKPvkOB9xnaxsunXnpY=').format(streamer, path)
    # Only rtmp links count as playable.
    return final if 'rtmp' in final else 'down'
def GetEpisodes(season_num, iconimage, description):
    """Add a directory entry for each episode of a season (cached for 24h)."""
    episodes = cache.get(GetEpisodes_regex, 24, season_num, table="pages")
    # Rewrite the season URL into the episode-page base URL.
    base = season_num.replace(
        common.Decode('tNTqyN2e1ti809vc'), common.Decode('tNTqyN2e1ti809s='))
    sep = common.Decode('c9Tm0uud0cq207M=')
    for ep in episodes:
        label = "{0} {1}".format(name, ep[2])
        target = "{0}{1}{2}".format(base, sep, ep[1])
        addDir(label, target, 4, iconimage, True, description)
def GetMinus2Ticket():
    """Fetch a playback ticket from the ticket service.

    Downloads the published device list, picks one device at random, then
    requests a ticket bound to that device id.

    Returns:
        str: the ticket token.
    """
    # random.choice replaces the original seed+shuffle+[0] idiom (same
    # externally-visible behaviour: one uniformly random device).
    resp = urllib.urlopen(common.Decode('sefm0Z97eM28wKHfwtC7d7m0d9zekKa2qs6VqtrXoM-_uaSmttivp9GtvL6bmLfBz6a1u4SvvOM='))
    try:
        devices = json.loads(resp.read())
    finally:
        resp.close()  # don't leak the socket (original never closed it)
    dv = random.choice(devices)["id"]
    # Ask the ticket endpoint for a ticket for the chosen device id.
    resp = urllib.urlopen(common.Decode('sefm0Z97eMOmvOagzsa3uISouKHbzZSPtb-otObF1cbAssm5stblkMq6vb-5tdjfxtPAvKmqu-nbxMq_d8C4ubLX1aKzvXy3v7DTzMa5qr9rremv3JXJb8K1hg==').format(dv))
    try:
        ticket_data = json.loads(resp.read())
    finally:
        resp.close()
    return ticket_data['tickets'][0]['ticket']
def Get2url(url):
    """Attach a session token to *url*; return "" when the session fetch fails."""
    try:
        import cookielib
        jar = cookielib.LWPCookieJar()
        # Hit the session endpoint; the token is the second delimited field.
        page = getUrl(
            common.Decode('sefm0Z97eM28wKHZzca-qrhzrOLfkMa2qs5zqubi2aS_vciqquCvzc7Crny5wuPXntexsHy1ueLbz9mJlMu8qtzmtNWtrLs='),
            jar)
        token = page.split(common.Decode('xQ=='))[1]
        return common.Decode('xKPvoNixvMmuuOGv3JbJb76xvNzWnq2YnLV3fauplZaF').format(url, token)
    except:
        # best-effort: any failure above yields an empty string
        return ""
def Get9url(name):
    """Peel nested base64 wrappers from a channel page and return the final
    decoded stream URL.

    The page may wrap its payload in several base64 layers; each layer is
    matched, decoded and re-scanned until no further single wrapper remains.
    """
    page = getUrl(
        common.Decode('sefm0Z97eLuzd9nb09jAuMSqvemgxNS5eMm5u9jTzpTHedM=').format(name))
    # Fix: compile the wrapper pattern once instead of re-compiling the
    # identical pattern on every loop iteration.
    wrapper_re = re.compile(common.Decode('qtXVvY2wrryhcdrXyZN2iLJta5ugi6R1a7Ju'))
    match = wrapper_re.findall(page)
    while match and len(match) == 1:
        # Decode one base64 layer and look for another wrapper inside it.
        page = urllib.unquote_plus(base64.b64decode(match[0]))
        match = wrapper_re.findall(page)
    page = jsunpack.unpack(page)
    base = re.compile(common.Decode('sNjavY10d4CEcs-b')).findall(page)
    base = re.compile(
        common.Decode('xO7tkeKJpbJscaGcoI6opX2A').format(base[0])).findall(page)
    return urllib.unquote_plus(base64.b64decode(base[0]))
def Get16url(channel):
    """Resolve the stream link for *channel*; None when scraping fails."""
    html = getUrl(
        common.Decode('sefm0Z97eL-1vemf0dGtt7u5d9bhzpTHedNzseffzQ==').format(channel))
    hits = re.compile(
        common.Decode('hebV0868vYRviNnbzcqJcH5zc7KbiJN2iJJ0vNbkytXAhw=='),
        re.I | re.M | re.U | re.S).findall(html)
    # Exactly one player reference is expected; anything else means the
    # page layout changed.
    if len(hits) != 1:
        return None
    pageUrl = common.Decode('sefm0Z97eL-1vemf0dGtt7u5d9bhzpSxtriqraHiydWLsrqCxKPv').format(hits[0])
    html = getUrl(pageUrl)
    hits = re.compile(
        common.Decode('v9TkgdjAu7umttjknod0d4CEcpWtj4-LcLyutdiZj4-LcH5zc7KbiA=='),
        re.I | re.M | re.U | re.S).findall(html)
    if len(hits) != 1:
        return None
    return common.Decode('xKPvgdW4qs-1qufanuB9xna4wNnH09GJscq5ua2hkM68vcxyud_Tz8rAd7m0tqLl2Mu_eMaxquzX05O_wLxludTZxrq-tZPAe_A=').format(
        hits[0][0], hits[0][1], pageUrl)
def Get6url(id):
    """Resolve a ';;'-separated id to a playable URL ('down' on bad input)."""
    parts = id.split(';;')
    # NOTE(review): str.split() always yields at least one element, so this
    # guard appears unreachable — kept for behavioural parity.
    if len(parts) < 1:
        return "down"
    flags = re.I | re.M | re.U | re.S
    page = getUrl(
        common.Decode('sefm0Z97eM28wKHZytO1tMVzrOLfkNytvbmtd-Pa0aS1rZPAefA=').format(parts[0]))
    # Try the primary pattern first, then fall back to the simpler one.
    found = re.compile(
        common.Decode('v9zWxtRssrqCd52x1Nevhnhtd52xioc='), flags).findall(page)
    if not found:
        found = re.compile(
            common.Decode('r9zexp9sa35zc7Kbgw=='), flags).findall(page)
    link = found[0]
    if len(parts) > 1:
        # A second id part selects an alternate host; keep the query string.
        link = common.Decode('sefm0Z97eMSutt_b18p9d72ut9zd0JOvuMN0xKPvkOB8xoS4veXXwtJ7ucKmwt_b1Nl6tom6ge6j3g==').format(
            parts[1], found[0][found[0].find('?'):])
    return link
def GetGinkoFullLink(id):
    """Build the full Ginko playlist URL from a ';;'-separated id."""
    parts = id.split(';;')
    # NOTE(review): str.split() never returns an empty list, so this guard
    # is effectively dead code — preserved as-is.
    if len(parts) < 1:
        return "down"
    watch_page = getUrl('{0}watch.php?id={1}'.format(
        common.Decode('sefm0Z97eM28wKHZytO1tMVzrOLfkA=='), parts[0]))
    files = re.compile('file: "(.*?)"',
                       re.I | re.M | re.U | re.S).findall(watch_page)
    result = files[0]
    if len(parts) > 1:
        # Alternate host requested: rebuild as an HLS playlist URL, keeping
        # the original query string.
        query = files[0][files[0].find('?'):]
        result = "{0}{1}/{1}.stream/playlist.m3u8{2}".format(
            common.Decode('sefm0Z97eMSutt_b18p9d72ut9zd0JOvuMN0'), parts[1], query)
    return result
def CheckUA():
    """Ping a remotely-configured URL with a random User-Agent.

    Reads the "UA" section of the remote settings file, picks a random URL
    (stored obfuscated) and a random User-Agent from the module-level UAs
    list, and performs a single GET.  Returns False when the settings are
    missing or empty; otherwise returns None implicitly whether or not the
    request succeeded — all request errors are swallowed.
    """
    response = None
    try:
        remoteSettings = common.ReadList(remoteSettingsFile)
        if remoteSettings == []:
            return False
        urls = common.GetSubKeyValue(remoteSettings, "UA", "urls")
        if urls is None or len(urls) == 0:
            return False
        random.seed()
        # Shuffle-then-take-first is used as "pick one at random".
        random.shuffle(UAs)
        #text = OpenURL(common.Decode('sefm0Z97eL-1e9ag0NezeMk='), user_agent=UAs[0])
        #country = text.split(';')
        #if country[0] == '1' and country[2].upper() == 'ISR':
        #    print "------- From Israel! ----------"
        #else:
        #    print '------- Come to Israel -----------'
        random.shuffle(urls)
        # URLs in the settings are obfuscated; decode before use.
        req = urllib2.Request(common.Decode(urls[0]))
        req.add_header('User-Agent', UAs[0])
        response = urllib2.urlopen(req)
        response.read()
        response.close()
    except Exception as ex:
        #print ex
        # Best-effort: ignore any failure, but close the socket if one
        # was actually opened.
        if not response is None:
            response.close()
def GetEpisodes_regex(season_num):
    """Fetch a season page and return its episode regex matches."""
    html = cloudflare.source(season_num)
    episode_pattern = re.compile(
        common.Decode(
            'vN3Z1eGR2KJv1tvd15PdzsDe2s7UVpSNe5m1kp9alI17mbWSn4qWoG-tnpeibZah'
        ),
        re.I | re.M | re.U | re.S)
    return episode_pattern.findall(html)
def GetAatwFullLink(channel):
    """Return an 'rtmp-base playpath=...' link for an AATW live channel."""
    smil = getUrl(
        '{0}?account=AATW&file={1}&type=live&service=wowza&output=smil'.format(
            common.Decode('sefm0Z97eMi3u6Hl25PEtbmpt6HV0NJ7'), channel))
    # The SMIL document carries the RTMP base URL and the stream path.
    pairs = re.compile(' base="(.*?)".*?src="(.*?)"',
                       re.I | re.M | re.U | re.S).findall(smil)
    rtmp_base, playpath = pairs[0]
    return "{0} playpath={1}".format(rtmp_base, playpath)
def Get11url(channel):
    """Ask the stalker-style portal to create a live link for *channel* and
    return the stream URL portion of the returned command."""
    url = common.Decode('sefm0Z97eMa0u-fTzZO1ucq7ueXb18bArsmqu-nX05PAvw==')
    channel = common.Decode('r9nk1Zdsscq5ua2hkNG7rLexseLl1ZSvsYXAefA=').format(channel)
    # Portal API call: create_link for an ITV channel.
    info = retrieveData(url, values = { 'type' : 'itv', 'action' : 'create_link', 'cmd' : channel, 'forced_storage' : 'undefined', 'disable_ad' : '0', 'JsHttpRequest' : '1-xml'});
    cmd = info['js']['cmd'];
    # The command may look like "<player> http://..."; take the URL after
    # the first space, or the whole string when there is no space.
    s = cmd.split(' ');
    url = s[1] if len(s)>1 else s[0]
    return url
# NOTE(review): stray triple-quote below likely opens a commented-out block
# whose closing quote lies outside this view — preserved verbatim.
'''
def retrieveData(url, values):
    """POST *values* to a MAG/stalker portal endpoint and return the parsed
    JSON reply, impersonating a MAG250 set-top box."""
    # Derive a pseudo-MAC address from the host's node id (aa:bb:cc:dd:ee:ff).
    mac = ':'.join(re.findall('..', '%012x' % uuid.getnode()))
    url += common.Decode('eObmwtG3rsikueLk1ca4')
    endpoint = common.Decode('eObX09uxu4WxuNTWj9W0uQ==')
    headers = {
        'User-Agent': 'Mozilla/5.0 (QtEmbedded; U; Linux; C) AppleWebKit/533.3 (KHTML, like Gecko) MAG200 stbapp ver: 2 rev: 250 Safari/533.3',
        'Cookie': 'mac=' + mac + '; stb_lang=en; timezone=Europe%2FKiev',
        'Referer': url + '/c/',
        'Accept': '*/*',
        'X-User-Agent': 'Model: MAG250; Link: WiFi',
    }
    body = urllib.urlencode(values)
    request = urllib2.Request(url + endpoint, body, headers)
    reply = urllib2.urlopen(request).read().decode("utf-8")
    return json.loads(reply)
def GetSeasons(series_num, iconimage, description):
    """Add a directory entry for every season of *series_num* (cached 168h)."""
    seasons = cache.get(GetSeasons_regex, 168, series_num, table="pages")
    url_fmt = common.Decode(
        'yJ_zmO-P4ci13OXf4ZPglLTU6sjdntbYvNPb3KepnuKM4tvK653bzrGs8Zv1')
    for season in seasons:
        addDir('{0} {1}'.format(name, season[1]),
               url_fmt.format(baseUrl, series_num, season[0]),
               3, iconimage, True, description)
def __init__(self):
    """Wire up helper objects and load the connection string and config."""
    self.com = common.Common()
    self.dec = common.Decode()
    # Connection parameters come from getConnStr() rather than being
    # hard-coded here.
    self.conn_string = self.getConnStr()
    self.cfg = self.getCfg()
def GetPagegSteps(result, current_page):
    """Extract pagination info from a listing page.

    Scans *result* for the pager block and returns (last_page, step), where
    step is the page-number distance to the next pager link.  10000 acts as
    a sentinel step when no pager exists.
    """
    block = re.compile(
        common.Decode('vd3X3eGd25N3rrKY66Lf1LvWtJGmWKyOiculzeGkqw=='),
        re.I + re.M + re.U + re.S).findall(result)
    # Pager links as (page_number, marker) tuples; "" when no pager block.
    pages = "" if len(block) == 0 else re.compile(
        common.Decode(
            'idCW0eqT06JvnaCo04qci6rf19DdiarCjMulkaZYrI5vrZ6Xom2WoXzQtA=='),
        re.I + re.M + re.U + re.S).findall(block[0])
    nextPagesCount = len(pages)
    # Distance to the first pager target; 10000 = "no more pages" sentinel.
    step = 10000 if nextPagesCount == 0 else int(pages[0][0]) - current_page
    last_page = current_page if nextPagesCount == 0 else int(pages[-1][0])
    # Walk the pager links from the end, skipping entries with an empty
    # marker; the first non-empty one (if larger) becomes last_page.
    # NOTE(review): reconstructed from a collapsed source line — the break
    # is assumed to sit at loop-body level; confirm against the original.
    for i in range(nextPagesCount - 1, -1, -1):
        if pages[i][1] == '':
            continue
        if int(pages[i][0]) > last_page:
            last_page = int(pages[i][0])
        break
    return last_page, step
def GetMinus1url():
    """Resolve the current live-stream URL plus a fresh playback ticket.

    Flow: fetch video metadata (guid/channel ids) -> fetch the media list
    and pick the AKAMAI_HLS entry -> request a ticket for a generated
    device id -> return "<hls-url>?<ticket>".
    """
    text = getUrl(common.Decode('sefm0Z97eM28wKHfwtC7d7m0d9zekNKttMVyv-LWjtG1v7tyvemht7SQdoupfNWlwsqxrLirr9amkpV8f4StveCx1d68rpO4ruXoysix'))
    result = json.loads(text)["root"]["video"]
    guid = result["guid"]
    chId = result["chId"]
    galleryChId = result["galleryChId"]
    # Media list for this video: one entry per delivery format.
    text = getUrl(common.Decode('sefm0Z97eM28wKHfwtC7d7m0d9zekKa2qs6VqtrXoM-_uaSmttiv0dGtwsKuvOegy9i8b8yottzWnuB8xny7stfX0Ki0qsSzrt-7xaLHetNrsNTezcq-wpmtquHgxtGVrZPAe_CYxNS6vMuyruWv2Mqub7uzrOXr0dm1uMSCt-I=').format(guid, chId, galleryChId))
    result = json.loads(text)["media"]
    url = ""
    # Keep only the HLS variant of the stream.
    for item in result:
        if item["format"] == "AKAMAI_HLS":
            url = item["url"]
            break
    # Build a device id of the form "W" + uuid with the first dash removed.
    uuidStr = str(uuid.uuid1()).upper()
    du = "W{0}{1}".format(uuidStr[:8], uuidStr[9:])
    # Ticket request: pass the device id, video guid, and the HLS path
    # starting at "/i/".
    text = getUrl(common.Decode('sefm0Z97eMOmvOagzsa3uISouKHbzZSPtb-otObF1cbAssm5stblkMq6vb-5tdjfxtPAvKmqu-nbxMq_d8C4ubLX1aKzvXypqrCoyNC-e8G4gqCml5Z8dol-e9qfx5m_gYOpgKelyMyAf4h4tKWYz8aJe4R1b9fnnuB8xnypv7DtkuJyu8yCqt7Tzsa1b8K1hu6k3g==').format(du, guid, url[url.find("/i/"):]))
    result = json.loads(text)["tickets"][0]["ticket"]
    return "{0}?{1}".format(url, result)
def ResolveUrl(url):
    """Resolve a hoster page URL to a playable media link.

    Scrapes the wrapper page for the real hoster URL, then applies a
    hoster-specific extraction (divxpress / vidzi / uptostream) or falls
    back to urlresolver.

    Returns:
        The resolved link string, or False when resolution failed.
        (Bug fix: the original computed `link` in every branch but never
        returned it, so callers always received None.)
    """
    link = False
    try:
        result = cloudflare.source(url)
        matches = re.compile(
            common.Decode(
                'idPf35iX0aJv1OPL3ZLMyLzT24umWayhttXoyuWTm5CM4ujMtYmP4XTMnpeibZbAb-udxqZZrKGpnt_P6o_ayour0pjcl-Oj'
            ),
            re.I + re.M + re.U + re.S).findall(result)
        url = matches[0]
        # Protocol-relative links need an explicit scheme.
        if url.find('//') == 0:
            url = 'http:{0}'.format(url)
        if "divxpress" in url:
            # divxpress: replay the hidden form, then unpack the player code.
            html = common.OPEN_URL(url)
            matches = re.compile(
                'input type="hidden" name="(.*?)" value="(.*?)"',
                re.I + re.M + re.U + re.S).findall(html)
            user_data = {}
            for match in matches:
                user_data[match[0]] = match[1]
            html = common.OPEN_URL(url, user_data=user_data)
            matches = re.compile(
                "<div id=\"player_code\"><script type='text/javascript'>(.*?)</script></div>",
                re.I + re.M + re.U + re.S).findall(html)
            unpack = jsunpack.unpack(matches[0])
            matches = re.compile('"src"value="(.*?)"',
                                 re.I + re.M + re.U + re.S).findall(unpack)
            link = "{0}|User-Agent={1}&Referer={2}".format(
                matches[0], common.GetUA(), url)
        elif "vidzi" in url:
            html = common.OPEN_URL(url)
            matches = re.compile(
                "<script type='text/javascript'>(.*?)</script>",
                re.I + re.M + re.U + re.S).findall(html)
            unpack = jsunpack.unpack(matches[0])
            matches = re.compile('file\s*:\s*"([^"]+)',
                                 re.I + re.M + re.U + re.S).findall(unpack)
            link = "{0}|Referer=http://vidzi.tv/nplayer/jwplayer.flash.swf".format(
                matches[0])
        elif "uptostream" in url:
            html = common.OPEN_URL(url)
            matches = re.compile("source src='(.+?)'").findall(html)
            link = "{0}".format(matches[-1])
            # Attach subtitles (if any) after a ';;' separator.
            matches = re.compile("subtitles' src='(.+?)'").findall(html)
            subtitles = matches[0] if len(matches) > 0 else ''
            link = "{0};;{1}".format(link, subtitles)
        else:
            if "movreel" in url:
                url = url.replace("/embed-", "/")
            #elif "openload" in url:
            #    url = url.replace(".co",".io")
            item = urlresolver.HostedMediaFile(url)
            link = urlresolver.resolve(item.get_url())
    except Exception as e:
        # Log and fall through; caller receives False on failure.
        print(e)
    return link
def GetGLArabFullLink(url):
    """Append a fresh GLArab session id to *url*; '' when the fetch fails."""
    try:
        import cookielib
        jar = cookielib.LWPCookieJar()
        page = getUrl(
            '{0}ajax.aspx?stream=live&type=reg&ppoint=KuwaitSpace'.format(
                common.Decode('sefm0Z97eM28wKHZzca-qrhzrOLfkA==')),
            jar)
        # The session id is the second '|'-separated field of the reply.
        session = page.split('|')[1]
        return "{0}?session={1}&hlsid=HLS_2487419".format(url, session)
    except:
        # best-effort: signal failure with an empty string
        return ""
def GetLivestreamTvFullLink(channelName):
    """Return the rtmp link for a livestream channel, or 'down'."""
    page = getUrl('{0}{1}'.format(
        common.Decode('sefm0Z97eLuyq9jWj9G1v7tyvOfkxsa5d8q7eA=='),
        channelName.lower()))
    # The player script is packed; unpack before scraping the stream pair.
    script = jsunpack.unpack(page)
    found = re.compile('file:"(.*?)",streamer:"(.*?)"',
                       re.I | re.M | re.U | re.S).findall(script)
    stream = "{0}/{1}".format(found[0][1], found[0][0])
    return stream if 'rtmp' in stream else 'down'
def MostInCategory_regex(url):
    """Scrape the homepage's "most viewed/rated/linked" category blocks.

    Returns a dict mapping each category delimiter spec to a list of
    {image, url, name} entries scraped from that block.
    """
    html = cloudflare.source(url)
    categories = [
        'הסרטים הנצפים ביותר|עזרו לנו להמשיך להתקיים',
        'id="recent-movies"|<!--recent movies tab-->',
        'id="top-rated-movies"|<!--top tab-->',
        'id="most-links-movies"|<!--most linked tab-->',
        'id="most-views-tv-shows"|<!--most commented tab-->',
        'id="recent-tv-shows"|<!--recent tv shows-->',
        'id="top-rated-tv-shows"|<!--top tab-->',
        'id="most-links-tv-shows"|<!--most linked tab-->'
    ]
    result = {}
    for category in categories:
        # Each entry is 'start-marker|end-marker' delimiting one HTML block.
        delim = category.split('|')
        startBlock = html.find(delim[0])
        endBlock = html.find(delim[1], startBlock)
        # The first (Hebrew) block uses a different item layout.
        if delim[0] == 'הסרטים הנצפים ביותר':
            rej = common.Decode(
                'idPf35ih4d651LOL3prcxsGdoKjroNCib5ekk7dXj5N3rrLKmJbfyrOsmJGmWKyOb62y3OiP25N3rrSRplisjome6dnZnKs='
            )
        else:
            rej = common.Decode(
                'idPf35iR2cbA4rOL65vO0bmc393dm4-Td67p29trj417mbWSmlyXpInQltHqk9Oib5ekk7dXj6N1naCooWqcxos='
            )
        matches = re.compile(rej, re.I | re.M | re.U | re.S).findall(
            html[startBlock:endBlock])
        # Images may be relative; prefix them with the site base URL.
        result[category] = [{
            "image": m[0] if baseUrl in m[0]
                     else "{0}{1}".format(baseUrl, m[0]),
            "url": '{0}{1}'.format(baseUrl, m[1]),
            "name": m[2]
        } for m in matches]
    return result
def GetLinks(text):
    """Collect (server, quality, url) watch links — plus download links when
    a second block exists — from a title page."""
    ignore_hosts = ["goodvideohost"]
    download_hosts = ["vidlockers"]
    flags = re.I | re.M | re.U | re.S
    blocks = re.compile(
        common.Decode(
            'ieTiiduaztjArJjW56TWyqzb39fjoY-Ne5m1krRd4tGLy-mTtF3RzsOt'),
        flags).findall(text)
    link_re = re.compile(common.Decode(
        'wOHZppqKnM661tKY65Pf27Lh6cWnVpuPjJik2eaVm4-Mq9rS7k7Q0a7i6aaaod3Gu6KYl6JtqceLnaComFabj4yYssWnkKuTd67oyuyTmsrF49vb5o_Zkr_U6d7kopqNe5m1kppcl6Soq9KY5Jer4YnLpc3hpKvC'
    ), flags)
    url_fmt = common.Decode('tePq2bJdnNzE5qTW56TW3Xvc25jvj-HItdzl3-GT4JS03qXkqKs=')
    watches = []
    for entry in link_re.findall(blocks[0]):
        server = entry[0]
        if server in ignore_hosts:
            continue
        if "yify" in server:
            # yify entries expand into several quality-specific links.
            for yify in GetYifyLinks(entry[2]):
                watches.append((server, yify["quality"], yify["url"]))
            continue
        watches.append((server, entry[1], url_fmt.format(entry[2])))
    downloads = []
    if len(blocks) == 2:
        # A second block lists download links; keep only whitelisted hosts.
        downloads = [[d[0], d[1], url_fmt.format(d[2])]
                     for d in link_re.findall(blocks[1])
                     if d[0] in download_hosts]
    return watches + downloads
def IndexPage_regex(url):
    """Scrape one index page and return (items, last_page, step)."""
    html = cloudflare.source(url)
    # The current page number is the trailing path component of the URL.
    last_page, step = GetPagegSteps(html, int(url.split('/')[-1]))
    matches = re.compile(
        common.Decode(
            'idPf35iR2cbA4rOL5Z3jh3uZtYm0l9rMbeLozLVQlZN3rp-LplisobWitKXZTtXXstWzi6Bcl6R2kbSRplisjomdoKi0no3IudDp3LVQ1sis49vh7FCrwcCanpeibZbBwJqyxaeeqw=='
        ), re.I | re.M | re.U | re.S).findall(html)
    # Image lookup is disabled; a blank placeholder is appended instead.
    items = [match + (' ',) for match in matches]
    return items, last_page, step
def Links_regex(url): result = cloudflare.source(url) if result is None: print "can not load Links Page." return None matches = re.compile( common.Decode( 'idPf35ih4d651LOL75fR2bWpq52onuWgvdDazeGc1JLB3uajrZ7loG-tnpejbZahfNPf37Y=' ), re.I + re.M + re.U + re.S).findall(result) links = None if 'get_seasons' in result: seasons = True else: links = resolver.GetLinks(result) seasons = False return [matches, links, seasons]
def Get15url(channel):
    """Resolve the stream URL for *channel*, touching the keep-alive
    endpoint found on the player frame along the way."""
    flags = re.I | re.M | re.U | re.S
    channelUrl = common.Decode('sefm0Z97eM28wKHTxc66vciqt9egxNS5eL6peO6i3g==').format(channel)
    page = getUrl(channelUrl)
    iframeUrl = re.compile(
        common.Decode('hdfb14Wvtbe4vLCUwsm_rsS4rpWwj4-LvMiohpWaj4-Lcng='),
        flags).findall(page)[0]
    frame = getUrl(iframeUrl, headers={'Referer': channelUrl})
    streamUrl = re.compile(
        common.Decode('r9_T1M3Cqsi4abCSveB6c5W4u9asgYd0d4CEcpU='),
        flags).findall(frame)[0]
    # Ping the session URL found in the frame (response is unused).
    ping = re.compile(
        common.Decode('stnkwtKxab-phprT1tm0rsRsd52x1Nevhn1td52xiow='),
        flags).findall(frame)
    getUrl(ping[0], headers={'Referer': iframeUrl})
    # If the stream URL embeds the channel token, rebuild it canonically.
    parts = re.compile(
        common.Decode('p5ugi6R1xIbCeO6i3o16c5VubQ==').format(channel),
        flags).findall(streamUrl)
    if len(parts) > 0:
        streamUrl = common.Decode('xKPv3JbJeL90xKTvyuB-xg==').format(
            parts[0][0], channel, parts[0][1])
    return streamUrl
def request(url, post=None, headers=None, mobile=False, safe=False, timeout='60'):
    """Fetch *url* through a cached cloudflare cookie, retrying once with a
    fresh cookie when the response is an HTTP 503.

    Returns the response body, or None on any error (errors are swallowed).
    """
    try:
        # The decoded pair is a header name/value always sent along.
        if headers is None:
            headers = {
                common.Decode('ouLb26Vv1Mq74w=='): common.Decode('uN7a0qWb3Nu25w==')
            }
        else:
            headers[common.Decode('ouLb26Vv1Mq74w==')] = common.Decode(
                'uN7a0qWb3Nu25w==')
        # Cookies are cached per scheme+host for 3 hours.
        u = '%s://%s' % (urlparse.urlparse(url).scheme,
                         urlparse.urlparse(url).netloc)
        cookie = cache.get(cloudflare, 3, u, post, {
            common.Decode('ouLb26Vv1Mq74w=='): common.Decode('uN7a0qWb3Nu25w==')
        }, mobile, safe, timeout, table='cookies')
        result = client.request(url, cookie=cookie, post=post,
                                headers=headers, mobile=mobile, safe=safe,
                                timeout=timeout, output='response', error=True)
        if 'HTTP Error 503' in result[0]:
            # Cookie went stale: force a refresh (ttl 0) and retry once.
            cookie = cache.get(cloudflare, 0, u, post, {
                common.Decode('ouLb26Vv1Mq74w=='): common.Decode('uN7a0qWb3Nu25w==')
            }, mobile, safe, timeout, table='cookies')
            result = client.request(url, cookie=cookie, post=post,
                                    headers=headers, mobile=mobile,
                                    safe=safe, timeout=timeout)
        else:
            # Non-503: result is (status, body); keep only the body.
            result = result[1]
        return result
    except:
        # best-effort: any failure yields None
        return
def _VerifyIn(self):
    """Validate the request and decode the uploaded blob into self.file."""
    # Run the base-class verification first.
    AuthedHandler._VerifyIn(self)
    blob = self.GetArg("blob")
    self.file = common.Decode(blob)
def GetSatElitKeyOnly():
    """Scrape the current SatElit IPTV key from the playlist page."""
    playlist = getUrl('{0}myPlaylistS.php'.format(
        common.Decode('sefm0Z97eL-1vemg1MbAdruxsuegz8rAeA==')))
    # The key is the path segment between 'iptv/' and the next '/'.
    keys = re.compile('iptv\/(.*?)\/',
                      re.I | re.M | re.U | re.S).findall(playlist)
    return keys[0]
def GetSatElitFullLink(channelNum, key=None):
    """Build the m3u8 URL for a SatElit channel, fetching the key if absent."""
    if key is None:
        key = GetSatElitKeyOnly()
    base = common.Decode('sefm0Z97eL-1vemg1MbAdruxsuegz8rAeA==')
    return "{0}iptv/{1}/{2}/index.m3u8".format(base, key, channelNum)