def jsCryptoAESDec(data, key):
    """Decrypt a CryptoJS-style AES payload.

    *data* is a base64-encoded JSON blob with the ciphertext under "ct"
    and a hex-encoded salt under "s"; *key* is the passphrase.  The
    decrypted plaintext is itself JSON and is returned parsed.
    """
    from jscrypto import decode
    payload = json.loads(data.decode('base-64'))
    ciphertext = payload['ct']
    salt = payload['s'].decode('hex')
    return json.loads(decode(ciphertext, key, salt))
Beispiel #2
0
def getLinks():
    cookieJar=get365CookieJar()
    kkey=get365Key(cookieJar)
    headers=[('User-Agent','AppleCoreMedia/1.0.0.13A452 (iPhone; U; CPU OS 9_0_2 like Mac OS X; en_gb)')]

    liveurl="http://www.sport365.live/en/events/-/1/-/-"+'/'+str(getutfoffset())
    linkshtml=getUrl(liveurl,headers=headers, cookieJar=cookieJar)
    reg="images\/types.*?(green|red).*?px;\">(.*?)<\/td><td style=\"borde.*?>(.*?)<\/td><td.*?>(.*?)<\/td.*?__showLinks.*?,.?\"(.*?)\".*?\">(.*?)<"
    sportslinks=re.findall(reg,linkshtml)
    print 'got links',sportslinks
    progress = xbmcgui.DialogProgress()
    progress.create('Progress', 'Fetching Live Links')
#    print sportslinks
    c=0
    cookieJar.save (S365COOKIEFILE,ignore_discard=True)

    import HTMLParser
    h = HTMLParser.HTMLParser()
    ret=[]
    import jscrypto
    for tp,tm,nm,lng,lnk,cat in sportslinks:
        c+=1
        cat=cat.split("/")[0]
        progress.update( (c*100/len(sportslinks)), "", "fetting links for "+nm, "" )
        try:    
            lnk=json.loads(lnk.decode("base64"))
            lnk=jscrypto.decode(lnk["ct"],kkey,lnk["s"].decode("hex"))
            #print lnk
            lnk=lnk.replace('\\/','/').replace('"',"")
         
            qty=""
            cat=cat.replace('&nbsp;','')
            lng=lng.replace('&nbsp;','')
            mm=nm.replace('&nbsp;','')
            #print nm,tp
            if 'span' in lng:
                lng=lng.split('>')
                qty=lng[-2].split('<')[0]
                lng= lng[-1]
            if len(lng)>0:
                lng=Colored("[" +lng+"]","orange")
            if len(qty)>0:
                qty=Colored("["+qty+"]","red")
                

            if "Dutch" in lng or "English" in lng or "German" in lng or "French" in lng:

                if not lnk.startswith("http"):
                    lnk='http://www.sport365.live'+lnk
                #print lnk
                if tp=="green":
                    lnk=base64.b64encode("Sports365:"+base64.b64encode(lnk))
                    #addDir(Colored(cat.capitalize()+": "+tm+" : "+ qty+lng+nm  ,'ZM') ,lnk,11 ,"",isItFolder=False)
                    ret+=[(cat.capitalize()+": "+tm+" : "+ qty+lng+nm ,lnk,True)]
                else:
                    ret+=[(cat.capitalize()+": "+tm+" : "+ qty+lng+nm ,lnk,False)]
        except: traceback.print_exc(file=sys.stdout)
        progress.close()
    return ret
Beispiel #3
0
def getLinks():
    """Scrape the sport365 live-events page and return a list of
    (label, link, is_green) tuples, one per listed stream (no language
    filter in this variant).
    """
    cookieJar=get365CookieJar()
    kkey=get365Key(cookieJar)
    headers=[('User-Agent','AppleCoreMedia/1.0.0.13A452 (iPhone; U; CPU OS 9_0_2 like Mac OS X; en_gb)')]

    liveurl="http://www.sport365.live/en/events/-/1/-/-"+'/'+str(getutfoffset())
    linkshtml=getUrl(liveurl,headers=headers, cookieJar=cookieJar)
    # Captures: type colour, time, name, language cell, encrypted link, category
    reg="images\/types.*?(green|red).*?px;\">(.*?)<\/td><td style=\"borde.*?>(.*?)<\/td><td.*?>(.*?)<\/td.*?__showLinks.*?,.?\"(.*?)\".*?\">(.*?)<"
    sportslinks=re.findall(reg,linkshtml)
    print 'got links',sportslinks
    progress = xbmcgui.DialogProgress()
    progress.create('Progress', 'Fetching Live Links')
#    print sportslinks
    c=0
    cookieJar.save (S365COOKIEFILE,ignore_discard=True)

    import HTMLParser
    h = HTMLParser.HTMLParser()  # NOTE(review): 'h' is never used in this version
    ret=[]
    import jscrypto
    for tp,tm,nm,lng,lnk,cat in sportslinks:
        c+=1
        cat=cat.split("/")[0]
        progress.update( (c*100/len(sportslinks)), "", "fetting links for "+nm, "" )
        try:    
            # Link payload is base64-encoded CryptoJS JSON: {"ct": ..., "s": hex salt}
            lnk=json.loads(lnk.decode("base64"))
            lnk=jscrypto.decode(lnk["ct"],kkey,lnk["s"].decode("hex"))
            #print lnk
            lnk=lnk.replace('\\/','/').replace('"',"")
         
            qty=""
            cat=cat.replace('&nbsp;','')
            lng=lng.replace('&nbsp;','')
            mm=nm.replace('&nbsp;','')  # NOTE(review): result discarded — likely meant nm=...
            #print nm,tp
            if 'span' in lng:
                # language cell also carries a quality tag inside a <span>
                lng=lng.split('>')
                qty=lng[-2].split('<')[0]
                lng= lng[-1]
            if len(lng)>0:
                lng=Colored("[" +lng+"]","orange")
            if len(qty)>0:
                qty=Colored("["+qty+"]","red")
                
            
            if not lnk.startswith("http"):
                lnk='http://www.sport365.live'+lnk
            #print lnk
            if tp=="green":
                # green entries are double-base64 wrapped with a "Sports365:" prefix
                lnk=base64.b64encode("Sports365:"+base64.b64encode(lnk))
                #addDir(Colored(cat.capitalize()+": "+tm+" : "+ qty+lng+nm  ,'ZM') ,lnk,11 ,"",isItFolder=False)
                ret+=[(cat.capitalize()+": "+tm+" : "+ qty+lng+nm ,lnk,True)]
            else:
                ret+=[(cat.capitalize()+": "+tm+" : "+ qty+lng+nm ,lnk,False)]
        except: traceback.print_exc(file=sys.stdout)
        # NOTE(review): close() runs inside the loop, dismissing the dialog
        # after the first item — probably intended to run after the loop.
        progress.close()
    return ret
Beispiel #4
0
def getLinks():
    """Scrape the sport365 live-events page and return a list of
    (time, name, quality, link, is_green) tuples for Croatian/English
    streams.

    Fixes: the cleaned event name is actually kept (was assigned to an
    unused 'mm'); dead locals ('c', unused HTMLParser instance) removed.
    """
    cookieJar = get365CookieJar()
    kkey = get365Key(cookieJar)
    headers = [(
        'User-Agent',
        'AppleCoreMedia/1.0.0.13A452 (iPhone; U; CPU OS 9_0_2 like Mac OS X; en_gb)'
    )]

    liveurl = "http://www.sport365.live/en/events/-/1/-/-" + '/' + str(
        getutfoffset())
    linkshtml = getUrl(liveurl, headers=headers, cookieJar=cookieJar)
    # Captures: type colour, time, name, language cell, encrypted link, category
    reg = "images\/types.*?(green|red).*?px;\">(.*?)<\/td><td style=\"borde.*?>(.*?)<\/td><td.*?>(.*?)<\/td.*?__showLinks.*?,.?\"(.*?)\".*?\">(.*?)<"
    sportslinks = re.findall(reg, linkshtml)
    cookieJar.save(S365COOKIEFILE, ignore_discard=True)

    ret = []
    import jscrypto
    for tp, tm, nm, lng, lnk, cat in sportslinks:
        cat = cat.split("/")[0]
        try:
            # Link payload is base64-encoded CryptoJS JSON: {"ct": ..., "s": hex salt}
            lnk = json.loads(lnk.decode("base64"))
            lnk = jscrypto.decode(lnk["ct"], kkey, lnk["s"].decode("hex"))
            lnk = lnk.replace('\\/', '/').replace('"', "")

            qty = ""
            cat = cat.replace('&nbsp;', '')
            lng = lng.replace('&nbsp;', '')
            # Fixed: was "mm = nm.replace(...)" — the cleaned name was discarded.
            nm = nm.replace('&nbsp;', '')
            if 'span' in lng:
                # language cell also carries a quality tag inside a <span>
                lng = lng.split('>')
                qty = lng[-2].split('<')[0]
                lng = lng[-1]

            if len(qty) == 0:
                qty = ' '

            if not lnk.startswith("http"):
                lnk = 'http://www.sport365.live' + lnk
            if tp == "green":
                # green entries are double-base64 wrapped with a "Sports365:" prefix
                lnk = base64.b64encode("Sports365:" + base64.b64encode(lnk))
                if lng.lower() == 'croatian' or lng.lower() == 'english':
                    ret += [(tm, nm, qty, lnk, True)]
            else:
                if lng.lower() == 'croatian' or lng.lower() == 'english':
                    ret += [(tm, nm, qty, lnk, False)]
        except:
            traceback.print_exc(file=sys.stdout)
    return ret
def select365(url):
    print 'select365',url
    url=base64.b64decode(url)
    retUtl=""
    
    try:
        links=[]
        matchhtml=getUrl(url)        
        reg=".open\('(.*?)'.*?>(.*?)<"
        sourcelinks=re.findall(reg,matchhtml)
        b6=False

        enc=False
        if 1==2 and len(sourcelinks)==0:
            reg="showPopUpCode\\('(.*?)'.*?\\.write.*?d64\\(\\\\\\'(.*?)\\\\\\'\\)"
            sourcelinks=re.findall(reg,matchhtml)
            #print 'f',sourcelinks
            b6=True
        if 1==2 and len(sourcelinks)==0:
            reg="showPopUpCode\\('(.*?)'.*?\\.write.*?atob\\(\\\\\\'(.*?)\\\\\\'\\)"
            sourcelinks=re.findall(reg,matchhtml)
            #print 's',sourcelinks
            b6=True            
        if len(sourcelinks)==0:
            reg="showWindow\\('(.*?)',.*?>(.*?)<"
            sourcelinks=re.findall(reg,matchhtml)
            #print sourcelinks
            enc=True    
            b6=False
        if len(sourcelinks)==0:
            reg="showPopUpCode\\(.*?,.?'(.*?)'.*?,.*?,(.*?)\\)"
            sourcelinks=re.findall(reg,matchhtml)
            #print sourcelinks
            enc=True    
            b6=False
            
        #print 'sourcelinks',sourcelinks
        kkey=get365Key(get365CookieJar())
        if len(sourcelinks)==0:
            print 'No links',matchhtml
            #addDir(Colored("  -"+"No links available yet, Refresh 5 mins before start.",'') ,"" ,0,"", False, True,isItFolder=False)		#name,url,mode,icon
            return ""
        else:
            available_source=[]
            ino=0
            for curl,cname in sourcelinks:
                ino+=1
                try:
                    if b6:
                        curl,cname=cname,curl
                        #print b6,curl
                        curl=base64.b64decode(curl)
                        curl=re.findall('(http.*?)"',curl)[0]#+'/768/432'
                    if enc:
                        #print curl
                        curl=json.loads(curl.decode("base64"))
                        import jscrypto
                        #print curl["ct"],kkey,curl["s"]
                        curl=jscrypto.decode(curl["ct"],kkey,curl["s"].decode("hex"))
                        #print curl
                        curl=curl.replace('\\/','/').replace('"',"")
                        print 'fina;',curl
                        if 'window.atob' in curl:
                            reg="window\\.atob\\(\\\\\\\\\\'(.*?)'"
                            #print 'in regex',reg,curl
                            curl=re.findall(reg,curl)[0]
                            curl=base64.b64decode(curl)
                            curl=re.findall('(http.*?)"',curl)[0]#+'/768/432'
                            if not curl.split('/')[-2].isdigit():
                                curl+='/768/432'
                                
                    print curl
                    cname=cname.encode('ascii', 'ignore').decode('ascii')
                    #if not cname.startswith('link'):
                    cname='source# '+str(ino)
                    available_source.append(cname)
                    links+=[[cname,curl]]
                except:
                    traceback.print_exc(file=sys.stdout)
            if len(curl)==0:
                return ""
            if len(curl)==1:
                return links[0][1]
            dialog = xbmcgui.Dialog()
            index = dialog.select('Choose your link', available_source)
            if index > -1:
                return links[index][1]    

    except:
        traceback.print_exc(file=sys.stdout)
    return retUtl
def selectMatch(url):
    """Resolve a sport365 event to a playable URL, appending the Referer
    and User-Agent headers in Kodi's "url|Header=value" form.

    Returns None implicitly when select365 produced no URL.
    """
    url=select365(url)
    if url=="": return 
    import HTMLParser
    h = HTMLParser.HTMLParser()

    #urlToPlay=base64.b64decode(url)
    cookieJar=get365CookieJar()
    html=getUrl(url,headers=[('Referer','http://www.sport365.live/en/main')],cookieJar=cookieJar)
    #print html
    # Try a direct player iframe first; otherwise go via /en/player/f/<id>.
    reg="iframe frameborder=0.*?src=\"(.*?)\""
    linkurl=re.findall(reg,html)
    if len(linkurl)==0:
        reg="http://www.sport365.live.*?'\/(.*?)'\)"
        linkurl=re.findall(reg,html)[0]
        linkurl="http://www.sport365.live/en/player/f/"+linkurl
        html=getUrl(h.unescape(linkurl),cookieJar=cookieJar)
        reg="iframe frameborder=0.*?src=\"(.*?)\""
        linkurl=re.findall(reg,html)[0]
#        print linkurl
    else:
        linkurl=linkurl[0]
    enclinkhtml=getUrl(h.unescape(linkurl),cookieJar=cookieJar)
    reg='player_div", "st".*?file":"(.*?)"'
    enclink=re.findall(reg,enclinkhtml)
    usediv=False
    
    if len(enclink)==0:
        # No inline payload: re-post the page's form fields to fetch it.
        reg='name="f" value="(.*?)"'
        enclink=re.findall(reg,enclinkhtml)[0]  
        reg='name="s" value="(.*?)"'
        encst=re.findall(reg,enclinkhtml)[0]
        reg="\('action', ['\"](.*?)['\"]"
        postpage=re.findall(reg,enclinkhtml)
        if len(postpage)>0:
            
            reg='player_div", "st".*?file":"(.*?)"'
            post={'p':'http://cdn.adshell.net/swf/player.swf','s':encst,'f':enclink}
            post = urllib.urlencode(post)
            enclinkhtml2= getUrl(postpage[0],post=post, headers=[('Referer',linkurl),('User-Agent','Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.116 Safari/537.36')])
            #enclink=re.findall(reg,enclinkhtml2)
            # NOTE(review): chained comparison — evaluates as
            # ('player_div' in enclinkhtml2) and (enclinkhtml2 > 0); the
            # second half is only True because Python 2 allows str > int.
            if 'player_div' in enclinkhtml2>0:
                usediv=True
                #enclinkhtml=enclinkhtml2
                #print 'usediv',usediv
                reg="player_div\",.?\"(.*?)\",.?\"(.*?)\",(.*?)\)"
                encst,enclink,isenc=re.findall(reg,enclinkhtml2)[0]
                #print 'encst,enclink',encst,enclink,isenc
                isenc=isenc.strip();
                if isenc=="1":
                    # Encrypted payload: the key is derived from the
                    # wrapper.js URL on the same page.
                    reg="src=\"(.*?\\/wrapper.js.*)\""
                    wrapurl=re.findall(reg,enclinkhtml2)[0]
                    kkey=get365Key(cookieJar,url=wrapurl)
                    #print 'kkey',kkey
                    enclink=json.loads(enclink.decode("base64"))
                    import jscrypto
                    lnk=jscrypto.decode(enclink["ct"],kkey,enclink["s"].decode("hex"))
                    
                    #print lnk
                    enclink=lnk
                #enclink=enclink[0]
                #print 'enclink',enclink
                #reg='player_div", "st":"(.*?)"'
                #encst=re.findall(reg,enclinkhtml)[0]
        
    else:
        usediv=True
        #print 'usediv',usediv
        enclink=enclink[0]
        #print 'enclink',enclink
        reg='player_div", "st":"(.*?)"'
        encst=re.findall(reg,enclinkhtml)[0]

    # The "st" blob decodes to JSON holding the site key, which is stripped
    # from the link before decoding it.
    decodedst=decode(encst)

    #print encst, decodedst
    reg='"stkey":"(.*?)"'
    sitekey=re.findall(reg,decodedst)[0]
    #sitekey="myFhOWnjma1omjEf9jmH9WZg91CC"#hardcoded

    urlToPlay= decode(enclink.replace(sitekey,""))+"|Referer=%s&User-Agent=Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.103 Safari/537.36"%"http://h5.adshell.net/flash"
    return urlToPlay
Beispiel #7
0
def select365(url):
    print 'select365', url
    url = base64.b64decode(url)
    retUtl = ""

    try:
        links = []
        matchhtml = getUrl(url)
        reg = ".open\('(.*?)'.*?>(.*?)<"
        sourcelinks = re.findall(reg, matchhtml)
        b6 = False

        enc = False
        if 1 == 2 and len(sourcelinks) == 0:
            reg = "showPopUpCode\\('(.*?)'.*?\\.write.*?d64\\(\\\\\\'(.*?)\\\\\\'\\)"
            sourcelinks = re.findall(reg, matchhtml)
            #print 'f',sourcelinks
            b6 = True
        if 1 == 2 and len(sourcelinks) == 0:
            reg = "showPopUpCode\\('(.*?)'.*?\\.write.*?atob\\(\\\\\\'(.*?)\\\\\\'\\)"
            sourcelinks = re.findall(reg, matchhtml)
            #print 's',sourcelinks
            b6 = True
        if len(sourcelinks) == 0:
            reg = "showWindow\\('(.*?)',.*?>(.*?)<"
            sourcelinks = re.findall(reg, matchhtml)
            #print sourcelinks
            enc = True
            b6 = False
        if len(sourcelinks) == 0:
            reg = "showPopUpCode\\(.*?,.?'(.*?)'.*?,.*?,(.*?)\\)"
            sourcelinks = re.findall(reg, matchhtml)
            #print sourcelinks
            enc = True
            b6 = False

        #print 'sourcelinks',sourcelinks
        kkey = get365Key(get365CookieJar())
        if len(sourcelinks) == 0:
            print 'No links', matchhtml
            #addDir(Colored("  -"+"No links available yet, Refresh 5 mins before start.",'') ,"" ,0,"", False, True,isItFolder=False)		#name,url,mode,icon
            return ""
        else:
            available_source = []
            ino = 0
            for curl, cname in sourcelinks:
                ino += 1
                try:
                    if b6:
                        curl, cname = cname, curl
                        #print b6,curl
                        curl = base64.b64decode(curl)
                        curl = re.findall('(http.*?)"', curl)[0]  #+'/768/432'
                    if enc:
                        #print curl
                        curl = json.loads(curl.decode("base64"))
                        import jscrypto
                        #print curl["ct"],kkey,curl["s"]
                        curl = jscrypto.decode(curl["ct"], kkey,
                                               curl["s"].decode("hex"))
                        #print curl
                        curl = curl.replace('\\/', '/').replace('"', "")
                        print 'fina;', curl
                        if 'window.atob' in curl:
                            reg = "window\\.atob\(\\\\(.*?)\\\\\\)"
                            #print 'in regex',reg,curl
                            curl = re.findall(reg, curl)[0]
                            curl = base64.b64decode(curl)
                            curl = re.findall('(http.*?)"',
                                              curl)[0]  #+'/768/432'
                            if not curl.split('/')[-2].isdigit():
                                curl += '/768/432'

                    print curl
                    cname = cname.encode('ascii', 'ignore').decode('ascii')
                    #if not cname.startswith('link'):
                    cname = 'source# ' + str(ino)
                    available_source.append(cname)
                    links += [[cname, curl]]
                except:
                    traceback.print_exc(file=sys.stdout)
            if len(curl) == 0:
                return ""
            if len(curl) == 1:
                return links[0][1]
            dialog = xbmcgui.Dialog()
            index = dialog.select('Choose your link', available_source)
            if index > -1:
                return links[index][1]

    except:
        traceback.print_exc(file=sys.stdout)
    return retUtl
Beispiel #8
0
def selectMatch(url):
    """Resolve a sport365 event to a playable URL, appending the Referer
    and User-Agent headers in Kodi's "url|Header=value" form.

    Returns None implicitly when select365 produced no URL.
    """
    url = select365(url)
    if url == "": return
    import HTMLParser
    h = HTMLParser.HTMLParser()

    #urlToPlay=base64.b64decode(url)
    cookieJar = get365CookieJar()
    html = getUrl(url,
                  headers=[('Referer', 'http://www.sport365.live/en/main')],
                  cookieJar=cookieJar)
    #print html
    # Try a direct player iframe first; otherwise go via /en/player/f/<id>.
    reg = "iframe frameborder=0.*?src=\"(.*?)\""
    linkurl = re.findall(reg, html)
    if len(linkurl) == 0:
        reg = "http://www.sport365.live.*?'\/(.*?)'\)"
        linkurl = re.findall(reg, html)[0]
        linkurl = "http://www.sport365.live/en/player/f/" + linkurl
        html = getUrl(h.unescape(linkurl), cookieJar=cookieJar)
        reg = "iframe frameborder=0.*?src=\"(.*?)\""
        linkurl = re.findall(reg, html)[0]


#        print linkurl
    else:
        linkurl = linkurl[0]
    enclinkhtml = getUrl(h.unescape(linkurl), cookieJar=cookieJar)
    reg = 'player_div", "st".*?file":"(.*?)"'
    enclink = re.findall(reg, enclinkhtml)
    usediv = False

    if len(enclink) == 0:
        # No inline payload: re-post the page's form fields to fetch it.
        reg = 'name="f" value="(.*?)"'
        enclink = re.findall(reg, enclinkhtml)[0]
        reg = 'name="s" value="(.*?)"'
        encst = re.findall(reg, enclinkhtml)[0]
        reg = "\('action', ['\"](.*?)['\"]"
        postpage = re.findall(reg, enclinkhtml)
        if len(postpage) > 0:

            reg = 'player_div", "st".*?file":"(.*?)"'
            post = {
                'p': 'http://cdn.adshell.net/swf/player.swf',
                's': encst,
                'f': enclink
            }
            post = urllib.urlencode(post)
            enclinkhtml2 = getUrl(
                postpage[0],
                post=post,
                headers=
                [('Referer', linkurl),
                 ('User-Agent',
                  'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.116 Safari/537.36'
                  )])
            #enclink=re.findall(reg,enclinkhtml2)
            # NOTE(review): chained comparison — evaluates as
            # ('player_div' in enclinkhtml2) and (enclinkhtml2 > 0); the
            # second half is only True because Python 2 allows str > int.
            if 'player_div' in enclinkhtml2 > 0:
                usediv = True
                #enclinkhtml=enclinkhtml2
                #print 'usediv',usediv
                reg = "player_div\",.?\"(.*?)\",.?\"(.*?)\",(.*?)\)"
                encst, enclink, isenc = re.findall(reg, enclinkhtml2)[0]
                #print 'encst,enclink',encst,enclink,isenc
                isenc = isenc.strip()
                if isenc == "1":
                    # Encrypted payload: the key is derived from the
                    # wrapper.js URL on the same page.
                    reg = "src=\"(.*?\\/wrapper.js.*)\""
                    wrapurl = re.findall(reg, enclinkhtml2)[0]
                    kkey = get365Key(cookieJar, url=wrapurl)
                    #print 'kkey',kkey
                    enclink = json.loads(enclink.decode("base64"))
                    import jscrypto
                    lnk = jscrypto.decode(enclink["ct"], kkey,
                                          enclink["s"].decode("hex"))

                    #print lnk
                    enclink = lnk
                #enclink=enclink[0]
                #print 'enclink',enclink
                #reg='player_div", "st":"(.*?)"'
                #encst=re.findall(reg,enclinkhtml)[0]

    else:
        usediv = True
        #print 'usediv',usediv
        enclink = enclink[0]
        #print 'enclink',enclink
        reg = 'player_div", "st":"(.*?)"'
        encst = re.findall(reg, enclinkhtml)[0]

    # The "st" blob decodes to JSON holding the site key, which is stripped
    # from the link before decoding it.
    decodedst = decode(encst)

    #print encst, decodedst
    reg = '"stkey":"(.*?)"'
    sitekey = re.findall(reg, decodedst)[0]
    #sitekey="myFhOWnjma1omjEf9jmH9WZg91CC"#hardcoded

    urlToPlay = decode(
        enclink.replace(sitekey, "")
    ) + "|Referer=%s&User-Agent=Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.103 Safari/537.36" % "http://h5.adshell.net/flash"
    return urlToPlay
Beispiel #9
0
def selectMatch(url):
    """Resolve a sport365 event to a playable stream URL with the HTTP
    headers Kodi needs appended in "url|Header=value&..." form.

    Returns None on any failure (the traceback is printed to stdout).
    """

    try:
        cookieJar = get365CookieJar()
        mainref = 'http://www.sport365.live/en/main'
        headers = [('User-Agent', useragent)]
        try:
            # Warm up cookies and capture the post-redirect referer.
            getUrl("http://www.sport365.live/",
                   headers=headers,
                   cookieJar=cookieJar)
            mainref = lastfinalurl
        except:
            pass

        url = select365(url, cookieJar, mainref)
        print 'playurl', url
        # Strip junk that can trail the resolved URL.
        if 'iframe' in url.lower():
            url = re.findall('(http.*?)\\\\', url)[0]
        if 'script' in url.lower():
            url = re.findall('(http.*?)&', url)[0]

        print 'playurl', url
        if 1 == 2:  # deliberately disabled ad-beacon block
            try:
                headers = [('User-Agent', useragent)]
                hh = getUrl(
                    "http://adbetnet.advertserve.com/servlet/view/dynamic/javascript/zone?zid=281&pid=4&resolution=1920x1080&random=11965377&millis=1473441350879&referrer=http%3A%2F%2Fwww.sport365.live%2Fen%2Fhome",
                    headers=headers,
                    cookieJar=cookieJar)
                getUrl(re.findall('<img width=.*?src=\\\\"(.*?)\\\\"', hh)[0],
                       headers=headers,
                       cookieJar=cookieJar,
                       useproxy=False)
            except:
                pass
        if url == "": return
        import HTMLParser
        h = HTMLParser.HTMLParser()

        #urlToPlay=base64.b64decode(url)

        html = getUrl(url, headers=[('Referer', mainref)], cookieJar=cookieJar)

        #print html
        # Try a direct player iframe first; otherwise go via /en/player/f/<id>.
        reg = "iframe frameborder=0.*?src=\"(.*?)\""
        linkurl = re.findall(reg, html)
        #print 'linkurl',linkurl
        if len(linkurl) == 0:
            reg = "http://www.sport365.live.*?'\/(.*?)'\)"
            linkurl = re.findall(reg, html)[0]
            linkurl = "http://www.sport365.live/en/player/f/" + linkurl
            html = getUrl(h.unescape(linkurl), cookieJar=cookieJar)
            reg = "iframe frameborder=0.*?src=\"(.*?)\""
            linkurl = re.findall(reg, html)[0]
    #        print linkurl
        else:
            linkurl = linkurl[0]
        uurl = h.unescape(linkurl)
        print 'uurl', uurl
        if not uurl.startswith('http'):
            import urlparse
            uurl = urlparse.urljoin('http://www.fastflash.pw/', uurl)
            print 'newurl', uurl
        enclinkhtml = getUrl(uurl,
                             cookieJar=cookieJar,
                             headers=[('Referer', url)])
        reg = 'player_div", "st".*?file":"(.*?)"'
        enclink = re.findall(reg, enclinkhtml)
        usediv = False

        if len(enclink) == 0:
            # No inline payload: re-post the page's form fields to fetch it.
            reg = 'name="f" value="(.*?)"'
            enclink = re.findall(reg, enclinkhtml)[-1]
            reg = 'name="d" value="(.*?)"'
            encst = re.findall(reg, enclinkhtml)[-1]
            reg = "\('action', ['\"](.*?)['\"]"
            postpage = re.findall(reg, enclinkhtml)
            if len(postpage) > 0:

                reg = 'player_div", "st".*?file":"(.*?)"'
                post = {'d': encst, 'f': enclink}
                post = urllib.urlencode(post)
                enclinkhtml2 = getUrl(postpage[0],
                                      post=post,
                                      cookieJar=cookieJar,
                                      headers=[('Referer', linkurl),
                                               ('User-Agent', useragent)])
                #enclink=re.findall(reg,enclinkhtml2)

                if 1 == 1:  #'player_div' in enclinkhtml2>0:
                    usediv = True
                    #enclinkhtml=enclinkhtml2
                    #print 'usediv',usediv
                    # First regex is immediately overwritten by the second.
                    reg = 'player_div\'.*?\s*\s*<.*?\s*.*?\("(.*?)\",.?\"(.*?)\",(.*?)\)'
                    reg = "\([\"'](.*?)[\"'],.?[\"'](.*?)[\"'],.?[\"'](.*?)[\"'],(.*?)\)"
                    pd, encst, enclink, isenc = re.findall(reg,
                                                           enclinkhtml2)[0]

                    print 'encst,enclink', encst, enclink, isenc
                    isenc = isenc.strip()
                    if isenc == "1":
                        #reg="src=\"(.*?\\/wrapper.js.*)\""
                        #wrapurl=re.findall(reg,enclinkhtml2)[0]

                        kkey = get365Key(cookieJar)
                        #print 'kkey',kkey
                        enclink = json.loads(enclink.decode("base64"))
                        import jscrypto
                        lnk = jscrypto.decode(enclink["ct"], kkey,
                                              enclink["s"].decode("hex"))

                        print 'dec link', lnk
                        enclink = lnk

                        if lnk.startswith('"http'):
                            lnk = lnk.replace('\"', '').replace('\\/', '/')
                    #enclink=enclink[0]
                    #print 'enclink',enclink
                    #reg='player_div", "st":"(.*?)"'
                    #encst=re.findall(reg,enclinkhtml)[0]

        else:
            usediv = True
            #print 'usediv',usediv
            enclink = enclink[0]
            #print 'enclink',enclink
            reg = 'player_div", "st":"(.*?)"'
            encst = re.findall(reg, enclinkhtml)[0]
        print 'encst', encst
        if 1 == 2:  #not 'peer' in encst:  (disabled stkey-decoding path)
            decodedst = decode(encst)

            #print encst, decodedst
            reg = '"stkey":"(.*?)"'
            sitekey = re.findall(reg, decodedst)[0]
            #sitekey="myFhOWnjma1omjEf9jmH9WZg91CC"#hardcoded
            urlToPlaymain = decode(enclink.replace(sitekey, ""))
        else:
            # NOTE(review): 'lnk' is only bound when the isenc=="1" branch
            # ran; otherwise this raises NameError (caught by the outer
            # except, which returns None).
            urlToPlaymain = lnk
        urlToPlay = urlToPlaymain
        newcj = cookielib.LWPCookieJar()
        try:
            # Probe the stream URL once to collect the PHPSESSID cookie.
            getUrl(urlToPlay,
                   headers=[('User-Agent', useragent),
                            ('Origin', 'http://h5.adshell.net'),
                            ('Referer', 'http://h5.adshell.net/peer5')],
                   cookieJar=newcj)
        except:
            pass

        #print newcj
        sessionid = getCookiesString(newcj, 'PHPSESSID').split('=')[-1]
        import uuid
        playback = str(uuid.uuid1()).upper()
        # NOTE(review): the expression below is evaluated and discarded —
        # it has no effect; the cookie string is never appended anywhere.
        if len(sessionid) > 0: '&Cookie=PHPSESSID=' + sessionid.split('=')[-1]
        urlToPlaymain += "|Referer=%s&User-Agent=%s&Origin=http://h5.adshell.net&Referer=http://h5.adshell.net/peer5%s&X-Playback-Session-Id=%s" % (
            "http://h5.adshell.net/flash", urllib.quote_plus(useragent),
            sessionid, playback)
        headers = [('User-Agent', useragent), ('Referer', mainref)]
        getUrl("http://www.sport365.live/en/sidebar",
               headers=headers,
               cookieJar=cookieJar)
        cookieJar.save(S365COOKIEFILE, ignore_discard=True)

        return urlToPlaymain
        # NOTE(review): unreachable — leftover from an f4mTester variant.
        return 'plugin://plugin.video.f4mTester/?url=%s&streamtype=HLS' % (
            urllib.quote_plus(urlToPlaymain))
    except:
        traceback.print_exc(file=sys.stdout)
        return None
Beispiel #10
0
def PlayShowLink ( url ): 
    """Resolve *url* to a playable stream and start playback in Kodi.

    The page behind *url* is scraped with three fallback regexes:
    an iframe embed id ("embed/<id>?"), a yp('<id>') token, or any
    ('...') argument.  A full http link found by the third pattern is
    fetched again; a direct HLS master playlist is played via
    xbmc.PlayMedia, and ozee.com pages get their AES-encrypted HLS url
    decrypted with jscrypto first.  Ids from the first two patterns are
    treated as YouTube video ids and handed to the YouTube addon,
    routed through the user's configured proxy when geo-blocked.

    NOTE(review): Python 2 code; relies on module-level globals defined
    elsewhere in this file (getUrl, getCookiesString, selfAddon, name,
    __addonname__, __icon__, playmediawithproxy).
    """
    #	url = tabURL.replace('%s',channelName);
    print "URL: %s" %url
    #regstring='http:\/\/(.*?)\/admin'
        #match=re.findall(regstring, url)

    #url2='http://serialzone.in/admin/AjaxProcess.php?cfile=load_video&id=%s&param=value&_=%s' % (url, time.time())
    url2=url
    print url2
    # Fetch the page pretending to be mobile Safari on an iPad.
    req = urllib2.Request(url2)
    req.add_header('User-Agent', 'Mozilla/5.0(iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B314 Safari/531.21.10')
    response = urllib2.urlopen(req)
    link=response.read()
    response.close()

    #        print "LINK READ: ", link
    # First attempt: iframe embed id ("embed/<id>?").
    match =re.findall('embed\/(.*?)\?', link)
    #        print "MATCH: ",match,"\t\tMATCH LEN: ",len(match)

    if len(match)==0:
    #		print 'not found trying again'
        # Second attempt: yp('<id>') style token.
        match =re.findall('yp\(\'(.*?)\'', link)
    #	print "LINK: ",link
    #        print "MATCH: ",match
    if len(match)==0:
    #		print 'not found trying again'
        # Third attempt: any ('...') argument; may be a full http url.
        match =re.findall('\(\'(.*?)\'', link)
        # NOTE(review): match[0] raises IndexError when this regex also
        # finds nothing -- the caller sees the raw traceback.
        if match[0].startswith('http'):
            urltofetch=match[0]
            req = urllib2.Request(urltofetch)
            req.add_header('User-Agent', 'Mozilla/5.0(iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B314 Safari/531.21.10')
            response = urllib2.urlopen(req)
            link=response.read()
            response.close()
            # Look for a directly playable HLS master playlist url.
            match =re.findall("\'(http.*?master.m3u8?.*?)\'", link)
            if  len(match)==0 and 'ozee.com' in urltofetch.lower():
                # ozee.com hides the HLS url in a CryptoJS-style AES blob
                # {"ct": ..., "s": <hex salt>}; the passphrase is stored
                # in a nearby 'dailytoday' variable earlier in the page.
                import jscrypto
                print 'dd',link
                linkdatareg='(var hlsplayurl.*?\}\')'
                lindata=re.findall(linkdatareg,link)[0]
                print lindata
                ct=re.findall('"ct":"(.*?)"',lindata)[0]
                salt=re.findall('"s":"(.*?)"',lindata)[0]
                passphrase=re.findall('dailytoday.?=.?\"(.*?)\"',link.split('var hlsplayurl')[0])[-1]
                salt=salt.decode("hex")
                url= jscrypto.decode(ct,passphrase,salt)
                # Decrypted value is a JSON-escaped, quoted url; unescape it.
                uurl= url.replace("\\/","/").replace('"','')

                # Prime cookies for the stream host, then append them plus
                # the UA in Kodi's url|Header=value suffix format.
                cookieJar = cookielib.LWPCookieJar()
                getUrl(uurl,cookieJar=cookieJar,headers=[('User-Agent','Mozilla/5.0(iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B314 Safari/531.21.10')])

                uurl=uurl+'|User-Agent=%s&Cookie=%s'%('Mozilla/5.0(iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B314 Safari/531.21.10',getCookiesString(cookieJar))
                print uurl
                # 'name' is a module-level global set by the caller.
                listitem = xbmcgui.ListItem( label = str(name), iconImage = "DefaultVideo.png", thumbnailImage = xbmc.getInfoImage( "ListItem.Thumb" ) )
                print "playing stream name: " + str(name)
                xbmc.Player(  ).play( uurl, listitem)
            else:
                # NOTE(review): match[0] raises IndexError here when no
                # m3u8 matched and the host is not ozee.com.
                xbmc.executebuiltin("xbmc.PlayMedia("+match[0]+")")
            return

    # Patterns 1/2 (or a non-http pattern-3 token) yield a YouTube id.
    time1=2000
    line1 = "Playing Youtube Link"
    xbmc.executebuiltin('Notification(%s, %s, %d, %s)'%(__addonname__,line1, time1, __icon__))

    youtubecode=match[0]

    progress = xbmcgui.DialogProgress()
    progress.create('Progress', 'Checking if Proxy Required?. Normally works with Indian IP or VPN')
    progress.update( 20, "", "Getting Urls..")

    # Probe the mobile watch page to detect a geo-block.
    liveUrl='https://m.youtube.com/watch?v=%s'%youtubecode
    stringFailed='The uploader has not made this video available in your country'
    req = urllib2.Request(liveUrl)
    req.add_header('User-Agent', 'Mozilla/5.0 (iPad; CPU OS 5_0 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9A334 Safari/7534.48.3')
    response = urllib2.urlopen(req)
    link=response.read()
    response.close()
    if stringFailed in link and 'googlevideo' not in link:
        # Geo-blocked: re-fetch through the user-configured proxy and
        # scrape the raw stream urls out of the player config JS.
        try:
            proxyserver=selfAddon.getSetting('ProxyServer')#:8080
            proxyport=str(selfAddon.getSetting('ProxyPort'))
            try:
                # Map the quality setting index to a YouTube quality label.
                vquality=int(selfAddon.getSetting('youtubevideo'))
                print vquality
                vquality="hd720|medium|small".split('|')[vquality]
            except:
                # Deliberate best-effort: any bad/missing setting falls back.
                vquality="medium"
            print proxyserver,proxyport,vquality
            progress.update( 60, "", "Yes proxy required, using %s:%s"%(proxyserver,str(proxyport)))
            ##use US proxy and play with it

            cookieJar = cookielib.LWPCookieJar()
            cookie_handler = urllib2.HTTPCookieProcessor(cookieJar)
            opener = urllib2.build_opener(cookie_handler, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler(),urllib2.ProxyHandler({ 'https'  : '%s:%s'%(proxyserver,proxyport)}))
            req = urllib2.Request(liveUrl)
            req.add_header('User-Agent','Mozilla/5.0 (iPad; CPU OS 5_0 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9A334 Safari/7534.48.3')
            response = opener.open(req,timeout=20)
            link=response.read()
            response.close()
            print link
            progress.update( 90, "", "Got the Link, Now playing via proxy" )
            # Extract the url_encoded_fmt_stream_map blob from the page JS.
            # NOTE(review): pat2 is never used.
            pat2='\\\\?u0026quality=(.*?)\\\\.*?url=(http.*?videoplayback.*?),'
            pat='url_encoded_fmt_stream_map\\\\"\\: (.*?), '
            final_url=re.findall(pat,link)
            if len(final_url)==0:
                return
                #final_url=re.findall(pat2,link)
            final_url=final_url[0].split(',')
            print final_url
            print 'final_url',final_url
            # Build [url, quality] pairs for every advertised stream.
            qarray=[]
            for sss in final_url:
                url=urllib.unquote(sss.split('url=')[1].split('\\')[0])
                qlt=sss.split('quality=')[1].split(',')[0].split('\\')[0]
                qarray.append([url,qlt])
            urltoplay=None
            if len(qarray)==0:
                progress.update( 99, "", "FAILED, proxy issues or link resolution problem" )
                time1=2000
                line1 = "GeoBocked:Proxy issues or link resolution problem"
                xbmc.executebuiltin('Notification(%s, %s, %d, %s)'%(__addonname__,line1, time1, __icon__))
                return
            print 'qarray',qarray
            # Prefer the configured quality; otherwise fall back below.
            if len(qarray)>1:
                for uu in qarray:
                    if uu[1]==vquality and ',' not in uu[0]:
                        urltoplay=uu[0]
                        print 'quality selected',uu
            if not urltoplay:
                print 'default quality selected',final_url[0]
                # NOTE(review): final_url[0] is a *string*, so [0][0] is
                # its first character, not a url -- looks like it should
                # be qarray[0][0].  Confirm before changing.
                urltoplay=final_url[0][0]

            urltoplay=urllib.unquote(urltoplay)
            print 'urltoplay',urltoplay
            playmediawithproxy(urltoplay,name,'',proxyserver,proxyport,progress)
        except: traceback.print_exc(file=sys.stdout)
        return ''

    # Not geo-blocked: hand off to the YouTube addon.
    uurl = 'plugin://plugin.video.youtube/play/?video_id=%s' % youtubecode
    #uurl = 'plugin://plugin.video.youtube/v/%s' % youtubecode
    #	print uurl
    xbmc.executebuiltin("xbmc.PlayMedia("+uurl+")")

    return
Beispiel #11
0
def PlayShowLink(url):
    #	url = tabURL.replace('%s',channelName);
    print "URL: %s" % url
    #regstring='http:\/\/(.*?)\/admin'
    #match=re.findall(regstring, url)

    #url2='http://serialzone.in/admin/AjaxProcess.php?cfile=load_video&id=%s&param=value&_=%s' % (url, time.time())
    url2 = url
    print url2
    req = urllib2.Request(url2)
    req.add_header(
        'User-Agent',
        'Mozilla/5.0(iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B314 Safari/531.21.10'
    )
    response = urllib2.urlopen(req)
    link = response.read()
    response.close()

    #        print "LINK READ: ", link
    match = re.findall('embed\/(.*?)\?', link)
    #        print "MATCH: ",match,"\t\tMATCH LEN: ",len(match)

    if len(match) == 0:
        #		print 'not found trying again'
        match = re.findall('yp\(\'(.*?)\'', link)
    #	print "LINK: ",link
    #        print "MATCH: ",match
    if len(match) == 0:
        #		print 'not found trying again'
        match = re.findall('\(\'(.*?)\'', link)
        if match[0].startswith('http'):
            urltofetch = match[0]
            req = urllib2.Request(urltofetch)
            req.add_header(
                'User-Agent',
                'Mozilla/5.0(iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B314 Safari/531.21.10'
            )
            response = urllib2.urlopen(req)
            link = response.read()
            response.close()
            match = re.findall("\'(http.*?master.m3u8?.*?)\'", link)
            if len(match) == 0 and 'ozee.com' in urltofetch.lower():
                import jscrypto
                print 'dd', link
                linkdatareg = '(var hlsplayurl.*?\}\')'
                lindata = re.findall(linkdatareg, link)[0]
                print lindata
                ct = re.findall('"ct":"(.*?)"', lindata)[0]
                salt = re.findall('"s":"(.*?)"', lindata)[0]
                passphrase = re.findall('dailytoday.?=.?\"(.*?)\"',
                                        link.split('var hlsplayurl')[0])[-1]
                salt = salt.decode("hex")
                url = jscrypto.decode(ct, passphrase, salt)
                uurl = url.replace("\\/", "/").replace('"', '')
                cookieJar = cookielib.LWPCookieJar()
                getUrl(
                    uurl,
                    cookieJar=cookieJar,
                    headers=
                    [('User-Agent',
                      'Mozilla/5.0(iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B314 Safari/531.21.10'
                      )])

                uurl = uurl + '|User-Agent=%s&Cookie=%s' % (
                    'Mozilla/5.0(iPad; U; CPU iPhone OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B314 Safari/531.21.10',
                    getCookiesString(cookieJar))
                print uurl
                listitem = xbmcgui.ListItem(
                    label=str(name),
                    iconImage="DefaultVideo.png",
                    thumbnailImage=xbmc.getInfoImage("ListItem.Thumb"))
                print "playing stream name: " + str(name)
                xbmc.Player().play(uurl, listitem)
            else:
                xbmc.executebuiltin("xbmc.PlayMedia(" + match[0] + ")")
            return

    time1 = 2000
    line1 = "Playing Youtube Link"
    xbmc.executebuiltin('Notification(%s, %s, %d, %s)' %
                        (__addonname__, line1, time1, __icon__))

    youtubecode = match[0]

    progress = xbmcgui.DialogProgress()
    progress.create(
        'Progress',
        'Checking if Proxy Required?. Normally works with Indian IP or VPN')
    progress.update(20, "", "Getting Urls..")

    liveUrl = 'https://m.youtube.com/watch?v=%s' % youtubecode
    stringFailed = 'The uploader has not made this video available in your country'
    req = urllib2.Request(liveUrl)
    req.add_header(
        'User-Agent',
        'Mozilla/5.0 (iPad; CPU OS 5_0 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9A334 Safari/7534.48.3'
    )
    response = urllib2.urlopen(req)
    link = response.read()
    response.close()
    if stringFailed in link and 'googlevideo' not in link:
        try:
            proxyserver = selfAddon.getSetting('ProxyServer')  #:8080
            proxyport = str(selfAddon.getSetting('ProxyPort'))
            try:
                vquality = int(selfAddon.getSetting('youtubevideo'))
                print vquality
                vquality = "hd720|medium|small".split('|')[vquality]
            except:
                vquality = "medium"
            print proxyserver, proxyport, vquality
            progress.update(
                60, "", "Yes proxy required, using %s:%s" %
                (proxyserver, str(proxyport)))
            ##use US proxy and play with it

            cookieJar = cookielib.LWPCookieJar()
            cookie_handler = urllib2.HTTPCookieProcessor(cookieJar)
            opener = urllib2.build_opener(
                cookie_handler, urllib2.HTTPBasicAuthHandler(),
                urllib2.HTTPHandler(),
                urllib2.ProxyHandler(
                    {'https': '%s:%s' % (proxyserver, proxyport)}))
            req = urllib2.Request(liveUrl)
            req.add_header(
                'User-Agent',
                'Mozilla/5.0 (iPad; CPU OS 5_0 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9A334 Safari/7534.48.3'
            )
            response = opener.open(req, timeout=20)
            link = response.read()
            response.close()
            print link
            progress.update(90, "", "Got the Link, Now playing via proxy")
            pat2 = '\\\\?u0026quality=(.*?)\\\\.*?url=(http.*?videoplayback.*?),'
            pat = 'url_encoded_fmt_stream_map\\\\"\\: (.*?), '
            final_url = re.findall(pat, link)
            if len(final_url) == 0:
                return
                #final_url=re.findall(pat2,link)
            final_url = final_url[0].split(',')
            print final_url
            print 'final_url', final_url
            qarray = []
            for sss in final_url:
                url = urllib.unquote(sss.split('url=')[1].split('\\')[0])
                qlt = sss.split('quality=')[1].split(',')[0].split('\\')[0]
                qarray.append([url, qlt])
            urltoplay = None
            if len(qarray) == 0:
                progress.update(
                    99, "", "FAILED, proxy issues or link resolution problem")
                time1 = 2000
                line1 = "GeoBocked:Proxy issues or link resolution problem"
                xbmc.executebuiltin('Notification(%s, %s, %d, %s)' %
                                    (__addonname__, line1, time1, __icon__))
                return
            print 'qarray', qarray
            if len(qarray) > 1:
                for uu in qarray:
                    if uu[1] == vquality and ',' not in uu[0]:
                        urltoplay = uu[0]
                        print 'quality selected', uu
            if not urltoplay:
                print 'default quality selected', final_url[0]
                urltoplay = final_url[0][0]

            urltoplay = urllib.unquote(urltoplay)
            print 'urltoplay', urltoplay
            playmediawithproxy(urltoplay, name, '', proxyserver, proxyport,
                               progress)
        except:
            traceback.print_exc(file=sys.stdout)
        return ''

    uurl = 'plugin://plugin.video.youtube/play/?video_id=%s' % youtubecode
    #uurl = 'plugin://plugin.video.youtube/v/%s' % youtubecode
    #	print uurl
    xbmc.executebuiltin("xbmc.PlayMedia(" + uurl + ")")

    return