Example 1
    def movie(self, imdb, title, year):
        try:
            query = '%s (%s)' % (title, year)
            query = self.search_str_link % (urllib.quote_plus(query))
            query = urlparse.urljoin(self.base_link, query)

            result = client.request(query)

            result = result.decode('iso-8859-1').encode('utf-8')
            result = result.split("\n")

            searchString = result[0]
            query = self.search_link % urllib.quote_plus(searchString)
            query = urlparse.urljoin(self.base_link, query)
            result = client.request(query)

            result = result.decode('iso-8859-1').encode('utf-8')
            #url = re.compile('movie_id]\s=>\s(\d+)').findall(result)[0]
            url = re.compile('movieId=(\d+)').findall(result)[0]
            if url == None or url == '':
                raise Exception()
            return url
        except Exception as e:
            logger.error(e)
            return
Example 2
    def movie(self, imdb, title, year):
        try:
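            # pull the localized title from the IMDb page (Arabic Accept-Language) and drop the year suffix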
            t = 'http://www.imdb.com/title/%s' % imdb
            t = client.request(t, headers={'Accept-Language': 'ar-AR'})
            t = client.parseDOM(t, 'title')[0]
            t = re.sub('(?:\(|\s)\d{4}.+', '', t).strip()

            query = self.search_link % urllib.quote_plus(t)
            query = urlparse.urljoin(self.base_link, query)

            r = client.request(query)

            r = client.parseDOM(r, 'div', attrs={'class': 'item'})
            r = [(client.parseDOM(i, 'a', ret='href'),
                  client.parseDOM(i, 'span', attrs={'class': 'tt'}),
                  client.parseDOM(i, 'span', attrs={'class': 'year'}))
                 for i in r]
            r = [(i[0][0], i[1][0], i[2][0]) for i in r
                 if len(i[0]) > 0 and len(i[1]) > 0 and len(i[2]) > 0]
            r = [
                i[0] for i in r
                if cleantitle.get(t) == cleantitle.get(i[1]) and year == i[2]
            ][0]

            url = re.findall('(?://.+?|)(/.+)', r)[0]
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            return url
        except:
            pass
Example 3
    def source(self, item):
        quality = ''
        try:
            #urls = client.parseDOM(item, "td")
            urls = client.parseDOM(item, "a", ret="href")
            for i in range(0, len(urls)):
                uResult = client.request(urls[i], mobile=False)
                uResult = uResult.replace('\n', '').replace('\t', '')
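                # the desktop page sometimes returns a database error; retry with the mobile site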
                if 'Could not connect to mysql! Please check your database' in uResult:
                    uResult = client.request(urls[i], mobile=True)

                item = client.parseDOM(uResult,
                                       "div",
                                       attrs={"class": "videoplayer"})[0]
                item = re.compile('(SRC|src|data-config)=[\'|\"](.+?)[\'|\"]'
                                  ).findall(item)[0][1]
                urls[i] = item
            host = client.host(urls[0])
            if len(urls) > 1:
                url = "##".join(urls)
            else:
                url = urls[0]
            self.srcs.append({
                'source': host,
                'parts': str(len(urls)),
                'quality': quality,
                'provider': 'ApnaView',
                'url': url,
                'direct': False
            })
        except:
            pass
Example 4
    def movie(self, imdb, title, year):
        try:
            t = cleantitle.movie(title)

            q = urlparse.urljoin(self.base_link, self.search_link)
            q = q % urllib.quote_plus(title)

            r = client.request(q, error=True)
            r = client.parseDOM(r, 'div', attrs={'class': 'item'})

            r = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a'))
                 for i in r]
            r = [(i[0][0], i[1][-1]) for i in r
                 if len(i[0]) > 0 and len(i[1]) > 0]
            r = [i[0] for i in r if t == cleantitle.movie(i[1])]

            for i in r[:4]:
                try:
                    m = client.request(urlparse.urljoin(self.base_link, i))
                    m = re.sub('\s|<.+?>|</.+?>', '', m)
                    m = re.findall('Release:(%s)' % year, m)[0]
                    u = i
                    break
                except:
                    pass

            url = re.findall('(?://.+?|)(/.+)', u)[0]
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            return url
        except:
            return
Example 5
    def movie(self, imdb, title, year):
        try:
            t = cleantitle.get(title)

            q = '%s %s' % (title, year)
            q = self.search_link.decode('base64') % urllib.quote_plus(q)

            r = client.request(q)
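            # the search endpoint returns Google-style JSON results (url / titleNoFormatting)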
            r = json.loads(r)['results']
            r = [(i['url'], i['titleNoFormatting']) for i in r]
            r = [(i[0].split('%')[0], re.findall('(?:^Watch |)(.+?)(?:\(|)(\d{4})', i[1])) for i in r]
            r = [(i[0], i[1][0][0], i[1][0][1]) for i in r if i[1]]
            r = [i for i in r if '/watch/' in i[0] and not '-season-' in i[0]]
            r = [i for i in r if t == cleantitle.get(i[1]) and year == i[2]]
            r = r[0][0]

            url = re.findall('(?://.+?|)(/.+)', r)[0]
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            return url
        except:
            pass

        try:
            url = re.sub('[^A-Za-z0-9]', '-', title).lower()
            url = self.moviesearch_link % (url, year)

            r = urlparse.urljoin(self.base_link, url)
            r = client.request(r, output='geturl')
            if not year in r: raise Exception()

            return url
        except:
            return
Example 6
    def movie(self, imdb, title, year):
        self.super_url = []
        try:
            self.super_url = []
            title = cleantitle.getsearch(title)
            cleanmovie = cleantitle.get(title)
            query = "/search/%s.html" % (urllib.quote_plus(title))
            query = urlparse.urljoin(self.base_link, query)
            link = client.request(query)
            r = client.parseDOM(link, 'div', attrs={'class': 'ml-item'})
            for links in r:
                # print ("YMOVIES REQUEST", links)
                url = client.parseDOM(links, 'a', ret='data-url')[0]
                title = client.parseDOM(links, 'a', ret='title')[0]
                url = urlparse.urljoin(self.info_link, url)
                infolink = client.request(url)
                match_year = re.search('class="jt-info">(\d{4})<', infolink)
                match_year = match_year.group(1)
                if year in match_year:
                    result = client.parseDOM(infolink,
                                             'div',
                                             attrs={'class': 'jtip-bottom'})
                    for items in result:
                        playurl = client.parseDOM(items, 'a', ret='href')[0]
                        playurl = playurl.encode('utf-8')
                        referer = "%s" % playurl

                        mylink = client.request(referer)
                        i_d = re.findall(r'id: "(.*?)"', mylink,
                                         re.I | re.DOTALL)[0]
                        server = re.findall(r'server: "(.*?)"', mylink,
                                            re.I | re.DOTALL)[0]
                        type = re.findall(r'type: "(.*?)"', mylink,
                                          re.I | re.DOTALL)[0]
                        episode_id = re.findall(r'episode_id: "(.*?)"', mylink,
                                                re.I | re.DOTALL)[0]
                        # print ("YMOVIES REQUEST", episode_id)
                        token = self.__get_token()
                        # print ("YMOVIES TOKEN", token)
                        cookies = '%s%s%s=%s' % (self.key1, episode_id,
                                                 self.key2, token)
                        # print ("YMOVIES cookies", cookies)
                        url_hash = urllib.quote(
                            self.__uncensored(episode_id + self.key, token))
                        # print ("YMOVIES hash", url_hash)
                        url = urlparse.urljoin(
                            self.base_link,
                            self.playlist % (episode_id, url_hash))

                        request_url = url
                        # print ("YMOVIES REQUEST", request_url)
                        self.super_url.append([request_url, cookies, referer])
            return self.super_url
        except:
            return
Example 7
def request(url, resolverList=None):
    u = url
    url = False
    # Custom Resolvers
    try:
        host = client.host(u)

        r = [i['class'] for i in info() if host in i['host']][0]
        r = __import__(r, globals(), locals(), [], -1)
        url = r.resolve(u)
        if url == False:
            raise Exception()
    except:
        pass

    # URLResolvers 3.0.0
    try:
        if not url == False: raise Exception()
        logger.debug('Trying URL Resolver for %s' % u, __name__)

        hmf = urlresolver.HostedMediaFile(url=u,
                                          include_disabled=True,
                                          include_universal=False)
        if hmf.valid_url() == True: url = hmf.resolve()
        else: url = False
    except:
        pass

    try:
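        # split off any '|key=value' header suffix from the resolved URL and turn it into a dict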
        headers = url.rsplit('|', 1)[1]
    except:
        headers = ''
    headers = urllib.quote_plus(headers).replace('%3D', '=').replace(
        '%26', '&') if ' ' in headers else headers
    headers = dict(urlparse.parse_qsl(headers))

    if url and url.startswith('http') and '.m3u8' in url:
        result = client.request(url.split('|')[0],
                                headers=headers,
                                output='geturl',
                                timeout='20')
        if result == None: raise Exception()

    elif url and url.startswith('http'):

        result = client.request(url.split('|')[0],
                                headers=headers,
                                output='chunk',
                                timeout='20')
        if result == None:
            logger.debug('Resolved %s but unable to play' % url, __name__)
            raise Exception()

    return url
Example 8
    def getAuthToken(self):
        url = base64.b64decode(
            'aHR0cHM6Ly9hcHMuZHlubnMuY29tL3RvcC8lcy5waHA/d21zQXV0aFNpZ249')
        try:
            userAgent = self.getUserAgent()
            logger.debug('Final UserAgent : %s' % userAgent, __name__)
            filename = userAgent[:4]

            import datetime, hashlib
            timesegment = datetime.datetime.utcnow().strftime(
                "%m/%d/%Y %H:%M:%S")
            validtime = userAgent[4]

            headers = {
                'User-Agent':
                base64.b64decode(
                    'UGFrJTIwVFYvMS4wIENGTmV0d29yay84MDguMi4xNiBEYXJ3aW4vMTYuMy4w'
                )
            }
            ipstring = client.request(base64.b64decode(
                "aHR0cHM6Ly9hcHMuZHlubnMuY29tL3RvcC9pcF9jaGVjay5waHA="),
                                      headers=headers)
            ipadd = ipstring.split('Address: ')[1]

            s = "%s%s%s%s" % (ipadd,
                              base64.b64decode("dHVtYmluamlhamF5bmFqYW5h") +
                              userAgent[:10], timesegment, validtime)

            dd = base64.b64decode(
                "c2VydmVyX3RpbWU9JXMmaGFzaF92YWx1ZT0lcyZ2YWxpZG1pbnV0ZXM9JXM="
            ) % (timesegment,
                 base64.b64encode(
                     hashlib.md5(s).hexdigest().lower()), validtime)
            url = (url % filename) + base64.b64encode(dd)

            headers = {
                'User-Agent':
                cache.get(self.getDeviceID, 600000, table='live_cache'),
                'Authorization':
                base64.b64decode(
                    'QmFzaWMgWW05emMyZGliM056T21kdmIyUm5aMjl2WkE9PQ==')
            }
            res = client.request(url, headers=headers)
            s = list(res)
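            # blank out every 12th character within the last 59 characters of the response (apparently filler)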
            for i in range((len(s) - 59) / 12):
                ind = len(s) - 59 + (12 * (i))
                if ind < len(s):
                    print ind
                    s[ind] = ''
            return ''.join(s)
        except Exception as e:
            logger.error(e)
Example 9
    def request(self, url, check):
        try:
            result = client.request(url)
            if check in str(result):
                return result.decode('iso-8859-1').encode('utf-8')

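            # the direct request failed the check; retry through the proxy (two attempts)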
            result = client.request(proxy.get() + urllib.quote_plus(url))
            if check in str(result):
                return result.decode('iso-8859-1').encode('utf-8')

            result = client.request(proxy.get() + urllib.quote_plus(url))
            if check in str(result):
                return result.decode('iso-8859-1').encode('utf-8')
        except:
            return
Example 10
    def sources(self, url):
        try:
            sources = []

            if url == None: return sources

            url = urlparse.urljoin(self.base_link, url)

            r = client.request(url)
            r = client.parseDOM(r, 'div', attrs={'class': 'player_wraper'})
            r = client.parseDOM(r, 'iframe', ret='src')

            for u in r:
                try:
                    m = '"(?:url|src)"\s*:\s*"(.+?)"'

                    d = urlparse.urljoin(self.base_link, u)

                    s = client.request(d, referer=url, timeout='10')

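                    # unpack any packed <script> blocks and append them so the url/src regex can match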
                    j = re.compile('<script>(.+?)</script>', re.DOTALL).findall(s)
                    for i in j:
                        try:
                            s += jsunpack.unpack(i)
                        except:
                            pass

                    u = re.findall(m, s)

                    if not u:
                        p = re.findall('location\.href\s*=\s*"(.+?)"', s)
                        if not p: p = ['/player/%s' % d.strip('/').split('/')[-1]]
                        p = urlparse.urljoin(self.base_link, p[0])
                        s = client.request(p, referer=d, timeout='10')
                        u = re.findall(m, s)

                    for i in u:
                        try:
                            sources.append({'provider':'movies14','source': 'gvideo', 'quality': directstream.googletag(i)[0]['quality'],
                                            'language': 'en', 'url': i, 'direct': True, 'debridonly': False})
                        except:
                            pass
                except:
                    pass

            return sources
        except:
            return sources
Example 11
    def movie(self, imdb, title, year):
        try:
            url = None
            query = '%s' % (title)
            query = self.search_link % (urllib.quote_plus(query))
            query = urlparse.urljoin(self.base_link, query)

            result = client.request(query)

            result = result.decode('iso-8859-1').encode('utf-8')

            result = client.parseDOM(result, "div", attrs={"class": "item"})
            title = cleantitle.movie(title)

            for item in result:
                searchTitle = client.parseDOM(item,
                                              "span",
                                              attrs={"class": "tt"})[0]
                try:
                    searchTitle = re.compile('(.+?) \d{4} ').findall(
                        searchTitle)[0]
                except:
                    pass
                searchTitle = cleantitle.movie(searchTitle)
                if title in searchTitle:
                    url = client.parseDOM(item, "a", ret="href")[0]
                    url = re.compile(".+/(.+?)/").findall(url)[0]
                    break
            if url == None or url == '':
                raise Exception()
            return url
        except Exception as e:
            logger.error('[%s] Exception : %s' % (self.__class__, e))
            return
Example 12
    def movie(self, imdb, title, year):
        try:
            t = cleantitle.movie(title)

            try:
                query = '%s %s' % (title, year)
                query = base64.b64decode(
                    self.search_link) % urllib.quote_plus(query)

                result = client.request(query)
                result = json.loads(result)['results']
                r = [(i['url'], i['titleNoFormatting']) for i in result]
                r = [(i[0], re.compile('(.+?) [\d{4}|(\d{4})]').findall(i[1]))
                     for i in r]
                r = [(i[0], i[1][0]) for i in r if len(i[1]) > 0]
                r = [x for y, x in enumerate(r) if x not in r[:y]]
                r = [i for i in r if t == cleantitle.movie(i[1])]
                u = [i[0] for i in r][0]

            except:
                return

            url = u
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            return url
        except:
            return
Example 13
def resolve(url):
    try:
        result = client.request(url)
        dek = EnkDekoder.dekode(result)

        if not dek == None:
            url = client.parseDOM(dek,
                                  "param",
                                  attrs={"name": "flashvars"},
                                  ret="value")[0]
        else:
            dek = result
            url = re.compile('file*:*"(http.+?)"').findall(dek)[0]

        if re.search(';video_url', url):
            url = re.findall(';video_url=(.+?)&amp', url)[0]
        elif re.search('iframe src=', url):
            url = re.findall('<iframe src="(.+?)"', url)[0]

        url = url.replace('_ipod.mp4', '.flv')
        url = url.replace('preview', 'edit')
        logger.debug('URL [%s]' % url, __name__)
        return url
    except:
        return False
Example 14
    def resolve(self, url, resolverList):
        try:
            logger.debug('ORIGINAL URL [%s]' % url, __name__)
            result = client.request(url, headers=self.headers)
            playdata = 'window.pl_data = (\{.*?"key":.*?\}\})'
            result = re.findall(playdata, result)[0]
            try:
                result = json.loads(result)
                link = result['live']['channel_list'][0]['file']
                key = result['live']['key']
                link = link.decode('base64')
                key = key.decode('base64')
                # decrypt the AES-CBC encrypted stream link (zero IV) with the base64-decoded key
                de = pyaes.new(key, pyaes.MODE_CBC, IV='\0' * 16)
                link = de.decrypt(link).replace('\x00', '').split('\0')[0]
                link = re.sub('[^\s!-~]', '', link)
            except:
                link = client.parseDOM(result,
                                       "source",
                                       attrs={"type": "application/x-mpegurl"},
                                       ret="src")[0]
            logger.debug('URL : [%s]' % link, __name__)
            url = '%s|Referer=%s' % (link.strip(), url)
            result = client.validateUrl(url)
            logger.debug('RESOLVED URL [%s]' % url, __name__)
            return url
        except:
            return False
Example 15
    def episodes(self, title, url):
        try:
            try:
                season = title.lower()
                season = re.compile('[0-9]+').findall(season)[0]
                #season = season.replace('season ', '')
            except:
                import traceback
                traceback.print_exc()
                season = '0'
            episodes = []

            tvshowurl = url
            rawResult = client.request(url)
            rawResult = rawResult.decode('iso-8859-1').encode('utf-8')
            rawResult = rawResult.replace('\n',
                                          '').replace('\t',
                                                      '').replace('\r', '')

            result = client.parseDOM(rawResult, "article")

            for item in result:
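                # skip promo posts and day-wise compilation entries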
                if "promo" in item or '(Day' in item:
                    continue
                item = client.parseDOM(item, "h2")[0]
                name = client.parseDOM(item, "a", ret="title")
                if type(name) is list:
                    name = name[0]
                url = client.parseDOM(item, "a", ret="href")
                if type(url) is list:
                    url = url[0]
                if "Online" not in name: continue
                name = name.replace(title, '')
                try:
                    name = re.compile(
                        'Season [\d{1}|\d{2}](\w.+\d{4})').findall(name)[0]
                except:
                    pass
                name = re.compile('([\d{1}|\d{2}]\w.+\d{4})').findall(name)[0]
                name = name.strip()
                episodes.append({
                    'season': season,
                    'tvshowtitle': title,
                    'title': name,
                    'name': name,
                    'url': url,
                    'provider': 'yodesi',
                    'tvshowurl': tvshowurl
                })

            next = client.parseDOM(rawResult, "nav")
            next = client.parseDOM(next,
                                   "a",
                                   attrs={"class": "next page-numbers"},
                                   ret="href")[0]
            episodes[0].update({'next': next})

            return episodes
        except:
            return episodes
Example 16
    def movie(self, imdb, title, year):
        try:
            query = '%s' % (title)
            query = self.search_link % (urllib.quote_plus(query))
            query = urlparse.urljoin(self.base_link, query)

            result = client.request(query)

            result = result.decode('iso-8859-1').encode('utf-8')
            result = client.parseDOM(result, "div", attrs={"class": "tab-content mt20"})[0]
            result = client.parseDOM(result, "div", attrs={"id": "movies_tab"})[0]
            result = client.parseDOM(result, "div", attrs={"class": "media-left"})

            url = None
            title = cleantitle.movie(title)
            for item in result:
                searchTitle = client.parseDOM(item, "a", ret="title")[0]
                searchTitle = re.compile('(.+?) [(]\d{4}[)]$').findall(searchTitle)[0]
                searchTitle = cleantitle.movie(searchTitle)
                if title == searchTitle:
                    url = client.parseDOM(item, "a", ret="href")[0]
                    break
            if url == None or url == '':
                raise Exception()
            return url
        except:
            return
Example 17
    def getTVShowPosterFromGoogle(self, showName, retry):
        if retry == 0:
            return ''
        #baseURL = 'https://ajax.googleapis.com/ajax/services/search/images?v=1.0&q={query}'
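        # despite the method name, the lookup goes through the Bing Image Search API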

        keyBing = 'btcCcvQ4Sfo9P2Q7u62eOREA1NfLEQPezqCNb+2LVhY'        # get Bing key from: https://datamarket.azure.com/account/keys
        credentialBing = 'Basic ' + (':%s' % keyBing).encode('base64')[:-1] # the "-1" is to remove the trailing "\n" which encode adds

        headers = {}
        headers['Authorization'] = credentialBing

        baseURL = 'https://api.datamarket.azure.com/Bing/Search/v1/Image?Query=%27{query}%27&$format=json'

        query = showName.lower() + ' poster'
        url = baseURL.format(query=urllib.quote_plus(query))
        try:
            result = client.request(url, headers=headers)

            results = json.loads(result)['d']['results']

            for image_info in results:
                iconImage = image_info['MediaUrl']
                break
            if iconImage is not None:
                return iconImage
            else:
                return '0'
        except :
            return self.getTVShowPosterFromGoogle(showName, retry-1)
        return ''
Example 18
    def getSwiftChannels(self, url, headers):
        result = client.request(url, headers=headers)

        try:
            tResult = re.compile("{\"LIVETV\":(.+?)}{\"LIVETV\"").findall(
                result)
            tResult = json.loads(tResult[0])
            result = tResult
        except:
            result = json.loads(result)["LIVETV"]
        for channel in result:
            title = channel['channel_title']
            #from ashock.modules import livemeta
            #names = cache.get(livemeta.source().getLiveNames, 200, table='live_cache')
            #title = cleantitle.live(title)
            #if title == 'SKIP':
            #    continue
            icon = channel['channel_thumbnail']
            if not icon.startswith('http'):
                icon = 'http://swiftstreamz.com/SwiftStream/images/thumbs/%s' % icon
            cUrl = channel['channel_url']
            self.channelList[title] = {
                'icon': icon,
                'url': cUrl,
                'provider': 'swift',
                'source': 'swift',
                'direct': False,
                'quality': 'HD'
            }
        return self.channelList
Example 19
    def episode(self, url, ep_url, imdb, tvdb, title, date, season, episode):
        query = '%s %s' % (imdb, title)
        query = self.search_link % (urllib.quote_plus(query))
        result = ''

        links = [self.base_link_1, self.base_link_2, self.base_link_3]
        for base_link in links:
            try:
                result = client.request(base_link + query)
            except:
                result = ''
            if 'item' in result: break

        result = result.decode('iso-8859-1').encode('utf-8')

        result = result.replace('\n', '').replace('\t', '')

        result = client.parseDOM(result, 'content:encoded')[0]

        ep_url = client.parseDOM(result,
                                 "a",
                                 attrs={"rel": "nofollow"},
                                 ret="href")[0]

        if ep_url:
            return ep_url
Example 20
    def movie(self, imdb, title, year):
        try:
            t = cleantitle.get(title)

            headers = {'X-Requested-With': 'XMLHttpRequest'}

            query = urllib.urlencode({'keyword': title})

            url = urlparse.urljoin(self.base_link, self.search_link)

            r = client.request(url, post=query, headers=headers)

            r = json.loads(r)['content']
            r = zip(
                client.parseDOM(r,
                                'a',
                                ret='href',
                                attrs={'class': 'ss-title'}),
                client.parseDOM(r, 'a', attrs={'class': 'ss-title'}))
            r = [i[0] for i in r
                 if cleantitle.get(t) == cleantitle.get(i[1])][:2]
            r = [(i, re.findall('(\d+)', i)[-1]) for i in r]

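            # confirm the release year via the cached info lookup before accepting a candidate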
            for i in r:
                try:
                    y, q = cache.get(self.onemovies_info, 9000, i[1])
                    if not y == year: raise Exception()
                    return urlparse.urlparse(i[0]).path
                except:
                    pass
        except:
            return
Example 21
    def movie(self, imdb, title, year):
        try:
            url = None
            self.base_link = random.choice(
                [self.base_link_1, self.base_link_2])

            query = '%s %s' % (title, year)
            query = urllib.quote_plus(query)
            query = self.search_link % (query)
            query = urlparse.urljoin(self.base_link % 'search', query)

            result = client.request(query, headers=self.headers)

            result = result.decode('iso-8859-1').encode('utf-8')
            result = json.loads(result)

            result = result['resultObj']['response']['docs']

            title = cleantitle.movie(title)
            for item in result:
                searchTitle = cleantitle.movie(item['contentTitle'])
                if title == searchTitle:
                    url = self.cdn_link % item['contentId']
                    break
            if url == None or url == '':
                raise Exception()
            return url
        except Exception as e:
            logger.error('[%s] Exception : %s' % (self.__class__, e))
            return
Example 22
    def sources(self, url):
        logger.debug('SOURCES URL %s' % url, __name__)
        try:
            quality = ''
            srcs = []

            if url == None: return srcs

            url = urlparse.urljoin(self.base_link, url)

            try:
                result = client.request(url)
            except:
                result = ''

            result = result.decode('iso-8859-1').encode('utf-8')
            result = result.replace('\n', '').replace('\t',
                                                      '').replace('\r', '')

            result = client.parseDOM(
                result,
                "div",
                attrs={"class": "td-post-content td-pb-padding-side"})[0]
            result = client.parseDOM(result,
                                     "p",
                                     attrs={"style": "text-align: center;"})

            for item in result:
                try:
                    urls = client.parseDOM(item, "a", ret="href")
                    quality = client.parseDOM(item, "b")

                    quality = " ".join(quality)
                    quality = quality.lower()
                    if "720p" in quality:
                        quality = "HD"
                    else:
                        quality = "SD"

                    for i in range(0, len(urls)):
                        urls[i] = client.urlRewrite(urls[i])
                    host = client.host(urls[0])
                    if len(urls) > 1:
                        url = "##".join(urls)
                    else:
                        url = urls[0]
                    srcs.append({
                        'source': host,
                        'parts': str(len(urls)),
                        'quality': quality,
                        'provider': 'tDesiHit',
                        'url': url,
                        'direct': False
                    })
                except:
                    pass
            logger.debug('SOURCES [%s]' % srcs, __name__)
            return srcs
        except:
            return srcs
Example 23
    def movie(self, imdb, title, year):
        try:
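            # skip this source entirely unless a debrid account is configured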
            if debrid.status() == False: raise Exception()

            t = cleantitle.get(title)

            headers = {'X-Requested-With': 'XMLHttpRequest'}

            query = self.search_link + urllib.quote_plus(title)
            query = urlparse.urljoin(self.base_link, query)

            r = client.request(query, headers=headers)
            r = json.loads(r)

            r = [
                i for i in r
                if 'category' in i and 'movie' in i['category'].lower()
            ]
            r = [(i['url'], i['label']) for i in r
                 if 'label' in i and 'url' in i]
            r = [(i[0], re.findall('(.+?) \((\d{4})', i[1])) for i in r]
            r = [(i[0], i[1][0][0], i[1][0][1]) for i in r if len(i[1]) > 0]
            r = [
                i[0] for i in r if t == cleantitle.get(i[1]) and year == i[2]
            ][0]

            url = re.findall('(?://.+?|)(/.+)', r)[0]
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            return url
        except:
            return
Example 24
    def movie(self, imdb, title, year):
        try:
            url = None
            self.base_link = random.choice(
                [self.base_link_1, self.base_link_2])

            query = title
            query = self.search_link % (urllib.quote_plus(query))
            query = urlparse.urljoin(self.base_link, query)

            result = client.request(query)

            result = result.decode('iso-8859-1').encode('utf-8')

            result = client.parseDOM(result,
                                     "div",
                                     attrs={"class": "result clearfix"})

            title = cleantitle.movie(title)
            for item in result:
                item = client.parseDOM(item, "div", attrs={"class":
                                                           "details"})[0]
                searchTitle = client.parseDOM(item, "a")[0]
                searchTitle = cleantitle.movie(searchTitle)
                if title == searchTitle:
                    url = client.parseDOM(item, "a", ret="href")[0]
                    break
            if url == None or url == '':
                raise Exception()
            return url
        except:
            return
Example 25
    def login(self):
        try:
            post = {
                'el': self.user,
                'pw': self.password,
                'mobile': '',
                'callingcode': '',
                'type': 'json',
                'fbid': ''
            }
            h = {
                'Referer': self.base_link_1,
                'Content-Type':
                'application/x-www-form-urlencoded; charset=UTF-8'
            }

            result = client.request(self.login_link,
                                    post=urllib.urlencode(post),
                                    close=False)

            result = json.loads(result)

            t = result['success']
        except:
            pass
Example 26
    def movie(self, imdb, title, year):
        try:
            self.base_link = random.choice(
                [self.base_link_1, self.base_link_2])

            self.login()
            query = '%s %s' % (title, year)
            query = self.search_link % (urllib.quote_plus(query))
            query = urlparse.urljoin(self.base_link, query)

            result = client.request(query)

            result = result.decode('iso-8859-1').encode('utf-8')
            result = json.loads(result)

            result = result['rows']

            url = None
            title = cleantitle.movie(title)
            for item in result:
                searchTitle = cleantitle.movie(item['title'])
                if title == searchTitle:
                    url = self.info_link % item['asset_id']
                    break
            if url == None or url == '':
                raise Exception()
            return url
        except:
            return
Example 27
    def movie(self, imdb, title, year):
        try:
            self.base_link = random.choice(
                [self.base_link_1, self.base_link_2])

            query = '%s %s' % (title, year)
            query = self.search_link % (urllib.quote_plus(query))
            query = urlparse.urljoin(self.base_link, query)

            result = client.request(query)

            result = result.decode('iso-8859-1').encode('utf-8')
            result = client.parseDOM(result, "item")

            url = None
            title = cleantitle.movie(title)
            for item in result:
                searchTitle = client.parseDOM(item, "title")[0]
                searchTitle = cleantitle.movie(searchTitle)
                if title == searchTitle:
                    url = client.parseDOM(item, "link")[0]
                    break
            if url == None or url == '':
                raise Exception()
            return url
        except:
            return
Example 28
    def movie(self, imdb, title, year):
        try:
            url = None
            self.base_link = random.choice(
                [self.base_link_1, self.base_link_2])

            #query = '%s %s' % (title, year)
            query = title
            query = self.search_link % (urllib.quote_plus(query))
            query = urlparse.urljoin(self.base_link, query)

            result = client.request(query, error=True)

            items = client.parseDOM(result, "item")

            title = cleantitle.movie(title)
            for item in items:
                searchTitle = client.parseDOM(item, "title")[0]
                searchTitle = cleantitle.movie(searchTitle)
                if title in searchTitle:
                    url = client.parseDOM(item,
                                          "a",
                                          attrs={"rel": "nofollow"},
                                          ret="href")[0]
                    break
            if url == None or url == '':
                raise Exception()
            return url
        except:
            import traceback
            traceback.print_exc()
            return
Example 29
    def sources(self, url):
        logger.debug('SOURCES URL %s' % url, __name__)

        try:
            srcs = []

            if url == None: return srcs

            url = urlparse.urljoin(self.base_link, url)

            content = re.compile('(.+?)\?episode=\d*$').findall(url)
            content = 'movie' if len(content) == 0 else 'episode'

            try: url, episode = re.compile('(.+?)\?episode=(\d*)$').findall(url)[0]
            except: pass

            result = client.request(url)

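            # pair each EZWebPlayer link with the episode number shown in its label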
            url = zip(
                client.parseDOM(result, 'a', ret='href',
                                attrs={'target': 'EZWebPlayer'}),
                client.parseDOM(result, 'a', attrs={'target': 'EZWebPlayer'}))
            url = [(i[0], re.compile('(\d+)').findall(i[1])) for i in url]
            url = [(i[0], i[1][-1]) for i in url if len(i[1]) > 0]

            if content == 'episode':
                url = [i for i in url if i[1] == '%01d' % int(episode)]

            links = [client.replaceHTMLCodes(i[0]) for i in url]

            for u in links:

                try:
                    result = client.request(u)
                    result = re.findall('sources\s*:\s*\[(.+?)\]', result)[0]
                    result = re.findall('"file"\s*:\s*"(.+?)".+?"label"\s*:\s*"(.+?)"', result)

                    url = [{'url': i[0], 'quality': '1080p'} for i in result if '1080' in i[1]]
                    url += [{'url': i[0], 'quality': 'HD'} for i in result if '720' in i[1]]
                    url += [{'url': i[0], 'quality': 'SD'} for i in result if '480' in i[1]]
                    url += [{'url': i[0], 'quality': 'SCR'} for i in result if '360' in i[1]]

                    for i in url:
                        srcs.append({
                            'source': 'gvideo',
                            'parts': '1',
                            'quality': i['quality'],
                            'provider': 'Pubfilm',
                            'url': i['url'],
                            'direct': True,
                            'debridonly': False
                        })
                except:
                    pass
            logger.debug('SOURCES URL %s' % srcs, __name__)
            return srcs
        except:
            return srcs
Example 30
    def sources(self, url):
        logger.debug('SOURCES URL %s' % url, __name__)
        try:
            srcs = []

            if url == None: return srcs

            url = urlparse.urljoin(self.base_link, url)

            try:
                result = client.request(url, referer=self.base_link)
            except:
                result = ''

            result = result.decode('iso-8859-1').encode('utf-8')
            result = result.replace('\n', '').replace('\t', '')

            try:
                quality = client.parseDOM(result,
                                          "span",
                                          attrs={"class": "calidad2"})[0]
            except:
                quality = ""

            parts = client.parseDOM(result,
                                    "div",
                                    attrs={"class": "player_nav"})[0]
            parts = client.parseDOM(parts, "a")

            items = client.parseDOM(result, "div", attrs={"id": "player2"})[0]
            items = client.parseDOM(items, "div", attrs={"class": "movieplay"})

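            # keep only the 'full' player entry; alternate 'option' tabs are skipped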
            for i in range(0, len(items)):
                try:
                    part = parts[i]
                    part = cleantitle.movie(part)
                    if not "full" in part or "option" in part:
                        continue

                    url = re.compile(
                        '(SRC|src|data-config)=[\'|\"](.+?)[\'|\"]').findall(
                            items[i])[0][1]

                    host = client.host(url)
                    srcs.append({
                        'source': host,
                        'parts': '1',
                        'quality': quality,
                        'provider': 'DesiHDMovies',
                        'url': url,
                        'direct': False
                    })
                except:
                    pass
            logger.debug('SOURCES [%s]' % srcs, __name__)
            return srcs
        except Exception as e:
            logger.error('[%s] Exception : %s' % (self.__class__, e))
            return srcs