Esempio n. 1
0
    def sources(self, url, hostDict, hostprDict):
        """Collect downloadable movie sources from Ganool.

        url         -- plugin query string carrying 'title' and 'year'
        hostDict    -- accepted free file hosts
        hostprDict  -- accepted premium hosts (unused by this scraper)
        Returns a list of source dicts; empty on any failure.
        """
        sources = []
        try:
            if url is None:
                return sources

            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '')
                         for i in data])
            q = '%s' % cleantitle.get_gan_url(data['title'])
            url = urlparse.urljoin(self.base_link, self.search_link % q)
            r = cfscrape.get(url, headers=self.headers).content
            v = re.compile(
                '<a href="(.+?)" class="ml-mask jt" title="(.+?)">\s+<span class=".+?">(.+?)</span>'
            ).findall(r)
            # Expected "Title (Year)" string; loop-invariant, so build once.
            t = '%s (%s)' % (data['title'], data['year'])
            for url, check, quality in v:
                if t not in check:
                    continue
                key = url.split('-hd')[1]
                url = 'https://ganool1.com//moviedownload.php?q=%s' % key
                r = cfscrape.get(url, headers=self.headers).content
                r = re.compile(
                    '<a rel=".+?" href="(.+?)" target=".+?">').findall(r)
                for url in r:
                    if '.rar' in url:
                        continue
                    # BUGFIX: keep the scraped quality string intact; the
                    # original rebound `quality` here, so every link after
                    # the first was graded against the previous result.
                    rel_quality, info = source_utils.get_release_quality(
                        quality, url)
                    valid, host = source_utils.is_host_valid(url, hostDict)
                    if not valid:
                        continue
                    info = ' | '.join(info)
                    if control.setting('deb.rd_check') == 'true':
                        # Only keep links Real-Debrid confirms as available.
                        rd_url = rd_check.rd_deb_check(url)
                        if rd_url:
                            info = 'RD Checked' + ' | ' + info
                            sources.append({
                                'source': host,
                                'quality': rel_quality,
                                'language': 'en',
                                'url': rd_url,
                                'info': info,
                                'direct': False,
                                'debridonly': True
                            })
                    else:
                        sources.append({
                            'source': host,
                            'quality': rel_quality,
                            'language': 'en',
                            'url': url,
                            'info': info,
                            'direct': False,
                            'debridonly': True
                        })
            return sources
        except Exception:
            failure = traceback.format_exc()
            log_utils.log('---Ganool Testing - Exception: \n' + str(failure))
            return sources
Esempio n. 2
0
    def _get_sources(self, name, url):
        """Parse a release page and append valid hoster links to self.sources.

        name -- release name (used for quality and size detection)
        url  -- release page URL
        Best-effort: any failure silently yields no sources.
        """
        try:
            r = cfscrape.get(url).content
            name = client.replaceHTMLCodes(name)
            blocks = dom_parser2.parse_dom(r, 'div', {'class': 'ppu2h'})
            # BUGFIX: scan the concatenation of every matched block; the
            # original accumulated `s` but then searched only the LAST
            # block's content.
            s = ''.join(i.content for i in blocks)
            urls = re.findall(r'''((?:http|ftp|https)://[\w_-]+(?:(?:\.[\w_-]+)+)[\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])''', s, flags=re.MULTILINE | re.DOTALL)
            # BUGFIX: the original chained `or` over `not in`, which is always
            # True and filtered nothing; exclude archives/subtitles properly.
            urls = [i for i in urls if not any(
                x in i for x in ('.rar', '.zip', '.iso', '.idx', '.sub'))]
            for url in urls:
                if url in str(self.sources):
                    continue

                valid, host = source_utils.is_host_valid(url, self.hostDict)
                if not valid:
                    continue
                host = client.replaceHTMLCodes(host)
                host = host.encode('utf-8')
                quality, info = source_utils.get_release_quality(name, url)
                try:
                    # Pull a "1.4 GB"-style token from the release name and
                    # normalise it to gigabytes for display.
                    size = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+)\s*(?:GiB|MiB|GB|MB))', name)[0]
                    div = 1 if size.endswith(('GB', 'GiB')) else 1024
                    size = float(re.sub('[^0-9|/.|/,]', '', size)) / div
                    size = '%.2f GB' % size
                    info.append(size)
                except BaseException:
                    # Size is optional metadata; ignore parse failures.
                    pass
                info = ' | '.join(info)

                if control.setting('deb.rd_check') == 'true':
                    check = rd_check.rd_deb_check(url)
                    if check:
                        info = 'RD Checked' + ' | ' + info
                        self.sources.append(
                            {'source': host, 'quality': quality, 'language': 'en', 'url': check, 'info': info,
                             'direct': False, 'debridonly': True})
                else:
                    self.sources.append(
                        {'source': host, 'quality': quality, 'language': 'en', 'url': url, 'info': info,
                         'direct': False, 'debridonly': True})
        except Exception:
            # Best-effort scraper: swallow errors, contribute no sources.
            pass
Esempio n. 3
0
    def sources(self, url, hostDict, hostprDict):
        """Scrape Maxrls post pages for non-SD hoster links.

        url -- plugin query string; hostDict/hostprDict -- accepted hosts.
        Returns a list of source dicts; empty on failure.
        """
        try:
            sources = []

            if url is None:
                return sources

            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])

            query = '%s S%02dE%02d' % (
                data['tvshowtitle'], int(data['season']), int(data['episode'])) \
                if 'tvshowtitle' in data else '%s %s' % (data['title'], data['year'])

            url = self.search_link % urllib.quote_plus(query)
            url = urlparse.urljoin(self.base_link, url).replace('%3A+', '+')

            # BUGFIX: was `cfscrape.get.get(url, header=...)`, which raised
            # AttributeError on every search and silently returned nothing.
            r = cfscrape.get(url, headers=self.headers).content

            posts = client.parseDOM(r, "h2", attrs={"class": "postTitle"})
            hostDict = hostprDict + hostDict
            items = []
            for post in posts:
                try:
                    for i in client.parseDOM(post, 'a', ret='href'):
                        items.append(str(i))
                except Exception:
                    pass

            for item in items:
                try:
                    i = str(item)
                    # BUGFIX: same `cfscrape.get.get(..., header=...)` typo.
                    r = cfscrape.get(i, headers=self.headers).content
                    u = client.parseDOM(r, "div", attrs={"class": "postContent"})
                    for t in u:
                        for url in client.parseDOM(t, 'a', ret='href'):
                            if any(x in url for x in ['.rar', '.zip', '.iso']):
                                continue
                            # BUGFIX: get_release_quality takes (name, url);
                            # pass the link twice, as the sibling scrapers do.
                            quality, info = source_utils.get_release_quality(
                                url, url)
                            if 'SD' in quality:
                                continue
                            info = ' | '.join(info)
                            valid, host = source_utils.is_host_valid(url, hostDict)
                            if valid:
                                if control.setting('deb.rd_check') == 'true':
                                    check = rd_check.rd_deb_check(url)
                                    if check:
                                        info = 'RD Checked' + ' | ' + info
                                        sources.append(
                                            {'source': host, 'quality': quality, 'language': 'en', 'url': check,
                                             'info': info, 'direct': False, 'debridonly': True})
                                else:
                                    sources.append(
                                        {'source': host, 'quality': quality, 'language': 'en', 'url': url,
                                         'info': info, 'direct': False, 'debridonly': True})
                except Exception:
                    pass
            return sources
        except Exception:
            failure = traceback.format_exc()
            log_utils.log('---Maxrls Testing - Exception: \n' + str(failure))
            return sources
Esempio n. 4
0
    def _get_sources(self, url, quality, info, hostDict, hostprDict):
        """Follow a detail page's buttons (watch / download / magnet) and
        append valid hoster or torrent sources to self._sources.

        url        -- detail page URL
        quality    -- quality label already resolved by the caller
        info       -- ' | '-joined info string for the release
        hostDict   -- free hosts; hostprDict -- premium (debrid-only) hosts
        """
        result = client.request(url, headers=self.headers)

        # The three buttons on the page: watch, download ("blue"), magnet.
        button_links = (
            client.parseDOM(result, 'a', ret='href',
                            attrs={'class': 'dbuttn watch'})[0],
            client.parseDOM(result, 'a', ret='href',
                            attrs={'class': 'dbuttn blue'})[0],
            client.parseDOM(result, 'a', ret='href',
                            attrs={'class': 'dbuttn magnet'})[0])

        for url in button_links:
            try:
                r = client.request(url, headers={'User-Agent': client.agent()})

                if 'linkomark' in url:
                    # Resolve the linkomark link protector: POST its hidden
                    # form field back to the canonical URL, then harvest the
                    # hoster links it reveals.
                    p_link = dom_parser.parse_dom(r,
                                                  'link', {'rel': 'canonical'},
                                                  req='href')[0]
                    p_link = p_link.attrs['href']
                    input_name = client.parseDOM(r, 'input', ret='name')[0]
                    input_value = client.parseDOM(r, 'input', ret='value')[0]
                    post = {input_name: input_value}
                    p_data = client.request(p_link,
                                            post=post,
                                            headers=self.headers)
                    links = client.parseDOM(p_data,
                                            'a',
                                            ret='href',
                                            attrs={'target': '_blank'})

                    for i in links:
                        valid, host = source_utils.is_host_valid(i, hostDict)
                        if not valid:
                            valid, host = source_utils.is_host_valid(
                                i, hostprDict)
                            if not valid:
                                continue
                            rd = True  # premium host: debrid required
                        else:
                            rd = False
                        if i in str(self._sources):
                            continue

                        if 'rapidgator' in i:
                            rd = True

                        if rd:
                            if debrid.status() is False: return
                            if control.setting('deb.rd_check') == 'true':
                                # BUGFIX: probe the hoster link `i`; the
                                # original probed the outer page `url`.
                                checked = rd_check.rd_deb_check(i)
                                if checked:
                                    self._sources.append({
                                        'source': host,
                                        'quality': quality,
                                        'language': 'en',
                                        'url': checked,
                                        # BUGFIX: tag a copy; the original
                                        # rebound `info`, stacking the prefix
                                        # on every later link.
                                        'info': 'RD Checked' + ' | ' + info,
                                        'direct': False,
                                        'debridonly': True
                                    })
                            else:
                                self._sources.append({
                                    'source': host,
                                    'quality': quality,
                                    'language': 'en',
                                    'url': i,
                                    'info': info,
                                    'direct': False,
                                    'debridonly': True
                                })
                        else:
                            self._sources.append({
                                'source': host,
                                'quality': quality,
                                'language': 'en',
                                'url': i,
                                'info': info,
                                'direct': False,
                                'debridonly': False
                            })

                elif 'torrent' in url:
                    if debrid.torrent_enabled() is False: return self._sources
                    data = client.parseDOM(r, 'a', ret='href')
                    url = [i for i in data if 'magnet:' in i][0]
                    url = url.split(';tr')[0]  # drop the tracker list
                    if control.setting('torrent.rd_check') == 'true':
                        checked = rd_check.rd_cache_check(url)
                        if checked:
                            self._sources.append({
                                # BUGFIX: typo 'Cahced Torrent'.
                                'source': 'Cached Torrent',
                                'quality': quality,
                                'language': 'en',
                                'url': checked,
                                'info': info,
                                'direct': False,
                                'debridonly': True
                            })
                    else:
                        self._sources.append({
                            'source': 'Torrent',
                            'quality': quality,
                            'language': 'en',
                            'url': url,
                            'info': info,
                            'direct': False,
                            'debridonly': True
                        })

            except Exception:
                # Best effort per button; a failure just skips that button.
                pass
Esempio n. 5
0
    def sources(self, url, hostDict, hostprDict):
        """Scrape Link4file download pages for hoster links.

        url -- plugin query string; hostDict/hostprDict -- accepted hosts.
        Returns a list of source dicts; empty on failure.
        """
        try:
            sources = []

            if url is None:
                return sources

            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '')
                         for i in data])

            query = '%s s%02de%02d' % (
                data['tvshowtitle'], int(data['season']),
                int(data['episode'])) if 'tvshowtitle' in data else '%s %s' % (
                    data['title'], data['year'])
            query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)

            url = self.search_link % urllib.quote_plus(query)
            url = urlparse.urljoin(self.base_link, url).replace('++', '+')

            r = client.request(url, headers=self.headers)

            posts = client.parseDOM(r, "div", attrs={"class": "content-fill"})
            hostDict = hostprDict + hostDict
            items = []
            for post in posts:
                try:
                    for i in client.parseDOM(post, 'a', ret='href'):
                        items.append(str(i))
                except Exception:
                    pass

            for item in items:
                try:
                    i = urlparse.urljoin(self.base_link, str(item))
                    r = client.request(i, headers=self.headers)
                    u = client.parseDOM(r, "div", attrs={"class": "dl-links"})
                    for t in u:
                        # Links are wrapped in a javascript: dl('<url>') call.
                        url = re.compile(
                            'a href="javascript: dl(.+?)" rel=".+?"').findall(
                                t)[0]
                        url = url.split("('")[1].split("')")[0]
                        if any(x in url for x in ['.rar', '.zip', '.iso']):
                            raise Exception()
                        quality, info = source_utils.get_release_quality(
                            url, url)
                        valid, host = source_utils.is_host_valid(url, hostDict)
                        # BUGFIX: the original computed `valid` but never
                        # consulted it, appending links from unknown hosts.
                        if not valid:
                            continue
                        info = ' | '.join(info)
                        if control.setting('deb.rd_check') == 'true':
                            check = rd_check.rd_deb_check(url)
                            if check:
                                info = 'RD Checked' + ' | ' + info
                                sources.append({
                                    'source': host,
                                    'quality': quality,
                                    'language': 'en',
                                    # BUGFIX: append the debrid-checked link,
                                    # like the sibling scrapers; the original
                                    # appended the raw url here.
                                    'url': check,
                                    'info': info,
                                    'direct': False,
                                    'debridonly': True
                                })
                        else:
                            sources.append({
                                'source': host,
                                'quality': quality,
                                'language': 'en',
                                'url': url,
                                'info': info,
                                'direct': False,
                                'debridonly': True
                            })
                except Exception:
                    pass
            return sources
        except Exception:
            failure = traceback.format_exc()
            log_utils.log('---Link4file Testing - Exception: \n' +
                          str(failure))
            return sources
Esempio n. 6
0
    def sources(self, url, hostDict, hostprDict):
        """Scrape ONCEDDL for release links matching the title/episode.

        url -- plugin query string; hostDict/hostprDict -- accepted hosts.
        Returns a list of source dicts; empty on failure.
        """
        try:
            sources = []

            if url is None: return sources

            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '')
                         for i in data])

            title = data['tvshowtitle'] if 'tvshowtitle' in data else data[
                'title']

            # Matching token: 'SxxExx' for episodes, release year for movies.
            hdlr = 'S%02dE%02d' % (int(data['season']), int(
                data['episode'])) if 'tvshowtitle' in data else data['year']

            query = '%s S%02dE%02d' % (
                data['tvshowtitle'], int(data['season']),
                int(data['episode'])) if 'tvshowtitle' in data else '%s %s' % (
                    data['title'], data['year'])
            # Strip punctuation the site's search cannot handle.
            query = re.sub('(\\\|/| -|:|\.|;|\*|\?|"|\'|<|>|\|)', ' ', query)

            url = self.search_link % urllib.quote_plus(query)
            url = urlparse.urljoin(self.base_link,
                                   url).replace('-', '+').replace('%3A+', '+')

            r = client.request(url, headers=self.headers)
            r = client.parseDOM(r, 'div', attrs={'class': 'item-post'})
            # Each result becomes a (post URL, raw title text) pair.
            r = [
                re.findall('<a href="(.+?)">(.+?)<', i, re.DOTALL)[0]
                for i in r
            ]

            hostDict = hostprDict + hostDict

            items = []

            for item in r:
                try:
                    t = item[1]
                    t = re.sub('(\[.*?\])|(<.+?>)', '', t)
                    # t1: bare title with year/episode/3D markers stripped.
                    t1 = re.sub(
                        '(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d+|3D)(\.|\)|\]|\s|)(.+|)',
                        '', t)

                    if not cleantitle.get(t1) == cleantitle.get(title):
                        raise Exception()

                    # Last year/episode token in the post title must match
                    # the requested episode tag or year.
                    y = re.findall(
                        '[\.|\(|\[|\s](\d{4}|S\d*E\d*|S\d*)[\.|\)|\]|\s]',
                        t)[-1].upper()

                    if not y == hdlr: raise Exception()

                    data = client.request(item[0], headers=self.headers)
                    data = client.parseDOM(data,
                                           'div',
                                           attrs={'class': 'single-link'})[0]
                    data = dom_parser.parse_dom(data, 'a', req='href')

                    u = [(t, i.attrs['href']) for i in data]
                    items += u

                except:
                    # Skip posts that fail matching or parsing.
                    pass

            for item in items:
                try:
                    name = item[0]
                    name = client.replaceHTMLCodes(name)

                    quality, info = source_utils.get_release_quality(
                        name, item[1])

                    url = item[1]
                    # Drop archives and known-bad link protectors.
                    if any(x in url for x in [
                            '.rar', '.zip', '.iso', 'www.share-online.biz',
                            'https://ouo.io', 'http://guard.link'
                    ]):
                        raise Exception()
                    url = client.replaceHTMLCodes(url)
                    url = url.encode('utf-8')

                    # NOTE(review): `valid` is computed but never checked
                    # before appending — confirm this is intentional.
                    valid, host = source_utils.is_host_valid(url, hostDict)
                    host = client.replaceHTMLCodes(host)
                    host = host.encode('utf-8')
                    info = ' | '.join(info)
                    if control.setting('deb.rd_check') == 'true':
                        # Only keep links Real-Debrid reports as available.
                        check = rd_check.rd_deb_check(url)
                        if check:
                            info = 'RD Checked' + ' | ' + info
                            sources.append({
                                'source': host,
                                'quality': quality,
                                'language': 'en',
                                'url': check,
                                'info': info,
                                'direct': False,
                                'debridonly': True
                            })
                    else:
                        sources.append({
                            'source': host,
                            'quality': quality,
                            'language': 'en',
                            'url': url,
                            'info': info,
                            'direct': False,
                            'debridonly': True
                        })
                except:
                    pass

            return sources
        except:
            failure = traceback.format_exc()
            log_utils.log('---ONCEDDL Testing - Exception: \n' + str(failure))
            return sources
Esempio n. 7
0
    def sources(self, url, hostDict, hostprDict):
        """Scrape rlsbb.ru for episode links, with premiere-date fallback.

        url -- plugin query string (expects TV fields: tvshowtitle, season,
        episode, year, premiered); hostDict/hostprDict -- accepted hosts.
        Returns a list of source dicts, preferring non-CAM results.
        """
        try:
            sources = []

            if url is None:
                return sources

            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '')
                         for i in data])
            title = data['tvshowtitle']
            hdlr = 'S%02dE%02d' % (int(data['season']), int(data['episode']))
            premDate = ''

            query = '%s %s S%02dE%02d' % (data['tvshowtitle'], data['year'],
                                          int(data['season']),
                                          int(data['episode']))

            # Site URLs are slug-style: strip punctuation, dash-join words.
            query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', '', query)
            query = query.replace("&", "and")
            query = query.replace("  ", " ")
            query = query.replace(" ", "-")

            url = self.search_link % urllib.quote_plus(query)
            url = urlparse.urljoin(self.base_link, url)

            # NOTE(review): this overwrites the search URL built just above
            # with a hard-coded host — confirm base_link is meant to be
            # bypassed here.
            url = "http://rlsbb.ru/" + query
            if 'tvshowtitle' not in data: url = url + "-1080p"

            r = cfscrape.get(url).content

            # Fallback 1: retry with a "<title>-S<season>" season page when
            # the episode page returned nothing.
            if r is None and 'tvshowtitle' in data:
                season = re.search('S(.*?)E', hdlr)
                season = season.group(1)
                query = title
                query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', '', query)
                query = query + "-S" + season
                query = query.replace("&", "and")
                query = query.replace("  ", " ")
                query = query.replace(" ", "-")
                url = "http://rlsbb.ru/" + query
                r = cfscrape.get(url).content

            # Two passes: first the page fetched above, then (fallback 2) a
            # "<title>-<premiere-date>" page used by daily shows.
            for loopCount in range(0, 2):
                if loopCount == 1 or (r is None and 'tvshowtitle' in data):

                    premDate = re.sub('[ \.]', '-', data['premiered'])
                    query = re.sub('[\\\\:;*?"<>|/\-\']', '',
                                   data['tvshowtitle'])
                    query = query.replace("&", " and ").replace("  ",
                                                                " ").replace(
                                                                    " ", "-")
                    query = query + "-" + premDate

                    url = "http://rlsbb.ru/" + query
                    # Site-specific slug alias for this show.
                    url = url.replace('The-Late-Show-with-Stephen-Colbert',
                                      'Stephen-Colbert')

                    r = cfscrape.get(url).content

                posts = client.parseDOM(r, "div", attrs={"class": "content"})
                hostDict = hostprDict + hostDict
                items = []
                for post in posts:
                    try:
                        u = client.parseDOM(post, 'a', ret='href')
                        for i in u:
                            try:
                                name = str(i)
                                # Keep links carrying the episode tag, or the
                                # premiere date (dots normalised to dashes).
                                if hdlr in name.upper(): items.append(name)
                                elif len(premDate
                                         ) > 0 and premDate in name.replace(
                                             ".", "-"):
                                    items.append(name)

                            except:
                                pass
                    except:
                        pass

                if len(items) > 0: break

            # Deduplicate links across posts.
            seen_urls = set()

            for item in items:
                try:
                    info = []

                    url = str(item)
                    url = client.replaceHTMLCodes(url)
                    url = url.encode('utf-8')

                    if url in seen_urls:
                        continue
                    seen_urls.add(url)

                    # Reduce the link to its bare host domain for validation.
                    host = url.replace("\\", "")
                    host2 = host.strip('"')
                    host = re.findall(
                        '([\w]+[.][\w]+)$',
                        urlparse.urlparse(host2.strip().lower()).netloc)[0]

                    if host not in hostDict:
                        raise Exception()
                    if any(x in host2 for x in ['.rar', '.zip', '.iso']):
                        continue

                    # Crude quality detection from the URL text itself.
                    if '4K' in host2:
                        quality = '4K'
                    elif '2160p' in host2:
                        quality = '4K'
                    elif '1080p' in host2:
                        quality = '1080p'
                    elif '720p' in host2:
                        quality = '720p'
                    else:
                        quality = 'SD'

                    info = ' | '.join(info)
                    host = client.replaceHTMLCodes(host)
                    host = host.encode('utf-8')
                    if control.setting('deb.rd_check') == 'true':
                        # Only keep links Real-Debrid reports as available.
                        check = rd_check.rd_deb_check(host2)
                        if check:
                            info = 'RD Checked' + ' | ' + info
                            sources.append({
                                'source': host,
                                'quality': quality,
                                'language': 'en',
                                'url': check,
                                'info': info,
                                'direct': False,
                                'debridonly': True
                            })
                    else:
                        sources.append({
                            'source': host,
                            'quality': quality,
                            'language': 'en',
                            'url': host2,
                            'info': info,
                            'direct': False,
                            'debridonly': True
                        })

                except:
                    pass
            # Prefer non-CAM sources when any exist.
            check = [i for i in sources if not i['quality'] == 'CAM']
            if check:
                sources = check
            return sources
        except Exception:
            failure = traceback.format_exc()
            log_utils.log('---Rlsbb Testing - Exception: \n' + str(failure))
            return sources
Esempio n. 8
0
    def sources(self, url, hostDict, hostprDict):
        """Scrape Scenerls posts whose cleaned title matches the request.

        url -- plugin query string; hostDict/hostprDict -- accepted hosts.
        Returns a list of source dicts; empty on failure.
        """
        try:
            sources = []

            if url is None:
                return sources

            hostDict = hostprDict + hostDict

            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])

            title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']

            query = '%s s%02de%02d' % (data['tvshowtitle'], int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else '%s %s' % (data['title'], data['year'])
            query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)

            # BUGFIX: define `items` before the search try-block; if the
            # search raised before the original `items = []` line, the loop
            # below hit an UnboundLocalError and the whole method bailed.
            items = []
            try:
                url = self.search_link % urllib.quote_plus(query)
                url = urlparse.urljoin(self.base_link, url)

                r = cfscrape.get(url, headers={'User-Agent': client.agent()}).content

                posts = client.parseDOM(r, 'div', attrs={'class': 'post'})

                for post in posts:
                    try:
                        u = client.parseDOM(post, "div", attrs={"class": "postContent"})
                        u = client.parseDOM(u, "h2")
                        u = client.parseDOM(u, 'a', ret='href')
                        # Pair each link with its trailing path segment,
                        # which carries the release name.
                        items += [(i.strip('/').split('/')[-1], i) for i in u]
                    except Exception:
                        pass
            except Exception:
                pass

            for item in items:
                try:
                    name = client.replaceHTMLCodes(item[0])

                    # Bare title with year/episode/3D markers stripped.
                    t = re.sub('(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d*|3D)(\.|\)|\]|\s|)(.+|)', '', name)

                    if not cleantitle.get(t) == cleantitle.get(title): continue

                    quality, info = source_utils.get_release_quality(name, item[1])

                    url = item[1]
                    if any(x in url for x in ['.rar', '.zip', '.iso']):
                        raise Exception()
                    url = client.replaceHTMLCodes(url)
                    url = url.encode('utf-8')

                    # Bare host domain must be in the accepted host list.
                    host = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(url.strip().lower()).netloc)[0]
                    if host not in hostDict:
                        raise Exception()
                    info = ' | '.join(info)
                    host = client.replaceHTMLCodes(host)
                    host = host.encode('utf-8')
                    if control.setting('deb.rd_check') == 'true':
                        check = rd_check.rd_deb_check(url)
                        if check:
                            info = 'RD Checked' + ' | ' + info
                            sources.append(
                                {'source': host, 'quality': quality, 'language': 'en', 'url': check,
                                 'info': info, 'direct': False, 'debridonly': True})
                    else:
                        sources.append(
                            {'source': host, 'quality': quality, 'language': 'en', 'url': url,
                             'info': info, 'direct': False, 'debridonly': True})
                except Exception:
                    pass

            return sources
        except Exception:
            failure = traceback.format_exc()
            log_utils.log('---Scenerls Testing - Exception: \n' + str(failure))
            return sources
Example n. 9
0
    def sources(self, url, hostDict, hostprDict):
        """Collect playable source links from Dlpix search results.

        url         -- query string built by the add-on's movie/episode
                       handlers; parsed here with urlparse.parse_qs
        hostDict    -- known free file hosts
        hostprDict  -- known premium (debrid) file hosts

        Returns a list of source dicts with keys: 'source', 'quality',
        'language', 'url', 'info', 'direct', 'debridonly'.  Returns an
        empty list on any failure (the outer except logs the traceback).
        """
        try:
            sources = []

            if url is None:
                return sources

            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])

            # Build "<show> sXXeYY" for episodes, "<title> <year>" for movies,
            # then strip characters the site's search chokes on.
            query = '%s s%02de%02d' % (data['tvshowtitle'], int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else '%s %s' % (data['title'], data['year'])
            query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query).replace('&', 'and').replace('  ', ' ')

            post = '&subaction=search&search_start=0&full_search=0&result_from=1&story=%s' % urllib.quote_plus(query)

            r = client.request(self.search_link, post=post)

            posts = client.parseDOM(r, "div", attrs={"id": "dle-content"})
            hostDict = hostprDict + hostDict

            # Gather every result link, skipping in-page '#' anchors.
            items = []
            for post in posts:
                try:
                    for link in client.parseDOM(post, 'a', ret='href'):
                        if '#' in link:
                            continue
                        items.append(str(link))
                except Exception:
                    pass

            for item in items:
                try:
                    r = client.request(str(item))
                    for block in client.parseDOM(r, "div", attrs={"class": "pw-description clearfix"}):
                        # BUGFIX: findall() never raises, so the original
                        # try/except fallback regex was dead code.  Try the
                        # title="https..." form first; if it matches nothing,
                        # fall back to bare <br>https...<br> links.
                        found = re.compile('title="(https.+?)"').findall(block)
                        if not found:
                            found = re.compile('<br>(https.+?)<br>').findall(block)
                        for link in found:
                            if any(x in link for x in ['.rar', '.zip', '.iso', 'iMDB URL', 'imdb', 'youtube']):
                                continue
                            quality, info = source_utils.get_release_quality(link, link)
                            valid, host = source_utils.is_host_valid(link, hostDict)
                            # BUGFIX: the validity flag was previously ignored,
                            # letting links on unsupported hosts through.
                            if not valid:
                                continue
                            info = ' | '.join(info)
                            if control.setting('deb.rd_check') == 'true':
                                # Optional Real-Debrid availability check; a
                                # failed check drops the link (best-effort,
                                # matches the other scrapers in this file).
                                check = rd_check.rd_deb_check(link)
                                if check:
                                    info = 'RD Checked' + ' | ' + info
                                    sources.append(
                                        {'source': host, 'quality': quality, 'language': 'en', 'url': check,
                                         'info': info, 'direct': False, 'debridonly': True})
                            else:
                                sources.append(
                                    {'source': host, 'quality': quality, 'language': 'en', 'url': link,
                                     'info': info, 'direct': False, 'debridonly': True})
                except Exception:
                    pass
            return sources
        except Exception:
            failure = traceback.format_exc()
            log_utils.log('---Dlpix Testing - Exception: \n' + str(failure))
            return sources
Example n. 10
0
    def sources(self, url, hostDict, hostprDict):
        """Scrape 300MBFILMS search results into a list of source dicts.

        url         -- query string from the add-on's movie/episode handlers
        hostDict    -- known free file hosts
        hostprDict  -- known premium (debrid) file hosts

        Returns dicts with keys 'source', 'quality', 'language', 'url',
        'info', 'direct', 'debridonly'; empty list on failure.
        """
        try:
            sources = []

            if url is None:
                return sources

            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])

            title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']

            hdlr = 'S%02dE%02d' % (int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else data['year']

            # Episode searches use "<show> sXXeYY"; movies use "<title> <year>".
            if 'tvshowtitle' in data:
                query = '%s s%02de%02d' % (data['tvshowtitle'], int(data['season']), int(data['episode']))
            else:
                query = '%s %s' % (data['title'], data['year'])
            query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)

            search_url = urlparse.urljoin(self.base_link, self.search_link % urllib.quote_plus(query))

            page = client.request(search_url, headers=self.headers)

            posts = re.findall('<h2 class="title">(.+?)</h2>', page, re.IGNORECASE)

            hostDict = hostprDict + hostDict

            # Result titles are slugified on this site: spaces -> dashes, lowercased.
            slug = query.replace(" ", "-").lower()

            urls = []
            for post in posts:
                try:
                    hrefs = re.compile('a href="(.+?)"').findall(post)
                    name = hrefs[0]
                    if slug not in name:
                        continue
                    name = client.replaceHTMLCodes(name)

                    quality, info = source_utils.get_release_quality(name, name)
                    if 'CAM' in quality or 'SD' in quality:
                        continue

                    # The whole href list is handed to self.links(), which
                    # resolves the post into its download links.
                    for link in self.links(hrefs):
                        urls.append((link, quality, info))
                except Exception:
                    pass

            for link, quality, info in urls:
                if any(x in link for x in ['.rar', '.zip', '.iso', 'earn-money']):
                    continue
                link = client.replaceHTMLCodes(link)
                link = link.encode('utf-8')

                valid, host = source_utils.is_host_valid(link, hostDict)
                if not valid:
                    continue
                host = client.replaceHTMLCodes(host)
                host = host.encode('utf-8')
                info = ' | '.join(info)
                if control.setting('deb.rd_check') == 'true':
                    # Optional Real-Debrid availability check; links that fail
                    # the check are dropped (best-effort).
                    check = rd_check.rd_deb_check(link)
                    if check:
                        info = 'RD Checked' + ' | ' + info
                        sources.append(
                            {'source': host, 'quality': quality, 'language': 'en', 'url': check,
                             'info': info, 'direct': False, 'debridonly': True})
                else:
                    sources.append(
                        {'source': host, 'quality': quality, 'language': 'en', 'url': link, 'info': info,
                         'direct': False, 'debridonly': True})

            return sources
        except Exception:
            failure = traceback.format_exc()
            log_utils.log('---300MBFILMS Testing - Exception: \n' + str(failure))
            return sources
Example n. 11
0
    def sources(self, url, hostDict, hostprDict):
        """Parse the Mvrls feed for release links matching the request.

        url         -- query string from the add-on's movie/episode handlers
        hostDict    -- known free file hosts
        hostprDict  -- known premium (debrid) file hosts

        Returns dicts with keys 'source', 'quality', 'language', 'url',
        'info', 'direct', 'debridonly'; empty list on failure.
        """
        try:
            sources = []

            if url is None:
                return sources

            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])

            title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']

            # hdlr is the token a matching release name must carry:
            # "SxxEyy" for episodes, the year for movies.
            if 'tvshowtitle' in data:
                hdlr = 'S%02dE%02d' % (int(data['season']), int(data['episode']))
                query = '%s S%02dE%02d' % (data['tvshowtitle'], int(data['season']), int(data['episode']))
            else:
                hdlr = data['year']
                query = '%s %s' % (data['title'], data['year'])
            query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)

            search_url = urlparse.urljoin(self.base_link, self.search_link % urllib.quote_plus(query))

            html = client.request(search_url, headers=self.headers)

            hostDict = hostprDict + hostDict

            # Collect (release name, link, size text) per feed <item>.
            items = []
            for post in client.parseDOM(html, 'item'):
                try:
                    release = client.parseDOM(post, 'title')[0]
                    size_match = re.search('((?:\d+\.\d+|\d+\,\d+|\d+)\s*(?:GB|GiB|MB|MiB))', post)
                    size_text = size_match.groups()[0] if size_match else '0'
                    for enclosure in client.parseDOM(post, 'enclosure', ret='url'):
                        items.append((release, enclosure, size_text))
                except Exception:
                    pass

            for release, link, size_text in items:
                try:
                    if any(x in link for x in ['.rar', '.zip', '.iso', '.part']):
                        continue
                    link = client.replaceHTMLCodes(link)
                    link = link.encode('utf-8')

                    valid, host = source_utils.is_host_valid(link, hostDict)
                    if not valid:
                        continue

                    host = client.replaceHTMLCodes(host)
                    host = host.encode('utf-8')

                    release = client.replaceHTMLCodes(release)

                    # Strip year/episode/3D markers and everything after them,
                    # then compare the normalised titles.
                    bare = re.sub('(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d*|3D)(\.|\)|\]|\s|)(.+|)', '', release, flags=re.I)
                    if cleantitle.get(bare) != cleantitle.get(title):
                        continue

                    # The last year/episode token in the name must match hdlr;
                    # an IndexError here is swallowed by the per-item except.
                    marker = re.findall('[\.|\(|\[|\s](\d{4}|S\d*E\d*|S\d*)[\.|\)|\]|\s]', release)[-1].upper()
                    if marker != hdlr:
                        continue

                    quality, info = source_utils.get_release_quality(release, link)

                    # Best-effort: normalise the advertised size to GB.
                    try:
                        raw = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+)\s*(?:GB|GiB|MB|MiB))', size_text)[-1]
                        div = 1 if raw.endswith(('GB', 'GiB')) else 1024
                        info.append('%.2f GB' % (float(re.sub('[^0-9|/.|/,]', '', raw)) / div))
                    except Exception:
                        pass

                    info = ' | '.join(info)
                    if control.setting('deb.rd_check') == 'true':
                        # Optional Real-Debrid availability check; links that
                        # fail the check are dropped (best-effort).
                        check = rd_check.rd_deb_check(link)
                        if check:
                            info = 'RD Checked' + ' | ' + info
                            sources.append(
                                {'source': host, 'quality': quality, 'language': 'en', 'url': check, 'info': info,
                                 'direct': False, 'debridonly': True})
                    else:
                        sources.append(
                            {'source': host, 'quality': quality, 'language': 'en', 'url': link, 'info': info,
                             'direct': False, 'debridonly': True})
                except Exception:
                    pass

            # Prefer non-CAM copies whenever at least one exists.
            non_cam = [i for i in sources if not i['quality'] == 'CAM']
            if non_cam:
                sources = non_cam

            return sources
        except Exception:
            failure = traceback.format_exc()
            log_utils.log('---Mvrls Testing - Exception: \n' + str(failure))
            return sources