Code example #1
        def builder():
            # Generator: yields one populated Kodi ListItem per entry in
            # self.uncached; enumerate starts at 1 for the on-screen index.
            for count, item in enumerate(self.uncached, 1):
                try:
                    listitem = self.make_listitem()
                    quality = item.get('quality', 'SD')
                    quality_icon = self.get_quality_iconPath(quality)
                    extra_info = item.get('info')
                    try:
                        # First '|'-delimited field of 'info' is assumed to be
                        # the size; discard it when it is actually a codec/3D tag.
                        size_label = extra_info.split('|', 1)[0]
                        if any(value in size_label
                               for value in ['HEVC', '3D']):
                            size_label = ''
                    except:
                        # extra_info may be None (no 'info' key) -> no size label.
                        size_label = ''

                    try:
                        # Re-join every info field with ' / ' separators.
                        f = ' / '.join([
                            '%s' % info.strip()
                            for info in extra_info.split('|')
                        ])
                    except:
                        f = ''
                    if 'name_info' in item:
                        t = getFileType(name_info=item.get('name_info'))
                    else:
                        t = getFileType(url=item.get('url'))
                    # Combine joined info with the file-type string unless the
                    # info part is empty or a placeholder ('0 ', ' ').
                    t = '%s /%s' % (f, t) if (f != '' and f != '0 '
                                              and f != ' ') else t
                    if t == '': t = getFileType(url=item.get('url'))
                    extra_info = t

                    listitem.setProperty('venom.source_dict', jsdumps([item]))
                    listitem.setProperty('venom.debrid',
                                         self.debrid_abv(item.get('debrid')))
                    listitem.setProperty('venom.provider',
                                         item.get('provider').upper())
                    listitem.setProperty('venom.source',
                                         item.get('source').upper())
                    listitem.setProperty('venom.seeders',
                                         str(item.get('seeders')))
                    listitem.setProperty('venom.hash', item.get('hash', 'N/A'))
                    listitem.setProperty('venom.name', item.get('name'))
                    listitem.setProperty('venom.quality', quality.upper())
                    listitem.setProperty('venom.quality_icon', quality_icon)
                    listitem.setProperty('venom.url', item.get('url'))
                    listitem.setProperty('venom.extra_info', extra_info)
                    if size_label:
                        listitem.setProperty('venom.size_label', size_label)
                    else:
                        listitem.setProperty('venom.size_label', 'NA')
                    listitem.setProperty('venom.count', '%02d.)' % count)
                    yield listitem
                except:
                    # Best-effort: log and skip any item that fails to build.
                    from resources.lib.modules import log_utils
                    log_utils.error()
Code example #2
    def sources(self, url, hostDict, hostprDict):
        """Scrape magnet results and return Premiumize/Real-Debrid cached sources.

        Python 2 code (urlparse/urllib modules). Bare excepts are the file's
        convention: any per-item failure silently skips that item.
        """
        try:
            sources = []

            if url == None: return sources

            # Require at least one cached-provider toggle plus its API key.
            if not control.setting('pmcached.providers') == 'true' and not control.setting('rdcached.providers') == 'true': return sources
            if self.pm_api_key == '' and self.rd_api_key == '': return sources

            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])

            title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']

            # Match token: 'SxxEyy' for episodes, release year for movies.
            hdlr = 'S%02dE%02d' % (int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else data['year']

            query = '%s S%02dE%02d' % (data['tvshowtitle'], int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else '%s %s' % (data['title'], data['year'])
            query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)

            url = self.search_link % urllib.quote_plus(query)
            url = urlparse.urljoin(self.base_link, url)

            r = self.scraper.get(url).content

            # Result rows -> (magnet link, title, size cell).
            result = client.parseDOM(r, 'table', attrs={'class': 'table'})[0]
            result = client.parseDOM(result, 'tr')
            result = [(re.findall('href="(magnet[^"]+)"', i)[0], client.parseDOM(i, 'a', ret='title')[0], client.parseDOM(i, 'div', attrs={'class': 'tail'})[0]) for i in result]

            items = []

            for item in result:
                try:
                    name = item[1]
                    magnetlink = item[0]

                    size = ''
                    try:
                        # Normalise the reported size to 'x.xx GB' (MB -> GB).
                        size = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+) (?:GB|GiB|MB|MiB))', item[2])[0]
                        div = 1 if size.endswith(('GB', 'GiB')) else 1024
                        size = float(re.sub('[^0-9|/.|/,]', '', size))/div
                        size = '%.2f GB' % size
                    except:
                        pass

                    # Strip year/episode tags and markup, then verify the title
                    # and the SxxEyy/year token both match the request.
                    t = re.sub('(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d+|3D)(\.|\)|\]|\s|)(.+|)', '', name)
                    t = re.sub('<.+?>', '', t)
                    if not cleantitle.get(t) == cleantitle.get(title): raise Exception()
                    y = re.findall('[\.|\(|\[|\s](\d{4}|(?:S|s)\d*(?:E|e)\d*|(?:S|s)\d*)[\.|\)|\]|\s]', name)[-1].upper()
                    if not y == hdlr: raise Exception()

                    u = [(name, magnetlink, size)]
                    items += u
                except:
                    pass

            # Premiumize: keep only hashes the cache check reports 'finished'.
            if control.setting('pmcached.providers') == 'true' and not self.pm_api_key == '':
                for item in items:
                    try:
                        _hash = re.findall('btih:(.*?)\W', item[1])[0]
                        checkurl = urlparse.urljoin(self.pm_base_link, self.pm_checkcache_link % (self.pm_api_key, _hash, self.pm_api_key))
                        r = client.request(checkurl)
                        if not 'finished' in r: raise Exception()

                        name = client.replaceHTMLCodes(item[0])
                        quality, info = source_utils.get_release_quality(name, None)
                        filetype = source_utils.getFileType(name)
                        info += [filetype.strip(), name]
                        info = filter(None, info)
                        info = ' | '.join(info)
                        if not item[2] == '':
                            info = '%s | %s' % (item[2], info)
                        url = 'magnet:?xt=urn:btih:%s' % _hash

                        sources.append({'source': 'PMCACHED', 'quality': quality, 'language': 'en', 'url': url, 'info': info, 'direct': False, 'debridonly': False, 'cached': True})
                    except:
                        pass

            # Real-Debrid: prefer an already-downloaded torrent's first link,
            # otherwise fall back to instant-availability ('rdmagnet:' marker).
            if control.setting('rdcached.providers') == 'true' and not self.rd_api_key == '':
                checktorr_r = self.checkrdcache()
                checktorr_result = json.loads(checktorr_r)

                for item in items:
                    try:
                        _hash = re.findall('btih:(.*?)\W', item[1])[0]
                        _hash = _hash.lower()

                        url = ''
                        for i in checktorr_result:
                            try:
                                if _hash == i['hash'] and i['status'] == 'downloaded':
                                    url = i['links'][0]
                                    break
                            except:
                                pass

                        if url == '':
                            checkurl = urlparse.urljoin(self.rd_base_link, self.rd_checkcache_link % (_hash, self.rd_api_key))
                            r = client.request(checkurl)
                            checkinstant = json.loads(r)
                            checkinstant = checkinstant[_hash]

                            checkinstant_num = 0
                            try:
                                checkinstant_num = len(checkinstant['rd'])
                            except:
                                pass

                            if checkinstant_num == 0: raise Exception()
                            url = 'rdmagnet:?xt=urn:btih:%s' % _hash

                        if url == '': raise Exception()

                        name = client.replaceHTMLCodes(item[0])
                        quality, info = source_utils.get_release_quality(name, None)
                        filetype = source_utils.getFileType(name)
                        info += [filetype.strip(), name]
                        info = filter(None, info)
                        info = ' | '.join(info)
                        if not item[2] == '':
                            info = '%s | %s' % (item[2], info)

                        sources.append({'source': 'RDCACHED', 'quality': quality, 'language': 'en', 'url': url, 'info': info, 'direct': False, 'debridonly': False, 'cached': True})
                    except:
                        pass

            return sources
        except:
            log_utils.log('>>>> %s TRACE <<<<\n%s' % (__file__.upper().split('\\')[-1].split('.')[0], traceback.format_exc()), log_utils.LOGDEBUG)
            return sources
Code example #3
File: 300mbfilms.py  Project: bopopescu/fuzzybritches
	def sources(self, url, hostDict, hostprDict):
		"""300MBFILMS scraper: return debrid-only hoster links.

		NOTE(review): get_release_quality(tit, item[0]) passes the first
		character of the post's HTML ('<') as the url argument, so quality
		detection effectively uses the title only -- confirm whether the
		post's href was intended here.
		"""
		try:
			sources = []

			if url is None:
				return sources

			if debrid.status() is False:
				return sources

			hostDict = hostprDict + hostDict

			data = urlparse.parse_qs(url)
			data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])

			title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
			title = title.replace('&', 'and').replace('Special Victims Unit', 'SVU')

			# Match token: 'SxxEyy' for episodes, release year for movies.
			hdlr = 'S%02dE%02d' % (int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else data['year']

			query = '%s %s' % (title, hdlr)
			query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', '', query)

			url = self.search_link % urllib.quote_plus(query)
			url = urlparse.urljoin(self.base_link, url)
			# log_utils.log('url = %s' % url, log_utils.LOGDEBUG)

			r = client.request(url)

			posts = client.parseDOM(r, 'h2')

			urls = []
			for item in posts:
				# Only posts that are bare anchor tags are result entries.
				if not item.startswith('<a href'):
					continue

				try:
					tit = client.parseDOM(item, "a")[0]
					# Title portion before the SxxEyy/year marker, normalised
					# the same way as 'title' above for comparison.
					t = tit.split(hdlr)[0].replace(data['year'], '').replace('(', '').replace(')', '').replace('&', 'and')
					if cleantitle.get(t) != cleantitle.get(title):
						continue

					if hdlr not in tit:
						continue

					quality, info = source_utils.get_release_quality(tit, item[0])

					try:
						# Normalise the reported size to 'x.xx GB' (MB -> GB).
						size = re.findall('((?:\d+\,\d+\.\d+|\d+\.\d+|\d+\,\d+|\d+)\s*(?:GB|GiB|Gb|MB|MiB|Mb))', item)[0]
						div = 1 if size.endswith(('GB', 'GiB', 'Gb')) else 1024
						size = float(re.sub('[^0-9|/.|/,]', '', size.replace(',', '.'))) / div
						size = '%.2f GB' % size
					except:
						size = '0'
						pass

					info.append(size)

					fileType = source_utils.getFileType(tit)
					info.append(fileType)
					info = ' | '.join(info) if fileType else info[0]

					# Re-bind 'item' to the post's href list for self.links().
					item = client.parseDOM(item, 'a', ret='href')

					url = item

					links = self.links(url)
					if links is None:
						continue

					urls += [(i, quality, info) for i in links]

				except:
					source_utils.scraper_error('300MBFILMS')
					pass

			for item in urls:
				# Skip ad redirects and archive files.
				if 'earn-money' in item[0]:
					continue

				if any(x in item[0] for x in ['.rar', '.zip', '.iso']):
					continue

				url = client.replaceHTMLCodes(item[0])
				url = url.encode('utf-8')

				valid, host = source_utils.is_host_valid(url, hostDict)

				if not valid:
					continue

				host = client.replaceHTMLCodes(host)
				host = host.encode('utf-8')

				sources.append({'source': host, 'quality': item[1], 'language': 'en', 'url': url, 'info': item[2], 'direct': False, 'debridonly': True})
			return sources

		except:
			source_utils.scraper_error('300MBFILMS')
			return sources
Code example #4
    def searchShowPack(self, title, season, episode, query):
        """Find a Premiumize-cached season pack and return the episode's stream.

        season/episode are tag strings (e.g. 'S01', 'E02'); their concatenation
        is searched for in the pack's file names. Bare excepts skip bad items.
        """
        try:
            sources = []

            se_ep = season + episode
            url = self.search_link % cleantitle.geturl(query)
            url = urlparse.urljoin(self.base_link, url)
            r = client.request(url)

            # Result rows -> (magnet href, title cell, raw row html).
            result = client.parseDOM(r, 'table', attrs={'class': 'tl'})[0]
            result = client.parseDOM(result, 'tr', attrs={'class': 'tlr|tlz'})
            result = [(client.parseDOM(i,
                                       'a',
                                       attrs={'title': 'Magnet link'},
                                       ret='href')[0],
                       client.parseDOM(i, 'td', attrs={'class': 'tli'})[0], i)
                      for i in result]
            result = [(i[0], client.parseDOM(i[1], 'a')[0], i[2])
                      for i in result]

            items = []

            for item in result:
                try:
                    name = item[1]
                    magnetlink = item[0]

                    size = ''
                    try:
                        # Normalise the reported size to 'x.xx GB' (MB -> GB).
                        size = re.findall(
                            '((?:\d+\.\d+|\d+\,\d+|\d+) (?:GB|GiB|MB|MiB))',
                            item[2])[0]
                        div = 1 if size.endswith(('GB', 'GiB')) else 1024
                        size = float(re.sub('[^0-9|/.|/,]', '', size)) / div
                        size = '%.2f GB' % size
                    except:
                        pass

                    # Strip the season tag and everything after it, then verify
                    # the title and the season both match the request.
                    t = re.sub('(\.|\(|\[|\s)((?:S|s)\d+)(\.|\)|\]|\s|)(.+|)',
                               '', name)
                    if not cleantitle.get(t) == cleantitle.get(title):
                        raise Exception()
                    y = re.findall('[\.|\(|\[|\s]((?:S|s)\d*)[\.|\)|\]|\s]',
                                   name)[-1].upper()
                    if not y.lower() == season.lower(): raise Exception()
                    if not size == '':
                        u = [(name, magnetlink, size)]
                    else:
                        u = [(name, magnetlink)]
                    items += u
                except:
                    traceback.print_exc()
                    pass

            for item in items:
                try:
                    # Only packs already cached ('finished') on Premiumize qualify.
                    _hash = re.findall('btih:(.*?)\W', item[1])[0]
                    checkurl = urlparse.urljoin(
                        self.pm_base_link, self.pm_checkcache_link %
                        (self.pm_api_key, _hash, self.pm_api_key))
                    r = client.request(checkurl)
                    if not 'finished' in r: raise Exception()

                    name = client.replaceHTMLCodes(item[0])
                    quality, info = source_utils.get_release_quality(
                        name, None)
                    filetype = source_utils.getFileType(name)
                    info += [filetype.strip(), name]
                    info = filter(None, info)
                    info = ' | '.join(info)

                    # Fetch the pack's file list and pick the largest streamable
                    # file whose name contains the wanted SxxEyy tag.
                    season_url = urlparse.urljoin(
                        self.pm_base_link,
                        self.pm_dl_link % (self.pm_api_key, _hash))
                    r = client.request(season_url)
                    streamitems = json.loads(r)
                    if not streamitems['status'] == 'success':
                        raise Exception()
                    streamitems = streamitems['content']
                    streamitems = [
                        i for i in streamitems if not i['stream_link'] == False
                    ]
                    streamitems = [
                        (i['link'], i['size']) for i in streamitems
                        if se_ep.lower() in i['link'].rsplit('/')[-1].lower()
                    ]
                    streamitems = sorted(streamitems,
                                         key=lambda x: x[1],
                                         reverse=True)
                    url = streamitems[0][0]

                    size = ''
                    try:
                        # Per-episode size, bytes -> GiB.
                        size = streamitems[0][1]
                        size = float(size) / 1073741824
                        size = '%.2f GB' % size
                    except:
                        pass
                    try:
                        # item[2] (whole-pack size) only exists when it was
                        # parsed successfully above; IndexError skips this.
                        info = '%s (%s) | %s' % (size, item[2], info)
                    except:
                        pass

                    sources.append({
                        'source': 'PMCACHED',
                        'quality': quality,
                        'language': 'en',
                        'url': url,
                        'info': info,
                        'direct': False,
                        'debridonly': False,
                        'cached': True
                    })
                except:
                    traceback.print_exc()
                    pass

            return sources
        except:
            log_utils.log(
                '>>>> %s TRACE <<<<\n%s' %
                (__file__.upper().split('\\')[-1].split('.')[0],
                 traceback.format_exc()), log_utils.LOGDEBUG)
            return sources
Code example #5
    def sourcesFilter(self):
        """Filter, order and label self.sources in place; return the list.

        Ordering: local files first, then quality tiers (4K -> SD) capped by
        the 'hosts.quality' setting, with debrid-resolvable entries ahead of
        free hosters inside each tier. Also builds each source's display
        'label'. Python 2 code (dict.items() concatenation below).
        """
        provider = control.setting('hosts.sort.provider')
        if provider == '': provider = 'false'

        quality = control.setting('hosts.quality')
        if quality == '': quality = '0'

        captcha = control.setting('hosts.captcha')
        if captcha == '': captcha = 'true'

        HEVC = control.setting('HEVC')

        # Shuffle first so equal-priority sources appear in random order.
        random.shuffle(self.sources)

        if provider == 'true':
            self.sources = sorted(self.sources, key=lambda k: k['provider'])

        # Demote 'checkquality' sources on non-HQ hosts to SD.
        for i in self.sources:
            if 'checkquality' in i and i['checkquality'] == True:
                if not i['source'].lower() in self.hosthqDict and i['quality'] not in ['SD', 'SCR', 'CAM']: i.update({'quality': 'SD'})

        # Local sources are set aside and re-added at the front later.
        local = [i for i in self.sources if 'local' in i and i['local'] == True]
        for i in local: i.update({'language': self._getPrimaryLang() or 'en'})
        self.sources = [i for i in self.sources if not i in local]

        # Direct links ahead of hoster links.
        filter = []
        filter += [i for i in self.sources if i['direct'] == True]
        filter += [i for i in self.sources if i['direct'] == False]
        self.sources = filter

        # Duplicate each source once per debrid service that can resolve its
        # host; non-premium hosters are kept only when not debrid-only.
        filter = []
        for d in debrid.debrid_resolvers:
            valid_hoster = set([i['source'] for i in self.sources])
            valid_hoster = [i for i in valid_hoster if d.valid_url('', i)]
            filter += [dict(i.items() + [('debrid', d.name)]) for i in self.sources if i['source'] in valid_hoster]
        filter += [i for i in self.sources if not i['source'].lower() in self.hostprDict and i['debridonly'] == False]

        self.sources = filter

        # Normalise legacy 'HD' tag to '720p'.
        for i in range(len(self.sources)):
            q = self.sources[i]['quality']
            if q == 'HD': self.sources[i].update({'quality': '720p'})

        # Rebuild tier by tier: setting '0' allows 4K, '1' caps at 1440p,
        # '2' at 1080p, '3' at 720p; SD/SCR/CAM always pass. Within a tier:
        # debrid first, then member-only, then free.
        filter = []
        filter += local

        if quality in ['0']: filter += [i for i in self.sources if i['quality'] == '4K' and 'debrid' in i]
        if quality in ['0']: filter += [i for i in self.sources if i['quality'] == '4K' and not 'debrid' in i and 'memberonly' in i]
        if quality in ['0']: filter += [i for i in self.sources if i['quality'] == '4K' and not 'debrid' in i and not 'memberonly' in i]

        if quality in ['0', '1']: filter += [i for i in self.sources if i['quality'] == '1440p' and 'debrid' in i]
        if quality in ['0', '1']: filter += [i for i in self.sources if i['quality'] == '1440p' and not 'debrid' in i and 'memberonly' in i]
        if quality in ['0', '1']: filter += [i for i in self.sources if i['quality'] == '1440p' and not 'debrid' in i and not 'memberonly' in i]

        if quality in ['0', '1', '2']: filter += [i for i in self.sources if i['quality'] == '1080p' and 'debrid' in i]
        if quality in ['0', '1', '2']: filter += [i for i in self.sources if i['quality'] == '1080p' and not 'debrid' in i and 'memberonly' in i]
        if quality in ['0', '1', '2']: filter += [i for i in self.sources if i['quality'] == '1080p' and not 'debrid' in i and not 'memberonly' in i]

        if quality in ['0', '1', '2', '3']: filter += [i for i in self.sources if i['quality'] == '720p' and 'debrid' in i]
        if quality in ['0', '1', '2', '3']: filter += [i for i in self.sources if i['quality'] == '720p' and not 'debrid' in i and 'memberonly' in i]
        if quality in ['0', '1', '2', '3']: filter += [i for i in self.sources if i['quality'] == '720p' and not 'debrid' in i and not 'memberonly' in i]

        filter += [i for i in self.sources if i['quality'] in ['SD', 'SCR', 'CAM']]
        self.sources = filter

        # Drop captcha-protected hosters (unless resolved via debrid) when
        # the captcha setting is off, then drop blocked hosters likewise.
        if not captcha == 'true':
            filter = [i for i in self.sources if i['source'].lower() in self.hostcapDict and not 'debrid' in i]
            self.sources = [i for i in self.sources if not i in filter]

        filter = [i for i in self.sources if i['source'].lower() in self.hostblockDict and not 'debrid' in i]
        self.sources = [i for i in self.sources if not i in filter]

        # When several languages are present, push non-English sources first.
        multi = [i['language'] for i in self.sources]
        multi = [x for y,x in enumerate(multi) if x not in multi[:y]]
        multi = True if len(multi) > 1 else False

        if multi == True:
            self.sources = [i for i in self.sources if not i['language'] == 'en'] + [i for i in self.sources if i['language'] == 'en']

        self.sources = self.sources[:2000]

        extra_info = control.setting('sources.extrainfo')
        prem_identify = control.setting('prem.identify')
        if prem_identify == '': prem_identify = 'blue'
        prem_identify = self.getPremColor(prem_identify)

        # Build the display label for every remaining source.
        for i in range(len(self.sources)):

            if extra_info == 'true': t = source_utils.getFileType(self.sources[i]['url'])
            else: t = None

            u = self.sources[i]['url']

            p = self.sources[i]['provider']

            q = self.sources[i]['quality']

            s = self.sources[i]['source']

            # Host name without its TLD.
            s = s.rsplit('.', 1)[0]

            l = self.sources[i]['language']

            try: f = (' | '.join(['[I]%s [/I]' % info.strip() for info in self.sources[i]['info'].split('|')]))
            except: f = ''

            # Ensure every source carries a 'debrid' key from here on.
            try: d = self.sources[i]['debrid']
            except: d = self.sources[i]['debrid'] = ''

            if not d == '': label = '%02d | [B]%s[/B] | ' % (int(i+1), d)
            #if not d == '': label = '%02d | [B]%s[/B] | [B]%s[/B] | ' % (int(i+1), p, d)
            else: label = '%02d | [B]%s[/B] | ' % (int(i+1), p)

            if multi == True and not l == 'en': label += '[B]%s[/B] | ' % l

            ### if q in ['4K', '1440p', '1080p', 'HD']: label += '%s | %s | [B][I]%s [/I][/B]' % (s, f, q)
            if t:
                if q in ['4K', '1440p', '1080p', '720p']: label += '%s | [B][I]%s [/I][/B] | [I]%s[/I] | %s' % (s, q, t, f)
                elif q == 'SD': label += '%s | %s | [I]%s[/I]' % (s, f, t)
                else: label += '%s | %s | [I]%s [/I] | [I]%s[/I]' % (s, f, q, t)
            else:
                if q in ['4K', '1440p', '1080p', '720p']: label += '%s | [B][I]%s [/I][/B] | %s' % (s, q, f)
                elif q == 'SD': label += '%s | %s' % (s, f)
                else: label += '%s | %s | [I]%s [/I]' % (s, f, q)
            # Clean up placeholder '0' fields and stray separators.
            label = label.replace('| 0 |', '|').replace(' | [I]0 [/I]', '')
            #label = label.replace('[I]HEVC [/I]', 'HEVC')
            label = re.sub('\[I\]\s+\[/I\]', ' ', label)
            label = re.sub('\|\s+\|', '|', label)
            label = re.sub('\|(?:\s+|)$', '', label)

            # Colour debrid-backed labels with the configured premium colour.
            if d:
                if not prem_identify == 'nocolor':
                    self.sources[i]['label'] = ('[COLOR %s]' % (prem_identify)) + label.upper() + '[/COLOR]'
                else: self.sources[i]['label'] = label.upper()
            else: self.sources[i]['label'] = label.upper()

        try:
            if not HEVC == 'true': self.sources = [i for i in self.sources if not 'HEVC' in i['label']]
        except: pass

        return self.sources
Code example #6
File: easynews.py  Project: TheProphet1/Prophet
    def sources(self, url, hostDict, hostprDict):
        """Easynews scraper: query the search API and return direct sources.

        Builds an authenticated download URL per result and skips posts that
        lack English audio, are shorter than six minutes, are password
        protected, virus flagged, or are not videos.
        """
        auth = self._get_auth()

        if not auth:
            return

        sources = []

        query = self._query(url)

        url, params = self._translate_search(query)
        headers = {'Authorization': auth}
        response = requests.get(url, params=params, headers=headers).text
        results = json.loads(response)

        down_url = results.get('downURL')
        dl_farm = results.get('dlFarm')
        dl_port = results.get('dlPort')
        files = results.get('data', [])

        for item in files:

            try:

                post_hash, post_title, ext, duration = item['0'], item[
                    '10'], item['11'], item['14']

                # Rejection flags. Indices 1..5 are assigned below, so six
                # slots are needed -- the original '[False] * 5' raised
                # IndexError on checks[5] for every non-VIDEO post (index 0
                # is unused).
                checks = [False] * 6
                if 'alangs' in item and item['alangs'] and 'eng' not in item[
                        'alangs']:
                    checks[1] = True  # no English audio track
                if re.match('^\d+s', duration) or re.match(
                        '^[0-5]m', duration):
                    checks[2] = True  # seconds-long or < 6 min: sample/junk
                if 'passwd' in item and item['passwd']: checks[3] = True
                if 'virus' in item and item['virus']: checks[4] = True
                if 'type' in item and item['type'].upper() != 'VIDEO':
                    checks[5] = True

                if any(checks):
                    continue

                # Assemble the farm/port download path; the player expects the
                # Authorization header appended after '|'.
                stream_url = down_url + quote(
                    '/%s/%s/%s%s/%s%s' %
                    (dl_farm, dl_port, post_hash, ext, post_title, ext))
                file_name = post_title
                file_dl = stream_url + '|Authorization=%s' % (quote(auth))
                size = float(int(item['rawSize'])) / 1073741824  # bytes -> GiB

                quality = source_utils.get_release_quality(file_name)[0]
                info = source_utils.getFileType(file_name)
                info = '%.2f GB | %s | %s' % (
                    size, info, file_name.replace('.', ' ').upper())

                sources.append({
                    'source': 'direct',
                    'quality': quality,
                    'language': "en",
                    'url': file_dl,
                    'info': info,
                    'direct': True,
                    'debridonly': False
                })

            except:
                # Best-effort: report and skip any malformed result item.
                print("Unexpected error in Easynews Script: source",
                      sys.exc_info()[0])
                exc_type, exc_obj, exc_tb = sys.exc_info()
                print(exc_type, exc_tb.tb_lineno)
                pass

        return sources
Code example #7
    def sources(self, url, hostDict, hostprDict):
        """List the user's Premiumize cloud folder and return matching items.

        Python 2 code (urlparse/urllib modules). Per-item failures are
        silently skipped by the bare excepts, per file convention.
        """
        try:
            sources = []

            if url == None: return sources

            if not control.setting('pmcached.providers') == 'true':
                return sources
            if self.api_key == '': return sources

            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '')
                         for i in data])

            title = data['tvshowtitle'] if 'tvshowtitle' in data else data[
                'title']

            # Match token: 'SxxEyy' for episodes, release year for movies.
            hdlr = 'S%02dE%02d' % (int(data['season']), int(
                data['episode'])) if 'tvshowtitle' in data else data['year']

            query = '%s S%02dE%02d' % (
                data['tvshowtitle'], int(data['season']),
                int(data['episode'])) if 'tvshowtitle' in data else '%s %s' % (
                    data['title'], data['year'])
            query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)

            # NOTE: query is built but the folder listing below is unfiltered;
            # matching happens per item against title/hdlr instead.
            url = self.folderlist_link % self.api_key
            url = urlparse.urljoin(self.base_link, url)
            r = client.request(url)

            result = json.loads(r)

            if not result['status'] == 'success': raise Exception()
            pmitems = result['content']

            items = []

            for item in pmitems:
                try:
                    name, id, type = item['name'], item['id'], item['type']
                    # Strip year/episode tags, then verify title and token.
                    t = re.sub(
                        '(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d+|3D)(\.|\)|\]|\s|)(.+|)',
                        '', name)
                    if not cleantitle.get(t) == cleantitle.get(title):
                        raise Exception()
                    y = re.findall(
                        '[\.|\(|\[|\s](\d{4}|(?:S|s)\d*(?:E|e)\d*|(?:S|s)\d*)[\.|\)|\]|\s]',
                        name)[-1].upper()
                    if not y == hdlr: raise Exception()
                    if type == 'folder':
                        # Folders resolve to a sub-folder listing URL.
                        params = {'id': id, 'includebreadcrumbs': 'false'}
                        params = '&' + urllib.urlencode(params)
                        url = self.folderlist_link % self.api_key
                        url = urlparse.urljoin(self.base_link, url + params)
                    elif type == 'file':
                        # Files must already be streamable.
                        if item['stream_link'] == False: raise Exception()
                        url = item['link']
                    u = [(name, url)]
                    items += u
                except:
                    pass

            for item in items:
                try:
                    name = client.replaceHTMLCodes(item[0])
                    quality, info = source_utils.get_release_quality(
                        name, None)
                    filetype = source_utils.getFileType(name)
                    info += [filetype.strip(), name]
                    info = filter(None, info)
                    info = ' | '.join(info)

                    sources.append({
                        'source': 'PMCACHED',
                        'quality': quality,
                        'language': 'en',
                        'url': item[1],
                        'info': info,
                        'direct': False,
                        'debridonly': False,
                        'cached': True
                    })
                except:
                    pass

            return sources
        except:
            log_utils.log(
                '>>>> %s TRACE <<<<\n%s' %
                (__file__.upper().split('\\')[-1].split('.')[0],
                 traceback.format_exc()), log_utils.LOGDEBUG)
            return sources
Code example #8
    def sources(self, url, hostDict, hostprDict):
        """List the user's Real-Debrid torrents and return matching cached items.

        Python 2 code. Season packs are expanded via self.getSeasonItems();
        per-item failures are silently skipped by the bare excepts.
        """
        try:
            sources = []
            if url == None: return sources

            if not control.setting('rdcached.providers') == 'true':
                return sources
            if self.api_key == '': return sources

            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '')
                         for i in data])

            title = data['tvshowtitle'] if 'tvshowtitle' in data else data[
                'title']

            if 'tvshowtitle' in data:
                season = 'S%02d' % int(data['season'])
                episode = 'E%02d' % int(data['episode'])
            # Match token: 'SxxEyy' for episodes, release year for movies.
            hdlr = 'S%02dE%02d' % (int(data['season']), int(
                data['episode'])) if 'tvshowtitle' in data else data['year']

            checktorr_r = self.checkrdcache()
            result = json.loads(checktorr_r)

            items = []

            for i in result:
                try:
                    # Only fully downloaded torrents are usable.
                    if not i['status'] == 'downloaded': raise Exception()

                    name = i['filename']
                    # Strip year/episode tags, then verify the title matches.
                    t = re.sub(
                        '(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d+|3D)(\.|\)|\]|\s|)(.+|)',
                        '', name)
                    if not cleantitle.get(t) == cleantitle.get(title):
                        raise Exception()
                    y = re.findall(
                        '[\.|\(|\[|\s](\d{4}|(?:S|s)\d*(?:E|e)\d*|(?:S|s)\d*)[\.|\)|\]|\s]',
                        name)[-1].upper()

                    if not y == hdlr:
                        # Token is a bare season tag: treat as a season pack
                        # and expand it to per-episode items.
                        if 'tvshowtitle' in data:
                            if not y == season:
                                raise Exception()
                            else:
                                items += self.getSeasonItems(i, hdlr)
                    else:
                        # Exact match: resolve the torrent's first link.
                        info_url = urlparse.urljoin(
                            self.base_link,
                            self.torrentsinfo_link % (i['id'], self.api_key))

                        r = client.request(info_url)

                        torr_info = json.loads(r)
                        links = torr_info['links']
                        if len(links) == 0: raise Exception()
                        links = links[0]

                        u = [(name, links)]
                        items += u
                except:
                    pass

            for item in items:
                try:
                    name = item[0]
                    quality, info = source_utils.get_release_quality(
                        name, None)
                    filetype = source_utils.getFileType(name)
                    info += [filetype.strip(), name]
                    info = filter(None, info)
                    info = ' | '.join(info)

                    sources.append({
                        'source': 'RDCACHED',
                        'quality': quality,
                        'language': 'en',
                        'url': item[1],
                        'info': info,
                        'direct': False,
                        'debridonly': False,
                        'cached': True
                    })
                except:
                    pass

            return sources
        except:
            log_utils.log(
                '>>>> %s TRACE <<<<\n%s' %
                (__file__.upper().split('\\')[-1].split('.')[0],
                 traceback.format_exc()), log_utils.LOGDEBUG)
            return sources
# Code example #9 (score: 0)
    def sourcesFilter(self):
        """Sort, filter and label ``self.sources`` in place.

        Applies the user's provider/quality/captcha settings, removes
        premium-only and blocked hosts, orders sources (direct first,
        higher quality first, preferred language last so it lands on
        top of Kodi's list), builds a display ``label`` for each entry,
        and returns the final list capped at 2500 entries.
        """
        # --- read user settings, with fallbacks for unset values ---
        provider = control.setting('hosts.sort.provider')
        if provider == '':
            provider = 'false'

        quality = control.setting('hosts.quality')
        if quality == '':
            quality = '0'

        captcha = control.setting('hosts.captcha')
        if captcha == '':
            captcha = 'true'

        # Shuffle first so later stable sorts break ties randomly.
        random.shuffle(self.sources)

        if provider == 'true':
            self.sources = sorted(self.sources, key=lambda k: k['provider'])

        # Sources flagged 'checkquality' are demoted to SD unless the host
        # is on the trusted-HQ list.
        for i in self.sources:
            if 'checkquality' in i and i['checkquality'] is True:
                if not i['source'].lower() in self.hosthqDict and i['quality'] not in ['SD', 'SCR', 'CAM']:
                    i.update({'quality': 'SD'})

        # Pull local sources aside; they are re-added at the front later
        # with the primary UI language attached.
        local = [i for i in self.sources if 'local' in i and i['local'] is True]
        for i in local:
            i.update({'language': self._getPrimaryLang() or 'en'})
        self.sources = [i for i in self.sources if i not in local]

        # Order: direct links before hoster links.
        filter = []
        filter += [i for i in self.sources if i['direct'] is True]
        filter += [i for i in self.sources if i['direct'] is False]
        self.sources = filter

        filter = []

        # Drop premium-only hosts entirely.
        filter += [i for i in self.sources if not i['source'].lower() in self.hostprDict]

        self.sources = filter

        # Normalise the legacy 'HD' tag to '720p'.
        for i in range(len(self.sources)):
            q = self.sources[i]['quality']
            if q == 'HD':
                self.sources[i].update({'quality': '720p'})

        # Rebuild in quality order, gated by the user's max-quality
        # setting ('0'=4K, '1'=1080p, '2'=720p); SD/SCR/CAM always pass.
        filter = []
        filter += local

        if quality in ['0']:
            filter += [i for i in self.sources if i['quality'] == '4K']

        if quality in ['0', '1']:
            filter += [i for i in self.sources if i['quality'] == '1080p']

        if quality in ['0', '1', '2']:
            filter += [i for i in self.sources if i['quality'] == '720p']

        filter += [i for i in self.sources if i['quality'] in ['SD', 'SCR', 'CAM']]
        self.sources = filter

        # Optionally remove hosts known to present captchas.
        if not captcha == 'true':
            filter = [i for i in self.sources if i['source'].lower() in self.hostcapDict]
            self.sources = [i for i in self.sources if i not in filter]

        # Always remove blocked hosts.
        filter = [i for i in self.sources if i['source'].lower() in self.hostblockDict]
        self.sources = [i for i in self.sources if i not in filter]

        # Detect whether more than one language is present; if so, put
        # English last (Kodi shows the last-appended group on top).
        multi = [i['language'] for i in self.sources]
        multi = [x for y, x in enumerate(multi) if x not in multi[:y]]
        multi = True if len(multi) > 1 else False

        if multi is True:
            self.sources = [i for i in self.sources if not i['language'] ==
                            'en'] + [i for i in self.sources if i['language'] == 'en']

        self.sources = self.sources[:2500]

        extra_info = control.setting('sources.extrainfo')

        HEVC = control.setting('HEVC')

        prem_identify = control.setting('prem.identify')
        prem_identify = self.getPremColor(prem_identify)

        torr_identify = control.setting('torrent.identify')
        torr_identify = self.getPremColor(torr_identify)

        # --- build the display label for every remaining source ---
        for i in range(len(self.sources)):
            # File-type details (codec/audio tags) only when enabled.
            if extra_info == 'true':
                t = source_utils.getFileType(self.sources[i]['url'])
            else:
                t = None

            u = self.sources[i]['url']

            p = self.sources[i]['provider']

            q = self.sources[i]['quality']

            s = self.sources[i]['source']

            # Strip a trailing domain suffix (e.g. "host.com" -> "host").
            s = s.rsplit('.', 1)[0]

            l = self.sources[i]['language']

            # Italicised " | "-joined info string; empty when absent.
            try:
                f = (' | '.join(['[I]%s [/I]' % info.strip() for info in self.sources[i]['info'].split('|')]))
            except:
                f = ''

            # "NN | [B]PROVIDER[/B] | ..." with language shown only in
            # multi-language result sets.
            label = '%02d | [B]%s[/B] | ' % (int(i+1), p)

            if multi is True and not l == 'en':
                label += '[B]%s[/B] | ' % l

            if t:
                if q in ['4K', '1080p', '720p']:
                    label += '%s | [B][I]%s [/I][/B] | [I]%s[/I] | %s' % (s, q, t, f)
                elif q == 'SD':
                    label += '%s | %s | [I]%s[/I]' % (s, f, t)
                else:
                    label += '%s | %s | [I]%s [/I] | [I]%s[/I]' % (s, f, q, t)
            else:
                if q in ['4K', '1080p', '720p']:
                    label += '%s | [B][I]%s [/I][/B] | %s' % (s, q, f)
                elif q == 'SD':
                    label += '%s | %s' % (s, f)
                else:
                    label += '%s | %s | [I]%s [/I]' % (s, f, q)
            # Clean up placeholder zeros and collapsed separators left by
            # missing info fields.
            label = label.replace('| 0 |', '|').replace(' | [I]0 [/I]', '')
            label = re.sub('\[I\]\s+\[/I\]', ' ', label)
            label = re.sub('\|\s+\|', '|', label)
            label = re.sub('\|(?:\s+|)$', '', label)

            self.sources[i]['label'] = label.upper()

        # Optionally hide HEVC releases (matched via the built label).
        try:
            if not HEVC == 'true':
                self.sources = [i for i in self.sources if 'HEVC' not in i['label']]
        except:
            pass

        self.sources = [i for i in self.sources if 'label' in i]

        return self.sources
# Code example #10 (score: 0)
    def searchShowPack(self, title, season, episode, query, category, token):
        """Search for season-pack torrents cached on Premiumize and return
        sources for the single wanted episode inside the pack.

        title    -- show title used for name matching.
        season   -- zero-padded season tag, e.g. "S05".
        episode  -- zero-padded episode tag, e.g. "E03".
        query    -- dot-separated search string for the torrent API.
        category -- API search category (e.g. "tv").
        token    -- API token previously obtained by the caller.

        Returns a list of "PMCACHED" source dicts (empty on failure).
        """
        try:
            sources = []

            # Combined tag, e.g. "S05E03", used to find the episode file
            # inside the pack's file list.
            se_ep = season + episode
            url = urlparse.urljoin(self.base_link,
                                   self.search_link % (query, category, token))
            r = client.request(url)

            result = json.loads(r)
            result = result['torrent_results']

            items = []

            for item in result:
                try:
                    name = item['title']
                    magnetlink = item['download']

                    # Human-readable size (API reports bytes).
                    size = ''
                    try:
                        size = item['size']
                        size = float(size) / 1073741824
                        size = '%.2f GB' % size
                    except:
                        pass

                    # Strip everything from the "Sxx" tag onward to compare
                    # just the title part.
                    t = re.sub('(\.|\(|\[|\s)((?:S|s)\d+)(\.|\)|\]|\s|)(.+|)',
                               '', name)
                    if not cleantitle.get(t) == cleantitle.get(title):
                        raise Exception()
                    # Last "Sxx" token in the name must be the wanted season.
                    y = re.findall('[\.|\(|\[|\s]((?:S|s)\d*)[\.|\)|\]|\s]',
                                   name)[-1].upper()
                    if not y.lower() == season.lower(): raise Exception()
                    if not size == '':
                        u = [(name, magnetlink, size)]
                    else:
                        u = [(name, magnetlink)]
                    items += u
                except:
                    pass

            for item in items:
                try:
                    # Info-hash from the magnet; used for the PM cache check.
                    _hash = re.findall('btih:(.*?)\W', item[1])[0]
                    checkurl = urlparse.urljoin(
                        self.pm_base_link, self.pm_checkcache_link %
                        (self.pm_api_key, _hash, self.pm_api_key))
                    r = client.request(checkurl)
                    # Only packs fully cached ("finished") are usable.
                    if not 'finished' in r: raise Exception()

                    name = client.replaceHTMLCodes(item[0])
                    quality, info = source_utils.get_release_quality(
                        name, None)
                    filetype = source_utils.getFileType(name)
                    info += [filetype.strip(), name]
                    info = filter(None, info)
                    info = ' | '.join(info)

                    # Fetch the pack's file list and pick the largest file
                    # whose name carries the wanted SxxExx tag.
                    season_url = urlparse.urljoin(
                        self.pm_base_link,
                        self.pm_dl_link % (self.pm_api_key, _hash))
                    r = client.request(season_url)
                    streamitems = json.loads(r)
                    if not streamitems['status'] == 'success':
                        raise Exception()
                    streamitems = streamitems['content']
                    streamitems = [
                        i for i in streamitems if not i['stream_link'] == False
                    ]
                    streamitems = [
                        (i['link'], i['size']) for i in streamitems
                        if se_ep.lower() in i['link'].rsplit('/')[-1].lower()
                    ]
                    streamitems = sorted(streamitems,
                                         key=lambda x: int(x[1]),
                                         reverse=True)
                    url = streamitems[0][0]

                    # Episode-file size; shown next to the pack size below.
                    size = ''
                    try:
                        size = streamitems[0][1]
                        size = float(size) / 1073741824
                        size = '%.2f GB' % size
                    except:
                        pass
                    # item[2] (pack size) only exists when the search result
                    # reported a size — hence the try/except.
                    try:
                        info = '%s (%s) | %s' % (size, item[2], info)
                    except:
                        pass

                    sources.append({
                        'source': 'PMCACHED',
                        'quality': quality,
                        'language': 'en',
                        'url': url,
                        'info': info,
                        'direct': False,
                        'debridonly': False,
                        'cached': True
                    })
                except:
                    pass

            return sources
        except:
            # Log traceback under the module name; return what we have.
            log_utils.log(
                '>>>> %s TRACE <<<<\n%s' %
                (__file__.upper().split('\\')[-1].split('.')[0],
                 traceback.format_exc()), log_utils.LOGDEBUG)
            return sources
# Code example #11 (score: 0)
    def sources(self, url, hostDict, hostprDict):
        """Find torrents cached on Premiumize ("PMCACHED") and/or
        Real-Debrid ("RDCACHED") for the item described by *url*.

        Searches a token-based torrent API, filters results by title and
        release tag, then checks each magnet's info-hash against the
        enabled debrid caches.  ``hostDict``/``hostprDict`` are part of
        the scraper interface but unused here.

        Returns a list of source dicts (empty on failure).
        """
        try:
            sources = []

            if url == None: return sources

            # At least one cached-provider toggle and its API key required.
            if not control.setting(
                    'pmcached.providers') == 'true' and not control.setting(
                        'rdcached.providers') == 'true':
                raise Exception()
            if self.pm_api_key == '' and self.rd_api_key == '':
                raise Exception()

            # Flatten parse_qs lists: one string value per key.
            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '')
                         for i in data])

            title = data['tvshowtitle'] if 'tvshowtitle' in data else data[
                'title']

            # Release tag to match: "SxxExx" for episodes, year for movies.
            hdlr = 'S%02dE%02d' % (int(data['season']), int(
                data['episode'])) if 'tvshowtitle' in data else data['year']

            # Dot-separated search query with filesystem/URL-unsafe
            # characters stripped, mirroring release-name conventions.
            query = '%s S%02dE%02d' % (
                data['tvshowtitle'], int(data['season']),
                int(data['episode'])) if 'tvshowtitle' in data else '%s %s' % (
                    data['title'], data['year'])
            query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)
            query = query.replace(' ', '.')

            category = 'tv' if 'tvshowtitle' in data else 'movies'

            # Obtain an API token first; the sleep keeps us under the
            # API's token/request rate limit before the next call.
            token_url = urlparse.urljoin(self.base_link, self.token_link)
            tokr = client.request(token_url)
            xbmc.sleep(2000)
            tokr = json.loads(tokr)
            token = tokr['token']

            # For shows with Premiumize enabled, also harvest season packs.
            if 'tvshowtitle' in data and control.setting(
                    'pmcached.providers'
            ) == 'true' and not self.pm_api_key == '':
                season = 'S%02d' % (int(data['season']))
                episode = 'E%02d' % (int(data['episode']))
                seasonquery = '%s S%02d' % (data['tvshowtitle'],
                                            int(data['season']))
                seasonquery = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ',
                                     seasonquery)
                seasonquery = seasonquery.replace(' ', '.')
                sources += self.searchShowPack(title, season, episode,
                                               seasonquery, category, token)

            # Main per-episode / per-movie search.
            url = urlparse.urljoin(self.base_link,
                                   self.search_link % (query, category, token))
            r = client.request(url)

            result = json.loads(r)
            result = result['torrent_results']

            items = []

            for item in result:
                try:
                    name = item['title']
                    magnetlink = item['download']

                    # Human-readable size (API reports bytes).
                    size = ''
                    try:
                        size = item['size']
                        size = float(size) / 1073741824
                        size = '%.2f GB' % size
                    except:
                        pass

                    # Strip everything from the year/SxxExx/3D tag onward,
                    # then compare just the title part.
                    t = re.sub(
                        '(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d+|3D)(\.|\)|\]|\s|)(.+|)',
                        '', name)
                    if not cleantitle.get(t) == cleantitle.get(title):
                        raise Exception()
                    # Last year / SxxExx / Sxx token must equal hdlr.
                    y = re.findall(
                        '[\.|\(|\[|\s](\d{4}|(?:S|s)\d*(?:E|e)\d*|(?:S|s)\d*)[\.|\)|\]|\s]',
                        name)[-1].upper()
                    if not y == hdlr: raise Exception()

                    u = [(name, magnetlink, size)]
                    items += u
                except:
                    pass

            # --- Premiumize cache check ---
            if control.setting('pmcached.providers'
                               ) == 'true' and not self.pm_api_key == '':
                for item in items:
                    try:
                        # Info-hash from the magnet link.
                        _hash = re.findall('btih:(.*?)\W', item[1])[0]
                        checkurl = urlparse.urljoin(
                            self.pm_base_link, self.pm_checkcache_link %
                            (self.pm_api_key, _hash, self.pm_api_key))
                        r = client.request(checkurl)
                        # Only fully cached ("finished") torrents qualify.
                        if not 'finished' in r: raise Exception()

                        name = client.replaceHTMLCodes(item[0])
                        quality, info = source_utils.get_release_quality(
                            name, None)
                        filetype = source_utils.getFileType(name)
                        info += [filetype.strip(), name]
                        info = filter(None, info)
                        info = ' | '.join(info)
                        if not item[2] == '':
                            info = '%s | %s' % (item[2], info)
                        # Re-emit as a bare magnet; the resolver handles it.
                        url = 'magnet:?xt=urn:btih:%s' % _hash

                        sources.append({
                            'source': 'PMCACHED',
                            'quality': quality,
                            'language': 'en',
                            'url': url,
                            'info': info,
                            'direct': False,
                            'debridonly': False,
                            'cached': True
                        })
                    except:
                        pass

            # --- Real-Debrid cache check ---
            if control.setting('rdcached.providers'
                               ) == 'true' and not self.rd_api_key == '':
                # Torrents already stored in the RD account.
                checktorr_r = self.checkrdcache()
                checktorr_result = json.loads(checktorr_r)

                for item in items:
                    try:
                        _hash = re.findall('btih:(.*?)\W', item[1])[0]
                        _hash = _hash.lower()

                        # Prefer a direct link from the account's own
                        # downloaded torrents.
                        url = ''
                        for i in checktorr_result:
                            try:
                                if _hash == i['hash'] and i[
                                        'status'] == 'downloaded':
                                    url = i['links'][0]
                                    break
                            except:
                                pass

                        # Otherwise fall back to RD's instant-availability
                        # check and emit a special "rdmagnet:" URL.
                        if url == '':
                            checkurl = urlparse.urljoin(
                                self.rd_base_link, self.rd_checkcache_link %
                                (_hash, self.rd_api_key))
                            r = client.request(checkurl)
                            checkinstant = json.loads(r)
                            checkinstant = checkinstant[_hash]

                            checkinstant_num = 0
                            try:
                                checkinstant_num = len(checkinstant['rd'])
                            except:
                                pass

                            if checkinstant_num == 0: raise Exception()
                            url = 'rdmagnet:?xt=urn:btih:%s' % _hash

                        if url == '': raise Exception()

                        name = item[0]
                        quality, info = source_utils.get_release_quality(
                            name, None)
                        filetype = source_utils.getFileType(name)
                        info += [filetype.strip(), name]
                        info = filter(None, info)
                        info = ' | '.join(info)
                        if not item[2] == '':
                            info = '%s | %s' % (item[2], info)

                        sources.append({
                            'source': 'RDCACHED',
                            'quality': quality,
                            'language': 'en',
                            'url': url,
                            'info': info,
                            'direct': False,
                            'debridonly': False,
                            'cached': True
                        })
                    except:
                        pass

            return sources
        except:
            # Log traceback under the module name; return what we have.
            log_utils.log(
                '>>>> %s TRACE <<<<\n%s' %
                (__file__.upper().split('\\')[-1].split('.')[0],
                 traceback.format_exc()), log_utils.LOGDEBUG)
            return sources
# Code example #12 (score: 0)
    def _get_sources(self, url, name, hostDict, hostprDict):
        """Scrape one MKVHUB detail page and append sources to
        ``self._sources``.

        url  -- detail-page URL containing "watch"/"download"/"magnet"
                buttons.
        name -- release name, used for quality and file-type detection.
        hostDict / hostprDict -- free and premium host lists used to
                validate hoster links.

        Returns nothing; results accumulate in ``self._sources``.
        """
        try:
            urls = []
            result = client.request(url)

            # One 3-tuple: (watch-online link, download link, magnet link),
            # pulled from the page's three styled buttons.
            urls = [(client.parseDOM(result,
                                     'a',
                                     ret='href',
                                     attrs={'class': 'dbuttn watch'})[0],
                     client.parseDOM(result,
                                     'a',
                                     ret='href',
                                     attrs={'class': 'dbuttn blue'})[0],
                     client.parseDOM(result,
                                     'a',
                                     ret='href',
                                     attrs={'class': 'dbuttn magnet'})[0])]

            # '''<a class="dbuttn watch" href="https://www.linkomark.xyz/view/EnWNqSNeLw" target="_blank" rel="nofollow noopener">Watch Online Links</a>
            # <a class="dbuttn blue" href="https://www.linkomark.xyz/view/3-Gjyz5Q2R" target="_blank" rel="nofollow noopener">Get Download Links</a>
            # <a class="dbuttn magnet" href="https://torrentbox.site/save/2970fa51e8af52b7e2d1d5fa61a6005777d768ba" target="_blank" rel="nofollow noopener">Magnet Link</a>'''

            quality, info = source_utils.get_release_quality(name, url)

            # File size scraped from the page, normalised to "<n> GB".
            try:
                size = re.findall(
                    '((?:\d+\,\d+\.\d+|\d+\.\d+|\d+\,\d+|\d+)\s*(?:GB|GiB|Gb|MB|MiB|Mb))',
                    result)[0]
                div = 1 if size.endswith(('GB', 'GiB', 'Gb')) else 1024
                size = float(re.sub('[^0-9|/.|/,]', '', size.replace(
                    ',', '.'))) / div
                size = '%.2f GB' % size
                info.append(size)
            except:
                pass

            fileType = source_utils.getFileType(name)
            info.append(fileType)
            info = ' | '.join(info) if fileType else info[0]

            # Debrid_info = info.append(fileType)
            # Debrid_info = ' | '.join(info) if fileType else info[0]
            # Torrent_info = ' | '.join(info)

        except:
            source_utils.scraper_error('MKVHUB')
            return

        # Iterate the three button links gathered above.
        for url in urls[0]:
            try:
                r = client.request(url)
                if r is None:
                    continue

                # Link-protector page: submit its hidden CSRF form, then
                # harvest the hoster links it reveals.
                if 'linkomark' in url:
                    # info = Debrid_info
                    p_link = client.parseDOM(r,
                                             'link',
                                             attrs={'rel': 'canonical'},
                                             ret='href')[0]

                    #<input type="hidden" name="_csrf_token_" value=""/>
                    input_name = client.parseDOM(r, 'input', ret='name')[0]
                    input_value = client.parseDOM(r, 'input', ret='value')[0]

                    post = {input_name: input_value}
                    p_data = client.request(p_link, post=post)
                    links = client.parseDOM(p_data,
                                            'a',
                                            ret='href',
                                            attrs={'target': '_blank'})

                    for i in links:
                        # rd flags links only reachable via a debrid
                        # service (premium hosts, plus rapidgator).
                        valid, host = source_utils.is_host_valid(i, hostDict)
                        if not valid:
                            valid, host = source_utils.is_host_valid(
                                i, hostprDict)
                            if not valid:
                                continue
                            else:
                                rd = True
                        else:
                            rd = False
                        # Skip duplicates already collected.
                        if i in str(self._sources):
                            continue

                        if 'rapidgator' in i:
                            rd = True

                        if rd:
                            self._sources.append({
                                'source': host,
                                'quality': quality,
                                'language': 'en',
                                'url': i,
                                'info': info,
                                'direct': False,
                                'debridonly': True
                            })
                        else:
                            self._sources.append({
                                'source': host,
                                'quality': quality,
                                'language': 'en',
                                'url': i,
                                'info': info,
                                'direct': False,
                                'debridonly': False
                            })

                # Torrent page: extract the magnet and drop its trackers.
                elif 'torrent' in url:
                    # info = Torrent_info
                    data = client.parseDOM(r, 'a', ret='href')

                    url = [i for i in data if 'magnet:' in i][0]
                    url = url.split('&tr')[0]

                    self._sources.append({
                        'source': 'torrent',
                        'quality': quality,
                        'language': 'en',
                        'url': url,
                        'info': info,
                        'direct': False,
                        'debridonly': True
                    })

            except:
                source_utils.scraper_error('MKVHUB')
                pass