Example #1
0
    def sources_packs(self,
                      url,
                      hostDict,
                      hostprDict,
                      search_series=False,
                      total_seasons=None,
                      bypass_filter=False):
        """Scrape TORRENTAPI for TV season-pack torrents.

        url -- urlencoded query string carrying tvshowtitle/aliases/year/season.
        hostDict/hostprDict -- unused here (torrent-only scraper), kept for the
        common scraper interface.
        search_series -- show packs are unsupported by this API; True is a no-op.
        Returns a list of source dicts (possibly empty); never raises.
        """
        sources = []
        self.bypass_filter = bypass_filter

        if search_series:  # torrentapi does not have showPacks
            return sources
        try:
            if url is None:
                return sources
            if debrid.status() is False:
                return sources

            data = parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '')
                         for i in data])

            self.title = data['tvshowtitle'].replace('&', 'and').replace(
                'Special Victims Unit', 'SVU')
            self.aliases = data['aliases']
            self.year = data['year']
            self.season_x = data['season']
            self.season_xx = self.season_x.zfill(2)

            # raw string: '\s' / '\.' are invalid escapes in a normal literal
            query = re.sub(r'[^A-Za-z0-9\s\.-]+', '', self.title)
            search_link = self.tvsearch.format(
                self.key, quote_plus(query + ' S%s' % self.season_xx))
            # log_utils.log('search_link = %s' % str(search_link), __name__, log_utils.LOGDEBUG)

            time.sleep(2.1)  # torrentapi enforces ~1 request per 2 seconds
            rjson = client.request(search_link, error=True)
            if not rjson or 'torrent_results' not in str(rjson):
                return sources

            files = json.loads(rjson)['torrent_results']
            for file in files:
                url = file["download"]
                url = url.split('&tr')[0]  # drop tracker params from the magnet
                hash = re.compile(r'btih:(.*?)&').findall(url)[0]

                name = file["title"]
                name = unquote_plus(name)
                name = source_utils.clean_name(self.title, name)
                if source_utils.remove_lang(name):
                    continue

                # keep only genuine season packs unless the caller bypasses it
                if not self.bypass_filter:
                    if not source_utils.filter_season_pack(
                            self.title, self.aliases, self.year, self.season_x,
                            name):
                        continue
                package = 'season'

                try:
                    seeders = int(file["seeders"])
                    if self.min_seeders > seeders:
                        continue
                except:
                    seeders = 0

                quality, info = source_utils.get_release_quality(name, name)

                try:
                    dsize, isize = source_utils.convert_size(file["size"],
                                                             to='GB')
                    info.insert(0, isize)
                except:
                    dsize = 0

                info = ' | '.join(info)

                sources.append({
                    'source': 'torrent',
                    'seeders': seeders,
                    'hash': hash,
                    'name': name,
                    'quality': quality,
                    'language': 'en',
                    'url': url,
                    'info': info,
                    'direct': False,
                    'debridonly': True,
                    'size': dsize,
                    'package': package
                })
            return sources
        except:
            source_utils.scraper_error('TORRENTAPI')
            return sources
Example #2
0
	def get_sources_packs(self, link):
		"""Scrape a BTSCENE listing page for season/show pack magnets.

		link -- fully built search URL.  Matching sources are appended to
		self.sources; errors are logged and swallowed (best-effort scraper).
		"""
		# log_utils.log('link = %s' % str(link), __name__, log_utils.LOGDEBUG)
		try:
			r = client.request(link)
			if not r:
				return
			posts = client.parseDOM(r, 'tr')

			for post in posts:
				link = re.findall('a title="Download Torrent Magnet" href="(magnet:.+?)"', post, re.DOTALL)
				if not link:
					continue

				for url in link:
					# fix: decode HTML-escaped ampersands ('&amp;' -> '&') so the
					# '&tr' split and the 'btih:' hash parse below can succeed;
					# the original had a no-op .replace('&', '&') here
					url = unquote_plus(url).replace('&amp;', '&').split('&tr')[0].replace(' ', '.')
					url = source_utils.strip_non_ascii_and_unprintable(url)

					hash = re.compile('btih:(.*?)&').findall(url)[0]
					name = url.split('&dn=')[1]
					name = source_utils.clean_name(self.title, name)
					if source_utils.remove_lang(name):
						continue

					if not self.search_series:
						if not self.bypass_filter:
							if not source_utils.filter_season_pack(self.title, self.aliases, self.year, self.season_x, name):
								continue
						package = 'season'

					elif self.search_series:
						if not self.bypass_filter:
							valid, last_season = source_utils.filter_show_pack(self.title, self.aliases, self.imdb, self.year, self.season_x, name, self.total_seasons)
							if not valid:
								continue
						else:
							last_season = self.total_seasons
						package = 'show'

					try:
						seeders = int(client.parseDOM(post, 'td', attrs={'class': 'seeds is-hidden-sm-mobile'})[0].replace(',', ''))
						if self.min_seeders > seeders:
							continue
					except:
						seeders = 0

					quality, info = source_utils.get_release_quality(name, url)

					try:
						# raw string: pattern contains '\d' / '\s' escapes
						size = re.findall(r'((?:\d+\,\d+\.\d+|\d+\.\d+|\d+\,\d+|\d+)\s*(?:GiB|MiB|GB|MB))', post)[0]
						dsize, isize = source_utils._size(size)
						info.insert(0, isize)
					except:
						dsize = 0

					info = ' | '.join(info)

					item = {'source': 'torrent', 'seeders': seeders, 'hash': hash, 'name': name, 'quality': quality,
								'language': 'en', 'url': url, 'info': info, 'direct': False, 'debridonly': True, 'size': dsize, 'package': package}
					if self.search_series:
						item.update({'last_season': last_season})
					self.sources.append(item)
		except:
			source_utils.scraper_error('BTSCENE')
Example #3
0
	def get_sources_packs(self, link):
		"""Scrape a KICKASS2 listing page for season/show pack magnets.

		link -- fully built search URL.  Matching sources are appended to
		self.sources; errors are logged and swallowed (best-effort scraper).
		"""
		# log_utils.log('link = %s' % link, __name__, log_utils.LOGDEBUG)
		try:
			headers = {'User-Agent': client.agent()}
			r = client.request(link, headers=headers)
			if not r:
				return
			posts = client.parseDOM(r, 'tr', attrs={'id': 'torrent_latest_torrents'})

			for post in posts:
				ref = client.parseDOM(post, 'a', attrs={'title': 'Torrent magnet link'}, ret='href')[0]
				# the magnet lives in the redirect's 'url=' query parameter
				link = ref.split('url=')[1]

				# fix: decode HTML-escaped ampersands ('&amp;' -> '&'); the
				# original had a no-op .replace('&', '&') here
				url = unquote_plus(link).replace('&amp;', '&').replace(' ', '.')
				url = url.split('&tr')[0]
				hash = re.compile('btih:(.*?)&').findall(url)[0]
				name = unquote_plus(url.split('&dn=')[1])
				name = source_utils.clean_name(self.title, name)
				if source_utils.remove_lang(name):
					continue

				if not self.search_series:
					if not self.bypass_filter:
						if not source_utils.filter_season_pack(self.title, self.aliases, self.year, self.season_x, name):
							continue
					package = 'season'

				elif self.search_series:
					if not self.bypass_filter:
						valid, last_season = source_utils.filter_show_pack(self.title, self.aliases, self.imdb, self.year, self.season_x, name, self.total_seasons)
						if not valid:
							continue
					else:
						last_season = self.total_seasons
					package = 'show'

				try:
					seeders = int(re.findall('<td class="green center">([0-9]+|[0-9]+,[0-9]+)</td>', post, re.DOTALL)[0].replace(',', ''))
					if self.min_seeders > seeders:
						continue
				except:
					seeders = 0

				quality, info = source_utils.get_release_quality(name, url)

				try:
					# raw string: pattern contains '\d' / '\s' escapes
					size = re.findall(r'((?:\d+\,\d+\.\d+|\d+\.\d+|\d+\,\d+|\d+)\s*(?:GiB|MiB|GB|MB))', post)[0]
					dsize, isize = source_utils._size(size)
					info.insert(0, isize)
				except:
					dsize = 0

				info = ' | '.join(info)

				item = {'source': 'torrent', 'seeders': seeders, 'hash': hash, 'name': name, 'quality': quality,
							'language': 'en', 'url': url, 'info': info, 'direct': False, 'debridonly': True, 'size': dsize, 'package': package}
				if self.search_series:
					item.update({'last_season': last_season})
				self.sources.append(item)
		except:
			source_utils.scraper_error('KICKASS2')
Example #4
0
	def get_sources_packs(self, link, url):
		"""Query the BITLORD search API for season/show pack magnets.

		link -- page URL, used only as the Referer header.
		url  -- search phrase posted to the JSON API.
		Matching sources are appended to self.sources; errors are logged
		and swallowed (best-effort scraper).
		"""
		try:
			# log_utils.log('link = %s' % str(link), log_utils.LOGDEBUG)
			# log_utils.log('url = %s' % url, log_utils.LOGDEBUG)
			self.headers.update({'Referer': link})
			query_data = {
				'query': url,
				'offset': 0,
				'limit': 99,
				'filters[field]': 'seeds',
				'filters[sort]': 'desc',
				'filters[time]': 4,
				'filters[category]': 4,
				'filters[adult]': False,
				'filters[risky]': False}

			api_url = urljoin(self.base_link, self.api_search_link)
			rjson = client.request(api_url, post=query_data, headers=self.headers)

			files = json.loads(rjson)
			error = files.get('error')
			if error:
				# fix: was 'return sources' -- NameError, this method has no
				# local 'sources'; it collects into self.sources instead
				return

			for file in files.get('content'):
				try:
					name = file.get('name')
					name = source_utils.clean_name(self.title, name)

					url = unquote_plus(file.get('magnet')).replace('&amp;', '&').replace(' ', '.')
					url = re.sub(r'(&tr=.+)&dn=', '&dn=', url) # some links on bitlord &tr= before &dn=
					url = url.split('&tr=')[0].split('&xl=')[0]
					url = source_utils.strip_non_ascii_and_unprintable(url)

					hash = re.compile('btih:(.*?)&').findall(url)[0]
					if source_utils.remove_lang(name):
						continue

					if not self.search_series:
						if not self.bypass_filter:
							if not source_utils.filter_season_pack(self.title, self.aliases, self.year, self.season_x, name):
								continue
						package = 'season'

					elif self.search_series:
						if not self.bypass_filter:
							valid, last_season = source_utils.filter_show_pack(self.title, self.aliases, self.imdb, self.year, self.season_x, name, self.total_seasons)
							if not valid:
								continue
						else:
							last_season = self.total_seasons
						package = 'show'

					try:
						seeders = file.get('seeds')
						if self.min_seeders > seeders:
							continue
					except:
						seeders = 0

					quality, info = source_utils.get_release_quality(name, url)

					try:
						# API reports bare numbers: single digit => GB, else MB
						size = file.get('size')
						size = str(size) + ' GB' if len(str(size)) == 1 else str(size) + ' MB'
						dsize, isize = source_utils._size(size)
						info.insert(0, isize)
					except:
						source_utils.scraper_error('BITLORD')
						dsize = 0

					info = ' | '.join(info)

					item = {'source': 'torrent', 'seeders': seeders, 'hash': hash, 'name': name, 'quality': quality,
								'language': 'en', 'url': url, 'info': info, 'direct': False, 'debridonly': True, 'size': dsize, 'package': package}
					if self.search_series:
						item.update({'last_season': last_season})
					self.sources.append(item)
				except:
					source_utils.scraper_error('BITLORD')
					continue
		except:
			source_utils.scraper_error('BITLORD')
Example #5
0
    def get_sources_packs(self, link):
        """Scrape the SOLIDTORRENTS JSON API for season/show pack magnets.

        link -- fully built API URL.  Matching sources are appended to
        self.sources; errors are logged and swallowed (best-effort scraper).
        """
        # log_utils.log('link = %s' % str(link), __name__, log_utils.LOGDEBUG)
        try:
            r = client.request(link)
            if not r:
                return
            r = json.loads(r)
            results = r['results']

            # fix: loop variable was named 'item' and then rebound to the
            # result dict below; renamed to 'torrent' to avoid the shadowing
            for torrent in results:
                try:
                    url = unquote_plus(torrent['magnet']).replace(' ', '.')
                    url = re.sub(
                        r'(&tr=.+)&dn=', '&dn=',
                        url)  # some links on solidtorrents &tr= before &dn=
                    url = source_utils.strip_non_ascii_and_unprintable(url)
                    hash = torrent['infohash'].lower()

                    name = torrent['title']
                    name = source_utils.clean_name(self.title, name)
                    if source_utils.remove_lang(name):
                        continue

                    if not self.search_series:
                        if not self.bypass_filter:
                            if not source_utils.filter_season_pack(
                                    self.title, self.aliases, self.year,
                                    self.season_x, name):
                                continue
                        package = 'season'

                    elif self.search_series:
                        if not self.bypass_filter:
                            valid, last_season = source_utils.filter_show_pack(
                                self.title, self.aliases, self.imdb, self.year,
                                self.season_x, name, self.total_seasons)
                            if not valid:
                                continue
                        else:
                            last_season = self.total_seasons
                        package = 'show'

                    # skip magnets already collected
                    if url in str(self.sources):
                        continue

                    try:
                        seeders = int(torrent['swarm']['seeders'])
                        if self.min_seeders > seeders:
                            continue
                    except:
                        seeders = 0

                    quality, info = source_utils.get_release_quality(name, url)

                    try:
                        dsize, isize = source_utils.convert_size(torrent["size"],
                                                                 to='GB')
                        info.insert(0, isize)
                    except:
                        dsize = 0

                    info = ' | '.join(info)

                    item = {
                        'source': 'torrent',
                        'seeders': seeders,
                        'hash': hash,
                        'name': name,
                        'quality': quality,
                        'language': 'en',
                        'url': url,
                        'info': info,
                        'direct': False,
                        'debridonly': True,
                        'size': dsize,
                        'package': package
                    }
                    if self.search_series:
                        item.update({'last_season': last_season})
                    self.sources.append(item)
                except:
                    source_utils.scraper_error('SOLIDTORRENTS')
                    pass
        except:
            source_utils.scraper_error('SOLIDTORRENTS')
            pass
Example #6
0
	def get_sources_packs(self, link):
		"""Parse the PIRATEBAY API response and collect pack sources.

		link -- fully built API URL.  Matching sources are appended to
		self.sources; per-entry failures are logged and skipped.
		"""
		try:
			rjson = client.request(link, error=True)
			body = str(rjson)
			if not rjson or 'No results returned' in body or 'Connection Time-out' in body:
				return
			files = json.loads(rjson)
		except:
			source_utils.scraper_error('PIRATEBAY')
			return

		for entry in files:
			try:
				hash = entry['info_hash']
				name = source_utils.clean_name(self.title, entry['name'])
				if source_utils.remove_lang(name):
					continue

				url = 'magnet:?xt=urn:btih:%s&dn=%s' % (hash, name)

				if self.search_series:
					if self.bypass_filter:
						last_season = self.total_seasons
					else:
						valid, last_season = source_utils.filter_show_pack(self.title, self.aliases, self.imdb, self.year, self.season_x, name, self.total_seasons)
						if not valid:
							continue
					package = 'show'
				else:
					if not self.bypass_filter:
						if not source_utils.filter_season_pack(self.title, self.aliases, self.year, self.season_x, name):
							continue
					package = 'season'

				try:
					seeders = entry['seeders']
					if seeders < self.min_seeders:
						continue
				except:
					seeders = 0

				quality, info = source_utils.get_release_quality(name, url)
				try:
					dsize, isize = source_utils.convert_size(float(entry["size"]), to='GB')
					info.insert(0, isize)
				except:
					dsize = 0

				info = ' | '.join(info)

				item = {'source': 'torrent', 'seeders': seeders, 'hash': hash, 'name': name, 'quality': quality,
							'language': 'en', 'url': url, 'info': info, 'direct': False, 'debridonly': True, 'size': dsize, 'package': package}
				if self.search_series:
					item['last_season'] = last_season
				self.sources.append(item)
			except:
				source_utils.scraper_error('PIRATEBAY')
				continue
Example #7
0
    def get_sources_packs(self, link):
        """Scrape a SKYTORRENTS listing page for season/show pack magnets.

        link -- fully built search URL.  Matching sources are appended to
        self.sources; errors are logged and swallowed (best-effort scraper).
        """
        # log_utils.log('link = %s' % str(link), __name__, log_utils.LOGDEBUG)
        try:
            r = client.request(link)
            if not r:
                return
            if '<tbody' not in r:
                return
            posts = client.parseDOM(r, 'tbody')[0]
            posts = client.parseDOM(posts, 'tr')
        except:
            source_utils.scraper_error('SKYTORRENTS')
            return

        for post in posts:
            try:
                post = re.sub(r'\n', '', post)
                post = re.sub(r'\t', '', post)
                link = re.findall(
                    'href="(magnet:.+?)".+<td style="text-align: center;color:green;">([0-9]+|[0-9]+,[0-9]+)</td>',
                    post, re.DOTALL)

                for url, seeders in link:
                    url = unquote_plus(url).split('&tr')[0].replace(
                        '&amp;', '&').replace(' ', '.')
                    url = source_utils.strip_non_ascii_and_unprintable(url)
                    # fix: was 'return', which aborted the entire scrape on the
                    # first duplicate; skip just this entry instead
                    if url in str(self.sources):
                        continue

                    hash = re.compile('btih:(.*?)&').findall(url)[0]

                    name = url.split('&dn=')[1]
                    name = source_utils.clean_name(self.title, name)
                    if source_utils.remove_lang(name):
                        continue

                    if not self.search_series:
                        if not self.bypass_filter:
                            if not source_utils.filter_season_pack(
                                    self.title, self.aliases, self.year,
                                    self.season_x, name):
                                continue
                        package = 'season'

                    elif self.search_series:
                        if not self.bypass_filter:
                            valid, last_season = source_utils.filter_show_pack(
                                self.title, self.aliases, self.imdb, self.year,
                                self.season_x, name, self.total_seasons)
                            if not valid:
                                continue
                        else:
                            last_season = self.total_seasons
                        package = 'show'

                    try:
                        seeders = int(seeders)
                        if self.min_seeders > seeders:
                            continue
                    except:
                        seeders = 0

                    quality, info = source_utils.get_release_quality(name, url)

                    try:
                        # raw string: pattern contains '\d' / '\s' escapes
                        size = re.findall(
                            r'((?:\d+\,\d+\.\d+|\d+\.\d+|\d+\,\d+|\d+)\s*(?:GiB|MiB|GB|MB))',
                            post)[0]
                        dsize, isize = source_utils._size(size)
                        info.insert(0, isize)
                    except:
                        dsize = 0

                    info = ' | '.join(info)

                    item = {
                        'source': 'torrent',
                        'seeders': seeders,
                        'hash': hash,
                        'name': name,
                        'quality': quality,
                        'language': 'en',
                        'url': url,
                        'info': info,
                        'direct': False,
                        'debridonly': True,
                        'size': dsize,
                        'package': package
                    }
                    if self.search_series:
                        item.update({'last_season': last_season})
                    self.sources.append(item)
            except:
                source_utils.scraper_error('SKYTORRENTS')
                pass
Example #8
0
    def get_sources_packs(self, url):
        """Scrape YOURBITTORRENT detail pages for season/show pack torrents.

        url -- listing-page URL; each '/torrent/...' link is fetched for its
        info hash and metadata.  Matching sources are appended to
        self.sources; errors are logged and swallowed (best-effort scraper).
        """
        # log_utils.log('url = %s' % str(url), __name__, log_utils.LOGDEBUG)
        try:
            r = client.request(url, timeout='5')
            if not r:
                return

            links = re.findall('<a href="(/torrent/.+?)"', r, re.DOTALL)
            for link in links:
                url = '%s%s' % (self.base_link, link)
                result = client.request(url, timeout='5')
                if result is None:
                    continue
                if '<kbd>' not in result:  # page carries the hash in a <kbd>
                    continue
                hash = re.findall('<kbd>(.+?)<', result, re.DOTALL)[0]
                url = '%s%s' % ('magnet:?xt=urn:btih:', hash)

                name = re.findall('<h3 class="card-title">(.+?)<', result,
                                  re.DOTALL)[0].replace('Original Name: ', '')
                name = unquote_plus(name)
                name = source_utils.clean_name(self.title, name)
                if source_utils.remove_lang(name):
                    continue

                if not self.search_series:
                    if not self.bypass_filter:
                        if not source_utils.filter_season_pack(
                                self.title, self.aliases, self.year,
                                self.season_x, name):
                            continue
                    package = 'season'

                elif self.search_series:
                    if not self.bypass_filter:
                        valid, last_season = source_utils.filter_show_pack(
                            self.title, self.aliases, self.imdb, self.year,
                            self.season_x, name, self.total_seasons)
                        if not valid:
                            continue
                    else:
                        last_season = self.total_seasons
                    package = 'show'

                url = '%s%s%s' % (url, '&dn=', str(name))
                if url in str(self.sources):
                    continue

                try:
                    seeders = int(
                        re.findall(
                            '<div class="col-3">Seeders:</div><div class="col"><span style="color:green">([0-9]+|[0-9]+,[0-9]+)<',
                            result, re.DOTALL)[0].replace(',', ''))
                    if self.min_seeders > seeders:
                        continue
                except:
                    seeders = 0

                quality, info = source_utils.get_release_quality(name, url)

                try:
                    # fix: the size lookup was outside any try; a page without
                    # a file-size row raised IndexError and the outer except
                    # aborted every remaining link
                    size = re.findall(
                        '<div class="col-3">File size:</div><div class="col">(.+?)<',
                        result, re.DOTALL)[0]
                    size = re.findall(
                        r'((?:\d+\,\d+\.\d+|\d+\.\d+|\d+\,\d+|\d+)\s*(?:GB|GiB|Gb|MB|MiB|Mb))',
                        size)[0]
                    dsize, isize = source_utils._size(size)
                    info.insert(0, isize)
                except:
                    dsize = 0

                info = ' | '.join(info)

                item = {
                    'source': 'torrent',
                    'seeders': seeders,
                    'hash': hash,
                    'name': name,
                    'quality': quality,
                    'language': 'en',
                    'url': url,
                    'info': info,
                    'direct': False,
                    'debridonly': True,
                    'size': dsize,
                    'package': package
                }
                if self.search_series:
                    item.update({'last_season': last_season})
                self.sources.append(item)
        except:
            source_utils.scraper_error('YOURBITTORRENT')
            pass
Example #9
0
    def get_sources_packs(self, url):
        """Scrape MAGNETDL results (and a possible second page) for
        season/show pack magnets, appending matches to self.sources.
        """
        # log_utils.log('url = %s' % str(url), __name__, log_utils.LOGDEBUG)
        try:
            r = client.request(url)
            if not r:
                return
            if '<tbody' not in r:
                return

            r = client.parseDOM(r, 'tbody')[0]

            results = client.parseDOM(r, 'tr')
            # only rows carrying a magnet link are torrent entries
            posts = [i for i in results if 'magnet:' in i]

            try:
                # pagination: also pull page 2 when a 'Next Page' row exists;
                # raising here is just a cheap jump to the 'pass' below
                next_page = [i for i in results if 'Next Page' in i]
                if not next_page:
                    raise Exception()
                # NOTE(review): next_page is a list here — presumably
                # client.parseDOM accepts lists; confirm against the helper
                page = client.parseDOM(next_page,
                                       'a',
                                       ret='href',
                                       attrs={'title':
                                              'Downloads | Page 2'})[0]
                r2 = client.request(self.base_link + page)
                results2 = client.parseDOM(r2, 'tr')
                posts += [i for i in results2 if 'magnet:' in i]
            except:
                pass

            for post in posts:
                post = post.replace('&nbsp;', ' ')

                links = client.parseDOM(post, 'a', ret='href')
                magnet = [
                    i.replace('&amp;', '&') for i in links if 'magnet:' in i
                ][0]
                # normalize: decode, drop tracker params, dots for spaces
                url = unquote_plus(magnet).split('&tr')[0].replace(' ', '.')
                # skip magnets already collected
                if url in str(self.sources):
                    continue

                hash = re.compile('btih:(.*?)&').findall(url)[0]

                # the second anchor's title attribute holds the release name
                name = client.parseDOM(post, 'a',
                                       ret='title')[1].replace('&ndash;', '-')
                name = unquote_plus(name)
                name = source_utils.clean_name(self.title, name)
                if source_utils.remove_lang(name):
                    continue

                if not self.search_series:
                    if not self.bypass_filter:
                        if not source_utils.filter_season_pack(
                                self.title, self.aliases, self.year,
                                self.season_x, name):
                            continue
                    package = 'season'

                elif self.search_series:
                    if not self.bypass_filter:
                        valid, last_season = source_utils.filter_show_pack(
                            self.title, self.aliases, self.imdb, self.year,
                            self.season_x, name, self.total_seasons)
                        if not valid:
                            continue
                    else:
                        last_season = self.total_seasons
                    package = 'show'

                try:
                    # seed count sits in a td of class 's', e.g. '1,234'
                    seeders = int(
                        client.parseDOM(post, 'td',
                                        attrs={'class':
                                               's'})[0].replace(',', ''))
                    if self.min_seeders > seeders:
                        continue
                except:
                    seeders = 0
                    pass

                quality, info = source_utils.get_release_quality(name, url)

                try:
                    size = re.findall(
                        '((?:\d+\,\d+\.\d+|\d+\.\d+|\d+\,\d+|\d+)\s*(?:GiB|MiB|GB|MB))',
                        post)[0]
                    dsize, isize = source_utils._size(size)
                    info.insert(0, isize)
                except:
                    dsize = 0
                    pass

                info = ' | '.join(info)

                item = {
                    'source': 'torrent',
                    'seeders': seeders,
                    'hash': hash,
                    'name': name,
                    'quality': quality,
                    'language': 'en',
                    'url': url,
                    'info': info,
                    'direct': False,
                    'debridonly': True,
                    'size': dsize,
                    'package': package
                }
                if self.search_series:
                    item.update({'last_season': last_season})
                self.sources.append(item)
        except:
            source_utils.scraper_error('MAGNETDL')
            pass
Example #10
0
    def get_sources_packs(self, link):
        """Harvest season/show pack torrents from a TORRENTDOWNLOAD page.

        link -- fully built search URL.  Matching sources are appended to
        self.sources; per-post failures are logged and skipped.
        """
        # log_utils.log('link = %s' % str(link), __name__, log_utils.LOGDEBUG)
        try:
            r = client.request(link)
            if not r:
                return
            r = re.sub(r'\n', '', r)
            r = re.sub(r'\t', '', r)
            posts = re.compile(
                '<table class="table2" cellspacing="0">(.*?)</table>').findall(
                    r)
            posts = client.parseDOM(posts, 'tr')
        except:
            source_utils.scraper_error('TORRENTDOWNLOAD')
            return

        for post in posts:
            try:
                # header rows carry <th> cells and hold no torrent data
                if '<th' in post:
                    continue
                rows = re.compile(
                    '<a href="(.+?)">.*?<td class="tdnormal">((?:\d+\,\d+\.\d+|\d+\.\d+|\d+\,\d+|\d+)\s*(?:GB|GiB|Gb|MB|MiB|Mb))</td><td class="tdseed">([0-9]+|[0-9]+,[0-9]+)</td>'
                ).findall(post)

                for href, size_text, seed_text in rows:
                    parts = href.split("/")
                    hash = parts[1].lower()

                    name = unquote_plus(parts[2].replace('+MB+', '')).replace('&amp;', '&')
                    name = source_utils.clean_name(self.title, name)
                    if source_utils.remove_lang(name):
                        continue
                    url = 'magnet:?xt=urn:btih:%s&dn=%s' % (hash, name)

                    if self.search_series:
                        if self.bypass_filter:
                            last_season = self.total_seasons
                        else:
                            valid, last_season = source_utils.filter_show_pack(
                                self.title, self.aliases, self.imdb, self.year,
                                self.season_x, name, self.total_seasons)
                            if not valid:
                                continue
                        package = 'show'
                    else:
                        if not self.bypass_filter:
                            if not source_utils.filter_season_pack(
                                    self.title, self.aliases, self.year,
                                    self.season_x, name):
                                continue
                        package = 'season'

                    try:
                        seeders = int(seed_text.replace(',', ''))
                        if seeders < self.min_seeders:
                            continue
                    except:
                        seeders = 0

                    quality, info = source_utils.get_release_quality(name, url)

                    try:
                        size = re.findall(
                            '((?:\d+\,\d+\.\d+|\d+\.\d+|\d+\,\d+|\d+)\s*(?:GB|GiB|Gb|MB|MiB|Mb))',
                            size_text)[0]
                        dsize, isize = source_utils._size(size)
                        info.insert(0, isize)
                    except:
                        dsize = 0
                    info = ' | '.join(info)

                    item = {'source': 'torrent', 'seeders': seeders, 'hash': hash,
                            'name': name, 'quality': quality, 'language': 'en',
                            'url': url, 'info': info, 'direct': False,
                            'debridonly': True, 'size': dsize, 'package': package}
                    if self.search_series:
                        item['last_season'] = last_season
                    self.sources.append(item)
            except:
                source_utils.scraper_error('TORRENTDOWNLOAD')
                continue
Example #11
0
    def get_sources_packs(self, link):
        """Scrape one TorrentGalaxy results page for season/show pack torrents.

        Extracts magnet link, size badge and seeder count from each result
        table, filters by name/pack validity/seeders, and appends the
        surviving entries to self.sources.  No return value.
        """
        # log_utils.log('link = %s' % str(link), __name__, log_utils.LOGDEBUG)
        try:
            html = self.scraper.get(link).content
            if not html:
                return
            posts = client.parseDOM(html, 'div', attrs={'class': 'tgxtable'})
        except:
            source_utils.scraper_error('TORRENTGALAXY')
            return

        for post in posts:
            try:
                # The three lists line up row-for-row, so zip pairs each
                # magnet with its size badge and seeder count.
                magnets = re.findall('a href="(magnet:.+?)"', post, re.DOTALL)
                sizes = re.findall(
                    r"<span class='badge badge-secondary' style='border-radius:4px;'>(.*?)</span>",
                    post, re.DOTALL)
                seeds = re.findall(
                    r"<span title='Seeders/Leechers'>\[<font color='green'><b>(.*?)<",
                    post, re.DOTALL)

                for magnet, size_str, seed_str in zip(magnets, sizes, seeds):
                    url = unquote_plus(magnet).split('&tr')[0].replace(
                        ' ', '.')
                    url = source_utils.strip_non_ascii_and_unprintable(url)
                    hash = re.compile('btih:(.*?)&').findall(url)[0]

                    name = source_utils.clean_name(self.title,
                                                   url.split('&dn=')[1])
                    if source_utils.remove_lang(name):
                        continue

                    if not self.search_series:
                        # Season-pack mode: keep only packs matching season_x.
                        if not self.bypass_filter and \
                                not source_utils.filter_season_pack(
                                    self.title, self.aliases, self.year,
                                    self.season_x, name):
                            continue
                        package = 'season'
                    else:
                        # Whole-show pack mode.
                        if self.bypass_filter:
                            last_season = self.total_seasons
                        else:
                            valid, last_season = source_utils.filter_show_pack(
                                self.title, self.aliases, self.imdb,
                                self.year, self.season_x, name,
                                self.total_seasons)
                            if not valid:
                                continue
                        package = 'show'

                    try:
                        seeders = int(seed_str)
                        if self.min_seeders > seeders:
                            continue
                    except:
                        seeders = 0

                    quality, info = source_utils.get_release_quality(name, url)

                    try:
                        dsize, isize = source_utils._size(size_str)
                        info.insert(0, isize)
                    except:
                        dsize = 0

                    item = {
                        'source': 'torrent',
                        'seeders': seeders,
                        'hash': hash,
                        'name': name,
                        'quality': quality,
                        'language': 'en',
                        'url': url,
                        'info': ' | '.join(info),
                        'direct': False,
                        'debridonly': True,
                        'size': dsize,
                        'package': package
                    }
                    if self.search_series:
                        item['last_season'] = last_season
                    self.sources.append(item)
            except:
                source_utils.scraper_error('TORRENTGALAXY')
Example #12
0
    def get_sources_packs(self, link):
        """Scrape an iDope results page for season/show pack torrents.

        Builds magnet URLs from the hidden hash/name divs of each result row,
        filters by name, pack validity and seeders, and appends surviving
        entries to self.sources.  No return value.
        """
        # log_utils.log('link = %s' % str(link), __name__, log_utils.LOGDEBUG)
        try:
            r = client.request(link)
            if not r:
                return
            div = client.parseDOM(r, 'div', attrs={'id': 'div2child'})

            for container in div:
                # FIX: parse the current container, not the full response
                # `r`.  The original re-parsed `r` for every container (and
                # clobbered the loop variable), which repeated the complete
                # result list once per container.
                rows = client.parseDOM(container,
                                       'div',
                                       attrs={'class': 'resultdivbotton'})
                if not rows:
                    return

                for post in rows:
                    hash = re.findall(
                        '<div id="hideinfohash.+?" class="hideinfohash">(.+?)<',
                        post, re.DOTALL)[0]
                    name = re.findall(
                        '<div id="hidename.+?" class="hideinfohash">(.+?)<',
                        post, re.DOTALL)[0]
                    name = unquote_plus(name)
                    name = source_utils.clean_name(self.title, name)
                    if source_utils.remove_lang(name):
                        continue

                    url = 'magnet:?xt=urn:btih:%s&dn=%s' % (hash, name)

                    # Skip magnets already collected by another scraper pass.
                    if url in str(self.sources):
                        continue

                    if not self.search_series:
                        # Season-pack mode: keep only packs for season_x.
                        if not self.bypass_filter:
                            if not source_utils.filter_season_pack(
                                    self.title, self.aliases, self.year,
                                    self.season_x, name):
                                continue
                        package = 'season'

                    elif self.search_series:
                        # Whole-show pack mode.
                        if not self.bypass_filter:
                            valid, last_season = source_utils.filter_show_pack(
                                self.title, self.aliases, self.imdb, self.year,
                                self.season_x, name, self.total_seasons)
                            if not valid:
                                continue
                        else:
                            last_season = self.total_seasons
                        package = 'show'

                    try:
                        seeders = int(
                            re.findall(
                                '<div class="resultdivbottonseed">([0-9]+|[0-9]+,[0-9]+)<',
                                post, re.DOTALL)[0].replace(',', ''))
                        if self.min_seeders > seeders:
                            continue
                    except:
                        seeders = 0

                    quality, info = source_utils.get_release_quality(name, url)

                    try:
                        size = re.findall(
                            '<div class="resultdivbottonlength">(.+?)<',
                            post)[0]
                        dsize, isize = source_utils._size(size)
                        info.insert(0, isize)
                    except:
                        dsize = 0

                    info = ' | '.join(info)

                    item = {
                        'source': 'torrent',
                        'seeders': seeders,
                        'hash': hash,
                        'name': name,
                        'quality': quality,
                        'language': 'en',
                        'url': url,
                        'info': info,
                        'direct': False,
                        'debridonly': True,
                        'size': dsize,
                        'package': package
                    }
                    if self.search_series:
                        item.update({'last_season': last_season})
                    self.sources.append(item)
        except:
            source_utils.scraper_error('IDOPE')
            pass
Example #13
0
    def get_sources_packs(self, url):
        """Scrape a TorrentFunk results page for season/show pack torrents.

        Follows each /torrent/ detail page to read the infohash, builds a
        magnet URL, filters by name, pack validity and seeders, and appends
        surviving entries to self.sources.  No return value.
        """
        try:
            r = client.request(url, timeout='5')
            if not r:
                return

            r = client.parseDOM(r, 'table', attrs={'class': 'tmain'})[0]
            links = re.findall('<a href="(/torrent/.+?)">(.+?)</a>', r,
                               re.DOTALL)

            for link in links:
                try:
                    url = link[0].encode('ascii', errors='ignore').decode(
                        'ascii', errors='ignore').replace('&nbsp;', ' ')
                except:
                    url = link[0].replace('&nbsp;', ' ')
                if '/torrent/' not in url:
                    continue

                try:
                    name = link[1].encode('ascii', errors='ignore').decode(
                        'ascii', errors='ignore').replace('&nbsp;', '.')
                except:
                    name = link[1].replace('&nbsp;', '.')
                # Titles sometimes carry a trailing <span>; merge its text in.
                if '<span' in name:
                    nam = name.split('<span')[0].replace(' ', '.')
                    span = client.parseDOM(name, 'span')[0].replace('-', '.')
                    name = '%s%s' % (nam, span)
                name = source_utils.clean_name(self.title, name)
                if source_utils.remove_lang(name):
                    continue

                if not self.search_series:
                    # Season-pack mode: keep only packs for season_x.
                    if not self.bypass_filter:
                        if not source_utils.filter_season_pack(
                                self.title, self.aliases, self.year,
                                self.season_x, name):
                            continue
                    package = 'season'

                elif self.search_series:
                    # Whole-show pack mode.
                    if not self.bypass_filter:
                        valid, last_season = source_utils.filter_show_pack(
                            self.title, self.aliases, self.imdb, self.year,
                            self.season_x, name, self.total_seasons)
                        if not valid:
                            continue
                    else:
                        last_season = self.total_seasons
                    package = 'show'

                # FIX: the original reassigned the loop variable `link` only
                # when the URL was relative; for an absolute URL it passed the
                # (href, text) tuple to client.request().  Use a dedicated
                # variable and always resolve the detail-page URL.
                if not url.startswith('http'):
                    detail_url = urljoin(self.base_link, url)
                else:
                    detail_url = url

                detail = client.request(detail_url, timeout='5')
                if detail is None:
                    continue
                hash = re.findall(
                    '<b>Infohash</b></td><td valign=top>(.+?)</td>', detail,
                    re.DOTALL)[0]
                url = 'magnet:?xt=urn:btih:%s&dn=%s' % (hash, name)
                if url in str(self.sources):
                    continue

                try:
                    seeders = int(
                        re.findall(
                            '<b>Swarm:</b></td><td valign=top><font color=red>([0-9]+)</font>',
                            detail, re.DOTALL)[0].replace(',', ''))
                    if self.min_seeders > seeders:  # site does not seem to report seeders
                        continue
                except:
                    seeders = 0

                quality, info = source_utils.get_release_quality(name, url)

                try:
                    size = re.findall(
                        '((?:\d+\,\d+\.\d+|\d+\.\d+|\d+\,\d+|\d+)\s*(?:GiB|MiB|GB|MB))',
                        detail)[0]
                    dsize, isize = source_utils._size(size)
                    info.insert(0, isize)
                except:
                    dsize = 0

                info = ' | '.join(info)

                item = {
                    'source': 'torrent',
                    'seeders': seeders,
                    'hash': hash,
                    'name': name,
                    'quality': quality,
                    'language': 'en',
                    'url': url,
                    'info': info,
                    'direct': False,
                    'debridonly': True,
                    'size': dsize,
                    'package': package
                }
                if self.search_series:
                    item.update({'last_season': last_season})
                self.sources.append(item)
        except:
            source_utils.scraper_error('TORRENTFUNK')
            pass
Example #14
0
    def get_sources_packs(self, link):
        """Scrape a Zooqle results page for season/show pack torrents.

        Pulls magnet links out of the results table, cleans up titles,
        filters by name, pack validity and seeders, and appends surviving
        entries to self.sources.  No return value.
        """
        # log_utils.log('link = %s' % str(link), __name__, log_utils.LOGDEBUG)
        try:
            # For some reason Zooqle returns 404 even though the response has a body.
            # This is probably a bug on Zooqle's server and the error should just be ignored.
            html = client.request(link, ignoreErrors=404)
            if not html:
                return
            html = html.replace('&nbsp;', ' ')

            try:
                results = client.parseDOM(
                    html,
                    'table',
                    attrs={
                        'class': 'table table-condensed table-torrents vmiddle'
                    })[0]
            except:
                return

            rows = re.findall('<tr(.+?)</tr>', results, re.DOTALL)
            if not rows:
                return

            for entry in rows:
                try:
                    try:
                        if 'magnet:' not in entry:
                            continue
                        url = 'magnet:%s' % (re.findall(
                            'href="magnet:(.+?)"', entry, re.DOTALL)[0])
                        url = unquote_plus(url).split('&tr')[0].replace(
                            '&amp;', '&').replace(' ', '.')
                        url = source_utils.strip_non_ascii_and_unprintable(url)
                        # Skip magnets already collected by another pass.
                        if url in str(self.sources):
                            continue
                    except:
                        continue

                    hash = re.compile('btih:(.*?)&').findall(url)[0]

                    try:
                        name = re.findall('<a class=".+?>(.+?)</a>', entry,
                                          re.DOTALL)[0]
                        name = client.replaceHTMLCodes(name).replace(
                            '<hl>', '').replace('</hl>', '')
                        name = unquote_plus(name)
                        name = source_utils.clean_name(self.title, name)
                        # name = url.split('&dn=')[1]
                    except:
                        continue
                    if source_utils.remove_lang(name):
                        continue

                    # some titles have foreign title translation in front so remove it
                    if './.' in name:
                        name = name.split('./.', 1)[1]
                    if '.com.' in name.lower():
                        try:
                            name = re.sub(r'(.*?)\W{2,10}', '', name)
                        except:
                            name = name.split('-.', 1)[1].lstrip()

                    if not self.search_series:
                        # Season-pack mode: keep only packs for season_x.
                        if not self.bypass_filter:
                            if not source_utils.filter_season_pack(
                                    self.title, self.aliases, self.year,
                                    self.season_x, name):
                                continue
                        package = 'season'

                    elif self.search_series:
                        # Whole-show pack mode.
                        if not self.bypass_filter:
                            valid, last_season = source_utils.filter_show_pack(
                                self.title, self.aliases, self.imdb, self.year,
                                self.season_x, name, self.total_seasons)
                            if not valid:
                                continue
                        else:
                            last_season = self.total_seasons
                        package = 'show'

                    try:
                        seeders = int(
                            re.findall(
                                'class="progress prog trans90" title="Seeders: (.+?) \|',
                                entry, re.DOTALL)[0].replace(',', ''))
                        if self.min_seeders > seeders:
                            continue
                    except:
                        seeders = 0

                    quality, info = source_utils.get_release_quality(name, url)

                    try:
                        size = re.findall(
                            '((?:\d+\.\d+|\d+\,\d+|\d+)\s*(?:GB|GiB|MB|MiB))',
                            entry)[-1]
                        dsize, isize = source_utils._size(size)
                        info.insert(0, isize)
                    except:
                        dsize = 0

                    info = ' | '.join(info)

                    item = {
                        'source': 'torrent',
                        'seeders': seeders,
                        'hash': hash,
                        'name': name,
                        'quality': quality,
                        'language': 'en',
                        'url': url,
                        'info': info,
                        'direct': False,
                        'debridonly': True,
                        'size': dsize,
                        'package': package
                    }
                    if self.search_series:
                        item.update({'last_season': last_season})
                    self.sources.append(item)
                except:
                    continue
        except:
            # FIX: error label previously read 'ZOOGLE', mislabeling the
            # provider in scraper-error logs; this scraper targets Zooqle.
            source_utils.scraper_error('ZOOQLE')
            pass