Example 1
    def parse(self, data, mode, **kwargs):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        with bs4_parser(data) as html:
            table_body = html.find('tbody')

            # Continue only if at least one release is found
            if not table_body:
                sickrage.app.log.debug('Data returned from provider does not contain any torrents')
                return results

            for row in table_body('tr'):
                cells = row('td')
                if len(cells) < 4:
                    continue

                try:
                    title = download_url = None
                    info_cell = cells[0].a
                    if info_cell:
                        title = info_cell.get_text()
                        download_url = self._get_download_link(urljoin(self.urls['base_url'], info_cell.get('href')))
                    if not all([title, download_url]):
                        continue

                    title = '{name} {codec}'.format(name=title, codec='x264')

                    if self.custom_url:
                        if not validate_url(self.custom_url):
                            sickrage.app.log.warning("Invalid custom url: {}".format(self.custom_url))
                            return results
                        download_url = urljoin(self.custom_url, download_url.split(self.urls['base_url'])[1])

                    seeders = try_int(cells[2].get_text(strip=True))
                    leechers = try_int(cells[3].get_text(strip=True))

                    torrent_size = cells[1].get_text()
                    size = convert_size(torrent_size, -1)

                    results += [{
                        'title': title,
                        'link': download_url,
                        'size': size,
                        'seeders': seeders,
                        'leechers': leechers
                    }]

                    if mode != 'RSS':
                        sickrage.app.log.debug("Found result: {}".format(title))
                except Exception:
                    sickrage.app.log.error("Failed parsing provider")

        return results
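
A minimal sketch of how a caller might consume the result dicts these parse() methods return. It mirrors the seeder-based filtering and sorting done in the search() methods later in this collection (Examples 6 and 11); the collect_results name and the min_seeders threshold are illustrative assumptions, not part of the provider code.

def collect_results(parse_page, pages, mode='RSS', min_seeders=1):
    # parse_page stands in for any of the parse() methods above;
    # pages is an iterable of raw response bodies.
    results = []
    for data in pages:
        results += parse_page(data, mode)

    # Drop unseeded torrents, then sort by seeders, as the search() examples do.
    results = [r for r in results if int(r.get('seeders', 0)) >= min_seeders]
    results.sort(key=lambda d: int(d.get('seeders', 0)), reverse=True)
    return results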
Example 2
    def parse(self, data, mode):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        with bs4_parser(data) as html:
            # Continue only if one Release is found
            empty = html.find('h2', text="No .torrents fit this filter criteria")
            if empty:
                sickrage.app.log.debug("Data returned from provider does not contain any torrents")
                return results

            torrent_table = html.find('table', attrs={'style': 'border: none; width: 100%;'})
            if not torrent_table:
                sickrage.app.log.error("Could not find table of torrents")
                return results

            torrent_rows = torrent_table.find_all('tr', attrs={'class': 'browse'})

            for result in torrent_rows:
                cells = result.find_all('td')
                size = None
                link = cells[1].find('a', attrs={'style': 'font-size: 1.25em; font-weight: bold;'})

                torrent_id = link['href'].replace('details.php?id=', '')

                try:
                    if link.has_attr('title'):
                        title = link['title']
                    else:
                        title = link.contents[0]

                    download_url = self.urls['download'] % (torrent_id, link.contents[0])
                    seeders = int(cells[9].contents[0])
                    leechers = int(cells[10].contents[0])

                    # Need size for failed downloads handling
                    if size is None:
                        if re.match(r'[0-9]+,?\.?[0-9]*[KkMmGg]+[Bb]+', cells[7].text):
                            size = convert_size(cells[7].text, -1)

                    if not all([title, download_url]):
                        continue

                    item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
                            'leechers': leechers, 'hash': ''}

                    if mode != 'RSS':
                        sickrage.app.log.debug("Found result: {}".format(title))

                    results.append(item)
                except Exception:
                    sickrage.app.log.error("Failed parsing provider.")

        return results
Example 3
    def parse(self, data, mode, **kwargs):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        for item in data:
            try:
                title = item['title']
                download_url = item['link']
                if not all([title, download_url]):
                    continue

                seeders = try_int(item['nyaa_seeders'])
                leechers = try_int(item['nyaa_leechers'])

                size = convert_size(item['nyaa_size'], -1, units=['B', 'KIB', 'MIB', 'GIB', 'TIB', 'PIB'])

                results += [
                    {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers}
                ]

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))
            except Exception:
                sickrage.app.log.error('Failed parsing provider')

        return results
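
These parsers lean on a shared try_int helper. Below is a minimal sketch of the behaviour the call sites above assume (a safe integer conversion that falls back to a default instead of raising); the real SickRage helper may differ in detail.

def try_int(candidate, default_value=0):
    # Sketch of assumed behaviour: convert to int, return default_value on failure.
    try:
        return int(candidate)
    except (TypeError, ValueError):
        return default_value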
Example 4
    def parse(self, data, mode, **kwargs):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        def process_column_header(td):
            result = ''
            if td.a and td.a.img:
                result = td.a.img.get('title', td.a.get_text(strip=True))
            if not result:
                result = td.get_text(strip=True)
            return result

        with bs4_parser(data) as html:
            torrent_table = html.find('table', attrs={'id': 'torrent_table'})
            torrent_rows = torrent_table('tr') if torrent_table else []

            # Continue only if one Release is found
            if len(torrent_rows) < 2:
                sickrage.app.log.debug("Data returned from provider does not contain any torrents")
                return results

            # '', '', 'Name /Year', 'Files', 'Time', 'Size', 'Snatches', 'Seeders', 'Leechers'
            labels = [process_column_header(label) for label in torrent_rows[0]('td')]

            # Skip column headers
            for row in torrent_rows[1:]:
                try:
                    cells = row('td')
                    if len(cells) < len(labels):
                        continue

                    title = cells[labels.index('Name /Year')].find('a', dir='ltr').get_text(strip=True)
                    download = cells[labels.index('Name /Year')].find('a', title='Download')['href']
                    download_url = urljoin(self.urls['base_url'], download)
                    if not all([title, download_url]):
                        continue

                    seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True))
                    leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True))

                    torrent_size = cells[labels.index('Size')].get_text(strip=True)
                    size = convert_size(torrent_size, -1)

                    results += [
                        {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers}
                    ]

                    if mode != 'RSS':
                        sickrage.app.log.debug('Found result: {}'.format(title))
                except Exception:
                    sickrage.app.log.error('Failed parsing provider')

        return results
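
The label-indexing pattern used in Example 4 (and again in several examples below) reads the header row once and then looks cells up by column name, so the parser survives column reordering. A toy illustration, using a subset of the header names listed in Example 4 as sample data:

# Toy illustration only; the cell values are made up.
labels = ['Name /Year', 'Files', 'Time', 'Size', 'Snatches', 'Seeders', 'Leechers']
cells = ['Some.Show.S01E01', '3', 'today', '1.2 GB', '10', '42', '7']

seeders = int(cells[labels.index('Seeders')])
size_text = cells[labels.index('Size')]
assert seeders == 42 and size_text == '1.2 GB'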
Example 5
    def parse(self, data, mode, **kwargs):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        def process_column_header(td):
            td_title = ''
            if td.img:
                td_title = td.img.get('title', td.get_text(strip=True))
            if not td_title:
                td_title = td.get_text(strip=True)
            return td_title

        with bs4_parser(data) as html:
            torrent_table = html.find('table', id='sortabletable')
            torrent_rows = torrent_table('tr') if torrent_table else []

            # Continue only if at least one Release is found
            if len(torrent_rows) < 2:
                sickrage.app.log.debug("Data returned from provider does not contain any torrents")
                return results

            labels = [process_column_header(label) for label in torrent_rows[0]('td')]

            # Skip column headers
            for result in torrent_rows[1:]:
                try:
                    title = result.find('div', class_='tooltip-target').get_text(strip=True)
                    # skip if torrent has been nuked due to poor quality
                    if title.startswith('Nuked.'):
                        continue
                    download_url = result.find(
                        'img', title='Click to Download this Torrent in SSL!').parent['href']
                    if not all([title, download_url]):
                        continue

                    cells = result('td')
                    seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True))
                    leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True))
                    torrent_size = cells[labels.index('Size')].get_text(strip=True)
                    size = convert_size(torrent_size, -1)

                    results += [
                        {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers}
                    ]

                    if mode != 'RSS':
                        sickrage.app.log.debug("Found result: {}".format(title))
                except Exception:
                    sickrage.app.log.error("Failed parsing provider")

        return results
Example 6
    def search(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
        results = []

        for mode in search_strings:
            items = []
            sickrage.srCore.srLogger.debug('Search Mode: {}'.format(mode))
            for search_string in search_strings[mode]:
                search_url = self.urls['feed']
                if mode != 'RSS':
                    sickrage.srCore.srLogger.debug('Search string: {}'.format(search_string))

                try:
                    data = sickrage.srCore.srWebSession.get(search_url, params={'f': search_string}).text
                except Exception:
                    sickrage.srCore.srLogger.debug('No data returned from provider')
                    continue

                if not data.startswith('<?xml'):
                    sickrage.srCore.srLogger.info('Expected xml but got something else, is your mirror failing?')
                    continue

                with bs4_parser(data) as parser:
                    for item in parser('item'):
                        if item.category and 'tv' not in item.category.get_text(strip=True):
                            continue

                        title = item.title.get_text(strip=True)
                        t_hash = item.guid.get_text(strip=True).rsplit('/', 1)[-1]

                        if not all([title, t_hash]):
                            continue

                        download_url = "magnet:?xt=urn:btih:" + t_hash + "&dn=" + title
                        torrent_size, seeders, leechers = self._split_description(item.find('description').text)
                        size = convert_size(torrent_size) or -1

                        # Filter unseeded torrent
                        if seeders < self.minseed or leechers < self.minleech:
                            if mode != 'RSS':
                                sickrage.srCore.srLogger.debug("Discarding torrent because it doesn't meet the minimum seeders or leechers: {} (S:{} L:{})".format(title, seeders, leechers))
                            continue

                        items += [{
                            'title': title,
                            'link': download_url,
                            'size': size,
                            'seeders': seeders,
                            'leechers': leechers,
                            'hash': t_hash
                        }]

            # For each search mode sort all the items by seeders if available
            items.sort(key=lambda d: int(d.get('seeders', 0)), reverse=True)
            results += items

        return results
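
_split_description is provider-specific and not shown in this collection; the call sites only require a (size, seeders, leechers) tuple parsed out of the RSS item's description text. A purely hypothetical sketch, assuming the description embeds those values in a "Size: ... Seeds: ... Peers: ..." style string (the real format depends on the tracker):

import re

def _split_description(description):
    # Hypothetical helper: the regex assumes a "Size: 1.2 GB Seeds: 10 Peers: 3" layout.
    match = re.search(r'Size:\s*(?P<size>[\d.,]+\s*[KMGT]?i?B).*?'
                      r'Seeds:\s*(?P<seeders>\d+).*?'
                      r'Peers:\s*(?P<leechers>\d+)',
                      description, re.IGNORECASE | re.DOTALL)
    if not match:
        return '0 B', 0, 0
    return match.group('size'), int(match.group('seeders')), int(match.group('leechers'))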
Example 7
    def parse(self, data, mode):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        with bs4_parser(data) as html:
            torrent_table = html.find("table", border="1")
            torrent_rows = torrent_table("tr") if torrent_table else []

            # Continue only if at least one Release is found
            if len(torrent_rows) < 2:
                sickrage.app.log.debug("Data returned from provider does not contain any torrents")
                return results

            # "Type", "Name", Files", "Comm.", "Added", "TTL", "Size", "Snatched", "Seeders", "Leechers"
            labels = [label.get_text(strip=True) for label in torrent_rows[0]("td")]

            for result in torrent_rows[1:]:
                try:
                    cells = result("td")

                    link = cells[labels.index("Name")].find("a", href=re.compile(r"download.php\?id="))["href"]
                    download_url = urljoin(self.urls['base_url'], link)

                    title_element = cells[labels.index("Name")].find("a", href=re.compile(r"details.php\?id="))
                    title = title_element.get("title", "") or title_element.get_text(strip=True)
                    if not all([title, download_url]):
                        continue

                    if self.freeleech:
                        # Free leech torrents are marked with green [F L] in the title (i.e. <font color=green>[F&nbsp;L]</font>)
                        freeleech = cells[labels.index("Name")].find("font", color="green")
                        if not freeleech or freeleech.get_text(strip=True) != "[F\xa0L]":
                            continue

                    seeders = try_int(cells[labels.index("Seeders")].get_text(strip=True))
                    leechers = try_int(cells[labels.index("Leechers")].get_text(strip=True))
                    torrent_size = cells[labels.index("Size")].get_text(strip=True)
                    size = convert_size(torrent_size, -1)

                    item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
                            'leechers': leechers, 'hash': ''}

                    if mode != "RSS":
                        sickrage.app.log.debug("Found result: {}".format(title))

                    results.append(item)
                except Exception:
                    sickrage.app.log.error("Failed parsing provider.")

        return results
Example 8
    def parse(self, data, mode, **kwargs):
        """
        Parse search results for items.

        :param data: The raw response from a search
        :param mode: The current mode used to search, e.g. RSS

        :return: A list of items found
        """
        results = []

        with bs4_parser(data) as html:
            torrent_table = html.find(class_='table-responsive results')
            torrent_rows = torrent_table('tr') if torrent_table else []

            # Continue only if at least one Release is found
            if len(torrent_rows) < 2:
                sickrage.app.log.debug('Data returned from provider does not contain any torrents')
                return results

            for result in torrent_rows[1:]:
                cells = result('td')
                if len(cells) < 9:
                    continue

                try:
                    info = cells[1].find('a')
                    title = info.get_text(strip=True)
                    download_url = info.get('href')
                    if not (title and download_url):
                        continue

                    torrent_id = re.search(r'/(\d+)-', download_url)
                    download_url = self.urls['download'] % torrent_id.group(1)

                    seeders = try_int(cells[7].get_text(strip=True), 0)
                    leechers = try_int(cells[8].get_text(strip=True), 0)

                    torrent_size = cells[5].get_text()
                    size = convert_size(torrent_size, -1, ['O', 'KO', 'MO', 'GO', 'TO', 'PO'])

                    results += [{
                        'title': title,
                        'link': download_url,
                        'size': size,
                        'seeders': seeders,
                        'leechers': leechers
                    }]

                    if mode != 'RSS':
                        sickrage.app.log.debug("Found result: {}".format(title))
                except Exception:
                    sickrage.app.log.error('Failed parsing provider.')

        return results
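
convert_size is another shared helper: the call sites pass a human-readable size string plus a default, and optionally a custom unit ladder (this provider's French listings use 'O', 'KO', 'MO', ...; the nyaa example uses 'KIB', 'MIB', ...). A simplified sketch of that contract, not the actual library implementation:

import re

def convert_size(size, default=-1, units=None):
    # Simplified sketch: turn "1.2 GB" (or "1,2 GO" with a custom unit list) into bytes,
    # returning `default` when the value cannot be parsed.
    if isinstance(size, (int, float)):
        return int(size)
    units = [u.upper() for u in (units or ['B', 'KB', 'MB', 'GB', 'TB', 'PB'])]
    match = re.match(r'([\d.,]+)\s*([A-Za-z]+)', str(size).strip())
    if not match:
        return default
    try:
        value = float(match.group(1).replace(',', '.'))
        exponent = units.index(match.group(2).upper())
    except ValueError:
        return default
    return int(value * 1024 ** exponent)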
Example 9
    def parse(self, data, mode):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        with bs4_parser(data) as html:
            torrent_table = html.find("div", id="torrentBrowse")
            torrent_rows = torrent_table.findChildren("tr") if torrent_table else []

            # Continue only if at least one release is found
            if len(torrent_rows) < 1:
                sickrage.app.log.debug("Data returned from provider does not contain any torrents")
                return results

            for result in torrent_rows[1:]:
                try:
                    cells = result.findChildren("td")
                    title = cells[1].find("a").find_next("a")
                    link = cells[3].find("a")
                    shares = cells[8].get_text().split("/", 1)
                    torrent_size = cells[7].get_text().split("/", 1)[0]

                    if title.has_attr('title'):
                        title = title['title']
                    else:
                        title = cells[1].find("a")['title']

                    download_url = self.urls['download'] % (link['href'])
                    seeders = int(shares[0])
                    leechers = int(shares[1])

                    size = -1
                    if re.match(r"\d+([,.]\d+)?\s*[KkMmGgTt]?[Bb]", torrent_size):
                        size = convert_size(torrent_size.rstrip(), -1)

                    if not all([title, download_url]):
                        continue

                    item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
                            'leechers': leechers, 'hash': ''}

                    if mode != 'RSS':
                        sickrage.app.log.debug("Found result: {}".format(title))

                    results.append(item)
                except Exception:
                    sickrage.app.log.error("Failed parsing provider.")

        return results
Example 10
    def parse(self, data, mode):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        with bs4_parser(data) as html:
            torrent_table = html.find(class_='torrent_table')
            torrent_rows = torrent_table('tr') if torrent_table else []

            # Continue only if at least one Release is found
            if len(torrent_rows) < 2:
                sickrage.app.log.debug('Data returned from provider does not contain any torrents')
                return results

            # Catégorie, Release, Date, DL, Size, C, S, L
            labels = [label.get_text(strip=True) for label in torrent_rows[0]('td')]

            # Skip column headers
            for result in torrent_rows[1:]:
                try:
                    cells = result('td')
                    if len(cells) < len(labels):
                        continue

                    title = cells[labels.index('Release')].get_text(strip=True)
                    download_url = urljoin(self.urls['base_url'],
                                           cells[labels.index('DL')].find('a', class_='tooltip')['href'])
                    if not all([title, download_url]):
                        continue

                    seeders = try_int(cells[labels.index('S')].get_text(strip=True))
                    leechers = try_int(cells[labels.index('L')].get_text(strip=True))

                    size_index = labels.index('Size') if 'Size' in labels else labels.index('Taille')

                    units = ['O', 'KO', 'MO', 'GO', 'TO', 'PO']
                    size = convert_size(cells[size_index].get_text(), -1, units)

                    item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
                            'leechers': leechers, 'hash': ''}

                    if mode != 'RSS':
                        sickrage.app.log.debug('Found result: {}'.format(title))

                    results.append(item)
                except Exception:
                    sickrage.app.log.error('Failed parsing provider')

        return results
Example 11
    def search(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):
        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        for mode in search_strings:
            for search_string in search_strings[mode]:
                search_url = self.urls['verified'] if self.confirmed else self.urls['feed']
                if mode != 'RSS':
                    search_url += '?q=' + quote_plus(search_string)
                    sickrage.srCore.srLogger.info(search_url)

                try:
                    data = sickrage.srCore.srWebSession.get(search_url).text
                except Exception:
                    sickrage.srCore.srLogger.info('Seems to be down right now!')
                    continue

                if not data.startswith('<?xml'):
                    sickrage.srCore.srLogger.info('Expected xml but got something else, is your mirror failing?')
                    continue

                with bs4_parser(data) as html:
                    if not html:
                        sickrage.srCore.srLogger.debug("No html data parsed from provider")
                        continue

                    for item in html('item'):
                        if item.category and 'tv' not in item.category.get_text(strip=True):
                            continue

                        title = item.title.text.rsplit(' ', 1)[0].replace(' ', '.')
                        t_hash = item.guid.text.rsplit('/', 1)[-1]

                        if not all([title, t_hash]):
                            continue

                        download_url = "magnet:?xt=urn:btih:" + t_hash + "&dn=" + title
                        torrent_size, seeders, leechers = self._split_description(item.find('description').text)
                        size = convert_size(torrent_size) or -1

                        # Filter unseeded torrent
                        if seeders < self.minseed or leechers < self.minleech:
                            if mode != 'RSS':
                                sickrage.srCore.srLogger.debug("Discarding torrent because it doesn't meet the minimum seeders or leechers: {} (S:{} L:{})".format(title, seeders, leechers))
                            continue

                        items[mode].append((title, download_url, size, seeders, leechers))

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)
            results += items[mode]

        return results
Example 12
    def parse(self, data, mode):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        torrent_rows = data.pop('torrents', {})

        if not self._check_auth_from_data(data):
            return results

        # Skip column headers
        for row in torrent_rows:
            try:
                title = row.pop('title', '')
                info_hash = row.pop('infoHash', '')
                download_url = 'magnet:?xt=urn:btih:' + info_hash
                if not all([title, download_url, info_hash]):
                    continue

                swarm = row.pop('swarm', {})
                seeders = try_int(swarm.pop('seeders', 0))
                leechers = try_int(swarm.pop('leechers', 0))

                # Filter unseeded torrent
                if seeders < min(self.minseed, 1):
                    if mode != 'RSS':
                        sickrage.app.log.debug("Discarding torrent because it doesn't meet the minimum  "
                                                       "seeders: {0}. Seeders: {1}".format(title, seeders))
                    continue

                size = convert_size(row.pop('size', -1)) or -1

                item = {
                    'title': title,
                    'link': download_url,
                    'size': size,
                    'seeders': seeders,
                    'leechers': leechers,
                    'pubdate': None,
                }
                if mode != 'RSS':
                    sickrage.app.log.debug('Found result: {}'.format(title))

                results.append(item)
            except Exception:
                sickrage.app.log.error('Failed parsing provider')

        return results
Example 13
    def parse(self, data, mode, **kwargs):
        """
        Parse search results for items.

        :param data: The raw response from a search
        :param mode: The current mode used to search, e.g. RSS

        :return: A list of items found
        """

        results = []

        with bs4_parser(data) as html:
            if 'no encontrada' in html.get_text():
                return results

            try:
                link = html.find(rel='canonical')
                if not link:
                    return results

                try:
                    title = unidecode(html.find('h1').get_text().split('/')[1])
                    title = self._process_title(title, link['href'])
                except Exception:
                    title = None

                try:
                    download_url = self.urls['download'] % re.search(
                        r'http://tumejorserie.com/descargar/.+?(\d{6}).+?\.html', html.get_text(), re.DOTALL).group(1)
                except Exception:
                    download_url = None

                if not all([title, download_url]):
                    return results

                seeders = 1  # Provider does not provide seeders
                leechers = 0  # Provider does not provide leechers

                torrent_size = html.find_all(class_='imp')[1].get_text()
                torrent_size = re.sub(r'Size: ([\d.]+).+([KMGT]B)', r'\1 \2', torrent_size)
                size = convert_size(torrent_size, -1)

                results += [
                    {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers}
                ]

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))
            except Exception:
                sickrage.app.log.error('Failed parsing provider')

        return results
Example 14
    def parse(self, data, mode):
        """
        Parse search results for items.

        :param data: The raw response from a search
        :param mode: The current mode used to search, e.g. RSS

        :return: A list of items found
        """
        results = []

        with bs4_parser(data) as html:
            torrent_table = html.find(class_='table table-striped')
            torrent_rows = torrent_table('tr') if torrent_table else []

            # Continue only if at least one Release is found
            if len(torrent_rows) < 2:
                sickrage.app.log.debug('Data returned from provider does not contain any torrents')
                return results

            # Skip column headers
            for result in torrent_rows[1:]:
                cells = result('td')
                if len(cells) < 6:
                    continue

                try:
                    title = cells[0].find('a', class_='torrent-name').get_text(strip=True)
                    download_url = urljoin(self.urls['base_url'], cells[0].find('a', target='_blank')['href'])
                    if not (title and download_url):
                        continue

                    seeders = try_int(cells[4].get_text(strip=True), 1)
                    leechers = try_int(cells[5].get_text(strip=True), 0)

                    torrent_size = cells[3].get_text()
                    size = convert_size(torrent_size, -1)

                    item = {
                        'title': title,
                        'link': download_url,
                        'size': size,
                        'seeders': seeders,
                        'leechers': leechers
                    }
                    if mode != 'RSS':
                        sickrage.app.log.debug('Found result: {}'.format(title))

                    results.append(item)
                except Exception:
                    sickrage.app.log.error('Failed parsing provider.')

        return results
Example 15
    def parse(self, data, mode):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        if not data.startswith("<rss"):
            sickrage.app.log.info("Expected rss but got something else, is your mirror failing?")
            return results

        feed = feedparser.parse(data)
        for item in feed.entries:
            try:
                title = item.title
                download_url = item.link
                if not (title and download_url):
                    continue

                info = self.regex.search(item.description)
                if not info:
                    continue

                seeders = try_int(info.group("seeders"))
                leechers = try_int(info.group("leechers"))

                category = item.category
                if category != 'all':
                    sickrage.app.log.warning(
                        'skytorrents.in has added categories! Please report this so it can be updated: Category={cat}, '
                        'Title={title}'.format(cat=category, title=title))

                size = convert_size(info.group('size'), -1)

                try:
                    info_hash = download_url.rsplit('/', 2)[1]
                except IndexError:
                    info_hash = ''

                item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
                        'leechers': leechers, 'hash': info_hash}

                if mode != "RSS":
                    sickrage.app.log.debug("Found result: {}".format(title))

                results.append(item)
            except Exception:
                sickrage.app.log.error("Failed parsing provider")

        return results
Example 16
    def parse(self, data, mode, **kwargs):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        with bs4_parser(data) as html:
            torrent_table = html.find(class_='ttable_headinner')
            torrent_rows = torrent_table('tr') if torrent_table else []

            # Continue only if at least one Release is found
            if len(torrent_rows) < 2:
                sickrage.app.log.debug('Data returned from provider does not contain any torrents')
                return results

            # Catégorie, Release, Date, DL, Size, C, S, L
            labels = [label.get_text(strip=True) for label in torrent_rows[0]('th')]

            # Skip column headers
            for result in torrent_rows[1:]:
                try:
                    cells = result('td')
                    if len(cells) < len(labels):
                        continue

                    torrent_id = re.search('id=([0-9]+)', cells[labels.index('Nom')].find('a')['href']).group(1)
                    title = cells[labels.index('Nom')].get_text(strip=True)
                    download_url = urljoin(self.urls['download'], '?id={0}&name={1}'.format(torrent_id, title))
                    if not all([title, download_url]):
                        continue

                    seeders = try_int(cells[labels.index('S')].get_text(strip=True))
                    leechers = try_int(cells[labels.index('L')].get_text(strip=True))

                    size_index = labels.index('Size') if 'Size' in labels else labels.index('Taille')
                    torrent_size = cells[size_index].get_text()
                    size = convert_size(torrent_size, -1)

                    results += [
                        {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers}
                    ]

                    if mode != 'RSS':
                        sickrage.app.log.debug('Found result: {}'.format(title))
                except Exception:
                    sickrage.app.log.error('Failed parsing provider')

        return results
Example 17
    def parse(self, data, mode, **kwargs):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        with bs4_parser(data) as soup:
            torrent_table = soup.find('table', class_='listing')
            torrent_rows = torrent_table('tr') if torrent_table else []

            # Continue only if one Release is found
            if len(torrent_rows) < 2:
                sickrage.app.log.debug("Data returned from provider does not contain any torrents")
                return results

            a = 1 if len(torrent_rows[0]('td')) < 2 else 0

            for top, bot in zip(torrent_rows[a::2], torrent_rows[a + 1::2]):
                try:
                    title = download_url = ""
                    desc_top = top.find('td', class_='desc-top')
                    if desc_top:
                        title = desc_top.get_text(strip=True)
                        download_url = desc_top.find('a')['href']

                    if not all([title, download_url]):
                        continue

                    stats = bot.find('td', class_='stats').get_text(strip=True)
                    sl = re.match(r'S:(?P<seeders>\d+)L:(?P<leechers>\d+)C:(?:\d+)ID:(?:\d+)', stats.replace(' ', ''))
                    seeders = try_int(sl.group('seeders'))
                    leechers = try_int(sl.group('leechers'))

                    desc_bottom = bot.find('td', class_='desc-bot').get_text(strip=True)
                    size = convert_size(desc_bottom.split('|')[1].replace('Size:', '').strip(), -1)

                    results += [
                        {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers}
                    ]

                    if mode != 'RSS':
                        sickrage.app.log.debug("Found result: {}".format(title))
                except Exception:
                    sickrage.app.log.error("Failed parsing provider")

        return results
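
The seeders/leechers regex in the example above is easiest to read against a concrete stats cell. A small illustrative check (the sample string is made up):

import re

# A stats cell such as "S: 12 L: 3 C: 0 ID: 98765" collapses to "S:12L:3C:0ID:98765"
# once the spaces are stripped, which is what the named-group regex matches.
stats = 'S: 12 L: 3 C: 0 ID: 98765'
sl = re.match(r'S:(?P<seeders>\d+)L:(?P<leechers>\d+)C:(?:\d+)ID:(?:\d+)', stats.replace(' ', ''))
assert sl and sl.group('seeders') == '12' and sl.group('leechers') == '3'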
Example 18
    def parse(self, data, mode):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        with bs4_parser(data) as html:
            torrent_table = html.find('table', attrs={'id': 'torrents-table'})
            torrent_rows = torrent_table.find_all('tr') if torrent_table else []

            # Continue only if at least one Release is found
            if len(torrent_rows) < 2:
                sickrage.app.log.debug("Data returned from provider does not contain any torrents")
                return results

            for result in torrent_table.find_all('tr')[1:]:
                try:
                    link = result.find('td', attrs={'class': 'ttr_name'}).find('a')
                    url = result.find('td', attrs={'class': 'td_dl'}).find('a')

                    title = link.string
                    if re.search(r'\.\.\.', title):
                        data = sickrage.app.wsession.get(self.urls['base_url'] + "/" + link['href']).text
                        with bs4_parser(data) as details_html:
                            title = re.search('(?<=").+(?<!")', details_html.title.string).group(0)
                    download_url = self.urls['download'] % url['href']
                    seeders = int(result.find('td', attrs={'class': 'ttr_seeders'}).string)
                    leechers = int(result.find('td', attrs={'class': 'ttr_leechers'}).string)
                    size = convert_size(result.find('td', attrs={'class': 'ttr_size'}).contents[0], -1)

                    if not all([title, download_url]):
                        continue

                    item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
                            'leechers': leechers, 'hash': ''}

                    if mode != 'RSS':
                        sickrage.app.log.debug("Found result: {}".format(title))

                    results.append(item)
                except Exception:
                    sickrage.app.log.error("Failed parsing provider")

        return results
Example 19
    def parse(self, data, mode, **kwargs):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        try:
            data = data.split('<div id="information"></div>')[1]
        except IndexError:
            sickrage.app.log.error("Could not find main torrent table")
            return results

        with bs4_parser(data[data.index('<table'):]) as html:
            torrents = html.findAll('tr')
            if not torrents:
                return results

            # Skip column headers
            for result in torrents[1:]:
                if len(result.contents) < 10:
                    # skip extraneous rows at the end
                    continue

                try:
                    dl_href = result.find('a', attrs={'href': re.compile(r'download.php.*')})['href']
                    title = re.search('f=(.*).torrent', dl_href).group(1).replace('+', '.')
                    download_url = self.urls['base_url'] + dl_href
                    seeders = int(result.find('span', attrs={'class': 'seedy'}).find('a').text)
                    leechers = int(result.find('span', attrs={'class': 'leechy'}).find('a').text)
                    size = convert_size(result, -1)

                    if not all([title, download_url]):
                        continue

                    results += [
                        {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers}
                    ]

                    if mode != 'RSS':
                        sickrage.app.log.debug("Found result: {}".format(title))
                except Exception:
                    sickrage.app.log.error("Failed parsing provider")

        return results
Example 20
    def parse(self, data, mode):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        with bs4_parser(data) as html:
            torrent_rows = html.find_all('tr', class_='torrent')
            if len(torrent_rows) < 1:
                sickrage.app.log.debug("Data returned from provider does not contain any torrents")
                return results

            for result in torrent_rows:
                try:
                    # skip if torrent has been nuked due to poor quality
                    if result.find('img', alt='Nuked'):
                        continue

                    download_url = urljoin(self.urls['base_url'] + '/',
                                           result.find('span', title='Download').parent['href'])
                    title = result.find('a', title='View torrent').get_text(strip=True)

                    if not all([title, download_url]):
                        continue

                    seeders = try_int(result('td', class_="number_column")[1].text, 0)
                    leechers = try_int(result('td', class_="number_column")[2].text, 0)

                    size = -1
                    if re.match(r'\d+([,.]\d+)?\s*[KkMmGgTt]?[Bb]',
                                result('td', class_="number_column")[0].text):
                        size = convert_size(result('td', class_="number_column")[0].text.strip(), -1)

                    item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
                            'leechers': leechers, 'hash': ''}

                    if mode != 'RSS':
                        sickrage.app.log.debug("Found result: {}".format(title))

                    results.append(item)
                except Exception:
                    sickrage.app.log.error("Failed parsing provider")

        return results
Example 21
    def parse(self, data, mode):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        with bs4_parser(data) as html:
            torrent_rows = html.find_all('tr')
            for row in torrent_rows:
                for torrent in row.find_all('td'):
                    for link in torrent.find_all('a'):
                        try:
                            fileType = ''.join(link.find_previous('i')["class"])
                            fileType = unicodedata.normalize('NFKD', fileType)

                            if fileType == "Series":
                                title = link.get_text(strip=True)
                                download_url = self.get_download_url(link.get('href'))

                                if not all([title, download_url]):
                                    continue

                                # size
                                size = convert_size(link.findNext('td').text, -1)

                                # Filter unseeded torrent
                                seeders = try_int(link.find_next('img', alt='seeders').parent.text, 0)
                                leechers = try_int(link.find_next('img', alt='leechers').parent.text, 0)

                                if mode != 'RSS':
                                    sickrage.app.log.debug("Found result: {}".format(title))

                                results += [{
                                    'title': title,
                                    'link': download_url,
                                    'size': size,
                                    'seeders': seeders,
                                    'leechers': leechers,
                                }]
                        except Exception:
                            sickrage.app.log.error("Failed parsing provider")

        return results
Example 22
    def parse(self, data, mode):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        with bs4_parser(data) as html:
            torrent_table = html.find('table', attrs={'id': 'torrenttable'})
            torrent_rows = torrent_table.find_all('tr') if torrent_table else []

            # Continue only if one Release is found
            if len(torrent_rows) < 2:
                sickrage.app.log.debug("Data returned from provider does not contain any torrents")
                return results

            for result in torrent_table.find_all('tr')[1:]:
                try:
                    title = result.find("td", class_="name").find("a").get_text(strip=True)
                    download_url = urljoin(self.urls['base_url'],
                                           result.find("td", class_="quickdownload").find("a")["href"])

                    if not all([title, download_url]):
                        continue

                    seeders = try_int(result.find('td', attrs={'class': 'seeders'}).text, 0)
                    leechers = try_int(result.find('td', attrs={'class': 'leechers'}).text, 0)

                    size = -1
                    if re.match(r'\d+([,.]\d+)?\s*[KkMmGgTt]?[Bb]',
                                result('td', class_="listcolumn")[1].text):
                        size = convert_size(result('td', class_="listcolumn")[1].text.strip(), -1)

                    item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
                            'leechers': leechers, 'hash': ''}

                    if mode != 'RSS':
                        sickrage.app.log.debug("Found result: {}".format(title))

                    results.append(item)
                except Exception:
                    sickrage.app.log.error("Failed parsing provider.")

        return results
Example 23
    def parse(self, data, mode):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        error = data.get('error')
        if error:
            sickrage.app.log.debug(error)
            return results

        try:
            if data['0']['total_results'] == 0:
                sickrage.app.log.debug("Provider has no results for this search")
                return results
        except Exception:
            return results

        for i in data:
            try:
                title = data[i]["release_name"]
                download_url = data[i]["download_url"]
                if not all([title, download_url]):
                    continue

                seeders = data[i]["seeders"]
                leechers = data[i]["leechers"]

                torrent_size = str(data[i]["size"]) + ' MB'
                size = convert_size(torrent_size, -1)

                item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
                        'leechers': leechers, 'hash': ''}

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))

                results.append(item)
            except Exception:
                sickrage.app.log.error("Failed parsing provider")

        return results
Example 24
    def parse(self, data, mode, **kwargs):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        data = re.sub(r'(?im)<button.+?<[/]button>', '', data, 0)

        with bs4_parser(data) as html:
            torrent_table = html.find('table', id='torrents')
            torrents = torrent_table('tr') if torrent_table else []

            # Continue only if one Release is found
            if len(torrents) < 2 or html.find(text='No Torrents Found!'):
                sickrage.app.log.debug("Data returned from provider does not contain any torrents")
                return results

            for torrent in torrents[1:]:
                try:
                    title = torrent('td')[1].find('a').text
                    download_url = self.urls['base_url'] + torrent('td')[3].find('a')['href']
                    if not all([title, download_url]):
                        continue

                    size = convert_size(torrent('td')[5].text, -1)
                    seeders = int(torrent.find('td', attrs={'class': 'ac t_seeders'}).text)
                    leechers = int(torrent.find('td', attrs={'class': 'ac t_leechers'}).text)

                    results += [{
                        'title': title,
                        'link': download_url,
                        'size': size,
                        'seeders': seeders,
                        'leechers': leechers
                    }]

                    if mode != 'RSS':
                        sickrage.app.log.debug("Found result: {}".format(title))
                except Exception:
                    sickrage.app.log.error("Failed parsing provider")

        return results
Example 25
    def parse(self, data, mode):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        for item in data:
            try:
                title = item['title']
                download_url = item['link']
                if not all([title, download_url]):
                    continue

                seeders = try_int(item['nyaa_seeders'])
                leechers = try_int(item['nyaa_leechers'])

                # Filter unseeded torrent
                if seeders < min(self.minseed, 1):
                    if mode != 'RSS':
                        sickrage.app.log.debug("Discarding torrent because it doesn't meet the "
                                                       "minimum seeders: {}. Seeders: {}".format(title, seeders))
                    continue

                size = convert_size(item['nyaa_size'], -1, units=['B', 'KIB', 'MIB', 'GIB', 'TIB', 'PIB'])

                item = {
                    'title': title,
                    'link': download_url,
                    'size': size,
                    'seeders': seeders,
                    'leechers': leechers
                }
                if mode != 'RSS':
                    sickrage.app.log.debug('Found result: {}'.format(title))

                results.append(item)
            except Exception:
                sickrage.app.log.error('Failed parsing provider')

        return results
Example 26
    def parse(self, data, mode, **kwargs):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        if not isinstance(data, dict):
            return results

        torrent_results = data['total_results']
        if not torrent_results:
            return results

        sickrage.app.log.debug('Number of torrents found on nCore = ' + str(torrent_results))

        for item in data['results']:
            try:
                title = item.pop("release_name")
                download_url = item.pop("download_url")
                if not all([title, download_url]):
                    continue

                seeders = item.pop("seeders")
                leechers = item.pop("leechers")
                torrent_size = item.pop("size", -1)
                size = convert_size(torrent_size, -1)

                if mode != "RSS":
                    sickrage.app.log.debug("Found result: {}".format(title))

                results += [
                    {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers}
                ]

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))
            except Exception:
                sickrage.app.log.error("Failed parsing provider")

        return results
Example 27
    def parse(self, data, mode):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        if not (data and "total_found" in data and int(data["total_found"]) > 0):
            sickrage.app.log.debug("Data returned from provider does not contain any torrents")
            return results

        del data["total_found"]

        for i in data:
            try:
                title = data[i]["title"]
                seeders = try_int(data[i]["seeds"], 1)
                leechers = try_int(data[i]["leechs"], 0)
                t_hash = data[i]["torrent_hash"]
                torrent_size = data[i]["torrent_size"]
                if not all([t_hash, torrent_size]):
                    continue

                download_url = data[i]["magnet"]
                size = convert_size(torrent_size, -1)

                if not all([title, download_url]):
                    continue

                item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
                        'leechers': leechers, 'hash': t_hash}

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))

                results.append(item)
            except Exception:
                sickrage.app.log.error("Failed parsing provider.")

        return results
Example 28
    def parse(self, data, mode):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        if not data.startswith('<?xml'):
            sickrage.app.log.info('Expected xml but got something else, is your mirror failing?')
            return results

        with bs4_parser(data) as parser:
            for item in parser('item'):
                try:
                    if item.category and 'tv' not in item.category.get_text(strip=True):
                        continue

                    title = item.title.get_text(strip=True)
                    t_hash = item.guid.get_text(strip=True).rsplit('/', 1)[-1]

                    if not all([title, t_hash]):
                        continue

                    download_url = "magnet:?xt=urn:btih:" + t_hash + "&dn=" + title
                    torrent_size, seeders, leechers = self._split_description(item.find('description').text)
                    size = convert_size(torrent_size, -1)

                    results += [{
                        'title': title,
                        'link': download_url,
                        'size': size,
                        'seeders': seeders,
                        'leechers': leechers,
                        'hash': t_hash
                    }]
                except Exception:
                    sickrage.app.log.error("Failed parsing provider.")

        return results
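
The magnet links in Examples 6, 11 and 28 splice the raw title into the dn parameter. A hedged variant that URL-encodes the display name so spaces or '&' in a title cannot break the URI; this is a suggested hardening, not what the provider code does:

from urllib.parse import quote

def build_magnet(info_hash, title):
    # Same "magnet:?xt=urn:btih:<hash>&dn=<name>" shape as the examples above,
    # but with the display name percent-encoded.
    return 'magnet:?xt=urn:btih:{}&dn={}'.format(info_hash, quote(title))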
Example 29
    def parse(self, data, mode, **kwargs):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        for torrent in data.get('results', []):
            try:
                title = torrent.get('release_name')
                download_url = torrent.get('download_url')
                if not all([title, download_url]):
                    continue

                seeders = torrent.get('seeders')
                leechers = torrent.get('leechers')

                freeleech = torrent.get('freeleech')
                if self.freeleech and not freeleech:
                    continue

                torrent_size = '{} MB'.format(torrent.get('size', -1))
                size = convert_size(torrent_size, -1)

                results += [{
                    'title': title,
                    'link': download_url,
                    'size': size,
                    'seeders': seeders,
                    'leechers': leechers
                }]

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))
            except Exception:
                sickrage.app.log.error('Failed parsing provider')

        return results
Example No. 30
    def parse(self, data, mode):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        if data.get('error'):
            if data.get('error_code') == 4:
                if not self.login(True): return results
                return results
            elif data.get('error_code') == 5:
                return results
            elif data.get('error_code') != 20:
                sickrage.app.log.debug(data['error'])
                return results

        for item in data.get('torrent_results') or []:
            try:
                title = item['title']
                download_url = item['download']
                size = convert_size(item['size'], -1)
                seeders = item['seeders']
                leechers = item['leechers']

                if not all([title, download_url]):
                    continue

                item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders,
                        'leechers': leechers, 'hash': ''}

                if mode != 'RSS':
                    sickrage.app.log.debug("Found result: {}".format(title))
                results.append(item)
            except Exception:
                sickrage.app.log.error("Failed parsing provider")

        return results
Example No. 31
    def parse(self, data, mode, **kwargs):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        keywords = kwargs.pop('keywords', None)

        with bs4_parser(data) as html:
            torrent_table = html.find(id='sortabletable')
            torrent_rows = torrent_table('tr') if torrent_table else []

            if len(torrent_rows) < 2:
                sickrage.app.log.debug(
                    'Data returned from provider does not contain any torrents'
                )
                return results

            labels = [
                label.img['title'] if label.img else label.get_text(strip=True)
                for label in torrent_rows[0]('td')
            ]

            for row in torrent_rows[1:]:
                try:
                    # Skip highlighted torrents
                    if mode == 'RSS' and row.get('class') == ['highlight']:
                        continue

                    title = row.find(class_='tooltip-content')
                    title = title.div.get_text(strip=True) if title else None
                    download_url = row.find(
                        title='Click to Download this Torrent!')
                    download_url = download_url.parent[
                        'href'] if download_url else None
                    if not all([title, download_url]):
                        continue

                    seeders = try_int(
                        row.find(title='Seeders').get_text(strip=True))
                    leechers = try_int(
                        row.find(title='Leechers').get_text(strip=True))

                    # Chop off tracker/channel prefix or we can't parse the result!
                    if mode != 'RSS' and keywords:
                        show_name_first_word = re.search(r'^[^ .]+',
                                                         keywords).group()
                        if not title.startswith(show_name_first_word):
                            title = re.sub(
                                r'.*(' + show_name_first_word + '.*)', r'\1',
                                title)

                    # Change title from Series to Season, or we can't parse
                    if mode == 'Season':
                        title = re.sub(r'(.*)(?i)Series', r'\1Season', title)

                    # Strip year from the end or we can't parse it!
                    title = re.sub(r'(.*)[. ]?\(\d{4}\)', r'\1', title)
                    title = re.sub(r'\s+', r' ', title)

                    torrent_size = row('td')[labels.index('Size')].get_text(
                        strip=True)
                    size = convert_size(torrent_size, -1)

                    results += [{
                        'title': title,
                        'link': download_url,
                        'size': size,
                        'seeders': seeders,
                        'leechers': leechers
                    }]

                    if mode != 'RSS':
                        sickrage.app.log.debug(
                            "Found result: {}".format(title))
                except Exception:
                    sickrage.app.log.error("Failed parsing provider.")

        return results
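
The three regex clean-ups above (chopping a tracker/channel prefix, rewriting 'Series' to 'Season', and stripping a trailing year) are easier to follow on a concrete string. A standalone demo with a made-up title; it uses flags=re.IGNORECASE instead of the mid-pattern (?i) from the original, which newer Python versions reject.

import re

title = 'TrackerName: Show Name Series 2 (2019) 720p HDTV'
keywords = 'Show Name Series 2'

show_name_first_word = re.search(r'^[^ .]+', keywords).group()   # 'Show'
if not title.startswith(show_name_first_word):
    title = re.sub(r'.*(' + show_name_first_word + '.*)', r'\1', title)

title = re.sub(r'(.*)Series', r'\1Season', title, flags=re.IGNORECASE)
title = re.sub(r'(.*)[. ]?\(\d{4}\)', r'\1', title)
title = re.sub(r'\s+', r' ', title)

print(title)  # Show Name Season 2 720p HDTV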
Example No. 32
    def search(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self.login():
            return results

        for mode in search_strings.keys():
            sickrage.srCore.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_strings[mode]:

                if mode != 'RSS':
                    searchURL = self.urls['search'] % (urllib.quote_plus(search_string), self.categories)
                else:
                    searchURL = self.urls['rss'] % self.categories

                sickrage.srCore.srLogger.debug("Search URL: %s" % searchURL)
                if mode != 'RSS':
                    sickrage.srCore.srLogger.debug("Search string: %s" % search_string)

                try:
                    data = sickrage.srCore.srWebSession.get(searchURL, cache=False).text
                except Exception:
                    sickrage.srCore.srLogger.debug("No data returned from provider")
                    continue

                # Search result page contains some invalid html that prevents html parser from returning all data.
                # We cut everything before the table that contains the data we are interested in thus eliminating
                # the invalid html portions
                try:
                    index = data.lower().index('<table class="mainblockcontenttt"')
                except ValueError:
                    sickrage.srCore.srLogger.error("Could not find table of torrents mainblockcontenttt")
                    continue

                data = urllib.unquote(data[index:].encode('utf-8')).decode('utf-8').replace('\t', '')

                with bs4_parser(data) as html:
                    if not html:
                        sickrage.srCore.srLogger.debug("No html data parsed from provider")
                        continue

                    empty = html.find('No torrents here')
                    if empty:
                        sickrage.srCore.srLogger.debug("Data returned from provider does not contain any torrents")
                        continue

                    tables = html.find('table', attrs={'class': 'mainblockcontenttt'})
                    if not tables:
                        sickrage.srCore.srLogger.error("Could not find table of torrents mainblockcontenttt")
                        continue

                    torrents = tables.findChildren('tr')
                    if not torrents:
                        continue

                    # Skip column headers
                    for result in torrents[1:]:
                        try:
                            cells = result.findChildren('td', attrs={
                                'class': re.compile(r'(green|yellow|red|mainblockcontent)')})
                            if not cells:
                                continue

                            title = download_url = seeders = leechers = size = None
                            for cell in cells:
                                try:
                                    if None is title and cell.get('title') and cell.get('title') in 'Download':
                                        title = re.search('f=(.*).torrent', cell.a['href']).group(1).replace('+', '.')
                                        title = title.decode('utf-8')
                                        download_url = self.urls['home'] % cell.a['href']
                                        continue
                                    if None is seeders and cell.get('class')[0] and cell.get('class')[
                                        0] in 'green' 'yellow' 'red':
                                        seeders = int(cell.text)
                                        if not seeders:
                                            seeders = 1
                                            continue
                                    elif None is leechers and cell.get('class')[0] and cell.get('class')[
                                        0] in 'green' 'yellow' 'red':
                                        leechers = int(cell.text)
                                        if not leechers:
                                            leechers = 0
                                            continue

                                    # Need size for failed downloads handling
                                    if size is None:
                                        if re.match(r'[0-9]+,?\.?[0-9]* [KkMmGg]+[Bb]+', cell.text):
                                            size = convert_size(cell.text)
                                            if not size:
                                                size = -1

                                except Exception:
                                    sickrage.srCore.srLogger.error(
                                        "Failed parsing provider. Traceback: %s" % traceback.format_exc())

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode != 'RSS':
                                    sickrage.srCore.srLogger.debug(
                                        "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
                                            title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode != 'RSS':
                                sickrage.srCore.srLogger.debug("Found result: %s " % title)

                            items[mode].append(item)

                        except (AttributeError, TypeError, KeyError, ValueError):
                            continue

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Example No. 33
    def parse(self, data, mode):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        with bs4_parser(data) as html:
            torrent_table = html.find("table", border="1")
            torrent_rows = torrent_table("tr") if torrent_table else []

            # Continue only if at least one Release is found
            if len(torrent_rows) < 2:
                sickrage.app.log.debug(
                    "Data returned from provider does not contain any torrents"
                )
                return results

            # "Type", "Name", Files", "Comm.", "Added", "TTL", "Size", "Snatched", "Seeders", "Leechers"
            labels = [
                label.get_text(strip=True) for label in torrent_rows[0]("td")
            ]

            for result in torrent_rows[1:]:
                try:
                    cells = result("td")

                    link = cells[labels.index("Name")].find(
                        "a", href=re.compile(r"download.php\?id="))["href"]
                    download_url = urljoin(self.urls['base_url'], link)

                    title_element = cells[labels.index("Name")].find(
                        "a", href=re.compile(r"details.php\?id="))
                    title = title_element.get(
                        "title", "") or title_element.get_text(strip=True)
                    if not all([title, download_url]):
                        continue

                    if self.freeleech:
                        # Free leech torrents are marked with green [F L] in the title (i.e. <font color=green>[F&nbsp;L]</font>)
                        freeleech = cells[labels.index("Name")].find(
                            "font", color="green")
                        if not freeleech or freeleech.get_text(
                                strip=True) != "[F\xa0L]":
                            continue

                    seeders = try_int(
                        cells[labels.index("Seeders")].get_text(strip=True))
                    leechers = try_int(
                        cells[labels.index("Leechers")].get_text(strip=True))
                    torrent_size = cells[labels.index("Size")].get_text(
                        strip=True)
                    size = convert_size(torrent_size, -1)

                    results += [{
                        'title': title,
                        'link': download_url,
                        'size': size,
                        'seeders': seeders,
                        'leechers': leechers
                    }]

                    if mode != "RSS":
                        sickrage.app.log.debug(
                            "Found result: {}".format(title))
                except Exception:
                    sickrage.app.log.error("Failed parsing provider.")

        return results
Example No. 34
    def search(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self.login():
            return results

        if epObj is not None:
            ep_indexerid = epObj.show.indexerid
            ep_indexer = epObj.show.indexer
        else:
            ep_indexerid = None
            ep_indexer = None

        for mode in search_params.keys():  # Mode = RSS, Season, Episode
            sickrage.srCore.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode != 'RSS':
                    sickrage.srCore.srLogger.debug("Search string: %s " % search_string)

                if mode == 'RSS':
                    searchURL = self.urls['listing'] + self.defaultOptions
                elif mode == 'Season':
                    if ep_indexer == INDEXER_TVDB:
                        searchURL = self.urls['search_tvdb'] % (ep_indexerid, search_string) + self.defaultOptions
                    else:
                        searchURL = self.urls['search'] % (search_string) + self.defaultOptions
                elif mode == 'Episode':
                    if ep_indexer == INDEXER_TVDB:
                        searchURL = self.urls['search_tvdb'] % (ep_indexerid, search_string) + self.defaultOptions
                    else:
                        searchURL = self.urls['search'] % (search_string) + self.defaultOptions
                else:
                    sickrage.srCore.srLogger.error("Invalid search mode: %s " % mode)

                if self.minleech:
                    searchURL += self.urlOptions['leechers'].format(min_leechers=int(self.minleech))

                if self.minseed:
                    searchURL += self.urlOptions['seeders'].format(min_seeders=int(self.minseed))

                if self.sorting:
                    searchURL += self.urlOptions['sorting'].format(sorting=self.sorting)

                if self.ranked:
                    searchURL += self.urlOptions['ranked'].format(ranked=int(self.ranked))

                sickrage.srCore.srLogger.debug("Search URL: %s" % searchURL)

                try:
                    retry = 3
                    while retry > 0:
                        time_out = 0
                        while (datetime.datetime.now() < self.next_request) and time_out <= 15:
                            time_out += 1
                            time.sleep(1)

                        self.next_request = datetime.datetime.now() + datetime.timedelta(seconds=10)

                        try:
                            data = sickrage.srCore.srWebSession.get(searchURL + self.urlOptions['token'].format(token=self.token)).text
                        except Exception:
                            sickrage.srCore.srLogger.debug("No data returned from provider")
                            raise StopIteration

                        if re.search('ERROR', data):
                            sickrage.srCore.srLogger.debug("Error returned from provider")
                            raise StopIteration
                        if re.search('No results found', data):
                            sickrage.srCore.srLogger.debug("No results found")
                            raise StopIteration
                        if re.search('Invalid token set!', data):
                            sickrage.srCore.srLogger.warning("Invalid token!")
                            return results
                        if re.search('Too many requests per minute. Please try again later!', data):
                            sickrage.srCore.srLogger.warning("Too many requests per minute")
                            retry -= 1
                            time.sleep(10)
                            continue
                        if re.search('Cant find search_tvdb in database. Are you sure this imdb exists?', data):
                            sickrage.srCore.srLogger.warning(
                                "No results found. The tvdb id: %s do not exist on provider" % ep_indexerid)
                            raise StopIteration
                        if re.search('Invalid token. Use get_token for a new one!', data):
                            sickrage.srCore.srLogger.debug("Invalid token, retrieving new token")
                            retry -= 1
                            self.token = None
                            self.tokenExpireDate = None
                            if not self.login():
                                sickrage.srCore.srLogger.debug("Failed retrieving new token")
                                return results
                            sickrage.srCore.srLogger.debug("Using new token")
                            continue

                        # No error found break
                        break
                    else:
                        sickrage.srCore.srLogger.debug("Retried 3 times without getting results")
                        continue
                except StopIteration:
                    continue

                try:
                    data = re.search(r'\[\{\"title\".*\}\]', data)
                    if data is not None:
                        data_json = json.loads(data.group())
                    else:
                        data_json = {}
                except Exception:
                    sickrage.srCore.srLogger.error("JSON load failed: %s" % traceback.format_exc())
                    sickrage.srCore.srLogger.debug("JSON load failed. Data dump: %s" % data)
                    continue

                try:
                    for item in data_json:
                        try:
                            title = item['title']
                            download_url = item['download']
                            size = convert_size(item['size'])
                            seeders = item['seeders']
                            leechers = item['leechers']
                            # pubdate = item['pubdate']

                            if not all([title, download_url]):
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode != 'RSS':
                                sickrage.srCore.srLogger.debug("Found result: %s " % title)
                            items[mode].append(item)

                        except Exception:
                            sickrage.srCore.srLogger.debug("Skipping invalid result. JSON item: {}".format(item))

                except Exception:
                    sickrage.srCore.srLogger.error("Failed parsing provider. Traceback: %s" % traceback.format_exc())

            # For each search mode sort all the items by seeders
            items[mode].sort(key=lambda tup: tup[3], reverse=True)
            results += items[mode]

        return results
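
The retry logic above leans on Python's while/else: the else branch only runs when the loop condition becomes false without a break, i.e. when all three retries were used up. A tiny standalone illustration of that control flow:

retry = 3
while retry > 0:
    success = False   # pretend every attempt fails
    if success:
        break
    retry -= 1
else:
    print('Retried 3 times without getting results')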
Example No. 35
    def search(self,
               search_strings,
               search_mode='eponly',
               epcount=0,
               age=0,
               epObj=None):
        results = []
        """
        205 = SD, 208 = HD, 200 = All Videos
        https://pirateproxy.pl/s/?q=Game of Thrones&type=search&orderby=7&page=0&category=200
        """
        search_params = {
            "q": "",
            "type": "search",
            "orderby": 7,
            "page": 0,
            "category": 200
        }

        items = {'Season': [], 'Episode': [], 'RSS': []}

        for mode in search_strings.keys():
            sickrage.srCore.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_strings[mode]:

                search_params.update({'q': search_string.strip()})

                if mode != 'RSS':
                    sickrage.srCore.srLogger.debug("Search string: " +
                                                   search_string)

                searchURL = self.urls[('search', 'rss')[
                    mode == 'RSS']] + '?' + urlencode(search_params)
                sickrage.srCore.srLogger.debug("Search URL: %s" % searchURL)

                try:
                    data = sickrage.srCore.srWebSession.get(searchURL,
                                                            cache=False).text
                except Exception:
                    sickrage.srCore.srLogger.debug(
                        "No data returned from provider")
                    continue

                matches = re.compile(self.re_title_url,
                                     re.DOTALL).finditer(data)
                for torrent in matches:
                    title = torrent.group('title')
                    download_url = torrent.group('url')
                    size = convert_size(torrent.group('size'))
                    seeders = int(torrent.group('seeders'))
                    leechers = int(torrent.group('leechers'))

                    if not all([title, download_url]):
                        continue

                    # Filter unseeded torrent
                    if seeders < self.minseed or leechers < self.minleech:
                        if mode != 'RSS':
                            sickrage.srCore.srLogger.debug(
                                "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})"
                                .format(title, seeders, leechers))
                        continue

                    # Accept Torrent only from Good People for every Episode Search
                    if self.confirmed and re.search(
                            r'(VIP|Trusted|Helper|Moderator)',
                            torrent.group(0)) is None:
                        if mode != 'RSS':
                            sickrage.srCore.srLogger.debug(
                                "Found result %s but that doesn't seem like a trusted result so I'm ignoring it"
                                % title)
                        continue

                    item = title, download_url, size, seeders, leechers
                    if mode != 'RSS':
                        sickrage.srCore.srLogger.debug("Found result: %s " %
                                                       title)

                    items[mode].append(item)

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
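
The searchURL line above uses a small idiom: indexing the tuple ('search', 'rss') with a boolean, so False picks 'search' and True picks 'rss'. A standalone equivalent with made-up URLs (the original Python 2 code imports urlencode from urllib):

from urllib.parse import urlencode

urls = {'search': 'https://tracker.example/s/', 'rss': 'https://tracker.example/rss/'}
search_params = {'q': 'Show Name S01E01', 'page': 0}

for mode in ('Episode', 'RSS'):
    searchURL = urls[('search', 'rss')[mode == 'RSS']] + '?' + urlencode(search_params)
    print(mode, searchURL)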
Example No. 36
    def search(self,
               search_strings,
               search_mode='eponly',
               epcount=0,
               age=0,
               epObj=None):
        results = []

        for mode in search_strings:
            items = []
            sickrage.srCore.srLogger.debug('Search Mode: {}'.format(mode))
            for search_string in search_strings[mode]:
                search_url = self.urls['feed']
                if mode != 'RSS':
                    sickrage.srCore.srLogger.debug(
                        'Search string: {}'.format(search_string))

                try:
                    data = sickrage.srCore.srWebSession.get(search_url,
                                                            params={
                                                                'f':
                                                                search_string
                                                            },
                                                            cache=False).text
                except Exception:
                    sickrage.srCore.srLogger.debug(
                        'No data returned from provider')
                    continue

                if not data.startswith('<?xml'):
                    sickrage.srCore.srLogger.info(
                        'Expected xml but got something else, is your mirror failing?'
                    )
                    continue

                with bs4_parser(data) as parser:
                    for item in parser('item'):
                        if item.category and 'tv' not in item.category.get_text(
                                strip=True):
                            continue

                        title = item.title.get_text(strip=True)
                        t_hash = item.guid.get_text(strip=True).rsplit('/',
                                                                       1)[-1]

                        if not all([title, t_hash]):
                            continue

                        download_url = "magnet:?xt=urn:btih:" + t_hash + "&dn=" + title
                        torrent_size, seeders, leechers = self._split_description(
                            item.find('description').text)
                        size = convert_size(torrent_size) or -1

                        # Filter unseeded torrent
                        if seeders < self.minseed or leechers < self.minleech:
                            if mode != 'RSS':
                                sickrage.srCore.srLogger.debug(
                                    "Discarding torrent because it doesn't meet the minimum seeders or leechers: {} (S:{} L:{})"
                                    .format(title, seeders, leechers))
                            continue

                        items += [{
                            'title': title,
                            'link': download_url,
                            'size': size,
                            'seeders': seeders,
                            'leechers': leechers,
                            'hash': t_hash
                        }]

            # For each search mode sort all the items by seeders if available
            items.sort(key=lambda d: int(d.get('seeders', 0)), reverse=True)
            results += items

        return results
Example No. 37
    def parse(self, data, mode, **kwargs):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        with bs4_parser(data, "html5lib") as html:
            torrent_rows = html.find_all("div", class_="torrentrow")

            # Continue only if at least one Release is found
            if not torrent_rows:
                sickrage.app.log.debug(
                    "Data returned from provider does not contain any torrents"
                )
                return results

            # "Type", "Name", "Download", "Files", "Comments", "Added", "Size", "Snatched", "Seeders", "Leechers", "Upped by"
            labels = []

            columns = html.find_all("div", class_="colhead")
            for index, column in enumerate(columns):
                lbl = column.get_text(strip=True)
                if lbl:
                    labels.append(str(lbl))
                else:
                    lbl = column.find("img")
                    if lbl:
                        if lbl.has_attr("alt"):
                            lbl = lbl['alt']
                            labels.append(str(lbl))
                    else:
                        if index == 3:
                            lbl = "Download"
                        else:
                            lbl = str(index)
                        labels.append(lbl)

            # Skip column headers
            for result in torrent_rows:
                try:
                    cells = result.find_all("div", class_="torrenttable")
                    if len(cells) < len(labels):
                        continue

                    title = cells[labels.index("Name")].find("a").find(
                        "b").get_text(strip=True)
                    download_url = urljoin(
                        self.urls['base_url'],
                        cells[labels.index("Download")].find("a")["href"])
                    if not all([title, download_url]):
                        continue

                    seeders = try_int(
                        cells[labels.index("Seeders")].find("span").get_text(
                            strip=True))
                    leechers = try_int(
                        cells[labels.index("Leechers")].find("span").get_text(
                            strip=True))

                    torrent_size = cells[labels.index("Size")].find(
                        "span").get_text(strip=True)
                    size = convert_size(torrent_size, -1)

                    results += [{
                        'title': title,
                        'link': download_url,
                        'size': size,
                        'seeders': seeders,
                        'leechers': leechers
                    }]

                    if mode != "RSS":
                        sickrage.app.log.debug(
                            "Found result: {}".format(title))
                except Exception:
                    sickrage.app.log.error("Failed parsing provider")

        return results
Example No. 38
    def search(self,
               search_strings,
               search_mode='eponly',
               epcount=0,
               age=0,
               epObj=None):
        results = []

        search_params = {
            'cID': 0,
            'tLang': 0,
            'oBy': 0,
            'oMode': 0,
            'category_': 767,
            'subcategory_': 'All',
            'idioma_': 1,
            'calidad_': 'All',
            'oByAux': 0,
            'oModeAux': 0,
            'size_': 0,
            'btnb': 'Filtrar+Busqueda',
            'q': ''
        }

        items = {'Season': [], 'Episode': [], 'RSS': []}

        lang_info = '' if not epObj or not epObj.show else epObj.show.lang

        # Only search if user conditions are true
        if self.onlyspasearch and lang_info != 'es':
            sickrage.srCore.srLogger.debug(
                "Show info is not spanish, skipping provider search")
            return results

        for mode in search_strings.keys():
            sickrage.srCore.srLogger.debug("Search Mode: %s" % mode)

            for search_string in search_strings[mode]:
                search_params.update({'q': search_string.strip()})

                sickrage.srCore.srLogger.debug("Search URL: %s" %
                                               self.urls['search'] + '?' +
                                               urlencode(search_params))

                try:
                    data = sickrage.srCore.srWebSession.post(
                        self.urls['search'], data=search_params,
                        timeout=30).text
                except Exception:
                    continue

                try:
                    with bs4_parser(data) as html:
                        torrent_tbody = html.find('tbody')

                        if len(torrent_tbody) < 1:
                            sickrage.srCore.srLogger.debug(
                                "Data returned from provider does not contain any torrents"
                            )
                            continue

                        torrent_table = torrent_tbody.findAll('tr')
                        num_results = len(torrent_table) - 1

                        iteration = 0
                        for row in torrent_table:
                            try:
                                if iteration < num_results:
                                    torrent_size = row.findAll('td')[2]
                                    torrent_row = row.findAll('a')[1]

                                    download_url = torrent_row.get('href')
                                    title_raw = torrent_row.get('title')
                                    size = convert_size(torrent_size.text)

                                    title = self._processTitle(title_raw)

                                    item = title, download_url, size
                                    sickrage.srCore.srLogger.debug(
                                        "Found result: %s " % title)

                                    items[mode].append(item)
                                    iteration += 1

                            except (AttributeError, TypeError):
                                continue

                except Exception:
                    sickrage.srCore.srLogger.warning(
                        "Failed parsing provider. Traceback: %s" %
                        traceback.format_exc())

            results += items[mode]

        return results
Example No. 39
    def search(self,
               search_strings,
               search_mode='eponly',
               epcount=0,
               age=0,
               epObj=None):

        results = []

        if not self._doLogin():
            return results

        items = {'Season': [], 'Episode': [], 'RSS': []}

        for mode in search_strings.keys():
            if mode != 'RSS':
                sickrage.srCore.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_strings[mode]:
                if mode != 'RSS':
                    sickrage.srCore.srLogger.debug("Search string: %s " %
                                                   search_string)

                searchURL = self.urls['search'] % (
                    urllib.quote(search_string), self.categories[search_mode])
                sickrage.srCore.srLogger.debug("Search URL: %s" % searchURL)

                try:
                    data = sickrage.srCore.srWebSession.get(searchURL).text
                except Exception:
                    sickrage.srCore.srLogger.debug(
                        "No data returned from provider")
                    continue

                with bs4_parser(data) as html:
                    torrent_table = html.find('table',
                                              attrs={'id': 'torrents-table'})
                    torrent_rows = torrent_table.find_all(
                        'tr') if torrent_table else []

                    # Continue only if at least one Release is found
                    if len(torrent_rows) < 2:
                        sickrage.srCore.srLogger.debug(
                            "Data returned from provider does not contain any torrents"
                        )
                        continue

                    for result in torrent_table.find_all('tr')[1:]:

                        try:
                            link = result.find('td',
                                               attrs={
                                                   'class': 'ttr_name'
                                               }).find('a')
                            url = result.find('td', attrs={
                                'class': 'td_dl'
                            }).find('a')

                            title = link.string
                            if re.search(r'\.\.\.', title):
                                data = sickrage.srCore.srWebSession.get(
                                    self.urls['base_url'] + "/" +
                                    link['href']).text
                                with bs4_parser(data) as details_html:
                                    title = re.search(
                                        '(?<=").+(?<!")',
                                        details_html.title.string).group(0)
                            download_url = self.urls['download'] % url['href']
                            seeders = int(
                                result.find('td',
                                            attrs={
                                                'class': 'ttr_seeders'
                                            }).string)
                            leechers = int(
                                result.find('td',
                                            attrs={
                                                'class': 'ttr_leechers'
                                            }).string)
                            size = convert_size(
                                result.find('td', attrs={
                                    'class': 'ttr_size'
                                }).contents[0])
                        except Exception:
                            continue

                        if not all([title, download_url]):
                            continue

                        # Filter unseeded torrent
                        if seeders < self.minseed or leechers < self.minleech:
                            if mode != 'RSS':
                                sickrage.srCore.srLogger.debug(
                                    "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})"
                                    .format(title, seeders, leechers))
                            continue

                        item = title, download_url, size, seeders, leechers
                        if mode != 'RSS':
                            sickrage.srCore.srLogger.debug(
                                "Found result: %s " % title)

                        items[mode].append(item)

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Example No. 40
    def search(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self.login():
            return results

        for mode in search_params.keys():
            sickrage.srCore.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode != 'RSS':
                    sickrage.srCore.srLogger.debug("Search string: %s " % search_string)

                searchURL = self.urls['search'] % (self.categories, search_string)
                sickrage.srCore.srLogger.debug("Search URL: %s" % searchURL)

                # Set cookies from response
                # Returns top 30 results by default, expandable in user profile

                try:
                    data = sickrage.srCore.srWebSession.get(searchURL, cookies=self.cookies, cache=False).text
                except Exception:
                    sickrage.srCore.srLogger.debug("No data returned from provider")
                    continue

                try:
                    with bs4_parser(data) as html:
                        torrent_table = html.find("div", id="torrentBrowse")
                        torrent_rows = torrent_table.findChildren("tr") if torrent_table else []

                        # Continue only if at least one release is found
                        if len(torrent_rows) < 1:
                            sickrage.srCore.srLogger.debug("Data returned from provider does not contain any torrents")
                            continue

                        for result in torrent_rows[1:]:
                            cells = result.findChildren("td")
                            title = cells[1].find("a").find_next("a")
                            link = cells[3].find("a")
                            shares = cells[8].get_text().split("/", 1)
                            torrent_size = cells[7].get_text().split("/", 1)[0]

                            try:
                                if title.has_key('title'):
                                    title = title['title']
                                else:
                                    title = cells[1].find("a")['title']

                                download_url = self.urls['download'] % (link['href'])
                                seeders = int(shares[0])
                                leechers = int(shares[1])

                                size = -1
                                if re.match(r"\d+([,\.]\d+)?\s*[KkMmGgTt]?[Bb]", torrent_size):
                                    size = convert_size(torrent_size.rstrip())

                            except (AttributeError, TypeError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode != 'RSS':
                                    sickrage.srCore.srLogger.debug(
                                        "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
                                            title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode != 'RSS':
                                sickrage.srCore.srLogger.debug("Found result: %s " % title)

                            items[mode].append(item)

                except Exception as e:
                    sickrage.srCore.srLogger.error("Failed parsing provider. Traceback: %s" % traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Example No. 41
    def parse(self, data, mode, **kwargs):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        with bs4_parser(data) as html:
            table_body = html.find('tbody')

            # Continue only if at least one release is found
            if not table_body:
                sickrage.app.log.debug(
                    'Data returned from provider does not contain any torrents'
                )
                return results

            for row in table_body('tr'):
                cells = row('td')
                if len(cells) < 4:
                    continue

                try:
                    info_cell = cells[0].a
                    title = info_cell.get_text()
                    download_url = self._get_download_link(
                        urljoin(self.urls['base_url'], info_cell.get('href')))
                    if not all([title, download_url]):
                        continue

                    title = '{name} {codec}'.format(name=title, codec='x264')

                    if self.custom_url:
                        if not validate_url(self.custom_url):
                            sickrage.app.log.warning(
                                "Invalid custom url: {}".format(
                                    self.custom_url))
                            return results
                        download_url = urljoin(
                            self.custom_url,
                            download_url.split(self.urls['base_url'])[1])

                    seeders = try_int(cells[2].get_text(strip=True))
                    leechers = try_int(cells[3].get_text(strip=True))

                    torrent_size = cells[1].get_text()
                    size = convert_size(torrent_size, -1,
                                        ['O', 'KO', 'MO', 'GO', 'TO', 'PO'])

                    results += [{
                        'title': title,
                        'link': download_url,
                        'size': size,
                        'seeders': seeders,
                        'leechers': leechers
                    }]

                    if mode != 'RSS':
                        sickrage.app.log.debug(
                            "Found result: {}".format(title))
                except Exception:
                    sickrage.app.log.error("Failed parsing provider")

        return results
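
This example passes a custom units list to convert_size because the site reports sizes with French octet suffixes (Ko/Mo/Go rather than KB/MB/GB). A rough standalone sketch of that kind of conversion, purely for illustration; this is not SickRage's convert_size implementation.

def convert_size_sketch(size, default=-1, units=('B', 'KB', 'MB', 'GB', 'TB', 'PB')):
    # Hypothetical stand-in: map a string like '701.5 MO' with French units to bytes.
    try:
        value, unit = size.strip().split()
        return int(float(value.replace(',', '.')) * 1024 ** list(units).index(unit.upper()))
    except (ValueError, AttributeError):
        return default

print(convert_size_sketch('701.5 MO', -1, ['O', 'KO', 'MO', 'GO', 'TO', 'PO']))
# 735576064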
Example No. 42
    def search(self,
               search_strings,
               search_mode='eponly',
               epcount=0,
               age=0,
               epObj=None):
        results = []

        for mode in search_strings:
            items = []
            sickrage.srCore.srLogger.debug('Search Mode: {}'.format(mode))
            for search_string in search_strings[mode]:
                if mode != 'RSS':
                    sickrage.srCore.srLogger.debug(
                        'Search string: {}'.format(search_string))

                if mode == 'RSS':
                    search_url = self.urls['series']
                else:
                    search_url = urljoin(self.urls['base_url'], search_string)

                try:
                    data = sickrage.srCore.srWebSession.get(search_url,
                                                            cache=False).text
                except Exception:
                    sickrage.srCore.srLogger.debug(
                        'No data returned from provider')
                    continue

                with bs4_parser(data) as html:
                    torrent_rows = html.find_all('tr')
                    for row in torrent_rows:
                        for torrent in row.find_all('td'):
                            for link in torrent.find_all('a'):
                                fileType = ''.join(
                                    link.find_previous('i')["class"])
                                fileType = unicodedata.normalize('NFKD', fileType). \
                                    encode(sickrage.SYS_ENCODING, 'ignore')

                                if fileType == "Series":
                                    title = link.get_text(strip=True)
                                    download_url = self.get_download_url(
                                        link.get('href'))

                                    if not all([title, download_url]):
                                        continue

                                    # size
                                    size = convert_size(
                                        link.findNext('td').text) or -1

                                    # Filter unseeded torrent
                                    seeders = tryInt(
                                        link.find_next(
                                            'img', alt='seeders').parent.text,
                                        0)
                                    leechers = tryInt(
                                        link.find_next(
                                            'img', alt='leechers').parent.text,
                                        0)

                                    if seeders < self.minseed or leechers < self.minleech:
                                        if mode != 'RSS':
                                            sickrage.srCore.srLogger.debug(
                                                "Discarding torrent because it doesn't meet the minimum seeders or leechers: {} (S:{} L:{})"
                                                .format(
                                                    title, seeders, leechers))
                                        continue

                                    items += [{
                                        'title': title,
                                        'link': download_url,
                                        'size': size,
                                        'seeders': seeders,
                                        'leechers': leechers,
                                    }]

                                    if mode != 'RSS':
                                        sickrage.srCore.srLogger.debug(
                                            "Found result: {}".format(title))

            # For each search mode sort all the items by seeders if available
            items.sort(key=lambda d: int(d.get('seeders', 0)), reverse=True)
            results += items

        return results
Example No. 43
    def parse(self, data, mode, **kwargs):
        results = []

        with bs4_parser(data) as html:
            if not self._check_auth_from_data(html):
                return results

            try:
                self.torznab = 'xmlns:torznab' in html.rss.attrs
            except AttributeError:
                self.torznab = False

            if not html('item'):
                sickrage.app.log.debug('No results returned from provider. Check chosen Newznab '
                                       'search categories in provider settings and/or usenet '
                                       'retention')
                return results

            for item in html('item'):
                try:
                    title = item.title.get_text(strip=True)
                    download_url = None
                    if item.link:
                        url = item.link.get_text(strip=True)
                        if validate_url(url) or url.startswith('magnet'):
                            download_url = url

                        if not download_url:
                            url = item.link.next.strip()
                            if validate_url(url) or url.startswith('magnet'):
                                download_url = url

                    if not download_url and item.enclosure:
                        url = item.enclosure.get('url', '').strip()
                        if validate_url(url) or url.startswith('magnet'):
                            download_url = url

                    if not (title and download_url):
                        continue

                    seeders = leechers = -1
                    if 'gingadaddy' in self.urls['base_url']:
                        size_regex = re.search(r'\d*.?\d* [KMGT]B', str(item.description))
                        item_size = size_regex.group() if size_regex else -1
                    else:
                        item_size = item.size.get_text(strip=True) if item.size else -1

                        newznab_attrs = item(re.compile('newznab:attr'))
                        torznab_attrs = item(re.compile('torznab:attr'))
                        for attr in newznab_attrs + torznab_attrs:
                            item_size = attr['value'] if attr['name'] == 'size' else item_size
                            seeders = try_int(attr['value']) if attr['name'] == 'seeders' else seeders
                            peers = try_int(attr['value']) if attr['name'] == 'peers' else None
                            leechers = peers - seeders if peers else leechers

                    if not item_size or (self.torznab and (seeders is -1 or leechers is -1)):
                        continue

                    size = convert_size(item_size, -1)

                    results += [
                        {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers}
                    ]

                    if mode != 'RSS':
                        sickrage.app.log.debug('Found result: {}'.format(title))
                except (AttributeError, TypeError, KeyError, ValueError, IndexError):
                    sickrage.app.log.error('Failed parsing provider')

        return results
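
In the newznab/torznab branch above, size, seeders and peers arrive as newznab:attr and torznab:attr child elements of each item. A standalone illustration with a made-up snippet, using BeautifulSoup directly rather than the project's bs4_parser wrapper, and plain int() in place of try_int():

import re
from bs4 import BeautifulSoup  # assumes beautifulsoup4 is installed

snippet = '''
<item>
  <title>Show.Name.S01E01.720p</title>
  <newznab:attr name="size" value="734003200"/>
  <torznab:attr name="seeders" value="12"/>
  <torznab:attr name="peers" value="15"/>
</item>
'''

item = BeautifulSoup(snippet, 'html.parser').find('item')

item_size, seeders, leechers = -1, -1, -1
for attr in item(re.compile('newznab:attr')) + item(re.compile('torznab:attr')):
    item_size = attr['value'] if attr['name'] == 'size' else item_size
    seeders = int(attr['value']) if attr['name'] == 'seeders' else seeders
    peers = int(attr['value']) if attr['name'] == 'peers' else None
    leechers = peers - seeders if peers else leechers

print(item_size, seeders, leechers)  # 734003200 12 3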
Example No. 44
    def parse(self, data, mode, **kwargs):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        def process_column_header(th):
            text = ""
            if th.a:
                text = th.a.get_text(strip=True)
            if not text:
                text = th.get_text(strip=True)
            return text

        with bs4_parser(data) as html:
            torrent_table = html.find("table", id="searchResult")
            torrent_rows = torrent_table("tr") if torrent_table else []

            # Continue only if at least one Release is found
            if len(torrent_rows) < 2:
                sickrage.app.log.debug("Data returned from provider does not contain any torrents")
                return results

            labels = [process_column_header(label) for label in torrent_rows[0]("th")]

            # Skip column headers
            for row in torrent_rows[1:]:
                cells = row('td')
                if len(cells) < len(labels):
                    continue

                try:
                    title = row.find(class_='detName')
                    title = title.get_text(strip=True) if title else None
                    download_url = row.find(title='Download this torrent using magnet')
                    download_url = download_url.get('href') if download_url else None
                    if download_url and 'magnet:?' not in download_url:
                        try:
                            details_url = urljoin(self.custom_settings['custom_url'] or self.urls['base_url'], download_url)
                            details_data = self.session.get(details_url).text
                        except Exception:
                            sickrage.app.log.debug('Invalid ThePirateBay proxy please try another one')
                            continue

                        with bs4_parser(details_data) as details:
                            download_url = details.find(title='Get this torrent')
                            download_url = download_url.get('href') if download_url else None
                            if download_url and 'magnet:?' not in download_url:
                                sickrage.app.log.debug('Invalid ThePirateBay proxy please try another one')
                                continue
                    if not all([title, download_url]):
                        continue

                    seeders = try_int(cells[labels.index("SE")].get_text(strip=True))
                    leechers = try_int(cells[labels.index("LE")].get_text(strip=True))

                    # Accept Torrent only from Good People for every Episode Search
                    if self.custom_settings['confirmed'] and not row.find(alt=re.compile(r"VIP|Trusted")):
                        if mode != "RSS":
                            sickrage.app.log.debug(
                                "Found result: {0} but that doesn't seem like a trusted result so I'm "
                                "ignoring it".format(title))
                        continue

                    # Convert size after all possible skip scenarios
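                    # detDesc typically reads like 'Uploaded 06-13 2017, Size 1.2 GiB, ULed by user';
                    # keep only the size part and normalize it to '<number> <unit>'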
                    torrent_size = cells[labels.index('Name')].find(class_='detDesc')
                    torrent_size = torrent_size.get_text(strip=True).split(', ')[1]
                    torrent_size = re.sub(r'Size ([\d.]+).+([KMGT]iB)', r'\1 \2', torrent_size)
                    size = convert_size(torrent_size, -1, ['B', 'KIB', 'MIB', 'GIB', 'TIB', 'PIB'])

                    results += [
                        {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers}
                    ]

                    if mode != 'RSS':
                        sickrage.app.log.debug("Found result: {}".format(title))
                except Exception:
                    sickrage.app.log.error("Failed parsing provider")

        return results
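
The header-label lookup above keeps the parser working even if the site reorders its columns. Below is a minimal, self-contained sketch of the same pattern; the markup is made up for illustration and is not the provider's real page.

from bs4 import BeautifulSoup

# Made-up markup; only the structure (header row + data rows) matters here
SAMPLE = """
<table id="searchResult">
  <tr><th><a href="#">Type</a></th><th>Name</th><th>SE</th><th>LE</th></tr>
  <tr><td>TV</td><td class="detName">Some.Show.S01E01</td><td>12</td><td>3</td></tr>
</table>
"""

def header_text(th):
    # Prefer the link text inside the header cell, fall back to the cell text
    text = th.a.get_text(strip=True) if th.a is not None else ''
    return text or th.get_text(strip=True)

soup = BeautifulSoup(SAMPLE, 'html.parser')
rows = soup.find('table', id='searchResult')('tr')
labels = [header_text(th) for th in rows[0]('th')]

for row in rows[1:]:
    cells = row('td')
    # Cells are addressed by column label, so a shifted column layout still parses
    print(cells[labels.index('SE')].get_text(strip=True),
          cells[labels.index('LE')].get_text(strip=True))
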
Example #45
0
    def search(self,
               search_params,
               search_mode='eponly',
               epcount=0,
               age=0,
               epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        freeleech = '&free=on' if self.freeleech else ''

        if not self.login():
            return results

        for mode in search_params.keys():
            sickrage.srCore.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode != 'RSS':
                    sickrage.srCore.srLogger.debug("Search string: %s " %
                                                   search_string)

                # URL with 50 tv-show results, or max 150 if adjusted in IPTorrents profile
                searchURL = self.urls['search'] % (self.categories, freeleech,
                                                   search_string)
                searchURL += ';o=seeders' if mode != 'RSS' else ''
                sickrage.srCore.srLogger.debug("Search URL: %s" % searchURL)

                try:
                    data = sickrage.srCore.srWebSession.get(searchURL).text
                except Exception:
                    sickrage.srCore.srLogger.debug(
                        "No data returned from provider")
                    continue

                try:
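                    # Strip inline <button> elements from the page before parsing the table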
                    data = re.sub(r'(?im)<button.+?<[/]button>', '', data, 0)
                    with bs4_parser(data) as html:
                        if not html:
                            sickrage.srCore.srLogger.debug(
                                "No data returned from provider")
                            continue

                        if html.find(text='No Torrents Found!'):
                            sickrage.srCore.srLogger.debug(
                                "Data returned from provider does not contain any torrents"
                            )
                            continue

                        torrent_table = html.find('table',
                                                  attrs={'class': 'torrents'})
                        torrents = torrent_table.find_all(
                            'tr') if torrent_table else []

                        # Continue only if one Release is found
                        if len(torrents) < 2:
                            sickrage.srCore.srLogger.debug(
                                "Data returned from provider does not contain any torrents"
                            )
                            continue

                        for result in torrents[1:]:
                            try:
                                title = result.find_all('td')[1].find('a').text
                                download_url = self.urls[
                                    'base_url'] + result.find_all(
                                        'td')[3].find('a')['href']
                                size = convert_size(
                                    result.find_all('td')[5].text)
                                seeders = int(
                                    result.find('td',
                                                attrs={
                                                    'class': 'ac t_seeders'
                                                }).text)
                                leechers = int(
                                    result.find('td',
                                                attrs={
                                                    'class': 'ac t_leechers'
                                                }).text)
                            except (AttributeError, TypeError, KeyError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode != 'RSS':
                                    sickrage.srCore.srLogger.debug(
                                        "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})"
                                        .format(title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode != 'RSS':
                                sickrage.srCore.srLogger.debug(
                                    "Found result: %s " % title)

                            items[mode].append(item)

                except Exception as e:
                    sickrage.srCore.srLogger.error(
                        "Failed parsing provider. Error: %r" % e)

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Example #46
0
    def parse(self, data, mode, **kwargs):
        """
        Parse search results from data
        :param data: response data
        :param mode: search mode
        :return: search results
        """

        results = []

        id_regex = re.compile(r'(?:\/)(.*)(?:-torrent-([0-9]*)\.html)', re.I)
        hash_regex = re.compile(r'(.*)([0-9a-f]{40})(.*)', re.I)
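        # id_regex captures the URL slug and numeric id from '/<name>-torrent-<id>.html' links;
        # hash_regex pulls a 40-character hex info hash out of the first title anchor's href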

        def process_column_header(th):
            return th.span.get_text() if th.span else th.get_text()

        with bs4_parser(data) as html:
            torrent_table = html.find('table', class_='table2')

            if not torrent_table:
                sickrage.app.log.debug(
                    'Data returned from provider does not contain any torrents'
                )
                return results

            torrent_rows = torrent_table.find_all('tr')
            labels = [
                process_column_header(label)
                for label in torrent_rows[0].find_all('th')
            ]

            # Skip the first row, since it isn't a valid result
            for row in torrent_rows[1:]:
                cells = row.find_all('td')

                try:
                    title_cell = cells[labels.index('Torrent Name')]

                    verified = title_cell.find('img', title='Verified torrent')
                    if self.confirmed and not verified:
                        continue

                    title_anchors = title_cell.find_all('a')
                    if not title_anchors or len(title_anchors) < 2:
                        continue

                    title_url = title_anchors[0].get('href')
                    title = title_anchors[1].get_text(strip=True)
                    regex_result = id_regex.search(
                        title_anchors[1].get('href'))

                    alt_title = regex_result.group(1)
                    if len(title) < len(alt_title):
                        title = alt_title.replace('-', ' ')

                    torrent_id = regex_result.group(2)
                    info_hash = hash_regex.search(title_url).group(2)
                    if not all([title, torrent_id, info_hash]):
                        continue

                    try:
                        self.session.get(self.urls['update'],
                                         timeout=30,
                                         params={
                                             'torrent_id': torrent_id,
                                             'infohash': info_hash
                                         })
                    except Exception:
                        pass

                    download_url = 'magnet:?xt=urn:btih:{hash}&dn={title}'.format(
                        hash=info_hash, title=title)

                    # Strip the comma thousands separator so that e.g. '2,000' seeders parses as 2000
                    seeders = try_int(
                        cells[labels.index('Seed')].get_text(
                            strip=True).replace(',', ''), 1)
                    leechers = try_int(cells[labels.index('Leech')].get_text(
                        strip=True).replace(',', ''))

                    size = convert_size(
                        cells[labels.index('Size')].get_text(strip=True), -1)

                    results += [{
                        'title': title,
                        'link': download_url,
                        'size': size,
                        'seeders': seeders,
                        'leechers': leechers
                    }]

                    if mode != 'RSS':
                        sickrage.app.log.debug(
                            "Found result: {}".format(title))
                except Exception:
                    sickrage.app.log.error("Failed parsing provider")

        return results
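
To make the two expressions above concrete, here is a small standalone check. The hrefs are hypothetical, shaped like the links the parser expects rather than captured from the site.

import re

id_regex = re.compile(r'(?:\/)(.*)(?:-torrent-([0-9]*)\.html)', re.I)
hash_regex = re.compile(r'(.*)([0-9a-f]{40})(.*)', re.I)

# Hypothetical hrefs used only to illustrate the captured groups
detail_href = '/Some-Show-S01E01-720p-torrent-1234567.html'
title_href = 'http://itorrents.org/torrent/' + 40 * 'a' + '.torrent?title=Some-Show'

match = id_regex.search(detail_href)
print(match.group(1))   # 'Some-Show-S01E01-720p' -> slug used as an alternate title
print(match.group(2))   # '1234567'               -> torrent id
print(hash_regex.search(title_href).group(2))     # the 40-character info hash
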
Example #47
0
    def search(self,
               search_strings,
               search_mode='eponly',
               epcount=0,
               age=0,
               epObj=None):
        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        # select the correct category
        anime = (self.show
                 and self.show.anime) or (epObj and epObj.show
                                          and epObj.show.anime) or False
        self.search_params['category'] = ('tv', 'anime')[anime]

        for mode in search_strings.keys():
            sickrage.srCore.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_strings[mode]:

                self.search_params['q'] = search_string.encode(
                    'utf-8') if mode != 'RSS' else ''
                self.search_params[
                    'field'] = 'seeders' if mode != 'RSS' else 'time_add'

                if mode != 'RSS':
                    sickrage.srCore.srLogger.debug("Search string: %s" %
                                                   search_string)

                url_fmt_string = 'usearch' if mode != 'RSS' else search_string
                try:
                    searchURL = self.urls[
                        'search'] % url_fmt_string + '?' + urlencode(
                            self.search_params)
                    sickrage.srCore.srLogger.debug("Search URL: %s" %
                                                   searchURL)

                    try:
                        data = sickrage.srCore.srWebSession.get(searchURL).text
                    except Exception:
                        sickrage.srCore.srLogger.debug(
                            "No data returned from provider")
                        continue

                    if not data.startswith('<?xml'):
                        sickrage.srCore.srLogger.info(
                            'Expected xml but got something else, is your mirror failing?'
                        )
                        continue

                    try:
                        data = xmltodict.parse(data)
                    except ExpatError:
                        sickrage.srCore.srLogger.error(
                            "Failed parsing provider. Traceback: %r\n%r" %
                            (traceback.format_exc(), data))
                        continue

                    if not all([
                            data, 'rss' in data, 'channel' in data['rss'],
                            'item' in data['rss']['channel']
                    ]):
                        sickrage.srCore.srLogger.debug(
                            "Malformed rss returned, skipping")
                        continue

                    # https://github.com/martinblech/xmltodict/issues/111
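                    # A channel with a single <item> is returned as a dict rather than a list, so normalize it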
                    entries = data['rss']['channel']['item']
                    entries = entries if isinstance(entries,
                                                    list) else [entries]

                    for item in entries:
                        try:
                            title = item['title']
                            # Use the torcache link KAT provides unless it isn't torcache
                            # or we aren't using the blackhole method; otherwise prefer the
                            # magnet link so downloads still work when connecting directly
                            # to a client through a proxy.
                            download_url = item['enclosure']['@url']
                            if sickrage.srCore.srConfig.TORRENT_METHOD != "blackhole" or 'torcache' not in download_url:
                                download_url = item['torrent:magnetURI']

                            seeders = int(item['torrent:seeds'])
                            leechers = int(item['torrent:peers'])
                            verified = bool(int(item['torrent:verified']) or 0)
                            size = convert_size(item['torrent:contentLength'])

                            info_hash = item['torrent:infoHash']
                        except (AttributeError, TypeError, KeyError):
                            continue

                        if not all([title, download_url]):
                            continue

                        # Filter unseeded torrent
                        if seeders < self.minseed or leechers < self.minleech:
                            if mode != 'RSS':
                                sickrage.srCore.srLogger.debug(
                                    "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})"
                                    .format(title, seeders, leechers))
                            continue

                        if self.confirmed and not verified:
                            if mode != 'RSS':
                                sickrage.srCore.srLogger.debug(
                                    "Found result " + title +
                                    " but that doesn't seem like a verified result so I'm ignoring it"
                                )
                            continue

                        item = title, download_url, size, seeders, leechers, info_hash
                        if mode != 'RSS':
                            sickrage.srCore.srLogger.debug(
                                "Found result: %s " % title)

                        items[mode].append(item)

                except Exception:
                    sickrage.srCore.srLogger.error(
                        "Failed parsing provider. Traceback: %r" %
                        traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
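
The single-item normalization above matters because xmltodict collapses a lone <item> into a dict instead of a list. A quick standalone illustration with a made-up feed:

import xmltodict

FEED = ('<?xml version="1.0" encoding="utf-8"?>'
        '<rss><channel><item><title>Some.Show.S01E01</title></item></channel></rss>')

data = xmltodict.parse(FEED)

# One <item> parses to a dict, several parse to a list; normalize to a list
entries = data['rss']['channel']['item']
entries = entries if isinstance(entries, list) else [entries]
print([entry['title'] for entry in entries])   # ['Some.Show.S01E01']
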
Example #48
0
    def search(self,
               search_params,
               search_mode='eponly',
               epcount=0,
               age=0,
               epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srCore.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode != 'RSS':
                    sickrage.srCore.srLogger.debug("Search string: %s " %
                                                   search_string)

                searchURL = self.urls['search'] % (urllib.quote(
                    search_string.encode('utf-8')), self.categories)
                sickrage.srCore.srLogger.debug("Search URL: %s" % searchURL)

                try:
                    data = sickrage.srCore.srWebSession.get(searchURL).text
                except Exception:
                    sickrage.srCore.srLogger.debug(
                        "No data returned from provider")
                    continue

                try:
                    with bs4_parser(data) as html:
                        # Continue only if one Release is found
                        empty = html.find(
                            'h2', text="No .torrents fit this filter criteria")
                        if empty:
                            sickrage.srCore.srLogger.debug(
                                "Data returned from provider does not contain any torrents"
                            )
                            continue

                        torrent_table = html.find(
                            'table',
                            attrs={'style': 'border: none; width: 100%;'})
                        if not torrent_table:
                            sickrage.srCore.srLogger.error(
                                "Could not find table of torrents")
                            continue

                        torrent_rows = torrent_table.find_all(
                            'tr', attrs={'class': 'browse'})

                        for result in torrent_rows:
                            cells = result.find_all('td')
                            size = None
                            link = cells[1].find(
                                'a',
                                attrs={
                                    'style':
                                    'font-size: 1.25em; font-weight: bold;'
                                })

                            torrent_id = link['href'].replace(
                                'details.php?id=', '')

                            try:
                                if link.has_attr('title'):
                                    title = link['title']
                                else:
                                    title = link.contents[0]

                                download_url = self.urls['download'] % (
                                    torrent_id, link.contents[0])
                                seeders = int(cells[9].contents[0])
                                leechers = int(cells[10].contents[0])

                                # Need size for failed downloads handling
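                                # e.g. '1.2 GB' or '700 MB'; fall back to -1 when the size cannot be parsed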
                                if size is None:
                                    if re.match(
                                            r'[0-9]+,?\.?[0-9]*[KkMmGg]+[Bb]+',
                                            cells[7].text):
                                        size = convert_size(cells[7].text)
                                        if not size:
                                            size = -1

                            except (AttributeError, TypeError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode != 'RSS':
                                    sickrage.srCore.srLogger.debug(
                                        "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})"
                                        .format(title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode != 'RSS':
                                sickrage.srCore.srLogger.debug(
                                    "Found result: %s " % title)

                            items[mode].append(item)

                except Exception:
                    sickrage.srCore.srLogger.error(
                        "Failed parsing provider. Traceback: %s" %
                        traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Example #49
0
    def search(self,
               search_params,
               search_mode='eponly',
               epcount=0,
               age=0,
               epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self.login():
            return results

        for mode in search_params.keys():
            sickrage.srCore.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode != 'RSS':
                    sickrage.srCore.srLogger.debug("Search string: %s " %
                                                   search_string)

                searchURL = self.urls['search'] % (urllib.quote(search_string),
                                                   self.categories)
                sickrage.srCore.srLogger.debug("Search URL: %s" % searchURL)

                try:
                    data = sickrage.srCore.srWebSession.get(searchURL).text
                except Exception:
                    sickrage.srCore.srLogger.debug(
                        "No data returned from provider")
                    continue

                try:
                    with bs4_parser(data) as html:
                        # Continue only if one Release is found
                        empty = html.find(text='Nothing found!')
                        if empty:
                            sickrage.srCore.srLogger.debug(
                                "Data returned from provider does not contain any torrents"
                            )
                            continue

                        torrent_table = html.find('table',
                                                  attrs={'border': '1'})
                        torrent_rows = torrent_table.find_all(
                            'tr') if torrent_table else []

                        for result in torrent_rows[1:]:
                            cells = result.find_all('td')
                            size = None
                            link = cells[1].find('a', attrs={'class': 'index'})

                            full_id = link['href'].replace(
                                'details.php?id=', '')
                            torrent_id = full_id.split("&")[0]

                            # Free leech torrents are marked with green [F L] in the title (i.e. <font color=green>[F&nbsp;L]</font>)
                            freeleechTag = cells[1].find(
                                'font', attrs={'color': 'green'})
                            # bs4 decodes &nbsp; to a non-breaking space, so compare against the decoded text
                            if freeleechTag and freeleechTag.text.replace(u'\xa0', ' ') == '[F L]':
                                isFreeleechTorrent = True
                            else:
                                isFreeleechTorrent = False

                            if self.freeleech and not isFreeleechTorrent:
                                continue

                            try:
                                if link.has_attr('title'):
                                    title = link['title']
                                else:
                                    title = link.contents[0]
                                download_url = self.urls['download'] % (
                                    torrent_id, link.contents[0])
                                seeders = int(
                                    cells[8].find('span').contents[0])
                                leechers = int(
                                    cells[9].find('span').contents[0])

                                # Need size for failed downloads handling
                                if size is None:
                                    if re.match(
                                            r'[0-9]+,?\.?[0-9]*[KkMmGg]+[Bb]+',
                                            cells[6].text):
                                        size = convert_size(cells[6].text)
                                        if not size:
                                            size = -1

                            except (AttributeError, TypeError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode != 'RSS':
                                    sickrage.srCore.srLogger.debug(
                                        "Discarding torrent because it doesn't meet the minimum seeders or leechers: {} (S:{} L:{})"
                                        .format(title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode != 'RSS':
                                sickrage.srCore.srLogger.debug(
                                    "Found result: %s " % title)

                            items[mode].append(item)

                except Exception:
                    sickrage.srCore.srLogger.error(
                        "Failed parsing provider. Traceback: %s" %
                        traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
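
As a sanity check for the freeleech comparison above: BeautifulSoup decodes HTML entities, so the literal string '[F&nbsp;L]' never appears in the extracted text. The markup below is a minimal stand-in, not the tracker's actual page.

from bs4 import BeautifulSoup

cell = BeautifulSoup('<td><font color="green">[F&nbsp;L]</font></td>', 'html.parser')
tag = cell.find('font', attrs={'color': 'green'})

print(tag.text == '[F&nbsp;L]')                    # False: &nbsp; was decoded to U+00A0
print(tag.text.replace(u'\xa0', ' ') == '[F L]')   # True after normalizing the space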