Example 1
    def test_search(self):
        self.url = "http://kickass.unblocked.li"
        searchURL = "{}/usearch/American%20Dad%20S08/".format(self.url)

        data = getURL(searchURL, session=requests.Session())
        if not data:
            return

        with bs4_parser(data) as html:
            torrent_table = html.find("table", attrs={"class": "data"})

        # Continue only if one Release is found
        torrent_rows = torrent_table.find_all("tr") if torrent_table else []
        if len(torrent_rows) < 2:
            print ("The data returned does not contain any torrents")
            return

        for tr in torrent_rows[1:]:
            try:
                link = urlparse.urljoin(self.url, (tr.find("div", {"class": "torrentname"}).find_all("a")[1])["href"])
                id = tr.get("id")[-7:]
                title = (tr.find("div", {"class": "torrentname"}).find_all("a")[1]).text or (
                    tr.find("div", {"class": "torrentname"}).find_all("a")[2]
                ).text
                url = tr.find("a", "imagnet")["href"]
                verified = True if tr.find("a", "iverify") else False
                trusted = True if tr.find("img", {"alt": "verified"}) else False
                seeders = int(tr.find_all("td")[-2].text)
                leechers = int(tr.find_all("td")[-1].text)
            except (AttributeError, TypeError):
                continue

            print(title)
Example 2
    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        # check for auth
        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " % search_string)

                searchURL = self.urlsearch % (urllib.quote(search_string), self.categories)
                sickrage.srLogger.debug("Search URL: %s" % searchURL)
                data = self.getURL(searchURL)

                if not data:
                    continue

                with bs4_parser(data) as html:
                    resultsTable = html.find("table", {"class": "table2 table-bordered2"})
                    if resultsTable:
                        rows = resultsTable.findAll("tr")
                        for row in rows:
                            link = row.find("a", href=re.compile("details.php"))
                            if link:
                                title = link.text
                                download_url = self.url + '/' + row.find("a", href=re.compile("download.php"))['href']
                                # FIXME
                                size = -1
                                seeders = 1
                                leechers = 0

                                if not all([title, download_url]):
                                    continue

                                # Filter unseeded torrent
                                # if seeders < self.minseed or leechers < self.minleech:
                                #    if mode is not 'RSS':
                                #        LOGGER.debug(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers))
                                #    continue

                                item = title, download_url, size, seeders, leechers
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug("Found result: %s " % title)

                                items[mode].append(item)

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Example 3
    def _doSearch(self, search_string, search_mode='eponly', epcount=0, age=0, epObj=None):
        # FIXME ADD MODE
        if self.show and not self.show.is_anime:
            return []

        sickrage.srLogger.debug("Search string: %s " % search_string)

        params = {
            "terms": search_string.encode('utf-8'),
            "type": 1,  # get anime types
        }

        searchURL = self.url + 'search.php?' + urllib.urlencode(params)
        sickrage.srLogger.debug("Search URL: %s" % searchURL)
        data = self.getURL(searchURL)

        if not data:
            return []

        results = []
        try:
            with bs4_parser(data) as html:
                torrent_table = html.find('table', attrs={'class': 'listing'})
                torrent_rows = torrent_table.find_all('tr') if torrent_table else []
                if torrent_rows:
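                    # A 'centertext' cell in the first row marks a notice/header row, so start parsing from the second row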
                    if torrent_rows[0].find('td', attrs={'class': 'centertext'}):
                        a = 1
                    else:
                        a = 0

                    # Rows appear to come in pairs (a description row followed by a details row), so step through them two at a time
                    for top, bottom in zip(torrent_rows[a::2], torrent_rows[a + 1::2]):
                        title = top.find('td', attrs={'class': 'desc-top'}).text
                        title = title.lstrip()
                        download_url = top.find('td', attrs={'class': 'desc-top'}).find('a')['href']
                        # FIXME
                        size = -1
                        seeders = 1
                        leechers = 0

                        if not all([title, download_url]):
                            continue

                        # Filter unseeded torrent
                        # if seeders < self.minseed or leechers < self.minleech:
                        #    if mode is not 'RSS':
                        #        LOGGER.debug(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers))
                        #    continue

                        item = title, download_url, size, seeders, leechers

                        results.append(item)

        except Exception as e:
            sickrage.srLogger.error("Failed parsing provider. Traceback: %s" % traceback.format_exc())

        # FIXME SORTING
        return results
Example 4
    def test_search(self):
        self.url = 'http://kickass.unblocked.li'
        searchURL = '{}/usearch/American%20Dad%20S08/'.format(self.url)

        data = getURL(searchURL, session=requests.Session())
        if not data:
            return

        with bs4_parser(data) as html:
            torrent_table = html.find('table', attrs={'class': 'data'})

        # Continue only if one Release is found
        torrent_rows = torrent_table.find_all('tr') if torrent_table else []
        if len(torrent_rows) < 2:
            print("The data returned does not contain any torrents")
            return

        for tr in torrent_rows[1:]:
            try:
                link = urlparse.urljoin(self.url,
                                        (tr.find('div', {
                                            'class': 'torrentname'
                                        }).find_all('a')[1])['href'])
                id = tr.get('id')[-7:]
                title = (tr.find('div', {'class': 'torrentname'}).find_all('a')[1]).text \
                        or (tr.find('div', {'class': 'torrentname'}).find_all('a')[2]).text
                url = tr.find('a', 'imagnet')['href']
                verified = True if tr.find('a', 'iverify') else False
                trusted = True if tr.find('img',
                                          {'alt': 'verified'}) else False
                seeders = int(tr.find_all('td')[-2].text)
                leechers = int(tr.find_all('td')[-1].text)
            except (AttributeError, TypeError):
                continue

            print(title)
Example 5
    def _doSearch(self,
                  search_params,
                  search_mode='eponly',
                  epcount=0,
                  age=0,
                  epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        freeleech = '3' if self.freeleech else '0'

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " %
                                            search_string)

                searchURL = self.urls['search'] % (freeleech, search_string)
                sickrage.srLogger.debug("Search URL: %s" % searchURL)
                data = self.getURL(searchURL)
                max_page_number = 0

                if not data:
                    sickrage.srLogger.debug("No data returned from provider")
                    continue

                try:
                    with bs4_parser(data) as html:

                        # Check to see if there is more than 1 page of results
                        pager = html.find('div', {'class': 'pager'})
                        if pager:
                            page_links = pager.find_all('a', href=True)
                        else:
                            page_links = []

                        if len(page_links) > 0:
                            for lnk in page_links:
                                link_text = lnk.text.strip()
                                if link_text.isdigit():
                                    page_int = int(link_text)
                                    if page_int > max_page_number:
                                        max_page_number = page_int

                        # limit page number to 15 just in case something goes wrong
                        if max_page_number > 15:
                            max_page_number = 15
                        # limit RSS search
                        if max_page_number > 3 and mode is 'RSS':
                            max_page_number = 3
                except Exception:
                    sickrage.srLogger.error(
                        "Failed parsing provider. Traceback: %s" %
                        traceback.format_exc())
                    continue

                data_response_list = [data]

                # Freshon starts counting pages from zero, even though it displays numbers from 1
                if max_page_number > 1:
                    for i in range(1, max_page_number):

                        time.sleep(1)
                        page_searchURL = searchURL + '&page=' + str(i)
                        # '.log(u"Search string: " + page_searchURL, LOGGER.DEBUG)
                        page_html = self.getURL(page_searchURL)

                        if not page_html:
                            continue

                        data_response_list.append(page_html)

                try:

                    for data in data_response_list:

                        with bs4_parser(data) as html:

                            torrent_rows = html.findAll(
                                "tr", {"class": re.compile('torrent_[0-9]*')})

                            # Continue only if a Release is found
                            if len(torrent_rows) == 0:
                                sickrage.srLogger.debug(
                                    "Data returned from provider does not contain any torrents"
                                )
                                continue

                            for individual_torrent in torrent_rows:

                                # skip if torrent has been nuked due to poor quality
                                if individual_torrent.find(
                                        'img', alt='Nuked') is not None:
                                    continue

                                try:
                                    title = individual_torrent.find(
                                        'a', {'class': 'torrent_name_link'
                                              })['title']
                                except Exception:
                                    sickrage.srLogger.warning(
                                        "Unable to parse torrent title. Traceback: %s "
                                        % traceback.format_exc())
                                    continue

                                try:
                                    details_url = individual_torrent.find(
                                        'a',
                                        {'class': 'torrent_name_link'})['href']
                                    torrent_id = int((re.match(
                                        '.*?([0-9]+)$',
                                        details_url).group(1)).strip())
                                    download_url = self.urls['download'] % (
                                        str(torrent_id))
                                    seeders = tryInt(
                                        individual_torrent.find(
                                            'td', {
                                                'class': 'table_seeders'
                                            }).find('span').text.strip(), 1)
                                    leechers = tryInt(
                                        individual_torrent.find(
                                            'td', {
                                                'class': 'table_leechers'
                                            }).find('a').text.strip(), 0)
                                    # FIXME
                                    size = -1
                                except Exception:
                                    continue

                                if not all([title, download_url]):
                                    continue

                                # Filter unseeded torrent
                                if seeders < self.minseed or leechers < self.minleech:
                                    if mode is not 'RSS':
                                        sickrage.srLogger.debug(
                                            "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})"
                                            .format(title, seeders, leechers))
                                    continue

                                item = title, download_url, size, seeders, leechers
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                        "Found result: %s " % title)

                                items[mode].append(item)

                except Exception:
                    sickrage.srLogger.error(
                        "Failed parsing provider. Traceback: %s" %
                        traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Example 6
    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        self.categories = "cat=" + str(self.cat)

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode is 'RSS':
                    self.page = 2

                last_page = 0
                y = int(self.page)

                if search_string == '':
                    continue

                search_string = str(search_string).replace('.', ' ')

                for x in range(0, y):
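                    # Each listing page appears to hold 20 results, so the request offset advances in steps of 20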
                    z = x * 20
                    if last_page:
                        break

                    if mode is not 'RSS':
                        searchURL = (self.urls['search_page'] + '&filter={2}').format(z, self.categories,
                                                                                      search_string)
                    else:
                        searchURL = self.urls['search_page'].format(z, self.categories)

                    if mode is not 'RSS':
                        sickrage.srLogger.debug("Search string: %s " % search_string)

                    sickrage.srLogger.debug("Search URL: %s" % searchURL)
                    data = self.getURL(searchURL)
                    if not data:
                        sickrage.srLogger.debug("No data returned from provider")
                        continue

                    try:
                        with bs4_parser(data) as html:
                            torrent_table = html.find('table', attrs={'class': 'copyright'})
                            torrent_rows = torrent_table.find_all('tr') if torrent_table else []

                            # Continue only if one Release is found
                            if len(torrent_rows) < 3:
                                sickrage.srLogger.debug("Data returned from provider does not contain any torrents")
                                last_page = 1
                                continue

                            if len(torrent_rows) < 42:
                                last_page = 1

                            for result in torrent_table.find_all('tr')[2:]:

                                try:
                                    link = result.find('td').find('a')
                                    title = link.string
                                    download_url = self.urls['download'] % result.find_all('td')[8].find('a')['href'][
                                                                           -8:]
                                    leechers = result.find_all('td')[3].find_all('td')[1].text
                                    leechers = int(leechers.strip('[]'))
                                    seeders = result.find_all('td')[3].find_all('td')[2].text
                                    seeders = int(seeders.strip('[]'))
                                    # FIXME
                                    size = -1
                                except (AttributeError, TypeError):
                                    continue

                                filename_qt = self._reverseQuality(self._episodeQuality(result))
                                for text in self.hdtext:
                                    title1 = title
                                    title = title.replace(text, filename_qt)
                                    if title != title1:
                                        break

                                if Quality.nameQuality(title) == Quality.UNKNOWN:
                                    title += filename_qt

                                if not self._is_italian(result) and not self.subtitle:
                                    sickrage.srLogger.debug("Torrent is subtitled, skipping: %s " % title)
                                    continue

                                if self.engrelease and not self._is_english(result):
                                    sickrage.srLogger.debug("Torrent isnt english audio/subtitled , skipping: %s " % title)
                                    continue

                                search_show = re.split(r'([Ss][\d{1,2}]+)', search_string)[0]
                                show_title = search_show
                                rindex = re.search(r'([Ss][\d{1,2}]+)', title)
                                if rindex:
                                    show_title = title[:rindex.start()]
                                    ep_params = title[rindex.start():]

                                if show_title.lower() != search_show.lower() \
                                        and search_show.lower() in show_title.lower() and ep_params:
                                    title = search_show + ep_params

                                if not all([title, download_url]):
                                    continue

                                if self._is_season_pack(title):
                                    title = re.sub(r'([Ee][\d{1,2}\-?]+)', '', title)

                                # Filter unseeded torrent
                                if seeders < self.minseed or leechers < self.minleech:
                                    if mode is not 'RSS':
                                        sickrage.srLogger.debug(
                                                "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
                                                        title, seeders, leechers))
                                    continue

                                item = title, download_url, size, seeders, leechers
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug("Found result: %s " % title)

                                items[mode].append(item)

                    except Exception:
                        sickrage.srLogger.error("Failed parsing provider. Traceback: %s" % traceback.format_exc())

                # For each search mode sort all the items by seeders if available
                items[mode].sort(key=lambda tup: tup[3], reverse=True)

                results += items[mode]

        return results
Example 7
    def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        lang_info = '' if not epObj or not epObj.show else epObj.show.lang

        # Only search if user conditions are true
        if self.onlyspasearch and lang_info != 'es':
            sickrage.srLogger.debug("Show info is not spanish, skipping provider search")
            return results

        for mode in search_strings.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)

            for search_string in search_strings[mode]:
                self.search_params.update({'q': search_string.strip()})

                sickrage.srLogger.debug(
                        "Search URL: %s" % self.urls['search'] + '?' + urlencode(self.search_params))
                data = self.getURL(self.urls['search'], post_data=self.search_params, timeout=30)
                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        torrent_tbody = html.find('tbody')

                        if len(torrent_tbody) < 1:
                            sickrage.srLogger.debug("Data returned from provider does not contain any torrents")
                            continue

                        torrent_table = torrent_tbody.findAll('tr')
                        num_results = len(torrent_table) - 1

                        iteration = 0
                        for row in torrent_table:
                            try:
                                if iteration < num_results:
                                    torrent_size = row.findAll('td')[2]
                                    torrent_row = row.findAll('a')[1]

                                    download_url = torrent_row.get('href')
                                    title_raw = torrent_row.get('title')
                                    size = self._convertSize(torrent_size.text)

                                    title = self._processTitle(title_raw)

                                    item = title, download_url, size
                                    sickrage.srLogger.debug("Found result: %s " % title)

                                    items[mode].append(item)
                                    iteration += 1

                            except (AttributeError, TypeError):
                                continue

                except Exception:
                    sickrage.srLogger.warning("Failed parsing provider. Traceback: %s" % traceback.format_exc())

            results += items[mode]

        return results
Example 8
    def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_strings.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_strings[mode]:

                if mode is not 'RSS':
                    searchURL = self.urls['search'] % (urllib.quote_plus(search_string), self.categories)
                else:
                    searchURL = self.urls['rss'] % self.categories

                sickrage.srLogger.debug("Search URL: %s" % searchURL)
                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s" % search_string)

                data = self.getURL(searchURL)
                if not data or 'please try later' in data:
                    sickrage.srLogger.debug("No data returned from provider")
                    continue

                # Search result page contains some invalid html that prevents html parser from returning all data.
                # We cut everything before the table that contains the data we are interested in thus eliminating
                # the invalid html portions
                try:
                    index = data.lower().index('<table class="mainblockcontenttt"')
                except ValueError:
                    sickrage.srLogger.error("Could not find table of torrents mainblockcontenttt")
                    continue

                data = urllib.unquote(data[index:].encode('utf-8')).decode('utf-8').replace('\t', '')

                with bs4_parser(data) as html:
                    if not html:
                        sickrage.srLogger.debug("No html data parsed from provider")
                        continue

                    empty = html.find('No torrents here')
                    if empty:
                        sickrage.srLogger.debug("Data returned from provider does not contain any torrents")
                        continue

                    tables = html.find('table', attrs={'class': 'mainblockcontenttt'})
                    if not tables:
                        sickrage.srLogger.error("Could not find table of torrents mainblockcontenttt")
                        continue

                    torrents = tables.findChildren('tr')
                    if not torrents:
                        continue

                    # Skip column headers
                    for result in torrents[1:]:
                        try:
                            cells = result.findChildren('td', attrs={
                                'class': re.compile(r'(green|yellow|red|mainblockcontent)')})
                            if not cells:
                                continue

                            title = download_url = seeders = leechers = size = None
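                            # Walk the cells: the download-link cell gives the title and URL, the green/yellow/red cells give seeders and leechers, and a size-looking cell gives the size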
                            for cell in cells:
                                try:
                                    if None is title and cell.get('title') and cell.get('title') in 'Download':
                                        title = re.search('f=(.*).torrent', cell.a[b'href']).group(1).replace('+', '.')
                                        title = title.decode('utf-8')
                                        download_url = self.urls['home'] % cell.a[b'href']
                                        continue
                                    if None is seeders and cell.get('class')[0] and cell.get('class')[
                                        0] in 'green' 'yellow' 'red':
                                        seeders = int(cell.text)
                                        if not seeders:
                                            seeders = 1
                                            continue
                                    elif None is leechers and cell.get('class')[0] and cell.get('class')[
                                        0] in 'green' 'yellow' 'red':
                                        leechers = int(cell.text)
                                        if not leechers:
                                            leechers = 0
                                            continue

                                    # Need size for failed downloads handling
                                    if size is None:
                                        if re.match(r'[0-9]+,?\.?[0-9]* [KkMmGg]+[Bb]+', cell.text):
                                            size = self._convertSize(cell.text)
                                            if not size:
                                                size = -1

                                except Exception:
                                    sickrage.srLogger.error("Failed parsing provider. Traceback: %s" % traceback.format_exc())

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                            "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
                                                    title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " % title)

                            items[mode].append(item)

                        except (AttributeError, TypeError, KeyError, ValueError):
                            continue

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Example 9
    def _doSearch(self,
                  search_strings,
                  search_mode='eponly',
                  epcount=0,
                  age=0,
                  epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        lang_info = '' if not epObj or not epObj.show else epObj.show.lang

        # Only search if user conditions are true
        if self.onlyspasearch and lang_info != 'es':
            sickrage.srLogger.debug(
                "Show info is not spanish, skipping provider search")
            return results

        for mode in search_strings.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)

            for search_string in search_strings[mode]:
                self.search_params.update({'q': search_string.strip()})

                sickrage.srLogger.debug("Search URL: %s" %
                                        self.urls['search'] + '?' +
                                        urlencode(self.search_params))
                data = self.getURL(self.urls['search'],
                                   post_data=self.search_params,
                                   timeout=30)
                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        torrent_tbody = html.find('tbody')

                        if len(torrent_tbody) < 1:
                            sickrage.srLogger.debug(
                                "Data returned from provider does not contain any torrents"
                            )
                            continue

                        torrent_table = torrent_tbody.findAll('tr')
                        num_results = len(torrent_table) - 1

                        iteration = 0
                        for row in torrent_table:
                            try:
                                if iteration < num_results:
                                    torrent_size = row.findAll('td')[2]
                                    torrent_row = row.findAll('a')[1]

                                    download_url = torrent_row.get('href')
                                    title_raw = torrent_row.get('title')
                                    size = self._convertSize(torrent_size.text)

                                    title = self._processTitle(title_raw)

                                    item = title, download_url, size
                                    sickrage.srLogger.debug(
                                        "Found result: %s " % title)

                                    items[mode].append(item)
                                    iteration += 1

                            except (AttributeError, TypeError):
                                continue

                except Exception:
                    sickrage.srLogger.warning(
                        "Failed parsing provider. Traceback: %s" %
                        traceback.format_exc())

            results += items[mode]

        return results
Example 10
    def _doSearch(self,
                  search_strings,
                  search_mode='eponly',
                  epcount=0,
                  age=0,
                  epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_strings.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_strings[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " %
                                            search_string)

                searchURL = self.urls['search'] % (search_string,
                                                   self.catagories)
                sickrage.srLogger.debug("Search URL: %s" % searchURL)

                data = self.getURL(searchURL)
                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        torrent_table = html.find(
                            'table', attrs={'id': 'torrent_table'})
                        torrent_rows = torrent_table.find_all(
                            'tr') if torrent_table else []

                        # Continue only if one Release is found
                        if len(torrent_rows) < 2:
                            sickrage.srLogger.debug(
                                "Data returned from provider does not contain any torrents"
                            )
                            continue

                        for result in torrent_rows[1:]:
                            cells = result.find_all('td')
                            link = result.find('a', attrs={'dir': 'ltr'})
                            url = result.find('a', attrs={'title': 'Download'})

                            try:
                                title = link.contents[0]
                                download_url = self.urls['download'] % (
                                    url[b'href'])
                                seeders = cells[len(cells) - 2].contents[0]
                                leechers = cells[len(cells) - 1].contents[0]
                                # FIXME
                                size = -1
                            except (AttributeError, TypeError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                        "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})"
                                        .format(title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " %
                                                        title)

                            items[mode].append(item)

                except Exception:
                    sickrage.srLogger.warning(
                        "Failed parsing provider. Traceback: %s" %
                        traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Example 11
    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        freeleech = '3' if self.freeleech else '0'

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " % search_string)

                searchURL = self.urls['search'] % (freeleech, search_string)
                sickrage.srLogger.debug("Search URL: %s" % searchURL)
                data = self.getURL(searchURL)
                max_page_number = 0

                if not data:
                    sickrage.srLogger.debug("No data returned from provider")
                    continue

                try:
                    with bs4_parser(data) as html:

                        # Check to see if there is more than 1 page of results
                        pager = html.find('div', {'class': 'pager'})
                        if pager:
                            page_links = pager.find_all('a', href=True)
                        else:
                            page_links = []

                        if len(page_links) > 0:
                            for lnk in page_links:
                                link_text = lnk.text.strip()
                                if link_text.isdigit():
                                    page_int = int(link_text)
                                    if page_int > max_page_number:
                                        max_page_number = page_int

                        # limit page number to 15 just in case something goes wrong
                        if max_page_number > 15:
                            max_page_number = 15
                        # limit RSS search
                        if max_page_number > 3 and mode is 'RSS':
                            max_page_number = 3
                except Exception:
                    sickrage.srLogger.error("Failed parsing provider. Traceback: %s" % traceback.format_exc())
                    continue

                data_response_list = [data]

                # Freshon starts counting pages from zero, even though it displays numbers from 1
                if max_page_number > 1:
                    for i in range(1, max_page_number):

                        time.sleep(1)
                        page_searchURL = searchURL + '&page=' + str(i)
                        # '.log(u"Search string: " + page_searchURL, LOGGER.DEBUG)
                        page_html = self.getURL(page_searchURL)

                        if not page_html:
                            continue

                        data_response_list.append(page_html)

                try:

                    for data in data_response_list:

                        with bs4_parser(data) as html:

                            torrent_rows = html.findAll("tr", {"class": re.compile('torrent_[0-9]*')})

                            # Continue only if a Release is found
                            if len(torrent_rows) == 0:
                                sickrage.srLogger.debug("Data returned from provider does not contain any torrents")
                                continue

                            for individual_torrent in torrent_rows:

                                # skip if torrent has been nuked due to poor quality
                                if individual_torrent.find('img', alt='Nuked') is not None:
                                    continue

                                try:
                                    title = individual_torrent.find('a', {'class': 'torrent_name_link'})['title']
                                except Exception:
                                    sickrage.srLogger.warning(
                                            "Unable to parse torrent title. Traceback: %s " % traceback.format_exc())
                                    continue

                                try:
                                    details_url = individual_torrent.find('a', {'class': 'torrent_name_link'})['href']
                                    torrent_id = int((re.match('.*?([0-9]+)$', details_url).group(1)).strip())
                                    download_url = self.urls['download'] % (str(torrent_id))
                                    seeders = tryInt(individual_torrent.find('td', {'class': 'table_seeders'}).find(
                                            'span').text.strip(), 1)
                                    leechers = tryInt(individual_torrent.find('td', {'class': 'table_leechers'}).find(
                                            'a').text.strip(), 0)
                                    # FIXME
                                    size = -1
                                except Exception:
                                    continue

                                if not all([title, download_url]):
                                    continue

                                # Filter unseeded torrent
                                if seeders < self.minseed or leechers < self.minleech:
                                    if mode is not 'RSS':
                                        sickrage.srLogger.debug(
                                                "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
                                                        title, seeders, leechers))
                                    continue

                                item = title, download_url, size, seeders, leechers
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug("Found result: %s " % title)

                                items[mode].append(item)

                except Exception:
                    sickrage.srLogger.error("Failed parsing provider. Traceback: %s" % traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Example 12
    def _doSearch(self,
                  search_params,
                  search_mode='eponly',
                  epcount=0,
                  age=0,
                  epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " %
                                            search_string)

                searchURL = self.urls['search'] % (self.categories,
                                                   search_string)
                sickrage.srLogger.debug("Search URL: %s" % searchURL)

                # Set cookies from response
                self.headers.update({'Cookie': self.cookies})
                # Returns top 30 results by default, expandable in user profile
                data = self.getURL(searchURL)
                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        torrent_table = html.find("div", id="torrentBrowse")
                        torrent_rows = torrent_table.findChildren(
                            "tr") if torrent_table else []

                        # Continue only if at least one release is found
                        if len(torrent_rows) < 1:
                            sickrage.srLogger.debug(
                                "Data returned from provider does not contain any torrents"
                            )
                            continue

                        for result in torrent_rows[1:]:
                            cells = result.findChildren("td")
                            title = cells[1].find("a").find_next("a")
                            link = cells[3].find("a")
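                            # Seeders and leechers are combined as "S/L" in one cell; the size cell also holds a second value, so keep only the first part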
                            shares = cells[8].get_text().split("/", 1)
                            torrent_size = cells[7].get_text().split("/", 1)[0]

                            try:
                                if title.has_key('title'):
                                    title = title[b'title']
                                else:
                                    title = cells[1].find("a")['title']

                                download_url = self.urls['download'] % (
                                    link[b'href'])
                                seeders = int(shares[0])
                                leechers = int(shares[1])

                                size = -1
                                if re.match(
                                        r"\d+([,\.]\d+)?\s*[KkMmGgTt]?[Bb]",
                                        torrent_size):
                                    size = self._convertSize(
                                        torrent_size.rstrip())

                            except (AttributeError, TypeError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                        "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})"
                                        .format(title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " %
                                                        title)

                            items[mode].append(item)

                except Exception as e:
                    sickrage.srLogger.error(
                        "Failed parsing provider. Traceback: %s" %
                        traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Example 13
    def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []

        if not self._doLogin():
            return results

        items = {'Season': [], 'Episode': [], 'RSS': []}

        for mode in search_strings.keys():
            if mode is not 'RSS':
                sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_strings[mode]:
                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " % search_string)

                searchURL = self.urls['search'] % (urllib.quote(search_string), self.categories[search_mode])

                try:
                    sickrage.srLogger.debug("Search URL: %s" % searchURL)
                    data = self.getURL(searchURL)
                    time.sleep(cpu_presets[sickrage.srConfig.CPU_PRESET])
                except Exception as e:
                    sickrage.srLogger.warning("Unable to fetch data. Error: %s" % repr(e))

                if not data:
                    continue

                with bs4_parser(data) as html:
                    torrent_table = html.find('table', attrs={'id': 'torrents-table'})
                    torrent_rows = torrent_table.find_all('tr') if torrent_table else []

                    # Continue only if at least one Release is found
                    if len(torrent_rows) < 2:
                        sickrage.srLogger.debug("Data returned from provider does not contain any torrents")
                        continue

                    for result in torrent_table.find_all('tr')[1:]:

                        try:
                            link = result.find('td', attrs={'class': 'ttr_name'}).find('a')
                            url = result.find('td', attrs={'class': 'td_dl'}).find('a')

                            title = link.string
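                            # Titles truncated with '...' on the listing page: fetch the details page to recover the full title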
                            if re.search(r'\.\.\.', title):
                                data = self.getURL(self.url + "/" + link[b'href'])
                                if data:
                                    with bs4_parser(data) as details_html:
                                        title = re.search('(?<=").+(?<!")', details_html.title.string).group(0)
                            download_url = self.urls['download'] % url[b'href']
                            seeders = int(result.find('td', attrs={'class': 'ttr_seeders'}).string)
                            leechers = int(result.find('td', attrs={'class': 'ttr_leechers'}).string)
                            size = self._convertSize(result.find('td', attrs={'class': 'ttr_size'}).contents[0])
                        except (AttributeError, TypeError):
                            continue

                        if not all([title, download_url]):
                            continue

                        # Filter unseeded torrent
                        if seeders < self.minseed or leechers < self.minleech:
                            if mode is not 'RSS':
                                sickrage.srLogger.debug(
                                        "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
                                                title, seeders, leechers))
                            continue

                        item = title, download_url, size, seeders, leechers
                        if mode is not 'RSS':
                            sickrage.srLogger.debug("Found result: %s " % title)

                        items[mode].append(item)

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Example 14
    def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_strings.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_strings[mode]:

                if mode is not 'RSS':
                    searchURL = self.urls['search'] % (urllib.quote_plus(search_string.replace('.', ' ')),)
                else:
                    searchURL = self.urls['search'] % ''

                sickrage.srLogger.debug("Search URL: %s" % searchURL)
                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s" % search_string)

                data = self.getURL(searchURL)
                if not data or 'please try later' in data:
                    sickrage.srLogger.debug("No data returned from provider")
                    continue

                # Search result page contains some invalid html that prevents html parser from returning all data.
                # We cut everything before the table that contains the data we are interested in thus eliminating
                # the invalid html portions
                try:
                    data = data.split('<div id="information"></div>')[1]
                    index = data.index('<table')
                except ValueError:
                    sickrage.srLogger.error("Could not find main torrent table")
                    continue

                with bs4_parser(data[index:]) as html:
                    if not html:
                        sickrage.srLogger.debug("No html data parsed from provider")
                        continue

                    torrents = html.findAll('tr')
                    if not torrents:
                        continue

                    # Skip column headers
                    for result in torrents[1:]:
                        if len(result.contents) < 10:
                            # skip extraneous rows at the end
                            continue

                        try:
                            dl_href = result.find('a', attrs={'href': re.compile(r'download.php.*')})['href']
                            title = re.search('f=(.*).torrent', dl_href).group(1).replace('+', '.')
                            download_url = self.urls['base_url'] + dl_href
                            seeders = int(result.find('span', attrs={'class': 'seedy'}).find('a').text)
                            leechers = int(result.find('span', attrs={'class': 'leechy'}).find('a').text)
                            size = re.match(r'.*?([0-9]+,?\.?[0-9]* [KkMmGg]+[Bb]+).*', str(result), re.DOTALL).group(1)

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                            "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
                                                    title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " % title)

                            items[mode].append(item)

                        except (AttributeError, TypeError, KeyError, ValueError):
                            continue

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Example 15
    def _doSearch(self,
                  search_params,
                  search_mode='eponly',
                  epcount=0,
                  age=0,
                  epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode is 'RSS':
                    searchURL = self.urls['index'] % self.categories
                else:
                    searchURL = self.urls['search'] % (urllib.quote_plus(
                        search_string.encode('utf-8')), self.categories)
                    sickrage.srLogger.debug("Search string: %s " %
                                            search_string)

                data = self.getURL(searchURL)
                sickrage.srLogger.debug("Search URL: %s" % searchURL)
                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        torrent_table = html.find('table',
                                                  attrs={'id': 'torrenttable'})
                        torrent_rows = torrent_table.find_all(
                            'tr') if torrent_table else []

                        # Continue only if one Release is found
                        if len(torrent_rows) < 2:
                            sickrage.srLogger.debug(
                                "Data returned from provider does not contain any torrents"
                            )
                            continue

                        for result in torrent_table.find_all('tr')[1:]:

                            try:
                                link = result.find('td',
                                                   attrs={
                                                       'class': 'name'
                                                   }).find('a')
                                url = result.find('td',
                                                  attrs={
                                                      'class': 'quickdownload'
                                                  }).find('a')
                                title = link.string
                                download_url = self.urls['download'] % url[
                                    b'href']
                                seeders = int(
                                    result.find('td',
                                                attrs={
                                                    'class': 'seeders'
                                                }).string)
                                leechers = int(
                                    result.find('td',
                                                attrs={
                                                    'class': 'leechers'
                                                }).string)
                                # FIXME
                                size = -1
                            except (AttributeError, TypeError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                        "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})"
                                        .format(title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " %
                                                        title)

                            items[mode].append(item)

                except Exception:
                    sickrage.srLogger.error(
                        "Failed parsing provider. Traceback: {}".format(
                            traceback.format_exc()))

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
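
Example no. 15, like most of the private-tracker examples that follow, guards the search with self._doLogin(), which none of the snippets define. A minimal sketch of a cookie-based login, assuming hypothetical self.session (a requests.Session), self.urls['login'], self.username and self.password attributes, might read:

import requests


def _doLogin(self):
    # Reuse an existing session cookie if we are already logged in.
    if any(self.session.cookies.values()):
        return True

    login_params = {'username': self.username, 'password': self.password}
    try:
        response = self.session.post(self.urls['login'], data=login_params, timeout=30)
    except requests.RequestException:
        return False

    # Many trackers return HTTP 200 and echo an error string on bad credentials.
    return response.ok and 'Login failed' not in response.text

Real providers differ in form field names and in how they detect failure, so treat this only as the shape of the call the examples rely on.
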
Example no. 16
0
    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):

        size = -1
        seeders = 1
        leechers = 0

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " % search_string)

                searchURL = self.url + '/recherche/' + search_string.replace('.', '-') + '.html'
                sickrage.srLogger.debug("Search URL: %s" % searchURL)
                data = self.getURL(searchURL)

                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        lin = erlin = 0
                        resultdiv = []
                        while erlin == 0:
                            try:
                                classlin = 'ligne' + str(lin)
                                resultlin = html.findAll(attrs={'class': [classlin]})
                                if resultlin:
                                    for ele in resultlin:
                                        resultdiv.append(ele)
                                    lin += 1
                                else:
                                    erlin = 1
                            except Exception:
                                erlin = 1

                        for row in resultdiv:
                            try:
                                link = row.find("a", title=True)
                                title = link.text.lower().strip()
                                pageURL = link[b'href']

                                # downloadTorrentLink = torrentSoup.find("a", title.startswith('Cliquer'))
                                tmp = pageURL.split('/')[-1].replace('.html', '.torrent')

                                downloadTorrentLink = ('http://www.cpasbien.io/telechargement/%s' % tmp)

                                if downloadTorrentLink:
                                    download_url = downloadTorrentLink
                                    size = -1
                                    seeders = 1
                                    leechers = 0

                            except (AttributeError, TypeError):
                                continue

                            if not all([title, download_url]):
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " % title)

                            items[mode].append(item)

                except Exception as e:
                    sickrage.srLogger.error("Failed parsing provider. Traceback: %s" % traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
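
Example no. 16 collects result rows by probing the CSS classes ligne0, ligne1, ... one counter at a time. The same lookup can be done in a single query; a sketch, assuming the rows really are tagged with sequential ligneN classes:

import re


def collect_result_rows(soup):
    # One pass over the tree instead of the incrementing ligne0/ligne1 loop;
    # returns the rows in document order rather than grouped by class index.
    return soup.find_all(class_=re.compile(r'^ligne\d+$'))
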
Example no. 17
0
    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        freeleech = '&free=on' if self.freeleech else ''

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " % search_string)

                # URL with 50 tv-show results, or max 150 if adjusted in IPTorrents profile
                searchURL = self.urls['search'] % (self.categories, freeleech, search_string)
                searchURL += ';o=seeders' if mode is not 'RSS' else ''
                sickrage.srLogger.debug("Search URL: %s" % searchURL)

                data = self.getURL(searchURL)
                if not data:
                    continue

                try:
                    data = re.sub(r'(?im)<button.+?<[/]button>', '', data, 0)
                    with bs4_parser(data) as html:
                        if not html:
                            sickrage.srLogger.debug("No data returned from provider")
                            continue

                        if html.find(text='No Torrents Found!'):
                            sickrage.srLogger.debug("Data returned from provider does not contain any torrents")
                            continue

                        torrent_table = html.find('table', attrs={'class': 'torrents'})
                        torrents = torrent_table.find_all('tr') if torrent_table else []

                        # Continue only if one Release is found
                        if len(torrents) < 2:
                            sickrage.srLogger.debug("Data returned from provider does not contain any torrents")
                            continue

                        for result in torrents[1:]:
                            try:
                                title = result.find_all('td')[1].find('a').text
                                download_url = self.urls['base_url'] + result.find_all('td')[3].find('a')['href']
                                size = self._convertSize(result.find_all('td')[5].text)
                                seeders = int(result.find('td', attrs={'class': 'ac t_seeders'}).text)
                                leechers = int(result.find('td', attrs={'class': 'ac t_leechers'}).text)
                            except (AttributeError, TypeError, KeyError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                            "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
                                                    title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " % title)

                            items[mode].append(item)

                except Exception as e:
                    sickrage.srLogger.error("Failed parsing provider. Error: %r" % e)

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
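
Example no. 17 converts the human-readable size column with self._convertSize(), another helper that never appears in these snippets. A minimal sketch, assuming sizes formatted like '1.4 GB' or '1,024 KB' (commas treated as thousands separators) and returning -1 on failure, which is the sentinel the surrounding code uses:

import re


def _convertSize(self, size_string):
    # Parse "<number> <unit>" into a byte count; -1 mirrors the fallback
    # the calling code uses when the size cannot be determined.
    match = re.match(r'([\d,.]+)\s*([KMGT]?i?B)', size_string.strip(), re.I)
    if not match:
        return -1

    value = float(match.group(1).replace(',', ''))
    unit = match.group(2).upper().replace('IB', 'B')
    multipliers = {'B': 1, 'KB': 1024, 'MB': 1024 ** 2,
                   'GB': 1024 ** 3, 'TB': 1024 ** 4}
    return int(value * multipliers.get(unit, 1))
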
Example no. 18
0
    def _doSearch(self,
                  search_params,
                  search_mode='eponly',
                  epcount=0,
                  age=0,
                  epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        # check for auth
        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " %
                                            search_string)

                searchURL = self.urlsearch % (urllib.quote(search_string),
                                              self.categories)
                sickrage.srLogger.debug("Search URL: %s" % searchURL)
                data = self.getURL(searchURL)

                if not data:
                    continue

                with bs4_parser(data) as html:
                    resultsTable = html.find(
                        "table", {"class": "table2 table-bordered2"})
                    if resultsTable:
                        rows = resultsTable.findAll("tr")
                        for row in rows:
                            link = row.find("a",
                                            href=re.compile("details.php"))
                            if link:
                                title = link.text
                                download_url = self.url + '/' + row.find(
                                    "a",
                                    href=re.compile("download.php"))['href']
                                # FIXME
                                size = -1
                                seeders = 1
                                leechers = 0

                                if not all([title, download_url]):
                                    continue

                                # Filter unseeded torrent
                                # if seeders < self.minseed or leechers < self.minleech:
                                #    if mode is not 'RSS':
                                #        LOGGER.debug(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers))
                                #    continue

                                item = title, download_url, size, seeders, leechers
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                        "Found result: %s " % title)

                                items[mode].append(item)

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Example no. 19
0
    def _doSearch(self,
                  search_params,
                  search_mode='eponly',
                  epcount=0,
                  age=0,
                  epObj=None):

        size = -1
        seeders = 1
        leechers = 0

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " %
                                            search_string)

                searchURL = self.url + '/recherche/' + search_string.replace(
                    '.', '-') + '.html'
                sickrage.srLogger.debug("Search URL: %s" % searchURL)
                data = self.getURL(searchURL)

                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        lin = erlin = 0
                        resultdiv = []
                        while erlin == 0:
                            try:
                                classlin = 'ligne' + str(lin)
                                resultlin = html.findAll(
                                    attrs={'class': [classlin]})
                                if resultlin:
                                    for ele in resultlin:
                                        resultdiv.append(ele)
                                    lin += 1
                                else:
                                    erlin = 1
                            except Exception:
                                erlin = 1

                        for row in resultdiv:
                            try:
                                link = row.find("a", title=True)
                                title = link.text.lower().strip()
                                pageURL = link[b'href']

                                # downloadTorrentLink = torrentSoup.find("a", title.startswith('Cliquer'))
                                tmp = pageURL.split('/')[-1].replace(
                                    '.html', '.torrent')

                                downloadTorrentLink = (
                                    'http://www.cpasbien.io/telechargement/%s'
                                    % tmp)

                                if downloadTorrentLink:
                                    download_url = downloadTorrentLink
                                    size = -1
                                    seeders = 1
                                    leechers = 0

                            except (AttributeError, TypeError):
                                continue

                            if not all([title, download_url]):
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " %
                                                        title)

                            items[mode].append(item)

                except Exception as e:
                    sickrage.srLogger.error(
                        "Failed parsing provider. Traceback: %s" %
                        traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Example no. 20
0
    def _doSearch(self,
                  search_params,
                  search_mode='eponly',
                  epcount=0,
                  age=0,
                  epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        freeleech = '&free=on' if self.freeleech else ''

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " %
                                            search_string)

                # URL with 50 tv-show results, or max 150 if adjusted in IPTorrents profile
                searchURL = self.urls['search'] % (self.categories, freeleech,
                                                   search_string)
                searchURL += ';o=seeders' if mode is not 'RSS' else ''
                sickrage.srLogger.debug("Search URL: %s" % searchURL)

                data = self.getURL(searchURL)
                if not data:
                    continue

                try:
                    data = re.sub(r'(?im)<button.+?<[/]button>', '', data, 0)
                    with bs4_parser(data) as html:
                        if not html:
                            sickrage.srLogger.debug(
                                "No data returned from provider")
                            continue

                        if html.find(text='No Torrents Found!'):
                            sickrage.srLogger.debug(
                                "Data returned from provider does not contain any torrents"
                            )
                            continue

                        torrent_table = html.find('table',
                                                  attrs={'class': 'torrents'})
                        torrents = torrent_table.find_all(
                            'tr') if torrent_table else []

                        # Continue only if one Release is found
                        if len(torrents) < 2:
                            sickrage.srLogger.debug(
                                "Data returned from provider does not contain any torrents"
                            )
                            continue

                        for result in torrents[1:]:
                            try:
                                title = result.find_all('td')[1].find('a').text
                                download_url = self.urls[
                                    'base_url'] + result.find_all(
                                        'td')[3].find('a')['href']
                                size = self._convertSize(
                                    result.find_all('td')[5].text)
                                seeders = int(
                                    result.find('td',
                                                attrs={
                                                    'class': 'ac t_seeders'
                                                }).text)
                                leechers = int(
                                    result.find('td',
                                                attrs={
                                                    'class': 'ac t_leechers'
                                                }).text)
                            except (AttributeError, TypeError, KeyError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                        "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})"
                                        .format(title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " %
                                                        title)

                            items[mode].append(item)

                except Exception as e:
                    sickrage.srLogger.error(
                        "Failed parsing provider. Error: %r" % e)

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
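
The seeder/leecher filter and the sort on tuple index 3 are repeated verbatim in nearly every example above and below. If they were factored out, a sketch of the shared helper (the item layout — title, url, size, seeders, leechers — is taken from the examples) would be:

def filter_and_sort(items, minseed, minleech):
    # Keep only results that meet both minimums, then order by seeder count
    # so the best-seeded release is picked first.
    kept = [item for item in items
            if item[3] >= minseed and item[4] >= minleech]
    kept.sort(key=lambda tup: tup[3], reverse=True)
    return kept

A caller would then do, for instance, results += filter_and_sort(items[mode], self.minseed, self.minleech) at the end of each mode.
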
Example no. 21
0
    def _doSearch(self,
                  search_params,
                  search_mode='eponly',
                  epcount=0,
                  age=0,
                  epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " %
                                            search_string)

                searchURL = self.urls['search'] % (urllib.quote(search_string),
                                                   self.categories)
                sickrage.srLogger.debug("Search URL: %s" % searchURL)

                data = self.getURL(searchURL)
                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        # Continue only if one Release is found
                        empty = html.find('Nothing found!')
                        if empty:
                            sickrage.srLogger.debug(
                                "Data returned from provider does not contain any torrents"
                            )
                            continue

                        torrent_table = html.find('table',
                                                  attrs={'border': '1'})
                        torrent_rows = torrent_table.find_all(
                            'tr') if torrent_table else []

                        for result in torrent_rows[1:]:
                            cells = result.find_all('td')
                            size = None
                            link = cells[1].find('a', attrs={'class': 'index'})

                            full_id = link[b'href'].replace(
                                'details.php?id=', '')
                            torrent_id = full_id.split("&")[0]

                            # Free leech torrents are marked with green [F L] in the title (i.e. <font color=green>[F&nbsp;L]</font>)
                            freeleechTag = cells[1].find(
                                'font', attrs={'color': 'green'})
                            if freeleechTag and freeleechTag.text == '[F&nbsp;L]':
                                isFreeleechTorrent = True
                            else:
                                isFreeleechTorrent = False

                            if self.freeleech and not isFreeleechTorrent:
                                continue

                            try:
                                if link.has_key('title'):
                                    title = cells[1].find(
                                        'a', {'class': 'index'})['title']
                                else:
                                    title = link.contents[0]
                                download_url = self.urls['download'] % (
                                    torrent_id, link.contents[0])
                                seeders = int(
                                    cells[8].find('span').contents[0])
                                leechers = int(
                                    cells[9].find('span').contents[0])

                                # Need size for failed downloads handling
                                if size is None:
                                    if re.match(
                                            r'[0-9]+,?\.?[0-9]*[KkMmGg]+[Bb]+',
                                            cells[6].text):
                                        size = self._convertSize(cells[6].text)
                                        if not size:
                                            size = -1

                            except (AttributeError, TypeError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                        "Discarding torrent because it doesn't meet the minimum seeders or leechers: {} (S:{} L:{})"
                                        .format(title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " %
                                                        title)

                            items[mode].append(item)

                except Exception as e:
                    sickrage.srLogger.error(
                        "Failed parsing provider. Traceback: %s" %
                        traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
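
Example no. 21 compares the green tag's text against the literal '[F&nbsp;L]'. BeautifulSoup normally decodes that entity to a non-breaking space, so a check that tolerates both the raw entity and the decoded character is safer; a small sketch:

def is_freeleech(title_cell):
    # True when the title cell carries the green [F L] freeleech marker.
    tag = title_cell.find('font', attrs={'color': 'green'})
    if not tag:
        return False
    text = tag.get_text().replace(u'\xa0', ' ').replace('&nbsp;', ' ')
    return text == '[F L]'
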
Example no. 22
0
    def _doSearch(self,
                  search_strings,
                  search_mode='eponly',
                  epcount=0,
                  age=0,
                  epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        # check for auth
        if not self._doLogin():
            return results

        for mode in search_strings.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_strings[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " %
                                            search_string)

                self.search_params[b'recherche'] = search_string

                data = self.getURL(self.urls['search'],
                                   params=self.search_params)
                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        result_table = html.find('table',
                                                 {'id': 'tablealign3bis'})

                        if not result_table:
                            sickrage.srLogger.debug(
                                "Data returned from provider does not contain any torrents"
                            )
                            continue

                        if result_table:
                            rows = result_table.findAll(
                                "tr", {"class": "ligntorrent"})

                            for row in rows:
                                link = row.findAll('td')[1].find(
                                    "a", href=re.compile("fiche_film"))

                                if link:
                                    try:
                                        title = link.text
                                        download_url = self.urls['base_url'] + "/" + \
                                                       row.find("a", href=re.compile(r"download\.php"))['href']
                                    except (AttributeError, TypeError):
                                        continue

                                    try:
                                        detailseedleech = link[b'mtcontent']
                                        seeders = int(
                                            detailseedleech.split(
                                                "<font color='#00b72e'>")
                                            [1].split("</font>")[0])
                                        leechers = int(
                                            detailseedleech.split(
                                                "<font color='red'>")[1].split(
                                                    "</font>")[0])
                                        # FIXME
                                        size = -1
                                    except Exception:
                                        sickrage.srLogger.debug(
                                            "Unable to parse torrent id & seeders & leechers. Traceback: %s "
                                            % traceback.format_exc())
                                        continue

                                    if not all([title, download_url]):
                                        continue

                                    # Filter unseeded torrent
                                    if seeders < self.minseed or leechers < self.minleech:
                                        if mode is not 'RSS':
                                            sickrage.srLogger.debug(
                                                "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})"
                                                .format(
                                                    title, seeders, leechers))
                                        continue

                                    item = title, download_url, size, seeders, leechers
                                    if mode is not 'RSS':
                                        sickrage.srLogger.debug(
                                            "Found result: %s " % title)

                                    items[mode].append(item)

                except Exception as e:
                    sickrage.srLogger.error(
                        "Failed parsing provider. Traceback: %s" %
                        traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
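
Example no. 22 digs the seeder and leecher counts out of the row's mtcontent attribute by splitting on hard-coded <font> markup. A regex over the same fragment reads more directly; a sketch, assuming the attribute keeps the two colour-coded numbers shown above:

import re


def parse_seed_leech(mtcontent):
    # Green number = seeders, red number = leechers, as in the split-based
    # version above; (0, 0) when either is missing.
    seed = re.search(r"color='#00b72e'>(\d+)</font>", mtcontent)
    leech = re.search(r"color='red'>(\d+)</font>", mtcontent)
    return (int(seed.group(1)) if seed else 0,
            int(leech.group(1)) if leech else 0)
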
Example no. 23
0
    def _doSearch(self,
                  search_params,
                  search_mode='eponly',
                  epcount=0,
                  age=0,
                  epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " %
                                            search_string)

                searchURL = self.urls['search'] % (urllib.quote(
                    search_string.encode('utf-8')), self.categories)
                sickrage.srLogger.debug("Search URL: %s" % searchURL)

                data = self.getURL(searchURL)
                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        # Continue only if one Release is found
                        empty = html.find(
                            'h2', text="No .torrents fit this filter criteria")
                        if empty:
                            sickrage.srLogger.debug(
                                "Data returned from provider does not contain any torrents"
                            )
                            continue

                        torrent_table = html.find(
                            'table',
                            attrs={'style': 'border: none; width: 100%;'})
                        if not torrent_table:
                            sickrage.srLogger.error(
                                "Could not find table of torrents")
                            continue

                        torrent_rows = torrent_table.find_all(
                            'tr', attrs={'class': 'browse'})

                        for result in torrent_rows:
                            cells = result.find_all('td')
                            size = None
                            link = cells[1].find(
                                'a',
                                attrs={
                                    'style':
                                    'font-size: 1.25em; font-weight: bold;'
                                })

                            torrent_id = link[b'href'].replace(
                                'details.php?id=', '')

                            try:
                                if link.has_key('title'):
                                    title = link[b'title']
                                else:
                                    title = link.contents[0]

                                download_url = self.urls['download'] % (
                                    torrent_id, link.contents[0])
                                seeders = int(cells[9].contents[0])
                                leechers = int(cells[10].contents[0])

                                # Need size for failed downloads handling
                                if size is None:
                                    if re.match(
                                            r'[0-9]+,?\.?[0-9]*[KkMmGg]+[Bb]+',
                                            cells[7].text):
                                        size = self._convertSize(cells[7].text)
                                        if not size:
                                            size = -1

                            except (AttributeError, TypeError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                        "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})"
                                        .format(title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " %
                                                        title)

                            items[mode].append(item)

                except Exception as e:
                    sickrage.srLogger.error(
                        "Failed parsing provider. Traceback: %s" %
                        traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
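
Example no. 23 (like Example no. 21 before it) tests link.has_key('title'), which only exists on Python 2. On a BeautifulSoup Tag the portable spellings are has_attr() or get(); a short sketch of the equivalent check:

def safe_title(link):
    # Python 3-safe replacement for link.has_key('title') on a bs4 Tag;
    # equivalently: link.get('title') or link.contents[0].
    if link.has_attr('title'):
        return link['title']
    return link.contents[0]
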
Example no. 24
0
    def _doSearch(self,
                  search_strings,
                  search_mode='eponly',
                  epcount=0,
                  age=0,
                  epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_strings.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_strings[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " %
                                            search_string)

                self.search_params[b'keywords'] = search_string.strip()
                data = self.getURL(self.urls['search'],
                                   params=self.search_params)
                # url_searched = self.urls['search'] + '?' + urlencode(self.search_params)

                if not data:
                    sickrage.srLogger.debug("No data returned from provider")
                    continue

                with bs4_parser(data) as html:
                    torrent_table = html.find(id='listtorrents').find_all('tr')
                    for torrent in torrent_table:
                        try:
                            title = torrent.find(attrs={
                                'class': 'tooltip-content'
                            }).text.strip()
                            download_url = torrent.find(
                                title="Click to Download this Torrent!"
                            ).parent[b'href'].strip()
                            seeders = int(
                                torrent.find(title='Seeders').text.strip())
                            leechers = int(
                                torrent.find(title='Leechers').text.strip())

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                        "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})"
                                        .format(title, seeders, leechers))
                                continue

                            # Chop off tracker/channel prefix or we can't parse the result!
                            show_name_first_word = re.search(
                                r'^[^ .]+',
                                self.search_params[b'keywords']).group()
                            if not title.startswith(show_name_first_word):
                                title = re.match(
                                    r'(.*)(' + show_name_first_word + '.*)',
                                    title).group(2)

                            # Change title from Series to Season, or we can't parse
                            if 'Series' in self.search_params[b'keywords']:
                                title = re.sub(r'(?i)series', 'Season', title)

                            # Strip year from the end or we can't parse it!
                            title = re.sub(r'[\. ]?\(\d{4}\)', '', title)

                            # FIXME
                            size = -1

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " %
                                                        title)

                            items[mode].append(item)

                        except Exception:
                            continue

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
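
Example no. 24 massages the scraped title in three steps before it can be parsed: chop a tracker/channel prefix, rename "Series" to "Season", and strip a trailing year. Pulled out into a helper (the function name is mine, not the provider's), the same clean-up reads:

import re


def normalize_title(title, keywords):
    # keywords is the search string whose first word anchors the show name.
    first_word = re.search(r'^[^ .]+', keywords).group()

    # Chop off any tracker/channel prefix before the show name.
    if not title.startswith(first_word):
        match = re.match(r'(.*)(' + re.escape(first_word) + '.*)', title)
        if match:
            title = match.group(2)

    # The site labels full seasons "Series"; the parser expects "Season".
    if 'Series' in keywords:
        title = re.sub(r'(?i)series', 'Season', title)

    # Strip a trailing "(2014)"-style year the parser cannot handle.
    return re.sub(r'[\. ]?\(\d{4}\)', '', title)
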
Example no. 25
0
    def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        # check for auth
        if not self._doLogin():
            return results

        for mode in search_strings.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_strings[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " % search_string)

                self.search_params[b'search'] = search_string

                data = self.getURL(self.urls['search'], params=self.search_params)
                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        result_linkz = html.findAll('a', href=re.compile("torrents-details"))

                        if not result_linkz:
                            sickrage.srLogger.debug("Data returned from provider do not contains any torrent")
                            continue

                        if result_linkz:
                            for link in result_linkz:
                                title = link.text
                                download_url = self.urls['base_url'] + "/" + link[b'href']
                                download_url = download_url.replace("torrents-details", "download")
                                # FIXME
                                size = -1
                                seeders = 1
                                leechers = 0

                                if not title or not download_url:
                                    continue

                                # Filter unseeded torrent
                                # if seeders < self.minseed or leechers < self.minleech:
                                #    if mode is not 'RSS':
                                #        LOGGER.debug(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers))
                                #    continue

                                item = title, download_url, size, seeders, leechers
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug("Found result: %s " % title)

                                items[mode].append(item)

                except Exception as e:
                    sickrage.srLogger.error("Failed parsing provider. Traceback: %s" % traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Example no. 26
0
    def _doSearch(self,
                  search_params,
                  search_mode='eponly',
                  epcount=0,
                  age=0,
                  epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        self.categories = "cat=" + str(self.cat)

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode is 'RSS':
                    self.page = 2

                last_page = 0
                y = int(self.page)

                if search_string == '':
                    continue

                search_string = str(search_string).replace('.', ' ')

                for x in range(0, y):
                    z = x * 20
                    if last_page:
                        break

                    if mode is not 'RSS':
                        searchURL = (self.urls['search_page'] +
                                     '&filter={2}').format(
                                         z, self.categories, search_string)
                    else:
                        searchURL = self.urls['search_page'].format(
                            z, self.categories)

                    if mode is not 'RSS':
                        sickrage.srLogger.debug("Search string: %s " %
                                                search_string)

                    sickrage.srLogger.debug("Search URL: %s" % searchURL)
                    data = self.getURL(searchURL)
                    if not data:
                        sickrage.srLogger.debug(
                            "No data returned from provider")
                        continue

                    try:
                        with bs4_parser(data) as html:
                            torrent_table = html.find(
                                'table', attrs={'class': 'copyright'})
                            torrent_rows = torrent_table.find_all(
                                'tr') if torrent_table else []

                            # Continue only if one Release is found
                            if len(torrent_rows) < 3:
                                sickrage.srLogger.debug(
                                    "Data returned from provider does not contain any torrents"
                                )
                                last_page = 1
                                continue

                            if len(torrent_rows) < 42:
                                last_page = 1

                            for result in torrent_table.find_all('tr')[2:]:

                                try:
                                    link = result.find('td').find('a')
                                    title = link.string
                                    download_url = self.urls[
                                        'download'] % result.find_all(
                                            'td')[8].find('a')['href'][-8:]
                                    leechers = result.find_all(
                                        'td')[3].find_all('td')[1].text
                                    leechers = int(leechers.strip('[]'))
                                    seeders = result.find_all(
                                        'td')[3].find_all('td')[2].text
                                    seeders = int(seeders.strip('[]'))
                                    # FIXME
                                    size = -1
                                except (AttributeError, TypeError):
                                    continue

                                filename_qt = self._reverseQuality(
                                    self._episodeQuality(result))
                                for text in self.hdtext:
                                    title1 = title
                                    title = title.replace(text, filename_qt)
                                    if title != title1:
                                        break

                                if Quality.nameQuality(
                                        title) == Quality.UNKNOWN:
                                    title += filename_qt

                                if not self._is_italian(
                                        result) and not self.subtitle:
                                    sickrage.srLogger.debug(
                                        "Torrent is subtitled, skipping: %s " %
                                        title)
                                    continue

                                if self.engrelease and not self._is_english(
                                        result):
                                    sickrage.srLogger.debug(
                                        "Torrent isnt english audio/subtitled , skipping: %s "
                                        % title)
                                    continue

                                search_show = re.split(r'([Ss][\d{1,2}]+)',
                                                       search_string)[0]
                                show_title = search_show
                                rindex = re.search(r'([Ss][\d{1,2}]+)', title)
                                if rindex:
                                    show_title = title[:rindex.start()]
                                    ep_params = title[rindex.start():]

                                if show_title.lower() != search_show.lower() \
                                        and search_show.lower() in show_title.lower() and ep_params:
                                    title = search_show + ep_params

                                if not all([title, download_url]):
                                    continue

                                if self._is_season_pack(title):
                                    title = re.sub(r'([Ee][\d{1,2}\-?]+)', '',
                                                   title)

                                # Filter unseeded torrent
                                if seeders < self.minseed or leechers < self.minleech:
                                    if mode is not 'RSS':
                                        sickrage.srLogger.debug(
                                            "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})"
                                            .format(title, seeders, leechers))
                                    continue

                                item = title, download_url, size, seeders, leechers
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                        "Found result: %s " % title)

                                items[mode].append(item)

                    except Exception:
                        sickrage.srLogger.error(
                            "Failed parsing provider. Traceback: %s" %
                            traceback.format_exc())

                # For each search mode sort all the items by seeders if available
                items[mode].sort(key=lambda tup: tup[3], reverse=True)

                results += items[mode]

        return results
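
The block in Example no. 26 that realigns the scraped title with the search string around the SxxEyy token is dense. Rewritten as a standalone helper (the name is mine, and the season/episode pattern is written as r'[Ss]\d{1,2}', which is what the original character class appears to intend), the logic is:

import re


def align_title_with_search(title, search_string):
    # If the text before the season token merely contains the searched show
    # name, swap it for the exact search spelling so the parser recognizes it.
    season_token = re.compile(r'([Ss]\d{1,2})')
    search_show = season_token.split(search_string)[0]
    match = season_token.search(title)
    if not match:
        return title

    show_title = title[:match.start()]
    ep_params = title[match.start():]
    if (show_title.lower() != search_show.lower()
            and search_show.lower() in show_title.lower()):
        return search_show + ep_params
    return title
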
Example no. 27
0
    def _doSearch(self,
                  search_strings,
                  search_mode='eponly',
                  epcount=0,
                  age=0,
                  epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_strings.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_strings[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " %
                                            search_string)

                self.search_params[b'searchstr'] = search_string

                data = self.getURL(self.urls['search'],
                                   params=self.search_params)
                startTableIndex = data.find("<table class=\"torrent_table")
                data = data[startTableIndex:]
                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        result_table = html.find('table',
                                                 {'id': 'torrent_table'})

                        if not result_table:
                            sickrage.srLogger.debug(
                                "Data returned from provider does not contain any torrents"
                            )
                            continue

                        result_tbody = result_table.find('tbody')
                        entries = result_tbody.contents
                        del entries[1::2]

                        for result in entries[1:]:

                            torrent = result.find_all('td')
                            if len(torrent) <= 1:
                                break

                            allAs = (torrent[1]).find_all('a')

                            try:
                                # link = self.urls['base_url'] + allAs[2].attrs[b'href']
                                # url = result.find('td', attrs={'class': 'quickdownload'}).find('a')
                                title = allAs[2].string
                                # Trim the title so it passes the scene-name check (feature requested in the forum)
                                title = title.replace("custom.", "")
                                title = title.replace("CUSTOM.", "")
                                title = title.replace("Custom.", "")
                                title = title.replace("dk", "")
                                title = title.replace("DK", "")
                                title = title.replace("Dk", "")
                                title = title.replace("subs.", "")
                                title = title.replace("SUBS.", "")
                                title = title.replace("Subs.", "")

                                download_url = self.urls['base_url'] + allAs[
                                    0].attrs[b'href']
                                # FIXME
                                size = -1
                                seeders = 1
                                leechers = 0

                            except (AttributeError, TypeError):
                                continue

                            if not title or not download_url:
                                continue

                            # Filter unseeded torrent
                            # if seeders < self.minseed or leechers < self.minleech:
                            #    if mode is not 'RSS':
                            #        LOGGER.debug(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers))
                            #    continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " %
                                                        title)

                            items[mode].append(item)

                except Exception as e:
                    sickrage.srLogger.error(
                        "Failed parsing provider. Traceback: %s" %
                        traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
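
Example no. 27 strips the tracker's local markers from the title with nine chained replace() calls. A single case-insensitive substitution covers the same substrings; a sketch:

import re


def clean_title(title):
    # Remove the "custom.", "subs." and "dk" markers in one pass, matching
    # the same substrings as the replace() chain above regardless of case.
    return re.sub(r'(?i)(custom\.|subs\.|dk)', '', title)
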
Example no. 28
0
    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        # freeleech = '3' if self.freeleech else '0'

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " % search_string)

                searchURL = self.urls['search'] % (search_string.replace('(', '').replace(')', ''))
                sickrage.srLogger.debug("Search URL: %s" % searchURL)

                # returns top 15 results by default, expandable in user profile to 100
                data = self.getURL(searchURL)
                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        torrent_table = html.find('table', attrs={'class': 'torrent_table'})
                        torrent_rows = torrent_table.findChildren('tr') if torrent_table else []

                        # Continue only if one Release is found
                        if len(torrent_rows) < 2:
                            sickrage.srLogger.debug("Data returned from provider does not contain any torrents")
                            continue

                        # skip colheader
                        for result in torrent_rows[1:]:
                            cells = result.findChildren('td')
                            link = cells[1].find('span', attrs={'title': 'Download'}).parent
                            title_anchor = cells[1].find('a', attrs = {'dir': 'ltr'})

                            # skip if torrent has been nuked due to poor quality
                            if cells[1].find('img', alt='Nuked') is not None:
                                continue

                            torrent_id_long = link[b'href'].replace('torrents.php?action=download&id=', '')

                            try:
                                if title_anchor.has_attr('title'):
                                    title = cells[1].find('a', {'title': 'View torrent'}).contents[0].strip()
                                else:
                                    title = title_anchor.contents[0]
                                download_url = self.urls['download'] % torrent_id_long

                                seeders = int(cells[6].contents[0])
                                leechers = int(cells[7].contents[0])

                                size = -1
                                if re.match(r'\d+([,\.]\d+)?\s*[KkMmGgTt]?[Bb]', cells[4].contents[0]):
                                    size = self._convertSize(cells[4].text.strip())

                            except (AttributeError, TypeError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                            "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
                                                    title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " % title)

                            items[mode].append(item)

                except Exception as e:
                    sickrage.srLogger.error("Failed parsing provider. Traceback: %s" % traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
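This provider (and several below) converts the reported torrent size with a self._convertSize helper that is not included in these snippets. A minimal, hypothetical sketch of such a parser, written as a plain function; the unit handling and the -1 failure value are assumptions based on how the callers use it:

import re


def _convertSize(size_string):
    # Hypothetical helper: parse strings such as "1.2 GB" or "700 MB" into a
    # byte count, returning -1 when the string cannot be parsed (matching the
    # "size = -1" fallback used by the providers above).
    match = re.match(r'([\d.,]+)\s*([KMGT]?B)', size_string.strip(), re.I)
    if not match:
        return -1
    value = float(match.group(1).replace(',', '.'))
    unit = match.group(2).upper()
    multipliers = {'B': 1, 'KB': 1024, 'MB': 1024 ** 2,
                   'GB': 1024 ** 3, 'TB': 1024 ** 4}
    return int(value * multipliers[unit])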
Esempio n. 29
0
    def _doSearch(self,
                  search_strings,
                  search_mode='eponly',
                  epcount=0,
                  age=0,
                  epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_strings.keys():
            for search_string in search_strings[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " %
                                            search_string)

                data = self.getURL(self.urls['index'],
                                   params=self.search_params)
                searchURL = self.urls['index'] + "?" + urlencode(
                    self.search_params)
                sickrage.srLogger.debug("Search URL: %s" % searchURL)

                if not data:
                    sickrage.srLogger.debug("No data returned from provider")
                    continue

                try:
                    with bs4_parser(data) as html:

                        torrent_rows = []

                        down_elems = html.findAll("img",
                                                  {"alt": "Download Torrent"})
                        for down_elem in down_elems:
                            if down_elem:
                                torr_row = down_elem.findParent('tr')
                                if torr_row:
                                    torrent_rows.append(torr_row)

                        # Continue only if one Release is found
                        if len(torrent_rows) < 1:
                            sickrage.srLogger.debug(
                                "Data returned from provider does not contain any torrents"
                            )
                            continue

                        for torrent_row in torrent_rows:

                            title = torrent_row.find(
                                'a',
                                {"data-src": True})['data-src'].rsplit('.',
                                                                       1)[0]
                            download_href = torrent_row.find(
                                'img', {
                                    "alt": 'Download Torrent'
                                }).findParent()['href']
                            seeders = int(
                                torrent_row.findAll('a', {
                                    'title':
                                    'Click here to view peers details'
                                })[0].text.strip())
                            leechers = int(
                                torrent_row.findAll('a', {
                                    'title':
                                    'Click here to view peers details'
                                })[1].text.strip())
                            download_url = self.urls['base_url'] + download_href
                            # FIXME
                            size = -1

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                        "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})"
                                        .format(title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " %
                                                        title)

                            items[mode].append(item)

                except Exception:
                    sickrage.srLogger.error(
                        "Failed parsing provider. Traceback: {}".format(
                            traceback.format_exc()))

            # For each search mode sort all the items by seeders
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Esempio n. 30
0
    def fetch_popular_shows(self):
        """Get popular show information from IMDB"""

        popular_shows = []

        data = getURL(self.url,
                      session=self.session,
                      params=self.params,
                      headers={'Referer': 'http://akas.imdb.com/'})
        if not data:
            return None

        with bs4_parser(data) as soup:
            results = soup.find("table", {"class": "results"})
            rows = results.find_all("tr")

        for row in rows:
            show = {}
            image_td = row.find("td", {"class": "image"})

            if image_td:
                image = image_td.find("img")
                show[b'image_url_large'] = self.change_size(image[b'src'], 3)
                show[b'image_path'] = os.path.join(
                    'images', 'imdb_popular',
                    os.path.basename(show[b'image_url_large']))

                self.cache_image(show[b'image_url_large'])

            td = row.find("td", {"class": "title"})

            if td:
                show[b'name'] = td.find("a").contents[0]
                show[b'imdb_url'] = "http://www.imdb.com" + td.find(
                    "a")["href"]
                show[b'imdb_tt'] = show[b'imdb_url'][-10:][0:9]
                show[b'year'] = td.find("span", {
                    "class": "year_type"
                }).contents[0].split(" ")[0][1:]

                rating_all = td.find("div", {"class": "user_rating"})
                if rating_all:
                    rating_string = rating_all.find(
                        "div", {"class": "rating rating-list"})
                    if rating_string:
                        rating_string = rating_string[b'title']

                        match = re.search(r".* (.*)\/10.*\((.*)\).*",
                                          rating_string)
                        if match:
                            matches = match.groups()
                            show[b'rating'] = matches[0]
                            show[b'votes'] = matches[1]
                        else:
                            show[b'rating'] = None
                            show[b'votes'] = None
                else:
                    show[b'rating'] = None
                    show[b'votes'] = None

                outline = td.find("span", {"class": "outline"})
                if outline:
                    show[b'outline'] = outline.contents[0]
                else:
                    show[b'outline'] = ''

                popular_shows.append(show)

        return popular_shows
Esempio n. 31
0
    def _doSearch(self,
                  search_strings,
                  search_mode='eponly',
                  epcount=0,
                  age=0,
                  epObj=None):

        results = []

        if not self._doLogin():
            return results

        items = {'Season': [], 'Episode': [], 'RSS': []}

        for mode in search_strings.keys():
            if mode is not 'RSS':
                sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_strings[mode]:
                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " %
                                            search_string)

                searchURL = self.urls['search'] % (
                    urllib.quote(search_string), self.categories[search_mode])

                try:
                    sickrage.srLogger.debug("Search URL: %s" % searchURL)
                    data = self.getURL(searchURL)
                    time.sleep(cpu_presets[sickrage.srConfig.CPU_PRESET])
                except Exception as e:
                    sickrage.srLogger.warning(
                        "Unable to fetch data. Error: %s" % repr(e))
                    continue

                if not data:
                    continue

                with bs4_parser(data) as html:
                    torrent_table = html.find('table',
                                              attrs={'id': 'torrents-table'})
                    torrent_rows = torrent_table.find_all(
                        'tr') if torrent_table else []

                    # Continue only if at least one Release is found
                    if len(torrent_rows) < 2:
                        sickrage.srLogger.debug(
                            "Data returned from provider does not contain any torrents"
                        )
                        continue

                    for result in torrent_rows[1:]:

                        try:
                            link = result.find('td',
                                               attrs={
                                                   'class': 'ttr_name'
                                               }).find('a')
                            url = result.find('td', attrs={
                                'class': 'td_dl'
                            }).find('a')

                            title = link.string
                            if re.search(r'\.\.\.', title):
                                data = self.getURL(self.url + "/" +
                                                   link[b'href'])
                                if data:
                                    with bs4_parser(data) as details_html:
                                        title = re.search(
                                            '(?<=").+(?<!")',
                                            details_html.title.string).group(0)
                            download_url = self.urls['download'] % url[b'href']
                            seeders = int(
                                result.find('td',
                                            attrs={
                                                'class': 'ttr_seeders'
                                            }).string)
                            leechers = int(
                                result.find('td',
                                            attrs={
                                                'class': 'ttr_leechers'
                                            }).string)
                            size = self._convertSize(
                                result.find('td', attrs={
                                    'class': 'ttr_size'
                                }).contents[0])
                        except (AttributeError, TypeError):
                            continue

                        if not all([title, download_url]):
                            continue

                        # Filter unseeded torrent
                        if seeders < self.minseed or leechers < self.minleech:
                            if mode is not 'RSS':
                                sickrage.srLogger.debug(
                                    "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})"
                                    .format(title, seeders, leechers))
                            continue

                        item = title, download_url, size, seeders, leechers
                        if mode is not 'RSS':
                            sickrage.srLogger.debug("Found result: %s " %
                                                    title)

                        items[mode].append(item)

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Esempio n. 32
0
    def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_strings.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_strings[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " % search_string)

                self.search_params[b'keywords'] = search_string.strip()
                data = self.getURL(self.urls['search'], params=self.search_params)
                # url_searched = self.urls['search'] + '?' + urlencode(self.search_params)

                if not data:
                    sickrage.srLogger.debug("No data returned from provider")
                    continue

                with bs4_parser(data) as html:
                    torrent_table = html.find(id='listtorrents').find_all('tr')
                    for torrent in torrent_table:
                        try:
                            title = torrent.find(attrs={'class': 'tooltip-content'}).text.strip()
                            download_url = torrent.find(title="Click to Download this Torrent!").parent[b'href'].strip()
                            seeders = int(torrent.find(title='Seeders').text.strip())
                            leechers = int(torrent.find(title='Leechers').text.strip())

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                            "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
                                                    title, seeders, leechers))
                                continue

                            # Chop off the tracker/channel prefix or we can't parse the result!
                            show_name_first_word = re.search(r'^[^ .]+', self.search_params[b'keywords']).group()
                            if not title.startswith(show_name_first_word):
                                title = re.match(r'(.*)(' + show_name_first_word + '.*)', title).group(2)

                            # Change title from Series to Season, or we can't parse
                            if 'Series' in self.search_params[b'keywords']:
                                title = re.sub(r'(?i)series', 'Season', title)

                            # Strip year from the end or we can't parse it!
                            title = re.sub(r'[\. ]?\(\d{4}\)', '', title)

                            # FIXME
                            size = -1

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " % title)

                            items[mode].append(item)

                        except Exception:
                            continue

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Esempio n. 33
0
    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode is 'RSS':
                    searchURL = self.urls['index'] % self.categories
                else:
                    searchURL = self.urls['search'] % (
                        urllib.quote_plus(search_string.encode('utf-8')), self.categories)
                    sickrage.srLogger.debug("Search string: %s " % search_string)

                data = self.getURL(searchURL)
                sickrage.srLogger.debug("Search URL: %s" % searchURL)
                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        torrent_table = html.find('table', attrs={'id': 'torrenttable'})
                        torrent_rows = torrent_table.find_all('tr') if torrent_table else []

                        # Continue only if one Release is found
                        if len(torrent_rows) < 2:
                            sickrage.srLogger.debug("Data returned from provider does not contain any torrents")
                            continue

                        for result in torrent_rows[1:]:

                            try:
                                link = result.find('td', attrs={'class': 'name'}).find('a')
                                url = result.find('td', attrs={'class': 'quickdownload'}).find('a')
                                title = link.string
                                download_url = self.urls['download'] % url[b'href']
                                seeders = int(result.find('td', attrs={'class': 'seeders'}).string)
                                leechers = int(result.find('td', attrs={'class': 'leechers'}).string)
                                # FIXME
                                size = -1
                            except (AttributeError, TypeError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                            "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
                                                    title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " % title)

                            items[mode].append(item)

                except Exception:
                    sickrage.srLogger.error("Failed parsing provider. Traceback: {}".format(traceback.format_exc()))

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Esempio n. 34
0
    def _doSearch(self,
                  search_params,
                  search_mode='eponly',
                  epcount=0,
                  age=0,
                  epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " %
                                            search_string)

                searchURL = self.urls['search'] % (urllib.quote(search_string),
                                                   self.categories)
                sickrage.srLogger.debug("Search URL: %s" % searchURL)

                data = self.getURL(searchURL)
                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        torrent_table = html.select("#torrenttable table")
                        torrent_rows = torrent_table[0].select(
                            "tr") if torrent_table else []

                        # Continue only if one Release is found
                        if len(torrent_rows) < 2:
                            sickrage.srLogger.debug(
                                "Data returned from provider does not contain any torrents"
                            )
                            continue

                        # Scenetime apparently uses different number of cells in #torrenttable based
                        # on who you are. This works around that by extracting labels from the first
                        # <tr> and using their index to find the correct download/seeders/leechers td.
                        labels = [
                            label.get_text()
                            for label in torrent_rows[0].find_all('td')
                        ]

                        for result in torrent_rows[1:]:
                            cells = result.find_all('td')

                            link = cells[labels.index('Name')].find('a')

                            full_id = link[b'href'].replace(
                                'details.php?id=', '')
                            torrent_id = full_id.split("&")[0]

                            try:
                                title = link.contents[0].get_text()
                                filename = "%s.torrent" % title.replace(
                                    " ", ".")
                                download_url = self.urls['download'] % (
                                    torrent_id, filename)

                                seeders = int(
                                    cells[labels.index('Seeders')].get_text())
                                leechers = int(
                                    cells[labels.index('Leechers')].get_text())
                                # FIXME
                                size = -1

                            except (AttributeError, TypeError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                        "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})"
                                        .format(title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " %
                                                        title)

                            items[mode].append(item)

                except Exception:
                    sickrage.srLogger.error(
                        "Failed parsing provider. Traceback: {}".format(
                            traceback.format_exc()))

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Esempio n. 35
0
    def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_strings.keys():
            for search_string in search_strings[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " % search_string)

                data = self.getURL(self.urls['index'], params=self.search_params)
                searchURL = self.urls['index'] + "?" + urlencode(self.search_params)
                sickrage.srLogger.debug("Search URL: %s" % searchURL)

                if not data:
                    sickrage.srLogger.debug("No data returned from provider")
                    continue

                try:
                    with bs4_parser(data) as html:

                        torrent_rows = []

                        down_elems = html.findAll("img", {"alt": "Download Torrent"})
                        for down_elem in down_elems:
                            if down_elem:
                                torr_row = down_elem.findParent('tr')
                                if torr_row:
                                    torrent_rows.append(torr_row)

                        # Continue only if one Release is found
                        if len(torrent_rows) < 1:
                            sickrage.srLogger.debug("Data returned from provider does not contain any torrents")
                            continue

                        for torrent_row in torrent_rows:

                            title = torrent_row.find('a', {"data-src": True})['data-src'].rsplit('.', 1)[0]
                            download_href = torrent_row.find('img', {"alt": 'Download Torrent'}).findParent()['href']
                            seeders = int(
                                    torrent_row.findAll('a', {'title': 'Click here to view peers details'})[
                                        0].text.strip())
                            leechers = int(
                                    torrent_row.findAll('a', {'title': 'Click here to view peers details'})[
                                        1].text.strip())
                            download_url = self.urls['base_url'] + download_href
                            # FIXME
                            size = -1

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                            "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
                                                    title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " % title)

                            items[mode].append(item)

                except Exception:
                    sickrage.srLogger.error("Failed parsing provider. Traceback: {}".format(traceback.format_exc()))

            # For each search mode sort all the items by seeders
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Esempio n. 36
0
    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " % search_string)

                searchURL = self.urls['search'] % (self.categories, search_string)
                sickrage.srLogger.debug("Search URL: %s" % searchURL)

                # Set cookies from response
                self.headers.update({'Cookie': self.cookies})
                # Returns top 30 results by default, expandable in user profile
                data = self.getURL(searchURL)
                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        torrent_table = html.find("div", id="torrentBrowse")
                        torrent_rows = torrent_table.findChildren("tr") if torrent_table else []

                        # Continue only if at least one release is found
                        if len(torrent_rows) < 1:
                            sickrage.srLogger.debug("Data returned from provider does not contain any torrents")
                            continue

                        for result in torrent_rows[1:]:
                            cells = result.findChildren("td")
                            title = cells[1].find("a").find_next("a")
                            link = cells[3].find("a")
                            shares = cells[8].get_text().split("/", 1)
                            torrent_size = cells[7].get_text().split("/", 1)[0]

                            try:
                                if title.has_attr('title'):
                                    title = title[b'title']
                                else:
                                    title = cells[1].find("a")['title']

                                download_url = self.urls['download'] % (link[b'href'])
                                seeders = int(shares[0])
                                leechers = int(shares[1])

                                size = -1
                                if re.match(r"\d+([,\.]\d+)?\s*[KkMmGgTt]?[Bb]", torrent_size):
                                    size = self._convertSize(torrent_size.rstrip())

                            except (AttributeError, TypeError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                            "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
                                                    title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " % title)

                            items[mode].append(item)

                except Exception as e:
                    sickrage.srLogger.error("Failed parsing provider. Traceback: %s" % traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Esempio n. 37
0
    def _doSearch(self,
                  search_params,
                  search_mode='eponly',
                  epcount=0,
                  age=0,
                  epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        # freeleech = '3' if self.freeleech else '0'

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " %
                                            search_string)

                searchURL = self.urls['search'] % (search_string.replace(
                    '(', '').replace(')', ''))
                sickrage.srLogger.debug("Search URL: %s" % searchURL)

                # returns top 15 results by default, expandable in user profile to 100
                data = self.getURL(searchURL)
                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        torrent_table = html.find(
                            'table', attrs={'class': 'torrent_table'})
                        torrent_rows = torrent_table.findChildren(
                            'tr') if torrent_table else []

                        # Continue only if one Release is found
                        if len(torrent_rows) < 2:
                            sickrage.srLogger.debug(
                                "Data returned from provider does not contain any torrents"
                            )
                            continue

                        # skip colheader
                        for result in torrent_rows[1:]:
                            cells = result.findChildren('td')
                            link = cells[1].find('span',
                                                 attrs={
                                                     'title': 'Download'
                                                 }).parent
                            title_anchor = cells[1].find('a',
                                                         attrs={'dir': 'ltr'})

                            # skip if torrent has been nuked due to poor quality
                            if cells[1].find('img', alt='Nuked') is not None:
                                continue

                            torrent_id_long = link[b'href'].replace(
                                'torrents.php?action=download&id=', '')

                            try:
                                if title_anchor.has_attr('title'):
                                    title = cells[1].find(
                                        'a', {
                                            'title': 'View torrent'
                                        }).contents[0].strip()
                                else:
                                    title = title_anchor.contents[0]
                                download_url = self.urls[
                                    'download'] % torrent_id_long

                                seeders = int(cells[6].contents[0])
                                leechers = int(cells[7].contents[0])

                                size = -1
                                if re.match(
                                        r'\d+([,\.]\d+)?\s*[KkMmGgTt]?[Bb]',
                                        cells[4].contents[0]):
                                    size = self._convertSize(
                                        cells[4].text.strip())

                            except (AttributeError, TypeError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                        "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})"
                                        .format(title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " %
                                                        title)

                            items[mode].append(item)

                except Exception as e:
                    sickrage.srLogger.error(
                        "Failed parsing provider. Traceback: %s" %
                        traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Esempio n. 38
0
    def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_strings.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_strings[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " % search_string)

                self.search_params[b'searchstr'] = search_string

                data = self.getURL(self.urls['search'], params=self.search_params)
                if not data:
                    continue

                startTableIndex = data.find("<table class=\"torrent_table")
                if startTableIndex >= 0:
                    data = data[startTableIndex:]

                try:
                    with bs4_parser(data) as html:
                        result_table = html.find('table', {'id': 'torrent_table'})

                        if not result_table:
                            sickrage.srLogger.debug("Data returned from provider does not contain any torrents")
                            continue

                        result_tbody = result_table.find('tbody')
                        entries = result_tbody.contents
                        del entries[1::2]

                        for result in entries[1:]:

                            torrent = result.find_all('td')
                            if len(torrent) <= 1:
                                break

                            allAs = (torrent[1]).find_all('a')

                            try:
                                # link = self.urls['base_url'] + allAs[2].attrs[b'href']
                                # url = result.find('td', attrs={'class': 'quickdownload'}).find('a')
                                title = allAs[2].string
                                # Trim the title so it passes the scene-name check
                                # (this feature was requested in the forum)
                                for junk in ("custom.", "CUSTOM.", "Custom.", "dk", "DK", "Dk",
                                             "subs.", "SUBS.", "Subs."):
                                    title = title.replace(junk, "")

                                download_url = self.urls['base_url'] + allAs[0].attrs[b'href']
                                # FIXME
                                size = -1
                                seeders = 1
                                leechers = 0

                            except (AttributeError, TypeError):
                                continue

                            if not title or not download_url:
                                continue

                            # Filter unseeded torrent
                            # if seeders < self.minseed or leechers < self.minleech:
                            #    if mode is not 'RSS':
                            #        LOGGER.debug(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers))
                            #    continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " % title)

                            items[mode].append(item)

                except Exception as e:
                    sickrage.srLogger.error("Failed parsing provider. Traceback: %s" % traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Esempio n. 39
0
    def fetch_popular_shows(self):
        """Get popular show information from IMDB"""

        popular_shows = []

        data = getURL(self.url, session=self.session, params=self.params,
                      headers={'Referer': 'http://akas.imdb.com/'})
        if not data:
            return None

        with bs4_parser(data) as soup:
            results = soup.find("table", {"class": "results"})
            rows = results.find_all("tr")

        for row in rows:
            show = {}
            image_td = row.find("td", {"class": "image"})

            if image_td:
                image = image_td.find("img")
                show[b'image_url_large'] = self.change_size(image[b'src'], 3)
                show[b'image_path'] = os.path.join('images', 'imdb_popular',
                                                   os.path.basename(show[b'image_url_large']))

                self.cache_image(show[b'image_url_large'])

            td = row.find("td", {"class": "title"})

            if td:
                show[b'name'] = td.find("a").contents[0]
                show[b'imdb_url'] = "http://www.imdb.com" + td.find("a")["href"]
                show[b'imdb_tt'] = show[b'imdb_url'][-10:][0:9]
                show[b'year'] = td.find("span", {"class": "year_type"}).contents[0].split(" ")[0][1:]

                rating_all = td.find("div", {"class": "user_rating"})
                if rating_all:
                    rating_string = rating_all.find("div", {"class": "rating rating-list"})
                    if rating_string:
                        rating_string = rating_string[b'title']

                        match = re.search(r".* (.*)\/10.*\((.*)\).*", rating_string)
                        if match:
                            matches = match.groups()
                            show[b'rating'] = matches[0]
                            show[b'votes'] = matches[1]
                        else:
                            show[b'rating'] = None
                            show[b'votes'] = None
                else:
                    show[b'rating'] = None
                    show[b'votes'] = None

                outline = td.find("span", {"class": "outline"})
                if outline:
                    show[b'outline'] = outline.contents[0]
                else:
                    show[b'outline'] = ''

                popular_shows.append(show)

        return popular_shows
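A short usage sketch for the popular-shows fetcher above; imdb_popular is an assumed instance of the class this method belongs to, and the byte-string keys mirror the ones used in the code:

# Hypothetical usage: list name, year, rating and votes for each popular show.
shows = imdb_popular.fetch_popular_shows() or []
for show in shows:
    print("%s (%s) - rating: %s, votes: %s" % (
        show[b'name'], show[b'year'],
        show.get(b'rating'), show.get(b'votes')))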
Esempio n. 40
0
    def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        # check for auth
        if not self._doLogin():
            return results

        for mode in search_strings.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_strings[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " % search_string)

                self.search_params[b'recherche'] = search_string

                data = self.getURL(self.urls['search'], params=self.search_params)
                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        result_table = html.find('table', {'id': 'tablealign3bis'})

                        if not result_table:
                            sickrage.srLogger.debug("Data returned from provider does not contain any torrents")
                            continue

                        if result_table:
                            rows = result_table.findAll("tr", {"class": "ligntorrent"})

                            for row in rows:
                                link = row.findAll('td')[1].find("a", href=re.compile("fiche_film"))

                                if link:
                                    try:
                                        title = link.text
                                        download_url = self.urls['base_url'] + "/" + \
                                                       row.find("a", href=re.compile(r"download\.php"))['href']
                                    except (AttributeError, TypeError):
                                        continue

                                    try:
                                        detailseedleech = link[b'mtcontent']
                                        seeders = int(
                                                detailseedleech.split("<font color='#00b72e'>")[1].split("</font>")[0])
                                        leechers = int(
                                                detailseedleech.split("<font color='red'>")[1].split("</font>")[0])
                                        # FIXME
                                        size = -1
                                    except Exception:
                                        sickrage.srLogger.debug(
                                                "Unable to parse torrent id & seeders & leechers. Traceback: %s " % traceback.format_exc())
                                        continue

                                    if not all([title, download_url]):
                                        continue

                                    # Filter unseeded torrent
                                    if seeders < self.minseed or leechers < self.minleech:
                                        if mode is not 'RSS':
                                            sickrage.srLogger.debug(
                                                    "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
                                                            title, seeders, leechers))
                                        continue

                                    item = title, download_url, size, seeders, leechers
                                    if mode is not 'RSS':
                                        sickrage.srLogger.debug("Found result: %s " % title)

                                    items[mode].append(item)

                except Exception as e:
                    sickrage.srLogger.error("Failed parsing provider. Traceback: %s" % traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Esempio n. 41
0
    def _doSearch(self, search_strings, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_strings.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_strings[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " % search_string)

                searchURL = self.urls['search'] % (search_string, self.catagories)
                sickrage.srLogger.debug("Search URL: %s" % searchURL)

                data = self.getURL(searchURL)
                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        torrent_table = html.find('table', attrs={'id': 'torrent_table'})
                        torrent_rows = torrent_table.find_all('tr') if torrent_table else []

                        # Continue only if one Release is found
                        if len(torrent_rows) < 2:
                            sickrage.srLogger.debug("Data returned from provider does not contain any torrents")
                            continue

                        for result in torrent_rows[1:]:
                            cells = result.find_all('td')
                            link = result.find('a', attrs={'dir': 'ltr'})
                            url = result.find('a', attrs={'title': 'Download'})

                            try:
                                title = link.contents[0]
                                download_url = self.urls['download'] % (url[b'href'])
                                seeders = int(cells[-2].contents[0])
                                leechers = int(cells[-1].contents[0])
                                # FIXME
                                size = -1
                            except (AttributeError, TypeError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode is not 'RSS':
                                    sickrage.srLogger.debug(
                                            "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
                                                    title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode is not 'RSS':
                                sickrage.srLogger.debug("Found result: %s " % title)

                            items[mode].append(item)

                except Exception:
                    sickrage.srLogger.warning("Failed parsing provider. Traceback: %s" % traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Esempio n. 42
0
    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode is not 'RSS':
                    sickrage.srLogger.debug("Search string: %s " % search_string)

                searchURL = self.urls['search'] % (urllib.quote(search_string), self.categories)
                sickrage.srLogger.debug("Search URL: %s" % searchURL)

                data = self.getURL(searchURL)
                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        torrent_table = html.select("#torrenttable table")
                        torrent_rows = torrent_table[0].select("tr") if torrent_table else []

                        # Continue only if one Release is found
                        if len(torrent_rows) < 2:
                            sickrage.srLogger.debug("Data returned from provider does not contain any torrents")
                            continue

                        # Scenetime apparently uses different number of cells in #torrenttable based
                        # on who you are. This works around that by extracting labels from the first
                        # <tr> and using their index to find the correct download/seeders/leechers td.
                        labels = [label.get_text() for label in torrent_rows[0].find_all('td')]

                        for result in torrent_rows[1:]:
                            cells = result.find_all('td')

                            link = cells[labels.index('Name')].find('a')

                            full_id = link[b'href'].replace('details.php?id=', '')
                            torrent_id = full_id.split("&")[0]

                            try:
                                title = link.contents[0].get_text()
                                filename = "%s.torrent" % title.replace(" ", ".")
                                download_url = self.urls['download'] % (torrent_id, filename)

                                seeders = int(cells[labels.index('Seeders')].get_text())
                                leechers = int(cells[labels.index('Leechers')].get_text())
                                # FIXME
                                size = -1

                            except (AttributeError, TypeError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode != 'RSS':
                                    sickrage.srLogger.debug(
                                            "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
                                                    title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode != 'RSS':
                                sickrage.srLogger.debug("Found result: %s " % title)

                            items[mode].append(item)

                except Exception:
                    sickrage.srLogger.error("Failed parsing provider. Traceback: {}".format(traceback.format_exc()))

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Esempio n. 43
0
    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode != 'RSS':
                    sickrage.srLogger.debug("Search string: %s " % search_string)

                searchURL = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories)
                sickrage.srLogger.debug("Search URL: %s" % searchURL)

                data = self.getURL(searchURL)
                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        # Continue only if one Release is found
                        empty = html.find('h2', text="No .torrents fit this filter criteria")
                        if empty:
                            sickrage.srLogger.debug("Data returned from provider does not contain any torrents")
                            continue

                        torrent_table = html.find('table', attrs={'style': 'border: none; width: 100%;'})
                        if not torrent_table:
                            sickrage.srLogger.error("Could not find table of torrents")
                            continue

                        torrent_rows = torrent_table.find_all('tr', attrs={'class': 'browse'})

                        for result in torrent_rows:
                            cells = result.find_all('td')
                            size = None
                            link = cells[1].find('a', attrs={'style': 'font-size: 1.25em; font-weight: bold;'})

                            torrent_id = link['href'].replace('details.php?id=', '')

                            try:
                                if link.has_attr('title'):
                                    title = link['title']
                                else:
                                    title = link.contents[0]

                                download_url = self.urls['download'] % (torrent_id, link.contents[0])
                                seeders = int(cells[9].contents[0])
                                leechers = int(cells[10].contents[0])

                                # Need size for failed downloads handling
                                if size is None:
                                    if re.match(r'[0-9]+,?\.?[0-9]*[KkMmGg]+[Bb]+', cells[7].text):
                                        size = self._convertSize(cells[7].text)
                                        if not size:
                                            size = -1

                            except (AttributeError, TypeError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode != 'RSS':
                                    sickrage.srLogger.debug(
                                            "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(
                                                    title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode != 'RSS':
                                sickrage.srLogger.debug("Found result: %s " % title)

                            items[mode].append(item)

                except Exception:
                    sickrage.srLogger.error("Failed parsing provider. Traceback: %s" % traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
Esempio n. 44
0
    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            sickrage.srLogger.debug("Search Mode: %s" % mode)
            for search_string in search_params[mode]:

                if mode != 'RSS':
                    sickrage.srLogger.debug("Search string: %s " % search_string)

                searchURL = self.urls['search'] % (urllib.quote(search_string), self.categories)
                sickrage.srLogger.debug("Search URL: %s" % searchURL)

                data = self.getURL(searchURL)
                if not data:
                    continue

                try:
                    with bs4_parser(data) as html:
                        # Continue only if one Release is found
                        empty = html.find(text='Nothing found!')
                        if empty:
                            sickrage.srLogger.debug("Data returned from provider does not contain any torrents")
                            continue

                        torrent_table = html.find('table', attrs={'border': '1'})
                        torrent_rows = torrent_table.find_all('tr') if torrent_table else []

                        for result in torrent_rows[1:]:
                            cells = result.find_all('td')
                            size = None
                            link = cells[1].find('a', attrs={'class': 'index'})

                            full_id = link['href'].replace('details.php?id=', '')
                            torrent_id = full_id.split("&")[0]

                            # Free leech torrents are marked with green [F L] in the title (i.e. <font color=green>[F&nbsp;L]</font>)
                            freeleechTag = cells[1].find('font', attrs={'color': 'green'})
                            # BeautifulSoup decodes &nbsp; to a non-breaking space, so compare against u'\xa0'
                            if freeleechTag and freeleechTag.text == u'[F\xa0L]':
                                isFreeleechTorrent = True
                            else:
                                isFreeleechTorrent = False

                            if self.freeleech and not isFreeleechTorrent:
                                continue

                            try:
                                if link.has_attr('title'):
                                    title = link['title']
                                else:
                                    title = link.contents[0]
                                download_url = self.urls['download'] % (torrent_id, link.contents[0])
                                seeders = int(cells[8].find('span').contents[0])
                                leechers = int(cells[9].find('span').contents[0])

                                # Need size for failed downloads handling
                                if size is None:
                                    if re.match(r'[0-9]+,?\.?[0-9]*[KkMmGg]+[Bb]+', cells[6].text):
                                        size = self._convertSize(cells[6].text)
                                        if not size:
                                            size = -1

                            except (AttributeError, TypeError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode != 'RSS':
                                    sickrage.srLogger.debug(
                                            "Discarding torrent because it doesn't meet the minimum seeders or leechers: {} (S:{} L:{})".format(
                                                    title, seeders, leechers))
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode != 'RSS':
                                sickrage.srLogger.debug("Found result: %s " % title)

                            items[mode].append(item)

                except Exception:
                    sickrage.srLogger.error("Failed parsing provider. Traceback: %s" % traceback.format_exc())

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results
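
The `_convertSize` helper called in the last two snippets is not shown in these excerpts. A minimal sketch of what such a helper might look like (written here as a standalone function; in the providers it is a method, `self._convertSize`), assuming the size cell holds text such as "1.4 GB" or "700 MB"; the regex and unit table below are illustrative assumptions, not the providers' actual implementation:

    import re

    def _convertSize(size_string):
        # Illustrative sketch only: parse a human-readable size such as "1.4 GB"
        # into bytes, returning -1 when the text cannot be parsed.
        match = re.match(r'([\d.,]+)\s*([KMGT]?i?B)', size_string.strip(), re.I)
        if not match:
            return -1
        value = float(match.group(1).replace(',', ''))
        unit = match.group(2).upper().replace('IB', 'B')
        multipliers = {'B': 1, 'KB': 1024, 'MB': 1024 ** 2, 'GB': 1024 ** 3, 'TB': 1024 ** 4}
        return int(value * multipliers.get(unit, 1))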