Code example #1
File: scraper.py  Project: twissell-/rui
    def next(self, serie, scraper):
        if not serie.onGoing or serie.lastChapter == serie.totalChapters:
            PirateBayScraper.logger.info(
                '%s finished. No chapters to look for' % serie)
            return -1

        # Map the serie's stored parameters to this scraper's search parameters.
        params = scraper.map(serie.params)
        chapter = serie.lastChapter + 1
        # First result's title link in the Pirate Bay search results table.
        cssSelector = '#searchResult > tr:nth-of-type(1) > td:nth-of-type(2) > a:nth-of-type(1)'
        # Search for the next chapter, zero-padding the chapter number (e.g. "07").
        res = requests.get(serie.fansub.baseUrl + params['searchStr'] + '%02d' % chapter)
        soup = bs4.BeautifulSoup(res.text, 'html.parser')

        links = soup.select(cssSelector)

        if len(links) == 0:
            PirateBayScraper.logger.debug('%s chapter %d not found' % (serie, chapter))
            return False
        else:
            PirateBayScraper.logger.info('%s chapter %d found' % (serie, chapter))
            Link.create(link=links[0]['href'], serie=serie, chapter=chapter)
            serie.lastChapter = chapter
            # put the magnet link into transmission
            Transmission.add(links[0]['href'])

            if serie.lastChapter == serie.totalChapters:
                serie.onGoing = False

            serie.save()
            if scraper.notificate:
                Notification.create(
                    message='New: %s - Ch. %d from %s' % (serie.title, serie.lastChapter, scraper.fansub.name))
            return True
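A possible caller for this method, sketched under assumptions the listing does not show (the serie and scraper records come from the project's own models; every name below is illustrative only):

    # Hypothetical driver: iterate the tracked series and queue any new chapter.
    # -1 means the serie is finished, False means nothing new, True means a chapter was queued.
    def check_new_chapters(pirate_bay, series, scraper_record):
        for serie in series:
            result = pirate_bay.next(serie, scraper_record)
            if result == -1:
                continue  # serie already finished
            if result:
                print('Queued %s chapter %d' % (serie.title, serie.lastChapter))
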
Code example #2
File: scraper.py  Project: twissell-/rui
    def next(self, serie, scraper):
        if not serie.onGoing or serie.lastChapter == serie.totalChapters:
            FallensubsScraper.logger.info(
                '%s finished. No chapters to look for' % serie)
            return -1

        # Map the serie's stored parameters to this scraper's forum parameters.
        params = scraper.map(serie.params)
        # Row, column and mirror position inside the forum topic's download table.
        cssSelector = '.bbc_table > tr:nth-of-type(%d) > td:nth-of-type(%d) > a:nth-of-type(%d)'
        res = requests.get(serie.fansub.baseUrl + params['topicId'])
        soup = bs4.BeautifulSoup(res.text, 'html.parser')
        chapter = serie.lastChapter + 1

        # Rows are offset by one relative to the chapter number (chapter N sits in row N + 1).
        linkTag = soup.select(
            cssSelector % (chapter + 1, params['column'], params['mirror']))

        if len(linkTag) == 0:
            FallensubsScraper.logger.debug('%s chapter %d not found' % (serie, chapter))
            return False
        else:
            FallensubsScraper.logger.info('%s chapter %d found' % (serie, chapter))
            Link.create(link=linkTag[0]['href'], serie=serie, chapter=chapter)
            serie.lastChapter = chapter
            copy(linkTag[0]['href'])

            if serie.lastChapter == serie.totalChapters:
                serie.onGoing = False

            serie.save()
            if scraper.notificate:
                Notification.create(
                    message='New: %s - Ch. %d from %s' % (serie.title, serie.lastChapter, scraper.fansub.name))
            return True
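Unlike the Pirate Bay scraper, this one hands the link to a copy() helper instead of Transmission. That helper is not part of the listing; a minimal stand-in, assuming it only puts the direct-download URL on the clipboard via pyperclip, could look like this:

    # Hypothetical stand-in for the copy() helper used above; pyperclip is an assumption.
    import pyperclip

    def copy(url):
        # Put the direct-download URL on the clipboard for manual handling.
        pyperclip.copy(url)
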
Code example #3
File: scraper.py  Project: twissell-/rui
    def _torrentNext(self, serie, params, scraper):
        res = requests.get(serie.fansub.baseUrl % (params['fl-animeId'], params['fl-fansubId']))
        soup = bs4.BeautifulSoup(res.text, 'html.parser')
        chapter = serie.lastChapter + 1

        # Every torrent link (<a> tag) on the fansub's listing page.
        anchors = soup.select(FrozenLayerScraper.cssPageSelector)

        chapterLink = False
        for a in anchors:
            # Skip batch torrents (e.g. "1 al 12") and 1080p releases.
            if ' al ' in a.text or '1080' in a.text:
                continue
            # Collect every number that appears in the link text.
            chaptersList = [int(s) for s in a.text.split() if s.isdigit()]
            if chapter in chaptersList:
                chapterLink = a['href']

        if chapterLink:
            # Follow the chapter's own page and look for its magnet link.
            res = requests.get(chapterLink)
            soup = bs4.BeautifulSoup(res.text, 'html.parser')
            anchors = soup.select('a')
            magnetLink = False

            for a in anchors:
                # Not every anchor has an href, so fall back to an empty string.
                if 'magnet:' in a.get('href', ''):
                    magnetLink = a['href']
                    break

            if not magnetLink:
                FrozenLayerScraper.logger.debug('%s chapter %d not found' % (serie, chapter))
                return -1

            Link.create(link=magnetLink, serie=serie, chapter=chapter)
            serie.lastChapter = chapter
            # put the magnet link into transmission
            Transmission.add(magnetLink)
            if serie.lastChapter == serie.totalChapters:
                serie.onGoing = False

            serie.save()

            if scraper.notificate:
                Notification.create(
                    message='New: %s - Ch. %d from %s' % (serie.title, serie.lastChapter, scraper.fansub.name))

            FrozenLayerScraper.logger.info('%s chapter %d found' % (serie, chapter))
            return True
        else:
            FrozenLayerScraper.logger.debug('%s chapter %d not found' % (serie, chapter))
            return -1
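The chapter matching above works by pulling every integer out of a link's text and checking whether the wanted chapter is among them. A self-contained sketch of that pattern (the sample titles are made up for illustration):

    # Extract the integers from a torrent link's text, as _torrentNext does.
    def chapters_in(text):
        return [int(s) for s in text.split() if s.isdigit()]

    print(chapters_in('Serie X - 07 [720p]'))       # [7]
    print(chapters_in('Serie X - 1 al 12 [720p]'))  # [1, 12] -- a batch torrent, skipped above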