Example #1
    def parse_feed(self):
        logger.debug('start scan %s (%s)', self.bangumi.name, self.bangumi.id)
        eps_no_list = [eps.episode_no for eps in self.episode_list]

        timeout = socket.getdefaulttimeout()
        # use the configured timeout if one is provided
        if self.timeout is not None:
            timeout = self.timeout

        r = requests.post(self.feed_url, json={'tag_id': self.tag_list}, timeout=timeout)

        if r.status_code > 399:
            raise SchedulerError('Network Error {0}'.format(r.status_code))

        resp_body = r.json()

        result_list = []

        for torrent in resp_body['torrents']:
            for file in torrent['content']:
                file_path = file[0]
                file_name = os.path.basename(file_path)
                if not file_name.endswith('.mp4'):
                    continue
                eps_no = self.parse_episode_number(file_name)
                if eps_no in eps_no_list:
                    result_list.append((torrent['magnet'], eps_no, file_path, file_name))

        return result_list
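
This and the later examples rely on a parse_episode_number helper that is not shown. A minimal sketch of what such a helper could look like (the regex and the -1 fallback are assumptions for illustration, not the project's actual implementation):

import re

def parse_episode_number(file_name):
    # Look for a standalone 1-3 digit number, e.g. "[Group] Title - 07 [720p].mp4" -> 7.
    # Returns -1 (assumed sentinel) when no episode number can be found.
    match = re.search(r'(?:\s|\[|-\s?)(\d{1,3})(?:\s|\]|v\d)', file_name)
    if match:
        return int(match.group(1))
    return -1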
Example #2
    def download(self, download_url, download_location):
        url_type = self.__url_type(download_url)
        if url_type == 'magnet':
            torrent_id = yield client.core.add_torrent_magnet(
                download_url, {'download_location': download_location})
        elif url_type == 'torrent':
            torrent_id = yield client.core.add_torrent_url(
                download_url, {'download_location': download_location})
        else:
            raise SchedulerError('unsupported url format')

        returnValue(torrent_id)
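
The download method above is written in Twisted's inlineCallbacks style (the decorator sits outside the excerpt), so it returns a Deferred and must itself be yielded. A hedged usage sketch, assuming the surrounding downloader object is available as downloader (schedule_download is a hypothetical caller):

from twisted.internet import defer

@defer.inlineCallbacks
def schedule_download(downloader, uri, download_location):
    # download() fires its Deferred with the torrent id assigned by Deluge.
    torrent_id = yield downloader.download(uri, download_location)
    defer.returnValue(torrent_id)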
Example #3
    def parse_feed(self):
        '''
        Parse the feed for the current bangumi and find episodes in the feed
        entries that have not been downloaded yet.
        This method uses an async call to add torrents.
        :return: a list of (download_url, eps_no, file_path, file_name) tuples
                 for the matched episodes
        '''
        # eps no list
        logger.debug('start scan %s (%s)', self.bangumi.name, self.bangumi.id)
        logger.debug(self.feed_url)
        eps_no_list = [eps.episode_no for eps in self.episode_list]

        timeout = socket.getdefaulttimeout()
        # use the configured timeout if one is provided
        if self.timeout is not None:
            timeout = self.timeout

        r = dmhy_request.get(self.feed_url,
                             proxies=self.proxy,
                             timeout=timeout)

        if r.status_code > 399:
            raise SchedulerError('Network Error {0}'.format(r.status_code))

        feed_dict = feedparser.parse(r.text)

        if feed_dict.bozo != 0:
            raise SchedulerError(feed_dict.bozo_exception)

        result_list = []

        for item in feed_dict.entries:
            title = item['title']
            eps_no = self.parse_episode_number(title)
            if eps_no in eps_no_list and hasattr(item.enclosures[0], 'href'):
                result_list.append(
                    (item.enclosures[0].href, eps_no, None, None))

        return result_list
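
For reference, this is how feedparser exposes the enclosure URL that the loop above reads; a standalone sketch with a made-up RSS snippet:

import feedparser

rss = '''<?xml version="1.0" encoding="utf-8"?>
<rss version="2.0"><channel><title>demo</title>
<item>
  <title>[Group] Some Show - 03 [720p]</title>
  <enclosure url="magnet:?xt=urn:btih:0000" type="application/x-bittorrent" length="0"/>
</item>
</channel></rss>'''

feed = feedparser.parse(rss)
if feed.bozo != 0:
    # bozo_exception holds the underlying parser error for malformed feeds
    raise feed.bozo_exception
for entry in feed.entries:
    print('%s -> %s' % (entry.title, entry.enclosures[0].href))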
Example #4
    def parse_feed(self):
        logger.debug('start scan %s (%s)', self.bangumi.name, self.bangumi.id)
        eps_no_list = [eps.episode_no for eps in self.episode_list]

        timeout = socket.getdefaulttimeout()
        # use the configured timeout if one is provided
        if self.timeout is not None:
            timeout = self.timeout

        r = requests.get(self.feed_url, params={'keyword': self.keyword}, timeout=timeout)

        if r.status_code > 399:
            raise SchedulerError('Network Error {0}'.format(r.status_code))

        item_array = r.json()

        result_list = []

        for item in item_array:
            eps_list = []
            for media_file in item['files']:
                if media_file['ext'] is not None and media_file['ext'].lower() != '.mp4':
                    continue
                eps_no = self.parse_episode_number(media_file['name'])
                if eps_no in eps_no_list:
                    eps_list.append({
                        'eps_no': eps_no,
                        'file_path': media_file['path'],
                        'file_name': media_file['name']
                    })
            if len(eps_list) == 0:
                continue
            for eps in eps_list:
                if self.mode == 'nyaa' or self.mode == 'dmhy':
                    download_uri = item['magnet_uri']
                else:
                    download_uri = item['torrent_url']
                result_list.append((download_uri, eps['eps_no'], eps['file_path'], eps['file_name']))

        logger.debug(result_list)

        return result_list
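
Every variant returns a list of (download_uri, eps_no, file_path, file_name) tuples, where file_path and file_name may be None for single-file feeds. A sketch of how a caller might hand them to a downloader (the downloader interface and dispatch_downloads name are assumptions, not taken from the project):

def dispatch_downloads(result_list, downloader, download_location):
    # Hypothetical consumer: queue one download per matched episode.
    for download_uri, eps_no, file_path, file_name in result_list:
        downloader.download(download_uri, download_location)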
Example #5
    def parse_feed(self):
        '''
        Parse the feed for the current bangumi and find episodes in the feed
        entries that have not been downloaded yet.
        This method uses an async call to add torrents.
        :return: a list of (download_url, eps_no, file_path, file_name) tuples
                 for the matched episodes
        '''
        # eps no list
        logger.debug('start scan %s (%s)', self.bangumi.name, self.bangumi.id)
        eps_no_list = [eps.episode_no for eps in self.episode_list]

        default_timeout = socket.getdefaulttimeout()
        # override the default timeout if one is provided
        if self.timeout is not None:
            socket.setdefaulttimeout(self.timeout)

        # use handlers
        if self.proxy is not None:
            proxy_handler = urllib2.ProxyHandler(self.proxy)
            feed_dict = feedparser.parse(self.feed_url,
                                         handlers=[proxy_handler])
        else:
            feed_dict = feedparser.parse(self.feed_url)

        # restore the default timeout
        if self.timeout is not None:
            socket.setdefaulttimeout(default_timeout)

        if feed_dict.bozo != 0:
            raise SchedulerError(feed_dict.bozo_exception)

        result_list = []

        for item in feed_dict.entries:
            eps_no = self.parse_episode_number(item['title'])
            if eps_no in eps_no_list:
                result_list.append(
                    (item.enclosures[0].href, eps_no, None, None))
                # d = self.add_to_download(item, eps_no)
                # d.addCallback(self.download_callback)

        return result_list
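
Example #5 changes the process-wide default socket timeout and restores it afterwards; note that if feedparser.parse raised, the original value would not be restored. A context-manager sketch of the same save/restore pattern with a try/finally (an illustration, not part of the original code):

import socket
from contextlib import contextmanager

@contextmanager
def default_socket_timeout(seconds):
    # Temporarily override the global default timeout used by new sockets,
    # restoring the previous value even if the body raises.
    previous = socket.getdefaulttimeout()
    socket.setdefaulttimeout(seconds)
    try:
        yield
    finally:
        socket.setdefaulttimeout(previous)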