Code Example #1
    def test_womble(self):
        RSSFeeds().clearCache()
        result = RSSFeeds().getFeed(
            'https://newshost.co.za/rss/?sec=tv-sd&fr=false')
        self.assertTrue('entries' in result)
        self.assertTrue('feed' in result)
        for item in result['entries']:
            self.assertTrue(TVCache._parseItem(item))
Code Example #2
File: tvcache.py Project: peanutsofdoom/SickRage
    def getRSSFeed(self, url, post_data=None, items=[]):
        handlers = []

        if self.provider.proxy.isEnabled():
            # Provider-specific proxy: advertise its URL via the Referer header
            self.provider.headers.update(
                {'Referer': self.provider.proxy.getProxyURL()})
        elif sickbeard.PROXY_SETTING:
            # Global proxy setting: normalise the address (default to http://)
            # and route the request through a urllib2 ProxyHandler
            logger.log("Using proxy for url: " + url, logger.DEBUG)
            scheme, address = urllib2.splittype(sickbeard.PROXY_SETTING)
            address = sickbeard.PROXY_SETTING if scheme else 'http://' + sickbeard.PROXY_SETTING
            handlers = [
                urllib2.ProxyHandler({
                    'http': address,
                    'https': address
                })
            ]
            self.provider.headers.update({'Referer': address})
        elif 'Referer' in self.provider.headers:
            # No proxy configured: drop any stale Referer header
            self.provider.headers.pop('Referer')

        return RSSFeeds(self.providerID).getFeed(
            self.provider.proxy._buildURL(url),
            post_data,
            self.provider.headers,
            items,
            handlers=handlers)
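
A handlers list like the one built above is the standard input for a urllib2 opener. The sketch below is not from the project and assumes RSSFeeds consumes the handlers in the usual way; it only illustrates what the ProxyHandler is for.

import urllib2

def fetch_with_handlers(url, handlers=None, headers=None):
    # Build an opener that routes requests through any supplied handlers,
    # e.g. the urllib2.ProxyHandler created in getRSSFeed() above.
    opener = urllib2.build_opener(*(handlers or []))
    request = urllib2.Request(url, headers=headers or {})
    return opener.open(request).read()
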
Code Example #3
    def _search_provider(self, search_params, **kwargs):

        self._authorised()
        results = []

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        url = self.urls['browse'] % self.passkey
        for mode in search_params.keys():
            for search_string in search_params[mode]:
                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
                search_url = url + (self.urls['search'] % search_string, '')['Cache' == mode]

                xml_data = RSSFeeds(self).get_feed(search_url)

                cnt = len(items[mode])
                if xml_data and 'entries' in xml_data:
                    for entry in xml_data['entries']:
                        try:
                            if entry['title'] and 'download' in entry['link']:
                                items[mode].append((entry['title'], entry['link'], None, None))
                        except KeyError:
                            continue

                self._log_search(mode, len(items[mode]) - cnt, search_url)

            results = list(set(results + items[mode]))

        return results
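
The search_url line above relies on the (value_if_false, value_if_true)[condition] idiom: a two-element tuple indexed by a boolean, so 'Cache' mode reuses the bare browse URL while other modes append the search path. A minimal illustration with made-up values:

query = 'some+show'
for mode in ('Cache', 'Episode'):
    suffix = ('search/%s' % query, '')['Cache' == mode]
    print mode, repr(suffix)
# prints:
#   Cache ''
#   Episode 'search/some+show'
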
Code Example #4
    def getRSSFeed(self, url, post_data=None, items=[]):
        if self.provider.proxy.isEnabled():
            self.provider.headers.update(
                {'Referer': self.provider.proxy.getProxyURL()})
        return RSSFeeds(self.providerID).getFeed(
            self.provider.proxy._buildURL(url), post_data,
            self.provider.headers, items)
Code Example #5
    def __init__(self,
                 name,
                 url,
                 cookies='',
                 search_mode='eponly',
                 search_fallback=False,
                 enable_recentsearch=False,
                 enable_backlog=False):
        generic.TorrentProvider.__init__(self, name)

        self.url = url.rstrip('/')
        self.cookies = cookies

        self.enable_recentsearch = enable_recentsearch
        self.enable_backlog = enable_backlog
        self.search_mode = search_mode
        self.search_fallback = search_fallback

        self.feeder = RSSFeeds(self)
        self.cache = TorrentRssCache(self)
Code Example #6
File: omgwtfnzbs.py Project: Amelandbor/SickGear
    def get_cache_data(self):

        api_key = self._init_api()
        if False is api_key:
            return self.search_html()
        if None is not api_key:
            params = {
                'user': self.username,
                'api': api_key,
                'eng': 1,
                'catid': '19,20'
            }  # SD,HD

            rss_url = self.urls['cache'] % urllib.urlencode(params)

            logger.log(self.name + u' cache update URL: ' + rss_url,
                       logger.DEBUG)

            data = RSSFeeds(self).get_feed(rss_url)
            if data and 'entries' in data:
                return data.entries
        return []
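
For reference, urllib.urlencode simply flattens the params dict into a query string that is substituted into the cache URL template. A standalone illustration (the URL template below is made up; only the encoding behaviour is the point):

import urllib

params = {'user': 'name', 'api': 'key', 'eng': 1, 'catid': '19,20'}
query = urllib.urlencode(params)  # e.g. 'api=key&catid=19%2C20&eng=1&user=name'
rss_url = 'https://example.invalid/rss?%s' % query
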
Code Example #7
    def __init__(self,
                 name,
                 url,
                 cookies='',
                 search_mode='eponly',
                 search_fallback=False,
                 enable_recentsearch=False,
                 enable_backlog=False):
        self.enable_backlog = bool(tryInt(enable_backlog))
        generic.TorrentProvider.__init__(self,
                                         name,
                                         supports_backlog=self.enable_backlog,
                                         cache_update_freq=15)

        self.url = url.rstrip('/')
        self.url_base = self.url
        self.cookies = cookies

        self.enable_recentsearch = bool(
            tryInt(enable_recentsearch)) or not self.enable_backlog
        self.search_mode = search_mode
        self.search_fallback = bool(tryInt(search_fallback))

        self.feeder = RSSFeeds(self)
Code Example #8
    def getRSSFeed(self, url, post_data=None, request_headers=None):
        return RSSFeeds(self.providerID).getFeed(url, post_data,
                                                 request_headers)
Code Example #9
File: feedparser_tests.py Project: theglump/SickRage
    def test_newznab(self):
        RSSFeeds().clearCache()
        result = RSSFeeds().getFeed('http://lolo.sickbeard.com/api?t=caps')
        self.assertTrue('entries' in result)
        self.assertTrue('feed' in result)
        self.assertTrue('categories' in result.feed)
Code Example #10
    def getRSSFeed(self, url):
        return RSSFeeds(self.provider).get_feed(url)
Code Example #11
File: tvcache.py Project: markchipman/SickGear
    def get_rss(self, url, **kwargs):
        return RSSFeeds(self.provider).get_feed(url, **kwargs)
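
Taken together, the examples show two call conventions for the same helper: SickRage-era code constructs RSSFeeds(self.providerID) and calls getFeed(url, post_data, headers, items, handlers=...), while SickGear-era code constructs RSSFeeds(provider) and calls get_feed(url, **kwargs). A minimal consumer sketch in the SickGear style; provider and the URL are placeholders:

feed = RSSFeeds(provider).get_feed('https://example.invalid/rss')
if feed and 'entries' in feed:
    for entry in feed['entries']:
        # entries are feedparser-style dicts exposing 'title', 'link', etc.
        print entry.get('title'), entry.get('link')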