def _doSearch(self, search_params, epcount=0, age=0):
    """Search the provider with the given POST payload.

    search_params: pre-serialized payload sent as the request body.
    epcount/age: accepted for interface compatibility; unused here.

    Returns the list of raw result items (the provider JSON's 'data'
    entries), or an empty list on any failure (no response, bad JSON,
    failed auth, or malformed payload).
    """
    results = []

    self._checkAuth()

    logger.log(u"Search url: " + self.search_url + " search_params: " + search_params, logger.DEBUG)

    data = self.getURL(self.search_url, post_data=search_params)

    if not data:
        logger.log(u"No data returned from " + self.search_url, logger.ERROR)
        return []

    parsedJSON = helpers.parse_json(data)

    if parsedJSON is None:
        logger.log(u"Error trying to load " + self.name + " JSON data", logger.ERROR)
        return []

    if self._checkAuthFromData(parsedJSON):
        if parsedJSON and 'data' in parsedJSON:
            items = parsedJSON['data']
        else:
            logger.log(u"Resulting JSON from " + self.name + " isn't correct, not parsing it", logger.ERROR)
            items = []

        # extend() replaces the original redundant per-item append loop
        results.extend(items)

    return results
def _doSearch(self, search_params, show=None):
    """Search the provider with the given POST payload.

    search_params: pre-serialized payload sent as the request body.
    show: accepted for interface compatibility; unused here.

    Returns the list of raw result items from the provider JSON's
    'data' field, or an empty list on any failure.
    """
    self._checkAuth()

    # BUGFIX: bind 'results' before the auth branch. Previously it was
    # only assigned inside "if self._checkAuthFromData(...)", so the
    # final "return results" raised NameError whenever auth failed.
    results = []

    logger.log(u"Search url: " + self.search_url + " search_params: " + search_params, logger.DEBUG)

    data = self.getURL(self.search_url, post_data=search_params)

    if not data:
        logger.log(u"No data returned from " + self.search_url, logger.ERROR)
        return []

    parsedJSON = helpers.parse_json(data)

    if parsedJSON is None:
        logger.log(u"Error trying to load " + self.name + " JSON data", logger.ERROR)
        return []

    if self._checkAuthFromData(parsedJSON):
        if parsedJSON and 'data' in parsedJSON:
            items = parsedJSON['data']
        else:
            logger.log(u"Resulting JSON from " + self.name + " isn't correct, not parsing it", logger.ERROR)
            items = []

        for item in items:
            results.append(item)

    return results
def updateCache(self):
    """Refresh the provider cache from the provider's JSON feed.

    Clears stale entries first, then (if an update is due and auth
    succeeds) fetches the feed, parses it and mass-inserts the parsed
    items into the cache DB.

    Raises exceptions.AuthException when the feed indicates bad
    credentials.
    """
    # delete anything older then 7 days
    logger.log(u"Clearing " + self.provider.name + " cache")
    self._clearCache()

    if not self.shouldUpdate():
        return

    # guard clauses flatten the original nested auth pyramid
    if not self._checkAuth(None):
        return []

    data = self._getRSSData()

    # As long as we got something from the provider we count it as an update
    if data:
        self.setLastUpdate()
    else:
        return []

    parsedJSON = helpers.parse_json(data)

    if parsedJSON is None:
        logger.log(u"Error trying to load " + self.provider.name + " JSON feed", logger.ERROR)
        return []

    if not self._checkAuth(parsedJSON):
        raise exceptions.AuthException("Your authentication info for " + self.provider.name + " is incorrect, check your config")

    if parsedJSON and 'data' in parsedJSON:
        items = parsedJSON['data']
    else:
        logger.log(u"Resulting JSON from " + self.provider.name + " isn't correct, not parsing it", logger.ERROR)
        return []

    # collect DB actions and commit them in one batch
    cache_queries = []
    for cur_item in items:
        parsed = self._parseItem(cur_item)
        if parsed is not None:
            cache_queries.append(parsed)

    if cache_queries:
        myDB = self._getDB()
        myDB.mass_action(cache_queries)
def updateCache(self):
    """Refresh the provider cache from the provider's JSON feed.

    Old entries are purged first; a new fetch only happens when
    shouldUpdate() says one is due. Parsed items are written to the
    cache DB in a single mass_action batch.

    Raises exceptions.AuthException when the feed reports invalid
    credentials.
    """
    # delete anything older then 7 days
    logger.log(u"Clearing " + self.provider.name + " cache")
    self._clearCache()

    if not self.shouldUpdate():
        return

    if self._checkAuth(None):
        data = self._getRSSData()

        # As long as we got something from the provider we count it as an update
        if not data:
            return []
        self.setLastUpdate()

        parsedJSON = helpers.parse_json(data)

        if parsedJSON is None:
            logger.log(u"Error trying to load " + self.provider.name + " JSON feed", logger.ERROR)
            return []

        if self._checkAuth(parsedJSON):
            if parsedJSON and 'data' in parsedJSON:
                items = parsedJSON['data']
            else:
                logger.log(u"Resulting JSON from " + self.provider.name + " isn't correct, not parsing it", logger.ERROR)
                return []

            # build the batch of cache DB actions, dropping unparsable items
            ql = [ci for ci in (self._parseItem(item) for item in items) if ci is not None]

            if ql:
                myDB = self._getDB()
                myDB.mass_action(ql)
        else:
            raise exceptions.AuthException("Your authentication info for " + self.provider.name + " is incorrect, check your config")
    else:
        return []
def _doSearch(self, search, show=None, retention=0):
    """Query the omgwtfnzbs JSON API for *search*.

    show: accepted for interface compatibility; unused here.
    retention: overrides the configured usenet retention when non-zero
    (also used as the fallback when no retention is configured).

    Returns a list of result dicts that carry both 'release' and
    'getnzb' keys, with nuked releases filtered out; empty list on
    failure.
    """
    self._checkAuth()

    query_params = {'user': sickbeard.OMGWTFNZBS_USERNAME,
                    'api': sickbeard.OMGWTFNZBS_APIKEY,
                    'eng': 1,
                    'nukes': 1,  # show nuke info
                    'catid': '19,20',  # SD,HD
                    'retention': sickbeard.USENET_RETENTION,
                    'search': search}

    if retention or not query_params['retention']:
        query_params['retention'] = retention

    search_url = 'https://api.omgwtfnzbs.org/json/?' + urllib.urlencode(query_params)
    logger.log(u"Search url: " + search_url, logger.DEBUG)

    data = self.getURL(search_url)

    if not data:
        logger.log(u"No data returned from " + search_url, logger.ERROR)
        return []

    parsedJSON = helpers.parse_json(data)

    if parsedJSON is None:
        logger.log(u"Error trying to load " + self.name + " JSON data", logger.ERROR)
        return []

    if not self._checkAuthFromData(parsedJSON, is_XML=False):
        return []

    results = []
    for entry in parsedJSON:
        # skip anything the indexer has flagged as nuked
        if 'nuked' in entry and entry['nuked'].startswith('1'):
            # logger.log(u"Skipping nuked release: " + entry['release'], logger.DEBUG)
            continue
        if 'release' in entry and 'getnzb' in entry:
            results.append(entry)

    return results
def updateCache(self):
    """Refresh the provider cache from the provider's JSON feed.

    Unlike the purge-first variant, this clears the cache only after a
    successful fetch, then parses each item into the cache one by one.

    Raises exceptions.AuthException when the feed reports invalid
    credentials.
    """
    if not self.shouldUpdate():
        return

    if not self._checkAuth(None):
        return []

    data = self._getRSSData()

    # As long as we got something from the provider we count it as an update
    if data:
        self.setLastUpdate()
    else:
        return []

    # only wipe the old cache once we know we have fresh data to replace it
    logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
    self._clearCache()

    parsedJSON = helpers.parse_json(data)

    if parsedJSON is None:
        logger.log(u"Error trying to load " + self.provider.name + " JSON feed", logger.ERROR)
        return []

    if not self._checkAuth(parsedJSON):
        raise exceptions.AuthException("Your authentication info for " + self.provider.name + " is incorrect, check your config")

    if parsedJSON and 'data' in parsedJSON:
        items = parsedJSON['data']
    else:
        logger.log(u"Resulting JSON from " + self.provider.name + " isn't correct, not parsing it", logger.ERROR)
        return []

    for cur_item in items:
        self._parseItem(cur_item)
def _doSearch(self, search, show=None, retention=0):
    """Run a search against the omgwtfnzbs JSON API.

    show: accepted for interface compatibility; unused here.
    retention: non-zero values override the configured retention, and
    it is also used when no retention is configured at all.

    Returns the non-nuked result dicts that have both 'release' and
    'getnzb' keys; empty list on any failure.
    """
    self._checkAuth()

    params = {
        'user': sickbeard.OMGWTFNZBS_USERNAME,
        'api': sickbeard.OMGWTFNZBS_APIKEY,
        'eng': 1,
        'nukes': 1,  # show nuke info
        'catid': '19,20',  # SD,HD
        'retention': sickbeard.USENET_RETENTION,
        'search': search,
    }

    if retention or not params['retention']:
        params['retention'] = retention

    search_url = 'https://api.omgwtfnzbs.org/json/?' + urllib.urlencode(params)

    logger.log(u"Search url: " + search_url, logger.DEBUG)

    data = self.getURL(search_url)

    if not data:
        logger.log(u"No data returned from " + search_url, logger.ERROR)
        return []

    parsedJSON = helpers.parse_json(data)

    if parsedJSON is None:
        logger.log(u"Error trying to load " + self.name + " JSON data", logger.ERROR)
        return []

    if self._checkAuthFromData(parsedJSON, is_XML=False):
        # keep usable, non-nuked entries only
        return [item for item in parsedJSON
                if not ('nuked' in item and item['nuked'].startswith('1'))
                and 'release' in item and 'getnzb' in item]

    return []
def updateCache(self):
    """Refresh the provider cache from the provider's JSON feed.

    The old cache is cleared only after fresh data has been fetched;
    each feed item is then parsed into the cache individually.

    Raises exceptions.AuthException when the feed reports invalid
    credentials.
    """
    if not self.shouldUpdate():
        return

    if self._checkAuth(None):
        data = self._getRSSData()

        # As long as we got something from the provider we count it as an update
        if not data:
            return []
        self.setLastUpdate()

        # wipe stale entries now that a replacement payload exists
        logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
        self._clearCache()

        parsedJSON = helpers.parse_json(data)

        if parsedJSON is None:
            logger.log(u"Error trying to load " + self.provider.name + " JSON feed", logger.ERROR)
            return []

        if self._checkAuth(parsedJSON):
            if parsedJSON and "data" in parsedJSON:
                feed_items = parsedJSON["data"]
            else:
                logger.log(u"Resulting JSON from " + self.provider.name + " isn't correct, not parsing it", logger.ERROR)
                return []

            for feed_item in feed_items:
                self._parseItem(feed_item)
        else:
            raise exceptions.AuthException("Your authentication info for " + self.provider.name + " is incorrect, check your config")
    else:
        return []
def _doSearch(self, search, show=None, retention=0):
    """Query the omgwtfnzbs JSON API for *search* (no nuke filtering).

    show: accepted for interface compatibility; unused here.
    retention: non-zero values override the configured retention, and
    it is also the fallback when none is configured.

    Returns result dicts carrying both "release" and "getnzb" keys;
    empty list on any failure.
    """
    self._checkAuth()

    query = {
        "user": sickbeard.OMGWTFNZBS_USERNAME,
        "api": sickbeard.OMGWTFNZBS_APIKEY,
        "eng": 1,
        "catid": "19,20",  # SD,HD
        "retention": sickbeard.USENET_RETENTION,
        "search": search,
    }

    if retention or not query["retention"]:
        query["retention"] = retention

    search_url = "https://api.omgwtfnzbs.org/json/?" + urllib.urlencode(query)

    logger.log(u"Search url: " + search_url, logger.DEBUG)

    data = self.getURL(search_url)

    if not data:
        logger.log(u"No data returned from " + search_url, logger.ERROR)
        return []

    parsedJSON = helpers.parse_json(data)

    if parsedJSON is None:
        logger.log(u"Error trying to load " + self.name + " JSON data", logger.ERROR)
        return []

    if not self._checkAuthFromData(parsedJSON, is_XML=False):
        return []

    # keep only entries that expose both a release name and an NZB link
    return [entry for entry in parsedJSON if "release" in entry and "getnzb" in entry]
def _doSearch(self, search, show=None, age=0):
    """Query the nzbx sickbeard API for *search*.

    show: accepted for interface compatibility; unused here.
    age: non-zero values override the configured usenet retention, and
    it is also the fallback when no retention is configured.

    Returns the result dicts that have a truthy name and guid; empty
    list on any failure.
    """
    self._checkAuth()

    params = {
        'age': sickbeard.USENET_RETENTION,
        'completion': sickbeard.NZBX_COMPLETION,
        'cat': 'tv-hd|tv-sd',
        'limit': 250,
        'q': search
    }

    if age or not params['age']:
        params['age'] = age

    # a completion threshold of 0/None makes no sense; default to 100%
    if not params['completion']:
        params['completion'] = 100

    search_url = self.url + 'api/sickbeard?' + urllib.urlencode(params)

    logger.log(u"Search url: " + search_url, logger.DEBUG)

    data = self.getURL(search_url)

    if not data:
        logger.log(u"No data returned from " + search_url, logger.ERROR)
        return []

    parsedJSON = helpers.parse_json(data)

    if parsedJSON is None:
        logger.log(u"Error trying to load " + self.name + " JSON data", logger.ERROR)
        return []

    results = []

    for item in parsedJSON:
        # ROBUSTNESS: .get() instead of item['name'] / item['guid'] so a
        # malformed provider item is skipped rather than raising KeyError
        if item.get('name') and item.get('guid'):
            results.append(item)

    return results
def _doSearch(self, search, show=None, age=0):
    """Query the nzbx sickbeard API for *search*.

    show: accepted for interface compatibility; unused here.
    age: non-zero values override the configured usenet retention, and
    it is also the fallback when none is configured.

    Returns the result dicts whose name and guid are truthy; empty
    list on any failure.
    """
    self._checkAuth()

    query = {
        'age': sickbeard.USENET_RETENTION,
        'completion': sickbeard.NZBX_COMPLETION,
        'cat': 'tv-hd|tv-sd',
        'limit': 250,
        'q': search,
    }

    if age or not query['age']:
        query['age'] = age

    # fall back to requiring 100% completion when none is configured
    if not query['completion']:
        query['completion'] = 100

    search_url = self.url + 'api/sickbeard?' + urllib.urlencode(query)

    logger.log(u"Search url: " + search_url, logger.DEBUG)

    data = self.getURL(search_url)

    if not data:
        logger.log(u"No data returned from " + search_url, logger.ERROR)
        return []

    parsedJSON = helpers.parse_json(data)

    if parsedJSON is None:
        logger.log(u"Error trying to load " + self.name + " JSON data", logger.ERROR)
        return []

    # keep items that have both a usable name and guid
    return [entry for entry in parsedJSON if entry['name'] and entry['guid']]
def findEpisode(self, episode, manualSearch=False):
    """Find downloadable results for *episode* on this provider.

    Checks the provider cache first; cache hits (or any non-manual
    search) are returned as-is. Otherwise the provider is queried
    directly and each returned item is name-parsed, matched against
    the episode (air date or season/episode number) and quality-
    filtered before being turned into a result object.
    """
    logger.log(u"Searching " + self.name + " for " + episode.prettyName())

    self.cache.updateCache()
    results = self.cache.searchCache(episode, manualSearch)
    logger.log(u"Cache results: " + str(results), logger.DEBUG)

    # if we got some results then use them no matter what.
    # OR
    # return anyway unless we're doing a manual search
    if results or not manualSearch:
        return results

    data = self.getURL(self.search_url, post_data=self._make_post_data_JSON(show=episode.show, episode=episode))

    if not data:
        logger.log(u"No data returned from " + self.search_url, logger.ERROR)
        return []

    parsedJSON = helpers.parse_json(data)

    if parsedJSON is None:
        logger.log(u"Error trying to load " + self.name + " JSON data", logger.ERROR)
        return []

    # on auth failure hand back the (empty) cache results, as before
    if not self._checkAuthFromData(parsedJSON):
        return results

    if parsedJSON and 'data' in parsedJSON:
        items = parsedJSON['data']
    else:
        logger.log(u"Resulting JSON from " + self.name + " isn't correct, not parsing it", logger.ERROR)
        items = []

    results = []

    for cur_item in items:
        (title, url) = self._get_title_and_url(cur_item)

        # parse the file name
        try:
            parse_result = NameParser().parse(title)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
            continue

        if episode.show.air_by_date:
            if parse_result.air_date != episode.airdate:
                logger.log(u"Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it", logger.DEBUG)
                continue
        elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
            logger.log(u"Episode " + title + " isn't " + str(episode.season) + "x" + str(episode.episode) + ", skipping it", logger.DEBUG)
            continue

        quality = self.getQuality(cur_item)

        if not episode.show.wantEpisode(episode.season, episode.episode, quality, manualSearch):
            logger.log(u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality], logger.DEBUG)
            continue

        logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

        result = self.getResult([episode])
        result.url = url
        result.name = title
        result.quality = quality

        results.append(result)

    return results
def findEpisode(self, episode, manualSearch=False):
    """Search this provider for *episode* and return result objects.

    The cache is consulted first and its results win whenever present
    (or whenever this is not a manual search). A direct provider query
    is only made for a manual search with an empty cache; every item
    is name-parsed, matched against the episode and quality-checked
    before being wrapped in a result object.
    """
    logger.log(u"Searching " + self.name + " for " + episode.prettyName())

    self.cache.updateCache()
    results = self.cache.searchCache(episode, manualSearch)
    logger.log(u"Cache results: " + str(results), logger.DEBUG)

    # if we got some results then use them no matter what.
    # OR
    # return anyway unless we're doing a manual search
    if results or not manualSearch:
        return results

    post_data = self._make_post_data_JSON(show=episode.show, episode=episode)
    data = self.getURL(self.search_url, post_data=post_data)

    if not data:
        logger.log(u"No data returned from " + self.search_url, logger.ERROR)
        return []

    parsedJSON = helpers.parse_json(data)

    if parsedJSON is None:
        logger.log(u"Error trying to load " + self.name + " JSON data", logger.ERROR)
        return []

    if self._checkAuthFromData(parsedJSON):
        results = []

        if parsedJSON and 'data' in parsedJSON:
            found_items = parsedJSON['data']
        else:
            logger.log(u"Resulting JSON from " + self.name + " isn't correct, not parsing it", logger.ERROR)
            found_items = []

        for found_item in found_items:
            title, url = self._get_title_and_url(found_item)

            # parse the file name
            try:
                name_parser = NameParser()
                parse_result = name_parser.parse(title)
            except InvalidNameException:
                logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
                continue

            if episode.show.air_by_date:
                # air-by-date shows match on airdate rather than SxxEyy
                if parse_result.air_date != episode.airdate:
                    logger.log(u"Episode " + title + " didn't air on " + str(episode.airdate) + ", skipping it", logger.DEBUG)
                    continue
            elif parse_result.season_number != episode.season or episode.episode not in parse_result.episode_numbers:
                logger.log(u"Episode " + title + " isn't " + str(episode.season) + "x" + str(episode.episode) + ", skipping it", logger.DEBUG)
                continue

            ep_quality = self.getQuality(found_item)

            if not episode.show.wantEpisode(episode.season, episode.episode, ep_quality, manualSearch):
                logger.log(u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[ep_quality], logger.DEBUG)
                continue

            logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

            search_result = self.getResult([episode])
            search_result.url = url
            search_result.name = title
            search_result.quality = ep_quality

            results.append(search_result)

    return results