def _extract_name_from_filename(self, filename):
    name_regex = '(.*?)\.?(\[.*]|\d+\.TPB)\.torrent$'
    logger.log(u"Comparing %s against %s" % (name_regex, filename), logger.DEBUG)
    match = re.match(name_regex, filename, re.I)
    if match:
        return match.group(1)
    return None
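# A standalone sketch of the torrent-name regex above; the sample filenames
# are hypothetical, chosen to exercise both alternatives of the second group
# ("[...]" suffix and "<digits>.TPB" suffix).
import re

name_regex = '(.*?)\.?(\[.*]|\d+\.TPB)\.torrent$'
for sample in ('Some.Show.S01E01.[grp].torrent', 'Some.Show.S01E01.123456.TPB.torrent'):
    match = re.match(name_regex, sample, re.I)
    print(match.group(1) if match else None)  # both print 'Some.Show.S01E01'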
def _notifyTrakt(self, method, api, username, password, data={}):
    """
    A generic method for communicating with trakt. Uses the method and data
    provided along with the auth info to send the command.

    method: The URL to use at trakt, relative, no leading slash.
    api: The API string to provide to trakt
    username: The username to use when logging in
    password: The unencrypted password to use when logging in

    Returns: A boolean representing success
    """
    logger.log(u"TRAKT: Calling method " + method, logger.DEBUG)

    # if the API isn't given then use the config API
    if not api:
        api = sickbeard.TRAKT_API

    # if the username isn't given then use the config username
    if not username:
        username = sickbeard.TRAKT_USERNAME

    # if the password isn't given then use the config password
    if not password:
        password = sickbeard.TRAKT_PASSWORD
    password = sha1(password).hexdigest()

    # append apikey to method
    method += api

    data["username"] = username
    data["password"] = password

    # take the URL params and make a json object out of them
    encoded_data = json.dumps(data)

    # request the URL from trakt and parse the result as json
    try:
        logger.log(u"TRAKT: Calling method http://api.trakt.tv/" + method + ", with data " + encoded_data, logger.DEBUG)
        # TODO: Use our getURL from helper?
        stream = urllib2.urlopen("http://api.trakt.tv/" + method, encoded_data)
        resp = stream.read()

        resp = json.loads(resp)

        if "error" in resp:
            raise Exception(resp["error"])

    except IOError:
        logger.log(u"TRAKT: Failed calling method", logger.ERROR)
        return False

    if resp["status"] == "success":
        logger.log(u"TRAKT: Succeeded calling method. Result: " + resp["message"], logger.DEBUG)
        return True

    logger.log(u"TRAKT: Failed calling method", logger.ERROR)
    return False
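# Sketch of the request body _notifyTrakt builds; the credentials are
# hypothetical, and the sha1 hashing of the raw password mirrors the code above.
import json
from hashlib import sha1

data = {"username": "user", "password": sha1(b"secret").hexdigest()}
print(json.dumps(data))  # this JSON string is what gets POSTed to trakt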
def _retrieve_show_image(self, image_type, show_obj, which=None):
    """
    Gets an image URL from theTVDB.com, downloads it and returns the data.

    image_type: type of image to retrieve (currently supported: poster, fanart)
    show_obj: a TVShow object to use when searching for the image
    which: optional, a specific numbered poster to look for

    Returns: the binary image data if available, or else None
    """
    tvdb_lang = show_obj.lang

    try:
        # There's gotta be a better way of doing this but we don't wanna
        # change the language value elsewhere
        ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

        if tvdb_lang and not tvdb_lang == 'en':
            ltvdb_api_parms['language'] = tvdb_lang

        t = tvdb_api.Tvdb(banners=True, **ltvdb_api_parms)
        tvdb_show_obj = t[show_obj.tvdbid]
    except (tvdb_exceptions.tvdb_error, IOError), e:
        logger.log(u"Unable to look up show on TVDB, not downloading images: " + ex(e), logger.ERROR)
        return None
def _check_github_for_update(self):
    """
    Uses pygithub to ask github if there is a newer version than the provided
    commit hash. If there is a newer version it sets Sick Beard's version text.

    commit_hash: hash that we're checking against
    """
    self._num_commits_behind = 0
    self._newest_commit_hash = None

    gh = github.GitHub()

    # find newest commit
    for curCommit in gh.commits.forBranch('midgetspy', 'Sick-Beard', version.SICKBEARD_VERSION):
        if not self._newest_commit_hash:
            self._newest_commit_hash = curCommit.id
            if not self._cur_commit_hash:
                break

        if curCommit.id == self._cur_commit_hash:
            break

        self._num_commits_behind += 1

    logger.log(u"newest: " + str(self._newest_commit_hash) + " and current: " + str(self._cur_commit_hash) + " and num_commits: " + str(self._num_commits_behind), logger.DEBUG)
def _season_thumb_dict(self, show_obj):
    """
    Should return a dict like:

    result = {<season number>:
                {1: '<url 1>', 2: <url 2>, ...},}
    """
    # This holds our resulting dictionary of season art
    result = {}

    tvdb_lang = show_obj.lang

    try:
        # There's gotta be a better way of doing this but we don't wanna
        # change the language value elsewhere
        ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

        if tvdb_lang and not tvdb_lang == 'en':
            ltvdb_api_parms['language'] = tvdb_lang

        t = tvdb_api.Tvdb(banners=True, **ltvdb_api_parms)
        tvdb_show_obj = t[show_obj.tvdbid]
    except (tvdb_exceptions.tvdb_error, IOError), e:
        logger.log(u"Unable to look up show on TVDB, not downloading images: " + ex(e), logger.ERROR)
        return result
def execute(self):
    ShowQueueItem.execute(self)

    logger.log(u"Performing rename on " + self.show.name)

    try:
        show_loc = self.show.location
    except exceptions.ShowDirNotFoundException:
        logger.log(u"Can't perform rename on " + self.show.name + " when the show dir is missing.", logger.WARNING)
        return

    ep_obj_rename_list = []

    ep_obj_list = self.show.getAllEpisodes(has_location=True)
    for cur_ep_obj in ep_obj_list:
        # Only want to rename if we have a location
        if cur_ep_obj.location:
            if cur_ep_obj.relatedEps:
                # do we have one of the multi-episodes in the rename list already
                have_already = False
                for cur_related_ep in cur_ep_obj.relatedEps + [cur_ep_obj]:
                    if cur_related_ep in ep_obj_rename_list:
                        have_already = True
                        break
                if not have_already:
                    ep_obj_rename_list.append(cur_ep_obj)
            else:
                ep_obj_rename_list.append(cur_ep_obj)

    for cur_ep_obj in ep_obj_rename_list:
        cur_ep_obj.rename()

    self.inProgress = False
def save_thumbnail(self, ep_obj):
    """
    Retrieves a thumbnail and saves it to the correct spot. This method should
    not need to be overridden by implementing classes, changing
    get_episode_thumb_path and _get_episode_thumb_url should suffice.

    ep_obj: a TVEpisode object for which to generate a thumbnail
    """
    file_path = self.get_episode_thumb_path(ep_obj)

    if not file_path:
        logger.log(u"Unable to find a file path to use for this thumbnail, not generating it", logger.DEBUG)
        return False

    thumb_url = self._get_episode_thumb_url(ep_obj)

    # if we can't find one then give up
    if not thumb_url:
        logger.log(u"No thumb is available for this episode, not creating a thumb", logger.DEBUG)
        return False

    thumb_data = metadata_helpers.getShowImage(thumb_url)

    result = self._write_image(thumb_data, file_path)

    if not result:
        return False

    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        cur_ep.hastbn = True

    return True
def rename_ep_file(cur_path, new_path):
    """
    Creates all folders needed to move a file to its new location, renames it,
    then cleans up any folders left that are now empty.

    cur_path: The absolute path to the file you want to move/rename
    new_path: The absolute path to the destination for the file WITHOUT THE EXTENSION
    """
    new_dest_dir, new_dest_name = os.path.split(new_path)  # @UnusedVariable
    cur_file_name, cur_file_ext = os.path.splitext(cur_path)  # @UnusedVariable

    if cur_file_ext[1:] in subtitleExtensions:
        # Extract subtitle language from filename
        sublang = os.path.splitext(cur_file_name)[1][1:]

        # Check if the language extracted from filename is a valid language
        try:
            language = subliminal.language.Language(sublang, strict=True)
            cur_file_ext = '.' + sublang + cur_file_ext
        except ValueError:
            pass

    # put the extension on the incoming file
    new_path += cur_file_ext

    make_dirs(os.path.dirname(new_path))

    # move the file
    try:
        logger.log(u"Renaming file from " + cur_path + " to " + new_path)
        ek.ek(os.rename, cur_path, new_path)
    except (OSError, IOError), e:
        logger.log(u"Failed renaming " + cur_path + " to " + new_path + ": " + ex(e), logger.ERROR)
        return False

    # clean up any old folders that are empty
    delete_empty_folders(ek.ek(os.path.dirname, cur_path))

    return True
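# Hypothetical calls illustrating the contract above: new_path is passed
# WITHOUT an extension, and the source file's extension (plus a subtitle
# language tag, when one parses as a valid language) is re-attached.
#
# rename_ep_file('/tv/old.name.s01e01.mkv', '/tv/Season 1/Show - S01E01')
#   -> file ends up at '/tv/Season 1/Show - S01E01.mkv'
# rename_ep_file('/tv/old.name.s01e01.en.srt', '/tv/Season 1/Show - S01E01')
#   -> '.en.srt' is preserved because 'en' is a valid subtitle language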
def create_https_certificates(ssl_cert, ssl_key):
    """
    Create self-signed HTTPS certificates and store them in the paths
    'ssl_cert' and 'ssl_key'.
    """
    try:
        from OpenSSL import crypto  # @UnresolvedImport
        from lib.certgen import createKeyPair, createCertRequest, createCertificate, TYPE_RSA, serial  # @UnresolvedImport
    except ImportError:
        logger.log(u"pyopenssl module missing, please install for https access", logger.WARNING)
        return False

    # Create the CA Certificate
    cakey = createKeyPair(TYPE_RSA, 1024)
    careq = createCertRequest(cakey, CN='Certificate Authority')
    cacert = createCertificate(careq, (careq, cakey), serial, (0, 60 * 60 * 24 * 365 * 10))  # ten years

    cname = 'SickBeard'
    pkey = createKeyPair(TYPE_RSA, 1024)
    req = createCertRequest(pkey, CN=cname)
    cert = createCertificate(req, (cacert, cakey), serial, (0, 60 * 60 * 24 * 365 * 10))  # ten years

    # Save the key and certificate to disk
    try:
        open(ssl_key, 'w').write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
        open(ssl_cert, 'w').write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
    except (IOError, OSError):
        logger.log(u"Error creating SSL key and certificate", logger.ERROR)
        return False

    return True
def testAuthentication(host=None, username=None, password=None, apikey=None):
    """
    Sends a simple API request to SAB to determine if the given connection
    information is correct.

    host: The host where SAB is running (incl port)
    username: The username to use for the HTTP request
    password: The password to use for the HTTP request
    apikey: The API key to provide to SAB

    Returns: A tuple containing the success boolean and a message
    """
    # build up the URL parameters
    params = {}
    params['mode'] = 'queue'
    params['output'] = 'json'
    params['ma_username'] = username
    params['ma_password'] = password
    params['apikey'] = apikey
    url = host + "api?" + urllib.urlencode(params)

    # send the test request
    logger.log(u"SABnzbd test URL: " + url, logger.DEBUG)
    result, f = _sabURLOpenSimple(url)
    if not result:
        return False, f

    # check the result and determine if it's good or not
    result, sabText = _checkSabResponse(f)
    if not result:
        return False, sabText

    return True, "Success"
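# The SABnzbd test URL assembled above, shown for hypothetical values; note
# that host is expected to already end with a slash since "api?" is simply
# concatenated onto it.
import urllib

params = {'mode': 'queue', 'output': 'json', 'ma_username': 'user',
          'ma_password': 'pass', 'apikey': 'key'}
print('http://localhost:8080/sabnzbd/' + 'api?' + urllib.urlencode(params))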
def getURL(url, headers=[]):
    """
    Returns a byte-string retrieved from the url provider.
    """
    opener = urllib2.build_opener()
    opener.addheaders = [('User-Agent', USER_AGENT), ('Accept-Encoding', 'gzip,deflate')]
    for cur_header in headers:
        opener.addheaders.append(cur_header)

    try:
        usock = opener.open(url)
        url = usock.geturl()
        encoding = usock.info().get("Content-Encoding")

        if encoding in ('gzip', 'x-gzip', 'deflate'):
            content = usock.read()
            if encoding == 'deflate':
                data = StringIO.StringIO(zlib.decompress(content))
            else:
                data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(content))
            result = data.read()
        else:
            result = usock.read()

        usock.close()

    except urllib2.HTTPError, e:
        logger.log(u"HTTP error " + str(e.code) + " while loading URL " + url, logger.WARNING)
        return None

    return result
def get_season_thumb_path(self, show_obj, season):
    """
    Season thumbs for MediaBrowser go in Show Dir/Season X/folder.jpg

    If no season folder exists, None is returned
    """
    dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if
                ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))]

    season_dir_regex = '^Season\s+(\d+)$'

    season_dir = None

    for cur_dir in dir_list:
        if season == 0 and cur_dir == 'Specials':
            season_dir = cur_dir
            break

        match = re.match(season_dir_regex, cur_dir, re.I)
        if not match:
            continue

        cur_season = int(match.group(1))

        if cur_season == season:
            season_dir = cur_dir
            break

    if not season_dir:
        logger.log(u"Unable to find a season dir for season " + str(season), logger.DEBUG)
        return None

    logger.log(u"Using " + str(season_dir) + "/folder.jpg as season dir for season " + str(season), logger.DEBUG)

    return ek.ek(os.path.join, show_obj.location, season_dir, 'folder.jpg')
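# Standalone check of the season-folder regex above (directory names
# hypothetical); only whole names like 'Season <n>' match, case-insensitively.
import re

season_dir_regex = '^Season\s+(\d+)$'
for d in ('Season 1', 'season 02', 'Specials', 'Season 1 Extras'):
    m = re.match(season_dir_regex, d, re.I)
    print('%s -> %s' % (d, int(m.group(1)) if m else None))
# 'Specials' is handled separately above as season 0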
def _getRSSData(self):
    languages = helpers.getAllLanguages()
    languages = filter(lambda x: not x == u"en", languages)
    cat = '5030,5040'
    if len(languages) > 0:
        cat = '5020'
    params = {"t": "tvsearch",
              "cat": cat}

    # hack this in for now
    if self.provider.getID() == 'nzbs_org':
        params['cat'] += ',5070,5090'

    if self.provider.key:
        params['apikey'] = self.provider.key

    url = self.provider.url + 'api?' + urllib.urlencode(params)

    logger.log(self.provider.name + " cache update URL: " + url, logger.DEBUG)

    data = self.provider.getURL(url)

    # hack this in until it's fixed server side
    if data and not data.startswith('<?xml'):
        data = '<?xml version="1.0" encoding="ISO-8859-1" ?>' + data

    return data
def _retrieve_show_images_from_tmdb(self, show, img_type):
    types = {'poster': 'poster_path',
             'banner': None,
             'fanart': 'backdrop_path',
             'poster_thumb': 'poster_path',
             'banner_thumb': None}

    # get TMDB configuration info
    tmdb = TMDB(sickbeard.TMDB_API_KEY)
    config = tmdb.Configuration()
    response = config.info()
    base_url = response['images']['base_url']
    sizes = response['images']['poster_sizes']

    def size_str_to_int(x):
        return float("inf") if x == 'original' else int(x[1:])

    max_size = max(sizes, key=size_str_to_int)

    try:
        search = tmdb.Search()
        for show_name in set(allPossibleShowNames(show)):
            for result in search.collection({'query': show_name})['results'] + search.tv({'query': show_name})['results']:
                # results are plain dicts, so use a key lookup rather than getattr()
                if types[img_type] and result.get(types[img_type]):
                    return "{0}{1}{2}".format(base_url, max_size, result[types[img_type]])

    except Exception:
        pass

    logger.log(u"Could not find any " + img_type + " images on TMDB for " + show.name, logger.INFO)
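# TMDB reports poster sizes as 'w92', 'w154', ..., 'original'; a standalone
# demonstration of the max-size selection used above:
def size_str_to_int(x):
    return float("inf") if x == 'original' else int(x[1:])

sizes = ['w92', 'w154', 'w185', 'w342', 'w500', 'w780', 'original']
print(max(sizes, key=size_str_to_int))  # -> 'original'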
def _retrieve_show_images_from_fanart(self, show, img_type, thumb=False):
    types = {
        'poster': fanart.TYPE.TV.POSTER,
        'banner': fanart.TYPE.TV.BANNER,
        'poster_thumb': fanart.TYPE.TV.POSTER,
        'banner_thumb': fanart.TYPE.TV.BANNER,
        'fanart': fanart.TYPE.TV.BACKGROUND,
    }

    try:
        indexerid = helpers.mapIndexersToShow(show)[1]
        if indexerid:
            request = fanartRequest(
                apikey=sickbeard.FANART_API_KEY,
                id=indexerid,
                ws=fanart.WS.TV,
                type=types[img_type],
                sort=fanart.SORT.POPULAR,
                limit=fanart.LIMIT.ONE,
            )

            resp = request.response()
            url = resp[types[img_type]][0]['url']
            if thumb:
                url = re.sub('/fanart/', '/preview/', url)
            return url
    except Exception:
        pass

    logger.log(u"Could not find any " + img_type + " images on Fanart.tv for " + show.name, logger.INFO)
def _retrieve_show_image(self, image_type, show_obj, which=None):
    """
    Gets an image URL from theTVDB.com and TMDB.com, downloads it and returns the data.

    image_type: type of image to retrieve (currently supported: fanart, poster, banner)
    show_obj: a TVShow object to use when searching for the image
    which: optional, a specific numbered poster to look for

    Returns: the binary image data if available, or else None
    """
    image_url = None
    indexer_lang = show_obj.lang

    try:
        # There's gotta be a better way of doing this but we don't wanna
        # change the language value elsewhere
        lINDEXER_API_PARMS = sickbeard.indexerApi(show_obj.indexer).api_params.copy()

        lINDEXER_API_PARMS['banners'] = True

        if indexer_lang and not indexer_lang == sickbeard.INDEXER_DEFAULT_LANGUAGE:
            lINDEXER_API_PARMS['language'] = indexer_lang

        if show_obj.dvdorder != 0:
            lINDEXER_API_PARMS['dvdorder'] = True

        t = sickbeard.indexerApi(show_obj.indexer).indexer(**lINDEXER_API_PARMS)
        indexer_show_obj = t[show_obj.indexerid]
    except (sickbeard.indexer_error, IOError), e:
        logger.log(u"Unable to look up show on " + sickbeard.indexerApi(show_obj.indexer).name + ", not downloading images: " + ex(e), logger.WARNING)
        logger.log(u"Indexer " + sickbeard.indexerApi(show_obj.indexer).name + " may be experiencing some problems. Try again later.", logger.DEBUG)
        return None
def _season_banners_dict(self, show_obj, season):
    """
    Should return a dict like:

    result = {<season number>:
                {1: '<url 1>', 2: <url 2>, ...},}
    """
    # This holds our resulting dictionary of season art
    result = {}

    indexer_lang = show_obj.lang

    try:
        # There's gotta be a better way of doing this but we don't wanna
        # change the language value elsewhere
        lINDEXER_API_PARMS = sickbeard.indexerApi(show_obj.indexer).api_params.copy()

        lINDEXER_API_PARMS['banners'] = True

        if indexer_lang and not indexer_lang == sickbeard.INDEXER_DEFAULT_LANGUAGE:
            lINDEXER_API_PARMS['language'] = indexer_lang

        t = sickbeard.indexerApi(show_obj.indexer).indexer(**lINDEXER_API_PARMS)
        indexer_show_obj = t[show_obj.indexerid]
    except (sickbeard.indexer_error, IOError), e:
        logger.log(u"Unable to look up show on " + sickbeard.indexerApi(show_obj.indexer).name + ", not downloading images: " + ex(e), logger.WARNING)
        logger.log(u"Indexer " + sickbeard.indexerApi(show_obj.indexer).name + " may be experiencing some problems. Try again later.", logger.DEBUG)
        return result
def _check_exists(location):
    if location:
        assert isinstance(location, unicode)
        result = os.path.isfile(location)
        logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG)
        return result
    return False
def update_show_indexer_metadata(self, show_obj):
    if self.show_metadata and show_obj and self._has_show_metadata(show_obj):
        logger.log(u"Metadata provider " + self.name + " updating show indexer info metadata file for " + show_obj.name, logger.DEBUG)

        nfo_file_path = self.get_show_file_path(show_obj)
        assert isinstance(nfo_file_path, unicode)

        try:
            with io.open(nfo_file_path, 'rb') as xmlFileObj:
                showXML = etree.ElementTree(file=xmlFileObj)

            indexerid = showXML.find('id')

            root = showXML.getroot()
            if indexerid is not None:
                indexerid.text = str(show_obj.indexerid)
            else:
                etree.SubElement(root, "id").text = str(show_obj.indexerid)

            # Make it purdy
            helpers.indentXML(root)

            showXML.write(nfo_file_path, encoding='UTF-8')
            helpers.chmodAsParent(nfo_file_path)

            return True
        except IOError, e:
            logger.log(u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e), logger.ERROR)
def set_newest_text(self):

    # if we're up to date then don't set this
    sickbeard.NEWEST_VERSION_STRING = None

    if not self._cur_commit_hash or self._num_commits_behind == 100:
        logger.log(u"Unknown current version, don't know if we should update or not", logger.DEBUG)

        newest_text = "Unknown version: If you've never used the Sick Beard upgrade system then I don't know what version you have."
        newest_text += "&mdash; <a href=\"" + self.get_update_url() + "\">Update Now</a>"

    elif self._num_commits_behind > 0:
        base_url = 'http://github.com/' + self.github_repo_user + '/' + self.github_repo
        if self._newest_commit_hash:
            url = base_url + '/compare/' + self._cur_commit_hash + '...' + self._newest_commit_hash
        else:
            url = base_url + '/commits/'

        newest_text = 'There is a <a href="' + url + '" onclick="window.open(this.href); return false;">newer version available</a>'
        newest_text += " (you're " + str(self._num_commits_behind) + " commit"
        if self._num_commits_behind > 1:
            newest_text += "s"
        newest_text += " behind)" + "&mdash; <a href=\"" + self.get_update_url() + "\">Update Now</a>"

    else:
        return

    sickbeard.NEWEST_VERSION_STRING = newest_text
def execute(self):
    dir_results = self.connection.select("SELECT location FROM tv_shows")

    dir_counts = {}
    for cur_dir in dir_results:
        cur_root_dir = ek.ek(os.path.dirname, ek.ek(os.path.normpath, cur_dir["location"]))
        if cur_root_dir not in dir_counts:
            dir_counts[cur_root_dir] = 1
        else:
            dir_counts[cur_root_dir] += 1

    logger.log(u"Dir counts: " + str(dir_counts), logger.DEBUG)

    if not dir_counts:
        self.incDBVersion()
        return

    default_root_dir = dir_counts.values().index(max(dir_counts.values()))

    new_root_dirs = str(default_root_dir) + '|' + '|'.join(dir_counts.keys())
    logger.log(u"Setting ROOT_DIRS to: " + new_root_dirs, logger.DEBUG)

    sickbeard.ROOT_DIRS = new_root_dirs

    sickbeard.save_config()

    self.incDBVersion()
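# ROOT_DIRS is serialized as '<default index>|<dir>|<dir>|...'; a standalone
# sketch of the selection above with hypothetical paths (in Python 2 the
# ordering of .values() and .keys() on the same dict is consistent):
dir_counts = {'/tv/main': 5, '/tv/archive': 2}
default_root_dir = dir_counts.values().index(max(dir_counts.values()))
print(str(default_root_dir) + '|' + '|'.join(dir_counts.keys()))
# e.g. '0|/tv/main|/tv/archive' when /tv/main holds the most shows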
def _find_newest_version(self, whole_link=False):
    """
    Checks git for the newest Windows binary build. Returns either the build
    number or the entire build URL depending on whole_link's value.

    whole_link: If True, returns the entire URL to the release. If False, it
                returns only the build number. default: False
    """
    regex = ".*SickBeard\-win32\-alpha\-build(\d+)(?:\.\d+)?\.zip"

    version_url_data = helpers.getURL(self.version_url)

    if version_url_data is None:
        return None
    else:
        for curLine in version_url_data.splitlines():
            logger.log(u"checking line " + curLine, logger.DEBUG)
            match = re.match(regex, curLine)
            if match:
                logger.log(u"found a match", logger.DEBUG)
                if whole_link:
                    return curLine.strip()
                else:
                    return int(match.group(1))

    return None
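# Standalone check of the Windows build regex above (file name hypothetical):
import re

regex = ".*SickBeard\-win32\-alpha\-build(\d+)(?:\.\d+)?\.zip"
m = re.match(regex, 'SickBeard-win32-alpha-build507.zip')
print(int(m.group(1)) if m else None)  # -> 507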
def filterBadReleases(name):
    """
    Filters out non-english and just all-around stupid releases by comparing
    them to the resultFilters contents.

    name: the release name to check

    Returns: True if the release name is OK, False if it's bad.
    """
    cp = CompleteParser()
    cpr = cp.parse(name)
    parse_result = cpr.parse_result

    # use the extra info and the scene group to filter against
    check_string = ''
    if parse_result.extra_info:
        check_string = parse_result.extra_info
    if parse_result.release_group:
        if check_string:
            check_string = check_string + '-' + parse_result.release_group
        else:
            check_string = parse_result.release_group

    # if there's no info after the season info then assume it's fine
    if not check_string:
        return True

    # if any of the bad strings are in the name then say no
    for x in resultFilters + sickbeard.IGNORE_WORDS.split(','):
        if re.search('(^|[\W_])' + x + '($|[\W_])', check_string, re.I):
            logger.log(u"Invalid scene release: " + name + " contains " + x + ", ignoring it", logger.DEBUG)
            return False

    return True
def getQuality(self, item):
    attributes = item.find(self._report('attributes'))
    attr_dict = {}

    for attribute in attributes.getiterator(self._report('attribute')):
        cur_attr = attribute.attrib['type']
        if cur_attr not in attr_dict:
            attr_dict[cur_attr] = [attribute.text]
        else:
            attr_dict[cur_attr].append(attribute.text)

    logger.log(u"Finding quality of item based on attributes " + str(attr_dict), logger.DEBUG)

    if self._is_SDTV(attr_dict):
        quality = Quality.SDTV
    elif self._is_SDDVD(attr_dict):
        quality = Quality.SDDVD
    elif self._is_HDTV(attr_dict):
        quality = Quality.HDTV
    elif self._is_WEBDL(attr_dict):
        quality = Quality.HDWEBDL
    elif self._is_720pBluRay(attr_dict):
        quality = Quality.HDBLURAY
    elif self._is_1080pBluRay(attr_dict):
        quality = Quality.FULLHDBLURAY
    else:
        quality = Quality.UNKNOWN

    logger.log(u"Resulting quality: " + str(quality), logger.DEBUG)

    return quality
def updateCache(self):
    # check if we should update
    if not self.shouldUpdate():
        return

    # clear cache
    self._clearCache()

    # set updated
    self.setLastUpdate()

    cl = []
    for group in ['alt.binaries.hdtv', 'alt.binaries.hdtv.x264', 'alt.binaries.tv', 'alt.binaries.tvseries']:
        search_params = {'max': 50, 'g': group}
        data = self.getRSSFeed(self.provider.urls['rss'], search_params)['entries']
        if not data:
            logger.log('No data returned from provider', logger.DEBUG)
            continue

        for item in data:
            ci = self._parseItem(item)
            if ci:
                cl.append(ci)

    if cl:
        cache_db_con = self._getDB()
        cache_db_con.mass_action(cl)
def filterBadReleases(name):
    """
    Filters out non-english and just all-around stupid releases by comparing
    them to the resultFilters contents.

    name: the release name to check

    Returns: True if the release name is OK, False if it's bad.
    """
    try:
        fp = NameParser()
        parse_result = fp.parse(name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.WARNING)
        return False

    # if any of the bad strings are in the name then say no
    # (build a local list so the module-level resultFilters isn't grown on every call)
    word_list = list(resultFilters)
    if sickbeard.IGNORE_WORDS:
        word_list.extend(sickbeard.IGNORE_WORDS.split(','))

    filters = [re.compile('(^|[\W_])%s($|[\W_])' % word.strip(), re.I) for word in word_list]
    for regfilter in filters:
        if regfilter.search(name):
            logger.log(u"Invalid scene release: " + name + " contains pattern: " + regfilter.pattern + ", ignoring it", logger.DEBUG)
            return False

    return True
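# The (^|[\W_]) ... ($|[\W_]) wrapping matches an ignore word only as a whole
# token; a standalone sketch with a hypothetical word list:
import re

for name in ('Show.S01E01.german.720p', 'Show.S01E01.germanic.720p'):
    bad = re.search('(^|[\W_])german($|[\W_])', name, re.I)
    print('%s -> %s' % (name, 'filtered' if bad else 'ok'))
# only the first is filtered; 'germanic' is not a whole-token match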
def _getRSSData(self, search=None):
    params = {
        'searchaction': 'Search',
        'fpn': 'p',
        'category': 8,
        'u_nfo_posts_only': 0,
        'u_url_posts_only': 0,
        'u_comment_posts_only': 0,
        'u_show_passworded': 0,
        'u_v3_retention': 0,
        'ps_rb_source': 3008,
        'ps_rb_video_format': 3082257,
        'ps_rb_language': 4096,
        'sort': 'date',
        'order': 'desc',
        'u_post_results_amt': 50,
        'feed': 'rss',
        'hauth': 1,
    }

    if search:
        params['q'] = search + " AND "
    else:
        params['q'] = ''

    params['q'] += 'Attr:Lang~Eng AND NOT Attr:VideoF=DVD'

    url = self.url + "search/?%s" % urllib.urlencode(params)
    logger.log(u"Newzbin search URL: " + url, logger.DEBUG)

    data = self.getURL(url)

    return data
def _find_installed_version(self):
    try:
        version = sickbeard.version.SICKBEARD_VERSION
        return int(version[6:])
    except ValueError:
        logger.log(u"Unknown SickBeard Windows binary release: " + version, logger.ERROR)
        return None
def updateCache(self):
    if not self.shouldUpdate():
        return

    search_params = {'RSS': ['']}
    rss_results = self.provider._doSearch(search_params)

    if rss_results:
        self.setLastUpdate()
    else:
        return []

    logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
    self._clearCache()

    cl = []
    for result in rss_results:
        item = (result[0], result[1])
        ci = self._parseItem(item)
        if ci is not None:
            cl.append(ci)

    if len(cl) > 0:
        myDB = self._getDB()
        myDB.mass_action(cl)
def _check_github_for_update(self):
    """
    Uses pygithub to ask github if there is a newer version than the provided
    commit hash. If there is a newer version it sets Sick Beard's version text.

    commit_hash: hash that we're checking against
    """
    self._num_commits_behind = 0
    self._newest_commit_hash = None

    gh = github.GitHub(self.github_repo_user, self.github_repo, self.branch)

    # find newest commit
    for curCommit in gh.commits():
        if not self._newest_commit_hash:
            self._newest_commit_hash = curCommit['sha']
            if not self._cur_commit_hash:
                break

        if curCommit['sha'] == self._cur_commit_hash:
            break

        self._num_commits_behind += 1

    logger.log(u"newest: " + str(self._newest_commit_hash) + " and current: " + str(self._cur_commit_hash) + " and num_commits: " + str(self._num_commits_behind), logger.DEBUG)
def _update_zoneinfo():
    if not should_try_loading():
        return

    global sb_timezone
    sb_timezone = get_tz()

    # now check if the zoneinfo needs update
    url_zv = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/zoneinfo.txt'

    url_data = helpers.getURL(url_zv)
    if url_data is None:
        update_last_retry()
        # When urlData is None, trouble connecting to github
        logger.log(u'Loading zoneinfo.txt failed, this can happen from time to time. Unable to get URL: %s' % url_zv, logger.WARNING)
        return
    else:
        reset_last_retry()

    zonefilename = zoneinfo.ZONEFILENAME
    cur_zoneinfo = zonefilename
    if None is not cur_zoneinfo:
        cur_zoneinfo = ek.ek(basename, zonefilename)
    zonefile = helpers.real_path(ek.ek(join, sickbeard.ZONEINFO_DIR, cur_zoneinfo))
    zonemetadata = zoneinfo.gettz_db_metadata() if ek.ek(os.path.isfile, zonefile) else None

    (new_zoneinfo, zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ')

    newtz_regex = re.search(r'(\d{4}[^.]+)', new_zoneinfo)
    if not newtz_regex or len(newtz_regex.groups()) != 1:
        return
    newtzversion = newtz_regex.group(1)

    if cur_zoneinfo is not None and zonemetadata is not None and 'tzversion' in zonemetadata and zonemetadata['tzversion'] == newtzversion:
        return

    # now load the new zoneinfo
    url_tar = u'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/%s' % new_zoneinfo

    zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile)

    if ek.ek(os.path.exists, zonefile_tmp):
        try:
            ek.ek(os.remove, zonefile_tmp)
        except (OSError, IOError):
            logger.log(u'Unable to delete: %s' % zonefile_tmp, logger.ERROR)
            return

    if not helpers.download_file(url_tar, zonefile_tmp):
        return

    if not ek.ek(os.path.exists, zonefile_tmp):
        logger.log(u'Download of %s failed.' % zonefile_tmp, logger.ERROR)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u'Updating timezone info with new one: %s' % new_zoneinfo, logger.MESSAGE)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(ek.ek(join, sickbeard.ZONEINFO_DIR, cur_zoneinfo))
                if ek.ek(os.path.exists, old_file):
                    ek.ek(os.remove, old_file)
            # rename downloaded file
            ek.ek(os.rename, zonefile_tmp, zonefile)
            from dateutil.zoneinfo import gettz
            if '_CLASS_ZONE_INSTANCE' in gettz.func_globals:
                gettz.func_globals.__setitem__('_CLASS_ZONE_INSTANCE', list())
            tz.gettz.cache_clear()
            sb_timezone = get_tz()
        except (StandardError, Exception):
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(u'MD5 hash does not match: %s File: %s' % (zoneinfo_md5.upper(), new_hash.upper()), logger.ERROR)
        return
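# zoneinfo.txt is expected to hold '<tarball name> <md5>'; a standalone check
# of the version regex used above (values hypothetical):
import re

new_zoneinfo, zoneinfo_md5 = u'2019c-sb.tar.gz d41d8cd98f00b204e9800998ecf8427e'.strip().rsplit(u' ')
m = re.search(r'(\d{4}[^.]+)', new_zoneinfo)
print(m.group(1) if m else None)  # -> '2019c-sb'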
        for path in paths:
            # we do not need it double-encoded, gawd this is dumb
            unEncPath = urllib.unquote(path.text).decode(sickbeard.SYS_ENCODING)
            logger.log(u"KODI Updating " + showName + " on " + host + " at " + unEncPath, logger.DEBUG)
            updateCommand = {'command': 'ExecBuiltIn', 'parameter': 'KODI.updatelibrary(video, %s)' % (unEncPath)}
            request = self._send_to_kodi(updateCommand, host)
            if not request:
                logger.log(u"Update of show directory failed on " + showName + " on " + host + " at " + unEncPath, logger.ERROR)
                return False
            # sleep for a few seconds just to be sure kodi has a chance to finish each directory
            if len(paths) > 1:
                time.sleep(5)

    # do a full update if requested
    else:
        logger.log(u"Doing Full Library KODI update on host: " + host, logger.INFO)
        updateCommand = {'command': 'ExecBuiltIn', 'parameter': 'KODI.updatelibrary(video)'}
        request = self._send_to_kodi(updateCommand, host)

        if not request:
            logger.log(u"KODI Full Library update failed on: " + host, logger.ERROR)
            return False

    return True


##############################################################################
# JSON-RPC API (KODI 12+) methods
##############################################################################

def _send_to_kodi_json(self, command, host=None, username=None, password=None):
    """Handles communication to KODI servers via JSONRPC"""
def __init__(self, force=None, show=None):

    # TODO: if not sickbeard.DOWNLOAD_FRENCH:
    #     return
    if sickbeard.showList is None:
        return

    logger.log(u"Beginning the search for french episodes older than " + str(sickbeard.FRENCH_DELAY) + " days")

    frenchlist = []

    # get list of english episodes that we want to search in french
    myDB = db.DBConnection()
    today = datetime.date.today().toordinal()

    if show:
        frenchsql = myDB.select(
            "SELECT showid, season, episode from tv_episodes where audio_langs='en' and tv_episodes.showid =? and (? - tv_episodes.airdate) > ? order by showid, airdate asc",
            [show, today, sickbeard.FRENCH_DELAY])
        count = myDB.select(
            "SELECT count(*) from tv_episodes where audio_langs='en' and tv_episodes.showid =? and (? - tv_episodes.airdate) > ?",
            [show, today, sickbeard.FRENCH_DELAY])
    else:
        frenchsql = myDB.select(
            "SELECT showid, season, episode from tv_episodes, tv_shows where audio_langs='en' and tv_episodes.showid = tv_shows.tvdb_id and tv_shows.frenchsearch = 1 and (? - tv_episodes.airdate) > ? order by showid, airdate asc",
            [today, sickbeard.FRENCH_DELAY])
        count = myDB.select(
            "SELECT count(*) from tv_episodes, tv_shows where audio_langs='en' and tv_episodes.showid = tv_shows.tvdb_id and tv_shows.frenchsearch = 1 and (? - tv_episodes.airdate) > ?",
            [today, sickbeard.FRENCH_DELAY])

    # make the episode objects
    logger.log(u"Searching for " + str(count[0][0]) + " episodes in french")
    for episode in frenchsql:
        showObj = helpers.findCertainShow(sickbeard.showList, episode[0])
        epObj = showObj.getEpisode(episode[1], episode[2])
        frenchlist.append(epObj)

    # for each episode in frenchlist fire a search in french
    delay = []
    for frepisode in frenchlist:
        if frepisode.show.tvdbid in delay:
            logger.log(u"Previous episode for show " + str(frepisode.show.tvdbid) + " not found in french so skipping this search", logger.DEBUG)
            continue

        result = []
        for curProvider in providers.sortedProviderList():
            if not curProvider.isActive():
                continue

            logger.log(u"Searching for french episodes on " + curProvider.name + " for " + frepisode.show.name + " season " + str(frepisode.season) + " episode " + str(frepisode.episode))
            try:
                curfrench = curProvider.findFrench(frepisode, manualSearch=True)
            except Exception:
                logger.log(u"Exception", logger.DEBUG)
                continue

            for x in curfrench:
                result.append(x)

        best = None
        try:
            epi = {}
            epi[1] = frepisode
            best = search.pickBestResult(result, episode=epi)
        except Exception:
            pass

        if best:
            best.name = best.name + ' snatchedfr'
            logger.log(u"Found french episode for " + frepisode.show.name + " season " + str(frepisode.season) + " episode " + str(frepisode.episode))
            try:
                search.snatchEpisode(best, SNATCHED_FRENCH)
            except Exception:
                logger.log(u"Exception", logger.DEBUG)
        else:
            delay.append(frepisode.show.tvdbid)
            logger.log(u"No french episodes found for " + frepisode.show.name + " season " + str(frepisode.season) + " episode " + str(frepisode.episode))
def _getShowCollection(self):
    """
    Get Collection and parse once into addressable structure
    """
    try:
        self.Collectionlist = {'tvdb_id': {}, 'tvrage_id': {}}
        logger.log(u"Getting Show Collection", logger.DEBUG)
        TraktCollectionList = self.trakt_api.traktRequest("sync/collection/shows")
        tvdb_id = 'tvdb'
        tvrage_id = 'tvrage'

        for watchlist_el in TraktCollectionList:

            tvdb = False
            tvrage = False

            if not watchlist_el['show']['ids']["tvdb"] is None:
                tvdb = True
            if not watchlist_el['show']['ids']["tvrage"] is None:
                tvrage = True

            title = watchlist_el['show']['title']
            year = str(watchlist_el['show']['year'])

            if 'seasons' in watchlist_el:
                for season_el in watchlist_el['seasons']:
                    for episode_el in season_el['episodes']:
                        season = str(season_el['number'])
                        episode = str(episode_el['number'])

                        if tvdb:
                            showid = str(watchlist_el['show']['ids'][tvdb_id])
                            if showid not in self.Collectionlist[tvdb_id + '_id'].keys():
                                self.Collectionlist[tvdb_id + '_id'][showid] = {'id': showid, 'title': title, 'year': year, 'seasons': {}}
                            if season not in self.Collectionlist[tvdb_id + '_id'][showid]['seasons'].keys():
                                self.Collectionlist[tvdb_id + '_id'][showid]['seasons'][season] = {'s': season, 'episodes': {}}
                            if episode not in self.Collectionlist[tvdb_id + '_id'][showid]['seasons'][season]['episodes'].keys():
                                self.Collectionlist[tvdb_id + '_id'][showid]['seasons'][season]['episodes'][episode] = episode

                        if tvrage:
                            showid = str(watchlist_el['show']['ids'][tvrage_id])
                            if showid not in self.Collectionlist[tvrage_id + '_id'].keys():
                                self.Collectionlist[tvrage_id + '_id'][showid] = {'id': showid, 'title': title, 'year': year, 'seasons': {}}
                            if season not in self.Collectionlist[tvrage_id + '_id'][showid]['seasons'].keys():
                                self.Collectionlist[tvrage_id + '_id'][showid]['seasons'][season] = {'s': season, 'episodes': {}}
                            if episode not in self.Collectionlist[tvrage_id + '_id'][showid]['seasons'][season]['episodes'].keys():
                                self.Collectionlist[tvrage_id + '_id'][showid]['seasons'][season]['episodes'][episode] = episode

    except traktException as e:
        logger.log(u"Could not connect to trakt service, cannot download Show Collection: %s" % repr(e), logger.WARNING)
        return False

    return True
def logHelper(logMessage, logLevel=logger.MESSAGE):
    logger.log(logMessage, logLevel)
    return logMessage + u"\n"
def load_network_conversions():
    if not should_try_loading():
        return

    conversions = []

    # network conversions are stored on github pages
    url = 'https://raw.githubusercontent.com/prinz23/sg_network_conversions/master/conversions.txt'

    url_data = helpers.getURL(url)
    if url_data is None:
        update_last_retry()
        # When urlData is None, trouble connecting to github
        logger.log(u'Updating network conversions failed, this can happen from time to time. URL: %s' % url, logger.WARNING)
        return
    else:
        reset_last_retry()

    try:
        for line in url_data.splitlines():
            (tvdb_network, tvrage_network, tvrage_country) = line.decode('utf-8').strip().rsplit(u'::', 2)
            if not (tvdb_network and tvrage_network and tvrage_country):
                continue
            conversions.append({'tvdb_network': tvdb_network, 'tvrage_network': tvrage_network, 'tvrage_country': tvrage_country})
    except (IOError, OSError):
        pass

    my_db = db.DBConnection('cache.db')

    old_d = my_db.select('SELECT * FROM network_conversions')
    old_d = helpers.build_dict(old_d, 'tvdb_network')

    # list of sql commands to update the network_conversions table
    cl = []

    for n_w in conversions:
        cl.append(['INSERT OR REPLACE INTO network_conversions (tvdb_network, tvrage_network, tvrage_country)'
                   ' VALUES (?,?,?)',
                   [n_w['tvdb_network'], n_w['tvrage_network'], n_w['tvrage_country']]])
        try:
            del old_d[n_w['tvdb_network']]
        except KeyError:
            pass

    # remove deleted records
    if len(old_d) > 0:
        old_items = list(va for va in old_d)
        cl.append(['DELETE FROM network_conversions WHERE tvdb_network IN (%s)' % ','.join(['?'] * len(old_items)), old_items])

    # change all network conversion info at once (much faster)
    if len(cl) > 0:
        my_db.mass_action(cl)
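# conversions.txt lines are '<tvdb network>::<tvrage network>::<tvrage country>';
# a standalone sketch of the split above (sample line hypothetical):
line = u'Some Network::Some Network HD::US'
tvdb_network, tvrage_network, tvrage_country = line.strip().rsplit(u'::', 2)
print(u'%s | %s | %s' % (tvdb_network, tvrage_network, tvrage_country))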
def update_network_dict():
    if not should_try_loading():
        return

    _remove_old_zoneinfo()
    _update_zoneinfo()
    load_network_conversions()

    d = {}

    # network timezones are stored on github pages
    url = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/network_timezones.txt'

    url_data = helpers.getURL(url)
    if url_data is None:
        update_last_retry()
        # When urlData is None, trouble connecting to github
        logger.log(u'Updating network timezones failed, this can happen from time to time. URL: %s' % url, logger.WARNING)
        load_network_dict(load=False)
        return
    else:
        reset_last_retry()

    try:
        for line in url_data.splitlines():
            try:
                (key, val) = line.decode('utf-8').strip().rsplit(u':', 1)
            except (StandardError, Exception):
                continue
            if key is None or val is None:
                continue
            d[key] = val
    except (IOError, OSError):
        pass

    my_db = db.DBConnection('cache.db')

    # load current network timezones
    old_d = dict(my_db.select('SELECT * FROM network_timezones'))

    # list of sql commands to update the network_timezones table
    cl = []
    for cur_d, cur_t in iteritems(d):
        h_k = cur_d in old_d
        if h_k and cur_t != old_d[cur_d]:
            # update old record
            cl.append(['UPDATE network_timezones SET network_name=?, timezone=? WHERE network_name=?', [cur_d, cur_t, cur_d]])
        elif not h_k:
            # add new record
            cl.append(['INSERT INTO network_timezones (network_name, timezone) VALUES (?,?)', [cur_d, cur_t]])
        if h_k:
            del old_d[cur_d]

    # remove deleted records
    if len(old_d) > 0:
        old_items = list(va for va in old_d)
        cl.append(['DELETE FROM network_timezones WHERE network_name IN (%s)' % ','.join(['?'] * len(old_items)), old_items])

    # change all network timezone infos at once (much faster)
    if len(cl) > 0:
        my_db.mass_action(cl)

    load_network_dict()
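# network_timezones.txt lines are '<network name>:<tz database name>', split
# from the right so network names containing ':' survive; a standalone sketch
# (sample line hypothetical):
line = u'BBC One:Europe/London'
key, val = line.strip().rsplit(u':', 1)
print(u'%s -> %s' % (key, val))  # BBC One -> Europe/London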
def logHelper(logMessage, logLevel=logger.INFO):
    logger.log(logMessage, logLevel)
    return logMessage + u"\n"
class TVCache():

    def __init__(self, provider):
        self.provider = provider
        self.providerID = self.provider.getID()
        self.minTime = 10

    def _getDB(self):
        return CacheDBConnection(self.providerID)

    def _clearCache(self):
        myDB = self._getDB()
        myDB.action("DELETE FROM " + self.providerID + " WHERE 1")

    def _getRSSData(self):
        data = None
        return data

    def _checkAuth(self, data):
        return True

    def _checkItemAuth(self, title, url):
        return True

    def updateCache(self):
        if not self.shouldUpdate():
            return

        data = self._getRSSData()

        # as long as the http request worked we count this as an update
        if data:
            self.setLastUpdate()
        else:
            return []

        # now that we've loaded the current RSS feed lets delete the old cache
        logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
        self._clearCache()

        if not self._checkAuth(data):
            raise exceptions.AuthException("Your authentication info for " + self.provider.name + " is incorrect, check your config")

        try:
            responseSoup = etree.ElementTree(etree.XML(data))
            items = responseSoup.getiterator('item')
        except Exception, e:
            logger.log(u"Error trying to load " + self.provider.name + " RSS feed: " + ex(e), logger.ERROR)
            logger.log(u"Feed contents: " + repr(data), logger.DEBUG)
            return []

        if responseSoup.getroot().tag != 'rss':
            logger.log(u"Resulting XML from " + self.provider.name + " isn't RSS, not parsing it", logger.ERROR)
            return []

        for item in items:
            self._parseItem(item)
def _getProperList(self):

    propers = {}

    # for each provider get a list of the propers
    for curProvider in providers.sortedProviderList():

        if not curProvider.isActive():
            continue

        date = datetime.datetime.today() - datetime.timedelta(days=2)

        logger.log(u"Searching for any new PROPER releases from " + curProvider.name)
        curPropers = curProvider.findPropers(date)

        # if they haven't been added by a different provider then add the proper to the list
        for x in curPropers:
            name = self._genericName(x.name)

            if name not in propers:
                logger.log(u"Found new proper: " + x.name, logger.DEBUG)
                x.provider = curProvider
                propers[name] = x

    # take the list of unique propers and get it sorted by date
    sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
    finalPropers = []

    for curProper in sortedPropers:

        # parse the file name
        cp = CompleteParser()
        cpr = cp.parse(curProper.name)
        parse_result = cpr.parse_result

        if not parse_result.episode_numbers and not parse_result.is_anime:
            logger.log(u"Ignoring " + curProper.name + " because it's for a full season rather than specific episode", logger.DEBUG)
            continue

        # populate our Proper instance
        if parse_result.air_by_date:
            curProper.season = -1
            curProper.episode = parse_result.air_date
        else:
            curProper.season = parse_result.season_number if parse_result.season_number != None else 1
            if parse_result.is_anime:
                logger.log(u"I am sorry, '" + curProper.name + "' seems to be an anime; proper search is not yet supported", logger.DEBUG)
                continue
            else:
                curProper.episode = parse_result.episode_numbers[0]
        curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)

        # for each show in our list
        for curShow in sickbeard.showList:

            if not parse_result.series_name:
                continue

            genericName = self._genericName(parse_result.series_name)

            # get the scene name masks
            sceneNames = set(show_name_helpers.makeSceneShowSearchStrings(curShow))

            # for each scene name mask
            for curSceneName in sceneNames:

                # if it matches
                if genericName == self._genericName(curSceneName):
                    logger.log(u"Successful match! Result " + parse_result.series_name + " matched to show " + curShow.name, logger.DEBUG)

                    # set the tvdbid in the db to the show's tvdbid
                    curProper.tvdbid = curShow.tvdbid

                    # since we found it, break out
                    break

            # if we found something in the inner for loop break out of this one
            if curProper.tvdbid != -1:
                break

        if curProper.tvdbid == -1:
            continue

        if not show_name_helpers.filterBadReleases(curProper.name):
            logger.log(u"Proper " + curProper.name + " isn't a valid scene release that we want, ignoring it", logger.DEBUG)
            continue

        # if we have an air-by-date show then get the real season/episode numbers
        if curProper.season == -1 and curProper.tvdbid:
            showObj = helpers.findCertainShow(sickbeard.showList, curProper.tvdbid)
            if not showObj:
                logger.log(u"This should never have happened, post a bug about this!", logger.ERROR)
                raise Exception("BAD STUFF HAPPENED")

            tvdb_lang = showObj.lang
            # There's gotta be a better way of doing this but we don't wanna
            # change the language value elsewhere
            ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

            if tvdb_lang and not tvdb_lang == 'en':
                ltvdb_api_parms['language'] = tvdb_lang

            try:
                t = tvdb_api.Tvdb(**ltvdb_api_parms)
                epObj = t[curProper.tvdbid].airedOn(curProper.episode)[0]
                curProper.season = int(epObj["seasonnumber"])
                curProper.episodes = [int(epObj["episodenumber"])]
            except tvdb_exceptions.tvdb_episodenotfound:
                logger.log(u"Unable to find episode with date " + str(curProper.episode) + " for show " + parse_result.series_name + ", skipping", logger.WARNING)
                continue

        # check if we actually want this proper (if it's the right quality)
        sqlResults = db.DBConnection().select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [curProper.tvdbid, curProper.season, curProper.episode])
        if not sqlResults:
            continue
        oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"]))

        # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
        if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality:
            continue

        # if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
        if curProper.tvdbid != -1 and (curProper.tvdbid, curProper.season, curProper.episode) not in map(operator.attrgetter('tvdbid', 'season', 'episode'), finalPropers):
            logger.log(u"Found a proper that we need: " + str(curProper.name))
            finalPropers.append(curProper)

    return finalPropers
def _addCacheEntry(self, name, url, season=None, episodes=None, tvdb_id=0, tvrage_id=0, quality=None, extraNames=[]):

    myDB = self._getDB()

    parse_result = None

    # if we don't have complete info then parse the filename to get it
    for curName in [name] + extraNames:
        try:
            myParser = NameParser()
            parse_result = myParser.parse(curName)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + curName + " into a valid episode", logger.DEBUG)
            continue

    if not parse_result:
        logger.log(u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG)
        return False

    if not parse_result.series_name:
        logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG)
        return False

    # if we need tvdb_id or tvrage_id then search the DB for them
    if not tvdb_id or not tvrage_id:

        # if we have only the tvdb_id, use the database
        if tvdb_id:
            showObj = helpers.findCertainShow(sickbeard.showList, tvdb_id)
            if showObj:
                tvrage_id = showObj.tvrid
                tvdb_lang = showObj.lang
            else:
                logger.log(u"We were given a TVDB id " + str(tvdb_id) + " but it doesn't match a show we have in our list, so leaving tvrage_id empty", logger.DEBUG)
                tvrage_id = 0

        # if we have only a tvrage_id then use the database
        elif tvrage_id:
            showObj = helpers.findCertainTVRageShow(sickbeard.showList, tvrage_id)
            if showObj:
                tvdb_id = showObj.tvdbid
                tvdb_lang = showObj.lang
            else:
                logger.log(u"We were given a TVRage id " + str(tvrage_id) + " but it doesn't match a show we have in our list, so leaving tvdb_id empty", logger.DEBUG)
                tvdb_id = 0

        # if they're both empty then fill out as much info as possible by searching the show name
        else:

            # check the name cache and see if we already know what show this is
            logger.log(u"Checking the cache to see if we already know the tvdb id of " + parse_result.series_name, logger.DEBUG)
            tvdb_id = name_cache.retrieveNameFromCache(parse_result.series_name)

            # remember if the cache lookup worked or not so we know whether we should bother updating it later
            if tvdb_id == None:
                logger.log(u"No cache results returned, continuing on with the search", logger.DEBUG)
                from_cache = False
            else:
                logger.log(u"Cache lookup found " + repr(tvdb_id) + ", using that", logger.DEBUG)
                from_cache = True

            # if the cache failed, try looking up the show name in the database
            if tvdb_id == None:
                logger.log(u"Trying to look the show up in the show database", logger.DEBUG)
                showResult = helpers.searchDBForShow(parse_result.series_name)
                if showResult:
                    logger.log(parse_result.series_name + " was found to be show " + showResult[1] + " (" + str(showResult[0]) + ") in our DB.", logger.DEBUG)
                    tvdb_id = showResult[0]

            # if the DB lookup fails then do a comprehensive regex search
            if tvdb_id == None:
                logger.log(u"Couldn't figure out a show name straight from the DB, trying a regex search instead", logger.DEBUG)
                for curShow in sickbeard.showList:
                    if show_name_helpers.isGoodResult(name, curShow, False):
                        logger.log(u"Successfully matched " + name + " to " + curShow.name + " with regex", logger.DEBUG)
                        tvdb_id = curShow.tvdbid
                        tvdb_lang = curShow.lang
                        break

            # if tvdb_id was anything but None (0 or a number) then
            if not from_cache:
                name_cache.addNameToCache(parse_result.series_name, tvdb_id)

            # if we came out with tvdb_id = None it means we couldn't figure it out at all, just use 0 for that
            if tvdb_id == None:
                tvdb_id = 0

            # if we found the show then retrieve the show object
            if tvdb_id:
                showObj = helpers.findCertainShow(sickbeard.showList, tvdb_id)
                if showObj:
                    tvrage_id = showObj.tvrid
                    tvdb_lang = showObj.lang

    # if we weren't provided with season/episode information then get it from the name that we parsed
    if not season:
        season = parse_result.season_number if parse_result.season_number != None else 1
    if not episodes:
        episodes = parse_result.episode_numbers

    # if we have an air-by-date show then get the real season/episode numbers
    if parse_result.air_by_date and tvdb_id:
        try:
            # There's gotta be a better way of doing this but we don't wanna
            # change the language value elsewhere
            ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

            if not (tvdb_lang == "" or tvdb_lang == "en" or tvdb_lang == None):
                ltvdb_api_parms['language'] = tvdb_lang

            t = tvdb_api.Tvdb(**ltvdb_api_parms)

            epObj = t[tvdb_id].airedOn(parse_result.air_date)[0]
            season = int(epObj["seasonnumber"])
            episodes = [int(epObj["episodenumber"])]
        except tvdb_exceptions.tvdb_episodenotfound:
            logger.log(u"Unable to find episode with date " + str(parse_result.air_date) + " for show " + parse_result.series_name + ", skipping", logger.WARNING)
            return False
        except tvdb_exceptions.tvdb_error, e:
            logger.log(u"Unable to contact TVDB: " + e.message.decode(sickbeard.SYS_ENCODING), logger.WARNING)
            return False
logger.log( u"Not deleting folder " + folder + " found the following files: " + str(check_files), logger.INFO) return False try: logger.log(u"Deleting folder (if it's empty): " + folder) os.rmdir(folder) except (OSError, IOError), e: logger.log( u"Warning: unable to delete folder: " + folder + ": " + ex(e), logger.WARNING) return False else: try: logger.log(u"Deleting folder: " + folder) shutil.rmtree(folder) except (OSError, IOError), e: logger.log( u"Warning: unable to delete folder: " + folder + ": " + ex(e), logger.WARNING) return False return True def delete_files(processPath, notwantedFiles, result, force=False): if not result.result and force: result.output += logHelper( u"Forcing deletion of files, even though last result was not success",
def _search_provider(self, search_params, **kwargs):

    results = []
    if not self._authorised():
        return results

    items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

    rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
        'show_id': '"show\?id=(\d+)[^>]+>([^<]+)<\/a>', 'get': 'load_torrent'}.items())

    search_types = sorted([x for x in search_params.items()], key=lambda tup: tup[0], reverse=True)
    maybe_only = search_types[0][0]
    show_detail = '_only' in maybe_only and search_params.pop(maybe_only)[0] or ''

    for mode in search_params.keys():
        for search_string in search_params[mode]:

            if 'Cache' == mode:
                search_url = self.urls['browse']
                html = self.get_url(search_url)
            else:
                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
                search_string = search_string.replace(show_detail, '').strip()
                search_url = self.urls['search'] % search_string
                html = self.get_url(search_url)

                shows = rc['show_id'].findall(html)
                if not any(shows):
                    continue

                html = ''
                for show in shows:
                    sid, title = show
                    if title not in search_string:
                        continue
                    html and time.sleep(1.1)
                    html += self.get_url(self.urls['show'] % sid)

            cnt = len(items[mode])
            try:
                if not html or self._has_no_results(html):
                    raise generic.HaltParseException

                with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                    torrent_rows = soup.tbody.find_all('tr') or soup.table.find_all('tr') or []

                    if 2 > len(torrent_rows):
                        raise generic.HaltParseException

                    head = None
                    for tr in torrent_rows[0:]:
                        cells = tr.find_all('td')
                        if 4 > len(cells):
                            continue
                        try:
                            head = head if None is not head else self._header_row(tr)
                            stats = cells[head['leech']].get_text().strip()
                            seeders, leechers = [(tryInt(x[0], 0), tryInt(x[1], 0)) for x in
                                                 re.findall('(?::(\d+))(?:\W*[/]\W*:(\d+))?', stats) if x[0]][0]
                            if self._peers_fail(mode, seeders, leechers):
                                continue

                            sizes = [(tryInt(x[0], x[0]), tryInt(x[1], False)) for x in
                                     re.findall('([\d.]+\w+)?(?:\s*[(\[](\d+)[)\]])?', stats) if x[0]][0]
                            size = sizes[(0, 1)[1 < len(sizes)]]

                            for element in [x for x in cells[2].contents[::-1] if unicode(x).strip()]:
                                if 'NavigableString' in str(element.__class__):
                                    title = unicode(element).strip()
                                    break

                            download_url = self._link(tr.find('a', href=rc['get'])['href'])
                        except (AttributeError, TypeError, ValueError):
                            continue

                        if title and download_url:
                            items[mode].append((title, download_url, seeders, self._bytesizer(size)))

            except generic.HaltParseException:
                pass
            except (StandardError, Exception):
                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

            self._log_search(mode, len(items[mode]) - cnt, search_url)

        results = self._sort_seeding(mode, results + items[mode])

    return results
def findNeededEpisodes(self, episode=None, manualSearch=False):
    neededEps = {}

    if episode:
        neededEps[episode] = []

    myDB = self._getDB()
    if not episode:
        sqlResults = myDB.select("SELECT * FROM " + self.providerID)
    else:
        # wrap the episode number in pipes and wildcards so multi-episode
        # entries (e.g. '|5|6|') also match
        sqlResults = myDB.select("SELECT * FROM " + self.providerID + " WHERE tvdbid = ? AND season = ? AND episodes LIKE ?",
                                 [episode.show.tvdbid, episode.season, "%|" + str(episode.episode) + "|%"])

    # for each cache entry
    for curResult in sqlResults:

        # skip non-tv crap (but allow them for Newzbin cause we assume it's filtered well)
        if self.providerID != 'newzbin' and not show_name_helpers.filterBadReleases(curResult["name"]):
            continue

        # get the show object, or if it's not one of our shows then ignore it
        showObj = helpers.findCertainShow(sickbeard.showList, int(curResult["tvdbid"]))
        if not showObj:
            continue

        # get season and ep data (ignoring multi-eps for now)
        curSeason = int(curResult["season"])
        if curSeason == -1:
            continue
        curEp = curResult["episodes"].split("|")[1]
        if not curEp:
            continue
        curEp = int(curEp)
        curQuality = int(curResult["quality"])

        # if the show says we want that episode then add it to the list
        if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch):
            logger.log(u"Skipping " + curResult["name"] + " because we don't want an episode that's " + Quality.qualityStrings[curQuality], logger.DEBUG)
        else:
            if episode:
                epObj = episode
            else:
                epObj = showObj.getEpisode(curSeason, curEp)

            # build a result object
            title = curResult["name"]
            url = curResult["url"]

            logger.log(u"Found result " + title + " at " + url)

            result = self.provider.getResult([epObj])
            result.url = url
            result.name = title
            result.quality = curQuality

            # add it to the list
            if epObj not in neededEps:
                neededEps[epObj] = [result]
            else:
                neededEps[epObj].append(result)

    return neededEps
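# Episodes are stored pipe-delimited (u'|5|' for a single episode, u'|5|6|'
# for a multi-episode entry), so the LIKE pattern above wraps the episode
# number in pipes; a standalone sketch with hypothetical values:
stored = u'|5|6|'
print(u'|5|' in stored)   # True  -> LIKE '%|5|%' matches
print(u'|56|' in stored)  # False -> no false positive on episode 56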
def _update_library_json(self, host=None, showName=None):  # pylint: disable=too-many-return-statements, too-many-branches
    """Handles updating KODI host via HTTP JSON-RPC

    Attempts to update the KODI video library for a specific tv show if passed,
    otherwise update the whole library if enabled.

    Args:
        host: KODI webserver host:port
        showName: Name of a TV show to specifically target the library update for

    Returns:
        Returns True or False

    """

    if not host:
        logger.log(u'No KODI host passed, aborting update', logger.WARNING)
        return False

    logger.log(u"Updating KODI library via JSON method for host: " + host, logger.DEBUG)

    # if we're doing per-show
    if showName:
        showName = urllib.unquote_plus(showName)
        tvshowid = -1
        path = ''

        logger.log(u"Updating library in KODI via JSON method for show " + showName, logger.DEBUG)

        # let's try letting kodi filter the shows
        showsCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.GetTVShows","params":{"filter":{"field":"title","operator":"is","value":"%s"},"properties":["title"]},"id":"SickRage"}'

        # get tvshowid by showName
        showsResponse = self._send_to_kodi_json(showsCommand % showName, host)

        if showsResponse and "result" in showsResponse and "tvshows" in showsResponse["result"]:
            shows = showsResponse["result"]["tvshows"]
        else:
            # fall back to retrieving the entire show list
            showsCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.GetTVShows","id":1}'
            showsResponse = self._send_to_kodi_json(showsCommand, host)

            if showsResponse and "result" in showsResponse and "tvshows" in showsResponse["result"]:
                shows = showsResponse["result"]["tvshows"]
            else:
                logger.log(u"KODI: No tvshows in KODI TV show list", logger.DEBUG)
                return False

        for show in shows:
            if ("label" in show and show["label"] == showName) or ("title" in show and show["title"] == showName):
                tvshowid = show["tvshowid"]
                # set the path if we have it already
                if "file" in show:
                    path = show["file"]
                break

        # this can be big, so free some memory
        del shows

        # we didn't find the show (exact match), so there's nothing to update
        if tvshowid == -1:
            logger.log(u'Exact show name not matched in KODI TV show list', logger.DEBUG)
            return False

        # lookup tv-show path if we don't already know it
        if not path:
            pathCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.GetTVShowDetails","params":{"tvshowid":%d, "properties": ["file"]},"id":1}' % tvshowid
            pathResponse = self._send_to_kodi_json(pathCommand, host)
            path = pathResponse["result"]["tvshowdetails"]["file"]

        logger.log(u"Received Show: " + showName + " with ID: " + str(tvshowid) + " Path: " + path, logger.DEBUG)

        if not path:
            logger.log(u"No valid path found for " + showName + " with ID: " + str(tvshowid) + " on " + host, logger.WARNING)
            return False

        logger.log(u"KODI Updating " + showName + " on " + host + " at " + path, logger.DEBUG)
        updateCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.Scan","params":{"directory":%s},"id":1}' % (json.dumps(path))
        request = self._send_to_kodi_json(updateCommand, host)
        if not request:
            logger.log(u"Update of show directory failed on " + showName + " on " + host + " at " + path, logger.WARNING)
            return False

        # catch if there was an error in the returned request
        for r in request:
            if 'error' in r:
                logger.log(u"Error while attempting to update show directory for " + showName + " on " + host + " at " + path, logger.WARNING)
                return False

    # do a full update if requested
    else:
        logger.log(u"Doing Full Library KODI update on host: " + host, logger.DEBUG)
        updateCommand = '{"jsonrpc":"2.0","method":"VideoLibrary.Scan","id":1}'
        request = self._send_to_kodi_json(updateCommand, host)

        if not request:
            logger.log(u"KODI Full Library update failed on: " + host, logger.WARNING)
            return False

    return True
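For clarity, a minimal sketch of the response shape the per-show branch above expects back from VideoLibrary.GetTVShows; the dict is hand-written to mirror only the fields the code reads (real responses carry more).

# Editor's sketch; canned response modelled on the fields read above.
shows_response = {
    "result": {
        "tvshows": [
            {"tvshowid": 42, "title": "Some Show", "file": "/tv/Some Show/"},
        ]
    }
}
for show in shows_response["result"]["tvshows"]:
    if show.get("title") == "Some Show":
        print show["tvshowid"], show.get("file", '')  # -> 42 /tv/Some Show/
        break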
def search(self, search_params, age=0, ep_obj=None):
    results = []
    items = {'Season': [], 'Episode': [], 'RSS': []}

    if not self.login():
        return results

    for mode in search_params.keys():
        logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
        for search_string in search_params[mode]:

            if mode == 'RSS':
                searchURL = self.urls['index'] % self.categories
            else:
                searchURL = self.urls['search'] % (urllib.quote_plus(search_string.encode('utf-8')), self.categories)
                logger.log(u"Search string: %s " % search_string, logger.DEBUG)

            data = self.get_url(searchURL)
            logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)
            if not data:
                continue

            try:
                with BS4Parser(data, 'html5lib') as html:
                    torrent_table = html.find('table', attrs={'id': 'torrenttable'})
                    torrent_rows = torrent_table.find_all('tr') if torrent_table else []

                    # Continue only if at least one release is found
                    if len(torrent_rows) < 2:
                        logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                        continue

                    for result in torrent_table.find_all('tr')[1:]:
                        try:
                            link = result.find('td', attrs={'class': 'name'}).find('a')
                            url = result.find('td', attrs={'class': 'quickdownload'}).find('a')
                            title = link.string
                            download_url = self.urls['download'] % url['href']
                            seeders = int(result.find('td', attrs={'class': 'seeders'}).string)
                            leechers = int(result.find('td', attrs={'class': 'leechers'}).string)
                            # FIXME: size is not provided by this page
                            size = -1
                        except (AttributeError, TypeError):
                            continue

                        if not all([title, download_url]):
                            continue

                        # Filter unseeded torrent
                        if seeders < self.minseed or leechers < self.minleech:
                            if mode != 'RSS':
                                logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                            continue

                        item = title, download_url, size, seeders, leechers
                        if mode != 'RSS':
                            logger.log(u"Found result: %s " % title, logger.DEBUG)

                        items[mode].append(item)

            except Exception, e:
                logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)

        # For each search mode sort all the items by seeders if available
        items[mode].sort(key=lambda tup: tup[3], reverse=True)
        results += items[mode]

    return results
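A self-contained sketch of the table-scraping pattern used above, runnable on its own (assumes bs4 and html5lib are installed; the markup is invented for illustration).

# Editor's sketch of the row-extraction pattern; the HTML is made up.
from bs4 import BeautifulSoup

html = ('<table id="torrenttable">'
        '<tr><th>Name</th><th>S</th><th>L</th></tr>'
        '<tr><td class="name"><a href="/t/1">Show.S01E01.720p</a></td>'
        '<td class="seeders">12</td><td class="leechers">3</td></tr>'
        '</table>')

soup = BeautifulSoup(html, 'html5lib')
torrent_table = soup.find('table', attrs={'id': 'torrenttable'})
for row in torrent_table.find_all('tr')[1:]:  # skip the header row, as the provider does
    name_link = row.find('td', attrs={'class': 'name'}).find('a')
    seeders = int(row.find('td', attrs={'class': 'seeders'}).string)
    leechers = int(row.find('td', attrs={'class': 'leechers'}).string)
    print name_link.string, seeders, leechers  # -> Show.S01E01.720p 12 3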
def _update_library(self, host=None, showName=None):  # pylint: disable=too-many-locals, too-many-return-statements
    """Handles updating KODI host via HTTP API

    Attempts to update the KODI video library for a specific tv show if passed,
    otherwise update the whole library if enabled.

    Args:
        host: KODI webserver host:port
        showName: Name of a TV show to specifically target the library update for

    Returns:
        Returns True or False

    """

    if not host:
        logger.log(u'No KODI host passed, aborting update', logger.WARNING)
        return False

    logger.log(u"Updating KODI library via HTTP method for host: " + host, logger.DEBUG)

    # if we're doing per-show
    if showName:
        logger.log(u"Updating library in KODI via HTTP method for show " + showName, logger.DEBUG)

        pathSql = 'select path.strPath from path, tvshow, tvshowlinkpath where ' \
                  'tvshow.c00 = "%s" and tvshowlinkpath.idShow = tvshow.idShow ' \
                  'and tvshowlinkpath.idPath = path.idPath' % showName

        # use this to get xml back for the path lookups
        xmlCommand = {
            'command': 'SetResponseFormat(webheader;false;webfooter;false;header;<xml>;footer;</xml>;opentag;<tag>;closetag;</tag>;closefinaltag;false)'
        }
        # sql used to grab path(s)
        sqlCommand = {'command': 'QueryVideoDatabase(%s)' % pathSql}
        # set output back to default
        resetCommand = {'command': 'SetResponseFormat()'}

        # set xml response format, if this fails then don't bother with the rest
        request = self._send_to_kodi(xmlCommand, host)
        if not request:
            return False

        sqlXML = self._send_to_kodi(sqlCommand, host)
        request = self._send_to_kodi(resetCommand, host)

        if not sqlXML:
            logger.log(u"Invalid response for " + showName + " on " + host, logger.DEBUG)
            return False

        encSqlXML = urllib.quote(sqlXML, ':\\/<>')
        try:
            et = etree.fromstring(encSqlXML)
        except SyntaxError as e:
            logger.log(u"Unable to parse XML returned from KODI: " + ex(e), logger.ERROR)
            return False

        paths = et.findall('.//field')

        if not paths:
            logger.log(u"No valid paths found for " + showName + " on " + host, logger.DEBUG)
            return False

        for path in paths:
            # we do not need it double-encoded, gawd this is dumb
            unEncPath = urllib.unquote(path.text).decode(sickbeard.SYS_ENCODING)
            logger.log(u"KODI Updating " + showName + " on " + host + " at " + unEncPath, logger.DEBUG)

            updateCommand = {
                'command': 'ExecBuiltIn',
                'parameter': 'KODI.updatelibrary(video, %s)' % unEncPath
            }
            request = self._send_to_kodi(updateCommand, host)
            if not request:
                logger.log(u"Update of show directory failed on " + showName + " on " + host + " at " + unEncPath, logger.WARNING)
                return False

            # sleep for a few seconds just to be sure kodi has a chance to finish each directory
            if len(paths) > 1:
                time.sleep(5)

    # do a full update if requested
    else:
        logger.log(u"Doing Full Library KODI update on host: " + host, logger.DEBUG)
        updateCommand = {'command': 'ExecBuiltIn', 'parameter': 'KODI.updatelibrary(video)'}
        request = self._send_to_kodi(updateCommand, host)

        if not request:
            logger.log(u"KODI Full Library update failed on: " + host, logger.WARNING)
            return False

    return True
def _notify_kodi(self, message, title="SickRage", host=None, username=None, password=None,
                 force=False, dest_app="KODI"):  # pylint: disable=too-many-arguments
    """Internal wrapper for the notify_snatch and notify_download functions

    Detects JSON-RPC version then branches the logic for either the JSON-RPC or legacy HTTP API methods.

    Args:
        message: Message body of the notice to send
        title: Title of the notice to send
        host: KODI webserver host:port
        username: KODI webserver username
        password: KODI webserver password
        force: Used for the Test method to override config safety checks

    Returns:
        Returns a list of results in the format of host:ip:result
        The result will either be 'OK' or False, this is used to be parsed by the calling function.

    """

    # fill in omitted parameters
    if not host:
        host = sickbeard.KODI_HOST
    if not username:
        username = sickbeard.KODI_USERNAME
    if not password:
        password = sickbeard.KODI_PASSWORD

    # suppress notifications if the notifier is disabled but the notify options are checked
    if not sickbeard.USE_KODI and not force:
        logger.log(u"Notification for %s not enabled, skipping this notification" % dest_app, logger.DEBUG)
        return False

    result = ''
    for curHost in [x.strip() for x in host.split(",") if x.strip()]:
        logger.log(u"Sending %s notification to '%s' - %s" % (dest_app, curHost, message), logger.DEBUG)

        kodiapi = self._get_kodi_version(curHost, username, password, dest_app)
        if kodiapi:
            if kodiapi <= 4:
                logger.log(u"Detected %s version <= 11, using %s HTTP API" % (dest_app, dest_app), logger.DEBUG)
                command = {
                    'command': 'ExecBuiltIn',
                    'parameter': 'Notification(' + title.encode("utf-8") + ',' + message.encode("utf-8") + ')'
                }
                notifyResult = self._send_to_kodi(command, curHost, username, password)
                if notifyResult:
                    result += curHost + ':' + str(notifyResult)
            else:
                logger.log(u"Detected %s version >= 12, using %s JSON API" % (dest_app, dest_app), logger.DEBUG)
                command = '{"jsonrpc":"2.0","method":"GUI.ShowNotification","params":{"title":"%s","message":"%s", "image": "%s"},"id":1}' % (
                    title.encode("utf-8"), message.encode("utf-8"), self.sr_logo_url)
                notifyResult = self._send_to_kodi_json(command, curHost, username, password, dest_app)
                if notifyResult and notifyResult.get('result'):  # pylint: disable=no-member
                    result += curHost + ':' + notifyResult["result"].decode(sickbeard.SYS_ENCODING)
        else:
            if sickbeard.KODI_ALWAYS_ON or force:
                logger.log(u"Failed to detect %s version for '%s', check configuration and try again." % (dest_app, curHost), logger.WARNING)
            result += curHost + ':False'

    return result
class QueueItemAdd(ShowQueueItem):
    def __init__(self, tvdb_id, showDir, default_status, quality, flatten_folders, lang, subtitles, audio_lang):
        self.tvdb_id = tvdb_id
        self.showDir = showDir
        self.default_status = default_status
        self.quality = quality
        self.flatten_folders = flatten_folders
        self.lang = lang
        self.audio_lang = audio_lang
        self.subtitles = subtitles

        self.show = None

        # this will initialize self.show to None
        ShowQueueItem.__init__(self, ShowQueueActions.ADD, self.show)

    def _getName(self):
        """
        Returns the show name if there is a show object created, if not returns
        the dir that the show is being added to.
        """
        if self.show == None:
            return self.showDir
        return self.show.name

    show_name = property(_getName)

    def _isLoading(self):
        """
        Returns True if we still only know the folder name, or False once we've
        gotten far enough to have a show object.
        """
        if self.show == None:
            return True
        return False

    isLoading = property(_isLoading)

    def execute(self):
        ShowQueueItem.execute(self)

        logger.log(u"Starting to add show " + self.showDir)

        try:
            # make sure the tvdb ids are valid
            try:
                ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
                if self.lang:
                    ltvdb_api_parms['language'] = self.lang

                logger.log(u"TVDB: " + repr(ltvdb_api_parms))

                t = tvdb_api.Tvdb(**ltvdb_api_parms)
                s = t[self.tvdb_id]

                # this usually only happens if they have an NFO in their show dir which gave us a TVDB ID that has no proper english version of the show
                if not s['seriesname']:
                    logger.log(u"Show in " + self.showDir + " has no name on TVDB, probably the wrong language used to search with.", logger.ERROR)
                    ui.notifications.error("Unable to add show", "Show in " + self.showDir + " has no name on TVDB, probably the wrong language. Delete .nfo and add manually in the correct language.")
                    self._finishEarly()
                    return

                # if the show has no episodes/seasons
                if not s:
                    logger.log(u"Show " + str(s['seriesname']) + " is on TVDB but contains no season/episode data.", logger.ERROR)
                    ui.notifications.error("Unable to add show", "Show " + str(s['seriesname']) + " is on TVDB but contains no season/episode data.")
                    self._finishEarly()
                    return
            except tvdb_exceptions.tvdb_exception, e:
                logger.log(u"Error contacting TVDB: " + ex(e), logger.ERROR)
                ui.notifications.error("Unable to add show", "Unable to look up the show in " + self.showDir + " on TVDB, not using the NFO. Delete .nfo and add manually in the correct language.")
                self._finishEarly()
                return

            # clear the name cache
            name_cache.clearCache()

            newShow = TVShow(self.tvdb_id, self.lang, self.audio_lang)
            newShow.loadFromTVDB()
            self.show = newShow

            # set up initial values
            self.show.location = self.showDir
            self.show.subtitles = self.subtitles if self.subtitles != None else sickbeard.SUBTITLES_DEFAULT
            self.show.quality = self.quality if self.quality else sickbeard.QUALITY_DEFAULT
            self.show.flatten_folders = self.flatten_folders if self.flatten_folders != None else sickbeard.FLATTEN_FOLDERS_DEFAULT
            self.show.paused = 0

            # be smartish about this
            if self.show.genre and "talk show" in self.show.genre.lower():
                self.show.air_by_date = 1

        except tvdb_exceptions.tvdb_exception, e:
            logger.log(u"Unable to add show due to an error with TVDB: " + ex(e), logger.ERROR)
            if self.show:
                ui.notifications.error("Unable to add " + str(self.show.name) + " due to an error with TVDB")
            else:
                ui.notifications.error("Unable to add show due to an error with TVDB")
            self._finishEarly()
            return
def _send_to_kodi_json(command, host=None, username=None, password=None, dest_app="KODI"):
    """Handles communication to KODI servers via JSONRPC

    Args:
        command: JSON-RPC command string, POSTed to the KODI /jsonrpc endpoint over HTTP
        host: KODI webserver host:port
        username: KODI webserver username
        password: KODI webserver password

    Returns:
        Returns response.result for successful commands or False if there was an error

    """

    # fill in omitted parameters
    if not username:
        username = sickbeard.KODI_USERNAME
    if not password:
        password = sickbeard.KODI_PASSWORD

    if not host:
        logger.log(u'No %s host passed, aborting update' % dest_app, logger.WARNING)
        return False

    command = command.encode('utf-8')
    logger.log(u"%s JSON command: %s" % (dest_app, command), logger.DEBUG)

    url = 'http://%s/jsonrpc' % host
    try:
        req = urllib2.Request(url, command)
        req.add_header("Content-type", "application/json")
        # if we have a password, use authentication
        if password:
            base64string = base64.encodestring('%s:%s' % (username, password))[:-1]
            authheader = "Basic %s" % base64string
            req.add_header("Authorization", authheader)
            logger.log(u"Contacting %s (with auth header) via url: %s" % (dest_app, ss(url)), logger.DEBUG)
        else:
            logger.log(u"Contacting %s via url: %s" % (dest_app, ss(url)), logger.DEBUG)

        try:
            response = urllib2.urlopen(req)
        except (httplib.BadStatusLine, urllib2.URLError) as e:
            if sickbeard.KODI_ALWAYS_ON:
                logger.log(u"Error while trying to send %s JSON command to %s: %r" % (dest_app, host, ex(e)), logger.WARNING)
            return False

        # parse the json result
        try:
            result = json.load(response)
            response.close()
            logger.log(u"%s JSON response: %s" % (dest_app, result), logger.DEBUG)
            return result  # need to return response for parsing
        except ValueError as e:
            logger.log(u"Unable to decode JSON: " + str(response.read()), logger.WARNING)
            return False

    except IOError as e:
        if sickbeard.KODI_ALWAYS_ON:
            logger.log(u"Warning: Couldn't contact %s JSON API at %s: %r" % (dest_app, ss(url), ex(e)), logger.WARNING)
        return False
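For reference, a self-contained sketch of the JSON-RPC round trip the function above performs (host and credentials are hypothetical; JSONRPC.Ping is a stock KODI method, used here only because any valid request body will do).

# Editor's sketch of the raw JSON-RPC exchange; not part of the notifier.
import base64
import json
import urllib2

def kodi_jsonrpc_ping(host, username=None, password=None):
    command = json.dumps({"jsonrpc": "2.0", "method": "JSONRPC.Ping", "id": 1})
    req = urllib2.Request('http://%s/jsonrpc' % host, command)
    req.add_header("Content-type", "application/json")
    if password:
        # same basic-auth header construction as _send_to_kodi_json above
        base64string = base64.encodestring('%s:%s' % (username, password))[:-1]
        req.add_header("Authorization", "Basic %s" % base64string)
    response = urllib2.urlopen(req)
    try:
        # a reachable host answers {"id":1,"jsonrpc":"2.0","result":"pong"}
        return json.load(response)
    finally:
        response.close()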
                        try:
                            seeders, leechers, size = [tryInt(n, n) for n in [
                                cells[head[x]].get_text().strip() for x in 'seed', 'leech', 'size']]
                            if self._peers_fail(mode, seeders, leechers):
                                continue

                            info = tr.find('a', href=rc['info'])
                            title = (info.attrs.get('title') or info.get_text()).strip()
                            download_url = self._link(tr.find('a', href=rc['get'])['href'])
                        except (AttributeError, TypeError, ValueError, KeyError):
                            continue

                        if title and download_url:
                            items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                except generic.HaltParseException:
                    pass
                except (StandardError, Exception):
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                self._log_search(mode, len(items[mode]) - cnt, search_url)

            results = self._sort_seeding(mode, results + items[mode])

        return results

    def _cache_data(self, **kwargs):
        return self._search_provider({'Cache': ['x264,', 'x264,2', 'x264,3', 'x264,4', 'x264,5']})


provider = SkytorrentsProvider()
def _send_to_kodi(command, host=None, username=None, password=None, dest_app="KODI"):  # pylint: disable=too-many-arguments
    """Handles communication to KODI servers via HTTP API

    Args:
        command: Dictionary of field/data pairs, encoded via urllib and passed to the KODI API via HTTP
        host: KODI webserver host:port
        username: KODI webserver username
        password: KODI webserver password

    Returns:
        Returns response.result for successful commands or False if there was an error

    """

    # fill in omitted parameters
    if not username:
        username = sickbeard.KODI_USERNAME
    if not password:
        password = sickbeard.KODI_PASSWORD

    if not host:
        logger.log(u'No %s host passed, aborting update' % dest_app, logger.WARNING)
        return False

    for key in command:
        if isinstance(command[key], unicode):
            command[key] = command[key].encode('utf-8')

    enc_command = urllib.urlencode(command)
    logger.log(u"%s encoded API command: %r" % (dest_app, enc_command), logger.DEBUG)

    # url = 'http://%s/xbmcCmds/xbmcHttp/?%s' % (host, enc_command)  # maybe need for old plex?
    url = 'http://%s/kodiCmds/kodiHttp/?%s' % (host, enc_command)
    try:
        req = urllib2.Request(url)
        # if we have a password, use authentication
        if password:
            base64string = base64.encodestring('%s:%s' % (username, password))[:-1]
            authheader = "Basic %s" % base64string
            req.add_header("Authorization", authheader)
            logger.log(u"Contacting %s (with auth header) via url: %s" % (dest_app, ss(url)), logger.DEBUG)
        else:
            logger.log(u"Contacting %s via url: %s" % (dest_app, ss(url)), logger.DEBUG)

        try:
            response = urllib2.urlopen(req)
        except (httplib.BadStatusLine, urllib2.URLError) as e:
            logger.log(u"Couldn't contact %s HTTP at %r : %r" % (dest_app, url, ex(e)), logger.DEBUG)
            return False

        result = response.read().decode(sickbeard.SYS_ENCODING)
        response.close()

        logger.log(u"%s HTTP response: %s" % (dest_app, result.replace('\n', '')), logger.DEBUG)
        return result

    except Exception as e:
        logger.log(u"Couldn't contact %s HTTP at %r : %r" % (dest_app, url, ex(e)), logger.DEBUG)
        return False
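By contrast with the JSON-RPC variant, the legacy HTTP API above is just a GET with the command urlencoded into the query string; a quick sketch of what goes on the wire (host and command are illustrative).

# Editor's sketch of the legacy HTTP API URL construction above.
import urllib

command = {'command': 'ExecBuiltIn', 'parameter': 'Notification(title,message)'}
print 'http://%s/kodiCmds/kodiHttp/?%s' % ('192.168.1.100:8080', urllib.urlencode(command))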
def pickBestResult(results, show, quality_list=None):
    logger.log(u"Picking the best result out of " + str([x.name for x in results]), logger.DEBUG)

    # build the black and white list
    bwl = None
    if show:
        bwl = BlackAndWhiteList(show.indexerid)
    else:
        logger.log("Could not create black and white list, no show was given", logger.DEBUG)

    # find the best result for the current episode
    bestResult = None
    for cur_result in results:
        logger.log("Quality of " + cur_result.name + " is " + Quality.qualityStrings[cur_result.quality])

        if bwl:
            if not bwl.is_valid(cur_result):
                logger.log(cur_result.name + " does not match the blacklist or the whitelist, rejecting it. Result: " + bwl.get_last_result_msg(), logger.MESSAGE)
                continue

        if quality_list and cur_result.quality not in quality_list:
            logger.log(cur_result.name + " is a quality we know we don't want, rejecting it", logger.DEBUG)
            continue

        if show.rls_ignore_words and filter_release_name(cur_result.name, show.rls_ignore_words):
            logger.log(u"Ignoring " + cur_result.name + " based on ignored words filter: " + show.rls_ignore_words, logger.MESSAGE)
            continue

        if show.rls_require_words and not filter_release_name(cur_result.name, show.rls_require_words):
            logger.log(u"Ignoring " + cur_result.name + " based on required words filter: " + show.rls_require_words, logger.MESSAGE)
            continue

        if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(cur_result.name, cur_result.size, cur_result.provider.name):
            logger.log(cur_result.name + u" has previously failed, rejecting it")
            continue

        if not bestResult or (bestResult.quality < cur_result.quality and cur_result.quality != Quality.UNKNOWN):
            bestResult = cur_result
        elif bestResult.quality == cur_result.quality:
            if "proper" in cur_result.name.lower() or "repack" in cur_result.name.lower():
                bestResult = cur_result
            elif "internal" in bestResult.name.lower() and "internal" not in cur_result.name.lower():
                bestResult = cur_result
            elif "xvid" in bestResult.name.lower() and "x264" in cur_result.name.lower():
                logger.log(u"Preferring " + cur_result.name + " (x264 over xvid)")
                bestResult = cur_result

    if bestResult:
        logger.log(u"Picked " + bestResult.name + " as the best", logger.DEBUG)
    else:
        logger.log(u"No result picked.", logger.DEBUG)

    return bestResult
        except exceptions.MultipleShowObjectsException:
            logger.log(u"The show in " + self.showDir + " is already in your show list, skipping", logger.ERROR)
            ui.notifications.error('Show skipped', "The show in " + self.showDir + " is already in your show list")
            self._finishEarly()
            return

        except Exception, e:
            logger.log(u"Error trying to add show: " + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            self._finishEarly()
            raise

        # add it to the show list
        sickbeard.showList.append(self.show)

        try:
            self.show.loadEpisodesFromDir()
            with ek.ek(open, fileName, 'w') as fileOut:
                fileOut.write(result.extraInfo[0])

            helpers.chmodAsParent(fileName)

        except EnvironmentError, e:
            logger.log(u"Error trying to save NZB to black hole: " + ex(e), logger.ERROR)
            newResult = False

    elif resProvider.providerType == "torrent":
        newResult = resProvider.downloadResult(result)

    else:
        logger.log(u"Invalid provider type - this is a coding error, report it please", logger.ERROR)
        return False

    if newResult and sickbeard.USE_FAILED_DOWNLOADS:
        ui.notifications.message('Episode snatched', '<b>%s</b> snatched from <b>%s</b>' % (result.name, resProvider.name))

    return newResult
def searchProviders(show, season, episodes, manualSearch=False):
    foundResults = {}
    finalResults = []

    # check if we want to search for season packs instead of just season/episode
    seasonSearch = False
    if not manualSearch:
        seasonEps = show.getAllEpisodes(season)
        if len(seasonEps) == len(episodes):
            seasonSearch = True

    providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]

    if not len(providers):
        logger.log(u"No NZB/Torrent providers found or enabled in the sickrage config. Please check your settings.", logger.ERROR)
        return

    origThreadName = threading.currentThread().name

    for providerNum, provider in enumerate(providers):
        if provider.anime_only and not show.is_anime:
            logger.log(u"" + str(show.name) + " is not an anime, skipping ...")
            continue

        threading.currentThread().name = origThreadName + " :: [" + provider.name + "]"

        foundResults.setdefault(provider.name, {})
        searchCount = 0
        search_mode = 'eponly'
        if seasonSearch and provider.search_mode == 'sponly':
            search_mode = provider.search_mode

        while True:
            searchCount += 1

            if search_mode == 'sponly':
                logger.log(u"Searching for " + show.name + " Season " + str(season) + " pack")
            else:
                logger.log(u"Searching for episodes we need from " + show.name + " Season " + str(season))

            try:
                searchResults = provider.findSearchResults(show, season, episodes, search_mode, manualSearch)
            except exceptions.AuthException, e:
                logger.log(u"Authentication error: " + ex(e), logger.ERROR)
                break
            except Exception, e:
                logger.log(u"Error while searching " + provider.name + ", skipping: " + ex(e), logger.ERROR)
                break

            if len(searchResults):
                # make a list of all the results for this provider
                for curEp in searchResults:

                    # skip non-tv crap
                    searchResults[curEp] = filter(
                        lambda x: show_name_helpers.filterBadReleases(x.name) and show_name_helpers.isGoodResult(x.name, show, season=season),
                        searchResults[curEp])

                    if curEp in foundResults[provider.name]:
                        foundResults[provider.name][curEp] += searchResults[curEp]
                    else:
                        foundResults[provider.name][curEp] = searchResults[curEp]

                break
            elif not provider.search_fallback or searchCount == 2:
                break

            if search_mode == 'sponly':
                logger.log(u"FALLBACK EPISODE SEARCH INITIATED ...")
                search_mode = 'eponly'
            else:
                logger.log(u"FALLBACK SEASON PACK SEARCH INITIATED ...")
                search_mode = 'sponly'
def get(self, name):
    if name in self._previous_parsed:
        logger.log("Using cached parse result for: " + name, logger.DEBUG)
        return self._previous_parsed[name]
    else:
        return None
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    Returns a bool representing success.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object once it's snatched.
    """

    if result is None:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for curEp in result.episodes:
            if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
                result.priority = 1
    if re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I) != None:
        endStatus = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ("nzb", "nzbdata"):
        if sickbeard.NZB_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        elif sickbeard.NZB_METHOD == "sabnzbd":
            dlResult = sab.sendNZB(result)
        elif sickbeard.NZB_METHOD == "nzbget":
            is_proper = True if endStatus == SNATCHED_PROPER else False
            dlResult = nzbget.sendNZB(result, is_proper)
        else:
            logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
            dlResult = False

    # TORRENTs can be sent to clients or saved to disk
    elif result.resultType == "torrent":
        # torrents are saved to disk when blackhole mode
        if sickbeard.TORRENT_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        else:
            # Sets per provider seed ratio
            result.ratio = result.provider.seedRatio()
            result.content = result.provider.getURL(result.url) if not result.url.startswith('magnet') else None
            client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
            dlResult = client.sendTORRENT(result)
    else:
        logger.log(u"Unknown result type, unable to download it", logger.ERROR)
        dlResult = False

    if not dlResult:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.logSnatch(result)

    ui.notifications.message('Episode snatched', result.name)

    history.logSnatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    for curEpObj in result.episodes:
        with curEpObj.lock:
            if isFirstBestMatch(result):
                curEpObj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                curEpObj.status = Quality.compositeStatus(endStatus, result.quality)

            sql_l.append(curEpObj.get_sql())

        if curEpObj.status not in Quality.DOWNLOADED:
            notifiers.notify_snatch(curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

    if sql_l:
        with db.DBConnection() as myDB:
            myDB.mass_action(sql_l)

    return True
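A quick standalone check of the PROPER/REPACK detection regex used above (release names are invented); the boundary groups are what keep a word like 'Improper' from matching.

# Editor's sketch; demonstrates the boundary behaviour of the regex above.
import re

proper_regex = '(^|[\. _-])(proper|repack)([\. _-]|$)'
for name in ('Show.S01E01.PROPER.720p', 'Show.S01E01.REPACK.720p',
             'Show.S01E01.720p', 'An.Improper.Title.720p'):
    print name, bool(re.search(proper_regex, name, re.I))
# -> True, True, False, False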
def splitResult(result):
    urlData = helpers.getURL(result.url)
    if urlData is None:
        logger.log(u"Unable to load url " + result.url + ", can't download season NZB", logger.ERROR)
        return False

    # parse the season ep name
    try:
        np = NameParser(False, showObj=result.show)
        parse_result = np.parse(result.name)
    except InvalidNameException:
        logger.log(u"Unable to parse the filename " + result.name + " into a valid episode", logger.DEBUG)
        return False
    except InvalidShowException:
        logger.log(u"Unable to parse the filename " + result.name + " into a valid show", logger.DEBUG)
        return False

    # bust it up
    season = parse_result.season_number if parse_result.season_number != None else 1

    separateNZBs, xmlns = getSeasonNZBs(result.name, urlData, season)

    resultList = []

    for newNZB in separateNZBs:
        logger.log(u"Split out " + newNZB + " from " + result.name, logger.DEBUG)

        # parse the name
        try:
            np = NameParser(False, showObj=result.show)
            parse_result = np.parse(newNZB)
        except InvalidNameException:
            logger.log(u"Unable to parse the filename " + newNZB + " into a valid episode", logger.DEBUG)
            return False
        except InvalidShowException:
            logger.log(u"Unable to parse the filename " + newNZB + " into a valid show", logger.DEBUG)
            return False

        # make sure the result is sane
        if (parse_result.season_number != None and parse_result.season_number != season) or (parse_result.season_number == None and season != 1):
            logger.log(u"Found " + newNZB + " inside " + result.name + " but it doesn't seem to belong to the same season, ignoring it", logger.WARNING)
            continue
        elif len(parse_result.episode_numbers) == 0:
            logger.log(u"Found " + newNZB + " inside " + result.name + " but it doesn't seem to be a valid episode NZB, ignoring it", logger.WARNING)
            continue

        wantEp = True
        for epNo in parse_result.episode_numbers:
            if not result.extraInfo[0].wantEpisode(season, epNo, result.quality):
                logger.log(u"Ignoring result " + newNZB + " because we don't want an episode that is " + Quality.qualityStrings[result.quality], logger.DEBUG)
                wantEp = False
                break
        if not wantEp:
            continue

        # get all the associated episode objects
        epObjList = []
        for curEp in parse_result.episode_numbers:
            epObjList.append(result.extraInfo[0].getEpisode(season, curEp))

        # make a result
        curResult = classes.NZBDataSearchResult(epObjList)
        curResult.name = newNZB
        curResult.provider = result.provider
        curResult.quality = result.quality
        curResult.extraInfo = [createNZBString(separateNZBs[newNZB], xmlns)]

        resultList.append(curResult)

    return resultList
class NewQualitySettings(NumericProviders):
    def test(self):
        return self.hasTable("db_version")

    def execute(self):

        numTries = 0
        while not ek.ek(os.path.isfile, db.dbFilename(suffix='v0')):
            if not ek.ek(os.path.isfile, db.dbFilename()):
                break

            try:
                logger.log(u"Attempting to back up your sickbeard.db file before migration...")
                shutil.copy(db.dbFilename(), db.dbFilename(suffix='v0'))
                logger.log(u"Done backup, proceeding with migration.")
                break
            except Exception, e:
                logger.log(u"Error while trying to back up your sickbeard.db: " + ex(e))
                numTries += 1
                time.sleep(1)
                logger.log(u"Trying again.")

            if numTries >= 10:
                logger.log(u"Unable to back up your sickbeard.db file, please do it manually.")
                sys.exit(1)

        # old stuff that's been removed from common but we need it to upgrade
        HD = 1
        SD = 3
        ANY = 2
        BEST = 4

        ACTION_SNATCHED = 1
        ACTION_PRESNATCHED = 2
        ACTION_DOWNLOADED = 3

        PREDOWNLOADED = 3
        MISSED = 6
        BACKLOG = 7
        DISCBACKLOG = 8
        SNATCHED_BACKLOG = 10

        ### Update default quality
        if sickbeard.QUALITY_DEFAULT == HD:
            sickbeard.QUALITY_DEFAULT = common.HD
        elif sickbeard.QUALITY_DEFAULT == SD:
            sickbeard.QUALITY_DEFAULT = common.SD
        elif sickbeard.QUALITY_DEFAULT == ANY:
            sickbeard.QUALITY_DEFAULT = common.ANY
        elif sickbeard.QUALITY_DEFAULT == BEST:
            sickbeard.QUALITY_DEFAULT = common.BEST

        ### Update episode statuses
        toUpdate = self.connection.select(
            "SELECT episode_id, location, status FROM tv_episodes WHERE status IN (?, ?, ?, ?, ?, ?, ?)",
            [common.DOWNLOADED, common.SNATCHED, PREDOWNLOADED, MISSED, BACKLOG, DISCBACKLOG, SNATCHED_BACKLOG])
        didUpdate = False
        for curUpdate in toUpdate:

            # remember that we changed something
            didUpdate = True

            newStatus = None
            oldStatus = int(curUpdate["status"])
            if oldStatus == common.SNATCHED:
                newStatus = common.Quality.compositeStatus(common.SNATCHED, common.Quality.UNKNOWN)
            elif oldStatus == PREDOWNLOADED:
                newStatus = common.Quality.compositeStatus(common.DOWNLOADED, common.Quality.SDTV)
            elif oldStatus in (MISSED, BACKLOG, DISCBACKLOG):
                newStatus = common.WANTED
            elif oldStatus == SNATCHED_BACKLOG:
                newStatus = common.Quality.compositeStatus(common.SNATCHED, common.Quality.UNKNOWN)

            if newStatus != None:
                self.connection.action("UPDATE tv_episodes SET status = ? WHERE episode_id = ? ", [newStatus, curUpdate["episode_id"]])
                continue

            # if we get here status should be == DOWNLOADED
            if not curUpdate["location"]:
                continue

            newQuality = common.Quality.nameQuality(curUpdate["location"])

            if newQuality == common.Quality.UNKNOWN:
                newQuality = common.Quality.assumeQuality(curUpdate["location"])

            self.connection.action("UPDATE tv_episodes SET status = ? WHERE episode_id = ?", [common.Quality.compositeStatus(common.DOWNLOADED, newQuality), curUpdate["episode_id"]])

        # the backup can go once the updates have been applied
        if didUpdate:
            os.remove(db.dbFilename(suffix='v0'))

        ### Update show qualities
        toUpdate = self.connection.select("SELECT * FROM tv_shows")
        for curUpdate in toUpdate:

            if not curUpdate["quality"]:
                continue

            if int(curUpdate["quality"]) == HD:
                newQuality = common.HD
            elif int(curUpdate["quality"]) == SD:
                newQuality = common.SD
            elif int(curUpdate["quality"]) == ANY:
                newQuality = common.ANY
            elif int(curUpdate["quality"]) == BEST:
                newQuality = common.BEST
            else:
                logger.log(u"Unknown show quality: " + str(curUpdate["quality"]), logger.WARNING)
                newQuality = None

            if newQuality:
                self.connection.action("UPDATE tv_shows SET quality = ? WHERE show_id = ?", [newQuality, curUpdate["show_id"]])

        ### Update history
        toUpdate = self.connection.select("SELECT * FROM history")
        for curUpdate in toUpdate:

            newAction = None
            newStatus = None
            if int(curUpdate["action"]) == ACTION_SNATCHED:
                newStatus = common.SNATCHED
            elif int(curUpdate["action"]) == ACTION_DOWNLOADED:
                newStatus = common.DOWNLOADED
            elif int(curUpdate["action"]) == ACTION_PRESNATCHED:
                newAction = common.Quality.compositeStatus(common.SNATCHED, common.Quality.SDTV)

            if newAction == None and newStatus == None:
                continue

            if not newAction:
                if int(curUpdate["quality"]) == HD:
                    newAction = common.Quality.compositeStatus(newStatus, common.Quality.HDTV)
                elif int(curUpdate["quality"]) == SD:
                    newAction = common.Quality.compositeStatus(newStatus, common.Quality.SDTV)
                else:
                    newAction = common.Quality.compositeStatus(newStatus, common.Quality.UNKNOWN)

            self.connection.action("UPDATE history SET action = ? WHERE date = ? AND showid = ?", [newAction, curUpdate["date"], curUpdate["showid"]])

        self.connection.action("CREATE TABLE db_version (db_version INTEGER);")
        self.connection.action("INSERT INTO db_version (db_version) VALUES (?)", [1])