def postProcessDir(downloaderDir, nzbName=None):

    returnStr = ''

    downloadDir = ''

    # if they passed us a real dir then assume it's the one we want
    if os.path.isdir(downloaderDir):
        downloadDir = os.path.abspath(downloaderDir)

    # if they've got a download dir configured then use it
    elif sickbeard.TV_DOWNLOAD_DIR != '' and os.path.isdir(sickbeard.TV_DOWNLOAD_DIR):
        downloadDir = ek.ek(os.path.join, sickbeard.TV_DOWNLOAD_DIR, os.path.abspath(downloaderDir).split(os.path.sep)[-1])
        returnStr += logHelper("Trying to use folder " + downloadDir, logger.DEBUG)

    # if we didn't find a real dir then quit
    if not ek.ek(os.path.isdir, downloadDir):
        returnStr += logHelper("Unable to figure out what folder to process. If your downloader and Sick Beard aren't on the same PC make sure you fill out your TV download dir in the config.", logger.DEBUG)
        return returnStr

    # make sure the dir isn't inside a show dir
    myDB = db.DBConnection()
    sqlResults = myDB.select("SELECT * FROM tv_shows")
    for sqlShow in sqlResults:
        if downloadDir.startswith(os.path.abspath(sqlShow["location"]) + os.sep):
            returnStr += logHelper("You're trying to post process a show that's already been moved to its show dir", logger.ERROR)
            return returnStr

    returnStr += logHelper("Final folder name is " + downloadDir, logger.DEBUG)

    # TODO: check if it's failed and deal with it if it is
    if downloadDir.startswith('_FAILED_'):
        returnStr += logHelper("The directory name indicates it failed to extract, cancelling", logger.DEBUG)
        return returnStr

    # find the file we're dealing with
    biggest_file = findMainFile(downloadDir)
    if biggest_file == None:
        returnStr += logHelper("Unable to find the biggest file - is this really a TV download?", logger.DEBUG)
        return returnStr

    returnStr += logHelper("The biggest file in the dir is: " + biggest_file, logger.DEBUG)

    result = processFile(biggest_file, downloadDir, nzbName)

    # a successful post-processing will return a list with a string in it
    # if it's not successful then I just return right now
    if type(result) in (str, unicode):
        return returnStr + result

    returnStr += result[0]

    # delete the old folder unless the config wants us not to
    if not sickbeard.KEEP_PROCESSED_DIR and not sickbeard.KEEP_PROCESSED_FILE:

        returnStr += logHelper("Deleting folder " + downloadDir, logger.DEBUG)

        try:
            shutil.rmtree(downloadDir)
        except (OSError, IOError), e:
            returnStr += logHelper("Warning: unable to remove the folder " + downloadDir + ": " + str(e), logger.ERROR)
def fixSetGroupID(childPath):
    if os.name == 'nt' or os.name == 'ce':
        return

    parentPath = ek.ek(os.path.dirname, childPath)
    parentStat = os.stat(parentPath)
    parentMode = stat.S_IMODE(parentStat[stat.ST_MODE])

    if parentMode & stat.S_ISGID:
        parentGID = parentStat[stat.ST_GID]
        childStat = ek.ek(os.stat, childPath)
        childGID = childStat[stat.ST_GID]

        if childGID == parentGID:
            return

        childPath_owner = childStat.st_uid
        user_id = os.geteuid()

        if user_id != 0 and user_id != childPath_owner:
            logger.log(u"Not running as root or owner of " + childPath + ", not trying to set the set-group-ID", logger.DEBUG)
            return

        try:
            ek.ek(os.chown, childPath, -1, parentGID)  # @UndefinedVariable - only available on UNIX
            logger.log(u"Respecting the set-group-ID bit on the parent directory for %s" % (childPath), logger.DEBUG)
        except OSError:
            logger.log(u"Failed to respect the set-group-ID bit on the parent directory for %s (setting group ID %i)" % (childPath, parentGID), logger.ERROR)
def delete_empty_folders(check_empty_dir, keep_dir=None):
    """
    Walks backwards up the path and deletes any empty folders found.

    check_empty_dir: The path to clean (absolute path to a folder)
    keep_dir: Clean until this path is reached
    """

    # treat check_empty_dir as empty when it only contains these items
    ignore_items = []

    logger.log(u"Trying to clean any empty folders under " + check_empty_dir)

    # as long as the folder exists and doesn't contain any files, delete it
    while ek.ek(os.path.isdir, check_empty_dir) and check_empty_dir != keep_dir:

        check_files = ek.ek(os.listdir, check_empty_dir)

        if not check_files or (len(check_files) <= len(ignore_items) and all([check_file in ignore_items for check_file in check_files])):
            # directory is empty or contains only ignore_items
            try:
                logger.log(u"Deleting empty folder: " + check_empty_dir)
                # need shutil.rmtree when ignore_items is really implemented
                ek.ek(os.rmdir, check_empty_dir)
                # do the library update for synoindex
                notifiers.synoindex_notifier.deleteFolder(check_empty_dir)
            except (WindowsError, OSError), e:
                logger.log(u"Unable to delete " + check_empty_dir + ": " + repr(e) + " / " + str(e), logger.WARNING)
                break
            check_empty_dir = ek.ek(os.path.dirname, check_empty_dir)
        else:
            break
def parse(self, name):
    name = self._unicodify(name)

    cached = name_parser_cache.get(name)
    if cached:
        return cached

    # break it into parts if there are any (dirname, file name, extension)
    dir_name, file_name = ek.ek(os.path.split, name)

    if self.is_file_name:
        base_file_name = helpers.remove_non_release_groups(helpers.remove_extension(file_name))
    else:
        base_file_name = file_name

    # use only the direct parent dir
    dir_name = ek.ek(os.path.basename, dir_name)

    # set up a result to use
    final_result = ParseResult(name)

    # try parsing the file name
    file_name_result = self._parse_string(base_file_name)

    # parse the dirname for extra info if needed
    dir_name_result = self._parse_string(dir_name)

    # build the ParseResult object
    final_result.air_date = self._combine_results(file_name_result, dir_name_result, 'air_date')

    if not final_result.air_date:
        final_result.season_number = self._combine_results(file_name_result, dir_name_result, 'season_number')
        final_result.episode_numbers = self._combine_results(file_name_result, dir_name_result, 'episode_numbers')

    final_result.is_proper = self._combine_results(file_name_result, dir_name_result, 'is_proper')

    # if the dirname has a release group/show name I believe it over the filename
    final_result.series_name = self._combine_results(dir_name_result, file_name_result, 'series_name')
    final_result.extra_info = self._combine_results(dir_name_result, file_name_result, 'extra_info')
    final_result.release_group = self._combine_results(dir_name_result, file_name_result, 'release_group')

    final_result.which_regex = []
    if final_result == file_name_result:
        final_result.which_regex = file_name_result.which_regex
    elif final_result == dir_name_result:
        final_result.which_regex = dir_name_result.which_regex
    else:
        if file_name_result:
            final_result.which_regex += file_name_result.which_regex
        if dir_name_result:
            final_result.which_regex += dir_name_result.which_regex

    # if there's no useful info in it then raise an exception
    if final_result.season_number is None and not final_result.episode_numbers and final_result.air_date is None and not final_result.series_name:
        raise InvalidNameException("Unable to parse " + name.encode(sickbeard.SYS_ENCODING, 'xmlcharrefreplace'))

    name_parser_cache.add(name, final_result)

    # return it
    return final_result
def chmodAsParent(childPath):
    if os.name == 'nt' or os.name == 'ce':
        return

    parentPath = ek.ek(os.path.dirname, childPath)

    if not parentPath:
        logger.log(u"No parent path provided in " + childPath + ", unable to get permissions from it", logger.DEBUG)
        return

    parentMode = stat.S_IMODE(os.stat(parentPath)[stat.ST_MODE])

    childPathStat = ek.ek(os.stat, childPath)
    childPath_mode = stat.S_IMODE(childPathStat[stat.ST_MODE])

    if ek.ek(os.path.isfile, childPath):
        childMode = fileBitFilter(parentMode)
    else:
        childMode = parentMode

    if childPath_mode == childMode:
        return

    childPath_owner = childPathStat.st_uid
    user_id = os.geteuid()

    if user_id != 0 and user_id != childPath_owner:
        logger.log(u"Not running as root or owner of " + childPath + ", not trying to set permissions", logger.DEBUG)
        return

    try:
        ek.ek(os.chmod, childPath, childMode)
        logger.log(u"Setting permissions for %s to %o as parent directory has %o" % (childPath, childMode, parentMode), logger.DEBUG)
    except OSError:
        logger.log(u"Failed to set permission for %s to %o" % (childPath, childMode), logger.ERROR)
def moveFile(srcFile, destFile):
    try:
        ek.ek(os.rename, srcFile, destFile)
        fixSetGroupID(destFile)
    except OSError:
        copyFile(srcFile, destFile)
        ek.ek(os.unlink, srcFile)
def rename_ep_file(cur_path, new_path):
    """
    Creates all folders needed to move a file to its new location, renames it, then cleans up any folders
    left that are now empty.

    cur_path: The absolute path to the file you want to move/rename
    new_path: The absolute path to the destination for the file WITHOUT THE EXTENSION
    """

    new_dest_dir, new_dest_name = os.path.split(new_path)  # @UnusedVariable
    cur_file_name, cur_file_ext = os.path.splitext(cur_path)  # @UnusedVariable

    if cur_file_ext[1:] in subtitleExtensions:
        # Extract subtitle language from filename
        sublang = os.path.splitext(cur_file_name)[1][1:]

        # Check if the language extracted from filename is a valid language
        try:
            language = subliminal.language.Language(sublang, strict=True)
            cur_file_ext = '.' + sublang + cur_file_ext
        except ValueError:
            pass

    # put the extension on the incoming file
    new_path += cur_file_ext

    make_dirs(os.path.dirname(new_path))

    # move the file
    try:
        logger.log(u"Renaming file from " + cur_path + " to " + new_path)
        ek.ek(os.rename, cur_path, new_path)
    except (OSError, IOError), e:
        logger.log(u"Failed renaming " + cur_path + " to " + new_path + ": " + ex(e), logger.ERROR)
        return False
def execute(self):
    dir_results = self.connection.select("SELECT location FROM tv_shows")

    dir_counts = {}
    for cur_dir in dir_results:
        cur_root_dir = ek.ek(os.path.dirname, ek.ek(os.path.normpath, cur_dir["location"]))
        if cur_root_dir not in dir_counts:
            dir_counts[cur_root_dir] = 1
        else:
            dir_counts[cur_root_dir] += 1

    logger.log(u"Dir counts: " + str(dir_counts), logger.DEBUG)

    if not dir_counts:
        self.incDBVersion()
        return

    default_root_dir = dir_counts.values().index(max(dir_counts.values()))

    new_root_dirs = str(default_root_dir) + '|' + '|'.join(dir_counts.keys())
    logger.log(u"Setting ROOT_DIRS to: " + new_root_dirs, logger.DEBUG)

    sickbeard.ROOT_DIRS = new_root_dirs

    sickbeard.save_config()

    self.incDBVersion()
def get_season_thumb_path(self, show_obj, season):
    """
    Season thumbs for MediaBrowser go in Show Dir/Season X/folder.jpg

    If no season folder exists, None is returned
    """

    dir_list = [x for x in ek.ek(os.listdir, show_obj.location) if ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))]

    season_dir_regex = '^Season\s+(\d+)$'

    season_dir = None

    for cur_dir in dir_list:
        if season == 0 and cur_dir == 'Specials':
            season_dir = cur_dir
            break

        match = re.match(season_dir_regex, cur_dir, re.I)
        if not match:
            continue

        cur_season = int(match.group(1))

        if cur_season == season:
            season_dir = cur_dir
            break

    if not season_dir:
        logger.log(u"Unable to find a season dir for season " + str(season), logger.DEBUG)
        return None

    logger.log(u"Using " + str(season_dir) + "/folder.jpg as season dir for season " + str(season), logger.DEBUG)

    return ek.ek(os.path.join, show_obj.location, season_dir, 'folder.jpg')
def isBeingWritten(filepath):
    # Return True if the file was modified within the last 60 seconds; it might still be being written to.
    ctime = max(ek.ek(os.path.getctime, filepath), ek.ek(os.path.getmtime, filepath))
    if ctime > time.time() - 60:
        return True

    return False
def hardlinkFile(srcFile, destFile):
    try:
        ek.ek(link, srcFile, destFile)
        fixSetGroupID(destFile)
    except:
        logger.log(u"Failed to create hardlink of " + srcFile + " at " + destFile + ". Copying instead", logger.ERROR)
        copyFile(srcFile, destFile)
def _delete(self, file_path, associated_files=False):
    """
    Deletes the file and optionally all associated files.

    file_path: The file to delete
    associated_files: True to delete all files which differ only by extension, False to leave them
    """

    if not file_path:
        return

    # figure out which files we want to delete
    file_list = [file_path]
    if associated_files:
        file_list = file_list + helpers.list_associated_files(file_path, base_name_only=True)

    if not file_list:
        self._log(u"There were no files associated with " + file_path + ", not deleting anything", logger.DEBUG)
        return

    # delete the file and any other files which we want to delete
    for cur_file in file_list:
        if ek.ek(os.path.isfile, cur_file):
            self._log(u"Deleting file " + cur_file, logger.DEBUG)
            ek.ek(os.remove, cur_file)
            # do the library update for synoindex
            notifiers.synoindex_notifier.deleteFile(cur_file)
def __init__(self, file_path, nzb_name=None, pp_options={}):
    """
    Creates a new post processor with the given file path and optionally an NZB name.

    file_path: The path to the file to be processed
    nzb_name: The name of the NZB which resulted in this file being downloaded (optional)
    """
    # absolute path to the folder that is being processed
    self.folder_path = ek.ek(os.path.dirname, ek.ek(os.path.abspath, file_path))

    # full path to file
    self.file_path = file_path

    # file name only
    self.file_name = ek.ek(os.path.basename, file_path)

    # the name of the folder only
    self.folder_name = ek.ek(os.path.basename, self.folder_path)

    # name of the NZB that resulted in this folder
    self.nzb_name = nzb_name

    self.force_replace = pp_options.get('force_replace', False)

    self.in_history = False
    self.release_group = None
    self.release_name = None
    self.is_proper = False

    self.log = ''
def makeDir(dir):
    if not ek.ek(os.path.isdir, dir):
        try:
            ek.ek(os.makedirs, dir)
        except OSError:
            return False
    return True
def backupVersionedFile(old_file, version):
    numTries = 0

    new_file = old_file + '.' + 'v' + str(version)

    while not ek.ek(os.path.isfile, new_file):
        time.sleep(0.01)

        if not ek.ek(os.path.isfile, old_file):
            logger.log(u"Not creating backup, " + old_file + " doesn't exist", logger.DEBUG)
            break

        try:
            logger.log(u"Trying to back up " + old_file + " to " + new_file, logger.DEBUG)
            shutil.copy(old_file, new_file)
            logger.log(u"Backup done", logger.DEBUG)
            break
        except Exception, e:
            logger.log(u"Error while trying to back up " + old_file + " to " + new_file + " : " + ex(e), logger.WARNING)
            numTries += 1
            time.sleep(1)
            logger.log(u"Trying again.", logger.DEBUG)

        if numTries >= 10:
            logger.log(u"Unable to back up " + old_file + " to " + new_file + " please do it manually.", logger.ERROR)
            return False
def _delete(self, file_path, associated_files=False):
    """
    Deletes the file and optionally all associated files.

    file_path: The file to delete
    associated_files: True to delete all files which differ only by extension, False to leave them
    """

    if not file_path:
        return

    # figure out which files we want to delete
    if associated_files:
        file_list = self._list_associated_files(file_path)
    else:
        file_list = [file_path]

    if not file_list:
        self._log(u"There were no files associated with " + file_path + ", not deleting anything", logger.DEBUG)
        return

    # delete the file and any other files which we want to delete
    for cur_file in file_list:
        self._log(u"Deleting file " + cur_file, logger.DEBUG)
        if ek.ek(os.path.isfile, cur_file):
            ek.ek(os.remove, cur_file)
def subtitlesLanguages(video_path):
    """Return a list of detected subtitle languages for the given video file"""
    resultList = []

    if sickbeard.SUBTITLES_DIR and ek.ek(os.path.exists, sickbeard.SUBTITLES_DIR):
        video_path = ek.ek(os.path.join, sickbeard.SUBTITLES_DIR, ek.ek(os.path.basename, video_path))

    # Search subtitles in the relative path
    elif sickbeard.SUBTITLES_DIR:
        video_path = ek.ek(os.path.join, ek.ek(os.path.dirname, video_path), sickbeard.SUBTITLES_DIR, ek.ek(os.path.basename, video_path))

    languages = subliminal.video.scan_subtitle_languages(video_path)

    for language in languages:
        if hasattr(language, 'opensubtitles') and language.opensubtitles:
            resultList.append(language.opensubtitles)
        elif hasattr(language, 'alpha3') and language.alpha3:
            resultList.append(language.alpha3)
        elif hasattr(language, 'alpha2') and language.alpha2:
            resultList.append(language.alpha2)

    defaultLang = wantedLanguages()

    if len(resultList) == 1 and len(defaultLang) == 1:
        return defaultLang

    if ('pob' in defaultLang or 'pb' in defaultLang) and ('pt' not in defaultLang and 'por' not in defaultLang):
        resultList = [x if not x in ['por', 'pt'] else u'pob' for x in resultList]

    return sorted(resultList)
def _checkForExistingFile(self, existing_file):

    if not existing_file:
        self._log(u"There is no existing file so there's no worries about replacing it", logger.DEBUG)
        return PostProcessor.DOESNT_EXIST

    # if the new file exists, return the appropriate code depending on the size
    if ek.ek(os.path.isfile, existing_file):

        # see if it's bigger than our old file
        if ek.ek(os.path.getsize, existing_file) > ek.ek(os.path.getsize, self.file_path):
            self._log(u"File " + existing_file + " is larger than " + self.file_path, logger.DEBUG)
            return PostProcessor.EXISTS_LARGER

        elif ek.ek(os.path.getsize, existing_file) == ek.ek(os.path.getsize, self.file_path):
            self._log(u"File " + existing_file + " is the same size as " + self.file_path, logger.DEBUG)
            return PostProcessor.EXISTS_SAME

        else:
            self._log(u"File " + existing_file + " is smaller than " + self.file_path, logger.DEBUG)
            return PostProcessor.EXISTS_SMALLER

    else:
        self._log(u"File " + existing_file + " doesn't exist so there's no worries about replacing it", logger.DEBUG)
        return PostProcessor.DOESNT_EXIST
def chmodAsParent(childPath):
    if os.name == "nt" or os.name == "ce":
        return

    parentPath = ek.ek(os.path.dirname, childPath)

    if not parentPath:
        logger.log(u"No parent path provided in " + childPath + ", unable to get permissions from it", logger.DEBUG)
        return

    parentMode = stat.S_IMODE(os.stat(parentPath)[stat.ST_MODE])

    if ek.ek(os.path.isfile, childPath):
        childMode = fileBitFilter(parentMode)
    else:
        childMode = parentMode

    try:
        ek.ek(os.chmod, childPath, childMode)
        logger.log(
            u"Setting permissions for %s to %o as parent directory has %o" % (childPath, childMode, parentMode),
            logger.DEBUG,
        )
    except OSError:
        logger.log(u"Failed to set permission for %s to %o" % (childPath, childMode), logger.ERROR)
def _delete_folder(self, folder, check_empty=True):

    # check if it's a folder
    if not ek.ek(os.path.isdir, folder):
        return False

    # make sure it isn't TV_DOWNLOAD_DIR
    if sickbeard.TV_DOWNLOAD_DIR and helpers.real_path(sickbeard.TV_DOWNLOAD_DIR) == helpers.real_path(folder):
        return False

    # only check that the folder is empty when that check is wanted
    if check_empty and ek.ek(os.listdir, folder):
        return False

    # try deleting folder
    try:
        shutil.rmtree(folder)
    except (OSError, IOError) as e:
        logger.log(u'Warning: unable to delete folder: %s: %s' % (folder, ex(e)), logger.WARNING)
        return False

    if ek.ek(os.path.isdir, folder):
        logger.log(u'Warning: unable to delete folder: %s' % folder, logger.WARNING)
        return False

    self._log_helper(u'Deleted folder ' + folder, logger.MESSAGE)
    return True
def _find_ep_destination_folder(self, ep_obj):

    # if we're supposed to put it in a season folder then figure out what folder to use
    season_folder = ''
    if ep_obj.show.seasonfolders:

        # search the show dir for season folders
        for curDir in ek.ek(os.listdir, ep_obj.show.location):

            if not ek.ek(os.path.isdir, ek.ek(os.path.join, ep_obj.show.location, curDir)):
                continue

            # if it's a season folder, check if it's the one we want
            match = re.match(".*season\s*(\d+)", curDir, re.IGNORECASE)
            if match:
                # if it's the correct season folder then stop looking
                if int(match.group(1)) == int(ep_obj.season):
                    season_folder = curDir
                    break

        # if we couldn't find the right one then just use the season folder default format
        if season_folder == '':
            # for air-by-date shows use the year as the season folder
            if ep_obj.show.air_by_date:
                season_folder = str(ep_obj.airdate.year)
            else:
                try:
                    season_folder = sickbeard.SEASON_FOLDERS_FORMAT % (ep_obj.season)
                except TypeError:
                    logger.log(u"Error: Your season folder format is incorrect, try setting it back to the default")

    dest_folder = ek.ek(os.path.join, ep_obj.show.location, season_folder)

    return dest_folder
def clean_url(url):
    """
    Returns a cleaned url starting with a scheme and folder with trailing /, or an empty string
    """

    if url and url.strip():

        url = url.strip()

        if '://' not in url:
            url = '//' + url

        scheme, netloc, path, query, fragment = urlparse.urlsplit(url, 'http')

        if not path.endswith('/'):
            basename, ext = ek.ek(os.path.splitext, ek.ek(os.path.basename, path))  # @UnusedVariable
            if not ext:
                path = path + '/'

        cleaned_url = urlparse.urlunsplit((scheme, netloc, path, query, fragment))

    else:
        cleaned_url = ''

    return cleaned_url
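# A minimal usage sketch for clean_url (illustrative only: the URLs below are
# hypothetical and the expected values simply follow the urlsplit/urlunsplit
# logic shown above):
#
#     clean_url('www.example.com')               # -> 'http://www.example.com/'
#     clean_url('https://example.com/feed.xml')  # -> 'https://example.com/feed.xml'
#     clean_url('   ')                           # -> ''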
def imageName(self):
    if ek.ek(os.path.isfile,
             ek.ek(os.path.join, sickbeard.PROG_DIR, "data", "images", "providers", self.getID() + ".png")):
        return self.getID() + ".png"
    return "newznab.png"
def _fanart_dir(self, indexer_id=None):
    """
    Builds up the full path to the fanart image cache directory
    """
    args = [os.path.join, self._cache_dir(), 'fanart'] + \
        (None is not indexer_id and [str(indexer_id).split('.')[0]] or [])
    return ek.ek(os.path.abspath, ek.ek(*args))
def _makeURL(self, result):
    urls = []
    filename = u''
    if result.url.startswith('magnet'):
        try:
            torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper()

            torrent_name = re.findall('dn=([^&]+)', result.url)[0]

            if len(torrent_hash) == 32:
                torrent_hash = b16encode(b32decode(torrent_hash)).upper()

            if not torrent_hash:
                logger.log("Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR)
                return (urls, filename)

            urls = [
                'http://torcache.net/torrent/' + torrent_hash + '.torrent',
                'http://zoink.ch/torrent/' + torrent_name + '.torrent',
                'http://torrage.com/torrent/' + torrent_hash + '.torrent',
            ]
        except:
            urls = [result.url]
    else:
        urls = [result.url]

    if self.providerType == GenericProvider.TORRENT:
        filename = ek.ek(os.path.join, sickbeard.TORRENT_DIR, helpers.sanitizeFileName(result.name) + '.' + self.providerType)
    elif self.providerType == GenericProvider.NZB:
        filename = ek.ek(os.path.join, sickbeard.NZB_DIR, helpers.sanitizeFileName(result.name) + '.' + self.providerType)

    return (urls, filename)
def rename_ep_file(cur_path, new_path, old_path_length=0):
    """
    Creates all folders needed to move a file to its new location, renames it, then cleans up any folders
    left that are now empty.

    cur_path: The absolute path to the file you want to move/rename
    new_path: The absolute path to the destination for the file WITHOUT THE EXTENSION
    old_path_length: The length of media file path (old name) WITHOUT THE EXTENSION
    """

    new_dest_dir, new_dest_name = os.path.split(new_path)  # @UnusedVariable

    if old_path_length == 0 or old_path_length > len(cur_path):
        # approach from the right
        cur_file_name, cur_file_ext = os.path.splitext(cur_path)  # @UnusedVariable
    else:
        # approach from the left
        cur_file_ext = cur_path[old_path_length:]

    # put the extension on the incoming file
    new_path += cur_file_ext

    make_dirs(os.path.dirname(new_path))

    # move the file
    try:
        logger.log(u"Renaming file from " + cur_path + " to " + new_path)
        ek.ek(os.rename, cur_path, new_path)
    except (OSError, IOError), e:
        logger.log(u"Failed renaming " + cur_path + " to " + new_path + ": " + ex(e), logger.ERROR)
        return False
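# A hypothetical worked example of the old_path_length branch above (paths are
# illustrative only). Approaching from the left preserves a multi-part suffix
# such as a language-tagged subtitle extension:
#
#     cur_path        = '/tv/Show/old name.en.srt'
#     old_path_length = len('/tv/Show/old name')
#     cur_file_ext    = cur_path[old_path_length:]   # -> '.en.srt'
#
# whereas os.path.splitext(cur_path) would return only '.srt' and drop the
# language tag.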
def _write_image(self, image_data, image_path):
    """
    Saves the data in image_data to the location image_path. Returns True/False
    to represent success or failure.

    image_data: binary image data to write to file
    image_path: file location to save the image to
    """

    # don't bother overwriting it
    if ek.ek(os.path.isfile, image_path):
        logger.log(u"Image already exists, not downloading", logger.DEBUG)
        return False

    if not image_data:
        logger.log(u"Unable to retrieve image, skipping", logger.WARNING)
        return False

    try:
        outFile = ek.ek(open, image_path, "wb")
        outFile.write(image_data)
        outFile.close()
    except IOError, e:
        logger.log(
            u"Unable to write image to " + image_path + " - are you sure the show folder is writable? " + str(e).decode("utf-8"),
            logger.ERROR,
        )
        return False
def __init__(self, file_path, nzb_name=None, process_method=None, is_priority=None):
    """
    Creates a new post processor with the given file path and optionally an NZB name.

    file_path: The path to the file to be processed
    nzb_name: The name of the NZB which resulted in this file being downloaded (optional)
    """
    # absolute path to the folder that is being processed
    self.folder_path = ek.ek(os.path.dirname, ek.ek(os.path.abspath, file_path))

    # full path to file
    self.file_path = file_path

    # file name only
    self.file_name = ek.ek(os.path.basename, file_path)

    # the name of the folder only
    self.folder_name = ek.ek(os.path.basename, self.folder_path)

    # name of the NZB that resulted in this folder
    self.nzb_name = nzb_name

    self.process_method = process_method if process_method else sickbeard.PROCESS_METHOD

    self.in_history = False
    self.release_group = None
    self.is_proper = False
    self.is_priority = is_priority

    self.good_results = {self.NZB_NAME: False, self.FOLDER_NAME: False, self.FILE_NAME: False}

    self.log = ''
def process_media(processPath, videoFiles, nzbName, process_method, force, is_priority, result):

    processor = None
    for cur_video_file in videoFiles:
        if already_postprocessed(processPath, cur_video_file, force, result):
            result.missedfiles.append(ek.ek(os.path.join, processPath, cur_video_file) + " : Already processed")
            continue

        cur_video_file_path = ek.ek(os.path.join, processPath, cur_video_file)

        try:
            processor = postProcessor.PostProcessor(cur_video_file_path, nzbName, process_method, is_priority)
            result.result = processor.process()
            process_fail_message = ""
        except exceptions.PostProcessingFailed, e:
            result.result = False
            process_fail_message = ex(e)

        if processor:
            result.output += processor.log

        if result.result:
            result.output += logHelper(u"Processing succeeded for " + cur_video_file_path)
        else:
            result.output += logHelper(u"Processing failed for " + cur_video_file_path + ": " + process_fail_message, logger.WARNING)
            result.missedfiles.append(cur_video_file_path + " : Processing failed: " + process_fail_message)
            result.aggresult = False
def _delete_files(self, process_path, notwanted_files, use_trash=False):

    if not self.any_vid_processed:
        return

    # Delete all files that are not needed
    for cur_file in notwanted_files:

        cur_file_path = ek.ek(os.path.join, process_path, cur_file)

        if not ek.ek(os.path.isfile, cur_file_path):
            continue  # Prevent error when a notwanted file is an associated file

        # check first the read-only attribute
        file_attribute = ek.ek(os.stat, cur_file_path)[0]
        if not file_attribute & stat.S_IWRITE:
            # File is read-only, so make it writeable
            self._log_helper(u'Changing ReadOnly flag for file ' + cur_file)
            try:
                ek.ek(os.chmod, cur_file_path, stat.S_IWRITE)
            except OSError as e:
                self._log_helper(u'Cannot change permissions of %s: %s' % (cur_file_path, str(e.strerror)))

        try:
            if use_trash:
                ek.ek(send2trash, cur_file_path)
            else:
                ek.ek(os.remove, cur_file_path)
        except OSError as e:
            self._log_helper(u'Unable to delete file %s: %s' % (cur_file, str(e.strerror)))

        if True is not ek.ek(os.path.isfile, cur_file_path):
            self._log_helper(u'Deleted file ' + cur_file)
def update(self): """ Downloads the latest source tarball from github and installs it over the existing version. """ base_url = 'http://github.com/' + self.github_org + '/' + self.github_repo tar_download_url = base_url + '/tarball/' + self.branch try: # prepare the update dir sr_update_dir = ek.ek(os.path.join, sickbeard.PROG_DIR, u'sr-update') if os.path.isdir(sr_update_dir): logger.log(u"Clearing out update folder " + sr_update_dir + " before extracting") shutil.rmtree(sr_update_dir) logger.log(u"Creating update folder " + sr_update_dir + " before extracting") os.makedirs(sr_update_dir) # retrieve file logger.log(u"Downloading update from " + repr(tar_download_url)) tar_download_path = os.path.join(sr_update_dir, u'sr-update.tar') urllib.urlretrieve(tar_download_url, tar_download_path) if not ek.ek(os.path.isfile, tar_download_path): logger.log(u"Unable to retrieve new version from " + tar_download_url + ", can't update", logger.ERROR) return False if not ek.ek(tarfile.is_tarfile, tar_download_path): logger.log(u"Retrieved version from " + tar_download_url + " is corrupt, can't update", logger.ERROR) return False # extract to sr-update dir logger.log(u"Extracting file " + tar_download_path) tar = tarfile.open(tar_download_path) tar.extractall(sr_update_dir) tar.close() # delete .tar.gz logger.log(u"Deleting file " + tar_download_path) os.remove(tar_download_path) # find update dir name update_dir_contents = [x for x in os.listdir(sr_update_dir) if os.path.isdir(os.path.join(sr_update_dir, x))] if len(update_dir_contents) != 1: logger.log(u"Invalid update data, update failed: " + str(update_dir_contents), logger.ERROR) return False content_dir = os.path.join(sr_update_dir, update_dir_contents[0]) # walk temp folder and move files to main folder logger.log(u"Moving files from " + content_dir + " to " + sickbeard.PROG_DIR) for dirname, dirnames, filenames in os.walk(content_dir): # @UnusedVariable dirname = dirname[len(content_dir) + 1:] for curfile in filenames: old_path = os.path.join(content_dir, dirname, curfile) new_path = os.path.join(sickbeard.PROG_DIR, dirname, curfile) # Avoid DLL access problem on WIN32/64 # These files needing to be updated manually #or find a way to kill the access from memory if curfile in ('unrar.dll', 'unrar64.dll'): try: os.chmod(new_path, stat.S_IWRITE) os.remove(new_path) os.renames(old_path, new_path) except Exception, e: logger.log(u"Unable to update " + new_path + ': ' + ex(e), logger.DEBUG) os.remove(old_path) # Trash the updated file without moving in new path continue if os.path.isfile(new_path): os.remove(new_path) os.renames(old_path, new_path) sickbeard.CUR_COMMIT_HASH = self._newest_commit_hash sickbeard.CUR_COMMIT_BRANCH = self.branch
def imageName(self):
    if ek.ek(os.path.isfile,
             ek.ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers', self.getID() + '.png')):
        return self.getID() + '.png'
    return 'newznab.png'
def copyFile(srcFile, destFile):
    ek.ek(shutil.copyfile, srcFile, destFile)
    try:
        ek.ek(shutil.copymode, srcFile, destFile)
    except OSError:
        pass
def _remove_file_failed(file):
    try:
        ek.ek(os.remove, file)
    except:
        pass
def real_path(path):
    """
    Returns: the canonicalized absolute pathname. The resulting path will have no symbolic link, '/./' or '/../' components.
    """
    return ek.ek(os.path.normpath, ek.ek(os.path.normcase, ek.ek(os.path.realpath, path)))
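# A minimal usage sketch for real_path (the path below is hypothetical; the
# exact result depends on the platform's realpath/normcase behaviour and on
# any symlinks involved):
#
#     real_path('/opt/sickbeard/../downloads/./complete')
#     # -> '/opt/downloads/complete' on a typical POSIX system with no symlinks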
    new_path += cur_file_ext

    make_dirs(os.path.dirname(new_path))

    # move the file
    try:
        logger.log(u"Renaming file from " + cur_path + " to " + new_path)
        ek.ek(os.rename, cur_path, new_path)
    except (OSError, IOError), e:
        logger.log(u"Failed renaming " + cur_path + " to " + new_path + ": " + ex(e), logger.ERROR)
        return False

    # clean up any old folders that are empty
    delete_empty_folders(ek.ek(os.path.dirname, cur_path))

    return True


def delete_empty_folders(check_empty_dir, keep_dir=None):
    """
    Walks backwards up the path and deletes any empty folders found.

    check_empty_dir: The path to clean (absolute path to a folder)
    keep_dir: Clean until this path is reached
    """

    # treat check_empty_dir as empty when it only contains these items
    ignore_items = []
        # save the data to disk
        try:
            fileOut = open(fileName, "w")
            fileOut.write(result.extraInfo[0])
            fileOut.close()
            helpers.chmodAsParent(fileName)
        except IOError, e:
            logger.log(u"Error trying to save NZB to black hole: " + ex(e), logger.ERROR)
            newResult = False

    elif result.resultType == "torrentdata":

        # get the final file path to the torrent
        fileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR, helpers.sanitizeFileName(result.name) + ".torrent")

        logger.log(u"Saving Torrent to " + fileName)

        newResult = True

        # save the data to disk
        try:
            fileOut = open(fileName, "wb")
            fileOut.write(result.extraInfo[0])
            fileOut.close()
            helpers.chmodAsParent(fileName)
        except IOError, e:
            logger.log(u"Error trying to save Torrent to black hole: " + ex(e), logger.ERROR)
            newResult = False
def get_season_all_banner_path(self, show_obj):
    return ek.ek(os.path.join, show_obj.location, self.season_all_banner_name)
def get_fanart_path(self, show_obj):
    return ek.ek(os.path.join, show_obj.location, self.fanart_name)
        else:
            self._log(u"This download is marked a priority download so I'm going to replace an existing file if I find one", logger.DEBUG)

        # delete the existing file (and company)
        for cur_ep in [ep_obj] + ep_obj.relatedEps:
            try:
                self._delete(cur_ep.location, associated_files=True)
            except (OSError, IOError):
                raise exceptions.PostProcessingFailed("Unable to delete the existing files")

        # if the show directory doesn't exist then make it if allowed
        if not ek.ek(os.path.isdir, ep_obj.show.location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
            self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
            try:
                ek.ek(os.mkdir, ep_obj.show.location)
            except (OSError, IOError):
                raise exceptions.PostProcessingFailed("Unable to create the show directory: " + ep_obj.show.location)

        # get metadata for the show (but not episode because it hasn't been fully processed)
        ep_obj.show.writeMetadata(True)

        # update the ep info before we rename so the quality & release name go into the name properly
def _has_season_all_banner(self, show_obj):
    result = ek.ek(os.path.isfile, self.get_season_all_banner_path(show_obj))
    logger.log(u"Checking if " + self.get_season_all_banner_path(show_obj) + " exists: " + str(result), logger.DEBUG)
    return result
def get_poster_path(self, show_obj):
    return ek.ek(os.path.join, show_obj.location, self.poster_name)
def _has_episode_thumb(self, ep_obj):
    location = self.get_episode_thumb_path(ep_obj)
    result = location != None and ek.ek(os.path.isfile, location)
    if location:
        logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG)
    return result
def get_show_file_path(self, show_obj):
    return ek.ek(os.path.join, show_obj.location, self._show_metadata_filename)
def _has_episode_metadata(self, ep_obj):
    result = ek.ek(os.path.isfile, self.get_episode_file_path(ep_obj))
    logger.log(u"Checking if " + self.get_episode_file_path(ep_obj) + " exists: " + str(result), logger.DEBUG)
    return result
def _has_season_banner(self, show_obj, season):
    location = self.get_season_banner_path(show_obj, season)
    result = location != None and ek.ek(os.path.isfile, location)
    if location:
        logger.log(u"Checking if " + location + " exists: " + str(result), logger.DEBUG)
    return result
def _safe_replace(self, ep_obj, new_ep_quality): """ Determines if the new episode can safely replace old episode. Episodes which are expected (snatched) or larger than the existing episode are priority, others are not. ep_obj: The TVEpisode object in question new_ep_quality: The quality of the episode that is being processed Returns: True if the episode can safely replace old episode, False otherwise. """ # if SB snatched this then assume it's safe if ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER: self._log(u"Sick Beard snatched this episode, marking it safe to replace", logger.DEBUG) return True old_ep_status, old_ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) # if old episode is not downloaded/archived then it's safe if old_ep_status != common.DOWNLOADED and old_ep_status != common.ARCHIVED: self._log(u"Existing episode status is not downloaded/archived, marking it safe to replace", logger.DEBUG) return True if old_ep_status == common.ARCHIVED: self._log(u"Existing episode status is archived, marking it unsafe to replace", logger.DEBUG) return False # Status downloaded. Quality/ size checks # if manual post process option is set to force_replace then it's safe if self.force_replace: self._log(u"Processed episode is set to force replace existing episode, marking it safe to replace", logger.DEBUG) return True # if the file processed is higher quality than the existing episode then it's safe if new_ep_quality > old_ep_quality: if new_ep_quality != common.Quality.UNKNOWN: self._log(u"Existing episode status is not snatched but processed episode appears to be better quality than existing episode, marking it safe to replace", logger.DEBUG) return True else: self._log(u"Episode already exists in database and processed episode has unknown quality, marking it unsafe to replace", logger.DEBUG) return False # if there's an existing downloaded file with same quality, check filesize to decide if new_ep_quality == old_ep_quality: self._log(u"Episode already exists in database and has same quality as processed episode", logger.DEBUG) # check for an existing file self._log(u"Checking size of existing file: " + ep_obj.location, logger.DEBUG) existing_file_status = self._checkForExistingFile(ep_obj.location) if existing_file_status == PostProcessor.EXISTS_LARGER: self._log(u"File exists and new file is smaller, marking it unsafe to replace", logger.DEBUG) return False elif existing_file_status == PostProcessor.EXISTS_SAME: self._log(u"File exists and new file is same size, marking it unsafe to replace", logger.DEBUG) return False elif existing_file_status == PostProcessor.EXISTS_SMALLER: self._log(u"File exists and new file is larger, marking it safe to replace", logger.DEBUG) return True elif existing_file_status == PostProcessor.DOESNT_EXIST: if not ek.ek(os.path.isdir, ep_obj.show._location) and not sickbeard.CREATE_MISSING_SHOW_DIRS: self._log(u"File and Show location doesn't exist, marking it unsafe to replace", logger.DEBUG) return False else: self._log(u"File doesn't exist, marking it safe to replace", logger.DEBUG) return True else: self._log(u"Unknown file status for: " + ep_obj.location + "This should never happen, please log this as a bug.", logger.ERROR) return False # if there's an existing file with better quality if new_ep_quality < old_ep_quality and old_ep_quality != common.Quality.UNKNOWN: self._log(u"Episode already exists in database and processed episode has lower quality, marking it unsafe to replace", logger.DEBUG) return False 
self._log(u"None of the conditions were met, marking it unsafe to replace", logger.DEBUG) return False
def _has_fanart(self, show_obj):
    result = ek.ek(os.path.isfile, self.get_fanart_path(show_obj))
    logger.log(u"Checking if " + self.get_fanart_path(show_obj) + " exists: " + str(result), logger.DEBUG)
    return result
                       logger.ERROR)
            return False

        try:
            r = requests.get('http://torcache.net/torrent/' + torrent_hash + '.torrent', verify=False)
        except Exception, e:
            logger.log("Unable to connect to Torcache: " + ex(e), logger.ERROR)
            return False

        if not r.status_code == 200:
            return False

        magnetFileName = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
                               helpers.sanitizeFileName(result.name) + '.' + self.providerType)
        magnetFileContent = r.content

        try:
            with open(magnetFileName, 'wb') as fileOut:
                fileOut.write(magnetFileContent)

            helpers.chmodAsParent(magnetFileName)

        except EnvironmentError, e:
            logger.log("Unable to save the file: " + ex(e), logger.ERROR)
            return False

        logger.log(u"Saved magnet link to " + magnetFileName + " ", logger.MESSAGE)
def process(self): """ Post-process a given file """ self._log(u"Processing " + self.file_path + " (" + str(self.nzb_name) + ")") if ek.ek(os.path.isdir, self.file_path): self._log(u"File " + self.file_path + " seems to be a directory") return False # reset per-file stuff self.in_history = False # try to find the file info (tvdb_id, season, episodes, quality) = self._find_info() # if we don't have it then give up if not tvdb_id or season is None or not episodes: self._log(u"Not enough information to determine what episode this is", logger.DEBUG) self._log(u"Quitting post-processing", logger.DEBUG) return False # retrieve/create the corresponding TVEpisode objects ep_obj = self._get_ep_obj(tvdb_id, season, episodes) # get the quality of the episode we're processing if quality: self._log(u"Snatch history had a quality in it, using that: " + common.Quality.qualityStrings[quality], logger.DEBUG) new_ep_quality = quality else: new_ep_quality = self._get_quality(ep_obj) logger.log(u"Quality of the processing episode: " + str(new_ep_quality), logger.DEBUG) # see if it's safe to replace existing episode (is download snatched, PROPER, better quality) safe_replace = self._safe_replace(ep_obj, new_ep_quality) # if it's not safe to replace, stop here if not safe_replace: self._log(u"Quitting post-processing", logger.DEBUG) return False # if the file is safe to replace then we're going to replace it even if it exists else: self._log(u"This download is marked as safe to replace existing file", logger.DEBUG) # delete the existing file (and company) for cur_ep in [ep_obj] + ep_obj.relatedEps: try: self._delete(cur_ep.location, associated_files=True) # clean up any left over folders if cur_ep.location: helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location), keep_dir=ep_obj.show._location) except (OSError, IOError): raise exceptions.PostProcessingFailed(u"Unable to delete the existing files") # if the show directory doesn't exist then make it if allowed if not ek.ek(os.path.isdir, ep_obj.show._location) and sickbeard.CREATE_MISSING_SHOW_DIRS: self._log(u"Show directory doesn't exist, creating it", logger.DEBUG) try: ek.ek(os.mkdir, ep_obj.show._location) # do the library update for synoindex notifiers.synoindex_notifier.addFolder(ep_obj.show._location) except (OSError, IOError): raise exceptions.PostProcessingFailed(u"Unable to create the show directory: " + ep_obj.show._location) # get metadata for the show (but not episode because it hasn't been fully processed) ep_obj.show.writeMetadata(True) # update the ep info before we rename so the quality & release name go into the name properly for cur_ep in [ep_obj] + ep_obj.relatedEps: if self.release_name: self._log(u"Found release name " + self.release_name, logger.DEBUG) cur_ep.release_name = self.release_name else: cur_ep.release_name = "" cur_ep.status = common.Quality.compositeStatus(common.DOWNLOADED, new_ep_quality) # find the destination folder try: proper_path = ep_obj.proper_path() proper_absolute_path = ek.ek(os.path.join, ep_obj.show.location, proper_path) dest_path = ek.ek(os.path.dirname, proper_absolute_path) except exceptions.ShowDirNotFoundException: raise exceptions.PostProcessingFailed(u"Unable to post-process an episode if the show dir doesn't exist, quitting") self._log(u"Destination folder for this episode: " + dest_path, logger.DEBUG) # create any folders we need if not helpers.make_dirs(dest_path): raise exceptions.PostProcessingFailed(u"Unable to create destination folder: " + dest_path) # figure out the base name of the 
resulting episode file if sickbeard.RENAME_EPISODES: orig_extension = self.file_name.rpartition('.')[-1] new_base_name = ek.ek(os.path.basename, proper_path) new_file_name = new_base_name + '.' + orig_extension else: # if we're not renaming then there's no new base name, we'll just use the existing name new_base_name = None new_file_name = self.file_name try: # move the episode and associated files to the show dir if sickbeard.KEEP_PROCESSED_DIR: self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES) else: self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES) except (OSError, IOError): raise exceptions.PostProcessingFailed(u"Unable to move the files to destination folder: " + dest_path) # put the new location in the database for cur_ep in [ep_obj] + ep_obj.relatedEps: with cur_ep.lock: cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name) cur_ep.saveToDB() # log it to history history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group) # send notifiers download notification if not ep_obj.show.skip_notices: notifiers.notify_download(ep_obj.prettyName()) # generate nfo/tbn ep_obj.createMetaFiles() ep_obj.saveToDB() # send notifiers library update notifiers.update_library(ep_obj) self._run_extra_scripts(ep_obj) return True
def _remove_zoneinfo_failed(filename):
    try:
        ek.ek(os.remove, filename)
    except:
        pass
    try:
        fileList = [{'name': filename, 'path': ek.ek(os.path.join, path, filename)} for filename in ek.ek(os.listdir, path)]
    except OSError, e:
        logger.log(u"Unable to open " + path + ": " + repr(e) + " / " + str(e), logger.WARNING)
        fileList = [{'name': filename, 'path': ek.ek(os.path.join, parentPath, filename)} for filename in ek.ek(os.listdir, parentPath)]

    if not includeFiles:
        fileList = filter(lambda entry: ek.ek(os.path.isdir, entry['path']), fileList)

    # prune out directories to protect the user from doing stupid things (already lower case the dir to reduce calls)
    hideList = ["boot", "bootmgr", "cache", "msocache", "recovery", "$recycle.bin", "recycler",
                "system volume information", "temporary internet files"]  # windows specific
    hideList += [".fseventd", ".spotlight", ".trashes", ".vol", "cachedmessages", "caches", "trash"]  # osx specific
    fileList = filter(lambda entry: entry['name'].lower() not in hideList, fileList)

    fileList = sorted(
def processDir (dirName, nzbName=None, recurse=False): """ Scans through the files in dirName and processes whatever media files it finds dirName: The folder name to look in nzbName: The NZB name which resulted in this folder being downloaded recurse: Boolean for whether we should descend into subfolders or not """ returnStr = '' returnStr += logHelper(u"Processing folder "+dirName, logger.DEBUG) # if they passed us a real dir then assume it's the one we want if ek.ek(os.path.isdir, dirName): dirName = ek.ek(os.path.realpath, dirName) # if they've got a download dir configured then use it elif sickbeard.TV_DOWNLOAD_DIR and ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR) \ and ek.ek(os.path.normpath, dirName) != ek.ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR): dirName = ek.ek(os.path.join, sickbeard.TV_DOWNLOAD_DIR, ek.ek(os.path.abspath, dirName).split(os.path.sep)[-1]) returnStr += logHelper(u"Trying to use folder "+dirName, logger.DEBUG) # if we didn't find a real dir then quit if not ek.ek(os.path.isdir, dirName): returnStr += logHelper(u"Unable to figure out what folder to process. If your downloader and Sick Beard aren't on the same PC make sure you fill out your TV download dir in the config.", logger.DEBUG) return returnStr # TODO: check if it's failed and deal with it if it is if ek.ek(os.path.basename, dirName).startswith('_FAILED_'): returnStr += logHelper(u"The directory name indicates it failed to extract, cancelling", logger.DEBUG) return returnStr elif ek.ek(os.path.basename, dirName).startswith('_UNDERSIZED_'): returnStr += logHelper(u"The directory name indicates that it was previously rejected for being undersized, cancelling", logger.DEBUG) return returnStr elif ek.ek(os.path.basename, dirName).startswith('_UNPACK_'): returnStr += logHelper(u"The directory name indicates that this release is in the process of being unpacked, skipping", logger.DEBUG) return returnStr # make sure the dir isn't inside a show dir myDB = db.DBConnection() sqlResults = myDB.select("SELECT * FROM tv_shows") for sqlShow in sqlResults: if dirName.lower().startswith(ek.ek(os.path.realpath, sqlShow["location"]).lower()+os.sep) or dirName.lower() == ek.ek(os.path.realpath, sqlShow["location"]).lower(): returnStr += logHelper(u"You're trying to post process an episode that's already been moved to its show dir", logger.ERROR) return returnStr fileList = ek.ek(os.listdir, dirName) # split the list into video files and folders folders = filter(lambda x: ek.ek(os.path.isdir, ek.ek(os.path.join, dirName, x)), fileList) videoFiles = filter(helpers.isMediaFile, fileList) # recursively process all the folders for curFolder in folders: returnStr += logHelper(u"Recursively processing a folder: "+curFolder, logger.DEBUG) returnStr += processDir(ek.ek(os.path.join, dirName, curFolder), recurse=True) remainingFolders = filter(lambda x: ek.ek(os.path.isdir, ek.ek(os.path.join, dirName, x)), fileList) # If nzbName is set and there's more than one videofile in the folder, files will be lost (overwritten). 
if nzbName != None and len(videoFiles) >= 2: nzbName = None # process any files in the dir for cur_video_file_path in videoFiles: cur_video_file_path = ek.ek(os.path.join, dirName, cur_video_file_path) try: processor = postProcessor.PostProcessor(cur_video_file_path, nzbName) process_result = processor.process() process_fail_message = "" except exceptions.PostProcessingFailed, e: process_result = False process_fail_message = ex(e) returnStr += processor.log # as long as the postprocessing was successful delete the old folder unless the config wants us not to if process_result: if len(videoFiles) == 1 and not sickbeard.KEEP_PROCESSED_DIR and \ ek.ek(os.path.normpath, dirName) != ek.ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR) and \ ek.ek(os.path.normpath, dirName) != ek.ek(os.path.normpath, sickbeard.TORRENT_DOWNLOAD_DIR) and \ len(remainingFolders) == 0: returnStr += logHelper(u"Deleting folder " + dirName, logger.DEBUG) try: shutil.rmtree(dirName) except (OSError, IOError), e: returnStr += logHelper(u"Warning: unable to remove the folder " + dirName + ": " + ex(e), logger.WARNING) returnStr += logHelper(u"Processing succeeded for "+cur_video_file_path) if sickbeard.TV_DOWNLOAD_DIR !="": helpers.del_empty_dirs(sickbeard.TV_DOWNLOAD_DIR) if sickbeard.TORRENT_DOWNLOAD_DIR !="": helpers.del_empty_dirs(sickbeard.TORRENT_DOWNLOAD_DIR)
def _combined_file_operation(self, file_path, new_path, new_base_name, associated_files=False, action=None, subtitles=False): """ Performs a generic operation (move or copy) on a file. Can rename the file as well as change its location, and optionally move associated files too. file_path: The full path of the media file to act on new_path: Destination path where we want to move/copy the file to new_base_name: The base filename (no extension) to use during the copy. Use None to keep the same name. associated_files: Boolean, whether we should copy similarly-named files too action: function that takes an old path and new path and does an operation with them (move/copy) """ if not action: self._log( u"Must provide an action for the combined file operation", logger.ERROR) return file_list = [file_path] if associated_files: file_list = file_list + self._list_associated_files(file_path) elif subtitles: file_list = file_list + self._list_associated_files( file_path, True) if not file_list: self._log( u"There were no files associated with " + file_path + ", not moving anything", logger.DEBUG) return # create base name with file_path (media_file without .extension) old_base_name = file_path.rpartition('.')[0] old_base_name_length = len(old_base_name) # deal with all files for cur_file_path in file_list: cur_file_name = ek.ek(os.path.basename, cur_file_path) # get the extension without . cur_extension = cur_file_path[old_base_name_length + 1:] # check if file have subtitles language if os.path.splitext( cur_extension)[1][1:] in common.subtitleExtensions: cur_lang = os.path.splitext(cur_extension)[0] if cur_lang in sickbeard.SUBTITLES_LANGUAGES: cur_extension = cur_lang + os.path.splitext( cur_extension)[1] # replace .nfo with .nfo-orig to avoid conflicts if cur_extension == 'nfo': cur_extension = 'nfo-orig' # If new base name then convert name if new_base_name: new_file_name = new_base_name + '.' + cur_extension # if we're not renaming we still want to change extensions sometimes else: new_file_name = helpers.replaceExtension( cur_file_name, cur_extension) if sickbeard.SUBTITLES_DIR and cur_extension in common.subtitleExtensions: subs_new_path = ek.ek(os.path.join, new_path, sickbeard.SUBTITLES_DIR) dir_exists = helpers.makeDir(subs_new_path) if not dir_exists: logger.log( u"Unable to create subtitles folder " + subs_new_path, logger.ERROR) else: helpers.chmodAsParent(subs_new_path) new_file_path = ek.ek(os.path.join, subs_new_path, new_file_name) else: new_file_path = ek.ek(os.path.join, new_path, new_file_name) action(cur_file_path, new_file_path)
def _update_zoneinfo(): global sb_timezone sb_timezone = get_tz() # now check if the zoneinfo needs update url_zv = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/zoneinfo.txt' url_data = helpers.getURL(url_zv) if url_data is None: # When urlData is None, trouble connecting to github logger.log( u'Loading zoneinfo.txt failed, this can happen from time to time. Unable to get URL: %s' % url_zv, logger.WARNING) return zonefilename = zoneinfo.ZONEFILENAME cur_zoneinfo = zonefilename if None is not cur_zoneinfo: cur_zoneinfo = ek.ek(basename, zonefilename) zonefile = helpers.real_path( ek.ek(join, sickbeard.ZONEINFO_DIR, cur_zoneinfo)) zonemetadata = zoneinfo.gettz_db_metadata() if ek.ek( os.path.isfile, zonefile) else None (new_zoneinfo, zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ') newtz_regex = re.search(r'(\d{4}[^.]+)', new_zoneinfo) if not newtz_regex or len(newtz_regex.groups()) != 1: return newtzversion = newtz_regex.group(1) if cur_zoneinfo is not None and zonemetadata is not None and 'tzversion' in zonemetadata and zonemetadata[ 'tzversion'] == newtzversion: return # now load the new zoneinfo url_tar = u'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/%s' % new_zoneinfo zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile) if ek.ek(os.path.exists, zonefile_tmp): try: ek.ek(os.remove, zonefile_tmp) except: logger.log(u'Unable to delete: %s' % zonefile_tmp, logger.ERROR) return if not helpers.download_file(url_tar, zonefile_tmp): return if not ek.ek(os.path.exists, zonefile_tmp): logger.log(u'Download of %s failed.' % zonefile_tmp, logger.ERROR) return new_hash = str(helpers.md5_for_file(zonefile_tmp)) if zoneinfo_md5.upper() == new_hash.upper(): logger.log(u'Updating timezone info with new one: %s' % new_zoneinfo, logger.MESSAGE) try: # remove the old zoneinfo file if cur_zoneinfo is not None: old_file = helpers.real_path( ek.ek(join, sickbeard.ZONEINFO_DIR, cur_zoneinfo)) if ek.ek(os.path.exists, old_file): ek.ek(os.remove, old_file) # rename downloaded file ek.ek(os.rename, zonefile_tmp, zonefile) from dateutil.zoneinfo import gettz if '_CLASS_ZONE_INSTANCE' in gettz.func_globals: gettz.func_globals.__setitem__('_CLASS_ZONE_INSTANCE', list()) sb_timezone = get_tz() except: _remove_zoneinfo_failed(zonefile_tmp) return else: _remove_zoneinfo_failed(zonefile_tmp) logger.log( u'MD5 hash does not match: %s File: %s' % (zoneinfo_md5.upper(), new_hash.upper()), logger.ERROR) return
def downloadResult(self, result): """ Save the result to disk. """ # check for auth if not self._doLogin(): return False if self.providerType == GenericProvider.TORRENT: try: torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper() if len(torrent_hash) == 32: torrent_hash = b16encode(b32decode(torrent_hash)).lower() if not torrent_hash: logger.log( "Unable to extract torrent hash from link: " + ex(result.url), logger.ERROR) return False urls = [ 'http://torcache.net/torrent/' + torrent_hash + '.torrent', 'http://torrage.com/torrent/' + torrent_hash + '.torrent', 'http://zoink.it/torrent/' + torrent_hash + '.torrent', ] except: urls = [result.url] filename = ek.ek( os.path.join, sickbeard.TORRENT_DIR, helpers.sanitizeFileName(result.name) + '.' + self.providerType) elif self.providerType == GenericProvider.NZB: urls = [result.url] filename = ek.ek( os.path.join, sickbeard.NZB_DIR, helpers.sanitizeFileName(result.name) + '.' + self.providerType) else: return for url in urls: if helpers.download_file(url, filename, session=self.session): logger.log(u"Downloading a result from " + self.name + " at " + url) if self.providerType == GenericProvider.TORRENT: logger.log(u"Saved magnet link to " + filename, logger.INFO) else: logger.log(u"Saved result to " + filename, logger.INFO) if self._verify_download(filename): return True logger.log(u"Failed to download result", logger.WARNING) return False
def process(self):
    """
    Post-process a given file
    """

    self._log(u"Processing " + self.file_path + " (" + str(self.nzb_name) + ")")

    if ek.ek(os.path.isdir, self.file_path):
        self._log(u"File " + self.file_path + " seems to be a directory")
        return False

    for ignore_file in self.IGNORED_FILESTRINGS:
        if ignore_file in self.file_path:
            self._log(u"File " + self.file_path + " is ignored type, skipping")
            return False

    # reset per-file stuff
    self.in_history = False

    # try to find the file info
    (tvdb_id, season, episodes) = self._find_info()

    # if we don't have it then give up
    if not tvdb_id or season is None or not episodes:
        self._log(u"Can't find show id from TVDB or season or episode, skipping", logger.WARNING)
        return False

    # retrieve/create the corresponding TVEpisode objects
    ep_obj = self._get_ep_obj(tvdb_id, season, episodes)

    # find the destination folder
    try:
        proper_path = ep_obj.proper_path()
        proper_absolute_path = ek.ek(os.path.join, ep_obj.show.location, proper_path)
        dest_path = ek.ek(os.path.dirname, proper_absolute_path)
    except exceptions.ShowDirNotFoundException:
        raise exceptions.PostProcessingFailed(
            u"Unable to post-process an episode if the show dir doesn't exist, quitting")

    self._log(u"Destination folder for this episode: " + dest_path, logger.DEBUG)

    # create any folders we need
    helpers.make_dirs(dest_path)

    # get the quality of the episode we're processing
    new_ep_quality = self._get_quality(ep_obj)
    logger.log(u"Quality of the episode we're processing: " + str(new_ep_quality), logger.DEBUG)

    # see if this is a priority download (is it snatched, in history, or PROPER)
    priority_download = self._is_priority(ep_obj, new_ep_quality)
    self._log(u"Is ep a priority download: " + str(priority_download), logger.DEBUG)

    # set the status of the episodes
    for curEp in [ep_obj] + ep_obj.relatedEps:
        curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)

    # check for an existing file
    existing_file_status = self._checkForExistingFile(ep_obj.location)

    # if it's not priority then we don't want to replace smaller files in case it was a mistake
    if not priority_download:

        # if there's an existing file that we don't want to replace stop here
        if existing_file_status in (PostProcessor.EXISTS_LARGER, PostProcessor.EXISTS_SAME):
            self._log(u"File exists and we are not going to replace it because it's not smaller, "
                      u"quitting post-processing", logger.ERROR)
            return False

        elif existing_file_status == PostProcessor.EXISTS_SMALLER:
            self._log(u"File exists and is smaller than the new file so I'm going to replace it", logger.DEBUG)

        elif existing_file_status != PostProcessor.DOESNT_EXIST:
            self._log(u"Unknown existing file status. This should never happen, please log this as a bug.",
                      logger.ERROR)
            return False

    # if the file is priority then we're going to replace it even if it exists
    else:
        self._log(u"This download is marked a priority download so I'm going to replace "
                  u"an existing file if I find one", logger.DEBUG)

        # delete the existing file (and company)
        for cur_ep in [ep_obj] + ep_obj.relatedEps:
            try:
                self._delete(cur_ep.location, associated_files=True)
                # clean up any left over folders
                if cur_ep.location:
                    helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location),
                                                 keep_dir=ep_obj.show._location)
            except (OSError, IOError):
                raise exceptions.PostProcessingFailed("Unable to delete the existing files")

    # if the show directory doesn't exist then make it if allowed
    if not ek.ek(os.path.isdir, ep_obj.show._location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
        self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
        try:
            ek.ek(os.mkdir, ep_obj.show._location)
            # do the library update for synoindex
            notifiers.synoindex_notifier.addFolder(ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed("Unable to create the show directory: " + ep_obj.show._location)

        # get metadata for the show (but not episode because it hasn't been fully processed)
        ep_obj.show.writeMetadata(True)

    # update the ep info before we rename so the quality & release name go into the name properly
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:
            cur_release_name = None

            # use the best possible representation of the release name
            if self.good_results[self.NZB_NAME]:
                cur_release_name = self.nzb_name
                if cur_release_name.lower().endswith('.nzb'):
                    cur_release_name = cur_release_name.rpartition('.')[0]
            elif self.good_results[self.FOLDER_NAME]:
                cur_release_name = self.folder_name
            elif self.good_results[self.FILE_NAME]:
                cur_release_name = self.file_name
                # take the extension off the filename, it's not needed
                if '.' in self.file_name:
                    cur_release_name = self.file_name.rpartition('.')[0]

            if cur_release_name:
                self._log("Found release name " + cur_release_name, logger.DEBUG)
                cur_ep.release_name = cur_release_name
            else:
                logger.log(u"good results: " + repr(self.good_results), logger.DEBUG)

            cur_ep.status = common.Quality.compositeStatus(common.DOWNLOADED, new_ep_quality)

            cur_ep.subtitles = []
            cur_ep.subtitles_searchcount = 0
            cur_ep.subtitles_lastsearch = '0001-01-01 00:00:00'
            cur_ep.is_proper = self.is_proper

            cur_ep.saveToDB()

    # figure out the base name of the resulting episode file
    if sickbeard.RENAME_EPISODES:
        orig_extension = self.file_name.rpartition('.')[-1]
        new_base_name = ek.ek(os.path.basename, proper_path)
        new_file_name = new_base_name + '.' + orig_extension
    else:
        # if we're not renaming then there's no new base name, we'll just use the existing name
        new_base_name = None
        new_file_name = self.file_name

    try:
        # move the episode and associated files to the show dir
        if self.process_method == "copy":
            self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES,
                       sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
        elif self.process_method == "move":
            self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES,
                       sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
        elif self.process_method == "hardlink":
            self._hardlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES,
                           sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
        elif self.process_method == "symlink":
            self._moveAndSymlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES,
                                 sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
        else:
            logger.log(u"Unknown process method: " + sickbeard.PROCESS_METHOD, logger.ERROR)
            raise exceptions.PostProcessingFailed("Unable to move the files to their new home")
    except (OSError, IOError):
        raise exceptions.PostProcessingFailed("Unable to move the files to their new home")

    # download subtitles
    if sickbeard.USE_SUBTITLES and ep_obj.show.subtitles:
        for cur_ep in [ep_obj] + ep_obj.relatedEps:
            with cur_ep.lock:
                cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
                cur_ep.downloadSubtitles(force=True)

    # put the new location in the database
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:
            cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
            cur_ep.saveToDB()

    # log it to history
    history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group)

    # send notifications
    notifiers.notify_download(ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

    # generate nfo/tbn
    ep_obj.createMetaFiles()
    ep_obj.saveToDB()

    # do the library update for XBMC
    notifiers.xbmc_notifier.update_library(ep_obj.show.name)

    # do the library update for Plex
    notifiers.plex_notifier.update_library()

    # do the library update for NMJ
    # nmj_notifier kicks off its library update when the notify_download is issued (inside notifiers)

    # do the library update for Synology Indexer
    notifiers.synoindex_notifier.addFile(ep_obj.location)

    # do the library update for pyTivo
    notifiers.pytivo_notifier.update_library(ep_obj)

    # do the library update for Trakt
    notifiers.trakt_notifier.update_library(ep_obj)

    self._run_extra_scripts(ep_obj)

    return True
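# --- Illustrative sketch (not part of the original module) -----------------------
# A stand-alone restatement of the renaming step in process() above, with
# hypothetical names: when renaming is enabled the new file name is the base name
# of the show-relative "proper path" plus the original extension, otherwise the
# original file name is kept unchanged.
import os


def build_new_file_name(original_file_name, proper_path, rename_enabled):
    """Mirror of the RENAME_EPISODES branch above, written as a pure function."""
    if not rename_enabled:
        return original_file_name
    orig_extension = original_file_name.rpartition('.')[-1]
    new_base_name = os.path.basename(proper_path)
    return new_base_name + '.' + orig_extension

# e.g. build_new_file_name('some.release.720p.mkv',
#                          'Show Name/Season 01/Show Name - S01E02 - Title', True)
# -> 'Show Name - S01E02 - Title.mkv'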
def search_providers(show, episodes, manual_search=False, torrent_only=False, try_other_searches=False,
                     old_status=None):
    found_results = {}
    final_results = []

    search_done = False

    orig_thread_name = threading.currentThread().name

    use_quality_list = None
    if any([episodes]):
        old_status = old_status or failed_history.find_old_status(episodes[0]) or episodes[0].status
        if old_status:
            status, quality = Quality.splitCompositeStatus(old_status)
            use_quality_list = (status not in (
                common.WANTED, common.FAILED, common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN))

    provider_list = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog and
                     (not torrent_only or x.providerType == GenericProvider.TORRENT)]
    for cur_provider in provider_list:
        if cur_provider.anime_only and not show.is_anime:
            logger.log(u'%s is not an anime, skipping' % show.name, logger.DEBUG)
            continue

        threading.currentThread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)
        provider_id = cur_provider.get_id()

        found_results[provider_id] = {}

        search_count = 0
        search_mode = cur_provider.search_mode

        # search in the provider's preferred mode first, optionally falling back to the other mode
        while True:
            search_count += 1

            if 'eponly' == search_mode:
                logger.log(u'Performing episode search for %s' % show.name)
            else:
                logger.log(u'Performing season pack search for %s' % show.name)

            try:
                cur_provider.cache._clearCache()
                search_results = cur_provider.find_search_results(show, episodes, search_mode, manual_search,
                                                                  try_other_searches=try_other_searches)
                if any(search_results):
                    logger.log(', '.join(['%s %s candidate%s' % (
                        len(v), (('multiep', 'season')[SEASON_RESULT == k], 'episode')['ep' in search_mode],
                        helpers.maybe_plural(len(v))) for (k, v) in search_results.iteritems()]))
            except exceptions.AuthException as e:
                logger.log(u'Authentication error: %s' % ex(e), logger.ERROR)
                break
            except Exception as e:
                logger.log(u'Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)), logger.ERROR)
                logger.log(traceback.format_exc(), logger.DEBUG)
                break
            finally:
                threading.currentThread().name = orig_thread_name

            search_done = True

            if len(search_results):
                # make a list of all the results for this provider
                for cur_ep in search_results:
                    # skip non-tv crap
                    search_results[cur_ep] = filter(
                        lambda ep_item: show_name_helpers.pass_wordlist_checks(
                            ep_item.name, parse=False) and ep_item.show == show, search_results[cur_ep])

                    if cur_ep in found_results[provider_id]:
                        found_results[provider_id][cur_ep] += search_results[cur_ep]
                    else:
                        found_results[provider_id][cur_ep] = search_results[cur_ep]

                break
            elif not cur_provider.search_fallback or search_count == 2:
                break

            search_mode = '%sonly' % ('ep', 'sp')['ep' in search_mode]
            logger.log(u'Falling back to %s search ...' % ('season pack', 'episode')['ep' in search_mode])

        # skip to next provider if we have no results to process
        if not len(found_results[provider_id]):
            continue

        any_qualities, best_qualities = Quality.splitQuality(show.quality)

        # pick the best season NZB
        best_season_result = None
        if SEASON_RESULT in found_results[provider_id]:
            best_season_result = pick_best_result(found_results[provider_id][SEASON_RESULT], show,
                                                  any_qualities + best_qualities)

        highest_quality_overall = 0
        for cur_episode in found_results[provider_id]:
            for cur_result in found_results[provider_id][cur_episode]:
                if Quality.UNKNOWN != cur_result.quality and highest_quality_overall < cur_result.quality:
                    highest_quality_overall = cur_result.quality
        logger.log(u'%s is the highest quality of any match' % Quality.qualityStrings[highest_quality_overall],
                   logger.DEBUG)

        # see if every episode is wanted
        if best_season_result:
            # get the quality of the season nzb
            season_qual = best_season_result.quality
            logger.log(u'%s is the quality of the season %s' % (
                Quality.qualityStrings[season_qual], best_season_result.provider.providerType), logger.DEBUG)

            my_db = db.DBConnection()
            sql = 'SELECT episode FROM tv_episodes WHERE showid = %s AND (season IN (%s))' % \
                  (show.indexerid, ','.join([str(x.season) for x in episodes]))
            ep_nums = [int(x['episode']) for x in my_db.select(sql)]

            logger.log(u'Executed query: [%s]' % sql)
            logger.log(u'Episode list: %s' % ep_nums, logger.DEBUG)

            all_wanted = True
            any_wanted = False
            for ep_num in ep_nums:
                for season in set([x.season for x in episodes]):
                    if not show.wantEpisode(season, ep_num, season_qual):
                        all_wanted = False
                    else:
                        any_wanted = True

            # if we need every ep in the season and there's nothing better then just download this and
            # be done with it (unless single episodes are preferred)
            if all_wanted and highest_quality_overall == best_season_result.quality:
                logger.log(u'Every episode in this season is needed, downloading the whole %s %s' % (
                    best_season_result.provider.providerType, best_season_result.name))
                ep_objs = []
                for ep_num in ep_nums:
                    for season in set([x.season for x in episodes]):
                        ep_objs.append(show.getEpisode(season, ep_num))
                best_season_result.episodes = ep_objs

                return [best_season_result]

            elif not any_wanted:
                logger.log(u'No episodes from this season are wanted at this quality, ignoring the result of ' +
                           best_season_result.name, logger.DEBUG)

            else:
                if GenericProvider.NZB == best_season_result.provider.providerType:
                    logger.log(u'Breaking apart the NZB and adding the individual ones to our results', logger.DEBUG)

                    # if not, break it apart and add them as the lowest priority results
                    individual_results = nzbSplitter.splitResult(best_season_result)

                    individual_results = filter(
                        lambda r: show_name_helpers.pass_wordlist_checks(
                            r.name, parse=False) and r.show == show, individual_results)

                    for cur_result in individual_results:
                        if 1 == len(cur_result.episodes):
                            ep_num = cur_result.episodes[0].episode
                        elif 1 < len(cur_result.episodes):
                            ep_num = MULTI_EP_RESULT

                        if ep_num in found_results[provider_id]:
                            found_results[provider_id][ep_num].append(cur_result)
                        else:
                            found_results[provider_id][ep_num] = [cur_result]

                # If this is a torrent all we can do is leech the entire torrent,
                # user will have to select which eps not do download in his torrent client
                else:
                    # Season result from Torrent Provider must be a full-season torrent,
                    # creating multi-ep result for it
                    logger.log(u'Adding multi episode result for full season torrent. In your torrent client, set ' +
                               u'the episodes that you do not want to "don\'t download"')
                    ep_objs = []
                    for ep_num in ep_nums:
                        for season in set([x.season for x in episodes]):
                            ep_objs.append(show.getEpisode(season, ep_num))
                    best_season_result.episodes = ep_objs

                    ep_num = MULTI_EP_RESULT
                    if ep_num in found_results[provider_id]:
                        found_results[provider_id][ep_num].append(best_season_result)
                    else:
                        found_results[provider_id][ep_num] = [best_season_result]

        # go through multi-ep results and see if we really want them or not, get rid of the rest
        multi_results = {}
        if MULTI_EP_RESULT in found_results[provider_id]:
            for multi_result in found_results[provider_id][MULTI_EP_RESULT]:

                logger.log(u'Checking usefulness of multi episode result [%s]' % multi_result.name, logger.DEBUG)

                if sickbeard.USE_FAILED_DOWNLOADS and failed_history.has_failed(
                        multi_result.name, multi_result.size, multi_result.provider.name):
                    logger.log(u'Rejecting previously failed multi episode result [%s]' % multi_result.name)
                    continue

                # see how many of the eps that this result covers aren't covered by single results
                needed_eps = []
                not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    # if we have results for the episode
                    if ep_num in found_results[provider_id] and 0 < len(found_results[provider_id][ep_num]):
                        needed_eps.append(ep_num)
                    else:
                        not_needed_eps.append(ep_num)

                logger.log(u'Single episode check result is... needed episodes: %s, not needed episodes: %s' % (
                    needed_eps, not_needed_eps), logger.DEBUG)

                if not not_needed_eps:
                    logger.log(u'All of these episodes were covered by single episode results, ' +
                               'ignoring this multi episode result', logger.DEBUG)
                    continue

                # check if these eps are already covered by another multi-result
                multi_needed_eps = []
                multi_not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in multi_results:
                        multi_not_needed_eps.append(ep_num)
                    else:
                        multi_needed_eps.append(ep_num)

                logger.log(u'Multi episode check result is... multi needed episodes: %s, '
                           u'multi not needed episodes: %s' % (multi_needed_eps, multi_not_needed_eps), logger.DEBUG)

                if not multi_needed_eps:
                    logger.log(u'All of these episodes were covered by another multi episode nzb, ' +
                               'ignoring this multi episode result', logger.DEBUG)
                    continue

                # if we're keeping this multi-result then remember it
                for ep_obj in multi_result.episodes:
                    multi_results[ep_obj.episode] = multi_result

                # don't bother with the single result if we're going to get it with a multi result
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in found_results[provider_id]:
                        logger.log(u'A needed multi episode result overlaps with a single episode result for episode ' +
                                   '#%s, removing the single episode results from the list' % ep_num, logger.DEBUG)
                        del found_results[provider_id][ep_num]

        # of all the single ep results narrow it down to the best one for each episode
        final_results += set(multi_results.values())

        quality_list = use_quality_list and (None, best_qualities)[any(best_qualities)] or None
        for cur_ep in found_results[provider_id]:
            if cur_ep in (MULTI_EP_RESULT, SEASON_RESULT):
                continue

            if 0 == len(found_results[provider_id][cur_ep]):
                continue

            best_result = pick_best_result(found_results[provider_id][cur_ep], show, quality_list)

            # if all results were rejected move on to the next episode
            if not best_result:
                continue

            # filter out possible bad torrents from providers
            if 'torrent' == best_result.resultType:
                if best_result.url.startswith('magnet'):
                    if 'blackhole' != sickbeard.TORRENT_METHOD:
                        best_result.content = None
                else:
                    cache_file = ek.ek(os.path.join, sickbeard.CACHE_DIR or helpers._getTempDir(),
                                       '%s.torrent' % (helpers.sanitizeFileName(best_result.name)))
                    if not helpers.download_file(best_result.url, cache_file, session=best_result.provider.session):
                        continue

                    try:
                        with open(cache_file, 'rb') as fh:
                            td = fh.read()
                        setattr(best_result, 'cache_file', cache_file)
                    except (StandardError, Exception):
                        continue

                    if getattr(best_result.provider, 'chk_td', None):
                        name = None
                        try:
                            # walk the bencoded data looking for the length-prefixed 'name' value
                            hdr = re.findall('(\w+(\d+):)', td[0:6])[0]
                            x, v = len(hdr[0]), int(hdr[1])
                            while x < len(td):
                                y = x + v
                                name = 'name' == td[x:y]
                                w = re.findall('((?:i-?\d+e|e+|d|l+)*(\d+):)', td[y:y + 32])[0]
                                x, v = y + len(w[0]), int(w[1])
                                if name:
                                    name = td[x:x + v]
                                    break
                        except (StandardError, Exception):
                            continue
                        if name:
                            if not pass_show_wordlist_checks(name, show):
                                continue
                            if not show_name_helpers.pass_wordlist_checks(name):
                                logger.log('Ignored: %s (debug log has detail)' % name)
                                continue
                            best_result.name = name

                    if 'blackhole' != sickbeard.TORRENT_METHOD:
                        best_result.content = td

            # add result if it's not a duplicate
            found = False
            for i, result in enumerate(final_results):
                for best_result_ep in best_result.episodes:
                    if best_result_ep in result.episodes:
                        if best_result.quality > result.quality:
                            final_results.pop(i)
                        else:
                            found = True
            if not found:
                final_results += [best_result]

        # check that we got all the episodes we wanted first before doing a match and snatch
        wanted_ep_count = 0
        for wanted_ep in episodes:
            for result in final_results:
                if wanted_ep in result.episodes and is_final_result(result):
                    wanted_ep_count += 1

        # make sure we search every provider for results unless we found everything we wanted
        if len(episodes) == wanted_ep_count:
            break

    if not len(provider_list):
        logger.log('No NZB/Torrent sources enabled in Media Provider options to do backlog searches', logger.WARNING)
    elif not search_done:
        logger.log('Failed backlog search of %s enabled provider%s. More info in debug log.' % (
            len(provider_list), helpers.maybe_plural(len(provider_list))), logger.ERROR)
    elif not any(final_results):
        logger.log('No suitable candidates')

    return final_results
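# --- Illustrative sketch (not part of the original module) -----------------------
# The chk_td block in search_providers() walks the raw bencoded torrent data by
# hand to recover the release name. A simpler best-effort version of that idea is
# sketched below as a hypothetical stdlib-only helper: find the first 'name' key in
# the bencoded data and read its length-prefixed string value. This can be fooled
# if the byte sequence '4:name' happens to occur inside another string, which is
# why it is only a sketch and not a full bencode parser.
import re


def torrent_display_name(bencoded_data):
    """Best-effort: return the value of the first 'name' key in bencoded data, or None."""
    match = re.search(r'4:name(\d+):', bencoded_data)
    if not match:
        return None
    length = int(match.group(1))
    start = match.end()
    value = bencoded_data[start:start + length]
    return value if len(value) == length else None

# e.g. torrent_display_name('d4:infod4:name11:Show.S01E0112:piece lengthi262144eee')
# -> 'Show.S01E01'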
def update(self):
    zip_download_url = self._find_newest_version(True)
    logger.log(u"new_link: " + repr(zip_download_url), logger.DEBUG)

    if not zip_download_url:
        logger.log(u"Unable to find a new version link on google code, not updating")
        return False

    try:
        # prepare the update dir
        sr_update_dir = ek.ek(os.path.join, sickbeard.PROG_DIR, u'sr-update')

        if os.path.isdir(sr_update_dir):
            logger.log(u"Clearing out update folder " + sr_update_dir + " before extracting")
            shutil.rmtree(sr_update_dir)

        logger.log(u"Creating update folder " + sr_update_dir + " before extracting")
        os.makedirs(sr_update_dir)

        # retrieve file
        logger.log(u"Downloading update from " + zip_download_url)
        zip_download_path = os.path.join(sr_update_dir, u'sr-update.zip')
        urllib.urlretrieve(zip_download_url, zip_download_path)

        if not ek.ek(os.path.isfile, zip_download_path):
            logger.log(u"Unable to retrieve new version from " + zip_download_url + ", can't update", logger.ERROR)
            return False

        if not ek.ek(zipfile.is_zipfile, zip_download_path):
            logger.log(u"Retrieved version from " + zip_download_url + " is corrupt, can't update", logger.ERROR)
            return False

        # extract to sr-update dir
        logger.log(u"Unzipping from " + str(zip_download_path) + " to " + sr_update_dir)
        update_zip = zipfile.ZipFile(zip_download_path, 'r')
        update_zip.extractall(sr_update_dir)
        update_zip.close()

        # delete the zip
        logger.log(u"Deleting zip file from " + str(zip_download_path))
        os.remove(zip_download_path)

        # find update dir name
        update_dir_contents = [x for x in os.listdir(sr_update_dir)
                               if os.path.isdir(os.path.join(sr_update_dir, x))]
        if len(update_dir_contents) != 1:
            logger.log(u"Invalid update data, update failed. Maybe try deleting your sr-update folder?",
                       logger.ERROR)
            return False

        content_dir = os.path.join(sr_update_dir, update_dir_contents[0])
        old_update_path = os.path.join(content_dir, u'updater.exe')
        new_update_path = os.path.join(sickbeard.PROG_DIR, u'updater.exe')
        logger.log(u"Copying new updater.exe file from " + old_update_path + " to " + new_update_path)
        shutil.move(old_update_path, new_update_path)

        # Notify update successful
        notifiers.notify_git_update(sickbeard.NEWEST_VERSION_STRING)

    except Exception, e:
        logger.log(u"Error while trying to update: " + ex(e), logger.ERROR)
        return False

    return True
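# --- Illustrative sketch (not part of the original module) -----------------------
# update() above only checks zipfile.is_zipfile() before extracting. A slightly
# stricter pre-extraction check (hypothetical helper) could also run the archive's
# CRC test, at the cost of reading every member once:
import zipfile


def zip_looks_valid(zip_path):
    """Return True if zip_path is a zip archive and every member passes its CRC check."""
    if not zipfile.is_zipfile(zip_path):
        return False
    try:
        archive = zipfile.ZipFile(zip_path, 'r')
        try:
            # testzip() returns the name of the first corrupt member, or None if all are OK
            return archive.testzip() is None
        finally:
            archive.close()
    except (zipfile.BadZipfile, IOError):
        return False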