def _delete(self, file_path, associated_files=False):
    """
    Deletes the file and optionally all associated files.

    file_path: The file to delete
    associated_files: True to delete all files which differ only by extension, False to leave them
    """

    if not file_path:
        return

    # figure out which files we want to delete
    if associated_files:
        file_list = self._list_associated_files(file_path)
    else:
        file_list = [file_path]

    if not file_list:
        self._log(u"There were no files associated with " + file_path + ", not deleting anything", logger.DEBUG)
        return

    # delete the file and any other files which we want to delete
    for cur_file in file_list:
        self._log(u"Deleting file " + cur_file, logger.DEBUG)
        if ek.ek(os.path.isfile, cur_file):
            ek.ek(os.remove, cur_file)
            # do the library update for synoindex
            notifiers.synoindex_notifier.deleteFile(cur_file)

    # clean up any left over folders
    helpers.delete_empty_folders(ek.ek(os.path.dirname, file_path))
def _delete(self, file_path, associated_files=False):
    """
    Deletes the file and optionally all associated files.

    file_path: The file to delete
    associated_files: True to delete all files which differ only by extension, False to leave them
    """

    if not file_path:
        return

    # figure out which files we want to delete
    file_list = [file_path]
    if associated_files:
        file_list = file_list + self._list_associated_files(file_path)

    if not file_list:
        self._log(u"There were no files associated with " + file_path + ", not deleting anything", logger.DEBUG)
        return

    # delete the file and any other files which we want to delete
    for cur_file in file_list:
        self._log(u"Deleting file " + cur_file, logger.DEBUG)
        if ek.ek(os.path.isfile, cur_file):
            ek.ek(os.remove, cur_file)
            # do the library update for synoindex
            notifiers.synoindex_notifier.deleteFile(cur_file)

    # clean up any left over folders
    helpers.delete_empty_folders(ek.ek(os.path.dirname, file_path))
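# --- Illustrative helper (not part of the original code above) ---------------
# Both _delete() variants depend on self._list_associated_files(), which is not
# shown in this section, and they route filesystem calls through ek.ek() for
# encoding safety. As a rough standalone sketch of the same idea using only the
# standard library (the glob-based stem matching below is an assumption, not
# the project's actual helper):
import glob
import os


def delete_with_associated(file_path, associated_files=False):
    """Delete file_path and, optionally, siblings that differ only by extension."""
    if not file_path:
        return

    if associated_files:
        # naive stem match: "show.s01e01.*" also picks up .nfo/.srt/.tbn companions
        file_list = glob.glob(os.path.splitext(file_path)[0] + '.*')
    else:
        file_list = [file_path]

    for cur_file in file_list:
        if os.path.isfile(cur_file):
            os.remove(cur_file)

    # prune the parent directory if deleting left it empty
    parent = os.path.dirname(file_path)
    if os.path.isdir(parent) and not os.listdir(parent):
        os.rmdir(parent)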
def process(self):
    """
    Post-process a given file
    """

    self._log(u"Processing " + self.file_path + " (" + str(self.nzb_name) + ")")

    if ek.ek(os.path.isdir, self.file_path):
        self._log(u"File " + self.file_path + " seems to be a directory")
        return False

    # reset per-file stuff
    self.in_history = False

    # try to find the file info
    (tvdb_id, season, episodes, quality) = self._find_info()

    # if we don't have it then give up
    if not tvdb_id or season is None or not episodes:
        self._log(u"Not enough information to determine what episode this is", logger.DEBUG)
        self._log(u"Quitting post-processing", logger.DEBUG)
        return False

    # retrieve/create the corresponding TVEpisode objects
    ep_obj = self._get_ep_obj(tvdb_id, season, episodes)

    # get the quality of the episode we're processing
    if quality:
        self._log(u"Snatch history had a quality in it, using that: " + common.Quality.qualityStrings[quality], logger.DEBUG)
        new_ep_quality = quality
    else:
        new_ep_quality = self._get_quality(ep_obj)

    logger.log(u"Quality of the processing episode: " + str(new_ep_quality), logger.DEBUG)

    # see if it's safe to replace existing episode (is download snatched, PROPER, better quality)
    safe_replace = self._safe_replace(ep_obj, new_ep_quality)

    # if it's not safe to replace, stop here
    if not safe_replace:
        self._log(u"Quitting post-processing", logger.DEBUG)
        return False

    # if the file is safe to replace then we're going to replace it even if it exists
    else:
        self._log(u"This download is marked as safe to replace existing file", logger.DEBUG)

    # delete the existing file (and company)
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        try:
            self._delete(cur_ep.location, associated_files=True)

            # clean up any left over folders
            if cur_ep.location:
                helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location), keep_dir=ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed(u"Unable to delete the existing files")

    # if the show directory doesn't exist then make it if allowed
    if not ek.ek(os.path.isdir, ep_obj.show._location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
        self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
        try:
            ek.ek(os.mkdir, ep_obj.show._location)

            # do the library update for synoindex
            notifiers.synoindex_notifier.addFolder(ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed(u"Unable to create the show directory: " + ep_obj.show._location)

    # get metadata for the show (but not episode because it hasn't been fully processed)
    ep_obj.show.writeMetadata(True)

    # update the ep info before we rename so the quality & release name go into the name properly
    for cur_ep in [ep_obj] + ep_obj.relatedEps:

        if self.release_name:
            self._log(u"Found release name " + self.release_name, logger.DEBUG)
            cur_ep.release_name = self.release_name
        else:
            cur_ep.release_name = ""

        cur_ep.status = common.Quality.compositeStatus(common.DOWNLOADED, new_ep_quality)

    # find the destination folder
    try:
        proper_path = ep_obj.proper_path()
        proper_absolute_path = ek.ek(os.path.join, ep_obj.show.location, proper_path)
        dest_path = ek.ek(os.path.dirname, proper_absolute_path)
    except exceptions.ShowDirNotFoundException:
        raise exceptions.PostProcessingFailed(u"Unable to post-process an episode if the show dir doesn't exist, quitting")

    self._log(u"Destination folder for this episode: " + dest_path, logger.DEBUG)

    # create any folders we need
    if not helpers.make_dirs(dest_path):
        raise exceptions.PostProcessingFailed(u"Unable to create destination folder: " + dest_path)

    # figure out the base name of the resulting episode file
    if sickbeard.RENAME_EPISODES:
        orig_extension = self.file_name.rpartition('.')[-1]
        new_base_name = ek.ek(os.path.basename, proper_path)
        new_file_name = new_base_name + '.' + orig_extension
    else:
        # if we're not renaming then there's no new base name, we'll just use the existing name
        new_base_name = None
        new_file_name = self.file_name

    try:
        # move the episode and associated files to the show dir
        if sickbeard.KEEP_PROCESSED_DIR:
            self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
        else:
            self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
    except (OSError, IOError):
        raise exceptions.PostProcessingFailed(u"Unable to move the files to destination folder: " + dest_path)

    # put the new location in the database
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:
            cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
            cur_ep.saveToDB()

    # log it to history
    history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group)

    # send notifiers download notification
    if not ep_obj.show.skip_notices:
        notifiers.notify_download(ep_obj.prettyName())

    # generate nfo/tbn
    ep_obj.createMetaFiles()
    ep_obj.saveToDB()

    # send notifiers library update
    notifiers.update_library(ep_obj)

    self._run_extra_scripts(ep_obj)

    return True
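# --- Illustrative helper (not part of the original code above) ---------------
# The RENAME_EPISODES branch above derives the final filename from the basename
# of proper_path plus the extension of the incoming file. A minimal standalone
# sketch of just that step (function name and example values are illustrative):
import os


def build_renamed_filename(proper_path, original_file_name):
    orig_extension = original_file_name.rpartition('.')[-1]
    new_base_name = os.path.basename(proper_path)
    return new_base_name + '.' + orig_extension


# build_renamed_filename('Show Name/Season 01/Show Name - S01E01 - Title',
#                        'show.name.s01e01.720p.mkv')
# -> 'Show Name - S01E01 - Title.mkv'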
def process(self):
    """
    Post-process a given file
    """

    self._log(u"Processing " + self.file_path + " (" + str(self.nzb_name) + ")")

    if os.path.isdir(self.file_path):
        self._log(u"File " + self.file_path + " seems to be a directory")
        return False

    for ignore_file in self.IGNORED_FILESTRINGS:
        if ignore_file in self.file_path:
            self._log(u"File " + self.file_path + " is ignored type, skipping")
            return False

    # reset per-file stuff
    self.in_history = False

    # try to find the file info
    (tvdb_id, season, episodes) = self._find_info()

    # if we don't have it then give up
    if not tvdb_id or season == None or not episodes:
        return False

    # retrieve/create the corresponding TVEpisode objects
    ep_obj = self._get_ep_obj(tvdb_id, season, episodes)

    # get the quality of the episode we're processing
    new_ep_quality = self._get_quality(ep_obj)
    logger.log(u"Quality of the episode we're processing: " + str(new_ep_quality), logger.DEBUG)

    # see if this is a priority download (is it snatched, in history, or PROPER)
    priority_download = self._is_priority(ep_obj, new_ep_quality)
    self._log(u"Is ep a priority download: " + str(priority_download), logger.DEBUG)

    # set the status of the episodes
    for curEp in [ep_obj] + ep_obj.relatedEps:
        curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)

    # check for an existing file
    existing_file_status = self._checkForExistingFile(ep_obj.location)

    # if it's not priority then we don't want to replace smaller files in case it was a mistake
    if not priority_download:

        # if there's an existing file that we don't want to replace stop here
        if existing_file_status in (PostProcessor.EXISTS_LARGER, PostProcessor.EXISTS_SAME):
            self._log(u"File exists and we are not going to replace it because it's not smaller, quitting post-processing", logger.DEBUG)
            return False
        elif existing_file_status == PostProcessor.EXISTS_SMALLER:
            self._log(u"File exists and is smaller than the new file so I'm going to replace it", logger.DEBUG)
        elif existing_file_status != PostProcessor.DOESNT_EXIST:
            self._log(u"Unknown existing file status. This should never happen, please log this as a bug.", logger.ERROR)
            return False

    # if the file is priority then we're going to replace it even if it exists
    else:
        self._log(u"This download is marked a priority download so I'm going to replace an existing file if I find one", logger.DEBUG)

    # delete the existing file (and company)
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        try:
            self._delete(cur_ep.location, associated_files=True)

            # clean up any left over folders
            if cur_ep.location:
                helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location), keep_dir=ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed("Unable to delete the existing files")

    # if the show directory doesn't exist then make it if allowed
    if not ek.ek(os.path.isdir, ep_obj.show._location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
        self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
        try:
            ek.ek(os.mkdir, ep_obj.show._location)

            # do the library update for synoindex
            notifiers.synoindex_notifier.addFolder(ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed("Unable to create the show directory: " + ep_obj.show._location)

    # get metadata for the show (but not episode because it hasn't been fully processed)
    ep_obj.show.writeMetadata(True)

    # update the ep info before we rename so the quality & release name go into the name properly
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:
            cur_release_name = None

            # use the best possible representation of the release name
            if self.good_results[self.NZB_NAME]:
                cur_release_name = self.nzb_name
                if cur_release_name.lower().endswith('.nzb'):
                    cur_release_name = cur_release_name.rpartition('.')[0]
            elif self.good_results[self.FOLDER_NAME]:
                cur_release_name = self.folder_name
            elif self.good_results[self.FILE_NAME]:
                cur_release_name = self.file_name
                # take the extension off the filename, it's not needed
                if '.' in self.file_name:
                    cur_release_name = self.file_name.rpartition('.')[0]

            if cur_release_name:
                self._log("Found release name " + cur_release_name, logger.DEBUG)
                cur_ep.release_name = cur_release_name
            else:
                logger.log("good results: " + repr(self.good_results), logger.DEBUG)

            cur_ep.status = common.Quality.compositeStatus(common.DOWNLOADED, new_ep_quality)
            cur_ep.saveToDB()

    # find the destination folder
    try:
        proper_path = ep_obj.proper_path()
        proper_absolute_path = ek.ek(os.path.join, ep_obj.show.location, proper_path)
        dest_path = ek.ek(os.path.dirname, proper_absolute_path)
    except exceptions.ShowDirNotFoundException:
        raise exceptions.PostProcessingFailed(u"Unable to post-process an episode if the show dir doesn't exist, quitting")

    self._log(u"Destination folder for this episode: " + dest_path, logger.DEBUG)

    # create any folders we need
    helpers.make_dirs(dest_path)

    # figure out the base name of the resulting episode file
    if sickbeard.RENAME_EPISODES:
        orig_extension = self.file_name.rpartition('.')[-1]
        new_base_name = ek.ek(os.path.basename, proper_path)
        new_file_name = new_base_name + '.' + orig_extension
    else:
        # if we're not renaming then there's no new base name, we'll just use the existing name
        new_base_name = None
        new_file_name = self.file_name

    try:
        # move the episode and associated files to the show dir
        if sickbeard.KEEP_PROCESSED_DIR:
            self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
        else:
            self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
    except (OSError, IOError):
        raise exceptions.PostProcessingFailed("Unable to move the files to their new home")

    # put the new location in the database
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:
            cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
            cur_ep.saveToDB()

    # log it to history
    history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group)

    # download subtitles
    if sickbeard.USE_SUBTITLES and ep_obj.show.subtitles:
        cur_ep.downloadSubtitles()

    # send notifications
    notifiers.notify_download(ep_obj.prettyName())

    # generate nfo/tbn
    ep_obj.createMetaFiles()
    ep_obj.saveToDB()

    # do the library update for XBMC
    notifiers.xbmc_notifier.update_library(ep_obj.show.name)

    # do the library update for Plex
    notifiers.plex_notifier.update_library()

    # do the library update for NMJ
    # nmj_notifier kicks off its library update when the notify_download is issued (inside notifiers)

    # do the library update for Synology Indexer
    notifiers.synoindex_notifier.addFile(ep_obj.location)

    # do the library update for pyTivo
    notifiers.pytivo_notifier.update_library(ep_obj)

    # do the library update for Trakt
    notifiers.trakt_notifier.update_library(ep_obj)

    self._run_extra_scripts(ep_obj)

    return True
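# --- Illustrative helper (not part of the original code above) ---------------
# The release-name selection inside the loop above prefers the nzb name, then
# the folder name, then the file name, stripping a trailing ".nzb" or file
# extension along the way. A compact standalone restatement of that priority
# order (the function name and dict-based good_results argument are assumptions):
def pick_release_name(good_results, nzb_name=None, folder_name=None, file_name=None):
    if good_results.get('nzb') and nzb_name:
        if nzb_name.lower().endswith('.nzb'):
            return nzb_name.rpartition('.')[0]
        return nzb_name
    if good_results.get('folder') and folder_name:
        return folder_name
    if good_results.get('file') and file_name:
        # take the extension off the filename, it's not part of the release name
        return file_name.rpartition('.')[0] if '.' in file_name else file_name
    return None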
def process(self):
    """
    Post-process a given file
    """

    self._log(u"Processing " + self.file_path + " (" + str(self.nzb_name) + ")")

    if os.path.isdir(self.file_path):
        self._log(u"File " + self.file_path + " seems to be a directory")
        return False

    for ignore_file in self.IGNORED_FILESTRINGS:
        if ignore_file in self.file_path:
            self._log(u"File " + self.file_path + " is ignored type, skipping")
            return False

    # reset per-file stuff
    self.in_history = False

    # try to find the file info
    (tvdb_id, season, episodes) = self._find_info()

    # if we don't have it then give up
    if not tvdb_id or season == None or not episodes:
        return False

    # retrieve/create the corresponding TVEpisode objects
    ep_obj = self._get_ep_obj(tvdb_id, season, episodes)

    # get the quality of the episode we're processing
    new_ep_quality = self._get_quality(ep_obj)
    logger.log(u"Quality of the episode we're processing: " + str(new_ep_quality), logger.DEBUG)

    # see if this is a priority download (is it snatched, in history, or PROPER)
    priority_download = self._is_priority(ep_obj, new_ep_quality)
    self._log(u"Is ep a priority download: " + str(priority_download), logger.DEBUG)

    # set the status of the episodes
    for curEp in [ep_obj] + ep_obj.relatedEps:
        curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)

    # check for an existing file
    existing_file_status = self._checkForExistingFile(ep_obj.location)

    # if it's not priority then we don't want to replace smaller files in case it was a mistake
    if not priority_download:

        # if there's an existing file that we don't want to replace stop here
        if existing_file_status in (PostProcessor.EXISTS_LARGER, PostProcessor.EXISTS_SAME):
            self._log(u"File exists and we are not going to replace it because it's not smaller, quitting post-processing", logger.DEBUG)
            return False
        elif existing_file_status == PostProcessor.EXISTS_SMALLER:
            self._log(u"File exists and is smaller than the new file so I'm going to replace it", logger.DEBUG)
        elif existing_file_status != PostProcessor.DOESNT_EXIST:
            self._log(u"Unknown existing file status. This should never happen, please log this as a bug.", logger.ERROR)
            return False

    # if the file is priority then we're going to replace it even if it exists
    else:
        self._log(u"This download is marked a priority download so I'm going to replace an existing file if I find one", logger.DEBUG)

    # delete the existing file (and company)
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        try:
            self._delete(cur_ep.location, associated_files=True)

            # clean up any left over folders
            if cur_ep.location:
                helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location), keep_dir=ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed("Unable to delete the existing files")

    # if the show directory doesn't exist then make it if allowed
    if not ek.ek(os.path.isdir, ep_obj.show._location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
        self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
        try:
            ek.ek(os.mkdir, ep_obj.show._location)

            # do the library update for synoindex
            notifiers.synoindex_notifier.addFolder(ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed("Unable to create the show directory: " + ep_obj.show._location)

    # get metadata for the show (but not episode because it hasn't been fully processed)
    ep_obj.show.writeMetadata(True)

    # update the ep info before we rename so the quality & release name go into the name properly
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:
            cur_release_name = None

            # use the best possible representation of the release name
            if self.good_results[self.NZB_NAME]:
                cur_release_name = self.nzb_name
                if cur_release_name.lower().endswith('.nzb'):
                    cur_release_name = cur_release_name.rpartition('.')[0]
            elif self.good_results[self.FOLDER_NAME]:
                cur_release_name = self.folder_name
            elif self.good_results[self.FILE_NAME]:
                cur_release_name = self.file_name
                # take the extension off the filename, it's not needed
                if '.' in self.file_name:
                    cur_release_name = self.file_name.rpartition('.')[0]

            if cur_release_name:
                self._log("Found release name " + cur_release_name, logger.DEBUG)
                cur_ep.release_name = cur_release_name
            else:
                logger.log("good results: " + repr(self.good_results), logger.DEBUG)

            cur_ep.status = common.Quality.compositeStatus(common.DOWNLOADED, new_ep_quality)
            cur_ep.saveToDB()

    # find the destination folder
    try:
        proper_path = ep_obj.proper_path()
        proper_absolute_path = ek.ek(os.path.join, ep_obj.show.location, proper_path)
        dest_path = ek.ek(os.path.dirname, proper_absolute_path)
    except exceptions.ShowDirNotFoundException:
        raise exceptions.PostProcessingFailed(u"Unable to post-process an episode if the show dir doesn't exist, quitting")

    self._log(u"Destination folder for this episode: " + dest_path, logger.DEBUG)

    # create any folders we need
    helpers.make_dirs(dest_path)

    # figure out the base name of the resulting episode file
    if sickbeard.RENAME_EPISODES:
        orig_extension = self.file_name.rpartition('.')[-1]
        new_base_name = ek.ek(os.path.basename, proper_path)
        new_file_name = new_base_name + '.' + orig_extension
    else:
        # if we're not renaming then there's no new base name, we'll just use the existing name
        new_base_name = None
        new_file_name = self.file_name

    # hash the file in 8 KiB chunks so it can be recorded in processed_files
    with open(self.file_path, 'rb') as fh:
        m = hashlib.md5()
        while True:
            data = fh.read(8192)
            if not data:
                break
            m.update(data)
        MD5 = m.hexdigest()

    try:
        path, file = os.path.split(self.file_path)
        if sickbeard.TORRENT_DOWNLOAD_DIR == path:
            # possible actions for torrents
            if sickbeard.PROCESS_METHOD == "copy":
                self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
            elif sickbeard.PROCESS_METHOD == "move":
                self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
            elif sickbeard.PROCESS_METHOD == "hardlink":
                self._hardlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
            elif sickbeard.PROCESS_METHOD == "symlink":
                self._moveAndSymlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
            else:
                logger.log(u"Unknown process method: " + str(sickbeard.PROCESS_METHOD), logger.ERROR)
                raise exceptions.PostProcessingFailed("Unable to move the files to their new home")
        else:
            # action for all remaining (non-torrent) files
            if sickbeard.KEEP_PROCESSED_DIR:
                self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
            else:
                self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
    except (OSError, IOError):
        raise exceptions.PostProcessingFailed("Unable to move the files to their new home")

    myDB = db.DBConnection()

    ## INSERT MD5 of file
    controlMD5 = {"episode_id": int(ep_obj.tvdbid)}
    NewValMD5 = {"filename": new_base_name, "md5": MD5}
    myDB.upsert("processed_files", NewValMD5, controlMD5)

    # put the new location in the database
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:
            cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
            cur_ep.saveToDB()

    # log it to history
    history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group)

    # download subtitles
    if sickbeard.USE_SUBTITLES and ep_obj.show.subtitles:
        cur_ep.downloadSubtitles()

    # send notifications
    notifiers.notify_download(ep_obj.prettyName())

    # generate nfo/tbn
    ep_obj.createMetaFiles()
    ep_obj.saveToDB()

    # do the library update for XBMC
    notifiers.xbmc_notifier.update_library(ep_obj.show.name)

    # do the library update for Plex
    notifiers.plex_notifier.update_library()

    # do the library update for NMJ
    # nmj_notifier kicks off its library update when the notify_download is issued (inside notifiers)

    # do the library update for Synology Indexer
    notifiers.synoindex_notifier.addFile(ep_obj.location)

    # do the library update for pyTivo
    notifiers.pytivo_notifier.update_library(ep_obj)

    # do the library update for Trakt
    notifiers.trakt_notifier.update_library(ep_obj)

    self._run_extra_scripts(ep_obj)

    return True
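# --- Illustrative helper (not part of the original code above) ---------------
# The variant above hashes the finished file in 8 KiB chunks before recording
# it in the processed_files table. The same chunked-MD5 loop, pulled out into a
# small standalone function (the function name is illustrative):
import hashlib


def file_md5(path, chunk_size=8192):
    m = hashlib.md5()
    with open(path, 'rb') as fh:
        while True:
            data = fh.read(chunk_size)
            if not data:
                break
            m.update(data)
    return m.hexdigest()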
def process(self):
    """
    Post-process a given file
    """

    self._log(u"Processing " + self.file_path + " (" + str(self.nzb_name) + ")")

    if ek.ek(os.path.isdir, self.file_path):
        self._log(u"File " + self.file_path + " seems to be a directory")
        return False

    for ignore_file in self.IGNORED_FILESTRINGS:
        if ignore_file in self.file_path:
            self._log(u"File " + self.file_path + " is ignored type, skipping")
            return False

    # reset per-file stuff
    self.in_history = False

    # try to find the file info
    (indexer_id, season, episodes) = self._find_info()
    if not (indexer_id and season and len(episodes)):
        self._log(u"Can't find the show on any of the Indexers, skipping", logger.WARNING)
        return False

    # retrieve/create the corresponding TVEpisode objects
    ep_obj = self._get_ep_obj(indexer_id, season, episodes)

    # get the quality of the episode we're processing
    new_ep_quality = self._get_quality(ep_obj)
    logger.log(u"Quality of the episode we're processing: " + str(new_ep_quality), logger.DEBUG)

    # see if this is a priority download (is it snatched, in history, PROPER, or BEST)
    priority_download = self._is_priority(ep_obj, new_ep_quality)
    self._log(u"Is ep a priority download: " + str(priority_download), logger.DEBUG)

    # set the status of the episodes
    for curEp in [ep_obj] + ep_obj.relatedEps:
        curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)

    # check for an existing file
    existing_file_status = self._checkForExistingFile(ep_obj.location)

    # if it's not priority then we don't want to replace smaller files in case it was a mistake
    if not priority_download:

        # if there's an existing file that we don't want to replace stop here
        if existing_file_status in (PostProcessor.EXISTS_LARGER, PostProcessor.EXISTS_SAME):
            self._log(u"File exists and we are not going to replace it because it's not smaller, quitting post-processing", logger.ERROR)
            return False
        elif existing_file_status == PostProcessor.EXISTS_SMALLER:
            self._log(u"File exists and is smaller than the new file so I'm going to replace it", logger.DEBUG)
        elif existing_file_status != PostProcessor.DOESNT_EXIST:
            self._log(u"Unknown existing file status. This should never happen, please log this as a bug.", logger.ERROR)
            return False

    # if the file is priority then we're going to replace it even if it exists
    else:
        self._log(u"This download is marked a priority download so I'm going to replace an existing file if I find one", logger.DEBUG)

    # delete the existing file (and company)
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        try:
            self._delete(cur_ep.location, associated_files=True)

            # clean up any left over folders
            if cur_ep.location:
                helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location), keep_dir=ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed("Unable to delete the existing files")

    # if the show directory doesn't exist then make it if allowed
    if not ek.ek(os.path.isdir, ep_obj.show._location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
        self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
        try:
            ek.ek(os.mkdir, ep_obj.show._location)

            # do the library update for synoindex
            notifiers.synoindex_notifier.addFolder(ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed("Unable to create the show directory: " + ep_obj.show._location)

    # get metadata for the show (but not episode because it hasn't been fully processed)
    ep_obj.show.writeMetadata(True)

    # update the ep info before we rename so the quality & release name go into the name properly
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:
            cur_release_name = None

            # use the best possible representation of the release name
            if self.good_results[self.NZB_NAME]:
                cur_release_name = self.nzb_name
                if cur_release_name.lower().endswith('.nzb'):
                    cur_release_name = cur_release_name.rpartition('.')[0]
            elif self.good_results[self.FOLDER_NAME]:
                cur_release_name = self.folder_name
            elif self.good_results[self.FILE_NAME]:
                cur_release_name = self.file_name
                # take the extension off the filename, it's not needed
                if '.' in self.file_name:
                    cur_release_name = self.file_name.rpartition('.')[0]

            if cur_release_name:
                self._log("Found release name " + cur_release_name, logger.DEBUG)
                cur_ep.release_name = cur_release_name
            else:
                logger.log("good results: " + repr(self.good_results), logger.DEBUG)

            if ep_obj.status in common.Quality.SNATCHED_BEST:
                cur_ep.status = common.Quality.compositeStatus(common.ARCHIVED, new_ep_quality)
            else:
                cur_ep.status = common.Quality.compositeStatus(common.DOWNLOADED, new_ep_quality)

            cur_ep.subtitles = []
            cur_ep.subtitles_searchcount = 0
            cur_ep.subtitles_lastsearch = '0001-01-01 00:00:00'
            cur_ep.is_proper = self.is_proper
            cur_ep.saveToDB()

    # Just want to keep this consistent for failed handling right now
    releaseName = show_name_helpers.determineReleaseName(self.folder_path, self.nzb_name)
    if releaseName is not None:
        failed_history.logSuccess(releaseName)
    else:
        self._log(u"Couldn't find release in snatch history", logger.WARNING)

    # find the destination folder
    try:
        proper_path = ep_obj.proper_path()
        proper_absolute_path = ek.ek(os.path.join, ep_obj.show.location, proper_path)
        dest_path = ek.ek(os.path.dirname, proper_absolute_path)
    except exceptions.ShowDirNotFoundException:
        raise exceptions.PostProcessingFailed(u"Unable to post-process an episode if the show dir doesn't exist, quitting")

    self._log(u"Destination folder for this episode: " + dest_path, logger.DEBUG)

    # create any folders we need
    helpers.make_dirs(dest_path)

    # figure out the base name of the resulting episode file
    if sickbeard.RENAME_EPISODES:
        orig_extension = self.file_name.rpartition('.')[-1]
        new_base_name = ek.ek(os.path.basename, proper_path)
        new_file_name = new_base_name + '.' + orig_extension
    else:
        # if we're not renaming then there's no new base name, we'll just use the existing name
        new_base_name = None
        new_file_name = self.file_name

    try:
        # move the episode and associated files to the show dir
        if self.process_method == "copy":
            self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
        elif self.process_method == "move":
            self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
        elif self.process_method == "hardlink":
            self._hardlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
        elif self.process_method == "symlink":
            self._moveAndSymlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
        else:
            logger.log(u"Unknown process method: " + str(self.process_method), logger.ERROR)
            raise exceptions.PostProcessingFailed("Unable to move the files to their new home")
    except (OSError, IOError):
        raise exceptions.PostProcessingFailed("Unable to move the files to their new home")

    # download subtitles
    if sickbeard.USE_SUBTITLES and ep_obj.show.subtitles:
        for cur_ep in [ep_obj]:
            with cur_ep.lock:
                cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
                cur_ep.downloadSubtitles(force=True)

    # put the new location in the database
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:
            cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
            cur_ep.saveToDB()

    # log it to history
    history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group)

    # send notifications
    notifiers.notify_download(ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

    # generate nfo/tbn
    ep_obj.createMetaFiles()
    ep_obj.saveToDB()

    # do the library update for XBMC
    notifiers.xbmc_notifier.update_library(ep_obj.show.name)

    # do the library update for Plex
    notifiers.plex_notifier.update_library()

    # do the library update for NMJ
    # nmj_notifier kicks off its library update when the notify_download is issued (inside notifiers)

    # do the library update for Synology Indexer
    notifiers.synoindex_notifier.addFile(ep_obj.location)

    # do the library update for pyTivo
    notifiers.pytivo_notifier.update_library(ep_obj)

    # do the library update for Trakt
    notifiers.trakt_notifier.update_library(ep_obj)

    self._run_extra_scripts(ep_obj)

    return True
def process(self):
    """
    Post-process a given file
    """

    self._log(u"Processing " + self.file_path + " (" + str(self.nzb_name) + ")")

    if os.path.isdir(self.file_path):
        self._log(u"File " + self.file_path + " seems to be a directory")
        return False

    for ignore_file in self.IGNORED_FILESTRINGS:
        if ignore_file in self.file_path:
            self._log(u"File " + self.file_path + " is ignored type, skipping")
            return False

    # reset per-file stuff
    self.in_history = False

    # try to find the file info
    (tvdb_id, season, episodes) = self._find_info()

    # if we don't have it then give up
    if not tvdb_id or season == None or not episodes:
        return False

    # retrieve/create the corresponding TVEpisode objects
    ep_obj = self._get_ep_obj(tvdb_id, season, episodes)

    # get the quality of the episode we're processing
    new_ep_quality = self._get_quality(ep_obj)
    logger.log(u"Quality of the episode we're processing: " + str(new_ep_quality), logger.DEBUG)

    # see if this is a priority download (is it snatched, in history, or PROPER)
    priority_download = self._is_priority(ep_obj, new_ep_quality)
    self._log(u"Is ep a priority download: " + str(priority_download), logger.DEBUG)

    # set the status of the episodes
    for curEp in [ep_obj] + ep_obj.relatedEps:
        curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)

    # check for an existing file
    existing_file_status = self._checkForExistingFile(ep_obj.location)

    # if it's not priority then we don't want to replace smaller files in case it was a mistake
    if not priority_download:

        # if there's an existing file that we don't want to replace stop here
        if existing_file_status in (PostProcessor.EXISTS_LARGER, PostProcessor.EXISTS_SAME):
            self._log(u"File exists and we are not going to replace it because it's not smaller, quitting post-processing", logger.DEBUG)
            return False
        elif existing_file_status == PostProcessor.EXISTS_SMALLER:
            self._log(u"File exists and is smaller than the new file so I'm going to replace it", logger.DEBUG)
        elif existing_file_status != PostProcessor.DOESNT_EXIST:
            self._log(u"Unknown existing file status. This should never happen, please log this as a bug.", logger.ERROR)
            return False

    # if the file is priority then we're going to replace it even if it exists
    else:
        self._log(u"This download is marked a priority download so I'm going to replace an existing file if I find one", logger.DEBUG)

    # delete the existing file (and company)
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        try:
            self._delete(cur_ep.location, associated_files=True)

            # clean up any left over folders
            if cur_ep.location:
                helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location), keep_dir=ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed("Unable to delete the existing files")

    # if the show directory doesn't exist then make it if allowed
    if not ek.ek(os.path.isdir, ep_obj.show._location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
        self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
        try:
            ek.ek(os.mkdir, ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed("Unable to create the show directory: " + ep_obj.show._location)

    # get metadata for the show (but not episode because it hasn't been fully processed)
    ep_obj.show.writeMetadata(True)

    # update the ep info before we rename so the quality & release name go into the name properly
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:
            cur_release_name = None

            # use the best possible representation of the release name
            if self.good_results[self.NZB_NAME]:
                cur_release_name = self.nzb_name
                if cur_release_name.lower().endswith('.nzb'):
                    cur_release_name = cur_release_name.rpartition('.')[0]
            elif self.good_results[self.FOLDER_NAME]:
                cur_release_name = self.folder_name
            elif self.good_results[self.FILE_NAME]:
                cur_release_name = self.file_name
                # take the extension off the filename, it's not needed
                if '.' in self.file_name:
                    cur_release_name = self.file_name.rpartition('.')[0]

            if cur_release_name:
                self._log("Found release name " + cur_release_name, logger.DEBUG)
                cur_ep.release_name = cur_release_name
            else:
                logger.log("good results: " + repr(self.good_results), logger.DEBUG)

            cur_ep.status = common.Quality.compositeStatus(common.DOWNLOADED, new_ep_quality)
            cur_ep.saveToDB()

    # find the destination folder
    try:
        proper_path = ep_obj.proper_path()
        proper_absolute_path = ek.ek(os.path.join, ep_obj.show.location, proper_path)
        dest_path = ek.ek(os.path.dirname, proper_absolute_path)
    except exceptions.ShowDirNotFoundException:
        raise exceptions.PostProcessingFailed(u"Unable to post-process an episode if the show dir doesn't exist, quitting")

    self._log(u"Destination folder for this episode: " + dest_path, logger.DEBUG)

    # create any folders we need
    helpers.make_dirs(dest_path)

    # figure out the base name of the resulting episode file
    if sickbeard.RENAME_EPISODES:
        logger.log("Malleho rename, ep_obj.show.location : " + ep_obj.show.location, logger.DEBUG)
        malleho_ep_rep = ep_obj.show.location.decode("utf-8")
        malleho_ep_name_start = len(malleho_ep_rep) - malleho_ep_rep.rfind('/', 1)
        logger.log("Malleho rename, malleho_ep_name_start : " + str(malleho_ep_name_start), logger.DEBUG)
        malleho_ep_name = malleho_ep_rep[len(malleho_ep_rep) - malleho_ep_name_start + 1:len(malleho_ep_rep)]
        logger.log("Malleho rename, malleho_ep_name : " + malleho_ep_name, logger.DEBUG)
        logger.log("Malleho rename, ep_obj.show.name : " + ep_obj.show.name, logger.DEBUG)
        proper_path = malleho_ep_name + proper_path[len(ep_obj.show.name.decode("utf-8")):len(proper_path.decode("utf-8"))]
        logger.log("Malleho rename, new_proper_path : " + proper_path, logger.DEBUG)

        orig_extension = self.file_name.rpartition('.')[-1]
        new_base_name = ek.ek(os.path.basename, proper_path)
        new_file_name = new_base_name + '.' + orig_extension
    else:
        # if we're not renaming then there's no new base name, we'll just use the existing name
        new_base_name = None
        new_file_name = self.file_name

    try:
        # move the episode and associated files to the show dir
        if sickbeard.KEEP_PROCESSED_DIR:
            self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
        else:
            self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
    except (OSError, IOError):
        raise exceptions.PostProcessingFailed("Unable to move the files to their new home")

    # put the new location in the database
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:
            cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
            cur_ep.saveToDB()

    # log it to history
    history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group)

    # send notifications
    notifiers.notify_download(ep_obj.prettyName())

    # generate nfo/tbn
    ep_obj.createMetaFiles()
    ep_obj.saveToDB()

    # do the library update
    notifiers.xbmc_notifier.update_library(ep_obj.show.name)

    # do the library update for Plex Media Server
    notifiers.plex_notifier.update_library()

    # do the library update for synoindex
    notifiers.synoindex_notifier.addFile(ep_obj.location)

    # do the library update for trakt
    notifiers.trakt_notifier.update_library(ep_obj)

    # do the library update for pyTivo
    notifiers.pytivo_notifier.update_library(ep_obj)

    self._run_extra_scripts(ep_obj)

    return True
def process(self):
    """
    Post-process a given file
    """

    self._log(u"Processing " + self.file_path + " (" + str(self.nzb_name) + ")")

    if ek.ek(os.path.isdir, self.file_path):
        self._log(u"File " + self.file_path + " seems to be a directory")
        return False

    for ignore_file in self.IGNORED_FILESTRINGS:
        if ignore_file in self.file_path:
            self._log(u"File " + self.file_path + " is ignored type, skipping")
            return False

    # reset per-file stuff
    self.in_history = False

    # reset the anidb episode object
    self.anidbEpisode = None

    # try to find the file info
    (show, season, episodes, quality, version) = self._find_info()
    if not show:
        self._log(u"This show isn't in your list, you need to add it to SB before post-processing an episode", logger.WARNING)
        raise exceptions.PostProcessingFailed()
    elif season == None or not episodes:
        self._log(u"Not enough information to determine what episode this is", logger.DEBUG)
        self._log(u"Quitting post-processing", logger.DEBUG)
        return False

    # retrieve/create the corresponding TVEpisode objects
    ep_obj = self._get_ep_obj(show, season, episodes)

    # get the quality of the episode we're processing
    if quality:
        self._log(u"Snatch history had a quality in it, using that: " + common.Quality.qualityStrings[quality], logger.DEBUG)
        new_ep_quality = quality
    else:
        new_ep_quality = self._get_quality(ep_obj)

    logger.log(u"Quality of the episode we're processing: " + str(new_ep_quality), logger.DEBUG)

    # see if this is a priority download (is it snatched, in history, PROPER, or BEST)
    priority_download = self._is_priority(ep_obj, new_ep_quality)
    self._log(u"Is ep a priority download: " + str(priority_download), logger.DEBUG)

    # get the version of the episode we're processing
    if version:
        self._log(u"Snatch history had a version in it, using that: v" + str(version), logger.DEBUG)
        new_ep_version = version
    else:
        new_ep_version = -1

    # check for an existing file
    existing_file_status = self._checkForExistingFile(ep_obj.location)

    # if it's not priority then we don't want to replace smaller files in case it was a mistake
    if not priority_download:

        # if there's an existing file that we don't want to replace stop here
        if existing_file_status == PostProcessor.EXISTS_LARGER:
            if self.is_proper:
                self._log(u"File exists and new file is smaller, new file is a proper/repack, marking it safe to replace", logger.DEBUG)
                return True
            else:
                self._log(u"File exists and new file is smaller, marking it unsafe to replace", logger.DEBUG)
                return False
        elif existing_file_status == PostProcessor.EXISTS_SAME:
            self._log(u"File exists and new file is same size, marking it unsafe to replace", logger.DEBUG)
            return False

    # if the file is priority then we're going to replace it even if it exists
    else:
        self._log(u"This download is marked a priority download so I'm going to replace an existing file if I find one", logger.DEBUG)

    # delete the existing file (and company)
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        try:
            self._delete(cur_ep.location, associated_files=True)

            # clean up any left over folders
            if cur_ep.location:
                helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location), keep_dir=ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed("Unable to delete the existing files")

    # set the status of the episodes
    # for curEp in [ep_obj] + ep_obj.relatedEps:
    #     curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)

    # if the show directory doesn't exist then make it if allowed
    if not ek.ek(os.path.isdir, ep_obj.show._location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
        self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
        try:
            ek.ek(os.mkdir, ep_obj.show._location)

            # do the library update for synoindex
            notifiers.synoindex_notifier.addFolder(ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed("Unable to create the show directory: " + ep_obj.show._location)

    # get metadata for the show (but not episode because it hasn't been fully processed)
    ep_obj.show.writeMetadata(True)

    # update the ep info before we rename so the quality & release name go into the name properly
    sql_l = []
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:

            if self.release_name:
                self._log("Found release name " + self.release_name, logger.DEBUG)
                cur_ep.release_name = self.release_name
            else:
                cur_ep.release_name = ""

            if ep_obj.status in common.Quality.SNATCHED_BEST:
                cur_ep.status = common.Quality.compositeStatus(common.ARCHIVED, new_ep_quality)
            else:
                cur_ep.status = common.Quality.compositeStatus(common.DOWNLOADED, new_ep_quality)

            cur_ep.subtitles = []
            cur_ep.subtitles_searchcount = 0
            cur_ep.subtitles_lastsearch = '0001-01-01 00:00:00'
            cur_ep.is_proper = self.is_proper
            cur_ep.version = new_ep_version

            if self.release_group:
                cur_ep.release_group = self.release_group
            else:
                cur_ep.release_group = ""

            sql_l.append(cur_ep.get_sql())

    if len(sql_l) > 0:
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)

    # Just want to keep this consistent for failed handling right now
    releaseName = show_name_helpers.determineReleaseName(self.folder_path, self.nzb_name)
    if releaseName is not None:
        failed_history.logSuccess(releaseName)
    else:
        self._log(u"Couldn't find release in snatch history", logger.WARNING)

    # find the destination folder
    try:
        proper_path = ep_obj.proper_path()
        proper_absolute_path = ek.ek(os.path.join, ep_obj.show.location, proper_path)
        dest_path = ek.ek(os.path.dirname, proper_absolute_path)
    except exceptions.ShowDirNotFoundException:
        raise exceptions.PostProcessingFailed(u"Unable to post-process an episode if the show dir doesn't exist, quitting")

    self._log(u"Destination folder for this episode: " + dest_path, logger.DEBUG)

    # create any folders we need
    helpers.make_dirs(dest_path)

    # figure out the base name of the resulting episode file
    if sickbeard.RENAME_EPISODES:
        orig_extension = self.file_name.rpartition('.')[-1]
        new_base_name = ek.ek(os.path.basename, proper_path)
        new_file_name = new_base_name + '.' + orig_extension
    else:
        # if we're not renaming then there's no new base name, we'll just use the existing name
        new_base_name = None
        new_file_name = self.file_name

    # add to anidb
    if ep_obj.show.is_anime and sickbeard.ANIDB_USE_MYLIST:
        self._add_to_anidb_mylist(self.file_path)

    try:
        # move the episode and associated files to the show dir
        if self.process_method == "copy":
            self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
        elif self.process_method == "move":
            self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
        elif self.process_method == "hardlink":
            self._hardlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
        elif self.process_method == "symlink":
            self._moveAndSymlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
        else:
            logger.log(u"Unknown process method: " + str(self.process_method), logger.ERROR)
            raise exceptions.PostProcessingFailed("Unable to move the files to their new home")
    except (OSError, IOError):
        raise exceptions.PostProcessingFailed("Unable to move the files to their new home")

    # download subtitles
    if sickbeard.USE_SUBTITLES and ep_obj.show.subtitles:
        for cur_ep in [ep_obj] + ep_obj.relatedEps:
            with cur_ep.lock:
                cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
                cur_ep.downloadSubtitles(force=True)

    # put the new location in the database
    sql_l = []
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:
            cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
            sql_l.append(cur_ep.get_sql())

    if len(sql_l) > 0:
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)

    # set file modify stamp to show airdate
    if sickbeard.AIRDATE_EPISODES:
        for cur_ep in [ep_obj] + ep_obj.relatedEps:
            with cur_ep.lock:
                cur_ep.airdateModifyStamp()

    # generate nfo/tbn
    ep_obj.createMetaFiles()

    # log it to history
    history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group, new_ep_version)

    # send notifications
    notifiers.notify_download(ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

    # do the library update for KODI
    notifiers.kodi_notifier.update_library(ep_obj.show.name)

    # do the library update for Plex
    notifiers.plex_notifier.update_library()

    # do the library update for NMJ
    # nmj_notifier kicks off its library update when the notify_download is issued (inside notifiers)

    # do the library update for Synology Indexer
    notifiers.synoindex_notifier.addFile(ep_obj.location)

    # do the library update for pyTivo
    notifiers.pytivo_notifier.update_library(ep_obj)

    # do the library update for Trakt
    notifiers.trakt_notifier.update_library(ep_obj)

    self._run_extra_scripts(ep_obj)

    return True
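# --- Illustrative sketch (not part of the original code above) ---------------
# The later variants branch on self.process_method with a chain of if/elif
# tests over "copy"/"move"/"hardlink"/"symlink". A table-driven version of that
# dispatch is sketched below; it assumes it lives on the same PostProcessor
# class, and the method name and argument bundling are illustrative only:
def _dispatch_process_method(self, dest_path, new_base_name, keep_associated, want_subtitles):
    actions = {
        "copy": self._copy,
        "move": self._move,
        "hardlink": self._hardlink,
        "symlink": self._moveAndSymlink,
    }
    try:
        action = actions[self.process_method]
    except KeyError:
        logger.log(u"Unknown process method: " + str(self.process_method), logger.ERROR)
        raise exceptions.PostProcessingFailed("Unable to move the files to their new home")
    # same positional signature as the calls in the original code
    action(self.file_path, dest_path, new_base_name, keep_associated, want_subtitles)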
def process(self):
    """
    Post-process a given file
    """

    self._log(u"Processing " + self.file_path + " (" + str(self.nzb_name) + ")")

    if ek.ek(os.path.isdir, self.file_path):
        self._log(u"File " + self.file_path + " seems to be a directory")
        return False

    for ignore_file in self.IGNORED_FILESTRINGS:
        if ignore_file in self.file_path:
            self._log(u"File " + self.file_path + " is ignored type, skipping")
            return False

    # reset per-file stuff
    self.in_history = False

    # reset the anidb episode object
    self.anidbEpisode = None

    # try to find the file info
    (show, season, episodes, quality, version) = self._find_info()
    if not show:
        self._log(u"This show isn't in your list, you need to add it to SB before post-processing an episode", logger.WARNING)
        raise exceptions.PostProcessingFailed()
    elif season == None or not episodes:
        self._log(u"Not enough information to determine what episode this is", logger.DEBUG)
        self._log(u"Quitting post-processing", logger.DEBUG)
        return False

    # retrieve/create the corresponding TVEpisode objects
    ep_obj = self._get_ep_obj(show, season, episodes)

    # get the quality of the episode we're processing
    if quality:
        self._log(u"Snatch history had a quality in it, using that: " + common.Quality.qualityStrings[quality], logger.DEBUG)
        new_ep_quality = quality
    else:
        new_ep_quality = self._get_quality(ep_obj)

    logger.log(u"Quality of the episode we're processing: " + str(new_ep_quality), logger.DEBUG)

    # see if this is a priority download (is it snatched, in history, PROPER, or BEST)
    priority_download = self._is_priority(ep_obj, new_ep_quality)
    self._log(u"Is ep a priority download: " + str(priority_download), logger.DEBUG)

    # get the version of the episode we're processing
    if version:
        self._log(u"Snatch history had a version in it, using that: v" + str(version), logger.DEBUG)
        new_ep_version = version
    else:
        new_ep_version = -1

    # check for an existing file
    existing_file_status = self._checkForExistingFile(ep_obj.location)

    # if it's not priority then we don't want to replace smaller files in case it was a mistake
    if not priority_download:

        # if there's an existing file that we don't want to replace stop here
        if existing_file_status == PostProcessor.EXISTS_LARGER:
            if self.is_proper:
                self._log(u"File exists and new file is smaller, new file is a proper/repack, marking it safe to replace", logger.DEBUG)
                return True
            else:
                self._log(u"File exists and new file is smaller, marking it unsafe to replace", logger.DEBUG)
                return False
        elif existing_file_status == PostProcessor.EXISTS_SAME:
            self._log(u"File exists and new file is same size, marking it unsafe to replace", logger.DEBUG)
            return False

    # if the file is priority then we're going to replace it even if it exists
    else:
        self._log(u"This download is marked a priority download so I'm going to replace an existing file if I find one", logger.DEBUG)

    # delete the existing file (and company)
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        try:
            self._delete(cur_ep.location, associated_files=True)

            # clean up any left over folders
            if cur_ep.location:
                helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location), keep_dir=ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed("Unable to delete the existing files")

    # set the status of the episodes
    # for curEp in [ep_obj] + ep_obj.relatedEps:
    #     curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)

    # if the show directory doesn't exist then make it if allowed
    if not ek.ek(os.path.isdir, ep_obj.show._location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
        self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
        try:
            ek.ek(os.mkdir, ep_obj.show._location)

            # do the library update for synoindex
            notifiers.synoindex_notifier.addFolder(ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed("Unable to create the show directory: " + ep_obj.show._location)

    # get metadata for the show (but not episode because it hasn't been fully processed)
    ep_obj.show.writeMetadata(True)

    # update the ep info before we rename so the quality & release name go into the name properly
    sql_l = []
    trakt_data = []
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:

            if self.release_name:
                self._log("Found release name " + self.release_name, logger.DEBUG)
                cur_ep.release_name = self.release_name
            else:
                cur_ep.release_name = ""

            if ep_obj.status in common.Quality.SNATCHED_BEST:
                cur_ep.status = common.Quality.compositeStatus(common.ARCHIVED, new_ep_quality)
            else:
                cur_ep.status = common.Quality.compositeStatus(common.DOWNLOADED, new_ep_quality)

            cur_ep.subtitles = []
            cur_ep.subtitles_searchcount = 0
            cur_ep.subtitles_lastsearch = '0001-01-01 00:00:00'
            cur_ep.is_proper = self.is_proper
            cur_ep.version = new_ep_version

            if self.release_group:
                cur_ep.release_group = self.release_group
            else:
                cur_ep.release_group = ""

            sql_l.append(cur_ep.get_sql())
            trakt_data.append((cur_ep.season, cur_ep.episode))

    data = notifiers.trakt_notifier.trakt_episode_data_generate(trakt_data)

    if sickbeard.USE_TRAKT and sickbeard.TRAKT_SYNC_WATCHLIST and sickbeard.TRAKT_REMOVE_WATCHLIST:
        logger.log(u"Remove episodes, showid: indexerid " + str(show.indexerid) + ", Title " + str(show.name) + " to Traktv Watchlist", logger.DEBUG)

        if data:
            notifiers.trakt_notifier.update_watchlist(show, data_episode=data, update="remove")

    if len(sql_l) > 0:
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)

    # Just want to keep this consistent for failed handling right now
    releaseName = show_name_helpers.determineReleaseName(self.folder_path, self.nzb_name)
    if releaseName is not None:
        failed_history.logSuccess(releaseName)
    else:
        self._log(u"Couldn't find release in snatch history", logger.WARNING)

    # find the destination folder
    try:
        proper_path = ep_obj.proper_path()
        proper_absolute_path = ek.ek(os.path.join, ep_obj.show.location, proper_path)
        dest_path = ek.ek(os.path.dirname, proper_absolute_path)
    except exceptions.ShowDirNotFoundException:
        raise exceptions.PostProcessingFailed(u"Unable to post-process an episode if the show dir doesn't exist, quitting")

    self._log(u"Destination folder for this episode: " + dest_path, logger.DEBUG)

    # create any folders we need
    helpers.make_dirs(dest_path)

    # figure out the base name of the resulting episode file
    if sickbeard.RENAME_EPISODES:
        orig_extension = self.file_name.rpartition('.')[-1]
        new_base_name = ek.ek(os.path.basename, proper_path)
        new_file_name = new_base_name + '.' + orig_extension
    else:
        # if we're not renaming then there's no new base name, we'll just use the existing name
        new_base_name = None
        new_file_name = self.file_name

    # add to anidb
    if ep_obj.show.is_anime and sickbeard.ANIDB_USE_MYLIST:
        self._add_to_anidb_mylist(self.file_path)

    try:
        # move the episode and associated files to the show dir
        if self.process_method == "copy":
            self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
        elif self.process_method == "move":
            self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
        elif self.process_method == "hardlink":
            self._hardlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
        elif self.process_method == "symlink":
            self._moveAndSymlink(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
        else:
            logger.log(u"Unknown process method: " + str(self.process_method), logger.ERROR)
            raise exceptions.PostProcessingFailed("Unable to move the files to their new home")
    except (OSError, IOError):
        raise exceptions.PostProcessingFailed("Unable to move the files to their new home")

    # download subtitles
    if sickbeard.USE_SUBTITLES and ep_obj.show.subtitles:
        for cur_ep in [ep_obj] + ep_obj.relatedEps:
            with cur_ep.lock:
                cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
                cur_ep.downloadSubtitles(force=True)

    # put the new location in the database
    sql_l = []
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:
            cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
            sql_l.append(cur_ep.get_sql())

    if len(sql_l) > 0:
        myDB = db.DBConnection()
        myDB.mass_action(sql_l)

    # set file modify stamp to show airdate
    if sickbeard.AIRDATE_EPISODES:
        for cur_ep in [ep_obj] + ep_obj.relatedEps:
            with cur_ep.lock:
                cur_ep.airdateModifyStamp()

    # generate nfo/tbn
    ep_obj.createMetaFiles()

    # log it to history
    history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group, new_ep_version)

    # send notifications
    notifiers.notify_download(ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

    # do the library update for KODI
    notifiers.kodi_notifier.update_library(ep_obj.show.name)

    # do the library update for Plex
    notifiers.plex_notifier.update_library()

    # do the library update for NMJ
    # nmj_notifier kicks off its library update when the notify_download is issued (inside notifiers)

    # do the library update for Synology Indexer
    notifiers.synoindex_notifier.addFile(ep_obj.location)

    # do the library update for pyTivo
    notifiers.pytivo_notifier.update_library(ep_obj)

    # do the library update for Trakt
    notifiers.trakt_notifier.update_library(ep_obj)

    self._run_extra_scripts(ep_obj)

    return True
def process(self):
    """
    Post-process a given file
    """
    self._log(u'Processing %s%s' % (self.file_path,
              (u'<br />.. from nzb %s' % str(self.nzb_name), u'')[None is self.nzb_name]))

    if ek.ek(os.path.isdir, self.file_path):
        self._log(u'File %s<br />.. seems to be a directory' % self.file_path)
        return False

    for ignore_file in self.IGNORED_FILESTRINGS:
        if ignore_file in self.file_path:
            self._log(u'File %s<br />.. is ignored type, skipping' % self.file_path)
            return False

    # reset per-file stuff
    self.in_history = False
    self.anidbEpisode = None

    # try to find the file info
    (show, season, episodes, quality) = self._find_info()

    # if we don't have it then give up
    if not show:
        self._log(u'Please add the show to your SickGear then try to post process an episode', logger.WARNING)
        raise exceptions.PostProcessingFailed()
    elif None is season or not episodes:
        self._log(u'Quitting this post process, could not determine what episode this is', logger.DEBUG)
        return False

    # retrieve/create the corresponding TVEpisode objects
    ep_obj = self._get_ep_obj(show, season, episodes)

    # get the quality of the episode we're processing
    if common.Quality.UNKNOWN == quality:
        new_ep_quality = self._get_quality(ep_obj)
    else:
        new_ep_quality = quality
        self._log(u'Using "%s" quality from the snatch history' % common.Quality.qualityStrings[new_ep_quality],
                  logger.DEBUG)

    # see if it's safe to replace existing episode (is download snatched, PROPER, better quality)
    if not self._safe_replace(ep_obj, new_ep_quality):
        # if it's not safe to replace, stop here
        self._log(u'Quitting this post process', logger.DEBUG)
        return False

    # delete the existing file (and company)
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        try:
            self._delete(cur_ep.location, associated_files=True)

            # clean up any left over folders
            if cur_ep.location:
                helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location),
                                             keep_dir=ep_obj.show.location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed(u'Unable to delete the existing files')

    # set the status of the episodes
    # for curEp in [ep_obj] + ep_obj.relatedEps:
    #     curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)

    # if the show directory doesn't exist then make it if allowed
    if not ek.ek(os.path.isdir, ep_obj.show.location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
        self._log(u'Show directory does not exist, creating it', logger.DEBUG)
        try:
            ek.ek(os.mkdir, ep_obj.show.location)

            # do the library update for synoindex
            notifiers.synoindex_notifier.addFolder(ep_obj.show.location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed(u'Unable to create show directory: ' + ep_obj.show.location)

    # get metadata for the show (but not episode because it hasn't been fully processed)
    ep_obj.show.writeMetadata(True)

    # if we're processing an episode of type anime, get the anime version
    anime_version = (-1, self.anime_version)[ep_obj.show.is_anime and None is not self.anime_version and
                                             self.anime_version]

    # update the ep info before we rename so the quality & release name go into the name properly
    sql_l = []
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:
            if self.release_name:
                self._log(u'Found release name ' + self.release_name, logger.DEBUG)

            cur_ep.release_name = self.release_name or ''

            cur_ep.status = common.Quality.compositeStatus(
                **({'status': common.DOWNLOADED, 'quality': new_ep_quality},
                   {'status': common.ARCHIVED, 'quality': new_ep_quality})
                [ep_obj.status in common.Quality.SNATCHED_BEST])

            cur_ep.release_group = self.release_group or ''

            cur_ep.is_proper = self.is_proper

            cur_ep.version = anime_version

            cur_ep.subtitles = []

            cur_ep.subtitles_searchcount = 0

            cur_ep.subtitles_lastsearch = '0001-01-01 00:00:00'

            sql = cur_ep.get_sql()
            if None is not sql:
                sql_l.append(sql)

    if 0 < len(sql_l):
        my_db = db.DBConnection()
        my_db.mass_action(sql_l)

    # Just want to keep this consistent for failed handling right now
    release_name = show_name_helpers.determineReleaseName(self.folder_path, self.nzb_name)
    if None is not release_name:
        failed_history.logSuccess(release_name)
    else:
        self._log(u'No release found in snatch history', logger.WARNING)

    # find the destination folder
    try:
        proper_path = ep_obj.proper_path()
        proper_absolute_path = ek.ek(os.path.join, ep_obj.show.location, proper_path)
        dest_path = ek.ek(os.path.dirname, proper_absolute_path)
    except exceptions.ShowDirNotFoundException:
        raise exceptions.PostProcessingFailed(
            u'Unable to post process an episode because the show dir does not exist, quitting')

    self._log(u'Destination folder for this episode is ' + dest_path, logger.DEBUG)

    # create any folders we need
    if not helpers.make_dirs(dest_path):
        raise exceptions.PostProcessingFailed(u'Unable to create destination folder: ' + dest_path)

    # figure out the base name of the resulting episode file
    if sickbeard.RENAME_EPISODES:
        new_base_name = ek.ek(os.path.basename, proper_path)
        new_file_name = new_base_name + '.' + self.file_name.rpartition('.')[-1]
    else:
        # if we're not renaming then there's no new base name, we'll just use the existing name
        new_base_name = None
        new_file_name = self.file_name

    # add to anidb
    if sickbeard.ANIDB_USE_MYLIST and ep_obj.show.is_anime:
        self._add_to_anidb_mylist(self.file_path)

    try:
        # move the episode and associated files to the show dir
        args_link = {'file_path': self.file_path, 'new_path': dest_path,
                     'new_base_name': new_base_name,
                     'associated_files': sickbeard.MOVE_ASSOCIATED_FILES}
        args_cpmv = {'subtitles': sickbeard.USE_SUBTITLES and ep_obj.show.subtitles,
                     'action_tmpl': u' %s<br />.. to %s'}
        args_cpmv.update(args_link)
        if 'copy' == self.process_method:
            self._copy(**args_cpmv)
        elif 'move' == self.process_method:
            self._move(**args_cpmv)
        elif 'hardlink' == self.process_method:
            self._hardlink(**args_link)
        elif 'symlink' == self.process_method:
            self._move_and_symlink(**args_link)
        else:
            logger.log(u'Unknown process method: ' + str(self.process_method), logger.ERROR)
            raise exceptions.PostProcessingFailed(u'Unable to move the files to the new location')
    except (OSError, IOError):
        raise exceptions.PostProcessingFailed(u'Unable to move the files to the new location')

    # download subtitles
    dosubs = sickbeard.USE_SUBTITLES and ep_obj.show.subtitles

    # put the new location in the database
    sql_l = []
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:
            cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)

            if dosubs:
                cur_ep.downloadSubtitles(force=True)

            # set file modify stamp to show airdate
            if sickbeard.AIRDATE_EPISODES:
                cur_ep.airdateModifyStamp()

            sql = cur_ep.get_sql()
            if None is not sql:
                sql_l.append(sql)

    if 0 < len(sql_l):
        my_db = db.DBConnection()
        my_db.mass_action(sql_l)

    # generate nfo/tbn
    ep_obj.createMetaFiles()

    # log it to history
    history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group, anime_version)

    # send notifications
    notifiers.notify_download(ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

    # do the library update for XBMC
    notifiers.xbmc_notifier.update_library(ep_obj.show.name)

    # do the library update for Kodi
    notifiers.kodi_notifier.update_library(ep_obj.show.name)

    # do the library update for Plex
    notifiers.plex_notifier.update_library(ep_obj)

    # do the library update for NMJ
    # nmj_notifier kicks off its library update when the notify_download is issued (inside notifiers)

    # do the library update for Synology Indexer
    notifiers.synoindex_notifier.addFile(ep_obj.location)

    # do the library update for pyTivo
    notifiers.pytivo_notifier.update_library(ep_obj)

    # do the library update for Trakt
    notifiers.trakt_notifier.update_library(ep_obj)

    self._run_extra_scripts(ep_obj)

    return True
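# --- illustration (not part of the original module) ----------------------------------
# The variant above leans on two idioms worth spelling out. The tuple-index
# (false_value, true_value)[condition] works because True/False index a 2-tuple as
# 1/0, so it behaves like a compact ternary. The copy/move dispatch collects keyword
# arguments in dicts and expands them with **. Everything below is a standalone
# sketch with hypothetical names; only the idioms come from the code above.

nzb_name = None
label = (u' from nzb %s' % str(nzb_name), u'')[None is nzb_name]
assert label == u''                      # condition is True -> index 1 -> empty string

def fake_copy(file_path, new_path, subtitles=False):
    # stand-in for self._copy(**args_cpmv); just echoes what it was called with
    return (file_path, new_path, subtitles)

args_link = {'file_path': '/downloads/ep.mkv', 'new_path': '/tv/Show/Season 1'}
args_cpmv = {'subtitles': True}
args_cpmv.update(args_link)              # merge the shared args, as with args_cpmv.update(args_link)
assert fake_copy(**args_cpmv) == ('/downloads/ep.mkv', '/tv/Show/Season 1', True)
# --------------------------------------------------------------------------------------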
def process(self):
    """
    Post-process a given file or (if failed) dir/nzb
    """
    self._log(u"Processing " + self.file_path + " (" + str(self.nzb_name) + ")")

    if os.path.isdir(self.file_path) and not self.failed:
        self._log(u"File " + self.file_path + " seems to be a directory")
        return False

    for ignore_file in self.IGNORED_FILESTRINGS:
        if ignore_file in self.file_path:
            self._log(u"File " + self.file_path + " is ignored type, skipping")
            return False

    # reset per-file stuff
    self.in_history = False

    # try to find the file info
    (tvdb_id, season, episodes) = self._find_info()

    # if we don't have it then give up
    if not tvdb_id or season == None or not episodes:
        return False

    # retrieve/create the corresponding TVEpisode objects
    ep_obj = self._get_ep_obj(tvdb_id, season, episodes)

    if self.failed:
        release_name = self._get_release_name()

        if release_name is not None:
            self._log(u"Marking release as bad: " + release_name, logger.DEBUG)
            myDB = db.DBConnection("failed.db")
            myDB.select("INSERT INTO failed (release) VALUES (?)", [re.sub("[\.\-\ ]", "_", release_name)])
        else:
            self._log(u"Release name not found. Can't mark as invalid. REPORT THIS", logger.ERROR)
            return False

        logger.log(u"Setting episode(s) back to Wanted", logger.DEBUG)
        for curEp in [ep_obj] + ep_obj.relatedEps:
            self._log(u"Setting episode back to wanted: " + curEp.name)
            with curEp.lock:
                curEp.status = int(common.WANTED)
                curEp.saveToDB()

        # we 'succeeded' in the sense that no errors were encountered
        return True

    # get the quality of the episode we're processing
    new_ep_quality = self._get_quality(ep_obj)
    logger.log(u"Quality of the episode we're processing: " + str(new_ep_quality), logger.DEBUG)

    # see if this is a priority download (is it snatched, in history, or PROPER)
    priority_download = self._is_priority(ep_obj, new_ep_quality)
    self._log(u"Is ep a priority download: " + str(priority_download), logger.DEBUG)

    # set the status of the episodes
    for curEp in [ep_obj] + ep_obj.relatedEps:
        curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)

    # check for an existing file
    existing_file_status = self._checkForExistingFile(ep_obj.location)

    # if it's not priority then we don't want to replace smaller files in case it was a mistake
    if not priority_download:

        # if there's an existing file that we don't want to replace stop here
        if existing_file_status in (PostProcessor.EXISTS_LARGER, PostProcessor.EXISTS_SAME):
            self._log(u"File exists and we are not going to replace it because it's not smaller, quitting post-processing",
                      logger.DEBUG)
            return False
        elif existing_file_status == PostProcessor.EXISTS_SMALLER:
            self._log(u"File exists and is smaller than the new file so I'm going to replace it", logger.DEBUG)
        elif existing_file_status != PostProcessor.DOESNT_EXIST:
            self._log(u"Unknown existing file status. This should never happen, please log this as a bug.",
                      logger.ERROR)
            return False

    # if the file is priority then we're going to replace it even if it exists
    else:
        self._log(u"This download is marked a priority download so I'm going to replace an existing file if I find one",
                  logger.DEBUG)

    # delete the existing file (and company)
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        try:
            self._delete(cur_ep.location, associated_files=True)

            # clean up any left over folders
            if cur_ep.location:
                helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location),
                                             keep_dir=ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed("Unable to delete the existing files")

    # if the show directory doesn't exist then make it if allowed
    if not ek.ek(os.path.isdir, ep_obj.show._location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
        self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
        try:
            ek.ek(os.mkdir, ep_obj.show._location)

            # do the library update for synoindex
            notifiers.synoindex_notifier.addFolder(ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed("Unable to create the show directory: " + ep_obj.show._location)

    # get metadata for the show (but not episode because it hasn't been fully processed)
    ep_obj.show.writeMetadata(True)

    # update the ep info before we rename so the quality & release name go into the name properly
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:
            cur_release_name = self._get_release_name()
            if cur_release_name:
                self._log("Found release name " + cur_release_name, logger.DEBUG)
                cur_ep.release_name = cur_release_name
            else:
                logger.log("good results: " + repr(self.good_results), logger.DEBUG)

            cur_ep.status = common.Quality.compositeStatus(common.DOWNLOADED, new_ep_quality)

            cur_ep.saveToDB()

    # find the destination folder
    try:
        proper_path = ep_obj.proper_path()
        proper_absolute_path = ek.ek(os.path.join, ep_obj.show.location, proper_path)
        dest_path = ek.ek(os.path.dirname, proper_absolute_path)
        notifiers.synoindex_notifier.addFolder(dest_path)
    except exceptions.ShowDirNotFoundException:
        raise exceptions.PostProcessingFailed(
            u"Unable to post-process an episode if the show dir doesn't exist, quitting")

    self._log(u"Destination folder for this episode: " + dest_path, logger.DEBUG)

    # create any folders we need
    helpers.make_dirs(dest_path)

    # figure out the base name of the resulting episode file
    if sickbeard.RENAME_EPISODES:
        orig_extension = self.file_name.rpartition(".")[-1]
        new_base_name = ek.ek(os.path.basename, proper_path)
        new_file_name = new_base_name + "." + orig_extension
    else:
        # if we're not renaming then there's no new base name, we'll just use the existing name
        new_base_name = None
        new_file_name = self.file_name

    try:
        # move the episode and associated files to the show dir
        if sickbeard.KEEP_PROCESSED_DIR:
            self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
        else:
            self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES)
    except (OSError, IOError):
        raise exceptions.PostProcessingFailed("Unable to move the files to their new home")

    # put the new location in the database
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        with cur_ep.lock:
            cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
            cur_ep.saveToDB()

    # log it to history
    history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group)

    # send notifications
    notifiers.notify_download(ep_obj.prettyName())

    # generate nfo/tbn
    ep_obj.createMetaFiles()
    ep_obj.saveToDB()

    # do the library update for XBMC
    notifiers.xbmc_notifier.update_library(ep_obj.show.name)

    # do the library update for Plex
    notifiers.plex_notifier.update_library()

    # do the library update for NMJ
    # nmj_notifier kicks off its library update when the notify_download is issued (inside notifiers)

    # do the library update for Synology Indexer
    notifiers.synoindex_notifier.addFile(ep_obj.location)

    # do the library update for pyTivo
    notifiers.pytivo_notifier.update_library(ep_obj)

    # do the library update for Trakt
    notifiers.trakt_notifier.update_library(ep_obj)

    self._run_extra_scripts(ep_obj)

    return True
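# --- illustration (not part of the original module) ----------------------------------
# In the failed-download branch above, the release name is normalised before being
# written to failed.db: re.sub replaces dots, dashes and spaces with underscores so
# later lookups are not sensitive to the separator style. A minimal, standalone sketch
# of just that normalisation (the helper name is hypothetical; the pattern mirrors the
# source):

import re

def normalize_release_name(release_name):
    # 'Show.S01E01.720p-GROUP' -> 'Show_S01E01_720p_GROUP'
    return re.sub(r"[\.\-\ ]", "_", release_name)

assert normalize_release_name('Show.S01E01.720p-GROUP') == 'Show_S01E01_720p_GROUP'
# --------------------------------------------------------------------------------------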
            self._log(u"File exists and is smaller than the new file so I'm going to replace it", logger.DEBUG)
        elif existing_file_status != PostProcessor.DOESNT_EXIST:
            self._log(u"Unknown existing file status. This should never happen, please log this as a bug.",
                      logger.ERROR)
            return False

    # if the file is priority then we're going to replace it even if it exists
    else:
        self._log(u"This download is marked a priority download so I'm going to replace an existing file if I find one",
                  logger.DEBUG)

    # delete the existing file (and company)
    for cur_ep in [ep_obj] + ep_obj.relatedEps:
        try:
            self._delete(cur_ep.location, associated_files=True)

            # clean up any left over folders
            if cur_ep.location:
                helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location),
                                             keep_dir=ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed("Unable to delete the existing files")

    # if the show directory doesn't exist then make it if allowed
    if not ek.ek(os.path.isdir, ep_obj.show._location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
        self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
        try:
            ek.ek(os.mkdir, ep_obj.show._location)
        except (OSError, IOError):
            raise exceptions.PostProcessingFailed("Unable to create the show directory: " + ep_obj.show._location)

    # get metadata for the show (but not episode because it hasn't been fully processed)
    ep_obj.show.writeMetadata(True)