def delete_folder(folder, check_empty=True):
    """
    Removes a folder from the filesystem

    :param folder: Path to folder to remove
    :param check_empty: Boolean, check if the folder is empty before removing it, defaults to True
    :return: True on success, False on failure
    """
    # bail out unless we were actually handed a directory
    if not os.path.isdir(folder):
        return False

    # never delete the configured TV download directory itself
    download_dir = sickrage.srConfig.TV_DOWNLOAD_DIR
    if download_dir and real_path(folder) == real_path(download_dir):
        return False

    try:
        if not check_empty:
            sickrage.srLogger.info("Deleting folder: " + folder)
            removetree(folder)
        else:
            leftovers = os.listdir(folder)
            if leftovers:
                sickrage.srLogger.info(
                    "Not deleting folder {} found the following files: {}".format(folder, leftovers))
                return False
            sickrage.srLogger.info("Deleting folder (if it's empty): " + folder)
            os.rmdir(folder)
    except (OSError, IOError) as error:
        sickrage.srLogger.warning(
            "Warning: unable to delete folder: {}: {}".format(folder, error))
        return False

    return True
def delete_folder(folder, check_empty=True):
    """
    Removes a folder from the filesystem

    :param folder: Path to folder to remove
    :param check_empty: Boolean, check if the folder is empty before removing it, defaults to True
    :return: True on success, False on failure
    """
    if not os.path.isdir(folder):
        # not a directory (or doesn't exist) -- nothing to do
        return False

    # refuse to remove the TV download directory when one is configured
    if sickrage.srConfig.TV_DOWNLOAD_DIR and \
            real_path(folder) == real_path(sickrage.srConfig.TV_DOWNLOAD_DIR):
        return False

    try:
        if check_empty:
            remaining = os.listdir(folder)
            if remaining:
                sickrage.srLogger.info(
                    "Not deleting folder {} found the following files: {}".format(folder, remaining))
                return False

            sickrage.srLogger.info("Deleting folder (if it's empty): " + folder)
            os.rmdir(folder)
        else:
            sickrage.srLogger.info("Deleting folder: " + folder)
            removetree(folder)
        return True
    except (OSError, IOError) as err:
        sickrage.srLogger.warning(
            "Warning: unable to delete folder: {}: {}".format(folder, err))
        return False
def start(self):
    """
    Boot the core application.

    Initializes services, queues, searchers and databases, sanity-checks and
    clamps configuration values, restores a pending cache backup if one exists,
    and registers every recurring job with the scheduler before starting it.
    """
    self.PID = os.getpid()

    # set socket timeout
    socket.setdefaulttimeout(sickrage.srConfig.SOCKET_TIMEOUT)

    # init version updater and get current version
    self.VERSIONUPDATER = srVersionUpdater()
    self.VERSION = self.VERSIONUPDATER.updater.version

    # init services
    self.SCHEDULER = srScheduler()
    self.WEBSERVER = srWebServer()
    self.INDEXER_API = srIndexerApi

    # init caches
    self.NAMECACHE = srNameCache()

    # init queues
    self.SHOWUPDATER = srShowUpdater()
    self.SHOWQUEUE = srShowQueue()
    self.SEARCHQUEUE = srSearchQueue()

    # init searchers
    self.DAILYSEARCHER = srDailySearcher()
    self.BACKLOGSEARCHER = srBacklogSearcher()
    self.PROPERSEARCHER = srProperSearcher()
    self.TRAKTSEARCHER = srTraktSearcher()
    self.SUBTITLESEARCHER = srSubtitleSearcher()

    # init postprocessor
    self.AUTOPOSTPROCESSOR = srPostProcessor()

    # migrate old database file names to new ones
    if not os.path.exists(main_db.MainDB().filename) and os.path.exists("sickbeard.db"):
        helpers.moveFile("sickbeard.db", main_db.MainDB().filename)

    # initialize the main SB database
    main_db.MainDB().InitialSchema().upgrade()

    # initialize the cache database
    cache_db.CacheDB().InitialSchema().upgrade()

    # initialize the failed downloads database
    failed_db.FailedDB().InitialSchema().upgrade()

    # fix up any db problems
    main_db.MainDB().SanityCheck()

    # load data for shows from database
    self.load_shows()

    if sickrage.srConfig.DEFAULT_PAGE not in ('home', 'schedule', 'history', 'news', 'IRC'):
        sickrage.srConfig.DEFAULT_PAGE = 'home'

    if not makeDir(sickrage.srConfig.CACHE_DIR):
        sickrage.srLogger.error("!!! Creating local cache dir failed")
        sickrage.srConfig.CACHE_DIR = get_temp_dir()

    # Check if we need to perform a restore of the cache folder
    try:
        restore_dir = os.path.join(sickrage.DATA_DIR, 'restore')
        if os.path.exists(restore_dir) and os.path.exists(os.path.join(restore_dir, 'cache')):
            def restore_cache(src_dir, dst_dir):
                def path_leaf(path):
                    head, tail = os.path.split(path)
                    return tail or os.path.basename(head)

                try:
                    # back up any existing cache dir before moving the restore in
                    if os.path.isdir(dst_dir):
                        bak_filename = '{}-{}'.format(path_leaf(dst_dir),
                                                      datetime.now().strftime('%Y%m%d_%H%M%S'))
                        shutil.move(dst_dir, os.path.join(os.path.dirname(dst_dir), bak_filename))

                    shutil.move(src_dir, dst_dir)
                    sickrage.srLogger.info("Restore: restoring cache successful")
                except Exception as E:
                    # FIX: log the exception itself, not E.message -- .message is
                    # unreliable (empty/absent for many exceptions, removed in py3)
                    sickrage.srLogger.error("Restore: restoring cache failed: {}".format(E))

            restore_cache(os.path.join(restore_dir, 'cache'), sickrage.srConfig.CACHE_DIR)
    except Exception as e:
        sickrage.srLogger.error("Restore: restoring cache failed: {}".format(e))
    finally:
        # always clear out the restore dir and stale cache subfolders
        if os.path.exists(os.path.join(sickrage.DATA_DIR, 'restore')):
            try:
                removetree(os.path.join(sickrage.DATA_DIR, 'restore'))
            except Exception as e:
                sickrage.srLogger.error(
                    "Restore: Unable to remove the restore directory: {}".format(e))

        for cleanupDir in ['mako', 'sessions', 'indexers']:
            try:
                removetree(os.path.join(sickrage.srConfig.CACHE_DIR, cleanupDir))
            except Exception as e:
                # FIX: original mixed automatic '{}' with manual '{1}' numbering,
                # which raises ValueError in str.format and masked the real error
                sickrage.srLogger.warning(
                    "Restore: Unable to remove the cache/{} directory: {}".format(cleanupDir, e))

    if sickrage.srConfig.WEB_PORT < 21 or sickrage.srConfig.WEB_PORT > 65535:
        sickrage.srConfig.WEB_PORT = 8081

    if not sickrage.srConfig.WEB_COOKIE_SECRET:
        sickrage.srConfig.WEB_COOKIE_SECRET = generateCookieSecret()

    # attempt to help prevent users from breaking links by using a bad url
    if not sickrage.srConfig.ANON_REDIRECT.endswith('?'):
        sickrage.srConfig.ANON_REDIRECT = ''

    if not re.match(r'\d+\|[^|]+(?:\|[^|]+)*', sickrage.srConfig.ROOT_DIRS):
        sickrage.srConfig.ROOT_DIRS = ''

    sickrage.srConfig.NAMING_FORCE_FOLDERS = check_force_season_folders()

    if sickrage.srConfig.NZB_METHOD not in ('blackhole', 'sabnzbd', 'nzbget'):
        sickrage.srConfig.NZB_METHOD = 'blackhole'

    if not sickrage.srConfig.PROVIDER_ORDER:
        sickrage.srConfig.PROVIDER_ORDER = self.providersDict[GenericProvider.NZB].keys() + \
                                           self.providersDict[GenericProvider.TORRENT].keys()

    if sickrage.srConfig.TORRENT_METHOD not in ('blackhole', 'utorrent', 'transmission', 'deluge',
                                                'deluged', 'download_station', 'rtorrent',
                                                'qbittorrent', 'mlnet'):
        sickrage.srConfig.TORRENT_METHOD = 'blackhole'

    if sickrage.srConfig.PROPER_SEARCHER_INTERVAL not in ('15m', '45m', '90m', '4h', 'daily'):
        sickrage.srConfig.PROPER_SEARCHER_INTERVAL = 'daily'

    # clamp every scheduler frequency to its configured minimum
    if sickrage.srConfig.AUTOPOSTPROCESSOR_FREQ < sickrage.srConfig.MIN_AUTOPOSTPROCESSOR_FREQ:
        sickrage.srConfig.AUTOPOSTPROCESSOR_FREQ = sickrage.srConfig.MIN_AUTOPOSTPROCESSOR_FREQ

    if sickrage.srConfig.NAMECACHE_FREQ < sickrage.srConfig.MIN_NAMECACHE_FREQ:
        sickrage.srConfig.NAMECACHE_FREQ = sickrage.srConfig.MIN_NAMECACHE_FREQ

    if sickrage.srConfig.DAILY_SEARCHER_FREQ < sickrage.srConfig.MIN_DAILY_SEARCHER_FREQ:
        sickrage.srConfig.DAILY_SEARCHER_FREQ = sickrage.srConfig.MIN_DAILY_SEARCHER_FREQ

    sickrage.srConfig.MIN_BACKLOG_SEARCHER_FREQ = get_backlog_cycle_time()
    if sickrage.srConfig.BACKLOG_SEARCHER_FREQ < sickrage.srConfig.MIN_BACKLOG_SEARCHER_FREQ:
        sickrage.srConfig.BACKLOG_SEARCHER_FREQ = sickrage.srConfig.MIN_BACKLOG_SEARCHER_FREQ

    if sickrage.srConfig.VERSION_UPDATER_FREQ < sickrage.srConfig.MIN_VERSION_UPDATER_FREQ:
        sickrage.srConfig.VERSION_UPDATER_FREQ = sickrage.srConfig.MIN_VERSION_UPDATER_FREQ

    # show-update hour must be a valid hour of day
    if sickrage.srConfig.SHOWUPDATE_HOUR > 23:
        sickrage.srConfig.SHOWUPDATE_HOUR = 0
    elif sickrage.srConfig.SHOWUPDATE_HOUR < 0:
        sickrage.srConfig.SHOWUPDATE_HOUR = 0

    if sickrage.srConfig.SUBTITLE_SEARCHER_FREQ < sickrage.srConfig.MIN_SUBTITLE_SEARCHER_FREQ:
        sickrage.srConfig.SUBTITLE_SEARCHER_FREQ = sickrage.srConfig.MIN_SUBTITLE_SEARCHER_FREQ

    sickrage.srConfig.NEWS_LATEST = sickrage.srConfig.NEWS_LAST_READ

    if sickrage.srConfig.SUBTITLES_LANGUAGES[0] == '':
        sickrage.srConfig.SUBTITLES_LANGUAGES = []

    sickrage.srConfig.TIME_PRESET = sickrage.srConfig.TIME_PRESET_W_SECONDS.replace(":%S", "")

    # initialize metadata_providers
    self.metadataProviderDict = get_metadata_generator_dict()
    for cur_metadata_tuple in [(sickrage.srConfig.METADATA_KODI, kodi),
                               (sickrage.srConfig.METADATA_KODI_12PLUS, kodi_12plus),
                               (sickrage.srConfig.METADATA_MEDIABROWSER, mediabrowser),
                               (sickrage.srConfig.METADATA_PS3, ps3),
                               (sickrage.srConfig.METADATA_WDTV, wdtv),
                               (sickrage.srConfig.METADATA_TIVO, tivo),
                               (sickrage.srConfig.METADATA_MEDE8ER, mede8er)]:
        (cur_metadata_config, cur_metadata_class) = cur_metadata_tuple
        tmp_provider = cur_metadata_class.metadata_class()
        tmp_provider.set_config(cur_metadata_config)
        self.metadataProviderDict[tmp_provider.name] = tmp_provider

    # add version checker job to scheduler
    self.SCHEDULER.add_job(
        self.VERSIONUPDATER.run,
        srIntervalTrigger(**{'hours': sickrage.srConfig.VERSION_UPDATER_FREQ,
                             'min': sickrage.srConfig.MIN_VERSION_UPDATER_FREQ}),
        name="VERSIONUPDATER",
        id="VERSIONUPDATER",
        replace_existing=True
    )

    # add network timezones updater job to scheduler
    self.SCHEDULER.add_job(
        update_network_dict,
        srIntervalTrigger(**{'days': 1}),
        name="TZUPDATER",
        id="TZUPDATER",
        replace_existing=True
    )

    # add namecache updater job to scheduler
    self.SCHEDULER.add_job(
        self.NAMECACHE.run,
        srIntervalTrigger(**{'minutes': sickrage.srConfig.NAMECACHE_FREQ,
                             'min': sickrage.srConfig.MIN_NAMECACHE_FREQ}),
        name="NAMECACHE",
        id="NAMECACHE",
        replace_existing=True
    )

    # add show queue job to scheduler
    self.SCHEDULER.add_job(
        self.SHOWQUEUE.run,
        srIntervalTrigger(**{'seconds': 3}),
        name="SHOWQUEUE",
        id="SHOWQUEUE",
        replace_existing=True
    )

    # add search queue job to scheduler
    self.SCHEDULER.add_job(
        self.SEARCHQUEUE.run,
        srIntervalTrigger(**{'seconds': 1}),
        name="SEARCHQUEUE",
        id="SEARCHQUEUE",
        replace_existing=True
    )

    # add show updater job to scheduler
    self.SCHEDULER.add_job(
        self.SHOWUPDATER.run,
        srIntervalTrigger(**{'hours': 1,
                             'start_date': datetime.now().replace(hour=sickrage.srConfig.SHOWUPDATE_HOUR)}),
        name="SHOWUPDATER",
        id="SHOWUPDATER",
        replace_existing=True
    )

    # add daily search job to scheduler
    self.SCHEDULER.add_job(
        self.DAILYSEARCHER.run,
        srIntervalTrigger(**{'minutes': sickrage.srConfig.DAILY_SEARCHER_FREQ,
                             'min': sickrage.srConfig.MIN_DAILY_SEARCHER_FREQ}),
        name="DAILYSEARCHER",
        id="DAILYSEARCHER",
        replace_existing=True
    )

    # add backlog search job to scheduler
    self.SCHEDULER.add_job(
        self.BACKLOGSEARCHER.run,
        srIntervalTrigger(**{'minutes': sickrage.srConfig.BACKLOG_SEARCHER_FREQ,
                             'min': sickrage.srConfig.MIN_BACKLOG_SEARCHER_FREQ}),
        name="BACKLOG",
        id="BACKLOG",
        replace_existing=True
    )

    # add auto-postprocessing job to scheduler; paused unless enabled
    job = self.SCHEDULER.add_job(
        self.AUTOPOSTPROCESSOR.run,
        srIntervalTrigger(**{'minutes': sickrage.srConfig.AUTOPOSTPROCESSOR_FREQ,
                             'min': sickrage.srConfig.MIN_AUTOPOSTPROCESSOR_FREQ}),
        name="POSTPROCESSOR",
        id="POSTPROCESSOR",
        replace_existing=True
    )
    (job.pause, job.resume)[sickrage.srConfig.PROCESS_AUTOMATICALLY]()

    # add find proper job to scheduler; paused unless enabled
    job = self.SCHEDULER.add_job(
        self.PROPERSEARCHER.run,
        srIntervalTrigger(**{'minutes': {'15m': 15, '45m': 45, '90m': 90, '4h': 4 * 60,
                                         'daily': 24 * 60}[sickrage.srConfig.PROPER_SEARCHER_INTERVAL]}),
        name="PROPERSEARCHER",
        id="PROPERSEARCHER",
        replace_existing=True
    )
    (job.pause, job.resume)[sickrage.srConfig.DOWNLOAD_PROPERS]()

    # add trakt.tv checker job to scheduler; paused unless enabled
    job = self.SCHEDULER.add_job(
        self.TRAKTSEARCHER.run,
        srIntervalTrigger(**{'hours': 1}),
        name="TRAKTSEARCHER",
        id="TRAKTSEARCHER",
        replace_existing=True,
    )
    (job.pause, job.resume)[sickrage.srConfig.USE_TRAKT]()

    # add subtitles finder job to scheduler; paused unless enabled
    job = self.SCHEDULER.add_job(
        self.SUBTITLESEARCHER.run,
        srIntervalTrigger(**{'hours': sickrage.srConfig.SUBTITLE_SEARCHER_FREQ}),
        name="SUBTITLESEARCHER",
        id="SUBTITLESEARCHER",
        replace_existing=True
    )
    (job.pause, job.resume)[sickrage.srConfig.USE_SUBTITLES]()

    # start the scheduler
    self.SCHEDULER.start()
def update(self):
    """
    Downloads the latest source tarball from github and installs it over
    the existing version.

    :return: True on success, False on failure
    """
    tar_download_url = 'http://github.com/' + sickrage.srConfig.GIT_ORG + '/' + \
                       sickrage.srConfig.GIT_REPO + '/tarball/' + self.version

    try:
        # prepare the update dir
        sr_update_dir = os.path.join(sickrage.PROG_DIR, 'sr-update')

        if os.path.isdir(sr_update_dir):
            sickrage.srLogger.info("Clearing out update folder " + sr_update_dir + " before extracting")
            removetree(sr_update_dir)

        sickrage.srLogger.info("Creating update folder " + sr_update_dir + " before extracting")
        os.makedirs(sr_update_dir)

        # retrieve file
        sickrage.srLogger.info("Downloading update from " + repr(tar_download_url))
        tar_download_path = os.path.join(sr_update_dir, 'sr-update.tar')
        download_file(tar_download_url, tar_download_path)

        if not os.path.isfile(tar_download_path):
            sickrage.srLogger.warning(
                "Unable to retrieve new version from " + tar_download_url + ", can't update")
            return False

        if not tarfile.is_tarfile(tar_download_path):
            sickrage.srLogger.error(
                "Retrieved version from " + tar_download_url + " is corrupt, can't update")
            return False

        # extract to sr-update dir; the context manager guarantees the archive
        # handle is closed even if extraction fails (original leaked it)
        sickrage.srLogger.info("Extracting file " + tar_download_path)
        with tarfile.open(tar_download_path) as tar:
            tar.extractall(sr_update_dir)

        # delete .tar.gz
        sickrage.srLogger.info("Deleting file " + tar_download_path)
        os.remove(tar_download_path)

        # find update dir name -- expect exactly one extracted top-level folder
        update_dir_contents = [x for x in os.listdir(sr_update_dir)
                               if os.path.isdir(os.path.join(sr_update_dir, x))]
        if len(update_dir_contents) != 1:
            sickrage.srLogger.error("Invalid update data, update failed: " + str(update_dir_contents))
            return False

        content_dir = os.path.join(sr_update_dir, update_dir_contents[0])

        # walk temp folder and move files to main folder
        sickrage.srLogger.info("Moving files from " + content_dir + " to " + sickrage.PROG_DIR)
        for dirname, _, filenames in os.walk(content_dir):  # @UnusedVariable
            dirname = dirname[len(content_dir) + 1:]
            for curfile in filenames:
                old_path = os.path.join(content_dir, dirname, curfile)
                new_path = os.path.join(sickrage.PROG_DIR, dirname, curfile)

                # Avoid DLL access problem on WIN32/64
                # These files needing to be updated manually
                # or find a way to kill the access from memory
                if curfile in ('unrar.dll', 'unrar64.dll'):
                    try:
                        os.chmod(new_path, stat.S_IWRITE)
                        os.remove(new_path)
                        os.renames(old_path, new_path)
                    except Exception as e:
                        # FIX: str(e) instead of e.message -- .message is
                        # unreliable (absent for many exceptions, removed in py3)
                        sickrage.srLogger.debug("Unable to update " + new_path + ': ' + str(e))
                        # Trash the updated file without moving in new path
                        os.remove(old_path)
                    continue

                if os.path.isfile(new_path):
                    os.remove(new_path)
                os.renames(old_path, new_path)
    except Exception as e:
        # FIX: format the exception itself rather than e.message (see above)
        sickrage.srLogger.error("Error while trying to update: {}".format(e))
        sickrage.srLogger.debug("Traceback: " + traceback.format_exc())
        return False

    # Notify update successful
    sickrage.srCore.NOTIFIERS.notify_git_update(sickrage.srCore.NEWEST_VERSION_STRING)

    return True
def tearDown_test_show_dir():
    """Remove the test show directory, if it exists."""
    if not os.path.exists(SHOWDIR):
        return
    removetree(SHOWDIR)
def tearDown_test_episode_file():
    """Remove the test episode file directory, if it exists."""
    if not os.path.exists(FILEDIR):
        return
    removetree(FILEDIR)
def update(self):
    """
    Downloads the latest source tarball from github and installs it over
    the existing version.

    :return: True on success, False on failure
    """
    tar_download_url = 'http://github.com/' + sickrage.srConfig.GIT_ORG + '/' + \
                       sickrage.srConfig.GIT_REPO + '/tarball/' + self.version

    try:
        # prepare the update dir
        sr_update_dir = os.path.join(sickrage.PROG_DIR, 'sr-update')

        if os.path.isdir(sr_update_dir):
            sickrage.srLogger.info("Clearing out update folder " + sr_update_dir + " before extracting")
            removetree(sr_update_dir)

        sickrage.srLogger.info("Creating update folder " + sr_update_dir + " before extracting")
        os.makedirs(sr_update_dir)

        # retrieve file
        sickrage.srLogger.info("Downloading update from " + repr(tar_download_url))
        tar_download_path = os.path.join(sr_update_dir, 'sr-update.tar')
        download_file(tar_download_url, tar_download_path)

        if not os.path.isfile(tar_download_path):
            sickrage.srLogger.warning(
                "Unable to retrieve new version from " + tar_download_url + ", can't update")
            return False

        if not tarfile.is_tarfile(tar_download_path):
            sickrage.srLogger.error(
                "Retrieved version from " + tar_download_url + " is corrupt, can't update")
            return False

        # extract to sr-update dir; the context manager guarantees the archive
        # handle is closed even if extraction fails (original leaked it)
        sickrage.srLogger.info("Extracting file " + tar_download_path)
        with tarfile.open(tar_download_path) as tar:
            tar.extractall(sr_update_dir)

        # delete .tar.gz
        sickrage.srLogger.info("Deleting file " + tar_download_path)
        os.remove(tar_download_path)

        # find update dir name -- expect exactly one extracted top-level folder
        update_dir_contents = [x for x in os.listdir(sr_update_dir)
                               if os.path.isdir(os.path.join(sr_update_dir, x))]
        if len(update_dir_contents) != 1:
            sickrage.srLogger.error("Invalid update data, update failed: " + str(update_dir_contents))
            return False

        content_dir = os.path.join(sr_update_dir, update_dir_contents[0])

        # walk temp folder and move files to main folder
        sickrage.srLogger.info("Moving files from " + content_dir + " to " + sickrage.PROG_DIR)
        for dirname, _, filenames in os.walk(content_dir):  # @UnusedVariable
            dirname = dirname[len(content_dir) + 1:]
            for curfile in filenames:
                old_path = os.path.join(content_dir, dirname, curfile)
                new_path = os.path.join(sickrage.PROG_DIR, dirname, curfile)

                # Avoid DLL access problem on WIN32/64
                # These files needing to be updated manually
                # or find a way to kill the access from memory
                if curfile in ('unrar.dll', 'unrar64.dll'):
                    try:
                        os.chmod(new_path, stat.S_IWRITE)
                        os.remove(new_path)
                        os.renames(old_path, new_path)
                    except Exception as e:
                        # FIX: str(e) instead of e.message -- .message is
                        # unreliable (absent for many exceptions, removed in py3)
                        sickrage.srLogger.debug("Unable to update " + new_path + ': ' + str(e))
                        # Trash the updated file without moving in new path
                        os.remove(old_path)
                    continue

                if os.path.isfile(new_path):
                    os.remove(new_path)
                os.renames(old_path, new_path)
    except Exception as e:
        # FIX: format the exception itself rather than e.message (see above)
        sickrage.srLogger.error("Error while trying to update: {}".format(e))
        sickrage.srLogger.debug("Traceback: " + traceback.format_exc())
        return False

    # Notify update successful
    sickrage.srCore.NOTIFIERS.notify_git_update(sickrage.srCore.NEWEST_VERSION_STRING)

    return True