def index(self, limit=None):
    """Render the history page, persisting the requested row limit."""
    # Fall back to the stored limit (or 100) when none was supplied.
    if limit is None:
        limit = int(sickbeard.HISTORY_LIMIT) if sickbeard.HISTORY_LIMIT else 100
    else:
        limit = try_int(limit, 100)

    # Remember the chosen limit for next time.
    sickbeard.HISTORY_LIMIT = limit
    sickbeard.save_config()

    history = self.history.get(limit)

    page = PageTemplate(rh=self, filename='history.mako')
    submenu = [
        {'title': 'Clear History', 'path': 'history/clearHistory',
         'icon': 'ui-icon ui-icon-trash', 'class': 'clearhistory', 'confirm': True},
        {'title': 'Trim History', 'path': 'history/trimHistory',
         'icon': 'menu-icon-cut', 'class': 'trimhistory', 'confirm': True},
    ]

    return page.render(historyResults=history.detailed, compactResults=history.compact, limit=limit,
                       submenu=submenu, title='History', header='History',
                       topmenu='history', controller='history', action='index')
def migrate_config(self):
    """
    Calls each successive migration until the config is the same version as SG expects
    """
    if self.config_version > self.expected_config_version:
        logger.log_error_and_exit(
            u'Your config version (%s) has been incremented past what this version of SickGear supports (%s).\n'
            'If you have used other forks or a newer version of SickGear, your config file may be unusable due to '
            'their modifications.' % (self.config_version, self.expected_config_version))

    sickbeard.CONFIG_VERSION = self.config_version

    while self.config_version < self.expected_config_version:
        next_version = self.config_version + 1

        # optional human-readable label for the upcoming migration
        suffix = ''
        if next_version in self.migration_names:
            suffix = ': %s' % self.migration_names[next_version]

        logger.log(u'Backing up config before upgrade')
        if not helpers.backupVersionedFile(sickbeard.CONFIG_FILE, self.config_version):
            logger.log_error_and_exit(u'Config backup failed, abort upgrading config')
        logger.log(u'Proceeding with upgrade')

        # do the migration, expect a method named _migrate_v<num>
        logger.log(u'Migrating config up to version %s %s' % (next_version, suffix))
        getattr(self, '_migrate_v%s' % next_version)()
        self.config_version = next_version

        # save new config after migration
        sickbeard.CONFIG_VERSION = self.config_version
        logger.log(u'Saving config file to disk')
        sickbeard.save_config()
def saveAnime(self, use_anidb=None, anidb_username=None, anidb_password=None, anidb_use_mylist=None,
              split_home=None):
    """
    Save anime related settings
    """
    results = []

    sickbeard.ANIDB_USERNAME = anidb_username
    sickbeard.ANIDB_PASSWORD = anidb_password
    # checkbox-style settings share one conversion path
    for setting, value in (('USE_ANIDB', use_anidb),
                           ('ANIDB_USE_MYLIST', anidb_use_mylist),
                           ('ANIME_SPLIT_HOME', split_home)):
        setattr(sickbeard, setting, config.checkbox_to_value(value))

    sickbeard.save_config()

    if results:
        for cur_error in results:
            logger.log(cur_error, logger.ERROR)
        ui.notifications.error('Error(s) Saving Configuration', '<br>\n'.join(results))
    else:
        ui.notifications.message('Configuration Saved', ek(os.path.join, sickbeard.CONFIG_FILE))

    return self.redirect('/config/anime/')
def migrate_config(self):
    """
    Calls each successive migration until the config is the same version as SB expects
    """
    if self.config_version > self.expected_config_version:
        logger.log_error_and_exit(
            u"Your config version (%s) has been incremented past what this version of Sick Beard supports (%s).\n"
            "If you have used other forks or a newer version of Sick Beard, your config file may be unusable due to their modifications."
            % (self.config_version, self.expected_config_version))

    sickbeard.CONFIG_VERSION = self.config_version

    while self.config_version < self.expected_config_version:
        next_version = self.config_version + 1

        # optional human-readable label for the upcoming migration
        suffix = ''
        if next_version in self.migration_names:
            suffix = ': ' + self.migration_names[next_version]

        logger.log(u"Backing up config before upgrade")
        if not helpers.backupVersionedFile(sickbeard.CONFIG_FILE, self.config_version):
            logger.log_error_and_exit(u"Config backup failed, abort upgrading config")
        logger.log(u"Proceeding with upgrade")

        # do the migration, expect a method named _migrate_v<num>
        logger.log(u"Migrating config up to version %s%s" % (next_version, suffix))
        getattr(self, '_migrate_v%s' % next_version)()
        self.config_version = next_version

        # save new config after migration
        sickbeard.CONFIG_VERSION = self.config_version
        logger.log(u"Saving config file to disk")
        sickbeard.save_config()
def trakt_request(self, path, data=None, headers=None, url=None, method='GET', count=0):
    """Send an authenticated request to the Trakt API.

    :param path: endpoint path appended to the base url
    :param data: optional payload, JSON-encoded into the request body
    :param headers: optional headers dict (defaults to self.headers)
    :param url: optional base url (defaults to self.api_url)
    :param method: HTTP verb
    :param count: retry counter, incremented per (re)attempt
    :return: decoded JSON response, or {} when no usable response was obtained
    :raise traktAuthException: unauthorized and token refresh failed
    :raise traktException: Trakt reported a failure status in its payload
    """
    if None is sickbeard.TRAKT_TOKEN:
        logger.log(u'You must get a Trakt token. Check your Trakt settings', logger.WARNING)
        return {}

    headers = headers or self.headers
    url = url or self.api_url
    count += 1

    headers['Authorization'] = 'Bearer ' + sickbeard.TRAKT_TOKEN

    try:
        resp = self.session.request(method, url + path, headers=headers, timeout=self.timeout,
                                    data=json.dumps(data) if data else [], verify=self.verify)

        # check for http errors and raise if any are present
        resp.raise_for_status()

        # convert response to json
        resp = resp.json()
    except requests.RequestException as e:
        code = getattr(e.response, 'status_code', None)
        if not code:
            # fix: membership test on the exception object ('timed out' in e) raised
            # TypeError because exceptions are not iterable; test the message instead
            if 'timed out' in str(e):
                logger.log(u'Timeout connecting to Trakt. Try to increase timeout value in Trakt settings',
                           logger.WARNING)
            # This is pretty much a fatal error if there is no status_code
            # It means there basically was no response at all
            else:
                logger.log(u'Could not connect to Trakt. Error: {0}'.format(e), logger.WARNING)
        elif 502 == code:
            # Retry the request, Cloudflare had a proxying issue;
            # cap retries so a persistent 502 cannot recurse without bound
            if count < 3:
                logger.log(u'Retrying trakt api request: %s' % path, logger.WARNING)
                return self.trakt_request(path, data, headers, url, method, count=count)
            logger.log(u'Retry limit reached for trakt api request: %s' % path, logger.WARNING)
        elif 401 == code:
            # try a token refresh once before giving up
            if self.trakt_token(refresh=True, count=count):
                sickbeard.save_config()
                return self.trakt_request(path, data, headers, url, method, count=count)
            else:
                logger.log(u'Unauthorized. Please check your Trakt settings', logger.WARNING)
                raise traktAuthException()
        elif code in (500, 501, 503, 504, 520, 521, 522):
            # http://docs.trakt.apiary.io/#introduction/status-codes
            logger.log(u'Trakt may have some issues and it\'s unavailable. Try again later please', logger.WARNING)
        elif 404 == code:
            logger.log(u'Trakt error (404) the resource does not exist: %s' % url + path, logger.WARNING)
        else:
            logger.log(u'Could not connect to Trakt. Code error: {0}'.format(code), logger.ERROR)
        return {}

    # check and confirm Trakt call did not fail
    if isinstance(resp, dict) and 'failure' == resp.get('status', None):
        if 'message' in resp:
            raise traktException(resp['message'])
        if 'error' in resp:
            raise traktException(resp['error'])
        else:
            raise traktException('Unknown Error')

    return resp
def execute(self):
    """Shift composite quality bitmasks to the new 1080p-aware layout (db v12).

    Rewrites the default quality setting, every show's quality (mapping the
    old HD/ANY templates to their new equivalents), episode statuses and the
    history action/quality columns, then vacuums the database.
    """
    backupDatabase(12)

    # update the default quality so we dont grab the wrong qualities after migration -- should have really been a config migration
    sickbeard.QUALITY_DEFAULT = self._update_composite_qualities(sickbeard.QUALITY_DEFAULT)
    sickbeard.save_config()

    # upgrade previous HD to HD720p -- shift previous qualities to new placevalues
    old_hd = common.Quality.combineQualities(
        [common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3], [])
    new_hd = common.Quality.combineQualities(
        [common.Quality.HDTV, common.Quality.HDWEBDL, common.Quality.HDBLURAY], [])

    # update ANY -- shift existing qualities and add new 1080p qualities, note that rawHD was not added to the ANY template
    old_any = common.Quality.combineQualities(
        [common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.HDWEBDL >> 2,
         common.Quality.HDBLURAY >> 3, common.Quality.UNKNOWN], [])
    new_any = common.Quality.combineQualities(
        [common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.FULLHDTV,
         common.Quality.HDWEBDL, common.Quality.FULLHDWEBDL, common.Quality.HDBLURAY, common.Quality.FULLHDBLURAY,
         common.Quality.UNKNOWN], [])

    # update qualities (including templates)
    logger.log(u"[1/4] Updating pre-defined templates and the quality for each show...", logger.MESSAGE)
    ql = []
    shows = self.connection.select("SELECT * FROM tv_shows")
    for cur_show in shows:
        if cur_show["quality"] == old_hd:
            new_quality = new_hd
        elif cur_show["quality"] == old_any:
            new_quality = new_any
        else:
            new_quality = self._update_composite_qualities(cur_show["quality"])
        ql.append(["UPDATE tv_shows SET quality = ? WHERE show_id = ?", [new_quality, cur_show["show_id"]]])
    self.connection.mass_action(ql)

    # update status that are are within the old hdwebdl (1<<3 which is 8) and better -- exclude unknown (1<<15 which is 32768)
    logger.log(u"[2/4] Updating the status for the episodes within each show...", logger.MESSAGE)
    ql = []
    episodes = self.connection.select("SELECT * FROM tv_episodes WHERE status < 3276800 AND status >= 800")
    for cur_episode in episodes:
        ql.append(["UPDATE tv_episodes SET status = ? WHERE episode_id = ?",
                   [self._update_status(cur_episode["status"]), cur_episode["episode_id"]]])
    self.connection.mass_action(ql)

    # make two separate passes through the history since snatched and downloaded (action & quality) may not always coordinate together

    # update previous history so it shows the correct action
    logger.log(u"[3/4] Updating history to reflect the correct action...", logger.MESSAGE)
    ql = []
    historyAction = self.connection.select("SELECT * FROM history WHERE action < 3276800 AND action >= 800")
    for cur_entry in historyAction:
        ql.append(["UPDATE history SET action = ? WHERE showid = ? AND date = ?",
                   [self._update_status(cur_entry["action"]), cur_entry["showid"], cur_entry["date"]]])
    self.connection.mass_action(ql)

    # update previous history so it shows the correct quality
    logger.log(u"[4/4] Updating history to reflect the correct quality...", logger.MESSAGE)
    ql = []
    historyQuality = self.connection.select("SELECT * FROM history WHERE quality < 32768 AND quality >= 8")
    for cur_entry in historyQuality:
        ql.append(["UPDATE history SET quality = ? WHERE showid = ? AND date = ?",
                   [self._update_quality(cur_entry["quality"]), cur_entry["showid"], cur_entry["date"]]])
    self.connection.mass_action(ql)

    self.incDBVersion()

    # cleanup and reduce db if any previous data was removed
    logger.log(u"Performing a vacuum on the database.", logger.DEBUG)
    self.connection.action("VACUUM")
def execute(self):
    """Enable NZB/torrent/VOD searching based on which provider types are enabled.

    Fix: the original loop broke out at the first enabled provider, so at most
    one of the three flags could ever be switched on even when providers of
    several types were enabled.
    """
    use_torrents = False
    use_nzbs = False
    use_vods = False

    for cur_provider in sickbeard.providers.sortedProviderList():
        if cur_provider.isEnabled():
            # only log the first enabled provider of each type
            if cur_provider.providerType == GenericProvider.NZB and not use_nzbs:
                use_nzbs = True
                logger.log(u"Provider " + cur_provider.name + " is enabled, enabling NZBs in the upgrade")
            elif cur_provider.providerType == GenericProvider.TORRENT and not use_torrents:
                use_torrents = True
                logger.log(u"Provider " + cur_provider.name + " is enabled, enabling Torrents in the upgrade")
            elif cur_provider.providerType == GenericProvider.VOD and not use_vods:
                use_vods = True
                logger.log(u"Provider " + cur_provider.name + " is enabled, enabling VideoOnDemand in the upgrade")
            # stop early once every provider type has been seen
            if use_nzbs and use_torrents and use_vods:
                break

    sickbeard.USE_TORRENTS = use_torrents
    sickbeard.USE_NZBS = use_nzbs
    sickbeard.USE_VODS = use_vods
    sickbeard.save_config()

    self.incDBVersion()
def migrate_config(self):
    """
    Calls each successive migration until the config is the same version as SB expects
    """
    sickbeard.CONFIG_VERSION = self.config_version

    while self.config_version < self.expected_config_version:
        next_version = self.config_version + 1

        # optional human-readable label for the upcoming migration
        suffix = ''
        if next_version in self.migration_names:
            suffix = ': ' + self.migration_names[next_version]

        # best-effort backup; unlike later variants, failure here is not fatal
        helpers.backupVersionedFile(sickbeard.CONFIG_FILE, self.config_version)

        # do the migration, expect a method named _migrate_v<num>
        logger.log(u"Migrating config up to version %s%s" % (next_version, suffix))
        getattr(self, '_migrate_v%s' % next_version)()
        self.config_version = next_version

        # save new config after migration
        sickbeard.CONFIG_VERSION = self.config_version
        logger.log(u"Saving config file to disk")
        sickbeard.save_config()
def saveIRC(self, irc_bot=None, irc_server=None, irc_channel=None, irc_key=None, irc_nick=None):
    """Persist IRC bot settings and redirect back to the index page."""
    results = []

    # checkbox posts the string "on" when ticked
    irc_bot = 1 if irc_bot == "on" else 0

    config.change_IRC_BOT(irc_bot)
    config.change_IRC_SERVER(irc_server)
    config.change_IRC_CHANNEL(irc_channel, irc_key)
    config.change_IRC_NICK(irc_nick)

    sickbeard.save_config()

    if results:
        for cur_result in results:
            logger.log(cur_result, logger.ERROR)
        flash['error'] = 'Error(s) Saving Configuration'
        flash['error-detail'] = "<br />\n".join(results)
    else:
        flash['message'] = 'Configuration Saved'

    raise cherrypy.HTTPRedirect("index")
def execute(self):
    """Seed sickbeard.ROOT_DIRS from the parent directories of existing show locations."""
    dir_results = self.connection.select("SELECT location FROM tv_shows")

    # tally how many shows live under each parent directory
    dir_counts = {}
    for cur_dir in dir_results:
        cur_root_dir = ek.ek(os.path.dirname, ek.ek(os.path.normpath, cur_dir["location"]))
        dir_counts[cur_root_dir] = dir_counts.get(cur_root_dir, 0) + 1

    logger.log(u"Dir counts: "+str(dir_counts), logger.DEBUG)

    if not dir_counts:
        self.incDBVersion()
        return

    # ROOT_DIRS encodes the default as the index of the most common parent dir;
    # keys() and values() of the same dict share ordering, so the index lines up
    counts = dir_counts.values()
    default_root_dir = counts.index(max(counts))

    new_root_dirs = str(default_root_dir)+'|'+'|'.join(dir_counts.keys())
    logger.log(u"Setting ROOT_DIRS to: "+new_root_dirs, logger.DEBUG)

    sickbeard.ROOT_DIRS = new_root_dirs

    sickbeard.save_config()
    self.incDBVersion()
def saveSubtitles(self, use_subtitles=None, subtitles_plugins=None, subtitles_languages=None, subtitles_dir=None,
                  subtitles_perfect_match=None, service_order=None, subtitles_history=None,
                  subtitles_finder_frequency=None, subtitles_multi=None, embedded_subtitles_all=None,
                  subtitles_extra_scripts=None, subtitles_pre_scripts=None, subtitles_hearing_impaired=None,
                  addic7ed_user=None, addic7ed_pass=None, itasa_user=None, itasa_pass=None, legendastv_user=None,
                  legendastv_pass=None, opensubtitles_user=None, opensubtitles_pass=None,
                  subtitles_download_in_pp=None, subtitles_keep_only_wanted=None):
    """
    Save Subtitle Search related settings

    Writes all subtitle configuration values to the sickbeard module,
    persists the config, resets the subtitle provider pool and redirects
    back to the subtitle settings page.
    """
    results = []

    config.change_SUBTITLES_FINDER_FREQUENCY(subtitles_finder_frequency)
    config.change_USE_SUBTITLES(use_subtitles)

    # keep only language codes the subtitle subsystem recognises
    sickbeard.SUBTITLES_LANGUAGES = [code.strip() for code in subtitles_languages.split(',')
                                     if code.strip() in subtitles.subtitle_code_filter()] if subtitles_languages else []
    sickbeard.SUBTITLES_DIR = subtitles_dir
    sickbeard.SUBTITLES_PERFECT_MATCH = config.checkbox_to_value(subtitles_perfect_match)
    sickbeard.SUBTITLES_HISTORY = config.checkbox_to_value(subtitles_history)
    sickbeard.EMBEDDED_SUBTITLES_ALL = config.checkbox_to_value(embedded_subtitles_all)
    sickbeard.SUBTITLES_HEARING_IMPAIRED = config.checkbox_to_value(subtitles_hearing_impaired)
    # more than one wanted language forces multi-language naming regardless of the checkbox
    sickbeard.SUBTITLES_MULTI = 1 if len(sickbeard.SUBTITLES_LANGUAGES) > 1 else config.checkbox_to_value(subtitles_multi)
    sickbeard.SUBTITLES_DOWNLOAD_IN_PP = config.checkbox_to_value(subtitles_download_in_pp)
    sickbeard.SUBTITLES_KEEP_ONLY_WANTED = config.checkbox_to_value(subtitles_keep_only_wanted)
    sickbeard.SUBTITLES_EXTRA_SCRIPTS = [x.strip() for x in subtitles_extra_scripts.split('|') if x.strip()]
    sickbeard.SUBTITLES_PRE_SCRIPTS = [x.strip() for x in subtitles_pre_scripts.split('|') if x.strip()]

    # Subtitles services
    services_str_list = service_order.split()
    subtitles_services_list = []
    subtitles_services_enabled = []
    for curServiceStr in services_str_list:
        # each entry has the form "<service>:<0|1>"
        cur_service, cur_enabled = curServiceStr.split(':')
        subtitles_services_list.append(cur_service)
        subtitles_services_enabled.append(int(cur_enabled))

    sickbeard.SUBTITLES_SERVICES_LIST = subtitles_services_list
    sickbeard.SUBTITLES_SERVICES_ENABLED = subtitles_services_enabled
    sickbeard.ADDIC7ED_USER = addic7ed_user or ''
    sickbeard.ADDIC7ED_PASS = addic7ed_pass or ''
    sickbeard.ITASA_USER = itasa_user or ''
    sickbeard.ITASA_PASS = itasa_pass or ''
    sickbeard.LEGENDASTV_USER = legendastv_user or ''
    sickbeard.LEGENDASTV_PASS = legendastv_pass or ''
    sickbeard.OPENSUBTITLES_USER = opensubtitles_user or ''
    sickbeard.OPENSUBTITLES_PASS = opensubtitles_pass or ''

    sickbeard.save_config()
    # Reset provider pool so next time we use the newest settings
    subtitles.get_provider_pool.invalidate()

    if results:
        for x in results:
            logger.log(x, logger.ERROR)
        ui.notifications.error('Error(s) Saving Configuration', '<br>\n'.join(results))
    else:
        ui.notifications.message('Configuration Saved', ek(os.path.join, sickbeard.CONFIG_FILE))

    return self.redirect('/config/subtitles/')
def _bypassCaptcha(self):
    """Attempt to solve the TorrentDay login reCAPTCHA via anti-captcha.com.

    Clears the stored session identifiers, scrapes the captcha sitekey from
    the login page and submits a solving job.  Returns False on any failure.
    NOTE(review): no success return is visible in this chunk -- presumably
    the solved token is consumed further on; confirm against full source.
    """
    sitekey = None

    # drop stale credentials so a fresh login is forced
    sickbeard.TORRENTDAY_UID = None
    sickbeard.TORRENTDAY_PASS = None
    sickbeard.save_config()

    from lib.python_anticaptcha import AnticaptchaClient, NoCaptchaTaskProxylessTask, AnticaptchaException

    client = AnticaptchaClient(sickbeard.TORRENTDAY_ANTICAPTCHA_KEY)
    logger.log(
        "[{0}] {1} Anti-Captcha.com Balance: {2}".format(
            self.name, self.funcName(), client.getBalance()
        )
    )

    try:
        ret = self.session.get(self.url + '/login.php', verify=False)
        # sitekey is embedded in the recaptcha widget markup of the login page
        sitekey = re.search('data-sitekey="(.+?)"', ret.content).group(1)
    except AttributeError:
        # re.search returned None -- page layout changed or request failed
        logger.log(
            "[{0}] {1} Can't extract sitekey from {2}/login.php.".format(
                self.name, self.funcName(), self.url
            ), logger.ERROR
        )
        return False

    if sitekey is None:
        return False

    logger.log(
        "[{0}] {1} Requesting Anti-Captcha.com Job.".format(
            self.name, self.funcName()
        )
    )
    try:
        task = NoCaptchaTaskProxylessTask(self.url + "/login.php", sitekey)
        job = client.createTask(task)
        # blocks until the remote service solves (or rejects) the captcha
        job.join()
    except AnticaptchaException, e:  # Python 2 'except X, e' syntax, as used module-wide
        logger.log(
            "[{0}] {1} Error Attempting anti-captcha.com job: {2}".format(
                self.name, self.funcName(), e
            )
        )
        return False
def delete_account(account):
    """Revoke the Trakt token for *account* (best effort) and drop it from config.

    Returns True when the account existed and was removed, otherwise False.
    """
    if account not in sickbeard.TRAKT_ACCOUNTS:
        return False

    try:
        TraktAPI().trakt_request('/oauth/revoke', send_oauth=account, method='POST')
    except TraktException:
        # revocation failure is not fatal; the local account is removed regardless
        logger.log('Failed to remove account from trakt.tv')

    sickbeard.TRAKT_ACCOUNTS.pop(account)
    sickbeard.save_config()
    return True
def _check_auth(self, **kwargs):
    """Normalise the configured api key to the canonical 'secret_key=<key>' form.

    Extracts the key portion from whatever the user entered; when the stored
    value differs from the normalised form, rewrites it and saves the config.

    :return: True when a valid secret key is configured
    :raise AuthException: when no secret key can be extracted
    """
    try:
        # raw string fixes the invalid '\s' escape warning on modern Python
        secret_key = 'secret_key=' + re.split(r'secret_key\s*=\s*([0-9a-zA-Z]+)', self.api_key)[1]
    except (StandardError, Exception):
        # split without a match yields a single element -> IndexError lands here
        raise sickbeard.exceptions.AuthException('Invalid secret key for %s in Media Providers/Options' % self.name)

    if secret_key != self.api_key:
        self.api_key = secret_key
        sickbeard.save_config()

    return True
def replace_account(account, token, refresh_token, token_valid_date, refresh):
    """Update stored credentials for a known Trakt account.

    Returns True when the account exists and was updated, False otherwise.
    """
    if account not in sickbeard.TRAKT_ACCOUNTS:
        return False

    acc = sickbeard.TRAKT_ACCOUNTS[account]
    acc.token = token
    acc.refresh_token = refresh_token
    acc.token_valid_date = token_valid_date
    # a plain (non-refresh) replacement also resets the cached display name
    if not refresh:
        acc.reset_name()
    acc.reset_auth_failure()

    sickbeard.save_config()
    return True
def auth_failure(self):
    # Record a failed authentication attempt, throttling both the counter
    # increments and how often the updated state is persisted to config.
    if self.auth_fail < self.max_auth_fail:
        if self.last_fail:
            time_diff = datetime.datetime.now() - self.last_fail
            if self.auth_fail % 3 == 0:
                # every third failure only counts again after a full day has passed
                if time_diff > datetime.timedelta(days=1):
                    self.inc_auth_failure()
                    sickbeard.save_config()
            elif time_diff > datetime.timedelta(minutes=15):
                # otherwise a 15 minute cool-down applies between counted failures
                self.inc_auth_failure()
                # persist only when the cap is reached or failures span 6+ hours;
                # NOTE(review): intermediate counts are deliberately not saved -- confirm intended
                if self.auth_fail == self.max_auth_fail or time_diff > datetime.timedelta(hours=6):
                    sickbeard.save_config()
        else:
            # first recorded failure; NOTE(review): not persisted here -- confirm intended
            self.inc_auth_failure()
def saveGeneral(self, log_dir=None, web_port=None, web_log=None, web_ipv6=None, launch_browser=None,
                web_username=None, web_password=None, version_notify=None):
    """Persist general web/server settings and redirect to the general config page."""
    results = []

    def _onoff(value):
        # checkboxes post the string "on" when ticked
        return 1 if value == "on" else 0

    web_ipv6 = _onoff(web_ipv6)
    web_log = _onoff(web_log)
    launch_browser = _onoff(launch_browser)
    version_notify = _onoff(version_notify)

    if not config.change_LOG_DIR(log_dir):
        results += ["Unable to create directory " + os.path.normpath(log_dir) + ", log dir not changed."]

    sickbeard.LAUNCH_BROWSER = launch_browser
    sickbeard.WEB_PORT = int(web_port)
    sickbeard.WEB_IPV6 = web_ipv6
    sickbeard.WEB_LOG = web_log
    sickbeard.WEB_USERNAME = web_username
    sickbeard.WEB_PASSWORD = web_password

    config.change_VERSION_NOTIFY(version_notify)

    sickbeard.save_config()

    if results:
        for cur_result in results:
            logger.log(cur_result, logger.ERROR)
        ui.notifications.error('Error(s) Saving Configuration', '<br />\n'.join(results))
    else:
        ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE))

    redirect("/config/general/")
def saveNotifications(self, xbmc_notify_onsnatch=None, xbmc_notify_ondownload=None, xbmc_update_library=None,
                      xbmc_host=None, xbmc_username=None, xbmc_password=None, use_growl=None, growl_host=None,
                      growl_password=None):
    """Persist XBMC and Growl notification settings, then redirect back."""
    results = []

    def _onoff(value):
        # checkboxes post the string "on" when ticked
        return 1 if value == "on" else 0

    sickbeard.XBMC_NOTIFY_ONSNATCH = _onoff(xbmc_notify_onsnatch)
    sickbeard.XBMC_NOTIFY_ONDOWNLOAD = _onoff(xbmc_notify_ondownload)
    sickbeard.XBMC_UPDATE_LIBRARY = _onoff(xbmc_update_library)
    sickbeard.XBMC_HOST = xbmc_host
    sickbeard.XBMC_USERNAME = xbmc_username
    sickbeard.XBMC_PASSWORD = xbmc_password

    sickbeard.USE_GROWL = _onoff(use_growl)
    sickbeard.GROWL_HOST = growl_host
    sickbeard.GROWL_PASSWORD = growl_password

    sickbeard.save_config()

    if results:
        for cur_result in results:
            logger.log(cur_result, logger.ERROR)
        flash.error('Error(s) Saving Configuration', '<br />\n'.join(results))
    else:
        flash.message('Configuration Saved')

    redirect("/config/notifications/")
def execute(self):
    """Shift composite quality bitmasks to the new 1080p-aware layout.

    Older variant of the quality migration: rewrites the default quality,
    each show's quality (mapping the old HD/ANY templates to their new
    equivalents), episode statuses and the history action/quality columns,
    issuing one UPDATE per affected row.
    """
    backupDatabase(self.checkDBVersion())

    # update the default quality so we dont grab the wrong qualities after migration
    sickbeard.QUALITY_DEFAULT = self._update_composite_qualities(sickbeard.QUALITY_DEFAULT)
    sickbeard.save_config()

    # upgrade previous HD to HD720p -- shift previous qualities to new placevalues
    old_hd = common.Quality.combineQualities(
        [common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3], [])
    new_hd = common.Quality.combineQualities(
        [common.Quality.HDTV, common.Quality.HDWEBDL, common.Quality.HDBLURAY], [])

    # update ANY -- shift existing qualities and add new 1080p qualities, note that rawHD was not added to the ANY template
    old_any = common.Quality.combineQualities(
        [common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.HDWEBDL >> 2,
         common.Quality.HDBLURAY >> 3, common.Quality.UNKNOWN], [])
    new_any = common.Quality.combineQualities(
        [common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.FULLHDTV,
         common.Quality.HDWEBDL, common.Quality.FULLHDWEBDL, common.Quality.HDBLURAY, common.Quality.FULLHDBLURAY,
         common.Quality.UNKNOWN], [])

    # update qualities (including templates)
    shows = self.connection.select("SELECT * FROM tv_shows")
    for cur_show in shows:
        if cur_show["quality"] == old_hd:
            new_quality = new_hd
        elif cur_show["quality"] == old_any:
            new_quality = new_any
        else:
            new_quality = self._update_composite_qualities(cur_show["quality"])
        self.connection.action("UPDATE tv_shows SET quality = ? WHERE tvdb_id = ?",
                               [new_quality, cur_show["tvdb_id"]])

    # update status that are are within the old hdwebdl (1<<3 which is 8) and better -- exclude unknown (1<<15 which is 32768)
    episodes = self.connection.select("SELECT * FROM tv_episodes WHERE status/100 < 32768 AND status/100 >= 8")
    for cur_episode in episodes:
        self.connection.action("UPDATE tv_episodes SET status = ? WHERE episode_id = ?",
                               [self._update_status(cur_episode["status"]), cur_episode["episode_id"]])

    # make two seperate passes through the history since snatched and downloaded (action & quality) may not always coordinate together

    # update previous history so it shows the correct action
    historyAction = self.connection.select("SELECT * FROM history WHERE action/100 < 32768 AND action/100 >= 8")
    for cur_entry in historyAction:
        self.connection.action("UPDATE history SET action = ? WHERE showid = ? AND date = ?",
                               [self._update_status(cur_entry["action"]), cur_entry["showid"], cur_entry["date"]])

    # update previous history so it shows the correct quality
    historyQuality = self.connection.select("SELECT * FROM history WHERE quality < 32768 AND quality >= 8")
    for cur_entry in historyQuality:
        self.connection.action("UPDATE history SET quality = ? WHERE showid = ? AND date = ?",
                               [self._update_quality(cur_entry["quality"]), cur_entry["showid"], cur_entry["date"]])

    self.incDBVersion()
def saveAddShowDefaults(defaultStatus, anyQualities, bestQualities, defaultFlattenFolders, subtitles=False,
                        anime=False, scene=False, defaultStatusAfter=WANTED):
    """Persist the defaults applied when adding a new show."""

    def _split_ids(csv):
        # comma-separated quality ids -> list of ints; empty input -> empty list
        return [int(quality) for quality in csv.split(',')] if csv else []

    new_quality = Quality.combineQualities(_split_ids(anyQualities), _split_ids(bestQualities))

    sickbeard.STATUS_DEFAULT = int(defaultStatus)
    sickbeard.STATUS_DEFAULT_AFTER = int(defaultStatusAfter)
    sickbeard.QUALITY_DEFAULT = int(new_quality)

    # checkbox-style defaults share one conversion path
    for name, flag in (('FLATTEN_FOLDERS_DEFAULT', defaultFlattenFolders),
                       ('SUBTITLES_DEFAULT', subtitles),
                       ('ANIME_DEFAULT', anime),
                       ('SCENE_DEFAULT', scene)):
        setattr(sickbeard, name, config.checkbox_to_value(flag))

    sickbeard.save_config()
def _authorised(self, **kwargs):
    # Authenticate against speed.cd: first try the previously captured cookie
    # digest, then fall back to a full credential login that re-captures it.
    result = False
    if self.digest:
        # digest is stored as 'inSpeed_speedian=<value>'; normalise the value part
        # (rpartition on the reversed string isolates the final '=' segment)
        digest = [x[::-1] for x in self.digest[::-1].rpartition('=')]
        self.digest = digest[2] + digest[1] + quote(unquote(digest[0]))
        params = dict(
            logged_in=(lambda y='': all(
                # clear any stale domain cookies before validating the digest cookie
                [self.session.cookies.get_dict(domain='.speed.cd') and
                 self.session.cookies.clear('.speed.cd') is None or True] +
                ['RSS' in y, 'type="password"' not in y, self.has_all_cookies(['speedian'], 'inSpeed_')] +
                # session cookie must match the stored digest ('sg!no!pw' is a never-matching sentinel)
                [(self.session.cookies.get('inSpeed_' + c) or 'sg!no!pw') in self.digest for c in ['speedian']])),
            failed_msg=(lambda y=None: None), post_params={'login': False})
        result = super(SpeedCDProvider, self)._authorised(**params)

    if not result and not self.failure_count:
        # cookie-based login failed (or no digest yet): do a credential login
        if self.digest:
            # end the stale session server-side before discarding the digest
            self.get_url('%slogout.php' % self.url_base, skip_auth=True,
                         post_data={'submit.x': 24, 'submit.y': 11})
        self.digest = ''
        params = dict(
            logged_in=(lambda y='': all(
                [self.session.cookies.get_dict(domain='.speed.cd') and
                 self.session.cookies.clear('.speed.cd') is None or True] +
                [bool(y), not re.search('(?i)type="password"', y)] +
                [re.search('(?i)Logout', y) or not self.digest or
                 (self.session.cookies.get('inSpeed_speedian') or 'sg!no!pw') in self.digest])),
            failed_msg=(lambda y='': (
                re.search(r'(?i)(username|password)((<[^>]+>)|\W)*' + '(or|and|/|\s)((<[^>]+>)|\W)*(password|incorrect)', y) and
                u'Invalid username or password for %s. Check settings' or
                u'Failed to authenticate or parse a response from %s, abort provider')),
            post_params={'form_tmpl': True})
        # temporarily point the login action at the credential-form endpoint
        self.urls['login_action'] = self.urls.get('do_login')
        session = super(SpeedCDProvider, self)._authorised(session=None, resp_sess=True, **params)
        self.urls['login_action'] = None
        if session:
            # capture the fresh session cookie as the new digest and persist it
            self.digest = 'inSpeed_speedian=%s' % session.cookies.get('inSpeed_speedian')
            sickbeard.save_config()
            result = True
            logger.log('Cookie details for %s updated.' % self.name, logger.DEBUG)
        elif not self.failure_count:
            logger.log('Invalid cookie details for %s and login failed. Check settings' % self.name, logger.ERROR)
    return result
def migrate_config(self):
    """
    Calls each successive migration until the config is the same version as SB expects
    """
    if self.config_version > self.expected_config_version:
        logger.log_error_and_exit(
            u"Your config version (%s) has been incremented past what this version of SickRage supports (%s).\n"
            "If you have used other forks or a newer version of SickRage, your config file may be unusable due to their modifications."
            % (self.config_version, self.expected_config_version))

    sickbeard.CONFIG_VERSION = self.config_version

    while self.config_version < self.expected_config_version:
        next_version = self.config_version + 1

        # optional human-readable label for the upcoming migration
        suffix = ''
        if next_version in self.migration_names:
            suffix = ': ' + self.migration_names[next_version]

        logger.log(u"Backing up config before upgrade")
        if not helpers.backupVersionedFile(sickbeard.CONFIG_FILE, self.config_version):
            logger.log_error_and_exit(u"Config backup failed, abort upgrading config")
        logger.log(u"Proceeding with upgrade")

        # do the migration, expect a method named _migrate_v<num>
        logger.log(u"Migrating config up to version %s%s" % (next_version, suffix))
        getattr(self, '_migrate_v%s' % next_version)()
        self.config_version = next_version

        # save new config after migration
        sickbeard.CONFIG_VERSION = self.config_version
        logger.log(u"Saving config file to disk")
        sickbeard.save_config()
def execute(self):
    """Enable NZB/torrent searching based on which provider types are enabled.

    Fix: the original loop broke out at the first enabled provider, so only
    one of the two flags could ever be switched on even when both provider
    types were enabled.
    """
    use_torrents = False
    use_nzbs = False

    for cur_provider in sickbeard.providers.sortedProviderList():
        if cur_provider.isEnabled():
            # only log the first enabled provider of each type
            if cur_provider.providerType == GenericProvider.NZB and not use_nzbs:
                use_nzbs = True
                logger.log(u"Provider " + cur_provider.name + " is enabled, enabling NZBs in the upgrade")
            elif cur_provider.providerType == GenericProvider.TORRENT and not use_torrents:
                use_torrents = True
                logger.log(u"Provider " + cur_provider.name + " is enabled, enabling Torrents in the upgrade")
            # stop early once both provider types have been seen
            if use_nzbs and use_torrents:
                break

    sickbeard.USE_TORRENTS = use_torrents
    sickbeard.USE_NZBS = use_nzbs
    sickbeard.save_config()

    self.incDBVersion()
def saveGeneral(self, log_dir=None, web_port=None, web_log=None, launch_browser=None, create_metadata=None,
                web_username=None, web_password=None, quality_default=None, season_folders_default=None,
                version_notify=None, naming_show_name=None, naming_ep_type=None, naming_multi_ep_type=None):
    """Persist general settings (web, naming, defaults) and redirect back."""
    results = []

    def _onoff(value):
        # checkboxes post the string "on" when ticked
        return 1 if value == "on" else 0

    web_log = _onoff(web_log)
    launch_browser = _onoff(launch_browser)
    create_metadata = _onoff(create_metadata)
    season_folders_default = _onoff(season_folders_default)
    version_notify = _onoff(version_notify)
    naming_show_name = _onoff(naming_show_name)

    if not config.change_LOG_DIR(log_dir):
        results += ["Unable to create directory " + os.path.normpath(log_dir) + ", log dir not changed."]

    sickbeard.LAUNCH_BROWSER = launch_browser
    sickbeard.CREATE_METADATA = create_metadata
    sickbeard.SEASON_FOLDERS_DEFAULT = int(season_folders_default)
    sickbeard.QUALITY_DEFAULT = int(quality_default)

    sickbeard.NAMING_SHOW_NAME = naming_show_name
    sickbeard.NAMING_EP_TYPE = int(naming_ep_type)
    sickbeard.NAMING_MULTI_EP_TYPE = int(naming_multi_ep_type)

    sickbeard.WEB_PORT = int(web_port)
    sickbeard.WEB_LOG = web_log
    sickbeard.WEB_USERNAME = web_username
    sickbeard.WEB_PASSWORD = web_password

    config.change_VERSION_NOTIFY(version_notify)

    sickbeard.save_config()

    if results:
        for cur_result in results:
            logger.log(cur_result, logger.ERROR)
        flash.error('Error(s) Saving Configuration', '<br />\n'.join(results))
    else:
        flash.message('Configuration Saved')

    redirect("/config/general/")
def saveGeneral(self, log_dir=None, log_nr=5, log_size=1, web_port=None, notify_on_login=None, web_log=None,
                encryption_version=None, web_ipv6=None, trash_remove_show=None, trash_rotate_logs=None,
                update_frequency=None, skip_removed_files=None, indexerDefaultLang='en',
                ep_default_deleted_status=None, launch_browser=None, showupdate_hour=3, web_username=None,
                api_key=None, indexer_default=None, timezone_display=None, cpu_preset='NORMAL',
                web_password=None, version_notify=None, enable_https=None, https_cert=None, https_key=None,
                handle_reverse_proxy=None, sort_article=None, auto_update=None, notify_on_update=None,
                proxy_setting=None, proxy_indexers=None, anon_redirect=None, git_path=None, git_remote=None,
                calendar_unprotected=None, calendar_icons=None, debug=None, ssl_verify=None, no_restart=None,
                coming_eps_missed_range=None, fuzzy_dating=None, trim_zero=None, date_preset=None,
                date_preset_na=None, time_preset=None, indexer_timeout=None, download_url=None, rootDir=None,
                theme_name=None, default_page=None, git_reset=None, git_username=None, git_password=None,
                display_all_seasons=None, subliminal_log=None, privacy_level='normal'):
    """Persist the 'General' configuration settings submitted from the web UI.

    Checkbox inputs arrive as strings and are normalised via
    config.checkbox_to_value(); directory/certificate changes go through the
    config.change_* helpers, which report failure instead of raising.
    Collected error messages are logged and shown to the user; otherwise a
    'Configuration Saved' notification is displayed.  Always redirects back
    to /config/general/.

    NOTE(review): date_preset_na, rootDir and git_reset are accepted but
    never used in this body - presumably kept for form compatibility.
    """
    results = []

    # Misc
    sickbeard.DOWNLOAD_URL = download_url
    sickbeard.INDEXER_DEFAULT_LANGUAGE = indexerDefaultLang
    sickbeard.EP_DEFAULT_DELETED_STATUS = ep_default_deleted_status
    sickbeard.SKIP_REMOVED_FILES = config.checkbox_to_value(skip_removed_files)
    sickbeard.LAUNCH_BROWSER = config.checkbox_to_value(launch_browser)
    config.change_SHOWUPDATE_HOUR(showupdate_hour)
    config.change_VERSION_NOTIFY(config.checkbox_to_value(version_notify))
    sickbeard.AUTO_UPDATE = config.checkbox_to_value(auto_update)
    sickbeard.NOTIFY_ON_UPDATE = config.checkbox_to_value(notify_on_update)
    # sickbeard.LOG_DIR is set in config.change_LOG_DIR()
    sickbeard.LOG_NR = log_nr
    sickbeard.LOG_SIZE = float(log_size)

    sickbeard.TRASH_REMOVE_SHOW = config.checkbox_to_value(trash_remove_show)
    sickbeard.TRASH_ROTATE_LOGS = config.checkbox_to_value(trash_rotate_logs)
    config.change_UPDATE_FREQUENCY(update_frequency)
    # NOTE(review): LAUNCH_BROWSER was already assigned above - this second
    # assignment is redundant but harmless.
    sickbeard.LAUNCH_BROWSER = config.checkbox_to_value(launch_browser)
    sickbeard.SORT_ARTICLE = config.checkbox_to_value(sort_article)
    sickbeard.CPU_PRESET = cpu_preset
    sickbeard.ANON_REDIRECT = anon_redirect
    sickbeard.PROXY_SETTING = proxy_setting
    sickbeard.PROXY_INDEXERS = config.checkbox_to_value(proxy_indexers)
    sickbeard.GIT_USERNAME = git_username
    sickbeard.GIT_PASSWORD = git_password
    # sickbeard.GIT_RESET = config.checkbox_to_value(git_reset)
    # Force GIT_RESET
    sickbeard.GIT_RESET = 1
    sickbeard.GIT_PATH = git_path
    sickbeard.GIT_REMOTE = git_remote
    sickbeard.CALENDAR_UNPROTECTED = config.checkbox_to_value(calendar_unprotected)
    sickbeard.CALENDAR_ICONS = config.checkbox_to_value(calendar_icons)
    sickbeard.NO_RESTART = config.checkbox_to_value(no_restart)

    sickbeard.DEBUG = config.checkbox_to_value(debug)
    sickbeard.SSL_VERIFY = config.checkbox_to_value(ssl_verify)
    # sickbeard.LOG_DIR is set in config.change_LOG_DIR()
    sickbeard.COMING_EPS_MISSED_RANGE = try_int(coming_eps_missed_range, 7)
    sickbeard.DISPLAY_ALL_SEASONS = config.checkbox_to_value(display_all_seasons)
    sickbeard.NOTIFY_ON_LOGIN = config.checkbox_to_value(notify_on_login)
    sickbeard.WEB_PORT = try_int(web_port)
    sickbeard.WEB_IPV6 = config.checkbox_to_value(web_ipv6)
    # sickbeard.WEB_LOG is set in config.change_LOG_DIR()
    # Checkbox maps to ENCRYPTION_VERSION 2 (on) or 0 (off).
    if config.checkbox_to_value(encryption_version) == 1:
        sickbeard.ENCRYPTION_VERSION = 2
    else:
        sickbeard.ENCRYPTION_VERSION = 0
    sickbeard.WEB_USERNAME = web_username
    sickbeard.WEB_PASSWORD = web_password

    # Reconfigure the logger only if subliminal setting changed
    if sickbeard.SUBLIMINAL_LOG != config.checkbox_to_value(subliminal_log):
        logger.reconfigure_levels()
    sickbeard.SUBLIMINAL_LOG = config.checkbox_to_value(subliminal_log)

    sickbeard.PRIVACY_LEVEL = privacy_level.lower()

    sickbeard.FUZZY_DATING = config.checkbox_to_value(fuzzy_dating)
    sickbeard.TRIM_ZERO = config.checkbox_to_value(trim_zero)

    # Optional fields: only overwrite the stored value when supplied.
    if date_preset:
        sickbeard.DATE_PRESET = date_preset

    if indexer_default:
        sickbeard.INDEXER_DEFAULT = try_int(indexer_default)

    if indexer_timeout:
        sickbeard.INDEXER_TIMEOUT = try_int(indexer_timeout)

    if time_preset:
        sickbeard.TIME_PRESET_W_SECONDS = time_preset
        # Display variant without the seconds component.
        sickbeard.TIME_PRESET = sickbeard.TIME_PRESET_W_SECONDS.replace(u':%S', u'')

    sickbeard.TIMEZONE_DISPLAY = timezone_display

    if not config.change_LOG_DIR(log_dir, web_log):
        results += ['Unable to create directory {dir}, '
                    'log directory not changed.'.format(dir=ek(os.path.normpath, log_dir))]

    sickbeard.API_KEY = api_key

    sickbeard.ENABLE_HTTPS = config.checkbox_to_value(enable_https)

    if not config.change_HTTPS_CERT(https_cert):
        results += ['Unable to create directory {dir}, '
                    'https cert directory not changed.'.format(dir=ek(os.path.normpath, https_cert))]

    if not config.change_HTTPS_KEY(https_key):
        results += ['Unable to create directory {dir}, '
                    'https key directory not changed.'.format(dir=ek(os.path.normpath, https_key))]

    sickbeard.HANDLE_REVERSE_PROXY = config.checkbox_to_value(handle_reverse_proxy)

    sickbeard.THEME_NAME = theme_name
    sickbeard.DEFAULT_PAGE = default_page

    sickbeard.save_config()

    if results:
        for x in results:
            logger.log(x, logger.ERROR)
        ui.notifications.error('Error(s) Saving Configuration', '<br>\n'.join(results))
    else:
        ui.notifications.message('Configuration Saved', ek(os.path.join, sickbeard.CONFIG_FILE))

    return self.redirect('/config/general/')
def execute(self):
    """Normalize the SABnzbd host URL by stripping a trailing '/sabnzbd/' path.

    A host configured as 'http://host:port/sabnzbd/' is rewritten to
    'http://host:port/'; the config is only re-saved when a change was made.
    Always bumps the DB version.
    """
    if sickbeard.SAB_HOST.endswith('/sabnzbd/'):
        # Bug fix: str.replace() rewrote EVERY '/sabnzbd/' occurrence in the
        # URL, not just the trailing segment the endswith() check targets.
        # Strip only the suffix instead.
        sickbeard.SAB_HOST = sickbeard.SAB_HOST[:-len('sabnzbd/')]
        sickbeard.save_config()
    self.incDBVersion()
def index(self, limit=None):
    """Render the history page, grouping repeated actions per episode.

    Persists the requested row limit, fetches the raw history rows, and
    builds a 'compact' view where all actions for the same
    (show, season, episode, quality) are collected under one entry with
    their actions sorted newest-first.
    """
    sickbeard.HISTORY_LIMIT = limit = try_int(limit or sickbeard.HISTORY_LIMIT or 100, 100)
    sickbeard.save_config()

    compact = []
    data = self.history.get(limit)

    # Perf fix: the previous version scanned the whole compact list twice per
    # row (an any() check plus an index-finding comprehension), which is
    # O(n^2) over the history size. A dict keyed on the grouping tuple gives
    # the same grouping and ordering in O(n).
    grouped = {}
    for row in data:
        action = {
            'action': row[b'action'],
            'provider': row[b'provider'],
            'resource': row[b'resource'],
            'time': row[b'date']
        }
        key = (row[b'show_id'], row[b'season'], row[b'episode'], row[b'quality'])
        history = grouped.get(key)
        if history is None:
            history = {
                'actions': [action],
                'episode': row[b'episode'],
                'quality': row[b'quality'],
                'resource': row[b'resource'],
                'season': row[b'season'],
                'show_id': row[b'show_id'],
                'show_name': row[b'show_name']
            }
            grouped[key] = history
            compact.append(history)
        else:
            history['actions'].append(action)
            # Keep the per-episode action list newest-first, as before.
            history['actions'].sort(key=lambda x: x['time'], reverse=True)

    t = PageTemplate(rh=self, filename="history.mako")
    submenu = [
        {
            'title': _('Remove Selected'),
            'path': 'history/removeHistory',
            'icon': 'fa fa-eraser',
            'class': 'removehistory',
            'confirm': False
        },
        {
            'title': _('Clear History'),
            'path': 'history/clearHistory',
            'icon': 'fa fa-trash',
            'class': 'clearhistory',
            'confirm': True
        },
        {
            'title': _('Trim History'),
            'path': 'history/trimHistory',
            'icon': 'fa fa-scissors',
            'class': 'trimhistory',
            'confirm': True
        },
    ]

    return t.render(historyResults=data, compactResults=compact, limit=limit, submenu=submenu,
                    title=_('History'), header=_('History'), topmenu="history",
                    controller="history", action="index")
def _authorised(self, logged_in=None, post_params=None, failed_msg=None, url=None, timeout=30):
    """Log in to the torrent provider, returning True on success.

    logged_in/failed_msg default to a cookie-presence check and generic
    error messages; callers may override either with provider-specific
    callables. Handles digest/cookie auth, scraping the login form for its
    action target and hidden fields, and wipes the stored password when the
    site reports the account is close to lock-out.

    NOTE(review): this body relies heavily on `and/or` short-circuit chains
    and exact statement order - kept verbatim, only comments added.
    """
    # Detects a "N attempts/tries remaining" warning in a response body.
    maxed_out = (lambda y: re.search(r'(?i)[1-3]((<[^>]+>)|\W)*' +
                                     '(attempts|tries|remain)[\W\w]{,40}?(remain|left|attempt)', y))
    # Fill in defaults for the two callables when the caller passed None.
    logged_in, failed_msg = [None is not a and a or b for (a, b) in (
        (logged_in, (lambda y=None: self.has_all_cookies())),
        (failed_msg, (lambda y='': maxed_out(y) and
                      u'Urgent abort, running low on login attempts. ' +
                      u'Password flushed to prevent service disruption to %s.' or
                      (re.search(r'(?i)(username|password)((<[^>]+>)|\W)*' +
                                 '(or|and|/|\s)((<[^>]+>)|\W)*(password|incorrect)', y) and
                       u'Invalid username or password for %s. Check settings' or
                       u'Failed to authenticate or parse a response from %s, abort provider')))
    )]

    # Already logged in (and the provider has a usable urls map): done.
    if logged_in() and (not hasattr(self, 'urls') or bool(len(getattr(self, 'urls')))):
        return True

    if not self._valid_home():
        return False

    if hasattr(self, 'digest'):
        # Cookie/digest auth: normalise the pasted digest string into cookies.
        self.cookies = re.sub(r'(?i)([\s\']+|cookie\s*:)', '', self.digest)
        success, msg = self._check_cookie()
        if not success:
            self.cookies = None
            logger.log(u'%s: [%s]' % (msg, self.cookies), logger.WARNING)
            return False
    elif not self._check_auth():
        return False

    # A list of urls means "visit these first" (e.g. to pick up session cookies).
    if isinstance(url, type([])):
        for i in range(0, len(url)):
            helpers.getURL(url.pop(), session=self.session)

    # NOTE(review): userfield is assigned but never read afterwards.
    passfield, userfield = None, None
    if not url:
        if hasattr(self, 'urls'):
            url = self.urls.get('login_action')
            if url:
                # Fetch the login page and scrape the form for its action
                # target and any extra input fields to submit back.
                response = helpers.getURL(url, session=self.session)
                if None is response:
                    return False
                try:
                    post_params = isinstance(post_params, type({})) and post_params or {}
                    # 'form_tmpl' may be True (match a form containing 'login')
                    # or a string pattern identifying the form to scrape.
                    form = 'form_tmpl' in post_params and post_params.pop('form_tmpl')
                    if form:
                        form = re.findall(
                            '(?is)(<form[^>]+%s.*?</form>)' % (True is form and 'login' or form), response)
                        response = form and form[0] or response

                    action = re.findall('<form[^>]+action=[\'"]([^\'"]*)', response)[0]
                    # Resolve the form action against the login url / site base.
                    url = action if action.startswith('http') else \
                        url if not action else \
                        (url + action) if action.startswith('?') else \
                        (self.urls.get('login_base') or
                         self.urls['config_provider_home_uri']) + action.lstrip('/')

                    # Collect input fields; remember which one is the password
                    # field and pre-fill all non-credential fields as-is.
                    tags = re.findall(r'(?is)(<input[^>]*?name=[\'"][^\'"]+[^>]*)', response)
                    attrs = [[(re.findall(r'(?is)%s=[\'"]([^\'"]+)' % attr, x) or [''])[0]
                              for attr in ['type', 'name', 'value']] for x in tags]
                    for itype, name, value in attrs:
                        if 'password' in [itype, name]:
                            passfield = name
                        if name not in ('username', 'password') and 'password' != itype:
                            post_params.setdefault(name, value)
                except KeyError:
                    # No 'config_provider_home_uri' (or similar) - fall back to
                    # the base class implementation.
                    return super(TorrentProvider, self)._authorised()
            else:
                url = self.urls.get('login')
        if not url:
            return super(TorrentProvider, self)._authorised()

    if hasattr(self, 'username') and hasattr(self, 'password'):
        if not post_params:
            post_params = dict(username=self.username, password=self.password)
        elif isinstance(post_params, type({})):
            # Inject credentials only if not already present in the params.
            if self.username not in post_params.values():
                post_params['username'] = self.username
            if self.password not in post_params.values():
                post_params[(passfield, 'password')[not passfield]] = self.password

    response = helpers.getURL(url, post_data=post_params, session=self.session, timeout=timeout)
    if response:
        if logged_in(response):
            return True

        # Account close to lock-out: clear the password so we stop retrying.
        if maxed_out(response) and hasattr(self, 'password'):
            self.password = None
            sickbeard.save_config()
        logger.log(failed_msg(response) % self.name, logger.ERROR)
    return False
def saveEpisodeDownloads(self, nzb_dir=None, sab_username=None, sab_password=None, sab_apikey=None,
                         sab_category=None, sab_host=None, use_nzb=None, use_torrent=None, torrent_dir=None,
                         nzb_method=None, usenet_retention=None, search_frequency=None,
                         backlog_search_frequency=None, tv_download_dir=None, keep_processed_dir=None,
                         process_automatically=None, rename_episodes=None, keep_processed_file=None):
    """Persist the episode download / post-processing settings from the web UI.

    Checkbox fields arrive as the string "on" when ticked; directory changes
    go through the config.change_* helpers which report failure instead of
    raising. Errors are flashed to the user; always redirects back to
    /config/episodedownloads/.
    """
    results = []

    # Try each directory change; collect a message for any that failed.
    for changed, path in ((config.change_TV_DOWNLOAD_DIR(tv_download_dir), tv_download_dir),
                          (config.change_NZB_DIR(nzb_dir), nzb_dir),
                          (config.change_TORRENT_DIR(torrent_dir), torrent_dir)):
        if not changed:
            results.append("Unable to create directory " + os.path.normpath(path) + ", dir not changed.")

    config.change_SEARCH_FREQUENCY(search_frequency)
    config.change_BACKLOG_SEARCH_FREQUENCY(backlog_search_frequency)

    # Normalise "on"/None checkbox strings into 0/1 integer flags.
    process_automatically = int(process_automatically == "on")
    rename_episodes = int(rename_episodes == "on")
    keep_processed_dir = int(keep_processed_dir == "on")
    keep_processed_file = int(keep_processed_file == "on")
    use_nzb = int(use_nzb == "on")
    use_torrent = int(use_torrent == "on")

    if usenet_retention is None:
        usenet_retention = 200

    sickbeard.PROCESS_AUTOMATICALLY = process_automatically
    sickbeard.KEEP_PROCESSED_DIR = keep_processed_dir
    sickbeard.KEEP_PROCESSED_FILE = keep_processed_file
    sickbeard.RENAME_EPISODES = rename_episodes

    sickbeard.NZB_METHOD = nzb_method
    sickbeard.USENET_RETENTION = int(usenet_retention)
    sickbeard.SEARCH_FREQUENCY = int(search_frequency)

    sickbeard.USE_NZB = use_nzb
    sickbeard.USE_TORRENT = use_torrent

    sickbeard.SAB_USERNAME = sab_username
    sickbeard.SAB_PASSWORD = sab_password
    sickbeard.SAB_APIKEY = sab_apikey
    sickbeard.SAB_CATEGORY = sab_category
    sickbeard.SAB_HOST = sab_host

    sickbeard.save_config()

    if results:
        for message in results:
            logger.log(message, logger.ERROR)
        flash.error('Error(s) Saving Configuration', '<br />\n'.join(results))
    else:
        flash.message('Configuration Saved')

    redirect("/config/episodedownloads/")
def __init__(self, options=None):
    """Tornado web-server thread.

    Normalises server options, validates/creates HTTPS cert and key files
    (disabling HTTPS on failure), then builds the tornado Application and
    registers every URL handler for the web interface.

    :param options: dict of server options; must contain 'data_root' and the
        'enable_https'/'https_cert'/'https_key' keys. Missing common keys
        (port, host, credentials, web_root) get defaults.
    """
    threading.Thread.__init__(self)
    self.daemon = True
    self.alive = True
    self.name = 'TORNADO'
    self.io_loop = None
    self.server = None

    self.options = options or {}
    self.options.setdefault('port', 8081)
    self.options.setdefault('host', '0.0.0.0')
    self.options.setdefault('log_dir', None)
    self.options.setdefault('username', '')
    self.options.setdefault('password', '')
    self.options.setdefault('web_root', None)
    assert isinstance(self.options['port'], int)
    assert 'data_root' in self.options

    # web root: normalise to '/name' form, or '' when unset.
    self.options['web_root'] = ('/' + self.options['web_root'].lstrip('/')) if self.options['web_root'] else ''

    # tornado setup
    self.enable_https = self.options['enable_https']
    self.https_cert = self.options['https_cert']
    self.https_key = self.options['https_key']

    if self.enable_https:
        make_cert = False
        update_cfg = False
        for (attr, ext) in [('https_cert', '.crt'), ('https_key', '.key')]:
            ssl_path = getattr(self, attr, None)
            if ssl_path and not os.path.isfile(ssl_path):
                if not ssl_path.endswith(ext):
                    # Path looks like a directory: place a default
                    # 'server.crt' / 'server.key' inside it and record the
                    # new filename back on the sickbeard module.
                    setattr(self, attr, os.path.join(ssl_path, 'server%s' % ext))
                    setattr(sickbeard, attr.upper(), 'server%s' % ext)
                make_cert = True

        # If either the HTTPS certificate or key do not exist, make some self-signed ones.
        if make_cert:
            if not create_https_certificates(self.https_cert, self.https_key):
                logger.log(u'Unable to create CERT/KEY files, disabling HTTPS')
                # Only mark the config dirty if ENABLE_HTTPS actually changes.
                update_cfg |= False is not sickbeard.ENABLE_HTTPS
                sickbeard.ENABLE_HTTPS = False
                self.enable_https = False
            else:
                update_cfg = True

        if not (os.path.isfile(self.https_cert) and os.path.isfile(self.https_key)):
            logger.log(u'Disabled HTTPS because of missing CERT and KEY files', logger.WARNING)
            update_cfg |= False is not sickbeard.ENABLE_HTTPS
            sickbeard.ENABLE_HTTPS = False
            self.enable_https = False

        if update_cfg:
            sickbeard.save_config()

    # Load the app
    self.app = Application([],
                           debug=True,
                           serve_traceback=True,
                           autoreload=False,
                           compress_response=True,
                           cookie_secret=sickbeard.COOKIE_SECRET,
                           xsrf_cookies=True,
                           login_url='%s/login/' % self.options['web_root'])

    re_host_pattern = re_valid_hostname()

    # webui login/logout handlers
    self.app.add_handlers(re_host_pattern, [
        (r'%s/login(/?)' % self.options['web_root'], webserve.LoginHandler),
        (r'%s/logout(/?)' % self.options['web_root'], webserve.LogoutHandler),
    ])

    # Web calendar handler (Needed because option Unprotected calendar)
    self.app.add_handlers(re_host_pattern, [
        (r'%s/calendar' % self.options['web_root'], webserve.CalendarHandler),
    ])

    # Static File Handlers
    self.app.add_handlers(re_host_pattern, [
        # favicon
        (r'%s/(favicon\.ico)' % self.options['web_root'], webserve.BaseStaticFileHandler,
         {'path': os.path.join(self.options['data_root'], 'images/ico/favicon.ico')}),
        # images
        (r'%s/images/(.*)' % self.options['web_root'], webserve.BaseStaticFileHandler,
         {'path': os.path.join(self.options['data_root'], 'images')}),
        # cached images
        (r'%s/cache/images/(.*)' % self.options['web_root'], webserve.BaseStaticFileHandler,
         {'path': os.path.join(sickbeard.CACHE_DIR, 'images')}),
        # css
        (r'%s/css/(.*)' % self.options['web_root'], webserve.BaseStaticFileHandler,
         {'path': os.path.join(self.options['data_root'], 'css')}),
        # javascript
        (r'%s/js/(.*)' % self.options['web_root'], webserve.BaseStaticFileHandler,
         {'path': os.path.join(self.options['data_root'], 'js')}),
        # kodi repository (served from the cache dir)
        (r'%s/kodi/(.*)' % self.options['web_root'], webserve.RepoHandler,
         {'path': os.path.join(sickbeard.CACHE_DIR, 'clients', 'kodi'),
          'default_filename': 'index.html'}),
    ])

    # Main Handler
    # NOTE: order matters - the catch-all MainHandler pattern must stay last.
    self.app.add_handlers(re_host_pattern, [
        (r'%s/api/builder(/?)(.*)' % self.options['web_root'], webserve.ApiBuilder),
        (r'%s/api(/?.*)' % self.options['web_root'], webapi.Api),
        (r'%s/imagecache(/?.*)' % self.options['web_root'], webserve.CachedImages),
        (r'%s/cache(/?.*)' % self.options['web_root'], webserve.Cache),
        (r'%s/config/general(/?.*)' % self.options['web_root'], webserve.ConfigGeneral),
        (r'%s/config/search(/?.*)' % self.options['web_root'], webserve.ConfigSearch),
        (r'%s/config/providers(/?.*)' % self.options['web_root'], webserve.ConfigProviders),
        (r'%s/config/subtitles(/?.*)' % self.options['web_root'], webserve.ConfigSubtitles),
        (r'%s/config/postProcessing(/?.*)' % self.options['web_root'], webserve.ConfigPostProcessing),
        (r'%s/config/notifications(/?.*)' % self.options['web_root'], webserve.ConfigNotifications),
        (r'%s/config/anime(/?.*)' % self.options['web_root'], webserve.ConfigAnime),
        (r'%s/config(/?.*)' % self.options['web_root'], webserve.Config),
        (r'%s/errorlogs(/?.*)' % self.options['web_root'], webserve.ErrorLogs),
        (r'%s/history(/?.*)' % self.options['web_root'], webserve.History),
        (r'%s/home/is_alive(/?.*)' % self.options['web_root'], webserve.IsAliveHandler),
        (r'%s/home/addShows(/?.*)' % self.options['web_root'], webserve.NewHomeAddShows),
        (r'%s/home/postprocess(/?.*)' % self.options['web_root'], webserve.HomePostProcess),
        (r'%s/home(/?.*)' % self.options['web_root'], webserve.Home),
        (r'%s/manage/manageSearches(/?.*)' % self.options['web_root'], webserve.ManageSearches),
        (r'%s/manage/showProcesses(/?.*)' % self.options['web_root'], webserve.showProcesses),
        (r'%s/manage/(/?.*)' % self.options['web_root'], webserve.Manage),
        (r'%s/ui(/?.*)' % self.options['web_root'], webserve.UI),
        (r'%s/browser(/?.*)' % self.options['web_root'], webserve.WebFileBrowser),
        (r'%s(/?update_watched_state_kodi/?)' % self.options['web_root'], webserve.NoXSRFHandler),
        (r'%s(/?.*)' % self.options['web_root'], webserve.MainHandler),
    ])
def execute(self):
    """Migrate composite quality values to the new 1080p-aware bit layout.

    Shifts the pre-existing HD quality bits to their new place values,
    rewrites the HD/ANY templates, then updates show qualities, episode
    statuses and history rows accordingly before vacuuming the database.

    :return: the new database version number
    """
    db.backup_database('sickbeard.db', self.checkDBVersion())

    # update the default quality so we dont grab the wrong qualities after migration
    sickbeard.QUALITY_DEFAULT = self._update_composite_qualities(sickbeard.QUALITY_DEFAULT)
    sickbeard.save_config()

    # upgrade previous HD to HD720p -- shift previous qualities to new placevalues
    old_hd = common.Quality.combineQualities(
        [common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3], [])
    new_hd = common.Quality.combineQualities(
        [common.Quality.HDTV, common.Quality.HDWEBDL, common.Quality.HDBLURAY], [])

    # update ANY -- shift existing qualities and add new 1080p qualities, note that rawHD was not added to the ANY template
    old_any = common.Quality.combineQualities(
        [common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.HDWEBDL >> 2,
         common.Quality.HDBLURAY >> 3, common.Quality.UNKNOWN], [])
    new_any = common.Quality.combineQualities(
        [common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.FULLHDTV,
         common.Quality.HDWEBDL, common.Quality.FULLHDWEBDL, common.Quality.HDBLURAY,
         common.Quality.FULLHDBLURAY, common.Quality.UNKNOWN], [])

    # update qualities (including templates)
    logger.log(u'[1/4] Updating pre-defined templates and the quality for each show...', logger.MESSAGE)
    cl = []
    shows = self.connection.select('SELECT * FROM tv_shows')
    for cur_show in shows:
        # Shows using the old HD/ANY template values are mapped to the new
        # templates; everything else gets its composite bits shifted.
        if old_hd == cur_show['quality']:
            new_quality = new_hd
        elif old_any == cur_show['quality']:
            new_quality = new_any
        else:
            new_quality = self._update_composite_qualities(cur_show['quality'])
        cl.append(['UPDATE tv_shows SET quality = ? WHERE show_id = ?',
                   [new_quality, cur_show['show_id']]])
    self.connection.mass_action(cl)

    # update status that are are within the old hdwebdl (1<<3 which is 8) and better -- exclude unknown (1<<15 which is 32768)
    logger.log(u'[2/4] Updating the status for the episodes within each show...', logger.MESSAGE)
    cl = []
    episodes = self.connection.select('SELECT * FROM tv_episodes WHERE status < 3276800 AND status >= 800')
    for cur_episode in episodes:
        cl.append(['UPDATE tv_episodes SET status = ? WHERE episode_id = ?',
                   [self._update_status(cur_episode['status']), cur_episode['episode_id']]])
    self.connection.mass_action(cl)

    # make two seperate passes through the history since snatched and downloaded (action & quality) may not always coordinate together

    # update previous history so it shows the correct action
    logger.log(u'[3/4] Updating history to reflect the correct action...', logger.MESSAGE)
    cl = []
    history_action = self.connection.select('SELECT * FROM history WHERE action < 3276800 AND action >= 800')
    for cur_entry in history_action:
        cl.append(['UPDATE history SET action = ? WHERE showid = ? AND date = ?',
                   [self._update_status(cur_entry['action']), cur_entry['showid'], cur_entry['date']]])
    self.connection.mass_action(cl)

    # update previous history so it shows the correct quality
    logger.log(u'[4/4] Updating history to reflect the correct quality...', logger.MESSAGE)
    cl = []
    history_quality = self.connection.select('SELECT * FROM history WHERE quality < 32768 AND quality >= 8')
    for cur_entry in history_quality:
        cl.append(['UPDATE history SET quality = ? WHERE showid = ? AND date = ?',
                   [self._update_quality(cur_entry['quality']), cur_entry['showid'], cur_entry['date']]])
    self.connection.mass_action(cl)

    self.incDBVersion()

    # cleanup and reduce db if any previous data was removed
    logger.log(u'Performing a vacuum on the database.', logger.DEBUG)
    self.connection.action('VACUUM')

    return self.checkDBVersion()
def setPosterSortDir(direction):
    """Persist the poster-view sort direction.

    :param direction: sort direction flag; coerced to int before storing
    """
    sickbeard.POSTER_SORTDIR = int(direction)
    sickbeard.save_config()
def setPosterSortBy(sort):
    """Persist the poster-view sort key, falling back to 'name' for unknown values."""
    valid_sorts = ('name', 'date', 'network', 'progress')
    sickbeard.POSTER_SORTBY = sort if sort in valid_sorts else 'name'
    sickbeard.save_config()
def saveProviders(self, newzbin=None, newzbin_username=None, newzbin_password=None, tvbinz=None,
                  tvbinz_uid=None, tvbinz_hash=None, nzbs=None, nzbs_uid=None, nzbs_hash=None,
                  nzbmatrix=None, nzbmatrix_username=None, nzbmatrix_apikey=None, tvnzb=None,
                  tvbinz_auth=None, tvbinz_sabuid=None, provider_order=None):
    """Persist the search-provider settings submitted from the web UI.

    Checkbox fields arrive as the string "on" when ticked. TVBinz and its
    sub-fields are only stored when actually present in the submitted form.
    Always redirects back to /config/providers/.
    """
    results = []

    # Normalise "on"/None checkbox strings into 0/1 integer flags.
    newzbin = int(newzbin == "on")
    nzbs = int(nzbs == "on")
    nzbmatrix = int(nzbmatrix == "on")
    tvnzb = int(tvnzb == "on")

    # tvbinz stays None when the field was absent from the form entirely,
    # so the stored setting is only overwritten when it was submitted.
    if tvbinz == "on":
        tvbinz = 1
    elif tvbinz is not None:
        tvbinz = 0

    sickbeard.NEWZBIN = newzbin
    sickbeard.NEWZBIN_USERNAME = newzbin_username
    sickbeard.NEWZBIN_PASSWORD = newzbin_password

    if tvbinz is not None:
        sickbeard.TVBINZ = tvbinz
    if tvbinz_uid:
        sickbeard.TVBINZ_UID = tvbinz_uid
    if tvbinz_sabuid:
        sickbeard.TVBINZ_SABUID = tvbinz_sabuid
    if tvbinz_hash:
        sickbeard.TVBINZ_HASH = tvbinz_hash
    if tvbinz_auth:
        sickbeard.TVBINZ_AUTH = tvbinz_auth

    sickbeard.NZBS = nzbs
    sickbeard.NZBS_UID = nzbs_uid
    sickbeard.NZBS_HASH = nzbs_hash

    sickbeard.NZBMATRIX = nzbmatrix
    sickbeard.NZBMATRIX_USERNAME = nzbmatrix_username
    sickbeard.NZBMATRIX_APIKEY = nzbmatrix_apikey

    sickbeard.TVNZB = tvnzb

    sickbeard.PROVIDER_ORDER = provider_order.split()

    sickbeard.save_config()

    if results:
        for message in results:
            logger.log(message, logger.ERROR)
        flash.error('Error(s) Saving Configuration', '<br />\n'.join(results))
    else:
        flash.message('Configuration Saved')

    redirect("/config/providers/")
def trakt_collection_remove_account(account_id):
    """Remove a Trakt account id from the collection-update map.

    :param account_id: key into sickbeard.TRAKT_UPDATE_COLLECTION
    :return: True when the account was present and removed (config saved),
        False otherwise
    """
    if account_id not in sickbeard.TRAKT_UPDATE_COLLECTION:
        return False
    sickbeard.TRAKT_UPDATE_COLLECTION.pop(account_id)
    sickbeard.save_config()
    return True