def launch_browser(self):
    """Open the web UI in the user's default browser, unless disabled.

    Does nothing when the --nolaunch flag was given or the config option
    is off.  Host falls back to the detected internal IP when no explicit
    web host is configured.
    """
    if self.no_launch or not self.config.general.launch_browser:
        return

    # Evaluated unconditionally (matches the original tuple-index form,
    # which always called get_internal_ip()).
    fallback_host = get_internal_ip()

    scheme = 'https' if self.config.general.enable_https else 'http'
    host = self.web_host if self.web_host != '' else fallback_host

    launch_browser(protocol=scheme,
                   host=host,
                   startport=self.config.general.web_port)
def run(self):
    """Configure and start the Tornado web server, then block on the IO loop.

    Steps: load translations, clear the mako template cache, resolve the
    video/web/api roots, set up HTTPS (creating self-signed certificates
    when the configured cert/key are missing), build the routing table,
    bind the listen socket and start the IO loop.

    Raises SystemExit when the listen socket cannot be bound.
    """
    self.started = True

    # load languages
    tornado.locale.load_gettext_translations(sickrage.LOCALE_DIR, 'messages')

    # clear mako cache folder
    mako_cache = os.path.join(sickrage.app.cache_dir, 'mako')
    if os.path.isdir(mako_cache):
        shutil.rmtree(mako_cache)

    # video root
    if sickrage.app.config.root_dirs:
        root_dirs = sickrage.app.config.root_dirs.split('|')
        # first element is the index of the default root dir; +1 skips it
        self.video_root = root_dirs[int(root_dirs[0]) + 1]

    # web root: normalize to a single leading slash, no trailing slash
    # (fixed: was a redundant double assignment `x = x = ...` with a
    # redundant lstrip('/') before strip('/'))
    if sickrage.app.config.web_root:
        sickrage.app.config.web_root = (
            '/' + sickrage.app.config.web_root.strip('/'))

    # api root
    self.api_root = r'%s/api/%s' % (sickrage.app.config.web_root,
                                    sickrage.app.config.api_key)

    # tornado setup
    if sickrage.app.config.enable_https:
        # If either the HTTPS certificate or key do not exist, make some self-signed ones.
        if not (sickrage.app.config.https_cert and os.path.exists(
                sickrage.app.config.https_cert)) or not (
                    sickrage.app.config.https_key
                    and os.path.exists(sickrage.app.config.https_key)):
            if not create_https_certificates(sickrage.app.config.https_cert,
                                             sickrage.app.config.https_key):
                sickrage.app.log.info(
                    "Unable to create CERT/KEY files, disabling HTTPS")
                sickrage.app.config.enable_https = False

        if not (os.path.exists(sickrage.app.config.https_cert)
                and os.path.exists(sickrage.app.config.https_key)):
            sickrage.app.log.warning(
                "Disabled HTTPS because of missing CERT and KEY files")
            sickrage.app.config.enable_https = False

    # Load the app
    self.app = Application(
        [
            # api
            (r'%s(/?.*)' % self.api_root, ApiHandler),

            # redirect to web root
            (r"(?!%s)(.*)" % sickrage.app.config.web_root, RedirectHandler,
             {"url": "%s/{0}" % sickrage.app.config.web_root}),

            # api key
            (r'%s/getkey(/?.*)' % sickrage.app.config.web_root, KeyHandler),

            # api builder
            (r'%s/api/builder' % sickrage.app.config.web_root, RedirectHandler,
             {"url": sickrage.app.config.web_root + '/apibuilder/'}),

            # login
            (r'%s/login(/?)' % sickrage.app.config.web_root, LoginHandler),

            # logout
            (r'%s/logout(/?)' % sickrage.app.config.web_root, LogoutHandler),

            # calendar
            (r'%s/calendar' % sickrage.app.config.web_root, CalendarHandler),

            # favicon
            (r'%s/(favicon\.ico)' % sickrage.app.config.web_root,
             StaticFileHandler,
             {"path": os.path.join(sickrage.app.config.gui_static_dir,
                                   'images/favicon.ico')}),

            # images
            (r'%s/images/(.*)' % sickrage.app.config.web_root,
             StaticImageHandler,
             {"path": os.path.join(sickrage.app.config.gui_static_dir,
                                   'images')}),

            # css
            (r'%s/css/(.*)' % sickrage.app.config.web_root, StaticFileHandler,
             {"path": os.path.join(sickrage.app.config.gui_static_dir,
                                   'css')}),

            # scss
            (r'%s/scss/(.*)' % sickrage.app.config.web_root, StaticFileHandler,
             {"path": os.path.join(sickrage.app.config.gui_static_dir,
                                   'scss')}),

            # fonts
            (r'%s/fonts/(.*)' % sickrage.app.config.web_root,
             StaticFileHandler,
             {"path": os.path.join(sickrage.app.config.gui_static_dir,
                                   'fonts')}),

            # javascript
            (r'%s/js/(.*)' % sickrage.app.config.web_root, StaticFileHandler,
             {"path": os.path.join(sickrage.app.config.gui_static_dir,
                                   'js')}),

            # videos
            (r'%s/videos/(.*)' % sickrage.app.config.web_root,
             StaticFileHandler, {"path": self.video_root}),
        ] + Route.get_routes(sickrage.app.config.web_root),
        debug=True,
        autoreload=False,
        gzip=sickrage.app.config.web_use_gzip,
        xheaders=sickrage.app.config.handle_reverse_proxy,
        cookie_secret=sickrage.app.config.web_cookie_secret,
        login_url='%s/login/' % sickrage.app.config.web_root)

    self.server = HTTPServer(self.app, no_keep_alive=True)

    if sickrage.app.config.enable_https:
        self.server.ssl_options = {
            "certfile": sickrage.app.config.https_cert,
            "keyfile": sickrage.app.config.https_key
        }

    try:
        self.server.listen(sickrage.app.config.web_port, None)

        sickrage.app.log.info("SiCKRAGE :: STARTED")
        sickrage.app.log.info("SiCKRAGE :: VERSION:[{}]".format(
            sickrage.app.version_updater.version))
        sickrage.app.log.info("SiCKRAGE :: CONFIG:[{}] [v{}]".format(
            sickrage.app.config_file, sickrage.app.config.config_version))
        sickrage.app.log.info("SiCKRAGE :: URL:[{}://{}:{}/]".format(
            ('http', 'https')[sickrage.app.config.enable_https],
            sickrage.app.config.web_host, sickrage.app.config.web_port))

        # launch browser window
        if all([not sickrage.app.no_launch,
                sickrage.app.config.launch_browser]):
            threading.Thread(
                None,
                lambda: launch_browser(
                    ('http', 'https')[sickrage.app.config.enable_https],
                    sickrage.app.config.web_host,
                    sickrage.app.config.web_port),
                name="LAUNCH-BROWSER").start()

        sickrage.app.io_loop.start()
    except socket.error as e:
        sickrage.app.log.warning(e.strerror)
        raise SystemExit
def run(self):
    """Configure and start the Tornado web server (legacy srCore app).

    Steps: clear the mako template cache, resolve the video/web roots,
    ensure an API key exists, set up HTTPS (creating self-signed
    certificates when the configured cert/key are missing), build the
    routing table, bind the listen socket and start the IO loop.

    Raises SystemExit when the listen socket cannot be bound.
    """
    self.started = True

    # clear mako cache folder
    mako_cache = os.path.join(sickrage.CACHE_DIR, 'mako')
    if os.path.isdir(mako_cache):
        shutil.rmtree(mako_cache)

    # video root
    if sickrage.srCore.srConfig.ROOT_DIRS:
        root_dirs = sickrage.srCore.srConfig.ROOT_DIRS.split('|')
        # first element is the index of the default root dir; +1 skips it
        self.video_root = root_dirs[int(root_dirs[0]) + 1]

    # web root: normalize to a single leading slash, no trailing slash
    # (fixed: was a redundant double assignment `x = x = ...` with a
    # redundant lstrip('/') before strip('/'))
    if sickrage.srCore.srConfig.WEB_ROOT:
        sickrage.srCore.srConfig.WEB_ROOT = (
            '/' + sickrage.srCore.srConfig.WEB_ROOT.strip('/'))

    # api root
    if not sickrage.srCore.srConfig.API_KEY:
        sickrage.srCore.srConfig.API_KEY = generateApiKey()
    self.api_root = r'%s/api/%s' % (sickrage.srCore.srConfig.WEB_ROOT,
                                    sickrage.srCore.srConfig.API_KEY)

    # tornado setup
    if sickrage.srCore.srConfig.ENABLE_HTTPS:
        # If either the HTTPS certificate or key do not exist, make some self-signed ones.
        if not (sickrage.srCore.srConfig.HTTPS_CERT and os.path.exists(
                sickrage.srCore.srConfig.HTTPS_CERT)) or not (
                    sickrage.srCore.srConfig.HTTPS_KEY and os.path.exists(
                        sickrage.srCore.srConfig.HTTPS_KEY)):
            if not create_https_certificates(
                    sickrage.srCore.srConfig.HTTPS_CERT,
                    sickrage.srCore.srConfig.HTTPS_KEY):
                sickrage.srCore.srLogger.info(
                    "Unable to create CERT/KEY files, disabling HTTPS")
                sickrage.srCore.srConfig.ENABLE_HTTPS = False

        if not (os.path.exists(sickrage.srCore.srConfig.HTTPS_CERT)
                and os.path.exists(sickrage.srCore.srConfig.HTTPS_KEY)):
            sickrage.srCore.srLogger.warning(
                "Disabled HTTPS because of missing CERT and KEY files")
            sickrage.srCore.srConfig.ENABLE_HTTPS = False

    # Load the app
    self.app = Application(
        [
            # api
            (r'%s(/?.*)' % self.api_root, ApiHandler),

            # redirect to web root
            (r"(?!%s)(.*)" % sickrage.srCore.srConfig.WEB_ROOT,
             RedirectHandler,
             {"url": "%s/{0}" % sickrage.srCore.srConfig.WEB_ROOT}),

            # api key
            (r'%s/getkey(/?.*)' % sickrage.srCore.srConfig.WEB_ROOT,
             KeyHandler),

            # api builder
            (r'%s/api/builder' % sickrage.srCore.srConfig.WEB_ROOT,
             RedirectHandler,
             {"url": sickrage.srCore.srConfig.WEB_ROOT + '/apibuilder/'}),

            # login
            (r'%s/login(/?)' % sickrage.srCore.srConfig.WEB_ROOT,
             LoginHandler),

            # logout
            (r'%s/logout(/?)' % sickrage.srCore.srConfig.WEB_ROOT,
             LogoutHandler),

            # calendar
            (r'%s/calendar' % sickrage.srCore.srConfig.WEB_ROOT,
             CalendarHandler),

            # favicon
            (r'%s/(favicon\.ico)' % sickrage.srCore.srConfig.WEB_ROOT,
             StaticFileHandler,
             {"path": os.path.join(sickrage.srCore.srConfig.GUI_DIR,
                                   'images/ico/favicon.ico')}),

            # images
            (r'%s/images/(.*)' % sickrage.srCore.srConfig.WEB_ROOT,
             StaticImageHandler,
             {"path": os.path.join(sickrage.srCore.srConfig.GUI_DIR,
                                   'images')}),

            # css
            (r'%s/css/(.*)' % sickrage.srCore.srConfig.WEB_ROOT,
             StaticFileHandler,
             {"path": os.path.join(sickrage.srCore.srConfig.GUI_DIR, 'css')}),

            # scss
            (r'%s/scss/(.*)' % sickrage.srCore.srConfig.WEB_ROOT,
             StaticFileHandler,
             {"path": os.path.join(sickrage.srCore.srConfig.GUI_DIR,
                                   'scss')}),

            # fonts
            (r'%s/fonts/(.*)' % sickrage.srCore.srConfig.WEB_ROOT,
             StaticFileHandler,
             {"path": os.path.join(sickrage.srCore.srConfig.GUI_DIR,
                                   'fonts')}),

            # javascript
            (r'%s/js/(.*)' % sickrage.srCore.srConfig.WEB_ROOT,
             StaticFileHandler,
             {"path": os.path.join(sickrage.srCore.srConfig.GUI_DIR, 'js')}),

            # videos
            (r'%s/videos/(.*)' % sickrage.srCore.srConfig.WEB_ROOT,
             StaticFileHandler, {"path": self.video_root}),
        ] + Route.get_routes(sickrage.srCore.srConfig.WEB_ROOT),
        debug=True,
        autoreload=False,
        gzip=sickrage.srCore.srConfig.WEB_USE_GZIP,
        xheaders=sickrage.srCore.srConfig.HANDLE_REVERSE_PROXY,
        cookie_secret=sickrage.srCore.srConfig.WEB_COOKIE_SECRET,
        login_url='%s/login/' % sickrage.srCore.srConfig.WEB_ROOT)

    self.server = HTTPServer(self.app, no_keep_alive=True)

    if sickrage.srCore.srConfig.ENABLE_HTTPS:
        self.server.ssl_options = {
            "certfile": sickrage.srCore.srConfig.HTTPS_CERT,
            "keyfile": sickrage.srCore.srConfig.HTTPS_KEY
        }

    try:
        # command-line port override takes precedence over the config value
        self.server.listen(
            sickrage.WEB_PORT or sickrage.srCore.srConfig.WEB_PORT, None)

        sickrage.srCore.srLogger.info("SiCKRAGE :: STARTED")
        sickrage.srCore.srLogger.info("SiCKRAGE :: VERSION:[{}]".format(
            sickrage.srCore.VERSIONUPDATER.version))
        sickrage.srCore.srLogger.info("SiCKRAGE :: CONFIG:[{}] [v{}]".format(
            sickrage.CONFIG_FILE, sickrage.srCore.srConfig.CONFIG_VERSION))
        sickrage.srCore.srLogger.info("SiCKRAGE :: URL:[{}://{}:{}/]".format(
            ('http', 'https')[sickrage.srCore.srConfig.ENABLE_HTTPS],
            sickrage.srCore.srConfig.WEB_HOST,
            sickrage.srCore.srConfig.WEB_PORT))

        # launch browser window
        if all([not sickrage.NOLAUNCH,
                sickrage.srCore.srConfig.LAUNCH_BROWSER]):
            threading.Thread(
                None,
                lambda: launch_browser(
                    ('http', 'https')[sickrage.srCore.srConfig.ENABLE_HTTPS],
                    sickrage.srCore.srConfig.WEB_HOST,
                    sickrage.srCore.srConfig.WEB_PORT),
                name="LAUNCH-BROWSER").start()

        sickrage.io_loop.start()
    except socket.error as e:
        sickrage.srCore.srLogger.warning(e.strerror)
        raise SystemExit
def start(self):
    """Bootstrap the application and block on the IO loop.

    Order matters and must not be casually rearranged: core objects are
    constructed first, then a pending backup restore is applied, old
    database files are migrated, config is loaded, logging is started,
    config values are sanity-checked/clamped, background jobs are
    scheduled, queues and the web server are started, and finally the
    IO loop runs with a startup-banner callback.
    """
    self.started = True
    self.io_loop = IOLoop.current()

    # thread name
    threading.currentThread().setName('CORE')

    # init core classes
    self.main_db = MainDB(self.db_type, self.db_prefix, self.db_host,
                          self.db_port, self.db_username, self.db_password)
    self.cache_db = CacheDB(self.db_type, self.db_prefix, self.db_host,
                            self.db_port, self.db_username, self.db_password)
    self.notifier_providers = NotifierProviders()
    self.metadata_providers = MetadataProviders()
    self.search_providers = SearchProviders()
    self.log = Logger()
    self.config = Config()
    self.alerts = Notifications()
    self.scheduler = TornadoScheduler()
    self.wserver = WebServer()
    self.name_cache = NameCache()
    self.show_queue = ShowQueue()
    self.search_queue = SearchQueue()
    self.postprocessor_queue = PostProcessorQueue()
    self.version_updater = VersionUpdater()
    self.show_updater = ShowUpdater()
    self.tz_updater = TimeZoneUpdater()
    self.rsscache_updater = RSSCacheUpdater()
    self.daily_searcher = DailySearcher()
    self.failed_snatch_searcher = FailedSnatchSearcher()
    self.backlog_searcher = BacklogSearcher()
    self.proper_searcher = ProperSearcher()
    self.trakt_searcher = TraktSearcher()
    self.subtitle_searcher = SubtitleSearcher()
    self.auto_postprocessor = AutoPostProcessor()
    self.upnp_client = UPNPClient()
    self.quicksearch_cache = QuicksearchCache()

    # setup oidc client
    realm = KeycloakRealm(server_url='https://auth.sickrage.ca',
                          realm_name='sickrage')
    self.oidc_client = realm.open_id_connect(
        client_id=self.oidc_client_id,
        client_secret=self.oidc_client_secret)

    # Check if we need to perform a restore first
    if os.path.exists(
            os.path.abspath(os.path.join(self.data_dir, 'restore'))):
        success = restore_app_data(
            os.path.abspath(os.path.join(self.data_dir, 'restore')),
            self.data_dir)
        self.log.info("Restoring SiCKRAGE backup: %s!"
                      % ("FAILED", "SUCCESSFUL")[success])
        if success:
            # drop the consumed restore folder
            shutil.rmtree(os.path.abspath(
                os.path.join(self.data_dir, 'restore')),
                ignore_errors=True)

    # migrate old database file names to new ones
    if os.path.isfile(
            os.path.abspath(os.path.join(self.data_dir, 'sickbeard.db'))):
        if os.path.isfile(os.path.join(self.data_dir, 'sickrage.db')):
            # back up any existing sickrage.db before overwriting it
            helpers.move_file(
                os.path.join(self.data_dir, 'sickrage.db'),
                os.path.join(
                    self.data_dir, '{}.bak-{}'.format(
                        'sickrage.db',
                        datetime.datetime.now().strftime(
                            '%Y%m%d_%H%M%S'))))

        helpers.move_file(
            os.path.abspath(os.path.join(self.data_dir, 'sickbeard.db')),
            os.path.abspath(os.path.join(self.data_dir, 'sickrage.db')))

    # init encryption public and private keys
    encryption.initialize()

    # load config
    self.config.load()

    # set language
    self.config.change_gui_lang(self.config.gui_lang)

    # set socket timeout
    socket.setdefaulttimeout(self.config.socket_timeout)

    # setup logger settings
    self.log.logSize = self.config.log_size
    self.log.logNr = self.config.log_nr
    self.log.logFile = os.path.join(self.data_dir, 'logs', 'sickrage.log')
    self.log.debugLogging = self.config.debug
    self.log.consoleLogging = not self.quiet

    # start logger
    self.log.start()

    # user agent
    if self.config.random_user_agent:
        self.user_agent = UserAgent().random

    uses_netloc.append('scgi')
    FancyURLopener.version = self.user_agent

    # set torrent client web url
    torrent_webui_url(True)

    # Check available space
    try:
        total_space, available_space = get_free_space(self.data_dir)
        if available_space < 100:
            # refuse to start with <100MB free to avoid data corruption
            self.log.warning(
                'Shutting down as SiCKRAGE needs some space to work. You\'ll get corrupted data otherwise. Only %sMB left',
                available_space)
            return
    except Exception:
        self.log.error('Failed getting disk space: %s',
                       traceback.format_exc())

    # perform database startup actions
    for db in [self.main_db, self.cache_db]:
        # perform integrity check
        db.integrity_check()

        # migrate database
        db.migrate()

        # sync database repo
        db.sync_db_repo()

        # cleanup
        db.cleanup()

    # load name cache
    self.name_cache.load()

    if self.config.default_page not in ('schedule', 'history', 'IRC'):
        self.config.default_page = 'home'

    # cleanup cache folder
    for folder in ['mako', 'sessions', 'indexers']:
        try:
            shutil.rmtree(os.path.join(sickrage.app.cache_dir, folder),
                          ignore_errors=True)
        except Exception:
            continue

    # sanity-check / clamp config values before scheduling jobs
    if self.config.web_port < 21 or self.config.web_port > 65535:
        self.config.web_port = 8081

    if not self.config.web_cookie_secret:
        self.config.web_cookie_secret = generate_secret()

    # attempt to help prevent users from breaking links by using a bad url
    if not self.config.anon_redirect.endswith('?'):
        self.config.anon_redirect = ''

    if not re.match(r'\d+\|[^|]+(?:\|[^|]+)*', self.config.root_dirs):
        self.config.root_dirs = ''

    self.config.naming_force_folders = check_force_season_folders()

    if self.config.nzb_method not in ('blackhole', 'sabnzbd', 'nzbget'):
        self.config.nzb_method = 'blackhole'

    if self.config.torrent_method not in ('blackhole', 'utorrent',
                                          'transmission', 'deluge', 'deluged',
                                          'download_station', 'rtorrent',
                                          'qbittorrent', 'mlnet', 'putio'):
        self.config.torrent_method = 'blackhole'

    # clamp frequencies/ages to their configured minimums
    if self.config.autopostprocessor_freq < self.config.min_autopostprocessor_freq:
        self.config.autopostprocessor_freq = self.config.min_autopostprocessor_freq

    if self.config.daily_searcher_freq < self.config.min_daily_searcher_freq:
        self.config.daily_searcher_freq = self.config.min_daily_searcher_freq

    if self.config.backlog_searcher_freq < self.config.min_backlog_searcher_freq:
        self.config.backlog_searcher_freq = self.config.min_backlog_searcher_freq

    if self.config.version_updater_freq < self.config.min_version_updater_freq:
        self.config.version_updater_freq = self.config.min_version_updater_freq

    if self.config.subtitle_searcher_freq < self.config.min_subtitle_searcher_freq:
        self.config.subtitle_searcher_freq = self.config.min_subtitle_searcher_freq

    if self.config.failed_snatch_age < self.config.min_failed_snatch_age:
        self.config.failed_snatch_age = self.config.min_failed_snatch_age

    if self.config.proper_searcher_interval not in ('15m', '45m', '90m',
                                                    '4h', 'daily'):
        self.config.proper_searcher_interval = 'daily'

    if self.config.showupdate_hour < 0 or self.config.showupdate_hour > 23:
        self.config.showupdate_hour = 0

    # add version checker job
    self.scheduler.add_job(self.version_updater.run,
                           IntervalTrigger(
                               hours=self.config.version_updater_freq, ),
                           name=self.version_updater.name,
                           id=self.version_updater.name)

    # add network timezones updater job
    self.scheduler.add_job(self.tz_updater.run,
                           IntervalTrigger(days=1, ),
                           name=self.tz_updater.name,
                           id=self.tz_updater.name)

    # add show updater job
    self.scheduler.add_job(self.show_updater.run,
                           IntervalTrigger(
                               days=1,
                               start_date=datetime.datetime.now().replace(
                                   hour=self.config.showupdate_hour)),
                           name=self.show_updater.name,
                           id=self.show_updater.name)

    # add rss cache updater job
    self.scheduler.add_job(self.rsscache_updater.run,
                           IntervalTrigger(minutes=15, ),
                           name=self.rsscache_updater.name,
                           id=self.rsscache_updater.name)

    # add daily search job
    self.scheduler.add_job(
        self.daily_searcher.run,
        IntervalTrigger(minutes=self.config.daily_searcher_freq,
                        start_date=datetime.datetime.now() +
                        datetime.timedelta(minutes=4)),
        name=self.daily_searcher.name,
        id=self.daily_searcher.name)

    # add failed snatch search job
    self.scheduler.add_job(
        self.failed_snatch_searcher.run,
        IntervalTrigger(hours=1,
                        start_date=datetime.datetime.now() +
                        datetime.timedelta(minutes=4)),
        name=self.failed_snatch_searcher.name,
        id=self.failed_snatch_searcher.name)

    # add backlog search job
    self.scheduler.add_job(
        self.backlog_searcher.run,
        IntervalTrigger(minutes=self.config.backlog_searcher_freq,
                        start_date=datetime.datetime.now() +
                        datetime.timedelta(minutes=30)),
        name=self.backlog_searcher.name,
        id=self.backlog_searcher.name)

    # add auto-postprocessing job
    self.scheduler.add_job(
        self.auto_postprocessor.run,
        IntervalTrigger(minutes=self.config.autopostprocessor_freq),
        name=self.auto_postprocessor.name,
        id=self.auto_postprocessor.name)

    # add find proper job
    # interval string maps to minutes via this lookup table
    self.scheduler.add_job(
        self.proper_searcher.run,
        IntervalTrigger(minutes={
            '15m': 15,
            '45m': 45,
            '90m': 90,
            '4h': 4 * 60,
            'daily': 24 * 60
        }[self.config.proper_searcher_interval]),
        name=self.proper_searcher.name,
        id=self.proper_searcher.name)

    # add trakt.tv checker job
    self.scheduler.add_job(self.trakt_searcher.run,
                           IntervalTrigger(hours=1),
                           name=self.trakt_searcher.name,
                           id=self.trakt_searcher.name)

    # add subtitles finder job
    self.scheduler.add_job(
        self.subtitle_searcher.run,
        IntervalTrigger(hours=self.config.subtitle_searcher_freq),
        name=self.subtitle_searcher.name,
        id=self.subtitle_searcher.name)

    # add upnp client job
    self.scheduler.add_job(
        self.upnp_client.run,
        IntervalTrigger(seconds=self.upnp_client._nat_portmap_lifetime),
        name=self.upnp_client.name,
        id=self.upnp_client.name)

    # add namecache update job
    self.scheduler.add_job(self.name_cache.build_all,
                           IntervalTrigger(days=1, ),
                           name=self.name_cache.name,
                           id=self.name_cache.name)

    # start scheduler service
    self.scheduler.start()

    # start queue's
    self.io_loop.add_callback(self.search_queue.watch)
    self.io_loop.add_callback(self.show_queue.watch)
    self.io_loop.add_callback(self.postprocessor_queue.watch)

    # fire off startup events
    self.io_loop.run_in_executor(None, self.quicksearch_cache.run)
    self.io_loop.run_in_executor(None, self.name_cache.run)
    self.io_loop.run_in_executor(None, self.version_updater.run)
    self.io_loop.run_in_executor(None, self.tz_updater.run)

    # start web server
    self.wserver.start()

    # launch browser window
    if all([not sickrage.app.no_launch,
            sickrage.app.config.launch_browser]):
        self.io_loop.run_in_executor(
            None,
            lambda: launch_browser(
                ('http', 'https')[sickrage.app.config.enable_https],
                sickrage.app.config.web_host,
                sickrage.app.config.web_port))

    def started():
        # runs once the IO loop is up; logs the startup banner
        self.log.info("SiCKRAGE :: STARTED")
        self.log.info("SiCKRAGE :: APP VERSION:[{}]".format(
            sickrage.version()))
        self.log.info("SiCKRAGE :: CONFIG VERSION:[v{}]".format(
            self.config.config_version))
        self.log.info("SiCKRAGE :: DATABASE VERSION:[v{}]".format(
            self.main_db.version))
        self.log.info("SiCKRAGE :: DATABASE TYPE:[{}]".format(
            self.db_type))
        self.log.info("SiCKRAGE :: URL:[{}://{}:{}{}]".format(
            ('http', 'https')[self.config.enable_https],
            self.config.web_host, self.config.web_port,
            self.config.web_root))

    # start io_loop
    self.io_loop.add_callback(started)
    self.io_loop.start()
def start(self):
    """Bootstrap the legacy srCore application and block on the IO loop.

    Order matters: restore pending backup, migrate old DB filenames,
    load config, start logging, initialize databases, sanity-check and
    clamp config values, schedule background jobs, start queues and the
    web server, then start the IO loop.
    """
    self.started = True

    # thread name
    threading.currentThread().setName('CORE')

    # Check if we need to perform a restore first
    if os.path.exists(
            os.path.abspath(os.path.join(sickrage.DATA_DIR, 'restore'))):
        success = restoreSR(
            os.path.abspath(os.path.join(sickrage.DATA_DIR, 'restore')),
            sickrage.DATA_DIR)
        print("Restoring SiCKRAGE backup: %s!\n" %
              ("FAILED", "SUCCESSFUL")[success])
        if success:
            # drop the consumed restore folder
            shutil.rmtree(os.path.abspath(
                os.path.join(sickrage.DATA_DIR, 'restore')),
                ignore_errors=True)

    # migrate old database file names to new ones
    if os.path.isfile(
            os.path.abspath(os.path.join(sickrage.DATA_DIR, 'sickbeard.db'))):
        if os.path.isfile(os.path.join(sickrage.DATA_DIR, 'sickrage.db')):
            # back up any existing sickrage.db before overwriting it
            helpers.moveFile(
                os.path.join(sickrage.DATA_DIR, 'sickrage.db'),
                os.path.join(
                    sickrage.DATA_DIR, '{}.bak-{}'.format(
                        'sickrage.db',
                        datetime.datetime.now().strftime(
                            '%Y%m%d_%H%M%S'))))

        helpers.moveFile(
            os.path.abspath(os.path.join(sickrage.DATA_DIR, 'sickbeard.db')),
            os.path.abspath(os.path.join(sickrage.DATA_DIR, 'sickrage.db')))

    # load config
    self.srConfig.load()

    # set socket timeout
    socket.setdefaulttimeout(self.srConfig.SOCKET_TIMEOUT)

    # setup logger settings
    self.srLogger.logSize = self.srConfig.LOG_SIZE
    self.srLogger.logNr = self.srConfig.LOG_NR
    self.srLogger.logFile = self.srConfig.LOG_FILE
    self.srLogger.debugLogging = sickrage.DEBUG
    # NOTE(review): 'QUITE' looks like a typo for 'QUIET' — the name is
    # defined elsewhere in the package, verify before renaming
    self.srLogger.consoleLogging = not sickrage.QUITE

    # start logger
    self.srLogger.start()

    # Check available space
    try:
        total_space, available_space = getFreeSpace(sickrage.DATA_DIR)
        if available_space < 100:
            # refuse to start with <100MB free to avoid data corruption
            self.srLogger.error(
                'Shutting down as SiCKRAGE needs some space to work. You\'ll get corrupted data otherwise. Only %sMB left',
                available_space)
            sickrage.restart = False
            return
    # NOTE(review): bare except also swallows SystemExit/KeyboardInterrupt;
    # prefer `except Exception:` when this can be safely changed
    except:
        self.srLogger.error('Failed getting diskspace: %s',
                            traceback.format_exc())

    # perform database startup actions
    for db in [self.mainDB, self.cacheDB, self.failedDB]:
        # initialize database
        db.initialize()

        # check integrity of database
        db.check_integrity()

        # migrate database
        db.migrate()

        # misc database cleanups
        db.cleanup()

    # compact main database
    if not sickrage.DEVELOPER and self.srConfig.LAST_DB_COMPACT < time.time(
    ) - 604800:  # 7 days
        self.mainDB.compact()
        self.srConfig.LAST_DB_COMPACT = int(time.time())

    # load data for shows from database
    self.load_shows()

    if self.srConfig.DEFAULT_PAGE not in ('home', 'schedule', 'history',
                                          'news', 'IRC'):
        self.srConfig.DEFAULT_PAGE = 'home'

    # cleanup cache folder
    for folder in ['mako', 'sessions', 'indexers']:
        try:
            shutil.rmtree(os.path.join(sickrage.CACHE_DIR, folder),
                          ignore_errors=True)
        except Exception:
            continue

    # init anidb connection
    if self.srConfig.USE_ANIDB:
        try:
            self.ADBA_CONNECTION = adba.Connection(
                keepAlive=True,
                log=lambda msg: self.srLogger.debug(
                    "AniDB: %s " % msg)).auth(self.srConfig.ANIDB_USERNAME,
                                              self.srConfig.ANIDB_PASSWORD)
        except Exception as e:
            self.srLogger.warning("AniDB exception msg: %r " % repr(e))

    # sanity-check / clamp config values before scheduling jobs
    if self.srConfig.WEB_PORT < 21 or self.srConfig.WEB_PORT > 65535:
        self.srConfig.WEB_PORT = 8081

    if not self.srConfig.WEB_COOKIE_SECRET:
        self.srConfig.WEB_COOKIE_SECRET = generateCookieSecret()

    # attempt to help prevent users from breaking links by using a bad url
    if not self.srConfig.ANON_REDIRECT.endswith('?'):
        self.srConfig.ANON_REDIRECT = ''

    if not re.match(r'\d+\|[^|]+(?:\|[^|]+)*', self.srConfig.ROOT_DIRS):
        self.srConfig.ROOT_DIRS = ''

    self.srConfig.NAMING_FORCE_FOLDERS = check_force_season_folders()

    if self.srConfig.NZB_METHOD not in ('blackhole', 'sabnzbd', 'nzbget'):
        self.srConfig.NZB_METHOD = 'blackhole'

    if self.srConfig.TORRENT_METHOD not in ('blackhole', 'utorrent',
                                            'transmission', 'deluge',
                                            'deluged', 'download_station',
                                            'rtorrent', 'qbittorrent',
                                            'mlnet', 'putio'):
        self.srConfig.TORRENT_METHOD = 'blackhole'

    if self.srConfig.PROPER_SEARCHER_INTERVAL not in ('15m', '45m', '90m',
                                                      '4h', 'daily'):
        self.srConfig.PROPER_SEARCHER_INTERVAL = 'daily'

    if self.srConfig.AUTOPOSTPROCESSOR_FREQ < self.srConfig.MIN_AUTOPOSTPROCESSOR_FREQ:
        self.srConfig.AUTOPOSTPROCESSOR_FREQ = self.srConfig.MIN_AUTOPOSTPROCESSOR_FREQ

    if self.srConfig.NAMECACHE_FREQ < self.srConfig.MIN_NAMECACHE_FREQ:
        self.srConfig.NAMECACHE_FREQ = self.srConfig.MIN_NAMECACHE_FREQ

    if self.srConfig.DAILY_SEARCHER_FREQ < self.srConfig.MIN_DAILY_SEARCHER_FREQ:
        self.srConfig.DAILY_SEARCHER_FREQ = self.srConfig.MIN_DAILY_SEARCHER_FREQ

    # backlog minimum is derived from the searcher's own cycle time
    self.srConfig.MIN_BACKLOG_SEARCHER_FREQ = self.BACKLOGSEARCHER.get_backlog_cycle_time(
    )
    if self.srConfig.BACKLOG_SEARCHER_FREQ < self.srConfig.MIN_BACKLOG_SEARCHER_FREQ:
        self.srConfig.BACKLOG_SEARCHER_FREQ = self.srConfig.MIN_BACKLOG_SEARCHER_FREQ

    if self.srConfig.VERSION_UPDATER_FREQ < self.srConfig.MIN_VERSION_UPDATER_FREQ:
        self.srConfig.VERSION_UPDATER_FREQ = self.srConfig.MIN_VERSION_UPDATER_FREQ

    if self.srConfig.SHOWUPDATE_HOUR > 23:
        self.srConfig.SHOWUPDATE_HOUR = 0
    elif self.srConfig.SHOWUPDATE_HOUR < 0:
        self.srConfig.SHOWUPDATE_HOUR = 0

    if self.srConfig.SUBTITLE_SEARCHER_FREQ < self.srConfig.MIN_SUBTITLE_SEARCHER_FREQ:
        self.srConfig.SUBTITLE_SEARCHER_FREQ = self.srConfig.MIN_SUBTITLE_SEARCHER_FREQ

    if self.srConfig.SUBTITLES_LANGUAGES[0] == '':
        self.srConfig.SUBTITLES_LANGUAGES = []

    # add version checker job
    self.srScheduler.add_job(
        self.VERSIONUPDATER.run,
        srIntervalTrigger(
            **{
                'hours': self.srConfig.VERSION_UPDATER_FREQ,
                'min': self.srConfig.MIN_VERSION_UPDATER_FREQ
            }),
        name="VERSIONUPDATER",
        id="VERSIONUPDATER")

    # add network timezones updater job
    self.srScheduler.add_job(update_network_dict,
                             srIntervalTrigger(**{'days': 1}),
                             name="TZUPDATER",
                             id="TZUPDATER")

    # add show updater job
    self.srScheduler.add_job(
        self.SHOWUPDATER.run,
        srIntervalTrigger(
            **{
                'days': 1,
                'start_date': datetime.datetime.now().replace(
                    hour=self.srConfig.SHOWUPDATE_HOUR)
            }),
        name="SHOWUPDATER",
        id="SHOWUPDATER")

    # add show next episode job
    self.srScheduler.add_job(self.SHOWUPDATER.nextEpisode,
                             srIntervalTrigger(**{'hours': 1}),
                             name="SHOWNEXTEP",
                             id="SHOWNEXTEP")

    # add daily search job
    self.srScheduler.add_job(
        self.DAILYSEARCHER.run,
        srIntervalTrigger(
            **{
                'minutes': self.srConfig.DAILY_SEARCHER_FREQ,
                'min': self.srConfig.MIN_DAILY_SEARCHER_FREQ,
                'start_date': datetime.datetime.now() +
                datetime.timedelta(minutes=4)
            }),
        name="DAILYSEARCHER",
        id="DAILYSEARCHER")

    # add backlog search job
    self.srScheduler.add_job(
        self.BACKLOGSEARCHER.run,
        srIntervalTrigger(
            **{
                'minutes': self.srConfig.BACKLOG_SEARCHER_FREQ,
                'min': self.srConfig.MIN_BACKLOG_SEARCHER_FREQ,
                'start_date': datetime.datetime.now() +
                datetime.timedelta(minutes=30)
            }),
        name="BACKLOG",
        id="BACKLOG")

    # add auto-postprocessing job
    self.srScheduler.add_job(
        self.AUTOPOSTPROCESSOR.run,
        srIntervalTrigger(
            **{
                'minutes': self.srConfig.AUTOPOSTPROCESSOR_FREQ,
                'min': self.srConfig.MIN_AUTOPOSTPROCESSOR_FREQ
            }),
        name="POSTPROCESSOR",
        id="POSTPROCESSOR")

    # add find proper job
    # interval string maps to minutes via this lookup table
    self.srScheduler.add_job(
        self.PROPERSEARCHER.run,
        srIntervalTrigger(
            **{
                'minutes': {
                    '15m': 15,
                    '45m': 45,
                    '90m': 90,
                    '4h': 4 * 60,
                    'daily': 24 * 60
                }[self.srConfig.PROPER_SEARCHER_INTERVAL]
            }),
        name="PROPERSEARCHER",
        id="PROPERSEARCHER")

    # add trakt.tv checker job
    self.srScheduler.add_job(self.TRAKTSEARCHER.run,
                             srIntervalTrigger(**{'hours': 1}),
                             name="TRAKTSEARCHER",
                             id="TRAKTSEARCHER")

    # add subtitles finder job
    self.srScheduler.add_job(
        self.SUBTITLESEARCHER.run,
        srIntervalTrigger(
            **{'hours': self.srConfig.SUBTITLE_SEARCHER_FREQ}),
        name="SUBTITLESEARCHER",
        id="SUBTITLESEARCHER")

    # start scheduler service
    self.srScheduler.start()

    # Pause/Resume PROPERSEARCHER job
    # (tuple-index trick: index 0 = pause when flag is falsy, 1 = resume)
    (self.srScheduler.get_job('PROPERSEARCHER').pause,
     self.srScheduler.get_job('PROPERSEARCHER').resume
     )[self.srConfig.DOWNLOAD_PROPERS]()

    # Pause/Resume TRAKTSEARCHER job
    (self.srScheduler.get_job('TRAKTSEARCHER').pause,
     self.srScheduler.get_job('TRAKTSEARCHER').resume
     )[self.srConfig.USE_TRAKT]()

    # Pause/Resume SUBTITLESEARCHER job
    (self.srScheduler.get_job('SUBTITLESEARCHER').pause,
     self.srScheduler.get_job('SUBTITLESEARCHER').resume
     )[self.srConfig.USE_SUBTITLES]()

    # Pause/Resume POSTPROCESS job
    (self.srScheduler.get_job('POSTPROCESSOR').pause,
     self.srScheduler.get_job('POSTPROCESSOR').resume
     )[self.srConfig.PROCESS_AUTOMATICALLY]()

    # start queue's
    self.SEARCHQUEUE.start()
    self.SHOWQUEUE.start()

    # start webserver
    self.srWebServer.start()

    self.srLogger.info("SiCKRAGE :: STARTED")
    self.srLogger.info("SiCKRAGE :: VERSION:[{}]".format(
        self.VERSIONUPDATER.version))
    self.srLogger.info("SiCKRAGE :: CONFIG:[{}] [v{}]".format(
        sickrage.CONFIG_FILE, self.srConfig.CONFIG_VERSION))
    self.srLogger.info("SiCKRAGE :: URL:[{}://{}:{}/]".format(
        ('http', 'https')[self.srConfig.ENABLE_HTTPS],
        self.srConfig.WEB_HOST, self.srConfig.WEB_PORT))

    # launch browser window
    if all(
            [not sickrage.NOLAUNCH,
             sickrage.srCore.srConfig.LAUNCH_BROWSER]):
        threading.Thread(
            None, lambda: launch_browser(
                ('http', 'https')[sickrage.srCore.srConfig.ENABLE_HTTPS],
                self.srConfig.WEB_HOST,
                sickrage.srCore.srConfig.WEB_PORT)).start()

    # start ioloop event handler
    self.io_loop.start()
def start(self):
    """Boot the SiCKRAGE core.

    Performs the full application startup sequence, strictly in order:
    instantiate core services, restore/migrate databases, load and
    sanity-clamp config, register all scheduler jobs, start queues and
    the web server, then enter the (blocking) tornado IO loop.

    NOTE(review): statement order matters throughout — e.g. ``self.log``
    settings must be assigned before ``self.log.start()``, and config must
    be loaded before any ``self.config.*`` reads. Do not reorder.
    """
    self.started = True
    self.io_loop = IOLoop.current()

    # thread name
    threading.currentThread().setName('CORE')

    # patch modules with encoding kludge
    patch_modules()

    # init core classes
    self.notifier_providers = NotifierProviders()
    self.metadata_providers = MetadataProviders()
    self.search_providers = SearchProviders()
    self.log = Logger()
    self.config = Config()
    self.alerts = Notifications()
    self.main_db = MainDB()
    self.cache_db = CacheDB()
    self.scheduler = TornadoScheduler()
    self.wserver = WebServer()
    self.name_cache = NameCache()
    self.show_queue = ShowQueue()
    self.search_queue = SearchQueue()
    self.postprocessor_queue = PostProcessorQueue()
    self.event_queue = EventQueue()
    self.version_updater = VersionUpdater()
    self.show_updater = ShowUpdater()
    self.tz_updater = TimeZoneUpdater()
    self.rsscache_updater = RSSCacheUpdater()
    self.daily_searcher = DailySearcher()
    self.failed_snatch_searcher = FailedSnatchSearcher()
    self.backlog_searcher = BacklogSearcher()
    self.proper_searcher = ProperSearcher()
    self.trakt_searcher = TraktSearcher()
    self.subtitle_searcher = SubtitleSearcher()
    self.auto_postprocessor = AutoPostProcessor()
    self.upnp_client = UPNPClient()
    self.quicksearch_cache = QuicksearchCache()

    # setup oidc client (auth against the hosted SiCKRAGE keycloak realm)
    realm = KeycloakRealm(server_url='https://auth.sickrage.ca', realm_name='sickrage')
    self.oidc_client = realm.open_id_connect(client_id='sickrage-app',
                                             client_secret='5d4710b2-ca70-4d39-b5a3-0705e2c5e703')

    # Check if we need to perform a restore first; a successful restore
    # removes the 'restore' staging folder so it only runs once.
    if os.path.exists(os.path.abspath(os.path.join(self.data_dir, 'restore'))):
        success = restoreSR(os.path.abspath(os.path.join(self.data_dir, 'restore')), self.data_dir)
        self.log.info("Restoring SiCKRAGE backup: {}!".format(("FAILED", "SUCCESSFUL")[success]))
        if success:
            shutil.rmtree(os.path.abspath(os.path.join(self.data_dir, 'restore')), ignore_errors=True)

    # migrate old database file names to new ones; an existing sickrage.db
    # is kept as a timestamped .bak before sickbeard.db takes its place
    if os.path.isfile(os.path.abspath(os.path.join(self.data_dir, 'sickbeard.db'))):
        if os.path.isfile(os.path.join(self.data_dir, 'sickrage.db')):
            helpers.move_file(os.path.join(self.data_dir, 'sickrage.db'),
                              os.path.join(self.data_dir, '{}.bak-{}'
                                           .format('sickrage.db',
                                                   datetime.datetime.now().strftime(
                                                       '%Y%m%d_%H%M%S'))))
        helpers.move_file(os.path.abspath(os.path.join(self.data_dir, 'sickbeard.db')),
                          os.path.abspath(os.path.join(self.data_dir, 'sickrage.db')))

    # load config
    self.config.load()

    # set language
    self.config.change_gui_lang(self.config.gui_lang)

    # set socket timeout
    socket.setdefaulttimeout(self.config.socket_timeout)

    # setup logger settings (must happen before self.log.start())
    self.log.logSize = self.config.log_size
    self.log.logNr = self.config.log_nr
    self.log.logFile = os.path.join(self.data_dir, 'logs', 'sickrage.log')
    self.log.debugLogging = self.config.debug
    self.log.consoleLogging = not self.quiet

    # start logger
    self.log.start()

    # user agent
    if self.config.random_user_agent:
        self.user_agent = UserAgent().random

    # register scgi as a netloc-style scheme and set the global urllib UA
    # NOTE(review): Python 2 era APIs (urlparse/FancyURLopener)
    urlparse.uses_netloc.append('scgi')
    urllib.FancyURLopener.version = self.user_agent

    # set torrent client web url
    torrent_webui_url(True)

    # Check available space; abort startup below 100MB to avoid corruption
    try:
        total_space, available_space = getFreeSpace(self.data_dir)
        if available_space < 100:
            self.log.error('Shutting down as SiCKRAGE needs some space to work. You\'ll get corrupted data '
                           'otherwise. Only %sMB left', available_space)
            return
    except Exception:
        self.log.error('Failed getting disk space: %s', traceback.format_exc())

    # perform database startup actions
    for db in [self.main_db, self.cache_db]:
        # initialize database
        db.initialize()

        # check integrity of database
        db.check_integrity()

        # migrate database
        db.migrate()

        # misc database cleanups
        db.cleanup()

        # upgrade database
        db.upgrade()

    # compact main database at most once per week
    if self.config.last_db_compact < time.time() - 604800:  # 7 days
        self.main_db.compact()
        self.config.last_db_compact = int(time.time())

    # load name cache
    self.name_cache.load()

    # load data for shows from database
    self.load_shows()

    # fall back to the home page for unknown default-page settings
    if self.config.default_page not in ('schedule', 'history', 'IRC'):
        self.config.default_page = 'home'

    # cleanup cache folder
    for folder in ['mako', 'sessions', 'indexers']:
        try:
            shutil.rmtree(os.path.join(sickrage.app.cache_dir, folder), ignore_errors=True)
        except Exception:
            continue

    # clamp web port to the unprivileged/valid range
    if self.config.web_port < 21 or self.config.web_port > 65535:
        self.config.web_port = 8081

    if not self.config.web_cookie_secret:
        self.config.web_cookie_secret = generate_secret()

    # attempt to help prevent users from breaking links by using a bad url
    if not self.config.anon_redirect.endswith('?'):
        self.config.anon_redirect = ''

    # root_dirs must look like "<default-index>|<path>|<path>..."
    if not re.match(r'\d+\|[^|]+(?:\|[^|]+)*', self.config.root_dirs):
        self.config.root_dirs = ''

    self.config.naming_force_folders = check_force_season_folders()

    if self.config.nzb_method not in ('blackhole', 'sabnzbd', 'nzbget'):
        self.config.nzb_method = 'blackhole'

    if self.config.torrent_method not in ('blackhole', 'utorrent', 'transmission', 'deluge', 'deluged',
                                          'download_station', 'rtorrent', 'qbittorrent', 'mlnet', 'putio'):
        self.config.torrent_method = 'blackhole'

    # clamp all searcher/processor frequencies to their configured minimums
    if self.config.autopostprocessor_freq < self.config.min_autopostprocessor_freq:
        self.config.autopostprocessor_freq = self.config.min_autopostprocessor_freq

    if self.config.daily_searcher_freq < self.config.min_daily_searcher_freq:
        self.config.daily_searcher_freq = self.config.min_daily_searcher_freq

    if self.config.backlog_searcher_freq < self.config.min_backlog_searcher_freq:
        self.config.backlog_searcher_freq = self.config.min_backlog_searcher_freq

    if self.config.version_updater_freq < self.config.min_version_updater_freq:
        self.config.version_updater_freq = self.config.min_version_updater_freq

    if self.config.subtitle_searcher_freq < self.config.min_subtitle_searcher_freq:
        self.config.subtitle_searcher_freq = self.config.min_subtitle_searcher_freq

    if self.config.failed_snatch_age < self.config.min_failed_snatch_age:
        self.config.failed_snatch_age = self.config.min_failed_snatch_age

    if self.config.proper_searcher_interval not in ('15m', '45m', '90m', '4h', 'daily'):
        self.config.proper_searcher_interval = 'daily'

    if self.config.showupdate_hour < 0 or self.config.showupdate_hour > 23:
        self.config.showupdate_hour = 0

    # add version checker job
    self.scheduler.add_job(
        self.version_updater.run,
        IntervalTrigger(
            hours=self.config.version_updater_freq,
        ),
        name=self.version_updater.name,
        id=self.version_updater.name
    )

    # add network timezones updater job
    self.scheduler.add_job(
        self.tz_updater.run,
        IntervalTrigger(
            days=1,
        ),
        name=self.tz_updater.name,
        id=self.tz_updater.name
    )

    # add show updater job (runs daily at the configured hour)
    self.scheduler.add_job(
        self.show_updater.run,
        IntervalTrigger(
            days=1,
            start_date=datetime.datetime.now().replace(hour=self.config.showupdate_hour)
        ),
        name=self.show_updater.name,
        id=self.show_updater.name
    )

    # add rss cache updater job
    self.scheduler.add_job(
        self.rsscache_updater.run,
        IntervalTrigger(
            minutes=15,
        ),
        name=self.rsscache_updater.name,
        id=self.rsscache_updater.name
    )

    # add daily search job (first run delayed 4 minutes after startup)
    self.scheduler.add_job(
        self.daily_searcher.run,
        IntervalTrigger(
            minutes=self.config.daily_searcher_freq,
            start_date=datetime.datetime.now() + datetime.timedelta(minutes=4)
        ),
        name=self.daily_searcher.name,
        id=self.daily_searcher.name
    )

    # add failed snatch search job
    self.scheduler.add_job(
        self.failed_snatch_searcher.run,
        IntervalTrigger(
            hours=1,
            start_date=datetime.datetime.now() + datetime.timedelta(minutes=4)
        ),
        name=self.failed_snatch_searcher.name,
        id=self.failed_snatch_searcher.name
    )

    # add backlog search job (first run delayed 30 minutes after startup)
    self.scheduler.add_job(
        self.backlog_searcher.run,
        IntervalTrigger(
            minutes=self.config.backlog_searcher_freq,
            start_date=datetime.datetime.now() + datetime.timedelta(minutes=30)
        ),
        name=self.backlog_searcher.name,
        id=self.backlog_searcher.name
    )

    # add auto-postprocessing job
    self.scheduler.add_job(
        self.auto_postprocessor.run,
        IntervalTrigger(
            minutes=self.config.autopostprocessor_freq
        ),
        name=self.auto_postprocessor.name,
        id=self.auto_postprocessor.name
    )

    # add find proper job; interval string is mapped to minutes here
    self.scheduler.add_job(
        self.proper_searcher.run,
        IntervalTrigger(
            minutes={
                '15m': 15,
                '45m': 45,
                '90m': 90,
                '4h': 4 * 60,
                'daily': 24 * 60
            }[self.config.proper_searcher_interval]
        ),
        name=self.proper_searcher.name,
        id=self.proper_searcher.name
    )

    # add trakt.tv checker job
    self.scheduler.add_job(
        self.trakt_searcher.run,
        IntervalTrigger(
            hours=1
        ),
        name=self.trakt_searcher.name,
        id=self.trakt_searcher.name
    )

    # add subtitles finder job
    self.scheduler.add_job(
        self.subtitle_searcher.run,
        IntervalTrigger(
            hours=self.config.subtitle_searcher_freq
        ),
        name=self.subtitle_searcher.name,
        id=self.subtitle_searcher.name
    )

    # add upnp client job, re-run before the NAT port mapping expires
    self.scheduler.add_job(
        self.upnp_client.run,
        IntervalTrigger(
            seconds=self.upnp_client._nat_portmap_lifetime
        ),
        name=self.upnp_client.name,
        id=self.upnp_client.name
    )

    # add namecache update job
    self.scheduler.add_job(
        self.name_cache.build_all,
        IntervalTrigger(
            days=1,
        ),
        name=self.name_cache.name,
        id=self.name_cache.name
    )

    # start scheduler service
    self.scheduler.start()

    # start queue's
    self.search_queue.start()
    self.show_queue.start()
    self.postprocessor_queue.start()
    self.event_queue.start()

    # fire off startup events
    self.event_queue.fire_event(self.name_cache.build_all)
    self.event_queue.fire_event(self.version_updater.run)
    self.event_queue.fire_event(self.tz_updater.run)

    # start webserver
    self.wserver.start()

    # launch browser window
    if all([not sickrage.app.no_launch, sickrage.app.config.launch_browser]):
        self.event_queue.fire_event(lambda: launch_browser(('http', 'https')[sickrage.app.config.enable_https],
                                                           sickrage.app.config.web_host,
                                                           sickrage.app.config.web_port))

    # start ioloop (blocks until the application shuts down)
    self.io_loop.start()
def run(self):
    """Configure and start the tornado web server.

    Builds the URL routing table (API, auth, static assets, and all
    ``Route``-registered handlers), wires up optional HTTPS with
    self-signed certificate generation as a fallback, binds the listen
    port, optionally launches a browser, then blocks in the IO loop.

    Raises:
        SystemExit: if the listen socket cannot be bound (port in use,
            permission denied, ...).
    """
    self.started = True

    # load languages
    tornado.locale.load_gettext_translations(sickrage.LOCALE_DIR, 'messages')

    # clear mako cache folder so templates are recompiled fresh
    mako_cache = os.path.join(sickrage.app.cache_dir, 'mako')
    if os.path.isdir(mako_cache):
        shutil.rmtree(mako_cache)

    # video root: root_dirs is "<default-index>|<path>|<path>...", so the
    # default root is at offset int(root_dirs[0]) + 1
    if sickrage.app.config.root_dirs:
        root_dirs = sickrage.app.config.root_dirs.split('|')
        self.video_root = root_dirs[int(root_dirs[0]) + 1]

    # web root: normalize to a single leading slash, no trailing slash
    # (fixed: was a redundant chained self-assignment with lstrip+strip)
    if sickrage.app.config.web_root:
        sickrage.app.config.web_root = '/' + sickrage.app.config.web_root.strip('/')

    # api root
    self.api_root = r'%s/api/%s' % (sickrage.app.config.web_root, sickrage.app.config.api_key)

    # tornado setup
    if sickrage.app.config.enable_https:
        # If either the HTTPS certificate or key do not exist, make some self-signed ones.
        if not (sickrage.app.config.https_cert and os.path.exists(
                sickrage.app.config.https_cert)) or not (
                sickrage.app.config.https_key and os.path.exists(sickrage.app.config.https_key)):
            if not create_https_certificates(sickrage.app.config.https_cert,
                                             sickrage.app.config.https_key):
                sickrage.app.log.info("Unable to create CERT/KEY files, disabling HTTPS")
                sickrage.app.config.enable_https = False

        # re-check after generation; disable HTTPS if files are still missing
        if not (os.path.exists(sickrage.app.config.https_cert) and os.path.exists(
                sickrage.app.config.https_key)):
            sickrage.app.log.warning("Disabled HTTPS because of missing CERT and KEY files")
            sickrage.app.config.enable_https = False

    # Load the app
    self.app = Application(
        [
            # api
            (r'%s(/?.*)' % self.api_root, ApiHandler),

            # redirect to web root
            (r"(?!%s)(.*)" % sickrage.app.config.web_root, RedirectHandler,
             {"url": "%s/{0}" % sickrage.app.config.web_root}),

            # api key
            (r'%s/getkey(/?.*)' % sickrage.app.config.web_root, KeyHandler),

            # api builder
            (r'%s/api/builder' % sickrage.app.config.web_root, RedirectHandler,
             {"url": sickrage.app.config.web_root + '/apibuilder/'}),

            # login
            (r'%s/login(/?)' % sickrage.app.config.web_root, LoginHandler),

            # logout
            (r'%s/logout(/?)' % sickrage.app.config.web_root, LogoutHandler),

            # calendar
            (r'%s/calendar' % sickrage.app.config.web_root, CalendarHandler),

            # favicon
            (r'%s/(favicon\.ico)' % sickrage.app.config.web_root, StaticFileHandler,
             {"path": os.path.join(sickrage.app.config.gui_static_dir, 'images/favicon.ico')}),

            # images
            (r'%s/images/(.*)' % sickrage.app.config.web_root, StaticImageHandler,
             {"path": os.path.join(sickrage.app.config.gui_static_dir, 'images')}),

            # css
            (r'%s/css/(.*)' % sickrage.app.config.web_root, StaticFileHandler,
             {"path": os.path.join(sickrage.app.config.gui_static_dir, 'css')}),

            # scss
            (r'%s/scss/(.*)' % sickrage.app.config.web_root, StaticFileHandler,
             {"path": os.path.join(sickrage.app.config.gui_static_dir, 'scss')}),

            # fonts
            (r'%s/fonts/(.*)' % sickrage.app.config.web_root, StaticFileHandler,
             {"path": os.path.join(sickrage.app.config.gui_static_dir, 'fonts')}),

            # javascript
            (r'%s/js/(.*)' % sickrage.app.config.web_root, StaticFileHandler,
             {"path": os.path.join(sickrage.app.config.gui_static_dir, 'js')}),

            # videos
            (r'%s/videos/(.*)' % sickrage.app.config.web_root, StaticFileHandler,
             {"path": self.video_root}),
        ] + Route.get_routes(sickrage.app.config.web_root),
        debug=True,
        autoreload=False,
        gzip=sickrage.app.config.web_use_gzip,
        xheaders=sickrage.app.config.handle_reverse_proxy,
        cookie_secret=sickrage.app.config.web_cookie_secret,
        login_url='%s/login/' % sickrage.app.config.web_root)

    self.server = HTTPServer(self.app, no_keep_alive=True)

    if sickrage.app.config.enable_https:
        self.server.ssl_options = {
            "certfile": sickrage.app.config.https_cert,
            "keyfile": sickrage.app.config.https_key
        }

    try:
        self.server.listen(sickrage.app.config.web_port, None)

        sickrage.app.log.info("SiCKRAGE :: STARTED")
        sickrage.app.log.info(
            "SiCKRAGE :: VERSION:[{}]".format(sickrage.app.version_updater.version))
        sickrage.app.log.info(
            "SiCKRAGE :: CONFIG:[{}] [v{}]".format(sickrage.app.config_file,
                                                   sickrage.app.config.config_version))
        sickrage.app.log.info(
            "SiCKRAGE :: URL:[{}://{}:{}/]".format(
                ('http', 'https')[sickrage.app.config.enable_https],
                sickrage.app.config.web_host, sickrage.app.config.web_port))

        # launch browser window in a separate thread so it can't block startup
        if all([not sickrage.app.no_launch, sickrage.app.config.launch_browser]):
            threading.Thread(None, lambda: launch_browser(
                ('http', 'https')[sickrage.app.config.enable_https],
                sickrage.app.config.web_host,
                sickrage.app.config.web_port
            ), name="LAUNCH-BROWSER").start()

        # blocks until the application shuts down
        sickrage.app.io_loop.start()
    except socket.error as e:
        sickrage.app.log.warning(e.strerror)
        raise SystemExit