def sort_schedules(all_events, now=None):
    """Sort the schedules, based on order of happening from now
    `all_events=True`: Return an event for each active day
    `all_events=False`: Return only first occurring event of the week
    `now` : for testing: simulated localtime()

    Returns a list of tuples (minutes-until-event, action, parms, schedule, enabled),
    sorted by time until the event.
    """
    day_min = 24 * 60
    week_min = 7 * day_min
    events = []

    # Convert "now" to minutes since the start of the week (Monday 00:00)
    now = now or time.localtime()
    now_hm = now[3] * 60 + now[4]
    now = now[6] * day_min + now_hm

    for schedule in cfg.schedules():
        parms = None
        try:
            # Note: the last parameter can have spaces (category name)!
            enabled, m, h, dd, action, parms = schedule.split(None, 5)
        except ValueError:
            # No trailing parameter; try the 5-field form
            try:
                enabled, m, h, dd, action = schedule.split(None, 4)
            except ValueError:
                continue  # Bad schedule, ignore
        action = action.strip()
        if dd == "*":
            dd = "1234567"
        if not dd.isdigit():
            continue  # Bad schedule, ignore
        for d in dd:
            # Event time in minutes since the start of the week (day digits are 1-based)
            try:
                then = (int(d) - 1) * day_min + int(h) * 60 + int(m)
            except ValueError:
                break  # Bad schedule (non-numeric time), ignore
            dif = then - now
            if all_events and dif < 0:
                # Expired event will occur again after a week
                dif = dif + week_min

            events.append((dif, action, parms, schedule, enabled))
            if not all_events:
                break

    events.sort(key=lambda x: x[0])
    return events
def sort_schedules(all_events, now=None):
    """ Sort the schedules, based on order of happening from now
        `all_events=True`: Return an event for each active day
        `all_events=False`: Return only first occurring event of the week
        `now` : for testing: simulated localtime()

        Returns a list of tuples (minutes-until-event, action, parms,
        schedule, enabled), sorted by time until the event.
    """
    day_min = 24 * 60
    week_min = 7 * day_min
    events = []

    # Convert "now" to minutes since the start of the week (Monday 00:00)
    now = now or time.localtime()
    now_hm = now[3] * 60 + now[4]
    now = now[6] * day_min + now_hm

    for schedule in cfg.schedules():
        parms = None
        try:
            # Note: the last parameter can have spaces (category name)!
            enabled, m, h, dd, action, parms = schedule.split(None, 5)
        except ValueError:
            # No trailing parameter; try the 5-field form
            try:
                enabled, m, h, dd, action = schedule.split(None, 4)
            except ValueError:
                continue  # Bad schedule, ignore
        action = action.strip()
        if dd == '*':
            dd = '1234567'
        if not dd.isdigit():
            continue  # Bad schedule, ignore
        for d in dd:
            then = (int(d) - 1) * day_min + int(h) * 60 + int(m)
            dif = then - now
            if all_events and dif < 0:
                # Expired event will occur again after a week
                dif = dif + week_min

            events.append((dif, action, parms, schedule, enabled))
            if not all_events:
                break

    # Key-based sort instead of a Python 2 cmp-function: the cmp argument
    # was removed in Python 3 and key-sorting avoids O(n log n) Python-level
    # comparator calls.
    events.sort(key=lambda x: x[0])
    return events
def sort_schedules(forward):
    """ Sort the schedules, based on order of happening from now
        forward: assume expired daily event to occur tomorrow

        Returns a list of tuples (minutes-until-event, action, parms,
        schedule), sorted by time until the event.
    """
    events = []

    # Convert "now" to minutes since the start of the week (Monday 00:00)
    now = time.localtime()
    now_hm = int(now[3]) * 60 + int(now[4])
    now = int(now[6]) * 24 * 60 + now_hm

    for schedule in cfg.schedules():
        parms = None
        try:
            m, h, d, action, parms = schedule.split(None, 4)
        except ValueError:
            # No trailing parameter; try the 4-field form
            try:
                m, h, d, action = schedule.split(None, 3)
            except ValueError:
                continue  # Bad schedule, ignore
        action = action.strip()
        try:
            then = int(h) * 60 + int(m)
            if d == '*':
                # Daily event: use today, or tomorrow when it already expired
                d = int(now / (24 * 60))
                if forward and (then < now_hm):
                    d = (d + 1) % 7
            else:
                d = int(d) - 1
            then = d * 24 * 60 + then
        except ValueError:
            continue  # Bad schedule, ignore
        dif = then - now
        if dif < 0:
            # Expired event will occur again after a week
            dif = dif + 7 * 24 * 60
        events.append((dif, action, parms, schedule))

    # Key-based sort instead of a Python 2 cmp-function: the cmp argument
    # was removed in Python 3 and key-sorting is faster anyway.
    events.sort(key=lambda x: x[0])
    return events
def init(): """ Create the scheduler and set all required events """ global __SCHED reset_guardian() __SCHED = kronos.ThreadedScheduler() rss_planned = False for schedule in cfg.schedules(): arguments = [] argument_list = None try: m, h, d, action_name = schedule.split() except: m, h, d, action_name, argument_list = schedule.split(None, 4) if argument_list: arguments = argument_list.split() action_name = action_name.lower() try: m = int(m) h = int(h) except: logging.warning(T('Bad schedule %s at %s:%s'), action_name, m, h) continue if d.isdigit(): d = [int(i) for i in d] else: d = range(1, 8) if action_name == 'resume': action = scheduled_resume arguments = [] elif action_name == 'pause': action = sabnzbd.downloader.Downloader.do.pause arguments = [] elif action_name == 'pause_all': action = sabnzbd.pause_all arguments = [] elif action_name == 'shutdown': action = sabnzbd.shutdown_program arguments = [] elif action_name == 'restart': action = sabnzbd.restart_program arguments = [] elif action_name == 'pause_post': action = pp_pause elif action_name == 'resume_post': action = pp_resume elif action_name == 'speedlimit' and arguments != []: action = sabnzbd.downloader.Downloader.do.limit_speed elif action_name == 'enable_server' and arguments != []: action = sabnzbd.enable_server elif action_name == 'disable_server' and arguments != []: action = sabnzbd.disable_server elif action_name == 'scan_folder': action = sabnzbd.dirscanner.dirscan elif action_name == 'rss_scan': action = rss.run_method rss_planned = True elif action_name == 'remove_failed': action = sabnzbd.api.history_remove_failed elif action_name == 'remove_completed': action = sabnzbd.api.history_remove_completed elif action_name == 'enable_quota': action = sabnzbd.bpsmeter.BPSMeter.do.set_status arguments = [True] elif action_name == 'disable_quota': action = sabnzbd.bpsmeter.BPSMeter.do.set_status arguments = [False] elif action_name == 'pause_all_low': action = sabnzbd.nzbqueue.NzbQueue.do.pause_on_prio 
arguments = [LOW_PRIORITY] elif action_name == 'pause_all_normal': action = sabnzbd.nzbqueue.NzbQueue.do.pause_on_prio arguments = [NORMAL_PRIORITY] elif action_name == 'pause_all_high': action = sabnzbd.nzbqueue.NzbQueue.do.pause_on_prio arguments = [HIGH_PRIORITY] elif action_name == 'resume_all_low': action = sabnzbd.nzbqueue.NzbQueue.do.resume_on_prio arguments = [LOW_PRIORITY] elif action_name == 'resume_all_normal': action = sabnzbd.nzbqueue.NzbQueue.do.resume_on_prio arguments = [NORMAL_PRIORITY] elif action_name == 'resume_all_high': action = sabnzbd.nzbqueue.NzbQueue.do.resume_on_prio arguments = [HIGH_PRIORITY] else: logging.warning(T('Unknown action: %s'), action_name) continue logging.debug("scheduling %s(%s) on days %s at %02d:%02d", action_name, arguments, d, h, m) __SCHED.add_daytime_task(action, action_name, d, None, (h, m), kronos.method.sequential, arguments, None) # Set Guardian interval to 30 seconds __SCHED.add_interval_task(sched_guardian, "Guardian", 15, 30, kronos.method.sequential, None, None) # Set RSS check interval if not rss_planned: interval = cfg.rss_rate() delay = random.randint(0, interval - 1) logging.debug("Scheduling RSS interval task every %s min (delay=%s)", interval, delay) sabnzbd.rss.next_run(time.time() + delay * 60) __SCHED.add_interval_task(rss.run_method, "RSS", delay * 60, interval * 60, kronos.method.sequential, None, None) __SCHED.add_single_task(rss.run_method, 'RSS', 15, kronos.method.sequential, None, None) if cfg.version_check(): # Check for new release, once per week on random time m = random.randint(0, 59) h = random.randint(0, 23) d = (random.randint(1, 7), ) logging.debug("Scheduling VersionCheck on day %s at %s:%s", d[0], h, m) __SCHED.add_daytime_task(sabnzbd.misc.check_latest_version, 'VerCheck', d, None, (h, m), kronos.method.sequential, [], None) action, hour, minute = sabnzbd.bpsmeter.BPSMeter.do.get_quota() if action: logging.info('Setting schedule for quota check daily at %s:%s', hour, minute) 
__SCHED.add_daytime_task(action, 'quota_reset', range(1, 8), None, (hour, minute), kronos.method.sequential, [], None) logging.info('Setting schedule for midnight BPS reset') __SCHED.add_daytime_task(sabnzbd.bpsmeter.midnight_action, 'midnight_bps', range(1, 8), None, (0, 0), kronos.method.sequential, [], None) # Subscribe to special schedule changes cfg.rss_rate.callback(schedule_guard)
def initialize(pause_downloader=False, clean_up=False, evalSched=False, repair=0):
    """ Bring the program to a fully initialized state (one-shot).

        pause_downloader: start the downloader in paused state
        clean_up: remove stale *.sab admin files first
        evalSched: let the scheduler evaluate schedules for the initial state
        repair: queue-repair level passed to read_queue (forced to 2 when a
                repair request is pending)
        Returns False when already initialized, True otherwise.
    """
    global __INITIALIZED__, __SHUTTING_DOWN__, LOGFILE, WEBLOGFILE, LOGHANDLER, GUIHANDLER, AMBI_LOCALHOST, WAITEXIT, DAEMON, MY_NAME, MY_FULLNAME, NEW_VERSION, DIR_HOME, DIR_APPDATA, DIR_LCLDATA, DIR_PROG, DIR_INTERFACES, DARWIN, RESTART_REQ

    if __INITIALIZED__:
        return False

    __SHUTTING_DOWN__ = False

    # Set global database connection for Web-UI threads
    cherrypy.engine.subscribe("start_thread", get_db_connection)

    # Paused?
    pause_downloader = pause_downloader or cfg.start_paused()

    # Clean-up, if requested
    if clean_up:
        # New admin folder
        filesystem.remove_all(cfg.admin_dir.get_path(), "*.sab")

    # Optionally wait for "incomplete" to become online
    if cfg.wait_for_dfolder():
        wait_for_download_folder()
    else:
        cfg.download_dir.set(cfg.download_dir(), create=True)
    cfg.download_dir.set_create(True)

    # Set access rights for "incomplete" base folder
    filesystem.set_permissions(cfg.download_dir.get_path(), recursive=False)

    # If dirscan_dir cannot be created, set a proper value anyway.
    # Maybe it's a network path that's temporarily missing.
    path = cfg.dirscan_dir.get_path()
    if not os.path.exists(path):
        filesystem.create_real_path(cfg.dirscan_dir.ident(), "", path, False)

    # Set call backs for Config items
    cfg.cache_limit.callback(new_limit)
    cfg.cherryhost.callback(guard_restart)
    cfg.cherryport.callback(guard_restart)
    cfg.web_dir.callback(guard_restart)
    cfg.web_color.callback(guard_restart)
    cfg.username.callback(guard_restart)
    cfg.password.callback(guard_restart)
    cfg.log_dir.callback(guard_restart)
    cfg.https_port.callback(guard_restart)
    cfg.https_cert.callback(guard_restart)
    cfg.https_key.callback(guard_restart)
    cfg.enable_https.callback(guard_restart)
    cfg.top_only.callback(guard_top_only)
    cfg.pause_on_post_processing.callback(guard_pause_on_pp)
    cfg.quota_size.callback(guard_quota_size)
    cfg.quota_day.callback(guard_quota_dp)
    cfg.quota_period.callback(guard_quota_dp)
    cfg.language.callback(guard_language)
    cfg.enable_https_verification.callback(guard_https_ver)
    # Apply the current HTTPS-verification setting immediately
    guard_https_ver()

    # Set cache limit: replace an unset or legacy default value
    if not cfg.cache_limit() or (cfg.cache_limit() in ("200M", "450M") and (sabnzbd.WIN32 or sabnzbd.DARWIN)):
        cfg.cache_limit.set(misc.get_cache_limit())
    ArticleCache.do.new_limit(cfg.cache_limit.get_int())

    check_incomplete_vs_complete()

    # Set language files
    lang.set_locale_info("SABnzbd", DIR_LANGUAGE)
    lang.set_language(cfg.language())
    sabnzbd.api.clear_trans_cache()

    sabnzbd.change_queue_complete_action(cfg.queue_complete(), new=False)

    # One time conversion "speedlimit" in schedules.
    if not cfg.sched_converted():
        schedules = cfg.schedules()
        newsched = []
        for sched in schedules:
            if "speedlimit" in sched:
                # Append a K unit to old plain-number speedlimit values
                newsched.append(re.sub(r"(speedlimit \d+)$", r"\1K", sched))
            else:
                newsched.append(sched)
        cfg.schedules.set(newsched)
        cfg.sched_converted.set(1)

    # Second time schedule conversion: prefix every schedule with enabled-flag 1
    if cfg.sched_converted() != 2:
        cfg.schedules.set(["%s %s" % (1, schedule) for schedule in cfg.schedules()])
        cfg.sched_converted.set(2)
        config.save_config()

    # Convert auto-sort from the old boolean-style values
    if cfg.auto_sort() == "0":
        cfg.auto_sort.set("")
    elif cfg.auto_sort() == "1":
        cfg.auto_sort.set("avg_age asc")

    # Add hostname to the whitelist
    if not cfg.host_whitelist():
        cfg.host_whitelist.set(socket.gethostname())

    # Do repair if requested
    if check_repair_request():
        repair = 2
        pause_downloader = True

    # Initialize threads (order matters: the queue must exist before it is read)
    rss.init()
    paused = BPSMeter.do.read()
    NzbQueue()
    Downloader(pause_downloader or paused)
    Decoder()
    Assembler()
    PostProcessor()
    NzbQueue.do.read_queue(repair)
    DirScanner()
    Rating()
    URLGrabber()
    scheduler.init()

    if evalSched:
        scheduler.analyse(pause_downloader)

    logging.info("All processes started")
    RESTART_REQ = False
    __INITIALIZED__ = True
    return True
def init(): """ Create the scheduler and set all required events """ global __SCHED reset_guardian() __SCHED = kronos.ThreadedScheduler() rss_planned = False for schedule in cfg.schedules(): arguments = [] argument_list = None try: m, h, d, action_name = schedule.split() except: m, h, d, action_name, argument_list = schedule.split(None, 4) if argument_list: arguments = argument_list.split() action_name = action_name.lower() try: m = int(m) h = int(h) except: logging.warning(Ta('Bad schedule %s at %s:%s'), action_name, m, h) continue if d.isdigit(): d = [int(d)] else: d = range(1, 8) if action_name == 'resume': action = scheduled_resume arguments = [] elif action_name == 'pause': action = sabnzbd.downloader.Downloader.do.pause arguments = [] elif action_name == 'pause_all': action = sabnzbd.pause_all arguments = [] elif action_name == 'shutdown': action = sabnzbd.shutdown_program arguments = [] elif action_name == 'restart': action = sabnzbd.restart_program arguments = [] elif action_name == 'pause_post': action = pp_pause elif action_name == 'resume_post': action = pp_resume elif action_name == 'speedlimit' and arguments != []: action = sabnzbd.downloader.Downloader.do.limit_speed elif action_name == 'enable_server' and arguments != []: action = sabnzbd.enable_server elif action_name == 'disable_server' and arguments != []: action = sabnzbd.disable_server elif action_name == 'scan_folder': action = sabnzbd.dirscanner.dirscan elif action_name == 'rss_scan': action = rss.run_method rss_planned = True else: logging.warning(Ta('Unknown action: %s'), action_name) continue logging.debug("scheduling %s(%s) on days %s at %s:%s", action_name, arguments, d, h, m) __SCHED.add_daytime_task(action, action_name, d, None, (h, m), kronos.method.sequential, arguments, None) # Set Guardian interval to 30 seconds __SCHED.add_interval_task(sched_guardian, "Guardian", 15, 30, kronos.method.sequential, None, None) # Set RSS check interval if not rss_planned: interval = cfg.rss_rate() 
delay = random.randint(0, interval-1) logging.debug("Scheduling RSS interval task every %s min (delay=%s)", interval, delay) __SCHED.add_interval_task(rss.run_method, "RSS", delay*60, interval*60, kronos.method.sequential, None, None) __SCHED.add_single_task(rss.run_method, 'RSS', 15, kronos.method.sequential, None, None) if cfg.version_check(): # Check for new release, once per week on random time m = random.randint(0, 59) h = random.randint(0, 23) d = (random.randint(1, 7), ) logging.debug("Scheduling VersionCheck on day %s at %s:%s", d[0], h, m) __SCHED.add_daytime_task(sabnzbd.misc.check_latest_version, 'VerCheck', d, None, (h, m), kronos.method.sequential, [], None) if cfg.newzbin_bookmarks(): interval = cfg.bookmark_rate() delay = random.randint(0, interval-1) logging.debug("Scheduling Bookmark interval task every %s min (delay=%s)", interval, delay) __SCHED.add_interval_task(Bookmarks.do.run, 'Bookmarks', delay*60, interval*60, kronos.method.sequential, None, None) __SCHED.add_single_task(Bookmarks.do.run, 'Bookmarks', 20, kronos.method.sequential, None, None) # Subscribe to special schedule changes cfg.newzbin_bookmarks.callback(schedule_guard) cfg.bookmark_rate.callback(schedule_guard) cfg.rss_rate.callback(schedule_guard)
def initialize(pause_downloader=False, clean_up=False, evalSched=False, repair=0):
    """ Bring the program to a fully initialized state (one-shot).

        pause_downloader: start the downloader in paused state
        clean_up: remove stale *.sab admin files first
        evalSched: let the scheduler evaluate schedules for the initial state
        repair: queue-repair level passed to read_queue (forced to 2 when a
                repair request is pending)
        Returns False when already initialized, True otherwise.
    """
    global __INITIALIZED__, __SHUTTING_DOWN__,\
           LOGFILE, WEBLOGFILE, LOGHANDLER, GUIHANDLER, AMBI_LOCALHOST, WAITEXIT, \
           DAEMON, MY_NAME, MY_FULLNAME, NEW_VERSION, \
           DIR_HOME, DIR_APPDATA, DIR_LCLDATA, DIR_PROG, DIR_INTERFACES, \
           DARWIN, RESTART_REQ, OLD_QUEUE

    if __INITIALIZED__:
        return False

    __SHUTTING_DOWN__ = False

    # Set global database connection for Web-UI threads
    cherrypy.engine.subscribe('start_thread', connect_db)

    # Paused?
    pause_downloader = pause_downloader or cfg.start_paused()

    # Clean-up, if requested
    if clean_up:
        # New admin folder
        misc.remove_all(cfg.admin_dir.get_path(), '*.sab')

    # Optionally wait for "incomplete" to become online
    if cfg.wait_for_dfolder():
        wait_for_download_folder()
    else:
        cfg.download_dir.set(cfg.download_dir(), create=True)
    cfg.download_dir.set_create(True)

    # Set access rights for "incomplete" base folder
    misc.set_permissions(cfg.download_dir.get_path(), recursive=False)

    # If dirscan_dir cannot be created, set a proper value anyway.
    # Maybe it's a network path that's temporarily missing.
    path = cfg.dirscan_dir.get_path()
    if not os.path.exists(path):
        sabnzbd.misc.create_real_path(cfg.dirscan_dir.ident(), '', path, False)

    # Set call backs for Config items
    cfg.cache_limit.callback(new_limit)
    cfg.cherryhost.callback(guard_restart)
    cfg.cherryport.callback(guard_restart)
    cfg.web_dir.callback(guard_restart)
    cfg.web_color.callback(guard_restart)
    cfg.username.callback(guard_restart)
    cfg.password.callback(guard_restart)
    cfg.log_dir.callback(guard_restart)
    cfg.https_port.callback(guard_restart)
    cfg.https_cert.callback(guard_restart)
    cfg.https_key.callback(guard_restart)
    cfg.enable_https.callback(guard_restart)
    cfg.top_only.callback(guard_top_only)
    cfg.pause_on_post_processing.callback(guard_pause_on_pp)
    cfg.growl_server.callback(sabnzbd.notifier.change_value)
    cfg.growl_password.callback(sabnzbd.notifier.change_value)
    cfg.quota_size.callback(guard_quota_size)
    cfg.quota_day.callback(guard_quota_dp)
    cfg.quota_period.callback(guard_quota_dp)
    cfg.fsys_type.callback(guard_fsys_type)
    cfg.language.callback(sabnzbd.notifier.reset_growl)
    cfg.enable_https_verification.callback(guard_https_ver)
    # Apply the current HTTPS-verification setting immediately
    guard_https_ver()

    # Set Posix filesystem encoding
    sabnzbd.encoding.change_fsys(cfg.fsys_type())

    # Set cache limit: replace an unset or legacy default value
    if not cfg.cache_limit() or (cfg.cache_limit() == '200M' and (sabnzbd.WIN32 or sabnzbd.DARWIN)):
        cfg.cache_limit.set(misc.get_cache_limit())
    ArticleCache.do.new_limit(cfg.cache_limit.get_int())

    check_incomplete_vs_complete()

    # Set language files
    lang.set_locale_info('SABnzbd', DIR_LANGUAGE)
    lang.set_language(cfg.language())
    sabnzbd.api.clear_trans_cache()

    OLD_QUEUE = check_old_queue()

    sabnzbd.change_queue_complete_action(cfg.queue_complete(), new=False)

    # One time conversion "speedlimit" in schedules.
    if not cfg.sched_converted():
        schedules = cfg.schedules()
        newsched = []
        for sched in schedules:
            if 'speedlimit' in sched:
                # Append a K unit to old plain-number speedlimit values
                newsched.append(re.sub(r'(speedlimit \d+)$', r'\1K', sched))
            else:
                newsched.append(sched)
        cfg.schedules.set(newsched)
        cfg.sched_converted.set(1)

    # Second time schedule conversion: prefix every schedule with enabled-flag 1
    if cfg.sched_converted() != 2:
        cfg.schedules.set(['%s %s' % (1, schedule) for schedule in cfg.schedules()])
        cfg.sched_converted.set(2)

    # Do repair if requested
    if check_repair_request():
        repair = 2
        pause_downloader = True

    # Initialize threads (order matters: the queue must exist before it is read)
    rss.init()

    paused = BPSMeter.do.read()

    PostProcessor()

    NzbQueue()

    Assembler()

    NzbQueue.do.read_queue(repair)

    Downloader(pause_downloader or paused)

    DirScanner()

    Rating()

    URLGrabber()

    scheduler.init()

    if evalSched:
        scheduler.analyse(pause_downloader)

    logging.info('All processes started')
    RESTART_REQ = False
    __INITIALIZED__ = True
    return True
def initialize(pause_downloader=False, clean_up=False, evalSched=False, repair=0):
    """ Bring the program to a fully initialized state (one-shot).

        pause_downloader: start the downloader in paused state
        clean_up: remove stale *.sab admin files first
        evalSched: let the scheduler evaluate schedules for the initial state
        repair: queue-repair level passed to read_queue (forced to 2 when a
                repair request is pending)
        Returns False when already initialized, True otherwise.
    """
    global __INITIALIZED__, __SHUTTING_DOWN__,\
           LOGFILE, WEBLOGFILE, LOGHANDLER, GUIHANDLER, AMBI_LOCALHOST, WAITEXIT, \
           DAEMON, MY_NAME, MY_FULLNAME, NEW_VERSION, \
           DIR_HOME, DIR_APPDATA, DIR_LCLDATA, DIR_PROG, DIR_INTERFACES, \
           DARWIN, RESTART_REQ, OLD_QUEUE

    if __INITIALIZED__:
        return False

    __SHUTTING_DOWN__ = False

    # Set global database connection for Web-UI threads
    cherrypy.engine.subscribe('start_thread', connect_db)

    # Paused?
    pause_downloader = pause_downloader or cfg.start_paused()

    # Clean-up, if requested
    if clean_up:
        # New admin folder
        misc.remove_all(cfg.admin_dir.get_path(), '*.sab')

    # Optionally wait for "incomplete" to become online
    if cfg.wait_for_dfolder():
        wait_for_download_folder()
    else:
        cfg.download_dir.set(cfg.download_dir(), create=True)
    cfg.download_dir.set_create(True)

    # Set access rights for "incomplete" base folder
    misc.set_permissions(cfg.download_dir.get_path(), recursive=False)

    # If dirscan_dir cannot be created, set a proper value anyway.
    # Maybe it's a network path that's temporarily missing.
    path = cfg.dirscan_dir.get_path()
    if not os.path.exists(path):
        sabnzbd.misc.create_real_path(cfg.dirscan_dir.ident(), '', path, False)

    # Set call backs for Config items
    cfg.cache_limit.callback(new_limit)
    cfg.cherryhost.callback(guard_restart)
    cfg.cherryport.callback(guard_restart)
    cfg.web_dir.callback(guard_restart)
    cfg.web_dir2.callback(guard_restart)
    cfg.web_color.callback(guard_restart)
    cfg.web_color2.callback(guard_restart)
    cfg.username.callback(guard_restart)
    cfg.password.callback(guard_restart)
    cfg.log_dir.callback(guard_restart)
    cfg.https_port.callback(guard_restart)
    cfg.https_cert.callback(guard_restart)
    cfg.https_key.callback(guard_restart)
    cfg.enable_https.callback(guard_restart)
    cfg.top_only.callback(guard_top_only)
    cfg.pause_on_post_processing.callback(guard_pause_on_pp)
    cfg.growl_server.callback(sabnzbd.notifier.change_value)
    cfg.growl_password.callback(sabnzbd.notifier.change_value)
    cfg.quota_size.callback(guard_quota_size)
    cfg.quota_day.callback(guard_quota_dp)
    cfg.quota_period.callback(guard_quota_dp)
    cfg.fsys_type.callback(guard_fsys_type)
    cfg.language.callback(sabnzbd.notifier.reset_growl)
    cfg.enable_https_verification.callback(guard_https_ver)
    # Apply the current HTTPS-verification setting immediately
    guard_https_ver()

    # Set Posix filesystem encoding
    sabnzbd.encoding.change_fsys(cfg.fsys_type())

    # Set cache limit: bump unset or legacy 200M default up to 450M
    if sabnzbd.WIN32 or sabnzbd.DARWIN:
        if cfg.cache_limit() == '' or cfg.cache_limit() == '200M':
            cfg.cache_limit.set('450M')
    ArticleCache.do.new_limit(cfg.cache_limit.get_int())

    check_incomplete_vs_complete()

    # Set language files
    lang.set_locale_info('SABnzbd', DIR_LANGUAGE)
    lang.set_language(cfg.language())
    sabnzbd.api.clear_trans_cache()

    OLD_QUEUE = check_old_queue()

    sabnzbd.change_queue_complete_action(cfg.queue_complete(), new=False)

    # Probe external IPv6 connectivity once at startup
    sabnzbd.EXTERNAL_IPV6 = test_ipv6()
    logging.debug('External IPv6 test result: %s', sabnzbd.EXTERNAL_IPV6)

    # One time conversion "speedlimit" in schedules.
    if not cfg.sched_converted():
        schedules = cfg.schedules()
        newsched = []
        for sched in schedules:
            if 'speedlimit' in sched:
                # Append a K unit to old plain-number speedlimit values
                newsched.append(re.sub(r'(speedlimit \d+)$', r'\1K', sched))
            else:
                newsched.append(sched)
        cfg.schedules.set(newsched)
        cfg.sched_converted.set(True)

    # Do repair if requested
    if check_repair_request():
        repair = 2
        pause_downloader = True

    # Initialize threads (order matters: the queue must exist before it is read)
    rss.init()

    paused = BPSMeter.do.read()

    PostProcessor()

    NzbQueue()

    Assembler()

    NzbQueue.do.read_queue(repair)

    Downloader(pause_downloader or paused)

    DirScanner()

    Rating()

    URLGrabber()

    scheduler.init()

    if evalSched:
        scheduler.analyse(pause_downloader)

    logging.info('All processes started')
    RESTART_REQ = False
    __INITIALIZED__ = True
    return True
def initialize(pause_downloader=False, clean_up=False, evalSched=False, repair=0):
    """ Bring the program to a fully initialized state (one-shot).

        pause_downloader: start the downloader in paused state
        clean_up: remove stale *.sab admin files first
        evalSched: let the scheduler evaluate schedules for the initial state
        repair: queue-repair level passed to read_queue (forced to 2 when a
                repair request is pending)
        Returns False when already initialized, True otherwise.
    """
    global __INITIALIZED__, __SHUTTING_DOWN__, LOGFILE, WEBLOGFILE, LOGHANDLER, GUIHANDLER, AMBI_LOCALHOST, WAITEXIT, DAEMON, MY_NAME, MY_FULLNAME, NEW_VERSION, DIR_HOME, DIR_APPDATA, DIR_LCLDATA, DIR_PROG, DIR_INTERFACES, DARWIN, RESTART_REQ, OLD_QUEUE

    if __INITIALIZED__:
        return False

    __SHUTTING_DOWN__ = False

    # Set global database connection for Web-UI threads
    cherrypy.engine.subscribe("start_thread", connect_db)

    # Paused?
    pause_downloader = pause_downloader or cfg.start_paused()

    # Clean-up, if requested
    if clean_up:
        # New admin folder
        misc.remove_all(cfg.admin_dir.get_path(), "*.sab")

    # Optionally wait for "incomplete" to become online
    if cfg.wait_for_dfolder():
        wait_for_download_folder()
    else:
        cfg.download_dir.set(cfg.download_dir(), create=True)
    cfg.download_dir.set_create(True)

    # Set access rights for "incomplete" base folder
    misc.set_permissions(cfg.download_dir.get_path(), recursive=False)

    # If dirscan_dir cannot be created, set a proper value anyway.
    # Maybe it's a network path that's temporarily missing.
    path = cfg.dirscan_dir.get_path()
    if not os.path.exists(path):
        sabnzbd.misc.create_real_path(cfg.dirscan_dir.ident(), "", path, False)

    # Set call backs for Config items
    cfg.cache_limit.callback(new_limit)
    cfg.cherryhost.callback(guard_restart)
    cfg.cherryport.callback(guard_restart)
    cfg.web_dir.callback(guard_restart)
    cfg.web_dir2.callback(guard_restart)
    cfg.web_color.callback(guard_restart)
    cfg.web_color2.callback(guard_restart)
    cfg.log_dir.callback(guard_restart)
    cfg.https_port.callback(guard_restart)
    cfg.https_cert.callback(guard_restart)
    cfg.https_key.callback(guard_restart)
    cfg.enable_https.callback(guard_restart)
    cfg.top_only.callback(guard_top_only)
    cfg.pause_on_post_processing.callback(guard_pause_on_pp)
    cfg.growl_server.callback(sabnzbd.growler.change_value)
    cfg.growl_password.callback(sabnzbd.growler.change_value)
    cfg.quota_size.callback(guard_quota_size)
    cfg.quota_day.callback(guard_quota_dp)
    cfg.quota_period.callback(guard_quota_dp)
    cfg.fsys_type.callback(guard_fsys_type)
    cfg.language.callback(sabnzbd.growler.reset_growl)
    cfg.enable_https_verification.callback(guard_https_ver)
    # Apply the current HTTPS-verification setting immediately
    guard_https_ver()

    # Set Posix filesystem encoding
    sabnzbd.encoding.change_fsys(cfg.fsys_type())

    # Set cache limit
    if (sabnzbd.WIN32 or sabnzbd.DARWIN) and not cfg.cache_limit():
        cfg.cache_limit.set("200M")
    ArticleCache.do.new_limit(cfg.cache_limit.get_int())

    check_incomplete_vs_complete()

    # Handle language upgrade from 0.5.x to 0.6.x
    cfg.language.set(LANG_MAP.get(cfg.language(), cfg.language()))

    # Set language files
    lang.set_locale_info("SABnzbd", DIR_LANGUAGE)
    lang.set_language(cfg.language())
    sabnzbd.api.clear_trans_cache()

    OLD_QUEUE = check_old_queue()

    sabnzbd.change_queue_complete_action(cfg.queue_complete(), new=False)

    # Probe external IPv6 connectivity once at startup
    sabnzbd.EXTERNAL_IPV6 = test_ipv6()
    logging.debug("External IPv6 test result: %s", sabnzbd.EXTERNAL_IPV6)

    # One time conversion "speedlimit" in schedules.
    if not cfg.sched_converted():
        schedules = cfg.schedules()
        newsched = []
        for sched in schedules:
            if "speedlimit" in sched:
                # Append a K unit to old plain-number speedlimit values
                newsched.append(re.sub(r"(speedlimit \d+)$", r"\1K", sched))
            else:
                newsched.append(sched)
        cfg.schedules.set(newsched)
        cfg.sched_converted.set(True)

    # Do repair if requested
    if check_repair_request():
        repair = 2
        pause_downloader = True
    else:
        # Check crash detection file
        # if load_admin(TERM_FLAG_FILE, remove=True):
        # Repair mode 2 is a bit over an over-reaction!
        pass  # repair = 2

    # Set crash detection file
    # save_admin(1, TERM_FLAG_FILE)

    # Initialize threads (order matters: the queue must exist before it is read)
    rss.init()

    paused = BPSMeter.do.read()

    PostProcessor()

    NzbQueue()

    Assembler()

    NzbQueue.do.read_queue(repair)

    Downloader(pause_downloader or paused)

    DirScanner()

    Rating()

    URLGrabber()

    scheduler.init()

    if evalSched:
        scheduler.analyse(pause_downloader)

    logging.info("All processes started")
    RESTART_REQ = False
    __INITIALIZED__ = True
    return True
def load_schedules(self):
    """ Parse the configured schedules and register each enabled one
        with the scheduler; also register the built-in maintenance tasks
        (RSS, version check, quota reset, midnight jobs, server checks).
    """
    rss_planned = False

    for schedule in cfg.schedules():
        arguments = []
        argument_list = None
        try:
            # 5-field form: enabled minute hour day(s) action, no arguments
            enabled, m, h, d, action_name = schedule.split()
        except:
            # 6+-field form: everything after the action is one argument string
            try:
                enabled, m, h, d, action_name, argument_list = schedule.split(None, 5)
            except:
                continue  # Bad schedule, ignore
        if argument_list:
            arguments = argument_list.split()

        action_name = action_name.lower()
        try:
            m = int(m)
            h = int(h)
        except:
            logging.warning(T("Bad schedule %s at %s:%s"), action_name, m, h)
            continue

        # Day field: a string of digits selects individual weekdays
        # (each digit is one day), anything else (e.g. '*') means every day
        if d.isdigit():
            d = [int(i) for i in d]
        else:
            d = DAILY_RANGE

        # Map the textual action name onto the callable plus its fixed arguments
        if action_name == "resume":
            action = self.scheduled_resume
            arguments = []
        elif action_name == "pause":
            action = sabnzbd.Downloader.pause
            arguments = []
        elif action_name == "pause_all":
            action = sabnzbd.pause_all
            arguments = []
        elif action_name == "shutdown":
            action = sabnzbd.shutdown_program
            arguments = []
        elif action_name == "restart":
            action = sabnzbd.restart_program
            arguments = []
        elif action_name == "pause_post":
            action = pp_pause
        elif action_name == "resume_post":
            action = pp_resume
        elif action_name == "speedlimit" and arguments != []:
            action = sabnzbd.Downloader.limit_speed
        elif action_name == "enable_server" and arguments != []:
            action = sabnzbd.enable_server
        elif action_name == "disable_server" and arguments != []:
            action = sabnzbd.disable_server
        elif action_name == "scan_folder":
            action = sabnzbd.DirScanner.scan
        elif action_name == "rss_scan":
            action = sabnzbd.RSSReader.run
            # NOTE(review): rss_planned is set even when this schedule is
            # disabled below, which suppresses the default RSS interval
            # task — confirm this is intended.
            rss_planned = True
        elif action_name == "remove_failed":
            action = sabnzbd.api.history_remove_failed
        elif action_name == "remove_completed":
            action = sabnzbd.api.history_remove_completed
        elif action_name == "enable_quota":
            action = sabnzbd.BPSMeter.set_status
            arguments = [True]
        elif action_name == "disable_quota":
            action = sabnzbd.BPSMeter.set_status
            arguments = [False]
        elif action_name == "pause_all_low":
            action = sabnzbd.NzbQueue.pause_on_prio
            arguments = [LOW_PRIORITY]
        elif action_name == "pause_all_normal":
            action = sabnzbd.NzbQueue.pause_on_prio
            arguments = [NORMAL_PRIORITY]
        elif action_name == "pause_all_high":
            action = sabnzbd.NzbQueue.pause_on_prio
            arguments = [HIGH_PRIORITY]
        elif action_name == "resume_all_low":
            action = sabnzbd.NzbQueue.resume_on_prio
            arguments = [LOW_PRIORITY]
        elif action_name == "resume_all_normal":
            action = sabnzbd.NzbQueue.resume_on_prio
            arguments = [NORMAL_PRIORITY]
        elif action_name == "resume_all_high":
            action = sabnzbd.NzbQueue.resume_on_prio
            arguments = [HIGH_PRIORITY]
        elif action_name == "pause_cat":
            action = sabnzbd.NzbQueue.pause_on_cat
            # Category name may contain spaces, so pass the raw argument string
            arguments = [argument_list]
        elif action_name == "resume_cat":
            action = sabnzbd.NzbQueue.resume_on_cat
            arguments = [argument_list]
        else:
            logging.warning(T("Unknown action: %s"), action_name)
            continue

        if enabled == "1":
            logging.info("Scheduling %s(%s) on days %s at %02d:%02d", action_name, arguments, d, h, m)
            self.scheduler.add_daytime_task(action, action_name, d, None, (h, m), args=arguments)
        else:
            logging.debug("Skipping %s(%s) on days %s at %02d:%02d", action_name, arguments, d, h, m)

    # Set RSS check interval, unless an explicit rss_scan schedule exists
    if not rss_planned:
        interval = cfg.rss_rate()
        # Random delay spreads the first RSS fetch over the interval window
        delay = random.randint(0, interval - 1)
        logging.info("Scheduling RSS interval task every %s min (delay=%s)", interval, delay)
        sabnzbd.RSSReader.next_run = time.time() + delay * 60
        self.scheduler.add_interval_task(sabnzbd.RSSReader.run, "RSS", delay * 60, interval * 60)
        self.scheduler.add_single_task(sabnzbd.RSSReader.run, "RSS", 15)

    if cfg.version_check():
        # Check for new release, once per week on random time
        m = random.randint(0, 59)
        h = random.randint(0, 23)
        d = (random.randint(1, 7), )
        logging.info("Scheduling VersionCheck on day %s at %s:%s", d[0], h, m)
        self.scheduler.add_daytime_task(sabnzbd.misc.check_latest_version, "VerCheck", d, None, (h, m))

    # Daily quota-reset task, only when a quota is configured
    action, hour, minute = sabnzbd.BPSMeter.get_quota()
    if action:
        logging.info("Setting schedule for quota check daily at %s:%s", hour, minute)
        self.scheduler.add_daytime_task(action, "quota_reset", DAILY_RANGE, None, (hour, minute))

    if sabnzbd.misc.int_conv(cfg.history_retention()) > 0:
        logging.info("Setting schedule for midnight auto history-purge")
        self.scheduler.add_daytime_task(
            sabnzbd.database.midnight_history_purge,
            "midnight_history_purge",
            DAILY_RANGE,
            None,
            (0, 0),
        )

    logging.info("Setting schedule for midnight BPS reset")
    self.scheduler.add_daytime_task(sabnzbd.BPSMeter.midnight, "midnight_bps", DAILY_RANGE, None, (0, 0))

    logging.info("Setting schedule for server expiration check")
    self.scheduler.add_daytime_task(
        sabnzbd.downloader.check_server_expiration,
        "check_server_expiration",
        DAILY_RANGE,
        None,
        (0, 0),
    )

    logging.info("Setting scheduler for server quota check")
    self.scheduler.add_interval_task(
        sabnzbd.downloader.check_server_quota,
        "check_server_quota",
        0,
        10 * 60,
    )

    # Subscribe to special schedule changes
    cfg.rss_rate.callback(self.scheduler_restart_guard)
def init(): """ Create the scheduler and set all required events """ global __SCHED reset_guardian() __SCHED = kronos.ThreadedScheduler() rss_planned = False for schedule in cfg.schedules(): arguments = [] argument_list = None try: m, h, d, action_name = schedule.split() except: m, h, d, action_name, argument_list = schedule.split(None, 4) if argument_list: arguments = argument_list.split() action_name = action_name.lower() try: m = int(m) h = int(h) except: logging.warning(Ta('Bad schedule %s at %s:%s'), action_name, m, h) continue if d.isdigit(): d = [int(d)] else: d = range(1, 8) if action_name == 'resume': action = scheduled_resume arguments = [] elif action_name == 'pause': action = sabnzbd.downloader.Downloader.do.pause arguments = [] elif action_name == 'pause_all': action = sabnzbd.pause_all arguments = [] elif action_name == 'shutdown': action = sabnzbd.shutdown_program arguments = [] elif action_name == 'restart': action = sabnzbd.restart_program arguments = [] elif action_name == 'pause_post': action = pp_pause elif action_name == 'resume_post': action = pp_resume elif action_name == 'speedlimit' and arguments != []: action = sabnzbd.downloader.Downloader.do.limit_speed elif action_name == 'enable_server' and arguments != []: action = sabnzbd.enable_server elif action_name == 'disable_server' and arguments != []: action = sabnzbd.disable_server elif action_name == 'scan_folder': action = sabnzbd.dirscanner.dirscan elif action_name == 'rss_scan': action = rss.run_method rss_planned = True else: logging.warning(Ta('Unknown action: %s'), action_name) continue logging.debug("scheduling %s(%s) on days %s at %s:%s", action_name, arguments, d, h, m) __SCHED.add_daytime_task(action, action_name, d, None, (h, m), kronos.method.sequential, arguments, None) # Set Guardian interval to 30 seconds __SCHED.add_interval_task(sched_guardian, "Guardian", 15, 30, kronos.method.sequential, None, None) # Set RSS check interval if not rss_planned: interval = cfg.rss_rate() 
delay = random.randint(0, interval - 1) logging.debug("Scheduling RSS interval task every %s min (delay=%s)", interval, delay) __SCHED.add_interval_task(rss.run_method, "RSS", delay * 60, interval * 60, kronos.method.sequential, None, None) __SCHED.add_single_task(rss.run_method, 'RSS', 15, kronos.method.sequential, None, None) if cfg.version_check(): # Check for new release, once per week on random time m = random.randint(0, 59) h = random.randint(0, 23) d = (random.randint(1, 7), ) logging.debug("Scheduling VersionCheck on day %s at %s:%s", d[0], h, m) __SCHED.add_daytime_task(sabnzbd.misc.check_latest_version, 'VerCheck', d, None, (h, m), kronos.method.sequential, [], None) if cfg.newzbin_bookmarks(): interval = cfg.bookmark_rate() delay = random.randint(0, interval - 1) logging.debug( "Scheduling Bookmark interval task every %s min (delay=%s)", interval, delay) __SCHED.add_interval_task(Bookmarks.do.run, 'Bookmarks', delay * 60, interval * 60, kronos.method.sequential, None, None) __SCHED.add_single_task(Bookmarks.do.run, 'Bookmarks', 20, kronos.method.sequential, None, None) # Subscribe to special schedule changes cfg.newzbin_bookmarks.callback(schedule_guard) cfg.bookmark_rate.callback(schedule_guard) cfg.rss_rate.callback(schedule_guard)