Exemplo n.º 1
0
 def run(self):
     """Check Github for a newer release, bracketing the check with job bookkeeping."""
     job_name = 'Check Version'
     logger.info('[VersionCheck] Checking for new release on Github.')
     # Mark the job as running before the (potentially slow) remote call.
     helpers.job_management(write=True, job=job_name,
                            current_run=helpers.utctimestamp(), status='Running')
     mylar.VERSION_STATUS = 'Running'
     versioncheck.checkGithub()
     # Record completion and flip the status back to idle.
     helpers.job_management(write=True, job=job_name,
                            last_run_completed=helpers.utctimestamp(), status='Waiting')
     mylar.VERSION_STATUS = 'Waiting'
Exemplo n.º 2
0
    def run(self):
        """Run the scheduled search pass over all Wanted issues."""
        job_name = 'Auto-Search'
        logger.info('[SEARCH] Running Search for Wanted.')
        # Flag the job as active before kicking off the search pass.
        helpers.job_management(write=True, job=job_name,
                               current_run=helpers.utctimestamp(), status='Running')
        mylar.SEARCH_STATUS = 'Running'
        mylar.search.searchforissue()
        # Search finished - log completion time and return to idle.
        helpers.job_management(write=True, job=job_name,
                               last_run_completed=helpers.utctimestamp(), status='Waiting')
        mylar.SEARCH_STATUS = 'Waiting'
Exemplo n.º 3
0
 def run(self):
     """Refresh the weekly pull-list and its future-release data."""
     job_name = 'Weekly Pullist'
     logger.info('[WEEKLY] Checking Weekly Pull-list for new releases/updates')
     # Mark the job as running before hitting the pull-list sources.
     helpers.job_management(write=True, job=job_name,
                            current_run=helpers.utctimestamp(), status='Running')
     mylar.WEEKLY_STATUS = 'Running'
     weeklypull.pullit()
     weeklypull.future_check()
     # Both passes done - stamp completion and return to idle.
     helpers.job_management(write=True, job=job_name,
                            last_run_completed=helpers.utctimestamp(), status='Waiting')
     mylar.WEEKLY_STATUS = 'Waiting'
Exemplo n.º 4
0
 def run(self, sched):
     """Run a database update pass, recording job state before and after.

     sched is forwarded untouched to updater.dbUpdate().
     """
     logger.info('[DBUpdate] Updating Database.')
     helpers.job_management(write=True, job='DB Updater', current_run=helpers.utctimestamp(), status='Running')
     mylar.updater.dbUpdate(sched=sched)
     helpers.job_management(write=True, job='DB Updater', last_run_completed=helpers.utctimestamp(), status='Waiting')
Exemplo n.º 5
0
    def run(self):
        """Run the scheduled search pass over all Wanted issues.

        Records the job as Running in the job table before searching and as
        Waiting once the pass completes, mirroring that in the module-level
        SEARCH_STATUS flag.
        """
        logger.info('[SEARCH] Running Search for Wanted.')
        helpers.job_management(write=True,
                               job='Auto-Search',
                               current_run=helpers.utctimestamp(),
                               status='Running')
        mylar.SEARCH_STATUS = 'Running'
        mylar.search.searchforissue()
        helpers.job_management(write=True,
                               job='Auto-Search',
                               last_run_completed=helpers.utctimestamp(),
                               status='Waiting')
        # Fix: previously the module flag was left at 'Running' forever after
        # the pass finished; reset it alongside the job-table status so both
        # views of the job state agree (matches the sibling implementation).
        mylar.SEARCH_STATUS = 'Waiting'
Exemplo n.º 6
0
 def run(self):
     """Check Github for a newer release, updating job-table and module status."""
     logger.info('[VersionCheck] Checking for new release on Github.')
     # Record the run start, then perform the actual Github check.
     helpers.job_management(write=True, job='Check Version',
                            current_run=helpers.utctimestamp(), status='Running')
     mylar.VERSION_STATUS = 'Running'
     versioncheck.checkGithub()
     # Stamp completion and drop back to the idle state.
     helpers.job_management(write=True, job='Check Version',
                            last_run_completed=helpers.utctimestamp(), status='Waiting')
     mylar.VERSION_STATUS = 'Waiting'
Exemplo n.º 7
0
 def run(self):
     """Refresh the weekly pull-list and run the future-release check."""
     logger.info('[WEEKLY] Checking Weekly Pull-list for new releases/updates')
     # Record the run start before pulling the list.
     helpers.job_management(write=True, job='Weekly Pullist',
                            current_run=helpers.utctimestamp(), status='Running')
     mylar.WEEKLY_STATUS = 'Running'
     weeklypull.pullit()
     weeklypull.future_check()
     # Stamp completion once both passes have returned.
     helpers.job_management(write=True, job='Weekly Pullist',
                            last_run_completed=helpers.utctimestamp(), status='Waiting')
     mylar.WEEKLY_STATUS = 'Waiting'
Exemplo n.º 8
0
    def run(self, forcerss=None):
        """Perform a full RSS sweep over torrent and NZB providers.

        Serialized via rss_lock.  Unless forcerss is True the sweep is
        throttled: it bails out when the previous run happened less than
        CONFIG.RSS_CHECKINTERVAL minutes ago.  Returns True on a completed
        sweep, None when throttled.
        """
        with rss_lock:

            #logger.info('[RSS-FEEDS] RSS Feed Check was last run at : ' + str(mylar.SCHED_RSS_LAST))
            firstrun = "no"
            #check the last run of rss to make sure it's not hammering.
            if mylar.SCHED_RSS_LAST is None or mylar.SCHED_RSS_LAST == '' or mylar.SCHED_RSS_LAST == '0' or forcerss == True:
                logger.info('[RSS-FEEDS] RSS Feed Check Initalizing....')
                firstrun = "yes"
                duration_diff = 0
            else:
                tstamp = float(mylar.SCHED_RSS_LAST)
                # Minutes elapsed since the previous run.
                duration_diff = abs(helpers.utctimestamp() - tstamp) / 60
            #logger.fdebug('[RSS-FEEDS] Duration diff: %s' % duration_diff)
            if firstrun == "no" and duration_diff < int(
                    mylar.CONFIG.RSS_CHECKINTERVAL):
                logger.fdebug(
                    '[RSS-FEEDS] RSS Check has taken place less than the threshold - not initiating at this time.'
                )
                return

            # Record the run as active in the job table before any network work.
            helpers.job_management(write=True,
                                   job='RSS Feeds',
                                   current_run=helpers.utctimestamp(),
                                   status='Running')
            mylar.RSS_STATUS = 'Running'
            #logger.fdebug('[RSS-FEEDS] Updated RSS Run time to : ' + str(mylar.SCHED_RSS_LAST))

            #function for looping through nzbs/torrent feeds
            if mylar.CONFIG.ENABLE_TORRENT_SEARCH:
                logger.info('[RSS-FEEDS] Initiating Torrent RSS Check.')
                if mylar.CONFIG.ENABLE_PUBLIC:
                    logger.info(
                        '[RSS-FEEDS] Initiating Torrent RSS Feed Check on Demonoid / WorldWideTorrents.'
                    )
                    #rsscheck.torrents(pickfeed='3')   #TP.SE RSS Check (has to be page-parsed)
                    rsscheck.torrents(pickfeed='Public'
                                      )  #TPSE = DEM RSS Check + WWT RSS Check
                if mylar.CONFIG.ENABLE_32P:
                    logger.info(
                        '[RSS-FEEDS] Initiating Torrent RSS Feed Check on 32P.'
                    )
                    if mylar.CONFIG.MODE_32P == 0:
                        logger.fdebug(
                            '[RSS-FEEDS] 32P mode set to Legacy mode. Monitoring New Releases feed only.'
                        )
                        # Legacy mode needs both a passkey and an RSS feed URL.
                        if any([
                                mylar.CONFIG.PASSKEY_32P is None,
                                mylar.CONFIG.PASSKEY_32P == '',
                                mylar.CONFIG.RSSFEED_32P is None,
                                mylar.CONFIG.RSSFEED_32P == ''
                        ]):
                            logger.error(
                                '[RSS-FEEDS] Unable to validate information from provided RSS Feed. Verify that the feed provided is a current one.'
                            )
                        else:
                            rsscheck.torrents(pickfeed='1',
                                              feedinfo=mylar.KEYS_32P)
                    else:
                        logger.fdebug(
                            '[RSS-FEEDS] 32P mode set to Auth mode. Monitoring all personal notification feeds & New Releases feed'
                        )
                        if any([
                                mylar.CONFIG.USERNAME_32P is None,
                                mylar.CONFIG.USERNAME_32P == '',
                                mylar.CONFIG.PASSWORD_32P is None
                        ]):
                            logger.error(
                                '[RSS-FEEDS] Unable to sign-on to 32P to validate settings. Please enter/check your username password in the configuration.'
                            )
                        else:
                            # Authenticate only when no cached keys exist; an
                            # authenticate() result of "disable" switches 32P off.
                            if mylar.KEYS_32P is None:
                                feed32p = auth32p.info32p()
                                feedinfo = feed32p.authenticate()
                                if feedinfo != "disable":
                                    pass
                                else:
                                    mylar.CONFIG.ENABLE_32P = 0
                                    #mylar.config_write()
                            else:
                                feedinfo = mylar.FEEDINFO_32P

                            if feedinfo is None or len(
                                    feedinfo) == 0 or feedinfo == "disable":
                                logger.error(
                                    '[RSS-FEEDS] Unable to retrieve any information from 32P for RSS Feeds. Skipping for now.'
                                )
                            else:
                                # First entry drives the New Releases feed check.
                                rsscheck.torrents(pickfeed='1',
                                                  feedinfo=feedinfo[0])
                                x = 0
                                #assign personal feeds for 32p > +8
                                for fi in feedinfo:
                                    x += 1
                                    pfeed_32p = str(7 + x)
                                    rsscheck.torrents(pickfeed=pfeed_32p,
                                                      feedinfo=fi)

            logger.info(
                '[RSS-FEEDS] Initiating RSS Feed Check for NZB Providers.')
            rsscheck.nzbs(forcerss=forcerss)
            logger.info('[RSS-FEEDS] RSS Feed Check/Update Complete')
            logger.info('[RSS-FEEDS] Watchlist Check for new Releases')
            mylar.search.searchforissue(rsscheck='yes')
            logger.info('[RSS-FEEDS] Watchlist Check complete.')
            if forcerss:
                logger.info('[RSS-FEEDS] Successfully ran a forced RSS Check.')
            # Stamp completion and flip the status back to idle.
            helpers.job_management(write=True,
                                   job='RSS Feeds',
                                   last_run_completed=helpers.utctimestamp(),
                                   status='Waiting')
            mylar.RSS_STATUS = 'Waiting'
            # True signals a completed (non-throttled) run.
            return True
Exemplo n.º 9
0
 def run(self, sched):
     """Kick off a database update pass via the shared updater."""
     job_name = 'DB Updater'
     logger.info('[DBUpdate] Updating Database.')
     # Record the run start before handing off to the updater.
     helpers.job_management(write=True, job=job_name,
                            current_run=helpers.utctimestamp(), status='Running')
     mylar.updater.dbUpdate(sched=sched)
     # Stamp completion once the update returns.
     helpers.job_management(write=True, job=job_name,
                            last_run_completed=helpers.utctimestamp(), status='Waiting')
Exemplo n.º 10
0
    def run(self, forcerss=None):
        """Perform a full RSS sweep over torrent and NZB providers.

        Serialized via rss_lock.  Unless forcerss is True the sweep is
        throttled: it bails out when the previous run happened less than
        CONFIG.RSS_CHECKINTERVAL minutes ago.  Returns True on a completed
        sweep, None when throttled.
        """
        with rss_lock:

            #logger.info('[RSS-FEEDS] RSS Feed Check was last run at : ' + str(mylar.SCHED_RSS_LAST))
            firstrun = "no"
            #check the last run of rss to make sure it's not hammering.
            if mylar.SCHED_RSS_LAST is None or mylar.SCHED_RSS_LAST == '' or mylar.SCHED_RSS_LAST == '0' or forcerss == True:
                logger.info('[RSS-FEEDS] RSS Feed Check Initalizing....')
                firstrun = "yes"
                duration_diff = 0
            else:
                tstamp = float(mylar.SCHED_RSS_LAST)
                # Minutes elapsed since the previous run.
                duration_diff = abs(helpers.utctimestamp() - tstamp)/60
            #logger.fdebug('[RSS-FEEDS] Duration diff: %s' % duration_diff)
            if firstrun == "no" and duration_diff < int(mylar.CONFIG.RSS_CHECKINTERVAL):
                logger.fdebug('[RSS-FEEDS] RSS Check has taken place less than the threshold - not initiating at this time.')
                return

            # Record the run as active in the job table before any network work.
            helpers.job_management(write=True, job='RSS Feeds', current_run=helpers.utctimestamp(), status='Running')
            mylar.RSS_STATUS = 'Running'
            #logger.fdebug('[RSS-FEEDS] Updated RSS Run time to : ' + str(mylar.SCHED_RSS_LAST))

            #function for looping through nzbs/torrent feeds
            if mylar.CONFIG.ENABLE_TORRENT_SEARCH:
                logger.info('[RSS-FEEDS] Initiating Torrent RSS Check.')
                if mylar.CONFIG.ENABLE_PUBLIC:
                    logger.info('[RSS-FEEDS] Initiating Torrent RSS Feed Check on Demonoid / WorldWideTorrents.')
                    rsscheck.torrents(pickfeed='Public')    #TPSE = DEM RSS Check + WWT RSS Check
                if mylar.CONFIG.ENABLE_32P is True:
                    logger.info('[RSS-FEEDS] Initiating Torrent RSS Feed Check on 32P.')
                    if mylar.CONFIG.MODE_32P == 0:
                        logger.fdebug('[RSS-FEEDS] 32P mode set to Legacy mode. Monitoring New Releases feed only.')
                        # Legacy mode needs both a passkey and an RSS feed URL.
                        if any([mylar.CONFIG.PASSKEY_32P is None, mylar.CONFIG.PASSKEY_32P == '', mylar.CONFIG.RSSFEED_32P is None, mylar.CONFIG.RSSFEED_32P == '']):
                            logger.error('[RSS-FEEDS] Unable to validate information from provided RSS Feed. Verify that the feed provided is a current one.')
                        else:
                            rsscheck.torrents(pickfeed='1', feedinfo=mylar.KEYS_32P)
                    else:
                        logger.fdebug('[RSS-FEEDS] 32P mode set to Auth mode. Monitoring all personal notification feeds & New Releases feed')
                        if any([mylar.CONFIG.USERNAME_32P is None, mylar.CONFIG.USERNAME_32P == '', mylar.CONFIG.PASSWORD_32P is None]):
                            logger.error('[RSS-FEEDS] Unable to sign-on to 32P to validate settings. Please enter/check your username password in the configuration.')
                        else:
                            # Authenticate only when no cached keys exist; an
                            # authenticate() result of "disable" disables the provider.
                            if mylar.KEYS_32P is None:
                                feed32p = auth32p.info32p()
                                feedinfo = feed32p.authenticate()
                                if feedinfo != "disable":
                                    pass
                                else:
                                    helpers.disable_provider('32P')
                            else:
                                feedinfo = mylar.FEEDINFO_32P

                            if feedinfo is None or len(feedinfo) == 0 or feedinfo == "disable":
                                logger.error('[RSS-FEEDS] Unable to retrieve any information from 32P for RSS Feeds. Skipping for now.')
                            else:
                                # First entry drives the New Releases feed check.
                                rsscheck.torrents(pickfeed='1', feedinfo=feedinfo[0])
                                x = 0
                                #assign personal feeds for 32p > +8
                                for fi in feedinfo:
                                    x+=1
                                    pfeed_32p = str(7 + x)
                                    rsscheck.torrents(pickfeed=pfeed_32p, feedinfo=fi)

            logger.info('[RSS-FEEDS] Initiating RSS Feed Check for NZB Providers.')
            rsscheck.nzbs(forcerss=forcerss)
            logger.info('[RSS-FEEDS] RSS Feed Check/Update Complete')
            logger.info('[RSS-FEEDS] Watchlist Check for new Releases')
            mylar.search.searchforissue(rsscheck='yes')
            logger.info('[RSS-FEEDS] Watchlist Check complete.')
            if forcerss:
                logger.info('[RSS-FEEDS] Successfully ran a forced RSS Check.')
            # Stamp completion and flip the status back to idle.
            helpers.job_management(write=True, job='RSS Feeds', last_run_completed=helpers.utctimestamp(), status='Waiting')
            mylar.RSS_STATUS = 'Waiting'
            # True signals a completed (non-throttled) run.
            return True
Exemplo n.º 11
0
def start():
    """Configure and launch the background scheduler jobs.

    Pulls each job's last-run timestamp from the job table, then either
    fires the job immediately (when overdue) or schedules its next run at
    the point where its interval elapses.  Only acts when _INITIALIZED is
    set; always marks the module as started on exit.
    """

    global _INITIALIZED, started

    with INIT_LOCK:

        if _INITIALIZED:

            #load up the previous runs from the job sql table so we know stuff...
            monitors = helpers.job_management()
            SCHED_WEEKLY_LAST = monitors['weekly']
            SCHED_SEARCH_LAST = monitors['search']
            SCHED_UPDATER_LAST = monitors['dbupdater']
            SCHED_MONITOR_LAST = monitors['monitor']
            SCHED_VERSION_LAST = monitors['version']
            SCHED_RSS_LAST = monitors['rss']
            # NOTE(review): the SCHED_*_LAST names above are function-locals
            # (only _INITIALIZED/started are declared global), so any
            # module-level counterparts are not updated here - confirm intended.

            # Start our scheduled background tasks
            SCHED.add_job(func=updater.dbUpdate, id='dbupdater', name='DB Updater', args=[None,None,True], trigger=IntervalTrigger(hours=0, minutes=5, timezone='UTC'))

            #let's do a run at the Wanted issues here (on startup) if enabled.
            ss = searchit.CurrentSearcher()
            if CONFIG.NZB_STARTUP_SEARCH:
                SCHED.add_job(func=ss.run, id='search', next_run_time=datetime.datetime.utcnow(), name='Auto-Search', trigger=IntervalTrigger(hours=0, minutes=CONFIG.SEARCH_INTERVAL, timezone='UTC'))
            else:
                if SCHED_SEARCH_LAST is not None:
                    search_timestamp = float(SCHED_SEARCH_LAST)
                    logger.fdebug('[AUTO-SEARCH] Search last run @ %s' % datetime.datetime.utcfromtimestamp(search_timestamp))
                else:
                    # No recorded run: pretend the last run is one interval in
                    # the future so the job is not treated as overdue.
                    search_timestamp = helpers.utctimestamp() + (int(CONFIG.SEARCH_INTERVAL) *60)

                # Minutes since (negative: until) the recorded run.
                duration_diff = (helpers.utctimestamp() - search_timestamp)/60
                if duration_diff >= int(CONFIG.SEARCH_INTERVAL):
                    logger.fdebug('[AUTO-SEARCH]Auto-Search set to a delay of one minute before initialization as it has been %s minutes since the last run' % duration_diff)
                    SCHED.add_job(func=ss.run, id='search', name='Auto-Search', trigger=IntervalTrigger(hours=0, minutes=CONFIG.SEARCH_INTERVAL, timezone='UTC'))
                else:
                    search_diff = datetime.datetime.utcfromtimestamp(helpers.utctimestamp() + ((int(CONFIG.SEARCH_INTERVAL) * 60)  - (duration_diff*60)))
                    logger.fdebug('[AUTO-SEARCH] Scheduling next run @ %s every %s minutes' % (search_diff, CONFIG.SEARCH_INTERVAL))
                    SCHED.add_job(func=ss.run, id='search', name='Auto-Search', next_run_time=search_diff, trigger=IntervalTrigger(hours=0, minutes=CONFIG.SEARCH_INTERVAL, timezone='UTC'))

            # Background thread watching the torrent client for completed snatches
            # (torrent downloader types 2/4, non-Windows only).
            if all([CONFIG.ENABLE_TORRENTS, CONFIG.AUTO_SNATCH, OS_DETECT != 'Windows']) and any([CONFIG.TORRENT_DOWNLOADER == 2, CONFIG.TORRENT_DOWNLOADER == 4]):
                logger.info('[AUTO-SNATCHER] Auto-Snatch of completed torrents enabled & attempting to background load....')
                SNPOOL = threading.Thread(target=helpers.worker_main, args=(SNATCHED_QUEUE,), name="AUTO-SNATCHER")
                SNPOOL.start()
                logger.info('[AUTO-SNATCHER] Succesfully started Auto-Snatch add-on - will now monitor for completed torrents on client....')

            # Completed-download monitor for SABnzbd (NZB_DOWNLOADER == 0) or NZBGet (== 1).
            if CONFIG.POST_PROCESSING is True and ( all([CONFIG.NZB_DOWNLOADER == 0, CONFIG.SAB_CLIENT_POST_PROCESSING is True]) or all([CONFIG.NZB_DOWNLOADER == 1, CONFIG.NZBGET_CLIENT_POST_PROCESSING is True]) ):
                if CONFIG.NZB_DOWNLOADER == 0:
                    logger.info('[SAB-MONITOR] Completed post-processing handling enabled for SABnzbd. Attempting to background load....')
                elif CONFIG.NZB_DOWNLOADER == 1:
                    logger.info('[NZBGET-MONITOR] Completed post-processing handling enabled for NZBGet. Attempting to background load....')
                NZBPOOL = threading.Thread(target=helpers.nzb_monitor, args=(NZB_QUEUE,), name="AUTO-COMPLETE-NZB")
                NZBPOOL.start()
                if CONFIG.NZB_DOWNLOADER == 0:
                    logger.info('[AUTO-COMPLETE-NZB] Succesfully started Completed post-processing handling for SABnzbd - will now monitor for completed nzbs within sabnzbd and post-process automatically....')
                elif CONFIG.NZB_DOWNLOADER == 1:
                    logger.info('[AUTO-COMPLETE-NZB] Succesfully started Completed post-processing handling for NZBGet - will now monitor for completed nzbs within nzbget and post-process automatically....')


            helpers.latestdate_fix()

            # ALT_PULL == 2 refreshes the pull-list every 4 hours, otherwise daily.
            if CONFIG.ALT_PULL == 2:
                weektimer = 4
            else:
                weektimer = 24

            #weekly pull list gets messed up if it's not populated first, so let's populate it then set the scheduler.
            logger.info('[WEEKLY] Checking for existance of Weekly Comic listing...')

            #now the scheduler (check every 24 hours)
            weekly_interval = weektimer * 60 * 60
            # SCHED_WEEKLY_LAST is assigned above, so this guard normally never trips.
            try:
                if SCHED_WEEKLY_LAST:
                    pass
            except:
                SCHED_WEEKLY_LAST = None

            weektimestamp = helpers.utctimestamp()
            if SCHED_WEEKLY_LAST is not None:
                weekly_timestamp = float(SCHED_WEEKLY_LAST)
            else:
                weekly_timestamp = weektimestamp + weekly_interval

            ws = weeklypullit.Weekly()
            # Minutes between now and the last recorded weekly run.
            duration_diff = (weektimestamp - weekly_timestamp)/60

            if abs(duration_diff) >= weekly_interval/60:
                logger.info('[WEEKLY] Weekly Pull-Update initializing immediately as it has been %s hours since the last run' % abs(duration_diff/60))
                SCHED.add_job(func=ws.run, id='weekly', name='Weekly Pullist', next_run_time=datetime.datetime.utcnow(), trigger=IntervalTrigger(hours=weektimer, minutes=0, timezone='UTC'))
            else:
                weekly_diff = datetime.datetime.utcfromtimestamp(weektimestamp + (weekly_interval - (duration_diff * 60)))
                logger.fdebug('[WEEKLY] Scheduling next run for @ %s every %s hours' % (weekly_diff, weektimer))
                SCHED.add_job(func=ws.run, id='weekly', name='Weekly Pullist', next_run_time=weekly_diff, trigger=IntervalTrigger(hours=weektimer, minutes=0, timezone='UTC'))

            #initiate startup rss feeds for torrents/nzbs here...
            if CONFIG.ENABLE_RSS:
                logger.info('[RSS-FEEDS] Initiating startup-RSS feed checks.')
                if SCHED_RSS_LAST is not None:
                    rss_timestamp = float(SCHED_RSS_LAST)
                    logger.info('[RSS-FEEDS] RSS last run @ %s' % datetime.datetime.utcfromtimestamp(rss_timestamp))
                else:
                    # No recorded run: push the reference one interval ahead.
                    rss_timestamp = helpers.utctimestamp() + (int(CONFIG.RSS_CHECKINTERVAL) *60)
                rs = rsscheckit.tehMain()
                duration_diff = (helpers.utctimestamp() - rss_timestamp)/60
                if duration_diff >= int(CONFIG.RSS_CHECKINTERVAL):
                    SCHED.add_job(func=rs.run, id='rss', name='RSS Feeds', args=[True], next_run_time=datetime.datetime.utcnow(), trigger=IntervalTrigger(hours=0, minutes=int(CONFIG.RSS_CHECKINTERVAL), timezone='UTC'))
                else:
                    rss_diff = datetime.datetime.utcfromtimestamp(helpers.utctimestamp() + (int(CONFIG.RSS_CHECKINTERVAL) * 60) - (duration_diff * 60))
                    logger.fdebug('[RSS-FEEDS] Scheduling next run for @ %s every %s minutes' % (rss_diff, CONFIG.RSS_CHECKINTERVAL))
                    SCHED.add_job(func=rs.run, id='rss', name='RSS Feeds', args=[True], next_run_time=rss_diff, trigger=IntervalTrigger(hours=0, minutes=int(CONFIG.RSS_CHECKINTERVAL), timezone='UTC'))

            if CONFIG.CHECK_GITHUB:
                vs = versioncheckit.CheckVersion()
                SCHED.add_job(func=vs.run, id='version', name='Check Version', trigger=IntervalTrigger(hours=0, minutes=CONFIG.CHECK_GITHUB_INTERVAL, timezone='UTC'))

            ##run checkFolder every X minutes (basically Manual Run Post-Processing)
            if CONFIG.ENABLE_CHECK_FOLDER:
                if CONFIG.DOWNLOAD_SCAN_INTERVAL >0:
                    logger.info('[FOLDER MONITOR] Enabling folder monitor for : ' + str(CONFIG.CHECK_FOLDER) + ' every ' + str(CONFIG.DOWNLOAD_SCAN_INTERVAL) + ' minutes.')
                    fm = PostProcessor.FolderCheck()
                    SCHED.add_job(func=fm.run, id='monitor', name='Folder Monitor', trigger=IntervalTrigger(hours=0, minutes=int(CONFIG.DOWNLOAD_SCAN_INTERVAL), timezone='UTC'))
                else:
                    logger.error('[FOLDER MONITOR] You need to specify a monitoring time for the check folder option to work')

            logger.info('Firing up the Background Schedulers now....')
            try:
                SCHED.start()
                #update the job db here
                logger.info('Background Schedulers successfully started...')
                helpers.job_management(write=True)
            except Exception as e:
                # Scheduler failed to start - log the error and dump job state.
                logger.info(e)
                SCHED.print_jobs()

        started = True
Exemplo n.º 12
0
def start():
    """Configure and launch the background scheduler jobs and worker threads.

    Pulls each job's last-run timestamp from the job table, then either
    fires the job immediately (when overdue) or schedules its next run at
    the point where its interval elapses.  Also spins up the queue worker
    threads (search queue, optional post-process queue, client monitors).
    Only acts when _INITIALIZED is set; always marks the module as started
    on exit.
    """

    global _INITIALIZED, started

    with INIT_LOCK:

        if _INITIALIZED:

            #load up the previous runs from the job sql table so we know stuff...
            monitors = helpers.job_management()
            SCHED_WEEKLY_LAST = monitors['weekly']
            SCHED_SEARCH_LAST = monitors['search']
            SCHED_UPDATER_LAST = monitors['dbupdater']
            SCHED_MONITOR_LAST = monitors['monitor']
            SCHED_VERSION_LAST = monitors['version']
            SCHED_RSS_LAST = monitors['rss']
            # NOTE(review): the SCHED_*_LAST names above are function-locals
            # (only _INITIALIZED/started are declared global), so any
            # module-level counterparts are not updated here - confirm intended.

            # Start our scheduled background tasks
            # NOTE(review): interval is hours=5, minutes=5 - confirm this cadence is intended.
            SCHED.add_job(func=updater.dbUpdate, id='dbupdater', name='DB Updater', args=[None,None,True], trigger=IntervalTrigger(hours=5, minutes=5, timezone='UTC'))

            #let's do a run at the Wanted issues here (on startup) if enabled.
            ss = searchit.CurrentSearcher()
            if CONFIG.NZB_STARTUP_SEARCH:
                SCHED.add_job(func=ss.run, id='search', next_run_time=datetime.datetime.utcnow(), name='Auto-Search', trigger=IntervalTrigger(hours=0, minutes=CONFIG.SEARCH_INTERVAL, timezone='UTC'))
            else:
                if SCHED_SEARCH_LAST is not None:
                    search_timestamp = float(SCHED_SEARCH_LAST)
                    logger.fdebug('[AUTO-SEARCH] Search last run @ %s' % datetime.datetime.utcfromtimestamp(search_timestamp))
                else:
                    # No recorded run: pretend the last run is one interval in
                    # the future so the job is not treated as overdue.
                    search_timestamp = helpers.utctimestamp() + (int(CONFIG.SEARCH_INTERVAL) *60)

                # Minutes since (negative: until) the recorded run.
                duration_diff = (helpers.utctimestamp() - search_timestamp)/60
                if duration_diff >= int(CONFIG.SEARCH_INTERVAL):
                    logger.fdebug('[AUTO-SEARCH]Auto-Search set to a delay of one minute before initialization as it has been %s minutes since the last run' % duration_diff)
                    SCHED.add_job(func=ss.run, id='search', name='Auto-Search', trigger=IntervalTrigger(hours=0, minutes=CONFIG.SEARCH_INTERVAL, timezone='UTC'))
                else:
                    search_diff = datetime.datetime.utcfromtimestamp(helpers.utctimestamp() + ((int(CONFIG.SEARCH_INTERVAL) * 60)  - (duration_diff*60)))
                    logger.fdebug('[AUTO-SEARCH] Scheduling next run @ %s every %s minutes' % (search_diff, CONFIG.SEARCH_INTERVAL))
                    SCHED.add_job(func=ss.run, id='search', name='Auto-Search', next_run_time=search_diff, trigger=IntervalTrigger(hours=0, minutes=CONFIG.SEARCH_INTERVAL, timezone='UTC'))

            # Background thread watching the torrent client for completed snatches
            # (torrent downloader types 2/4, non-Windows only).
            if all([CONFIG.ENABLE_TORRENTS, CONFIG.AUTO_SNATCH, OS_DETECT != 'Windows']) and any([CONFIG.TORRENT_DOWNLOADER == 2, CONFIG.TORRENT_DOWNLOADER == 4]):
                logger.info('[AUTO-SNATCHER] Auto-Snatch of completed torrents enabled & attempting to background load....')
                SNPOOL = threading.Thread(target=helpers.worker_main, args=(SNATCHED_QUEUE,), name="AUTO-SNATCHER")
                SNPOOL.start()
                logger.info('[AUTO-SNATCHER] Succesfully started Auto-Snatch add-on - will now monitor for completed torrents on client....')

            # Completed-download monitor for SABnzbd (NZB_DOWNLOADER == 0) or NZBGet (== 1).
            if CONFIG.POST_PROCESSING is True and ( all([CONFIG.NZB_DOWNLOADER == 0, CONFIG.SAB_CLIENT_POST_PROCESSING is True]) or all([CONFIG.NZB_DOWNLOADER == 1, CONFIG.NZBGET_CLIENT_POST_PROCESSING is True]) ):
                if CONFIG.NZB_DOWNLOADER == 0:
                    logger.info('[SAB-MONITOR] Completed post-processing handling enabled for SABnzbd. Attempting to background load....')
                elif CONFIG.NZB_DOWNLOADER == 1:
                    logger.info('[NZBGET-MONITOR] Completed post-processing handling enabled for NZBGet. Attempting to background load....')
                NZBPOOL = threading.Thread(target=helpers.nzb_monitor, args=(NZB_QUEUE,), name="AUTO-COMPLETE-NZB")
                NZBPOOL.start()
                if CONFIG.NZB_DOWNLOADER == 0:
                    logger.info('[AUTO-COMPLETE-NZB] Succesfully started Completed post-processing handling for SABnzbd - will now monitor for completed nzbs within sabnzbd and post-process automatically....')
                elif CONFIG.NZB_DOWNLOADER == 1:
                    logger.info('[AUTO-COMPLETE-NZB] Succesfully started Completed post-processing handling for NZBGet - will now monitor for completed nzbs within nzbget and post-process automatically....')

            # Worker thread draining the search queue (always started).
            logger.info('[SEARCH-QUEUE] Attempting to background load the search queue....')
            SEARCHPOOL = threading.Thread(target=helpers.search_queue, args=(SEARCH_QUEUE,), name="SEARCH-QUEUE")
            SEARCHPOOL.start()

            # Worker thread servicing api-driven post-process requests.
            if all([CONFIG.POST_PROCESSING is True, CONFIG.API_ENABLED is True]):
                logger.info('[POST-PROCESS-QUEUE] Post Process queue enabled & monitoring for api requests....')
                PPPOOL = threading.Thread(target=helpers.postprocess_main, args=(PP_QUEUE,), name="POST-PROCESS-QUEUE")
                PPPOOL.start()
                logger.info('[POST-PROCESS-QUEUE] Succesfully started Post-Processing Queuer....')

            helpers.latestdate_fix()

            # ALT_PULL == 2 refreshes the pull-list every 4 hours, otherwise daily.
            if CONFIG.ALT_PULL == 2:
                weektimer = 4
            else:
                weektimer = 24

            #weekly pull list gets messed up if it's not populated first, so let's populate it then set the scheduler.
            logger.info('[WEEKLY] Checking for existance of Weekly Comic listing...')

            #now the scheduler (check every 24 hours)
            weekly_interval = weektimer * 60 * 60
            # SCHED_WEEKLY_LAST is assigned above, so this guard normally never trips.
            try:
                if SCHED_WEEKLY_LAST:
                    pass
            except:
                SCHED_WEEKLY_LAST = None

            weektimestamp = helpers.utctimestamp()
            if SCHED_WEEKLY_LAST is not None:
                weekly_timestamp = float(SCHED_WEEKLY_LAST)
            else:
                weekly_timestamp = weektimestamp + weekly_interval

            ws = weeklypullit.Weekly()
            # Minutes between now and the last recorded weekly run.
            duration_diff = (weektimestamp - weekly_timestamp)/60

            if abs(duration_diff) >= weekly_interval/60:
                logger.info('[WEEKLY] Weekly Pull-Update initializing immediately as it has been %s hours since the last run' % abs(duration_diff/60))
                SCHED.add_job(func=ws.run, id='weekly', name='Weekly Pullist', next_run_time=datetime.datetime.utcnow(), trigger=IntervalTrigger(hours=weektimer, minutes=0, timezone='UTC'))
            else:
                weekly_diff = datetime.datetime.utcfromtimestamp(weektimestamp + (weekly_interval - (duration_diff * 60)))
                logger.fdebug('[WEEKLY] Scheduling next run for @ %s every %s hours' % (weekly_diff, weektimer))
                SCHED.add_job(func=ws.run, id='weekly', name='Weekly Pullist', next_run_time=weekly_diff, trigger=IntervalTrigger(hours=weektimer, minutes=0, timezone='UTC'))

            #initiate startup rss feeds for torrents/nzbs here...
            rs = rsscheckit.tehMain()
            if CONFIG.ENABLE_RSS:
                logger.info('[RSS-FEEDS] Initiating startup-RSS feed checks.')
                if SCHED_RSS_LAST is not None:
                    rss_timestamp = float(SCHED_RSS_LAST)
                    logger.info('[RSS-FEEDS] RSS last run @ %s' % datetime.datetime.utcfromtimestamp(rss_timestamp))
                else:
                    # No recorded run: push the reference one interval ahead.
                    rss_timestamp = helpers.utctimestamp() + (int(CONFIG.RSS_CHECKINTERVAL) *60)
                duration_diff = (helpers.utctimestamp() - rss_timestamp)/60
                if duration_diff >= int(CONFIG.RSS_CHECKINTERVAL):
                    SCHED.add_job(func=rs.run, id='rss', name='RSS Feeds', args=[True], next_run_time=datetime.datetime.utcnow(), trigger=IntervalTrigger(hours=0, minutes=int(CONFIG.RSS_CHECKINTERVAL), timezone='UTC'))
                else:
                    rss_diff = datetime.datetime.utcfromtimestamp(helpers.utctimestamp() + (int(CONFIG.RSS_CHECKINTERVAL) * 60) - (duration_diff * 60))
                    logger.fdebug('[RSS-FEEDS] Scheduling next run for @ %s every %s minutes' % (rss_diff, CONFIG.RSS_CHECKINTERVAL))
                    SCHED.add_job(func=rs.run, id='rss', name='RSS Feeds', args=[True], next_run_time=rss_diff, trigger=IntervalTrigger(hours=0, minutes=int(CONFIG.RSS_CHECKINTERVAL), timezone='UTC'))
            #else:
            #    SCHED.add_job(func=rs.run, id='rss', name='RSS Feeds', args=[True], trigger=IntervalTrigger(hours=0, minutes=int(CONFIG.RSS_CHECKINTERVAL), timezone='UTC'))
            #    SCHED.pause_job('rss')

            if CONFIG.CHECK_GITHUB:
                vs = versioncheckit.CheckVersion()
                SCHED.add_job(func=vs.run, id='version', name='Check Version', trigger=IntervalTrigger(hours=0, minutes=CONFIG.CHECK_GITHUB_INTERVAL, timezone='UTC'))

            ##run checkFolder every X minutes (basically Manual Run Post-Processing)
            if CONFIG.ENABLE_CHECK_FOLDER:
                if CONFIG.DOWNLOAD_SCAN_INTERVAL >0:
                    logger.info('[FOLDER MONITOR] Enabling folder monitor for : ' + str(CONFIG.CHECK_FOLDER) + ' every ' + str(CONFIG.DOWNLOAD_SCAN_INTERVAL) + ' minutes.')
                    fm = PostProcessor.FolderCheck()
                    SCHED.add_job(func=fm.run, id='monitor', name='Folder Monitor', trigger=IntervalTrigger(hours=0, minutes=int(CONFIG.DOWNLOAD_SCAN_INTERVAL), timezone='UTC'))
                else:
                    logger.error('[FOLDER MONITOR] You need to specify a monitoring time for the check folder option to work')

            logger.info('Firing up the Background Schedulers now....')
            try:
                SCHED.start()
                #update the job db here
                logger.info('Background Schedulers successfully started...')
                helpers.job_management(write=True)
            except Exception as e:
                # Scheduler failed to start - log the error and dump job state.
                logger.info(e)
                SCHED.print_jobs()

        started = True