async def _(session: CommandSession):
    global noobList1
    user = session.event.user_id
    group = session.event.group_id
    try:
        with open(
                Path('.') / 'ATRI' / 'plugins' / 'noobList' / 'noobGroup.json',
                'r') as f:
            data = json.load(f)
    except (FileNotFoundError, json.JSONDecodeError):
        data = {}
    try:
        with open(
                Path('.') / 'ATRI' / 'plugins' / 'noobList' / 'noobList.json',
                'r') as f:
            data1 = json.load(f)
    except (FileNotFoundError, json.JSONDecodeError):
        data1 = {}
    # Bail out for blocked groups, blocked users, and the sleeping hours (0:00-5:30).
    if str(group) in data or str(user) in data1 or 0 <= now_time() < 5.5:
        return
    msg = str(session.event.message)
    bL = {}
    pattern = r"[nNηиɴИ][tT][rR]|[牛🐂]头人"
    if re.findall(pattern, msg):
        await session.send('你妈的,牛头人,' + response.request_api(KC_URL))
        noobList1.append(user)
        print(noobList1)
        print(countX(noobList1, user))
        if countX(noobList1, user) == 5:
            if user == master:
                await session.send(
                    '是主人的话...那算了...呜呜\n即使到达了ATRI的最低忍耐限度......')
                noobList1 = list(set(noobList1))
            else:
                await session.send(
                    f'[CQ:at,qq={user}]哼!接下来10分钟别想让我理你!')
                bL[f"{user}"] = f"{user}"
                file = Path('.') / 'ATRI' / 'plugins' / 'noobList' / 'noobList.json'
                with open(file, 'w') as f:
                    f.write(json.dumps(bL))
                noobList1 = list(set(noobList1))
                print(noobList1)
                delta = timedelta(minutes=10)
                trigger = DateTrigger(run_date=datetime.now() + delta)
                scheduler.add_job(  # type: ignore
                    func=rmQQfromNoobLIST,
                    trigger=trigger,
                    args=(user,),  # args must be a tuple; (user) alone is not one
                    misfire_grace_time=60,
                )
class JobManager:
    """
    Central overseer that manages the measurement jobs.
    """

    _instance: Optional['JobManager'] = None

    @classmethod
    def initialize( cls, datadir: Path ) -> 'JobManager':
        if cls._instance is None:
            cls._instance = JobManager( datadir )
        return cls._instance

    @classmethod
    def get_instance( cls ) -> 'JobManager':
        if cls._instance is None:
            raise RuntimeError( "Attempted to obtain manager before initializing it." )
        return cls._instance

    @classmethod
    def run_job( cls, job: Job ) -> None:
        """
        Executes the specified job once.

        :param job: The job to execute.
        """
        _LOGGER.debug( "Running job '%s'.", job.id )
        timestamp = datetime.now( pytz.utc )
        try:
            output = speedtest.run_test( server_id = job.server_id, server_name = job.server_name )
            result = {
                'success': True,
                'timestamp': timestamp.isoformat(),  # Same key as the failure branch
                'result': output,
            }
        except speedtest.TestError as e:
            _LOGGER.exception( "Test could not be completed." )
            result = {
                'success': False,
                'timestamp': timestamp.isoformat(),
                'error': str( e ),
                'stdout': e.stdout,
                'stderr': e.stderr,
            }
        with open( cls.get_instance().output_file( job ), 'a' ) as f:
            f.write( json.dumps( result ) )
            f.write( ',\n' )  # Line break to make it slightly more readable
        _LOGGER.debug( "Finished running job '%s'.", job.id )

    def __init__( self, datadir: Path ):
        """
        Initializes a new manager that uses the specified directory to store data.

        :param datadir: The path of the directory where data should be stored.
        """
        _LOGGER.debug( "Initializing manager." )
        try:
            _LOGGER.info( "Using %s", speedtest.get_version() )  # Also implicitly checks that it is installed
        except speedtest.TestError:
            _LOGGER.exception( "Obtaining Speedtest CLI version caused an error." )
            _LOGGER.critical( "The Speedtest CLI could not be accessed. Is it installed on this system?" )
            sys.exit( 1 )

        database_path = datadir / 'jobs.db'
        self.storage = datadir / 'results'
        self.storage.mkdir( mode = 0o770, exist_ok = True )

        self.engine = create_engine( f'sqlite:///{database_path}' )
        Base.metadata.create_all( self.engine )
        self.Session = orm.sessionmaker( bind = self.engine )

        jobstores = { 'default': SQLAlchemyJobStore( engine = self.engine ) }
        executors = { 'default': ThreadPoolExecutor( 1 ) }
        job_defaults = {
            'coalesce': True,
            'max_instances': 1,
            'misfire_grace_time': 5 * 60,  # Can be up to 5 minutes late
        }
        self.scheduler = BackgroundScheduler(
            jobstores = jobstores,
            executors = executors,
            job_defaults = job_defaults,
            timezone = pytz.utc,
        )
        self.scheduler.add_listener( self.job_stopped, mask = events.EVENT_JOB_REMOVED )
        _LOGGER.debug( "Manager initialized." )

    def start( self ):
        """
        Starts processing jobs.
        """
        _LOGGER.info( "Manager starting." )
        self.scheduler.start()
        _LOGGER.debug( "Manager started." )

    def shutdown( self, wait: bool = True ):
        """
        Shuts down the manager, stopping job processing.

        :param wait: If True, waits for all currently executing jobs to finish before returning.
        """
        _LOGGER.info( "Manager stopping." )
        self.scheduler.shutdown( wait = wait )
        _LOGGER.debug( "Manager stopped." )

    def job_stopped( self, event: events.JobEvent ) -> None:
        id: str = event.job_id
        with self.transaction() as session:
            job: JobMetadata = session.query( JobMetadata ).filter_by( id = id ).first()
            job.running = False

    def output_file( self, job: Job ) -> Path:
        """
        Determines the path to the output file of the job identified by the given ID.

        :param job: The job to get the path for.
        :return: The path of the output file for the given job.
        """
        return self.storage / f'{job.id}.result'  # Not really proper JSON

    def load_results( self, job: Job ) -> Sequence[JSONData]:
        """
        Loads the results obtained so far for the given job.

        :param job: The job to load results for.
        :return: The results of the given job, as a list of JSON objects.
        """
        output_file = self.output_file( job )
        if not output_file.exists():
            return []
        with open( output_file, 'r' ) as f:
            results = f.read()
        results = '[' + results[:-2] + ']'  # Remove trailing comma and line break and add brackets
        return json.loads( results )

    @contextmanager
    def transaction( self ) -> orm.Session:
        """
        Provide a transactional scope around a series of operations.
        """
        session: orm.Session = self.Session()
        try:
            yield session
            session.commit()
        except:
            session.rollback()
            raise
        finally:
            session.close()

    def new_job( self, job: Job ) -> None:
        """
        Registers the given job.

        :param job: The job to register.
        :raises IDExistsError: if the ID of the given job is already in use.
        """
        _LOGGER.info( "Registering job '%s'.", job.id )
        _LOGGER.debug(
            "Job '%s' (%s) has target %d|'%s', starts at %s and ends at %s with interval %s.",
            job.id, job.title, job.server_id, job.server_name, job.start, job.end, job.interval
        )
        with self.transaction() as session:
            try:
                if session.query( JobMetadata ).filter_by( id = job.id ).count() > 0:
                    raise IDExistsError( "There is already metadata for the given ID." )
                new_job = JobMetadata( job )
                session.add( new_job )
                if job.interval:
                    _LOGGER.debug( "Creating an interval-triggered job." )
                    if job.end is not None and job.end < ( now := datetime.now( pytz.utc ) ):
                        raise PastEndError( now, job )
                    trigger = IntervalTrigger(
                        seconds = int( job.interval.total_seconds() ),
                        start_date = job.start if job.start is not None else datetime.now( pytz.utc ),
                        end_date = job.end,
                    )
                else:
                    _LOGGER.debug( "Creating a date-triggered job." )
                    trigger = DateTrigger( run_date = job.start )
def add_date_job(self, mission, widget=None):
    if not widget:
        widget = self.__widget
    trigger = DateTrigger(run_date=mission['time'])
    self.add_job(func=lambda: widget.send_show_signal(mission['id']),
                 trigger=trigger,
                 id=mission['id'])
async def from_rss_async(self, source) -> Optional[str]:
    rss_source = self.rss[source]
    print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
          + "检查RSS源:{}".format(rss_source["name"]))
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(rss_source["source"],
                                   headers=rss_source.get("headers")) as response:
                code = response.status
                if code != 200:
                    print("rss源错误:{},返回值:{}".format(rss_source["name"], code))
                    return None
                res = await response.text()
    except aiohttp.client_exceptions.ClientConnectionError:
        print("rss源连接错误:" + rss_source["name"])
        return None
    except Exception as e:
        print("未知错误{} {}".format(type(e).__name__, e))
        return None
    feed = feedparser.parse(res)
    if feed["bozo"]:
        print("rss源解析错误:" + rss_source["name"])
        return None
    if self.news_interval_auto:
        updated = feed.feed.updated
        if updated is not None:
            # Read the feed's last refresh time (the 28800s offset converts to UTC+8)
            lastBuildDate = parsedate_tz(updated)
            nt = datetime.datetime.fromtimestamp(
                time.mktime(lastBuildDate[:-1]) - lastBuildDate[-1] + 28800)
            nt += datetime.timedelta(minutes=25)
            after10min = datetime.datetime.now() + datetime.timedelta(minutes=10)
            if nt > after10min:
                # Reschedule the next check to 25 minutes after the last refresh
                self.scheduler.reschedule_job(
                    job_id=source,
                    jobstore='default',
                    trigger=DateTrigger(nt),
                )
    if len(feed["entries"]) == 0:
        print("rss无效:" + rss_source["name"])
        return None
    last_id = rss_source.get("last_id")
    rss_source["last_id"] = feed["entries"][0]["id"]
    if last_id is None:
        print("rss初始化:" + rss_source["name"])
        return None
    news_list = list()
    for item in feed["entries"]:
        if item["id"] == last_id:
            break
        news_list.append(rss_source["pattern"].format_map(item))
    if news_list:
        return (rss_source["name"] + "更新:\n=======\n"
                + "\n-------\n".join(news_list))
    else:
        return None
def get_or_create_trigger(self) -> BaseTrigger:
    return DateTrigger()
def trigger(self) -> DateTrigger:
    """Trigger for ReserveAbortJobs."""
    return DateTrigger(run_date=None)  # Run immediately
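# A minimal, hypothetical sketch (the scheduler setup and the reserve_abort
# callable are illustrative, not from the snippet above). With run_date=None,
# APScheduler's DateTrigger resolves the run time to "now", so the job fires
# exactly once, immediately; DateTrigger() and
# DateTrigger(run_date=datetime.now()) behave the same way.
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.date import DateTrigger

def reserve_abort():
    print("abort handling runs once, right away")

scheduler = BackgroundScheduler()
scheduler.start()
scheduler.add_job(reserve_abort, trigger=DateTrigger(run_date=None))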
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

logging.basicConfig()

if not os.path.exists("settings/config.json"):
    print("|! Couldn't find config.json!")
    print("|! Rename config.json.example to config.json and edit it as required.")
    print("|! After that, run the script again.")
    exit(1)

utils = Utils()
config = utils.readConfig()  # Read the config once instead of twice
schedule_interval = config['interval']
use_schedule = config['use_schedule']

scheduler = BlockingScheduler()
scheduler.add_job(Cachet,
                  trigger=DateTrigger(run_date=datetime.datetime.now()),
                  id='initial')
scheduler.add_job(Cachet, 'interval', seconds=schedule_interval, id='constant')
print('Press Ctrl+{0} to exit'.format('Break' if os.name == 'nt' else 'C'))

if use_schedule:
    try:
        scheduler.start()
    except (KeyboardInterrupt, SystemExit):
        pass
else:
    Cachet()
    timezone=get_localzone(),  # e.g. "Asia/Seoul"
    daemon=True)

# Create the job task
# - prints the given text
def execute(text):
    print(text)

# Register a job schedule
# - task    : print the given text
# - trigger : date
# - jobstore: default
scheduler.add_job(execute,
                  DateTrigger(),
                  args=["[DateTrigger] Hello, Apscheduler!!!"])

# Register a job schedule
# - task    : print the given text every 5 seconds
# - trigger : interval
# - jobstore: default
scheduler.add_job(execute,
                  IntervalTrigger(seconds=5),
                  args=["[IntervalTrigger] Hello, Apscheduler!!!"])

# Register a job schedule
# - task    : print the given text every 3 seconds
# - trigger : cron
# - jobstore: default
scheduler.add_job(execute,
                  CronTrigger(second='*/3'),  # call completed per the comment above
                  args=["[CronTrigger] Hello, Apscheduler!!!"])
        mktime(entry.published_parsed))
    pub_localized = utc.localize(pub_datetime)
    pub_eastern = pub_localized.astimezone(est)
    if not FeedItem.objects.filter(title=entry.title.encode('ascii', 'ignore')):
        FeedItem.objects.create(
            feed=feed,
            title=entry.title.encode('ascii', 'ignore'),
            url=entry.link,
            summary=strip_tags(entry.summary.encode('ascii', 'ignore')),
            pub_date=pub_eastern)


@sched.scheduled_job('interval', days=1)
def remove_old_feed_items():
    for item in FeedItem.objects.all():
        if item.pub_date < utc.localize(
                datetime.datetime.now() - datetime.timedelta(days=1)).astimezone(est):
            item.delete()


# Run jobs immediately on deploy
sched.add_job(func=update_newsfeed,
              trigger=DateTrigger(run_date=datetime.datetime.now()))
sched.add_job(func=remove_old_feed_items,
              trigger=DateTrigger(run_date=datetime.datetime.now()))
sched.start()
def test_str(self, timezone):
    trigger = DateTrigger(datetime(2009, 7, 6), timezone)
    assert str(trigger) == "date[2009-07-06 00:00:00 CEST]"
def test_pickle(self, timezone):
    """Test that the trigger is pickleable."""
    trigger = DateTrigger(date(2016, 4, 3), timezone=timezone)
    data = pickle.dumps(trigger, 2)
    trigger2 = pickle.loads(data)
    assert trigger2.run_date == trigger.run_date
def test_repr(self, timezone):
    trigger = DateTrigger(datetime(2009, 7, 6), timezone)
    assert repr(trigger) == "<DateTrigger (run_date='2009-07-06 00:00:00 CEST')>"
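# The three tests above rely on a `timezone` pytest fixture that is not shown;
# the expected "CEST" strings imply a Central European zone. A sketch of such a
# fixture, assuming pytz (Europe/Berlin is an assumption; any zone observing
# CEST on 2009-07-06 would satisfy the assertions):
import pytest
import pytz

@pytest.fixture
def timezone():
    return pytz.timezone('Europe/Berlin')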
def configure(self, aasxconfig):
    """Configures the triggers and jobs out of the given configuration

    :param lxml.etree.ElementTree configuration: XML DOM tree of the configuration
    """
    # Add each trigger of the configuration to the scheduler
    triggerList = aasxconfig.GetTriggersList()
    for trigger_xe in triggerList:
        trigger_type = trigger_xe.tag
        trigger_id = trigger_xe.attrib["ID"]
        if trigger_type == "DateTrigger":
            kwargs = {}
            kwargs["run_date"] = trigger_xe.attrib["RunDateTime"]
            if "TimeZone" in trigger_xe.attrib:
                kwargs["timezone"] = trigger_xe.attrib["TimeZone"]
            self.triggers[trigger_id] = DateTrigger(**kwargs)
        elif trigger_type == "IntervalTrigger":
            kwargs = {}
            if "Weeks" in trigger_xe.attrib:
                kwargs["weeks"] = int(trigger_xe.attrib["Weeks"])
            if "Days" in trigger_xe.attrib:
                kwargs["days"] = int(trigger_xe.attrib["Days"])
            if "Hours" in trigger_xe.attrib:
                kwargs["hours"] = int(trigger_xe.attrib["Hours"])
            if "Minutes" in trigger_xe.attrib:
                kwargs["minutes"] = int(trigger_xe.attrib["Minutes"])
            if "Seconds" in trigger_xe.attrib:
                kwargs["seconds"] = int(trigger_xe.attrib["Seconds"])
            if "StartDateTime" in trigger_xe.attrib:
                kwargs["start_date"] = trigger_xe.attrib["StartDateTime"]
            if "EndDateTime" in trigger_xe.attrib:
                kwargs["end_date"] = trigger_xe.attrib["EndDateTime"]
            if "TimeZone" in trigger_xe.attrib:
                kwargs["timezone"] = trigger_xe.attrib["TimeZone"]
            self.triggers[trigger_id] = IntervalTrigger(**kwargs)
        elif trigger_type == "CronTrigger":
            kwargs = {}
            if "Year" in trigger_xe.attrib:
                kwargs["year"] = trigger_xe.attrib["Year"]
            if "Month" in trigger_xe.attrib:
                kwargs["month"] = trigger_xe.attrib["Month"]
            if "Day" in trigger_xe.attrib:
                kwargs["day"] = trigger_xe.attrib["Day"]
            if "Week" in trigger_xe.attrib:
                kwargs["week"] = trigger_xe.attrib["Week"]
            if "WeekDay" in trigger_xe.attrib:
                kwargs["day_of_week"] = trigger_xe.attrib["WeekDay"]
            if "Hour" in trigger_xe.attrib:
                kwargs["hour"] = trigger_xe.attrib["Hour"]
            if "Minute" in trigger_xe.attrib:
                kwargs["minute"] = trigger_xe.attrib["Minute"]
            if "Second" in trigger_xe.attrib:
                kwargs["second"] = trigger_xe.attrib["Second"]
            if "StartDateTime" in trigger_xe.attrib:
                kwargs["start_date"] = trigger_xe.attrib["StartDateTime"]
            if "EndDateTime" in trigger_xe.attrib:
                kwargs["end_date"] = trigger_xe.attrib["EndDateTime"]
            if "TimeZone" in trigger_xe.attrib:
                kwargs["timezone"] = trigger_xe.attrib["TimeZone"]
            self.triggers[trigger_id] = CronTrigger(**kwargs)
        else:
            raise Exception(
                "This PYAAS implementation can not handle the trigger type '{0}'"
                .format(trigger_type))

        job_xes = trigger_xe.xpath("./Job")
        for job_xe in job_xes:
            job_id = job_xe.attrib["ID"]
            module_name = job_xe.attrib["Function"]
            params = [self.saas]
            param_xes = job_xe.xpath("./*")
            for param_xe in param_xes:
                if param_xe.tag == "ChannelRef":
                    params.append(param_xe.attrib["RefID"])
                elif param_xe.tag == "Constant":
                    t = param_xe.attrib["Type"]
                    v = param_xe.attrib["Value"]
                    # Chained elif so that an already-matched type is not
                    # rejected by the else branch below
                    if t == "str":
                        params.append(str(v))
                    elif t == "int":
                        params.append(int(v))
                    elif t == "float":
                        params.append(float(v))
                    else:
                        raise Exception("ERROR: Disallowed Job parameter")
            if module_name not in sys.modules:
                self.f_modules[module_name] = import_module("modules" + module_name)
            f = self.f_modules[module_name].function
            trigger = self.triggers[trigger_id]
            self.scheduler.add_job(f,
                                   trigger=trigger,
                                   args=params,
                                   id=job_id,
                                   replace_existing=True)
def trigger(self) -> DateTrigger:
    """Trigger for ReserveTimeoutJobs."""
    return DateTrigger(run_date=current_timestamp() + timedelta(seconds=30))
def schedule(self, task, start_time: dt.datetime = None):
    trigger = None
    if start_time is not None:
        trigger = DateTrigger(start_time)
    self._scheduler.add_job(task, trigger=trigger)
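# Usage sketch for schedule() above: passing trigger=None to add_job makes
# APScheduler run the job once, immediately, while a DateTrigger defers the
# single run to start_time. The wrapper instance name `jobs` and the `task`
# callable are hypothetical stand-ins, not names from the snippet.
import datetime as dt

def task():
    print("one-shot task")

jobs.schedule(task)  # trigger=None -> runs immediately
jobs.schedule(task,
              start_time=dt.datetime.now() + dt.timedelta(minutes=5))  # runs once, in 5 minutes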
async def _(bot: Bot, event: Event, state: dict):
    global lsp_stack
    user = str(event.user_id)
    group = str(event.group_id)
    if not is_enabled(_func_name, group):
        await setu_get.finish("该功能不可用")
    if check_list(user):
        await setu_get.finish("冲的太多了,休息一下吧")
    if counter(lsp_stack, user) == 5:
        en_lsp(user)
        lsp_stack = list(set(lsp_stack))
        delta = timedelta(minutes=10)
        trigger = DateTrigger(run_date=datetime.now() + delta)
        scheduler.add_job(func=de_lsp,
                          trigger=trigger,
                          args=(user, ),
                          misfire_grace_time=60)
    r18_switch = 0
    ON_FILE = Path("./src/plugins/pic_search/on_list.json")
    if ON_FILE.is_file():
        with open(ON_FILE, 'r') as file:
            data = ujson.load(file)
        if group in data:
            r18_switch = 1 if data[group] == "on" else 0
    args = str(event.message).strip().split()
    if len(args) > 1:
        state["keyword"] = args[1]
    await bot.send(event, "别急,涩图在搜索了")
    if state.get("keyword") == "local":
        setu = await setu_loader(user)
    else:
        key = state.get("keyword")
        setu = await setu_linker(user, key, mode=r18_switch)
    if not setu:
        msg = [{
            "type": "at",
            "data": {
                "qq": user
            }
        }, {
            "type": "text",
            "data": {
                "text": "连接超时,涩图找丢了"
            }
        }]
        await setu_get.finish(msg)
    lsp_stack.append(user)
    sender_info = dict(event.sender)
    if sender_info["card"]:
        user_name = sender_info["card"]
    else:
        user_name = sender_info["nickname"]
    # Update the leaderboard
    KSP = Path("./src/plugins/pic_search/King_of_LSP.json")
    if not KSP.is_file():
        sp_data = {}
    else:
        with open(KSP, 'r') as f:
            sp_data = ujson.load(f)
    if group not in sp_data:
        sp_data[group] = {}
    if user not in sp_data[group]:
        sp_data[group][user] = {"name": user_name, "times": 0}
    if sp_data[group][user]["name"] != user_name:
        sp_data[group][user]["name"] = user_name
    sp_data[group][user]["times"] += 1
    with open(KSP, 'w') as file:
        ujson.dump(sp_data, file)
    await setu_get.finish(setu)
def DScheduler(cls, action, start_date=None, execute_date=None,
               end_date=None, execute_interval=3, tz=None, **kwargs):
    """
    A real-time task driven by the clock: the work mounted on `action` is
    fired at the configured times, unlike the `run` method, which is driven
    by K-line (candlestick) updates. The timezone feature is not yet in effect.

    :param action:
    :param start_date: like '09:30:00'
    :param execute_date: like '09:30:00-11:30:00' or
        '09:30:00-11:30:00 13:00:00-15:00:00'
    :param end_date: like '15:00:00'
    :param execute_interval: interval between runs of the continuous task, in seconds
    :param tz: timezone
    :return:
    """
    fmt = '%Y-%m-%d %H:%M:%S'
    if start_date is not None:
        try:
            sdt = dt.datetime.strptime('2000-01-01 ' + start_date, fmt)
        except Exception:
            raise TypeError('the start_date param should be a string like "09:30:00"')
    if execute_date is not None:
        try:
            xdt = []
            dts = execute_date.split(' ')
            for et in dts:
                t = et.split('-')
                s = dt.datetime.strptime('2000-01-01 ' + t[0], fmt)
                e = dt.datetime.strptime('2000-01-01 ' + t[1], fmt)
                # if s > e:  # a start after the end means the window runs into the next day
                #     raise TypeError('execute start datetime must be less than end')
                xdt.append([s, e])
                del s, e, t
            del dts
        except Exception:
            raise TypeError('the execute_date param should be like "09:30:00-11:30:00" or '
                            '"09:30:00-11:30:00 13:00:00-15:00:00"')
    if end_date is not None:
        try:
            edt = dt.datetime.strptime('2000-01-01 ' + end_date, fmt)
        except Exception:
            raise TypeError('the end_date param should be a string like "15:30:00"')
    if tz is not None:
        if tz not in pytz.all_timezones:
            raise ValueError('Only timezones from the pytz library are supported')
        else:
            tz = pytz.timezone(tz)
    # from pytz import FixedOffset, utc
    from apscheduler.triggers.date import DateTrigger
    from apscheduler.triggers.interval import IntervalTrigger
    from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
    calibration()
    while 1:
        # scheduler = BlockingScheduler(daemonic=False)
        # crt = CalfDateTime.now(tz, offset)
        crt = dt.datetime.now() if tz is None else dt.datetime.now(tz=tz).replace(tzinfo=None)
        tdy = dt.datetime(crt.year, crt.month, crt.day)
        if not action.is_trade_day(tdy):
            # Non-trading day
            print(fontcolor.F_RED + '-' * 80)
            print('Note:Non-transaction date;Datetime:' + str(crt))
            print('-' * 80 + fontcolor.END)
            delta = (tdy + dt.timedelta(days=1) - crt).seconds
            delta = 1 if delta < 1 else delta
            time.sleep(delta)  # sleep to next day
            continue
        else:
            # Trading day
            try:
                nsds = list()
                executors = {
                    'default': ThreadPoolExecutor(4),
                    'processpool': ProcessPoolExecutor(4)
                }
                job_defaults = {'coalesce': True, 'max_instances': 1}
                scheduler = BackgroundScheduler(executors=executors,
                                                job_defaults=job_defaults,
                                                daemonic=False,
                                                timezone=tz)
                if start_date is not None:
                    d = tdy + dt.timedelta(hours=sdt.hour,
                                           minutes=sdt.minute,
                                           seconds=sdt.second)
                    nsds.append(d + dt.timedelta(days=1))

                    def action_start(args):
                        print(fontcolor.F_GREEN + '-' * 80)
                        print('Calf-Note:start task running on ', dt.datetime.now(tz=tz))
                        print('-' * 80 + fontcolor.END)
                        try:
                            # Pass the kwargs dict through as keyword arguments;
                            # Thread(args=(args,)) would call start() positionally and fail
                            t = threading.Thread(target=action.start, kwargs=args)
                            t.start()
                        except Exception as ep:
                            ExceptionInfo(ep)

                    scheduler.add_job(func=action_start,
                                      trigger=DateTrigger(d),
                                      id='action_start',
                                      args=[kwargs])
                if execute_date is not None:

                    def action_execute(args):
                        print(fontcolor.F_GREEN + '-' * 80)
                        print('Calf-Note:execute task running on ', dt.datetime.now(tz=tz))
                        print('-' * 80 + fontcolor.END)
                        try:
                            t = threading.Thread(target=action.execute, kwargs=args)
                            t.start()
                            t.join(execute_interval - 1)
                            # action.execute(args=args)
                        except Exception as ep:
                            ExceptionInfo(ep)

                    for x in xdt:
                        sd = tdy + dt.timedelta(hours=x[0].hour,
                                                minutes=x[0].minute,
                                                seconds=x[0].second)
                        ed = tdy + dt.timedelta(hours=x[1].hour,
                                                minutes=x[1].minute,
                                                seconds=x[1].second)
                        if sd > ed:
                            # A window like '21:30:00-04:00:00' runs from 21:30
                            # today until 04:00 the next day.
                            ed = ed + dt.timedelta(days=1)
                        scheduler.add_job(func=action_execute,
                                          trigger=IntervalTrigger(seconds=execute_interval,
                                                                  start_date=sd,
                                                                  end_date=ed),
                                          args=[kwargs])
                        nsds.append(sd + dt.timedelta(days=1))
                if end_date is not None:

                    def action_end(args):
                        print(fontcolor.F_GREEN + '-' * 80)
                        print('Calf-Note:end task running on ', dt.datetime.now(tz=tz))
                        print('-' * 80 + fontcolor.END)
                        try:
                            t = threading.Thread(target=action.end, kwargs=args)
                            t.start()
                        except Exception as ep:
                            ExceptionInfo(ep)

                    d = tdy + dt.timedelta(hours=edt.hour,
                                           minutes=edt.minute,
                                           seconds=edt.second)
                    nsds.append(d + dt.timedelta(days=1))
                    # timezone is set on the scheduler; a trigger instance
                    # ignores extra trigger args passed to add_job
                    scheduler.add_job(func=action_end,
                                      trigger=DateTrigger(d),
                                      id='action_end',
                                      args=[kwargs])
                print(fontcolor.F_GREEN + '-' * 80)
                print('Note:enter Calf real task and mount these tasks:')
                scheduler.print_jobs()
                print('Datetime:' + str(crt))
                print('-' * 80 + fontcolor.END)
                scheduler.start()
                # Work out how long to sleep before the next restart
                if len(nsds) == 0:
                    break
                # ed = CalfDateTime.now(tz, offset)
                nd = dt.datetime.now() if tz is None else dt.datetime.now(tz=tz).replace(tzinfo=None)
                delta = (min(nsds) - nd)
                delta = delta.seconds + delta.days * 86400 - 1
                print(fontcolor.F_YELLOW + '-' * 80)
                print('Note:Calf will sleep {0} seconds and restart on {1}:'.format(delta, min(nsds)))
                print('Datetime:', str(crt))
                print('-' * 80 + fontcolor.END)
                delta = 1 if delta < 1 else delta
                time.sleep(delta)
                scheduler.shutdown(wait=False)
                del scheduler
            except Exception as e:
                ExceptionInfo(e)
                pass
def trigger(self) -> DateTrigger:
    """Return APScheduler trigger information for scheduling ReserveJob's."""
    return DateTrigger(run_date=None)  # Run immediately