def get_scheduler(store_path=None, log_file=None):
    """Build a TornadoScheduler (Asia/Shanghai timezone) backed by Redis.

    Parameters:
        store_path: Retained for backward compatibility. The configured job
            store is Redis, so this SQLite-style path is currently unused —
            presumably a leftover from an earlier SQLAlchemy/SQLite store.
        log_file: Path for the customized scheduler log file;
            defaults to 'logger.log'.

    Returns:
        A configured (but not started) TornadoScheduler instance.
    """
    if store_path is None:
        # NOTE(review): value is never read below — confirm whether a SQLite
        # job store was intended before removing the parameter.
        store_path = r'jobstore.sqlite'
    if log_file is None:
        log_file = r'logger.log'

    scheduler = TornadoScheduler({'apscheduler.timezone': 'Asia/Shanghai'})

    jobstores = {
        'default': RedisJobStore(host='10.134.103.241', port=6379)
    }
    executors = {
        'default': ThreadPoolExecutor(20),
        'processpool': ProcessPoolExecutor(5)
    }
    job_defaults = {
        'coalesce': False,
        'max_instances': 1
    }
    # Fix: job_defaults was built but never handed to configure(), so
    # coalesce/max_instances silently fell back to APScheduler's defaults.
    scheduler.configure(jobstores=jobstores, executors=executors,
                        job_defaults=job_defaults)

    # Log customization: swap in a logger that writes to log_file.
    scheduler._logger = modify_logger(scheduler._logger, log_file=log_file)
    return scheduler
def get_scheduler(store_path=None, log_file=None):
    """Build a TornadoScheduler (Asia/Shanghai timezone) backed by Redis.

    Parameters:
        store_path: Retained for backward compatibility. The configured job
            store is Redis, so this SQLite-style path is currently unused —
            presumably a leftover from an earlier SQLAlchemy/SQLite store.
        log_file: Path for the customized scheduler log file;
            defaults to 'logger.log'.

    Returns:
        A configured (but not started) TornadoScheduler instance.

    NOTE(review): an event-listener hookup (EVENT_JOB_EXECUTED | EVENT_JOB_ERROR
    | EVENT_JOB_ADDED | EVENT_JOB_SUBMITTED | EVENT_JOB_REMOVED ->
    event_listener) was present here but commented out; restore via
    scheduler.add_listener(...) if event recording is wanted again.
    """
    if store_path is None:
        # NOTE(review): value is never read below — confirm whether a SQLite
        # job store was intended before removing the parameter.
        store_path = r'jobstore.sqlite'
    if log_file is None:
        log_file = r'logger.log'

    scheduler = TornadoScheduler({'apscheduler.timezone': 'Asia/Shanghai'})

    jobstores = {'default': RedisJobStore(host='10.134.103.241', port=6379)}
    executors = {
        'default': ThreadPoolExecutor(20),
        'processpool': ProcessPoolExecutor(5)
    }
    job_defaults = {'coalesce': False, 'max_instances': 1}
    # Fix: job_defaults was built but never handed to configure(), so
    # coalesce/max_instances silently fell back to APScheduler's defaults.
    scheduler.configure(jobstores=jobstores, executors=executors,
                        job_defaults=job_defaults)

    # Log customization: swap in a logger that writes to log_file.
    scheduler._logger = modify_logger(scheduler._logger, log_file=log_file)
    return scheduler