def run():
    """Start a blocking scheduler running the crawler and the IP-update jobs."""
    print('开启定时任务')
    sched = BlockingScheduler()
    # Crawl every CRAWLER_TIME minutes (the original note said every 20 min).
    sched.add_job(func=run_clawer, trigger='interval', minutes=CRAWLER_TIME)
    # Refresh proxy IPs every UPDATE_TIME hours (original note: every 4 h).
    sched.add_job(func=update_ip, trigger='interval', hours=UPDATE_TIME)
    sched._logger = logging  # route APScheduler's internal log records
    sched.start()  # blocks the calling thread until shutdown
def run():
    """Start a blocking scheduler running the crawler and the baidu-connectivity check."""
    print('开启定时任务')
    sched = BlockingScheduler()
    # Crawl every CRAWLER_TIME minutes (the original note said every 20 min).
    sched.add_job(func=run_crawler, trigger='interval', minutes=CRAWLER_TIME)
    # Probe the baidu connection every UPDATE_BAIDU_CONN_TIME minutes
    # (original note: every 30 min).
    sched.add_job(func=update_baidu_connection, trigger='interval',
                  minutes=UPDATE_BAIDU_CONN_TIME)
    sched._logger = logging  # route APScheduler's internal log records
    sched.start()  # blocks the calling thread until shutdown
def pp(if_test):
    """Open the auction page in IE and schedule the bidding routine.

    if_test: when truthy, target the sandbox site and fire the job a few
    minutes from now instead of at the production 11:29:00 start time.
    """
    try:
        if if_test:
            url = 'http://test.alltobid.com/moni/gerenlogin.html'
            test_loc_config()
        else:
            url = 'https://paimai2.alltobid.com/bid/'
        # Put the bundled IE driver directory on PATH so webdriver.Ie() finds it.
        driver_dir = os.path.join(os.getcwd(), 'resource')
        os.environ['PATH'] += '{}{}{}'.format(os.pathsep, driver_dir, os.pathsep)
        # NOTE(review): IE11 on Win10 needs protected-mode settings aligned /
        # http://localhost whitelisted for the driver to attach — per original note.
        driver = webdriver.Ie()
        driver.implicitly_wait(1)
        driver.maximize_window()
        driver.get(url)
        time.sleep(1)
        # Best-effort: sync the local clock to the server's time.
        try:
            get_sys_time.setTime(get_sys_time.getTime(url))
            logging.info('时间同步成功')
        except Exception:
            logging.error('时间同步失败')
            traceback.print_exc()
        # Production fires at 11:29:00 sharp; test mode fires (now + 4) minutes.
        scheduler = BlockingScheduler()
        if if_test:
            start_minute = (time.localtime().tm_min + 4) % 60
            trig = CronTrigger(minute=start_minute, second=0)
            logging.info('test start minute: ' + str(start_minute))
        else:
            trig = CronTrigger(hour=11, minute=29, second=0)
        scheduler.add_job(func=begin, args=(driver, ), trigger=trig)
        scheduler._logger = logging
        scheduler.start()  # blocks until shutdown
    except Exception:
        traceback.print_exc()
    finally:
        logging.info('程序结束')
        quit()
def sanySpiderJob():
    """Run the spider job (job_function) every day at 15:48 via a blocking scheduler."""
    print("开始任务")
    scheduler = BlockingScheduler()
    # Fire job_function daily at 15:48.
    scheduler.add_job(job_function, 'cron', hour=15, minute=48)
    # BUGFIX: attach the logger BEFORE start(). start() blocks this thread,
    # so the original `scheduler._logger = logging` placed after it never
    # took effect while any job was running.
    scheduler._logger = logging
    # BUGFIX: "Ctral" -> "Ctrl" in the exit hint.
    print('Press Ctrl+{0} to exit '.format('Break' if os.name == 'nt' else 'C '))
    scheduler.start()  # blocks until KeyboardInterrupt / SystemExit
def __init__(self, outime):
    """Prepare download paths and drive startWall according to *outime*.

    outime: '0' = run once after a 5-minute delay; '1' = every 5 hours on a
    blocking scheduler; '2' = every 5 seconds (test cadence).
    """
    # Download directory and last-seen-key file, both under the CWD.
    self.rootpath = os.path.join(os.getcwd(), 'download')
    self.db = os.path.join(os.getcwd(), 'last.key')
    scheduler = BlockingScheduler()
    print(outime)
    if outime == '0':
        # One-shot: wait five minutes, then fetch immediately.
        time.sleep(300)
        self.startWall()
    elif outime == '1':
        # Recurring: every 5 hours, on the scheduler's own thread.
        try:
            scheduler.add_job(func=self.startWall, trigger='cron', hours='*/5')
            scheduler._logger = logging  # keep a log of scheduler activity
            scheduler.start()
        except Exception as e:
            print(e)
    elif outime == '2':
        # NOTE(review): the original comment said "every hour", but the live
        # trigger fires every 5 seconds (likely leftover test code) — kept as-is.
        scheduler.add_job(func=self.startWall, trigger='cron', second='*/5')
        scheduler._logger = logging  # keep a log of scheduler activity
        scheduler.start()
def date_example1():
    """Schedule a one-shot job (writes 'tick' to stdout) at a fixed date.

    Uses APScheduler's textual reference form ('sys:stdout.write') for the
    job callable.
    """
    scheduler = BlockingScheduler()
    # BUGFIX: the original wrote datetime(2019, 05, 20, 18, 54); a leading-zero
    # decimal literal (05) is a SyntaxError in Python 3 — the month is 5.
    scheduler.add_job('sys:stdout.write', 'date',
                      run_date=datetime(2019, 5, 20, 18, 54), args=['tick\n'])
    print('Press Ctrl+{0} to exit'.format('Break' if os.name == 'nt' else 'C'))
    scheduler.add_listener(event_listener, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)
    scheduler._logger = logger
    try:
        scheduler.start()  # blocks until interrupted
    except (KeyboardInterrupt, SystemExit):
        pass
def startCronTask(task, **interval_config):
    """Run *task* on an interval trigger under a blocking scheduler.

    interval_config: forwarded verbatim to add_job (e.g. minutes=..., hours=...).
    The scheduler is stored in a module-level global so other code can stop it.
    """
    global scheduler
    scheduler = BlockingScheduler()
    scheduler.add_listener(CronTask_listener, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)
    scheduler._logger = logger
    logger.info(
        '==================================== 新的日志分段 =============================================='
    )
    scheduler.add_job(func=task, trigger='interval', id='push_to_github',
                      **interval_config)
    logger.info('当前所有定时任务job1:%s', scheduler.get_jobs())
    logger.info('定时任务调度器状态1:%s', scheduler.state)
    scheduler.start()  # blocks the calling thread
def run(self):
    """Register every data-update job on a blocking scheduler and run it.

    Each job tolerates a 30-second misfire. Job handles are kept in
    self.jobs keyed by update name.
    """
    sche = BlockingScheduler()
    sche._logger = Logger.get_logger()
    # (job key, callable, cron trigger kwargs)
    plan = [
        # daily at 00:00
        ('update_trade_calendar', self.__update_trade_calendar, dict(day='*')),
        ('update_stock_basic', self.__update_stock_basic, dict(day='*')),
        ('update_stock_company', self.__update_stock_company, dict(day='*')),
        # weekdays at 17:30
        ('update_daily_basic', self.__update_daily_basic,
         dict(day_of_week='mon-fri', hour='17', minute='30')),
        ('update_stock_daily', self.__update_stock_daily,
         dict(day_of_week='mon-fri', hour='17', minute='30')),
        # Saturdays at 00:00
        ('update_stock_weekly', self.__update_stock_weekly,
         dict(day_of_week='sat')),
        # first day of each month
        ('update_stock_monthly', self.__update_stock_monthly, dict(month='*')),
        # daily; concept_detail at 01:00
        ('update_concept', self.__update_concept, dict(day='*')),
        ('update_concept_detail', self.__update_concept_detail,
         dict(day='*', hour='1')),
    ]
    for key, func, when in plan:
        self.jobs[key] = sche.add_job(func, 'cron', misfire_grace_time=30, **when)
    # Start everything; blocks until the scheduler is shut down.
    sche.start()
# NOTE(review): this fragment began mid add_job call in the collapsed source;
# the opening `scheduler.add_job(func=write_config_md5_to_db,` is reconstructed
# from the parallel calls below — confirm against the original file.
# cron: recurring wall-clock schedule (here: every minute).
scheduler.add_job(func=write_config_md5_to_db,
                  args=(),
                  trigger='cron',
                  minute='*/1',
                  id='cron调度!获取配置并写入到数据库!')
# date: fire exactly once at run_date.
scheduler.add_job(func=write_config_md5_to_db,
                  args=(),
                  trigger='date',
                  run_date=datetime(2019, 7, 25, 10, 9),
                  id='date调度!获取配置并写入到数据库!')
# interval: fire every `minutes` inside the [start_date, end_date] window.
scheduler.add_job(func=write_config_md5_to_db,
                  args=(),
                  trigger='interval',
                  minutes=1,
                  start_date=datetime(2019, 7, 25, 9, 35),
                  end_date=datetime(2019, 7, 25, 9, 44),
                  id='interval调度!获取配置并写入到数据库')
# Observe job completions and failures.
scheduler.add_listener(my_listener, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)
scheduler._logger = logging  # enable APScheduler logging
try:
    scheduler.start()  # blocks until interrupted
except KeyboardInterrupt:
    print('收到停止调度命令!正在退出!')
sched = BlockingScheduler(job_defaults=job_defaults)
# Triggers available: date (one-shot), interval (fixed period), cron (wall clock).

def my_record():
    # Record/poll OKEx v5 order state once.
    record.okex_v5()

def my_listener(event):
    # On job failure, bounce the scheduler; success needs no action.
    if event.exception:
        print('任务出错了。')
        # sms.send_wrong_sms()
        sched.shutdown()
        time.sleep(5)
        sched.start()
    else:
        pass

# NOTE(review): the opening `try:` of this region was truncated in the
# collapsed source; reconstructed to match the trailing `except` — confirm
# its exact placement in the original file.
try:
    my_record()  # one immediate run before the interval kicks in
    sched.add_job(func=my_record, trigger='interval', seconds=150)
    sched.add_listener(my_listener, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)
    sched._logger = logging
    sched.start()  # blocks until shutdown
except Exception as e:
    tools.warning(str(e))
    sched.shutdown()
    time.sleep(5)
    sched.start()
def date_task(x):
    """One-shot job: print the current timestamp and the message *x*."""
    print(dt.now().strftime("%Y-%m-%d %H:%M:%S"), x)

# NOTE(review): statement placement reconstructed from a collapsed source
# line; this print may originally have lived inside date_task — confirm.
print(str(dt.now()))

bs = BlockingScheduler()
# interval: every 5 seconds.
bs.add_job(func=interval_task, args=("循环任务", ), trigger="interval",
           seconds=5, id="interval")
# cron fields: year,month,day,week,day_of_week,hour,minute,second,start_date,end_date
bs.add_job(func=cron_task, args=("定时任务", ), trigger="cron",
           second="*/5", minute="*", id="cron")
# date: fire once, 15 seconds from now (next_run_time overrides the trigger).
bs.add_job(func=date_task, args=("一次性任务", ), trigger="date",
           next_run_time=dt.now() + timedelta(seconds=15), id="date")
bs._logger = logging
bs.start()  # blocks the calling thread
printMsg('========定时任务结束========') print("************************************************") def my_listener(event): if event.exception: print('任务出错了!!!!!!') else: print('任务照常运行...') if __name__ == '__main__': scheduler = BlockingScheduler() cronStr = "*/" + schedulerInterval if schedulerCrontype == "second": trigger = CronTrigger(second=cronStr) elif schedulerCrontype == "minute": trigger = CronTrigger(minute=cronStr) # trigger = CronTrigger(second='*/3') # trigger = CronTrigger(minute='*/1') scheduler.add_job(getWeather, trigger) # scheduler.add_job(getRain2H, 'cron', minute='*/5', hour='*') scheduler.add_listener(my_listener, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR) scheduler._logger = logging # 行启用 scheduler 模块的日记记录 scheduler.daemon = True try: scheduler.start() except Exception as e: print("scheduler启动异常=" + e)
# -*- coding: utf-8 -*-
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.schedulers.blocking import BlockingScheduler
from tools import log
import events
from config import *

if __name__ == '__main__':
    # Bootstrap: logger, working directories, and event initialisation.
    logger = log.logger_generator(logger_name='MusicClock')
    mk_dirs([excluded_file, tts_location, time_report_tts_location])
    events.schedules.initiator(logger_name='MusicClock')
    # One background (non-blocking) scheduler plus one blocking scheduler,
    # both wired to the same logger.
    BlockScheduler = BlockingScheduler()
    BackScheduler = BackgroundScheduler()
    BackScheduler._logger = logger
    BlockScheduler._logger = logger
    logger.info(f'[ {logger.name} ] 的 [ 任务调度器 ] 初始化完成')
    # Register jobs, then start: background first, blocking last (it blocks).
    events.schedules.add_block_schedule_jobs(BlockScheduler)
    events.schedules.add_back_schedule_jobs(BackScheduler)
    BackScheduler.start()
    BlockScheduler.start()
"-v", "test_News.py", "--alluredir", rootPath + '/report/xml_{time}'.format( time=datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')), "--reruns=2", # pip install pytest-rerunfailures "--reruns-delay=2" ]) xml_report_path, html_report_path = CommonsTool.rmdir5() print(xml_report_path) print(html_report_path) os.popen("allure generate {xml_report_path} -o {html_report_path} --clean". format(xml_report_path=xml_report_path, html_report_path=html_report_path)).read() if gm.get_value("errfunc") != [] and gm.get_value("errfunc") != 'Null_': # 发送邮件提醒 CommonsTool.send_email(gm.get_value("nowtime"), gm.get_value("errfunc")) # 删除变量 gm.del_map("errfunc") gm.del_map("errmsg") if __name__ == '__main__': # run() print("启动定时任务", datetime.datetime.now().strftime('%Y-%m-%d %H-%M-%S')) apscheduler = BlockingScheduler() apscheduler.add_job(func=run, trigger='cron', minute='*/10') #30分钟执行一次 apscheduler._logger = log apscheduler.start()
def example4(current_path, rootpaths):
    """Set up and run the full data-generation schedule.

    Builds platform-specific source/destination paths stamped with today's
    date, validates the sample-data sources, registers all generator /
    housekeeping jobs on a BlockingScheduler, and starts it (blocks).

    current_path: directory holding the sample ("gen_*") data sets.
    rootpaths:    root under which generated data is written.
    """
    print('into example4......')
    # Timestamp components for today's output directories; generation date is
    # "now", accurate to the second.
    set_times = time.strftime('%Y%m%d%H%M%S', time.localtime(time.time()))
    yyyy = set_times[0:4]
    yyyymm = set_times[0:6]
    yyyymmdd = set_times[0:8]
    yyyymmddhh = set_times[0:10]
    yyyymmddhhmm = set_times[0:12]
    # Replacement patterns for rewriting timestamps embedded in file names.
    repace_yyyymmdd = '_' + yyyymmdd
    repace_yyyymmddhh = '_' + yyyymmddhh
    repace_yyyymmddhhmm = '_' + yyyymmddhhmm
    rootpath = rootpaths

    if ('Windows' == platform.system()):
        # Sample sources (fixed historical dates shipped with the project).
        srcpathA = current_path + '\\gen_IONO_CET_ION\\201907\\20190716\\'  # 1-hour samples
        srcpathB = current_path + '\\gen_IONO_CET_ISM\\201907\\20190716\\'  # 1-hour samples
        srcpathC = current_path + '\\gen_IONO_FDS_ION\\2019\\20191115\\'  # 24-hour samples
        srcpathD = current_path + '\\gen_IONO_FDS_ISM\\202001\\20200101\\CDZJ\\CDZJ_ISM01_DBD_L11_01H_20200101000000.txt'
        srcpathE = current_path + '\\gen_SOLAR_FDS_SOT\\201910\\20191012\\'
        srcpathF = current_path + '\\gen_SOLAR_CMA_SRT\\201803\\20180328\\SDZM\\YJGC_SDWH_TYSD_20180328_061601_L0_0000_01S.txt'
        srcpathG = current_path + '\\gen_SOLAR_FDS_SRT\\201912\\20191201\\'
        srcpath5 = current_path + '\\Data\\TEST\\FDS\\geomag\\FGM\\2020\\20200306\\'
        srcpath6 = current_path + '\\Data\\TEST\\FDS\\atmos\\MET\\2020\\20200306\\'
        srcpath7 = current_path + '\\Data\\TEST\\FDS\\atmos\\MST\\2020\\20200306\\'
        srcpath8 = ''
        srcpath9 = ''
        srcpath10 = ''
        # Destinations, stamped with today's year-month / date.
        despathA = rootpath + '\\Data\\TEST\\CET\\iono\\ION\\' + yyyymm + '\\' + yyyymmdd + '\\'
        despathB = rootpath + '\\Data\\TEST\\CET\\iono\\ISM\\' + yyyymm + '\\' + yyyymmdd + '\\'
        despathC = rootpath + '\\Data\\TEST\\FDS\\iono\\ION\\' + yyyymm + '\\' + yyyymmdd + '\\'
        despathD = rootpath + '\\Data\\TEST\\FDS\\iono\\ISM\\' + yyyymm + '\\' + yyyymmdd + '\\'
        despathE = rootpath + '\\Data\\TEST\\FDS\\solar\\SOT\\' + yyyymm + '\\' + yyyymmdd + '\\'
        despathF = rootpath + '\\Data\\TEST\\CMA\\solar\\SRT\\' + yyyymm + '\\' + yyyymmdd + '\\'
        despathG = rootpath + '\\Data\\TEST\\FDS\\solar\\SRT\\' + yyyymm + '\\' + yyyymmdd + '\\'
        despath5 = rootpath + '\\Data\\TEST\\FDS\\geomag\\FGM\\' + yyyymm + '\\' + yyyymmdd + '\\'
        despath6 = rootpath + '\\Data\\TEST\\FDS\\atmos\\MET\\' + yyyymm + '\\' + yyyymmdd + '\\'
        despath7 = rootpath + '\\Data\\TEST\\FDS\\atmos\\MST\\' + yyyymm + '\\' + yyyymmdd + '\\'
        despath8 = ''
        despath9 = ''
        despath10 = ''

    if ('Linux' == platform.system()):
        # Sample sources.
        srcpathA = current_path + '/gen_IONO_CET_ION/201907/20190716/'  # 1-hour samples
        srcpathB = current_path + '/gen_IONO_CET_ISM/201907/20190716/'  # 1-hour samples
        srcpathC = current_path + '/gen_IONO_FDS_ION/2019/20191115/'  # 24-hour samples
        srcpathD = current_path + '/gen_IONO_FDS_ISM/202001/20200101/CDZJ/CDZJ_ISM01_DBD_L11_01H_20200101000000.txt'
        srcpathE = current_path + '/gen_SOLAR_FDS_SOT/201910/20191012/'
        srcpathF = current_path + '/gen_SOLAR_CMA_SRT/201803/20180328/SDZM/YJGC_SDWH_TYSD_20180328_061601_L0_0000_01S.txt'
        srcpathG = current_path + '/gen_SOLAR_FDS_SRT/201912/20191201/'
        srcpath5 = current_path + '/gen_FDS_GEOMAG_FGM/202003/20200330/'  # 15-minute samples
        srcpath6 = current_path + '/gen_FDS_ATMOS_MET/201201/20120101/'  # 1-hour samples
        srcpath7 = current_path + '/gen_FDS_ATMOS_MST/201911/20191105/'  # 30-minute samples
        srcpath71 = current_path + '/gen_FDS_ATMOS_MST/201911/20191106/'  # 30-minute samples
        srcpath8 = current_path + '/gen_FDS_GEOMAG_FGM/202003/20200329/'  # 3-hour samples
        srcpath9 = current_path + '/gen_MDP_ATMOS_LID/201910/20191022/'  # 1-day samples
        srcpath10 = current_path + '/gen_MDP_GEOMAG_FGM/201910/20191021/'  # 1-day samples
        srcpath11 = current_path + '/gen_FDS_ATMOS_CMA_UPAR/202003/20200331/'  # 12-hour samples
        srcpath12 = current_path + '/gen_FDS_GEOMAG_FGM/202003/20200331/'  # 24-hour samples
        srcpath13 = current_path + '/gen_FDS_ATMOS_AFD_UPAR/202004/20200401/'  # 24-hour samples
        # Destinations, stamped with today's year-month / date.
        despathA = rootpath + '/kjtq_data/CET/iono/ION/XXXM_ION/' + yyyymm + '/' + yyyymmdd + '/'
        despathB = rootpath + '/kjtq_data/CET/iono/ISM/XXXM_ISM/' + yyyymm + '/' + yyyymmdd + '/'
        despathC = rootpath + '/kjtq_data/FDS/iono/ION/XXXJ_ION/' + yyyymm + '/' + yyyymmdd + '/'
        despathD = rootpath + '/kjtq_data/FDS/iono/ISM/XXXJ_ISM/' + yyyymm + '/' + yyyymmdd + '/'
        despathE = rootpath + '/kjtq_data/FDS/solar/SOT/' + yyyymm + '/' + yyyymmdd + '/'
        despathF = rootpath + '/kjtq_data/CMA/solar/SRT/' + yyyymm + '/' + yyyymmdd + '/'
        # BUGFIX: the original wrote '.../SRT/XXXJ_SRT' + yyyymm, fusing the
        # station folder and the month into one name (XXXJ_SRT202005) — every
        # sibling path separates them with '/'.
        despathG = rootpath + '/kjtq_data/FDS/solar/SRT/XXXJ_SRT/' + yyyymm + '/' + yyyymmdd + '/'
        despath5 = rootpath + '/kjtq_data/FDS/geomag/FGM/XXXJ_FGM/' + yyyymm + '/' + yyyymmdd + '/'
        despath6 = rootpath + '/kjtq_data/FDS/atmos/MET/XXXJ_MET/' + yyyymm + '/' + yyyymmdd + '/'
        despath7 = rootpath + '/kjtq_data/FDS/atmos/MST/XXXM_MST/' + yyyymm + '/' + yyyymmdd + '/'
        despath71 = rootpath + '/kjtq_data/FDS/atmos/MST/XXXJ_MST/' + yyyymm + '/' + yyyymmdd + '/'
        despath8 = rootpath + '/kjtq_data/FDS/geomag/FGM/XXXJ_FGM/' + yyyymm + '/' + yyyymmdd + '/'
        despath9 = rootpath + '/kjtq_data/MDP/atmos/LID/' + yyyymm + '/' + yyyymmdd + '/'
        despath10 = rootpath + '/kjtq_data/MDP/geomag/FGM/' + yyyymm + '/' + yyyymmdd + '/'
        despath11 = rootpath + '/kjtq_data/CMA/atmos/UPC/' + yyyymm + '/' + yyyymmdd + '/'
        despath12 = rootpath + '/kjtq_data/FDS/geomag/FGM/XXXJ_FGM/' + yyyymm + '/' + yyyymmdd + '/'
        despath13 = rootpath + '/kjtq_data/CMA/atmos/UPA/' + yyyymm + '/' + yyyymmdd + '/'

    # Destination directories are created with makedirs by the jobs themselves.
    # Validate the sample sources up front: a missing source would put the
    # recurring jobs into an unpredictable state later.
    if not os.path.exists(srcpathA):
        exit('do not exist %s' % srcpathA)
    if not os.path.exists(srcpathB):
        exit('do not exist %s' % srcpathB)
    if not os.path.exists(srcpathC):
        exit('do not exist %s' % srcpathC)
    if not os.path.exists(srcpathD):
        exit('do not exist %s' % srcpathD)
    if not os.path.exists(srcpathE):
        exit('do not exist %s' % srcpathE)
    if not os.path.exists(srcpathF):
        exit('do not exist %s' % srcpathF)
    if not os.path.exists(srcpathG):
        exit('do not exist %s' % srcpathG)

    # Blocking scheduler (a BackgroundScheduler would be the non-blocking
    # alternative). Destination paths are re-stamped per day by the jobs so
    # output does not pile up under the start-day folder.
    scheduler = BlockingScheduler()
    jobA = scheduler.add_job(
        func=gen_IONO_CET_ION_scheduler, args=[srcpathA, despathA],
        trigger='cron', hour='8-18',
        id='gen_IONO_CET_ION_scheduler')  # hourly, 08-18
    jobC = scheduler.add_job(
        func=gen_IONO_FDS_ION_scheduler, args=[srcpathC, despathC],
        trigger='cron', minute='00,15,30,45',
        id='gen_IONO_FDS_ION_scheduler')  # at :00/:15/:30/:45
    jobF = scheduler.add_job(
        func=gen_SOLAR_CMA_SRT_scheduler, args=[srcpathF, despathF],
        trigger='cron', hour='6-18',
        minute='00,03,06,09,12,15,18,21,24,27,30,33,36,39,42,45,48,51,54,57',
        id='gen_SOLAR_CMA_SRT_scheduler')  # every 3 min, 06-18 (06:00 excluded)
    jobG = scheduler.add_job(
        func=gen_SOLAR_FDS_SRT_scheduler, args=[srcpathG, despathG],
        trigger='cron', hour='0-23', minute='00,15,30,45',
        id='gen_SOLAR_FDS_SRT_scheduler')  # at :00/:15/:30/:45 (00:00:00 excluded)
    job5 = scheduler.add_job(func=copy_modify_yyyymmddhhmm_once,
                             args=[srcpath5, despath5], trigger='cron',
                             minute='00,15,30,45', id='FDS_geomag_FGM15M')
    job6 = scheduler.add_job(func=copy_modify_yyyymmddhhmm_once,
                             args=[srcpath6, despath6], trigger='cron',
                             minute='00', id='FDS_atmos_MET')
    job7 = scheduler.add_job(func=copy_modify_yyyymmddhhmm_once,
                             args=[srcpath7, despath7], trigger='cron',
                             minute='00,30', id='FDS_atmos_MST')
    job71 = scheduler.add_job(func=copy_modify_yyyymmddhhmm_once,
                              args=[srcpath71, despath71], trigger='cron',
                              minute='00,30', id='FDS_atmos_MST1')
    job8 = scheduler.add_job(func=copy_modify_yyyymmddhhmm_once,
                             args=[srcpath8, despath8], trigger='cron',
                             hour='00,03,06,09,12,15,18,21',
                             id='FDS_geomag_FGM3H')  # every 3 hours
    job9 = scheduler.add_job(func=copy_modify_yyyymmddhhmm_once_mdp,
                             args=[srcpath9, despath9], trigger='cron',
                             hour='00', id='MDP_atmos_LID')  # daily 00:00
    job10 = scheduler.add_job(func=copy_modify_yyyymmddhhmm_once_mdp,
                              args=[srcpath10, despath10], trigger='cron',
                              hour='00', id='MDP_geomag_FGM')  # daily 00:00
    job11 = scheduler.add_job(func=copy_modify_yyyymmddhhmm_once_mdp,
                              args=[srcpath11, despath11], trigger='cron',
                              hour='00,12', id='FDS_atmos_cma_upar')  # 00:00 and 12:00
    job12 = scheduler.add_job(func=copy_modify_yyyymmdd_matchHH_scheduler,
                              args=[srcpath12, despath12], trigger='cron',
                              hour='00', id='FDS_atmos_FGM24H')  # daily 00:00
    job13 = scheduler.add_job(func=copy_modify_yyyymmddhhmm_once_mdp,
                              args=[srcpath13, despath13], trigger='cron',
                              hour='12', id='FDS_atmos_AFD_UPAR')  # daily 12:00
    # Mail the log digest twice a day.
    jobX = scheduler.add_job(func=send_mail_segment, trigger='cron',
                             hour='0,21', id='send_mail_segment')
    # Generate the IRI grid data nightly.
    jobR = scheduler.add_job(func=gen_IRI, trigger='cron', hour='22', id='gen_IRI')

    # Housekeeping: keep a 72-hour window plus margin (expire after 7 days).
    expire_day = 7
    cleanpath1 = '/kjtq_data/'
    cleanpath11 = '/kjtq_data/FDS/solar/'
    cleanpath2 = '/kjtq_data/localdatafiles/'
    cleanpath3 = '/kjtq_data/localplugins/IRI'
    jobY11 = scheduler.add_job(func=clean_dirs, args=[cleanpath11, expire_day],
                               trigger='cron', hour='00', id=cleanpath11)
    jobY2 = scheduler.add_job(func=clean_dirs, args=[cleanpath2, expire_day],
                              trigger='cron', hour='00', id=cleanpath2)

    # Observe job completions/errors and wire up the task log.
    scheduler.add_listener(listener, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)
    logging = log_setting()
    scheduler._logger = logging
    # start() may only be called once; it blocks until shutdown.
    try:
        print('begin start......')
        scheduler.start()
        print('end start......')
    except Exception as e:
        exit(str(e))