Example #1
from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
from apscheduler.schedulers.blocking import BlockingScheduler
from tzlocal import get_localzone
import logging

executors = {
    'default': ThreadPoolExecutor(20),
    'processpool': ProcessPoolExecutor(5)
}

job_defaults = {
    'coalesce': True,
    'max_instances': 1
}

timezone = get_localzone()

scheduler = BlockingScheduler(jobstores=jobstores, executors=executors, job_defaults=job_defaults, timezone=timezone)

scheduler.add_jobstore(redis_job_store)



@scheduler.scheduled_job(id="start_covid_19_processor", name="start_covid_19_processor", trigger='interval', minutes=30, misfire_grace_time=None)
def run_covid_19_processor():

    logger = logging.getLogger(log_tag)
    logger.info("Starting job processing")

    try:
        start_covid_19_processor()
        print "Sync tak dispatcher"
    except:
        logger.info("Exception at processing onboarding", exc_info=True)
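Example #1 references jobstores, redis_job_store, log_tag, and start_covid_19_processor without defining them; they presumably come from the surrounding project. A minimal sketch of what the job-store pieces might look like, assuming a local Redis instance:

from apscheduler.jobstores.redis import RedisJobStore

# Hypothetical stand-ins for names the excerpt leaves undefined
redis_job_store = RedisJobStore(host='127.0.0.1', port=6379, db=0)
jobstores = {}  # left empty so the add_jobstore() call above can register the Redis store as 'default'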
Example #2
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor

executors = {
    'default': ThreadPoolExecutor(20),

    # 'default': {'type': 'threadpool', 'max_workers': 20},
    # 'processpool': ProcessPoolExecutor(max_workers=5)
}
conf = {
    # Redis connection settings
    "host": '127.0.0.1',
    "port": 6379,
    "db": 0,  # connect to database 0
    "max_connections": 10  # cap the connection pool at 10 connections
}
# Create the scheduler object for the scheduled jobs
scheduler = BlockingScheduler(executors=executors)
# Add a persistent job store; skip this step if Redis is not installed
scheduler.add_jobstore(jobstore='redis', **conf)


# The same configuration can instead be applied with the configure method
# scheduler.configure(executors=executors)


# Define the scheduled job
def my_job(param1, param2):
    print(param1)
    print(param2)


def main():
    # Add the scheduled jobs to the scheduler
    # scheduler.add_job(my_job, 'date', args=[100, 'python'], run_date=date(2019, 12, 19))
    scheduler.add_job(my_job, 'interval', seconds=5, args=[100, 'python'])
    scheduler.start()
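The original snippet is cut off at this point; with main() completed as above, a typical entry-point guard looks like:

if __name__ == '__main__':
    try:
        main()
    except (KeyboardInterrupt, SystemExit):
        # BlockingScheduler.start() blocks until the process is interrupted
        pass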
Example #3
# -*- coding: utf-8 -*-
# author = 'LW'

from apscheduler.schedulers.blocking import BlockingScheduler
from django_apscheduler.jobstores import DjangoJobStore, register_events, register_job
import datetime

scheduler = BlockingScheduler()
scheduler.add_jobstore(DjangoJobStore(), 'default')


@register_job(scheduler, "interval", seconds=10)
def myTestJob():
    print("myTestJob auto run {}".format(datetime.datetime.now()))


register_events(scheduler)

# scheduler.start()
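Note that newer django_apscheduler releases deprecate register_job and register_events; under that assumption, a sketch of the equivalent explicit registration with plain APScheduler calls:

scheduler.add_job(myTestJob, 'interval', seconds=10,
                  id='myTestJob', replace_existing=True)
scheduler.start()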
Example #4
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
import time

# Use MySQL to store the jobs (uncomment and fill in real credentials)
# url = 'mysql://*****:*****@localhost/Sched'

# TODO: template section
jobstores = {'default': SQLAlchemyJobStore(url=url)}
executors = {
    'default': ThreadPoolExecutor(20),
    'processpool': ProcessPoolExecutor(5)
}
job_defaults = {'coalesce': False, 'max_instances': 3}


def job_fun(text="zach"):
    print("{}\t".format(text), time.ctime())


#sched = BlockingScheduler(jobstores=jobstores,executors=executors,job_defaults=job_defaults)
# Simple execution
sched = BlockingScheduler()
sched.add_jobstore('sqlalchemy', url=url)
# Scheduled execution
specid = "zach1"
job = sched.add_job(job_fun, 'interval', seconds=5, id=specid, args=[
    specid,
])
job2 = sched.add_job(job_fun,
                     'interval',
                     start_date='2017-06-19 10:42:40',
                     seconds=5)

sched.start()
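# The code below reads as a second, standalone variant of this example that
# swaps the SQLAlchemy job store for MongoDB; since sched.start() above
# blocks, run it as a separate script.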
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.jobstores.mongodb import MongoDBJobStore
from pymongo import MongoClient
from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
import time

host = '127.0.0.1'
port = 27017  # pymongo expects an integer port


def job_fun(text="zach"):
    print("{}\t".format(text), time.ctime())


#sched = BlockingScheduler(jobstores=jobstores,executors=executors,job_defaults=job_defaults)
# Simple execution
sched = BlockingScheduler()
client = MongoClient(host=host, port=port)
store = MongoDBJobStore(client=client)
sched.add_jobstore(store)
# Scheduled execution
specid = "zach1"
job = sched.add_job(job_fun, 'interval', seconds=5, id=specid, args=[
    specid,
])
job2 = sched.add_job(job_fun,
                     'interval',
                     start_date='2017-06-19 10:42:40',
                     seconds=5)

job.modify(max_instances=6, name='Alternate name')
sched.start()
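The persisted job can also be managed by its id; a brief sketch (these calls must run before start(), since BlockingScheduler.start() blocks the thread):

sched.pause_job(specid)   # suspend scheduled runs
sched.resume_job(specid)  # resume them
sched.remove_job(specid)  # delete the job from the MongoDB store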