Example #1
def create_scheduler():
    new_scheduler = AsyncIOScheduler(
        jobstores={"default": MemoryJobStore()},
        executors={"default": AsyncIOExecutor()},
        job_defaults={
            "coalesce": False,
            "max_instances": 3
        },
        timezone=utc,
    )
    return new_scheduler
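The factory above only constructs the scheduler; a minimal usage sketch, assuming the same imports plus a hypothetical tick coroutine (the interval and sleep durations are illustrative):

import asyncio

from apscheduler.executors.asyncio import AsyncIOExecutor
from apscheduler.jobstores.memory import MemoryJobStore
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from pytz import utc


async def tick():
    # Hypothetical job: AsyncIOExecutor runs coroutine jobs directly on the
    # event loop instead of handing them to a thread pool.
    print("tick")


async def main():
    scheduler = AsyncIOScheduler(
        jobstores={"default": MemoryJobStore()},
        executors={"default": AsyncIOExecutor()},
        job_defaults={"coalesce": False, "max_instances": 3},
        timezone=utc,
    )
    scheduler.add_job(tick, "interval", seconds=10)
    scheduler.start()        # needs a running event loop, hence asyncio.run()
    await asyncio.sleep(60)  # keep the loop alive so the job can fire
    scheduler.shutdown()


asyncio.run(main())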
Example #2
async def _(event):
    if event.fwd_from:
        return
    sed = await edit_or_reply(event, "`Started AutoName.`")
    scheduler = AsyncIOScheduler(executors={
        'default': AsyncIOExecutor(),
    })
    scheduler.add_job(auto_name,
                      'interval',
                      args=[event.pattern_match.group(1)],
                      minutes=5)
    scheduler.start()
Example #3
def init_scheduler():
    """Initialize the scheduler"""
    # https://apscheduler.readthedocs.io/en/latest/modules/schedulers/base.html#apscheduler.schedulers.base.BaseScheduler.add_job

    # TODO UTC must match timezone from config
    try:
        jobstores = {
            'default': SQLAlchemyJobStore(
                engine=ax_model.engine,
                tablename='_ax_scheduler_jobs'
            )
        }

        executors = {
            'default': AsyncIOExecutor()
        }

        loop = asyncio.get_event_loop()
        this.scheduler = AsyncIOScheduler(
            jobstores=jobstores,
            executors=executors,
            timezone=ax_misc.timezone,
            event_loop=loop)
        this.scheduler.start()

        # Job that cleans the /uploads/tmp folder: deletes files that have expired
        all_jobs = this.scheduler.get_jobs()
        if "clear_tmp_files" not in [job.name for job in all_jobs]:
            this.scheduler.add_job(
                clear_tmp_files,
                trigger='cron',
                minute='30',
                coalesce=True,
                misfire_grace_time=60,
                id='clear_tmp_files')

        if "prn_job" not in [job.name for job in all_jobs]:
            now_dt = ax_misc.date() + timedelta(seconds=5)
            this.scheduler.add_job(
                prn_job,
                'date',
                run_date=now_dt,
                args=['SCHEDULER WORKS'],
                id='prn_job',
                misfire_grace_time=60
            )

    except Exception:
        logger.exception('Error initiating scheduler module. ')
        raise
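The job callables referenced above (clear_tmp_files, prn_job) are not part of the snippet; the stand-ins below are assumptions, sketched only to show the kind of functions being scheduled:

import logging
import os
import time

logger = logging.getLogger(__name__)


def clear_tmp_files(tmp_dir='/uploads/tmp', max_age_seconds=3600):
    """Hypothetical stand-in: delete files in tmp_dir older than max_age_seconds."""
    cutoff = time.time() - max_age_seconds
    for name in os.listdir(tmp_dir):
        path = os.path.join(tmp_dir, name)
        if os.path.isfile(path) and os.path.getmtime(path) < cutoff:
            os.remove(path)


def prn_job(message):
    """Hypothetical stand-in: log the message passed via args."""
    logger.info(message)

AsyncIOExecutor runs plain functions like these in the event loop's default thread pool, while coroutine functions are awaited directly on the loop.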
Example #4
    def create_scheduler(self):
        self.scheduler = AsyncIOScheduler()

        self.scheduler.configure(
            jobstores={
                'default': SQLAlchemyJobStore(url=self.dsn),
            },
            executors={
                'default': AsyncIOExecutor(),
            },
            job_defaults={
                'coalesce': False,
                'max_instances': 1,
                'misfire_grace_time': (60 * 60)
            },
            timezone="UTC",
        )
Example #5
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <https://www.gnu.org/licenses/>.

import asyncio
import time
from telethon.errors import FloodWaitError
from telethon.tl import functions
from uniborg.util import edit_or_reply, friday_on_cmd, sudo_cmd
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.executors.asyncio import AsyncIOExecutor
from fridaybot.function.auto_tools import auto_name, auto_bio, auto_pic
from fridaybot import ALIVE_NAME, CMD_HELP

scheduler = AsyncIOScheduler(executors={'default': AsyncIOExecutor()})

@friday.on(friday_on_cmd(pattern="autoname(?: |$)(.*)"))
@friday.on(sudo_cmd(pattern="autoname(?: |$)(.*)", allow_sudo=True))
async def _(event):
    if event.fwd_from:
        return
    sed = await edit_or_reply(event, "`Started AutoName Your Name Will Be Changed Every 1 Min, According To TimeZone Given. To Terminate This Process Use .stop Cmd`")
    scheduler.add_job(auto_name, 'interval', args=[event.pattern_match.group(1)], minutes=1, id='autoname')
    
@friday.on(friday_on_cmd(pattern="autopic(?: |$)(.*)"))
@friday.on(sudo_cmd(pattern="autopic(?: |$)(.*)", allow_sudo=True))
async def _(event):
    if event.fwd_from:
        return
    sed = await edit_or_reply(event, "`Started AutoPic Your Profile Pic Will Be Changed Every 1 Min, According To TimeZone Given. To Terminate This Process Use .stop Cmd`")
Example #6
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <https://www.gnu.org/licenses/>.

from apscheduler.executors.asyncio import AsyncIOExecutor
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from uniborg.util import WhiteEye_on_cmd, edit_or_reply, sudo_cmd

from WhiteEyeUserBot import CMD_HELP
from WhiteEyeUserBot.functions.matic_tool import auto_bio, auto_name, auto_pic

scheduler = AsyncIOScheduler(executors={"default": AsyncIOExecutor()})


@WhiteEye.on(WhiteEye_on_cmd(pattern="autoname(?: |$)(.*)"))
@WhiteEye.on(sudo_cmd(pattern="autoname(?: |$)(.*)", allow_sudo=True))
async def autoname(event):
    if event.fwd_from:
        return
    await edit_or_reply(
        event,
        "`Started AutoName Your Name Will Be Changed Every 1 Min, According To TimeZone Given. To Terminate This Process Use .stop Cmd`",
    )
    scheduler.add_job(
        auto_name,
        "interval",
        args=[event.pattern_match.group(1)],
        minutes=1,  # "Every 1 Min", as the status message above states
    )
Example #7
from apscheduler.executors.asyncio import AsyncIOExecutor
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from pytz import utc

from loop import event_loop
from settings import DATABASE_CONNECTION_URL

jobstores = {
    'default': SQLAlchemyJobStore(url=DATABASE_CONNECTION_URL),
}
executors = {
    'default': AsyncIOExecutor(),
}
job_defaults = {
    'coalesce': False,
    'max_instances': 3,
}


scheduler = AsyncIOScheduler(
    jobstores=jobstores,
    executors=executors,
    job_defaults=job_defaults,
    timezone=utc,
    event_loop=event_loop,
)
Example #8
def asyncio_executor(asyncio_scheduler):
    executor = AsyncIOExecutor()
    executor.start(asyncio_scheduler, 'default')
    yield executor
    executor.shutdown()
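The yield-based helper above reads like the body of a pytest fixture; a sketch of how it would typically be registered, assuming pytest (the asyncio_scheduler companion fixture here is an assumption):

import pytest
from apscheduler.executors.asyncio import AsyncIOExecutor
from apscheduler.schedulers.asyncio import AsyncIOScheduler


@pytest.fixture
def asyncio_scheduler():
    # Hypothetical companion fixture: an unstarted scheduler instance.
    return AsyncIOScheduler()


@pytest.fixture
def asyncio_executor(asyncio_scheduler):
    # Same shape as the helper above: start the executor against the
    # scheduler, hand it to the test, then shut it down afterwards.
    executor = AsyncIOExecutor()
    executor.start(asyncio_scheduler, 'default')
    yield executor
    executor.shutdown()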
Example #9
    def __init__(self) -> None:
        self._application: "Optional[weakref.ReferenceType[Application]]" = None
        self._executor = AsyncIOExecutor()
        self.scheduler = AsyncIOScheduler(timezone=pytz.utc, executors={"default": self._executor})
Example #10
# Start the scheduler
scheduler.start()

second_redis_jobstore = RedisJobStore(
    db=2,
    jobs_key="apschedulers.second_jobs",
    run_times_key="apschedulers.second_run_times",
    host="127.0.0.1",
    port=6379,
    password="******"
)

scheduler.add_jobstore(second_redis_jobstore, 'second')
# Define an executor that schedules job execution on the asyncio event loop
second_executor = AsyncIOExecutor()
scheduler.add_executor(second_executor, "second")


# ***********   Examples of working with APScheduler events   *************
# Define a listener function for scheduler events
def job_execute(event):
    """
    Handle a job-execution event emitted by the scheduler.
    :param event: the apscheduler event instance
    :return:
    """
    print(
        "job executed:\ncode => {}\njob.id => {}\njobstore => {}".format(
            event.code,
            event.job_id,
            event.jobstore,
        )
    )
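The job_execute listener above is only defined; it still has to be attached to the scheduler with an event mask. A minimal sketch, reusing the scheduler from this example:

from apscheduler.events import EVENT_JOB_ERROR, EVENT_JOB_EXECUTED, EVENT_JOB_MISSED

# Invoke job_execute whenever a job runs, fails, or misses its time slot.
scheduler.add_listener(
    job_execute,
    EVENT_JOB_EXECUTED | EVENT_JOB_ERROR | EVENT_JOB_MISSED,
)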
Example #11
    def add_async_executor(self):
        executor = AsyncIOExecutor()
        self._executors_alias = "async"
        self.add_executor(executor=executor, alias=self._executors_alias)
Example #12
def asyncio_executor(asyncio_scheduler):
    executor = AsyncIOExecutor()
    executor.start(asyncio_scheduler, 'default')
    yield executor
    executor.shutdown()
Example #13
from apscheduler.executors.asyncio import AsyncIOExecutor
from apscheduler.jobstores.redis import RedisJobStore
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from pytz import utc

from app import config

BASE_JOB_ID = "posting_{channel_id}_{user_id}"

jobstores = {
    "default":
    RedisJobStore(host=config.REDIS_HOST, password=config.REDIS_PASSWORD)
}

executors = {"default": AsyncIOExecutor()}

job_defaults = {
    "coalesce": False,
    "max_instances": 3,
    "misfire_grace_time": 330
}

apscheduler = AsyncIOScheduler(jobstores=jobstores,
                               executors=executors,
                               job_defaults=job_defaults,
                               timezone=utc)


def setup():
    apscheduler.start()
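BASE_JOB_ID is defined but unused in this excerpt; it presumably keys one posting job per (channel, user) pair. A hedged sketch of how it might be used (post_to_channel and its signature are assumptions, not part of the source):

async def post_to_channel(channel_id: int, user_id: int):
    # Hypothetical job body: publish the user's scheduled post to the channel.
    ...


def schedule_posting(channel_id: int, user_id: int, interval_minutes: int = 60):
    # One job per (channel, user) pair; replace_existing avoids duplicate-id
    # errors in the Redis job store if the same pair is scheduled twice.
    apscheduler.add_job(
        post_to_channel,
        "interval",
        minutes=interval_minutes,
        args=[channel_id, user_id],
        id=BASE_JOB_ID.format(channel_id=channel_id, user_id=user_id),
        replace_existing=True,
    )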
Example #14
import logging

from apscheduler.executors.asyncio import AsyncIOExecutor
from apscheduler.job import Job
from apscheduler.jobstores.base import JobLookupError
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.cron import CronTrigger

from app.db import db_engine, db_session
from app.models import Route, Schedule

log = logging.getLogger(__name__)

jobstores = {'default': SQLAlchemyJobStore(engine=db_engine)}
jobdefaults = {'misfire_grace_time': 30, 'coalesce': True}
executors = {'default': AsyncIOExecutor()}

Scheduler = AsyncIOScheduler(
    jobstores=jobstores,
    executors=executors,
    timezone='Europe/Moscow',
    job_defaults=jobdefaults,
)


def get_active_schedules():
    """Returns all schedules with is_active property."""
    schedules = Schedule.get_active()
    log.info('Found %s active schedules.', len(schedules))
    return schedules
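The helper above only loads active schedules; a hedged sketch of turning them into cron jobs on Scheduler (run_route, and the cron/route_id attributes of Schedule, are assumptions not shown in the excerpt):

def run_route(route_id):
    # Hypothetical job body: dispatch the Route with the given id.
    log.info('Running route %s', route_id)


def register_schedules():
    # Recreate one cron job per active schedule; replace_existing keeps the
    # SQLAlchemy job store free of stale duplicates across restarts.
    for schedule in get_active_schedules():
        Scheduler.add_job(
            run_route,
            CronTrigger.from_crontab(schedule.cron),
            args=[schedule.route_id],
            id=f'schedule_{schedule.id}',
            replace_existing=True,
        )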
Example #15
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from apscheduler.executors.asyncio import AsyncIOExecutor
from apscheduler.jobstores.redis import RedisJobStore
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from pytz import utc

from utah.config import get_str_key, get_int_key
from utah.utils.logger import log

DEFAULT = "default"

jobstores = {
    DEFAULT:
    RedisJobStore(host=get_str_key("REDIS_URI"),
                  port=get_int_key("REDIS_PORT"),
                  password=get_str_key("REDIS_PASS"))
}
executors = {DEFAULT: AsyncIOExecutor()}
job_defaults = {"coalesce": False, "max_instances": 3}

scheduler = AsyncIOScheduler(jobstores=jobstores,
                             executors=executors,
                             job_defaults=job_defaults,
                             timezone=utc)

log.info("Starting apscheduler...")
scheduler.start()
Example #16
    def _create_default_executor(self):
        from apscheduler.executors.asyncio import AsyncIOExecutor
        return AsyncIOExecutor()
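This is the hook the scheduler calls to build its 'default' executor when none was configured explicitly; a hedged sketch of overriding it in a subclass (the subclass and the thread-pool choice are illustrative assumptions):

from apscheduler.executors.pool import ThreadPoolExecutor
from apscheduler.schedulers.asyncio import AsyncIOScheduler


class ThreadDefaultScheduler(AsyncIOScheduler):
    def _create_default_executor(self):
        # Use a dedicated thread pool for jobs instead of the asyncio-based
        # executor the base class would create.
        return ThreadPoolExecutor(max_workers=4)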
Example #17
from aiogram import Dispatcher
from aiogram.utils.executor import Executor
from apscheduler.jobstores.redis import RedisJobStore
from apscheduler.executors.asyncio import AsyncIOExecutor
from apscheduler.schedulers.asyncio import AsyncIOScheduler

from src.settings import REDIS_HOST, REDIS_PORT, REDIS_DB_JOBS, REDIS_PASSWORD

jobstores = {
    "default":
    RedisJobStore(db=REDIS_DB_JOBS,
                  host=REDIS_HOST,
                  port=REDIS_PORT,
                  password=REDIS_PASSWORD)
}
executors = {
    "default": AsyncIOExecutor(),
}
job_defaults = {"coalesce": False, "max_instances": 3}

scheduler = AsyncIOScheduler(jobstores=jobstores,
                             executors=executors,
                             job_defaults=job_defaults)


async def on_startup(dispatcher: Dispatcher):
    scheduler.start()


async def on_shutdown(dispatcher: Dispatcher):
    scheduler.shutdown()
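For completeness, a sketch of how on_startup/on_shutdown are typically wired in with aiogram 2.x (the bot token placeholder and the dp Dispatcher are assumptions; the real project presumably builds them elsewhere):

from aiogram import Bot, Dispatcher
from aiogram.utils import executor

bot = Bot(token="...")  # placeholder token
dp = Dispatcher(bot)

if __name__ == "__main__":
    # start_polling drives the event loop that AsyncIOScheduler attaches to;
    # the scheduler is started on startup and shut down cleanly on exit.
    executor.start_polling(dp, on_startup=on_startup, on_shutdown=on_shutdown)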