Пример #1
0
def stop_job(task_id, job_id):
    """Stop a job: halt its running execution (if any) and unschedule it.

    Aborts with 404 when the job does not exist in the task; otherwise
    returns the job summary for (task_id, job_id).
    """
    logger = g.logger.bind(operation="stop", task_id=task_id, job_id=job_id)

    logger.debug("Getting job...")
    job = Job.get_by_id(task_id=task_id, job_id=job_id)

    if job is None:
        logger.error("Job not found in task.")
        # abort() raises an HTTPException, so control never continues past
        # this line (the original dead `return` after it has been removed).
        abort(404)

    execution = job.get_last_execution()

    # Only a currently-running execution needs to be stopped.
    if execution is not None and execution.status == JobExecution.Status.running:
        logger.debug("Stopping current execution...")
        executor = current_app.load_executor()
        executor.stop_job(job.task, job, execution)
        logger.debug("Current execution stopped.")

    scheduler = Scheduler("jobs", connection=current_app.redis)

    # If the job was enqueued for a future run, cancel that schedule too.
    if "enqueued_id" in job.metadata and job.metadata[
            "enqueued_id"] in scheduler:
        scheduler.cancel(job.metadata["enqueued_id"])
        job.scheduled = False
        job.save()

    logger.debug("Job stopped.")

    return get_job_summary(task_id, job_id)
Пример #2
0
 def __init__(self, conf: RqConfig, prefix: str = ''):
     """Wire up the Redis connection, RQ queue, scheduler and control helper.

     ``conf`` supplies the Redis host/port/db and the scheduler config path;
     ``prefix`` is stored verbatim for later key namespacing.
     """
     self.prefix = prefix
     self.scheduler_conf_path = conf.SCHEDULER_CONF_PATH
     # All RQ components share one Redis connection.
     self.redis_conn = Redis(host=conf.HOST, port=conf.PORT, db=conf.DB)
     self.control = Control(self.redis_conn)
     self.queue = Queue(connection=self.redis_conn)
     self.scheduler = Scheduler(connection=self.redis_conn, queue=self.queue)
Пример #3
0
 def test_lock_handover_between_multiple_schedulers(self):
     """The scheduler lock released by one scheduler can be taken by another."""
     lock_key = Scheduler.scheduler_lock_key

     def all_keys():
         return tl(self.testconn.keys('*'))

     self.assertNotIn(lock_key, all_keys())
     first = Scheduler(connection=self.testconn, interval=20)
     second = Scheduler(connection=self.testconn, interval=20)
     for sched in (first, second):
         sched.register_birth()
         sched.acquire_lock()
     # Both schedulers are still active/registered regardless of lock owner.
     self.assertIn(first.key, all_keys())
     self.assertIn(second.key, all_keys())
     first.remove_lock()
     self.assertNotIn(lock_key, all_keys())
     second.acquire_lock()
     self.assertIn(lock_key, all_keys())
Пример #4
0
def add_scheduled_task(request):
    """Django view: schedule a periodic task via rq-scheduler.

    Query params: ``task`` (name of a function on the ``tasks`` module),
    ``period`` (interval in seconds), ``queue`` (target queue name) and an
    optional single ``parameters`` value passed positionally to the task.
    """
    from rq_scheduler import Scheduler
    from datetime import datetime

    task = request.GET.get('task')
    period = request.GET.get('period')
    queue = request.GET.get('queue')
    parameters = request.GET.get('parameters')
    scheduler = Scheduler(queue_name=queue, connection=tasks.redis_conn)

    # Build the keyword arguments once instead of duplicating the whole
    # schedule() call for the with/without-parameters cases.
    schedule_kwargs = dict(
        scheduled_time=datetime.utcnow(),  # Time for first execution
        func=getattr(tasks, task),         # Function to be queued
        # Time before the function is called again, in seconds
        interval=int(period),
        # Repeat this number of times (None means repeat forever)
        repeat=None,
    )
    if parameters:
        schedule_kwargs['args'] = (parameters,)
    scheduler.schedule(**schedule_kwargs)
    return HttpResponse('Success')
Пример #5
0
def view_scheduled_jobs():
    """Print every job currently registered with the rq scheduler."""
    with Connection(redis.from_url(app.config['REDIS_URL'])) as conn:
        # NOTE: the original also built an unused Queue() here; removed.
        scheduler = Scheduler(connection=conn)
        for job in scheduler.get_jobs():
            print(job)
Пример #6
0
def cancel_scheduled_task(request):
    """Django view: cancel a scheduled job named by the ``job_id`` query param."""
    from rq_scheduler import Scheduler

    scheduler = Scheduler('parser')
    scheduler.cancel(request.GET.get('job_id'))
    return HttpResponse('Success')
Пример #7
0
    def addModule(self, mod):
        """Register a module in the hub and schedule hourly stats refreshes.

        Saves ``mod`` as a RedisModule document, records a reference to it in
        the master catalog, and schedules a repeating stats-update job.
        Returns the created RedisModule wrapper.
        """
        logger.info('Adding module to hub {}'.format(mod['name']))
        # Store the module object as a document
        m = RedisModule(self.dconn, self.sconn, self.autocomplete, mod['name'])
        m.save(mod)

        # Add a reference to it in the master catalog
        self.dconn.jsonset(
            self._hubkey, Path('.modules["{}"]'.format(m.get_id())), {
                'id': m.get_id(),
                'key': m.get_key(),
                'created': str(_toepoch(self._ts)),
            })

        # Schedule a job to refresh repository statistics, starting from now
        # (epoch scheduled_time is already due) and then every hour.  The
        # returned job handle was never used, so it is no longer bound.
        s = Scheduler(connection=self.qconn)
        s.schedule(
            scheduled_time=datetime(1970, 1, 1),
            func=callRedisModuleUpateStats,
            args=[m.get_id()],
            interval=60 * 60,  # every hour
            repeat=None,  # indefinitely
            ttl=0,
            result_ttl=0)
        return m
Пример #8
0
    def set_up(cls, config):
        """Create the class-level queues, scheduler, failed queue and default
        connection.

        ``config`` must provide ``host``, ``port``, ``password`` and a
        ``queues`` mapping with per-queue keyword arguments.
        """

        def _redis():
            # Each consumer gets its own connection object, matching the
            # original behavior of constructing Redis(...) at every site.
            return Redis(
                host=config["host"],
                port=int(config["port"]),
                password=config["password"],
            )

        for queue_name in ("default", "email", "kpi"):
            queue = Queue(connection=_redis(), **config["queues"][queue_name])
            setattr(cls, queue_name, queue)

        cls.scheduler = Scheduler(connection=_redis())

        cls.failed_queue = get_failed_queue(connection=_redis())

        # Make a connection the process-wide default for bare RQ calls.
        push_connection(_redis())
Пример #9
0
def start_fetching():
    """Schedule ``fetch_data`` to run every 60 seconds, starting immediately."""
    scheduler = Scheduler(connection=get_connection())
    from app.tasks import fetch_data
    scheduler.schedule(
        scheduled_time=datetime.utcnow(),
        func=fetch_data,
        interval=60,
        repeat=None,
    )
Пример #10
0
def list_synchronization():
    """Print the id of every job known to the scheduler."""
    scheduler = Scheduler(connection=StrictRedis())
    for scheduled_job in scheduler.get_jobs():
        print(scheduled_job.id)
Пример #11
0
def setup_scheduled_jobs(app):  # pragma: no cover
    """Setup scheduled jobs."""
    # NOTE(review): these messages are logged at error level apparently so
    # they always show up in production logs; the text itself says they are
    # not real errors — confirm before downgrading to info.
    app.logger.error("This isn't really an error, setup scheduled jobs was triggered!")
    from datetime import datetime
    from pybossa.jobs import enqueue_periodic_jobs, schedule_job, \
        get_quarterly_date
    from rq_scheduler import Scheduler
    redis_conn = sentinel.master
    scheduler = Scheduler(queue_name='scheduled_jobs', connection=redis_conn)
    # Interval helpers, all in seconds.
    MINUTE = 60
    HOUR = 60 * 60
    MONTH = 30 * (24 * HOUR)
    first_quaterly_execution = get_quarterly_date(datetime.utcnow())
    # One periodic-enqueue job per queue: `interval` is how often it fires,
    # `timeout` bounds a single run.  The quarterly job additionally pins
    # its first execution date.  ('quaterly' spelling is the queue's name.)
    JOBS = [dict(name=enqueue_periodic_jobs, args=['email'], kwargs={},
                 interval=(1 * MINUTE), timeout=(10 * MINUTE)),
            dict(name=enqueue_periodic_jobs, args=['maintenance'], kwargs={},
                 interval=(1 * MINUTE), timeout=(10 * MINUTE)),
            dict(name=enqueue_periodic_jobs, args=['super'], kwargs={},
                 interval=(10 * MINUTE), timeout=(10 * MINUTE)),
            dict(name=enqueue_periodic_jobs, args=['high'], kwargs={},
                 interval=(1 * HOUR), timeout=(10 * MINUTE)),
            dict(name=enqueue_periodic_jobs, args=['medium'], kwargs={},
                 interval=(12 * HOUR), timeout=(10 * MINUTE)),
            dict(name=enqueue_periodic_jobs, args=['low'], kwargs={},
                 interval=(24 * HOUR), timeout=(10 * MINUTE)),
            dict(name=enqueue_periodic_jobs, args=['monthly'], kwargs={},
                 interval=(1 * MONTH), timeout=(30 * MINUTE)),
            dict(name=enqueue_periodic_jobs, args=['quaterly'], kwargs={},
                 interval=(3 * MONTH), timeout=(30 * MINUTE),
                 scheduled_time=first_quaterly_execution)]
    app.logger.error("This isn't really an error, the length of JOBS is "+str(len(JOBS)))
    for job in JOBS:
        app.logger.error("This isn't really an error, this is just one job: "+str(job))
        schedule_job(job, scheduler, app)
Пример #12
0
def start_synchronization(restart_q_name, path_q_name, file_q_name, target,
                          root, interval, job_name, hdlr):
    """Start a synchronization job under ``job_name``.

    With an ``interval``, the ``restart`` task is scheduled to repeat on
    ``restart_q_name``; without one, it is enqueued exactly once.  Refuses
    (with an error log) when a scheduled job with the same name exists.
    """
    root_abs = realpath(root)

    r = StrictRedis()
    scheduler = Scheduler(connection=r)

    # Never double-schedule the same named job.
    if job_name in scheduler:
        logger.error("job exists")
        return

    if interval is not None:
        # Periodic mode: run `restart` every `interval` seconds.
        scheduler.schedule(
            scheduled_time=datetime.utcnow(),
            func=restart,
            args=[path_q_name, file_q_name, target, root_abs, root_abs, hdlr],
            interval=interval,
            queue_name=restart_q_name,
            id=job_name)
    else:
        # One-shot mode: enqueue `restart` immediately with the same args.
        restart_q = Queue(restart_q_name, connection=r)
        restart_q.enqueue(restart,
                          path_q_name,
                          file_q_name,
                          target,
                          root_abs,
                          root_abs,
                          hdlr,
                          job_id=job_name)
Пример #13
0
def reenqueue_monitor_due_to_break(task_id, job_id, execution_id):
    """Re-enqueue the monitor job for this execution one second from now."""
    args = [task_id, job_id, execution_id]
    delta = timedelta(seconds=1.0)

    scheduler = Scheduler("monitor", connection=current_app.redis)
    dt = datetime.utcnow() + delta
    # The returned job handle was never used, so it is not bound to a name.
    scheduler.enqueue_at(dt, monitor_job, *args)
Пример #14
0
def process(id_, data_dir, clean_in=10):
    """
    Job to remove the exif data from an uploaded image.

    The exif data is saved as a json file.

    If the image had an exif thumbnail, it is saved as a separate file.
    """
    image_path = os.path.join(data_dir, "{}.jpg".format(id_))
    exif = ExifImage(image_path)

    # Extract thumbnail, dump metadata to JSON, then strip exif in place.
    exif.thumb()
    exif.dump()
    exif.clean()

    job = get_current_job()

    # Schedule the cleanup task on the same queue this job arrived from.
    now = datetime.datetime.now()
    delay = datetime.timedelta(minutes=clean_in)
    scheduler = Scheduler(queue_name=job.origin, connection=get_current_connection())
    scheduler.enqueue_in(delay, cleanup, id_, data_dir)

    removed_by = now + delay

    print("Added at: {}".format(now.isoformat()))
    print("Removed by: {}".format(removed_by.isoformat()))

    return {
        'thumb': exif.thumb_name,
        'json': exif.json_name,
        'removed_around': removed_by.isoformat()
    }
Пример #15
0
def replay_request(request_id, destination_id, retries=None):
    '''Replays request from source to destination, retrying when appropriate'''
    app = setup_app()
    if retries is None:
        retries = app.config.get('API_REPLAY_RETRIES')
    request = Request.get(request_id)
    destination = Destination.get(destination_id)
    # Forward the stored headers under X-Original-* so the destination can
    # tell them apart from the replay's own transport headers.
    headers = {
        'X-Original-{}'.format(k): v for k, v in request.headers.items()
    }
    headers['Content-Type'] = request.headers.get('Content-Type', '')
    # TODO: add exception handling here
    response = requests.request(
        method=request.method,
        url=destination.url,
        data=request.body,
        headers=headers,
    )
    # NOTE(review): `> 300` treats status 300 itself as a success; the usual
    # non-2xx check is `>= 300` — confirm whether 300 should really pass
    # before changing this condition.
    if response.status_code < 200 or response.status_code > 300:
        if retries:
            with app.app_context():
                scheduler = Scheduler(connection=redis_store.connection)
                # Exponential backoff: BASE ** (attempts made so far).
                delay = app.config.get('API_REPLAY_BASE') ** (
                    app.config.get('API_REPLAY_RETRIES') - retries
                )
                scheduler.enqueue_at(
                    datetime.utcnow() + timedelta(seconds=delay),
                    replay_request,
                    request.id,
                    destination.id,
                    retries=retries - 1,
                )
        else:
            # Out of retries: surface the bad response to the caller.
            raise BadResponseException(response.status_code, response.text)
Пример #16
0
def get_events(cal_id, url, message_req):
    """
    Creates a REST request for calendar API to get upcoming 10 events and
    schedules reminders accordingly
    """
    # REST request
    URI = f'https://content.googleapis.com/calendar/v3/calendars/' \
          f'{urllib.parse.quote(cal_id)}/events' \
          f'?key={KEY}' \
          f'&maxResults=10' \
          f'&singleEvents=true' \
          f'&fields=items(summary%2Cstart)' \
          f'&timeMin={datetime.datetime.utcnow().isoformat()}Z'
    print(URI)
    response = requests.get(URI).json()["items"]  # retrieving events
    # Scheduler bound to the "randomseed" queue.
    scheduler = Scheduler("randomseed", connection=Redis())
    for event in response:
        # An event's start may carry a full datetime or a date-only value.
        try:
            st_time = datetime.datetime.strptime(f'{event["start"]["date"]}Z',
                                                 '%Y-%m-%dT%H:%M:%SZ')
        except ValueError:
            st_time = datetime.datetime.strptime(f'{event["start"]["date"]}Z',
                                                 '%Y-%m-%dZ')
        message_req["message"]["text"] = f'Are you trying to fail this ' \
                                         f'class!? SMH - {event["summary"]} is ' \
                                         f'coming up!'
        # BUG FIX: the original passed the literal string
        # "send_message(url, message_req)" as the job function; rq cannot
        # resolve a call expression, so the reminder would never fire.
        # Pass the callable and its arguments instead.
        # (Assumes send_message is importable at module level — confirm.)
        scheduler.enqueue_at(st_time, send_message, url, message_req)
Пример #17
0
def add_scheduled_task(request):
    """Django view: schedule a periodic task on the queue named in the request.

    Query params: ``task`` (attribute name on the ``tasks`` module),
    ``period`` (interval in seconds), ``queue`` (target queue name) and an
    optional integer ``parameters`` argument for the task.
    """
    task = request.GET.get('task')
    period = request.GET.get('period')
    queue = request.GET.get('queue')
    parameters = request.GET.get('parameters')

    from rq import use_connection
    from rq_scheduler import Scheduler
    from datetime import datetime

    use_connection()  # Use RQ's default Redis connection
    scheduler = Scheduler(queue)  # Get a scheduler for the requested queue

    # Build the kwargs once instead of duplicating the schedule() call.
    schedule_kwargs = dict(
        scheduled_time=datetime.now(),  # Time for first execution
        func=getattr(tasks, task),      # Function to be queued
        # BUG FIX: `period` arrives as a string from the query string;
        # rq-scheduler expects the interval as integer seconds (the sibling
        # view in this file already does int(period)).
        interval=int(period),
        # Repeat this number of times (None means repeat forever)
        repeat=None,
    )
    if parameters:
        schedule_kwargs['args'] = [int(parameters)]
    scheduler.schedule(**schedule_kwargs)
    return HttpResponse('Success')
Пример #18
0
def setup_scheduled_jobs(app):  # pragma: no cover
    """Setup scheduled jobs."""
    from datetime import datetime
    from pybossa.jobs import enqueue_periodic_jobs, schedule_job, \
        get_quarterly_date
    from rq_scheduler import Scheduler
    redis_conn = sentinel.master
    scheduler = Scheduler(queue_name='scheduled_jobs', connection=redis_conn)

    # Interval helpers, all in seconds.
    MINUTE = 60
    HOUR = 60 * 60
    MONTH = 30 * (24 * HOUR)
    first_quaterly_execution = get_quarterly_date(datetime.utcnow())

    # (queue name, run interval, per-run timeout) for each periodic enqueuer.
    plans = [
        ('super', 10 * MINUTE, 10 * MINUTE),
        ('high', 1 * HOUR, 10 * MINUTE),
        ('medium', 12 * HOUR, 10 * MINUTE),
        ('low', 24 * HOUR, 10 * MINUTE),
        ('monthly', 1 * MONTH, 30 * MINUTE),
    ]
    jobs = [dict(name=enqueue_periodic_jobs, args=[queue], kwargs={},
                 interval=interval, timeout=timeout)
            for queue, interval, timeout in plans]
    # The quarterly job additionally pins its first execution date.
    # ('quaterly' spelling matches the queue's actual name.)
    jobs.append(dict(name=enqueue_periodic_jobs, args=['quaterly'], kwargs={},
                     interval=(3 * MONTH), timeout=(30 * MINUTE),
                     scheduled_time=first_quaterly_execution))

    for job in jobs:
        schedule_job(job, scheduler)
Пример #19
0
    def get_context_data(self, **kwargs):
        """Build the template context listing jobs scheduled for this queue."""
        ctx = super(SchedulerDetails, self).get_context_data(**kwargs)
        if Scheduler is None:
            # rq_scheduler is not installed
            raise Http404
        # NOTE(review): rq_scheduler.Scheduler's first positional parameter
        # is normally queue_name, not a connection — confirm this Scheduler
        # alias really accepts a connection positionally.
        scheduler = Scheduler(self.connection)
        queue = Queue(self.kwargs['queue'], connection=self.connection)

        def cond(job_tuple):
            # Keep only jobs that originate from the queue being displayed.
            job, next_run = job_tuple
            return job.origin == queue.name

        jobs = filter(cond, scheduler.get_jobs(with_times=True))

        ctx.update({
            'queue':
            queue,
            'jobs':
            [serialize_scheduled_job(job, next_run) for job, next_run in jobs],
            'has_permission':
            True,
            'title':
            "Jobs scheduled on '%s' queue" % queue.name,
        })
        return ctx
Пример #20
0
def schedule():
    """Creates scheduler object."""
    build_scheduler = Scheduler(connection=WorkerQueues.connection)
    log.info('scheduler created')

    cleanup_interval = int(os.getenv('RENKU_SVC_CLEANUP_INTERVAL', 60))
    log.info('cleanup interval set to {}'.format(cleanup_interval))

    # Register both periodic cleanup jobs with identical timing settings.
    cleanup_jobs = (
        (CLEANUP_QUEUE_FILES, cache_files_cleanup),
        (CLEANUP_QUEUE_PROJECTS, cache_project_cleanup),
    )
    for queue_name, cleanup_func in cleanup_jobs:
        build_scheduler.schedule(
            scheduled_time=datetime.utcnow(),
            queue_name=queue_name,
            func=cleanup_func,
            interval=cleanup_interval,
            result_ttl=cleanup_interval + 1,
        )

    log_level = os.getenv('RQ_WORKER_LOG_LEVEL', 'INFO')
    setup_loghandlers(log_level)
    log.info('log level set to {}'.format(log_level))

    yield build_scheduler
Пример #21
0
def get_scheduled_tasks(request):
    """Django view: return all scheduled jobs as a JSON array."""
    # Get a scheduler for the "default" queue.
    scheduler = Scheduler(connection=tasks.redis_conn)

    jobdata = [
        {
            'job_id': job._id,
            'task': job.description,
            # interval defaults to 0 when the job carries no meta entry
            'period': job.meta.get("interval", 0),
            'args': job.args,
            'queue': "default",
        }
        for job in scheduler.get_jobs()
    ]

    return HttpResponse(json.dumps(jobdata), content_type='application/json')
Пример #22
0
def create_app(configfile=None):
    """Flask application factory: config, Sentry, Redis/RQ and blueprints.

    NOTE(review): ``configfile`` is currently unused; configuration comes
    from ``app.config.Config``.
    """
    app = Flask(__name__)

    app.config.from_object("app.config.Config")

    # Error reporting / performance tracing.
    sentry_sdk.init(app.config["SENTRY_URL"], integrations=[FlaskIntegration()],
                    traces_sample_rate=app.config["SENTRY_RATE"])

    # Background work: one RQ queue plus a scheduler on the same Redis.
    app.redis = redis.from_url(app.config['REDISTOGO_URL'])
    app.task_queue = rq.Queue(
        app.config['REDIS_QUEUES'], connection=app.redis, default_timeout=3600)
    # app.task_queue.failed_job_registry.requeue()
    app.scheduler = Scheduler(connection=app.redis, queue=app.task_queue)

    Bootstrap(app)

    from app.frontend import public_bp
    app.register_blueprint(public_bp)

    # The blocks below are alternative RQ monitoring dashboards, kept
    # disabled; left in place deliberately for easy re-enabling.
    # app.config.from_object(rqmonitor.defaults)
    # app.config['RQ_MONITOR_REDIS_URL'] = app.config['REDISTOGO_URL']
    # app.register_blueprint(rqmonitor.monitor_blueprint, url_prefix="/rq")

    # app.config.from_object(rq_dashboard.default_settings)
    # app.config['RQ_DASHBOARD_REDIS_URL'] = app.config['REDISTOGO_URL']
    # app.register_blueprint(rq_dashboard.blueprint, url_prefix="/rqd")

    # app.config.from_object(rq_scheduler_dashboard.default_settings)
    # app.register_blueprint(
    #     rq_scheduler_dashboard.blueprint, url_prefix="/rqsd")

    return app
Пример #23
0
def create_app():
    """Configure the global Flask app: database, Redis/RQ scheduler, extensions."""
    global app, db, migrate, login, bootstrap
    import instance.config as cfg

    app.config['DEBUG'] = cfg.DEBUG
    app.config['SECRET_KEY'] = 'secretkey'

    # Database set-up.
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///Info.db'
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

    # Initialize Redis and RQ; periodic tasks go to the 'ann_tasks' queue.
    app.config['REDIS_URL'] = 'redis://'
    app.redis = Redis.from_url(app.config['REDIS_URL'])
    app.scheduler = Scheduler('ann_tasks', connection=app.redis)

    # Bind the Flask extensions to the app instance.
    db.init_app(app)
    login.init_app(app)
    migrate.init_app(app, db)
    bootstrap.init_app(app)

    from app import models, views

    return app
Пример #24
0
def set_schedule(conn, task, cron_string):
    """Register ``task`` as a cron job with rq-scheduler and return its id.

    ``task`` provides job_id, title, method, queue and a JSON ``kwargs``
    string; the current frappe site and session user are merged into the
    keyword arguments so the job executes in the right context.
    (Python 2 module — note the print statement below.)
    """
    s = Scheduler(connection=conn)
    kw = json.loads(task.kwargs)

    kw.update({
        "site": frappe.local.site,
        "user": frappe.session.user
    })

    job = s.cron(
        id=task.job_id,
        description=task.title,
        # A cron string (e.g. "0 0 * * 0")
        cron_string=cron_string,
        # Function to be queued
        func=task.method,
        # Arguments passed into function when executed
        # args=[task.args],
        # Keyword arguments passed into function when executed
        kwargs=kw,
        # Repeat this number of times (None means repeat forever)
        repeat=None,
        # In which queue the job should be put in
        queue_name=task.queue
    )
    print " ** scheduled in '" + task.queue + "' queue, with id: " + job.get_id() + " at " + cron_string

    return job.get_id()
Пример #25
0
 def get_scheduler(name='default', interval=60):
     """
     Returns an RQ Scheduler instance using parameters defined in
     ``RQ_QUEUES``
     """
     connection = get_connection(name)
     return Scheduler(name, interval=interval, connection=connection)
Пример #26
0
def add_scheduled_task(request):
    """Django view: register a cron-style task with rq-scheduler.

    Query params: ``task`` (attribute on the ``tasks`` module),
    ``cron_string`` (URL-encoded cron expression), ``queue`` and an
    optional single ``parameters`` value passed positionally to the task.
    (Python 2 module — note urllib.unquote and the print statements.)
    """
    task = request.GET.get('task')
    cron_string = request.GET.get('cron_string')
    # The cron expression arrives URL-encoded (e.g. spaces as %20).
    cron_string = urllib.unquote(cron_string)

    print cron_string

    queue = request.GET.get('queue')
    parameters = request.GET.get('parameters')
    scheduler = Scheduler(queue_name=queue, connection=tasks.redis_conn)

    # Only pass args when a parameter was actually supplied.
    if parameters:
        args = (parameters,)
    else:
        args = None

    print tasks

    scheduler.cron(
        cron_string=cron_string,   # When to run, as a cron expression
        func=getattr(tasks, task),       # Function to be queued
        args=args,
        repeat=None,                      # Repeat this number of times (None means repeat forever),
        timeout=14400,
        queue_name=queue,
    )

    return HttpResponse('Success')
Пример #27
0
    def queue_job(self):
        """Enqueue a job based on self.q_mode and return the queued job.

        This will enqueue directly if q_mode == 'q' and use a scheduler if
        q_mode == 's'.  Raises ValueError for any other mode.
        """
        args = [return_true]
        # TTLs are expressed relative to the probe period.
        kwargs = {
            'job_ttl': 10 * self.probe_time,
            'job_result_ttl': 20 * self.probe_time
        }
        if self.q_mode == 'q':  # Need to remove job_ prefix for direct queue
            my_queue = Queue(self.qname, connection=Redis())
            if 'job_ttl' in kwargs:
                kwargs['ttl'] = kwargs.pop('job_ttl')
            if 'job_result_ttl' in kwargs:
                kwargs['result_ttl'] = kwargs.pop('job_result_ttl')
            launcher = my_queue.enqueue
        elif self.q_mode == 's':
            sched = Scheduler(queue_name=self.qname, connection=Redis())
            launcher = sched.enqueue_in
            # be careful as enqueue_in does not accept normal args/kwargs
            # like ttl and result_ttl; its first positional arg is the delay
            args.insert(0, datetime.timedelta(seconds=1))
        else:
            raise ValueError('Invalid q_mode: "%s"' % self.q_mode)
        job = launcher(*args, **kwargs)
        return job
Пример #28
0
def cancel_all(queue_name):
    """Cancel every scheduled job that originated from ``queue_name``."""
    scheduler = Scheduler()
    matching = (job for job in scheduler.get_jobs() if job.origin == queue_name)
    for job in matching:
        scheduler.cancel(job)

    return dict(status='OK')
Пример #29
0
    def __init__(self):
        """Bind a Redis connection, the stage/model queue and its scheduler."""
        self._redis_conn = Redis(**current_app.config['REDIS_SETTINGS'])
        self.queue_name = self.q_names[self.stage][self.model_name]

        self.queue = Queue(self.queue_name, connection=self._redis_conn)
        # BUG FIX: the attribute created above is ``self.queue``; the original
        # referenced the nonexistent ``self._queue`` here, which raised
        # AttributeError the moment the scheduler was constructed.
        self._rq_scheduler_instance = Scheduler(
            queue=self.queue,
            connection=self._redis_conn)
Пример #30
0
 def generate(self):
     """Schedule the daily lesson check, warning when Redis is unreachable."""
     scheduler = Scheduler(connection=Redis())
     try:
         scheduler.schedule(
             datetime.utcnow(),
             func=check_today_lessons,
             interval=60 * 60 * 24,  # once a day
         )
     except ConnectionError:
         print('Redis server is not available')