Example #1
def main():
    parser = argparse.ArgumentParser(description='Runs RQ scheduler')
    parser.add_argument('-H', '--host', default=os.environ.get('RQ_REDIS_HOST', 'localhost'), help="Redis host")
    parser.add_argument('-p', '--port', default=int(os.environ.get('RQ_REDIS_PORT', 6379)), type=int, help="Redis port number")
    parser.add_argument('-d', '--db', default=int(os.environ.get('RQ_REDIS_DB', 0)), type=int, help="Redis database")
    parser.add_argument('-P', '--password', default=os.environ.get('RQ_REDIS_PASSWORD'), help="Redis password")
    parser.add_argument('--url', '-u', default=os.environ.get('RQ_REDIS_URL')
        , help='URL describing Redis connection details. \
            Overrides other connection arguments if supplied.')
    parser.add_argument('-i', '--interval', default=60, type=int
        , help="How often the scheduler checks for new jobs to add to the \
            queue (in seconds).")
    parser.add_argument('--path', default='.', help='Specify the import path.')
    parser.add_argument('--pid', help='A filename to use for the PID file.', metavar='FILE')
    args = parser.parse_args()
    if args.path:
        sys.path = args.path.split(':') + sys.path
    if args.pid:
        pid = str(os.getpid())
        filename = args.pid
        with open(filename, 'w') as f:
            f.write(pid)
    if args.url is not None:
        connection = Redis.from_url(args.url)
    else:
        connection = Redis(args.host, args.port, args.db, args.password)
    scheduler = Scheduler(connection=connection, interval=args.interval)
    scheduler.run()
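This entry point, like most excerpts in this collection, is shown without its import header. A minimal sketch of the imports it appears to assume (hedged; the original module may organize them differently):

# Hedged: imports the argparse-based entry point above relies on.
import argparse
import os
import sys

from redis import Redis
from rq_scheduler import Scheduler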
Example #2
def main():
    parser = argparse.ArgumentParser(description='Runs RQ scheduler')
    parser.add_argument('-H', '--host', default='localhost', help="Redis host")
    parser.add_argument('-p',
                        '--port',
                        default=6379,
                        type=int,
                        help="Redis port number")
    parser.add_argument('-d',
                        '--db',
                        default=0,
                        type=int,
                        help="Redis database")
    parser.add_argument('-P',
                        '--password',
                        default=None,
                        help="Redis password")
    parser.add_argument(
        '-i',
        '--interval',
        default=60,
        type=int,
        help="How often the scheduler checks for new jobs to add to the \
            queue (in seconds).")
    args = parser.parse_args()
    connection = Redis(args.host, args.port, args.db, args.password)
    scheduler = Scheduler(connection=connection, interval=args.interval)
    scheduler.run()
Example #3
def main():
    parser = argparse.ArgumentParser(description="Runs RQ scheduler")
    parser.add_argument("-H", "--host", default="localhost", help="Redis host")
    parser.add_argument("-p", "--port", default=6379, type=int, help="Redis port number")
    parser.add_argument("-d", "--db", default=0, type=int, help="Redis database")
    parser.add_argument("-P", "--password", default=None, help="Redis password")
    args = parser.parse_args()
    connection = Redis(args.host, args.port, args.db, args.password)
    scheduler = Scheduler(connection=connection)
    scheduler.run()
Example #4
def main():
    parser = argparse.ArgumentParser(description="Runs RQ scheduler")
    parser.add_argument(
        "-b", "--burst", action="store_true", default=False, help="Run in burst mode (quit after all work is done)"
    )
    parser.add_argument("-H", "--host", default=os.environ.get("RQ_REDIS_HOST", "localhost"), help="Redis host")
    parser.add_argument(
        "-p", "--port", default=int(os.environ.get("RQ_REDIS_PORT", 6379)), type=int, help="Redis port number"
    )
    parser.add_argument("-d", "--db", default=int(os.environ.get("RQ_REDIS_DB", 0)), type=int, help="Redis database")
    parser.add_argument("-P", "--password", default=os.environ.get("RQ_REDIS_PASSWORD"), help="Redis password")
    parser.add_argument("--verbose", "-v", action="store_true", default=False, help="Show more output")
    parser.add_argument(
        "--url",
        "-u",
        default=os.environ.get("RQ_REDIS_URL"),
        help="URL describing Redis connection details. \
            Overrides other connection arguments if supplied.",
    )
    parser.add_argument(
        "-i",
        "--interval",
        default=60.0,
        type=float,
        help="How often the scheduler checks for new jobs to add to the \
            queue (in seconds, can be floating-point for more precision).",
    )
    parser.add_argument("--path", default=".", help="Specify the import path.")
    parser.add_argument("--pid", help="A filename to use for the PID file.", metavar="FILE")

    args = parser.parse_args()

    if args.path:
        sys.path = args.path.split(":") + sys.path

    if args.pid:
        pid = str(os.getpid())
        filename = args.pid
        with open(filename, "w") as f:
            f.write(pid)

    if args.url is not None:
        connection = Redis.from_url(args.url)
    else:
        connection = Redis(args.host, args.port, args.db, args.password)

    if args.verbose:
        level = "DEBUG"
    else:
        level = "INFO"
    setup_loghandlers(level)

    scheduler = Scheduler(connection=connection, interval=args.interval)
    scheduler.run(burst=args.burst)
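Example #4 adds a burst flag. The same behaviour can be exercised programmatically; a minimal sketch, assuming a default local Redis connection:

# Hedged sketch: run the scheduler once in burst mode, enqueueing whatever is
# currently due and then exiting instead of polling forever.
from redis import Redis
from rq_scheduler import Scheduler

scheduler = Scheduler(connection=Redis(), interval=60.0)
scheduler.run(burst=True)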
Example #5
def main():
    parser = argparse.ArgumentParser(description='Runs RQ scheduler')
    parser.add_argument('-H', '--host', default='localhost', help="Redis host")
    parser.add_argument('-p', '--port', default=6379, type=int, help="Redis port number")
    parser.add_argument('-d', '--db', default=0, type=int, help="Redis database")
    parser.add_argument('-P', '--password', default=None, help="Redis password")
    parser.add_argument('-i', '--interval', default=60, type=int
        , help="How often the scheduler checks for new jobs to add to the \
            queue (in seconds).")
    args = parser.parse_args()
    connection = Redis(args.host, args.port, args.db, args.password)
    scheduler = Scheduler(connection=connection, interval=args.interval)
    scheduler.run()
Example #6
def main():
    parser = argparse.ArgumentParser(description='Runs RQ scheduler')
    parser.add_argument('-b', '--burst', action='store_true', default=False, help='Run in burst mode (quit after all work is done)')
    parser.add_argument('-H', '--host', default=os.environ.get('RQ_REDIS_HOST', 'localhost'), help="Redis host")
    parser.add_argument('-p', '--port', default=int(os.environ.get('RQ_REDIS_PORT', 6379)), type=int, help="Redis port number")
    parser.add_argument('-d', '--db', default=int(os.environ.get('RQ_REDIS_DB', 0)), type=int, help="Redis database")
    parser.add_argument('-P', '--password', default=os.environ.get('RQ_REDIS_PASSWORD'), help="Redis password")
    parser.add_argument('--verbose', '-v', action='store_true', default=False, help='Show more output')
    parser.add_argument('--quiet', action='store_true', default=False, help='Show less output')
    parser.add_argument('--url', '-u', default=os.environ.get('RQ_REDIS_URL')
        , help='URL describing Redis connection details. \
            Overrides other connection arguments if supplied.')
    parser.add_argument('-i', '--interval', default=60.0, type=float
        , help="How often the scheduler checks for new jobs to add to the \
            queue (in seconds, can be floating-point for more precision).")
    parser.add_argument('--path', default='.', help='Specify the import path.')
    parser.add_argument('--pid', help='A filename to use for the PID file.', metavar='FILE')
    parser.add_argument('-j', '--job-class', help='Custom RQ Job class')
    parser.add_argument('-q', '--queue-class', help='Custom RQ Queue class')

    args = parser.parse_args()

    if args.path:
        sys.path = args.path.split(':') + sys.path

    if args.pid:
        pid = str(os.getpid())
        filename = args.pid
        with open(filename, 'w') as f:
            f.write(pid)

    if args.url is not None:
        connection = Redis.from_url(args.url)
    else:
        connection = Redis(args.host, args.port, args.db, args.password)

    if args.verbose:
        level = 'DEBUG'
    elif args.quiet:
        level = 'WARNING'
    else:
        level = 'INFO'
    setup_loghandlers(level)

    scheduler = Scheduler(connection=connection,
                          interval=args.interval,
                          job_class=args.job_class,
                          queue_class=args.queue_class)
    scheduler.run(burst=args.burst)
Example #7
    def test_enqueue_job(self):
        """
        When a scheduled job is enqueued, make sure:
        - Job is removed from the sorted set of scheduled jobs
        - "enqueued_at" attribute is properly set
        - Job appears in the right queue
        """
        now = datetime.utcnow()
        queue_name = 'foo'
        scheduler = Scheduler(connection=self.testconn, queue_name=queue_name)

        job = scheduler.enqueue_at(now, say_hello)
        self.scheduler.enqueue_job(job)
        self.assertNotIn(job, tl(self.testconn.zrange(scheduler.scheduled_jobs_key, 0, 10)))
        job = Job.fetch(job.id, connection=self.testconn)
        self.assertTrue(job.enqueued_at is not None)
        queue = scheduler.get_queue_for_job(job)
        self.assertIn(job, queue.jobs)
        queue = Queue.from_queue_key('rq:queue:{0}'.format(queue_name))
        self.assertIn(job, queue.jobs)
Example #8
def run_scheduler():
    conn_kwargs = {
        'db': app_settings.config.get('REDIS_DB') or 0,
        'password': app_settings.config.get('REDIS_PWD')
    }
    if all(
            app_settings.config.get(attr)
            for attr in ['REDIS_MASTER_DNS', 'REDIS_PORT']):
        master = StrictRedis(host=app_settings.config['REDIS_MASTER_DNS'],
                             port=app_settings.config['REDIS_PORT'],
                             **conn_kwargs)
    else:
        sentinel = Sentinel(app_settings.config['REDIS_SENTINEL'])
        master = sentinel.master_for(app_settings.config['REDIS_MASTER'],
                                     **conn_kwargs)
    scheduler = Scheduler(connection=master)
    while True:
        try:
            scheduler.run()
        except ValueError:
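            # Scheduler.run() raises ValueError when another active scheduler
            # instance is already registered in Redis; back off before retrying.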
            sleep(600)
Example #9
def main():
    parser = argparse.ArgumentParser(description='Runs RQ scheduler')
    parser.add_argument('-H', '--host', default='localhost', help="Redis host")
    parser.add_argument('-p',
                        '--port',
                        default=6379,
                        type=int,
                        help="Redis port number")
    parser.add_argument('-d',
                        '--db',
                        default=0,
                        type=int,
                        help="Redis database")
    parser.add_argument('-P',
                        '--password',
                        default=None,
                        help="Redis password")
    args = parser.parse_args()
    connection = Redis(args.host, args.port, args.db, args.password)
    scheduler = Scheduler(connection=connection)
    scheduler.run()
Example #10
def scheduler():
    """Run rq-scheduler"""
    redis_client = get_rq_redis_client()
    scheduler = Scheduler(connection=redis_client)

    # Create the RediSearch index and begin indexing immediately.
    # If a previous index exists, delete it.
    tasks.index(config.sites, rebuild_index=True)

    # Schedule an indexing job to run every 60 minutes.
    #
    # This performs an update-in-place using the existing RediSearch index.
    #
    # TODO: We currently don't try to detect if we have outdated content in
    # the index -- i.e. when we reindexed a site, a URL was leftover in the
    # index that we didn't find on this round of indexing.
    #
    # NOTE: We need to define this here, at the time we run this command,
    # because there is no deduplication in the cron() method, and this app has
    # no "exactly once" startup/initialization step that we could use to call
    # code only once.
    scheduler.cron(
        "*/60 * * * *",
        func=tasks.index,
        args=[config.sites, False],
        use_local_timezone=True,
        timeout=tasks.INDEXING_TIMEOUT
    )

    scheduler.run()
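The NOTE above points out that cron() does not deduplicate registrations. One possible guard, sketched with the get_jobs()/cancel() API used elsewhere in this collection (the exact func_name string depends on how tasks is imported):

# Hedged sketch: cancel previously registered copies of the indexing job before
# calling scheduler.cron() again, since cron() itself does not deduplicate.
for job in scheduler.get_jobs():
    if job.func_name.endswith('tasks.index'):
        scheduler.cancel(job)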
Example #11
def main():
    parser = argparse.ArgumentParser(description='Runs RQ scheduler')
    parser.add_argument('-H', '--host', default='localhost', help="Redis host")
    parser.add_argument('-p', '--port', default=6379, type=int, help="Redis port number")
    parser.add_argument('-d', '--db', default=0, type=int, help="Redis database")
    parser.add_argument('-P', '--password', default=None, help="Redis password")
    parser.add_argument('--url', '-u', default=None
        , help='URL describing Redis connection details. \
            Overrides other connection arguments if supplied.')
    parser.add_argument('-i', '--interval', default=60, type=int
        , help="How often the scheduler checks for new jobs to add to the \
            queue (in seconds).")
    parser.add_argument('--path', default='.', help='Specify the import path.')
    args = parser.parse_args()
    if args.path:
        sys.path = args.path.split(':') + sys.path
    if args.url is not None:
        connection = Redis.from_url(args.url)
    else:
        connection = Redis(args.host, args.port, args.db, args.password)
    scheduler = Scheduler(connection=connection, interval=args.interval)
    scheduler.run()
Example #12
def schedule(scheduler: Scheduler, redis_client: Redis,
             config: Optional[AppConfiguration] = None):
    queue = Queue(connection=redis_client)
    keys = Keys(prefix=config.key_prefix)

    for site in config.sites.values():
        job = queue.enqueue(tasks.index,
                            args=[site, config],
                            kwargs={
                                "force": True
                            },
                            job_timeout=tasks.INDEXING_TIMEOUT)

        # Track in-progress indexing tasks in a Redis set, so that we can
        # check if indexing is in-progress. Tasks should remove their
        # IDs from the set, so that when the set is empty, we think
        # indexing is done.
        redis_client.sadd(keys.startup_indexing_job_ids(), job.id)

        # Schedule an indexing job to run every 60 minutes.
        #
        # This performs an update-in-place using the existing RediSearch index.
        #
        # NOTE: We need to define this here, at the time we run this command,
        # because there is no deduplication in the cron() method, and this app has
        # no "exactly once" startup/initialization step that we could use to call
        # code only once.
        scheduler.cron(
            "*/60 * * * *",
            func=tasks.index,
            args=[site],
            kwargs={
                "force": False
            },
            use_local_timezone=True,
            timeout=tasks.INDEXING_TIMEOUT
        )

    redis_client.expire(keys.startup_indexing_job_ids(), tasks.INDEXING_TIMEOUT)
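The tracking-set comment above says tasks should remove their own IDs once they finish. A hedged sketch of that cleanup step inside the indexing task (mark_indexing_done is a hypothetical helper; keys and redis_client are the objects used above):

# Hedged sketch: drop this job's ID from the startup tracking set so an empty
# set means indexing has finished.
from rq import get_current_job

def mark_indexing_done(redis_client, keys):
    job = get_current_job()  # the RQ job currently being executed, if any
    if job is not None:
        redis_client.srem(keys.startup_indexing_job_ids(), job.id)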
Example #13
def run_scheduler():
    """Initializes a rq scheduler."""
    conn = Redis(
        host=app.config['RQ_DEFAULT_HOST'],
        port=app.config['RQ_DEFAULT_PORT'],
        db=0,
        password=app.config['RQ_DEFAULT_PASSWORD']
    )

    setup_loghandlers('INFO')
    scheduler = Scheduler(connection=conn, interval=60.0)
    for _ in range(10):
        try:
            scheduler.run()
        except ValueError as exc:
            if str(exc) == 'There\'s already an active RQ scheduler':
                scheduler.log.info(
                    'An RQ scheduler instance is already running. Retrying in '
                    '%d seconds.', 10,
                )
                time.sleep(10)
            else:
                raise exc
Example #14
def run_scheduler():
    """Initializes a rq scheduler."""
    conn = Redis(
        host=app.config['RQ_DEFAULT_HOST'],
        port=app.config['RQ_DEFAULT_PORT'],
        db=0,
        password=app.config['RQ_DEFAULT_PASSWORD']
    )

    setup_loghandlers('INFO')
    scheduler = Scheduler(connection=conn, interval=60.0)
    for _ in xrange(10):
        try:
            scheduler.run()
        except ValueError as exc:
            if str(exc) == 'There\'s already an active RQ scheduler':
                scheduler.log.info(
                    'An RQ scheduler instance is already running. Retrying in '
                    '%d seconds.', 10,
                )
                time.sleep(10)
            else:
                raise exc
Example #15
class Scheduler(object):
    JN = 1

    def __init__(self):
        from rq_scheduler.scheduler import Scheduler
        self.scheduler = Scheduler(connection=redis, interval=60)

    def check_jobs(self):
        now = datetime.utcnow()
        self.my_jobs = self.scheduler.get_jobs(with_times=True)
        ## check correct n of jobs
        if len(self.my_jobs) < self.JN:
            return False
        ## check expired jobs
        for j, t in self.my_jobs:
            if t <= now:
                return False
        return True

    def delete_jobs(self):
        for el in self.my_jobs:
            self.scheduler.cancel(el[0])
        self.my_jobs = []

    def create_jobs(self):
        # version and docs grab
        date = datetime.utcnow() + timedelta(seconds=15)
        job = self.scheduler.schedule(
            scheduled_time=date,
            func=update_base,
            interval=3600,
            repeat=None
        )
        self.my_jobs.append((job.id, date))
        # extensions grab
        date = datetime.utcnow() + timedelta(seconds=45)
        job = self.scheduler.schedule(
            scheduled_time=date,
            func=update_extensions,
            interval=7200,
            repeat=None
        )
        self.my_jobs.append((job.id, date))

    def run(self):
        self.check_jobs()
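        # NOTE: the boolean from check_jobs() is discarded here, so the
        # existing jobs are always cancelled and recreated on every run.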
        self.delete_jobs()
        self.create_jobs()
        self.scheduler.run()
Example #16
    def test_birth_and_death_registration(self):
        """
        When the scheduler registers its birth, besides creating a key, it should
        also set an expiry that's a few seconds longer than its polling
        interval so it automatically expires if the scheduler is unexpectedly
        terminated.
        """
        key = Scheduler.scheduler_key
        self.assertNotIn(key, tl(self.testconn.keys('*')))
        scheduler = Scheduler(connection=self.testconn, interval=20)
        scheduler.register_birth()
        self.assertIn(key, tl(self.testconn.keys('*')))
        self.assertEqual(self.testconn.ttl(key), 30)
        self.assertFalse(self.testconn.hexists(key, 'death'))
        self.assertRaises(ValueError, scheduler.register_birth)
        scheduler.register_death()
        self.assertTrue(self.testconn.hexists(key, 'death'))
Example #17
def main():
	conn      = redis.from_url(os.getenv('REDISCLOUD_URL', 'redis://localhost:6379'))
	scheduler = Scheduler(connection=conn, interval=30)
	scheduler.run()
Example #18
class TestScheduler(RQTestCase):

    def setUp(self):
        super(TestScheduler, self).setUp()
        self.scheduler = Scheduler(connection=self.testconn)
    
    def test_birth_and_death_registration(self):
        """
        When the scheduler registers its birth, besides creating a key, it should
        also set an expiry that's a few seconds longer than its polling
        interval so it automatically expires if the scheduler is unexpectedly
        terminated.
        """
        key = Scheduler.scheduler_key
        self.assertNotIn(key, tl(self.testconn.keys('*')))
        scheduler = Scheduler(connection=self.testconn, interval=20)
        scheduler.register_birth()
        self.assertIn(key, tl(self.testconn.keys('*')))
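        # The birth key's TTL should be the polling interval (20s above) plus a small grace period.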
        self.assertEqual(self.testconn.ttl(key), 30)
        self.assertFalse(self.testconn.hexists(key, 'death'))
        self.assertRaises(ValueError, scheduler.register_birth)
        scheduler.register_death()
        self.assertTrue(self.testconn.hexists(key, 'death'))

    def test_create_job(self):
        """
        Ensure that jobs are created properly.
        """
        job = self.scheduler._create_job(say_hello, args=(), kwargs={})
        job_from_queue = Job.fetch(job.id, connection=self.testconn)
        self.assertEqual(job, job_from_queue)
        self.assertEqual(job_from_queue.func, say_hello)

    def test_job_not_persisted_if_commit_false(self):
        """
        Ensure jobs are only saved to Redis if commit=True.
        """
        job = self.scheduler._create_job(say_hello, commit=False)
        self.assertEqual(self.testconn.hgetall(job.key), {})

    def test_create_scheduled_job(self):
        """
        Ensure that scheduled jobs are put in the scheduler queue with the right score
        """
        scheduled_time = datetime.utcnow()
        job = self.scheduler.enqueue_at(scheduled_time, say_hello)
        self.assertEqual(job, Job.fetch(job.id, connection=self.testconn))
        self.assertIn(job.id,
            tl(self.testconn.zrange(self.scheduler.scheduled_jobs_key, 0, 1)))
        self.assertEqual(self.testconn.zscore(self.scheduler.scheduled_jobs_key, job.id),
                         to_unix(scheduled_time))

    def test_enqueue_in(self):
        """
        Ensure that jobs have the right scheduled time.
        """
        right_now = datetime.utcnow()
        time_delta = timedelta(minutes=1)
        job = self.scheduler.enqueue_in(time_delta, say_hello)
        self.assertIn(job.id,
                      tl(self.testconn.zrange(self.scheduler.scheduled_jobs_key, 0, 1)))
        self.assertEqual(self.testconn.zscore(self.scheduler.scheduled_jobs_key, job.id),
                         to_unix(right_now + time_delta))
        time_delta = timedelta(hours=1)
        job = self.scheduler.enqueue_in(time_delta, say_hello)
        self.assertEqual(self.testconn.zscore(self.scheduler.scheduled_jobs_key, job.id),
                         to_unix(right_now + time_delta))

    def test_get_jobs(self):
        """
        Ensure get_jobs() returns all jobs until the specified time.
        """
        now = datetime.utcnow()
        job = self.scheduler.enqueue_at(now, say_hello)
        self.assertIn(job, self.scheduler.get_jobs(now))
        future_time = now + timedelta(hours=1)
        job = self.scheduler.enqueue_at(future_time, say_hello)
        self.assertIn(job, self.scheduler.get_jobs(timedelta(hours=1, seconds=1)))
        self.assertIn(job, [j[0] for j in self.scheduler.get_jobs(with_times=True)])
        self.assertIsInstance(self.scheduler.get_jobs(with_times=True)[0][1], datetime)
        self.assertNotIn(job, self.scheduler.get_jobs(timedelta(minutes=59, seconds=59)))
    
    def test_get_jobs_to_queue(self):
        """
        Ensure that jobs scheduled in the future are not queued.
        """
        now = datetime.utcnow()
        job = self.scheduler.enqueue_at(now, say_hello)
        self.assertIn(job, self.scheduler.get_jobs_to_queue())
        future_time = now + timedelta(hours=1)
        job = self.scheduler.enqueue_at(future_time, say_hello)
        self.assertNotIn(job, self.scheduler.get_jobs_to_queue())

    def test_enqueue_job(self):
        """
        When a scheduled job is enqueued, make sure:
        - Job is removed from the sorted set of scheduled jobs
        - "enqueued_at" attribute is properly set
        - Job appears in the right queue
        """
        now = datetime.utcnow()
        queue_name = 'foo'
        scheduler = Scheduler(connection=self.testconn, queue_name=queue_name)

        job = scheduler.enqueue_at(now, say_hello)
        self.scheduler.enqueue_job(job)
        self.assertNotIn(job, tl(self.testconn.zrange(scheduler.scheduled_jobs_key, 0, 10)))
        job = Job.fetch(job.id, connection=self.testconn)
        self.assertTrue(job.enqueued_at is not None)
        queue = scheduler.get_queue_for_job(job)
        self.assertIn(job, queue.jobs)
        queue = Queue.from_queue_key('rq:queue:{0}'.format(queue_name))
        self.assertIn(job, queue.jobs)

    def test_job_membership(self):
        now = datetime.utcnow()
        job = self.scheduler.enqueue_at(now, say_hello)
        self.assertIn(job, self.scheduler)
        self.assertIn(job.id, self.scheduler)
        self.assertNotIn("non-existing-job-id", self.scheduler)

    def test_cancel_scheduled_job(self):
        """
        When a scheduled job is canceled, make sure:
        - Job is removed from the sorted set of scheduled jobs
        """
        # schedule a job to be enqueued one minute from now
        time_delta = timedelta(minutes=1)
        job = self.scheduler.enqueue_in(time_delta, say_hello)
        # cancel the scheduled job and check that it's gone from the set
        self.scheduler.cancel(job)
        self.assertNotIn(job.id, tl(self.testconn.zrange(
            self.scheduler.scheduled_jobs_key, 0, 1)))

    def test_change_execution_time(self):
        """
        Ensure that calling ``change_execution_time`` updates the job's score.
        """
        job = self.scheduler.enqueue_at(datetime.utcnow(), say_hello)
        new_date = datetime(2010, 1, 1)
        self.scheduler.change_execution_time(job, new_date)
        self.assertEqual(to_unix(new_date),
            self.testconn.zscore(self.scheduler.scheduled_jobs_key, job.id))
        self.scheduler.cancel(job)
        self.assertRaises(ValueError, self.scheduler.change_execution_time, job, new_date)

    def test_args_kwargs_are_passed_correctly(self):
        """
        Ensure that arguments and keyword arguments are properly saved to jobs.
        """
        job = self.scheduler.enqueue_at(datetime.utcnow(), simple_addition, 1, 1, 1)
        self.assertEqual(job.args, (1, 1, 1))
        job = self.scheduler.enqueue_at(datetime.utcnow(), simple_addition, z=1, y=1, x=1)
        self.assertEqual(job.kwargs, {'x': 1, 'y': 1, 'z': 1})
        job = self.scheduler.enqueue_at(datetime.utcnow(), simple_addition, 1, z=1, y=1)
        self.assertEqual(job.kwargs, {'y': 1, 'z': 1})
        self.assertEqual(job.args, (1,))

        time_delta = timedelta(minutes=1)
        job = self.scheduler.enqueue_in(time_delta, simple_addition, 1, 1, 1)
        self.assertEqual(job.args, (1, 1, 1))
        job = self.scheduler.enqueue_in(time_delta, simple_addition, z=1, y=1, x=1)
        self.assertEqual(job.kwargs, {'x': 1, 'y': 1, 'z': 1})
        job = self.scheduler.enqueue_in(time_delta, simple_addition, 1, z=1, y=1)
        self.assertEqual(job.kwargs, {'y': 1, 'z': 1})
        self.assertEqual(job.args, (1,))

    def test_enqueue_is_deprecated(self):
        """
        Ensure .enqueue() throws a DeprecationWarning
        """
        with warnings.catch_warnings(record=True) as w:
            # Enable all warnings
            warnings.simplefilter("always")
            job = self.scheduler.enqueue(datetime.utcnow(), say_hello)
            self.assertEqual(1, len(w))
            self.assertEqual(w[0].category, DeprecationWarning)

    def test_enqueue_periodic(self):
        """
        Ensure .enqueue_periodic() throws a DeprecationWarning
        """
        with warnings.catch_warnings(record=True) as w:
            # Enable all warnings
            warnings.simplefilter("always")
            job = self.scheduler.enqueue_periodic(datetime.utcnow(), 1, None, say_hello)
            self.assertEqual(1, len(w))
            self.assertEqual(w[0].category, DeprecationWarning)

    def test_interval_and_repeat_persisted_correctly(self):
        """
        Ensure that interval and repeat attributes get correctly saved in Redis.
        """
        job = self.scheduler.schedule(datetime.utcnow(), say_hello, interval=10, repeat=11)
        job_from_queue = Job.fetch(job.id, connection=self.testconn)
        self.assertEqual(job_from_queue.meta['interval'], 10)
        self.assertEqual(job_from_queue.meta['repeat'], 11)

    def test_repeat_without_interval_raises_error(self):
        # Ensure that an error is raised if repeat is specified without interval
        def create_job():
            self.scheduler.schedule(datetime.utcnow(), say_hello, repeat=11)
        self.assertRaises(ValueError, create_job)

    def test_job_with_intervals_get_rescheduled(self):
        """
        Ensure jobs with interval attribute are put back in the scheduler
        """
        time_now = datetime.utcnow()
        interval = 10
        job = self.scheduler.schedule(time_now, say_hello, interval=interval)
        self.scheduler.enqueue_job(job)
        self.assertIn(job.id,
            tl(self.testconn.zrange(self.scheduler.scheduled_jobs_key, 0, 1)))
        self.assertEqual(self.testconn.zscore(self.scheduler.scheduled_jobs_key, job.id),
                         to_unix(time_now) + interval)

        # Now the same thing using enqueue_periodic
        job = self.scheduler.enqueue_periodic(time_now, interval, None, say_hello)
        self.scheduler.enqueue_job(job)
        self.assertIn(job.id,
            tl(self.testconn.zrange(self.scheduler.scheduled_jobs_key, 0, 1)))
        self.assertEqual(self.testconn.zscore(self.scheduler.scheduled_jobs_key, job.id),
                         to_unix(time_now) + interval)

    def test_job_with_repeat(self):
        """
        Ensure jobs with repeat attribute are put back in the scheduler
        X (repeat) number of times
        """
        time_now = datetime.utcnow()
        interval = 10
        # If job is repeated once, the job shouldn't be put back in the queue
        job = self.scheduler.schedule(time_now, say_hello, interval=interval, repeat=1)
        self.scheduler.enqueue_job(job)
        self.assertNotIn(job.id,
            tl(self.testconn.zrange(self.scheduler.scheduled_jobs_key, 0, 1)))

        # If job is repeated twice, it should only be put back in the queue once
        job = self.scheduler.schedule(time_now, say_hello, interval=interval, repeat=2)
        self.scheduler.enqueue_job(job)
        self.assertIn(job.id,
            tl(self.testconn.zrange(self.scheduler.scheduled_jobs_key, 0, 1)))
        self.scheduler.enqueue_job(job)
        self.assertNotIn(job.id,
            tl(self.testconn.zrange(self.scheduler.scheduled_jobs_key, 0, 1)))

        time_now = datetime.utcnow()
        # Now the same thing using enqueue_periodic
        job = self.scheduler.enqueue_periodic(time_now, interval, 1, say_hello)
        self.scheduler.enqueue_job(job)
        self.assertNotIn(job.id,
            tl(self.testconn.zrange(self.scheduler.scheduled_jobs_key, 0, 1)))

        # If job is repeated twice, it should only be put back in the queue once
        job = self.scheduler.enqueue_periodic(time_now, interval, 2, say_hello)
        self.scheduler.enqueue_job(job)
        self.assertIn(job.id,
            tl(self.testconn.zrange(self.scheduler.scheduled_jobs_key, 0, 1)))
        self.scheduler.enqueue_job(job)
        self.assertNotIn(job.id,
            tl(self.testconn.zrange(self.scheduler.scheduled_jobs_key, 0, 1)))

    def test_missing_jobs_removed_from_scheduler(self):
        """
        Ensure jobs that don't exist when queued are removed from the scheduler.
        """
        job = self.scheduler.schedule(datetime.utcnow(), say_hello)
        job.cancel()
        self.scheduler.get_jobs_to_queue()
        self.assertNotIn(job.id, tl(self.testconn.zrange(
            self.scheduler.scheduled_jobs_key, 0, 1)))

    def test_periodic_jobs_sets_ttl(self):
        """
        Ensure periodic jobs set result_ttl to infinite.
        """
        job = self.scheduler.schedule(datetime.utcnow(), say_hello, interval=5)
        job_from_queue = Job.fetch(job.id, connection=self.testconn)
        self.assertEqual(job_from_queue.result_ttl, -1)

    def test_run(self):
        """
        Check correct signal handling in Scheduler.run().
        """
        def send_stop_signal():
            """
            Sleep for 1 second, then send an INT signal to ourselves, so the
            signal handler installed by scheduler.run() is called.
            """
            time.sleep(1)
            os.kill(os.getpid(), signal.SIGINT)
        thread = Thread(target=send_stop_signal)
        thread.start()
        self.assertRaises(SystemExit, self.scheduler.run)
        thread.join()

    def test_scheduler_w_o_explicit_connection(self):
        """
        Ensure instantiating Scheduler w/o explicit connection works.
        """
        s = Scheduler()
        self.assertEqual(s.connection, self.testconn)

    def test_no_functions_from__main__module(self):
        """
        Ensure functions from the __main__ module are not accepted for scheduling.
        """
        def dummy():
            return 1
        # Fake __main__ module function
        dummy.__module__ = "__main__"
        self.assertRaises(ValueError, self.scheduler._create_job, dummy)
Example #19
    def setUp(self):
        super(TestScheduler, self).setUp()
        self.scheduler = Scheduler(connection=self.testconn)
Example #20
def main():
    scheduler = Scheduler(connection=scheduler_conn, interval=10)
    scheduler.run()
Example #21
import app.logging
from app.rich_menu import RichMenu
from app.scheduling import ReminderJob, ReminderWorker
from rasa.lineagent import LineAgent
from rasa.store import scheduler_store, tracker_store

logger = logging.getLogger(__name__)

logger.debug("Starting worker")

line_access_token = os.getenv('LINE_CHANNEL_ACCESS_TOKEN', None)
rich_menu = RichMenu(line_access_token)
rich_menu.setup()

agent = LineAgent.load("models/dialogue",
                       interpreter=RasaNLUInterpreter("models/current/nlu"),
                       tracker_store=tracker_store)

workerKwargs = {"rich_menu": rich_menu, "agent": agent}

listen = ['high', 'default', 'low']
scheduler = Scheduler(connection=scheduler_store,
                      interval=60,
                      job_class=ReminderJob)
Process(target=scheduler.run).start()
with Connection(scheduler_store):
    worker = ReminderWorker(map(Queue, listen), job_class=ReminderJob)
    logger.info("Worker is ready.")
    worker.work(workerKwargs=workerKwargs)
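Example #21 runs the scheduler in a child process so the same script can also run a worker. A stripped-down sketch of that pattern, assuming the standard multiprocessing module and a plain Redis connection:

# Hedged sketch: run rq-scheduler in a background process while the parent
# process continues (for example, to run an RQ worker).
from multiprocessing import Process

from redis import Redis
from rq_scheduler import Scheduler

scheduler = Scheduler(connection=Redis(), interval=60)
Process(target=scheduler.run).start()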
Example #22
def main():

    # set up minimal argparser to get -c option
    parser = argparse.ArgumentParser(
        add_help=False  # help will be picked up later when we redefine parser
    )
    parser.add_argument('-c', "--config", help='Use an rq config file')
    args, remaining_argv = parser.parse_known_args()

    # config, pass 1: read environment vars
    config = {
        KEY_HOST : os.environ.get('RQ_REDIS_HOST', 'localhost'),
        KEY_PORT : int(os.environ.get('RQ_REDIS_PORT', 6379)),
        KEY_DB : int(os.environ.get('RQ_REDIS_DB', 0)),
        KEY_PASSWORD : os.environ.get('RQ_REDIS_PASSWORD'),
        KEY_URL : os.environ.get('RQ_REDIS_URL')
    }

    # config, pass 2: read config file
    if args.config:
        # bit of a hack, this, but does allow helpers.read_config_file to work...
        sys.path.insert( 0, os.path.dirname(os.path.realpath(args.config)) )
        rq_config = helpers.read_config_file( args.config )
        # map rq settings to our own config dict
        config[KEY_URL] = rq_config.get("REDIS_URL", config[KEY_URL])
        config[KEY_HOST] = rq_config.get("REDIS_HOST", config[KEY_HOST])
        config[KEY_PORT] = rq_config.get("REDIS_PORT", config[KEY_PORT])
        config[KEY_DB] = rq_config.get("REDIS_DB", config[KEY_DB])
        config[KEY_PASSWORD] = rq_config.get("REDIS_PASSWORD",config[KEY_PASSWORD])

    # config, pass 3: read commandline args. overwrites any other config.
    parser = argparse.ArgumentParser(
        parents=[parser]  # inherit from existing parser
    )
    parser.add_argument('-H', '--host', default=config[KEY_HOST], help="Redis host")
    parser.add_argument('-p', '--port', default=config[KEY_PORT], type=int, help="Redis port number")
    parser.add_argument('-d', '--db', default=config[KEY_DB], type=int, help="Redis database")
    parser.add_argument('-P', '--password', default=config[KEY_PASSWORD], help="Redis password")
    parser.add_argument('--verbose', '-v', action='store_true', default=False, help='Show more output')
    parser.add_argument('--url', '-u', default=config[KEY_URL]
        , help='URL describing Redis connection details. \
            Overrides other connection arguments if supplied.')
    parser.add_argument('-i', '--interval', default=60.0, type=float
        , help="How often the scheduler checks for new jobs to add to the \
            queue (in seconds, can be floating-point for more precision).")
    parser.add_argument('--path', default='.', help='Specify the import path.')
    parser.add_argument('--pid', help='A filename to use for the PID file.', metavar='FILE')
    
    args = parser.parse_args( remaining_argv )
    
    if args.path:
        sys.path = args.path.split(':') + sys.path
    
    if args.pid:
        pid = str(os.getpid())
        filename = args.pid
        with open(filename, 'w') as f:
            f.write(pid)
    
    if args.url is not None:
        connection = Redis.from_url(args.url)
    else:
        connection = Redis(args.host, args.port, args.db, args.password)

    if args.verbose:
        level = 'DEBUG'
    else:
        level = 'INFO'
    setup_loghandlers(level)

    scheduler = Scheduler(connection=connection, interval=args.interval)
    scheduler.run()
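The -c option above points at an RQ-style settings module read by helpers.read_config_file. A hedged sketch of such a file, using only the keys the second configuration pass looks for (values are placeholders):

# settings.py -- hypothetical config module passed as `-c settings`
REDIS_HOST = 'localhost'
REDIS_PORT = 6379
REDIS_DB = 0
REDIS_PASSWORD = None
# REDIS_URL, when set, overrides the individual settings above:
# REDIS_URL = 'redis://localhost:6379/0'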
Example #23
def main():
    parser = argparse.ArgumentParser(description='Runs RQ scheduler')
    parser.add_argument('-H',
                        '--host',
                        default=os.environ.get('RQ_REDIS_HOST', 'localhost'),
                        help="Redis host")
    parser.add_argument('-p',
                        '--port',
                        default=int(os.environ.get('RQ_REDIS_PORT', 6379)),
                        type=int,
                        help="Redis port number")
    parser.add_argument('-d',
                        '--db',
                        default=int(os.environ.get('RQ_REDIS_DB', 0)),
                        type=int,
                        help="Redis database")
    parser.add_argument('-P',
                        '--password',
                        default=os.environ.get('RQ_REDIS_PASSWORD'),
                        help="Redis password")
    parser.add_argument('--verbose',
                        '-v',
                        action='store_true',
                        default=False,
                        help='Show more output')
    parser.add_argument('--url',
                        '-u',
                        default=os.environ.get('RQ_REDIS_URL'),
                        help='URL describing Redis connection details. \
            Overrides other connection arguments if supplied.')
    parser.add_argument(
        '-i',
        '--interval',
        default=60.0,
        type=float,
        help="How often the scheduler checks for new jobs to add to the \
            queue (in seconds, can be floating-point for more precision).")
    parser.add_argument('--path', default='.', help='Specify the import path.')
    parser.add_argument('--pid',
                        help='A filename to use for the PID file.',
                        metavar='FILE')

    args = parser.parse_args()

    if args.path:
        sys.path = args.path.split(':') + sys.path

    if args.pid:
        pid = str(os.getpid())
        filename = args.pid
        with open(filename, 'w') as f:
            f.write(pid)

    if args.url is not None:
        connection = Redis.from_url(args.url)

    elif os.getenv('REDISTOGO_URL'):
        redis_url = os.getenv('REDISTOGO_URL')
        if not redis_url:
            raise RuntimeError('Set up Redis To Go first.')

        urlparse.uses_netloc.append('redis')
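        # `urlparse` here is Python 2's module (or an alias for urllib.parse on Python 3).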
        url = urlparse.urlparse(redis_url)
        connection = Redis(host=url.hostname,
                           port=url.port,
                           db=0,
                           password=url.password)
    elif args.host is not None:
        connection = Redis(args.host, args.port, args.db, args.password)
    else:
        connection = Redis()

    if args.verbose:
        level = 'DEBUG'
    else:
        level = 'INFO'
    setup_loghandlers(level)

    scheduler = Scheduler(connection=connection, interval=args.interval)
    scheduler.run()
Example #24
def scheduler():
    """Run rq-scheduler"""
    redis_client = get_rq_redis_client()
    scheduler = Scheduler(connection=redis_client)
    schedule(scheduler, redis_client, config)
    scheduler.run()
Example #25
    def __init__(self):
        from rq_scheduler.scheduler import Scheduler
        self.scheduler = Scheduler(connection=redis, interval=60)
Example #26
def main():
    parser = argparse.ArgumentParser(description='Runs RQ scheduler')
    parser.add_argument('-H', '--host',
                        default=os.environ.get('RQ_REDIS_HOST', 'localhost'),
                        help="Redis host")
    parser.add_argument('-p', '--port',
                        default=int(os.environ.get('RQ_REDIS_PORT', 6379)),
                        type=int,
                        help="Redis port number")
    parser.add_argument('-d', '--db',
                        default=int(os.environ.get('RQ_REDIS_DB', 0)),
                        type=int, help="Redis database")
    parser.add_argument('-P', '--password',
                        default=os.environ.get('RQ_REDIS_PASSWORD'),
                        help="Redis password")
    parser.add_argument('--verbose', '-v',
                        action='store_true',
                        default=False,
                        help='Show more output')
    parser.add_argument('--url', '-u',
                        default=os.environ.get('RQ_REDIS_URL'),
                        help='URL describing Redis connection details. \
            Overrides other connection arguments if supplied.')
    parser.add_argument('-i', '--interval',
                        default=60.0,
                        type=float,
                        help="How often the scheduler checks for new jobs to add to the \
            queue (in seconds, can be floating-point for more precision).")
    parser.add_argument('--path',
                        default='.',
                        help='Specify the import path.')
    parser.add_argument('--pid',
                        help='A filename to use for the PID file.',
                        metavar='FILE')

    args = parser.parse_args()

    if args.path:
        sys.path = args.path.split(':') + sys.path

    if args.pid:
        pid = str(os.getpid())
        filename = args.pid
        with open(filename, 'w') as f:
            f.write(pid)

    if args.url is not None:
        connection = Redis.from_url(args.url)

    elif os.getenv('REDISTOGO_URL'):
        redis_url = os.getenv('REDISTOGO_URL')
        if not redis_url:
            raise RuntimeError('Set up Redis To Go first.')

        urlparse.uses_netloc.append('redis')
        url = urlparse.urlparse(redis_url)
        connection = Redis(host=url.hostname, port=url.port, db=0, password=url.password)
    elif args.host is not None:
        connection = Redis(args.host, args.port, args.db, args.password)
    else:
        connection = Redis()

    if args.verbose:
        level = 'DEBUG'
    else:
        level = 'INFO'
    setup_loghandlers(level)

    scheduler = Scheduler(connection=connection, interval=args.interval)
    scheduler.run()
Example #27
import logging
import os

import redis
from rq.utils import ColorizingStreamHandler
from rq_scheduler.scheduler import Scheduler as RQScheduler

redis_url = os.getenv('REDISTO_URL', 'redis://localhost:6379')
conn = redis.from_url(redis_url)


def setup_loghandlers(level='INFO'):
    logger = logging.getLogger('scheduler')
    if not logger.handlers:
        logger.setLevel(level)
        formatter = logging.Formatter(fmt='%(asctime)s %(message)s',
                                      datefmt='%H:%M:%S')
        handler = ColorizingStreamHandler()
        handler.setFormatter(formatter)
        logger.addHandler(handler)

setup_loghandlers()
logger = logging.getLogger('scheduler')


if __name__ == '__main__':
    scheduler = RQScheduler(connection=conn)
    logger.info('Starting scheduler')
    scheduler.run()
Example #28
def main():
    parser = argparse.ArgumentParser(description='Runs RQ scheduler')
    parser.add_argument('-b',
                        '--burst',
                        action='store_true',
                        default=False,
                        help='Run in burst mode (quit after all work is done)')
    parser.add_argument('-H',
                        '--host',
                        default=os.environ.get('RQ_REDIS_HOST', 'localhost'),
                        help="Redis host")
    parser.add_argument('-p',
                        '--port',
                        default=int(os.environ.get('RQ_REDIS_PORT', 6379)),
                        type=int,
                        help="Redis port number")
    parser.add_argument('-d',
                        '--db',
                        default=int(os.environ.get('RQ_REDIS_DB', 0)),
                        type=int,
                        help="Redis database")
    parser.add_argument('-P',
                        '--password',
                        default=os.environ.get('RQ_REDIS_PASSWORD'),
                        help="Redis password")
    parser.add_argument('--verbose',
                        '-v',
                        action='store_true',
                        default=False,
                        help='Show more output')
    parser.add_argument('--quiet',
                        action='store_true',
                        default=False,
                        help='Show less output')
    parser.add_argument('--url',
                        '-u',
                        default=os.environ.get('RQ_REDIS_URL'),
                        help='URL describing Redis connection details. \
            Overrides other connection arguments if supplied.')
    parser.add_argument(
        '-i',
        '--interval',
        default=60.0,
        type=float,
        help="How often the scheduler checks for new jobs to add to the \
            queue (in seconds, can be floating-point for more precision).")
    parser.add_argument('--path', default='.', help='Specify the import path.')
    parser.add_argument('--pid',
                        help='A filename to use for the PID file.',
                        metavar='FILE')
    parser.add_argument('-j', '--job-class', help='Custom RQ Job class')
    parser.add_argument('-q', '--queue-class', help='Custom RQ Queue class')

    args = parser.parse_args()

    if args.path:
        sys.path = args.path.split(':') + sys.path

    if args.pid:
        pid = str(os.getpid())
        filename = args.pid
        with open(filename, 'w') as f:
            f.write(pid)

    if args.url is not None:
        connection = Redis.from_url(args.url)
    else:
        connection = Redis(args.host, args.port, args.db, args.password)

    if args.verbose:
        level = 'DEBUG'
    elif args.quiet:
        level = 'WARNING'
    else:
        level = 'INFO'
    setup_loghandlers(level)

    scheduler = Scheduler(connection=connection,
                          interval=args.interval,
                          job_class=args.job_class,
                          queue_class=args.queue_class)
    scheduler.run(burst=args.burst)
Example #29
    def test_scheduler_w_o_explicit_connection(self):
        """
        Ensure instantiating Scheduler w/o explicit connection works.
        """
        s = Scheduler()
        self.assertEqual(s.connection, self.testconn)
Example #30
# -*- coding: utf-8 -*-
"""
    IncetOps.rqscheduler_worker
    ~~~~~~~~~~~~~~

    The worker process for the RQ-Scheduler queue.

    :copyright: (c) 2018 by staugur.
    :license: MIT, see LICENSE for more details.
"""

if __name__ == '__main__':
    import setproctitle
    from redis import from_url
    from config import GLOBAL, REDIS
    from rq_scheduler.scheduler import Scheduler
    setproctitle.setproctitle(GLOBAL['ProcessName'] + '.rqscheduler')
    scheduler = Scheduler(connection=from_url(REDIS), interval=1)
    scheduler.run()