Example #1
0
 def test_job_data(self):
     # Scheduling in the past makes the job immediately pending, and any
     # extra kwargs are stored alongside the job id.
     schedule_job('fòo', schedule_in=-1, other_arg='lol')
     expected = [{'id': 'fòo', 'other_arg': 'lol'}]
     self.assertEqual(list(pending_jobs()), expected)
     # Consuming the pending jobs drains the queue.
     self.assertEqual(len(list(pending_jobs())), 0)
Example #2
0
    def test_legacy_redis(self):
        """Jobs written through a raw redis client are visible to readers."""
        legacy = redis.Redis(**REDIS)
        for index in range(10):
            schedule_job('foo{0}'.format(index), schedule_in=-1,
                         connection=legacy)

        # reschedule_in=-1 puts each fetched job straight back into the
        # pending set, so both reads below observe all ten jobs.
        self.assertEqual(
            len(list(pending_jobs(connection=legacy, reschedule_in=-1))), 10)
        self.assertEqual(
            len(list(pending_jobs(connection=r, reschedule_in=-1))), 10)
Example #3
0
    def test_remove_keys(self):
        """Scheduling with None values drops those keys from the job data."""
        schedule_job('foobar', schedule_in=-1, attr='stuff', other=12,
                     thing='blah blah')
        self.assertEqual(list(pending_jobs()),
                         [{'id': 'foobar', 'attr': 'stuff', 'other': 12,
                           'thing': 'blah blah'}])

        # Re-scheduling with explicit None removes 'attr' and 'other'.
        schedule_job('foobar', schedule_in=-1, attr=None, other=None,
                     thing='blah blah')
        self.assertEqual(list(pending_jobs()),
                         [{'id': 'foobar', 'thing': 'blah blah'}])
Example #4
0
    def test_legacy_redis(self):
        # Schedule ten overdue jobs through a plain redis.Redis client.
        legacy_conn = redis.Redis(**REDIS)
        for n in range(10):
            schedule_job('foo{0}'.format(n),
                         schedule_in=-1,
                         connection=legacy_conn)

        # Rescheduling in the past keeps every job pending, so the same ten
        # jobs are seen through the legacy connection and through r.
        first = list(pending_jobs(connection=legacy_conn, reschedule_in=-1))
        self.assertEqual(len(first), 10)

        second = list(pending_jobs(connection=r, reschedule_in=-1))
        self.assertEqual(len(second), 10)
Example #5
0
    def handle_sentry(self, *args, **kwargs):
        """Update a single feed (pk given) or enqueue all due feed updates.

        With a positional argument, it is treated as a UniqueFeed pk and
        that feed is updated synchronously. Without arguments, due jobs are
        pulled from the scheduler and enqueued as update_feed tasks, unless
        the worker queues are already saturated.
        """
        if args:
            pk = args[0]
            feed = UniqueFeed.objects.get(pk=pk)
            return update_feed(
                feed.url, etag=feed.etag, modified=feed.modified,
                subscribers=feed.subscribers,
                request_timeout=feed.request_timeout,
                backoff_factor=feed.backoff_factor, error=feed.error,
                link=feed.link, title=feed.title, hub=feed.hub,
            )

        # Cap the batch at twice the unmuted feed count spread over the
        # update period (never less than 2).
        ratio = UniqueFeed.UPDATE_PERIOD // 5
        limit = max(
            1, UniqueFeed.objects.filter(muted=False).count() // ratio) * 2

        # Avoid queueing if the default or store queue is already full
        conn = redis.Redis(**settings.REDIS)
        for name in ['default', 'store']:
            queue = Queue(name=name, connection=conn)
            if queue.count > limit:
                logger.info(
                    "{0} queue longer than limit, skipping update "
                    "({1} > {2})".format(name, queue.count, limit))
                # BUG FIX: without this return the jobs below were enqueued
                # anyway, defeating the saturation check above (the other
                # versions of this command do return here).
                return

        jobs = pending_jobs(limit=limit,
                            reschedule_in=UniqueFeed.UPDATE_PERIOD * 60)
        for job in jobs:
            # 'id' is the feed url; remaining keys become update_feed kwargs.
            url = job.pop('id')
            job.pop('last_update', None)
            enqueue(update_feed, args=[url], kwargs=job,
                    timeout=UniqueFeed.TIMEOUT_BASE * job.get(
                        'backoff_factor', 1))
Example #6
0
    def test_schedule_limit_items_count(self):
        """limit= caps how many due jobs one pending_jobs() call yields."""
        for index in range(100):
            schedule_job('foo{0}'.format(index), schedule_in=-1)

        self.assertEqual(len(list(pending_jobs(limit=10))), 10)
        # The 90 jobs that were not returned remain scheduled.
        self.assertEqual(len(list(scheduled_jobs())), 90)
Example #7
0
    def test_schedule_limit_items_count(self):
        # Queue up 100 overdue jobs.
        for job_number in range(100):
            schedule_job('foo{0}'.format(job_number), schedule_in=-1)

        fetched = list(pending_jobs(limit=10))
        self.assertEqual(len(fetched), 10)
        # Everything beyond the limit stays behind in the schedule.
        remaining = list(scheduled_jobs())
        self.assertEqual(len(remaining), 90)
Example #8
0
    def handle_sentry(self, *args, **kwargs):
        """Run a single feed update, or enqueue all currently due feeds.

        When a positional argument is given it is taken as a UniqueFeed pk
        and that feed is updated synchronously. With no arguments, due jobs
        are fetched from the scheduler and enqueued as update_feed tasks,
        unless the worker queues already look saturated.
        """
        if args:
            pk = args[0]
            feed = UniqueFeed.objects.get(pk=pk)
            data = feed.job_details
            return update_feed(
                feed.url,
                etag=data.get("etag"),
                modified=data.get("modified"),
                subscribers=data["subscribers"],
                backoff_factor=data["backoff_factor"],
                error=data.get("error"),
                link=data.get("link"),
                title=data.get("title"),
                hub=data.get("hub"),
            )

        # Batch size: roughly twice the unmuted feed count divided by a
        # fifth of the update period, never less than 2.
        ratio = UniqueFeed.UPDATE_PERIOD // 5
        limit = max(1, UniqueFeed.objects.filter(muted=False).count() // ratio) * 2

        # Avoid queueing if the default or store queue is already full
        conn = get_redis_connection()
        for name in ["default", "store"]:
            queue = Queue(name=name, connection=conn)
            if queue.count > limit:
                logger.info(
                    "{0} queue longer than limit, skipping update " "({1} > {2})".format(name, queue.count, limit)
                )
                return

        # Fetch due jobs, pushing them UPDATE_PERIOD minutes into the future
        # so they get retried even if a worker dies before finishing them.
        jobs = pending_jobs(limit=limit, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60, connection=get_redis_connection())
        for job in jobs:
            # 'id' is the feed url; the remaining keys pass through as
            # update_feed kwargs.
            url = job.pop("id")
            job.pop("last_update", None)
            enqueue(update_feed, args=[url], kwargs=job, timeout=UniqueFeed.TIMEOUT_BASE * job.get("backoff_factor", 1))
Example #9
0
    def test_reschedule(self):
        schedule_job('baz', schedule_in=-1)
        schedule_job('foo', schedule_in=10)
        # Only the overdue job is pending; fetching it pushes it 20s out.
        self.assertEqual(list(pending_jobs(reschedule_in=20)),
                         [{'id': 'baz'}])

        # The schedule is ordered by due time: foo at +10s, baz at +20s.
        timetable = list(scheduled_jobs(with_times=True))
        foo_entry = timetable[0]
        baz_entry = timetable[1]
        self.assertEqual(foo_entry[0], 'foo')
        self.assertEqual(baz_entry[0], 'baz')
        self.assertEqual(foo_entry[1] + 10, baz_entry[1])
Example #10
0
    def test_reschedule(self):
        schedule_job('baz', schedule_in=-1)
        schedule_job('foo', schedule_in=10)
        jobs = list(pending_jobs(reschedule_in=20))
        # 'baz' was due; 'foo' is still 10 seconds away.
        self.assertEqual(jobs, [{'id': 'baz'}])

        # with_times=True yields (id, timestamp) pairs sorted by due time.
        first, second = list(scheduled_jobs(with_times=True))[0], \
            list(scheduled_jobs(with_times=True))[1]
        self.assertEqual(first[0], 'foo')
        self.assertEqual(second[0], 'baz')
        # baz was rescheduled 20s out, foo only 10s: a 10 second gap.
        self.assertEqual(first[1] + 10, second[1])
Example #11
0
    def test_remove_keys(self):
        # Extra kwargs are persisted with the job.
        schedule_job('foobar', schedule_in=-1, attr='stuff', other=12,
                     thing='blah blah')
        self.assertEqual(
            list(pending_jobs()),
            [{'id': 'foobar', 'attr': 'stuff', 'other': 12,
              'thing': 'blah blah'}])

        # A None value deletes the corresponding key from the job data.
        schedule_job('foobar', schedule_in=-1, attr=None, other=None,
                     thing='blah blah')
        self.assertEqual(list(pending_jobs()),
                         [{'id': 'foobar', 'thing': 'blah blah'}])
Example #12
0
    def handle_sentry(self, *args, **kwargs):
        """Update one feed synchronously (pk given) or enqueue due jobs."""
        if args:
            feed = UniqueFeed.objects.get(pk=args[0])
            details = feed.job_details
            return update_feed(
                feed.url,
                etag=details.get('etag'),
                modified=details.get('modified'),
                subscribers=details.get('subscribers', 1),
                backoff_factor=details['backoff_factor'],
                error=details.get('error'),
                link=details.get('link'),
                title=details.get('title'),
                hub=details.get('hub'),
            )

        # Batch size: twice the unmuted feed count over a fifth of the
        # update period, at least 2.
        ratio = UniqueFeed.UPDATE_PERIOD // 5
        active_feeds = UniqueFeed.objects.filter(muted=False).count()
        limit = max(1, active_feeds // ratio) * 2

        # Avoid queueing if the default or store queue is already full
        redis_conn = get_redis_connection()
        for queue_name in ['default', 'store']:
            rq_queue = Queue(name=queue_name, connection=redis_conn)
            if rq_queue.count > limit:
                logger.info("queue longer than limit, skipping update",
                            queue=queue_name,
                            count=rq_queue.count,
                            limit=limit)
                return

        pending = pending_jobs(limit=limit,
                               reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                               connection=get_redis_connection())
        for job in pending:
            # 'id' is the feed url; remaining keys are update_feed kwargs.
            url = job.pop('id')
            job.pop('last_update', None)
            timeout = UniqueFeed.TIMEOUT_BASE * job.get('backoff_factor', 1)
            enqueue(update_feed, args=[url], kwargs=job, timeout=timeout)
Example #13
0
    def handle_sentry(self, *args, **kwargs):
        """Update one feed synchronously (pk given) or enqueue due jobs.

        A positional argument is treated as a UniqueFeed pk; that feed is
        updated in-process. Otherwise due jobs are fetched and enqueued as
        update_feed tasks, unless the worker queues are saturated.
        """
        if args:
            pk = args[0]
            feed = UniqueFeed.objects.get(pk=pk)
            data = feed.job_details
            return update_feed(
                feed.url, etag=data.get('etag'), modified=data.get('modified'),
                subscribers=data.get('subscribers', 1),
                backoff_factor=data['backoff_factor'], error=data.get('error'),
                link=data.get('link'), title=data.get('title'),
                hub=data.get('hub'),
            )

        # Batch size: twice the unmuted feed count divided by a fifth of
        # the update period, never below 2.
        ratio = UniqueFeed.UPDATE_PERIOD // 5
        limit = max(
            1, UniqueFeed.objects.filter(muted=False).count() // ratio) * 2

        # Avoid queueing if the default or store queue is already full
        conn = get_redis_connection()
        for name in ['default', 'store']:
            queue = Queue(name=name, connection=conn)
            if queue.count > limit:
                logger.info(
                    "queue longer than limit, skipping update",
                    queue=name, count=queue.count, limit=limit)
                return

        # Pull due jobs, rescheduling them UPDATE_PERIOD minutes ahead so
        # they are retried if a worker crashes mid-update.
        jobs = pending_jobs(limit=limit,
                            reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                            connection=get_redis_connection())
        for job in jobs:
            # 'id' is the feed url; remaining keys become update_feed kwargs.
            url = job.pop('id')
            job.pop('last_update', None)
            enqueue(update_feed, args=[url], kwargs=job,
                    timeout=UniqueFeed.TIMEOUT_BASE * job.get(
                        'backoff_factor', 1))
    def test_update_feeds(self):
        """Batch-update selection: check which scheduled feeds come back from
        pending_jobs() under various last_update / backoff conditions,
        without issuing any SQL queries during retrieval."""
        u = UniqueFeed.objects.create(
            url='http://example.com/feed0',
        )
        u.schedule()
        # Mark feed0 as last updated an hour ago so it is due.
        patch_job(
            u.url,
            last_update=(timezone.now() - timedelta(hours=1)).strftime('%s')
        )
        u.schedule()
        UniqueFeed.objects.create(
            url='http://example.com/feed1',
        ).schedule()
        # Retrieval must be redis-only: zero database queries.
        with self.assertNumQueries(0):
            jobs = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                connection=get_redis_connection()))
            self.assertEqual(len(jobs), 1)
            self.assertEqual(jobs[0]['id'], u.url)

        # Deleting the feed and its job leaves nothing pending.
        u.delete()
        delete_job(u.url, connection=get_redis_connection())
        with self.assertNumQueries(0):
            urls = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                connection=get_redis_connection()))
            self.assertEqual(len(urls), 0)

        # A backoff factor of 10 keeps the feed out of the batch even though
        # it is 28 hours overdue.
        u = UniqueFeed.objects.create(
            url='http://example.com/backoff',
        )
        u.schedule()
        patch_job(
            u.url, backoff_factor=10,
            last_update=(timezone.now() - timedelta(hours=28)).strftime('%s')
        )
        u.schedule()
        with self.assertNumQueries(0):
            jobs = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                connection=get_redis_connection()))
            self.assertEqual(len(jobs), 0)
        # Lowering the backoff factor to 9 makes it due again.
        patch_job(u.url, backoff_factor=9)
        u.schedule()
        with self.assertNumQueries(0):
            jobs = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                connection=get_redis_connection()))
            self.assertEqual(len(jobs), 1)
            self.assertEqual(jobs[0]['id'], u.url)
            self.assertEqual(
                UniqueFeed.TIMEOUT_BASE * jobs[0]['backoff_factor'], 180)

        # A freshly-updated feed is not picked up.
        patch_job(u.url, last_update=int(time.time()))
        u.schedule()
        with self.assertNumQueries(0):
            jobs = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                connection=get_redis_connection()))
            self.assertEqual(len(jobs), 0)

        UniqueFeed.objects.create(
            url='http://example.com/lol',
        )

        # Push every feed's last update 54 hours into the past.
        for u in UniqueFeed.objects.all():
            patch_job(u.url, last_update=(
                timezone.now() - timedelta(hours=54)).strftime('%s'))

        # No subscribers -> deletion
        with self.assertNumQueries(2):
            call_command('delete_unsubscribed')
        self.assertEqual(UniqueFeed.objects.count(), 0)

        # Two feeds due for 2 hours: both are returned.
        u = UniqueFeed.objects.create(
            url='http://example.com/foo',
        )
        u.schedule()
        patch_job(
            u.url,
            last_update=(timezone.now() - timedelta(hours=2)).strftime('%s'))
        u.schedule()
        u = UniqueFeed.objects.create(
            url='http://example.com/bar',
        )
        u.schedule()
        patch_job(
            u.url,
            last_update=(timezone.now() - timedelta(hours=2)).strftime('%s'))
        u.schedule()
        jobs = list(pending_jobs(
            limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
            connection=get_redis_connection()))
        self.assertEqual(len(jobs), 2)
        # NOTE(review): 'bar' before 'foo' — presumably the scheduler's
        # ordering; confirm against the rache implementation.
        self.assertEqual(jobs[0]['id'], 'http://example.com/bar')
        self.assertEqual(jobs[1]['id'], 'http://example.com/foo')
Example #15
0
 def test_schedule_non_unicode_data(self):
     # Byte strings that are not valid unicode must survive a round-trip.
     raw_etag = b'2013/6/29 \xa4W\xa4\xc8 09:51:31'
     schedule_job('bad', schedule_in=-1, etag=raw_etag)
     fetched = list(pending_jobs())[0]
     self.assertEqual(fetched['etag'], raw_etag)
Example #16
0
 def test_schedule_non_unicode_data(self):
     schedule_job('bad', schedule_in=-1,
                  etag=b'2013/6/29 \xa4W\xa4\xc8 09:51:31')
     # The non-decodable byte string is returned unchanged.
     jobs = list(pending_jobs())
     self.assertEqual(jobs[0]['etag'], b'2013/6/29 \xa4W\xa4\xc8 09:51:31')
Example #17
0
 def test_ordering(self):
     # 'bar' is more overdue than 'foo', so it must come out first.
     schedule_job('foo', schedule_in=-1)
     schedule_job('bar', schedule_in=-2)
     pending = list(pending_jobs())
     self.assertEqual(pending[0]['id'], 'bar')
     self.assertEqual(pending[1]['id'], 'foo')
Example #18
0
    def test_custom_connection(self):
        """schedule_job/pending_jobs honour an explicit connection."""
        for index in range(10):
            schedule_job('foo{0}'.format(index), schedule_in=-1, connection=r)

        self.assertEqual(len(list(pending_jobs(connection=r))), 10)
Example #19
0
 def test_schedule_in_future(self):
     # A job due in 10 seconds must not be pending yet.
     schedule_job('lol', schedule_in=10)
     self.assertEqual(list(pending_jobs()), [])
     delete_job('lol')
Example #20
0
 def test_reschedule_existing(self):
     # The second call pushes the already-scheduled job into the future.
     schedule_job('lol', schedule_in=-1)
     schedule_job('lol', schedule_in=10)
     self.assertEqual(list(pending_jobs()), [])
     # Re-scheduling in the past makes the job due immediately.
     schedule_job('lol', schedule_in=-1)
     self.assertEqual(len(list(pending_jobs())), 1)
Example #21
0
 def test_job_data(self):
     schedule_job('fòo', schedule_in=-1, other_arg='lol')
     # The job dict carries its id plus any extra kwargs.
     jobs = list(pending_jobs())
     self.assertEqual(jobs, [{'id': 'fòo', 'other_arg': 'lol'}])
     # Fetching the pending jobs consumed the queue.
     self.assertEqual(len(list(pending_jobs())), 0)
Example #22
0
 def test_job_deletion(self):
     schedule_job('bar', schedule_in=-1)
     # Deleting an overdue job removes it before it becomes pending.
     delete_job('bar')
     self.assertEqual(list(pending_jobs()), [])
Example #23
0
    def test_custom_connection(self):
        # Ten overdue jobs scheduled on the explicit connection r.
        for n in range(10):
            schedule_job('foo{0}'.format(n), schedule_in=-1, connection=r)

        found = list(pending_jobs(connection=r))
        self.assertEqual(len(found), 10)
Example #24
0
 def test_job_deletion(self):
     # A deleted job never shows up as pending.
     schedule_job('bar', schedule_in=-1)
     delete_job('bar')
     self.assertFalse(list(pending_jobs()))
Example #25
0
 def test_schedule_in_future(self):
     schedule_job('lol', schedule_in=10)
     # Not due for another 10 seconds: nothing pending.
     self.assertFalse(list(pending_jobs()))
     # Clean up so the job does not leak into other tests.
     delete_job('lol')
Example #26
0
    def test_update_feeds(self):
        """Batch-update selection: schedule feeds in various states and check
        which ones pending_jobs() hands back, without any SQL during
        retrieval."""
        u = UniqueFeed.objects.create(
            url='http://example.com/feed0',
            last_update=timezone.now() - timedelta(hours=1),
        )
        u.schedule()
        UniqueFeed.objects.create(
            url='http://example.com/feed1',
        ).schedule()
        # Job retrieval must be redis-only: zero database queries.
        with self.assertNumQueries(0):
            jobs = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60))
            self.assertEqual(len(jobs), 1)
            self.assertEqual(jobs[0]['id'], u.url)

        # Deleting the feed and its job leaves nothing pending.
        u.delete()
        delete_job(u.url)
        with self.assertNumQueries(0):
            urls = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60))
            self.assertEqual(len(urls), 0)

        # backoff_factor=10 keeps the feed out of the batch despite being
        # 28 hours overdue.
        u = UniqueFeed.objects.create(
            url='http://example.com/backoff',
            last_update=timezone.now() - timedelta(hours=28),
            backoff_factor=10,
        )
        with self.assertNumQueries(0):
            jobs = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60))
            self.assertEqual(len(jobs), 0)
        # Lowering the backoff factor to 9 makes it due again.
        u.backoff_factor = 9
        u.save()
        u.schedule()
        with self.assertNumQueries(0):
            jobs = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60))
            self.assertEqual(len(jobs), 1)
            self.assertEqual(jobs[0]['id'], u.url)
            self.assertEqual(
                UniqueFeed.TIMEOUT_BASE * jobs[0]['backoff_factor'], 180)

        # Freshly-updated feeds are not due.
        UniqueFeed.objects.update(last_update=timezone.now())
        with self.assertNumQueries(0):
            jobs = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60))
            self.assertEqual(len(jobs), 0)

        UniqueFeed.objects.create(
            url='http://example.com/lol',
        )

        UniqueFeed.objects.update(
            last_update=timezone.now() - timedelta(hours=54))

        # No subscribers -> deletion
        with self.assertNumQueries(2):
            call_command('delete_unsubscribed')
        self.assertEqual(UniqueFeed.objects.count(), 0)

        # Two feeds due for 2 hours: both are returned.
        UniqueFeed.objects.create(
            url='http://example.com/foo',
            last_update=timezone.now() - timedelta(hours=2),
        ).schedule()
        UniqueFeed.objects.create(
            url='http://example.com/bar',
            last_update=timezone.now() - timedelta(hours=2),
            last_loop=timezone.now() - timedelta(hours=2),
        ).schedule()
        jobs = list(pending_jobs(
            limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60))
        self.assertEqual(len(jobs), 2)
        # NOTE(review): 'bar' before 'foo' — presumably the scheduler's
        # ordering; confirm against the implementation.
        self.assertEqual(jobs[0]['id'], 'http://example.com/bar')
        self.assertEqual(jobs[1]['id'], 'http://example.com/foo')
Example #27
0
    def test_update_feeds(self):
        """Batch-update selection: check which scheduled feeds come back from
        pending_jobs() under various last_update / backoff conditions,
        without any SQL during retrieval."""
        u = UniqueFeed.objects.create(url='http://example.com/feed0', )
        u.schedule()
        # Mark feed0 as last updated an hour ago so it is due.
        patch_job(u.url,
                  last_update=(timezone.now() -
                               timedelta(hours=1)).strftime('%s'))
        u.schedule()
        UniqueFeed.objects.create(url='http://example.com/feed1', ).schedule()
        # Retrieval must be redis-only: zero database queries.
        with self.assertNumQueries(0):
            jobs = list(
                pending_jobs(limit=5,
                             reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                             connection=get_redis_connection()))
            self.assertEqual(len(jobs), 1)
            self.assertEqual(jobs[0]['id'], u.url)

        # Deleting the feed and its job leaves nothing pending.
        u.delete()
        delete_job(u.url, connection=get_redis_connection())
        with self.assertNumQueries(0):
            urls = list(
                pending_jobs(limit=5,
                             reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                             connection=get_redis_connection()))
            self.assertEqual(len(urls), 0)

        # backoff_factor=10 keeps the feed out of the batch despite being
        # 28 hours overdue.
        u = UniqueFeed.objects.create(url='http://example.com/backoff', )
        u.schedule()
        patch_job(u.url,
                  backoff_factor=10,
                  last_update=(timezone.now() -
                               timedelta(hours=28)).strftime('%s'))
        u.schedule()
        with self.assertNumQueries(0):
            jobs = list(
                pending_jobs(limit=5,
                             reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                             connection=get_redis_connection()))
            self.assertEqual(len(jobs), 0)
        # Lowering the backoff factor to 9 makes it due again.
        patch_job(u.url, backoff_factor=9)
        u.schedule()
        with self.assertNumQueries(0):
            jobs = list(
                pending_jobs(limit=5,
                             reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                             connection=get_redis_connection()))
            self.assertEqual(len(jobs), 1)
            self.assertEqual(jobs[0]['id'], u.url)
            self.assertEqual(
                UniqueFeed.TIMEOUT_BASE * jobs[0]['backoff_factor'], 180)

        # A freshly-updated feed is not picked up.
        patch_job(u.url, last_update=int(time.time()))
        u.schedule()
        with self.assertNumQueries(0):
            jobs = list(
                pending_jobs(limit=5,
                             reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                             connection=get_redis_connection()))
            self.assertEqual(len(jobs), 0)

        UniqueFeed.objects.create(url='http://example.com/lol', )

        # Push every feed's last update 54 hours into the past.
        for u in UniqueFeed.objects.all():
            patch_job(u.url,
                      last_update=(timezone.now() -
                                   timedelta(hours=54)).strftime('%s'))

        # No subscribers -> deletion
        with self.assertNumQueries(2):
            call_command('delete_unsubscribed')
        self.assertEqual(UniqueFeed.objects.count(), 0)

        # Two feeds due for 2 hours: both are returned.
        u = UniqueFeed.objects.create(url='http://example.com/foo', )
        u.schedule()
        patch_job(u.url,
                  last_update=(timezone.now() -
                               timedelta(hours=2)).strftime('%s'))
        u.schedule()
        u = UniqueFeed.objects.create(url='http://example.com/bar', )
        u.schedule()
        patch_job(u.url,
                  last_update=(timezone.now() -
                               timedelta(hours=2)).strftime('%s'))
        u.schedule()
        jobs = list(
            pending_jobs(limit=5,
                         reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                         connection=get_redis_connection()))
        self.assertEqual(len(jobs), 2)
        # NOTE(review): 'bar' before 'foo' — presumably the scheduler's
        # ordering; confirm against the implementation.
        self.assertEqual(jobs[0]['id'], 'http://example.com/bar')
        self.assertEqual(jobs[1]['id'], 'http://example.com/foo')
Example #28
0
 def test_reschedule_existing(self):
     schedule_job('lol', schedule_in=-1)
     # Scheduling again overwrites the due time, so nothing is pending.
     schedule_job('lol', schedule_in=10)
     self.assertFalse(list(pending_jobs()))
     # Moving it back into the past makes it due right away.
     schedule_job('lol', schedule_in=-1)
     self.assertEqual(len(list(pending_jobs())), 1)
Example #29
0
 def test_ordering(self):
     schedule_job('foo', schedule_in=-1)
     schedule_job('bar', schedule_in=-2)
     # Pending jobs come back most-overdue first.
     pending = list(pending_jobs())
     first, second = pending[0], pending[1]
     self.assertEqual(first['id'], 'bar')
     self.assertEqual(second['id'], 'foo')