Example #1
    def handle_redirection(self, old_url, new_url):
        logger.info("feed moved", old_url=old_url, new_url=new_url)
        # Re-point existing Feed rows at the new location.
        Feed.objects.filter(url=old_url).update(url=new_url)
        unique, created = self.get_or_create(url=new_url)
        if created:
            # Brand-new unique feed: put it on the update schedule and
            # fetch its favicon outside of the test suite.
            unique.schedule()
            if not settings.TESTS:
                enqueue_favicon(new_url)
        # Drop the stale entry and its scheduler job.
        self.filter(url=old_url).delete()
        delete_job(old_url, connection=get_redis_connection())
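The helpers used across these examples (schedule_job, pending_jobs, delete_job, patch_job, scheduled_jobs) are never defined in the snippets themselves; from their usage they behave like a small Redis-backed job scheduler keyed by feed URL. The following is a minimal in-memory sketch of that assumed contract, for illustration only, not the real implementation:

import time

# Hypothetical in-memory stand-in for the scheduler helpers used in these
# examples. The real helpers take a Redis connection; here the connection
# argument is accepted for signature parity and ignored.
_JOBS = {}  # job_id -> metadata dict, including a 'schedule_at' timestamp


def schedule_job(job_id, schedule_in, connection=None, **metadata):
    # A negative schedule_in dates the job in the past, i.e. immediately due.
    _JOBS[job_id] = dict(metadata, schedule_at=time.time() + schedule_in)


def patch_job(job_id, connection=None, **metadata):
    # Update job metadata; an existing job keeps its scheduled time.
    _JOBS.setdefault(job_id, {'schedule_at': time.time()}).update(metadata)


def delete_job(job_id, connection=None):
    _JOBS.pop(job_id, None)


def scheduled_jobs(connection=None):
    return list(_JOBS)


def pending_jobs(limit=None, reschedule_in=None, connection=None):
    # Yield due jobs, soonest first, optionally pushing each one
    # reschedule_in seconds into the future as it is handed out.
    now = time.time()
    due = sorted((j for j in _JOBS if _JOBS[j]['schedule_at'] <= now),
                 key=lambda j: _JOBS[j]['schedule_at'])
    for job_id in due[:limit]:
        job = dict(_JOBS[job_id], id=job_id)
        del job['schedule_at']
        if reschedule_in is not None:
            _JOBS[job_id]['schedule_at'] = now + reschedule_in
        yield job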
Example #2
    def handle_redirection(self, old_url, new_url):
        logger.debug(u"{0} moved to {1}".format(old_url, new_url))
        Feed.objects.filter(url=old_url).update(url=new_url)
        unique, created = self.get_or_create(url=new_url)
        if created:
            unique.schedule()
            if not settings.TESTS:
                enqueue_favicon(new_url)
        self.filter(url=old_url).delete()
        delete_job(old_url, connection=get_redis_connection())
Example #3
    def handle_sentry(self, *args, **kwargs):
        existing_jobs = set(scheduled_jobs())
        target = set(UniqueFeed.objects.filter(muted=False).values_list(
            'url', flat=True))

        to_delete = existing_jobs - target
        if to_delete:
            logger.info(
                "Deleting {0} jobs from the scheduler".format(len(to_delete)))
            for job_id in to_delete:
                delete_job(job_id)

        to_add = target - existing_jobs
        if to_add:
            logger.info("Adding {0} jobs to the scheduler".format(len(to_add)))
            for chunk in chunked(to_add, 10000):
                uniques = UniqueFeed.objects.filter(url__in=chunk)
                for unique in uniques:
                    unique.schedule()
Example #4
    def handle_sentry(self, *args, **kwargs):
        connection = get_redis_connection()
        existing_jobs = set(scheduled_jobs(connection=connection))
        target = set(UniqueFeed.objects.filter(muted=False).values_list(
            'url', flat=True))

        to_delete = existing_jobs - target
        if to_delete:
            logger.info("deleting jobs from the scheduler",
                        count=len(to_delete))
            for job_id in to_delete:
                delete_job(job_id, connection=connection)

        to_add = target - existing_jobs
        if to_add:
            logger.info("adding jobs to the scheduler", count=len(to_add))
            for chunk in chunked(to_add, 10000):
                uniques = UniqueFeed.objects.filter(url__in=chunk)
                for unique in uniques:
                    unique.schedule()
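chunked() is likewise not shown in these snippets. A typical implementation, and my assumption of its behavior here, slices any iterable into lists of at most n items so that the url__in queries stay bounded:

from itertools import islice


def chunked(iterable, n):
    # Yield successive lists of at most n items from any iterable.
    it = iter(iterable)
    while True:
        chunk = list(islice(it, n))
        if not chunk:
            return
        yield chunk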
Example #5
    def mute_feed(self, url, reason):
        delete_job(url, connection=get_redis_connection())
        self.filter(url=url).update(muted=True, error=reason)
Example #6
    def mute_feed(self, url, reason):
        delete_job(url, connection=get_redis_connection())
        self.filter(url=url).update(muted=True, error=reason)
Example #7
    def test_update_feeds(self):
        u = UniqueFeed.objects.create(
            url='http://example.com/feed0',
        )
        u.schedule()
        patch_job(
            u.url,
            last_update=(timezone.now() - timedelta(hours=1)).strftime('%s')
        )
        u.schedule()
        UniqueFeed.objects.create(
            url='http://example.com/feed1',
        ).schedule()
        with self.assertNumQueries(0):
            jobs = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                connection=get_redis_connection()))
            self.assertEqual(len(jobs), 1)
            self.assertEqual(jobs[0]['id'], u.url)

        u.delete()
        delete_job(u.url, connection=get_redis_connection())
        with self.assertNumQueries(0):
            urls = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                connection=get_redis_connection()))
            self.assertEqual(len(urls), 0)

        u = UniqueFeed.objects.create(
            url='http://example.com/backoff',
        )
        u.schedule()
        patch_job(
            u.url, backoff_factor=10,
            last_update=(timezone.now() - timedelta(hours=28)).strftime('%s')
        )
        u.schedule()
        with self.assertNumQueries(0):
            jobs = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                connection=get_redis_connection()))
            self.assertEqual(len(jobs), 0)
        patch_job(u.url, backoff_factor=9)
        u.schedule()
        with self.assertNumQueries(0):
            jobs = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                connection=get_redis_connection()))
            self.assertEqual(len(jobs), 1)
            self.assertEqual(jobs[0]['id'], u.url)
            self.assertEqual(
                UniqueFeed.TIMEOUT_BASE * jobs[0]['backoff_factor'], 180)

        patch_job(u.url, last_update=int(time.time()))
        u.schedule()
        with self.assertNumQueries(0):
            jobs = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                connection=get_redis_connection()))
            self.assertEqual(len(jobs), 0)

        UniqueFeed.objects.create(
            url='http://example.com/lol',
        )

        for u in UniqueFeed.objects.all():
            patch_job(u.url, last_update=(
                timezone.now() - timedelta(hours=54)).strftime('%s'))

        # No subscribers -> deletion
        with self.assertNumQueries(2):
            call_command('delete_unsubscribed')
        self.assertEqual(UniqueFeed.objects.count(), 0)

        u = UniqueFeed.objects.create(
            url='http://example.com/foo',
        )
        u.schedule()
        patch_job(
            u.url,
            last_update=(timezone.now() - timedelta(hours=2)).strftime('%s'))
        u.schedule()
        u = UniqueFeed.objects.create(
            url='http://example.com/bar',
        )
        u.schedule()
        patch_job(
            u.url,
            last_update=(timezone.now() - timedelta(hours=2)).strftime('%s'))
        u.schedule()
        jobs = list(pending_jobs(
            limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
            connection=get_redis_connection()))
        self.assertEqual(len(jobs), 2)
        self.assertEqual(jobs[0]['id'], 'http://example.com/bar')
        self.assertEqual(jobs[1]['id'], 'http://example.com/foo')
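The backoff assertions in this test pin down the constants: a feed that is 28 hours stale is not yet due at backoff_factor 10 but becomes due at 9, and the expected request timeout is TIMEOUT_BASE * 9 == 180, which implies TIMEOUT_BASE is 20 seconds. A quick check of the timeout arithmetic, with the constant as derived (it is not shown in the snippets):

# Derived from the assertion above: TIMEOUT_BASE * 9 == 180.
TIMEOUT_BASE = 20  # assumed per-request timeout unit, in seconds

for backoff_factor in (1, 9, 10):
    print(backoff_factor, TIMEOUT_BASE * backoff_factor)
# 1 -> 20, 9 -> 180 (matches the test), 10 -> 200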
Example #8
    def test_schedule_in_future(self):
        schedule_job('lol', schedule_in=10)
        self.assertEqual(len(list(pending_jobs())), 0)
        delete_job('lol')
Example #9
    def test_job_deletion(self):
        schedule_job('bar', schedule_in=-1)
        delete_job('bar')
        self.assertEqual(len(list(pending_jobs())), 0)
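schedule_job with schedule_in=-1 dates the job one second in the past, so it is immediately pending; the test passes only because delete_job removes it before pending_jobs is consulted. A sketch of the complementary round trip, in the same test style and using only the helpers seen above:

    def test_job_round_trip(self):
        # Scheduled in the past, so immediately pending.
        schedule_job('baz', schedule_in=-1)
        jobs = list(pending_jobs())
        self.assertEqual([job['id'] for job in jobs], ['baz'])
        # After deletion the job no longer shows up.
        delete_job('baz')
        self.assertEqual(len(list(pending_jobs())), 0)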
Example #10
    def test_update_feeds(self):
        u = UniqueFeed.objects.create(
            url='http://example.com/feed0',
            last_update=timezone.now() - timedelta(hours=1),
        )
        u.schedule()
        UniqueFeed.objects.create(
            url='http://example.com/feed1',
        ).schedule()
        with self.assertNumQueries(0):
            jobs = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60))
            self.assertEqual(len(jobs), 1)
            self.assertEqual(jobs[0]['id'], u.url)

        u.delete()
        delete_job(u.url)
        with self.assertNumQueries(0):
            urls = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60))
            self.assertEqual(len(urls), 0)

        u = UniqueFeed.objects.create(
            url='http://example.com/backoff',
            last_update=timezone.now() - timedelta(hours=28),
            backoff_factor=10,
        )
        with self.assertNumQueries(0):
            jobs = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60))
            self.assertEqual(len(jobs), 0)
        u.backoff_factor = 9
        u.save()
        u.schedule()
        with self.assertNumQueries(0):
            jobs = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60))
            self.assertEqual(len(jobs), 1)
            self.assertEqual(jobs[0]['id'], u.url)
            self.assertEqual(
                UniqueFeed.TIMEOUT_BASE * jobs[0]['backoff_factor'], 180)

        UniqueFeed.objects.update(last_update=timezone.now())
        with self.assertNumQueries(0):
            jobs = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60))
            self.assertEqual(len(jobs), 0)

        UniqueFeed.objects.create(
            url='http://example.com/lol',
        )

        UniqueFeed.objects.update(
            last_update=timezone.now() - timedelta(hours=54))

        # No subscribers -> deletion
        with self.assertNumQueries(2):
            call_command('delete_unsubscribed')
        self.assertEqual(UniqueFeed.objects.count(), 0)

        UniqueFeed.objects.create(
            url='http://example.com/foo',
            last_update=timezone.now() - timedelta(hours=2),
        ).schedule()
        UniqueFeed.objects.create(
            url='http://example.com/bar',
            last_update=timezone.now() - timedelta(hours=2),
            last_loop=timezone.now() - timedelta(hours=2),
        ).schedule()
        jobs = list(pending_jobs(
            limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60))
        self.assertEqual(len(jobs), 2)
        self.assertEqual(jobs[0]['id'], 'http://example.com/bar')
        self.assertEqual(jobs[1]['id'], 'http://example.com/foo')
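Example #10 is an earlier variant of the same test: the scheduling metadata (last_update, backoff_factor, last_loop) still lives as fields on the UniqueFeed model, whereas Examples #7 and #11 keep it in the scheduler and adjust it with patch_job. The same state change in the two styles, using only names from the snippets:

# Model-backed metadata (Example #10 style):
u.backoff_factor = 9
u.save()
u.schedule()

# Scheduler-backed metadata (Examples #7 and #11 style):
patch_job(u.url, backoff_factor=9)
u.schedule()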
Example #11
    def test_update_feeds(self):
        u = UniqueFeed.objects.create(url='http://example.com/feed0')
        u.schedule()
        patch_job(u.url,
                  last_update=(timezone.now() -
                               timedelta(hours=1)).strftime('%s'))
        u.schedule()
        UniqueFeed.objects.create(url='http://example.com/feed1').schedule()
        with self.assertNumQueries(0):
            jobs = list(
                pending_jobs(limit=5,
                             reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                             connection=get_redis_connection()))
            self.assertEqual(len(jobs), 1)
            self.assertEqual(jobs[0]['id'], u.url)

        u.delete()
        delete_job(u.url, connection=get_redis_connection())
        with self.assertNumQueries(0):
            urls = list(
                pending_jobs(limit=5,
                             reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                             connection=get_redis_connection()))
            self.assertEqual(len(urls), 0)

        u = UniqueFeed.objects.create(url='http://example.com/backoff')
        u.schedule()
        patch_job(u.url,
                  backoff_factor=10,
                  last_update=(timezone.now() -
                               timedelta(hours=28)).strftime('%s'))
        u.schedule()
        with self.assertNumQueries(0):
            jobs = list(
                pending_jobs(limit=5,
                             reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                             connection=get_redis_connection()))
            self.assertEqual(len(jobs), 0)
        patch_job(u.url, backoff_factor=9)
        u.schedule()
        with self.assertNumQueries(0):
            jobs = list(
                pending_jobs(limit=5,
                             reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                             connection=get_redis_connection()))
            self.assertEqual(len(jobs), 1)
            self.assertEqual(jobs[0]['id'], u.url)
            self.assertEqual(
                UniqueFeed.TIMEOUT_BASE * jobs[0]['backoff_factor'], 180)

        patch_job(u.url, last_update=int(time.time()))
        u.schedule()
        with self.assertNumQueries(0):
            jobs = list(
                pending_jobs(limit=5,
                             reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                             connection=get_redis_connection()))
            self.assertEqual(len(jobs), 0)

        UniqueFeed.objects.create(url='http://example.com/lol')

        for u in UniqueFeed.objects.all():
            patch_job(u.url,
                      last_update=(timezone.now() -
                                   timedelta(hours=54)).strftime('%s'))

        # No subscribers -> deletion
        with self.assertNumQueries(2):
            call_command('delete_unsubscribed')
        self.assertEqual(UniqueFeed.objects.count(), 0)

        u = UniqueFeed.objects.create(url='http://example.com/foo')
        u.schedule()
        patch_job(u.url,
                  last_update=(timezone.now() -
                               timedelta(hours=2)).strftime('%s'))
        u.schedule()
        u = UniqueFeed.objects.create(url='http://example.com/bar')
        u.schedule()
        patch_job(u.url,
                  last_update=(timezone.now() -
                               timedelta(hours=2)).strftime('%s'))
        u.schedule()
        jobs = list(
            pending_jobs(limit=5,
                         reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                         connection=get_redis_connection()))
        self.assertEqual(len(jobs), 2)
        self.assertEqual(jobs[0]['id'], 'http://example.com/bar')
        self.assertEqual(jobs[1]['id'], 'http://example.com/foo')
Example #12
    def test_schedule_in_future(self):
        schedule_job('lol', schedule_in=10)
        self.assertEqual(len(list(pending_jobs())), 0)
        delete_job('lol')
Example #13
    def test_job_deletion(self):
        schedule_job('bar', schedule_in=-1)
        delete_job('bar')
        self.assertEqual(len(list(pending_jobs())), 0)