Example #1
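A test asserting that scheduled_jobs() yields job IDs in schedule order, and that with_times=True returns (job_id, timestamp) pairs rather than bare IDs. The accented job names double as a unicode check.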
    def test_scheduled_jobs(self):
        schedule_job('jòb', schedule_in=10)
        schedule_job('ötherjòb', schedule_in=20)
        schedule = scheduled_jobs(with_times=True)
        self.assertEqual([s[0] for s in schedule], ['jòb', 'ötherjòb'])
        schedule = list(scheduled_jobs())
        self.assertEqual(schedule, ['jòb', 'ötherjòb'])
Example #2
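A test of the pending/scheduled split: 100 jobs are scheduled in the past, pending_jobs(limit=10) consumes the first 10, and scheduled_jobs() still reports the remaining 90.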
    def test_schedule_limit_items_count(self):
        for i in range(100):
            schedule_job('foo{0}'.format(i), schedule_in=-1)

        jobs = list(pending_jobs(limit=10))
        self.assertEqual(len(jobs), 10)
        self.assertEqual(len(list(scheduled_jobs())), 90)
Example #3
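A management-command handler that iterates over every scheduled job, normalizes the stored job details (filling defaults, coercing string fields, truncating the title), and writes them back to the matching UniqueFeed row.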
    def handle_sentry(self, *args, **kwargs):
        existing_jobs = set(scheduled_jobs())
        for url in existing_jobs:
            details = job_details(url)
            # Defaults for every attribute mirrored onto UniqueFeed.
            attrs = {
                'title': '',
                'link': '',
                'etag': '',
                'modified': '',
                'error': '',
                'hub': '',
                'backoff_factor': 1,
                'subscribers': 1,
            }
            string_keys = ['title', 'link', 'etag', 'modified', 'error', 'hub']
            for key in attrs:
                if key in details:
                    value = details[key]
                    # Coerce non-string values stored for string fields.
                    if key in string_keys and not isinstance(value,
                                                             basestring):
                        value = str(value)
                    attrs[key] = value
            # Truncate overly long titles to 2048 characters.
            attrs['title'] = attrs['title'][:2048]
            if 'last_update' in details:
                attrs['last_update'] = timezone.make_aware(
                    datetime.utcfromtimestamp(details['last_update']),
                    pytz.utc)
            UniqueFeed.objects.filter(url=url).update(**attrs)
Example #4
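A test for rescheduling: the overdue 'baz' job is consumed with pending_jobs(reschedule_in=20) and pushed 20 seconds into the future, so it now sorts after 'foo' (due in 10 seconds) with exactly a 10-second gap between their timestamps.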
    def test_reschedule(self):
        schedule_job('baz', schedule_in=-1)
        schedule_job('foo', schedule_in=10)
        jobs = list(pending_jobs(reschedule_in=20))
        self.assertEqual(jobs, [{'id': 'baz'}])

        schedule = list(scheduled_jobs(with_times=True))
        foo = schedule[0]
        baz = schedule[1]
        self.assertEqual(foo[0], 'foo')
        self.assertEqual(baz[0], 'baz')
        self.assertEqual(foo[1] + 10, baz[1])
Example #5
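A view method that fetches all (url, time) pairs from the scheduler and buckets them into intervals of roughly 1/500th of the total timespan, returning the histogram as JSON for graphing.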
    def graph_data(self, request):
        jobs = list(scheduled_jobs(with_times=True, connection=get_redis_connection()))

        timespan = jobs[-1][1] - jobs[0][1]
        interval = math.ceil(timespan / 500)
        start = jobs[0][1]
        counts = [0]
        for url, time in jobs:
            while len(counts) * interval < time - start:
                counts.append(0)
            counts[-1] += 1

        return HttpResponse(json.dumps({"max": max(counts), "counts": counts, "timespan": timespan}))
Example #6
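A variant of graph_data that relies on the default Redis connection instead of passing one explicitly.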
    def graph_data(self, request):
        jobs = list(scheduled_jobs(with_times=True))

        timespan = jobs[-1][1] - jobs[0][1]
        interval = math.ceil(timespan / 500)
        start = jobs[0][1]
        counts = [0]
        for url, time in jobs:
            while len(counts) * interval < time - start:
                counts.append(0)
            counts[-1] += 1

        return HttpResponse(json.dumps({'max': max(counts),
                                        'counts': counts,
                                        'timespan': timespan}))
Example #7
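A further variant of graph_data: the long call is wrapped, the unused loop variable is renamed to _url, and an explicit Redis connection is passed.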
    def graph_data(self, request):
        jobs = list(scheduled_jobs(with_times=True,
                                   connection=get_redis_connection()))

        timespan = jobs[-1][1] - jobs[0][1]
        interval = math.ceil(timespan / 500)
        start = jobs[0][1]
        counts = [0]
        for _url, time in jobs:
            while len(counts) * interval < time - start:
                counts.append(0)
            counts[-1] += 1

        return HttpResponse(json.dumps({'max': max(counts),
                                        'counts': counts,
                                        'timespan': timespan}))
Example #8
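A synchronization command that diffs the scheduler's job set against the URLs of unmuted UniqueFeed objects: stale jobs are deleted, and feeds missing from the scheduler are scheduled in chunks of 10000 to bound query size.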
    def handle_sentry(self, *args, **kwargs):
        existing_jobs = set(scheduled_jobs())
        target = set(UniqueFeed.objects.filter(muted=False).values_list(
            'url', flat=True))

        to_delete = existing_jobs - target
        if to_delete:
            logger.info(
                "Deleting {0} jobs from the scheduler".format(len(to_delete)))
            for job_id in to_delete:
                delete_job(job_id)

        to_add = target - existing_jobs
        if to_add:
            logger.info("Adding {0} jobs to the scheduler".format(len(to_add)))
            for chunk in chunked(to_add, 10000):
                uniques = UniqueFeed.objects.filter(url__in=chunk)
                for unique in uniques:
                    unique.schedule()
Example #9
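The same synchronization logic, reworked to reuse a single Redis connection throughout and to emit structured log calls with a count keyword.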
    def handle_sentry(self, *args, **kwargs):
        connection = get_redis_connection()
        existing_jobs = set(scheduled_jobs(connection=connection))
        target = set(UniqueFeed.objects.filter(muted=False).values_list(
            'url', flat=True))

        to_delete = existing_jobs - target
        if to_delete:
            logger.info("deleting jobs from the scheduler",
                        count=len(to_delete))
            for job_id in to_delete:
                delete_job(job_id, connection=connection)

        to_add = target - existing_jobs
        if to_add:
            logger.info("adding jobs to the scheduler", count=len(to_add))
            for chunk in chunked(to_add, 10000):
                uniques = UniqueFeed.objects.filter(url__in=chunk)
                for unique in uniques:
                    unique.schedule()
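Taken together, the examples imply a small Redis-backed scheduling API: schedule_job(id, schedule_in=...) enqueues, pending_jobs(...) consumes due jobs as dicts with an 'id' key, and scheduled_jobs(with_times=True) yields (job_id, timestamp) pairs. The sketch below strings these calls together; the rache import path and the job names are assumptions, not something the examples confirm.

    # Minimal round-trip through the API the examples above exercise.
    # ASSUMPTION: the functions are importable from a module named rache;
    # the snippets never show their imports.
    from rache import schedule_job, pending_jobs, scheduled_jobs

    schedule_job('example-job', schedule_in=-1)  # overdue, so immediately pending
    schedule_job('later-job', schedule_in=60)    # due in a minute

    # Consuming pending jobs removes them from the schedule (see the
    # limit test above); each job is a dict with an 'id' key.
    for job in pending_jobs(limit=10):
        print(job['id'])

    # Only the future job remains; with_times=True yields (id, time) pairs.
    print(list(scheduled_jobs(with_times=True)))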