def _create_periodic_task(self):
    """Persist a mock PeriodicTask wired to the hooks-failure-report beat entry."""
    entry = settings.CELERY_BEAT_SCHEDULE.get("send-hooks-failures-reports")
    mock_task = PeriodicTask(
        name="Periodic Task Mock",
        enabled=True,
        task=entry.get("task"),
    )
    mock_task.save()
def save(self, *args, **kwargs):
    """Save this Config and keep its ingestion PeriodicTask in sync.

    When the config is new it is saved once first so ``self.id`` exists,
    because the id is passed to the celery task as its third argument.
    The linked ``PeriodicTask`` is then created or updated accordingly.
    """
    # save object to get its id
    if self.id is None:
        super(Config, self).save(*args, **kwargs)  # Call the "real" save() method.
    # join module path, name and id -- becomes the task's JSON args
    id_pass = [self.module_path, self.module_name, self.id]
    task_name = "ingester.tasks.ingestsource"
    if self.ingester_task is not None:
        # config updated: refresh the existing periodic task in place
        # (direct attribute assignment instead of setattr with constant names)
        self.ingester_task.name = "{}-Task".format(self.name)
        self.ingester_task.interval = self.schedule
        self.ingester_task.task = task_name
        self.ingester_task.args = json.dumps(id_pass)
        self.ingester_task.enabled = self.enabled
        self.ingester_task.save()
    else:
        # first save: create and link a brand-new periodic task
        obj = PeriodicTask(name="{}-Task".format(self.name),
                           interval=self.schedule,
                           task=task_name,
                           args=json.dumps(id_pass),
                           enabled=self.enabled)
        obj.save()
        self.ingester_task = obj
    super(Config, self).save(*args, **kwargs)  # Call the "real" save() method.
def setup_scheduler(self, app):
    """Bind the app, install a one-entry beat schedule, and build its model twin."""
    self.app = app
    self.entry_name, entry = self.create_conf_entry()
    self.app.conf.beat_schedule = {self.entry_name: entry}
    interval = self.create_interval_schedule()
    self.m1 = PeriodicTask(name=self.entry_name, interval=interval)
def save(self, *args, **kwargs):
    """
    To be called, when Config is created or updated.

    Saves the model first when new (so ``self.id`` exists), derives the
    initial harvesting window from the schedule, then creates or updates
    the linked celery ``PeriodicTask``.

    :param args: forwarded to Model.save
    :param kwargs: forwarded to Model.save
    :return: None
    """
    # save object to get its id
    if self.id is None:
        super(Config, self).save(*args, **kwargs)  # Call the "real" save() method.
    # set initial start and end date depending on schedule
    if self.start_date is None:
        time_interval = self.schedule.time_interval
        self.start_date = self.schedule.min_date
        if time_interval == "all":
            # harvester cannot go into the future for one-time execution:
            # all end dates lie in the future, so use max_date directly
            self.end_date = self.schedule.max_date
        else:
            # month/week/day share the same logic: a fixed window from
            # min_date, clamped to max_date when one is set
            window_days = {"month": 30, "week": 7, "day": 1}.get(time_interval)
            if window_days is not None:
                self.end_date = self.schedule.min_date + datetime.timedelta(
                    days=window_days)
                if self.schedule.max_date is not None:
                    self.end_date = min(self.end_date, self.schedule.max_date)
    # join module path, name and id -- becomes the task's JSON args
    task_args = [self.module_path, self.module_name, self.id]
    if self.celery_task is not None:
        # config is updated, update task
        self.celery_task.name = "{}-Task".format(self.name)
        self.celery_task.interval = self.schedule.schedule
        self.celery_task.task = "harvester.tasks.harvestsource"
        self.celery_task.args = json.dumps(task_args)
        self.celery_task.enabled = self.enabled
        self.celery_task.save()
    else:
        obj = PeriodicTask(name="{}-Task".format(self.name),
                           interval=self.schedule.schedule,
                           task="harvester.tasks.harvestsource",
                           args=json.dumps(task_args),
                           enabled=self.enabled)
        obj.save()
        self.celery_task = obj
    super(Config, self).save(*args, **kwargs)  # Call the "real" save() method.
def create_publish_task(self):
    """Schedule a one-off beat task that publishes this confession at publish_date."""
    when = ClockedSchedule(clocked_time=self.publish_date)
    when.save()
    task = PeriodicTask(
        clocked=when,
        name=f'Publish confession with id:{self.pk}',
        task='publish_confession_task',
        kwargs={"instance_id": self.pk},
        one_off=True,
    )
    task.save()
def create_task():
    """Adds scraping task to celery jobs.

    Idempotent: only creates the PeriodicTask when no task with the same
    name exists yet.
    """
    # .exists() avoids fetching a whole row just to test for presence
    if not PeriodicTask.objects.filter(name=TaskName).exists():
        p = PeriodicTask(
            name=TaskName,
            task="websecmap.api.apis.zorgkaart.scrape",
            # NOTE(review): hard-coded crontab pk 7 -- assumed to exist
            crontab=CrontabSchedule.objects.get(id=7),
        )
        p.save()
        log.info(f"Created Periodic Task for zorgkaart scraper with name: {TaskName}")
def create_task(self, task, name, arguments):
    """Fetch-or-build the named PeriodicTask, ensure it has a crontab, and link it."""
    try:
        existing = PeriodicTask.objects.get(name=name)
    except PeriodicTask.DoesNotExist:
        existing = PeriodicTask(name=name)
    self.task = existing
    if not self.task.crontab:
        # attach a freshly created schedule when the task has none yet
        self.task.crontab = CrontabSchedule.objects.create()
    self.task.task = task
    self.task.kwargs = json.dumps(arguments)
    self.task.save()
    self.save()
class test_modeladmin_PeriodicTaskAdmin(SchedulerCase):
    """Admin-action tests for PeriodicTaskAdmin.run_tasks."""

    @pytest.mark.django_db()
    @pytest.fixture(autouse=True)
    def setup_scheduler(self, app):
        # Two conf entries, each mirrored by a saved PeriodicTask row,
        # both sharing one interval schedule.
        self.app = app
        self.site = AdminSite()
        self.request_factory = RequestFactory()
        interval_schedule = self.create_interval_schedule()
        entry_name, entry = self.create_conf_entry()
        self.app.conf.beat_schedule = {entry_name: entry}
        self.m1 = PeriodicTask(name=entry_name, interval=interval_schedule)
        self.m1.task = 'celery.backend_cleanup'
        self.m1.save()
        entry_name, entry = self.create_conf_entry()
        self.app.conf.beat_schedule = {entry_name: entry}
        self.m2 = PeriodicTask(name=entry_name, interval=interval_schedule)
        self.m2.task = 'celery.backend_cleanup'
        self.m2.save()

    def patch_request(self, request):
        """patch request to allow for django messages storage"""
        setattr(request, 'session', 'session')
        messages = FallbackStorage(request)
        setattr(request, '_messages', messages)
        return request

    # don't hang if broker is down
    # https://github.com/celery/celery/issues/4627
    @pytest.mark.timeout(5)
    def test_run_task(self):
        # Running a single selected task queues exactly one singular message.
        ma = PeriodicTaskAdmin(PeriodicTask, self.site)
        self.request = self.patch_request(self.request_factory.get('/'))
        ma.run_tasks(self.request, PeriodicTask.objects.filter(id=self.m1.id))
        assert len(self.request._messages._queued_messages) == 1
        queued_message = self.request._messages._queued_messages[0].message
        assert queued_message == '1 task was successfully run'

    # don't hang if broker is down
    # https://github.com/celery/celery/issues/4627
    @pytest.mark.timeout(5)
    def test_run_tasks(self):
        # Running the full queryset (two tasks) reports the plural message.
        ma = PeriodicTaskAdmin(PeriodicTask, self.site)
        self.request = self.patch_request(self.request_factory.get('/'))
        ma.run_tasks(self.request, PeriodicTask.objects.all())
        assert len(self.request._messages._queued_messages) == 1
        queued_message = self.request._messages._queued_messages[0].message
        assert queued_message == '2 tasks were successfully run'
def setup_scheduler(self, app):
    """Install two conf entries and mirror each as a saved PeriodicTask row."""
    self.app = app
    self.site = AdminSite()
    self.request_factory = RequestFactory()

    name, entry = self.create_conf_entry()
    self.app.conf.beat_schedule = {name: entry}
    self.m1 = PeriodicTask(name=name)
    self.m1.task = 'celery.backend_cleanup'
    self.m1.save()

    name, entry = self.create_conf_entry()
    self.app.conf.beat_schedule = {name: entry}
    self.m2 = PeriodicTask(name=name)
    self.m2.task = 'celery.backend_cleanup'
    self.m2.save()
def schedule_cron(task_name, crontable, args=None, kwargs=None):
    """
    Schedules a task using a UNIX cron table.

    E.g. "* * * * *" is every minute, "0 * * * *" every hour at minute 0.

    :param task_name: dotted path of the registered celery task
    :param crontable: five-field cron expression string
    :param args: positional args for the task (JSON-serializable); default none
    :param kwargs: keyword args for the task (JSON-serializable); default none
    :return: the created TaskScheduler row
    """
    parsed = parse_crontab(crontable)
    schedule, _ = CrontabSchedule.objects.get_or_create(**parsed)
    # suffix with the current timestamp so scheduling the same task twice
    # still yields a unique PeriodicTask name
    ptask_name = "%s_%s" % (task_name, datetime.now())
    # None defaults instead of mutable [] / {} defaults (shared between calls)
    ptask = PeriodicTask(crontab=schedule,
                         name=ptask_name,
                         task=task_name,
                         args=json.dumps(list(args or [])),
                         kwargs=json.dumps(dict(kwargs or {})))
    ptask.save()
    return TaskScheduler.objects.create(periodic_task=ptask)
def test_validate_unique_raises_if_schedule_not_set(self):
    """validate_unique() must reject a task that has no schedule at all."""
    with self.assertRaises(ValidationError) as cm:
        PeriodicTask(name='task0').validate_unique()
    # assertEqual: assertEquals is a deprecated alias (removed in Python 3.12)
    self.assertEqual(
        cm.exception.args[0],
        'One of clocked, interval, crontab, or solar must be set.',
    )
def test_save_raises_for_multiple_schedules(self):
    """save() must reject every pair of simultaneously-set schedule types."""
    schedule_kinds = [
        ('crontab', CrontabSchedule()),
        ('interval', IntervalSchedule()),
        ('solar', SolarSchedule()),
        ('clocked', ClockedSchedule()),
    ]
    for index, pair in enumerate(combinations(schedule_kinds, 2)):
        with self.assertRaises(ValidationError):
            PeriodicTask(name='task{}'.format(index), **dict(pair)).save()
def test_validate_unique_raises_for_multiple_schedules(self):
    """validate_unique() must reject every pair of simultaneously-set schedules."""
    schedule_kinds = [
        ('crontab', CrontabSchedule()),
        ('interval', IntervalSchedule()),
        ('solar', SolarSchedule()),
        ('clocked', ClockedSchedule()),
    ]
    for pair in combinations(schedule_kinds, 2):
        with self.assertRaises(ValidationError):
            PeriodicTask(**dict(pair)).validate_unique()
def create(self, validated_data):
    """Create an Audit_Job and schedule its analysis task at plan_time.

    Reuses a matching CrontabSchedule when one exists, otherwise creates
    one, and links the resulting PeriodicTask to the new instance.
    """
    instance = super(Audit_JobSerializer, self).create(validated_data)
    # renamed: the local used to shadow the stdlib `datetime` module
    plan_time = validated_data['plan_time']
    # (removed: unused `is_static_job` local)
    schedule = CrontabSchedule.objects.filter((Q(minute=plan_time.minute))
                                              & (Q(hour=plan_time.hour))
                                              & (Q(day_of_month=plan_time.day))
                                              & (Q(month_of_year=plan_time.month)))
    if not schedule:
        schedule = CrontabSchedule(minute=plan_time.minute,
                                   hour=plan_time.hour,
                                   day_of_month=plan_time.day,
                                   month_of_year=plan_time.month)
        schedule.save()
    else:
        schedule = schedule[0]
    # random UUID name keeps scheduled jobs unique
    task = PeriodicTask(crontab=schedule,
                        name=str(uuid.uuid4()),
                        task=ANALYSIS_FUNCTION,
                        args=json.dumps([str(instance.id)]),
                        description='audit_job')
    task.save()
    instance.task = task
    instance.save()
    return instance
def to_internal_value(self, data):
    """Validate raw input and build (without saving) a reporting PeriodicTask.

    Expects keys: 'customer' (pk), 'task_type' ('watchman' or
    'repairshopr') and 'periodic_task' (cron fields). Raises
    serializers.ValidationError for any invalid field.
    """
    # retrieve the customer or raise validation error if customer does not exist
    try:
        customer = models.Customer.objects.get(pk=data['customer'])
    except KeyError:
        raise serializers.ValidationError(
            {'customer': 'This field is required'})
    except models.Customer.DoesNotExist:
        raise serializers.ValidationError(
            {'customer': 'This field must be a valid customer id'})
    # create the appropriate task type, either watchman or repairshopr
    if data['task_type'] == 'watchman':
        task_type = 'reporter.tasks_watchman.update_client'
        task_args = [customer.watchman_group_id]
    elif data['task_type'] == 'repairshopr':
        task_type = 'reporter.tasks_repairshopr.update_client'
        task_args = [customer.repairshopr_id]
    else:
        raise serializers.ValidationError({
            'task_type': 'This field must be either "watchman" or "repairshopr"'
        })
    # json format the task args
    task_args = json.dumps(task_args)
    # check that the customer has the appropriate service id for the specified task type
    if data['task_type'] == 'watchman' and customer.watchman_group_id is None:
        raise serializers.ValidationError({
            'task_type': 'The specified customer does not have a Watchman ID defined'
        })
    if data['task_type'] == 'repairshopr' and customer.repairshopr_id is None:
        raise serializers.ValidationError({
            'task_type': 'The specified customer does not have a RepairShopr ID defined'
        })
    # create the task name
    task_name = '{} {}'.format(customer.name, data['task_type'])
    # validate the cron data
    # NOTE(review): each field accepts only a single value or '*';
    # cron lists/ranges (e.g. '1,2' or '1-5') are rejected by these patterns
    if not re.fullmatch(r'([1-5]?[0-9]|\*)', data['periodic_task']['minute']) or \
            not re.fullmatch(r'1?[0-9]|2[0-3]|\*', data['periodic_task']['hour']) or \
            not re.fullmatch(r'[0-6]|\*', data['periodic_task']['day_of_week']) or \
            not re.fullmatch(r'[1-9]|[1-2][0-9]|3[0-1]|\*', data['periodic_task']['day_of_month']) or \
            not re.fullmatch(r'[1-9]|1[0-2]|\*', data['periodic_task']['month_of_year']):
        raise serializers.ValidationError(
            {'periodic_task': 'Cron parameters are not valid'})
    # get or create the cron schedule
    cron, _ = CrontabSchedule.objects.get_or_create(
        **data['periodic_task'])
    # check if a periodic task with the same name already exists
    if PeriodicTask.objects.filter(name=task_name).exists():
        raise serializers.ValidationError(
            'A periodic task with this name already exists')
    # prepare the periodic task (returned unsaved)
    task = PeriodicTask(crontab=cron,
                        name=task_name,
                        task=task_type,
                        args=task_args)
    return task
def update_notification_status(request):
    """Toggle a book's tracking flag and (de)activate its notification beat task."""
    book_id = request.POST['id']
    tracking = request.POST['value'] == 'true'

    # persist the tracking flag on the book itself
    book = Book.objects.get(id=book_id)
    book.tracking = tracking
    book.save()

    # reuse (or create) the shared 30-minute interval
    interval, _ = IntervalSchedule.objects.get_or_create(
        every=30, period=IntervalSchedule.MINUTES)

    # fetch the existing per-book task, or build a fresh one
    task_name = 'book_' + book_id
    if PeriodicTask.objects.filter(name=task_name).exists():
        notification_task = PeriodicTask.objects.get(name=task_name)
    else:
        notification_task = PeriodicTask(
            name=task_name, task='dashboard.tasks.send_notification')
    notification_task.kwargs = json.dumps({'id': book_id})
    notification_task.interval = interval
    notification_task.enabled = tracking
    notification_task.save()
    return JsonResponse({'success': True})
def task_create_or_update(self):
    """Create or refresh the PeriodicTask that parses this good's page.

    Bug fix: the update branch previously used ``task`` without ever
    binding it, raising NameError on every update; it now starts from
    ``self.task`` (matching the sibling robots/sitemap implementations).

    :return: the saved PeriodicTask
    """
    if not self.task:
        # first run: daily interval + brand-new task
        schedule, created = IntervalSchedule.objects.get_or_create(
            every=1,
            period=IntervalSchedule.DAYS,
        )
        task = PeriodicTask(
            interval=schedule,
            name=f'{self.brand.name}.good.{self.url}',
            task='parsing.tasks.good',
            kwargs=json.dumps({
                'url': self.url,
                'mapping': self.mapping()
            }),
            enabled=self.enabled,
        )
    else:
        task = self.task  # FIX: was missing -> NameError on update
        task.interval = self.schedule
        task.kwargs = json.dumps({
            'url': self.url,
            'mapping': self.mapping()
        })
        task.enabled = self.enabled
    task.save()
    return task
class test_DatabaseSchedulerFromAppConf(SchedulerCase):
    """Scheduler behaviour when entries come from app.conf.beat_schedule."""

    Scheduler = TrackingScheduler

    @pytest.mark.django_db()
    @pytest.fixture(autouse=True)
    def setup_scheduler(self, app):
        # one conf entry plus a model task of the same name
        # (built here but not saved)
        self.app = app
        self.entry_name, entry = self.create_conf_entry()
        self.app.conf.beat_schedule = {self.entry_name: entry}
        self.m1 = PeriodicTask(name=self.entry_name,
                               interval=self.create_interval_schedule())

    def test_constructor(self):
        # freshly constructed scheduler exposes its sync bookkeeping
        s = self.Scheduler(app=self.app)
        assert isinstance(s._dirty, set)
        assert s._last_sync is None
        assert s.sync_every

    def test_periodic_task_model_enabled_schedule(self):
        # the conf entry plus the implicit backend_cleanup both appear
        s = self.Scheduler(app=self.app)
        sched = s.schedule
        assert len(sched) == 2
        assert 'celery.backend_cleanup' in sched
        assert self.entry_name in sched
        for n, e in sched.items():
            assert isinstance(e, s.Entry)
            if n == 'celery.backend_cleanup':
                # cleanup expiry is stored in expire_seconds, not expires
                assert e.options['expires'] == 12 * 3600
                assert e.model.expires is None
                assert e.model.expire_seconds == 12 * 3600

    def test_periodic_task_model_disabled_schedule(self):
        # disabling the saved model row hides its entry from the schedule
        self.m1.enabled = False
        self.m1.save()
        s = self.Scheduler(app=self.app)
        sched = s.schedule
        assert sched
        assert len(sched) == 1
        assert 'celery.backend_cleanup' in sched
        assert self.entry_name not in sched
class test_modeladmin_PeriodicTaskAdmin(SchedulerCase):
    """Admin-action tests for PeriodicTaskAdmin.run_tasks (no timeout guard)."""

    @pytest.mark.django_db()
    @pytest.fixture(autouse=True)
    def setup_scheduler(self, app):
        # Two conf entries, each mirrored by a saved PeriodicTask row.
        self.app = app
        self.site = AdminSite()
        self.request_factory = RequestFactory()
        entry_name, entry = self.create_conf_entry()
        self.app.conf.beat_schedule = {entry_name: entry}
        self.m1 = PeriodicTask(name=entry_name)
        self.m1.task = 'celery.backend_cleanup'
        self.m1.save()
        entry_name, entry = self.create_conf_entry()
        self.app.conf.beat_schedule = {entry_name: entry}
        self.m2 = PeriodicTask(name=entry_name)
        self.m2.task = 'celery.backend_cleanup'
        self.m2.save()

    def patch_request(self, request):
        """patch request to allow for django messages storage"""
        setattr(request, 'session', 'session')
        messages = FallbackStorage(request)
        setattr(request, '_messages', messages)
        return request

    def test_run_task(self):
        # Running a single selected task queues exactly one singular message.
        ma = PeriodicTaskAdmin(PeriodicTask, self.site)
        self.request = self.patch_request(self.request_factory.get('/'))
        ma.run_tasks(self.request, PeriodicTask.objects.filter(id=self.m1.id))
        assert len(self.request._messages._queued_messages) == 1
        queued_message = self.request._messages._queued_messages[0].message
        assert queued_message == '1 task was successfully run'

    def test_run_tasks(self):
        # Running the full queryset (two tasks) reports the plural message.
        ma = PeriodicTaskAdmin(PeriodicTask, self.site)
        self.request = self.patch_request(self.request_factory.get('/'))
        ma.run_tasks(self.request, PeriodicTask.objects.all())
        assert len(self.request._messages._queued_messages) == 1
        queued_message = self.request._messages._queued_messages[0].message
        assert queued_message == '2 tasks were successfully run'
def createTaskSnmpGet(host_nomeTabela_snmpGet, host_ip, host_porta, host_community, templates, item_id, item_nome, item_oid, item_intervaloAtualizacao, item_intervaloAtualizacaoUn):
    """Create the periodic SNMP-GET task for one item of a host.

    Improvement: looks the IntervalSchedule up with a filtered DB query
    instead of loading every row into Python and scanning it.
    """
    template_ids = ''
    for template in templates:
        template_ids = template_ids + ('_template_id:' + str(template.id))
    # reuse an existing interval with the same every/period when possible
    intervalo = IntervalSchedule.objects.filter(
        every=item_intervaloAtualizacao,
        period=item_intervaloAtualizacaoUn).first()
    if intervalo is not None:
        intervalo_id = intervalo.id
        print('ACHEI UM INTERVALO NO BD')
    else:
        novoIntervalo = IntervalSchedule(every=item_intervaloAtualizacao,
                                         period=item_intervaloAtualizacaoUn)
        novoIntervalo.save()
        intervalo_id = novoIntervalo.id
        print('NÃO ACHEI UM INTERVALO E SALVEI NO BD')
    # args is a hand-built JSON array of strings consumed by the task
    novaTask = PeriodicTask(
        name='SNMPGETTASK=' + host_nomeTabela_snmpGet + str(template_ids) + '_item_id:' + str(item_id),
        task='hosts.tasks.task_snmp_get',
        args='[' + '"' + host_nomeTabela_snmpGet + '", "' + host_community + '", "' + str(item_id) + '", "' + item_nome + '", "' + host_ip + '", "' + item_oid + '", "' + str(host_porta) + '", "' + str(template_ids) + '"]',
        kwargs='{}',
        enabled=1,
        interval_id=intervalo_id,
        one_off=0,
        headers='{}')
    novaTask.save()
    print('CRIEI UMA NOVA TAREFA')
def createTaskCleanData(host_nomeTabela_snmpGet, templates, item_id, item_tempoArmazenamentoDados, item_tempoArmazenamentoDadosUn):
    """Create the periodic clean-up task for one item's stored data.

    Improvement: looks the IntervalSchedule up with a filtered DB query
    instead of loading every row into Python and scanning it.
    """
    template_ids = ''
    for template in templates:
        template_ids = template_ids + ('_template_id:' + str(template.id))
    # clean-up always runs on a 1-minute interval; reuse it if present
    intervalo = IntervalSchedule.objects.filter(
        every=1, period='minutes').first()
    if intervalo is not None:
        intervalo_id = intervalo.id
        print('ACHEI UM INTERVALO NO BD')
    else:
        novoIntervalo = IntervalSchedule(every=1, period='minutes')
        novoIntervalo.save()
        intervalo_id = novoIntervalo.id
        print('NÃO ACHEI UM INTERVALO E SALVEI NO BD')
    # args is a hand-built JSON array of strings consumed by the task
    novaTask = PeriodicTask(
        name='CLEANDATATASK=' + host_nomeTabela_snmpGet + str(template_ids) + '_item_id:' + str(item_id),
        task='hosts.tasks.task_clean_data',
        args='[' + '"' + str(item_id) + '", "' + str(item_tempoArmazenamentoDados) + '", "' + item_tempoArmazenamentoDadosUn + '", "' + host_nomeTabela_snmpGet + '"]',
        kwargs='{}',
        enabled=1,
        interval_id=intervalo_id,
        one_off=0,
        headers='{}')
    novaTask.save()
    print('CRIEI UMA NOVA TAREFA')
def schedule_every(task_name, period, every, args=None, kwargs=None):
    """
    Schedules a task by name every `every` `period`.

    Example: schedule_every('mycustomtask', 'seconds', 30, [1, 2, 3])
    runs the task every 30 seconds with the arguments 1, 2 and 3 passed
    to the actual task.

    :param task_name: dotted path of the registered celery task
    :param period: one of 'hours', 'days', 'seconds', 'minutes'
    :param every: positive interval count
    :param args: positional args for the task (JSON-serializable)
    :param kwargs: keyword args for the task (JSON-serializable)
    :raises Exception: when `period` is not a permitted unit
    :return: the created TaskScheduler row
    """
    permissible_periods = ['hours', 'days', 'seconds', 'minutes']
    if period not in permissible_periods:
        raise Exception('Invalid period specified')
    # create the periodic task and the interval; timestamp keeps names unique
    ptask_name = "%s_%s" % (task_name, datetime.now())
    interval_schedules = IntervalSchedule.objects.filter(period=period,
                                                         every=every)
    if interval_schedules:
        # reuse an existing interval schedule with the same parameters
        interval_schedule = interval_schedules[0]
    else:
        # create a brand new interval schedule
        interval_schedule = IntervalSchedule()
        interval_schedule.every = every  # should check this is a positive int
        interval_schedule.period = period
        interval_schedule.save()
    ptask = PeriodicTask(name=ptask_name, task=task_name,
                         interval=interval_schedule)
    # FIX: PeriodicTask stores args/kwargs as JSON text -- serialize them
    # (assigning raw Python objects stored str(...) which is not valid JSON
    # for strings; this also matches the sibling schedule_cron helper)
    if args:
        ptask.args = json.dumps(list(args))
    if kwargs:
        ptask.kwargs = json.dumps(dict(kwargs))
    ptask.save()
    return TaskScheduler.objects.create(periodic_task=ptask)
def save_model(self, request, obj, form, change):
    """Persist the watcher object and its backing PeriodicTask from the admin form.

    Two-phase save: the task is saved first so obj can store its id, and
    obj must be saved before its own id can be written into the task's args.
    """
    if obj.periodic_task_id is None:
        periodic_task = PeriodicTask()
    else:
        periodic_task = obj.periodic_task
    periodic_task.name = obj.name
    periodic_task.task = "webwatcher.tasks.check"
    # copy the schedule-related fields straight from the cleaned form data
    for field in periodic_task_field_names:
        setattr(periodic_task, field, form.cleaned_data[field])
    periodic_task.save()
    obj.periodic_task_id = periodic_task.id
    obj.save()
    # obj.id only exists after obj.save(), hence the second task save
    periodic_task.args = f"[{obj.id}]"
    periodic_task.save()
    # queue an immediate check (5s delay) for the freshly saved watcher
    check.apply_async(args=[obj.id], countdown=5)
class test_DatabaseSchedulerFromAppConf(SchedulerCase):
    """Scheduler behaviour when entries come from app.conf.beat_schedule."""

    Scheduler = TrackingScheduler

    @pytest.mark.django_db()
    @pytest.fixture(autouse=True)
    def setup_scheduler(self, app):
        # one conf entry plus a model task of the same name
        # (built here but not saved)
        self.app = app
        self.entry_name, entry = self.create_conf_entry()
        self.app.conf.beat_schedule = {self.entry_name: entry}
        self.m1 = PeriodicTask(name=self.entry_name)

    def test_constructor(self):
        # freshly constructed scheduler exposes its sync bookkeeping
        s = self.Scheduler(app=self.app)
        assert isinstance(s._dirty, set)
        assert s._last_sync is None
        assert s.sync_every

    def test_periodic_task_model_enabled_schedule(self):
        # the conf entry plus the implicit backend_cleanup both appear
        s = self.Scheduler(app=self.app)
        sched = s.schedule
        assert len(sched) == 2
        assert 'celery.backend_cleanup' in sched
        assert self.entry_name in sched
        for n, e in sched.items():
            assert isinstance(e, s.Entry)

    def test_periodic_task_model_disabled_schedule(self):
        # disabling the saved model row hides its entry from the schedule
        self.m1.enabled = False
        self.m1.save()
        s = self.Scheduler(app=self.app)
        sched = s.schedule
        assert sched
        assert len(sched) == 1
        assert 'celery.backend_cleanup' in sched
        assert self.entry_name not in sched
def create_periodic_obj(**kwargs):
    """Build, save, and register a PeriodicTask from keyword options.

    :raises ParameterIsEmptyException: when name, task, or queue is falsy
    :return: the saved PeriodicTask
    """
    name = kwargs.get('name', None)
    task = kwargs.get('task', 'seecode.services.scan.sched_start')
    interval = kwargs.get('interval', None)
    crontab = kwargs.get('crontab', None)
    args = kwargs.get('args', '')
    kkwargs = kwargs.get('kkwargs', '')
    queue = kwargs.get('queue', 'sched')
    exchange = kwargs.get('exchange', None)
    priority = kwargs.get('priority', 1)

    required = (name, task, queue)
    if not all(required):
        raise ParameterIsEmptyException(
            u'"name, task, queue" parameters cannot be empty !')

    obj = PeriodicTask(
        name=name,
        task=task,
        interval=interval,
        crontab=crontab,
        args=args,
        kwargs=kkwargs,
        queue=queue,
        exchange=exchange,
        priority=priority,
    )
    obj.save()
    # signal the beat scheduler that the task table changed
    PeriodicTasks.changed(obj)
    return obj
def handle(self, *args, **options):
    """Plan tomorrow's one-off Bazinga emails for every eligible customer."""
    # remove one-off tasks that are now disabled
    # (presumably already fired -- TODO confirm beat disables them)
    PeriodicTask.objects.filter(enabled=False, one_off=True).delete()
    tomorrow = (datetime.date.today() + datetime.timedelta(days=1)).weekday()
    # bazes not yet sent to a given target (correlated via OuterRef)
    planned_bazes_qs = Baz.objects.exclude(
        sent_to_targets__target=OuterRef("id")).only(
            'sent_to_targets__target')
    # customers with at least one target; targets_once caps how many
    # targets are handled per run based on the customer's interval
    customers = User.objects.prefetch_related('user_targets') \
        .filter(user_targets__isnull=False) \
        .annotate(
            targets_count=Count('user_targets'),
            targets_once=ExpressionWrapper(
                F('targets_count') / F('interval') + 1,
                output_field=IntegerField()),
        ).exclude(targets_once=0)
    periodic_tasks = []
    count_tasks = 0
    for customer in customers:
        # least-served targets first, limited to this customer's quota
        all_targets = customer.user_targets.annotate(
            planned_baz=Subquery(planned_bazes_qs.values('id')[:1]),
            count_sent=Coalesce(
                Subquery(
                    Baz.objects.filter(sent_to_targets__target=OuterRef(
                        "id")).only('sent_to_targets__target').values(
                            'sent_to_targets__target').annotate(
                                count=Count('pk')).values('count')),
                0)).order_by('count_sent')[:customer.targets_once]
        # keep only targets whose configured weekday is tomorrow
        tomorrow_targets = (target for target in all_targets
                            if target.weekday == str(tomorrow))
        for target in tomorrow_targets:
            schedule, _ = CrontabSchedule.objects.get_or_create(
                minute=target.email_time.minute,
                hour=target.email_time.hour,
                day_of_week=target.weekday,
                day_of_month='*',
                month_of_year='*',
                timezone=settings.CELERY_TIMEZONE)
            new_task = PeriodicTask(
                crontab=schedule,
                name='Send Bazinga to {}'.format(target.email),
                task='main.tasks.send_email_task',
                args=[target.id, target.planned_baz],
                one_off=True,
            )
            periodic_tasks.append(new_task)
            count_tasks += 1
    # single bulk insert; ignore_conflicts skips duplicate task names
    PeriodicTask.objects.bulk_create(periodic_tasks, ignore_conflicts=True)
    self.stdout.write(
        self.style.SUCCESS(
            'Created tasks for {} users.'.format(count_tasks)))
def save_announcement_signal(sender, instance, **kwargs):
    """
    After an announcement is saved, check if it's a draft and that it should
    auto-publish the results. If it should, then check if there is already
    a beat task scheduled and replace it, or create a new schedule.
    Otherwise, remove any leftover task for this announcement.
    """
    # task name is keyed on the announcement id so replace/delete can find it
    task_name = "Autopublication task for announcement #{}".format(instance.id)
    if instance.draft and instance.auto_publish:
        reload_maybe()  # djconfig for "AI" user sending_user, below
        sending_user, _ = User.objects.get_or_create(
            pk=config.hs_hackerspace_ai,
            defaults={
                'username': "******",
                'password': "******",
            },
        )
        # one-shot schedule at the announcement's release time
        schedule, _ = ClockedSchedule.objects.get_or_create(
            clocked_time=instance.datetime_released)
        # PeriodicTask doesn't have an update_or_create method for some reason, so do it long way
        # https://github.com/celery/django-celery-beat/issues/106
        defaults = {
            'clocked': schedule,
            'task': 'announcements.tasks.publish_announcement',
            'queue': 'default',
            'kwargs': json.dumps({  # beat needs json serializable args, so make sure they are
                'user_id': sending_user.id,
                'announcement_id': instance.id,
                'absolute_url': instance.get_absolute_url(),
            }),
            'one_off': True,
            'enabled': True,
        }
        try:
            # replace an existing task in place
            task = PeriodicTask.objects.get(name=task_name)
            for key, value in defaults.items():
                setattr(task, key, value)
            task.save()
        except PeriodicTask.DoesNotExist:
            # no task yet: create it with the same field values
            new_values = {'name': task_name}
            new_values.update(defaults)
            task = PeriodicTask(**new_values)
            task.save()
    else:
        # There shouldn't be a task so delete if it exists
        try:
            task = PeriodicTask.objects.get(name=task_name)
            task.delete()
        except PeriodicTask.DoesNotExist:
            pass
def test_validate_unique_raises_for_multiple_schedules(self):
    """Each pair of set schedule types must fail with one error per field."""
    schedule_kinds = [('crontab', CrontabSchedule()),
                      ('interval', IntervalSchedule()),
                      ('solar', SolarSchedule()),
                      ('clocked', ClockedSchedule())]
    expected_error_msg = (
        'Only one of clocked, interval, crontab, or solar '
        'must be set')
    for i, pair in enumerate(combinations(schedule_kinds, 2)):
        chosen = dict(pair)
        task_name = 'task{}'.format(i)
        with self.assertRaises(ValidationError) as cm:
            PeriodicTask(name=task_name, **chosen).validate_unique()
        errors = cm.exception.args[0]
        # one error entry per offending schedule field
        self.assertEqual(errors.keys(), chosen.keys())
        for message_list in errors.values():
            self.assertEqual(message_list, [expected_error_msg])
def task_create_or_update(self):
    """Create the robots.txt parsing task on first call, refresh it afterwards."""
    payload = json.dumps({'url': self.url})
    task = self.task
    if task is None:
        task = PeriodicTask(
            interval=self.schedule,
            name=f'{self.brand.name}.robots.txt',
            task='parsing.tasks.robots_txt',
            kwargs=payload,
            enabled=self.enabled,
        )
    else:
        task.interval = self.schedule
        task.kwargs = payload
        task.enabled = self.enabled
    task.save()
    return task
def task_create_or_update(self):
    """Create the sitemap parsing task on first call, refresh it afterwards."""
    payload = json.dumps({'url': self.url})
    task = self.task
    if task is None:
        # new tasks start on a shared once-a-day interval
        daily, _ = IntervalSchedule.objects.get_or_create(
            every=1,
            period=IntervalSchedule.DAYS,
        )
        task = PeriodicTask(
            interval=daily,
            name=f'{self.site.brand.name}.sitemap.{self.url}',
            task='parsing.tasks.sitemap',
            kwargs=payload,
            enabled=self.enabled,
        )
    else:
        # NOTE(review): updates use self.schedule while creation uses the
        # daily interval -- confirm this asymmetry is intended
        task.interval = self.schedule
        task.kwargs = payload
        task.enabled = self.enabled
    task.save()
    return task
def __create_or_update_task_object(self):
    """Sync the badge's refresh PeriodicTask with its current refresh interval."""
    task = self.periodic_task
    if task is None:
        task = PeriodicTask(
            task='badgeupdater.server.tasks.update_badge_task',
            kwargs=json.dumps({'badge_id': self.id}))
    # interval is derived from refresh_interval, rounded to whole seconds
    seconds = round(self.refresh_interval.total_seconds())
    schedule, _ = IntervalSchedule.objects.get_or_create(
        every=seconds,
        period=IntervalSchedule.SECONDS)
    task.interval = schedule
    task.name = f'Update {self.id} badge'
    task.save()
    self.periodic_task = task
def create_tweet_task(post_id):
    """
    Create a django celery beat PeriodicTask with ClockedSchedule for each post.
    """
    post = Post.objects.get(pk=post_id)
    # one-shot schedule at the post's publish time
    schedule = ClockedSchedule(clocked_time=post.publishDateTime)
    schedule.save()
    periodic_task = PeriodicTask(
        name="PublishTweet{}".format(str(post_id)),
        task="socials.tasks.publish_tweet_job",
        clocked=schedule,
        kwargs=json.dumps({"post_id": str(post_id)}),
    )
    periodic_task.enabled = True
    periodic_task.one_off = True
    periodic_task.save()
    # remember the task on the post so it can be managed later
    post.publishTask = periodic_task
    post.save()
def setup_scheduler(self, app):
    """Attach the app, expose one conf-based beat entry, and build its model twin."""
    self.app = app
    self.entry_name, entry = self.create_conf_entry()
    self.app.conf.beat_schedule = {self.entry_name: entry}
    self.m1 = PeriodicTask(name=self.entry_name)