def add_job(self, job: AppSchedulerJob):
    """
    Persist a new APScheduler job to the database.

    :param job: The APScheduler job to store.
    :return: The newly created ``DjangoJob`` instance.
    :raises ConflictingIdError: If a job with the same ID already exists.
    """
    with transaction.atomic():
        # Serialize the job's state and normalize its run time to Django's
        # internal datetime representation before persisting.
        serialized_state = pickle.dumps(job.__getstate__(), self.pickle_protocol)
        try:
            return DjangoJob.objects.create(
                id=job.id,
                next_run_time=get_django_internal_datetime(job.next_run_time),
                job_state=serialized_state,
            )
        except IntegrityError:
            # Primary key collision -> a job with this ID is already stored.
            raise ConflictingIdError(job.id)
def update_job(self, job: AppSchedulerJob):
    """
    Overwrite the stored state of an existing job.

    :param job: The APScheduler job whose persisted state should be refreshed.
    :raises JobLookupError: If no stored job with a matching ID exists.
    """
    # Acquire lock for update
    with transaction.atomic():
        try:
            stored = DjangoJob.objects.get(id=job.id)
        except DjangoJob.DoesNotExist:
            raise JobLookupError(job.id)

        stored.next_run_time = get_django_internal_datetime(job.next_run_time)
        stored.job_state = pickle.dumps(job.__getstate__(), self.pickle_protocol)
        stored.save()
def atomic_update_or_create( cls, lock, job_id: str, run_time: datetime, status: str, exception: str = None, traceback: str = None, ) -> "DjangoJobExecution": """ Uses an APScheduler lock to ensures that only one database entry can be created / updated at a time. This keeps django_apscheduler in sync with APScheduler and maintains a 1:1 mapping between APScheduler events that are triggered and the corresponding DjangoJobExecution model instances that are persisted to the database. :param lock: The lock to use when updating the database - probably obtained by calling _scheduler._create_lock() :param job_id: The ID to the APScheduler job that this job execution is for. :param run_time: The scheduler runtime for this job execution. :param status: The new status for ths job execution. :param exception: Details of any exceptions that need to be logged. :param traceback: Traceback of any exceptions that occurred while executing the job. :return: The ID of the newly created or updated DjangoJobExecution. """ # Ensure that only one update / created can be processed at a time, staying in sync with corresponding # scheduler. with lock: # Convert all datetimes to internal Django format before doing calculations and persisting in the database. run_time = get_django_internal_datetime(run_time) finished = get_django_internal_datetime(timezone.now()) duration = (finished - run_time).total_seconds() finished = finished.timestamp() try: with transaction.atomic(): job_execution = DjangoJobExecution.objects.select_for_update( of=("self",) ).get(job_id=job_id, run_time=run_time) if status == DjangoJobExecution.SENT: # Ignore 'submission' events for existing job executions. APScheduler does not appear to # guarantee the order in which events are received, and it is not unusual to receive an # `executed` before the corresponding `submitted` event. We just discard `submitted` events # that are received after the job has already been executed. 
# # If there are any more instances like this then we probably want to implement a full blown # state machine using something like `pytransitions` # See https://github.com/pytransitions/transitions return job_execution job_execution.finished = finished job_execution.duration = duration job_execution.status = status if exception: job_execution.exception = exception if traceback: job_execution.traceback = traceback job_execution.save() except DjangoJobExecution.DoesNotExist: # Execution was not created by a 'submit' previously - do so now if status == DjangoJobExecution.SENT: # Don't log durations until after job has been submitted for execution finished = None duration = None job_execution = DjangoJobExecution.objects.create( job_id=job_id, run_time=run_time, status=status, duration=duration, finished=finished, exception=exception, traceback=traceback, ) return job_execution
def get_due_jobs(self, now) -> List[AppSchedulerJob]:
    """
    Return all stored jobs whose next run time is at or before ``now``.

    :param now: The reference datetime to compare job run times against.
    :return: The list of due jobs.
    """
    cutoff = get_django_internal_datetime(now)
    return self._get_jobs(next_run_time__lte=cutoff)
def test_get_django_internal_datetime_makes_aware_if_django_timezone_support_enabled(
    settings,
):
    """A naive datetime should come back timezone-aware when USE_TZ is on."""
    settings.USE_TZ = True

    result = util.get_django_internal_datetime(datetime.now())

    assert timezone.is_aware(result)
def test_get_django_internal_datetime_makes_naive_if_django_timzone_support_disabled(
    settings,
):
    """An aware (UTC) datetime should come back naive when USE_TZ is off."""
    settings.USE_TZ = False

    result = util.get_django_internal_datetime(datetime.now(tz=pytz.utc))

    assert timezone.is_naive(result)