Example #1
    def _encode_job(self, job: Job):
        job_state = job.__getstate__()
        job_state["kwargs"]["config"] = None
        job_state["kwargs"]["bot"] = None
        # new_kwargs = job_state["kwargs"]
        # new_kwargs["config"] = None
        # new_kwargs["bot"] = None
        # job_state["kwargs"] = new_kwargs
        encoded = base64.b64encode(
            pickle.dumps(job_state, self.pickle_protocol))
        out = {
            "_id": job.id,
            "next_run_time": datetime_to_utc_timestamp(job.next_run_time),
            "job_state": encoded.decode("ascii"),
        }
        job_state["kwargs"]["config"] = self.config
        job_state["kwargs"]["bot"] = self.bot
        # new_kwargs = job_state["kwargs"]
        # new_kwargs["config"] = self.config
        # new_kwargs["bot"] = self.bot
        # job_state["kwargs"] = new_kwargs
        # log.debug(f"Encoding job id: {job.id}\n"
        #           f"Encoded as: {out}")

        return out
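The encode step above only makes sense together with a decode step that re-injects the stripped objects when the job is loaded back from storage. Below is a minimal sketch of such a counterpart, assuming the same document layout ("_id", "next_run_time", "job_state") and the same self.config / self.bot attributes; the method name _decode_job and the Job.__new__/__setstate__ pattern follow the usual APScheduler job store convention and are not part of the original snippet.

    def _decode_job(self, document):
        # Unpickle the stored state and put the live objects back in place.
        job_state = pickle.loads(base64.b64decode(document["job_state"]))
        job_state["kwargs"]["config"] = self.config
        job_state["kwargs"]["bot"] = self.bot
        job = Job.__new__(Job)
        job.__setstate__(job_state)
        return job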
Example #2
    def add_job(self, job: AppSchedulerJob):
        with transaction.atomic():
            try:
                return DjangoJob.objects.create(
                    id=job.id,
                    next_run_time=get_django_internal_datetime(job.next_run_time),
                    job_state=pickle.dumps(job.__getstate__(), self.pickle_protocol),
                )
            except IntegrityError:
                # A job with this id is already stored.
                raise ConflictingIdError(job.id)
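For symmetry, a hedged sketch of the matching lookup is shown below; it assumes the same DjangoJob model and a _reconstitute_job helper that unpickles job_state (the helper name is illustrative, not taken from the source).

    def lookup_job(self, job_id: str):
        try:
            db_job = DjangoJob.objects.get(id=job_id)
        except DjangoJob.DoesNotExist:
            return None
        # Unpickle the stored state back into an APScheduler job.
        return self._reconstitute_job(db_job.job_state)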
Example #3
    def update_job(self, job: AppSchedulerJob):
        # Perform the read-modify-write inside a single transaction
        with transaction.atomic():
            try:
                db_job = DjangoJob.objects.get(id=job.id)

                db_job.next_run_time = get_django_internal_datetime(
                    job.next_run_time)
                db_job.job_state = pickle.dumps(job.__getstate__(),
                                                self.pickle_protocol)

                db_job.save()

            except DjangoJob.DoesNotExist:
                raise JobLookupError(job.id)
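The same pattern extends to deletion. The sketch below assumes the DjangoJob model used above and keeps the snippet's convention of raising JobLookupError for unknown ids; it is illustrative, not part of the original code.

    def remove_job(self, job_id: str):
        deleted, _ = DjangoJob.objects.filter(id=job_id).delete()
        if deleted == 0:
            # No row matched the given id.
            raise JobLookupError(job_id)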
Example #4
    def _encode_job(self, job: Job):
        job_state = job.__getstate__()
        # The first positional arg is a dict that carries the bot/config
        # references; blank them before pickling (they are restored below).
        new_args = list(job_state["args"])
        new_args[0]["config"] = None
        new_args[0]["bot"] = None
        job_state["args"] = tuple(new_args)
        encoded = base64.b64encode(pickle.dumps(job_state, self.pickle_protocol))
        out = {
            "_id": job.id,
            "next_run_time": datetime_to_utc_timestamp(job.next_run_time),
            "job_state": encoded.decode("ascii"),
        }
        # The dict inside args is shared with the live job, so restore the
        # references that were blanked above.
        new_args = list(job_state["args"])
        new_args[0]["config"] = self.config
        new_args[0]["bot"] = self.bot
        job_state["args"] = tuple(new_args)

        return out
Example #5
def _prepare_job(job: APSJob) -> APSJob:
    """
    Erase all unpicklable data from a job that wraps a telegram.ext.Job.

    Args:
        job (:obj:`apscheduler.job.Job`): The job to be processed.
    """
    # Work on a copy of the job: modifying the original would also modify the
    # jobs in the thread pool executor that are currently running or about to
    # run, and they would then receive the wrong argument instead of a
    # CallbackContext.
    prepped_job = APSJob.__new__(APSJob)
    prepped_job.__setstate__(job.__getstate__())
    # Remove the CallbackContext from the job args since it holds references
    # to the dispatcher, which is unpicklable. The CallbackContext is
    # recreated in the _reconstitute_job method.
    if isinstance(job.args[0], CallbackContext):
        tg_job = job.args[0].job
        # APScheduler stores args as a tuple.
        prepped_job.args = (tg_job.name, tg_job.context)
    return prepped_job
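The comments above defer the restore to a _reconstitute_job step. A hedged sketch of that restore follows; the function name, the dispatcher parameter, and the telegram.ext imports are assumptions based on the python-telegram-bot v13 API, not part of the original snippet.

from telegram.ext import CallbackContext, Job as TGJob


def _restore_job(job: APSJob, dispatcher) -> APSJob:
    # Copy the stored job, then swap its (name, context) args back for a
    # freshly built CallbackContext.
    prepped_job = APSJob.__new__(APSJob)
    prepped_job.__setstate__(job.__getstate__())
    name, context = job.args
    tg_job = TGJob(callback=None, context=context, name=name)
    prepped_job.args = (CallbackContext.from_job(tg_job, dispatcher),)
    return prepped_job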
Example #6
class TestJob(object):
    RUNTIME = datetime(2010, 12, 13, 0, 8, 0)

    def setup(self):
        self.trigger = SimpleTrigger(self.RUNTIME)
        self.job = Job(self.trigger, dummyfunc, [], {}, 1, False)

    def test_compute_next_run_time(self):
        self.job.compute_next_run_time(self.RUNTIME - timedelta(microseconds=1))
        eq_(self.job.next_run_time, self.RUNTIME)

        self.job.compute_next_run_time(self.RUNTIME)
        eq_(self.job.next_run_time, self.RUNTIME)

        self.job.compute_next_run_time(self.RUNTIME + timedelta(microseconds=1))
        eq_(self.job.next_run_time, None)

    def test_compute_run_times(self):
        expected_times = [self.RUNTIME + timedelta(seconds=1),
                          self.RUNTIME + timedelta(seconds=2)]
        self.job.trigger = IntervalTrigger(timedelta(seconds=1), self.RUNTIME)
        self.job.compute_next_run_time(expected_times[0])
        eq_(self.job.next_run_time, expected_times[0])

        run_times = self.job.get_run_times(self.RUNTIME)
        eq_(run_times, [])

        run_times = self.job.get_run_times(expected_times[0])
        eq_(run_times, [expected_times[0]])

        run_times = self.job.get_run_times(expected_times[1])
        eq_(run_times, expected_times)

    def test_max_runs(self):
        self.job.max_runs = 1
        self.job.runs += 1
        self.job.compute_next_run_time(self.RUNTIME)
        eq_(self.job.next_run_time, None)

    def test_eq_num(self):
        # Just increasing coverage here
        assert not self.job == 'dummyfunc'

    def test_getstate(self):
        state = self.job.__getstate__()
        eq_(state, dict(trigger=self.trigger,
                        func_ref='testjob:dummyfunc',
                        name='dummyfunc', args=[],
                        kwargs={}, misfire_grace_time=1,
                        coalesce=False, max_runs=None,
                        max_instances=1, runs=0))

    def test_setstate(self):
        trigger = SimpleTrigger('2010-12-14 13:05:00')
        state = dict(trigger=trigger, name='testjob.dummyfunc',
                     func_ref='testjob:dummyfunc',
                     args=[], kwargs={}, misfire_grace_time=2, max_runs=2,
                     coalesce=True, max_instances=2, runs=1)
        self.job.__setstate__(state)
        eq_(self.job.trigger, trigger)
        eq_(self.job.func, dummyfunc)
        eq_(self.job.max_runs, 2)
        eq_(self.job.coalesce, True)
        eq_(self.job.max_instances, 2)
        eq_(self.job.runs, 1)
        assert not hasattr(self.job, 'func_ref')
        assert isinstance(self.job._lock, lock_type)

    def test_jobs_equal(self):
        assert self.job == self.job

        job2 = Job(SimpleTrigger(self.RUNTIME), lambda: None, [], {}, 1, False)
        assert self.job != job2

        job2.id = self.job.id = 123
        eq_(self.job, job2)

        assert self.job != 'bleh'

    def test_instances(self):
        self.job.max_instances = 2
        eq_(self.job.instances, 0)

        self.job.add_instance()
        eq_(self.job.instances, 1)

        self.job.add_instance()
        eq_(self.job.instances, 2)

        assert_raises(MaxInstancesReachedError, self.job.add_instance)

        self.job.remove_instance()
        eq_(self.job.instances, 1)

        self.job.remove_instance()
        eq_(self.job.instances, 0)

        assert_raises(AssertionError, self.job.remove_instance)

    def test_repr(self):
        self.job.compute_next_run_time(self.RUNTIME)
        eq_(repr(self.job),
            "<Job (name=dummyfunc, "
            "trigger=<SimpleTrigger (run_date=datetime.datetime(2010, 12, 13, 0, 8))>)>")
        eq_(str(self.job),
            "dummyfunc (trigger: date[2010-12-13 00:08:00], "
            "next run at: 2010-12-13 00:08:00)")
Example #7
from dateutil.tz import gettz

from apscheduler.job import Job
from apscheduler.triggers import IntervalTrigger
from apscheduler.scripts import HttpScript

t = IntervalTrigger({}, minutes=5, timezone=gettz('Asia/Chongqing'))
s = HttpScript(url='http://baidu.com')

job = Job(t, s)
print(job.__getstate__())
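Since __getstate__ returns a plain dict, the state round-trips through pickle. The sketch below shows that round trip, assuming this customized apscheduler fork (HttpScript and this trigger signature are not part of stock APScheduler) keeps the usual Job.__new__/__setstate__ contract; details may differ.

import pickle

state = job.__getstate__()
blob = pickle.dumps(state, pickle.HIGHEST_PROTOCOL)

# Recreate the job from the pickled state on a bare instance.
restored = Job.__new__(Job)
restored.__setstate__(pickle.loads(blob))
print(restored.__getstate__())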