Example #1
async def test_defer_job_violate_queueing_lock(pg_job_store):
    await pg_job_store.defer_job(
        jobs.Job(
            id=1,
            queue="queue_a",
            task_name="task_1",
            lock="lock_1",
            queueing_lock="queueing_lock",
            task_kwargs={"a": "b"},
        )
    )
    with pytest.raises(exceptions.AlreadyEnqueued) as excinfo:
        await pg_job_store.defer_job(
            jobs.Job(
                id=2,
                queue="queue_a",
                task_name="task_2",
                lock="lock_2",
                queueing_lock="queueing_lock",
                task_kwargs={"c": "d"},
            )
        )
    assert isinstance(excinfo.value.__cause__, psycopg2.errors.UniqueViolation)
    assert (
        excinfo.value.__cause__.diag.constraint_name
        == "procrastinate_jobs_queueing_lock_idx"
    )
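AlreadyEnqueued is raised here because the second job reuses the queueing lock of a job that is still waiting, which violates the unique index procrastinate_jobs_queueing_lock_idx. A minimal application-side sketch of handling it (the task object my_task and the lock value are hypothetical; configure() returns the same kind of JobDeferrer used in the examples below):

from procrastinate import exceptions

async def defer_once():
    try:
        # Reusing a queueing lock while an equivalent job is still queued
        # raises AlreadyEnqueued instead of creating a duplicate.
        await my_task.configure(queueing_lock="nightly-report").defer_async()
    except exceptions.AlreadyEnqueued:
        pass  # an equivalent job is already waiting; nothing to do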
Example #2
async def setup(pg_job_store):
    await pg_job_store.defer_job(
        jobs.Job(
            queue="q1",
            lock="lock1",
            queueing_lock="queueing_lock1",
            task_name="task_foo",
            task_kwargs={"key": "a"},
        ))

    j2 = jobs.Job(
        queue="q1",
        lock="lock2",
        queueing_lock="queueing_lock2",
        task_name="task_bar",
        task_kwargs={"key": "b"},
    )
    j2 = attr.evolve(j2, id=await pg_job_store.defer_job(j2))
    await pg_job_store.finish_job(j2, jobs.Status.FAILED)

    j3 = jobs.Job(
        queue="q2",
        lock="lock3",
        queueing_lock="queueing_lock3",
        task_name="task_foo",
        task_kwargs={"key": "c"},
    )
    j3 = attr.evolve(j3, id=await pg_job_store.defer_job(j3))
    await pg_job_store.finish_job(j3, jobs.Status.SUCCEEDED)
Example #3
async def test_delete_old_jobs_multiple_jobs(get_all, pg_job_store):
    await pg_job_store.defer_job(
        jobs.Job(
            id=0,
            queue="queue_a",
            task_name="task_1",
            lock="lock_1",
            task_kwargs={"a": "b"},
        ))
    await pg_job_store.defer_job(
        jobs.Job(
            id=0,
            queue="queue_b",
            task_name="task_2",
            lock="lock_2",
            task_kwargs={"a": "b"},
        ))

    # We start both jobs
    job_a = await pg_job_store.fetch_job(queues=["queue_a"])
    job_b = await pg_job_store.fetch_job(queues=["queue_b"])
    # We finish both jobs
    await pg_job_store.finish_job(job_a, status=jobs.Status.SUCCEEDED)
    await pg_job_store.finish_job(job_b, status=jobs.Status.SUCCEEDED)
    # We back date the events for job_a
    await pg_job_store.execute_query(
        f"UPDATE procrastinate_events SET at=at - INTERVAL '2 hours'"
        f"WHERE job_id={job_a.id}")

    # Only job_a is deleted
    await pg_job_store.delete_old_jobs(nb_hours=2)
    rows = await get_all("procrastinate_jobs", "id")
    assert len(rows) == 1
    assert rows[0]["id"] == job_b.id
Example #4
async def test_run_job_pass_context(app):
    result = []

    @app.task(queue="yay", name="job", pass_context=True)
    def task_func(test_context, a):
        result.extend([test_context, a])

    job = jobs.Job(
        id=16,
        task_kwargs={"a": 1},
        lock="sherlock",
        queueing_lock="houba",
        task_name="job",
        queue="yay",
    )
    test_worker = worker.Worker(app,
                                queues=["yay"],
                                name="my_worker",
                                additional_context={"foo": "bar"})
    context = test_worker.context_for_worker(worker_id=3)

    await test_worker.run_job(job=job, worker_id=3)

    context = context.evolve(task=task_func)

    assert result == [
        context,
        1,
    ]
Example #5
async def test_delete_old_job_filter_on_end_date(get_all, pg_job_store, pg_connector):
    await pg_job_store.defer_job(
        jobs.Job(
            id=0,
            queue="queue_a",
            task_name="task_1",
            lock="lock_1",
            queueing_lock="queueing_lock_1",
            task_kwargs={"a": "b"},
        )
    )
    # We start the job
    job = await pg_job_store.fetch_job(queues=["queue_a"])
    # We finish the job
    await pg_job_store.finish_job(job, status=jobs.Status.SUCCEEDED)
    # We back date only the start event
    await pg_connector.execute_query(
        f"UPDATE procrastinate_events SET at=at - INTERVAL '2 hours'"
        f"WHERE job_id={job.id} AND TYPE='started'"
    )

    # Job is not deleted since it finished recently
    await pg_job_store.delete_old_jobs(nb_hours=2)
    rows = await get_all("procrastinate_jobs", "id")
    assert len(rows) == 1
Example #6
async def test_delete_old_jobs_job_is_not_finished(get_all, pg_job_store, pg_connector):
    await pg_job_store.defer_job(
        jobs.Job(
            id=0,
            queue="queue_a",
            task_name="task_1",
            lock="lock_1",
            queueing_lock="queueing_lock_1",
            task_kwargs={"a": "b"},
        )
    )

    # No started job
    await pg_job_store.delete_old_jobs(nb_hours=0)
    assert len(await get_all("procrastinate_jobs", "id")) == 1

    # We start a job
    job = await pg_job_store.fetch_job(queues=["queue_a"])
    # We back date the started event
    await pg_connector.execute_query(
        f"UPDATE procrastinate_events SET at=at - INTERVAL '2 hours'"
        f"WHERE job_id={job.id}"
    )

    # The job is not finished so it's not deleted
    await pg_job_store.delete_old_jobs(nb_hours=0)
    assert len(await get_all("procrastinate_jobs", "id")) == 1
Example #7
async def test_run_job_pass_context(app):
    result = []

    @app.task(queue="yay", name="job", pass_context=True)
    def task_func(test_context, a):
        result.extend([test_context, a])

    job = jobs.Job(
        id=16,
        task_kwargs={"a": 1},
        lock="sherlock",
        queueing_lock="houba",
        task_name="job",
        queue="yay",
    )
    test_worker = worker.Worker(app, queues=["yay"], name="my_worker")
    context = job_context.JobContext(
        worker_name="my_worker",
        worker_id=3,
        worker_queues=["yay"],
        job=job,
        task=task_func,
    )
    test_worker.current_contexts[3] = context
    await test_worker.run_job(job=job, worker_id=3)

    assert result == [
        context,
        1,
    ]
Example #8
async def test_job_deferrer_defer_async(job_store, connector):

    job = jobs.Job(
        queue="marsupilami",
        lock="sher",
        queueing_lock="houba",
        task_name="mytask",
        task_kwargs={"a": "b"},
    )

    deferrer = jobs.JobDeferrer(job=job, job_store=job_store)
    id = await deferrer.defer_async(c=3)

    assert id == 1

    assert connector.jobs == {
        1: {
            "args": {"a": "b", "c": 3},
            "attempts": 0,
            "id": 1,
            "lock": "sher",
            "queueing_lock": "houba",
            "queue_name": "marsupilami",
            "scheduled_at": None,
            "status": "todo",
            "task_name": "mytask",
        }
    }
Example #9
def configure_task(
    *,
    name: str,
    job_store: store.JobStore,
    lock: Optional[str] = None,
    queueing_lock: Optional[str] = None,
    task_kwargs: Optional[types.JSONDict] = None,
    schedule_at: Optional[datetime.datetime] = None,
    schedule_in: Optional[Dict[str, int]] = None,
    queue: str = jobs.DEFAULT_QUEUE,
) -> jobs.JobDeferrer:
    if schedule_at and schedule_in is not None:
        raise ValueError("Cannot set both schedule_at and schedule_in")

    if schedule_in is not None:
        schedule_at = utils.utcnow() + datetime.timedelta(**schedule_in)

    task_kwargs = task_kwargs or {}
    return jobs.JobDeferrer(
        job=jobs.Job(
            id=None,
            lock=lock,
            queueing_lock=queueing_lock,
            task_name=name,
            queue=queue,
            task_kwargs=task_kwargs,
            scheduled_at=schedule_at,
        ),
        job_store=job_store,
    )
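A hypothetical direct call, assuming an existing job_store; note how schedule_in is turned into an absolute scheduled_at before the JobDeferrer is built:

deferrer = configure_task(
    name="send_report",           # hypothetical task name
    job_store=job_store,
    queueing_lock="send_report",
    schedule_in={"hours": 1},     # becomes scheduled_at = utils.utcnow() + 1 hour
)
job_id = await deferrer.defer_async(recipient="admin")  # merged into task_kwargs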
Example #10
async def test_defer_job(pg_job_store, get_all):
    queue = "marsupilami"
    job = jobs.Job(
        id=0,
        queue=queue,
        task_name="bob",
        lock="sher",
        queueing_lock="houba",
        task_kwargs={"a": 1, "b": 2},
    )
    pk = await pg_job_store.defer_job(job=job)

    result = await get_all(
        "procrastinate_jobs",
        "id",
        "args",
        "status",
        "lock",
        "queueing_lock",
        "task_name",
    )
    assert result == [
        {
            "id": pk,
            "args": {"a": 1, "b": 2},
            "status": "todo",
            "lock": "sher",
            "queueing_lock": "houba",
            "task_name": "bob",
        }
    ]
Example #11
async def test_get_stalled_jobs(get_all, pg_job_store):
    await pg_job_store.defer_job(
        jobs.Job(
            id=0,
            queue="queue_a",
            task_name="task_1",
            lock="lock_1",
            task_kwargs={"a": "b"},
        ))
    job_id = (await get_all("procrastinate_jobs", "id"))[0]["id"]

    # No started job
    assert await pg_job_store.get_stalled_jobs(nb_seconds=3600) == []

    # We start a job and fake its `started_at`
    job = await pg_job_store.fetch_job(queues=["queue_a"])
    await pg_job_store.execute_query(
        f"UPDATE procrastinate_jobs SET started_at=NOW() - INTERVAL '30 minutes' "
        f"WHERE id={job_id}")

    # Nb_seconds parameter
    assert await pg_job_store.get_stalled_jobs(nb_seconds=3600) == []
    assert await pg_job_store.get_stalled_jobs(nb_seconds=1800) == [job]

    # Queue parameter
    assert await pg_job_store.get_stalled_jobs(nb_seconds=1800,
                                               queue="queue_a") == [job]
    assert await pg_job_store.get_stalled_jobs(nb_seconds=1800,
                                               queue="queue_b") == []
    # Task name parameter
    assert await pg_job_store.get_stalled_jobs(nb_seconds=1800,
                                               task_name="task_1") == [job]
    assert (await pg_job_store.get_stalled_jobs(nb_seconds=1800,
                                                task_name="task_2") == [])
Example #12
async def test_run_job_log_result(caplog, app, job_store):
    caplog.set_level("INFO")

    result = []

    def task_func(a, b):  # pylint: disable=unused-argument
        s = a + b
        result.append(s)
        return s

    task = tasks.Task(task_func, app=app, queue="yay", name="job")

    app.tasks = {"task_func": task}

    job = jobs.Job(
        id=16,
        task_kwargs={
            "a": 9,
            "b": 3
        },
        lock="sherlock",
        task_name="task_func",
        queue="yay",
    )
    test_worker = worker.Worker(app, queues=["yay"])
    await test_worker.run_job(job)

    assert result == [12]

    records = [
        record
        for record in caplog.records
        if getattr(record, "action", None) == "job_success"
    ]
    assert len(records) == 1
    record = records[0]
    assert record.result == 12
Example #13
def configure_task(
    *,
    name: str,
    job_store: store.BaseJobStore,
    lock: Optional[str] = None,
    task_kwargs: Optional[types.JSONDict] = None,
    schedule_at: Optional[datetime.datetime] = None,
    schedule_in: Optional[Dict[str, int]] = None,
    queue: str = jobs.DEFAULT_QUEUE,
) -> jobs.JobDeferrer:
    if schedule_at and schedule_in is not None:
        raise ValueError("Cannot set both schedule_at and schedule_in")

    if schedule_in is not None:
        schedule_at = pendulum.now("UTC").add(**schedule_in)

    lock = lock or str(uuid.uuid4())
    task_kwargs = task_kwargs or {}
    return jobs.JobDeferrer(
        job=jobs.Job(
            id=None,
            lock=lock,
            task_name=name,
            queue=queue,
            task_kwargs=task_kwargs,
            scheduled_at=schedule_at,
        ),
        job_store=job_store,
    )
Example #14
async def test_finish_job(get_all, pg_job_store):
    await pg_job_store.defer_job(
        jobs.Job(
            id=0,
            queue="queue_a",
            task_name="task_1",
            lock="lock_1",
            task_kwargs={"a": "b"},
        ))
    job = await pg_job_store.fetch_job(queues=["queue_a"])

    assert await get_all("procrastinate_jobs", "status") == [{
        "status": "doing"
    }]
    started_at = (await get_all("procrastinate_jobs",
                                "started_at"))[0]["started_at"]
    assert started_at.date() == datetime.datetime.utcnow().date()
    assert await get_all("procrastinate_jobs", "attempts") == [{"attempts": 0}]

    await pg_job_store.finish_job(job=job, status=jobs.Status.SUCCEEDED)

    expected = [{
        "status": "succeeded",
        "started_at": started_at,
        "attempts": 1
    }]
    assert (await get_all("procrastinate_jobs", "status", "started_at",
                          "attempts") == expected)
Example #15
async def test_delete_old_jobs_parameters(get_all, pg_job_store, status,
                                          nb_hours, queue, include_error,
                                          should_delete):
    await pg_job_store.defer_job(
        jobs.Job(
            id=0,
            queue="queue_a",
            task_name="task_1",
            lock="lock_1",
            task_kwargs={"a": "b"},
        ))

    # We start a job and fake its `started_at`
    job = await pg_job_store.fetch_job(queues=["queue_a"])
    # We finish the job
    await pg_job_store.finish_job(job, status=status)
    # We back date its events
    await pg_job_store.execute_query(
        f"UPDATE procrastinate_events SET at=at - INTERVAL '2 hours'"
        f"WHERE job_id={job.id}")

    await pg_job_store.delete_old_jobs(nb_hours=nb_hours,
                                       queue=queue,
                                       include_error=include_error)
    nb_jobs = len(await get_all("procrastinate_jobs", "id"))
    if should_delete:
        assert nb_jobs == 0
    else:
        assert nb_jobs == 1
Example #16
async def defer_job():
    await asyncio.sleep(0.5)
    await pg_job_store.defer_job(
        jobs.Job(id=0, queue="yay", task_name="oh", lock="sher", task_kwargs={})
    )
Example #17
def test_job_scheduled_at_naive(job_store):
    with pytest.raises(ValueError):
        jobs.Job(
            id=12,
            queue="marsupilami",
            lock="sher",
            task_name="mytask",
            task_kwargs={"a": "b"},
            scheduled_at=pendulum.naive(2000, 1, 1),
        )
Example #18
def factory(**kwargs):
    defaults = {
        "id": next(serial),
        "task_name": f"task_{random_str()}",
        "task_kwargs": {},
        "lock": str(uuid.uuid4()),
        "queueing_lock": None,
        "queue": f"queue_{random_str()}",
    }
    final_kwargs = defaults.copy()
    final_kwargs.update(kwargs)
    return jobs.Job(**final_kwargs)
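Typical use of such a factory in a test (the job_factory fixture name is an assumption standing for whatever fixture exposes factory()): fields that are not passed explicitly fall back to the randomized defaults, so each test only spells out what it actually checks.

def test_job_queue_is_kept(job_factory):
    job = job_factory(queue="import", task_kwargs={"path": "/data/in.csv"})
    assert job.queue == "import"
    assert job.task_kwargs == {"path": "/data/in.csv"}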
Example #19
async def test_run_job_not_found(app, job_store):
    job = jobs.Job(
        id=16,
        task_kwargs={
            "a": 9,
            "b": 3
        },
        lock="sherlock",
        task_name="job",
        queue="yay",
    )
    test_worker = worker.Worker(app, queues=["yay"])
    with pytest.raises(exceptions.TaskNotFound):
        await test_worker.run_job(job)
Example #20
async def test_get_job_no_result(pg_job_store, job):
    # Add a first started job
    await pg_job_store.defer_job(
        jobs.Job(
            id=1,
            queue="queue_a",
            task_name="task_1",
            lock="lock_1",
            task_kwargs={"a": "b"},
        ))
    await pg_job_store.fetch_job(queues=None)

    # Now add the job we're testing
    await pg_job_store.defer_job(job)

    assert await pg_job_store.fetch_job(queues=["queue_a"]) is None
Example #21
async def test_finish_job_retry(get_all, pg_job_store):
    await pg_job_store.defer_job(
        jobs.Job(
            id=0,
            queue="queue_a",
            task_name="task_1",
            lock="lock_1",
            task_kwargs={"a": "b"},
        ))
    job1 = await pg_job_store.fetch_job(queues=None)
    await pg_job_store.finish_job(job=job1, status=jobs.Status.TODO)

    job2 = await pg_job_store.fetch_job(queues=None)

    assert job2.id == job1.id
    assert job2.attempts == job1.attempts + 1
Example #22
async def test_run_job(app, job_store):
    result = []

    @app.task(queue="yay", name="task_func")
    def task_func(a, b):
        result.append(a + b)

    job = jobs.Job(
        id=16,
        task_kwargs={
            "a": 9,
            "b": 3
        },
        lock="sherlock",
        task_name="task_func",
        queue="yay",
    )
    test_worker = worker.Worker(app, queues=["yay"])
    await test_worker.run_job(job)

    assert result == [12]
Example #23
def test_job_get_context(job_store, scheduled_at, context_scheduled_at):

    job = jobs.Job(
        id=12,
        queue="marsupilami",
        lock="sher",
        task_name="mytask",
        task_kwargs={"a": "b"},
        scheduled_at=scheduled_at,
        attempts=42,
    )

    assert job.get_context() == {
        "id": 12,
        "queue": "marsupilami",
        "lock": "sher",
        "task_name": "mytask",
        "task_kwargs": {"a": "b"},
        "scheduled_at": context_scheduled_at,
        "attempts": 42,
    }
Example #24
async def test_get_stalled_jobs(get_all, pg_job_store, pg_connector):
    await pg_job_store.defer_job(
        jobs.Job(
            id=0,
            queue="queue_a",
            task_name="task_1",
            lock="lock_1",
            queueing_lock="queueing_lock_1",
            task_kwargs={"a": "b"},
        )
    )
    job_id = (await get_all("procrastinate_jobs", "id"))[0]["id"]

    # No started job
    assert await pg_job_store.get_stalled_jobs(nb_seconds=3600) == []

    # We start a job and fake its `started` state in the database
    job = await pg_job_store.fetch_job(queues=["queue_a"])
    await pg_connector.execute_query(
        "INSERT INTO procrastinate_events(job_id, type, at) VALUES "
        "(%(job_id)s, 'started', NOW() - INTERVAL '30 minutes')",
        job_id=job_id,
    )

    # Nb_seconds parameter
    assert await pg_job_store.get_stalled_jobs(nb_seconds=3600) == []
    assert await pg_job_store.get_stalled_jobs(nb_seconds=1800) == [job]

    # Queue parameter
    assert await pg_job_store.get_stalled_jobs(nb_seconds=1800, queue="queue_a") == [
        job
    ]
    assert await pg_job_store.get_stalled_jobs(nb_seconds=1800, queue="queue_b") == []
    # Task name parameter
    assert await pg_job_store.get_stalled_jobs(nb_seconds=1800, task_name="task_1") == [
        job
    ]
    assert (
        await pg_job_store.get_stalled_jobs(nb_seconds=1800, task_name="task_2") == []
    )
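The same store methods lend themselves to a periodic maintenance routine. A sketch combining them (the housekeeping function itself is an assumption, not part of the test suite; get_stalled_jobs is shown above, finish_job with Status.TODO in Example #21, delete_old_jobs in Example #15):

async def housekeeping(job_store):
    # Re-queue jobs that have been in the "doing" state for over an hour ...
    for job in await job_store.get_stalled_jobs(nb_seconds=3600):
        await job_store.finish_job(job, status=jobs.Status.TODO)
    # ... and prune jobs (including failed ones) finished more than 3 days ago.
    await job_store.delete_old_jobs(nb_hours=72, include_error=True)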
Example #25
async def test_run_job_retry(app, job_store):
    def job(a, b):  # pylint: disable=unused-argument
        raise ValueError("nope")

    task = tasks.Task(job, app=app, queue="yay", name="job", retry=True)
    task.func = job

    app.tasks = {"job": task}

    job = jobs.Job(
        id=16,
        task_kwargs={
            "a": 9,
            "b": 3
        },
        lock="sherlock",
        task_name="job",
        queue="yay",
    )
    test_worker = worker.Worker(app, queues=["yay"])
    with pytest.raises(exceptions.JobRetry):
        await test_worker.run_job(job)
Example #26
async def test_run_job_concurrency_warning(app, caplog):
    # Running a sync task with concurrency > 1 should raise a warning
    result = []
    caplog.set_level(logging.WARNING)

    @app.task(queue="yay", name="job")
    def task_func(a):
        result.append(a)

    job = jobs.Job(
        id=16,
        task_kwargs={"a": 1},
        lock="sherlock",
        queueing_lock="houba",
        task_name="job",
        queue="yay",
    )
    test_worker = worker.Worker(app, concurrency=2)
    await test_worker.run_job(job=job, worker_id=0)

    assert result == [1]
    assert [
        (r.action, r.levelname) for r in caplog.records
    ] == [("concurrent_sync_task", "WARNING")], caplog.records
Example #27
async def test_run_job_error(app):
    def job(a, b):  # pylint: disable=unused-argument
        raise ValueError("nope")

    task = tasks.Task(job, blueprint=app, queue="yay", name="job")
    task.func = job

    app.tasks = {"job": task}

    job = jobs.Job(
        id=16,
        task_kwargs={
            "a": 9,
            "b": 3
        },
        lock="sherlock",
        queueing_lock="houba",
        task_name="job",
        queue="yay",
    )
    test_worker = worker.Worker(app, queues=["yay"])
    with pytest.raises(exceptions.JobError):
        await test_worker.run_job(job=job, worker_id=3)
Example #28
def test_job_deferrer_defer(job_store):

    job = jobs.Job(
        queue="marsupilami", lock="sher", task_name="mytask", task_kwargs={"a": "b"}
    )

    id = jobs.JobDeferrer(job=job, job_store=job_store).defer(c=3)

    assert id == 1

    assert job_store.jobs == {
        1: {
            "args": {"a": "b", "c": 3},
            "attempts": 0,
            "id": 1,
            "lock": "sher",
            "queue_name": "marsupilami",
            "scheduled_at": None,
            "started_at": None,
            "status": "todo",
            "task_name": "mytask",
        }
    }
Example #29
def test_job_get_context(scheduled_at, context_scheduled_at):

    job = jobs.Job(
        id=12,
        queue="marsupilami",
        lock="sher",
        queueing_lock="houba",
        task_name="mytask",
        task_kwargs={"a": "b"},
        scheduled_at=scheduled_at,
        attempts=42,
    )

    assert job.log_context() == {
        "id": 12,
        "queue": "marsupilami",
        "lock": "sher",
        "queueing_lock": "houba",
        "task_name": "mytask",
        "task_kwargs": {"a": "b"},
        "scheduled_at": context_scheduled_at,
        "attempts": 42,
        "call_string": "mytask[12](a='b')",
    }
Example #30
async def test_finish_job(get_all, pg_job_store):
    await pg_job_store.defer_job(
        jobs.Job(
            id=0,
            queue="queue_a",
            task_name="task_1",
            lock="lock_1",
            queueing_lock="queueing_lock_1",
            task_kwargs={"a": "b"},
        )
    )
    job = await pg_job_store.fetch_job(queues=["queue_a"])

    assert await get_all("procrastinate_jobs", "status") == [{"status": "doing"}]
    events = await get_all("procrastinate_events", "type", "at")
    events_started = list(filter(lambda e: e["type"] == "started", events))
    assert len(events_started) == 1
    started_at = events_started[0]["at"]
    assert started_at.date() == datetime.datetime.utcnow().date()
    assert await get_all("procrastinate_jobs", "attempts") == [{"attempts": 0}]

    await pg_job_store.finish_job(job=job, status=jobs.Status.SUCCEEDED)
    expected = [{"status": "succeeded", "attempts": 1}]
    assert await get_all("procrastinate_jobs", "status", "attempts") == expected