def test_detail_job_error(foss_server: str, foss: Fossology):
    """detail_job raises FossologyApiError on a 404, both with and without wait."""
    job_id = secrets.randbelow(1000)
    # Register one mocked 404 response per expected API call (two calls below).
    for _ in range(2):
        responses.add(
            responses.GET, f"{foss_server}/api/v1/jobs/{job_id}", status=404
        )
    expected_msg = f"Error while getting details for job {job_id}"
    with pytest.raises(FossologyApiError) as err:
        foss.detail_job(job_id, wait=True)
    assert expected_msg in str(err.value)
    with pytest.raises(FossologyApiError) as err:
        foss.detail_job(job_id)
    assert expected_msg in str(err.value)
def test_schedule_jobs(foss: Fossology, upload: Upload, foss_schedule_agents: Dict):
    """Schedule agent jobs on an upload; verify listing, job detail, str() and pagination."""
    scheduled = foss.schedule_jobs(foss.rootFolder, upload, foss_schedule_agents)
    assert scheduled.name == upload.uploadname

    jobs = foss.list_jobs(upload=upload)
    assert len(jobs) == 2

    # Wait (up to 30s) for the second queued job to finish before checking it.
    job = foss.detail_job(jobs[1].id, wait=True, timeout=30)
    assert job.status == "Completed"
    expected_repr = (
        f"Job '{job.name}' ({job.id}) queued on {job.queueDate} "
        f"(Status: {job.status} ETA: {job.eta})"
    )
    assert expected_repr in str(job)

    # Use pagination
    paged = foss.list_jobs(upload=upload, page_size=1, page=2)
    assert len(paged) == 1
    assert paged[0].id == job.id