Example #1
def test_delete_job(authenticated_client):
    pipeline = Pipeline.query.all()[0]
    image = Image.query.all()[0]
    with app.app_context():
        job = Job(pipeline, image, "", "")
        db.session.add(job)
        job.state = 'in_queue'
        db.session.commit()
        job_id = job.id
    result_url = '/pipeline/%s/job/%s' % (job.pipeline_id, job.id)
    rv = authenticated_client.delete(result_url)
    result = json.loads(rv.data.decode('utf-8'))
    assert result.get('error') == 'Cannot delete jobs in queue, try again later'
    assert rv.status_code == 200

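    # a running job that has no container id yet cannot be deleted; the API reports an update error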
    job.state = 'running'
    db.session.add(job)
    db.session.commit()
    rv = authenticated_client.delete(result_url)
    result = json.loads(rv.data.decode('utf-8'))
    assert result.get('error') == "Job didn't update properly, try again later"

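    # once a container id is present the delete goes through; Sender.broadcast is patched so no real message is sent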
    job.container_id = '1'
    db.session.add(job)
    db.session.commit()
    with patch('kabuto.connection.Sender.broadcast'):
        rv = authenticated_client.delete(result_url)

    job = Job.query.filter_by(id=job_id).all()
    assert not job
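
These excerpts rely on fixtures and imports defined elsewhere in the kabuto test suite (`authenticated_client`, `preloaded_client`, `app`, `db`, the `Pipeline`/`Image`/`Job` models, `json`, `os`, and `unittest.mock.patch`). As rough orientation, a fixture like `authenticated_client` could be built along these lines; this is a minimal sketch with an assumed import path and login endpoint, not kabuto's actual conftest:

# Hypothetical sketch of the fixture the excerpts rely on; the import path
# (kabuto.api) and the /login endpoint are assumptions, not kabuto's real layout.
import pytest

from kabuto.api import app  # assumed module path


@pytest.fixture
def authenticated_client():
    app.config['TESTING'] = True
    client = app.test_client()
    # log the test user in before handing the client to the test
    client.post('/login', data={'login': 'test', 'password': 'test'})
    return client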
Example #2
def test_kill_job(authenticated_client):
    pipeline = Pipeline.query.all()[0]
    image = Image.query.all()[0]
    with app.app_context():
        job = Job(pipeline, image, "", "")
        db.session.add(job)
        job.state = 'running'
        job.container_id = '1'
        db.session.commit()
        job_id = job.id
    result_url = '/pipeline/%s/job/%s/kill' % (job.pipeline_id, job.id)
    rv = authenticated_client.get(result_url)
    assert rv.status_code == 200
    result = json.loads(rv.data.decode('utf-8'))
    print(result)
    assert result.get('message') == "Success"
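
Example #1 patches `kabuto.connection.Sender.broadcast` before deleting a job that has a container attached. If the kill endpoint publishes a message in the same way, the identical patch keeps a test from needing a live broker; a sketch reusing that patch target (whether the kill route actually broadcasts is an assumption, and the ids are placeholders):

from unittest.mock import patch


def test_kill_job_offline(authenticated_client):
    # placeholder pipeline/job ids; assumes such a running job already exists
    with patch('kabuto.connection.Sender.broadcast'):
        rv = authenticated_client.get('/pipeline/1/job/1/kill')
    assert rv.status_code == 200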
Example #3
File: test_logs.py Project: adimian/kabuto
def test_logs(preloaded_client):
    ac = preloaded_client
    with app.app_context():
        app.config["JOB_LOGS_DIR"] = os.path.join(ROOT_DIR, "data")

        url = "/execution/1/logs"
        r = ac.get(url)
        data = json.loads(r.data.decode('utf-8'))

        assert data['size'] == 30
        assert data['filename'] == 'job_1_logs.txt'

        r = ac.post(url)
        expected = """Some log line
Another log line"""
        assert r.data.decode('utf-8') == expected

        r = ac.post(url, data={"start_byte": 5, "size": 20})
        expected = """log line
Another log"""
        assert r.data.decode('utf-8') == expected
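
The POST branch of the logs endpoint evidently returns a byte slice of the log file selected by `start_byte` and `size`. A minimal sketch of that slicing with `seek`/`read`, assuming the handler resolves the file from `JOB_LOGS_DIR` and the `job_<id>_logs.txt` naming used above; the `read_log_slice` helper is hypothetical, not part of kabuto:

import os

def read_log_slice(logs_dir, job_id, start_byte=0, size=None):
    """Return `size` bytes of a job's log starting at `start_byte`.

    Hypothetical helper mirroring the behaviour exercised by test_logs;
    the file naming scheme (job_<id>_logs.txt) is taken from the test above.
    """
    path = os.path.join(logs_dir, "job_%s_logs.txt" % job_id)
    with open(path, "rb") as fh:
        fh.seek(start_byte)
        data = fh.read() if size is None else fh.read(size)
    return data.decode("utf-8")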
Example #4
def test_download_result(authenticated_client):
    pipeline = Pipeline.query.all()[0]
    image = Image.query.all()[0]
    with app.app_context():
        job = Job(pipeline, image, "", "")
        db.session.add(job)
        db.session.commit()
        with open(os.path.join(job.results_path, "results.txt"), "w") as fh:
            fh.write("some results")

        result_url = '/pipeline/%s/job/%s?result' % (job.pipeline_id, job.id)
        rv = authenticated_client.get(result_url)
        assert rv.status_code == 200
        result = json.loads(rv.data.decode('utf-8'))
        assert list(result.keys()) == ["error"]

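        # once the job is marked done, the same endpoint returns the results as a zip archive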
        job.state = "done"
        db.session.add(job)
        db.session.commit()
        result_url = '/pipeline/%s/job/%s?result' % (job.pipeline_id, job.id)
        rv = authenticated_client.get(result_url)
        assert rv.status_code == 200
        expected_file = "results.txt"
        zp = zipfile.ZipFile(BytesIO(rv.data))
        il = zp.infolist()
        assert len(il) == 1
        for zf in il:
            assert zf.filename in expected_file

        job.results_path = os.path.join(job.results_path, "does_not_exist")
        db.session.add(job)
        db.session.commit()
        rv = authenticated_client.get(result_url)
        data = json.loads(rv.data.decode('utf-8'))
        assert data.get("error", None)

    rv = authenticated_client.get("/pipeline/%s/job/%s?result" % (0, 999))
    data = json.loads(rv.data.decode('utf-8'))
    assert data.get('error', None)
    assert data['error'] == "Job not found"
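
The success branch above receives the contents of `results_path` packed into a zip archive. One way a Flask view can produce such a response is to build the archive in memory with `zipfile` and `BytesIO` and pass it to `send_file`; a sketch under that assumption (the `zip_results` helper is hypothetical, and how kabuto actually builds the archive is not shown in this excerpt):

import os
import zipfile
from io import BytesIO

from flask import send_file


def zip_results(results_path):
    """Pack every file in `results_path` into an in-memory zip response.

    Hypothetical sketch; the real kabuto handler may differ.
    """
    buf = BytesIO()
    with zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED) as archive:
        for name in os.listdir(results_path):
            archive.write(os.path.join(results_path, name), arcname=name)
    buf.seek(0)
    return send_file(buf, mimetype="application/zip")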
Example #5
def test_update_pipeline(preloaded_client):
    with app.app_context():
        u = User.query.filter_by(login='******').first()
        pipeline = Pipeline("my first pipeline", u)
        image = Image.query.all()[0]
        job1 = Job(pipeline, image, "", "")
        job2 = Job(pipeline, image, "", "")
        job3 = Job(pipeline, image, "", "")
        db.session.add(pipeline)
        db.session.add(job1)
        db.session.add(job2)
        db.session.add(job3)
        db.session.commit()
        # capture ids after the commit so they are guaranteed to be populated
        job1_id = job1.id
        job2_id = job2.id
        job3_id = job3.id
        pipeline_id = pipeline.id

        assert job1.sequence_number == 0
        assert job2.sequence_number == 1
        assert job3.sequence_number == 2

        rv = preloaded_client.put('/pipeline/999')
        data = json.loads(rv.data.decode('utf-8'))
        assert data.get('error')

        # arrange_jobs
        arranged_jobs = [str(job2.id), str(job3.id), str(job1.id)]
        rv = preloaded_client.put('/pipeline/%s' % pipeline_id,
                                  data={
                                      'name': 'my edited pipeline',
                                      'rearrange_jobs': ",".join(arranged_jobs)
                                  })

        pipeline = Pipeline.query.filter_by(id=pipeline_id).first()
        job1 = Job.query.filter_by(id=job1_id).first()
        job2 = Job.query.filter_by(id=job2_id).first()
        job3 = Job.query.filter_by(id=job3_id).first()
        assert pipeline.name == 'my edited pipeline'
        assert len(pipeline.jobs.all()) == 3
        assert job1.sequence_number == 2
        assert job2.sequence_number == 0
        assert job3.sequence_number == 1
        data = json.loads(rv.data.decode('utf-8'))
        assert data.get('rearrange_jobs')
        assert data['rearrange_jobs'] == "Successfully removed jobs"

        # arrange jobs error
        wrong_jobs = [str(job2.id), str(job3.id)]
        rv = preloaded_client.put('/pipeline/%s' % pipeline_id,
                                  data={
                                      'name': 'my edited pipeline',
                                      'rearrange_jobs': ",".join(wrong_jobs)
                                  })
        data = json.loads(rv.data.decode('utf-8'))
        assert data.get('rearrange_jobs')
        error = ("Could not rearrange jobs. rearrange ids are [%s] while" %
                 ", ".join(wrong_jobs))
        assert error in data['rearrange_jobs']

        # remove jobs
        job1 = Job.query.filter_by(id=job1_id).first()
        job3 = Job.query.filter_by(id=job3_id).first()
        rv = preloaded_client.put('/pipeline/%s' % pipeline_id,
                                  data={
                                      'name':
                                      'my edited pipeline',
                                      'remove_jobs':
                                      ",".join([str(job1.id),
                                                str(job3.id)])
                                  })
        pipeline = Pipeline.query.filter_by(id=pipeline_id).first()
        assert len(pipeline.jobs.all()) == 1
        assert pipeline.jobs.all()[0].id == job2_id
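
The `rearrange_jobs` field sent above is a comma-separated list of job ids whose order defines the new `sequence_number` values, and the error branch shows the server rejecting a list that does not cover all of the pipeline's jobs. A minimal sketch of that reordering logic; the `rearrange_jobs` helper below and its error text are illustrative, not kabuto's actual implementation:

def rearrange_jobs(pipeline_jobs, ordered_ids):
    # Hypothetical helper mirroring the behaviour exercised by test_update_pipeline:
    # every job of the pipeline must appear exactly once in ordered_ids.
    jobs_by_id = {str(job.id): job for job in pipeline_jobs}
    if set(ordered_ids) != set(jobs_by_id):
        # the real kabuto message differs; the test only checks its prefix
        raise ValueError("Could not rearrange jobs. rearrange ids are [%s] while "
                         "the pipeline has [%s]" % (", ".join(ordered_ids),
                                                    ", ".join(jobs_by_id)))
    for position, job_id in enumerate(ordered_ids):
        jobs_by_id[job_id].sequence_number = position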