Example #1
    def test_job_permissions_for_archive(self):
        ai = AlgorithmImageFactory(ready=True)
        archive = ArchiveFactory()

        # Fake an image upload via a session
        u = UserFactory()
        s = UploadSessionFactory(creator=u)
        im = ImageFactory()
        s.image_set.set([im])

        archive.images.set([im])
        archive.algorithms.set([ai.algorithm])

        create_algorithm_jobs_for_archive(archive_pks=[archive.pk])

        job = Job.objects.get()

        # The archive editors, users, uploaders, algorithm editors, and job
        # viewers should be able to view the job
        assert get_groups_with_set_perms(job) == {
            archive.editors_group: {"view_job"},
            archive.users_group: {"view_job"},
            archive.uploaders_group: {"view_job"},
            ai.algorithm.editors_group: {"view_job"},
            job.viewers: {"view_job"},
        }
        # No-one should be able to change the job
        assert (
            get_users_with_perms(job, attach_perms=True, with_group_users=False)
            == {}
        )
        # No-one should be in the viewers group
        assert {*job.viewers.user_set.all()} == set()
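
Every example on this page builds its test data through UploadSessionFactory. The factory definition itself is not part of this listing; as a rough sketch, assuming factory_boy's DjangoModelFactory, the RawImageUploadSession model seen in Example #6, and the project's UserFactory, it might look like this:

import factory

from grandchallenge.cases.models import RawImageUploadSession  # assumed path
from tests.factories import UserFactory  # assumed path


class UploadSessionFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = RawImageUploadSession

    # A fresh creator is generated unless one is passed in explicitly,
    # as in UploadSessionFactory(creator=u) above
    creator = factory.SubFactory(UserFactory)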
Example #2
    def test_job_permissions_for_challenge(self):
        ai = AlgorithmImageFactory(ready=True)
        archive = ArchiveFactory()
        evaluation = EvaluationFactory(submission__phase__archive=archive,
                                       submission__algorithm_image=ai)

        # Fake an image upload via a session
        u = UserFactory()
        s = UploadSessionFactory(creator=u)
        im = ImageFactory()
        s.image_set.set([im])

        archive.images.set([im])

        create_algorithm_jobs_for_evaluation(evaluation_pk=evaluation.pk)

        job = Job.objects.get()

        # Only the challenge admins and job viewers should be able to view the
        # job. NOTE: NOT THE ALGORITHM EDITORS, they are the participants
        # in the challenge and should not be able to see the test data
        assert get_groups_with_set_perms(job) == {
            evaluation.submission.phase.challenge.admins_group: {"view_job"},
            job.viewers: {"view_job"},
        }
        # No-one should be able to change the job
        assert (
            get_users_with_perms(job, attach_perms=True, with_group_users=False)
            == {}
        )
        # No-one should be in the viewers group
        assert {*job.viewers.user_set.all()} == set()
Example #3
    def test_job_permissions_for_session(self):
        ai = AlgorithmImageFactory(ready=True)

        u = UserFactory()
        s = UploadSessionFactory(creator=u)
        im = ImageFactory()
        s.image_set.set([im])

        create_algorithm_jobs_for_session(upload_session_pk=s.pk,
                                          algorithm_image_pk=ai.pk)

        job = Job.objects.get()

        # Editors and viewers should be able to view the job
        assert get_groups_with_set_perms(job) == {
            ai.algorithm.editors_group: {"view_job"},
            job.viewers: {"view_job"},
        }
        # The Session Creator should be able to change the job
        assert get_users_with_perms(
            job, attach_perms=True, with_group_users=False
        ) == {u: ["change_job"]}
        # The only member of the viewers group should be the creator
        assert {*job.viewers.user_set.all()} == {u}
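
The permission assertions in Examples #1-#3 are django-guardian object permissions: get_users_with_perms is a guardian shortcut, and get_groups_with_set_perms is presumably a test helper built on top of it. As a minimal sketch, using guardian's assign_perm and a hypothetical helper name, the job-creation task could produce the state asserted in Example #3 like this:

from guardian.shortcuts import assign_perm


def assign_session_job_permissions(*, job, algorithm, creator):
    # The algorithm editors and the job's own viewers group may view the job
    assign_perm("view_job", algorithm.editors_group, job)
    assign_perm("view_job", job.viewers, job)
    # The session creator may change the job and is the sole member of the
    # viewers group, matching the last two assertions above
    assign_perm("change_job", creator, job)
    job.viewers.user_set.add(creator)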
Example #4
def test_linked_task_called_with_session_pk(settings):
    # Override the celery settings
    settings.task_eager_propagates = (True,)
    settings.task_always_eager = (True,)

    called = {}

    @shared_task
    def local_linked_task(*_, **kwargs):
        called.update(**kwargs)

    session = UploadSessionFactory()

    with capture_on_commit_callbacks(execute=True):
        session.process_images(linked_task=local_linked_task.signature())

    assert called == {"upload_session_pk": session.pk}
Example #5
def test_soft_time_limit(_):
    session = UploadSessionFactory()
    session.status = session.REQUEUED
    session.save()
    build_images(upload_session_pk=session.pk)
    session.refresh_from_db()
    assert session.status == session.FAILURE
    assert session.error_message == "Time limit exceeded."
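
The test name and the stripped `_` argument suggest the image-handling step is patched to raise celery's SoftTimeLimitExceeded. A minimal sketch of the error handling inside build_images that would satisfy these assertions (_handle_raw_files is a hypothetical helper):

from celery.exceptions import SoftTimeLimitExceeded


def build_images(*, upload_session_pk):
    session = RawImageUploadSession.objects.get(pk=upload_session_pk)
    try:
        _handle_raw_files(session=session)
    except SoftTimeLimitExceeded:
        # Matches the status and message asserted above
        session.status = session.FAILURE
        session.error_message = "Time limit exceeded."
        session.save()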
Example #6
def test_civ_post_image_valid(kind, rf):
    # setup
    user = UserFactory()
    upload = UploadSessionFactory(status=RawImageUploadSession.PENDING,
                                  creator=user)
    interface = ComponentInterfaceFactory(kind=kind)

    civ = {"interface": interface.slug, "upload_session": upload.api_url}

    # test
    request = rf.get("/foo")
    request.user = user
    serializer = ComponentInterfaceValuePostSerializer(
        data=civ, context={"request": request})

    # verify
    assert serializer.is_valid()
Example #7
def test_linked_task_called_with_session_pk(settings):
    # Override the celery settings
    settings.task_eager_propagates = (True,)
    settings.task_always_eager = (True,)

    called = {}

    @shared_task
    def local_linked_task(*_, **kwargs):
        called.update(**kwargs)

    session = UploadSessionFactory()

    session.process_images()

    assert called == {}

    session.status = session.REQUEUED
    session.save()

    session.process_images(linked_task=local_linked_task)

    assert called == {"upload_session_pk": str(session.pk)}
Example #8
def test_civ_post_upload_permission_validation(kind, rf):
    # setup
    user = UserFactory()
    upload = UploadSessionFactory()
    interface = ComponentInterfaceFactory(kind=kind)

    civ = {"interface": interface.slug, "upload_session": upload.api_url}

    # test
    request = rf.get("/foo")
    request.user = user
    serializer = ComponentInterfaceValuePostSerializer(
        data=civ, context={"request": request})

    # verify
    assert not serializer.is_valid()
    assert ("Invalid hyperlink - Object does not exist"
            in serializer.errors["upload_session"][0])
Example #9
    def test_job_permissions_for_challenge(self):
        ai = AlgorithmImageFactory(ready=True)
        archive = ArchiveFactory()
        evaluation = EvaluationFactory(submission__phase__archive=archive,
                                       submission__algorithm_image=ai)

        # Fake an image upload via a session
        u = UserFactory()
        s = UploadSessionFactory(creator=u)
        im = ImageFactory()
        s.image_set.set([im])

        civ = ComponentInterfaceValueFactory(
            image=im, interface=ai.algorithm.inputs.get())
        archive_item = ArchiveItemFactory(archive=archive)
        with capture_on_commit_callbacks(execute=True):
            archive_item.values.add(civ)

        create_algorithm_jobs_for_evaluation(evaluation_pk=evaluation.pk)

        job = Job.objects.get()

        # Only the challenge admins and job viewers should be able to view the
        # job and logs.
        # NOTE: NOT THE *ALGORITHM* EDITORS, they are the participants
        # in the challenge and should not be able to see the test data
        assert get_groups_with_set_perms(job) == {
            evaluation.submission.phase.challenge.admins_group: {
                "view_job",
                "view_logs",
            },
            job.viewers: {"view_job"},
        }
        # No-one should be able to change the job
        assert (
            get_users_with_perms(job, attach_perms=True, with_group_users=False)
            == {}
        )
        # No-one should be in the viewers group
        assert {*job.viewers.user_set.all()} == set()
Example #10
    def test_job_permissions_for_archive(self):
        ai = AlgorithmImageFactory(ready=True)
        archive = ArchiveFactory()

        # Fake an image upload via a session
        u = UserFactory()
        s = UploadSessionFactory(creator=u)
        im = ImageFactory()
        s.image_set.set([im])

        civ = ComponentInterfaceValueFactory(
            image=im, interface=ai.algorithm.inputs.get())
        archive_item = ArchiveItemFactory(archive=archive)
        with capture_on_commit_callbacks(execute=True):
            archive_item.values.add(civ)

        archive.algorithms.set([ai.algorithm])

        create_algorithm_jobs_for_archive(archive_pks=[archive.pk])

        job = Job.objects.get()

        # The archive editors, users, uploaders, and job
        # viewers should be able to view the job.
        # NOTE: NOT THE ALGORITHM EDITORS, if they need
        # access the job can be shared with them.
        assert get_groups_with_set_perms(job) == {
            archive.editors_group: {"view_job"},
            archive.users_group: {"view_job"},
            archive.uploaders_group: {"view_job"},
            job.viewers: {"view_job"},
        }
        # No-one should be able to change the job
        assert (
            get_users_with_perms(job, attach_perms=True, with_group_users=False)
            == {}
        )
        # No-one should be in the viewers group
        assert {*job.viewers.user_set.all()} == set()
Example #11
def test_get_metrics():
    AlgorithmJobFactory()
    EvaluationFactory()
    SessionFactory()
    s = UploadSessionFactory()
    s.status = s.REQUEUED
    s.save()

    # Note: this is the format expected by CloudWatch;
    # consult the API when changing this
    result = _get_metrics()

    assert result == [
        {
            "Namespace": "testserver/algorithms",
            "MetricData": [
                {"MetricName": "JobsQueued", "Value": 1, "Unit": "Count"},
                {"MetricName": "JobsStarted", "Value": 0, "Unit": "Count"},
                {"MetricName": "JobsReQueued", "Value": 0, "Unit": "Count"},
                {"MetricName": "JobsFailed", "Value": 0, "Unit": "Count"},
                {"MetricName": "JobsSucceeded", "Value": 0, "Unit": "Count"},
                {"MetricName": "JobsCancelled", "Value": 0, "Unit": "Count"},
                {"MetricName": "JobsProvisioning", "Value": 0, "Unit": "Count"},
                {"MetricName": "JobsProvisioned", "Value": 0, "Unit": "Count"},
                {"MetricName": "JobsExecuting", "Value": 0, "Unit": "Count"},
                {"MetricName": "JobsExecuted", "Value": 0, "Unit": "Count"},
                {"MetricName": "JobsParsingOutputs", "Value": 0, "Unit": "Count"},
                {"MetricName": "JobsExecutingAlgorithm", "Value": 0, "Unit": "Count"},
            ],
        },
        {
            "Namespace": "testserver/evaluation",
            "MetricData": [
                {"MetricName": "EvaluationsQueued", "Value": 1, "Unit": "Count"},
                {"MetricName": "EvaluationsStarted", "Value": 0, "Unit": "Count"},
                {"MetricName": "EvaluationsReQueued", "Value": 0, "Unit": "Count"},
                {"MetricName": "EvaluationsFailed", "Value": 0, "Unit": "Count"},
                {"MetricName": "EvaluationsSucceeded", "Value": 0, "Unit": "Count"},
                {"MetricName": "EvaluationsCancelled", "Value": 0, "Unit": "Count"},
                {"MetricName": "EvaluationsProvisioning", "Value": 0, "Unit": "Count"},
                {"MetricName": "EvaluationsProvisioned", "Value": 0, "Unit": "Count"},
                {"MetricName": "EvaluationsExecuting", "Value": 0, "Unit": "Count"},
                {"MetricName": "EvaluationsExecuted", "Value": 0, "Unit": "Count"},
                {"MetricName": "EvaluationsParsingOutputs", "Value": 0, "Unit": "Count"},
                {"MetricName": "EvaluationsExecutingAlgorithm", "Value": 0, "Unit": "Count"},
            ],
        },
        {
            "Namespace": "testserver/workstations",
            "MetricData": [
                {"MetricName": "SessionsQueued", "Value": 1, "Unit": "Count"},
                {"MetricName": "SessionsStarted", "Value": 0, "Unit": "Count"},
                {"MetricName": "SessionsRunning", "Value": 0, "Unit": "Count"},
                {"MetricName": "SessionsFailed", "Value": 0, "Unit": "Count"},
                {"MetricName": "SessionsStopped", "Value": 0, "Unit": "Count"},
            ],
        },
        {
            "Namespace": "testserver/cases",
            "MetricData": [
                {"MetricName": "RawImageUploadSessionsQueued", "Value": 1, "Unit": "Count"},
                {"MetricName": "RawImageUploadSessionsStarted", "Value": 0, "Unit": "Count"},
                {"MetricName": "RawImageUploadSessionsReQueued", "Value": 1, "Unit": "Count"},
                {"MetricName": "RawImageUploadSessionsFailed", "Value": 0, "Unit": "Count"},
                {"MetricName": "RawImageUploadSessionsSucceeded", "Value": 0, "Unit": "Count"},
                {"MetricName": "RawImageUploadSessionsCancelled", "Value": 0, "Unit": "Count"},
            ],
        },
    ]
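
Each namespace block lists every status choice of one model, with a zero Value for statuses that have no rows; only the factory-created objects and the requeued upload session produce non-zero counts. A hedged sketch of how one such block could be assembled with the Django ORM (the helper name and the STATUS_CHOICES attribute are assumptions):

from django.db.models import Count


def _metrics_for(model, *, namespace, prefix):
    # Count rows per status, then emit one CloudWatch MetricDatum per
    # status choice so absent statuses still report a zero Value
    counts = dict(
        model.objects.values_list("status").annotate(count=Count("status"))
    )
    return {
        "Namespace": namespace,
        "MetricData": [
            {
                "MetricName": f"{prefix}{label.replace(' ', '')}",
                "Value": counts.get(value, 0),
                "Unit": "Count",
            }
            for value, label in model.STATUS_CHOICES
        ],
    }

A call like _metrics_for(Job, namespace="testserver/algorithms", prefix="Jobs") would then produce the first block asserted above.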