Code example #1
def test_imageset_annotationset_download(
    client, two_challenge_sets, phase, kind
):
    """Only participants of a challenge should be able to download imageset images."""

    imageset = two_challenge_sets.challenge_set_1.challenge.imageset_set.get(
        phase=phase
    )
    image_file = ImageFileFactory()
    imageset.images.add(image_file.image)

    annotationset = AnnotationSetFactory(base=imageset, kind=kind)
    annotation_file = ImageFileFactory()
    annotationset.images.add(annotation_file.image)

    tests = [
        # (
        #   image response + annotation response not test ground truth,
        #   annotation response - testing gt,
        #   user
        # )
        (403, 403, None),
        (403, 403, UserFactory()),
        (403, 403, UserFactory(is_staff=True)),
        (403, 403, two_challenge_sets.challenge_set_1.non_participant),
        (302, 403, two_challenge_sets.challenge_set_1.participant),
        (302, 403, two_challenge_sets.challenge_set_1.participant1),
        (302, 302, two_challenge_sets.challenge_set_1.creator),
        (302, 302, two_challenge_sets.challenge_set_1.admin),
        (403, 403, two_challenge_sets.challenge_set_2.non_participant),
        (403, 403, two_challenge_sets.challenge_set_2.participant),
        (403, 403, two_challenge_sets.challenge_set_2.participant1),
        (403, 403, two_challenge_sets.challenge_set_2.creator),
        (403, 403, two_challenge_sets.challenge_set_2.admin),
        (302, 302, two_challenge_sets.admin12),
        (302, 403, two_challenge_sets.participant12),
        (302, 302, two_challenge_sets.admin1participant2),
    ]

    for test in tests:

        response = get_view_for_user(
            url=image_file.file.url, client=client, user=test[2]
        )
        assert response.status_code == test[0]

        response = get_view_for_user(
            url=annotation_file.file.url, client=client, user=test[2]
        )
        if phase == ImageSet.TESTING and kind == AnnotationSet.GROUNDTRUTH:
            # testing ground truth
            assert response.status_code == test[1]
        else:
            # training ground truth, training predictions and
            # ground truth predictions
            assert response.status_code == test[0]
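The phase and kind arguments of the test above are supplied by pytest parametrization that this listing does not show. A minimal sketch of how such a test is typically driven, assuming that ImageSet.TRAINING and AnnotationSet.PREDICTION exist alongside the ImageSet.TESTING and AnnotationSet.GROUNDTRUTH constants used in the test body:

import pytest

# Hypothetical parametrization; the real fixtures/markers may differ.
@pytest.mark.django_db
@pytest.mark.parametrize("phase", [ImageSet.TRAINING, ImageSet.TESTING])
@pytest.mark.parametrize(
    "kind", [AnnotationSet.PREDICTION, AnnotationSet.GROUNDTRUTH]
)
def test_imageset_annotationset_download(client, two_challenge_sets, phase, kind):
    ...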
Code example #2
def test_algorithm_with_invalid_output(client, algorithm_image, settings):
    # Override the celery settings
    settings.task_eager_propagates = (True, )
    settings.task_always_eager = (True, )

    assert Job.objects.count() == 0

    # Create the algorithm image
    algorithm_container, sha256 = algorithm_image
    alg = AlgorithmImageFactory(image__from_path=algorithm_container,
                                image_sha256=sha256,
                                ready=True)

    # Make sure the job fails when trying to upload an invalid file
    detection_interface = ComponentInterfaceFactory(
        store_in_database=False,
        relative_path="some_text.txt",
        slug="detection-json-file",
        kind=ComponentInterface.Kind.JSON,
    )
    alg.algorithm.outputs.add(detection_interface)
    alg.save()
    image_file = ImageFileFactory(file__from_path=Path(__file__).parent /
                                  "resources" / "input_file.tif")

    with capture_on_commit_callbacks(execute=True):
        execute_jobs(algorithm_image=alg, images=[image_file.image])

    jobs = Job.objects.filter(algorithm_image=alg,
                              inputs__image=image_file.image,
                              status=Job.FAILURE).all()
    assert len(jobs) == 1
    assert jobs.first().error_message == "Invalid filetype."
    assert len(jobs[0].outputs.all()) == 2
Code example #3
    def test_file_too_large_throws_error(self, tmpdir):
        image = ImageFactoryWithImageFile()

        # Remove zraw file
        old_raw = image.files.get(file__endswith=".zraw")
        raw_file_name = Path(old_raw.file.name).name
        old_raw.delete()

        # Create fake too large zraw file
        too_large_file_raw = tmpdir.join(raw_file_name)
        f = too_large_file_raw.open(mode="wb")
        f.seek(settings.MAX_SITK_FILE_SIZE)
        f.write(b"\0")
        f.close()

        # Add too large file as ImageFile model to image.files
        too_large_file_field = factory.django.FileField(
            from_path=str(too_large_file_raw))
        too_large_imagefile = ImageFileFactory(file=too_large_file_field)
        image.files.add(too_large_imagefile)

        # Try to open and catch expected exception
        try:
            image.get_sitk_image()
            pytest.fail("No File exceeds maximum exception")
        except IOError as e:
            assert "File exceeds maximum file size." in str(e)
Code example #4
def test_image_response(client):
    image_file = ImageFileFactory()

    response = get_view_for_user(url=image_file.file.url,
                                 client=client,
                                 user=None)

    # Anonymous access is rejected; the view hides protected files behind a 404
    assert response.status_code == 404
    assert not response.has_header("x-accel-redirect")

    staff_user = UserFactory(is_staff=True)

    response = get_view_for_user(url=image_file.file.url,
                                 client=client,
                                 user=staff_user)

    assert response.status_code == 200
    assert response.has_header("x-accel-redirect")

    redirect = response.get("x-accel-redirect")

    assert redirect.startswith(
        f"/{settings.PROTECTED_S3_STORAGE_KWARGS['bucket_name']}/")
    assert "AWSAccessKeyId" in redirect
    assert "Signature" in redirect
    assert "Expires" in redirect
Code example #5
def test_image_response(client, settings, cloudfront, tmpdir):
    settings.CLOUDFRONT_PRIVATE_KEY_BASE64 = (
        "LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlDWFFJQkFBS0JnUURBN2tp"
        "OWdJL2xSeWdJb09qVjF5eW1neDZGWUZsekorejFBVE1hTG81N25MNTdBYXZXCmhiNjhI"
        "WVk4RUEwR0pVOXhRZE1WYUhCb2dGM2VpQ1dZWFNVWkNXTS8rTTUrWmNkUXJhUlJTY3Vj"
        "bW42ZzRFdlkKMks0VzJweGJxSDh2bVVpa1B4aXI0MUVlQlBMak1Pekt2Ynp6UXk5ZS96"
        "eklRVlJFS1NwLzd5MW15d0lEQVFBQgpBb0dBQmM3bXA3WFlIeW51UFp4Q2hqV05KWklx"
        "K0E3M2dtMEFTRHY2QXQ3RjhWaTlyMHhVbFFlL3YwQVFTM3ljCk44UWx5UjRYTWJ6TUxZ"
        "azN5anhGRFhvNFpLUXRPR3pMR3RlQ1Uyc3JBTmlMdjI2L2ltWEE4RlZpZFpmdFRBdEwK"
        "dmlXUVpCVlBUZVlJQTY5QVRVWVBFcTBhNXU1d2pHeVVPaWo5T1d5dXkwMW1iUGtDUVFE"
        "bHVZb05wUE9la1EwWgpXclBnSjVyeGM4ZjZ6RzM3WlZvREJpZXhxdFZTaElGNVczeFl1"
        "V2hXNWtZYjBobGlZZmtxMTVjUzd0OW05NWgzCjFRSmYveEkvQWtFQTF2OWwvV04xYTFO"
        "M3JPSzRWR29Db2t4N2tSMlN5VE1TYlpnRjlJV0pOT3VnUi9XWnc3SFQKbmppcE8zYzlk"
        "eTFNczlwVUt3VUY0NmQ3MDQ5Y2s4SHdkUUpBUmdyU0t1TFdYTXlCSCsvbDFEeC9JNHRY"
        "dUFKSQpybFB5bytWbWlPYzdiNU56SHB0a1NIRVBmUjlzMU9LMFZxamtuY2xxQ0ozSWc4"
        "Nk9NRXRFRkJ6alpRSkJBS1l6CjQ3MGhjUGthR2s3dEtZQWdQNDhGdnhSc256ZW9vcHRV"
        "Ulc1RStNK1BRMlc5aURQUE9YOTczOStYaTAyaEdFV0YKQjBJR2JRb1RSRmRFNFZWY1BL"
        "MENRUUNlUzg0bE9EbEMwWTJCWnYySnhXM09zdi9Xa1VRNGRzbGZBUWwxVDMwMwo3dXd3"
        "cjdYVHJvTXY4ZElGUUlQcmVvUGhSS21kL1NiSnpiaUtmUy80UURoVQotLS0tLUVORCBS"
        "U0EgUFJJVkFURSBLRVktLS0tLQo=")
    settings.CLOUDFRONT_KEY_PAIR_ID = "PK123456789754"
    settings.PROTECTED_S3_STORAGE_USE_CLOUDFRONT = cloudfront

    image_file = ImageFileFactory()
    user = UserFactory()

    response = get_view_for_user(url=image_file.file.url,
                                 client=client,
                                 user=user)

    # Forbidden view
    assert response.status_code == 403
    assert not response.has_header("x-accel-redirect")

    assign_perm("view_image", user, image_file.image)

    response = get_view_for_user(url=image_file.file.url,
                                 client=client,
                                 user=user)

    assert response.status_code == 302
    assert not response.has_header("x-accel-redirect")

    redirect = response.url

    if cloudfront:
        assert redirect.startswith(
            f"https://{settings.PROTECTED_S3_STORAGE_CLOUDFRONT_DOMAIN}/")

        assert "AWSAccessKeyId" not in redirect
        assert "Signature" in redirect
        assert "Expires" in redirect
    else:
        assert redirect.startswith(
            f"{settings.AWS_S3_ENDPOINT_URL}/"
            f"{settings.PROTECTED_S3_STORAGE_KWARGS['bucket_name']}/")

        assert "AWSAccessKeyId" in redirect
        assert "Signature" in redirect
        assert "Expires" in redirect
Code example #6
def test_input_prefixes(tmp_path, settings):
    interfaces = [
        ComponentInterfaceFactory(
            kind=InterfaceKindChoices.BOOL, relative_path="test/bool.json"
        ),
        ComponentInterfaceFactory(
            kind=InterfaceKindChoices.IMAGE, relative_path="images/test-image"
        ),
        ComponentInterfaceFactory(
            kind=InterfaceKindChoices.CSV, relative_path="test.csv"
        ),
    ]
    civs = [
        ComponentInterfaceValueFactory(interface=interfaces[0], value=True),
        ComponentInterfaceValueFactory(
            interface=interfaces[1],
            image=ImageFileFactory(
                file__from_path=Path(__file__).parent.parent
                / "algorithms_tests"
                / "resources"
                / "input_file.tif"
            ).image,
        ),
        ComponentInterfaceValueFactory(interface=interfaces[2]),
    ]
    settings.COMPONENTS_AMAZON_ECS_NFS_MOUNT_POINT = tmp_path

    executor = AmazonECSExecutorStub(
        job_id="algorithms-job-00000000-0000-0000-0000-000000000000",
        exec_image_sha256="",
        exec_image_repo_tag="",
        memory_limit=4,
        time_limit=60,
        requires_gpu=False,
    )
    executor.provision(
        input_civs=civs,
        input_prefixes={
            str(civs[0].pk): "first/output/",
            str(civs[1].pk): "second/output",
        },
    )

    assert {str(f.relative_to(tmp_path)) for f in tmp_path.glob("**/*")} == {
        "algorithms",
        "algorithms/job",
        "algorithms/job/00000000-0000-0000-0000-000000000000",
        "algorithms/job/00000000-0000-0000-0000-000000000000/input",
        "algorithms/job/00000000-0000-0000-0000-000000000000/input/test.csv",
        "algorithms/job/00000000-0000-0000-0000-000000000000/input/first",
        "algorithms/job/00000000-0000-0000-0000-000000000000/input/first/output",
        "algorithms/job/00000000-0000-0000-0000-000000000000/input/first/output/test",
        "algorithms/job/00000000-0000-0000-0000-000000000000/input/first/output/test/bool.json",
        "algorithms/job/00000000-0000-0000-0000-000000000000/input/second",
        "algorithms/job/00000000-0000-0000-0000-000000000000/input/second/output",
        "algorithms/job/00000000-0000-0000-0000-000000000000/input/second/output/images",
        "algorithms/job/00000000-0000-0000-0000-000000000000/input/second/output/images/test-image",
        "algorithms/job/00000000-0000-0000-0000-000000000000/input/second/output/images/test-image/input_file.tif",
        "algorithms/job/00000000-0000-0000-0000-000000000000/output",
    }
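The expected tree asserted above follows from joining the job's input directory, the per-CIV prefix passed via input_prefixes, and each interface's relative_path. A pathlib-only illustration of that composition:

from pathlib import PurePosixPath

input_dir = PurePosixPath(
    "algorithms/job/00000000-0000-0000-0000-000000000000/input"
)
assert str(input_dir / "first/output/" / "test/bool.json") == (
    "algorithms/job/00000000-0000-0000-0000-000000000000"
    "/input/first/output/test/bool.json"
)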
Code example #7
    def test_permission_required_views(self, client):
        rius = RawImageUploadSessionFactory()
        image_file_dzi = ImageFileFactory(image_type="DZI")
        image_file_mh = ImageFactoryWithImageFile(
            color_space=Image.COLOR_SPACE_GRAY)
        u = UserFactory()

        for view_name, kwargs, permission, obj in [
            (
                "raw-image-upload-session-detail",
                {
                    "pk": rius.pk
                },
                "view_rawimageuploadsession",
                rius,
            ),
            (
                "osd-image-detail",
                {
                    "pk": image_file_dzi.image.pk
                },
                "view_image",
                image_file_dzi.image,
            ),
            (
                "vtk-image-detail",
                {
                    "pk": image_file_mh.pk
                },
                "view_image",
                image_file_mh,
            ),
        ]:
            response = get_view_for_user(
                client=client,
                viewname=f"cases:{view_name}",
                reverse_kwargs=kwargs,
                user=u,
            )

            assert response.status_code == 403

            assign_perm(permission, u, obj)

            response = get_view_for_user(
                client=client,
                viewname=f"cases:{view_name}",
                reverse_kwargs=kwargs,
                user=u,
            )

            assert response.status_code == 200

            remove_perm(permission, u, obj)
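assign_perm and remove_perm above are the object-level permission helpers from django-guardian. A minimal standalone illustration of the same toggle, using get_perms purely for demonstration (assumed imports):

from guardian.shortcuts import assign_perm, get_perms, remove_perm

user = UserFactory()
image = ImageFactoryWithImageFile()
assign_perm("view_image", user, image)
assert "view_image" in get_perms(user, image)
remove_perm("view_image", user, image)
assert "view_image" not in get_perms(user, image)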
Code example #8
def test_algorithm(client, algorithm_image, settings):
    # Override the celery settings
    settings.task_eager_propagates = (True, )
    settings.task_always_eager = (True, )

    assert Job.objects.count() == 0

    # Create the algorithm image
    algorithm_container, sha256 = algorithm_image
    alg = AlgorithmImageFactory(
        image__from_path=algorithm_container,
        image_sha256=sha256,
        ready=True,
    )

    # We should not be able to download image
    with pytest.raises(NotImplementedError):
        _ = alg.image.url

    # Run the algorithm, it will create a results.json and an output.tif
    image_file = ImageFileFactory(file__from_path=Path(__file__).parent /
                                  "resources" / "input_file.tif", )
    execute_jobs(algorithm_image=alg, images=[image_file.image])
    jobs = Job.objects.filter(algorithm_image=alg).all()

    # There should be a single, successful job
    assert len(jobs) == 1
    assert jobs[0].stdout.endswith("Greetings from stdout\n")
    assert jobs[0].stderr.endswith('("Hello from stderr")\n')
    assert jobs[0].error_message == ""
    assert jobs[0].status == jobs[0].SUCCESS

    # The job should have two ComponentInterfaceValues,
    # one for the results.json and one for output.tif
    assert len(jobs[0].outputs.all()) == 2
    json_result_interface = ComponentInterface.objects.get(
        slug="results-json-file")
    json_result_civ = jobs[0].outputs.get(interface=json_result_interface)
    assert json_result_civ.value == {
        "entity": "out.tif",
        "metrics": {
            "abnormal": 0.19,
            "normal": 0.81
        },
    }

    heatmap_interface = ComponentInterface.objects.get(slug="generic-overlay")
    heatmap_civ = jobs[0].outputs.get(interface=heatmap_interface)

    assert heatmap_civ.image.name == "output.tif"
Code example #9
def test_image_file_cleanup():
    filename = f"{uuid.uuid4()}.zraw"

    i = ImageFactory()
    f = ImageFileFactory(image=i)
    f.file.save(filename, File(get_temporary_image()))

    storage = f.file.storage
    filepath = f.file.name

    assert storage.exists(name=filepath)

    i.delete()

    assert not storage.exists(name=filepath)
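The final assertion implies that deleting the Image also cleans up the stored file. Purely as an illustration of how such behaviour could be wired up (an assumption, not this project's actual code), a post_delete signal on the ImageFile model would look like:

from django.db.models.signals import post_delete
from django.dispatch import receiver

@receiver(post_delete, sender=ImageFile)
def delete_file_from_storage(sender, instance, **kwargs):
    # Remove the underlying file whenever an ImageFile row is deleted,
    # including cascade deletes triggered by Image.delete().
    instance.file.delete(save=False)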
Code example #10
def test_image_file_migration():
    filename = f"{uuid.uuid4()}.zraw"

    i = ImageFactory()
    f = ImageFileFactory(image=i)
    f.file.save(filename, File(get_temporary_image()))

    old_name = image_file_path(f, filename)
    new_name = original_image_file_path(f, filename)

    storage = f.file.storage
    old_file_size = f.file.file.size

    assert old_name != new_name
    assert f.file.name == old_name
    assert storage.exists(old_name)
    assert not storage.exists(new_name)

    storage.copy(from_name=old_name, to_name=new_name)
    f.file.name = new_name
    f.save()
    storage.delete(old_name)

    assert not storage.exists(old_name)
    assert storage.exists(new_name)
    f.refresh_from_db()
    assert f.file.name == new_name
    assert f.file.file.size == old_file_size

    for _ in range(2):
        call_command("migrate_images")

        assert storage.exists(old_name)
        assert not storage.exists(new_name)
        f.refresh_from_db()
        assert f.file.name == old_name
        assert f.file.file.size == old_file_size
Code example #11
def test_algorithm_with_invalid_output(client, algorithm_image, settings):
    # Override the celery settings
    settings.task_eager_propagates = (True, )
    settings.task_always_eager = (True, )

    assert Job.objects.count() == 0

    # Create the algorithm image
    algorithm_container, sha256 = algorithm_image
    alg = AlgorithmImageFactory(image__from_path=algorithm_container,
                                image_sha256=sha256,
                                ready=True)

    # Make sure the job fails when trying to upload an invalid file
    detection_interface = ComponentInterfaceFactory(
        store_in_database=False,
        relative_path="some_text.txt",
        slug="detection-json-file",
        kind=ComponentInterface.Kind.ANY,
    )
    alg.algorithm.outputs.add(detection_interface)
    alg.save()
    image_file = ImageFileFactory(file__from_path=Path(__file__).parent /
                                  "resources" / "input_file.tif")
    civ = ComponentInterfaceValueFactory(image=image_file.image,
                                         interface=alg.algorithm.inputs.get(),
                                         file=None)

    with capture_on_commit_callbacks() as callbacks:
        create_algorithm_jobs(algorithm_image=alg, civ_sets=[{civ}])
    recurse_callbacks(callbacks=callbacks)

    jobs = Job.objects.filter(algorithm_image=alg,
                              inputs__image=image_file.image,
                              status=Job.FAILURE).all()
    assert len(jobs) == 1
    assert (jobs.first().error_message ==
            "The file produced at /output/some_text.txt is not valid json")
    assert len(jobs[0].outputs.all()) == 0
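recurse_callbacks is a project helper that is not shown in this listing. A plausible sketch of what it does (an assumption, not the project's code): keep executing the captured transaction.on_commit callbacks until no new ones are scheduled.

def recurse_callbacks(callbacks):
    with capture_on_commit_callbacks() as new_callbacks:
        for callback in callbacks:
            callback()
    if new_callbacks:
        recurse_callbacks(callbacks=new_callbacks)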
Code example #12
def test_image_response(client, settings, cloudfront, tmpdir):
    pem = tmpdir.join("cf.pem")
    pem.write(
        dedent("""
            -----BEGIN RSA PRIVATE KEY-----
            MIICXQIBAAKBgQDA7ki9gI/lRygIoOjV1yymgx6FYFlzJ+z1ATMaLo57nL57AavW
            hb68HYY8EA0GJU9xQdMVaHBogF3eiCWYXSUZCWM/+M5+ZcdQraRRScucmn6g4EvY
            2K4W2pxbqH8vmUikPxir41EeBPLjMOzKvbzzQy9e/zzIQVREKSp/7y1mywIDAQAB
            AoGABc7mp7XYHynuPZxChjWNJZIq+A73gm0ASDv6At7F8Vi9r0xUlQe/v0AQS3yc
            N8QlyR4XMbzMLYk3yjxFDXo4ZKQtOGzLGteCU2srANiLv26/imXA8FVidZftTAtL
            viWQZBVPTeYIA69ATUYPEq0a5u5wjGyUOij9OWyuy01mbPkCQQDluYoNpPOekQ0Z
            WrPgJ5rxc8f6zG37ZVoDBiexqtVShIF5W3xYuWhW5kYb0hliYfkq15cS7t9m95h3
            1QJf/xI/AkEA1v9l/WN1a1N3rOK4VGoCokx7kR2SyTMSbZgF9IWJNOugR/WZw7HT
            njipO3c9dy1Ms9pUKwUF46d7049ck8HwdQJARgrSKuLWXMyBH+/l1Dx/I4tXuAJI
            rlPyo+VmiOc7b5NzHptkSHEPfR9s1OK0VqjknclqCJ3Ig86OMEtEFBzjZQJBAKYz
            470hcPkaGk7tKYAgP48FvxRsnzeooptURW5E+M+PQ2W9iDPPOX9739+Xi02hGEWF
            B0IGbQoTRFdE4VVcPK0CQQCeS84lODlC0Y2BZv2JxW3Osv/WkUQ4dslfAQl1T303
            7uwwr7XTroMv8dIFQIPreoPhRKmd/SbJzbiKfS/4QDhU
            -----END RSA PRIVATE KEY-----
            """))

    settings.CLOUDFRONT_PRIVATE_KEY_PATH = pem.strpath
    settings.CLOUDFRONT_KEY_PAIR_ID = "PK123456789754"
    settings.PROTECTED_S3_STORAGE_USE_CLOUDFRONT = cloudfront

    image_file = ImageFileFactory()
    user = UserFactory()

    response = get_view_for_user(url=image_file.file.url,
                                 client=client,
                                 user=user)

    # Forbidden view
    assert response.status_code == 403
    assert not response.has_header("x-accel-redirect")

    assign_perm("view_image", user, image_file.image)

    response = get_view_for_user(url=image_file.file.url,
                                 client=client,
                                 user=user)

    assert response.status_code == 302
    assert not response.has_header("x-accel-redirect")

    redirect = response.url

    if cloudfront:
        assert redirect.startswith(
            f"https://{settings.PROTECTED_S3_STORAGE_CLOUDFRONT_DOMAIN}/")

        assert "AWSAccessKeyId" not in redirect
        assert "Signature" in redirect
        assert "Expires" in redirect
    else:
        assert redirect.startswith(
            f"{settings.PROTECTED_S3_STORAGE_KWARGS['endpoint_url']}/"
            f"{settings.PROTECTED_S3_STORAGE_KWARGS['bucket_name']}/")

        assert "AWSAccessKeyId" in redirect
        assert "Signature" in redirect
        assert "Expires" in redirect
Code example #13
def test_imageset_annotationset_download(client, TwoChallengeSets, phase,
                                         kind):
    """
    Only participants of a challenge should be able to download imageset images
    """

    imageset = TwoChallengeSets.ChallengeSet1.challenge.imageset_set.get(
        phase=phase)
    image_file = ImageFileFactory()
    imageset.images.add(image_file.image)

    annotationset = AnnotationSetFactory(base=imageset, kind=kind)
    annotation_file = ImageFileFactory()
    annotationset.images.add(annotation_file.image)

    staff_user = UserFactory(is_staff=True)
    staff_token = Token.objects.create(user=staff_user)

    tests = [
        # (
        #   image response + annotation response not test ground truth,
        #   annotation response - testing gt,
        #   user
        # )
        (404, 404, None),
        (200, 200, staff_user),
        (404, 404, TwoChallengeSets.ChallengeSet1.non_participant),
        (200, 404, TwoChallengeSets.ChallengeSet1.participant),
        (200, 404, TwoChallengeSets.ChallengeSet1.participant1),
        (200, 200, TwoChallengeSets.ChallengeSet1.creator),
        (200, 200, TwoChallengeSets.ChallengeSet1.admin),
        (404, 404, TwoChallengeSets.ChallengeSet2.non_participant),
        (404, 404, TwoChallengeSets.ChallengeSet2.participant),
        (404, 404, TwoChallengeSets.ChallengeSet2.participant1),
        (404, 404, TwoChallengeSets.ChallengeSet2.creator),
        (404, 404, TwoChallengeSets.ChallengeSet2.admin),
        (200, 200, TwoChallengeSets.admin12),
        (200, 404, TwoChallengeSets.participant12),
        (200, 200, TwoChallengeSets.admin1participant2),
    ]

    for test in tests:

        response = get_view_for_user(url=image_file.file.url,
                                     client=client,
                                     user=test[2])
        assert response.status_code == test[0]

        response = get_view_for_user(url=annotation_file.file.url,
                                     client=client,
                                     user=test[2])
        if phase == ImageSet.TESTING and kind == AnnotationSet.GROUNDTRUTH:
            # testing ground truth
            assert response.status_code == test[1]
        else:
            # training ground truth, training predictions and
            # ground truth predictions
            assert response.status_code == test[0]

    # Someone with a staff token should be able to get all images
    response = client.get(image_file.file.url,
                          HTTP_AUTHORIZATION=f"Token {staff_token.key}")
    assert response.status_code == 200

    response = client.get(annotation_file.file.url,
                          HTTP_AUTHORIZATION=f"Token {staff_token.key}")
    assert response.status_code == 200
Code example #14
def test_algorithm_multiple_inputs(client, algorithm_io_image, settings,
                                   component_interfaces):
    # Override the celery settings
    settings.task_eager_propagates = (True, )
    settings.task_always_eager = (True, )

    creator = UserFactory()

    assert Job.objects.count() == 0

    # Create the algorithm image
    algorithm_container, sha256 = algorithm_io_image
    alg = AlgorithmImageFactory(image__from_path=algorithm_container,
                                image_sha256=sha256,
                                ready=True)
    alg.algorithm.add_editor(creator)

    alg.algorithm.inputs.set(ComponentInterface.objects.all())
    # create the job
    job = Job.objects.create(creator=creator, algorithm_image=alg)

    expected = []
    for ci in ComponentInterface.objects.all():
        if ci.kind in InterfaceKind.interface_type_image():
            image_file = ImageFileFactory(
                file__from_path=Path(__file__).parent / "resources" /
                "input_file.tif")
            job.inputs.add(
                ComponentInterfaceValueFactory(interface=ci,
                                               image=image_file.image,
                                               file=None))
            expected.append("file")
        elif ci.kind in InterfaceKind.interface_type_file():
            job.inputs.add(
                ComponentInterfaceValueFactory(
                    interface=ci,
                    file__from_path=Path(__file__).parent / "resources" /
                    "test.json",
                ))
            expected.append("json")
        else:
            job.inputs.add(
                ComponentInterfaceValueFactory(interface=ci,
                                               value="test",
                                               file=None))
            expected.append("test")

    # Nested on_commits created by these tasks
    with capture_on_commit_callbacks(execute=True):
        with capture_on_commit_callbacks(execute=True):
            run_algorithm_job_for_inputs(job_pk=job.pk, upload_pks=[])

    job = Job.objects.get()
    assert job.status == job.SUCCESS
    assert {x[0]
            for x in job.input_files} == set(job.outputs.first().value.keys())
    assert sorted(
        map(
            lambda x: x if x != {} else "json",
            job.outputs.first().value.values(),
        )) == sorted(expected)
Code example #15
def test_algorithm(client, algorithm_image, settings):
    # Override the celery settings
    settings.task_eager_propagates = (True, )
    settings.task_always_eager = (True, )

    assert Job.objects.count() == 0

    # Create the algorithm image
    algorithm_container, sha256 = algorithm_image
    alg = AlgorithmImageFactory(image__from_path=algorithm_container,
                                image_sha256=sha256,
                                ready=True)

    # We should not be able to download image
    with pytest.raises(NotImplementedError):
        _ = alg.image.url

    # Run the algorithm, it will create a results.json and an output.tif
    image_file = ImageFileFactory(file__from_path=Path(__file__).parent /
                                  "resources" / "input_file.tif")

    with capture_on_commit_callbacks(execute=True):
        execute_jobs(algorithm_image=alg, images=[image_file.image])

    jobs = Job.objects.filter(algorithm_image=alg).all()

    # There should be a single, successful job
    assert len(jobs) == 1

    assert jobs[0].stdout.endswith("Greetings from stdout\n")
    assert jobs[0].stderr.endswith('("Hello from stderr")\n')
    assert jobs[0].error_message == ""
    assert jobs[0].status == jobs[0].SUCCESS

    # The job should have two ComponentInterfaceValues,
    # one for the results.json and one for output.tif
    assert len(jobs[0].outputs.all()) == 2
    json_result_interface = ComponentInterface.objects.get(
        slug="results-json-file")
    json_result_civ = jobs[0].outputs.get(interface=json_result_interface)
    assert json_result_civ.value == {
        "entity": "out.tif",
        "metrics": {
            "abnormal": 0.19,
            "normal": 0.81
        },
    }

    heatmap_interface = ComponentInterface.objects.get(slug="generic-overlay")
    heatmap_civ = jobs[0].outputs.get(interface=heatmap_interface)

    assert heatmap_civ.image.name == "output.tif"

    # We add another ComponentInterface with file value and run the algorithm again
    detection_interface = ComponentInterfaceFactory(
        store_in_database=False,
        relative_path="detection_results.json",
        title="detection-json-file",
        slug="detection-json-file",
        kind=ComponentInterface.Kind.JSON,
    )
    alg.algorithm.outputs.add(detection_interface)
    alg.save()
    image_file = ImageFileFactory(file__from_path=Path(__file__).parent /
                                  "resources" / "input_file.tif")

    with capture_on_commit_callbacks(execute=True):
        execute_jobs(algorithm_image=alg, images=[image_file.image])

    jobs = Job.objects.filter(algorithm_image=alg,
                              inputs__image=image_file.image).all()
    # There should be a single, successful job
    assert len(jobs) == 1

    # The job should have three ComponentInterfaceValues,
    # one with the detection_results store in the file
    assert len(jobs[0].outputs.all()) == 3
    detection_civ = jobs[0].outputs.get(interface=detection_interface)
    assert not detection_civ.value
    assert re.search("detection_results.*json$", detection_civ.file.name)
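The closing regex match is used instead of an exact comparison because the stored name may carry a generated path or uniqueness suffix added by Django's storage layer. A self-contained check of the pattern (example file names made up):

import re

assert re.search(r"detection_results.*json$", "detection_results_x7kq2.json")
assert not re.search(r"detection_results.*json$", "other_results.json")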
Code example #16
def test_provision(tmp_path, settings):
    interfaces = [
        ComponentInterfaceFactory(
            kind=InterfaceKindChoices.BOOL, relative_path="test/bool.json"
        ),
        ComponentInterfaceFactory(
            kind=InterfaceKindChoices.IMAGE, relative_path="images/test-image"
        ),
        ComponentInterfaceFactory(
            kind=InterfaceKindChoices.CSV, relative_path="test.csv"
        ),
    ]
    civs = [
        ComponentInterfaceValueFactory(interface=interfaces[0], value=True),
        ComponentInterfaceValueFactory(
            interface=interfaces[1],
            image=ImageFileFactory(
                file__from_path=Path(__file__).parent.parent
                / "algorithms_tests"
                / "resources"
                / "input_file.tif"
            ).image,
        ),
        ComponentInterfaceValueFactory(interface=interfaces[2]),
    ]
    civs[2].file.save("whatever.csv", ContentFile(b"foo,\nbar,\n"))

    settings.COMPONENTS_AMAZON_ECS_NFS_MOUNT_POINT = tmp_path

    executor = AmazonECSExecutorStub(
        job_id="algorithms-job-00000000-0000-0000-0000-000000000000",
        exec_image_sha256="",
        exec_image_repo_tag="",
        memory_limit=4,
        time_limit=60,
        requires_gpu=False,
    )

    executor.provision(input_civs=civs, input_prefixes={})
    executor.execute()
    executor.handle_event(
        event={
            # Minimal successful event
            "taskDefinitionArn": "arn:aws:ecs:region:123456789012:task-definition/algorithms-job-00000000-0000-0000-0000-000000000000:1",
            "group": "components-gpu",
            "stopCode": "EssentialContainerExited",
            "containers": [
                {
                    "exitCode": 143,
                    "name": "algorithms-job-00000000-0000-0000-0000-000000000000-timeout",
                },
                {
                    "exitCode": 0,
                    "name": "algorithms-job-00000000-0000-0000-0000-000000000000",
                },
            ],
            "startedAt": "2021-09-25T10:50:24.248Z",
            "stoppedAt": "2021-09-25T11:02:30.776Z",
        }
    )

    assert executor.duration == datetime.timedelta(
        seconds=726, microseconds=528000
    )

    assert {str(f.relative_to(tmp_path)) for f in tmp_path.glob("**/*")} == {
        "algorithms",
        "algorithms/job",
        "algorithms/job/00000000-0000-0000-0000-000000000000",
        "algorithms/job/00000000-0000-0000-0000-000000000000/input",
        "algorithms/job/00000000-0000-0000-0000-000000000000/input/test.csv",
        "algorithms/job/00000000-0000-0000-0000-000000000000/input/test",
        "algorithms/job/00000000-0000-0000-0000-000000000000/input/test/bool.json",
        "algorithms/job/00000000-0000-0000-0000-000000000000/input/images",
        "algorithms/job/00000000-0000-0000-0000-000000000000/input/images/test-image",
        "algorithms/job/00000000-0000-0000-0000-000000000000/input/images/test-image/input_file.tif",
        "algorithms/job/00000000-0000-0000-0000-000000000000/output",
        "algorithms/job/00000000-0000-0000-0000-000000000000/output/metrics.json",
        "algorithms/job/00000000-0000-0000-0000-000000000000/output/results.json",
        "algorithms/job/00000000-0000-0000-0000-000000000000/output/test.csv",
        "algorithms/job/00000000-0000-0000-0000-000000000000/output/test",
        "algorithms/job/00000000-0000-0000-0000-000000000000/output/test/bool.json",
        "algorithms/job/00000000-0000-0000-0000-000000000000/output/images",
        "algorithms/job/00000000-0000-0000-0000-000000000000/output/images/test-image",
        "algorithms/job/00000000-0000-0000-0000-000000000000/output/images/test-image/input_file.tif",
    }

    # Exclude the CIV reading as this is unsupported
    outputs = executor.get_outputs(output_interfaces=interfaces)
    assert len(outputs) == 3

    executor.deprovision()

    assert {str(f.relative_to(tmp_path)) for f in tmp_path.glob("**/*")} == {
        "algorithms",
        "algorithms/job",
    }
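The expected duration equals stoppedAt minus startedAt from the event payload handled above. A quick standalone check of that arithmetic:

import datetime

started = datetime.datetime(2021, 9, 25, 10, 50, 24, 248000, tzinfo=datetime.timezone.utc)
stopped = datetime.datetime(2021, 9, 25, 11, 2, 30, 776000, tzinfo=datetime.timezone.utc)
assert stopped - started == datetime.timedelta(seconds=726, microseconds=528000)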
Code example #17
def test_algorithm_multiple_inputs(client, algorithm_io_image, settings,
                                   component_interfaces):
    # Override the celery settings
    settings.task_eager_propagates = (True, )
    settings.task_always_eager = (True, )

    creator = UserFactory()

    assert Job.objects.count() == 0

    # Create the algorithm image
    algorithm_container, sha256 = algorithm_io_image
    alg = AlgorithmImageFactory(image__from_path=algorithm_container,
                                image_sha256=sha256,
                                ready=True)
    alg.algorithm.add_editor(creator)

    alg.algorithm.inputs.set(ComponentInterface.objects.all())
    alg.algorithm.outputs.set(
        [ComponentInterface.objects.get(slug="results-json-file")])
    # create the job
    job = Job.objects.create(creator=creator, algorithm_image=alg)

    expected = []
    for ci in ComponentInterface.objects.exclude(
            kind=InterfaceKindChoices.ZIP):
        if ci.is_image_kind:
            image_file = ImageFileFactory(
                file__from_path=Path(__file__).parent / "resources" /
                "input_file.tif")
            job.inputs.add(
                ComponentInterfaceValueFactory(interface=ci,
                                               image=image_file.image))
            expected.append("file")
        elif ci.is_file_kind:
            civ = ComponentInterfaceValueFactory(interface=ci)
            civ.file.save("test", File(BytesIO(b"")))
            civ.save()
            job.inputs.add(civ)
            expected.append("file")
        else:
            job.inputs.add(
                ComponentInterfaceValueFactory(interface=ci, value="test"))
            expected.append("test")

    with capture_on_commit_callbacks() as callbacks:
        run_algorithm_job_for_inputs(job_pk=job.pk, upload_pks=[])
    recurse_callbacks(callbacks=callbacks)

    job.refresh_from_db()
    assert job.error_message == ""
    assert job.status == job.SUCCESS

    # Remove fake value for score
    output_dict = job.outputs.first().value
    output_dict.pop("score")

    assert {f"/input/{x.relative_path}"
            for x in job.inputs.all()} == set(output_dict.keys())
    assert sorted(map(lambda x: x if x != {} else "json",
                      output_dict.values())) == sorted(expected)