def test_job_post_permissions(client, is_staff, expected_response):
    """Only staff users should be able to create a job via the API."""
    requester = UserFactory(is_staff=is_staff)
    input_image = ImageFactory()
    algorithm_image = AlgorithmImageFactory()

    response = get_view_for_user(
        viewname="api:algorithms-job-list",
        user=requester,
        client=client,
        method=client.post,
        data={
            "image": input_image.api_url,
            "algorithm_image": algorithm_image.api_url,
        },
        content_type="application/json",
    )

    assert response.status_code == expected_response
def test_upload_sessions_create(client):
    """A user added to an algorithm can create an upload session for it."""
    uploader = UserFactory()
    algorithm_image = AlgorithmImageFactory()
    algorithm_image.algorithm.add_user(uploader)

    response = get_view_for_user(
        viewname="api:upload-session-list",
        user=uploader,
        client=client,
        method=client.post,
        data={"algorithm_image": algorithm_image.api_url},
        content_type="application/json",
    )

    assert response.status_code == 201
    created_session = RawImageUploadSession.objects.get(
        pk=response.data.get("pk")
    )
    assert created_session.algorithm_image == algorithm_image
def test_unmatched_interface_filter_subset(self):
    """A CIV set that is a superset of the algorithm's inputs is kept,
    restricted to the interfaces the algorithm actually declares."""
    ai = AlgorithmImageFactory()
    cis = ComponentInterfaceFactory.create_batch(2)
    ai.algorithm.inputs.set(cis)
    civ_sets = [
        {
            ComponentInterfaceValueFactory(interface=cis[0]),
            ComponentInterfaceValueFactory(interface=cis[1]),
            # Extra interface
            ComponentInterfaceValueFactory(interface=ComponentInterfaceFactory()),
        }
    ]
    filtered_civ_sets = filter_civs_for_algorithm(
        civ_sets=civ_sets, algorithm_image=ai
    )
    # The set survives filtering but the extra CIV is dropped
    assert len(filtered_civ_sets) == 1
    assert {civ.interface for civ in filtered_civ_sets[0]} == {*cis}
def test_algorithm_list_view(client):
    """The image list shows all algorithm images and drops deleted ones."""
    first_image, second_image = (
        AlgorithmImageFactory(),
        AlgorithmImageFactory(),
    )
    staff_user = UserFactory(is_staff=True)

    def _list_page():
        return get_view_for_user(
            viewname="algorithms:image-list", client=client, user=staff_user
        )

    response = _list_page()
    assert first_image.get_absolute_url() in response.rendered_content
    assert second_image.get_absolute_url() in response.rendered_content

    first_image.delete()

    response = _list_page()
    assert first_image.get_absolute_url() not in response.rendered_content
    assert second_image.get_absolute_url() in response.rendered_content
def test_algorithm_with_permission(self):
    """An editor of an algorithm with a ready image and a completed job
    can submit that algorithm to an ALGORITHM-kind phase."""
    user = UserFactory()
    alg = AlgorithmFactory()
    alg.add_editor(user=user)
    # No interfaces are needed for this test
    alg.inputs.clear()
    alg.outputs.clear()
    ai = AlgorithmImageFactory(ready=True, algorithm=alg)
    # status=4 — presumably a terminal/success Job status; confirm against
    # the Job status choices
    AlgorithmJobFactory(algorithm_image=ai, status=4)
    p = PhaseFactory(submission_kind=Phase.SubmissionKind.ALGORITHM)
    form = SubmissionForm(
        user=user,
        phase=p,
        data={"algorithm": alg.pk, "creator": user, "phase": p},
    )
    assert form.errors == {}
    assert "algorithm" not in form.errors
    assert form.is_valid()
def test_algorithm_api_permissions(client):
    """Algorithm-image list and detail API endpoints are staff-only."""
    algorithm_image = AlgorithmImageFactory()
    cases = [(UserFactory(), 403), (UserFactory(is_staff=True), 200)]

    for requester, expected_status in cases:
        list_response = get_view_for_user(
            client=client,
            viewname="api:algorithms-image-list",
            user=requester,
            content_type="application/json",
        )
        assert list_response.status_code == expected_status

        detail_response = get_view_for_user(
            client=client,
            viewname="api:algorithms-image-detail",
            reverse_kwargs={"pk": algorithm_image.pk},
            user=requester,
            content_type="application/json",
        )
        assert detail_response.status_code == expected_status
def test_algorithm_detail_flexible_inputs(client):
    """The link to the flexible-inputs session view only appears on the
    algorithm detail page when use_flexible_inputs is enabled."""
    editor = UserFactory()
    alg = AlgorithmFactory(use_flexible_inputs=False)
    alg.add_editor(editor)
    AlgorithmImageFactory(algorithm=alg, ready=True)
    flexi_input_url = reverse(
        viewname="algorithms:execution-session-create-new",
        kwargs={"slug": alg.slug},
    )
    response = get_view_for_user(
        viewname="algorithms:detail",
        reverse_kwargs={"slug": slugify(alg.slug)},
        client=client,
        user=editor,
        method=client.get,
        follow=True,
        **{"HTTP_X_REQUESTED_WITH": "XMLHttpRequest"},
    )
    assert response.status_code == 200
    # Flexible inputs disabled: the link must not be rendered
    assert flexi_input_url not in response.rendered_content
    alg.use_flexible_inputs = True
    alg.save()
    response = get_view_for_user(
        viewname="algorithms:detail",
        reverse_kwargs={"slug": slugify(alg.slug)},
        client=client,
        user=editor,
        method=client.get,
        follow=True,
        **{"HTTP_X_REQUESTED_WITH": "XMLHttpRequest"},
    )
    assert response.status_code == 200
    # Flexible inputs enabled: the link is rendered
    assert flexi_input_url in response.rendered_content
def test_upload_copy():
    """A completed multipart user upload can be copied into an
    AlgorithmImage's image field and the bytes round-trip intact."""
    user = UserFactory()
    upload = UserUpload.objects.create(creator=user, filename="test.tar.gz")
    presigned_urls = upload.generate_presigned_urls(part_numbers=[1])
    # Upload a single part directly to storage via the presigned URL
    response = put(presigned_urls["1"], data=b"123")
    upload.complete_multipart_upload(
        parts=[{"ETag": response.headers["ETag"], "PartNumber": 1}]
    )
    upload.save()
    ai = AlgorithmImageFactory(creator=user, image=None)
    assert not ai.image
    upload.copy_object(to_field=ai.image)
    # The copy lands at the model's upload path and keeps the filename
    assert (
        ai.image.name
        == f"docker/images/algorithms/algorithmimage/{ai.pk}/test.tar.gz"
    )
    assert ai.image.storage.exists(name=ai.image.name)
    with ai.image.open() as f:
        assert f.read() == b"123"
def test_job_permissions_for_challenge(self):
    """Jobs created for a challenge evaluation are viewable only by the
    challenge admins (job + logs) and the job viewers group (job only)."""
    ai = AlgorithmImageFactory(ready=True)
    archive = ArchiveFactory()
    evaluation = EvaluationFactory(
        submission__phase__archive=archive, submission__algorithm_image=ai
    )

    # Fake an image upload via a session
    u = UserFactory()
    s = UploadSessionFactory(creator=u)
    im = ImageFactory()
    s.image_set.set([im])

    civ = ComponentInterfaceValueFactory(
        image=im, interface=ai.algorithm.inputs.get()
    )
    archive_item = ArchiveItemFactory(archive=archive)
    with capture_on_commit_callbacks(execute=True):
        archive_item.values.add(civ)

    create_algorithm_jobs_for_evaluation(evaluation_pk=evaluation.pk)

    job = Job.objects.get()

    # Only the challenge admins and job viewers should be able to view the
    # job and logs.
    # NOTE: NOT THE *ALGORITHM* EDITORS, they are the participants
    # to the challenge and should not be able to see the test data
    assert get_groups_with_set_perms(job) == {
        evaluation.submission.phase.challenge.admins_group: {
            "view_job",
            "view_logs",
        },
        job.viewers: {"view_job"},
    }
    # No-one should be able to change the job
    assert (
        get_users_with_perms(job, attach_perms=True, with_group_users=False)
        == {}
    )
    # No-one should be in the viewers group
    assert {*job.viewers.user_set.all()} == set()
def test_job_permissions_for_archive(self):
    """Jobs created for an archive are viewable by the archive's editors,
    users, uploaders and the job viewers group — nobody else."""
    ai = AlgorithmImageFactory(ready=True)
    archive = ArchiveFactory()

    # Fake an image upload via a session
    u = UserFactory()
    s = UploadSessionFactory(creator=u)
    im = ImageFactory()
    s.image_set.set([im])

    civ = ComponentInterfaceValueFactory(
        image=im, interface=ai.algorithm.inputs.get()
    )
    archive_item = ArchiveItemFactory(archive=archive)
    with capture_on_commit_callbacks(execute=True):
        archive_item.values.add(civ)

    archive.algorithms.set([ai.algorithm])

    create_algorithm_jobs_for_archive(archive_pks=[archive.pk])

    job = Job.objects.get()

    # The archive editors, users and uploaders and job
    # viewers should be able to view the job.
    # NOTE: NOT THE ALGORITHM EDITORS, if they need
    # access the job can be shared with them.
    assert get_groups_with_set_perms(job) == {
        archive.editors_group: {"view_job"},
        archive.users_group: {"view_job"},
        archive.uploaders_group: {"view_job"},
        job.viewers: {"view_job"},
    }
    # No-one should be able to change the job
    assert (
        get_users_with_perms(job, attach_perms=True, with_group_users=False)
        == {}
    )
    # No-one should be in the viewers group
    assert {*job.viewers.user_set.all()} == set()
def test_image_file_create(client):
    """Posting a file to a session the user owns succeeds (201); posting
    to someone else's session is rejected (400)."""
    uploader = UserFactory()
    algorithm_image = AlgorithmImageFactory()
    algorithm_image.algorithm.add_user(uploader)

    def _post_file(session):
        return get_view_for_user(
            viewname="api:upload-session-file-list",
            user=uploader,
            client=client,
            method=client.post,
            data={
                "upload_session": session.api_url,
                "filename": "dummy.bin",
            },
            content_type="application/json",
        )

    own_session = RawImageUploadSessionFactory(
        creator=uploader, algorithm_image=algorithm_image
    )
    response = _post_file(own_session)
    assert response.status_code == 201
    image_file = RawImageFile.objects.get(pk=response.data.get("pk"))
    assert image_file.upload_session == own_session

    foreign_session = RawImageUploadSessionFactory()
    response = _post_file(foreign_session)
    assert response.status_code == 400
def setUp(self):
    """Build an archive-backed submission: two archive items whose input
    CIVs already have successful jobs with outputs, plus an evaluation in
    the EXECUTING_PREREQUISITES state."""
    interface = ComponentInterface.objects.get(
        slug="generic-medical-image"
    )
    archive = ArchiveFactory()
    ais = ArchiveItemFactory.create_batch(2)
    archive.items.set(ais)
    input_civs = ComponentInterfaceValueFactory.create_batch(
        2, interface=interface
    )
    output_civs = ComponentInterfaceValueFactory.create_batch(
        2, interface=interface
    )
    # One input CIV per archive item
    for ai, civ in zip(ais, input_civs):
        ai.values.set([civ])

    alg = AlgorithmImageFactory()
    submission = SubmissionFactory(algorithm_image=alg)
    submission.phase.archive = archive
    submission.phase.save()
    submission.phase.algorithm_inputs.set([interface])

    # One successful job per (input, output) pair
    jobs = []
    for inpt, output in zip(input_civs, output_civs):
        j = AlgorithmJobFactory(status=Job.SUCCESS, algorithm_image=alg)
        j.inputs.set([inpt])
        j.outputs.set([output])
        jobs.append(j)

    self.evaluation = EvaluationFactory(
        submission=submission, status=Evaluation.EXECUTING_PREREQUISITES
    )
    self.jobs = jobs
    self.output_civs = output_civs
def test_process_images_api_view(client, settings):
    """Processing an upload session starts jobs exactly once: the first
    PATCH succeeds, a repeated PATCH is rejected.

    Celery tasks run eagerly (and propagate errors) so the whole flow
    executes synchronously inside the test.
    """
    # Override the celery settings. These are boolean flags; the previous
    # one-element tuples `(True,)` only worked because any non-empty tuple
    # is truthy.
    settings.task_eager_propagates = True
    settings.task_always_eager = True

    user = UserFactory()
    us = RawImageUploadSessionFactory(creator=user)

    algorithm_image = AlgorithmImageFactory(ready=True)
    algorithm_image.algorithm.add_user(user)

    f = StagedFileFactory(
        file__from_path=Path(__file__).parent
        / "resources"
        / "image10x10x10.mha"
    )
    RawImageFileFactory(upload_session=us, staged_file_id=f.file_id)

    def request_processing():
        # PATCHing the session with an algorithm slug triggers processing
        return get_view_for_user(
            viewname="api:upload-session-process-images",
            reverse_kwargs={"pk": us.pk},
            user=user,
            client=client,
            method=client.patch,
            data={"algorithm": algorithm_image.algorithm.slug},
            content_type="application/json",
        )

    # First request should work
    response = request_processing()
    assert response.status_code == 200

    # Jobs should only be run once
    response = request_processing()
    assert response.status_code == 400
def test_permission_required_list_views(self, client):
    """List views only include the objects the user holds the relevant
    object-level permission for."""
    ai = AlgorithmImageFactory(ready=True)
    u = UserFactory()
    j = AlgorithmJobFactory(algorithm_image=ai)

    for view_name, kwargs, permission, objs in [
        ("list", {}, "view_algorithm", {ai.algorithm}),
        (
            "job-list",
            {"slug": j.algorithm_image.algorithm.slug},
            "view_job",
            {j},
        ),
    ]:

        def _get_view():
            return get_view_for_user(
                client=client,
                viewname=f"algorithms:{view_name}",
                reverse_kwargs=kwargs,
                user=u,
            )

        # Without the permission the object list is empty
        response = _get_view()
        assert response.status_code == 200
        assert set() == {*response.context[-1]["object_list"]}

        assign_perm(permission, u, list(objs))

        # With the permission the objects appear
        response = _get_view()
        assert response.status_code == 200
        assert objs == {*response.context[-1]["object_list"]}

        # Reset for the next iteration
        for obj in objs:
            remove_perm(permission, u, obj)
def test_job_create(client):
    """A staff user can create a job via the API; the job stores the
    posted image and algorithm image."""
    input_image = ImageFactory()
    algorithm_image = AlgorithmImageFactory()
    staff_user = UserFactory(is_staff=True)

    response = get_view_for_user(
        viewname="api:algorithms-job-list",
        user=staff_user,
        client=client,
        method=client.post,
        data={
            "image": input_image.api_url,
            "algorithm_image": algorithm_image.api_url,
        },
        content_type="application/json",
    )

    assert response.status_code == 201
    created_job = Job.objects.get(pk=response.data.get("pk"))
    assert created_job.image == input_image
    assert created_job.algorithm_image == algorithm_image
def test_algorithm_image_group_permissions_are_assigned():
    """A new algorithm image grants its algorithm's editors group both
    view and change permissions."""
    algorithm_image = AlgorithmImageFactory()
    editor_perms = get_group_perms(
        algorithm_image.algorithm.editors_group, algorithm_image
    )
    for expected_perm in ("view_algorithmimage", "change_algorithmimage"):
        assert expected_perm in editor_perms
def test_permission_required_views(self, client):
    """Each algorithm view requires its object-level permission: without
    it the user gets 403 (or, for the detail view, a redirect to the
    permission-request page); with it, 200."""
    ai = AlgorithmImageFactory(ready=True)
    s = RawImageUploadSessionFactory()
    u = UserFactory()
    j = AlgorithmJobFactory(algorithm_image=ai)
    p = AlgorithmPermissionRequestFactory(algorithm=ai.algorithm)
    VerificationFactory(user=u, is_verified=True)

    # (view name, reverse kwargs, required permission, permission object,
    #  expected redirect URL when the permission is missing)
    for view_name, kwargs, permission, obj, redirect in [
        ("create", {}, "algorithms.add_algorithm", None, None),
        (
            "detail",
            {"slug": ai.algorithm.slug},
            "view_algorithm",
            ai.algorithm,
            reverse(
                "algorithms:permission-request-create",
                kwargs={"slug": ai.algorithm.slug},
            ),
        ),
        (
            "update",
            {"slug": ai.algorithm.slug},
            "change_algorithm",
            ai.algorithm,
            None,
        ),
        (
            "image-create",
            {"slug": ai.algorithm.slug},
            "change_algorithm",
            ai.algorithm,
            None,
        ),
        (
            "image-detail",
            {"slug": ai.algorithm.slug, "pk": ai.pk},
            "view_algorithmimage",
            ai,
            None,
        ),
        (
            "image-update",
            {"slug": ai.algorithm.slug, "pk": ai.pk},
            "change_algorithmimage",
            ai,
            None,
        ),
        (
            "execution-session-create-batch",
            {"slug": ai.algorithm.slug},
            "execute_algorithm",
            ai.algorithm,
            None,
        ),
        (
            "execution-session-create",
            {"slug": ai.algorithm.slug},
            "execute_algorithm",
            ai.algorithm,
            None,
        ),
        (
            "execution-session-detail",
            {"slug": ai.algorithm.slug, "pk": s.pk},
            "view_rawimageuploadsession",
            s,
            None,
        ),
        (
            "job-experiment-detail",
            {"slug": ai.algorithm.slug, "pk": j.pk},
            "view_job",
            j,
            None,
        ),
        (
            "job-detail",
            {"slug": ai.algorithm.slug, "pk": j.pk},
            "view_job",
            j,
            None,
        ),
        (
            "job-update",
            {"slug": ai.algorithm.slug, "pk": j.pk},
            "change_job",
            j,
            None,
        ),
        (
            "job-viewers-update",
            {"slug": ai.algorithm.slug, "pk": j.pk},
            "change_job",
            j,
            None,
        ),
        (
            "editors-update",
            {"slug": ai.algorithm.slug},
            "change_algorithm",
            ai.algorithm,
            None,
        ),
        (
            "users-update",
            {"slug": ai.algorithm.slug},
            "change_algorithm",
            ai.algorithm,
            None,
        ),
        (
            "permission-request-update",
            {"slug": ai.algorithm.slug, "pk": p.pk},
            "change_algorithm",
            ai.algorithm,
            None,
        ),
    ]:

        def _get_view():
            return get_view_for_user(
                client=client,
                viewname=f"algorithms:{view_name}",
                reverse_kwargs=kwargs,
                user=u,
            )

        # Without the permission: redirect (detail view) or 403
        response = _get_view()
        if redirect is not None:
            assert response.status_code == 302
            assert response.url == redirect
        else:
            assert response.status_code == 403

        assign_perm(permission, u, obj)

        # With the permission the view renders
        response = _get_view()
        assert response.status_code == 200

        remove_perm(permission, u, obj)
def test_algorithm_jobs_list_view(client):
    """The job-list datatable endpoint paginates, orders (asc/desc), and
    filters jobs by search value."""
    editor = UserFactory()
    alg = AlgorithmFactory(public=True)
    alg.add_editor(editor)
    im = AlgorithmImageFactory(algorithm=alg)
    # 50 successful jobs with distinct creation dates, all viewable by
    # the editors group
    for x in range(50):
        created = timezone.now() - datetime.timedelta(days=x + 365)
        job = AlgorithmJobFactory(algorithm_image=im, status=Job.SUCCESS)
        job.created = created
        job.save()
        job.viewer_groups.add(alg.editors_group)
    response = get_view_for_user(
        viewname="algorithms:job-list",
        reverse_kwargs={"slug": slugify(alg.slug)},
        client=client,
        user=editor,
        method=client.get,
        follow=True,
    )
    assert response.status_code == 200
    # Page of 10, descending by the first column
    response = get_view_for_user(
        viewname="algorithms:job-list",
        reverse_kwargs={"slug": slugify(alg.slug)},
        client=client,
        user=editor,
        method=client.get,
        follow=True,
        data={
            "length": 10,
            "draw": 1,
            "order[0][dir]": "desc",
            "order[0][column]": 0,
        },
        **{"HTTP_X_REQUESTED_WITH": "XMLHttpRequest"},
    )
    resp = response.json()
    assert resp["recordsTotal"] == 50
    assert len(resp["data"]) == 10
    # All 50 rows, descending
    response = get_view_for_user(
        viewname="algorithms:job-list",
        reverse_kwargs={"slug": slugify(alg.slug)},
        client=client,
        user=editor,
        method=client.get,
        follow=True,
        data={
            "length": 50,
            "draw": 1,
            "order[0][dir]": "desc",
            "order[0][column]": 0,
        },
        **{"HTTP_X_REQUESTED_WITH": "XMLHttpRequest"},
    )
    resp = response.json()
    assert resp["recordsTotal"] == 50
    assert len(resp["data"]) == 50
    # Ascending order returns the same rows reversed
    response = get_view_for_user(
        viewname="algorithms:job-list",
        reverse_kwargs={"slug": slugify(alg.slug)},
        client=client,
        user=editor,
        method=client.get,
        follow=True,
        data={
            "length": 50,
            "draw": 1,
            "order[0][dir]": "asc",
            "order[0][column]": 0,
        },
        **{"HTTP_X_REQUESTED_WITH": "XMLHttpRequest"},
    )
    resp_new = response.json()
    assert resp_new["recordsTotal"] == 50
    assert resp_new["data"] == resp["data"][::-1]
    # Searching for the last-created job's creator filters to one row
    response = get_view_for_user(
        viewname="algorithms:job-list",
        reverse_kwargs={"slug": slugify(alg.slug)},
        client=client,
        user=editor,
        method=client.get,
        follow=True,
        data={
            "length": 50,
            "draw": 1,
            "search[value]": job.creator.username,
            "order[0][column]": 0,
        },
        **{"HTTP_X_REQUESTED_WITH": "XMLHttpRequest"},
    )
    resp = response.json()
    assert resp["recordsTotal"] == 50
    assert resp["recordsFiltered"] == 1
    assert len(resp["data"]) == 1
def test_no_jobs_workflow(self):
    """Creating jobs with no CIV sets schedules no on-commit callbacks."""
    algorithm_image = AlgorithmImageFactory()
    with capture_on_commit_callbacks() as callbacks:
        create_algorithm_jobs(algorithm_image=algorithm_image, civ_sets=[])
    assert len(callbacks) == 0
def test_no_images_does_nothing(self):
    """No jobs are created when the image list is empty."""
    algorithm_image = AlgorithmImageFactory()
    create_algorithm_jobs(algorithm_image=algorithm_image, images=[])
    assert Job.objects.count() == 0
def test_jobs_workflow(self):
    """execute_jobs returns a workflow when there are images to process."""
    algorithm_image = AlgorithmImageFactory()
    input_images = [ImageFactory(), ImageFactory()]
    workflow = execute_jobs(
        algorithm_image=algorithm_image, images=input_images
    )
    assert workflow is not None
def test_no_jobs_workflow(self):
    """execute_jobs returns no workflow when there are no images."""
    algorithm_image = AlgorithmImageFactory()
    assert execute_jobs(algorithm_image=algorithm_image, images=[]) is None
def test_archive_item_form(client, settings):
    """Editing an archive item replaces its (immutable) CIVs with new ones
    and triggers a new algorithm job for each value change."""
    # Override the celery settings. These are boolean flags; the previous
    # one-element tuples `(True,)` only worked because any non-empty tuple
    # is truthy.
    settings.task_eager_propagates = True
    settings.task_always_eager = True

    archive = ArchiveFactory()
    editor = UserFactory()
    archive.editors_group.user_set.add(editor)
    ci = ComponentInterfaceFactory(
        kind=InterfaceKind.InterfaceKindChoices.BOOL
    )
    civ = ComponentInterfaceValueFactory(
        interface=ci, value=True, file=None, image=None
    )
    ai = ArchiveItemFactory(archive=archive)
    ai.values.add(civ)

    # The edit form lists every interface, with the BOOL one pre-checked
    response = get_view_for_user(
        viewname="archives:item-edit",
        client=client,
        method=client.get,
        reverse_kwargs={"slug": archive.slug, "id": ai.pk},
        follow=True,
        user=editor,
    )
    assert response.status_code == 200
    for _ci in ComponentInterface.objects.all():
        assert _ci.slug in response.rendered_content
    assert f'id="id_{ci.slug}" checked' in response.rendered_content

    assert Job.objects.count() == 0
    alg = AlgorithmFactory()
    AlgorithmImageFactory(algorithm=alg, ready=True)
    alg.inputs.set([ci])
    # Adding the algorithm to the archive creates a job for the existing item
    with capture_on_commit_callbacks(execute=True):
        archive.algorithms.add(alg)
    assert Job.objects.count() == 1

    civ_count = ComponentInterfaceValue.objects.count()

    # Nested on_commit callbacks are created by these tasks
    with capture_on_commit_callbacks(execute=True):
        with capture_on_commit_callbacks(execute=True):
            response = get_view_for_user(
                viewname="archives:item-edit",
                client=client,
                method=client.post,
                reverse_kwargs={"slug": archive.slug, "id": ai.pk},
                data={ci.slug: False},
                follow=True,
                user=editor,
            )

    assert ai.values.filter(pk=civ.pk).count() == 0
    # This should create a new CIV as they are immutable
    assert ComponentInterfaceValue.objects.count() == civ_count + 1
    # A new job should have been created, because the value for 'bool'
    # has changed
    assert Job.objects.count() == 2

    with capture_on_commit_callbacks(execute=True):
        with capture_on_commit_callbacks(execute=True):
            response = get_view_for_user(
                viewname="archives:item-edit",
                client=client,
                method=client.post,
                reverse_kwargs={"slug": archive.slug, "id": ai.pk},
                data={ci.slug: True},
                follow=True,
                user=editor,
            )

    # New jobs should be created as there is a new CIV
    assert Job.objects.count() == 3
    assert ComponentInterfaceValue.objects.count() == civ_count + 2
def test_algorithm_multiple_inputs(client, algorithm_io_image, settings, component_interfaces):
    """Run a job with one input per component interface and check the
    algorithm's output accounts for every input file/value."""
    # Override the celery settings. These are boolean flags; the previous
    # one-element tuples `(True, )` only worked because any non-empty tuple
    # is truthy.
    settings.task_eager_propagates = True
    settings.task_always_eager = True

    creator = UserFactory()

    assert Job.objects.count() == 0

    # Create the algorithm image
    algorithm_container, sha256 = algorithm_io_image
    alg = AlgorithmImageFactory(
        image__from_path=algorithm_container, image_sha256=sha256, ready=True
    )
    alg.algorithm.add_editor(creator)
    alg.algorithm.inputs.set(ComponentInterface.objects.all())

    # create the job
    job = Job.objects.create(creator=creator, algorithm_image=alg)

    # Build one input CIV per interface, tracking the value the container
    # is expected to report back for each
    expected = []
    for ci in ComponentInterface.objects.all():
        if ci.kind in InterfaceKind.interface_type_image():
            image_file = ImageFileFactory(
                file__from_path=Path(__file__).parent
                / "resources"
                / "input_file.tif"
            )
            job.inputs.add(
                ComponentInterfaceValueFactory(
                    interface=ci, image=image_file.image, file=None
                )
            )
            expected.append("file")
        elif ci.kind in InterfaceKind.interface_type_file():
            job.inputs.add(
                ComponentInterfaceValueFactory(
                    interface=ci,
                    file__from_path=Path(__file__).parent
                    / "resources"
                    / "test.json",
                )
            )
            expected.append("json")
        else:
            job.inputs.add(
                ComponentInterfaceValueFactory(
                    interface=ci, value="test", file=None
                )
            )
            expected.append("test")

    # Nested on_commits created by these tasks
    with capture_on_commit_callbacks(execute=True):
        with capture_on_commit_callbacks(execute=True):
            run_algorithm_job_for_inputs(job_pk=job.pk, upload_pks=[])

    job = Job.objects.get()
    assert job.status == job.SUCCESS
    assert {x[0] for x in job.input_files} == set(
        job.outputs.first().value.keys()
    )
    assert sorted(
        map(
            lambda x: x if x != {} else "json",
            job.outputs.first().value.values(),
        )
    ) == sorted(expected)
def test_algorithm(client, algorithm_image, settings):
    """End-to-end: execute an algorithm container, verify its JSON and
    image outputs, then add a file-backed output interface and run again."""
    # Override the celery settings. These are boolean flags; the previous
    # one-element tuples `(True, )` only worked because any non-empty tuple
    # is truthy.
    settings.task_eager_propagates = True
    settings.task_always_eager = True

    assert Job.objects.count() == 0

    # Create the algorithm image
    algorithm_container, sha256 = algorithm_image
    alg = AlgorithmImageFactory(
        image__from_path=algorithm_container, image_sha256=sha256, ready=True
    )

    # We should not be able to download image
    with pytest.raises(NotImplementedError):
        _ = alg.image.url

    # Run the algorithm, it will create a results.json and an output.tif
    image_file = ImageFileFactory(
        file__from_path=Path(__file__).parent / "resources" / "input_file.tif"
    )

    with capture_on_commit_callbacks(execute=True):
        execute_jobs(algorithm_image=alg, images=[image_file.image])

    jobs = Job.objects.filter(algorithm_image=alg).all()

    # There should be a single, successful job
    assert len(jobs) == 1
    assert jobs[0].stdout.endswith("Greetings from stdout\n")
    assert jobs[0].stderr.endswith('("Hello from stderr")\n')
    assert jobs[0].error_message == ""
    assert jobs[0].status == jobs[0].SUCCESS

    # The job should have two ComponentInterfaceValues,
    # one for the results.json and one for output.tif
    assert len(jobs[0].outputs.all()) == 2

    json_result_interface = ComponentInterface.objects.get(
        slug="results-json-file"
    )
    json_result_civ = jobs[0].outputs.get(interface=json_result_interface)
    assert json_result_civ.value == {
        "entity": "out.tif",
        "metrics": {"abnormal": 0.19, "normal": 0.81},
    }

    heatmap_interface = ComponentInterface.objects.get(slug="generic-overlay")
    heatmap_civ = jobs[0].outputs.get(interface=heatmap_interface)
    assert heatmap_civ.image.name == "output.tif"

    # We add another ComponentInterface with file value and run the algorithm again
    detection_interface = ComponentInterfaceFactory(
        store_in_database=False,
        relative_path="detection_results.json",
        title="detection-json-file",
        slug="detection-json-file",
        kind=ComponentInterface.Kind.JSON,
    )
    alg.algorithm.outputs.add(detection_interface)
    alg.save()

    image_file = ImageFileFactory(
        file__from_path=Path(__file__).parent / "resources" / "input_file.tif"
    )

    with capture_on_commit_callbacks(execute=True):
        execute_jobs(algorithm_image=alg, images=[image_file.image])

    jobs = Job.objects.filter(
        algorithm_image=alg, inputs__image=image_file.image
    ).all()

    # There should be a single, successful job
    assert len(jobs) == 1

    # The job should have three ComponentInterfaceValues,
    # one with the detection_results store in the file
    assert len(jobs[0].outputs.all()) == 3
    detection_civ = jobs[0].outputs.get(interface=detection_interface)
    assert not detection_civ.value
    assert re.search("detection_results.*json$", detection_civ.file.name)
def test_jobs_workflow(self):
    """Executing jobs for images schedules exactly one on-commit callback."""
    algorithm_image = AlgorithmImageFactory()
    input_images = [ImageFactory(), ImageFactory()]
    with capture_on_commit_callbacks() as callbacks:
        execute_jobs(algorithm_image=algorithm_image, images=input_images)
    assert len(callbacks) == 1
def test_algorithm_multiple_inputs(client, algorithm_io_image, settings, component_interfaces):
    """Run a job with one input per non-ZIP interface and check that the
    container's output lists every input path."""
    # Override the celery settings. These are boolean flags; the previous
    # one-element tuples `(True, )` only worked because any non-empty tuple
    # is truthy.
    settings.task_eager_propagates = True
    settings.task_always_eager = True

    creator = UserFactory()

    assert Job.objects.count() == 0

    # Create the algorithm image
    algorithm_container, sha256 = algorithm_io_image
    alg = AlgorithmImageFactory(
        image__from_path=algorithm_container, image_sha256=sha256, ready=True
    )
    alg.algorithm.add_editor(creator)
    alg.algorithm.inputs.set(ComponentInterface.objects.all())
    alg.algorithm.outputs.set(
        [ComponentInterface.objects.get(slug="results-json-file")]
    )

    # create the job
    job = Job.objects.create(creator=creator, algorithm_image=alg)

    # Build one input CIV per interface, tracking the value the container
    # is expected to report back for each
    expected = []
    for ci in ComponentInterface.objects.exclude(
        kind=InterfaceKindChoices.ZIP
    ):
        if ci.is_image_kind:
            image_file = ImageFileFactory(
                file__from_path=Path(__file__).parent
                / "resources"
                / "input_file.tif"
            )
            job.inputs.add(
                ComponentInterfaceValueFactory(
                    interface=ci, image=image_file.image
                )
            )
            expected.append("file")
        elif ci.is_file_kind:
            civ = ComponentInterfaceValueFactory(interface=ci)
            civ.file.save("test", File(BytesIO(b"")))
            civ.save()
            job.inputs.add(civ)
            expected.append("file")
        else:
            job.inputs.add(
                ComponentInterfaceValueFactory(interface=ci, value="test")
            )
            expected.append("test")

    with capture_on_commit_callbacks() as callbacks:
        run_algorithm_job_for_inputs(job_pk=job.pk, upload_pks=[])
    recurse_callbacks(callbacks=callbacks)

    job.refresh_from_db()
    assert job.error_message == ""
    assert job.status == job.SUCCESS

    # Remove fake value for score
    output_dict = job.outputs.first().value
    output_dict.pop("score")

    assert {f"/input/{x.relative_path}" for x in job.inputs.all()} == set(
        output_dict.keys()
    )
    assert sorted(
        map(lambda x: x if x != {} else "json", output_dict.values())
    ) == sorted(expected)