def test_non_zip_submission_failure(
    client, evaluation_image, submission_file, settings
):
    """A 7z predictions file should produce a failed evaluation with a clear message."""
    # Run celery tasks inline and propagate their exceptions
    settings.task_eager_propagates = (True,)
    settings.task_always_eager = (True,)

    # Build a ready evaluation method from the test container
    container, checksum = evaluation_image
    method = MethodFactory(
        image__from_path=container, image_sha256=checksum, ready=True
    )

    # Submit a 7z archive instead of a supported format
    # NOTE(review): unlike test_submission_evaluation this does not wrap the
    # factory call in capture_on_commit_callbacks — presumably the failure
    # path is synchronous here; confirm if this test ever flakes.
    submission = SubmissionFactory(
        predictions_file__from_path=Path(__file__).parent
        / "resources"
        / "submission.7z",
        phase=method.phase,
    )

    # Exactly one evaluation exists and it must have failed with the 7z message
    evaluations = submission.evaluation_set.all()
    assert len(evaluations) == 1
    evaluation = evaluations.first()
    assert evaluation.error_message.endswith(
        "7z-compressed files are not supported."
    )
    assert evaluation.status == evaluation.FAILURE
def setUp(self) -> None:
    """Prepare a ready method whose phase archive holds two of three images."""
    archive = ArchiveFactory()
    self.method = MethodFactory(ready=True, phase__archive=archive)
    self.algorithm_image = AlgorithmImageFactory()
    self.images = ImageFactory.create_batch(3)
    # Only the first two images belong to the phase's archive; the third is
    # deliberately left outside it.
    archive.images.set(self.images[:2])
def test_submission_evaluation(
    client, evaluation_image, submission_file, settings
):
    """A valid submission is evaluated successfully, for both zip and csv files."""
    # Execute celery tasks synchronously and surface any errors
    settings.task_eager_propagates = (True,)
    settings.task_always_eager = (True,)

    # Build a ready evaluation method from the test container
    container, checksum = evaluation_image
    method = MethodFactory(
        image__from_path=container, image_sha256=checksum, ready=True
    )

    # Method container images must not be downloadable
    with pytest.raises(NotImplementedError):
        _ = method.image.url

    # Creating the submission queues an evaluation via on-commit callbacks;
    # run them (recursively, as callbacks may enqueue more callbacks)
    with capture_on_commit_callbacks() as callbacks:
        submission = SubmissionFactory(
            predictions_file__from_path=submission_file, phase=method.phase
        )
    recurse_callbacks(callbacks=callbacks)

    # The evaluation method should return the correct answer
    assert len(submission.evaluation_set.all()) == 1
    evaluation = submission.evaluation_set.first()
    assert evaluation.stdout.endswith("Greetings from stdout\n")
    assert evaluation.stderr.endswith('warn("Hello from stderr")\n')
    assert evaluation.error_message == ""
    assert evaluation.status == evaluation.SUCCESS
    metrics = evaluation.outputs.get(interface__slug="metrics-json-file")
    assert metrics.value["acc"] == 0.5

    # A csv predictions file should evaluate just as well
    with capture_on_commit_callbacks() as callbacks:
        submission = SubmissionFactory(
            predictions_file__from_path=Path(__file__).parent
            / "resources"
            / "submission.csv",
            phase=method.phase,
        )
    recurse_callbacks(callbacks=callbacks)

    evaluation = submission.evaluation_set.first()
    assert len(submission.evaluation_set.all()) == 1
    assert evaluation.status == evaluation.SUCCESS
    metrics = evaluation.outputs.get(interface__slug="metrics-json-file")
    assert metrics.value["acc"] == 0.5
def test_method_permissions(self):
    """Only challenge admins should be able to view methods."""
    method: Method = MethodFactory()
    admins = method.phase.challenge.admins_group
    # The admins group gets view_method; no user holds a direct permission
    assert get_groups_with_set_perms(method) == {admins: {"view_method"}}
    assert get_users_with_perms(method, with_group_users=False).count() == 0
def test_permission_filtered_views(self, client):
    """List views show an object only while the user holds its view permission."""
    user = UserFactory()
    phase = PhaseFactory()
    method = MethodFactory(phase=phase)
    submission = SubmissionFactory(phase=phase, creator=user)
    evaluation = EvaluationFactory(
        method=method,
        submission=submission,
        rank=1,
        status=Evaluation.SUCCESS,
    )

    # (view name, extra reverse kwargs, object permission, object)
    cases = [
        ("method-list", {}, "view_method", method),
        ("submission-list", {}, "view_submission", submission),
        ("list", {}, "view_evaluation", evaluation),
        (
            "leaderboard",
            {"slug": evaluation.submission.phase.slug},
            "view_evaluation",
            evaluation,
        ),
    ]

    for view_name, kwargs, permission, obj in cases:
        reverse_kwargs = {
            "challenge_short_name": (
                evaluation.submission.phase.challenge.short_name
            ),
            **kwargs,
        }

        # With the permission, the object appears in the list view
        assign_perm(permission, user, obj)
        response = get_view_for_user(
            client=client,
            viewname=f"evaluation:{view_name}",
            reverse_kwargs=reverse_kwargs,
            user=user,
        )
        assert response.status_code == 200
        assert obj in response.context[-1]["object_list"]

        # Without it, the view still renders but hides the object
        remove_perm(permission, user, obj)
        response = get_view_for_user(
            client=client,
            viewname=f"evaluation:{view_name}",
            reverse_kwargs=reverse_kwargs,
            user=user,
        )
        assert response.status_code == 200
        assert obj not in response.context[-1]["object_list"]
def challenge_set_with_evaluation(challenge_set):
    """Extend a challenge set with an evaluation method for its phase.

    Returns a namedtuple of (challenge_set, method). To use this you must
    mark the test with `@pytest.mark.django_db`.
    """
    EvalChallengeSet = namedtuple(
        "eval_challenge_set", ["challenge_set", "method"]
    )
    phase = challenge_set.challenge.phase_set.get()
    method = MethodFactory(phase=phase, creator=challenge_set.creator)
    return EvalChallengeSet(challenge_set, method)
def test_method_validation_not_a_docker_tar(submission_file):
    """Uploading something that isn't a docker image tar must stay not-ready."""
    method = MethodFactory(image__from_path=submission_file)
    assert method.ready is False

    validate_docker_image(
        pk=method.pk,
        app_label=method._meta.app_label,
        model_name=method._meta.model_name,
    )

    # Reload from the database to pick up the validator's changes
    method = Method.objects.get(pk=method.pk)
    assert method.ready is False
    assert "manifest.json not found" in method.status
def test_method_validation_root_dockerfile(root_image):
    """Uploading an image that runs as the root user should fail validation."""
    # NOTE: the previous docstring ("two images in a tar archive") was
    # copy-pasted from the multi-image test; this one checks the root-user case.
    method = MethodFactory(image__from_path=root_image)
    assert method.ready is False

    validate_docker_image(
        pk=method.pk,
        app_label=method._meta.app_label,
        model_name=method._meta.model_name,
    )

    # Reload from the database to pick up the validator's changes
    method = Method.objects.get(pk=method.pk)
    assert method.ready is False
    assert "runs as root" in method.status
def test_method_validation_invalid_dockerfile(alpine_images):
    """Uploading two images in a tar archive should fail."""
    method = MethodFactory(image__from_path=alpine_images)
    assert method.ready is False

    validate_docker_image(
        pk=method.pk,
        app_label=method._meta.app_label,
        model_name=method._meta.model_name,
    )

    # Reload from the database to pick up the validator's changes
    method = Method.objects.get(pk=method.pk)
    assert method.ready is False
    assert "should only have 1 image" in method.status
def setUp(self) -> None:
    """Prepare a ready method whose phase archive has two images wired to an interface."""
    self.method = MethodFactory(ready=True, phase__archive=ArchiveFactory())
    self.algorithm_image = AlgorithmImageFactory()

    # The algorithm consumes a single component interface
    interface = ComponentInterfaceFactory()
    self.algorithm_image.algorithm.inputs.set([interface])

    self.images = ImageFactory.create_batch(3)
    # Only the first two images get archive items carrying interface values;
    # the third image is deliberately left out of the archive.
    archive = self.method.phase.archive
    for img in self.images[:2]:
        value = ComponentInterfaceValueFactory(image=img, interface=interface)
        item = ArchiveItemFactory(archive=archive)
        item.values.add(value)
def test_method_validation(evaluation_image):
    """The validator should set the correct sha256 and set the ready bit."""
    container, expected_sha256 = evaluation_image
    method = MethodFactory(image__from_path=container)

    # The method factory fakes the sha256 on creation, so it differs initially
    assert method.image_sha256 != expected_sha256
    assert method.ready is False

    validate_docker_image(
        pk=method.pk,
        app_label=method._meta.app_label,
        model_name=method._meta.model_name,
    )

    # Reload from the database to pick up the validator's changes
    method = Method.objects.get(pk=method.pk)
    assert method.image_sha256 == expected_sha256
    assert method.ready is True
def test_container_pushing(evaluation_image):
    """Pushing a method image publishes it to the components registry."""
    container, _sha256 = evaluation_image
    method = MethodFactory(image__from_path=container)

    push_container_image(instance=method)

    registry = settings.COMPONENTS_REGISTRY_URL
    # The repository must appear in the registry catalog
    catalog = requests.get(f"http://{registry}/v2/_catalog")
    assert catalog.status_code == 200
    assert "gc.localhost/evaluation/method" in catalog.json()["repositories"]

    # The image is tagged with the method's pk
    tags = requests.get(
        f"http://{registry}/v2/gc.localhost/evaluation/method/tags/list"
    )
    assert tags.status_code == 200
    assert str(method.pk) in tags.json()["tags"]
def test_evaluation_notifications(client, evaluation_image, submission_file, settings):
    """Notifications are sent for missing-method, success, and failure, and
    stop for a phase the admin has unfollowed."""
    # Override the celery settings: run tasks inline and propagate errors
    settings.task_eager_propagates = (True, )
    settings.task_always_eager = (True, )

    # Try to upload a submission without a method in place
    with capture_on_commit_callbacks(execute=True):
        submission = SubmissionFactory(predictions_file__from_path=submission_file)
    # A missing method should result in a notification for admins of the
    # challenge. There are 2 notifications here: the second is about admin
    # addition to the challenge; both notifications are for the admin.
    for notification in Notification.objects.all():
        assert notification.user == submission.phase.challenge.creator
    assert "there is no valid evaluation method" in Notification.objects.filter(
        message="missing method").get().print_notification(
        user=submission.phase.challenge.creator)

    # Add a ready method and upload a submission
    eval_container, sha256 = evaluation_image
    method = MethodFactory(image__from_path=eval_container, image_sha256=sha256, ready=True)
    # Clear notifications for easier counting later
    Notification.objects.all().delete()
    # Create a submission and run the on-commit callbacks so it is evaluated
    with capture_on_commit_callbacks() as callbacks:
        submission = SubmissionFactory(
            predictions_file__from_path=submission_file, phase=method.phase)
    recurse_callbacks(callbacks=callbacks)

    # The creator of the submission and the admins of the challenge should
    # each get a notification about the successful submission
    recipients = list(submission.phase.challenge.get_admins())
    recipients.append(submission.creator)
    assert Notification.objects.count() == len(recipients)
    for recipient in recipients:
        assert str(recipient) in str(Notification.objects.all())
    # Expected anchor fragments in the rendered notification text
    result_string = format_html('<a href="{}">result</a>', submission.get_absolute_url())
    submission_string = format_html('<a href="{}">submission</a>', submission.get_absolute_url())
    challenge_string = format_html(
        '<a href="{}">{}</a>',
        submission.phase.challenge.get_absolute_url(),
        submission.phase.challenge.short_name,
    )
    # recipients[0] is the admin, recipients[1] is the submission creator
    assert f"There is a new {result_string} for {challenge_string}" in Notification.objects.filter(
        user=recipients[0]).get().print_notification(user=recipients[0])
    assert f"Your {submission_string} to {challenge_string} succeeded" in Notification.objects.filter(
        user=recipients[1]).get().print_notification(user=recipients[1])
    Notification.objects.all().delete()

    # Update the evaluation status to failed
    evaluation = submission.evaluation_set.first()
    evaluation.update_status(status=evaluation.FAILURE)
    assert evaluation.status == evaluation.FAILURE
    # Failure notifies both the admin and the creator of the submission
    assert Notification.objects.count() == len(recipients)
    for recipient in recipients:
        assert str(recipient) in str(Notification.objects.all())
    assert f"The {submission_string} from {user_profile_link(Notification.objects.filter(user=recipients[0]).get().actor)} to {challenge_string} failed" in Notification.objects.filter(
        user=recipients[0]).get().print_notification(user=recipients[0])
    assert f"Your {submission_string} to {challenge_string} failed" in Notification.objects.filter(
        user=recipients[1]).get().print_notification(user=recipients[1])

    # Check that when the admin unsubscribes from the phase, they no longer
    # receive notifications about activity related to that phase
    Notification.objects.all().delete()
    unfollow(user=submission.phase.challenge.creator, obj=submission.phase)
    evaluation.update_status(status=evaluation.SUCCESS)
    assert str(submission.phase.challenge.creator) not in str(
        Notification.objects.all())