def test_success_language_agnostic(self):
    self.task.submission_format = ["foo", "bar", "baz"]
    self.task_type.get_user_managers.return_value = ["spam", "ham", "eggs"]
    self.files = {"foo": FOO_CONTENT,
                  "spam": SPAM_CONTENT,
                  "input": INPUT_CONTENT}
    self.language = None
    self.digests = {"bar": bytes_digest(BAR_CONTENT),
                    "ham": bytes_digest(HAM_CONTENT)}

    user_test = self.call()

    self.assertUserTestIsValid(
        user_test, self.timestamp, None,
        {"foo": FOO_CONTENT, "bar": BAR_CONTENT},
        {"spam": SPAM_CONTENT, "ham": HAM_CONTENT},
        INPUT_CONTENT)
def setUp(self):
    super(TestDumpExporter, self).setUp()

    if not os.path.exists(config.temp_dir):
        os.makedirs(config.temp_dir)
    self.base = os.path.join(tempfile.mkdtemp(), "target")
    self.dump = None

    # Add a file to be used as a statement.
    self.st_content = b"statement"
    self.st_digest = bytes_digest(self.st_content)
    self.add_fsobject(self.st_digest, self.st_content)

    # Add a file to be used as a submission source.
    self.file_content = b"source"
    self.file_digest = bytes_digest(self.file_content)
    self.add_fsobject(self.file_digest, self.file_content)

    # Add a file to be used as an executable.
    self.exe_content = b"executable"
    self.exe_digest = bytes_digest(self.exe_content)
    self.add_fsobject(self.exe_digest, self.exe_content)

    self.contest = self.add_contest(description="你好")
    self.participation = self.add_participation(contest=self.contest)
    self.user = self.participation.user
    self.task = self.add_task(contest=self.contest)
    self.statement = self.add_statement(task=self.task, digest=self.st_digest)
    self.dataset = self.add_dataset(task=self.task)
    self.task.active_dataset = self.task.datasets[0]
    self.submission = self.add_submission(self.task, self.participation)
    self.file = self.add_file(
        submission=self.submission, digest=self.file_digest)

    # Add the executable to the submission.
    self.submission_result = self.add_submission_result(
        submission=self.submission, dataset=self.dataset)
    self.add_executable(self.submission_result, digest=self.exe_digest)

    # Another contest.
    self.other_contest = self.add_contest()

    # User and task not attached to any contest.
    self.unattached_user = self.add_user()
    self.unattached_task = self.add_task()

    self.session.commit()
def setUp(self):
    super().setUp()

    self.target = self.get_path("target")
    self.dump = None

    # Add a file to be used as a statement.
    self.st_content = b"statement"
    self.st_digest = bytes_digest(self.st_content)
    self.add_fsobject(self.st_digest, self.st_content)

    # Add a file to be used as a submission source.
    self.file_content = b"source"
    self.file_digest = bytes_digest(self.file_content)
    self.add_fsobject(self.file_digest, self.file_content)

    # Add a file to be used as an executable.
    self.exe_content = b"executable"
    self.exe_digest = bytes_digest(self.exe_content)
    self.add_fsobject(self.exe_digest, self.exe_content)

    self.contest = self.add_contest(description="你好")
    self.participation = self.add_participation(contest=self.contest)
    self.user = self.participation.user
    self.task = self.add_task(contest=self.contest)
    self.statement = self.add_statement(
        task=self.task, digest=self.st_digest)
    self.dataset = self.add_dataset(task=self.task)
    self.task.active_dataset = self.task.datasets[0]
    self.submission = self.add_submission(self.task, self.participation)
    self.file = self.add_file(
        submission=self.submission, digest=self.file_digest)

    # Add the executable to the submission.
    self.submission_result = self.add_submission_result(
        submission=self.submission, dataset=self.dataset)
    self.add_executable(self.submission_result, digest=self.exe_digest)

    # Another contest.
    self.other_contest = self.add_contest()

    # User and task not attached to any contest.
    self.unattached_user = self.add_user()
    self.unattached_task = self.add_task()

    self.session.commit()
def test_success_language_agnostic(self):
    self.task.submission_format = ["foo", "bar", "baz"]
    self.files = {"foo": FOO_CONTENT}
    self.language = None
    self.digests = {"bar": bytes_digest(BAR_CONTENT)}

    submission = self.call()

    self.assertSubmissionIsValid(
        submission, self.timestamp, None,
        {"foo": FOO_CONTENT, "bar": BAR_CONTENT},
        True)
def test_input_filled_in_from_previous_test(self):
    del self.files["input"]
    self.digests["input"] = bytes_digest(INPUT_CONTENT)

    user_test = self.call()

    self.assertUserTestIsValid(
        user_test, self.timestamp, "MockLanguage",
        {"foo.%l": FOO_CONTENT, "bar.%l": BAR_CONTENT},
        {"spammock.1": SPAM_CONTENT, "hammock.1": HAM_CONTENT},
        INPUT_CONTENT)
def assertUserTestIsValid(self, user_test, timestamp, language, files,
                          managers, input_):
    # Ensure pending user tests are sent to the DB and given IDs.
    self.session.flush()

    # Ensure the user test is in the DB.
    db_user_test = self.session.query(UserTest) \
        .filter(UserTest.id == user_test.id).first()
    self.assertIs(user_test, db_user_test)

    # And that it has the expected fields.
    self.assertEqual(user_test.timestamp, timestamp)
    self.assertEqual(user_test.language, language)
    self.assertCountEqual(user_test.files.keys(), files.keys())
    self.assertCountEqual((f.digest for f in user_test.files.values()),
                          (bytes_digest(b) for b in files.values()))
    self.assertCountEqual(user_test.managers.keys(), managers.keys())
    self.assertCountEqual((f.digest for f in user_test.managers.values()),
                          (bytes_digest(b) for b in managers.values()))
    self.assertEqual(user_test.input,
                     bytes_digest(input_) if input_ is not None else None)
def assertSubmissionIsValid(self, submission, timestamp, language, files,
                            official):
    # Ensure pending submissions are sent to the DB and given IDs.
    self.session.flush()

    # Ensure the submission is in the DB.
    db_submission = self.session.query(Submission) \
        .filter(Submission.id == submission.id).first()
    self.assertIs(submission, db_submission)

    # And that it has the expected fields.
    self.assertEqual(submission.timestamp, timestamp)
    self.assertEqual(submission.language, language)
    self.assertCountEqual(submission.files.keys(), files.keys())
    self.assertCountEqual((f.digest for f in submission.files.values()),
                          (bytes_digest(b) for b in files.values()))
    self.assertIs(submission.official, official)
def test_file_duplicates(self):
    """Send multiple copies of the same file into FileCacher.

    Generates a random file and attempts to store it several times in
    the FileCacher. FC should handle this gracefully and only end up
    with one copy.

    """
    content = os.urandom(100)
    digest = bytes_digest(content)

    # Test writing the same file to the DB in parallel.
    # Create empty files.
    num_files = 4
    fobjs = []
    for _ in range(num_files):
        fobj = self.file_cacher.backend.create_file(digest)
        # As the file contains random data, we don't expect to have put
        # this into the DB previously.
        assert fobj is not None
        fobjs.append(fobj)

    # Close them in a different order. Seed to make the shuffle
    # deterministic.
    r = random.Random()
    r.seed(num_files)
    r.shuffle(fobjs)

    # Write the files and commit them.
    for i, fobj in enumerate(fobjs):
        fobj.write(content)
        # Ensure that only one copy made it into the database.
        commit_ok = self.file_cacher.backend.commit_file(
            fobj, digest, desc='Copy %d' % i)
        # Only the first commit should succeed.
        assert commit_ok == (i == 0), \
            "Commit of %d was %s unexpectedly" % (i, commit_ok)

    # Check that the file was stored correctly.
    self.check_stored_file(digest)
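# The test above relies on the backend's two-phase write contract:
# create_file hands out a writable object for a digest, and commit_file
# publishes it, succeeding only for the first committer of that digest.
# What follows is a toy in-memory stand-in honoring that contract as
# exercised by the test; it is an illustrative sketch, not the real CMS
# backend implementation.
import io


class ToyDigestBackend:
    """In-memory sketch of the create_file/commit_file contract."""

    def __init__(self):
        self._files = {}  # digest -> committed content

    def create_file(self, digest):
        # Hand out a fresh writable object, or None if the digest is
        # already stored (callers assert they got a non-None object).
        if digest in self._files:
            return None
        return io.BytesIO()

    def commit_file(self, fobj, digest, desc=""):
        # Publish the written content under its digest; only the first
        # commit for a given digest succeeds, later ones report False.
        if digest in self._files:
            return False
        self._files[digest] = fobj.getvalue()
        return True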
def setUp(self): # Choose a size that is larger than FileCacher.CHUNK_SIZE. self.content = \ bytes(random.getrandbits(8) for _ in range(17 * 1024)) self.digest = bytes_digest(self.content) self.filename = "foobar.pdf" self.mimetype = "image/jpeg" self.file_cacher = Mock() self.file_cacher.get_file = Mock( side_effect=lambda digest: io.BytesIO(self.content)) self.file_cacher.get_size = Mock(return_value=len(self.content)) self.serve_file = True self.provide_filename = True self.wsgi_app = \ FileServerMiddleware(self.file_cacher,self.wrapped_wsgi_app) self.environ_builder = EnvironBuilder("/some/url") self.client = Client(self.wsgi_app, Response)
def setUp(self):
    # We need to wrap the generator in a list because of a
    # shortcoming of future's bytes implementation.
    # Choose a size that is larger than FileCacher.CHUNK_SIZE.
    self.content = bytes(
        [random.getrandbits(8) for _ in range(2 ** 14 + 1024)])
    self.digest = bytes_digest(self.content)
    self.filename = "foobar.pdf"
    self.mimetype = "image/jpeg"

    self.file_cacher = Mock()
    self.file_cacher.get_file = Mock(
        side_effect=lambda digest: io.BytesIO(self.content))
    self.file_cacher.get_size = Mock(return_value=len(self.content))

    self.serve_file = True
    self.provide_filename = True

    self.wsgi_app = FileServerMiddleware(
        self.file_cacher, self.wrapped_wsgi_app)
    self.environ_builder = EnvironBuilder("/some/url")
    self.client = Client(self.wsgi_app, Response)
def test_empty(self):
    self.assertEqual(bytes_digest(b""), _EMPTY_DIGEST)

def test_success(self):
    self.assertEqual(bytes_digest(b"content"), _CONTENT_DIGEST)
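# These fixtures treat digests as hex strings computed over raw bytes. As an
# assumption (the cmscommon.digest module itself is not shown here): CMS
# digest helpers have historically used SHA-1, so a minimal sketch of
# bytes_digest under that assumption would look like this.
import hashlib


def bytes_digest_sketch(data):
    """Hex digest of raw bytes; sketch assuming SHA-1 as the hash."""
    if not isinstance(data, bytes):
        # Passing a str must raise TypeError, matching the TypeError
        # test elsewhere in this suite.
        raise TypeError("expected bytes, got %s" % type(data).__name__)
    return hashlib.sha1(data).hexdigest()


# For example, the empty input hashes to the well-known SHA-1 value:
# bytes_digest_sketch(b"") == "da39a3ee5e6b4b0d3255bfef95601890afd80709"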
class TestDumpImporter(DatabaseMixin, FileSystemMixin, unittest.TestCase):

    GENERATED_FILE_CONTENT = b"content"
    NON_GENERATED_FILE_CONTENT = b"source"

    GENERATED_FILE_DIGEST = bytes_digest(GENERATED_FILE_CONTENT)
    NON_GENERATED_FILE_DIGEST = bytes_digest(NON_GENERATED_FILE_CONTENT)

    FILES = {
        GENERATED_FILE_DIGEST: ("desc", GENERATED_FILE_CONTENT),
        NON_GENERATED_FILE_DIGEST: ("subsource", NON_GENERATED_FILE_CONTENT),
    }

    DUMP = {
        "contest_key": {
            "_class": "Contest",
            "name": "contestname",
            "description": "contest description 你好",
            "tasks": ["task_key"],
        },
        "task_key": {
            "_class": "Task",
            "name": "taskname",
            "title": "task title",
            "num": 0,
            "contest": "contest_key",
            "datasets": ["dataset_key"],
            "active_dataset": "dataset_key",
            "submissions": ["sub_key"],
        },
        "dataset_key": {
            "_class": "Dataset",
            "task_type": "Batch",
            "task_type_parameters": "[]",
            "score_type": "Sum",
            "score_type_parameters": "[]",
            "time_limit": 1.0,
            "memory_limit": 512,
            "description": "dataset description",
            "task": "task_key",
        },
        "user_key": {
            "_class": "User",
            "username": "username",
            "first_name": "First Name",
            "last_name": "Last Name",
            "password": "******",
        },
        "part_key": {
            "_class": "Participation",
            "user": "user_key",
            "contest": "contest_key",
            "submissions": ["sub_key"],
        },
        "sub_key": {
            "_class": "Submission",
            "timestamp": 1234567890.123,
            "participation": "part_key",
            "task": "task_key",
        },
        "file_key": {
            "_class": "File",
            "submission": "sub_key",
            "filename": "source",
            "digest": NON_GENERATED_FILE_DIGEST,
        },
        "sr_key": {
            "_class": "SubmissionResult",
            "submission": "sub_key",
            "dataset": "dataset_key",
            "executables": {"exe": "exe_key"},
        },
        "exe_key": {
            "_class": "Executable",
            "submission_result": "sr_key",
            "filename": "exe",
            "dataset": "dataset_key",
            "digest": GENERATED_FILE_DIGEST,
        },
        "_version": version,
        "_objects": ["contest_key", "user_key"],
    }

    def setUp(self):
        super(TestDumpImporter, self).setUp()

        # Another contest, to make sure it's not wiped on import.
        self.other_contest = self.add_contest()
        self.session.commit()
        self.other_contest_name = self.other_contest.name
        self.other_contest_description = self.other_contest.description

    def tearDown(self):
        self.delete_data()
        super(TestDumpImporter, self).tearDown()

    def do_import(self, drop=False, load_files=True,
                  skip_generated=False, skip_submissions=False):
        """Create an importer and call do_import in a convenient way."""
        return DumpImporter(drop,
                            self.base_dir,
                            load_files=load_files,
                            load_model=True,
                            skip_generated=skip_generated,
                            skip_submissions=skip_submissions,
                            skip_user_tests=False,
                            skip_print_jobs=False).do_import()

    def write_dump(self, dump):
        destination = self.get_path("contest.json")
        if PY3:
            with io.open(destination, "wt", encoding="utf-8") as f:
                json.dump(dump, f, indent=4, sort_keys=True)
        else:
            with io.open(destination, "wb") as f:
                json.dump(dump, f, indent=4, sort_keys=True)

    def write_files(self, data):
        """Write files and descriptions on the filesystem.

        data ({str: (str, bytes)}): dictionary mapping digest to
            description and content.

        """
        f_path = self.makedirs("files")
        d_path = self.makedirs("descriptions")
        for digest, (desc, content) in iteritems(data):
            with io.open(os.path.join(d_path, digest),
                         "wt", encoding="utf-8") as f:
                f.write(desc)
            with io.open(os.path.join(f_path, digest), "wb") as f:
                f.write(content)

    def assertContestInDb(self, name, description, task_names_and_titles,
                          usernames_and_last_names):
        """Assert that the contest with the given data is in the DB.

        The query is done by contest name, and to avoid caching, we
        query from a brand new session.

        """
        db_contests = self.session.query(Contest)\
            .filter(Contest.name == name).all()
        self.assertEqual(len(db_contests), 1)
        c = db_contests[0]
        self.assertEqual(c.name, name)
        self.assertEqual(c.description, description)
        assertCountEqual(self, [(t.name, t.title) for t in c.tasks],
                         task_names_and_titles)
        assertCountEqual(self, [(u.user.username, u.user.last_name)
                                for u in c.participations],
                         usernames_and_last_names)

    def assertContestNotInDb(self, name):
        """Assert that the contest with the given name is not in the DB."""
        db_contests = self.session.query(Contest)\
            .filter(Contest.name == name).all()
        self.assertEqual(len(db_contests), 0)

    def assertFileInDb(self, digest, description, content):
        """Assert that the file with the given data is in the DB."""
        fsos = self.session.query(FSObject)\
            .filter(FSObject.digest == digest).all()
        self.assertEqual(len(fsos), 1)
        fso = fsos[0]
        self.assertEqual(fso.digest, digest)
        self.assertEqual(fso.description, description)
        self.assertEqual(fso.get_lobject().read(), content)

    def assertFileNotInDb(self, digest):
        """Assert that the file with the given digest is not in the DB."""
        fsos = self.session.query(FSObject)\
            .filter(FSObject.digest == digest).all()
        self.assertEqual(len(fsos), 0)

    def test_import(self):
        """Test importing everything, while keeping the existing contest."""
        self.write_dump(TestDumpImporter.DUMP)
        self.write_files(TestDumpImporter.FILES)
        self.assertTrue(self.do_import())

        self.assertContestInDb("contestname", "contest description 你好",
                               [("taskname", "task title")],
                               [("username", "Last Name")])
        self.assertContestInDb(self.other_contest_name,
                               self.other_contest_description, [], [])

        self.assertFileInDb(
            TestDumpImporter.GENERATED_FILE_DIGEST, "desc", b"content")
        self.assertFileInDb(
            TestDumpImporter.NON_GENERATED_FILE_DIGEST, "subsource", b"source")

    def test_import_with_drop(self):
        """Test importing everything, but dropping existing data."""
        self.write_dump(TestDumpImporter.DUMP)
        self.write_files(TestDumpImporter.FILES)
        # Need to close the session and reopen it, otherwise the drop hangs.
        self.session.close()
        self.assertTrue(self.do_import(drop=True))
        self.session = Session()

        self.assertContestInDb("contestname", "contest description 你好",
                               [("taskname", "task title")],
                               [("username", "Last Name")])
        self.assertContestNotInDb(self.other_contest_name)

        self.assertFileInDb(
            TestDumpImporter.GENERATED_FILE_DIGEST, "desc", b"content")
        self.assertFileInDb(
            TestDumpImporter.NON_GENERATED_FILE_DIGEST, "subsource", b"source")

    def test_import_skip_generated(self):
        """Test importing everything but the generated data."""
        self.write_dump(TestDumpImporter.DUMP)
        self.write_files(TestDumpImporter.FILES)
        self.assertTrue(self.do_import(skip_generated=True))

        self.assertContestInDb("contestname", "contest description 你好",
                               [("taskname", "task title")],
                               [("username", "Last Name")])
        self.assertContestInDb(self.other_contest_name,
                               self.other_contest_description, [], [])

        self.assertFileNotInDb(TestDumpImporter.GENERATED_FILE_DIGEST)
        self.assertFileInDb(
            TestDumpImporter.NON_GENERATED_FILE_DIGEST, "subsource", b"source")

    def test_import_skip_files(self):
        """Test importing the json but not the files."""
        self.write_dump(TestDumpImporter.DUMP)
        self.write_files(TestDumpImporter.FILES)
        self.assertTrue(self.do_import(load_files=False))

        self.assertContestInDb("contestname", "contest description 你好",
                               [("taskname", "task title")],
                               [("username", "Last Name")])
        self.assertContestInDb(self.other_contest_name,
                               self.other_contest_description, [], [])

        self.assertFileNotInDb(TestDumpImporter.GENERATED_FILE_DIGEST)
        self.assertFileNotInDb(TestDumpImporter.NON_GENERATED_FILE_DIGEST)

    def test_import_old(self):
        """Test importing an old dump.

        This does not pretend to be exhaustive, just makes sure the
        happy path of the updaters run successfully.

        """
        self.write_dump({
            "contest_key": {
                "_class": "Contest",
                "name": "contestname",
                "description": "contest description",
                "start": 1234567890.000,
                "stop": 1324567890.000,
                "token_initial": 2,
                "token_gen_number": 1,
                "token_gen_time": 10,
                "token_total": 100,
                "token_max": 100,
                "tasks": ["task_key"],
            },
            "task_key": {
                "_class": "Task",
                "name": "taskname",
                "title": "task title",
                "num": 0,
                "primary_statements": "[\"en\", \"ja\"]",
                "token_initial": None,
                "token_gen_number": 0,
                "token_gen_time": 0,
                "token_total": None,
                "token_max": None,
                "task_type": "Batch",
                "task_type_parameters": "[]",
                "score_type": "Sum",
                "score_type_parameters": "[]",
                "time_limit": 0.0,
                "memory_limit": None,
                "contest": "contest_key",
                "managers": {},
                "testcases": {},
                "submissions": ["sub_key"],
                "user_tests": [],
            },
            "user_key": {
                "_class": "User",
                "username": "username",
                "first_name": "First Name",
                "last_name": "Last Name",
                "password": "******",
                "email": "",
                "ip": "0.0.0.0",
                "preferred_languages": "[\"en\", \"it_IT\"]",
                "contest": "contest_key",
                "submissions": ["sub_key"],
            },
            "sub_key": {
                "_class": "Submission",
                "timestamp": 1234567890.123,
                "language": "c",
                "user": "user_key",
                "task": "task_key",
                "compilation_text": "OK [1.234 - 20]",
                "executables": {"exe": "exe_key"},
                "evaluations": [],
            },
            "exe_key": {
                "_class": "Executable",
                "submission": "sub_key",
                "filename": "exe",
                "digest": TestDumpImporter.GENERATED_FILE_DIGEST,
            },
            "_version": 1,
            "_objects": ["contest_key", "user_key"],
        })
        self.write_files(TestDumpImporter.FILES)
        self.assertTrue(self.do_import(skip_generated=True))

        self.assertContestInDb("contestname", "contest description",
                               [("taskname", "task title")],
                               [("username", "Last Name")])
        self.assertContestInDb(self.other_contest_name,
                               self.other_contest_description, [], [])

        # The generated executable (hard-coded digest) must not have
        # been imported.
        self.assertFileNotInDb("040f06fd774092478d450774f5ba30c5da78acc8")
# Needs to be first to allow for monkey patching the DB connection string.
from cmstestsuite.unit_tests.databasemixin import DatabaseMixin

from cms.db import File, Submission
from cmscommon.datetime import make_datetime
from cmscommon.digest import bytes_digest
from cmscontrib.AddSubmission import add_submission
from cmstestsuite.unit_tests.filesystemmixin import FileSystemMixin


_TS = 1_234_567_890

_CONTENT_1 = b"this is a source file"
_CONTENT_2 = b"this is another source"
_CONTENT_3 = b"this is one more"
_DIGEST_1 = bytes_digest(_CONTENT_1)
_DIGEST_2 = bytes_digest(_CONTENT_2)
_DIGEST_3 = bytes_digest(_CONTENT_3)
_FILENAME_1 = "file.c"
_FILENAME_2 = "file"
_FILENAME_3 = "file.py"
_LANGUAGE_1 = "C11 / gcc"


class TestAddSubmissionMixin(DatabaseMixin, FileSystemMixin):
    """Mixin for testing AddSubmission with different tasks."""

    def setUp(self):
        super().setUp()
        self.write_file(_FILENAME_1, _CONTENT_1)
        self.write_file(_FILENAME_2, _CONTENT_2)
def test_string(self):
    with self.assertRaises(TypeError):
        bytes_digest("")
def test_long(self):
    content = b"0" * 1_000_000
    self.write_file(self.filename, content)
    self.assertEqual(path_digest(self.path), bytes_digest(content))
def unique_digest():
    """Return a unique digest-like string."""
    return bytes_digest(unique_unicode_id().encode("utf-8"))
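# Hypothetical usage of the helper above: give fixture rows distinct,
# digest-shaped values without storing any real file content (this assumes
# unique_unicode_id returns a different string on every call).
digest_a = unique_digest()
digest_b = unique_digest()
assert digest_a != digest_b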
from cmstestsuite.unit_tests.databasemixin import DatabaseMixin

from cms.db import File, Submission
from cmscommon.datetime import make_datetime
from cmscommon.digest import bytes_digest
from cmscontrib.AddSubmission import add_submission
from cmstestsuite.unit_tests.filesystemmixin import FileSystemMixin


_TS = 1_234_567_890

_CONTENT_1 = b"this is a source file"
_CONTENT_2 = b"this is another source"
_CONTENT_3 = b"this is one more"
_DIGEST_1 = bytes_digest(_CONTENT_1)
_DIGEST_2 = bytes_digest(_CONTENT_2)
_DIGEST_3 = bytes_digest(_CONTENT_3)
_FILENAME_1 = "file.c"
_FILENAME_2 = "file"
_FILENAME_3 = "file.py"
_LANGUAGE_1 = "C11 / gcc"


class TestAddSubmissionMixin(DatabaseMixin, FileSystemMixin):
    """Mixin for testing AddSubmission with different tasks."""

    def setUp(self):
        super().setUp()
        self.write_file(_FILENAME_1, _CONTENT_1)
def test_long(self):
    content = b"0" * 1000000
    self.write_file(content)
    self.assertEqual(path_digest(self.path), bytes_digest(content))
def setUp(self):
    super().setUp()

    # Set up patches and mocks for a successful run. These are all
    # controlled by the following values, which can be changed to
    # make some steps fail. The task will require a language-aware
    # submission with three files: foo.%l, bar.%l and baz.%l; the
    # first will be provided by the contestant, the second will be
    # fetched from the previous submission (as partial submissions
    # will be allowed), the third will be missing.

    self.contest = self.add_contest(
        languages=["MockLanguage", "AnotherMockLanguage"])
    self.participation = self.add_participation(contest=self.contest)
    self.task = self.add_task(
        submission_format=["foo.%l", "bar.%l", "baz.%l"],
        contest=self.contest)
    self.dataset = self.add_dataset(task=self.task)
    self.task.active_dataset = self.dataset

    self.timestamp = make_datetime()
    self.tornado_files = sentinel.tornado_files
    self.language_name = sentinel.language_name
    self.official = True
    self.received_files = sentinel.received_files
    self.files = {"foo.%l": FOO_CONTENT}
    # Multiple extensions, primary one doesn't start with a period.
    self.language = make_language("MockLanguage", ["mock.1", ".mock2"])
    self.digests = {"bar.%l": bytes_digest(BAR_CONTENT)}
    self.submit_local_copy_path = unique_unicode_id()

    patcher = patch('cms.db.Dataset.task_type_object',
                    new_callable=PropertyMock)
    self.task_type = patcher.start().return_value
    self.addCleanup(patcher.stop)
    self.task_type.ALLOW_PARTIAL_SUBMISSION = True

    patcher = patch(
        "cms.server.contest.submission.workflow.check_max_number")
    self.check_max_number = patcher.start()
    self.addCleanup(patcher.stop)
    self.check_max_number.return_value = True

    patcher = patch(
        "cms.server.contest.submission.workflow.check_min_interval")
    self.check_min_interval = patcher.start()
    self.addCleanup(patcher.stop)
    self.check_min_interval.return_value = True

    patcher = patch(
        "cms.server.contest.submission.workflow.extract_files_from_tornado")
    self.extract_files_from_tornado = patcher.start()
    self.addCleanup(patcher.stop)
    self.extract_files_from_tornado.return_value = self.received_files

    patcher = patch(
        "cms.server.contest.submission.workflow.match_files_and_language")
    self.match_files_and_language = patcher.start()
    self.addCleanup(patcher.stop)
    # Use side_effect to keep it working if we reassign the values.
    self.match_files_and_language.side_effect = \
        lambda *args, **kwargs: (self.files, self.language)

    patcher = patch(
        "cms.server.contest.submission.workflow"
        ".fetch_file_digests_from_previous_submission")
    self.fetch_file_digests_from_previous_submission = patcher.start()
    self.addCleanup(patcher.stop)
    # Use side_effect to keep it working if we reassign the value.
    self.fetch_file_digests_from_previous_submission.side_effect = \
        lambda *args, **kwargs: self.digests

    patcher = patch.object(config, "submit_local_copy", True)
    patcher.start()
    self.addCleanup(patcher.stop)

    patcher = patch.object(
        config, "submit_local_copy_path", self.submit_local_copy_path)
    patcher.start()
    self.addCleanup(patcher.stop)

    patcher = patch(
        "cms.server.contest.submission.workflow.store_local_copy")
    self.store_local_copy = patcher.start()
    self.addCleanup(patcher.stop)

    self.file_cacher = MagicMock()
    self.file_cacher.put_file_content.side_effect = \
        lambda content, _: bytes_digest(content)
# Needs to be first to allow for monkey patching the DB connection string.
from cms import config
from cmstestsuite.unit_tests.databasemixin import DatabaseMixin

from cms.db import PrintJob
from cms.server.contest.printing import accept_print_job, \
    UnacceptablePrintJob, PrintingDisabled
from cmscommon.datetime import make_datetime
from cmscommon.digest import bytes_digest


MockHTTPFile = namedtuple("FakeHTTPFile", ["filename", "body"])

FILE_CONTENT = b"this is a pdf file"
FILE_DIGEST = bytes_digest(FILE_CONTENT)


@patch.object(config, "printer", "not none")
class TestAcceptPrintJob(DatabaseMixin, unittest.TestCase):

    def setUp(self):
        super(TestAcceptPrintJob, self).setUp()

        self.file_cacher = Mock()
        self.file_cacher.put_file_content.return_value = FILE_DIGEST

        self.timestamp = make_datetime()
        self.contest = self.add_contest()
        self.user = self.add_user()
        self.participation = self.add_participation(
            contest=self.contest, user=self.user)